Socket
Socket
Sign inDemoInstall

@tus/server

Package Overview
Dependencies
Maintainers
3
Versions
19
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@tus/server - npm Package Compare versions

Comparing version 1.4.1 to 1.4.2

216

dist/constants.d.ts

@@ -1,89 +0,131 @@

export declare const REQUEST_METHODS: readonly ["POST", "HEAD", "PATCH", "OPTIONS", "DELETE"];
export declare const HEADERS: readonly ["Authorization", "Content-Type", "Location", "Tus-Extension", "Tus-Max-Size", "Tus-Resumable", "Tus-Version", "Upload-Concat", "Upload-Defer-Length", "Upload-Length", "Upload-Metadata", "Upload-Offset", "X-HTTP-Method-Override", "X-Requested-With", "X-Forwarded-Host", "X-Forwarded-Proto", "Forwarded"];
export declare const HEADERS_LOWERCASE: ("authorization" | "content-type" | "location" | "tus-extension" | "tus-max-size" | "tus-resumable" | "tus-version" | "upload-concat" | "upload-defer-length" | "upload-length" | "upload-metadata" | "upload-offset" | "x-http-method-override" | "x-requested-with" | "x-forwarded-host" | "x-forwarded-proto" | "forwarded")[];
export declare const ALLOWED_HEADERS: string;
export declare const ALLOWED_METHODS: string;
export declare const EXPOSED_HEADERS: string;
export declare const REQUEST_METHODS: readonly [
'POST',
'HEAD',
'PATCH',
'OPTIONS',
'DELETE'
]
export declare const HEADERS: readonly [
'Authorization',
'Content-Type',
'Location',
'Tus-Extension',
'Tus-Max-Size',
'Tus-Resumable',
'Tus-Version',
'Upload-Concat',
'Upload-Defer-Length',
'Upload-Length',
'Upload-Metadata',
'Upload-Offset',
'X-HTTP-Method-Override',
'X-Requested-With',
'X-Forwarded-Host',
'X-Forwarded-Proto',
'Forwarded'
]
export declare const HEADERS_LOWERCASE: (
| 'authorization'
| 'content-type'
| 'location'
| 'tus-extension'
| 'tus-max-size'
| 'tus-resumable'
| 'tus-version'
| 'upload-concat'
| 'upload-defer-length'
| 'upload-length'
| 'upload-metadata'
| 'upload-offset'
| 'x-http-method-override'
| 'x-requested-with'
| 'x-forwarded-host'
| 'x-forwarded-proto'
| 'forwarded'
)[]
export declare const ALLOWED_HEADERS: string
export declare const ALLOWED_METHODS: string
export declare const EXPOSED_HEADERS: string
export declare const ERRORS: {
readonly MISSING_OFFSET: {
readonly status_code: 403;
readonly body: "Upload-Offset header required\n";
};
readonly ABORTED: {
readonly status_code: 400;
readonly body: "Request aborted due to lock acquired";
};
readonly INVALID_TERMINATION: {
readonly status_code: 400;
readonly body: "Cannot terminate an already completed upload";
};
readonly ERR_LOCK_TIMEOUT: {
readonly status_code: 500;
readonly body: "failed to acquire lock before timeout";
};
readonly INVALID_CONTENT_TYPE: {
readonly status_code: 403;
readonly body: "Content-Type header required\n";
};
readonly FILE_NOT_FOUND: {
readonly status_code: 404;
readonly body: "The file for this url was not found\n";
};
readonly INVALID_OFFSET: {
readonly status_code: 409;
readonly body: "Upload-Offset conflict\n";
};
readonly FILE_NO_LONGER_EXISTS: {
readonly status_code: 410;
readonly body: "The file for this url no longer exists\n";
};
readonly ERR_SIZE_EXCEEDED: {
readonly status_code: 413;
readonly body: "upload's size exceeded\n";
};
readonly ERR_MAX_SIZE_EXCEEDED: {
readonly status_code: 413;
readonly body: "Maximum size exceeded\n";
};
readonly INVALID_LENGTH: {
readonly status_code: 400;
readonly body: "Upload-Length or Upload-Defer-Length header required\n";
};
readonly INVALID_METADATA: {
readonly status_code: 400;
readonly body: "Upload-Metadata is invalid. It MUST consist of one or more comma-separated key-value pairs. The key and value MUST be separated by a space. The key MUST NOT contain spaces and commas and MUST NOT be empty. The key SHOULD be ASCII encoded and the value MUST be Base64 encoded. All keys MUST be unique";
};
readonly UNKNOWN_ERROR: {
readonly status_code: 500;
readonly body: "Something went wrong with that request\n";
};
readonly FILE_WRITE_ERROR: {
readonly status_code: 500;
readonly body: "Something went wrong receiving the file\n";
};
readonly UNSUPPORTED_CONCATENATION_EXTENSION: {
readonly status_code: 501;
readonly body: "Concatenation extension is not (yet) supported. Disable parallel uploads in the tus client.\n";
};
readonly UNSUPPORTED_CREATION_DEFER_LENGTH_EXTENSION: {
readonly status_code: 501;
readonly body: "creation-defer-length extension is not (yet) supported.\n";
};
readonly UNSUPPORTED_EXPIRATION_EXTENSION: {
readonly status_code: 501;
readonly body: "expiration extension is not (yet) supported.\n";
};
};
export declare const POST_CREATE: "POST_CREATE";
export declare const POST_RECEIVE: "POST_RECEIVE";
export declare const POST_FINISH: "POST_FINISH";
export declare const POST_TERMINATE: "POST_TERMINATE";
readonly MISSING_OFFSET: {
readonly status_code: 403
readonly body: 'Upload-Offset header required\n'
}
readonly ABORTED: {
readonly status_code: 400
readonly body: 'Request aborted due to lock acquired'
}
readonly INVALID_TERMINATION: {
readonly status_code: 400
readonly body: 'Cannot terminate an already completed upload'
}
readonly ERR_LOCK_TIMEOUT: {
readonly status_code: 500
readonly body: 'failed to acquire lock before timeout'
}
readonly INVALID_CONTENT_TYPE: {
readonly status_code: 403
readonly body: 'Content-Type header required\n'
}
readonly FILE_NOT_FOUND: {
readonly status_code: 404
readonly body: 'The file for this url was not found\n'
}
readonly INVALID_OFFSET: {
readonly status_code: 409
readonly body: 'Upload-Offset conflict\n'
}
readonly FILE_NO_LONGER_EXISTS: {
readonly status_code: 410
readonly body: 'The file for this url no longer exists\n'
}
readonly ERR_SIZE_EXCEEDED: {
readonly status_code: 413
readonly body: "upload's size exceeded\n"
}
readonly ERR_MAX_SIZE_EXCEEDED: {
readonly status_code: 413
readonly body: 'Maximum size exceeded\n'
}
readonly INVALID_LENGTH: {
readonly status_code: 400
readonly body: 'Upload-Length or Upload-Defer-Length header required\n'
}
readonly INVALID_METADATA: {
readonly status_code: 400
readonly body: 'Upload-Metadata is invalid. It MUST consist of one or more comma-separated key-value pairs. The key and value MUST be separated by a space. The key MUST NOT contain spaces and commas and MUST NOT be empty. The key SHOULD be ASCII encoded and the value MUST be Base64 encoded. All keys MUST be unique'
}
readonly UNKNOWN_ERROR: {
readonly status_code: 500
readonly body: 'Something went wrong with that request\n'
}
readonly FILE_WRITE_ERROR: {
readonly status_code: 500
readonly body: 'Something went wrong receiving the file\n'
}
readonly UNSUPPORTED_CONCATENATION_EXTENSION: {
readonly status_code: 501
readonly body: 'Concatenation extension is not (yet) supported. Disable parallel uploads in the tus client.\n'
}
readonly UNSUPPORTED_CREATION_DEFER_LENGTH_EXTENSION: {
readonly status_code: 501
readonly body: 'creation-defer-length extension is not (yet) supported.\n'
}
readonly UNSUPPORTED_EXPIRATION_EXTENSION: {
readonly status_code: 501
readonly body: 'expiration extension is not (yet) supported.\n'
}
}
export declare const POST_CREATE: 'POST_CREATE'
export declare const POST_RECEIVE: 'POST_RECEIVE'
export declare const POST_FINISH: 'POST_FINISH'
export declare const POST_TERMINATE: 'POST_TERMINATE'
export declare const EVENTS: {
readonly POST_CREATE: "POST_CREATE";
readonly POST_RECEIVE: "POST_RECEIVE";
readonly POST_FINISH: "POST_FINISH";
readonly POST_TERMINATE: "POST_TERMINATE";
};
export declare const MAX_AGE: 86400;
export declare const TUS_RESUMABLE: "1.0.0";
export declare const TUS_VERSION: readonly ["1.0.0"];
readonly POST_CREATE: 'POST_CREATE'
readonly POST_RECEIVE: 'POST_RECEIVE'
readonly POST_FINISH: 'POST_FINISH'
readonly POST_TERMINATE: 'POST_TERMINATE'
}
export declare const MAX_AGE: 86400
export declare const TUS_RESUMABLE: '1.0.0'
export declare const TUS_VERSION: readonly ['1.0.0']

@@ -1,107 +0,127 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TUS_VERSION = exports.TUS_RESUMABLE = exports.MAX_AGE = exports.EVENTS = exports.POST_TERMINATE = exports.POST_FINISH = exports.POST_RECEIVE = exports.POST_CREATE = exports.ERRORS = exports.EXPOSED_HEADERS = exports.ALLOWED_METHODS = exports.ALLOWED_HEADERS = exports.HEADERS_LOWERCASE = exports.HEADERS = exports.REQUEST_METHODS = void 0;
exports.REQUEST_METHODS = ['POST', 'HEAD', 'PATCH', 'OPTIONS', 'DELETE'];
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})
exports.TUS_VERSION =
exports.TUS_RESUMABLE =
exports.MAX_AGE =
exports.EVENTS =
exports.POST_TERMINATE =
exports.POST_FINISH =
exports.POST_RECEIVE =
exports.POST_CREATE =
exports.ERRORS =
exports.EXPOSED_HEADERS =
exports.ALLOWED_METHODS =
exports.ALLOWED_HEADERS =
exports.HEADERS_LOWERCASE =
exports.HEADERS =
exports.REQUEST_METHODS =
void 0
exports.REQUEST_METHODS = ['POST', 'HEAD', 'PATCH', 'OPTIONS', 'DELETE']
exports.HEADERS = [
'Authorization',
'Content-Type',
'Location',
'Tus-Extension',
'Tus-Max-Size',
'Tus-Resumable',
'Tus-Version',
'Upload-Concat',
'Upload-Defer-Length',
'Upload-Length',
'Upload-Metadata',
'Upload-Offset',
'X-HTTP-Method-Override',
'X-Requested-With',
'X-Forwarded-Host',
'X-Forwarded-Proto',
'Forwarded',
];
'Authorization',
'Content-Type',
'Location',
'Tus-Extension',
'Tus-Max-Size',
'Tus-Resumable',
'Tus-Version',
'Upload-Concat',
'Upload-Defer-Length',
'Upload-Length',
'Upload-Metadata',
'Upload-Offset',
'X-HTTP-Method-Override',
'X-Requested-With',
'X-Forwarded-Host',
'X-Forwarded-Proto',
'Forwarded',
]
exports.HEADERS_LOWERCASE = exports.HEADERS.map((header) => {
return header.toLowerCase();
});
exports.ALLOWED_HEADERS = exports.HEADERS.join(', ');
exports.ALLOWED_METHODS = exports.REQUEST_METHODS.join(', ');
exports.EXPOSED_HEADERS = exports.HEADERS.join(', ');
return header.toLowerCase()
})
exports.ALLOWED_HEADERS = exports.HEADERS.join(', ')
exports.ALLOWED_METHODS = exports.REQUEST_METHODS.join(', ')
exports.EXPOSED_HEADERS = exports.HEADERS.join(', ')
exports.ERRORS = {
MISSING_OFFSET: {
status_code: 403,
body: 'Upload-Offset header required\n',
},
ABORTED: {
status_code: 400,
body: 'Request aborted due to lock acquired',
},
INVALID_TERMINATION: {
status_code: 400,
body: 'Cannot terminate an already completed upload',
},
ERR_LOCK_TIMEOUT: {
status_code: 500,
body: 'failed to acquire lock before timeout',
},
INVALID_CONTENT_TYPE: {
status_code: 403,
body: 'Content-Type header required\n',
},
FILE_NOT_FOUND: {
status_code: 404,
body: 'The file for this url was not found\n',
},
INVALID_OFFSET: {
status_code: 409,
body: 'Upload-Offset conflict\n',
},
FILE_NO_LONGER_EXISTS: {
status_code: 410,
body: 'The file for this url no longer exists\n',
},
ERR_SIZE_EXCEEDED: {
status_code: 413,
body: "upload's size exceeded\n",
},
ERR_MAX_SIZE_EXCEEDED: {
status_code: 413,
body: 'Maximum size exceeded\n',
},
INVALID_LENGTH: {
status_code: 400,
body: 'Upload-Length or Upload-Defer-Length header required\n',
},
INVALID_METADATA: {
status_code: 400,
body: 'Upload-Metadata is invalid. It MUST consist of one or more comma-separated key-value pairs. The key and value MUST be separated by a space. The key MUST NOT contain spaces and commas and MUST NOT be empty. The key SHOULD be ASCII encoded and the value MUST be Base64 encoded. All keys MUST be unique',
},
UNKNOWN_ERROR: {
status_code: 500,
body: 'Something went wrong with that request\n',
},
FILE_WRITE_ERROR: {
status_code: 500,
body: 'Something went wrong receiving the file\n',
},
UNSUPPORTED_CONCATENATION_EXTENSION: {
status_code: 501,
body: 'Concatenation extension is not (yet) supported. Disable parallel uploads in the tus client.\n',
},
UNSUPPORTED_CREATION_DEFER_LENGTH_EXTENSION: {
status_code: 501,
body: 'creation-defer-length extension is not (yet) supported.\n',
},
UNSUPPORTED_EXPIRATION_EXTENSION: {
status_code: 501,
body: 'expiration extension is not (yet) supported.\n',
},
};
exports.POST_CREATE = 'POST_CREATE';
exports.POST_RECEIVE = 'POST_RECEIVE';
exports.POST_FINISH = 'POST_FINISH';
exports.POST_TERMINATE = 'POST_TERMINATE';
exports.EVENTS = { POST_CREATE: exports.POST_CREATE, POST_RECEIVE: exports.POST_RECEIVE, POST_FINISH: exports.POST_FINISH, POST_TERMINATE: exports.POST_TERMINATE };
exports.MAX_AGE = 86400;
exports.TUS_RESUMABLE = '1.0.0';
exports.TUS_VERSION = ['1.0.0'];
MISSING_OFFSET: {
status_code: 403,
body: 'Upload-Offset header required\n',
},
ABORTED: {
status_code: 400,
body: 'Request aborted due to lock acquired',
},
INVALID_TERMINATION: {
status_code: 400,
body: 'Cannot terminate an already completed upload',
},
ERR_LOCK_TIMEOUT: {
status_code: 500,
body: 'failed to acquire lock before timeout',
},
INVALID_CONTENT_TYPE: {
status_code: 403,
body: 'Content-Type header required\n',
},
FILE_NOT_FOUND: {
status_code: 404,
body: 'The file for this url was not found\n',
},
INVALID_OFFSET: {
status_code: 409,
body: 'Upload-Offset conflict\n',
},
FILE_NO_LONGER_EXISTS: {
status_code: 410,
body: 'The file for this url no longer exists\n',
},
ERR_SIZE_EXCEEDED: {
status_code: 413,
body: "upload's size exceeded\n",
},
ERR_MAX_SIZE_EXCEEDED: {
status_code: 413,
body: 'Maximum size exceeded\n',
},
INVALID_LENGTH: {
status_code: 400,
body: 'Upload-Length or Upload-Defer-Length header required\n',
},
INVALID_METADATA: {
status_code: 400,
body: 'Upload-Metadata is invalid. It MUST consist of one or more comma-separated key-value pairs. The key and value MUST be separated by a space. The key MUST NOT contain spaces and commas and MUST NOT be empty. The key SHOULD be ASCII encoded and the value MUST be Base64 encoded. All keys MUST be unique',
},
UNKNOWN_ERROR: {
status_code: 500,
body: 'Something went wrong with that request\n',
},
FILE_WRITE_ERROR: {
status_code: 500,
body: 'Something went wrong receiving the file\n',
},
UNSUPPORTED_CONCATENATION_EXTENSION: {
status_code: 501,
body: 'Concatenation extension is not (yet) supported. Disable parallel uploads in the tus client.\n',
},
UNSUPPORTED_CREATION_DEFER_LENGTH_EXTENSION: {
status_code: 501,
body: 'creation-defer-length extension is not (yet) supported.\n',
},
UNSUPPORTED_EXPIRATION_EXTENSION: {
status_code: 501,
body: 'expiration extension is not (yet) supported.\n',
},
}
exports.POST_CREATE = 'POST_CREATE'
exports.POST_RECEIVE = 'POST_RECEIVE'
exports.POST_FINISH = 'POST_FINISH'
exports.POST_TERMINATE = 'POST_TERMINATE'
exports.EVENTS = {
POST_CREATE: exports.POST_CREATE,
POST_RECEIVE: exports.POST_RECEIVE,
POST_FINISH: exports.POST_FINISH,
POST_TERMINATE: exports.POST_TERMINATE,
}
exports.MAX_AGE = 86400
exports.TUS_RESUMABLE = '1.0.0'
exports.TUS_VERSION = ['1.0.0']

@@ -1,3 +0,3 @@

import { KvStore } from './Types';
import { Upload } from '../models';
import {KvStore} from './Types'
import {Upload} from '../models'
/**

@@ -8,9 +8,9 @@ * FileConfigstore writes the `Upload` JSON metadata to disk next the uploaded file itself.

export declare class FileKvStore<T = Upload> implements KvStore<T> {
directory: string;
constructor(path: string);
get(key: string): Promise<T | undefined>;
set(key: string, value: T): Promise<void>;
delete(key: string): Promise<void>;
list(): Promise<Array<string>>;
private resolve;
directory: string
constructor(path: string)
get(key: string): Promise<T | undefined>
set(key: string, value: T): Promise<void>
delete(key: string): Promise<void>
list(): Promise<Array<string>>
private resolve
}

@@ -1,9 +0,11 @@

"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.FileKvStore = void 0;
const promises_1 = __importDefault(require("node:fs/promises"));
const node_path_1 = __importDefault(require("node:path"));
'use strict'
var __importDefault =
(this && this.__importDefault) ||
function (mod) {
return mod && mod.__esModule ? mod : {default: mod}
}
Object.defineProperty(exports, '__esModule', {value: true})
exports.FileKvStore = void 0
const promises_1 = __importDefault(require('node:fs/promises'))
const node_path_1 = __importDefault(require('node:path'))
/**

@@ -14,31 +16,32 @@ * FileConfigstore writes the `Upload` JSON metadata to disk next the uploaded file itself.

class FileKvStore {
constructor(path) {
this.directory = path;
constructor(path) {
this.directory = path
}
async get(key) {
try {
const buffer = await promises_1.default.readFile(this.resolve(key), 'utf8')
return JSON.parse(buffer)
} catch {
return undefined
}
async get(key) {
try {
const buffer = await promises_1.default.readFile(this.resolve(key), 'utf8');
return JSON.parse(buffer);
}
catch {
return undefined;
}
}
async set(key, value) {
await promises_1.default.writeFile(this.resolve(key), JSON.stringify(value));
}
async delete(key) {
await promises_1.default.rm(this.resolve(key));
}
async list() {
const files = await promises_1.default.readdir(this.directory);
const sorted = files.sort((a, b) => a.localeCompare(b));
const name = (file) => node_path_1.default.basename(file, '.json');
// To only return tus file IDs we check if the file has a corresponding JSON info file
return sorted.filter((file, idx) => idx < sorted.length - 1 && name(file) === name(sorted[idx + 1]));
}
resolve(key) {
return node_path_1.default.resolve(this.directory, `${key}.json`);
}
}
async set(key, value) {
await promises_1.default.writeFile(this.resolve(key), JSON.stringify(value))
}
async delete(key) {
await promises_1.default.rm(this.resolve(key))
}
async list() {
const files = await promises_1.default.readdir(this.directory)
const sorted = files.sort((a, b) => a.localeCompare(b))
const name = (file) => node_path_1.default.basename(file, '.json')
// To only return tus file IDs we check if the file has a corresponding JSON info file
return sorted.filter(
(file, idx) => idx < sorted.length - 1 && name(file) === name(sorted[idx + 1])
)
}
resolve(key) {
return node_path_1.default.resolve(this.directory, `${key}.json`)
}
}
exports.FileKvStore = FileKvStore;
exports.FileKvStore = FileKvStore

@@ -1,4 +0,4 @@

export { FileKvStore } from './FileKvStore';
export { MemoryKvStore } from './MemoryKvStore';
export { RedisKvStore } from './RedisKvStore';
export { KvStore } from './Types';
export {FileKvStore} from './FileKvStore'
export {MemoryKvStore} from './MemoryKvStore'
export {RedisKvStore} from './RedisKvStore'
export {KvStore} from './Types'

@@ -1,9 +0,24 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RedisKvStore = exports.MemoryKvStore = exports.FileKvStore = void 0;
var FileKvStore_1 = require("./FileKvStore");
Object.defineProperty(exports, "FileKvStore", { enumerable: true, get: function () { return FileKvStore_1.FileKvStore; } });
var MemoryKvStore_1 = require("./MemoryKvStore");
Object.defineProperty(exports, "MemoryKvStore", { enumerable: true, get: function () { return MemoryKvStore_1.MemoryKvStore; } });
var RedisKvStore_1 = require("./RedisKvStore");
Object.defineProperty(exports, "RedisKvStore", { enumerable: true, get: function () { return RedisKvStore_1.RedisKvStore; } });
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})
exports.RedisKvStore = exports.MemoryKvStore = exports.FileKvStore = void 0
var FileKvStore_1 = require('./FileKvStore')
Object.defineProperty(exports, 'FileKvStore', {
enumerable: true,
get: function () {
return FileKvStore_1.FileKvStore
},
})
var MemoryKvStore_1 = require('./MemoryKvStore')
Object.defineProperty(exports, 'MemoryKvStore', {
enumerable: true,
get: function () {
return MemoryKvStore_1.MemoryKvStore
},
})
var RedisKvStore_1 = require('./RedisKvStore')
Object.defineProperty(exports, 'RedisKvStore', {
enumerable: true,
get: function () {
return RedisKvStore_1.RedisKvStore
},
})

@@ -1,3 +0,3 @@

import { Upload } from '../models';
import { KvStore } from './Types';
import {Upload} from '../models'
import {KvStore} from './Types'
/**

@@ -8,7 +8,7 @@ * Memory based configstore.

export declare class MemoryKvStore<T = Upload> implements KvStore<T> {
data: Map<string, T>;
get(key: string): Promise<T | undefined>;
set(key: string, value: T): Promise<void>;
delete(key: string): Promise<void>;
list(): Promise<Array<string>>;
data: Map<string, T>
get(key: string): Promise<T | undefined>
set(key: string, value: T): Promise<void>
delete(key: string): Promise<void>
list(): Promise<Array<string>>
}

@@ -1,4 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MemoryKvStore = void 0;
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})
exports.MemoryKvStore = void 0
/**

@@ -9,18 +9,18 @@ * Memory based configstore.

class MemoryKvStore {
constructor() {
this.data = new Map();
}
async get(key) {
return this.data.get(key);
}
async set(key, value) {
this.data.set(key, value);
}
async delete(key) {
this.data.delete(key);
}
async list() {
return [...this.data.keys()];
}
constructor() {
this.data = new Map()
}
async get(key) {
return this.data.get(key)
}
async set(key, value) {
this.data.set(key, value)
}
async delete(key) {
this.data.delete(key)
}
async list() {
return [...this.data.keys()]
}
}
exports.MemoryKvStore = MemoryKvStore;
exports.MemoryKvStore = MemoryKvStore

@@ -1,4 +0,4 @@

import { RedisClientType } from '@redis/client';
import { KvStore } from './Types';
import { Upload } from '../models';
import {RedisClientType} from '@redis/client'
import {KvStore} from './Types'
import {Upload} from '../models'
/**

@@ -10,11 +10,11 @@ * Redis based configstore.

export declare class RedisKvStore<T = Upload> implements KvStore<T> {
private redis;
private prefix;
constructor(redis: RedisClientType, prefix?: string);
get(key: string): Promise<T | undefined>;
set(key: string, value: T): Promise<void>;
delete(key: string): Promise<void>;
list(): Promise<Array<string>>;
private serializeValue;
private deserializeValue;
private redis
private prefix
constructor(redis: RedisClientType, prefix?: string)
get(key: string): Promise<T | undefined>
set(key: string, value: T): Promise<void>
delete(key: string): Promise<void>
list(): Promise<Array<string>>
private serializeValue
private deserializeValue
}

@@ -1,4 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RedisKvStore = void 0;
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})
exports.RedisKvStore = void 0
/**

@@ -10,27 +10,27 @@ * Redis based configstore.

class RedisKvStore {
constructor(redis, prefix = '') {
this.redis = redis;
this.prefix = prefix;
this.redis = redis;
this.prefix = prefix;
}
async get(key) {
return this.deserializeValue(await this.redis.get(this.prefix + key));
}
async set(key, value) {
await this.redis.set(this.prefix + key, this.serializeValue(value));
}
async delete(key) {
await this.redis.del(this.prefix + key);
}
async list() {
return this.redis.keys(this.prefix + '*');
}
serializeValue(value) {
return JSON.stringify(value);
}
deserializeValue(buffer) {
return buffer ? JSON.parse(buffer) : undefined;
}
constructor(redis, prefix = '') {
this.redis = redis
this.prefix = prefix
this.redis = redis
this.prefix = prefix
}
async get(key) {
return this.deserializeValue(await this.redis.get(this.prefix + key))
}
async set(key, value) {
await this.redis.set(this.prefix + key, this.serializeValue(value))
}
async delete(key) {
await this.redis.del(this.prefix + key)
}
async list() {
return this.redis.keys(this.prefix + '*')
}
serializeValue(value) {
return JSON.stringify(value)
}
deserializeValue(buffer) {
return buffer ? JSON.parse(buffer) : undefined
}
}
exports.RedisKvStore = RedisKvStore;
exports.RedisKvStore = RedisKvStore

@@ -1,7 +0,7 @@

import { Upload } from '../models';
import {Upload} from '../models'
export interface KvStore<T = Upload> {
get(key: string): Promise<T | undefined>;
set(key: string, value: T): Promise<void>;
delete(key: string): Promise<void>;
list?(): Promise<Array<string>>;
get(key: string): Promise<T | undefined>
set(key: string, value: T): Promise<void>
delete(key: string): Promise<void>
list?(): Promise<Array<string>>
}

@@ -1,2 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})

@@ -23,5 +23,5 @@ /// <reference types="node" />

export interface CancellationContext {
signal: AbortSignal;
abort: () => void;
cancel: () => void;
signal: AbortSignal
abort: () => void
cancel: () => void
}

@@ -1,2 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import EventEmitter from 'node:events';
import { Upload } from './Upload';
import type stream from 'node:stream';
import type http from 'node:http';
import EventEmitter from 'node:events'
import {Upload} from './Upload'
import type stream from 'node:stream'
import type http from 'node:http'
export declare class DataStore extends EventEmitter {
extensions: string[];
hasExtension(extension: string): boolean;
/**
* Called in POST requests. This method just creates a
* file, implementing the creation extension.
*
* http://tus.io/protocols/resumable-upload.html#creation
*/
create(file: Upload): Promise<Upload>;
/**
* Called in DELETE requests. This method just deletes the file from the store.
* http://tus.io/protocols/resumable-upload.html#termination
*/
remove(id: string): Promise<void>;
/**
* Called in PATCH requests. This method should write data
* to the DataStore file, and possibly implement the
* concatenation extension.
*
* http://tus.io/protocols/resumable-upload.html#concatenation
*/
write(stream: http.IncomingMessage | stream.Readable, id: string, offset: number): Promise<number>;
/**
* Called in HEAD requests. This method should return the bytes
* writen to the DataStore, for the client to know where to resume
* the upload.
*/
getUpload(id: string): Promise<Upload>;
/**
* Called in PATCH requests when upload length is known after being defered.
*/
declareUploadLength(id: string, upload_length: number): Promise<void>;
/**
* Returns number of expired uploads that were deleted.
*/
deleteExpired(): Promise<number>;
getExpiration(): number;
extensions: string[]
hasExtension(extension: string): boolean
/**
* Called in POST requests. This method just creates a
* file, implementing the creation extension.
*
* http://tus.io/protocols/resumable-upload.html#creation
*/
create(file: Upload): Promise<Upload>
/**
* Called in DELETE requests. This method just deletes the file from the store.
* http://tus.io/protocols/resumable-upload.html#termination
*/
remove(id: string): Promise<void>
/**
* Called in PATCH requests. This method should write data
* to the DataStore file, and possibly implement the
* concatenation extension.
*
* http://tus.io/protocols/resumable-upload.html#concatenation
*/
write(
stream: http.IncomingMessage | stream.Readable,
id: string,
offset: number
): Promise<number>
/**
* Called in HEAD requests. This method should return the bytes
* writen to the DataStore, for the client to know where to resume
* the upload.
*/
getUpload(id: string): Promise<Upload>
/**
* Called in PATCH requests when upload length is known after being defered.
*/
declareUploadLength(id: string, upload_length: number): Promise<void>
/**
* Returns number of expired uploads that were deleted.
*/
deleteExpired(): Promise<number>
getExpiration(): number
}

@@ -1,64 +0,66 @@

"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.DataStore = void 0;
'use strict'
var __importDefault =
(this && this.__importDefault) ||
function (mod) {
return mod && mod.__esModule ? mod : {default: mod}
}
Object.defineProperty(exports, '__esModule', {value: true})
exports.DataStore = void 0
/* eslint-disable @typescript-eslint/no-unused-vars */
const node_events_1 = __importDefault(require("node:events"));
const Upload_1 = require("./Upload");
const node_events_1 = __importDefault(require('node:events'))
const Upload_1 = require('./Upload')
class DataStore extends node_events_1.default {
constructor() {
super(...arguments);
this.extensions = [];
}
hasExtension(extension) {
return this.extensions && this.extensions.includes(extension);
}
/**
* Called in POST requests. This method just creates a
* file, implementing the creation extension.
*
* http://tus.io/protocols/resumable-upload.html#creation
*/
async create(file) {
return file;
}
/**
* Called in DELETE requests. This method just deletes the file from the store.
* http://tus.io/protocols/resumable-upload.html#termination
*/
async remove(id) { }
/**
* Called in PATCH requests. This method should write data
* to the DataStore file, and possibly implement the
* concatenation extension.
*
* http://tus.io/protocols/resumable-upload.html#concatenation
*/
async write(stream, id, offset) {
return 0;
}
/**
* Called in HEAD requests. This method should return the bytes
* writen to the DataStore, for the client to know where to resume
* the upload.
*/
async getUpload(id) {
return new Upload_1.Upload({ id, size: 0, offset: 0 });
}
/**
* Called in PATCH requests when upload length is known after being defered.
*/
async declareUploadLength(id, upload_length) { }
/**
* Returns number of expired uploads that were deleted.
*/
async deleteExpired() {
return 0;
}
getExpiration() {
return 0;
}
constructor() {
super(...arguments)
this.extensions = []
}
hasExtension(extension) {
return this.extensions && this.extensions.includes(extension)
}
/**
* Called in POST requests. This method just creates a
* file, implementing the creation extension.
*
* http://tus.io/protocols/resumable-upload.html#creation
*/
async create(file) {
return file
}
/**
* Called in DELETE requests. This method just deletes the file from the store.
* http://tus.io/protocols/resumable-upload.html#termination
*/
async remove(id) {}
/**
* Called in PATCH requests. This method should write data
* to the DataStore file, and possibly implement the
* concatenation extension.
*
* http://tus.io/protocols/resumable-upload.html#concatenation
*/
async write(stream, id, offset) {
return 0
}
/**
* Called in HEAD requests. This method should return the bytes
* writen to the DataStore, for the client to know where to resume
* the upload.
*/
async getUpload(id) {
return new Upload_1.Upload({id, size: 0, offset: 0})
}
/**
* Called in PATCH requests when upload length is known after being defered.
*/
async declareUploadLength(id, upload_length) {}
/**
* Returns number of expired uploads that were deleted.
*/
async deleteExpired() {
return 0
}
getExpiration() {
return 0
}
}
exports.DataStore = DataStore;
exports.DataStore = DataStore

@@ -1,7 +0,7 @@

export { DataStore } from './DataStore';
export * as Metadata from './Metadata';
export { StreamSplitter } from './StreamSplitter';
export { Uid } from './Uid';
export { Upload } from './Upload';
export { Locker, Lock, RequestRelease } from './Locker';
export { CancellationContext } from './Context';
export {DataStore} from './DataStore'
export * as Metadata from './Metadata'
export {StreamSplitter} from './StreamSplitter'
export {Uid} from './Uid'
export {Upload} from './Upload'
export {Locker, Lock, RequestRelease} from './Locker'
export {CancellationContext} from './Context'

@@ -1,35 +0,81 @@

"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Upload = exports.Uid = exports.StreamSplitter = exports.Metadata = exports.DataStore = void 0;
var DataStore_1 = require("./DataStore");
Object.defineProperty(exports, "DataStore", { enumerable: true, get: function () { return DataStore_1.DataStore; } });
exports.Metadata = __importStar(require("./Metadata"));
var StreamSplitter_1 = require("./StreamSplitter");
Object.defineProperty(exports, "StreamSplitter", { enumerable: true, get: function () { return StreamSplitter_1.StreamSplitter; } });
var Uid_1 = require("./Uid");
Object.defineProperty(exports, "Uid", { enumerable: true, get: function () { return Uid_1.Uid; } });
var Upload_1 = require("./Upload");
Object.defineProperty(exports, "Upload", { enumerable: true, get: function () { return Upload_1.Upload; } });
'use strict'
var __createBinding =
(this && this.__createBinding) ||
(Object.create
? function (o, m, k, k2) {
if (k2 === undefined) k2 = k
var desc = Object.getOwnPropertyDescriptor(m, k)
if (
!desc ||
('get' in desc ? !m.__esModule : desc.writable || desc.configurable)
) {
desc = {
enumerable: true,
get: function () {
return m[k]
},
}
}
Object.defineProperty(o, k2, desc)
}
: function (o, m, k, k2) {
if (k2 === undefined) k2 = k
o[k2] = m[k]
})
var __setModuleDefault =
(this && this.__setModuleDefault) ||
(Object.create
? function (o, v) {
Object.defineProperty(o, 'default', {enumerable: true, value: v})
}
: function (o, v) {
o['default'] = v
})
var __importStar =
(this && this.__importStar) ||
function (mod) {
if (mod && mod.__esModule) return mod
var result = {}
if (mod != null)
for (var k in mod)
if (k !== 'default' && Object.prototype.hasOwnProperty.call(mod, k))
__createBinding(result, mod, k)
__setModuleDefault(result, mod)
return result
}
Object.defineProperty(exports, '__esModule', {value: true})
exports.Upload =
exports.Uid =
exports.StreamSplitter =
exports.Metadata =
exports.DataStore =
void 0
var DataStore_1 = require('./DataStore')
Object.defineProperty(exports, 'DataStore', {
enumerable: true,
get: function () {
return DataStore_1.DataStore
},
})
exports.Metadata = __importStar(require('./Metadata'))
var StreamSplitter_1 = require('./StreamSplitter')
Object.defineProperty(exports, 'StreamSplitter', {
enumerable: true,
get: function () {
return StreamSplitter_1.StreamSplitter
},
})
var Uid_1 = require('./Uid')
Object.defineProperty(exports, 'Uid', {
enumerable: true,
get: function () {
return Uid_1.Uid
},
})
var Upload_1 = require('./Upload')
Object.defineProperty(exports, 'Upload', {
enumerable: true,
get: function () {
return Upload_1.Upload
},
})

@@ -1,2 +0,2 @@

export type RequestRelease = () => Promise<void> | void;
export type RequestRelease = () => Promise<void> | void
/**

@@ -6,3 +6,3 @@ * The Locker interface creates a Lock instance for a given resource identifier.

export interface Locker {
newLock(id: string): Lock;
newLock(id: string): Lock
}

@@ -28,4 +28,4 @@ /**

export interface Lock {
lock(cancelReq: RequestRelease): Promise<void>;
unlock(): Promise<void>;
lock(cancelReq: RequestRelease): Promise<void>
unlock(): Promise<void>
}

@@ -1,2 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})

@@ -1,2 +0,2 @@

import type { Upload } from '../models';
import type {Upload} from '../models'
/**

@@ -9,7 +9,7 @@ * Memory based configstore.

export declare class MemoryConfigstore {
data: Map<string, Upload>;
get(key: string): Upload | undefined;
set(key: string, value: Upload): void;
delete(key: string): Promise<boolean>;
get all(): Record<string, Upload>;
data: Map<string, Upload>
get(key: string): Upload | undefined
set(key: string, value: Upload): void
delete(key: string): Promise<boolean>
get all(): Record<string, Upload>
}

@@ -1,4 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MemoryConfigstore = void 0;
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})
exports.MemoryConfigstore = void 0
/**

@@ -11,18 +11,18 @@ * Memory based configstore.

class MemoryConfigstore {
constructor() {
this.data = new Map();
}
get(key) {
return this.data.get(key);
}
set(key, value) {
this.data.set(key, value);
}
async delete(key) {
return this.data.delete(key);
}
get all() {
return Object.fromEntries(this.data.entries());
}
constructor() {
this.data = new Map()
}
get(key) {
return this.data.get(key)
}
set(key, value) {
this.data.set(key, value)
}
async delete(key) {
return this.data.delete(key)
}
get all() {
return Object.fromEntries(this.data.entries())
}
}
exports.MemoryConfigstore = MemoryConfigstore;
exports.MemoryConfigstore = MemoryConfigstore

@@ -1,5 +0,5 @@

import { Upload } from './Upload';
export declare function validateKey(key: string): boolean;
export declare function validateValue(value: string): boolean;
export declare function parse(str?: string): Record<string, string | null>;
export declare function stringify(metadata: NonNullable<Upload['metadata']>): string;
import {Upload} from './Upload'
export declare function validateKey(key: string): boolean
export declare function validateValue(value: string): boolean
export declare function parse(str?: string): Record<string, string | null>
export declare function stringify(metadata: NonNullable<Upload['metadata']>): string

@@ -1,61 +0,64 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.stringify = exports.parse = exports.validateValue = exports.validateKey = void 0;
const ASCII_SPACE = ' '.codePointAt(0);
const ASCII_COMMA = ','.codePointAt(0);
const BASE64_REGEX = /^[\d+/A-Za-z]*={0,2}$/;
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})
exports.stringify = exports.parse = exports.validateValue = exports.validateKey = void 0
const ASCII_SPACE = ' '.codePointAt(0)
const ASCII_COMMA = ','.codePointAt(0)
const BASE64_REGEX = /^[\d+/A-Za-z]*={0,2}$/
function validateKey(key) {
if (key.length === 0) {
return false;
if (key.length === 0) {
return false
}
for (let i = 0; i < key.length; ++i) {
const charCodePoint = key.codePointAt(i)
if (
charCodePoint > 127 ||
charCodePoint === ASCII_SPACE ||
charCodePoint === ASCII_COMMA
) {
return false
}
for (let i = 0; i < key.length; ++i) {
const charCodePoint = key.codePointAt(i);
if (charCodePoint > 127 ||
charCodePoint === ASCII_SPACE ||
charCodePoint === ASCII_COMMA) {
return false;
}
}
return true;
}
return true
}
exports.validateKey = validateKey;
exports.validateKey = validateKey
function validateValue(value) {
if (value.length % 4 !== 0) {
return false;
}
return BASE64_REGEX.test(value);
if (value.length % 4 !== 0) {
return false
}
return BASE64_REGEX.test(value)
}
exports.validateValue = validateValue;
exports.validateValue = validateValue
function parse(str) {
const meta = {};
if (!str || str.trim().length === 0) {
throw new Error('Metadata string is not valid');
const meta = {}
if (!str || str.trim().length === 0) {
throw new Error('Metadata string is not valid')
}
for (const pair of str.split(',')) {
const tokens = pair.split(' ')
const [key, value] = tokens
if (
((tokens.length === 1 && validateKey(key)) ||
(tokens.length === 2 && validateKey(key) && validateValue(value))) &&
!(key in meta)
) {
const decodedValue = value ? Buffer.from(value, 'base64').toString('utf8') : null
meta[key] = decodedValue
} else {
throw new Error('Metadata string is not valid')
}
for (const pair of str.split(',')) {
const tokens = pair.split(' ');
const [key, value] = tokens;
if (((tokens.length === 1 && validateKey(key)) ||
(tokens.length === 2 && validateKey(key) && validateValue(value))) &&
!(key in meta)) {
const decodedValue = value ? Buffer.from(value, 'base64').toString('utf8') : null;
meta[key] = decodedValue;
}
else {
throw new Error('Metadata string is not valid');
}
}
return meta;
}
return meta
}
exports.parse = parse;
exports.parse = parse
function stringify(metadata) {
return Object.entries(metadata)
.map(([key, value]) => {
if (value === null) {
return key;
}
const encodedValue = Buffer.from(value, 'utf8').toString('base64');
return `${key} ${encodedValue}`;
return Object.entries(metadata)
.map(([key, value]) => {
if (value === null) {
return key
}
const encodedValue = Buffer.from(value, 'utf8').toString('base64')
return `${key} ${encodedValue}`
})
.join(',');
.join(',')
}
exports.stringify = stringify;
exports.stringify = stringify
/// <reference types="node" />
/// <reference types="node" />
import { Transform, TransformCallback } from 'stream';
import {Transform, TransformCallback} from 'stream'
export declare class MaxFileExceededError extends Error {
status_code: number;
body: string;
constructor();
status_code: number
body: string
constructor()
}
export declare class StreamLimiter extends Transform {
private maxSize;
private currentSize;
constructor(maxSize: number);
_transform(chunk: Buffer, encoding: BufferEncoding, callback: TransformCallback): void;
private maxSize
private currentSize
constructor(maxSize: number)
_transform(chunk: Buffer, encoding: BufferEncoding, callback: TransformCallback): void
}

@@ -1,32 +0,31 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.StreamLimiter = exports.MaxFileExceededError = void 0;
const stream_1 = require("stream");
const constants_1 = require("../constants");
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})
exports.StreamLimiter = exports.MaxFileExceededError = void 0
const stream_1 = require('stream')
const constants_1 = require('../constants')
// TODO: create HttpError and use it everywhere instead of throwing objects
class MaxFileExceededError extends Error {
constructor() {
super(constants_1.ERRORS.ERR_MAX_SIZE_EXCEEDED.body);
this.status_code = constants_1.ERRORS.ERR_MAX_SIZE_EXCEEDED.status_code;
this.body = constants_1.ERRORS.ERR_MAX_SIZE_EXCEEDED.body;
Object.setPrototypeOf(this, MaxFileExceededError.prototype);
}
constructor() {
super(constants_1.ERRORS.ERR_MAX_SIZE_EXCEEDED.body)
this.status_code = constants_1.ERRORS.ERR_MAX_SIZE_EXCEEDED.status_code
this.body = constants_1.ERRORS.ERR_MAX_SIZE_EXCEEDED.body
Object.setPrototypeOf(this, MaxFileExceededError.prototype)
}
}
exports.MaxFileExceededError = MaxFileExceededError;
exports.MaxFileExceededError = MaxFileExceededError
class StreamLimiter extends stream_1.Transform {
constructor(maxSize) {
super();
this.currentSize = 0;
this.maxSize = maxSize;
constructor(maxSize) {
super()
this.currentSize = 0
this.maxSize = maxSize
}
_transform(chunk, encoding, callback) {
this.currentSize += chunk.length
if (this.currentSize > this.maxSize) {
callback(new MaxFileExceededError())
} else {
callback(null, chunk)
}
_transform(chunk, encoding, callback) {
this.currentSize += chunk.length;
if (this.currentSize > this.maxSize) {
callback(new MaxFileExceededError());
}
else {
callback(null, chunk);
}
}
}
}
exports.StreamLimiter = StreamLimiter;
exports.StreamLimiter = StreamLimiter
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import fs from 'node:fs/promises';
import stream from 'node:stream';
import fs from 'node:fs/promises'
import stream from 'node:stream'
type Options = {
chunkSize: number;
directory: string;
};
type Callback = (error: Error | null) => void;
chunkSize: number
directory: string
}
type Callback = (error: Error | null) => void
export declare class StreamSplitter extends stream.Writable {
directory: Options['directory'];
currentChunkPath: string | null;
currentChunkSize: number;
fileHandle: fs.FileHandle | null;
filenameTemplate: string;
chunkSize: Options['chunkSize'];
part: number;
constructor({ chunkSize, directory }: Options, options?: stream.WritableOptions);
_write(chunk: Buffer, _: BufferEncoding, callback: Callback): Promise<void>;
_final(callback: Callback): Promise<void>;
_writeChunk(chunk: Buffer): Promise<void>;
_handleError(): Promise<void>;
_finishChunk(): Promise<void>;
_newChunk(): Promise<void>;
directory: Options['directory']
currentChunkPath: string | null
currentChunkSize: number
fileHandle: fs.FileHandle | null
filenameTemplate: string
chunkSize: Options['chunkSize']
part: number
constructor({chunkSize, directory}: Options, options?: stream.WritableOptions)
_write(chunk: Buffer, _: BufferEncoding, callback: Callback): Promise<void>
_final(callback: Callback): Promise<void>
_writeChunk(chunk: Buffer): Promise<void>
_handleError(): Promise<void>
_finishChunk(): Promise<void>
_newChunk(): Promise<void>
}
export {};
export {}

@@ -1,104 +0,107 @@

"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.StreamSplitter = void 0;
'use strict'
var __importDefault =
(this && this.__importDefault) ||
function (mod) {
return mod && mod.__esModule ? mod : {default: mod}
}
Object.defineProperty(exports, '__esModule', {value: true})
exports.StreamSplitter = void 0
/* global BufferEncoding */
const node_crypto_1 = __importDefault(require("node:crypto"));
const promises_1 = __importDefault(require("node:fs/promises"));
const node_path_1 = __importDefault(require("node:path"));
const node_stream_1 = __importDefault(require("node:stream"));
const node_crypto_1 = __importDefault(require('node:crypto'))
const promises_1 = __importDefault(require('node:fs/promises'))
const node_path_1 = __importDefault(require('node:path'))
const node_stream_1 = __importDefault(require('node:stream'))
function randomString(size) {
return node_crypto_1.default.randomBytes(size).toString('base64url').slice(0, size);
return node_crypto_1.default.randomBytes(size).toString('base64url').slice(0, size)
}
class StreamSplitter extends node_stream_1.default.Writable {
constructor({ chunkSize, directory }, options) {
super(options);
this.chunkSize = chunkSize;
this.currentChunkPath = null;
this.currentChunkSize = 0;
this.fileHandle = null;
this.directory = directory;
this.filenameTemplate = randomString(10);
this.part = 0;
this.on('error', this._handleError.bind(this));
constructor({chunkSize, directory}, options) {
super(options)
this.chunkSize = chunkSize
this.currentChunkPath = null
this.currentChunkSize = 0
this.fileHandle = null
this.directory = directory
this.filenameTemplate = randomString(10)
this.part = 0
this.on('error', this._handleError.bind(this))
}
async _write(chunk, _, callback) {
try {
// In order to start writing a chunk, we must first create
// a file system reference for it
if (this.fileHandle === null) {
await this._newChunk()
}
let overflow = this.currentChunkSize + chunk.length - this.chunkSize
// The current chunk will be more than our defined part size if we would
// write all of it to disk.
while (overflow > 0) {
// Only write to disk the up to our defined part size.
await this._writeChunk(chunk.subarray(0, chunk.length - overflow))
await this._finishChunk()
// We still have some overflow left, so we write it to a new chunk.
await this._newChunk()
chunk = chunk.subarray(chunk.length - overflow, chunk.length)
overflow = this.currentChunkSize + chunk.length - this.chunkSize
}
// The chunk is smaller than our defined part size so we can just write it to disk.
await this._writeChunk(chunk)
callback(null)
} catch (error) {
callback(error)
}
async _write(chunk, _, callback) {
try {
// In order to start writing a chunk, we must first create
// a file system reference for it
if (this.fileHandle === null) {
await this._newChunk();
}
let overflow = this.currentChunkSize + chunk.length - this.chunkSize;
// The current chunk will be more than our defined part size if we would
// write all of it to disk.
while (overflow > 0) {
// Only write to disk the up to our defined part size.
await this._writeChunk(chunk.subarray(0, chunk.length - overflow));
await this._finishChunk();
// We still have some overflow left, so we write it to a new chunk.
await this._newChunk();
chunk = chunk.subarray(chunk.length - overflow, chunk.length);
overflow = this.currentChunkSize + chunk.length - this.chunkSize;
}
// The chunk is smaller than our defined part size so we can just write it to disk.
await this._writeChunk(chunk);
callback(null);
}
catch (error) {
callback(error);
}
}
async _final(callback) {
if (this.fileHandle === null) {
callback(null)
return
}
async _final(callback) {
if (this.fileHandle === null) {
callback(null);
return;
}
try {
await this._finishChunk();
callback(null);
}
catch (error) {
callback(error);
}
try {
await this._finishChunk()
callback(null)
} catch (error) {
callback(error)
}
async _writeChunk(chunk) {
await promises_1.default.appendFile(this.fileHandle, chunk);
this.currentChunkSize += chunk.length;
}
async _writeChunk(chunk) {
await promises_1.default.appendFile(this.fileHandle, chunk)
this.currentChunkSize += chunk.length
}
async _handleError() {
// If there was an error, we want to stop allowing to write on disk as we cannot advance further.
// At this point the chunk might be incomplete advancing further might cause data loss.
// some scenarios where this might happen is if the disk is full or if we abort the stream midway.
if (this.fileHandle === null) {
return
}
async _handleError() {
// If there was an error, we want to stop allowing to write on disk as we cannot advance further.
// At this point the chunk might be incomplete advancing further might cause data loss.
// some scenarios where this might happen is if the disk is full or if we abort the stream midway.
if (this.fileHandle === null) {
return;
}
await this.fileHandle.close();
this.currentChunkPath = null;
this.fileHandle = null;
await this.fileHandle.close()
this.currentChunkPath = null
this.fileHandle = null
}
async _finishChunk() {
if (this.fileHandle === null) {
return
}
async _finishChunk() {
if (this.fileHandle === null) {
return;
}
await this.fileHandle.close();
this.emit('chunkFinished', {
path: this.currentChunkPath,
size: this.currentChunkSize,
});
this.currentChunkPath = null;
this.fileHandle = null;
this.currentChunkSize = 0;
this.part += 1;
}
async _newChunk() {
this.currentChunkPath = node_path_1.default.join(this.directory, `${this.filenameTemplate}-${this.part}`);
const fileHandle = await promises_1.default.open(this.currentChunkPath, 'w');
this.emit('chunkStarted', this.currentChunkPath);
this.currentChunkSize = 0;
this.fileHandle = fileHandle;
}
await this.fileHandle.close()
this.emit('chunkFinished', {
path: this.currentChunkPath,
size: this.currentChunkSize,
})
this.currentChunkPath = null
this.fileHandle = null
this.currentChunkSize = 0
this.part += 1
}
async _newChunk() {
this.currentChunkPath = node_path_1.default.join(
this.directory,
`${this.filenameTemplate}-${this.part}`
)
const fileHandle = await promises_1.default.open(this.currentChunkPath, 'w')
this.emit('chunkStarted', this.currentChunkPath)
this.currentChunkSize = 0
this.fileHandle = fileHandle
}
}
exports.StreamSplitter = StreamSplitter;
exports.StreamSplitter = StreamSplitter
export declare const Uid: {
rand(): string;
};
rand(): string
}

@@ -1,12 +0,14 @@

"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Uid = void 0;
const node_crypto_1 = __importDefault(require("node:crypto"));
'use strict'
var __importDefault =
(this && this.__importDefault) ||
function (mod) {
return mod && mod.__esModule ? mod : {default: mod}
}
Object.defineProperty(exports, '__esModule', {value: true})
exports.Uid = void 0
const node_crypto_1 = __importDefault(require('node:crypto'))
exports.Uid = {
rand() {
return node_crypto_1.default.randomBytes(16).toString('hex');
},
};
rand() {
return node_crypto_1.default.randomBytes(16).toString('hex')
},
}
type TUpload = {
id: string;
size?: number;
offset: number;
metadata?: Record<string, string | null>;
creation_date?: string;
};
id: string
size?: number
offset: number
metadata?: Record<string, string | null>
creation_date?: string
}
export declare class Upload {
id: TUpload['id'];
metadata: TUpload['metadata'];
size?: TUpload['size'];
offset: TUpload['offset'];
creation_date: TUpload['creation_date'];
constructor(upload: TUpload);
get sizeIsDeferred(): boolean;
id: TUpload['id']
metadata: TUpload['metadata']
size?: TUpload['size']
offset: TUpload['offset']
creation_date: TUpload['creation_date']
constructor(upload: TUpload)
get sizeIsDeferred(): boolean
}
export {};
export {}

@@ -1,19 +0,19 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Upload = void 0;
'use strict'
Object.defineProperty(exports, '__esModule', {value: true})
exports.Upload = void 0
class Upload {
constructor(upload) {
if (!upload.id) {
throw new Error('[File] constructor must be given an ID');
}
this.id = upload.id;
this.size = upload.size;
this.offset = upload.offset;
this.metadata = upload.metadata;
this.creation_date = upload.creation_date ?? new Date().toISOString();
constructor(upload) {
if (!upload.id) {
throw new Error('[File] constructor must be given an ID')
}
get sizeIsDeferred() {
return this.size === undefined;
}
this.id = upload.id
this.size = upload.size
this.offset = upload.offset
this.metadata = upload.metadata
this.creation_date = upload.creation_date ?? new Date().toISOString()
}
get sizeIsDeferred() {
return this.size === undefined
}
}
exports.Upload = Upload;
exports.Upload = Upload
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@tus/server",
"version": "1.4.1",
"version": "1.4.2",
"description": "Tus resumable upload protocol in Node.js",

@@ -31,10 +31,10 @@ "main": "dist/index.js",

"@types/node": "^20.11.5",
"@types/sinon": "^10.0.20",
"@types/sinon": "^17.0.3",
"@types/supertest": "^2.0.16",
"eslint": "^8.56.0",
"eslint": "^8.57.0",
"eslint-config-custom": "^0.0.0",
"mocha": "^10.2.0",
"mocha": "^10.4.0",
"node-mocks-http": "^1.14.1",
"should": "^13.2.3",
"sinon": "^15.2.0",
"sinon": "^17.0.1",
"supertest": "^6.3.4",

@@ -41,0 +41,0 @@ "ts-node": "^10.9.2",

# `@tus/server`
> 👉 **Note**: since 1.0.0 packages are split and published under the `@tus` scope.
> The old package, `tus-node-server`, is considered unstable and will only receive security fixes.
> Make sure to use the new package.
> 👉 **Note**: since 1.0.0 packages are split and published under the `@tus` scope. The
> old package, `tus-node-server`, is considered unstable and will only receive security
> fixes. Make sure to use the new package.

@@ -21,2 +21,3 @@ ## Contents

- [Example: store files in custom nested directories](#example-store-files-in-custom-nested-directories)
- [Example: use with Nginx](#example-use-with-nginx)
- [Types](#types)

@@ -54,4 +55,5 @@ - [Compatibility](#compatibility)

This package exports `Server` and all [`constants`][], [`types`][], [`models`][], and [`kvstores`][]. There is no default export.
You should only need the `Server`, `EVENTS`, and KV store exports.
This package exports `Server` and all [`constants`][], [`types`][], [`models`][], and
[`kvstores`][]. There is no default export. You should only need the `Server`, `EVENTS`,
and KV store exports.

@@ -68,4 +70,5 @@ ### `new Server(options)`

Max file size (in bytes) allowed when uploading (`number` | (`(req, id: string | null) => Promise<number> | number`)).
When providing a function during the OPTIONS request the id will be `null`.
Max file size (in bytes) allowed when uploading (`number` |
(`(req, id: string | null) => Promise<number> | number`)). When providing a function
during the OPTIONS request the id will be `null`.

@@ -78,3 +81,4 @@ #### `options.relativeLocation`

Allow `Forwarded`, `X-Forwarded-Proto`, and `X-Forwarded-Host` headers to override the `Location` header returned by the server (`boolean`).
Allow `Forwarded`, `X-Forwarded-Proto`, and `X-Forwarded-Host` headers to override the
`Location` header returned by the server (`boolean`).

@@ -89,21 +93,22 @@ #### `options.allowedHeaders`

This only changes the upload URL (`Location` header).
If you also want to change the file name in storage use `namingFunction`.
Returning `prefix-1234` in `namingFunction` means the `id` argument in `generateUrl` is `prefix-1234`.
This only changes the upload URL (`Location` header). If you also want to change the file
name in storage use `namingFunction`. Returning `prefix-1234` in `namingFunction` means
the `id` argument in `generateUrl` is `prefix-1234`.
`@tus/server` expects everything in the path after the last `/` to be the upload id.
If you change that you have to use `getFileIdFromRequest` as well.
`@tus/server` expects everything in the path after the last `/` to be the upload id. If
you change that you have to use `getFileIdFromRequest` as well.
A common use case of this function and `getFileIdFromRequest` is to base64 encode a complex id into the URL.
A common use case of this function and `getFileIdFromRequest` is to base64 encode a
complex id into the URL.
> [!TIP]
> Checkout the example how to [store files in custom nested directories](#example-store-files-in-custom-nested-directories).
Checkout the example how to
[store files in custom nested directories](#example-store-files-in-custom-nested-directories).
#### `options.getFileIdFromRequest`
Control how the Upload-ID is extracted from the request (`(req) => string | void`)
By default, it expects everything in the path after the last `/` to be the upload id.
Control how the Upload-ID is extracted from the request (`(req) => string | void`) By
default, it expects everything in the path after the last `/` to be the upload id.
> [!TIP]
> Checkout the example how to [store files in custom nested directories](#example-store-files-in-custom-nested-directories).
Checkout the example how to
[store files in custom nested directories](#example-store-files-in-custom-nested-directories).

@@ -114,5 +119,5 @@ #### `options.namingFunction`

In `@tus/server`, the upload ID in the URL is the same as the file name.
This means using a custom `namingFunction` will return a different `Location` header for uploading
and result in a different file name in storage.
In `@tus/server`, the upload ID in the URL is the same as the file name. This means using
a custom `namingFunction` will return a different `Location` header for uploading and
result in a different file name in storage.

@@ -122,15 +127,29 @@ It is important to make these unique to prevent data loss. Only use it if you need to.

> [!TIP]
> Checkout the example how to [store files in custom nested directories](#example-store-files-in-custom-nested-directories).
Checkout the example how to
[store files in custom nested directories](#example-store-files-in-custom-nested-directories).
#### `disableTerminationForFinishedUploads`
#### `options.locker`
Disallow the [termination extension](https://tus.io/protocols/resumable-upload#termination) for finished uploads. (`boolean`)
The locker interface to manage locks for exclusive access control over resources
([`Locker`][]).
By default it uses an in-memory locker ([`MemoryLocker`][]) for safe concurrent access to
uploads using a single server. When running multiple instances of the server, you need to
provide a locker implementation that is shared between all instances (such as a
`RedisLocker`).
#### `options.disableTerminationForFinishedUploads`
Disallow the
[termination extension](https://tus.io/protocols/resumable-upload#termination) for
finished uploads. (`boolean`)
#### `options.onUploadCreate`
`onUploadCreate` will be invoked before a new upload is created. (`(req, res, upload) => Promise<res>`).
`onUploadCreate` will be invoked before a new upload is created.
(`(req, res, upload) => Promise<res>`).
If the function returns the (modified) response, the upload will be created.
You can `throw` an Object and the HTTP request will be aborted with the provided `body` and `status_code` (or their fallbacks).
If the function returns the (modified) response, the upload will be created. You can
`throw` an Object and the HTTP request will be aborted with the provided `body` and
`status_code` (or their fallbacks).

@@ -141,6 +160,8 @@ This can be used to implement validation of upload metadata or add headers.

`onUploadFinish` will be invoked after an upload is completed but before a response is returned to the client (`(req, res, upload) => Promise<res>`).
`onUploadFinish` will be invoked after an upload is completed but before a response is
returned to the client (`(req, res, upload) => Promise<res>`).
If the function returns the (modified) response, the upload will finish.
You can `throw` an Object and the HTTP request will be aborted with the provided `body` and `status_code` (or their fallbacks).
If the function returns the (modified) response, the upload will finish. You can `throw`
an Object and the HTTP request will be aborted with the provided `body` and `status_code`
(or their fallbacks).

@@ -151,16 +172,19 @@ This can be used to implement post-processing validation.

`onIncomingRequest` is a middleware function invoked before all handlers (`(req, res) => Promise<void>`)
`onIncomingRequest` is a middleware function invoked before all handlers
(`(req, res) => Promise<void>`)
This can be used for things like access control.
You can `throw` an Object and the HTTP request will be aborted with the provided `body` and `status_code` (or their fallbacks).
This can be used for things like access control. You can `throw` an Object and the HTTP
request will be aborted with the provided `body` and `status_code` (or their fallbacks).
#### `options.onResponseError`
`onResponseError` will be invoked when an error response is about to be sent by the server.
you use this function to map custom errors to tus errors or for custom observability. (`(req, res, err) => Promise<{status_code: number; body: string} | void> | {status_code: number; body: string} | void`)
`onResponseError` will be invoked when an error response is about to be sent by the
server. You can use this function to map custom errors to tus errors or for custom
observability.
(`(req, res, err) => Promise<{status_code: number; body: string} | void> | {status_code: number; body: string} | void`)
#### `server.handle(req, res)`
The main server request handler invoked on every request.
You only need to use this when you integrate tus into an existing Node.js server.
The main server request handler invoked on every request. You only need to use this when
you integrate tus into an existing Node.js server.

@@ -189,7 +213,9 @@ #### `server.get(req, res)`

Start the tus server. Supported arguments are the same as [`server.listen()`](https://nodejs.org/api/net.html#serverlisten) from `node:net`.
Start the tus server. Supported arguments are the same as
[`server.listen()`](https://nodejs.org/api/net.html#serverlisten) from `node:net`.
#### `server.cleanUpExpiredUploads()`
Clean up expired uploads. Your chosen datastore must support the [expiration][] extension for this to work.
Clean up expired uploads. Your chosen datastore must support the [expiration][] extension
for this to work.

@@ -244,4 +270,4 @@ ### `EVENTS`

All stores (as in the `datastore` option) save two files,
the uploaded file and an info file with metadata, usually adjacent to each other.
All stores (as in the `datastore` option) save two files, the uploaded file and an info
file with metadata, usually adjacent to each other.

@@ -392,3 +418,4 @@ In `@tus/file-store` the `FileKvStore` is used to persist upload info but the KV stores

Attach the tus server handler to a Next.js route handler in an [optional catch-all route file](https://nextjs.org/docs/routing/dynamic-routes#optional-catch-all-routes)
Attach the tus server handler to a Next.js route handler in an
[optional catch-all route file](https://nextjs.org/docs/routing/dynamic-routes#optional-catch-all-routes)

@@ -446,4 +473,4 @@ `/pages/api/upload/[[...file]].ts`

Access control is opinionated and can be done in different ways.
This example is pseudo-code for what it could look like with JSON Web Tokens.
Access control is opinionated and can be done in different ways. This example is
pseudo-code for what it could look like with JSON Web Tokens.

@@ -479,8 +506,8 @@ ```js

You can use `namingFunction` to change the name of the stored file.
If you’re only adding a prefix or suffix without a slash (`/`),
you don’t need to implement `generateUrl` and `getFileIdFromRequest`.
You can use `namingFunction` to change the name of the stored file. If you’re only adding
a prefix or suffix without a slash (`/`), you don’t need to implement `generateUrl` and
`getFileIdFromRequest`.
Adding a slash means you create a new directory, for which you need
to implement all three functions as we need to encode the id with base64 into the URL.
Adding a slash means you create a new directory, for which you need to implement all three
functions as we need to encode the id with base64 into the URL.

@@ -515,2 +542,45 @@ ```js

### Example: use with Nginx
In some cases, it is necessary to run behind a reverse proxy (Nginx, HAProxy etc), for
example for TLS termination or serving multiple services on the same hostname. To properly
do this, `@tus/server` and the proxy must be configured appropriately.
Firstly, you must set `respectForwardedHeaders` indicating that a reverse proxy is in use
and that it should respect the `X-Forwarded-*`/`Forwarded` headers:
```js
const {Server} = require('@tus/server')
// ...
const server = new Server({
// ..
respectForwardedHeaders: true,
})
```
Secondly, some of the reverse proxy's settings should be adjusted. The exact steps depend
on the used proxy, but the following points should be checked:
- _Disable request buffering._ Nginx, for example, reads the entire incoming HTTP request,
including its body, before sending it to the backend, by default. This behavior defeats
the purpose of resumability where an upload is processed and saved while it's being
transferred, allowing it to be resumed. Therefore, such a feature must be disabled.
- _Adjust maximum request size._ Some proxies have default values for how big a request
may be in order to protect your services. Be sure to check these settings to match the
requirements of your application.
- _Forward hostname and scheme._ If the proxy rewrites the request URL, the tusd server
does not know the original URL which was used to reach the proxy. This behavior can lead
to situations where tusd returns a redirect to a URL which cannot be reached by the
client. To avoid this issue, you can explicitly tell tusd which hostname and scheme to
use by supplying the `X-Forwarded-Host` and `X-Forwarded-Proto` headers. Configure the
proxy to set these headers to the original hostname and protocol when forwarding
requests to tusd.
You can also take a look at the
[Nginx configuration from tusd](https://github.com/tus/tusd/blob/main/examples/nginx.conf)
which is used to power the [tusd.tusdemo.net](https://tusd.tusdemo.net) instance.
## Types

@@ -526,7 +596,9 @@

See [`contributing.md`](https://github.com/tus/tus-node-server/blob/main/.github/contributing.md).
See
[`contributing.md`](https://github.com/tus/tus-node-server/blob/main/.github/contributing.md).
## License
[MIT](https://github.com/tus/tus-node-server/blob/master/license) © [tus](https://github.com/tus)
[MIT](https://github.com/tus/tus-node-server/blob/master/license) ©
[tus](https://github.com/tus)

@@ -536,6 +608,13 @@ [`@tus/file-store`]: https://github.com/tus/tus-node-server/tree/main/packages/file-store

[`@tus/gcs-store`]: https://github.com/tus/tus-node-server/tree/main/packages/gcs-store
[`constants`]: https://github.com/tus/tus-node-server/blob/main/packages/server/src/constants.ts
[`constants`]:
https://github.com/tus/tus-node-server/blob/main/packages/utils/src/constants.ts
[`types`]: https://github.com/tus/tus-node-server/blob/main/packages/server/src/types.ts
[`models`]: https://github.com/tus/tus-node-server/blob/main/packages/server/src/models/index.ts
[`kvstores`]: https://github.com/tus/tus-node-server/blob/main/packages/server/src/kvstores/index.ts
[`models`]:
https://github.com/tus/tus-node-server/blob/main/packages/utils/src/models/index.ts
[`kvstores`]:
https://github.com/tus/tus-node-server/blob/main/packages/utils/src/kvstores/index.ts
[expiration]: https://tus.io/protocols/resumable-upload.html#expiration
[`Locker`]:
https://github.com/tus/tus-node-server/blob/main/packages/utils/src/models/Locker.ts
[`MemoryLocker`]:
https://github.com/tus/tus-node-server/blob/main/packages/server/src/lockers/MemoryLocker.ts
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc