Socket
Socket
Sign in · Demo · Install

azurite

Package Overview
Dependencies
250
Maintainers
4
Versions
153
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 3.14.0 to 3.14.2

dist/src/table/middleware/PreflightMiddlewareFactory.js

25

ChangeLog.md

@@ -7,2 +7,27 @@ # Changelog

## 2021.9 Version 3.14.2
Blob:
- Supported rscc, rscd, rsce, rscl, rsct query parameters in SAS tokens.
- Fixed Blob_Download API by adding header `x-ms-creation-time` in responses.
Table:
- Added getServiceProperties response.
- Added setServiceProperties response.
- Fixed paged queries across partitions.
## 2021.7 Version 3.14.1
General:
- Added support for generating standalone azurite.exe.
Table:
- Correctly returning the results of paginated queries.
- Added filter support for Query Tables operation.
- Corrected tokenization of queries in table storage.
## 2021.7 Version 3.14.0

@@ -9,0 +34,0 @@

2

dist/src/blob/authentication/BlobSharedKeyAuthenticator.js

@@ -46,3 +46,3 @@ "use strict";

this.getCanonicalizedHeadersString(req) +
this.getCanonicalizedResourceString(req, account, blobContext.authenticationPath);
this.getCanonicalizedResourceString(req, account, context.context.isSecondary ? blobContext.authenticationPath + "-secondary" : blobContext.authenticationPath);
this.logger.info(`BlobSharedKeyAuthenticator:validate() [STRING TO SIGN]:${JSON.stringify(stringToSign)}`, blobContext.contextId);

@@ -49,0 +49,0 @@ const signature1 = utils_1.computeHMACSHA256(stringToSign, accountProperties.key1);

@@ -75,2 +75,3 @@ "use strict";

async getProperties(options, context) {
var _a, _b, _c, _d, _e;
const blobCtx = new BlobStorageContext_1.default(context);

@@ -95,5 +96,5 @@ const account = blobCtx.account;

}
: Object.assign({ statusCode: 200, metadata: res.metadata, isIncrementalCopy: res.properties.incrementalCopy, eTag: res.properties.etag, requestId: context.contextId, version: constants_1.BLOB_API_VERSION, date: context.startTime, acceptRanges: "bytes", blobCommittedBlockCount: res.properties.blobType === Models.BlobType.AppendBlob
: Object.assign(Object.assign({ statusCode: 200, metadata: res.metadata, isIncrementalCopy: res.properties.incrementalCopy, eTag: res.properties.etag, requestId: context.contextId, version: constants_1.BLOB_API_VERSION, date: context.startTime, acceptRanges: "bytes", blobCommittedBlockCount: res.properties.blobType === Models.BlobType.AppendBlob
? res.blobCommittedBlockCount
: undefined, isServerEncrypted: true, clientRequestId: options.requestId }, res.properties);
: undefined, isServerEncrypted: true, clientRequestId: options.requestId }, res.properties), { cacheControl: (_a = context.request.getQuery("rscc")) !== null && _a !== void 0 ? _a : res.properties.cacheControl, contentDisposition: (_b = context.request.getQuery("rscd")) !== null && _b !== void 0 ? _b : res.properties.contentDisposition, contentEncoding: (_c = context.request.getQuery("rsce")) !== null && _c !== void 0 ? _c : res.properties.contentEncoding, contentLanguage: (_d = context.request.getQuery("rscl")) !== null && _d !== void 0 ? _d : res.properties.contentLanguage, contentType: (_e = context.request.getQuery("rsct")) !== null && _e !== void 0 ? _e : res.properties.contentType });
return response;

@@ -601,2 +602,3 @@ }

async downloadBlockBlobOrAppendBlob(options, context, blob) {
var _a, _b, _c, _d, _e;
if (blob.isCommitted === false) {

@@ -654,5 +656,5 @@ throw StorageErrorFactory_1.default.getBlobNotFound(context.contextId);

}
const response = Object.assign(Object.assign({ statusCode: contentRange ? 206 : 200, body, metadata: blob.metadata, eTag: blob.properties.etag, requestId: context.contextId, date: context.startTime, version: constants_1.BLOB_API_VERSION }, blob.properties), { blobContentMD5: blob.properties.contentMD5, acceptRanges: "bytes", contentLength,
const response = Object.assign(Object.assign({ statusCode: contentRange ? 206 : 200, body, metadata: blob.metadata, eTag: blob.properties.etag, requestId: context.contextId, date: context.startTime, version: constants_1.BLOB_API_VERSION }, blob.properties), { cacheControl: (_a = context.request.getQuery("rscc")) !== null && _a !== void 0 ? _a : blob.properties.cacheControl, contentDisposition: (_b = context.request.getQuery("rscd")) !== null && _b !== void 0 ? _b : blob.properties.contentDisposition, contentEncoding: (_c = context.request.getQuery("rsce")) !== null && _c !== void 0 ? _c : blob.properties.contentEncoding, contentLanguage: (_d = context.request.getQuery("rscl")) !== null && _d !== void 0 ? _d : blob.properties.contentLanguage, contentType: (_e = context.request.getQuery("rsct")) !== null && _e !== void 0 ? _e : blob.properties.contentType, blobContentMD5: blob.properties.contentMD5, acceptRanges: "bytes", contentLength,
contentRange,
contentMD5, isServerEncrypted: true, clientRequestId: options.requestId, blobCommittedBlockCount: blob.properties.blobType === Models.BlobType.AppendBlob
contentMD5, isServerEncrypted: true, clientRequestId: options.requestId, creationTime: blob.properties.creationTime, blobCommittedBlockCount: blob.properties.blobType === Models.BlobType.AppendBlob
? (blob.committedBlocksInOrder || []).length

@@ -673,2 +675,3 @@ : undefined });

async downloadPageBlob(options, context, blob) {
var _a, _b, _c, _d, _e;
// Deserializer doesn't handle range header currently, manually parse range headers here

@@ -728,5 +731,5 @@ const rangesParts = utils_2.deserializePageBlobRangeHeader(context.request.getHeader("range"), context.request.getHeader("x-ms-range"), false);

}
const response = Object.assign(Object.assign({ statusCode: rangesParts[1] === Infinity && rangesParts[0] === 0 ? 200 : 206, body, metadata: blob.metadata, eTag: blob.properties.etag, requestId: context.contextId, date: context.startTime, version: constants_1.BLOB_API_VERSION }, blob.properties), { contentLength,
const response = Object.assign(Object.assign({ statusCode: rangesParts[1] === Infinity && rangesParts[0] === 0 ? 200 : 206, body, metadata: blob.metadata, eTag: blob.properties.etag, requestId: context.contextId, date: context.startTime, version: constants_1.BLOB_API_VERSION }, blob.properties), { cacheControl: (_a = context.request.getQuery("rscc")) !== null && _a !== void 0 ? _a : blob.properties.cacheControl, contentDisposition: (_b = context.request.getQuery("rscd")) !== null && _b !== void 0 ? _b : blob.properties.contentDisposition, contentEncoding: (_c = context.request.getQuery("rsce")) !== null && _c !== void 0 ? _c : blob.properties.contentEncoding, contentLanguage: (_d = context.request.getQuery("rscl")) !== null && _d !== void 0 ? _d : blob.properties.contentLanguage, contentType: (_e = context.request.getQuery("rsct")) !== null && _e !== void 0 ? _e : blob.properties.contentType, contentLength,
contentRange,
contentMD5, blobContentMD5: blob.properties.contentMD5, isServerEncrypted: true, clientRequestId: options.requestId });
contentMD5, blobContentMD5: blob.properties.contentMD5, isServerEncrypted: true, creationTime: blob.properties.creationTime, clientRequestId: options.requestId });
return response;

@@ -733,0 +736,0 @@ }

@@ -5,2 +5,3 @@ "use strict";

const BlobStorageContext_1 = tslib_1.__importDefault(require("../context/BlobStorageContext"));
const StorageErrorFactory_1 = tslib_1.__importDefault(require("../errors/StorageErrorFactory"));
const NotImplementedError_1 = tslib_1.__importDefault(require("../errors/NotImplementedError"));

@@ -139,2 +140,5 @@ const Models = tslib_1.__importStar(require("../generated/artifacts/models"));

async getStatistics(options, context) {
if (!context.context.isSecondary) {
throw StorageErrorFactory_1.default.getInvalidQueryParameterValue(context.contextId);
}
const response = {

@@ -141,0 +145,0 @@ statusCode: 200,

@@ -92,4 +92,8 @@ "use strict";

let urlPartIndex = 0;
if (hostname.endsWith(constants_1.PRODUCTION_STYLE_URL_HOSTNAME)) {
account = hostname.substring(0, hostname.length - constants_1.PRODUCTION_STYLE_URL_HOSTNAME.length);
const isIPAddress = constants_1.IP_REGEX.test(hostname);
const firstDotIndex = hostname.indexOf(".");
// If hostname is not an IP address and has a dot inside,
// we assume user wants to access emulator with a production-like URL.
if (!isIPAddress && firstDotIndex > 0) {
account = hostname.substring(0, firstDotIndex);
}

@@ -96,0 +100,0 @@ else {

@@ -26,10 +26,3 @@ "use strict";

const UnsupportedParametersBlocker = async (req, context, logger) => {
const UnsupportedParameterKeys = [
// https://docs.microsoft.com/en-us/rest/api/storageservices/create-service-sas#specifying-query-parameters-to-override-response-headers-blob-and-file-services-only
"rscc",
"rscc",
"rsce",
"rsce",
"rsct"
];
const UnsupportedParameterKeys = [];
for (const parameterKey of UnsupportedParameterKeys) {

@@ -36,0 +29,0 @@ const value = req.getQuery(parameterKey);

@@ -6,3 +6,3 @@ "use strict";

const Models = tslib_1.__importStar(require("../generated/artifacts/models"));
exports.VERSION = "3.14.0";
exports.VERSION = "3.14.2";
exports.BLOB_API_VERSION = "2020-10-02";

@@ -9,0 +9,0 @@ exports.DEFAULT_BLOB_SERVER_HOST_NAME = "127.0.0.1"; // Change to 0.0.0.0 when needs external access

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.EMULATOR_ACCOUNT_KEY = exports.EMULATOR_ACCOUNT_NAME = exports.VALID_ISSUE_PREFIXES = exports.HTTPS = exports.BEARER_TOKEN_PREFIX = exports.PRODUCTION_STYLE_URL_HOSTNAME = exports.DEFAULT_SQL_OPTIONS = exports.DEFAULT_SQL_COLLATE = exports.DEFAULT_SQL_CHARSET = exports.DEFAULT_EXTENT_GC_PROTECT_TIME_IN_MS = exports.DEFAULT_READ_CONCURRENCY = exports.DEFAULT_MAX_EXTENT_SIZE = exports.FD_CACHE_NUMBER_MAX = exports.FD_CACHE_NUMBER_MIN = exports.DEFAULT_FD_CACHE_NUMBER = exports.DEFAULT_ACCOUNTS_REFRESH_INTERVAL = exports.AZURITE_ACCOUNTS_ENV = void 0;
exports.EMULATOR_ACCOUNT_KEY = exports.EMULATOR_ACCOUNT_NAME = exports.VALID_ISSUE_PREFIXES = exports.HTTPS = exports.BEARER_TOKEN_PREFIX = exports.DEFAULT_SQL_OPTIONS = exports.DEFAULT_SQL_COLLATE = exports.IP_REGEX = exports.DEFAULT_SQL_CHARSET = exports.DEFAULT_EXTENT_GC_PROTECT_TIME_IN_MS = exports.DEFAULT_READ_CONCURRENCY = exports.DEFAULT_MAX_EXTENT_SIZE = exports.FD_CACHE_NUMBER_MAX = exports.FD_CACHE_NUMBER_MIN = exports.DEFAULT_FD_CACHE_NUMBER = exports.DEFAULT_ACCOUNTS_REFRESH_INTERVAL = exports.AZURITE_ACCOUNTS_ENV = void 0;
exports.AZURITE_ACCOUNTS_ENV = "AZURITE_ACCOUNTS"; // Customize account name and keys by env

@@ -13,2 +13,7 @@ exports.DEFAULT_ACCOUNTS_REFRESH_INTERVAL = 60 * 1000; // 60s

exports.DEFAULT_SQL_CHARSET = "utf8mb4";
// IP regex — distinguishes IP-style hostnames (e.g. 127.0.0.1) from others.
// When the hostname matches, we assume the user is accessing the emulator by
// IP address; otherwise the string before the first dot is extracted as the
// account name.
// NOTE: a regex literal is used here because in the previous string form,
// "\." had its backslash consumed by the string-literal escape, leaving a
// bare "." that matches ANY character — so non-IP hostnames such as
// "127a0b0c1" were wrongly classified as IP addresses.
exports.IP_REGEX = /^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$/;
// Use utf8mb4_bin instead of utf8mb4_general_ci to honor case sensitive

@@ -31,7 +36,2 @@ // https://dev.mysql.com/doc/refman/8.0/en/case-sensitivity.html

};
// In some scenarios, users want to test with production-style URLs like
// http[s]://devstoreaccount1.localhost[:port]/container/path/blob.dat
// (as opposed to default emulator style http[s]://hostname[:port]/devstoreaccount1/container/path/blob.dat)
// When URL's hostname ends with .localhost, we assume user wants to use production-style URL format.
exports.PRODUCTION_STYLE_URL_HOSTNAME = ".localhost";
exports.BEARER_TOKEN_PREFIX = "Bearer";

@@ -38,0 +38,0 @@ exports.HTTPS = "https";

@@ -34,3 +34,3 @@ "use strict";

const stringToSign = headersToSign +
this.getCanonicalizedResourceString(authType, req, account, queueContext.authenticationPath);
this.getCanonicalizedResourceString(authType, req, account, context.context.isSecondary ? queueContext.authenticationPath + "-secondary" : queueContext.authenticationPath);
this.logger.info(`QueueSharedKeyAuthenticator:validate() [STRING TO SIGN]:${JSON.stringify(stringToSign)}`, queueContext.contextID);

@@ -37,0 +37,0 @@ const signature1 = utils_1.computeHMACSHA256(stringToSign, accountProperties.key1);

@@ -130,2 +130,5 @@ "use strict";

async getStatistics(options, context) {
if (!context.context.isSecondary) {
throw StorageErrorFactory_1.default.getInvalidQueryParameterValue(context.contextID);
}
const response = {

@@ -132,0 +135,0 @@ statusCode: 200,

@@ -115,4 +115,8 @@ "use strict";

let urlPartIndex = 0;
if (hostname.endsWith(constants_1.PRODUCTION_STYLE_URL_HOSTNAME)) {
account = hostname.substring(0, hostname.length - constants_1.PRODUCTION_STYLE_URL_HOSTNAME.length);
const isIPAddress = constants_1.IP_REGEX.test(hostname);
const firstDotIndex = hostname.indexOf(".");
// If hostname is not an IP address and has a dot inside,
// we assume user wants to access emulator with a production-like URL.
if (!isIPAddress && firstDotIndex > 0) {
account = hostname.substring(0, firstDotIndex);
}

@@ -119,0 +123,0 @@ else {

@@ -81,3 +81,2 @@ "use strict";

// CORS actual request handling.
// TODO: Should provide this handling for blob service.
// tslint:disable-next-line:max-line-length

@@ -84,0 +83,0 @@ // See as https://docs.microsoft.com/en-us/rest/api/storageservices/cross-origin-resource-sharing--cors--support-for-the-azure-storage-services

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.VALID_QUEUE_AUDIENCES = exports.ValidAPIVersions = exports.DEFAULT_QUEUE_PERSISTENCE_ARRAY = exports.QUEUE_STATUSCODE = exports.SECONDARY_SUFFIX = exports.HeaderConstants = exports.MethodConstants = exports.EMPTY_EXTENT_CHUNK = exports.UPDATE_VISIBILITYTIMEOUT_MAX = exports.UPDATE_VISIBILITYTIMEOUT_MIN = exports.DEFUALT_UPDATE_VISIBILITYTIMEOUT = exports.MESSAGETTL_MIN = exports.ENQUEUE_VISIBILITYTIMEOUT_MAX = exports.ENQUEUE_VISIBILITYTIMEOUT_MIN = exports.DEFUALT_MESSAGETTL = exports.MESSAGETEXT_LENGTH_MAX = exports.DEQUEUE_NUMOFMESSAGES_MAX = exports.DEQUEUE_NUMOFMESSAGES_MIN = exports.DEQUEUE_VISIBILITYTIMEOUT_MAX = exports.DEQUEUE_VISIBILITYTIMEOUT_MIN = exports.DEFUALT_DEQUEUE_VISIBILITYTIMEOUT = exports.LIST_QUEUE_MAXRESSULTS_MAX = exports.LIST_QUEUE_MAXRESSULTS_MIN = exports.QUEUE_SERVICE_PERMISSION = exports.NEVER_EXPIRE_DATE = exports.DEFAULT_GC_INTERVAL_MS = exports.LOGGER_CONFIGS = exports.DEFAULT_QUEUE_CONTEXT_PATH = exports.DEFAULT_ENABLE_ACCESS_LOG = exports.DEFAULT_ACCESS_LOG_PATH = exports.DEFAULT_ENABLE_DEBUG_LOG = exports.DEFAULT_DEBUG_LOG_PATH = exports.DEFAULT_QUEUE_PERSISTENCE_PATH = exports.DEFAULT_QUEUE_EXTENT_LOKI_DB_PATH = exports.DEFAULT_QUEUE_LOKI_DB_PATH = exports.IS_PRODUCTION = exports.DEFAULT_QUEUE_LISTENING_PORT = exports.DEFAULT_QUEUE_SERVER_HOST_NAME = exports.QUEUE_API_VERSION = exports.VERSION = void 0;
exports.VERSION = "3.14.0";
exports.VERSION = "3.14.2";
exports.QUEUE_API_VERSION = "2020-10-02";

@@ -6,0 +6,0 @@ exports.DEFAULT_QUEUE_SERVER_HOST_NAME = "127.0.0.1"; // Change to 0.0.0.0 when needs external access

@@ -41,3 +41,6 @@ "use strict";

this.logger.info(`AccountSASAuthenticator:validate() Signature based on key1 validation ${sig1Pass ? "passed" : "failed"}.`, context.contextID);
if (accountProperties.key2 !== undefined) {
if (!sig1Pass) {
if (accountProperties.key2 === undefined) {
return false;
}
this.logger.info(`AccountSASAuthenticator:validate() Account key2 is not empty, validate signature based account key2.`, context.contextID);

@@ -49,3 +52,3 @@ const [sig2, stringToSign2] = IAccountSASSignatureValues_1.generateAccountSASSignature(values, account, accountProperties.key2);

this.logger.info(`AccountSASAuthenticator:validate() Signature based on key2 validation ${sig2Pass ? "passed" : "failed"}.`, context.contextID);
if (!sig2Pass && !sig1Pass) {
if (!sig2Pass) {
this.logger.info(`AccountSASAuthenticator:validate() Validate signature based account key1 and key2 failed.`, context.contextID);

@@ -55,7 +58,2 @@ return false;

}
else {
if (!sig1Pass) {
return false;
}
}
// When signature validation passes, we enforce account SAS validation

@@ -62,0 +60,0 @@ // Any validation errors will stop this request immediately

@@ -79,3 +79,3 @@ "use strict";

if (values.identifier !== undefined) {
const accessPolicy = await this.getTableAccessPolicyByIdentifier(tableName, values.identifier, context);
const accessPolicy = await this.getTableAccessPolicyByIdentifier(account, tableName, values.identifier, context);
if (accessPolicy === undefined) {

@@ -198,9 +198,12 @@ this.logger.warn(`TableSASAuthenticator:validate() Cannot get access policy defined for table ${tableName} with id ${values.identifier}.`, context.contextID);

}
async getTableAccessPolicyByIdentifier(table, id, context) {
async getTableAccessPolicyByIdentifier(account, table, id, context) {
try {
const containerModel = await this.tableMetadataStore.getTableAccessPolicy(context, table, {});
if (containerModel === undefined) {
const tableModel = await this.tableMetadataStore.getTable(account, table, context);
if (tableModel === undefined) {
return undefined;
}
for (const acl of containerModel) {
if (tableModel.tableAcl === undefined) {
return undefined;
}
for (const acl of tableModel.tableAcl) {
if (acl.id === id) {

@@ -210,2 +213,3 @@ return acl.accessPolicy;

}
return undefined;
}

@@ -212,0 +216,0 @@ catch (err) {

@@ -8,5 +8,5 @@ "use strict";

TableSASPermission["Add"] = "a";
TableSASPermission["Update"] = "c";
TableSASPermission["Update"] = "u";
TableSASPermission["Delete"] = "d";
})(TableSASPermission = exports.TableSASPermission || (exports.TableSASPermission = {}));
//# sourceMappingURL=TableSASPermissions.js.map

@@ -14,2 +14,3 @@ "use strict";

async validate(req, context) {
var _a, _b;
const tableContext = new TableStorageContext_1.default(context);

@@ -39,3 +40,3 @@ const account = tableContext.account;

"\n" +
this.getCanonicalizedResourceString(req, account, tableContext.authenticationPath);
this.getCanonicalizedResourceString(req, account, context.context.isSecondary ? ((_a = tableContext.authenticationPath) === null || _a === void 0 ? void 0 : _a.substring(0, ((_b = tableContext.authenticationPath) === null || _b === void 0 ? void 0 : _b.length) - 1)) + "-secondary/" : tableContext.authenticationPath);
this.logger.info(`TableSharedKeyAuthenticator:validate() [STRING TO SIGN]:${JSON.stringify(stringToSign)}`, tableContext.contextID);

@@ -42,0 +43,0 @@ const signature1 = utils_1.computeHMACSHA256(stringToSign, accountProperties.key1);

@@ -48,4 +48,10 @@ "use strict";

}
set isSecondary(isSecondary) {
this.context.isSecondary = isSecondary;
}
get isSecondary() {
return this.context.isSecondary;
}
}
exports.default = TableStorageContext;
//# sourceMappingURL=TableStorageContext.js.map

@@ -84,4 +84,19 @@ "use strict";

}
static getInvalidXmlDocument(context) {
return new StorageError_1.default(400, "InvalidXmlDocument", `XML specified is not syntactically valid.`, context.contextID || defaultID, undefined, context);
}
static getInvalidQueryParameterValue(context, additionalMessages) {
if (additionalMessages === undefined) {
additionalMessages = {};
}
return new StorageError_1.default(400, "InvalidQueryParameterValue", `Value for one of the query parameters specified in the request URI is invalid.`, context.contextID || defaultID, additionalMessages, context);
}
static getInvalidCorsHeaderValue(context, additionalMessages) {
return new StorageError_1.default(400, "InvalidHeaderValue", "A required CORS header is not present.", context.contextID || defaultID, additionalMessages, context);
}
static corsPreflightFailure(context, additionalMessages) {
return new StorageError_1.default(403, "CorsPreflightFailure", "CORS not enabled or no matching rule found for this request.", context.contextID || defaultID, additionalMessages, context);
}
}
exports.default = StorageErrorFactory;
//# sourceMappingURL=StorageErrorFactory.js.map

@@ -23,5 +23,3 @@ "use strict";

path: "Tables",
urlParameters: [
Parameters.url
],
urlParameters: [Parameters.url],
queryParameters: [

@@ -52,8 +50,4 @@ Parameters.nextTableName,

path: "Tables",
urlParameters: [
Parameters.url
],
queryParameters: [
Parameters.format
],
urlParameters: [Parameters.url],
queryParameters: [Parameters.format],
headerParameters: [

@@ -88,5 +82,3 @@ Parameters.version,

path: "$batch",
urlParameters: [
Parameters.url
],
urlParameters: [Parameters.url],
headerParameters: [

@@ -130,10 +122,4 @@ Parameters.version,

path: "Tables('{table}')",
urlParameters: [
Parameters.url,
Parameters.table
],
headerParameters: [
Parameters.version,
Parameters.requestId
],
urlParameters: [Parameters.url, Parameters.table],
headerParameters: [Parameters.version, Parameters.requestId],
responses: {

@@ -153,6 +139,3 @@ 204: {

path: "{table}()",
urlParameters: [
Parameters.url,
Parameters.table
],
urlParameters: [Parameters.url, Parameters.table],
queryParameters: [

@@ -235,6 +218,3 @@ Parameters.timeout,

],
queryParameters: [
Parameters.timeout,
Parameters.format
],
queryParameters: [Parameters.timeout, Parameters.format],
headerParameters: [

@@ -247,6 +227,3 @@ Parameters.version,

requestBody: {
parameterPath: [
"options",
"tableEntityProperties"
],
parameterPath: ["options", "tableEntityProperties"],
mapper: {

@@ -284,6 +261,3 @@ serializedName: "tableEntityProperties",

],
queryParameters: [
Parameters.timeout,
Parameters.format
],
queryParameters: [Parameters.timeout, Parameters.format],
headerParameters: [

@@ -296,6 +270,3 @@ Parameters.version,

requestBody: {
parameterPath: [
"options",
"tableEntityProperties"
],
parameterPath: ["options", "tableEntityProperties"],
mapper: {

@@ -333,6 +304,3 @@ serializedName: "tableEntityProperties",

],
queryParameters: [
Parameters.timeout,
Parameters.format
],
queryParameters: [Parameters.timeout, Parameters.format],
headerParameters: [

@@ -364,6 +332,3 @@ Parameters.version,

],
queryParameters: [
Parameters.timeout,
Parameters.format
],
queryParameters: [Parameters.timeout, Parameters.format],
headerParameters: [

@@ -376,6 +341,3 @@ Parameters.version,

requestBody: {
parameterPath: [
"options",
"tableEntityProperties"
],
parameterPath: ["options", "tableEntityProperties"],
mapper: {

@@ -407,10 +369,4 @@ serializedName: "tableEntityProperties",

path: "{table}",
urlParameters: [
Parameters.url,
Parameters.table
],
queryParameters: [
Parameters.timeout,
Parameters.format
],
urlParameters: [Parameters.url, Parameters.table],
queryParameters: [Parameters.timeout, Parameters.format],
headerParameters: [

@@ -423,6 +379,3 @@ Parameters.version,

requestBody: {
parameterPath: [
"options",
"tableEntityProperties"
],
parameterPath: ["options", "tableEntityProperties"],
mapper: {

@@ -464,14 +417,5 @@ serializedName: "tableEntityProperties",

path: "{table}",
urlParameters: [
Parameters.url,
Parameters.table
],
queryParameters: [
Parameters.timeout,
Parameters.comp0
],
headerParameters: [
Parameters.version,
Parameters.requestId
],
urlParameters: [Parameters.url, Parameters.table],
queryParameters: [Parameters.timeout, Parameters.comp0],
headerParameters: [Parameters.version, Parameters.requestId],
responses: {

@@ -504,19 +448,7 @@ 200: {

path: "{table}",
urlParameters: [
Parameters.url,
Parameters.table
],
queryParameters: [
Parameters.timeout,
Parameters.comp0
],
headerParameters: [
Parameters.version,
Parameters.requestId
],
urlParameters: [Parameters.url, Parameters.table],
queryParameters: [Parameters.timeout, Parameters.comp0],
headerParameters: [Parameters.version, Parameters.requestId],
requestBody: {
parameterPath: [
"options",
"tableAcl"
],
parameterPath: ["options", "tableAcl"],
mapper: {

@@ -552,14 +484,5 @@ xmlName: "SignedIdentifiers",

httpMethod: "PUT",
urlParameters: [
Parameters.url
],
queryParameters: [
Parameters.timeout,
Parameters.restype,
Parameters.comp1
],
headerParameters: [
Parameters.version,
Parameters.requestId
],
urlParameters: [Parameters.url],
queryParameters: [Parameters.timeout, Parameters.restype, Parameters.comp1],
headerParameters: [Parameters.version, Parameters.requestId],
requestBody: {

@@ -583,14 +506,5 @@ parameterPath: "tableServiceProperties",

httpMethod: "GET",
urlParameters: [
Parameters.url
],
queryParameters: [
Parameters.timeout,
Parameters.restype,
Parameters.comp1
],
headerParameters: [
Parameters.version,
Parameters.requestId
],
urlParameters: [Parameters.url],
queryParameters: [Parameters.restype, Parameters.comp1, Parameters.timeout],
headerParameters: [Parameters.version, Parameters.requestId],
responses: {

@@ -610,14 +524,5 @@ 200: {

httpMethod: "GET",
urlParameters: [
Parameters.url
],
queryParameters: [
Parameters.timeout,
Parameters.restype,
Parameters.comp2
],
headerParameters: [
Parameters.version,
Parameters.requestId
],
urlParameters: [Parameters.url],
queryParameters: [Parameters.timeout, Parameters.restype, Parameters.comp2],
headerParameters: [Parameters.version, Parameters.requestId],
responses: {

@@ -624,0 +529,0 @@ 200: {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const NotImplementedError_1 = tslib_1.__importDefault(require("../errors/NotImplementedError"));
const TableStorageContext_1 = tslib_1.__importDefault(require("../context/TableStorageContext"));
const StorageErrorFactory_1 = tslib_1.__importDefault(require("../errors/StorageErrorFactory"));
const Models = tslib_1.__importStar(require("../generated/artifacts/models"));
const xml_1 = require("../generated/utils/xml");
const constants_1 = require("../utils/constants");
const BaseHandler_1 = tslib_1.__importDefault(require("./BaseHandler"));
class ServiceHandler extends BaseHandler_1.default {
async setProperties(tableServiceProperties, options, context) {
// TODO Refer to Blob/Queue ServiceHandler implementation
throw new NotImplementedError_1.default(context);
constructor() {
super(...arguments);
/**
* Default service properties.
*
* @private
* @memberof ServiceHandler
*/
this.defaultServiceProperties = {
cors: [],
defaultServiceVersion: constants_1.TABLE_API_VERSION,
hourMetrics: {
enabled: false,
retentionPolicy: {
enabled: false
},
version: "1.0"
},
logging: {
deleteProperty: true,
read: true,
retentionPolicy: {
enabled: false
},
version: "1.0",
write: true
},
minuteMetrics: {
enabled: false,
retentionPolicy: {
enabled: false
},
version: "1.0"
}
};
}
// https://docs.microsoft.com/en-us/rest/api/storageservices/get-table-service-properties
async getProperties(options, context) {
// TODO Refer to Blob/Queue ServiceHandler implementation
throw new NotImplementedError_1.default(context);
const tableCtx = new TableStorageContext_1.default(context);
const accountName = tableCtx.account;
let properties = await this.metadataStore.getServiceProperties(context, accountName);
if (!properties) {
properties = Object.assign(Object.assign({}, this.defaultServiceProperties), { accountName });
}
if (properties.cors === undefined) {
properties.cors = [];
}
if (properties.hourMetrics === undefined) {
properties.hourMetrics = this.defaultServiceProperties.hourMetrics;
}
if (properties.logging === undefined) {
properties.logging = this.defaultServiceProperties.logging;
}
if (properties.minuteMetrics === undefined) {
properties.minuteMetrics = this.defaultServiceProperties.minuteMetrics;
}
const response = Object.assign(Object.assign({}, properties), { requestId: context.contextID, statusCode: 200, version: constants_1.TABLE_API_VERSION, clientRequestId: options.requestId });
return response;
}
async setProperties(tableServiceProperties, options, context) {
const tableCtx = new TableStorageContext_1.default(context);
const accountName = tableCtx.account;
// TODO: deserializor has a bug that when cors is undefined,
// it will serialize it to empty array instead of undefined
const body = tableCtx.request.getBody();
const parsedBody = await xml_1.parseXML(body || "");
if (!parsedBody.hasOwnProperty("cors") &&
!parsedBody.hasOwnProperty("Cors")) {
tableServiceProperties.cors = undefined;
}
// Azure Storage allows allowedHeaders and exposedHeaders to be empty,
// Azurite will set to empty string for this scenario
for (const cors of tableServiceProperties.cors || []) {
cors.allowedHeaders = cors.allowedHeaders || "";
cors.exposedHeaders = cors.exposedHeaders || "";
}
await this.metadataStore.setServiceProperties(context, Object.assign(Object.assign({}, tableServiceProperties), { accountName }));
const response = {
requestId: context.contextID,
statusCode: 202,
version: constants_1.TABLE_API_VERSION,
clientRequestId: options.requestId
};
return response;
}
async getStatistics(options, context) {
// TODO Refer to Blob/Queue ServiceHandler implementation
throw new NotImplementedError_1.default(context);
if (!context.context.isSecondary) {
throw StorageErrorFactory_1.default.getInvalidQueryParameterValue(context);
}
const response = {
statusCode: 200,
requestId: context.contextID,
version: constants_1.TABLE_API_VERSION,
date: context.startTime,
geoReplication: {
status: Models.GeoReplicationStatusType.Live,
lastSyncTime: context.startTime
},
clientRequestId: options.requestId
};
return response;
}

@@ -19,0 +113,0 @@ }

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const to_readable_stream_1 = tslib_1.__importDefault(require("to-readable-stream"));
const BufferStream_1 = tslib_1.__importDefault(require("../../common/utils/BufferStream"));
const utils_1 = require("../../common/utils/utils");
const TableBatchOrchestrator_1 = tslib_1.__importDefault(require("../batch/TableBatchOrchestrator"));
const TableBatchUtils_1 = tslib_1.__importDefault(require("../batch/TableBatchUtils"));
const TableStorageContext_1 = tslib_1.__importDefault(require("../context/TableStorageContext"));
const NormalizedEntity_1 = require("../entity/NormalizedEntity");
const NotImplementedError_1 = tslib_1.__importDefault(require("../errors/NotImplementedError"));
const StorageErrorFactory_1 = tslib_1.__importDefault(require("../errors/StorageErrorFactory"));

@@ -14,4 +15,2 @@ const constants_1 = require("../utils/constants");

const BaseHandler_1 = tslib_1.__importDefault(require("./BaseHandler"));
const TableBatchUtils_1 = tslib_1.__importDefault(require("../batch/TableBatchUtils"));
const to_readable_stream_1 = tslib_1.__importDefault(require("to-readable-stream"));
/**

@@ -70,7 +69,6 @@ * TODO:

async query(options, context) {
var _a;
const tableContext = new TableStorageContext_1.default(context);
const account = this.getAndCheckAccountName(tableContext);
const accept = this.getAndCheckPayloadFormat(tableContext);
const [tableResult, nextTableName] = await this.metadataStore.queryTable(context, account, (_a = options.queryOptions) === null || _a === void 0 ? void 0 : _a.top, options.nextTableName);
const [tableResult, nextTableName] = await this.metadataStore.queryTable(context, account, options.queryOptions || {}, options.nextTableName);
const response = {

@@ -389,17 +387,64 @@ clientRequestId: options.requestId,

}
/**
* Get table access policies.
* @param {string} table
* @param {Models.TableGetAccessPolicyOptionalParams} options
* @param {Context} context
* @returns {Promise<Models.TableGetAccessPolicyResponse>}
* @memberof TableHandler
*/
async getAccessPolicy(table, options, context) {
// e.g
// const tableContext = new TableStorageContext(context);
// const accountName = tableContext.account;
// const tableName = tableContext.tableName; // Get tableName from context
// TODO
throw new NotImplementedError_1.default(context);
const tableContext = new TableStorageContext_1.default(context);
const accountName = this.getAndCheckAccountName(tableContext);
const tableName = this.getAndCheckTableName(tableContext);
const foundTable = await this.metadataStore.getTable(accountName, tableName, context);
const response = [];
const responseArray = response;
const responseObject = response;
if (foundTable.tableAcl !== undefined) {
responseArray.push(...foundTable.tableAcl);
}
responseObject.date = context.startTime;
responseObject.requestId = context.contextID;
responseObject.version = constants_1.TABLE_API_VERSION;
responseObject.statusCode = 200;
responseObject.clientRequestId = options.requestId;
return response;
}
/**
* Set table access policies.
* @param {string} table
* @param {Models.TableSetAccessPolicyOptionalParams} options
* @param {Context} context
* @returns {Promise<Models.TableSetAccessPolicyResponse>}
* @memberof TableHandler
*/
async setAccessPolicy(table, options, context) {
// e.g
// const tableContext = new TableStorageContext(context);
// const accountName = tableContext.account;
// const tableName = tableContext.tableName; // Get tableName from context
// TODO
throw new NotImplementedError_1.default(context);
const tableContext = new TableStorageContext_1.default(context);
const accountName = this.getAndCheckAccountName(tableContext);
const tableName = this.getAndCheckTableName(tableContext);
// The policy number should be within 5, the permission should follow the Table permission.
// See as https://docs.microsoft.com/en-us/rest/api/storageservices/create-service-sas.
if (options.tableAcl !== undefined) {
if (options.tableAcl.length > 5) {
throw StorageErrorFactory_1.default.getInvalidXmlDocument(context);
}
for (const acl of options.tableAcl) {
const permission = acl.accessPolicy.permission;
for (const item of permission) {
if (!constants_1.TABLE_SERVICE_PERMISSION.includes(item)) {
throw StorageErrorFactory_1.default.getInvalidXmlDocument(context);
}
}
}
}
await this.metadataStore.setTableACL(accountName, tableName, context, options.tableAcl);
const response = {
date: context.startTime,
requestId: context.contextID,
version: constants_1.TABLE_API_VERSION,
statusCode: 204,
clientRequestId: options.requestId
};
return response;
}

@@ -406,0 +451,0 @@ /**

@@ -41,3 +41,4 @@ "use strict";

// tslint:disable-next-line: prefer-const
let [account, tableSection] = extractStoragePartsFromPath(req.hostname, req.path);
let [account, tableSection, isSecondary] = extractStoragePartsFromPath(req.hostname, req.path);
tableContext.isSecondary = isSecondary;
const isGet = req.method.toUpperCase() === "GET";

@@ -54,3 +55,3 @@ // Candidate tableSection

// TODO: Not allowed create Table with Tables as name
if (tableSection === undefined) {
if (tableSection === undefined || tableSection === "") {
// Service level operation

@@ -107,2 +108,8 @@ tableContext.tableName = undefined;

tableContext.authenticationPath = req.path;
if (isSecondary) {
const pos = tableContext.authenticationPath.search(constants_2.SECONDARY_SUFFIX);
tableContext.authenticationPath =
tableContext.authenticationPath.substr(0, pos) +
tableContext.authenticationPath.substr(pos + constants_2.SECONDARY_SUFFIX.length);
}
// Emulator's URL pattern is like http://hostname[:port]/account/table

@@ -133,2 +140,3 @@ // (or, alternatively, http[s]://account.localhost[:port]/table/)

let table;
let isSecondary = false;
const decodedPath = decodeURIComponent(path);

@@ -140,4 +148,8 @@ const normalizedPath = decodedPath.startsWith("/")

let urlPartIndex = 0;
if (hostname.endsWith(constants_1.PRODUCTION_STYLE_URL_HOSTNAME)) {
account = hostname.substring(0, hostname.length - constants_1.PRODUCTION_STYLE_URL_HOSTNAME.length);
const isIPAddress = constants_1.IP_REGEX.test(hostname);
const firstDotIndex = hostname.indexOf(".");
// If hostname is not an IP address and has a dot inside,
// we assume user wants to access emulator with a production-like URL.
if (!isIPAddress && firstDotIndex > 0) {
account = hostname.substring(0, firstDotIndex);
}

@@ -148,5 +160,9 @@ else {

table = parts[urlPartIndex++];
return [account, table];
if (account.endsWith(constants_2.SECONDARY_SUFFIX)) {
account = account.substr(0, account.length - constants_2.SECONDARY_SUFFIX.length);
isSecondary = true;
}
return [account, table, isSecondary];
}
exports.extractStoragePartsFromPath = extractStoragePartsFromPath;
//# sourceMappingURL=tableStorageContext.middleware.js.map

@@ -14,2 +14,3 @@ "use strict";

this.TABLES_COLLECTION = "$TABLES_COLLECTION$";
this.SERVICES_COLLECTION = "$SERVICES_COLLECTION$";
this.initialized = false;

@@ -49,2 +50,9 @@ this.closed = false;

}
// Create service properties collection if not exists
let servicePropertiesColl = this.db.getCollection(this.SERVICES_COLLECTION);
if (servicePropertiesColl === null) {
servicePropertiesColl = this.db.addCollection(this.SERVICES_COLLECTION, {
unique: ["accountName"]
});
}
await new Promise((resolve, reject) => {

@@ -125,4 +133,31 @@ this.db.saveDatabase((err) => {

}
async queryTable(context, account, top = 1000, nextTable) {
/**
* Update the ACL of an existing table item in persistency layer.
*
* @param {string} account
* @param {string} table
* @param {TableACL} [tableACL]
* @param {Context} context
* @returns {Promise<void>}
* @memberof LokiTableMetadataStore
*/
async setTableACL(account, table, context, tableACL) {
const coll = this.db.getCollection(this.TABLES_COLLECTION);
const doc = coll.findOne({ account, table });
if (!doc) {
throw StorageErrorFactory_1.default.getTableNotFound(context);
}
doc.tableAcl = tableACL;
coll.update(doc);
}
async getTable(account, table, context) {
const coll = this.db.getCollection(this.TABLES_COLLECTION);
const doc = coll.findOne({ account, table });
if (!doc) {
throw StorageErrorFactory_1.default.getTableNotFound(context);
}
return doc;
}
async queryTable(context, account, queryOptions, nextTable) {
const coll = this.db.getCollection(this.TABLES_COLLECTION);
const filter = { account };

@@ -132,5 +167,14 @@ if (nextTable) {

}
let queryWhere;
try {
queryWhere = this.generateQueryTableWhereFunction(queryOptions.filter);
}
catch (e) {
throw StorageErrorFactory_1.default.getQueryConditionInvalid(context);
}
const top = queryOptions.top || 1000;
const docList = coll
.chain()
.find(filter)
.where(queryWhere)
.simplesort("table")

@@ -335,14 +379,26 @@ .limit(top + 1)

}
const segmentFilter = {};
if (nextPartitionKey) {
segmentFilter.PartitionKey = { $gte: nextPartitionKey };
}
if (nextRowKey) {
segmentFilter.RowKey = { $gte: nextRowKey };
}
const maxResults = queryOptions.top || constants_1.QUERY_RESULT_MAX_NUM;
// .find using a segment filter is not filtering in the same way that the sorting function sorts
// I think offset will cause more problems than it solves, as we will have to step and sort all
// results here, so I am adding 2 additional predicates here to cover the cases with
// multiple partitions and rows to paginate
const result = tableEntityCollection
.chain()
.find(segmentFilter)
.where(queryWhere)
.where((data) => {
if (nextPartitionKey !== undefined) {
if (data.PartitionKey > nextPartitionKey) {
return true;
}
}
if (nextRowKey !== undefined) {
if (data.RowKey >= nextRowKey &&
(data.PartitionKey === nextPartitionKey ||
data.PartitionKey === undefined)) {
return true;
}
return false;
}
return true;
})
.sort((obj1, obj2) => {

@@ -569,2 +625,6 @@ if (obj1.PartitionKey > obj2.PartitionKey) {

else if (query[i] === "(" || query[i] === ")") {
if (i !== 0 && query[i - 1].match(/\d/) !== null) {
// this is needed if query does not contain whitespace between number token and paren
appendToken();
}
i--;

@@ -587,6 +647,52 @@ appendToken();

/**
* @param query Query Tables $query string.
*/
generateQueryTableWhereFunction(query) {
if (query === undefined) {
return () => true;
}
const transformedQuery = LokiTableMetadataStore.transformTableQuery(query);
// tslint:disable-next-line: no-console
// console.log(query);
// tslint:disable-next-line: no-console
// console.log(transformedQuery);
return new Function("item", transformedQuery);
}
/**
* Azurite V2 query tables implementation.
*/
static transformTableQuery(query) {
const systemProperties = new Map([
["name", "table"]
]);
const allowCustomProperties = false;
return LokiTableMetadataStore.transformQuery(query, systemProperties, allowCustomProperties);
}
/**
* @param query Query Enties $query string.
*/
generateQueryEntityWhereFunction(query) {
if (query === undefined) {
return () => true;
}
const transformedQuery = LokiTableMetadataStore.transformEntityQuery(query);
// tslint:disable-next-line: no-console
// console.log(query);
// tslint:disable-next-line: no-console
// console.log(transformedQuery);
return new Function("item", transformedQuery);
}
/**
* Azurite V2 query entities implementation as temporary workaround before new refactored implementation of querying.
* TODO: Handle query types
*/
static transformQuery(query) {
static transformEntityQuery(query) {
const systemProperties = new Map([
["PartitionKey", "PartitionKey"],
["RowKey", "RowKey"]
]);
const allowCustomProperties = true;
return LokiTableMetadataStore.transformQuery(query, systemProperties, allowCustomProperties);
}
static transformQuery(query, systemProperties, allowCustomProperties) {
// If a token is neither a number, nor a boolean, nor a string enclosed with quotation marks it is an operand.

@@ -624,6 +730,6 @@ // Operands are attributes of the object used within the where clause of LokiJS, thus we need to prepend each

].includes(token)) {
if (token === "PartitionKey" || token === "RowKey") {
transformedQuery += `item.${token} `;
if (systemProperties.has(token)) {
transformedQuery += `item.${systemProperties.get(token)} `;
}
else {
else if (allowCustomProperties) {
// Datetime compare

@@ -638,2 +744,5 @@ if (counter + 2 <= tokens.length - 1 &&

}
else {
throw Error("Custom properties are not supported on this query type.");
}
}

@@ -666,17 +775,56 @@ else {

/**
* @param query Query Enties $query string.
* Get service properties for specific storage account.
*
* @param {string} account
* @returns {Promise<ServicePropertiesModel | undefined>}
* @memberof LokiBlobMetadataStore
*/
generateQueryEntityWhereFunction(query) {
if (query === undefined) {
return () => true;
async getServiceProperties(context, account) {
const coll = this.db.getCollection(this.SERVICES_COLLECTION);
if (coll) {
const doc = coll.by("accountName", account);
return doc ? doc : undefined;
}
const transformedQuery = LokiTableMetadataStore.transformQuery(query);
// tslint:disable-next-line: no-console
// console.log(query);
// tslint:disable-next-line: no-console
// console.log(transformedQuery);
return new Function("item", transformedQuery);
return undefined;
}
/**
* Update table service properties.
* THis will create service properties if they do not exist in the persistence layer.
*
* TODO: Account's service property should be created when storage account is created or metadata
* storage initialization. This method should only be responsible for updating existing record.
* In this way, we can reduce one I/O call to get account properties.
*
* @param {ServicePropertiesModel} serviceProperties
* @returns {Promise<ServicePropertiesModel>} undefined properties will be ignored during properties setup
* @memberof LokiBlobMetadataStore
*/
async setServiceProperties(context, serviceProperties) {
const coll = this.db.getCollection(this.SERVICES_COLLECTION);
const doc = coll.by("accountName", serviceProperties.accountName);
if (doc) {
doc.cors =
serviceProperties.cors === undefined
? doc.cors
: serviceProperties.cors;
doc.hourMetrics =
serviceProperties.hourMetrics === undefined
? doc.hourMetrics
: serviceProperties.hourMetrics;
doc.logging =
serviceProperties.logging === undefined
? doc.logging
: serviceProperties.logging;
doc.minuteMetrics =
serviceProperties.minuteMetrics === undefined
? doc.minuteMetrics
: serviceProperties.minuteMetrics;
return coll.update(doc);
}
else {
return coll.insert(serviceProperties);
}
}
}
exports.default = LokiTableMetadataStore;
//# sourceMappingURL=LokiTableMetadataStore.js.map

@@ -17,2 +17,3 @@ "use strict";

const constants_1 = require("./utils/constants");
const PreflightMiddlewareFactory_1 = tslib_1.__importDefault(require("./middleware/PreflightMiddlewareFactory"));
const morgan = require("morgan");

@@ -96,4 +97,14 @@ const TableSharedKeyAuthenticator_1 = tslib_1.__importDefault(require("./authentication/TableSharedKeyAuthenticator"));

app.use(middlewareFactory.createHandlerMiddleware(handlers));
// CORS request handling, preflight request and the corresponding actual request
const preflightMiddlewareFactory = new PreflightMiddlewareFactory_1.default(Logger_1.default);
// CORS actual request handling.
// tslint:disable-next-line:max-line-length
// See as https://docs.microsoft.com/en-us/rest/api/storageservices/cross-origin-resource-sharing--cors--support-for-the-azure-storage-services
app.use(preflightMiddlewareFactory.createCorsRequestMiddleware(this.metadataStore, true));
app.use(preflightMiddlewareFactory.createCorsRequestMiddleware(this.metadataStore, false));
// Generated, will serialize response models into HTTP response
app.use(middlewareFactory.createSerializerMiddleware());
// CORS preflight request handling, processing OPTIONS requests.
// TODO: Should support OPTIONS in swagger and autorest, then this handling can be moved to ServiceHandler.
app.use(preflightMiddlewareFactory.createOptionsHandlerMiddleware(this.metadataStore));
// Generated, will return MiddlewareError and Errors thrown in previous middleware/handlers to HTTP response

@@ -100,0 +111,0 @@ app.use(middlewareFactory.createErrorMiddleware());

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ValidAPIVersions = exports.QUERY_RESULT_MAX_NUM = exports.RETURN_CONTENT = exports.RETURN_NO_CONTENT = exports.ODATA_TYPE = exports.XML_METADATA = exports.FULL_METADATA_ACCEPT = exports.MINIMAL_METADATA_ACCEPT = exports.NO_METADATA_ACCEPT = exports.SUPPORTED_QUERY_OPERATOR = exports.HeaderConstants = exports.VERSION = exports.TABLE_API_VERSION = exports.DEFAULT_TABLE_CONTEXT_PATH = exports.TABLE_STATUSCODE = exports.DEFAULT_ENABLE_DEBUG_LOG = exports.DEFAULT_ENABLE_ACCESS_LOG = exports.DEFAULT_TABLE_LISTENING_PORT = exports.DEFAULT_TABLE_SERVER_HOST_NAME = exports.DEFAULT_TABLE_LOKI_DB_PATH = exports.DEFAULT_TABLE_EXTENT_LOKI_DB_PATH = void 0;
exports.SECONDARY_SUFFIX = exports.TABLE_SERVICE_PERMISSION = exports.ValidAPIVersions = exports.QUERY_RESULT_MAX_NUM = exports.DEFAULT_TABLE_PERSISTENCE_ARRAY = exports.RETURN_CONTENT = exports.RETURN_NO_CONTENT = exports.ODATA_TYPE = exports.XML_METADATA = exports.FULL_METADATA_ACCEPT = exports.MINIMAL_METADATA_ACCEPT = exports.NO_METADATA_ACCEPT = exports.SUPPORTED_QUERY_OPERATOR = exports.MethodConstants = exports.HeaderConstants = exports.VERSION = exports.TABLE_API_VERSION = exports.DEFAULT_TABLE_CONTEXT_PATH = exports.TABLE_STATUSCODE = exports.DEFAULT_TABLE_PERSISTENCE_PATH = exports.DEFAULT_ENABLE_DEBUG_LOG = exports.DEFAULT_ENABLE_ACCESS_LOG = exports.DEFAULT_TABLE_LISTENING_PORT = exports.DEFAULT_TABLE_SERVER_HOST_NAME = exports.DEFAULT_TABLE_LOKI_DB_PATH = exports.DEFAULT_TABLE_EXTENT_LOKI_DB_PATH = void 0;
exports.DEFAULT_TABLE_EXTENT_LOKI_DB_PATH = "__azurite_db_table_extent__.json";

@@ -10,2 +10,3 @@ exports.DEFAULT_TABLE_LOKI_DB_PATH = "__azurite_db_table__.json";

exports.DEFAULT_ENABLE_DEBUG_LOG = true;
exports.DEFAULT_TABLE_PERSISTENCE_PATH = "__tablestorage__";
var TABLE_STATUSCODE;

@@ -18,3 +19,3 @@ (function (TABLE_STATUSCODE) {

exports.TABLE_API_VERSION = "2020-10-02";
exports.VERSION = "3.14.0";
exports.VERSION = "3.14.2";
exports.HeaderConstants = {

@@ -31,4 +32,17 @@ SERVER: "Server",

ACCEPT: "accept",
PREFER: "Prefer"
PREFER: "Prefer",
ORIGIN: "origin",
VARY: "Vary",
ACCESS_CONTROL_EXPOSE_HEADERS: "Access-Control-Expose-Headers",
ACCESS_CONTROL_ALLOW_ORIGIN: "Access-Control-Allow-Origin",
ACCESS_CONTROL_ALLOW_CREDENTIALS: "Access-Control-Allow-Credentials",
ACCESS_CONTROL_ALLOW_METHODS: "Access-Control-Allow-Methods",
ACCESS_CONTROL_ALLOW_HEADERS: "Access-Control-Allow-Headers",
ACCESS_CONTROL_MAX_AGE: "Access-Control-Max-Age",
ACCESS_CONTROL_REQUEST_METHOD: "access-control-request-method",
ACCESS_CONTROL_REQUEST_HEADERS: "access-control-request-headers"
};
exports.MethodConstants = {
OPTIONS: "OPTIONS"
};
exports.SUPPORTED_QUERY_OPERATOR = ["eq", "gt", "ge", "lt", "le", "ne"];

@@ -42,2 +56,9 @@ exports.NO_METADATA_ACCEPT = "application/json;odata=nometadata";

exports.RETURN_CONTENT = "return-content";
exports.DEFAULT_TABLE_PERSISTENCE_ARRAY = [
{
locationId: "Default",
locationPath: exports.DEFAULT_TABLE_PERSISTENCE_PATH,
maxConcurrency: 1
}
];
exports.QUERY_RESULT_MAX_NUM = 1000;

@@ -72,2 +93,4 @@ exports.ValidAPIVersions = [

];
exports.TABLE_SERVICE_PERMISSION = "raud";
exports.SECONDARY_SUFFIX = "-secondary";
//# sourceMappingURL=constants.js.map

@@ -6,3 +6,3 @@ {

"icon": "icon.png",
"version": "3.14.0",
"version": "3.14.2",
"publisher": "Azurite",

@@ -36,3 +36,3 @@ "categories": [

"sequelize": "^6.3.0",
"tedious": "^11.0.9",
"tedious": "^12.0.0",
"to-readable-stream": "^2.1.0",

@@ -46,3 +46,5 @@ "tslib": "^2.3.0",

"devDependencies": {
"@azure/data-tables": "^12.0.0-beta.2",
"@azure/core-auth": "^1.3.2",
"@azure/core-rest-pipeline": "^1.2.0",
"@azure/data-tables": "^12.1.1",
"@azure/storage-blob": "^12.1.2",

@@ -71,7 +73,10 @@ "@azure/storage-queue": "^12.0.5",

"cross-var": "^1.1.0",
"find-process": "^1.4.4",
"husky": "^7.0.0",
"lint-staged": "^11.0.0",
"mocha": "^5.2.0",
"pkg": "^5.3.0",
"prettier": "^2.2.1",
"prettier-tslint": "^0.4.2",
"rcedit": "^3.0.1",
"ts-mockito": "^2.6.1",

@@ -258,2 +263,3 @@ "ts-node": "^10.0.0",

"build:autorest:table": "autorest ./swagger/table.md --typescript --use=S:/GitHub/XiaoningLiu/autorest.typescript.server",
"build:exe": "node ./scripts/buildExe.js",
"watch": "tsc -watch -p ./",

@@ -270,2 +276,3 @@ "blob": "node -r ts-node/register src/blob/main.ts",

"test:table": "npm run lint && cross-env NODE_TLS_REJECT_UNAUTHORIZED=0 mocha --compilers ts-node/register --no-timeouts --recursive tests/table/*.test.ts tests/table/apis/*.test.ts tests/table/apis/**/*.test.ts",
"test:exe": "npm run lint && cross-env NODE_TLS_REJECT_UNAUTHORIZED=0 mocha --compilers ts-node/register --no-timeouts tests/exe.test.ts --exit",
"clean": "rimraf dist typings *.log coverage __testspersistence__ temp __testsstorage__ .nyc_output debug.log *.vsix *.tgz",

@@ -272,0 +279,0 @@ "clean:deep": "npm run clean && rimraf debug.log __*",

# Azurite V3
[![npm version](https://badge.fury.io/js/azurite.svg)](https://badge.fury.io/js/azurite)
[![Build Status](https://dev.azure.com/azure/Azurite/_apis/build/status/Azure.Azurite?branchName=master)](https://dev.azure.com/azure/Azurite/_build/latest?definitionId=20&branchName=master)
[![Build Status](https://dev.azure.com/azure/Azurite/_apis/build/status/Azure.Azurite?branchName=main)](https://dev.azure.com/azure/Azurite/_build/latest?definitionId=105&branchName=main)

@@ -12,3 +12,3 @@ > Note:

| ------------------------------------------------------------------ | ------------------------- | ------------------------------ | ------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| 3.14.0 | 2020-10-02 | Blob, Queue and Table(preview) | Azurite V3 based on TypeScript & New Architecture | [NPM](https://www.npmjs.com/package/azurite) - [Docker](https://hub.docker.com/_/microsoft-azure-storage-azurite) - [Visual Studio Code Extension](https://marketplace.visualstudio.com/items?itemName=Azurite.azurite) |
| 3.14.2 | 2020-10-02 | Blob, Queue and Table(preview) | Azurite V3 based on TypeScript & New Architecture | [NPM](https://www.npmjs.com/package/azurite) - [Docker](https://hub.docker.com/_/microsoft-azure-storage-azurite) - [Visual Studio Code Extension](https://marketplace.visualstudio.com/items?itemName=Azurite.azurite) |
| [Legacy (v2)](https://github.com/Azure/Azurite/tree/legacy-master) | 2016-05-31 | Blob, Queue and Table | Legacy Azurite V2 | [NPM](https://www.npmjs.com/package/azurite) |

@@ -686,12 +686,9 @@

> Please reach to us or open issues if you need multi storage account support.
You could enable multiple accounts by setting up environment variable `AZURITE_ACCOUNTS`. See the [section](#customized-storage-accounts--keys-1) above.
Azurite V3 supports a default account as General Storage Account V2 and provides features.
Optionally, you could modify your hosts file, to access accounts with production-style URL. See section below.
- Account name: `devstoreaccount1`
- Account key: `Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==`
### Endpoint & Connection URL
The service endpoints for Azurite are different from those of an Azure storage account. The difference is because the local computer does not perform domain name resolution, requiring Azurite endpoints to be local addresses.
The service endpoints for Azurite are different from those of an Azure storage account. The difference is because Azurite runs on the local computer, and normally no DNS resolves an address to local.

@@ -710,3 +707,3 @@ When you address a resource in an Azure storage account, use the following scheme. The account name is part of the URI host name, and the resource being addressed is part of the URI path:

However, because the local computer does not perform domain name resolution, the account name is part of the URI path instead of the host name. Use the following URI format for a resource in Azurite:
However, because Azurite runs on the local computer, the account name is part of the URI path instead of the host name. Use the following URI format for a resource in Azurite:

@@ -729,2 +726,30 @@ ```

Optionally, you could modify your hosts file, to access an account with production-style URL.
First, add line(s) to your hosts file, like:
```
127.0.0.1 account1.blob.localhost
127.0.0.1 account1.queue.localhost
127.0.0.1 account1.table.localhost
```
Secondly, set environment variables to enable customized storage accounts & keys:
```
set AZURITE_ACCOUNTS="account1:key1:key2"
```
You could add more accounts. See the [section](#customized-storage-accounts--keys-1) above.
Finally, start Azurite and use a customized connection string to access your account.
In the connection string below, it is assumed default ports are used.
```
DefaultEndpointsProtocol=http;AccountName=account1;AccountKey=key1;BlobEndpoint=http://account1.blob.localhost:10000;QueueEndpoint=http://account1.queue.localhost:10001;TableEndpoint=http://account1.table.localhost:10002;
```
> Note. Do not access the default account in this way with Azure Storage Explorer. There is a bug where Storage Explorer always adds the account name to the URL path, causing failures.
### Scalability & Performance

@@ -731,0 +756,0 @@

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc