
twilio-sync - npm package version comparison

Comparing version 3.2.2 to 4.0.0-canary.100

dist/_virtual/_commonjsHelpers.js


dist/client.js

@@ -125,2 +125,3 @@ /*

var replayEventEmitter = require('@twilio/replay-event-emitter');
var browser = require('./packages/shared/builds/browser.js');

@@ -166,2 +167,3 @@ const SYNC_PRODUCT_ID = 'data_sync';

super();
this._currentState = 'disconnected';
if (!fpaToken) {

@@ -194,15 +196,22 @@ throw new Error('Sync library needs a valid Twilio token to be passed');

}
let twilsock$1 = options.twilsockClient = (_a = options.twilsockClient) !== null && _a !== void 0 ? _a : new twilsock.Twilsock(fpaToken, productId, options);
let twilsock$1 = options.twilsockClient = (_a = options.twilsockClient) !== null && _a !== void 0 ? _a : new twilsock.TwilsockClient(fpaToken, productId, options);
twilsock$1.on('tokenAboutToExpire', () => this.emit('tokenAboutToExpire'));
twilsock$1.on('tokenExpired', () => this.emit('tokenExpired'));
twilsock$1.on('connectionError', err => this.emit('connectionError', err));
twilsock$1.on('stateChanged', (state) => {
this.emit('connectionStateChanged', state);
/**
* Handle transport establishing event
* If we have any subscriptions - we should check object for modifications
*/
this.services.subscriptions.onConnectionStateChanged(state === 'connected');
// twilsock.on('connectionError', err => this.emit('connectionError', err));
twilsock$1.on('connecting', () => {
this._currentState = 'connecting';
this.emit('connectionStateChanged', 'connecting');
this.services.subscriptions.onConnectionStateChanged(false);
});
twilsock$1.on('message', (messageType, payload) => this._routeMessage(messageType, payload));
twilsock$1.on('connected', () => {
this._currentState = 'connected';
this.emit('connectionStateChanged', 'connected');
this.services.subscriptions.onConnectionStateChanged(true);
});
twilsock$1.on('disconnected', () => {
this._currentState = 'disconnected';
this.emit('connectionStateChanged', 'disconnected');
this.services.subscriptions.onConnectionStateChanged(false);
});
twilsock$1.on('targetedMessageReceived', (messageType, payload) => this._routeMessage(messageType, JSON.parse(payload)));
let config = new configuration.Configuration(options);
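
A minimal consumer-side sketch of the unified state event wired up above, assuming the package's default export is the client class (as in the v3 docs); `token` is a Twilio access token obtained elsewhere:

const SyncClient = require('twilio-sync');

const client = new SyncClient(token);
client.on('connectionStateChanged', (state) => {
  // per the wiring above: 'connecting' | 'connected' | 'disconnected'
  console.log('Sync connection state:', state);
});
client.on('connectionError', (err) => console.error('Sync connection error:', err));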

@@ -228,4 +237,4 @@ let network$1 = new network.NetworkService(new clientInfo["default"](SDK_VERSION), config, twilsock$1);

static populateInitRegistrations(reg) {
reg.populateInitRegistrations([SYNC_NOTIFICATION_TYPE, SYNC_DOCUMENT_NOTIFICATION_TYPE,
SYNC_LIST_NOTIFICATION_TYPE, SYNC_MAP_NOTIFICATION_TYPE]);
// reg.populateInitRegistrations([SYNC_NOTIFICATION_TYPE, SYNC_DOCUMENT_NOTIFICATION_TYPE,
// SYNC_LIST_NOTIFICATION_TYPE, SYNC_MAP_NOTIFICATION_TYPE]);
}

@@ -278,3 +287,3 @@ /**

get connectionState() {
return this.services.twilsock.state;
return this._currentState;
}

@@ -291,9 +300,2 @@ /**

}
try {
let storageSettings = await this.services.twilsock.storageId();
this.services.storage.updateStorageId(storageSettings.id);
}
catch (e) {
logger["default"].warn('Failed to initialize storage', e);
}
}

@@ -666,5 +668,3 @@ storeRootInSessionCache(type, id, value) {

.catch((error) => {
var _a;
const status = (_a = error === null || error === void 0 ? void 0 : error.reply) === null || _a === void 0 ? void 0 : _a.status;
if ((status === null || status === void 0 ? void 0 : status.code) === 401 && (status === null || status === void 0 ? void 0 : status.status) === 'UNAUTHORIZED') {
if (error instanceof browser.TwilioError && error.errorInfo.status === 401) {
throw new syncerror.SyncError('Updated token was rejected by server', 400, 51130);

@@ -671,0 +671,0 @@ }

@@ -105,4 +105,4 @@ /*

var uuid = require('uuid');
var replayEventEmitter = require('@twilio/replay-event-emitter');
var require$$9 = require('nanoid');

@@ -113,3 +113,3 @@ class Closeable extends replayEventEmitter.ReplayEventEmitter {

this.closed = false;
this.uuid = uuid.v4();
this.uuid = require$$9.nanoid();
}

@@ -116,0 +116,0 @@ get listenerUuid() {

@@ -106,4 +106,5 @@ /*

/**
* Container for entities which are known by the client
* It's needed for deduplication when client obtain the same object several times
* Container for entities which are known by the client.
* It's needed for deduplication when the client obtains the same object several times.
* @internal
*/

@@ -110,0 +111,0 @@ class EntitiesCache {

@@ -111,3 +111,3 @@ /*

var closeable = require('./closeable.js');
var cache = require('./cache.js');
var itemsCache = require('./itemsCache.js');
var declarativeTypeValidator = require('@twilio/declarative-type-validator');
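
The new itemsCache module replaces the old cache throughout this diff. Its implementation isn't shown on this page, but the call sites below imply roughly the following surface (an inferred sketch; the method names come from usage here, everything else is assumption):

// Inferred ItemsCache surface, reconstructed from call sites in this diff (hypothetical):
//   get(key)                  -> entry wrapper { value, isValid } or undefined (tombstones are invalid)
//   getValue(key)             -> the cached item itself, unwrapped
//   store(key, item, lastEventId)
//   delete(key, lastEventId)  // lastEventId === null appears to mark a tombstone
//   isKnown(key, lastEventId) -> true if an event with this id was already applied
//   has(key)
//   populate(items, reachedCollectionBegin, reachedCollectionEnd)
//   getIterator(fromKey) / getReverseIterator(fromKey)
//   collectionBegin / collectionEnd -> sentinel keys for the known bounds of the collection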

@@ -123,3 +123,11 @@ var replayEventEmitter = require('@twilio/replay-event-emitter');

*/
constructor() { }
constructor(key, value, lastEventId) {
this.key = key;
this.value = value;
this.lastEventId = lastEventId;
}
/**
* CachedValue interface.
*/
get cacheKey() { return this.key; }
}

@@ -130,6 +138,7 @@ class LiveQueryImpl extends entity.SyncEntity {

this.descriptor = descriptor;
this.cache = new cache.Cache();
this.cache = new itemsCache.ItemsCache();
if (items) {
items.forEach(item => {
this.cache.store(item.key, { key: item.key, value: item.data }, item.revision);
const newItem = new InsightsItem(item.key, item.data, item.revision); // @fixme item.lasteventid?
this.cache.store(item.key, newItem, item.revision); // @fixme item.lasteventid?
});

@@ -197,24 +206,21 @@ }

}
handleItemMutated(key, value, revision) {
if (this.shouldIgnoreEvent(key, revision)) {
logger["default"].trace(`Item ${key} update skipped, revision: ${revision}`);
handleItemMutated(key, value, lastEventId) {
if (this.shouldIgnoreEvent(key, lastEventId)) {
logger["default"].trace(`Item ${key} update skipped, revision: ${lastEventId}`);
return;
}
else {
const newItem = { key, value };
this.cache.store(key, newItem, revision);
this.broadcastEventToListeners('itemUpdated', newItem);
}
const newItem = new InsightsItem(key, value, lastEventId);
this.cache.store(key, newItem, lastEventId);
this.broadcastEventToListeners('itemUpdated', newItem);
}
handleItemRemoved(key, revision) {
const force = (revision === null);
if (this.shouldIgnoreEvent(key, revision)) {
logger["default"].trace(`Item ${key} delete skipped, revision: ${revision}`);
handleItemRemoved(key, lastEventId) {
if (this.shouldIgnoreEvent(key, lastEventId)) {
logger["default"].trace(`Item ${key} delete skipped, revision: ${lastEventId}`);
return;
}
else {
this.cache.delete(key, revision, force);
this.broadcastEventToListeners('itemRemoved', { key });
}
this.cache.delete(key, lastEventId);
this.broadcastEventToListeners('itemRemoved', { key });
}
handleBatchUpdate(items) {
// preprocess item set for easy key-based access (it's a one-time constant time operation)
// preprocess item set for easy key-based access
let newItems = {};

@@ -238,3 +244,3 @@ if (items != null) {

}
// once item is handled, remove it from incoming array
// once item is handled, remove it from the incoming array
delete newItems[key];

@@ -254,3 +260,3 @@ });

_advanceLastEventId(eventId, revision) {
// LiveQuery is not revisioned in any way, so simply ignore second param and act upon lastEventId only
// LiveQuery is not versioned in any way, so simply ignore second param and act upon lastEventId only
if (this.lastEventId < eventId) {

@@ -310,3 +316,4 @@ this.descriptor.last_event_id = eventId;

* Closes this query instance and unsubscribes from further service events.
* This will eventually stop the physical inflow of updates over the network, when all other instances of this query are closed as well.
* This will eventually stop the physical inflow of updates over the network,
* when all other instances of this query are closed as well.
*/

@@ -356,5 +363,5 @@ close() {

/**
* Allows repetitive quick searches against a specific Flex data. Unlike a
* LiveQuery, this result set does not subscribe to any updates and therefore receives no events
* beyond the initial result set.
* Allows repetitive quick searches against specific Flex data.
* Unlike a LiveQuery, this result set does not subscribe to any updates and therefore
* receives no events beyond the initial result set.
*

@@ -390,8 +397,8 @@ * Use the {@link SyncClient.instantQuery} method to create an Instant Query.

this.items = {};
return queryItems({
network: this.network,
uri: this.queryUri,
queryString: queryExpression,
})
.then((response) => {
try {
const response = await queryItems({
network: this.network,
uri: this.queryUri,
queryString: queryExpression,
});
this.queryExpression = queryExpression;

@@ -404,8 +411,8 @@ if (response.items) {

this.emit('searchResult', this.getItems());
})
.catch((err) => {
}
catch (err) {
logger["default"].error(`Error '${err.message}' while executing query '${queryExpression}'`);
this.queryExpression = null;
throw err;
});
}
}
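
A hedged usage sketch of the refactored search() above. instantQuery, search(), and the 'searchResult' event all appear in this diff; the index name and query expression are illustrative only.

async function runInstantQuery(client) {
  const query = await client.instantQuery('tr-task'); // 'tr-task' is a hypothetical index name
  query.on('searchResult', (items) => {
    // items is the key -> value map returned by getItems()
    for (const [key, value] of Object.entries(items)) console.log(key, value);
  });
  await query.search('data.queue_name == "Support"'); // illustrative query expression
}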

@@ -412,0 +419,0 @@ /**

@@ -105,5 +105,5 @@ /*

var version = "3.2.2";
var version = "3.2.3-rc.0";
exports.version = version;
//# sourceMappingURL=package.json.js.map

@@ -112,21 +112,9 @@ /*

*/
constructor(items, source, prevToken, nextToken) {
this.prevToken = prevToken;
this.nextToken = nextToken;
constructor(items, source, prevItemId = null, nextItemId = null) {
this.items = items;
this.source = source;
this.prevPageItemId = prevItemId;
this.nextPageItemId = nextItemId;
}
/**
* Indicates the existence of the next page.
*/
get hasNextPage() {
return !!this.nextToken;
}
/**
* Indicates the existence of the previous page.
*/
get hasPrevPage() {
return !!this.prevToken;
}
/**
* Request next page.

@@ -139,3 +127,3 @@ * Does not modify existing object.

}
return this.source(this.nextToken);
return this.source({ nextPageItemId: this.nextPageItemId });
}

@@ -150,4 +138,16 @@ /**

}
return this.source(this.prevToken);
return this.source({ prevPageItemId: this.prevPageItemId });
}
/**
* Indicates the existence of the next page.
*/
get hasNextPage() {
return this.nextPageItemId !== null;
}
/**
* Indicates the existence of the previous page.
*/
get hasPrevPage() {
return this.prevPageItemId !== null;
}
}
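
The Paginator now navigates by boundary item IDs instead of server page tokens, but its public surface (items, hasNextPage/hasPrevPage, nextPage/prevPage) is unchanged. A hedged drain-the-pages sketch, assuming `list` is an opened Sync list whose getItems() returns this Paginator:

async function collectAllItems(list) {
  const results = [];
  let page = await list.getItems({ pageSize: 50 });
  for (;;) {
    results.push(...page.items);
    if (!page.hasNextPage) break;
    page = await page.nextPage(); // resolves to the next Paginator via the source callback
  }
  return results;
}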

@@ -154,0 +154,0 @@

@@ -105,28 +105,8 @@ /*

var uuid = require('uuid');
var syncerror = require('../utils/syncerror.js');
var logger = require('../utils/logger.js');
var operationRetrier = require('@twilio/operation-retrier');
var twilsock = require('twilsock');
var require$$9 = require('nanoid');
var browser = require('../packages/shared/builds/browser.js');
function _interopNamespace(e) {
if (e && e.__esModule) return e;
var n = Object.create(null);
if (e) {
Object.keys(e).forEach(function (k) {
if (k !== 'default') {
var d = Object.getOwnPropertyDescriptor(e, k);
Object.defineProperty(n, k, d.get ? d : {
enumerable: true,
get: function () { return e[k]; }
});
}
});
}
n["default"] = e;
return Object.freeze(n);
}
var uuid__namespace = /*#__PURE__*/_interopNamespace(uuid);
const MINIMUM_RETRY_DELAY = 4000;

@@ -136,9 +116,8 @@ const MAXIMUM_RETRY_DELAY = 60000;

const RETRY_DELAY_RANDOMNESS = 0.2;
function messageFromErrorBody(transportError) {
if (transportError.body) {
if (transportError.body.message) {
return transportError.body.message;
}
const sidGenerator = require$$9.customAlphabet('1234567890abcdef', 32);
function messageFromErrorBody(transportError, payload) {
if (payload === null || payload === void 0 ? void 0 : payload.message) {
return payload.message;
}
switch (transportError.status) {
switch (transportError.statusCode) {
case 429:

@@ -152,20 +131,20 @@ return 'Throttled by server';

}
function codeFromErrorBody(trasportError) {
if (trasportError.body) {
return trasportError.body.code;
}
return 0;
function codeFromErrorBody(payload) {
var _a;
return (_a = payload === null || payload === void 0 ? void 0 : payload.code) !== null && _a !== void 0 ? _a : 0;
}
function mapTransportError(transportError) {
if (transportError.status === 409) {
return new syncerror.SyncNetworkError(messageFromErrorBody(transportError), transportError.status, codeFromErrorBody(transportError), transportError.body);
function mapTransportError(transportError, payload) {
var _a;
if (transportError.statusCode === 409) {
return new syncerror.SyncNetworkError(messageFromErrorBody(transportError, payload), transportError.statusCode, codeFromErrorBody(payload), transportError.body);
}
else if (transportError.status) {
return new syncerror.SyncError(messageFromErrorBody(transportError), transportError.status, codeFromErrorBody(transportError));
else if (transportError.statusCode) {
return new syncerror.SyncError(messageFromErrorBody(transportError, payload), transportError.statusCode, codeFromErrorBody(payload));
// todo: still idk
}
else if (transportError instanceof twilsock.TransportUnavailableError) {
else if (transportError instanceof browser.TwilioError && transportError.errorInfo.reason === 'transportDisconnected') {
return transportError;
}
else {
return new syncerror.SyncError(transportError.message, 0, 0);
return new syncerror.SyncError((_a = payload.message) !== null && _a !== void 0 ? _a : 'Unexpected error', 0, 0);
}
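
For readability, a paraphrase of the new mapping above:

// statusCode === 409                              -> SyncNetworkError (conflict; carries the response body)
// any other truthy statusCode                     -> SyncError(message, statusCode, code)
// TwilioError with reason 'transportDisconnected' -> rethrown unchanged
// anything else                                   -> SyncError(payload.message ?? 'Unexpected error', 0, 0)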

@@ -186,3 +165,3 @@ }

'Twilio-Sync-Client-Info': JSON.stringify(this.clientInfo),
'Twilio-Request-Id': 'RQ' + uuid__namespace.v4().replace(/-/g, '')
'Twilio-Request-Id': `RQ${sidGenerator()}`
};
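
A standalone sketch of the request-ID generator introduced above; customAlphabet is part of nanoid's public API, and the 'RQ' prefix mirrors the header construction.

const { customAlphabet } = require('nanoid');
const sidGenerator = customAlphabet('1234567890abcdef', 32);
console.log(`RQ${sidGenerator()}`); // e.g. RQ9f2c... (34 characters total)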

@@ -207,11 +186,16 @@ }

request()
.then(result => retrier.succeeded(result))
.catch(err => {
if (codesToRetryOn.includes(err.status)) {
let delayOverride = parseInt(err.headers ? err.headers['Retry-After'] : null);
retrier.failed(mapTransportError(err), isNaN(delayOverride) ? null : delayOverride * 1000);
.then(result => {
const isError = result.statusCode >= 400 && result.statusCode < 500;
if (!isError) {
retrier.succeeded(result);
return;
}
else if (err.message === 'Twilsock disconnected') {
const parsedPayload = JSON.parse(result.payload);
if (codesToRetryOn.includes(parsedPayload.status)) {
let delayOverride = parseInt(result.headers ? result.headers['Retry-After'] : null);
retrier.failed(mapTransportError(result, parsedPayload), isNaN(delayOverride) ? null : delayOverride * 1000);
}
else if (parsedPayload.message === 'Twilsock disconnected') {
// Ugly hack. We must make proper exceptions for twilsock
retrier.failed(mapTransportError(err));
retrier.failed(mapTransportError(result, parsedPayload));
}

@@ -222,4 +206,10 @@ else {

retrier.cancel();
reject(mapTransportError(err));
reject(mapTransportError(result, parsedPayload));
}
})
.catch(err => {
// Fatal error
retrier.removeAllListeners();
retrier.cancel();
reject(mapTransportError(err, null));
});

@@ -230,7 +220,22 @@ });

});
retrier.on('cancelled', err => reject(mapTransportError(err)));
retrier.on('failed', err => reject(mapTransportError(err)));
retrier.on('cancelled', err => reject(mapTransportError(err, null)));
retrier.on('failed', err => reject(mapTransportError(err, null)));
retrier.start();
});
}
responsePostProcess(response) {
let body = {};
try {
body = JSON.parse(response.payload);
}
catch (_a) { }
return {
status: {
code: response.statusCode,
status: response.status,
},
headers: response.headers,
body,
};
}
/**

@@ -243,3 +248,4 @@ * Make a GET request by given URI

logger["default"].debug('GET', uri, 'ID:', headers['Twilio-Request-Id']);
return this.executeWithRetry(() => this.transport.get(uri, headers, this.config.productId), true);
return this.executeWithRetry(() => this.transport.get({ url: uri, headers, grant: this.config.productId }), true)
.then((response) => this.responsePostProcess(response));
}

@@ -252,3 +258,4 @@ post(uri, body, revision, retryWhenThrottled = false) {

logger["default"].debug('POST', uri, 'ID:', headers['Twilio-Request-Id']);
return this.executeWithRetry(() => this.transport.post(uri, headers, body, this.config.productId), retryWhenThrottled);
return this.executeWithRetry(() => this.transport.post({ url: uri, headers, body, grant: this.config.productId }), retryWhenThrottled)
.then((response) => this.responsePostProcess(response));
}

@@ -261,3 +268,4 @@ put(uri, body, revision) {

logger["default"].debug('PUT', uri, 'ID:', headers['Twilio-Request-Id']);
return this.executeWithRetry(() => this.transport.put(uri, headers, body, this.config.productId), false);
return this.executeWithRetry(() => this.transport.put({ url: uri, headers, body, grant: this.config.productId }), false)
.then((response) => this.responsePostProcess(response));
}

@@ -267,3 +275,4 @@ delete(uri) {

logger["default"].debug('DELETE', uri, 'ID:', headers['Twilio-Request-Id']);
return this.executeWithRetry(() => this.transport.delete(uri, headers, this.config.productId), false);
return this.executeWithRetry(() => this.transport.delete({ url: uri, headers, grant: this.config.productId }), false)
.then((response) => this.responsePostProcess(response));
}
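
All four wrappers above now pipe the raw transport response through responsePostProcess, so callers keep seeing the old shape. Per the implementation earlier in this hunk, that shape is:

// {
//   status:  { code: response.statusCode, status: response.status },
//   headers: response.headers,
//   body:    JSON.parse(response.payload) // or {} when the payload is not valid JSON
// }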

@@ -270,0 +279,0 @@ }

@@ -108,4 +108,5 @@ /*

var logger = require('./utils/logger.js');
var twilsock = require('twilsock');
var browser = require('./packages/shared/builds/browser.js');
// import { TransportUnavailableError } from 'twilsock';
/**

@@ -291,3 +292,4 @@ * A data container used by the Subscriptions class to track subscribed entities' local

}
if (e instanceof twilsock.TransportUnavailableError) {
// todo: transport disconnected here? idk really
if (e instanceof browser.TwilioError && e.errorInfo.reason === 'transportDisconnected') {
logger["default"].debug(`Twilsock connection (required for subscription) not ready (c:${correlationId}); waiting…`);

@@ -294,0 +296,0 @@ this.backoff.reset();

@@ -113,6 +113,8 @@ /*

var paginator = require('./paginator.js');
var cache = require('./cache.js');
var itemsCache = require('./itemsCache.js');
var mergingqueue = require('./mergingqueue.js');
var closeable = require('./closeable.js');
var declarativeTypeValidator = require('@twilio/declarative-type-validator');
require('./node_modules/lodash/lodash.js');
var lodash = require('./_virtual/lodash.js');

@@ -128,3 +130,3 @@ class SyncListImpl extends entity.SyncEntity {

this.updateMergingQueue = new mergingqueue.NamespacedMergingQueue(updateRequestReducer);
this.cache = new cache.Cache();
this.cache = new itemsCache.ItemsCache();
this.descriptor = descriptor;

@@ -187,3 +189,3 @@ this.descriptor.date_updated = new Date(this.descriptor.date_updated);

this._handleItemMutated(index, item.url, item.last_event_id, item.revision, value, item.date_updated, item.date_expires, true, false);
return this.cache.get(index);
return this.cache.getValue(index);
}

@@ -196,5 +198,5 @@ async set(index, value, itemMetadataUpdates) {

let existingItem = await this.get(index);
const itemDescriptor = await this._addOrUpdateItemOnServer(existingItem.uri, data, undefined, ttl);
const itemDescriptor = await this._addOrUpdateItemOnServer(existingItem.url, data, undefined, ttl);
this._handleItemMutated(index, itemDescriptor.url, itemDescriptor.last_event_id, itemDescriptor.revision, itemDescriptor.data, itemDescriptor.date_updated, itemDescriptor.date_expires, false, false);
return this.cache.get(index);
return this.cache.getValue(index);
}

@@ -207,5 +209,5 @@ async _updateItemWithIfMatch(index, mutatorFunction, ttl) {

try {
const itemDescriptor = await this._addOrUpdateItemOnServer(existingItem.uri, data, ifMatch, ttl);
const itemDescriptor = await this._addOrUpdateItemOnServer(existingItem.url, data, ifMatch, ttl);
this._handleItemMutated(index, itemDescriptor.url, itemDescriptor.last_event_id, itemDescriptor.revision, itemDescriptor.data, itemDescriptor.date_updated, itemDescriptor.date_expires, false, false);
return this.cache.get(index);
return this.cache.getValue(index);
}

@@ -236,3 +238,3 @@ catch (error) {

const previousItemData = sanitize.deepClone(item.data);
const response = await this.services.network.delete(item.uri);
const response = await this.services.network.delete(item.url);
this._handleItemRemoved(index, response.body.last_event_id, previousItemData, new Date(response.body.date_updated), false);

@@ -243,10 +245,9 @@ }

if (cachedItem) {
return cachedItem;
return cachedItem.value;
}
else {
return this._getItemFromServer(index);
}
return this._getItemFromServer(index);
}
async _getItemFromServer(index) {
let result = await this.queryItems({ index });
// queryItemsFromServer is used only here!
let result = await this._queryItemsFromServer({ index });
if (result.items.length < 1) {

@@ -261,35 +262,163 @@ throw new syncerror.SyncError(`No item with index ${index} found`, 404, 54151);

* Query items from the List
* @param args Argument pack for the query.
* May include the following parameters:
*
* - index -- Returns the Item with the specified index or an empty items array, if one doesn't exist.
* - from -- Item index offset specifying the starting point of the query (inclusive). If not specified,
* the query starts from either the first or the last item in the List (by index),
* depending on the Order query parameter.
* - order -- String(Optional) -- Determines the order of the query. Items are ordered numerically by index.
* One of {asc, desc}, defaults to asc.
* - pageSize -- Maximum number of Items in the response (i.e. in this page). Defaults to 50.
* The max page size is 1000. Note: PageSize between 100 and 1000 returns at most 100 items.
* - pageToken - Direct paging token, for starting from a specific page.
* - Metadata - May have the value list. If specified, returns the state of the List in the list attribute of the response.
* - HideExpired - Determines whether expired List items are filtered out from the result list. By default, is false.
*
* Stuff not accepted by the service, internal to SDK:
*
* - limit
*
* Valid query parameter combinations
* These are the valid query parameter combinations. [x] should be read as “if x is not provided, the default value is assumed”.
* The combinations are in order of precedence - e.g., if both Index and PageToken are provided, PageToken is ignored.
* +-----------------------------------------+------------------------------------------------------------------------------+
* | Index, [Metadata] | Returns 0-1 Items with the given Index |
* | PageToken, [Metadata] | Returns a page based on the PageToken returned from a previous query, |
* | | with this and all following pages modified by any provided query parameters. |
* | [From], [Order], [Pagesize], [Metadata] | Performs an initial query from a specified offset. |
* | | Returned PageTokens in the meta block should be used for pagination. |
* +-----------------------------------------+------------------------------------------------------------------------------+
* @private
*/
async queryItems(arg) {
arg = arg || {};
const url = new uri.UriBuilder(this.links.items)
.queryParam('From', arg.from)
.queryParam('PageSize', arg.limit)
.queryParam('Index', arg.index)
.queryParam('PageToken', arg.pageToken)
.queryParam('Order', arg.order)
async queryItems(args) {
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;
// If the index is specified, fetch only a single item.
if (args.index) {
const cached = this.cache.get(args.index);
if (cached) {
return this.makePaginator(cached.isValid ? [cached.value] : [], args, false, false);
}
return this._querySingleItemFromServer(args.index);
}
// If a paginator calls here with nextPageItemId or prevPageItemId, it means we are trying to fetch the next or previous
// page accordingly. The task is to determine the direction (based on args.order and the direction of the queried page) and
// prefill it from the cache as much as possible.
const fillingSubsequentPage = !!args.nextPageItemId || !!args.prevPageItemId;
const oldArgs = Object.assign({}, args);
if (fillingSubsequentPage) {
args.from = (_b = (_a = args.nextPageItemId) !== null && _a !== void 0 ? _a : args.prevPageItemId) !== null && _b !== void 0 ? _b : args.from;
}
const goingBackwards = !!args.prevPageItemId;
// If from and order are specified, fetch page of items.
let cacheIterator = args.order === 'desc'
? (goingBackwards
? this.cache.getIterator((_d = (_c = args.prevPageItemId) !== null && _c !== void 0 ? _c : args.from) !== null && _d !== void 0 ? _d : null)
: this.cache.getReverseIterator((_f = (_e = args.nextPageItemId) !== null && _e !== void 0 ? _e : args.from) !== null && _f !== void 0 ? _f : null))
: (goingBackwards
? this.cache.getReverseIterator((_h = (_g = args.prevPageItemId) !== null && _g !== void 0 ? _g : args.from) !== null && _h !== void 0 ? _h : null)
: this.cache.getIterator((_k = (_j = args.nextPageItemId) !== null && _j !== void 0 ? _j : args.from) !== null && _k !== void 0 ? _k : null));
let cachedItems = [];
const itemsCountToGet = fillingSubsequentPage ? args.limit + 1 : args.limit;
for (let item of cacheIterator) {
if (fillingSubsequentPage && item.value.index == args.from) {
continue;
}
cachedItems.push(item.value);
if (cachedItems.length >= args.limit) {
if (goingBackwards) {
lodash.lodash.exports.reverse(cachedItems);
}
// We've got a full page of items, so we can easily generate prev/next page navigation
// However, the first item in the page cannot be collectionBegin, and the last item cannot be collectionEnd
return this.makePaginator(cachedItems, args, this._prevPageAllowed(cachedItems, args.order), this._nextPageAllowed(cachedItems, args.order));
}
}
// cache was not enough, so continue fetching from the server in the same direction
// whatever the last fetched item was, it will be the next `from`
if (cachedItems.length > 0) {
args.from = lodash.lodash.exports.last(cachedItems).index;
}
// Maximum sync page size is 100 items, 10 is simply 1/10th of that.
const getLimit = Math.max(itemsCountToGet - cachedItems.length, 10);
if (goingBackwards) {
args.order = args.order === 'desc' ? 'asc' : 'desc';
}
const doFetch = args.from !== (oldArgs.order === 'desc'
? (goingBackwards ? this.cache.collectionEnd : this.cache.collectionBegin)
: (goingBackwards ? this.cache.collectionBegin : this.cache.collectionEnd));
if (doFetch) {
const fetchedItems = await this._queryItemsFromServer(Object.assign(Object.assign({}, args), { limit: getLimit }));
const items = fetchedItems.items;
this.cache.populate(items, !fetchedItems.hasPrevPage, !fetchedItems.hasNextPage);
for (let item of items) {
if ((cachedItems.length > 0 || fillingSubsequentPage) && item.index == args.from) {
// Skip the key we already have in cachedItems
continue;
}
if (goingBackwards) {
cachedItems.unshift(item);
}
else {
cachedItems.push(item);
}
if (cachedItems.length >= args.limit) {
break;
}
}
}
else {
if (goingBackwards) {
lodash.lodash.exports.reverse(cachedItems);
}
}
return this.makePaginator(cachedItems, oldArgs, this._prevPageAllowed(cachedItems, oldArgs.order), this._nextPageAllowed(cachedItems, oldArgs.order));
}
_prevPageAllowed(items, order) {
return items.length > 0 && lodash.lodash.exports.head(items).index !== (order === 'desc' ? this.cache.collectionEnd : this.cache.collectionBegin);
}
_nextPageAllowed(items, order) {
return items.length > 0 && lodash.lodash.exports.last(items).index !== (order === 'desc' ? this.cache.collectionBegin : this.cache.collectionEnd);
}
// Paginator depends on:
// - items listed
// - was there a previous token (prevPage)
// - was there a next token (nextPage)
// - order of items asc/desc
makePaginator(items, args, prevPage, nextPage) {
args.nextPageItemId = args.prevPageItemId = null;
const hasPrevPage = prevPage && this._prevPageAllowed(items, args.order);
const hasNextPage = nextPage && this._nextPageAllowed(items, args.order);
return new paginator.Paginator(items, (adds) => this.queryItems(Object.assign(Object.assign({}, args), adds)), hasPrevPage ? lodash.lodash.exports.head(items).index : null, hasNextPage ? lodash.lodash.exports.last(items).index : null);
}
async _querySingleItemFromServer(index) {
const fetchedItems = await this._queryItemsFromServer({ index });
if (fetchedItems.items.length === 0) {
// Successfully failed to fetch by index, mark it as a tombstone.
this.cache.delete(index, null);
}
return fetchedItems;
}
async _queryItemsFromServer(args) {
var _a, _b;
const uri$1 = new uri.UriBuilder(this.links.items)
.queryParam('HideExpired', true)
// either
.queryParam('Index', args.index)
// or
.queryParam('PageToken', args.pageToken)
// or
.queryParam('From', args.from)
.queryParam('PageSize', args.limit) // @fixme use pageSize not limit?
.queryParam('Order', args.order)
.build();
let response = await this.services.network.get(url);
let items = response.body.items.map(el => {
let response = await this.services.network.get(uri$1);
let items = ((_b = (_a = response.body) === null || _a === void 0 ? void 0 : _a.items) !== null && _b !== void 0 ? _b : []).map(el => {
el.date_updated = new Date(el.date_updated);
let itemInCache = this.cache.get(el.index);
if (itemInCache) {
this._handleItemMutated(el.index, el.url, el.last_event_id, el.revision, el.data, el.date_updated, el.date_expires, false, true);
}
else {
this.cache.store(Number(el.index), new synclistitem.SyncListItem({
index: Number(el.index),
uri: el.url,
revision: el.revision,
lastEventId: el.last_event_id,
dateUpdated: el.date_updated,
dateExpires: el.date_expires,
data: el.data
}), el.last_event_id);
}
return this.cache.get(el.index);
// FIXME: handleItem mutated inserts items into the cache, so populate call later only updates the bounds? Split it.
this._handleItemMutated(el.index, el.url, el.last_event_id, el.revision, el.data, el.date_updated, el.date_expires, false, true);
return this.cache.getValue(el.index);
});
let meta = response.body.meta;
return new paginator.Paginator(items, pageToken => this.queryItems({ pageToken }), meta.previous_token, meta.next_token);
const meta = response.body.meta;
return this.makePaginator(items, args, !!meta.previous_token, !!meta.next_token);
}
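
The nested ternaries in queryItems above (and in the map variant later in this diff) select a cache iterator from two booleans. A hedged restatement; pickIterator is a hypothetical helper, not part of the SDK:

function pickIterator(cache, args, goingBackwards) {
  // Paging backwards starts from prevPageItemId, forwards from nextPageItemId,
  // falling back to args.from in both cases.
  const start = (goingBackwards ? args.prevPageItemId : args.nextPageItemId) ?? args.from ?? null;
  // desc+backwards and asc+forwards both walk the cache in ascending key order.
  const ascending = (args.order === 'desc') === goingBackwards;
  return ascending ? cache.getIterator(start) : cache.getReverseIterator(start);
}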

@@ -331,3 +460,3 @@ async getItems(args) {

const requestBody = { ttl };
const response = await this.services.network.post(existingItem.uri, requestBody);
const response = await this.services.network.post(existingItem.url, requestBody);
existingItem.updateDateExpires(response.body.date_expires);

@@ -344,4 +473,4 @@ }

}
shouldIgnoreEvent(key, eventId) {
return this.cache.isKnown(key, eventId);
shouldIgnoreEvent(key, last_event_id) {
return this.cache.isKnown(key, last_event_id);
}

@@ -355,2 +484,3 @@ /**

update.date_created = new Date(update.date_created);
// update.date_expires = new Date(update.date_expires);
switch (update.type) {

@@ -360,3 +490,3 @@ case 'list_item_added':

{
this._handleItemMutated(itemIndex, update.item_url, update.id, update.item_revision, update.item_data, update.date_created, undefined, // orchestration does not include date_expires -- @todo it does now?
this._handleItemMutated(itemIndex, update.item_url, update.id, update.item_revision, update.item_data, update.date_created, update.date_expires, // orchestration events include date_expires since June 2020, SP-386
update.type === 'list_item_added', true);

@@ -399,3 +529,3 @@ }

}
_handleItemMutated(index, uri, lastEventId, revision, data, dateUpdated, dateExpires, added, remote) {
_handleItemMutated(index, url, lastEventId, revision, data, dateUpdated, dateExpires, added, remote) {
if (this.shouldIgnoreEvent(index, lastEventId)) {

@@ -406,5 +536,5 @@ logger["default"].trace(`Item ${index} update skipped, current: ${this.lastEventId}, remote: ${lastEventId}`);

this._updateRootDateUpdated(dateUpdated);
const item = this.cache.get(index);
const item = this.cache.getValue(index);
if (!item) {
const newItem = new synclistitem.SyncListItem({ index, uri, lastEventId, revision, data, dateUpdated, dateExpires });
const newItem = new synclistitem.SyncListItem({ index, url, lastEventId, revision, data, dateUpdated, dateExpires });
this.cache.store(index, newItem, lastEventId);

@@ -436,5 +566,5 @@ this.emitItemMutationEvent(newItem, remote, added);

*/
_handleItemRemoved(index, eventId, oldData, dateUpdated, remote) {
_handleItemRemoved(index, lastEventId, oldData, dateUpdated, remote) {
this._updateRootDateUpdated(dateUpdated);
this.cache.delete(index, eventId);
this.cache.delete(index, lastEventId);
this.broadcastEventToListeners('itemRemoved', { index: index, isLocal: !remote, previousItemData: oldData });

@@ -894,3 +1024,4 @@ }

from: [declarativeTypeValidator.nonNegativeInteger, 'undefined'],
pageSize: [declarativeTypeValidator.custom((value) => [sanitize.isPositiveInteger(value), 'a positive integer']), 'undefined']
pageSize: [declarativeTypeValidator.custom((value) => [sanitize.isPositiveInteger(value), 'a positive integer']), 'undefined'],
order: [declarativeTypeValidator.custom((value) => [value === 'asc' || value === 'desc', `either 'asc' or 'desc'`]), 'undefined']
})

@@ -897,0 +1028,0 @@ ]),

@@ -116,4 +116,4 @@ /*

}
get uri() {
return this.descriptor.uri;
get url() {
return this.descriptor.url;
}

@@ -149,2 +149,6 @@ get revision() {

/**
* CachedValue interface.
*/
get cacheKey() { return this.index; }
/**
* @internal

@@ -151,0 +155,0 @@ */

@@ -113,6 +113,8 @@ /*

var paginator = require('./paginator.js');
var cache = require('./cache.js');
var itemsCache = require('./itemsCache.js');
var mergingqueue = require('./mergingqueue.js');
var closeable = require('./closeable.js');
var declarativeTypeValidator = require('@twilio/declarative-type-validator');
require('./node_modules/lodash/lodash.js');
var lodash = require('./_virtual/lodash.js');

@@ -128,3 +130,3 @@ class SyncMapImpl extends entity.SyncEntity {

this.updateMergingQueue = new mergingqueue.NamespacedMergingQueue(updateRequestReducer);
this.cache = new cache.Cache();
this.cache = new itemsCache.ItemsCache();
this.descriptor = descriptor;

@@ -187,3 +189,3 @@ this.descriptor.date_updated = new Date(this.descriptor.date_updated);

if (this.cache.has(key)) {
return this.cache.get(key);
return this.cache.getValue(key);
}

@@ -195,3 +197,3 @@ else {

async _getItemFromServer(key) {
let result = await this.queryItems({ key: key });
let result = await this._queryItemsFromServer({ key: key });
if (result.items.length < 1) {

@@ -215,3 +217,3 @@ throw new syncerror.SyncError(`The specified Map Item does not exist`, 404, 54201);

this._handleItemMutated(item.key, item.url, item.last_event_id, item.revision, item.data, item.date_updated, item.date_expires, result.added, false);
return this.cache.get(item.key);
return this.cache.getValue(item.key);
}

@@ -236,3 +238,3 @@ async _putItemWithIfMatch(key, mutatorFunction, ttl) {

this._handleItemMutated(item.key, item.url, item.last_event_id, item.revision, item.data, item.date_updated, item.date_expires, result.added, false);
return this.cache.get(item.key);
return this.cache.getValue(item.key);
}

@@ -281,27 +283,163 @@ catch (error) {

/**
* @param args Argument pack for the query.
* May include the following parameters:
*
* - key -- Returns the Item with the specified key or an empty items array, if one doesn't exist.
* - from -- Item index offset specifying the starting point of the query (inclusive). If not specified,
* the query starts from either the first or the last item in the List (by index),
* depending on the Order query parameter.
* - order -- String(Optional) -- Determines the order of the query. Items are ordered numerically by index.
* One of {asc, desc}, defaults to asc.
* - pageSize -- Maximum number of Items in the response (i.e. in this page). Defaults to 50.
* The max page size is 1000. Note: PageSize between 100 and 1000 returns at most 100 items.
* - pageToken - Direct paging token, for starting from a specific page.
* - Metadata - May have the value list. If specified, returns the state of the List in the list attribute of the response.
* - HideExpired - Determines whether expired List items are filtered out from the result list. By default, is false.
*
* Stuff not accepted by the service, internal to SDK:
*
* - limit
* - isNextPage
*
* Valid query parameter combinations
* These are the valid query parameter combinations. [x] should be read as “if x is not provided, the default value is assumed”.
* The combinations are in order of precedence - e.g., if both Key and PageToken are provided, PageToken is ignored.
* +-----------------------------------------+------------------------------------------------------------------------------+
* | Key, [Metadata] | Returns 0-1 Items with the given Key |
* | PageToken, [Metadata] | Returns a page based on the PageToken returned from a previous query, |
* | | with this and all following pages modified by any provided query parameters. |
* | [From], [Order], [Pagesize], [Metadata] | Performs an initial query from a specified offset. |
* | | Returned PageTokens in the meta block should be used for pagination. |
* +-----------------------------------------+------------------------------------------------------------------------------+
* @private
*/
async queryItems(args) {
args = args || {};
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;
// If the key is specified, fetch only a single item.
if (args.key) {
const cached = this.cache.get(args.key);
if (cached) {
return this.makePaginator(cached.isValid ? [cached.value] : [], args, false, false);
}
return this._querySingleItemFromServer(args.key);
}
// If a paginator calls here with nextPageItemId or prevPageItemId, it means we are trying to fetch the next or previous
// page accordingly. The task is to determine the direction (based on args.order and the direction of the queried page) and
// prefill it from the cache as much as possible.
const fillingSubsequentPage = !!args.nextPageItemId || !!args.prevPageItemId;
const oldArgs = Object.assign({}, args);
if (fillingSubsequentPage) {
args.from = (_b = (_a = args.nextPageItemId) !== null && _a !== void 0 ? _a : args.prevPageItemId) !== null && _b !== void 0 ? _b : args.from;
}
const goingBackwards = !!args.prevPageItemId;
// If from and order are specified, fetch page of items.
let cacheIterator = args.order === 'desc'
? (goingBackwards
? this.cache.getIterator((_d = (_c = args.prevPageItemId) !== null && _c !== void 0 ? _c : args.from) !== null && _d !== void 0 ? _d : null)
: this.cache.getReverseIterator((_f = (_e = args.nextPageItemId) !== null && _e !== void 0 ? _e : args.from) !== null && _f !== void 0 ? _f : null))
: (goingBackwards
? this.cache.getReverseIterator((_h = (_g = args.prevPageItemId) !== null && _g !== void 0 ? _g : args.from) !== null && _h !== void 0 ? _h : null)
: this.cache.getIterator((_k = (_j = args.nextPageItemId) !== null && _j !== void 0 ? _j : args.from) !== null && _k !== void 0 ? _k : null));
let cachedItems = [];
const itemsCountToGet = fillingSubsequentPage ? args.limit + 1 : args.limit;
for (let item of cacheIterator) {
if (fillingSubsequentPage && item.value.key == args.from) {
continue;
}
cachedItems.push(item.value);
if (cachedItems.length >= args.limit) {
if (goingBackwards) {
lodash.lodash.exports.reverse(cachedItems);
}
// We've got a full page of items, so we can easily generate prev/next page navigation
// However, the first item in the page cannot be collectionBegin, and the last item cannot be collectionEnd
return this.makePaginator(cachedItems, args, this._prevPageAllowed(cachedItems, args.order), this._nextPageAllowed(cachedItems, args.order));
}
}
// cache was not enough, so continue fetching from the server in the same direction
// whatever the last fetched item was, it will be the next `from`
if (cachedItems.length > 0) {
args.from = lodash.lodash.exports.last(cachedItems).key;
}
// Maximum sync page size is 100 items, 10 is simply 1/10th of that.
const getLimit = Math.max(itemsCountToGet - cachedItems.length, 10);
if (goingBackwards) {
args.order = args.order === 'desc' ? 'asc' : 'desc';
}
const doFetch = args.from !== (oldArgs.order === 'desc'
? (goingBackwards ? this.cache.collectionEnd : this.cache.collectionBegin)
: (goingBackwards ? this.cache.collectionBegin : this.cache.collectionEnd));
if (doFetch) {
const fetchedItems = await this._queryItemsFromServer(Object.assign(Object.assign({}, args), { limit: getLimit }));
const items = fetchedItems.items;
this.cache.populate(items, !fetchedItems.hasPrevPage, !fetchedItems.hasNextPage);
for (let item of items) {
if ((cachedItems.length > 0 || fillingSubsequentPage) && item.key == args.from) {
// Skip the key we already have in cachedItems
continue;
}
if (goingBackwards) {
cachedItems.unshift(item);
}
else {
cachedItems.push(item);
}
if (cachedItems.length >= args.limit) {
break;
}
}
}
else {
if (goingBackwards) {
lodash.lodash.exports.reverse(cachedItems);
}
}
return this.makePaginator(cachedItems, oldArgs, this._prevPageAllowed(cachedItems, oldArgs.order), this._nextPageAllowed(cachedItems, oldArgs.order));
}
_prevPageAllowed(items, order) {
return items.length > 0 && lodash.lodash.exports.head(items).key !== (order === 'desc' ? this.cache.collectionEnd : this.cache.collectionBegin);
}
_nextPageAllowed(items, order) {
return items.length > 0 && lodash.lodash.exports.last(items).key !== (order === 'desc' ? this.cache.collectionBegin : this.cache.collectionEnd);
}
// Paginator depends on:
// - items listed
// - was there a previous token (prevPage)
// - was there a next token (nextPage)
// - order of items asc/desc
makePaginator(items, args, prevPage, nextPage) {
args.nextPageItemId = args.prevPageItemId = null;
const hasPrevPage = prevPage && this._prevPageAllowed(items, args.order);
const hasNextPage = nextPage && this._nextPageAllowed(items, args.order);
return new paginator.Paginator(items, (adds) => this.queryItems(Object.assign(Object.assign({}, args), adds)), hasPrevPage ? lodash.lodash.exports.head(items).key : null, hasNextPage ? lodash.lodash.exports.last(items).key : null);
}
async _querySingleItemFromServer(key) {
const fetchedItems = await this._queryItemsFromServer({ key });
if (fetchedItems.items.length === 0) {
// Successfully failed to fetch by key, mark it as a tombstone.
this.cache.delete(key, null);
}
return fetchedItems;
}
async _queryItemsFromServer(args) {
var _a, _b;
const uri$1 = new uri.UriBuilder(this.links.items)
.queryParam('HideExpired', true)
// either
.queryParam('Key', args.key)
// or
.queryParam('PageToken', args.pageToken)
// or
.queryParam('From', args.from)
.queryParam('PageSize', args.limit)
.queryParam('Key', args.key)
.queryParam('PageToken', args.pageToken)
.queryParam('Order', args.order)
.build();
let response = await this.services.network.get(uri$1);
let items = response.body.items.map(el => {
let items = ((_b = (_a = response.body) === null || _a === void 0 ? void 0 : _a.items) !== null && _b !== void 0 ? _b : []).map(el => {
el.date_updated = new Date(el.date_updated);
let itemInCache = this.cache.get(el.key);
if (itemInCache) {
this._handleItemMutated(el.key, el.url, el.last_event_id, el.revision, el.data, el.date_updated, el.date_expires, false, true);
}
else {
this.cache.store(el.key, new syncmapitem.SyncMapItem(el), el.last_event_id);
}
return this.cache.get(el.key);
this._handleItemMutated(el.key, el.url, el.last_event_id, el.revision, el.data, el.date_updated, el.date_expires, false, true);
return this.cache.getValue(el.key);
});
const meta = response.body.meta;
return new paginator.Paginator(items, pageToken => this.queryItems({ pageToken }), meta.previous_token, meta.next_token);
return this.makePaginator(items, args, !!meta.previous_token, !!meta.next_token);
}
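
A hedged usage sketch for the map read path above: get() serves from the ItemsCache when the key is present and falls back to a single-item server query otherwise. client.map() is the public opener per the Sync docs; the names are illustrative.

async function readItem(client) {
  const map = await client.map('my-map'); // 'my-map' is a hypothetical map name
  const item = await map.get('user-42');  // a cache hit avoids the network round trip
  console.log(item.key, item.data);       // descriptor fields as used in this diff
  return item;
}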

@@ -324,2 +462,3 @@ async getItems(args) {

update.date_created = new Date(update.date_created);
// update.date_expires = new Date(update.date_expires);
switch (update.type) {

@@ -329,3 +468,3 @@ case 'map_item_added':

{
this._handleItemMutated(update.item_key, update.item_url, update.id, update.item_revision, update.item_data, update.date_created, undefined, // orchestration events do not include date_expires
this._handleItemMutated(update.item_key, update.item_url, update.id, update.item_revision, update.item_data, update.date_created, update.date_expires, // orchestration events include date_expires since June 2020, SP-386
update.type === 'map_item_added', true);

@@ -369,3 +508,3 @@ }

this._updateRootDateUpdated(dateUpdated);
const item = this.cache.get(key);
const item = this.cache.getValue(key);
if (!item) {

@@ -832,3 +971,4 @@ const newItem = new syncmapitem.SyncMapItem({

from: ['string', 'undefined'],
pageSize: [declarativeTypeValidator.custom((value) => [sanitize.isPositiveInteger(value), 'a positive integer']), 'undefined']
pageSize: [declarativeTypeValidator.custom((value) => [sanitize.isPositiveInteger(value), 'a positive integer']), 'undefined'],
order: [declarativeTypeValidator.custom((value) => [value === 'asc' || value === 'desc', `either 'asc' or 'desc'`]), 'undefined']
})

@@ -835,0 +975,0 @@ ]),

@@ -146,2 +146,6 @@ /*

/**
* CachedValue interface.
*/
get cacheKey() { return this.key; }
/**
* @internal

@@ -148,0 +152,0 @@ */

{
"name": "twilio-sync",
"version": "3.2.2",
"version": "4.0.0-canary.100+ffa80bf",
"description": "Twilio Sync client library",

@@ -39,15 +39,15 @@ "main": "./builds/lib.js",

"@babel/runtime": "^7.17.0",
"@twilio/declarative-type-validator": "^0.2.1",
"@twilio/operation-retrier": "^4.0.9",
"@twilio/replay-event-emitter": "^0.3.1",
"@twilio/declarative-type-validator": "~0.2.1",
"@twilio/operation-retrier": "~4.0.9",
"@twilio/replay-event-emitter": "~0.3.1",
"core-js": "^3.17.3",
"iso8601-duration": "=1.2.0",
"lodash-es": "^4.17.21",
"loglevel": "^1.8.0",
"nanoid": "^3.3.4",
"platform": "^1.3.6",
"twilsock": "~0.12.2",
"uuid": "^3.4.0"
"twilsock": "~0.13.0-canary.102+ffa80bf"
},
"devDependencies": {
"@babel/core": "^7.17.0",
"@babel/plugin-transform-runtime": "^7.17.0",
"@babel/preset-env": "^7.16.11",

@@ -62,2 +62,3 @@ "@babel/preset-typescript": "^7.16.7",

"@types/express": "^4.17.7",
"@types/lodash-es": "^4.17.6",
"@types/mocha": "^5.2.7",

@@ -97,3 +98,3 @@ "@types/node": "^12.0.4",

"rollup-plugin-terser": "^7.0.2",
"rollup-plugin-ts": "^2.0.7",
"rollup-plugin-ts": "^3.0.2",
"sinon": "^7.3.2",

@@ -106,3 +107,3 @@ "sinon-chai": "^3.3.0",

"typedoc": "^0.21.4",
"typescript": "^4.3.2",
"typescript": "^4.8.4",
"uglify-es": "^3.3.10",

@@ -127,3 +128,4 @@ "uglify-save-license": "^0.4.1",

"last 2 UCAndroid versions"
]
],
"gitHead": "ffa80bf0c716eee60adc5e02c846f92c08fe78f8"
}

Sorry, the diffs of the remaining files are too big to display or not yet supported.
