
workbox-background-sync

Package Overview: Dependencies 2 · Maintainers 6 · Versions 97
Comparing version 6.3.0 to 6.4.0

QueueStore.d.ts
_version.js
"use strict";
// @ts-ignore
try {
self['workbox:background-sync:6.2.4'] && _();
self['workbox:background-sync:6.3.0'] && _();
}
catch (e) { }
this.workbox = this.workbox || {};
this.workbox.backgroundSync = (function (exports, WorkboxError_js, logger_js, assert_js, getFriendlyURL_js) {
'use strict';
try {
self['workbox:background-sync:6.3.0'] && _();
} catch (e) {}
function _extends() {
_extends = Object.assign || function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
if (Object.prototype.hasOwnProperty.call(source, key)) {
target[key] = source[key];
}
}
}
return target;
};
return _extends.apply(this, arguments);
}
const instanceOfAny = (object, constructors) => constructors.some(c => object instanceof c);
let idbProxyableTypes;
let cursorAdvanceMethods; // This is a function to prevent it throwing up in node environments.
function getIdbProxyableTypes() {
return idbProxyableTypes || (idbProxyableTypes = [IDBDatabase, IDBObjectStore, IDBIndex, IDBCursor, IDBTransaction]);
} // This is a function to prevent it throwing up in node environments.
function getCursorAdvanceMethods() {
return cursorAdvanceMethods || (cursorAdvanceMethods = [IDBCursor.prototype.advance, IDBCursor.prototype.continue, IDBCursor.prototype.continuePrimaryKey]);
}
const cursorRequestMap = new WeakMap();
const transactionDoneMap = new WeakMap();
const transactionStoreNamesMap = new WeakMap();
const transformCache = new WeakMap();
const reverseTransformCache = new WeakMap();
function promisifyRequest(request) {
const promise = new Promise((resolve, reject) => {
const unlisten = () => {
request.removeEventListener('success', success);
request.removeEventListener('error', error);
};
const success = () => {
resolve(wrap(request.result));
unlisten();
};
const error = () => {
reject(request.error);
unlisten();
};
request.addEventListener('success', success);
request.addEventListener('error', error);
});
promise.then(value => {
// Since cursoring reuses the IDBRequest (*sigh*), we cache it for later retrieval
// (see wrapFunction).
if (value instanceof IDBCursor) {
cursorRequestMap.set(value, request);
} // Catching to avoid "Uncaught Promise exceptions"
}).catch(() => {}); // This mapping exists in reverseTransformCache but doesn't exist in transformCache. This
// is because we create many promises from a single IDBRequest.
reverseTransformCache.set(promise, request);
return promise;
}
function cacheDonePromiseForTransaction(tx) {
// Early bail if we've already created a done promise for this transaction.
if (transactionDoneMap.has(tx)) return;
const done = new Promise((resolve, reject) => {
const unlisten = () => {
tx.removeEventListener('complete', complete);
tx.removeEventListener('error', error);
tx.removeEventListener('abort', error);
};
const complete = () => {
resolve();
unlisten();
};
const error = () => {
reject(tx.error || new DOMException('AbortError', 'AbortError'));
unlisten();
};
tx.addEventListener('complete', complete);
tx.addEventListener('error', error);
tx.addEventListener('abort', error);
}); // Cache it for later retrieval.
transactionDoneMap.set(tx, done);
}
let idbProxyTraps = {
get(target, prop, receiver) {
if (target instanceof IDBTransaction) {
// Special handling for transaction.done.
if (prop === 'done') return transactionDoneMap.get(target); // Polyfill for objectStoreNames because of Edge.
if (prop === 'objectStoreNames') {
return target.objectStoreNames || transactionStoreNamesMap.get(target);
} // Make tx.store return the only store in the transaction, or undefined if there are many.
if (prop === 'store') {
return receiver.objectStoreNames[1] ? undefined : receiver.objectStore(receiver.objectStoreNames[0]);
}
} // Else transform whatever we get back.
return wrap(target[prop]);
},
set(target, prop, value) {
target[prop] = value;
return true;
},
has(target, prop) {
if (target instanceof IDBTransaction && (prop === 'done' || prop === 'store')) {
return true;
}
return prop in target;
}
};
function replaceTraps(callback) {
idbProxyTraps = callback(idbProxyTraps);
}
function wrapFunction(func) {
// Due to expected object equality (which is enforced by the caching in `wrap`), we
// only create one new func per func.
// Edge doesn't support objectStoreNames (booo), so we polyfill it here.
if (func === IDBDatabase.prototype.transaction && !('objectStoreNames' in IDBTransaction.prototype)) {
return function (storeNames, ...args) {
const tx = func.call(unwrap(this), storeNames, ...args);
transactionStoreNamesMap.set(tx, storeNames.sort ? storeNames.sort() : [storeNames]);
return wrap(tx);
};
} // Cursor methods are special, as the behaviour is a little more different to standard IDB. In
// IDB, you advance the cursor and wait for a new 'success' on the IDBRequest that gave you the
// cursor. It's kinda like a promise that can resolve with many values. That doesn't make sense
// with real promises, so each advance methods returns a new promise for the cursor object, or
// undefined if the end of the cursor has been reached.
if (getCursorAdvanceMethods().includes(func)) {
return function (...args) {
// Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use
// the original object.
func.apply(unwrap(this), args);
return wrap(cursorRequestMap.get(this));
};
}
return function (...args) {
// Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use
// the original object.
return wrap(func.apply(unwrap(this), args));
};
}
function transformCachableValue(value) {
if (typeof value === 'function') return wrapFunction(value); // This doesn't return, it just creates a 'done' promise for the transaction,
// which is later returned for transaction.done (see idbObjectHandler).
if (value instanceof IDBTransaction) cacheDonePromiseForTransaction(value);
if (instanceOfAny(value, getIdbProxyableTypes())) return new Proxy(value, idbProxyTraps); // Return the same value back if we're not going to transform it.
return value;
}
function wrap(value) {
// We sometimes generate multiple promises from a single IDBRequest (eg when cursoring), because
// IDB is weird and a single IDBRequest can yield many responses, so these can't be cached.
if (value instanceof IDBRequest) return promisifyRequest(value); // If we've already transformed this value before, reuse the transformed value.
// This is faster, but it also provides object equality.
if (transformCache.has(value)) return transformCache.get(value);
const newValue = transformCachableValue(value); // Not all types are transformed.
// These may be primitive types, so they can't be WeakMap keys.
if (newValue !== value) {
transformCache.set(value, newValue);
reverseTransformCache.set(newValue, value);
}
return newValue;
}
const unwrap = value => reverseTransformCache.get(value);
/**
* Open a database.
*
* @param name Name of the database.
* @param version Schema version.
* @param callbacks Additional callbacks.
*/
function openDB(name, version, {
blocked,
upgrade,
blocking,
terminated
} = {}) {
const request = indexedDB.open(name, version);
const openPromise = wrap(request);
if (upgrade) {
request.addEventListener('upgradeneeded', event => {
upgrade(wrap(request.result), event.oldVersion, event.newVersion, wrap(request.transaction));
});
}
if (blocked) request.addEventListener('blocked', () => blocked());
openPromise.then(db => {
if (terminated) db.addEventListener('close', () => terminated());
if (blocking) db.addEventListener('versionchange', () => blocking());
}).catch(() => {});
return openPromise;
}
const readMethods = ['get', 'getKey', 'getAll', 'getAllKeys', 'count'];
const writeMethods = ['put', 'add', 'delete', 'clear'];
const cachedMethods = new Map();
function getMethod(target, prop) {
if (!(target instanceof IDBDatabase && !(prop in target) && typeof prop === 'string')) {
return;
}
if (cachedMethods.get(prop)) return cachedMethods.get(prop);
const targetFuncName = prop.replace(/FromIndex$/, '');
const useIndex = prop !== targetFuncName;
const isWrite = writeMethods.includes(targetFuncName);
if ( // Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge.
!(targetFuncName in (useIndex ? IDBIndex : IDBObjectStore).prototype) || !(isWrite || readMethods.includes(targetFuncName))) {
return;
}
const method = async function (storeName, ...args) {
// isWrite ? 'readwrite' : undefined gzipps better, but fails in Edge :(
const tx = this.transaction(storeName, isWrite ? 'readwrite' : 'readonly');
let target = tx.store;
if (useIndex) target = target.index(args.shift()); // Must reject if op rejects.
// If it's a write operation, must reject if tx.done rejects.
// Must reject with op rejection first.
// Must resolve with op value.
// Must handle both promises (no unhandled rejections)
return (await Promise.all([target[targetFuncName](...args), isWrite && tx.done]))[0];
};
cachedMethods.set(prop, method);
return method;
}
replaceTraps(oldTraps => _extends({}, oldTraps, {
get: (target, prop, receiver) => getMethod(target, prop) || oldTraps.get(target, prop, receiver),
has: (target, prop) => !!getMethod(target, prop) || oldTraps.has(target, prop)
}));
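The helpers above are a bundled copy of the idb library: openDB() resolves to a Proxy-wrapped IDBDatabase, and getMethod() synthesizes shortcut methods such as db.put() and db.getAllFromIndex() on demand. A minimal usage sketch (illustrative only, not part of this diff):

async function idbWrapperDemo() {
// Open (and on first run create) a database with a single object store.
const db = await openDB('demo-db', 1, {
upgrade(db) {
db.createObjectStore('items', { keyPath: 'id' });
}
});
// These one-shot helpers are generated on the fly by getMethod() via the Proxy traps.
await db.put('items', { id: 1, value: 'hello' });
return db.get('items', 1);
}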
/*
Copyright 2021 Google LLC
Use of this source code is governed by an MIT-style
license that can be found in the LICENSE file or at
https://opensource.org/licenses/MIT.
*/
const DB_VERSION = 3;
const DB_NAME = 'workbox-background-sync';
const REQUEST_OBJECT_STORE_NAME = 'requests';
const QUEUE_NAME_INDEX = 'queueName';
/**
* A class to interact directly with an IndexedDB created specifically to save and
* retrieve QueueStoreEntries. This class encapsulates all the schema details
* to store the representation of a Queue.
*
* @private
*/
class QueueDb {
constructor() {
this._db = null;
}
/**
* Add QueueStoreEntry to underlying db.
*
* @param {UnidentifiedQueueStoreEntry} entry
*/
async addEntry(entry) {
const db = await this.getDb();
const tx = db.transaction(REQUEST_OBJECT_STORE_NAME, 'readwrite', {
durability: 'relaxed'
});
await tx.store.add(entry);
await tx.done;
}
/**
* Returns the first entry id in the ObjectStore.
*
* @return {number | undefined}
*/
async getFirstEntryId() {
const db = await this.getDb();
const cursor = await db.transaction(REQUEST_OBJECT_STORE_NAME).store.openCursor();
return cursor === null || cursor === void 0 ? void 0 : cursor.value.id;
}
/**
* Get all the entries filtered by index
*
* @param queueName
* @return {Promise<QueueStoreEntry[]>}
*/
async getAllEntriesByQueueName(queueName) {
const db = await this.getDb();
const results = await db.getAllFromIndex(REQUEST_OBJECT_STORE_NAME, QUEUE_NAME_INDEX, IDBKeyRange.only(queueName));
return results ? results : new Array();
}
/**
* Returns the number of entries filtered by index
*
* @param queueName
* @return {Promise<number>}
*/
async getEntryCountByQueueName(queueName) {
const db = await this.getDb();
return db.countFromIndex(REQUEST_OBJECT_STORE_NAME, QUEUE_NAME_INDEX, IDBKeyRange.only(queueName));
}
/**
* Deletes a single entry by id.
*
* @param {number} id the id of the entry to be deleted
*/
async deleteEntry(id) {
const db = await this.getDb();
await db.delete(REQUEST_OBJECT_STORE_NAME, id);
}
/**
*
* @param queueName
* @returns {Promise<QueueStoreEntry | undefined>}
*/
async getFirstEntryByQueueName(queueName) {
return await this.getEndEntryFromIndex(IDBKeyRange.only(queueName), 'next');
}
/**
*
* @param queueName
* @returns {Promise<QueueStoreEntry | undefined>}
*/
async getLastEntryByQueueName(queueName) {
return await this.getEndEntryFromIndex(IDBKeyRange.only(queueName), 'prev');
}
/**
* Returns either the first or the last entries, depending on direction.
* Filtered by index.
*
* @param {IDBCursorDirection} direction
* @param {IDBKeyRange} query
* @return {Promise<QueueStoreEntry | undefined>}
* @private
*/
async getEndEntryFromIndex(query, direction) {
const db = await this.getDb();
const cursor = await db.transaction(REQUEST_OBJECT_STORE_NAME).store.index(QUEUE_NAME_INDEX).openCursor(query, direction);
return cursor === null || cursor === void 0 ? void 0 : cursor.value;
}
/**
* Returns an open connection to the database.
*
* @private
*/
async getDb() {
if (!this._db) {
this._db = await openDB(DB_NAME, DB_VERSION, {
upgrade: this._upgradeDb
});
}
return this._db;
}
/**
* Upgrades QueueDB
*
* @param {IDBPDatabase<QueueDBSchema>} db
* @param {number} oldVersion
* @private
*/
_upgradeDb(db, oldVersion) {
if (oldVersion > 0 && oldVersion < DB_VERSION) {
if (db.objectStoreNames.contains(REQUEST_OBJECT_STORE_NAME)) {
db.deleteObjectStore(REQUEST_OBJECT_STORE_NAME);
}
}
const objStore = db.createObjectStore(REQUEST_OBJECT_STORE_NAME, {
autoIncrement: true,
keyPath: 'id'
});
objStore.createIndex(QUEUE_NAME_INDEX, QUEUE_NAME_INDEX, {
unique: false
});
}
}
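All queues share the single IndexedDB database described above ('workbox-background-sync', object store 'requests', index 'queueName'). As an illustrative sketch (not part of this diff), the entries for one queue can be read back with the same openDB helper:

async function dumpQueuedEntries(queueName) {
// DB_NAME, DB_VERSION, REQUEST_OBJECT_STORE_NAME and QUEUE_NAME_INDEX are the constants defined above.
const db = await openDB(DB_NAME, DB_VERSION);
return db.getAllFromIndex(REQUEST_OBJECT_STORE_NAME, QUEUE_NAME_INDEX, IDBKeyRange.only(queueName));
}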
/*
Copyright 2018 Google LLC
Use of this source code is governed by an MIT-style
license that can be found in the LICENSE file or at
https://opensource.org/licenses/MIT.
*/
/**
* A class to manage storing requests from a Queue in IndexedDB,
* indexed by their queue name for easier access.
*
* Most developers will not need to access this class directly;
* it is exposed for advanced use cases.
*/
class QueueStore {
/**
* Associates this instance with a Queue instance, so entries added can be
* identified by their queue name.
*
* @param {string} queueName
* @private
*/
constructor(queueName) {
this._queueName = queueName;
this._queueDb = new QueueDb();
}
/**
* Append an entry last in the queue.
*
* @param {Object} entry
* @param {Object} entry.requestData
* @param {number} [entry.timestamp]
* @param {Object} [entry.metadata]
* @private
*/
async pushEntry(entry) {
{
assert_js.assert.isType(entry, 'object', {
moduleName: 'workbox-background-sync',
className: 'QueueStore',
funcName: 'pushEntry',
paramName: 'entry'
});
assert_js.assert.isType(entry.requestData, 'object', {
moduleName: 'workbox-background-sync',
className: 'QueueStore',
funcName: 'pushEntry',
paramName: 'entry.requestData'
});
} // Don't specify an ID since one is automatically generated.
delete entry.id;
entry.queueName = this._queueName;
await this._queueDb.addEntry(entry);
}
/**
* Prepend an entry first in the queue.
*
* @param {Object} entry
* @param {Object} entry.requestData
* @param {number} [entry.timestamp]
* @param {Object} [entry.metadata]
* @private
*/
async unshiftEntry(entry) {
{
assert_js.assert.isType(entry, 'object', {
moduleName: 'workbox-background-sync',
className: 'QueueStore',
funcName: 'unshiftEntry',
paramName: 'entry'
});
assert_js.assert.isType(entry.requestData, 'object', {
moduleName: 'workbox-background-sync',
className: 'QueueStore',
funcName: 'unshiftEntry',
paramName: 'entry.requestData'
});
}
const firstId = await this._queueDb.getFirstEntryId();
if (firstId) {
// Pick an ID one less than the lowest ID in the object store.
entry.id = firstId - 1;
} else {
// Otherwise let the auto-incrementor assign the ID.
delete entry.id;
}
entry.queueName = this._queueName;
await this._queueDb.addEntry(entry);
}
/**
* Removes and returns the last entry in the queue matching the `queueName`.
*
* @return {Promise<QueueStoreEntry|undefined>}
* @private
*/
async popEntry() {
return this._removeEntry(await this._queueDb.getLastEntryByQueueName(this._queueName));
}
/**
* Removes and returns the first entry in the queue matching the `queueName`.
*
* @return {Promise<QueueStoreEntry|undefined>}
* @private
*/
async shiftEntry() {
return this._removeEntry(await this._queueDb.getFirstEntryByQueueName(this._queueName));
}
/**
* Returns all entries in the store matching the `queueName`.
*
* @param {Object} options See {@link module:workbox-background-sync.Queue~getAll}
* @return {Promise<Array<Object>>}
* @private
*/
async getAll() {
return await this._queueDb.getAllEntriesByQueueName(this._queueName);
}
/**
* Returns the number of entries in the store matching the `queueName`.
*
* @param {Object} options See {@link module:workbox-background-sync.Queue~size}
* @return {Promise<number>}
*/
async size() {
return await this._queueDb.getEntryCountByQueueName(this._queueName);
}
/**
* Deletes the entry for the given ID.
*
* WARNING: this method does not ensure the deleted entry belongs to this
* queue (i.e. matches the `queueName`). But this limitation is acceptable
* as this class is not publicly exposed. An additional check would make
* this method slower than it needs to be.
*
* @private
* @param {number} id
*/
async deleteEntry(id) {
await this._queueDb.deleteEntry(id);
}
/**
* Removes and returns the first or last entry in the queue (based on the
* `direction` argument) matching the `queueName`.
*
* @return {Promise<QueueStoreEntry|undefined>}
* @private
*/
async _removeEntry(entry) {
if (entry) {
await this.deleteEntry(entry.id);
}
return entry;
}
}
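A minimal sketch of how the class above is driven internally by Queue (illustrative only, not part of this diff); pushEntry() and unshiftEntry() accept a plain entry whose requestData typically comes from StorableRequest.toObject():

async function queueStoreDemo() {
const store = new QueueStore('my-queue');
await store.pushEntry({
requestData: { url: '/api/save', method: 'POST', headers: {} },
timestamp: Date.now()
});
const count = await store.size(); // number of entries currently stored for 'my-queue'
const oldest = await store.shiftEntry(); // removes and returns the oldest entry, or undefined
return { count, oldest };
}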
/*
Copyright 2018 Google LLC
Use of this source code is governed by an MIT-style
license that can be found in the LICENSE file or at
https://opensource.org/licenses/MIT.
*/
const serializableProperties = ['method', 'referrer', 'referrerPolicy', 'mode', 'credentials', 'cache', 'redirect', 'integrity', 'keepalive'];
/**
* A class to make it easier to serialize and de-serialize requests so they
* can be stored in IndexedDB.
*
* Most developers will not need to access this class directly;
* it is exposed for advanced use cases.
*/
class StorableRequest {
/**
* Accepts an object of request data that can be used to construct a
* `Request` but can also be stored in IndexedDB.
*
* @param {Object} requestData An object of request data that includes the
* `url` plus any relevant properties of
* [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
* @private
*/
constructor(requestData) {
{
assert_js.assert.isType(requestData, 'object', {
moduleName: 'workbox-background-sync',
className: 'StorableRequest',
funcName: 'constructor',
paramName: 'requestData'
});
assert_js.assert.isType(requestData.url, 'string', {
moduleName: 'workbox-background-sync',
className: 'StorableRequest',
funcName: 'constructor',
paramName: 'requestData.url'
});
} // If the request's mode is `navigate`, convert it to `same-origin` since
// navigation requests can't be constructed via script.
if (requestData['mode'] === 'navigate') {
requestData['mode'] = 'same-origin';
}
this._requestData = requestData;
}
/**
* Converts a Request object to a plain object that can be structured
* cloned or JSON-stringified.
*
* @param {Request} request
* @return {Promise<StorableRequest>}
*
* @private
*/
static async fromRequest(request) {
const requestData = {
url: request.url,
headers: {}
}; // Set the body if present.
if (request.method !== 'GET') {
// Use ArrayBuffer to support non-text request bodies.
// NOTE: we can't use Blobs because Safari doesn't support storing
// Blobs in IndexedDB in some cases:
// https://github.com/dfahlander/Dexie.js/issues/618#issuecomment-398348457
requestData.body = await request.clone().arrayBuffer();
} // Convert the headers from an iterable to an object.
for (const [key, value] of request.headers.entries()) {
requestData.headers[key] = value;
} // Add all other serializable request properties
for (const prop of serializableProperties) {
if (request[prop] !== undefined) {
requestData[prop] = request[prop];
}
}
return new StorableRequest(requestData);
}
/**
* Returns a deep clone of the instance's `_requestData` object.
*
* @return {Object}
*
* @private
*/
toObject() {
const requestData = Object.assign({}, this._requestData);
requestData.headers = Object.assign({}, this._requestData.headers);
if (requestData.body) {
requestData.body = requestData.body.slice(0);
}
return requestData;
}
/**
* Converts this instance to a Request.
*
* @return {Request}
*
* @private
*/
toRequest() {
return new Request(this._requestData.url, this._requestData);
}
/**
* Creates and returns a deep clone of the instance.
*
* @return {StorableRequest}
*
* @private
*/
clone() {
return new StorableRequest(this.toObject());
}
}
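To make the round trip concrete (illustrative sketch, not part of this diff): a Request is flattened into a structured-cloneable object for IndexedDB storage, then rebuilt before replay:

async function storableRequestDemo() {
const original = new Request('/api/save', { method: 'POST', body: JSON.stringify({ a: 1 }) });
const storable = await StorableRequest.fromRequest(original.clone());
const plain = storable.toObject(); // { url, headers, method, body: ArrayBuffer, ... } is safe to store in IndexedDB
const rebuilt = new StorableRequest(plain).toRequest(); // a fresh Request, ready to pass to fetch()
return fetch(rebuilt);
}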
/*
Copyright 2018 Google LLC
Use of this source code is governed by an MIT-style
license that can be found in the LICENSE file or at
https://opensource.org/licenses/MIT.
*/
const TAG_PREFIX = 'workbox-background-sync';
const MAX_RETENTION_TIME = 60 * 24 * 7; // 7 days in minutes
const queueNames = new Set();
/**
* Converts a QueueStore entry into the format exposed by Queue. This entails
* converting the request data into a real request and omitting the `id` and
* `queueName` properties.
*
* @param {UnidentifiedQueueStoreEntry} queueStoreEntry
* @return {Queue}
* @private
*/
const convertEntry = queueStoreEntry => {
const queueEntry = {
request: new StorableRequest(queueStoreEntry.requestData).toRequest(),
timestamp: queueStoreEntry.timestamp
};
if (queueStoreEntry.metadata) {
queueEntry.metadata = queueStoreEntry.metadata;
}
return queueEntry;
};
/**
* A class to manage storing failed requests in IndexedDB and retrying them
* later. All parts of the storing and replaying process are observable via
* callbacks.
*
* @memberof module:workbox-background-sync
*/
class Queue {
/**
* Creates an instance of Queue with the given options
*
* @param {string} name The unique name for this queue. This name must be
* unique as it's used to register sync events and store requests
* in IndexedDB specific to this instance. An error will be thrown if
* a duplicate name is detected.
* @param {Object} [options]
* @param {Function} [options.onSync] A function that gets invoked whenever
* the 'sync' event fires. The function is invoked with an object
* containing the `queue` property (referencing this instance), and you
* can use the callback to customize the replay behavior of the queue.
* When not set the `replayRequests()` method is called.
* Note: if the replay fails after a sync event, make sure you throw an
* error, so the browser knows to retry the sync event later.
* @param {number} [options.maxRetentionTime=7 days] The amount of time (in
* minutes) a request may be retried. After this amount of time has
* passed, the request will be deleted from the queue.
*/
constructor(name, {
onSync,
maxRetentionTime
} = {}) {
this._syncInProgress = false;
this._requestsAddedDuringSync = false; // Ensure the store name is not already being used
if (queueNames.has(name)) {
throw new WorkboxError_js.WorkboxError('duplicate-queue-name', {
name
});
} else {
queueNames.add(name);
}
this._name = name;
this._onSync = onSync || this.replayRequests;
this._maxRetentionTime = maxRetentionTime || MAX_RETENTION_TIME;
this._queueStore = new QueueStore(this._name);
this._addSyncListener();
}
/**
* @return {string}
*/
get name() {
return this._name;
}
/**
* Stores the passed request in IndexedDB (with its timestamp and any
* metadata) at the end of the queue.
*
* @param {QueueEntry} entry
* @param {Request} entry.request The request to store in the queue.
* @param {Object} [entry.metadata] Any metadata you want associated with the
* stored request. When requests are replayed you'll have access to this
* metadata object in case you need to modify the request beforehand.
* @param {number} [entry.timestamp] The timestamp (Epoch time in
* milliseconds) when the request was first added to the queue. This is
* used along with `maxRetentionTime` to remove outdated requests. In
* general you don't need to set this value, as it's automatically set
* for you (defaulting to `Date.now()`), but you can update it if you
* don't want particular requests to expire.
*/
async pushRequest(entry) {
{
assert_js.assert.isType(entry, 'object', {
moduleName: 'workbox-background-sync',
className: 'Queue',
funcName: 'pushRequest',
paramName: 'entry'
});
assert_js.assert.isInstance(entry.request, Request, {
moduleName: 'workbox-background-sync',
className: 'Queue',
funcName: 'pushRequest',
paramName: 'entry.request'
});
}
await this._addRequest(entry, 'push');
}
/**
* Stores the passed request in IndexedDB (with its timestamp and any
* metadata) at the beginning of the queue.
*
* @param {QueueEntry} entry
* @param {Request} entry.request The request to store in the queue.
* @param {Object} [entry.metadata] Any metadata you want associated with the
* stored request. When requests are replayed you'll have access to this
* metadata object in case you need to modify the request beforehand.
* @param {number} [entry.timestamp] The timestamp (Epoch time in
* milliseconds) when the request was first added to the queue. This is
* used along with `maxRetentionTime` to remove outdated requests. In
* general you don't need to set this value, as it's automatically set
* for you (defaulting to `Date.now()`), but you can update it if you
* don't want particular requests to expire.
*/
async unshiftRequest(entry) {
{
assert_js.assert.isType(entry, 'object', {
moduleName: 'workbox-background-sync',
className: 'Queue',
funcName: 'unshiftRequest',
paramName: 'entry'
});
assert_js.assert.isInstance(entry.request, Request, {
moduleName: 'workbox-background-sync',
className: 'Queue',
funcName: 'unshiftRequest',
paramName: 'entry.request'
});
}
await this._addRequest(entry, 'unshift');
}
/**
* Removes and returns the last request in the queue (along with its
* timestamp and any metadata). The returned object takes the form:
* `{request, timestamp, metadata}`.
*
* @return {Promise<QueueEntry | undefined>}
*/
async popRequest() {
return this._removeRequest('pop');
}
/**
* Removes and returns the first request in the queue (along with its
* timestamp and any metadata). The returned object takes the form:
* `{request, timestamp, metadata}`.
*
* @return {Promise<QueueEntry | undefined>}
*/
async shiftRequest() {
return this._removeRequest('shift');
}
/**
* Returns all the entries that have not expired (per `maxRetentionTime`).
* Any expired entries are removed from the queue.
*
* @return {Promise<Array<QueueEntry>>}
*/
async getAll() {
const allEntries = await this._queueStore.getAll();
const now = Date.now();
const unexpiredEntries = [];
for (const entry of allEntries) {
// Ignore requests older than maxRetentionTime. Call this function
// recursively until an unexpired request is found.
const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
if (now - entry.timestamp > maxRetentionTimeInMs) {
await this._queueStore.deleteEntry(entry.id);
} else {
unexpiredEntries.push(convertEntry(entry));
}
}
return unexpiredEntries;
}
/**
* Returns the number of entries present in the queue.
* Note that expired entries (per `maxRetentionTime`) are also included in this count.
*
* @return {Promise<number>}
*/
async size() {
return await this._queueStore.size();
}
/**
* Adds the entry to the QueueStore and registers for a sync event.
*
* @param {Object} entry
* @param {Request} entry.request
* @param {Object} [entry.metadata]
* @param {number} [entry.timestamp=Date.now()]
* @param {string} operation ('push' or 'unshift')
* @private
*/
async _addRequest({
request,
metadata,
timestamp = Date.now()
}, operation) {
const storableRequest = await StorableRequest.fromRequest(request.clone());
const entry = {
requestData: storableRequest.toObject(),
timestamp
}; // Only include metadata if it's present.
if (metadata) {
entry.metadata = metadata;
}
await this._queueStore[`${operation}Entry`](entry);
{
logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(request.url)}' has ` + `been added to background sync queue '${this._name}'.`);
} // Don't register for a sync if we're in the middle of a sync. Instead,
// we wait until the sync is complete and call register if
// `this._requestsAddedDuringSync` is true.
if (this._syncInProgress) {
this._requestsAddedDuringSync = true;
} else {
await this.registerSync();
}
}
/**
* Removes and returns the first or last (depending on `operation`) entry
* from the QueueStore that's not older than the `maxRetentionTime`.
*
* @param {string} operation ('pop' or 'shift')
* @return {Object|undefined}
* @private
*/
async _removeRequest(operation) {
const now = Date.now();
const entry = await this._queueStore[`${operation}Entry`]();
if (entry) {
// Ignore requests older than maxRetentionTime. Call this function
// recursively until an unexpired request is found.
const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
if (now - entry.timestamp > maxRetentionTimeInMs) {
return this._removeRequest(operation);
}
return convertEntry(entry);
} else {
return undefined;
}
}
/**
* Loops through each request in the queue and attempts to re-fetch it.
* If any request fails to re-fetch, it's put back in the same position in
* the queue (which registers a retry for the next sync event).
*/
async replayRequests() {
let entry;
while (entry = await this.shiftRequest()) {
try {
await fetch(entry.request.clone());
if ("dev" !== 'production') {
logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(entry.request.url)}' ` + `has been replayed in queue '${this._name}'`);
}
} catch (error) {
await this.unshiftRequest(entry);
{
logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(entry.request.url)}' ` + `failed to replay, putting it back in queue '${this._name}'`);
}
throw new WorkboxError_js.WorkboxError('queue-replay-failed', {
name: this._name
});
}
}
{
logger_js.logger.log(`All requests in queue '${this.name}' have successfully ` + `replayed; the queue is now empty!`);
}
}
/**
* Registers a sync event with a tag unique to this instance.
*/
async registerSync() {
if ('sync' in self.registration) {
try {
await self.registration.sync.register(`${TAG_PREFIX}:${this._name}`);
} catch (err) {
// This means the registration failed for some reason, possibly due to
// the user disabling it.
{
logger_js.logger.warn(`Unable to register sync event for '${this._name}'.`, err);
}
}
}
}
/**
* In sync-supporting browsers, this adds a listener for the sync event.
* In non-sync-supporting browsers, this will retry the queue on service
* worker startup.
*
* @private
*/
_addSyncListener() {
if ('sync' in self.registration) {
self.addEventListener('sync', event => {
if (event.tag === `${TAG_PREFIX}:${this._name}`) {
{
logger_js.logger.log(`Background sync for tag '${event.tag}' ` + `has been received`);
}
const syncComplete = async () => {
this._syncInProgress = true;
let syncError;
try {
await this._onSync({
queue: this
});
} catch (error) {
if (error instanceof Error) {
syncError = error; // Rethrow the error. Note: the logic in the finally clause
// will run before this gets rethrown.
throw syncError;
}
} finally {
// New items may have been added to the queue during the sync,
// so we need to register for a new sync if that's happened...
// Unless there was an error during the sync, in which
// case the browser will automatically retry later, as long
// as `event.lastChance` is not true.
if (this._requestsAddedDuringSync && !(syncError && !event.lastChance)) {
await this.registerSync();
}
this._syncInProgress = false;
this._requestsAddedDuringSync = false;
}
};
event.waitUntil(syncComplete());
}
});
} else {
{
logger_js.logger.log(`Background sync replaying without background sync event`);
} // If the browser doesn't support background sync, retry
// every time the service worker starts up as a fallback.
await this.registerSync();
}
void this._onSync({
queue: this
});
}
}
/**
* Returns the set of queue names. This is primarily used to reset the list
* of queue names in tests.
*
* @return {Set<string>}
*
* @private
*/
this._syncInProgress = false;
this._requestsAddedDuringSync = false;
}
};
event.waitUntil(syncComplete());
}
});
} else {
{
logger_js.logger.log(`Background sync replaying without background sync event`);
} // If the browser doesn't support background sync, retry
// every time the service worker starts up as a fallback.
static get _queueNames() {
return queueNames;
}
}
void this._onSync({
queue: this
});
}
}
/**
* Returns the set of queue names. This is primarily used to reset the list
* of queue names in tests.
*
* @return {Set<string>}
*
* @private
*/
/*
Copyright 2018 Google LLC
Use of this source code is governed by an MIT-style
license that can be found in the LICENSE file or at
https://opensource.org/licenses/MIT.
*/
/**
* A class implementing the `fetchDidFail` lifecycle callback. This makes it
* easier to add failed requests to a background sync Queue.
*
* @memberof module:workbox-background-sync
*/
static get _queueNames() {
return queueNames;
}
}
/*
Copyright 2018 Google LLC
Use of this source code is governed by an MIT-style
license that can be found in the LICENSE file or at
https://opensource.org/licenses/MIT.
*/
class BackgroundSyncPlugin {
/**
* A class implementing the `fetchDidFail` lifecycle callback. This makes it
* easier to add failed requests to a background sync Queue.
*
* @memberof module:workbox-background-sync
* @param {string} name See the [Queue]{@link module:workbox-background-sync.Queue}
* documentation for parameter details.
* @param {Object} [options] See the
* [Queue]{@link module:workbox-background-sync.Queue} documentation for
* parameter details.
*/
class BackgroundSyncPlugin {
constructor(name, options) {
/**
* @param {string} name See the [Queue]{@link module:workbox-background-sync.Queue}
* documentation for parameter details.
* @param {Object} [options] See the
* [Queue]{@link module:workbox-background-sync.Queue} documentation for
* parameter details.
* @param {Object} options
* @param {Request} options.request
* @private
*/
constructor(name, options) {
/**
* @param {Object} options
* @param {Request} options.request
* @private
*/
this.fetchDidFail = async ({
this.fetchDidFail = async ({
request
}) => {
await this._queue.pushRequest({
request
}) => {
await this._queue.pushRequest({
request
});
};
});
};
this._queue = new Queue(name, options);
}
this._queue = new Queue(name, options);
}
exports.BackgroundSyncPlugin = BackgroundSyncPlugin;
exports.Queue = Queue;
}
return exports;
exports.BackgroundSyncPlugin = BackgroundSyncPlugin;
exports.Queue = Queue;
exports.QueueStore = QueueStore;
exports.StorableRequest = StorableRequest;
return exports;
}({}, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private));
//# sourceMappingURL=workbox-background-sync.dev.js.map
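
For orientation, a brief usage sketch (not part of the diff) of how the Queue and BackgroundSyncPlugin exported by this bundle are typically wired into a service worker. The route path, queue name, and retention time are illustrative, and workbox-routing plus workbox-strategies are assumed to be installed alongside this package.

import {BackgroundSyncPlugin} from 'workbox-background-sync';
import {registerRoute} from 'workbox-routing';
import {NetworkOnly} from 'workbox-strategies';

// Illustrative queue name and retention time.
const bgSyncPlugin = new BackgroundSyncPlugin('example-api-queue', {
  maxRetentionTime: 24 * 60, // Retry for up to 24 hours (specified in minutes).
});

// Replay failed POSTs to /api/ once connectivity returns.
registerRoute(
  ({url}) => url.pathname.startsWith('/api/'),
  new NetworkOnly({plugins: [bgSyncPlugin]}),
  'POST',
);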

@@ -1,2 +0,2 @@

this.workbox=this.workbox||{},this.workbox.backgroundSync=function(t,e,n,s){"use strict";try{self["workbox:background-sync:6.2.4"]&&_()}catch(t){}function r(){return(r=Object.assign||function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var s in n)Object.prototype.hasOwnProperty.call(n,s)&&(t[s]=n[s])}return t}).apply(this,arguments)}let i,a;const o=new WeakMap,c=new WeakMap,u=new WeakMap,h=new WeakMap,y=new WeakMap;let w={get(t,e,n){if(t instanceof IDBTransaction){if("done"===e)return c.get(t);if("objectStoreNames"===e)return t.objectStoreNames||u.get(t);if("store"===e)return n.objectStoreNames[1]?void 0:n.objectStore(n.objectStoreNames[0])}return d(t[e])},set:(t,e,n)=>(t[e]=n,!0),has:(t,e)=>t instanceof IDBTransaction&&("done"===e||"store"===e)||e in t};function f(t){return t!==IDBDatabase.prototype.transaction||"objectStoreNames"in IDBTransaction.prototype?(a||(a=[IDBCursor.prototype.advance,IDBCursor.prototype.continue,IDBCursor.prototype.continuePrimaryKey])).includes(t)?function(...e){return t.apply(m(this),e),d(o.get(this))}:function(...e){return d(t.apply(m(this),e))}:function(e,...n){const s=t.call(m(this),e,...n);return u.set(s,e.sort?e.sort():[e]),d(s)}}function l(t){return"function"==typeof t?f(t):(t instanceof IDBTransaction&&function(t){if(c.has(t))return;const e=new Promise(((e,n)=>{const s=()=>{t.removeEventListener("complete",r),t.removeEventListener("error",i),t.removeEventListener("abort",i)},r=()=>{e(),s()},i=()=>{n(t.error||new DOMException("AbortError","AbortError")),s()};t.addEventListener("complete",r),t.addEventListener("error",i),t.addEventListener("abort",i)}));c.set(t,e)}(t),e=t,(i||(i=[IDBDatabase,IDBObjectStore,IDBIndex,IDBCursor,IDBTransaction])).some((t=>e instanceof t))?new Proxy(t,w):t);var e}function d(t){if(t instanceof IDBRequest)return function(t){const e=new Promise(((e,n)=>{const s=()=>{t.removeEventListener("success",r),t.removeEventListener("error",i)},r=()=>{e(d(t.result)),s()},i=()=>{n(t.error),s()};t.addEventListener("success",r),t.addEventListener("error",i)}));return e.then((e=>{e instanceof IDBCursor&&o.set(e,t)})).catch((()=>{})),y.set(e,t),e}(t);if(h.has(t))return h.get(t);const e=l(t);return e!==t&&(h.set(t,e),y.set(e,t)),e}const m=t=>y.get(t);const p=["get","getKey","getAll","getAllKeys","count"],g=["put","add","delete","clear"],D=new Map;function b(t,e){if(!(t instanceof IDBDatabase)||e in t||"string"!=typeof e)return;if(D.get(e))return D.get(e);const n=e.replace(/FromIndex$/,""),s=e!==n,r=g.includes(n);if(!(n in(s?IDBIndex:IDBObjectStore).prototype)||!r&&!p.includes(n))return;const i=async function(t,...e){const i=this.transaction(t,r?"readwrite":"readonly");let a=i.store;return s&&(a=a.index(e.shift())),(await Promise.all([a[n](...e),r&&i.done]))[0]};return D.set(e,i),i}w=(t=>r({},t,{get:(e,n,s)=>b(e,n)||t.get(e,n,s),has:(e,n)=>!!b(e,n)||t.has(e,n)}))(w);const I="requests",B="queueName";class q{constructor(){this.t=null}async addEntry(t){const e=(await this.getDb()).transaction(I,"readwrite",{durability:"relaxed"});await e.store.add(t),await e.done}async getFirstEntryId(){const t=await this.getDb(),e=await t.transaction(I).store.openCursor();return null==e?void 0:e.value.id}async getAllEntriesByQueueName(t){const e=await this.getDb(),n=await e.getAllFromIndex(I,B,IDBKeyRange.only(t));return n||new Array}async deleteEntry(t){const e=await this.getDb();await e.delete(I,t)}async getFirstEntryByQueueName(t){return await this.getEndEntryFromIndex(IDBKeyRange.only(t),"next")}async getLastEntryByQueueName(t){return await 
this.getEndEntryFromIndex(IDBKeyRange.only(t),"prev")}async getEndEntryFromIndex(t,e){const n=await this.getDb(),s=await n.transaction(I).store.index(B).openCursor(t,e);return null==s?void 0:s.value}async getDb(){return this.t||(this.t=await function(t,e,{blocked:n,upgrade:s,blocking:r,terminated:i}={}){const a=indexedDB.open(t,e),o=d(a);return s&&a.addEventListener("upgradeneeded",(t=>{s(d(a.result),t.oldVersion,t.newVersion,d(a.transaction))})),n&&a.addEventListener("blocked",(()=>n())),o.then((t=>{i&&t.addEventListener("close",(()=>i())),r&&t.addEventListener("versionchange",(()=>r()))})).catch((()=>{})),o}("workbox-background-sync",3,{upgrade:this.i})),this.t}i(t,e){e>0&&e<3&&t.objectStoreNames.contains(I)&&t.deleteObjectStore(I);t.createObjectStore(I,{autoIncrement:!0,keyPath:"id"}).createIndex(B,B,{unique:!1})}}class E{constructor(t){this.o=t,this.u=new q}async pushEntry(t){delete t.id,t.queueName=this.o,await this.u.addEntry(t)}async unshiftEntry(t){const e=await this.u.getFirstEntryId();e?t.id=e-1:delete t.id,t.queueName=this.o,await this.u.addEntry(t)}async popEntry(){return this.h(await this.u.getLastEntryByQueueName(this.o))}async shiftEntry(){return this.h(await this.u.getFirstEntryByQueueName(this.o))}async getAll(){return await this.u.getAllEntriesByQueueName(this.o)}async deleteEntry(t){await this.u.deleteEntry(t)}async h(t){return t&&await this.deleteEntry(t.id),t}}const k=["method","referrer","referrerPolicy","mode","credentials","cache","redirect","integrity","keepalive"];class x{constructor(t){"navigate"===t.mode&&(t.mode="same-origin"),this.l=t}static async fromRequest(t){const e={url:t.url,headers:{}};"GET"!==t.method&&(e.body=await t.clone().arrayBuffer());for(const[n,s]of t.headers.entries())e.headers[n]=s;for(const n of k)void 0!==t[n]&&(e[n]=t[n]);return new x(e)}toObject(){const t=Object.assign({},this.l);return t.headers=Object.assign({},this.l.headers),t.body&&(t.body=t.body.slice(0)),t}toRequest(){return new Request(this.l.url,this.l)}clone(){return new x(this.toObject())}}const R="workbox-background-sync",v=new Set,j=t=>{const e={request:new x(t.requestData).toRequest(),timestamp:t.timestamp};return t.metadata&&(e.metadata=t.metadata),e};class A{constructor(t,{onSync:n,maxRetentionTime:s}={}){if(this.m=!1,this.p=!1,v.has(t))throw new e.WorkboxError("duplicate-queue-name",{name:t});v.add(t),this.g=t,this.D=n||this.replayRequests,this.I=s||10080,this.B=new E(this.g),this.q()}get name(){return this.g}async pushRequest(t){await this.k(t,"push")}async unshiftRequest(t){await this.k(t,"unshift")}async popRequest(){return this.R("pop")}async shiftRequest(){return this.R("shift")}async getAll(){const t=await this.B.getAll(),e=Date.now(),n=[];for(const s of t){const t=60*this.I*1e3;e-s.timestamp>t?await this.B.deleteEntry(s.id):n.push(j(s))}return n}async k({request:t,metadata:e,timestamp:n=Date.now()},s){const r={requestData:(await x.fromRequest(t.clone())).toObject(),timestamp:n};e&&(r.metadata=e),await this.B[s+"Entry"](r),this.m?this.p=!0:await this.registerSync()}async R(t){const e=Date.now(),n=await this.B[t+"Entry"]();if(n){const s=60*this.I*1e3;return e-n.timestamp>s?this.R(t):j(n)}}async replayRequests(){let t;for(;t=await this.shiftRequest();)try{await fetch(t.request.clone())}catch(n){throw await this.unshiftRequest(t),new e.WorkboxError("queue-replay-failed",{name:this.g})}}async registerSync(){if("sync"in self.registration)try{await self.registration.sync.register(`${R}:${this.g}`)}catch(t){}}q(){"sync"in 
self.registration?self.addEventListener("sync",(t=>{if(t.tag===`${R}:${this.g}`){const e=async()=>{let e;this.m=!0;try{await this.D({queue:this})}catch(t){if(t instanceof Error)throw e=t,e}finally{!this.p||e&&!t.lastChance||await this.registerSync(),this.m=!1,this.p=!1}};t.waitUntil(e())}})):this.D({queue:this})}static get v(){return v}}return t.BackgroundSyncPlugin=class{constructor(t,e){this.fetchDidFail=async({request:t})=>{await this.j.pushRequest({request:t})},this.j=new A(t,e)}},t.Queue=A,t}({},workbox.core._private,workbox.core._private,workbox.core._private);
this.workbox=this.workbox||{},this.workbox.backgroundSync=function(t,e,n,s){"use strict";function r(){return(r=Object.assign||function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var s in n)Object.prototype.hasOwnProperty.call(n,s)&&(t[s]=n[s])}return t}).apply(this,arguments)}let a,i;const c=new WeakMap,o=new WeakMap,u=new WeakMap,h=new WeakMap,y=new WeakMap;let w={get(t,e,n){if(t instanceof IDBTransaction){if("done"===e)return o.get(t);if("objectStoreNames"===e)return t.objectStoreNames||u.get(t);if("store"===e)return n.objectStoreNames[1]?void 0:n.objectStore(n.objectStoreNames[0])}return d(t[e])},set:(t,e,n)=>(t[e]=n,!0),has:(t,e)=>t instanceof IDBTransaction&&("done"===e||"store"===e)||e in t};function f(t){return t!==IDBDatabase.prototype.transaction||"objectStoreNames"in IDBTransaction.prototype?(i||(i=[IDBCursor.prototype.advance,IDBCursor.prototype.continue,IDBCursor.prototype.continuePrimaryKey])).includes(t)?function(...e){return t.apply(m(this),e),d(c.get(this))}:function(...e){return d(t.apply(m(this),e))}:function(e,...n){const s=t.call(m(this),e,...n);return u.set(s,e.sort?e.sort():[e]),d(s)}}function l(t){return"function"==typeof t?f(t):(t instanceof IDBTransaction&&function(t){if(o.has(t))return;const e=new Promise(((e,n)=>{const s=()=>{t.removeEventListener("complete",r),t.removeEventListener("error",a),t.removeEventListener("abort",a)},r=()=>{e(),s()},a=()=>{n(t.error||new DOMException("AbortError","AbortError")),s()};t.addEventListener("complete",r),t.addEventListener("error",a),t.addEventListener("abort",a)}));o.set(t,e)}(t),e=t,(a||(a=[IDBDatabase,IDBObjectStore,IDBIndex,IDBCursor,IDBTransaction])).some((t=>e instanceof t))?new Proxy(t,w):t);var e}function d(t){if(t instanceof IDBRequest)return function(t){const e=new Promise(((e,n)=>{const s=()=>{t.removeEventListener("success",r),t.removeEventListener("error",a)},r=()=>{e(d(t.result)),s()},a=()=>{n(t.error),s()};t.addEventListener("success",r),t.addEventListener("error",a)}));return e.then((e=>{e instanceof IDBCursor&&c.set(e,t)})).catch((()=>{})),y.set(e,t),e}(t);if(h.has(t))return h.get(t);const e=l(t);return e!==t&&(h.set(t,e),y.set(e,t)),e}const m=t=>y.get(t);const g=["get","getKey","getAll","getAllKeys","count"],p=["put","add","delete","clear"],D=new Map;function b(t,e){if(!(t instanceof IDBDatabase)||e in t||"string"!=typeof e)return;if(D.get(e))return D.get(e);const n=e.replace(/FromIndex$/,""),s=e!==n,r=p.includes(n);if(!(n in(s?IDBIndex:IDBObjectStore).prototype)||!r&&!g.includes(n))return;const a=async function(t,...e){const a=this.transaction(t,r?"readwrite":"readonly");let i=a.store;return s&&(i=i.index(e.shift())),(await Promise.all([i[n](...e),r&&a.done]))[0]};return D.set(e,a),a}w=(t=>r({},t,{get:(e,n,s)=>b(e,n)||t.get(e,n,s),has:(e,n)=>!!b(e,n)||t.has(e,n)}))(w);try{self["workbox:background-sync:6.3.0"]&&_()}catch(t){}const I="requests",B="queueName";class q{constructor(){this.t=null}async addEntry(t){const e=(await this.getDb()).transaction(I,"readwrite",{durability:"relaxed"});await e.store.add(t),await e.done}async getFirstEntryId(){const t=await this.getDb(),e=await t.transaction(I).store.openCursor();return null==e?void 0:e.value.id}async getAllEntriesByQueueName(t){const e=await this.getDb(),n=await e.getAllFromIndex(I,B,IDBKeyRange.only(t));return n||new Array}async getEntryCountByQueueName(t){return(await this.getDb()).countFromIndex(I,B,IDBKeyRange.only(t))}async deleteEntry(t){const e=await this.getDb();await e.delete(I,t)}async getFirstEntryByQueueName(t){return await 
this.getEndEntryFromIndex(IDBKeyRange.only(t),"next")}async getLastEntryByQueueName(t){return await this.getEndEntryFromIndex(IDBKeyRange.only(t),"prev")}async getEndEntryFromIndex(t,e){const n=await this.getDb(),s=await n.transaction(I).store.index(B).openCursor(t,e);return null==s?void 0:s.value}async getDb(){return this.t||(this.t=await function(t,e,{blocked:n,upgrade:s,blocking:r,terminated:a}={}){const i=indexedDB.open(t,e),c=d(i);return s&&i.addEventListener("upgradeneeded",(t=>{s(d(i.result),t.oldVersion,t.newVersion,d(i.transaction))})),n&&i.addEventListener("blocked",(()=>n())),c.then((t=>{a&&t.addEventListener("close",(()=>a())),r&&t.addEventListener("versionchange",(()=>r()))})).catch((()=>{})),c}("workbox-background-sync",3,{upgrade:this.i})),this.t}i(t,e){e>0&&e<3&&t.objectStoreNames.contains(I)&&t.deleteObjectStore(I);t.createObjectStore(I,{autoIncrement:!0,keyPath:"id"}).createIndex(B,B,{unique:!1})}}class E{constructor(t){this.o=t,this.u=new q}async pushEntry(t){delete t.id,t.queueName=this.o,await this.u.addEntry(t)}async unshiftEntry(t){const e=await this.u.getFirstEntryId();e?t.id=e-1:delete t.id,t.queueName=this.o,await this.u.addEntry(t)}async popEntry(){return this.h(await this.u.getLastEntryByQueueName(this.o))}async shiftEntry(){return this.h(await this.u.getFirstEntryByQueueName(this.o))}async getAll(){return await this.u.getAllEntriesByQueueName(this.o)}async size(){return await this.u.getEntryCountByQueueName(this.o)}async deleteEntry(t){await this.u.deleteEntry(t)}async h(t){return t&&await this.deleteEntry(t.id),t}}const k=["method","referrer","referrerPolicy","mode","credentials","cache","redirect","integrity","keepalive"];class R{constructor(t){"navigate"===t.mode&&(t.mode="same-origin"),this.l=t}static async fromRequest(t){const e={url:t.url,headers:{}};"GET"!==t.method&&(e.body=await t.clone().arrayBuffer());for(const[n,s]of t.headers.entries())e.headers[n]=s;for(const n of k)void 0!==t[n]&&(e[n]=t[n]);return new R(e)}toObject(){const t=Object.assign({},this.l);return t.headers=Object.assign({},this.l.headers),t.body&&(t.body=t.body.slice(0)),t}toRequest(){return new Request(this.l.url,this.l)}clone(){return new R(this.toObject())}}const x="workbox-background-sync",v=new Set,j=t=>{const e={request:new R(t.requestData).toRequest(),timestamp:t.timestamp};return t.metadata&&(e.metadata=t.metadata),e};class A{constructor(t,{onSync:n,maxRetentionTime:s}={}){if(this.m=!1,this.g=!1,v.has(t))throw new e.WorkboxError("duplicate-queue-name",{name:t});v.add(t),this.p=t,this.D=n||this.replayRequests,this.I=s||10080,this.B=new E(this.p),this.q()}get name(){return this.p}async pushRequest(t){await this.k(t,"push")}async unshiftRequest(t){await this.k(t,"unshift")}async popRequest(){return this.R("pop")}async shiftRequest(){return this.R("shift")}async getAll(){const t=await this.B.getAll(),e=Date.now(),n=[];for(const s of t){const t=60*this.I*1e3;e-s.timestamp>t?await this.B.deleteEntry(s.id):n.push(j(s))}return n}async size(){return await this.B.size()}async k({request:t,metadata:e,timestamp:n=Date.now()},s){const r={requestData:(await R.fromRequest(t.clone())).toObject(),timestamp:n};e&&(r.metadata=e),await this.B[s+"Entry"](r),this.m?this.g=!0:await this.registerSync()}async R(t){const e=Date.now(),n=await this.B[t+"Entry"]();if(n){const s=60*this.I*1e3;return e-n.timestamp>s?this.R(t):j(n)}}async replayRequests(){let t;for(;t=await this.shiftRequest();)try{await fetch(t.request.clone())}catch(n){throw await this.unshiftRequest(t),new 
e.WorkboxError("queue-replay-failed",{name:this.p})}}async registerSync(){if("sync"in self.registration)try{await self.registration.sync.register(`${x}:${this.p}`)}catch(t){}}q(){"sync"in self.registration?self.addEventListener("sync",(t=>{if(t.tag===`${x}:${this.p}`){const e=async()=>{let e;this.m=!0;try{await this.D({queue:this})}catch(t){if(t instanceof Error)throw e=t,e}finally{!this.g||e&&!t.lastChance||await this.registerSync(),this.m=!1,this.g=!1}};t.waitUntil(e())}})):this.D({queue:this})}static get v(){return v}}return t.BackgroundSyncPlugin=class{constructor(t,e){this.fetchDidFail=async({request:t})=>{await this.j.pushRequest({request:t})},this.j=new A(t,e)}},t.Queue=A,t.QueueStore=E,t.StorableRequest=R,t}({},workbox.core._private,workbox.core._private,workbox.core._private);
//# sourceMappingURL=workbox-background-sync.prod.js.map

@@ -0,7 +1,25 @@

import { BackgroundSyncPlugin } from './BackgroundSyncPlugin.js';
import { Queue, QueueOptions } from './Queue.js';
import { BackgroundSyncPlugin } from './BackgroundSyncPlugin.js';
import { QueueStore } from './QueueStore.js';
import { StorableRequest } from './StorableRequest.js';
import './_version.js';
interface SyncManager {
getTags(): Promise<string[]>;
register(tag: string): Promise<void>;
}
declare global {
interface ServiceWorkerRegistration {
readonly sync: SyncManager;
}
interface SyncEvent extends ExtendableEvent {
readonly lastChance: boolean;
readonly tag: string;
}
interface ServiceWorkerGlobalScopeEventMap {
sync: SyncEvent;
}
}
/**
* @module workbox-background-sync
*/
export { BackgroundSyncPlugin, Queue, QueueOptions, };
export { BackgroundSyncPlugin, Queue, QueueOptions, QueueStore, StorableRequest };

@@ -8,4 +8,6 @@ /*

*/
import { BackgroundSyncPlugin } from './BackgroundSyncPlugin.js';
import { Queue } from './Queue.js';
import { BackgroundSyncPlugin } from './BackgroundSyncPlugin.js';
import { QueueStore } from './QueueStore.js';
import { StorableRequest } from './StorableRequest.js';
import './_version.js';

@@ -15,2 +17,2 @@ /**

*/
export { BackgroundSyncPlugin, Queue, };
export { BackgroundSyncPlugin, Queue, QueueStore, StorableRequest };
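
As a rough sketch of the advanced use case these newly exported classes enable; the queue name and URL are illustrative, and the entry shape follows the pushEntry() and size() signatures shown in the QueueStore hunks further down.

import {QueueStore, StorableRequest} from 'workbox-background-sync';

// Illustrative queue name; intended to run in a service worker context.
const store = new QueueStore('example-api-queue');

async function persistRequest(request) {
  // Serialize the Request into a plain object that IndexedDB can store.
  const storable = await StorableRequest.fromRequest(request.clone());
  await store.pushEntry({
    requestData: storable.toObject(),
    timestamp: Date.now(),
  });
  // size() is new in this release: the entry count for this queue name.
  return store.size();
}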

@@ -42,2 +42,9 @@ import { RequestData } from './StorableRequest.js';

/**
* Returns the number of entries filtered by index
*
* @param queueName
* @return {Promise<number>}
*/
getEntryCountByQueueName(queueName: string): Promise<number>;
/**
* Deletes a single entry by id.

@@ -44,0 +51,0 @@ *

@@ -32,3 +32,5 @@ /*

const db = await this.getDb();
const tx = db.transaction(REQUEST_OBJECT_STORE_NAME, 'readwrite', { durability: 'relaxed' });
const tx = db.transaction(REQUEST_OBJECT_STORE_NAME, 'readwrite', {
durability: 'relaxed',
});
await tx.store.add(entry);

@@ -44,3 +46,5 @@ await tx.done;

const db = await this.getDb();
const cursor = await db.transaction(REQUEST_OBJECT_STORE_NAME).store.openCursor();
const cursor = await db
.transaction(REQUEST_OBJECT_STORE_NAME)
.store.openCursor();
return cursor === null || cursor === void 0 ? void 0 : cursor.value.id;

@@ -60,2 +64,12 @@ }

/**
* Returns the number of entries filtered by index
*
* @param queueName
* @return {Promise<number>}
*/
async getEntryCountByQueueName(queueName) {
const db = await this.getDb();
return db.countFromIndex(REQUEST_OBJECT_STORE_NAME, QUEUE_NAME_INDEX, IDBKeyRange.only(queueName));
}
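
For context, a minimal sketch of the idb call this helper wraps. The database name ('workbox-background-sync', version 3), object store ('requests'), and index ('queueName') match the constants used elsewhere in this module; the example assumes the database and its schema were already created by QueueDb's upgrade handler.

import {openDB} from 'idb';

async function countQueueEntries(queueName) {
  // Assumes QueueDb has already created the database and its schema.
  const db = await openDB('workbox-background-sync', 3);
  // Count entries in the 'requests' store whose 'queueName' index matches.
  return db.countFromIndex('requests', 'queueName', IDBKeyRange.only(queueName));
}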
/**
* Deletes a single entry by id.

@@ -96,3 +110,4 @@ *

const db = await this.getDb();
const cursor = await db.transaction(REQUEST_OBJECT_STORE_NAME)
const cursor = await db
.transaction(REQUEST_OBJECT_STORE_NAME)
.store.index(QUEUE_NAME_INDEX)

@@ -99,0 +114,0 @@ .openCursor(query, direction);

@@ -0,3 +1,3 @@

import { UnidentifiedQueueStoreEntry, QueueStoreEntry } from './QueueDb.js';
import '../_version.js';
import { UnidentifiedQueueStoreEntry, QueueStoreEntry } from './QueueDb.js';
/**

@@ -7,3 +7,4 @@ * A class to manage storing requests from a Queue in IndexedDB,

*
* @private
* Most developers will not need to access this class directly;
* it is exposed for advanced use cases.
*/

@@ -18,3 +19,2 @@ export declare class QueueStore {

* @param {string} queueName
* @private
*/

@@ -29,3 +29,2 @@ constructor(queueName: string);

* @param {Object} [entry.metadata]
* @private
*/

@@ -40,3 +39,2 @@ pushEntry(entry: UnidentifiedQueueStoreEntry): Promise<void>;

* @param {Object} [entry.metadata]
* @private
*/

@@ -48,3 +46,2 @@ unshiftEntry(entry: UnidentifiedQueueStoreEntry): Promise<void>;

* @return {Promise<QueueStoreEntry|undefined>}
* @private
*/

@@ -56,3 +53,2 @@ popEntry(): Promise<QueueStoreEntry | undefined>;

* @return {Promise<QueueStoreEntry|undefined>}
* @private
*/

@@ -65,6 +61,12 @@ shiftEntry(): Promise<QueueStoreEntry | undefined>;

* @return {Promise<Array<Object>>}
* @private
*/
getAll(): Promise<QueueStoreEntry[]>;
/**
* Returns the number of entries in the store matching the `queueName`.
*
* @param {Object} options See {@link module:workbox-background-sync.Queue~size}
* @return {Promise<number>}
*/
size(): Promise<number>;
/**
* Deletes the entry for the given ID.

@@ -77,3 +79,2 @@ *

*
* @private
* @param {number} id

@@ -80,0 +81,0 @@ */

@@ -9,4 +9,4 @@ /*

import { assert } from 'workbox-core/_private/assert.js';
import { QueueDb, } from './QueueDb.js';
import '../_version.js';
import { QueueDb } from './QueueDb.js';
/**

@@ -16,3 +16,4 @@ * A class to manage storing requests from a Queue in IndexedDB,

*
* @private
* Most developers will not need to access this class directly;
* it is exposed for advanced use cases.
*/

@@ -25,3 +26,2 @@ export class QueueStore {

* @param {string} queueName
* @private
*/

@@ -39,3 +39,2 @@ constructor(queueName) {

* @param {Object} [entry.metadata]
* @private
*/

@@ -69,3 +68,2 @@ async pushEntry(entry) {

* @param {Object} [entry.metadata]
* @private
*/

@@ -103,3 +101,2 @@ async unshiftEntry(entry) {

* @return {Promise<QueueStoreEntry|undefined>}
* @private
*/

@@ -113,3 +110,2 @@ async popEntry() {

* @return {Promise<QueueStoreEntry|undefined>}
* @private
*/

@@ -124,3 +120,2 @@ async shiftEntry() {

* @return {Promise<Array<Object>>}
* @private
*/

@@ -131,2 +126,11 @@ async getAll() {

/**
* Returns the number of entries in the store matching the `queueName`.
*
* @param {Object} options See {@link module:workbox-background-sync.Queue~size}
* @return {Promise<number>}
*/
async size() {
return await this._queueDb.getEntryCountByQueueName(this._queueName);
}
/**
* Deletes the entry for the given ID.

@@ -139,3 +143,2 @@ *

*
* @private
* @param {number} id

@@ -142,0 +145,0 @@ */

@@ -12,3 +12,4 @@ import { MapLikeObject } from 'workbox-core/types.js';

*
* @private
* Most developers will not need to access this class directly;
* it is exposed for advanced use cases.
*/

@@ -23,4 +24,2 @@ declare class StorableRequest {

* @return {Promise<StorableRequest>}
*
* @private
*/

@@ -35,3 +34,2 @@ static fromRequest(request: Request): Promise<StorableRequest>;

* [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
* @private
*/

@@ -43,4 +41,2 @@ constructor(requestData: RequestData);

* @return {Object}
*
* @private
*/

@@ -52,4 +48,2 @@ toObject(): RequestData;

* @return {Request}
*
* @private
*/

@@ -61,4 +55,2 @@ toRequest(): Request;

* @return {StorableRequest}
*
* @private
*/

@@ -65,0 +57,0 @@ clone(): StorableRequest;

@@ -25,3 +25,4 @@ /*

*
* @private
* Most developers will not need to access this class directly;
* it is exposed for advanced use cases.
*/

@@ -36,3 +37,2 @@ class StorableRequest {

* [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
* @private
*/

@@ -67,4 +67,2 @@ constructor(requestData) {

* @return {Promise<StorableRequest>}
*
* @private
*/

@@ -100,4 +98,2 @@ static async fromRequest(request) {

* @return {Object}
*
* @private
*/

@@ -116,4 +112,2 @@ toObject() {

* @return {Request}
*
* @private
*/

@@ -127,4 +121,2 @@ toRequest() {

* @return {StorableRequest}
*
* @private
*/

@@ -131,0 +123,0 @@ clone() {

{
"name": "workbox-background-sync",
"version": "6.3.0",
"version": "6.4.0",
"license": "MIT",

@@ -27,6 +27,6 @@ "author": "Google's Web DevRel Team",

"dependencies": {
"idb": "^6.0.0",
"workbox-core": "6.3.0"
"idb": "^6.1.4",
"workbox-core": "6.4.0"
},
"gitHead": "03055e64014a46f6cc977a3a50ad814c6409d36a"
"gitHead": "cdad230c678a3626c217615d5792eb0f1c77194c"
}

@@ -113,2 +113,9 @@ import './_version.js';

/**
* Returns the number of entries present in the queue.
* Note that expired entries (per `maxRetentionTime`) are also included in this count.
*
* @return {Promise<number>}
*/
size(): Promise<number>;
/**
* Adds the entry to the QueueStore and registers for a sync event.

@@ -123,3 +130,3 @@ *

*/
_addRequest({ request, metadata, timestamp, }: QueueEntry, operation: 'push' | 'unshift'): Promise<void>;
_addRequest({ request, metadata, timestamp }: QueueEntry, operation: 'push' | 'unshift'): Promise<void>;
/**

@@ -126,0 +133,0 @@ * Removes and returns the first or last (depending on `operation`) entry

@@ -196,2 +196,11 @@ /*

/**
* Returns the number of entries present in the queue.
* Note that expired entries (per `maxRetentionTime`) are also included in this count.
*
* @return {Promise<number>}
*/
async size() {
return await this._queueStore.size();
}
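
A short sketch of how the new size() method might be used from a custom onSync callback; the queue name and log message are illustrative.

import {Queue} from 'workbox-background-sync';

const queue = new Queue('example-metrics-queue', {
  onSync: async ({queue}) => {
    // size() also counts entries past maxRetentionTime, so treat it as an
    // upper bound on how many requests replayRequests() will actually send.
    console.log(`Attempting to replay up to ${await queue.size()} request(s)`);
    await queue.replayRequests();
  },
});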
/**
* Adds the entry to the QueueStore and registers for a sync event.

@@ -206,3 +215,3 @@ *

*/
async _addRequest({ request, metadata, timestamp = Date.now(), }, operation) {
async _addRequest({ request, metadata, timestamp = Date.now() }, operation) {
const storableRequest = await StorableRequest.fromRequest(request.clone());

@@ -314,4 +323,3 @@ const entry = {

if (process.env.NODE_ENV !== 'production') {
logger.log(`Background sync for tag '${event.tag}' ` +
`has been received`);
logger.log(`Background sync for tag '${event.tag}' ` + `has been received`);
}

@@ -318,0 +326,0 @@ const syncComplete = async () => {

// @ts-ignore
try{self['workbox:background-sync:6.3.0']&&_()}catch(e){}
try{self['workbox:background-sync:6.4.0']&&_()}catch(e){}

@@ -40,5 +40,5 @@ /*

await this._queue.pushRequest({request});
}
};
}
export {BackgroundSyncPlugin};

@@ -9,14 +9,33 @@ /*

import {BackgroundSyncPlugin} from './BackgroundSyncPlugin.js';
import {Queue, QueueOptions} from './Queue.js';
import {BackgroundSyncPlugin} from './BackgroundSyncPlugin.js';
import {QueueStore} from './QueueStore.js';
import {StorableRequest} from './StorableRequest.js';
import './_version.js';
// See https://github.com/GoogleChrome/workbox/issues/2946
interface SyncManager {
getTags(): Promise<string[]>;
register(tag: string): Promise<void>;
}
declare global {
interface ServiceWorkerRegistration {
readonly sync: SyncManager;
}
interface SyncEvent extends ExtendableEvent {
readonly lastChance: boolean;
readonly tag: string;
}
interface ServiceWorkerGlobalScopeEventMap {
sync: SyncEvent;
}
}
/**
* @module workbox-background-sync
*/
export {
BackgroundSyncPlugin,
Queue,
QueueOptions,
};
export {BackgroundSyncPlugin, Queue, QueueOptions, QueueStore, StorableRequest};

@@ -59,3 +59,5 @@ /*

const db = await this.getDb();
const tx = db.transaction(REQUEST_OBJECT_STORE_NAME, 'readwrite', { durability: 'relaxed' });
const tx = db.transaction(REQUEST_OBJECT_STORE_NAME, 'readwrite', {
durability: 'relaxed',
});
await tx.store.add(entry as QueueStoreEntry);

@@ -72,3 +74,5 @@ await tx.done;

const db = await this.getDb();
const cursor = await db.transaction(REQUEST_OBJECT_STORE_NAME).store.openCursor();
const cursor = await db
.transaction(REQUEST_OBJECT_STORE_NAME)
.store.openCursor();
return cursor?.value.id;

@@ -87,3 +91,7 @@ }

const db = await this.getDb();
const results = await db.getAllFromIndex(REQUEST_OBJECT_STORE_NAME, QUEUE_NAME_INDEX, IDBKeyRange.only(queueName));
const results = await db.getAllFromIndex(
REQUEST_OBJECT_STORE_NAME,
QUEUE_NAME_INDEX,
IDBKeyRange.only(queueName),
);
return results ? results : new Array<QueueStoreEntry>();

@@ -93,2 +101,17 @@ }

/**
* Returns the number of entries filtered by index
*
* @param queueName
* @return {Promise<number>}
*/
async getEntryCountByQueueName(queueName: string): Promise<number> {
const db = await this.getDb();
return db.countFromIndex(
REQUEST_OBJECT_STORE_NAME,
QUEUE_NAME_INDEX,
IDBKeyRange.only(queueName),
);
}
/**
* Deletes a single entry by id.

@@ -108,3 +131,5 @@ *

*/
async getFirstEntryByQueueName(queueName: string): Promise<QueueStoreEntry | undefined> {
async getFirstEntryByQueueName(
queueName: string,
): Promise<QueueStoreEntry | undefined> {
return await this.getEndEntryFromIndex(IDBKeyRange.only(queueName), 'next');

@@ -118,3 +143,5 @@ }

*/
async getLastEntryByQueueName(queueName: string): Promise<QueueStoreEntry | undefined> {
async getLastEntryByQueueName(
queueName: string,
): Promise<QueueStoreEntry | undefined> {
return await this.getEndEntryFromIndex(IDBKeyRange.only(queueName), 'prev');

@@ -138,3 +165,4 @@ }

const cursor = await db.transaction(REQUEST_OBJECT_STORE_NAME)
const cursor = await db
.transaction(REQUEST_OBJECT_STORE_NAME)
.store.index(QUEUE_NAME_INDEX)

@@ -141,0 +169,0 @@ .openCursor(query, direction);

@@ -10,4 +10,8 @@ /*

import {assert} from 'workbox-core/_private/assert.js';
import {
UnidentifiedQueueStoreEntry,
QueueStoreEntry,
QueueDb,
} from './QueueDb.js';
import '../_version.js';
import {UnidentifiedQueueStoreEntry, QueueStoreEntry, QueueDb} from './QueueDb.js';

@@ -18,3 +22,4 @@ /**

*
* @private
* Most developers will not need to access this class directly;
* it is exposed for advanced use cases.
*/

@@ -30,3 +35,2 @@ export class QueueStore {

* @param {string} queueName
* @private
*/

@@ -45,3 +49,2 @@ constructor(queueName: string) {

* @param {Object} [entry.metadata]
* @private
*/

@@ -78,3 +81,2 @@ async pushEntry(entry: UnidentifiedQueueStoreEntry): Promise<void> {

* @param {Object} [entry.metadata]
* @private
*/

@@ -115,6 +117,7 @@ async unshiftEntry(entry: UnidentifiedQueueStoreEntry): Promise<void> {

* @return {Promise<QueueStoreEntry|undefined>}
* @private
*/
async popEntry(): Promise<QueueStoreEntry | undefined> {
return this._removeEntry(await this._queueDb.getLastEntryByQueueName(this._queueName));
return this._removeEntry(
await this._queueDb.getLastEntryByQueueName(this._queueName),
);
}

@@ -126,6 +129,7 @@

* @return {Promise<QueueStoreEntry|undefined>}
* @private
*/
async shiftEntry(): Promise<QueueStoreEntry | undefined> {
return this._removeEntry(await this._queueDb.getFirstEntryByQueueName(this._queueName));
return this._removeEntry(
await this._queueDb.getFirstEntryByQueueName(this._queueName),
);
}

@@ -138,3 +142,2 @@

* @return {Promise<Array<Object>>}
* @private
*/

@@ -146,2 +149,12 @@ async getAll(): Promise<QueueStoreEntry[]> {

/**
* Returns the number of entries in the store matching the `queueName`.
*
* @param {Object} options See {@link module:workbox-background-sync.Queue~size}
* @return {Promise<number>}
*/
async size(): Promise<number> {
return await this._queueDb.getEntryCountByQueueName(this._queueName);
}
/**
* Deletes the entry for the given ID.

@@ -154,3 +167,2 @@ *

*
* @private
* @param {number} id

@@ -169,3 +181,5 @@ */

*/
async _removeEntry(entry?: QueueStoreEntry): Promise<QueueStoreEntry | undefined> {
async _removeEntry(
entry?: QueueStoreEntry,
): Promise<QueueStoreEntry | undefined> {
if (entry) {

@@ -172,0 +186,0 @@ await this.deleteEntry(entry.id);

@@ -13,6 +13,13 @@ /*

type SerializableProperties =
| 'method'
| 'referrer'
| 'referrerPolicy'
| 'mode'
| 'credentials'
| 'cache'
| 'redirect'
| 'integrity'
| 'keepalive';
type SerializableProperties = 'method' | 'referrer' | 'referrerPolicy' | 'mode'
| 'credentials' | 'cache' | 'redirect' | 'integrity' | 'keepalive';
const serializableProperties: SerializableProperties[] = [

@@ -40,3 +47,4 @@ 'method',

*
* @private
* Most developers will not need to access this class directly;
* it is exposed for advanced use cases.
*/

@@ -52,4 +60,2 @@ class StorableRequest {

* @return {Promise<StorableRequest>}
*
* @private
*/

@@ -93,3 +99,2 @@ static async fromRequest(request: Request): Promise<StorableRequest> {

* [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
* @private
*/

@@ -125,4 +130,2 @@ constructor(requestData: RequestData) {

* @return {Object}
*
* @private
*/

@@ -143,4 +146,2 @@ toObject(): RequestData {

* @return {Request}
*
* @private
*/

@@ -155,4 +156,2 @@ toRequest(): Request {

* @return {StorableRequest}
*
* @private
*/

@@ -159,0 +158,0 @@ clone(): StorableRequest {

@@ -26,3 +26,3 @@ /*

interface OnSyncCallback {
(options: OnSyncCallbackOptions): void|Promise<void>;
(options: OnSyncCallbackOptions): void | Promise<void>;
}

@@ -58,3 +58,5 @@

*/
const convertEntry = (queueStoreEntry: UnidentifiedQueueStoreEntry): QueueEntry => {
const convertEntry = (
queueStoreEntry: UnidentifiedQueueStoreEntry,
): QueueEntry => {
const queueEntry: QueueEntry = {

@@ -104,6 +106,3 @@ request: new StorableRequest(queueStoreEntry.requestData).toRequest(),

*/
constructor(name: string, {
onSync,
maxRetentionTime
}: QueueOptions = {}) {
constructor(name: string, {onSync, maxRetentionTime}: QueueOptions = {}) {
// Ensure the store name is not already being used

@@ -249,2 +248,12 @@ if (queueNames.has(name)) {

/**
* Returns the number of entries present in the queue.
* Note that expired entries (per `maxRetentionTime`) are also included in this count.
*
* @return {Promise<number>}
*/
async size(): Promise<number> {
return await this._queueStore.size();
}
/**
* Adds the entry to the QueueStore and registers for a sync event.

@@ -259,7 +268,6 @@ *

*/
async _addRequest({
request,
metadata,
timestamp = Date.now(),
}: QueueEntry, operation: 'push' | 'unshift'): Promise<void> {
async _addRequest(
{request, metadata, timestamp = Date.now()}: QueueEntry,
operation: 'push' | 'unshift',
): Promise<void> {
const storableRequest = await StorableRequest.fromRequest(request.clone());

@@ -276,8 +284,9 @@ const entry: UnidentifiedQueueStoreEntry = {

await this._queueStore[
`${operation}Entry` as 'pushEntry' | 'unshiftEntry'](entry);
await this._queueStore[`${operation}Entry`](entry);
if (process.env.NODE_ENV !== 'production') {
logger.log(`Request for '${getFriendlyURL(request.url)}' has ` +
`been added to background sync queue '${this._name}'.`);
logger.log(
`Request for '${getFriendlyURL(request.url)}' has ` +
`been added to background sync queue '${this._name}'.`,
);
}

@@ -303,6 +312,7 @@

*/
async _removeRequest(operation: 'pop' | 'shift'): Promise<QueueEntry | undefined> {
async _removeRequest(
operation: 'pop' | 'shift',
): Promise<QueueEntry | undefined> {
const now = Date.now();
const entry = await this._queueStore[
`${operation}Entry` as 'popEntry' | 'shiftEntry']();
const entry = await this._queueStore[`${operation}Entry`]();

@@ -335,4 +345,6 @@ if (entry) {

if (process.env.NODE_ENV !== 'production') {
logger.log(`Request for '${getFriendlyURL(entry.request.url)}' ` +
`has been replayed in queue '${this._name}'`);
logger.log(
`Request for '${getFriendlyURL(entry.request.url)}' ` +
`has been replayed in queue '${this._name}'`,
);
}

@@ -343,4 +355,6 @@ } catch (error) {

if (process.env.NODE_ENV !== 'production') {
logger.log(`Request for '${getFriendlyURL(entry.request.url)}' ` +
`failed to replay, putting it back in queue '${this._name}'`);
logger.log(
`Request for '${getFriendlyURL(entry.request.url)}' ` +
`failed to replay, putting it back in queue '${this._name}'`,
);
}

@@ -351,4 +365,6 @@ throw new WorkboxError('queue-replay-failed', {name: this._name});

if (process.env.NODE_ENV !== 'production') {
logger.log(`All requests in queue '${this.name}' have successfully ` +
`replayed; the queue is now empty!`);
logger.log(
`All requests in queue '${this.name}' have successfully ` +
`replayed; the queue is now empty!`,
);
}

@@ -369,3 +385,5 @@ }

logger.warn(
`Unable to register sync event for '${this._name}'.`, err);
`Unable to register sync event for '${this._name}'.`,
err,
);
}

@@ -388,4 +406,5 @@ }

if (process.env.NODE_ENV !== 'production') {
logger.log(`Background sync for tag '${event.tag}' ` +
`has been received`);
logger.log(
`Background sync for tag '${event.tag}' ` + `has been received`,
);
}

@@ -413,4 +432,6 @@

// as `event.lastChance` is not true.
if (this._requestsAddedDuringSync &&
!(syncError && !event.lastChance)) {
if (
this._requestsAddedDuringSync &&
!(syncError && !event.lastChance)
) {
await this.registerSync();

@@ -417,0 +438,0 @@ }

@@ -8,8 +8,4 @@ {

},
"include": [
"src/**/*.ts"
],
"references": [
{ "path": "../workbox-core/" }
]
"include": ["src/**/*.ts"],
"references": [{"path": "../workbox-core/"}]
}

