workbox-background-sync
Comparing version 5.0.0-alpha.0 to 5.0.0-alpha.1
this.workbox = this.workbox || {};

this.workbox.backgroundSync = (function (exports, WorkboxError_js, logger_js, assert_js, getFriendlyURL_js, DBWrapper_js) {
  'use strict';

  // @ts-ignore
  try {
    self['workbox:background-sync:5.0.0-alpha.1'] && _();
  } catch (e) {}

  /*
    Copyright 2018 Google LLC

    Use of this source code is governed by an MIT-style
    license that can be found in the LICENSE file or at
    https://opensource.org/licenses/MIT.
  */

  const DB_VERSION = 3;
  const DB_NAME = 'workbox-background-sync';
  const OBJECT_STORE_NAME = 'requests';
  const INDEXED_PROP = 'queueName';

  /**
   * A class to manage storing requests from a Queue in IndexedDB,
   * indexed by their queue name for easier access.
   *
   * @private
   */
  class QueueStore {
    /**
     * Associates this instance with a Queue instance, so entries added can be
     * identified by their queue name.
     *
     * @param {string} queueName
     * @private
     */
    constructor(queueName) {
      this._queueName = queueName;
      this._db = new DBWrapper_js.DBWrapper(DB_NAME, DB_VERSION, {
        onupgradeneeded: this._upgradeDb
      });
    }

    /**
     * Appends an entry at the end of the queue.
     *
     * @param {Object} entry
     * @param {Object} entry.requestData
     * @param {number} [entry.timestamp]
     * @param {Object} [entry.metadata]
     * @private
     */
    async pushEntry(entry) {
      {
        assert_js.assert.isType(entry, 'object', {
          moduleName: 'workbox-background-sync',
          className: 'QueueStore',
          funcName: 'pushEntry',
          paramName: 'entry'
        });
        assert_js.assert.isType(entry.requestData, 'object', {
          moduleName: 'workbox-background-sync',
          className: 'QueueStore',
          funcName: 'pushEntry',
          paramName: 'entry.requestData'
        });
      } // Don't specify an ID since one is automatically generated.

      delete entry.id;
      entry.queueName = this._queueName;
      await this._db.add(OBJECT_STORE_NAME, entry);
    }

    /**
     * Prepends an entry at the front of the queue.
     *
     * @param {Object} entry
     * @param {Object} entry.requestData
     * @param {number} [entry.timestamp]
     * @param {Object} [entry.metadata]
     * @private
     */
    async unshiftEntry(entry) {
      {
        assert_js.assert.isType(entry, 'object', {
          moduleName: 'workbox-background-sync',
          className: 'QueueStore',
          funcName: 'unshiftEntry',
          paramName: 'entry'
        });
        assert_js.assert.isType(entry.requestData, 'object', {
          moduleName: 'workbox-background-sync',
          className: 'QueueStore',
          funcName: 'unshiftEntry',
          paramName: 'entry.requestData'
        });
      }

      const [firstEntry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
        count: 1
      });

      if (firstEntry) {
        // Pick an ID one less than the lowest ID in the object store.
        entry.id = firstEntry.id - 1;
      } else {
        // Otherwise let the auto-incrementor assign the ID.
        delete entry.id;
      }

      entry.queueName = this._queueName;
      await this._db.add(OBJECT_STORE_NAME, entry);
    }

    /**
     * Removes and returns the last entry in the queue matching the `queueName`.
     *
     * @return {Promise<Object>}
     * @private
     */
    async popEntry() {
      return this._removeEntry({
        direction: 'prev'
      });
    }

    /**
     * Removes and returns the first entry in the queue matching the `queueName`.
     *
     * @return {Promise<Object>}
     * @private
     */
    async shiftEntry() {
      return this._removeEntry({
        direction: 'next'
      });
    }

    /**
     * Returns all entries in the store matching the `queueName`.
     *
     * @param {Object} options See {@link workbox.backgroundSync.Queue~getAll}
     * @return {Promise<Array<Object>>}
     * @private
     */
    async getAll() {
      return await this._db.getAllMatching(OBJECT_STORE_NAME, {
        index: INDEXED_PROP,
        query: IDBKeyRange.only(this._queueName)
      });
    }

    /**
     * Deletes the entry for the given ID.
     *
     * WARNING: this method does not ensure the deleted entry belongs to this
     * queue (i.e. matches the `queueName`). But this limitation is acceptable
     * as this class is not publicly exposed. An additional check would make
     * this method slower than it needs to be.
     *
     * @private
     * @param {number} id
     */
    async deleteEntry(id) {
      await this._db.delete(OBJECT_STORE_NAME, id);
    }

    /**
     * Removes and returns the first or last entry in the queue (based on the
     * `direction` argument) matching the `queueName`.
     *
     * @return {Promise<Object>}
     * @private
     */
    async _removeEntry({
      direction
    }) {
      const [entry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
        direction,
        index: INDEXED_PROP,
        query: IDBKeyRange.only(this._queueName),
        count: 1
      });

      if (entry) {
        await this.deleteEntry(entry.id);
        return entry;
      }
    }

    /**
     * Upgrades the database given an `upgradeneeded` event.
     *
     * @param {Event} event
     * @private
     */
    _upgradeDb(event) {
      const db = event.target.result;

      if (event.oldVersion > 0 && event.oldVersion < DB_VERSION) {
        if (db.objectStoreNames.contains(OBJECT_STORE_NAME)) {
          db.deleteObjectStore(OBJECT_STORE_NAME);
        }
      }

      const objStore = db.createObjectStore(OBJECT_STORE_NAME, {
        autoIncrement: true,
        keyPath: 'id'
      });
      objStore.createIndex(INDEXED_PROP, INDEXED_PROP, {
        unique: false
      });
    }
  }
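QueueStore is internal to the package, but the following sketch (illustrative only; the queue name and entry values are hypothetical) shows the shape of the records it keeps in the 'requests' object store and how the auto-incrementing `id` key preserves queue order.

// Illustrative sketch only — QueueStore is a private class. Run inside an
// async function in a service worker that has loaded this bundle.
const store = new QueueStore('example-queue');

// Entries are stored as {id, queueName, requestData, timestamp, metadata};
// `id` is assigned by the auto-incrementor, `queueName` by the store itself.
await store.pushEntry({
  requestData: {url: '/api/save', method: 'POST', headers: {}},
  timestamp: Date.now(),
  metadata: {attempt: 1}
});

// unshiftEntry() places an entry first by using `firstEntry.id - 1`, so
// shiftEntry()/popEntry() walk the queue from the front/back respectively.
const oldest = await store.shiftEntry();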
  /*
    Copyright 2018 Google LLC

    Use of this source code is governed by an MIT-style
    license that can be found in the LICENSE file or at
    https://opensource.org/licenses/MIT.
  */

  const serializableProperties = ['method', 'referrer', 'referrerPolicy', 'mode', 'credentials', 'cache', 'redirect', 'integrity', 'keepalive'];

  /**
   * A class to make it easier to serialize and de-serialize requests so they
   * can be stored in IndexedDB.
   *
   * @private
   */
  class StorableRequest {
    /**
     * Converts a Request object to a plain object that can be structured
     * cloned or JSON-stringified.
     *
     * @param {Request} request
     * @return {Promise<StorableRequest>}
     *
     * @private
     */
    static async fromRequest(request) {
      const requestData = {
        url: request.url,
        headers: {}
      }; // Set the body if present.

      if (request.method !== 'GET') {
        // Use ArrayBuffer to support non-text request bodies.
        // NOTE: we can't use Blobs because Safari doesn't support storing
        // Blobs in IndexedDB in some cases:
        // https://github.com/dfahlander/Dexie.js/issues/618#issuecomment-398348457
        requestData.body = await request.clone().arrayBuffer();
      } // Convert the headers from an iterable to an object.

      for (const [key, value] of request.headers.entries()) {
        requestData.headers[key] = value;
      } // Add all other serializable request properties.

      for (const prop of serializableProperties) {
        if (request[prop] !== undefined) {
          requestData[prop] = request[prop];
        }
      }

      return new StorableRequest(requestData);
    }

    /**
     * Accepts an object of request data that can be used to construct a
     * `Request` but can also be stored in IndexedDB.
     *
     * @param {Object} requestData An object of request data that includes the
     *     `url` plus any relevant properties of
     *     [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
     * @private
     */
    constructor(requestData) {
      {
        assert_js.assert.isType(requestData, 'object', {
          moduleName: 'workbox-background-sync',
          className: 'StorableRequest',
          funcName: 'constructor',
          paramName: 'requestData'
        });
        assert_js.assert.isType(requestData.url, 'string', {
          moduleName: 'workbox-background-sync',
          className: 'StorableRequest',
          funcName: 'constructor',
          paramName: 'requestData.url'
        });
      } // If the request's mode is `navigate`, convert it to `same-origin` since
      // navigation requests can't be constructed via script.

      if (requestData.mode === 'navigate') {
        requestData.mode = 'same-origin';
      }

      this._requestData = requestData;
    }

    /**
     * Returns a deep clone of the instance's `_requestData` object.
     *
     * @return {Object}
     *
     * @private
     */
    toObject() {
      const requestData = Object.assign({}, this._requestData);
      requestData.headers = Object.assign({}, this._requestData.headers);

      if (requestData.body) {
        requestData.body = requestData.body.slice(0);
      }

      return requestData;
    }

    /**
     * Converts this instance to a Request.
     *
     * @return {Request}
     *
     * @private
     */
    toRequest() {
      return new Request(this._requestData.url, this._requestData);
    }

    /**
     * Creates and returns a deep clone of the instance.
     *
     * @return {StorableRequest}
     *
     * @private
     */
    clone() {
      return new StorableRequest(this.toObject());
    }
  }
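The class above only keeps data that IndexedDB can structured-clone. A minimal sketch of the round trip it enables (the request URL and body are hypothetical; run inside an async function):

// Sketch: serialize a Request so it can be stored, then rebuild it later.
const original = new Request('/api/comments', {
  method: 'POST',
  headers: {'content-type': 'application/json'},
  body: JSON.stringify({text: 'hello'})
});

// fromRequest() copies the URL, headers, the serializable init properties,
// and (for non-GET requests) the body as an ArrayBuffer.
const storable = await StorableRequest.fromRequest(original);
const plain = storable.toObject();  // plain object, safe to put in IndexedDB

// Later, rebuild an equivalent Request in order to replay it.
const replay = new StorableRequest(plain).toRequest();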
  /*
    Copyright 2018 Google LLC

    Use of this source code is governed by an MIT-style
    license that can be found in the LICENSE file or at
    https://opensource.org/licenses/MIT.
  */

  const TAG_PREFIX = 'workbox-background-sync';
  const MAX_RETENTION_TIME = 60 * 24 * 7; // 7 days in minutes

  const queueNames = new Set();

  /**
   * A class to manage storing failed requests in IndexedDB and retrying them
   * later. All parts of the storing and replaying process are observable via
   * callbacks.
   *
   * @memberof workbox.backgroundSync
   */
  class Queue {
    /**
     * Creates an instance of Queue with the given options
     *
     * @param {string} name The unique name for this queue. This name must be
     *     unique as it's used to register sync events and store requests
     *     in IndexedDB specific to this instance. An error will be thrown if
     *     a duplicate name is detected.
     * @param {Object} [options]
     * @param {Function} [options.onSync] A function that gets invoked whenever
     *     the 'sync' event fires. The function is invoked with an object
     *     containing the `queue` property (referencing this instance), and you
     *     can use the callback to customize the replay behavior of the queue.
     *     When not set the `replayRequests()` method is called.
     *     Note: if the replay fails after a sync event, make sure you throw an
     *     error, so the browser knows to retry the sync event later.
     * @param {number} [options.maxRetentionTime=7 days] The amount of time (in
     *     minutes) a request may be retried. After this amount of time has
     *     passed, the request will be deleted from the queue.
     */
    constructor(name, {
      onSync,
      maxRetentionTime
    } = {}) {
      this._syncInProgress = false;
      this._requestsAddedDuringSync = false; // Ensure the store name is not already being used

      if (queueNames.has(name)) {
        throw new WorkboxError_js.WorkboxError('duplicate-queue-name', {
          name
        });
      } else {
        queueNames.add(name);
      }

      this._name = name;
      this._onSync = onSync || this.replayRequests;
      this._maxRetentionTime = maxRetentionTime || MAX_RETENTION_TIME;
      this._queueStore = new QueueStore(this._name);

      this._addSyncListener();
    }

    /**
     * @return {string}
     */
    get name() {
      return this._name;
    }

    /**
     * Stores the passed request in IndexedDB (with its timestamp and any
     * metadata) at the end of the queue.
     *
     * @param {Object} entry
     * @param {Request} entry.request The request to store in the queue.
     * @param {Object} [entry.metadata] Any metadata you want associated with the
     *     stored request. When requests are replayed you'll have access to this
     *     metadata object in case you need to modify the request beforehand.
     * @param {number} [entry.timestamp] The timestamp (Epoch time in
     *     milliseconds) when the request was first added to the queue. This is
     *     used along with `maxRetentionTime` to remove outdated requests. In
     *     general you don't need to set this value, as it's automatically set
     *     for you (defaulting to `Date.now()`), but you can update it if you
     *     don't want particular requests to expire.
     */
    async pushRequest(entry) {
      {
        assert_js.assert.isType(entry, 'object', {
          moduleName: 'workbox-background-sync',
          className: 'Queue',
          funcName: 'pushRequest',
          paramName: 'entry'
        });
        assert_js.assert.isInstance(entry.request, Request, {
          moduleName: 'workbox-background-sync',
          className: 'Queue',
          funcName: 'pushRequest',
          paramName: 'entry.request'
        });
      }

      await this._addRequest(entry, 'push');
    }

    /**
     * Stores the passed request in IndexedDB (with its timestamp and any
     * metadata) at the beginning of the queue.
     *
     * @param {Object} entry
     * @param {Request} entry.request The request to store in the queue.
     * @param {Object} [entry.metadata] Any metadata you want associated with the
     *     stored request. When requests are replayed you'll have access to this
     *     metadata object in case you need to modify the request beforehand.
     * @param {number} [entry.timestamp] The timestamp (Epoch time in
     *     milliseconds) when the request was first added to the queue. This is
     *     used along with `maxRetentionTime` to remove outdated requests. In
     *     general you don't need to set this value, as it's automatically set
     *     for you (defaulting to `Date.now()`), but you can update it if you
     *     don't want particular requests to expire.
     */
    async unshiftRequest(entry) {
      {
        assert_js.assert.isType(entry, 'object', {
          moduleName: 'workbox-background-sync',
          className: 'Queue',
          funcName: 'unshiftRequest',
          paramName: 'entry'
        });
        assert_js.assert.isInstance(entry.request, Request, {
          moduleName: 'workbox-background-sync',
          className: 'Queue',
          funcName: 'unshiftRequest',
          paramName: 'entry.request'
        });
      }

      await this._addRequest(entry, 'unshift');
    }

    /**
     * Removes and returns the last request in the queue (along with its
     * timestamp and any metadata). The returned object takes the form:
     * `{request, timestamp, metadata}`.
     *
     * @return {Promise<Object>}
     */
    async popRequest() {
      return this._removeRequest('pop');
    }

    /**
     * Removes and returns the first request in the queue (along with its
     * timestamp and any metadata). The returned object takes the form:
     * `{request, timestamp, metadata}`.
     *
     * @return {Promise<Object>}
     */
    async shiftRequest() {
      return this._removeRequest('shift');
    }

    /**
     * Returns all the entries that have not expired (per `maxRetentionTime`).
     * Any expired entries are removed from the queue.
     *
     * @return {Promise<Array<Object>>}
     */
    async getAll() {
      const allEntries = await this._queueStore.getAll();
      const now = Date.now();
      const unexpiredEntries = [];

      for (const entry of allEntries) {
        // Ignore requests older than maxRetentionTime. Call this function
        // recursively until an unexpired request is found.
        const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;

        if (now - entry.timestamp > maxRetentionTimeInMs) {
          await this._queueStore.deleteEntry(entry.id);
        } else {
          unexpiredEntries.push(convertEntry(entry));
        }
      }

      return unexpiredEntries;
    }

    /**
     * Adds the entry to the QueueStore and registers for a sync event.
     *
     * @param {Object} entry
     * @param {Request} entry.request
     * @param {Object} [entry.metadata]
     * @param {number} [entry.timestamp=Date.now()]
     * @param {string} operation ('push' or 'unshift')
     * @private
     */
    async _addRequest({
      request,
      metadata,
      timestamp = Date.now()
    }, operation) {
      const storableRequest = await StorableRequest.fromRequest(request.clone());
      const entry = {
        requestData: storableRequest.toObject(),
        timestamp
      }; // Only include metadata if it's present.

      if (metadata) {
        entry.metadata = metadata;
      }

      await this._queueStore[`${operation}Entry`](entry);

      {
        logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(request.url)}' has ` + `been added to background sync queue '${this._name}'.`);
      } // Don't register for a sync if we're in the middle of a sync. Instead,
      // we wait until the sync is complete and call register if
      // `this._requestsAddedDuringSync` is true.

      if (this._syncInProgress) {
        this._requestsAddedDuringSync = true;
      } else {
        await this.registerSync();
      }
    }

    /**
     * Removes and returns the first or last (depending on `operation`) entry
     * from the QueueStore that's not older than the `maxRetentionTime`.
     *
     * @param {string} operation ('pop' or 'shift')
     * @return {Object|undefined}
     * @private
     */
    async _removeRequest(operation) {
      const now = Date.now();
      const entry = await this._queueStore[`${operation}Entry`]();

      if (entry) {
        // Ignore requests older than maxRetentionTime. Call this function
        // recursively until an unexpired request is found.
        const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;

        if (now - entry.timestamp > maxRetentionTimeInMs) {
          return this._removeRequest(operation);
        }

        return convertEntry(entry);
      } else {
        return undefined;
      }
    }

    /**
     * Loops through each request in the queue and attempts to re-fetch it.
     * If any request fails to re-fetch, it's put back in the same position in
     * the queue (which registers a retry for the next sync event).
     */
    async replayRequests() {
      let entry;

      while (entry = await this.shiftRequest()) {
        try {
          await fetch(entry.request.clone());

          if ("dev" !== 'production') {
            logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(entry.request.url)}'` + `has been replayed in queue '${this._name}'`);
          }
        } catch (error) {
          await this.unshiftRequest(entry);

          {
            logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(entry.request.url)}'` + `failed to replay, putting it back in queue '${this._name}'`);
          }

          throw new WorkboxError_js.WorkboxError('queue-replay-failed', {
            name: this._name
          });
        }
      }

      {
        logger_js.logger.log(`All requests in queue '${this.name}' have successfully ` + `replayed; the queue is now empty!`);
      }
    }

    /**
     * Registers a sync event with a tag unique to this instance.
     */
    async registerSync() {
      if ('sync' in self.registration) {
        try {
          await self.registration.sync.register(`${TAG_PREFIX}:${this._name}`);
        } catch (err) {
          // This means the registration failed for some reason, possibly due to
          // the user disabling it.
          {
            logger_js.logger.warn(`Unable to register sync event for '${this._name}'.`, err);
          }
        }
      }
    }

    /**
     * In sync-supporting browsers, this adds a listener for the sync event.
     * In non-sync-supporting browsers, this will retry the queue on service
     * worker startup.
     *
     * @private
     */
    _addSyncListener() {
      if ('sync' in self.registration) {
        self.addEventListener('sync', event => {
          if (event.tag === `${TAG_PREFIX}:${this._name}`) {
            {
              logger_js.logger.log(`Background sync for tag '${event.tag}'` + `has been received`);
            }

            const syncComplete = async () => {
              this._syncInProgress = true;
              let syncError;

              try {
                await this._onSync({
                  queue: this
                });
              } catch (error) {
                syncError = error; // Rethrow the error. Note: the logic in the finally clause
                // will run before this gets rethrown.

                throw syncError;
              } finally {
                // New items may have been added to the queue during the sync,
                // so we need to register for a new sync if that's happened...
                // Unless there was an error during the sync, in which
                // case the browser will automatically retry later, as long
                // as `event.lastChance` is not true.
                if (this._requestsAddedDuringSync && !(syncError && !event.lastChance)) {
                  await this.registerSync();
                }

                this._syncInProgress = false;
                this._requestsAddedDuringSync = false;
              }
            };

            event.waitUntil(syncComplete());
          }
        });
      } else {
        {
          logger_js.logger.log(`Background sync replaying without background sync event`);
        } // If the browser doesn't support background sync, retry
        // every time the service worker starts up as a fallback.

        this._onSync({
          queue: this
        });
      }
    }

    /**
     * Returns the set of queue names. This is primarily used to reset the list
     * of queue names in tests.
     *
     * @return {Set}
     *
     * @private
     */
    static get _queueNames() {
      return queueNames;
    }
  }

  /**
   * Converts a QueueStore entry into the format exposed by Queue. This entails
   * converting the request data into a real request and omitting the `id` and
   * `queueName` properties.
   *
   * @param {Object} queueStoreEntry
   * @return {Object}
   * @private
   */
  const convertEntry = queueStoreEntry => {
    const queueEntry = {
      request: new StorableRequest(queueStoreEntry.requestData).toRequest(),
      timestamp: queueStoreEntry.timestamp
    };

    if (queueStoreEntry.metadata) {
      queueEntry.metadata = queueStoreEntry.metadata;
    }

    return queueEntry;
  };
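Putting the pieces above together, a service worker that has already loaded workbox-core and this bundle (for example via importScripts()) can use Queue roughly as follows; the queue name, route check, and fallback response are illustrative.

// Sketch of typical Queue usage in a service worker.
const queue = new workbox.backgroundSync.Queue('api-requests', {
  maxRetentionTime: 24 * 60, // minutes, i.e. retry for up to one day
  onSync: async ({queue}) => {
    let entry;
    while ((entry = await queue.shiftRequest())) {
      try {
        await fetch(entry.request.clone());
      } catch (error) {
        // Put the request back and rethrow so the browser retries the sync.
        await queue.unshiftRequest(entry);
        throw error;
      }
    }
  }
});

self.addEventListener('fetch', (event) => {
  if (event.request.method === 'POST') {
    event.respondWith(
      fetch(event.request.clone()).catch(async () => {
        await queue.pushRequest({request: event.request});
        return new Response('Queued for background sync', {status: 202});
      })
    );
  }
});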
  /*
    Copyright 2018 Google LLC

    Use of this source code is governed by an MIT-style
    license that can be found in the LICENSE file or at
    https://opensource.org/licenses/MIT.
  */

  /**
   * A class implementing the `fetchDidFail` lifecycle callback. This makes it
   * easier to add failed requests to a background sync Queue.
   *
   * @memberof workbox.backgroundSync
   */
  class Plugin {
    /**
     * @param {string} name See the [Queue]{@link workbox.backgroundSync.Queue}
     *     documentation for parameter details.
     * @param {Object} [options] See the
     *     [Queue]{@link workbox.backgroundSync.Queue} documentation for
     *     parameter details.
     */
    constructor(name, options) {
      /**
       * @param {Object} options
       * @param {Request} options.request
       * @private
       */
      this.fetchDidFail = async ({
        request
      }) => {
        await this._queue.pushRequest({
          request
        });
      };

      this._queue = new Queue(name, options);
    }
  }

  /*
    Copyright 2018 Google LLC

    Use of this source code is governed by an MIT-style
    license that can be found in the LICENSE file or at
    https://opensource.org/licenses/MIT.
  */

  exports.Queue = Queue;
  exports.Plugin = Plugin;

  return exports;

}({}, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private));
//# sourceMappingURL=workbox-background-sync.dev.js.map
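For comparison, the Plugin class above can be attached to a routing strategy instead of calling pushRequest() by hand. A sketch assuming the workbox-routing and workbox-strategies bundles are also loaded; the route pattern is illustrative.

const bgSyncPlugin = new workbox.backgroundSync.Plugin('form-posts', {
  maxRetentionTime: 24 * 60 // minutes
});

// Failed POSTs matching the route are queued via the fetchDidFail callback
// and replayed on the next 'sync' event.
workbox.routing.registerRoute(
  /\/api\//,
  new workbox.strategies.NetworkOnly({plugins: [bgSyncPlugin]}),
  'POST'
);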
@@ -1,2 +0,2 @@
this.workbox=this.workbox||{},this.workbox.backgroundSync=function(t,e,s){"use strict";try{self["workbox:background-sync:5.0.0-alpha.0"]&&_()}catch(t){}const i=3,n="workbox-background-sync",a="requests",r="queueName";class c{constructor(t){this.t=t,this.s=new s.DBWrapper(n,i,{onupgradeneeded:this.i})}async pushEntry(t){delete t.id,t.queueName=this.t,await this.s.add(a,t)}async unshiftEntry(t){const[e]=await this.s.getAllMatching(a,{count:1});e?t.id=e.id-1:delete t.id,t.queueName=this.t,await this.s.add(a,t)}async popEntry(){return this.h({direction:"prev"})}async shiftEntry(){return this.h({direction:"next"})}async getAll(){return await this.s.getAllMatching(a,{index:r,query:IDBKeyRange.only(this.t)})}async deleteEntry(t){await this.s.delete(a,t)}async h({direction:t}){const[e]=await this.s.getAllMatching(a,{direction:t,index:r,query:IDBKeyRange.only(this.t),count:1});if(e)return await this.deleteEntry(e.id),e}i(t){const e=t.target.result;t.oldVersion>0&&t.oldVersion<i&&e.objectStoreNames.contains(a)&&e.deleteObjectStore(a),e.createObjectStore(a,{autoIncrement:!0,keyPath:"id"}).createIndex(r,r,{unique:!1})}}const h=["method","referrer","referrerPolicy","mode","credentials","cache","redirect","integrity","keepalive"];class o{static async fromRequest(t){const e={url:t.url,headers:{}};"GET"!==t.method&&(e.body=await t.clone().arrayBuffer());for(const[s,i]of t.headers.entries())e.headers[s]=i;for(const s of h)void 0!==t[s]&&(e[s]=t[s]);return new o(e)}constructor(t){"navigate"===t.mode&&(t.mode="same-origin"),this.o=t}toObject(){const t=Object.assign({},this.o);return t.headers=Object.assign({},this.o.headers),t.body&&(t.body=t.body.slice(0)),t}toRequest(){return new Request(this.o.url,this.o)}clone(){return new o(this.toObject())}}const u="workbox-background-sync",y=10080,w=new Set;class d{constructor(t,{onSync:s,maxRetentionTime:i}={}){if(w.has(t))throw new e.WorkboxError("duplicate-queue-name",{name:t});w.add(t),this.u=t,this.l=s||this.replayRequests,this.q=i||y,this.m=new c(this.u),this.p()}get name(){return this.u}async pushRequest(t){await this.g(t,"push")}async unshiftRequest(t){await this.g(t,"unshift")}async popRequest(){return this.R("pop")}async shiftRequest(){return this.R("shift")}async getAll(){const t=await this.m.getAll(),e=Date.now(),s=[];for(const i of t){const t=60*this.q*1e3;e-i.timestamp>t?await this.m.deleteEntry(i.id):s.push(f(i))}return s}async g({request:t,metadata:e,timestamp:s=Date.now()},i){const n={requestData:(await o.fromRequest(t.clone())).toObject(),timestamp:s};e&&(n.metadata=e),await this.m[`${i}Entry`](n),this.k?this.D=!0:await this.registerSync()}async R(t){const e=Date.now(),s=await this.m[`${t}Entry`]();if(s){const i=60*this.q*1e3;return e-s.timestamp>i?this.R(t):f(s)}}async replayRequests(){let t;for(;t=await this.shiftRequest();)try{await fetch(t.request.clone())}catch(s){throw await this.unshiftRequest(t),new e.WorkboxError("queue-replay-failed",{name:this.u})}}async registerSync(){if("sync"in registration)try{await registration.sync.register(`${u}:${this.u}`)}catch(t){}}p(){"sync"in registration?self.addEventListener("sync",t=>{if(t.tag===`${u}:${this.u}`){const e=async()=>{let e;this.k=!0;try{await this.l({queue:this})}catch(t){throw e=t}finally{!this.D||e&&!t.lastChance||await this.registerSync(),this.k=!1,this.D=!1}};t.waitUntil(e())}}):this.l({queue:this})}static get _(){return w}}const f=t=>{const e={request:new o(t.requestData).toRequest(),timestamp:t.timestamp};return t.metadata&&(e.metadata=t.metadata),e};return 
t.Queue=d,t.Plugin=class{constructor(...t){this.v=new d(...t),this.fetchDidFail=this.fetchDidFail.bind(this)}async fetchDidFail({request:t}){await this.v.pushRequest({request:t})}},t}({},workbox.core._private,workbox.core._private); | ||
this.workbox=this.workbox||{},this.workbox.backgroundSync=function(t,e,s,i,n){"use strict";try{self["workbox:background-sync:5.0.0-alpha.1"]&&_()}catch(t){}const a=3,r="workbox-background-sync",c="requests",h="queueName";class o{constructor(t){this.t=t,this.s=new n.DBWrapper(r,a,{onupgradeneeded:this.i})}async pushEntry(t){delete t.id,t.queueName=this.t,await this.s.add(c,t)}async unshiftEntry(t){const[e]=await this.s.getAllMatching(c,{count:1});e?t.id=e.id-1:delete t.id,t.queueName=this.t,await this.s.add(c,t)}async popEntry(){return this.h({direction:"prev"})}async shiftEntry(){return this.h({direction:"next"})}async getAll(){return await this.s.getAllMatching(c,{index:h,query:IDBKeyRange.only(this.t)})}async deleteEntry(t){await this.s.delete(c,t)}async h({direction:t}){const[e]=await this.s.getAllMatching(c,{direction:t,index:h,query:IDBKeyRange.only(this.t),count:1});if(e)return await this.deleteEntry(e.id),e}i(t){const e=t.target.result;t.oldVersion>0&&t.oldVersion<a&&e.objectStoreNames.contains(c)&&e.deleteObjectStore(c),e.createObjectStore(c,{autoIncrement:!0,keyPath:"id"}).createIndex(h,h,{unique:!1})}}const u=["method","referrer","referrerPolicy","mode","credentials","cache","redirect","integrity","keepalive"];class y{static async fromRequest(t){const e={url:t.url,headers:{}};"GET"!==t.method&&(e.body=await t.clone().arrayBuffer());for(const[s,i]of t.headers.entries())e.headers[s]=i;for(const s of u)void 0!==t[s]&&(e[s]=t[s]);return new y(e)}constructor(t){"navigate"===t.mode&&(t.mode="same-origin"),this.o=t}toObject(){const t=Object.assign({},this.o);return t.headers=Object.assign({},this.o.headers),t.body&&(t.body=t.body.slice(0)),t}toRequest(){return new Request(this.o.url,this.o)}clone(){return new y(this.toObject())}}const w="workbox-background-sync",f=10080,d=new Set;class l{constructor(t,{onSync:s,maxRetentionTime:i}={}){if(this.u=!1,this.l=!1,d.has(t))throw new e.WorkboxError("duplicate-queue-name",{name:t});d.add(t),this.q=t,this.m=s||this.replayRequests,this.p=i||f,this.g=new o(this.q),this.R()}get name(){return this.q}async pushRequest(t){await this.k(t,"push")}async unshiftRequest(t){await this.k(t,"unshift")}async popRequest(){return this.D("pop")}async shiftRequest(){return this.D("shift")}async getAll(){const t=await this.g.getAll(),e=Date.now(),s=[];for(const i of t){const t=60*this.p*1e3;e-i.timestamp>t?await this.g.deleteEntry(i.id):s.push(q(i))}return s}async k({request:t,metadata:e,timestamp:s=Date.now()},i){const n={requestData:(await y.fromRequest(t.clone())).toObject(),timestamp:s};e&&(n.metadata=e),await this.g[`${i}Entry`](n),this.u?this.l=!0:await this.registerSync()}async D(t){const e=Date.now(),s=await this.g[`${t}Entry`]();if(s){const i=60*this.p*1e3;return e-s.timestamp>i?this.D(t):q(s)}}async replayRequests(){let t;for(;t=await this.shiftRequest();)try{await fetch(t.request.clone())}catch(s){throw await this.unshiftRequest(t),new e.WorkboxError("queue-replay-failed",{name:this.q})}}async registerSync(){if("sync"in self.registration)try{await self.registration.sync.register(`${w}:${this.q}`)}catch(t){}}R(){"sync"in self.registration?self.addEventListener("sync",t=>{if(t.tag===`${w}:${this.q}`){const e=async()=>{let e;this.u=!0;try{await this.m({queue:this})}catch(t){throw e=t}finally{!this.l||e&&!t.lastChance||await this.registerSync(),this.u=!1,this.l=!1}};t.waitUntil(e())}}):this.m({queue:this})}static get _(){return d}}const q=t=>{const e={request:new y(t.requestData).toRequest(),timestamp:t.timestamp};return 
t.metadata&&(e.metadata=t.metadata),e};return t.Plugin=class{constructor(t,e){this.fetchDidFail=(async({request:t})=>{await this.v.pushRequest({request:t})}),this.v=new l(t,e)}},t.Queue=l,t}({},workbox.core._private,workbox.core._private,workbox.core._private,workbox.core._private); | ||
//# sourceMappingURL=workbox-background-sync.prod.js.map
 {
   "name": "workbox-background-sync",
-  "version": "5.0.0-alpha.0",
+  "version": "5.0.0-alpha.1",
   "license": "MIT",
@@ -28,8 +28,9 @@ "author": "Google's Web DevRel Team",
   },
-  "main": "build/workbox-background-sync.prod.js",
+  "main": "index.js",
+  "module": "index.mjs",
+  "types": "index.d.ts",
   "dependencies": {
-    "workbox-core": "^5.0.0-alpha.0"
+    "workbox-core": "^5.0.0-alpha.1"
   },
-  "gitHead": "7f231c04023669bc42d5a939d1359b0867e2efda"
+  "gitHead": "20d2110ddace710a46af06addd4977cae08f5942"
 }
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Unidentified License
License (Experimental): Something that seems like a license was found, but its contents could not be matched with a known license.
Found 3 instances in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Unidentified License
License (Experimental): Something that seems like a license was found, but its contents could not be matched with a known license.
Found 2 instances in 1 package
Updated: workbox-core@^5.0.0-alpha.1