workbox-expiration
Comparing version 7.0.0 to 7.1.0
@@ -6,6 +6,5 @@ this.workbox = this.workbox || {};
function _extends() {
_extends = Object.assign || function (target) {
_extends = Object.assign ? Object.assign.bind() : function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
@@ -17,6 +16,4 @@ if (Object.prototype.hasOwnProperty.call(source, key)) {
}
return target;
};
return _extends.apply(this, arguments);
@@ -26,15 +23,12 @@ }
const instanceOfAny = (object, constructors) => constructors.some(c => object instanceof c);
let idbProxyableTypes;
let cursorAdvanceMethods; // This is a function to prevent it throwing up in node environments.
let cursorAdvanceMethods;
// This is a function to prevent it throwing up in node environments.
function getIdbProxyableTypes() {
return idbProxyableTypes || (idbProxyableTypes = [IDBDatabase, IDBObjectStore, IDBIndex, IDBCursor, IDBTransaction]);
} // This is a function to prevent it throwing up in node environments.
}
// This is a function to prevent it throwing up in node environments.
function getCursorAdvanceMethods() {
return cursorAdvanceMethods || (cursorAdvanceMethods = [IDBCursor.prototype.advance, IDBCursor.prototype.continue, IDBCursor.prototype.continuePrimaryKey]);
}
const cursorRequestMap = new WeakMap();
@@ -45,3 +39,2 @@ const transactionDoneMap = new WeakMap();
const reverseTransformCache = new WeakMap();
function promisifyRequest(request) {
@@ -53,3 +46,2 @@ const promise = new Promise((resolve, reject) => {
};
const success = () => {
@@ -59,3 +51,2 @@ resolve(wrap(request.result));
};
const error = () => {
@@ -65,3 +56,2 @@ reject(request.error);
};
request.addEventListener('success', success);
@@ -75,11 +65,10 @@ request.addEventListener('error', error);
cursorRequestMap.set(value, request);
} // Catching to avoid "Uncaught Promise exceptions"
}).catch(() => {}); // This mapping exists in reverseTransformCache but doesn't doesn't exist in transformCache. This
}
// Catching to avoid "Uncaught Promise exceptions"
}).catch(() => {});
// This mapping exists in reverseTransformCache but doesn't doesn't exist in transformCache. This
// is because we create many promises from a single IDBRequest.
reverseTransformCache.set(promise, request);
return promise;
}
function cacheDonePromiseForTransaction(tx) {
@@ -94,3 +83,2 @@ // Early bail if we've already created a done promise for this transaction.
};
const complete = () => {
@@ -100,3 +88,2 @@ resolve();
};
const error = () => {
@@ -106,11 +93,9 @@ reject(tx.error || new DOMException('AbortError', 'AbortError'));
};
tx.addEventListener('complete', complete);
tx.addEventListener('error', error);
tx.addEventListener('abort', error);
}); // Cache it for later retrieval.
});
// Cache it for later retrieval.
transactionDoneMap.set(tx, done);
}
let idbProxyTraps = {
@@ -120,18 +105,15 @@ get(target, prop, receiver) {
// Special handling for transaction.done.
if (prop === 'done') return transactionDoneMap.get(target); // Polyfill for objectStoreNames because of Edge.
if (prop === 'done') return transactionDoneMap.get(target);
// Polyfill for objectStoreNames because of Edge.
if (prop === 'objectStoreNames') {
return target.objectStoreNames || transactionStoreNamesMap.get(target);
} // Make tx.store return the only store in the transaction, or undefined if there are many.
}
// Make tx.store return the only store in the transaction, or undefined if there are many.
if (prop === 'store') {
return receiver.objectStoreNames[1] ? undefined : receiver.objectStore(receiver.objectStoreNames[0]);
}
} // Else transform whatever we get back.
}
// Else transform whatever we get back.
return wrap(target[prop]);
},
set(target, prop, value) {
@@ -141,3 +123,2 @@ target[prop] = value;
},
has(target, prop) {
@@ -147,12 +128,8 @@ if (target instanceof IDBTransaction && (prop === 'done' || prop === 'store')) {
}
return prop in target;
}
};
function replaceTraps(callback) {
idbProxyTraps = callback(idbProxyTraps);
}
function wrapFunction(func) {
@@ -168,3 +145,4 @@ // Due to expected object equality (which is enforced by the caching in `wrap`), we
};
} // Cursor methods are special, as the behaviour is a little more different to standard IDB. In
}
// Cursor methods are special, as the behaviour is a little more different to standard IDB. In
// IDB, you advance the cursor and wait for a new 'success' on the IDBRequest that gave you the
@@ -174,4 +152,2 @@ // cursor. It's kinda like a promise that can resolve with many values. That doesn't make sense
// undefined if the end of the cursor has been reached.
if (getCursorAdvanceMethods().includes(func)) {
@@ -185,3 +161,2 @@ return function (...args) {
}
return function (...args) {
@@ -193,23 +168,21 @@ // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use
}
function transformCachableValue(value) {
if (typeof value === 'function') return wrapFunction(value); // This doesn't return, it just creates a 'done' promise for the transaction,
if (typeof value === 'function') return wrapFunction(value);
// This doesn't return, it just creates a 'done' promise for the transaction,
// which is later returned for transaction.done (see idbObjectHandler).
if (value instanceof IDBTransaction) cacheDonePromiseForTransaction(value);
if (instanceOfAny(value, getIdbProxyableTypes())) return new Proxy(value, idbProxyTraps); // Return the same value back if we're not going to transform it.
if (instanceOfAny(value, getIdbProxyableTypes())) return new Proxy(value, idbProxyTraps);
// Return the same value back if we're not going to transform it.
return value;
}
function wrap(value) {
// We sometimes generate multiple promises from a single IDBRequest (eg when cursoring), because
// IDB is weird and a single IDBRequest can yield many responses, so these can't be cached.
if (value instanceof IDBRequest) return promisifyRequest(value); // If we've already transformed this value before, reuse the transformed value.
if (value instanceof IDBRequest) return promisifyRequest(value);
// If we've already transformed this value before, reuse the transformed value.
// This is faster, but it also provides object equality.
if (transformCache.has(value)) return transformCache.get(value);
const newValue = transformCachableValue(value); // Not all types are transformed.
const newValue = transformCachableValue(value);
// Not all types are transformed.
// These may be primitive types, so they can't be WeakMap keys.
if (newValue !== value) {
@@ -219,6 +192,4 @@ transformCache.set(value, newValue);
}
return newValue;
}
const unwrap = value => reverseTransformCache.get(value);
@@ -233,3 +204,2 @@
*/
function openDB(name, version, {
@@ -243,3 +213,2 @@ blocked,
const openPromise = wrap(request);
if (upgrade) {
@@ -250,3 +219,2 @@ request.addEventListener('upgradeneeded', event => {
}
if (blocked) request.addEventListener('blocked', () => blocked());
@@ -264,4 +232,2 @@ openPromise.then(db => {
*/
function deleteDB(name, {
@@ -274,7 +240,5 @@ blocked
}
const readMethods = ['get', 'getKey', 'getAll', 'getAllKeys', 'count'];
const writeMethods = ['put', 'add', 'delete', 'clear'];
const cachedMethods = new Map();
function getMethod(target, prop) {
@@ -284,3 +248,2 @@ if (!(target instanceof IDBDatabase && !(prop in target) && typeof prop === 'string')) {
}
if (cachedMethods.get(prop)) return cachedMethods.get(prop);
@@ -290,8 +253,7 @@ const targetFuncName = prop.replace(/FromIndex$/, '');
const isWrite = writeMethods.includes(targetFuncName);
if ( // Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge.
if (
// Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge.
!(targetFuncName in (useIndex ? IDBIndex : IDBObjectStore).prototype) || !(isWrite || readMethods.includes(targetFuncName))) {
return;
}
const method = async function (storeName, ...args) {
@@ -301,3 +263,4 @@ // isWrite ? 'readwrite' : undefined gzipps better, but fails in Edge :(
let target = tx.store;
if (useIndex) target = target.index(args.shift()); // Must reject if op rejects.
if (useIndex) target = target.index(args.shift());
// Must reject if op rejects.
// If it's a write operation, must reject if tx.done rejects.
@@ -307,10 +270,7 @@ // Must reject with op rejection first.
// Must handle both promises (no unhandled rejections)
return (await Promise.all([target[targetFuncName](...args), isWrite && tx.done]))[0];
};
cachedMethods.set(prop, method);
return method;
}
replaceTraps(oldTraps => _extends({}, oldTraps, {
@@ -321,2 +281,3 @@ get: (target, prop, receiver) => getMethod(target, prop) || oldTraps.get(target, prop, receiver),
// @ts-ignore
try {
@@ -335,3 +296,2 @@ self['workbox:expiration:7.0.0'] && _();
const CACHE_OBJECT_STORE = 'cache-entries';
const normalizeURL = unNormalizedUrl => {
@@ -347,4 +307,2 @@ const url = new URL(unNormalizedUrl, location.href);
*/
class CacheTimestampsModel {
@@ -368,4 +326,2 @@ /**
*/
_upgradeDb(db) {
@@ -378,6 +334,6 @@ // TODO(philipwalton): EdgeHTML doesn't support arrays as a keyPath, so we
keyPath: 'id'
}); // TODO(philipwalton): once we don't have to support EdgeHTML, we can
});
// TODO(philipwalton): once we don't have to support EdgeHTML, we can
// create a single index with the keyPath `['cacheName', 'timestamp']`
// instead of doing both these indexes.
objStore.createIndex('cacheName', 'cacheName', {
@@ -397,7 +353,4 @@ unique: false
*/
_upgradeDbAndDeleteOldDbs(db) {
this._upgradeDb(db);
if (this._cacheName) {
@@ -413,4 +366,2 @@ void deleteDB(this._cacheName);
*/
async setTimestamp(url, timestamp) {
@@ -442,4 +393,2 @@ url = normalizeURL(url);
*/
async getTimestamp(url) {
@@ -461,4 +410,2 @@ const db = await this.getDb();
*/
async expireEntries(minTimestamp, maxCount) {
@@ -469,7 +416,6 @@ const db = await this.getDb();
let entriesNotDeletedCount = 0;
while (cursor) {
const result = cursor.value; // TODO(philipwalton): once we can use a multi-key index, we
const result = cursor.value;
// TODO(philipwalton): once we can use a multi-key index, we
// won't have to check `cacheName` here.
if (result.cacheName === this._cacheName) {
@@ -492,12 +438,9 @@ // Delete an entry if it's older than the max age or
}
cursor = await cursor.continue();
} // TODO(philipwalton): once the Safari bug in the following issue is fixed,
}
// TODO(philipwalton): once the Safari bug in the following issue is fixed,
// we should be able to remove this loop and do the entry deletion in the
// cursor loop above:
// https://github.com/GoogleChrome/workbox/issues/1978
const urlsDeleted = [];
for (const entry of entriesToDelete) {
@@ -507,3 +450,2 @@ await db.delete(CACHE_OBJECT_STORE, entry.id);
}
return urlsDeleted;
@@ -519,4 +461,2 @@ }
*/
_getId(url) {
@@ -533,4 +473,2 @@ // Creating an ID from the URL and cache name won't be necessary once
*/
async getDb() {
@@ -542,6 +480,4 @@ if (!this._db) {
}
return this._db;
}
}
@@ -563,3 +499,2 @@
*/
class CacheExpiration {
@@ -582,3 +517,2 @@ /**
this._rerunRequested = false;
{
@@ -591,3 +525,2 @@ assert_js.assert.isType(cacheName, 'string', {
});
if (!(config.maxEntries || config.maxAgeSeconds)) {
@@ -600,3 +533,2 @@ throw new WorkboxError_js.WorkboxError('max-entries-or-age-required', {
}
if (config.maxEntries) {
@@ -610,3 +542,2 @@ assert_js.assert.isType(config.maxEntries, 'number', {
}
if (config.maxAgeSeconds) {
@@ -621,3 +552,2 @@ assert_js.assert.isType(config.maxAgeSeconds, 'number', {
}
this._maxEntries = config.maxEntries;
@@ -632,4 +562,2 @@ this._maxAgeSeconds = config.maxAgeSeconds;
*/
async expireEntries() {
@@ -640,13 +568,10 @@ if (this._isRunning) {
}
this._isRunning = true;
const minTimestamp = this._maxAgeSeconds ? Date.now() - this._maxAgeSeconds * 1000 : 0;
const urlsExpired = await this._timestampModel.expireEntries(minTimestamp, this._maxEntries); // Delete URLs from the cache
const urlsExpired = await this._timestampModel.expireEntries(minTimestamp, this._maxEntries);
// Delete URLs from the cache
const cache = await self.caches.open(this._cacheName);
for (const url of urlsExpired) {
await cache.delete(url, this._matchOptions);
}
{
@@ -662,5 +587,3 @@ if (urlsExpired.length > 0) {
}
this._isRunning = false;
if (this._rerunRequested) {
@@ -678,4 +601,2 @@ this._rerunRequested = false;
*/
async updateTimestamp(url) {
@@ -690,3 +611,2 @@ {
}
await this._timestampModel.setTimestamp(url, Date.now());
@@ -705,4 +625,2 @@ }
*/
async isURLExpired(url) {
@@ -726,4 +644,2 @@ if (!this._maxAgeSeconds) {
*/
async delete() {
@@ -735,3 +651,2 @@ // Make sure we don't attempt another rerun if we're called in the middle of
}
}
@@ -769,3 +684,2 @@
*/
class ExpirationPlugin {
@@ -810,14 +724,10 @@ /**
}
const isFresh = this._isResponseDateFresh(cachedResponse); // Expire entries to ensure that even if the expiration date has
const isFresh = this._isResponseDateFresh(cachedResponse);
// Expire entries to ensure that even if the expiration date has
// expired, it'll only be used once.
const cacheExpiration = this._getCacheExpiration(cacheName);
dontWaitFor_js.dontWaitFor(cacheExpiration.expireEntries()); // Update the metadata for the request URL to the current timestamp,
dontWaitFor_js.dontWaitFor(cacheExpiration.expireEntries());
// Update the metadata for the request URL to the current timestamp,
// but don't `await` it as we don't want to block the response.
const updateTimestampDone = cacheExpiration.updateTimestamp(request.url);
if (event) {
@@ -835,3 +745,2 @@ try {
}
return isFresh ? cachedResponse : null;
@@ -849,4 +758,2 @@ };
*/
this.cacheDidUpdate = async ({
@@ -870,9 +777,6 @@ cacheName,
}
const cacheExpiration = this._getCacheExpiration(cacheName);
await cacheExpiration.updateTimestamp(request.url);
await cacheExpiration.expireEntries();
};
{
@@ -886,3 +790,2 @@ if (!(config.maxEntries || config.maxAgeSeconds)) {
}
if (config.maxEntries) {
@@ -896,3 +799,2 @@ assert_js.assert.isType(config.maxEntries, 'number', {
}
if (config.maxAgeSeconds) {
@@ -907,7 +809,5 @@ assert_js.assert.isType(config.maxAgeSeconds, 'number', {
}
this._config = config;
this._maxAgeSeconds = config.maxAgeSeconds;
this._cacheExpirations = new Map();
if (config.purgeOnQuotaError) {
@@ -926,4 +826,2 @@ registerQuotaErrorCallback_js.registerQuotaErrorCallback(() => this.deleteCacheAndMetadata());
*/
_getCacheExpiration(cacheName) {
@@ -933,11 +831,7 @@ if (cacheName === cacheNames_js.cacheNames.getRuntimeName()) {
}
let cacheExpiration = this._cacheExpirations.get(cacheName);
if (!cacheExpiration) {
cacheExpiration = new CacheExpiration(cacheName, this._config);
this._cacheExpirations.set(cacheName, cacheExpiration);
}
return cacheExpiration;
@@ -951,4 +845,2 @@ }
*/
_isResponseDateFresh(cachedResponse) {
@@ -958,16 +850,13 @@ if (!this._maxAgeSeconds) {
return true;
} // Check if the 'date' header will suffice a quick expiration check.
}
// Check if the 'date' header will suffice a quick expiration check.
// See https://github.com/GoogleChromeLabs/sw-toolbox/issues/164 for
// discussion.
const dateHeaderTimestamp = this._getDateHeaderTimestamp(cachedResponse);
if (dateHeaderTimestamp === null) {
// Unable to parse date, so assume it's fresh.
return true;
} // If we have a valid headerTime, then our response is fresh iff the
}
// If we have a valid headerTime, then our response is fresh iff the
// headerTime plus maxAgeSeconds is greater than the current time.
const now = Date.now();
@@ -985,4 +874,2 @@ return dateHeaderTimestamp >= now - this._maxAgeSeconds * 1000;
*/
_getDateHeaderTimestamp(cachedResponse) {
@@ -992,12 +879,10 @@ if (!cachedResponse.headers.has('date')) {
}
const dateHeader = cachedResponse.headers.get('date');
const parsedDate = new Date(dateHeader);
const headerTime = parsedDate.getTime(); // If the Date header was invalid for some reason, parsedDate.getTime()
const headerTime = parsedDate.getTime();
// If the Date header was invalid for some reason, parsedDate.getTime()
// will return NaN.
if (isNaN(headerTime)) {
return null;
}
return headerTime;
@@ -1021,4 +906,2 @@ }
*/
async deleteCacheAndMetadata() {
@@ -1030,8 +913,6 @@ // Do this one at a time instead of all at once via `Promise.all()` to
await cacheExpiration.delete();
} // Reset this._cacheExpirations to its initial state.
}
// Reset this._cacheExpirations to its initial state.
this._cacheExpirations = new Map();
}
}
@@ -1044,3 +925,3 @@
}({}, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core));
})({}, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core);
//# sourceMappingURL=workbox-expiration.dev.js.map
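Aside from comment reflow (end-of-line comments moved onto their own lines), the changes to the dev bundle above are build-output changes: the inlined Babel `_extends` helper now prefers a bound native `Object.assign` over the `Object.assign || fallback` pattern, and the closing IIFE invocation switches from the `}( ... ));` form to `})( ... );`. A minimal sketch of the two helper shapes, using illustrative names (`mergeOld`, `mergeNew`) that are not part of the shipped code:

// Illustrative comparison of the two `_extends` shapes in the diff above.
// Names are hypothetical; both merge own enumerable properties left to right.
const mergeOld =
  Object.assign ||
  function (target) {
    for (var i = 1; i < arguments.length; i++) {
      var source = arguments[i];
      for (var key in source) {
        if (Object.prototype.hasOwnProperty.call(source, key)) {
          target[key] = source[key];
        }
      }
    }
    return target;
  };

// 7.1.0 shape: use a bound native Object.assign when available,
// otherwise fall back to the same manual copy loop.
const mergeNew = Object.assign ? Object.assign.bind() : mergeOld;

console.log(mergeOld({}, { a: 1 }, { b: 2 })); // { a: 1, b: 2 }
console.log(mergeNew({}, { a: 1 }, { b: 2 })); // { a: 1, b: 2 }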
@@ -1,2 +0,2 @@
this.workbox=this.workbox||{},this.workbox.expiration=function(t,e,n,s,i){"use strict";function r(){return r=Object.assign||function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var s in n)Object.prototype.hasOwnProperty.call(n,s)&&(t[s]=n[s])}return t},r.apply(this,arguments)}const a=(t,e)=>e.some((e=>t instanceof e));let o,c;const u=new WeakMap,h=new WeakMap,f=new WeakMap,l=new WeakMap,d=new WeakMap;let w={get(t,e,n){if(t instanceof IDBTransaction){if("done"===e)return h.get(t);if("objectStoreNames"===e)return t.objectStoreNames||f.get(t);if("store"===e)return n.objectStoreNames[1]?void 0:n.objectStore(n.objectStoreNames[0])}return m(t[e])},set:(t,e,n)=>(t[e]=n,!0),has:(t,e)=>t instanceof IDBTransaction&&("done"===e||"store"===e)||e in t};function p(t){return t!==IDBDatabase.prototype.transaction||"objectStoreNames"in IDBTransaction.prototype?(c||(c=[IDBCursor.prototype.advance,IDBCursor.prototype.continue,IDBCursor.prototype.continuePrimaryKey])).includes(t)?function(...e){return t.apply(b(this),e),m(u.get(this))}:function(...e){return m(t.apply(b(this),e))}:function(e,...n){const s=t.call(b(this),e,...n);return f.set(s,e.sort?e.sort():[e]),m(s)}}function D(t){return"function"==typeof t?p(t):(t instanceof IDBTransaction&&function(t){if(h.has(t))return;const e=new Promise(((e,n)=>{const s=()=>{t.removeEventListener("complete",i),t.removeEventListener("error",r),t.removeEventListener("abort",r)},i=()=>{e(),s()},r=()=>{n(t.error||new DOMException("AbortError","AbortError")),s()};t.addEventListener("complete",i),t.addEventListener("error",r),t.addEventListener("abort",r)}));h.set(t,e)}(t),a(t,o||(o=[IDBDatabase,IDBObjectStore,IDBIndex,IDBCursor,IDBTransaction]))?new Proxy(t,w):t)}function m(t){if(t instanceof IDBRequest)return function(t){const e=new Promise(((e,n)=>{const s=()=>{t.removeEventListener("success",i),t.removeEventListener("error",r)},i=()=>{e(m(t.result)),s()},r=()=>{n(t.error),s()};t.addEventListener("success",i),t.addEventListener("error",r)}));return e.then((e=>{e instanceof IDBCursor&&u.set(e,t)})).catch((()=>{})),d.set(e,t),e}(t);if(l.has(t))return l.get(t);const e=D(t);return e!==t&&(l.set(t,e),d.set(e,t)),e}const b=t=>d.get(t);const y=["get","getKey","getAll","getAllKeys","count"],I=["put","add","delete","clear"],B=new Map;function g(t,e){if(!(t instanceof IDBDatabase)||e in t||"string"!=typeof e)return;if(B.get(e))return B.get(e);const n=e.replace(/FromIndex$/,""),s=e!==n,i=I.includes(n);if(!(n in(s?IDBIndex:IDBObjectStore).prototype)||!i&&!y.includes(n))return;const r=async function(t,...e){const r=this.transaction(t,i?"readwrite":"readonly");let a=r.store;return s&&(a=a.index(e.shift())),(await Promise.all([a[n](...e),i&&r.done]))[0]};return B.set(e,r),r}w=(t=>r({},t,{get:(e,n,s)=>g(e,n)||t.get(e,n,s),has:(e,n)=>!!g(e,n)||t.has(e,n)}))(w);try{self["workbox:expiration:7.0.0"]&&_()}catch(t){}const x="cache-entries",k=t=>{const e=new URL(t,location.href);return e.hash="",e.href};class v{constructor(t){this.t=null,this.M=t}i(t){const e=t.createObjectStore(x,{keyPath:"id"});e.createIndex("cacheName","cacheName",{unique:!1}),e.createIndex("timestamp","timestamp",{unique:!1})}N(t){this.i(t),this.M&&function(t,{blocked:e}={}){const n=indexedDB.deleteDatabase(t);e&&n.addEventListener("blocked",(()=>e())),m(n).then((()=>{}))}(this.M)}async setTimestamp(t,e){const n={url:t=k(t),timestamp:e,cacheName:this.M,id:this.T(t)},s=(await this.getDb()).transaction(x,"readwrite",{durability:"relaxed"});await s.store.put(n),await s.done}async getTimestamp(t){const e=await 
this.getDb(),n=await e.get(x,this.T(t));return null==n?void 0:n.timestamp}async expireEntries(t,e){const n=await this.getDb();let s=await n.transaction(x).store.index("timestamp").openCursor(null,"prev");const i=[];let r=0;for(;s;){const n=s.value;n.cacheName===this.M&&(t&&n.timestamp<t||e&&r>=e?i.push(s.value):r++),s=await s.continue()}const a=[];for(const t of i)await n.delete(x,t.id),a.push(t.url);return a}T(t){return this.M+"|"+k(t)}async getDb(){return this.t||(this.t=await function(t,e,{blocked:n,upgrade:s,blocking:i,terminated:r}={}){const a=indexedDB.open(t,e),o=m(a);return s&&a.addEventListener("upgradeneeded",(t=>{s(m(a.result),t.oldVersion,t.newVersion,m(a.transaction))})),n&&a.addEventListener("blocked",(()=>n())),o.then((t=>{r&&t.addEventListener("close",(()=>r())),i&&t.addEventListener("versionchange",(()=>i()))})).catch((()=>{})),o}("workbox-expiration",1,{upgrade:this.N.bind(this)})),this.t}}class M{constructor(t,e={}){this.P=!1,this.W=!1,this.K=e.maxEntries,this.L=e.maxAgeSeconds,this.H=e.matchOptions,this.M=t,this.$=new v(t)}async expireEntries(){if(this.P)return void(this.W=!0);this.P=!0;const t=this.L?Date.now()-1e3*this.L:0,n=await this.$.expireEntries(t,this.K),s=await self.caches.open(this.M);for(const t of n)await s.delete(t,this.H);this.P=!1,this.W&&(this.W=!1,e.dontWaitFor(this.expireEntries()))}async updateTimestamp(t){await this.$.setTimestamp(t,Date.now())}async isURLExpired(t){if(this.L){const e=await this.$.getTimestamp(t),n=Date.now()-1e3*this.L;return void 0===e||e<n}return!1}async delete(){this.W=!1,await this.$.expireEntries(1/0)}}return t.CacheExpiration=M,t.ExpirationPlugin=class{constructor(t={}){this.cachedResponseWillBeUsed=async({event:t,request:n,cacheName:s,cachedResponse:i})=>{if(!i)return null;const r=this.J(i),a=this.V(s);e.dontWaitFor(a.expireEntries());const o=a.updateTimestamp(n.url);if(t)try{t.waitUntil(o)}catch(t){}return r?i:null},this.cacheDidUpdate=async({cacheName:t,request:e})=>{const n=this.V(t);await n.updateTimestamp(e.url),await n.expireEntries()},this.X=t,this.L=t.maxAgeSeconds,this.Y=new Map,t.purgeOnQuotaError&&s.registerQuotaErrorCallback((()=>this.deleteCacheAndMetadata()))}V(t){if(t===n.cacheNames.getRuntimeName())throw new i.WorkboxError("expire-custom-caches-only");let e=this.Y.get(t);return e||(e=new M(t,this.X),this.Y.set(t,e)),e}J(t){if(!this.L)return!0;const e=this.Z(t);if(null===e)return!0;return e>=Date.now()-1e3*this.L}Z(t){if(!t.headers.has("date"))return null;const e=t.headers.get("date"),n=new Date(e).getTime();return isNaN(n)?null:n}async deleteCacheAndMetadata(){for(const[t,e]of this.Y)await self.caches.delete(t),await e.delete();this.Y=new Map}},t}({},workbox.core._private,workbox.core._private,workbox.core,workbox.core._private); | ||
this.workbox=this.workbox||{},this.workbox.expiration=function(t,e,n,s,i){"use strict";function r(){return r=Object.assign?Object.assign.bind():function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var s in n)Object.prototype.hasOwnProperty.call(n,s)&&(t[s]=n[s])}return t},r.apply(this,arguments)}const a=(t,e)=>e.some((e=>t instanceof e));let o,c;const u=new WeakMap,h=new WeakMap,f=new WeakMap,l=new WeakMap,d=new WeakMap;let w={get(t,e,n){if(t instanceof IDBTransaction){if("done"===e)return h.get(t);if("objectStoreNames"===e)return t.objectStoreNames||f.get(t);if("store"===e)return n.objectStoreNames[1]?void 0:n.objectStore(n.objectStoreNames[0])}return m(t[e])},set:(t,e,n)=>(t[e]=n,!0),has:(t,e)=>t instanceof IDBTransaction&&("done"===e||"store"===e)||e in t};function p(t){return t!==IDBDatabase.prototype.transaction||"objectStoreNames"in IDBTransaction.prototype?(c||(c=[IDBCursor.prototype.advance,IDBCursor.prototype.continue,IDBCursor.prototype.continuePrimaryKey])).includes(t)?function(...e){return t.apply(b(this),e),m(u.get(this))}:function(...e){return m(t.apply(b(this),e))}:function(e,...n){const s=t.call(b(this),e,...n);return f.set(s,e.sort?e.sort():[e]),m(s)}}function D(t){return"function"==typeof t?p(t):(t instanceof IDBTransaction&&function(t){if(h.has(t))return;const e=new Promise(((e,n)=>{const s=()=>{t.removeEventListener("complete",i),t.removeEventListener("error",r),t.removeEventListener("abort",r)},i=()=>{e(),s()},r=()=>{n(t.error||new DOMException("AbortError","AbortError")),s()};t.addEventListener("complete",i),t.addEventListener("error",r),t.addEventListener("abort",r)}));h.set(t,e)}(t),a(t,o||(o=[IDBDatabase,IDBObjectStore,IDBIndex,IDBCursor,IDBTransaction]))?new Proxy(t,w):t)}function m(t){if(t instanceof IDBRequest)return function(t){const e=new Promise(((e,n)=>{const s=()=>{t.removeEventListener("success",i),t.removeEventListener("error",r)},i=()=>{e(m(t.result)),s()},r=()=>{n(t.error),s()};t.addEventListener("success",i),t.addEventListener("error",r)}));return e.then((e=>{e instanceof IDBCursor&&u.set(e,t)})).catch((()=>{})),d.set(e,t),e}(t);if(l.has(t))return l.get(t);const e=D(t);return e!==t&&(l.set(t,e),d.set(e,t)),e}const b=t=>d.get(t);const y=["get","getKey","getAll","getAllKeys","count"],I=["put","add","delete","clear"],B=new Map;function g(t,e){if(!(t instanceof IDBDatabase)||e in t||"string"!=typeof e)return;if(B.get(e))return B.get(e);const n=e.replace(/FromIndex$/,""),s=e!==n,i=I.includes(n);if(!(n in(s?IDBIndex:IDBObjectStore).prototype)||!i&&!y.includes(n))return;const r=async function(t,...e){const r=this.transaction(t,i?"readwrite":"readonly");let a=r.store;return s&&(a=a.index(e.shift())),(await Promise.all([a[n](...e),i&&r.done]))[0]};return B.set(e,r),r}w=(t=>r({},t,{get:(e,n,s)=>g(e,n)||t.get(e,n,s),has:(e,n)=>!!g(e,n)||t.has(e,n)}))(w);try{self["workbox:expiration:7.0.0"]&&_()}catch(t){}const x="cache-entries",k=t=>{const e=new URL(t,location.href);return e.hash="",e.href};class v{constructor(t){this.t=null,this.M=t}i(t){const e=t.createObjectStore(x,{keyPath:"id"});e.createIndex("cacheName","cacheName",{unique:!1}),e.createIndex("timestamp","timestamp",{unique:!1})}N(t){this.i(t),this.M&&function(t,{blocked:e}={}){const n=indexedDB.deleteDatabase(t);e&&n.addEventListener("blocked",(()=>e())),m(n).then((()=>{}))}(this.M)}async setTimestamp(t,e){const n={url:t=k(t),timestamp:e,cacheName:this.M,id:this.T(t)},s=(await this.getDb()).transaction(x,"readwrite",{durability:"relaxed"});await s.store.put(n),await s.done}async 
getTimestamp(t){const e=await this.getDb(),n=await e.get(x,this.T(t));return null==n?void 0:n.timestamp}async expireEntries(t,e){const n=await this.getDb();let s=await n.transaction(x).store.index("timestamp").openCursor(null,"prev");const i=[];let r=0;for(;s;){const n=s.value;n.cacheName===this.M&&(t&&n.timestamp<t||e&&r>=e?i.push(s.value):r++),s=await s.continue()}const a=[];for(const t of i)await n.delete(x,t.id),a.push(t.url);return a}T(t){return this.M+"|"+k(t)}async getDb(){return this.t||(this.t=await function(t,e,{blocked:n,upgrade:s,blocking:i,terminated:r}={}){const a=indexedDB.open(t,e),o=m(a);return s&&a.addEventListener("upgradeneeded",(t=>{s(m(a.result),t.oldVersion,t.newVersion,m(a.transaction))})),n&&a.addEventListener("blocked",(()=>n())),o.then((t=>{r&&t.addEventListener("close",(()=>r())),i&&t.addEventListener("versionchange",(()=>i()))})).catch((()=>{})),o}("workbox-expiration",1,{upgrade:this.N.bind(this)})),this.t}}class M{constructor(t,e={}){this.P=!1,this.W=!1,this.S=e.maxEntries,this.K=e.maxAgeSeconds,this.L=e.matchOptions,this.M=t,this.$=new v(t)}async expireEntries(){if(this.P)return void(this.W=!0);this.P=!0;const t=this.K?Date.now()-1e3*this.K:0,n=await this.$.expireEntries(t,this.S),s=await self.caches.open(this.M);for(const t of n)await s.delete(t,this.L);this.P=!1,this.W&&(this.W=!1,e.dontWaitFor(this.expireEntries()))}async updateTimestamp(t){await this.$.setTimestamp(t,Date.now())}async isURLExpired(t){if(this.K){const e=await this.$.getTimestamp(t),n=Date.now()-1e3*this.K;return void 0===e||e<n}return!1}async delete(){this.W=!1,await this.$.expireEntries(1/0)}}return t.CacheExpiration=M,t.ExpirationPlugin=class{constructor(t={}){this.cachedResponseWillBeUsed=async({event:t,request:n,cacheName:s,cachedResponse:i})=>{if(!i)return null;const r=this.J(i),a=this.V(s);e.dontWaitFor(a.expireEntries());const o=a.updateTimestamp(n.url);if(t)try{t.waitUntil(o)}catch(t){}return r?i:null},this.cacheDidUpdate=async({cacheName:t,request:e})=>{const n=this.V(t);await n.updateTimestamp(e.url),await n.expireEntries()},this.X=t,this.K=t.maxAgeSeconds,this.Y=new Map,t.purgeOnQuotaError&&s.registerQuotaErrorCallback((()=>this.deleteCacheAndMetadata()))}V(t){if(t===n.cacheNames.getRuntimeName())throw new i.WorkboxError("expire-custom-caches-only");let e=this.Y.get(t);return e||(e=new M(t,this.X),this.Y.set(t,e)),e}J(t){if(!this.K)return!0;const e=this.Z(t);if(null===e)return!0;return e>=Date.now()-1e3*this.K}Z(t){if(!t.headers.has("date"))return null;const e=t.headers.get("date"),n=new Date(e).getTime();return isNaN(n)?null:n}async deleteCacheAndMetadata(){for(const[t,e]of this.Y)await self.caches.delete(t),await e.delete();this.Y=new Map}},t}({},workbox.core._private,workbox.core._private,workbox.core,workbox.core._private); | ||
//# sourceMappingURL=workbox-expiration.prod.js.map
{
"name": "workbox-expiration",
"version": "7.0.0",
"version": "7.1.0",
"license": "MIT",
@@ -26,5 +26,5 @@ "author": "Google's Web DevRel Team",
"idb": "^7.0.1",
"workbox-core": "7.0.0"
"workbox-core": "7.1.0"
},
"gitHead": "c1d11636823e5e3a89520f7a531970a39304b14a"
"gitHead": "9e69c4269c35e2db9fbba4d13e4e6206c7b66d2a"
}
// @ts-ignore
try{self['workbox:expiration:7.0.0']&&_()}catch(e){}
try{self['workbox:expiration:7.1.0']&&_()}catch(e){}
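For context on consuming the rebuilt bundle: it attaches `ExpirationPlugin` and `CacheExpiration` to `workbox.expiration`, and the options visible in the diff (`maxEntries`, `maxAgeSeconds`, `purgeOnQuotaError`, `matchOptions`) are unchanged between 7.0.0 and 7.1.0. A minimal usage sketch for an `importScripts`-style service worker, assuming `workbox.routing` and `workbox.strategies` are provided by `workbox-sw` (not part of this diff) and that the cache name and route match are placeholders:

// sw.js — illustrative only; point the importScripts path at your local copy of workbox-sw.js.
importScripts('./workbox-sw.js');

workbox.routing.registerRoute(
  ({ request }) => request.destination === 'image',
  new workbox.strategies.CacheFirst({
    cacheName: 'image-cache', // placeholder cache name
    plugins: [
      new workbox.expiration.ExpirationPlugin({
        maxEntries: 50, // keep at most 50 entries in this cache
        maxAgeSeconds: 30 * 24 * 60 * 60, // expire entries older than 30 days
        purgeOnQuotaError: true, // delete this cache if the origin runs out of storage quota
      }),
    ],
  })
);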
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Unidentified License
License (Experimental): Something that seems like a license was found, but its contents could not be matched with a known license.
Found 1 instance in 1 package
+ Added workbox-core@7.1.0 (transitive)
- Removed workbox-core@7.0.0 (transitive)
Updated workbox-core@7.1.0