kinto npm package: version comparison

Comparing version 14.0.2 to 15.0.0
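
Most of the churn in this release is mechanical: the compiled output now uses native object spread, nullish coalescing (`??`), and optional chaining (`?.`) where 14.x emitted down-leveled helpers, and the separate `kinto-http` dependency is folded into the package as `./http`. A minimal sketch of the recurring output change (illustrative values, not taken from the diff):

```js
// How the same source compiles in 14.x vs. 15.x.
const record = { id: "abc", title: "hello" };
const timestamp = null;

// 14.x-style emit (helpers for older targets):
const marked14 = Object.assign(Object.assign({}, record), { _status: "created" });
const since14 = timestamp !== null && timestamp !== void 0 ? timestamp : 0;

// 15.x-style emit (native syntax):
const marked15 = { ...record, _status: "created" };
const since15 = timestamp ?? 0;

console.log(marked14, since14, marked15, since15);
```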

dist/moz-kinto-http-client.js


lib/adapters/base.d.ts

@@ -0,0 +0,0 @@ import { RecordStatus } from "../types";

@@ -0,0 +0,0 @@ export class AbstractBaseAdapter {


lib/adapters/IDB.d.ts
import BaseAdapter, { StorageProxy } from "./base";
import { RecordStatus } from "../types";
import { KintoObject } from "kinto-http";
import { KintoObject } from "../http";
/**

@@ -140,3 +140,3 @@ * Small helper that wraps the opening of an IndexedDB into a Promise.

*/
get(id: string): Promise<B | undefined>;
get(id: string): Promise<B | null>;
/**

@@ -143,0 +143,0 @@ * Lists all records from the IndexedDB database.

@@ -209,3 +209,3 @@ import BaseAdapter from "./base";

// If no filters on custom attribute, get all results in one bulk.
if (remainingFilters.length === 0) {
if (Object.keys(remainingFilters).length === 0) {
const request = indexStore.getAll(IDBKeyRange.only([cid, value]));
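
This is one of the few behavioral fixes in the diff: `remainingFilters` is a plain object produced by `omitKeys()`, so the old `remainingFilters.length === 0` check compared `undefined` to `0` and the bulk `getAll()` fast path apparently never ran. A small sketch of the difference:

```js
// Why the key-count check matters: plain objects have no .length property.
const empty = {};
const withFilter = { _status: "created" };

console.log(empty.length === 0);                   // false -> old check skipped the fast path
console.log(Object.keys(empty).length === 0);      // true  -> new check takes the fast path
console.log(Object.keys(withFilter).length === 0); // false -> falls through to cursor filtering
```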

@@ -233,2 +233,6 @@ request.onsuccess = (event) => done(event.target.result);

export default class IDB extends BaseAdapter {
/* Expose the IDBError class publicly */
static get IDBError() {
return IDBError;
}
/**

@@ -249,6 +253,2 @@ * Constructor.

}
/* Expose the IDBError class publicly */
static get IDBError() {
return IDBError;
}
_handleError(method, err) {

@@ -306,3 +306,3 @@ throw new IDBError(method, err);

await this.importBulk(records);
await this.saveLastModified(timestamp !== null && timestamp !== void 0 ? timestamp : 0);
await this.saveLastModified(timestamp ?? 0);
console.log(`${this.cid}: data was migrated successfully.`);

@@ -433,3 +433,4 @@ // Delete the old database.

if (!options.preload) {
return runCallback();
runCallback();
return;
}

@@ -442,3 +443,3 @@ // Preload specified records using a list request.

for (const record of records) {
delete record["_cid"];
delete record._cid;
preloaded[record.id] = record;

@@ -469,2 +470,3 @@ }

}
return null;
}

@@ -489,3 +491,3 @@ /**

for (const result of _results) {
delete result["_cid"];
delete result._cid;
}

@@ -604,2 +606,3 @@ results = _results;

this._handleError("saveMetadata", e);
return null;
}

@@ -617,2 +620,3 @@ }

this._handleError("getMetadata", e);
return null;
}

@@ -634,6 +638,6 @@ }

create(record) {
store.add(Object.assign(Object.assign({}, record), { _cid }));
store.add({ ...record, _cid });
},
update(record) {
return store.put(Object.assign(Object.assign({}, record), { _cid }));
return store.put({ ...record, _cid });
},

@@ -640,0 +644,0 @@ delete(id) {

@@ -0,0 +0,0 @@ import BaseAdapter, { AbstractBaseAdapter, StorageProxy } from "./base";

@@ -143,6 +143,6 @@ import BaseAdapter from "./base";

// To support rollback, we take a copy of all the records in the store.
const originalRecords = Object.assign({}, this._records);
const originalRecords = { ...this._records };
// If an error occurs, we rollback the store to the original state.
const rollback = () => {
this._records = Object.assign({}, originalRecords);
this._records = { ...originalRecords };
};

@@ -149,0 +149,0 @@ // `execute` needs to return the return value of the provided callback.

@@ -0,0 +0,0 @@ "use strict";

@@ -116,3 +116,3 @@ "use strict";

const { value } = cursor;
if (utils_1.filterObject(filters, value)) {
if ((0, utils_1.filterObject)(filters, value)) {
results.push(value);
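
In the CommonJS build, calls through the imported namespace now compile as `(0, utils_1.filterObject)(...)`. The comma-operator indirection is how newer TypeScript preserves ES module call semantics: the callee is invoked without `utils_1` as its `this` receiver. A minimal sketch with illustrative names:

```js
"use strict";
// With a plain property call, the namespace object becomes `this`;
// the (0, fn)() form calls the function with `this` undefined instead.
const utils_1 = {
  whoAmI() {
    return this;
  },
};

console.log(utils_1.whoAmI() === utils_1);        // true
console.log((0, utils_1.whoAmI)() === undefined); // true (strict mode)
```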

@@ -149,6 +149,6 @@ }

const isEqual = Array.isArray(key)
? utils_1.arrayEqual(key, values[i])
? (0, utils_1.arrayEqual)(key, values[i])
: key === values[i];
if (isEqual) {
if (utils_1.filterObject(filters, value)) {
if ((0, utils_1.filterObject)(filters, value)) {
results.push(value);

@@ -192,3 +192,3 @@ }

if (isSubQuery) {
const newFilter = utils_1.transformSubObjectFilters(filters);
const newFilter = (0, utils_1.transformSubObjectFilters)(filters);
const request = store.index("cid").openCursor(IDBKeyRange.only(cid));

@@ -203,3 +203,3 @@ request.onsuccess = cursorHandlers.all(newFilter, done);

// If `indexField` was used already, don't filter again.
const remainingFilters = utils_1.omitKeys(filters, [indexField]);
const remainingFilters = (0, utils_1.omitKeys)(filters, [indexField]);
// value specified in the filter (eg. `filters: { _status: ["created", "updated"] }`)

@@ -221,3 +221,3 @@ const value = filters[indexField];

// If no filters on custom attribute, get all results in one bulk.
if (remainingFilters.length === 0) {
if (Object.keys(remainingFilters).length === 0) {
const request = indexStore.getAll(IDBKeyRange.only([cid, value]));

@@ -245,2 +245,6 @@ request.onsuccess = (event) => done(event.target.result);

class IDB extends base_1.default {
/* Expose the IDBError class publicly */
static get IDBError() {
return IDBError;
}
/**

@@ -261,6 +265,2 @@ * Constructor.

}
/* Expose the IDBError class publicly */
static get IDBError() {
return IDBError;
}
_handleError(method, err) {

@@ -318,3 +318,3 @@ throw new IDBError(method, err);

await this.importBulk(records);
await this.saveLastModified(timestamp !== null && timestamp !== void 0 ? timestamp : 0);
await this.saveLastModified(timestamp ?? 0);
console.log(`${this.cid}: data was migrated successfully.`);

@@ -445,3 +445,4 @@ // Delete the old database.

if (!options.preload) {
return runCallback();
runCallback();
return;
}

@@ -454,3 +455,3 @@ // Preload specified records using a list request.

for (const record of records) {
delete record["_cid"];
delete record._cid;
preloaded[record.id] = record;

@@ -481,2 +482,3 @@ }

}
return null;
}

@@ -501,3 +503,3 @@ /**

for (const result of _results) {
delete result["_cid"];
delete result._cid;
}

@@ -509,3 +511,3 @@ results = _results;

// XXX: with some efforts, this could be fully implemented using IDB API.
return params.order ? utils_1.sortObjects(params.order, results) : results;
return params.order ? (0, utils_1.sortObjects)(params.order, results) : results;
}

@@ -617,2 +619,3 @@ catch (e) {

this._handleError("saveMetadata", e);
return null;
}

@@ -630,2 +633,3 @@ }

this._handleError("getMetadata", e);
return null;
}

@@ -648,6 +652,6 @@ }

create(record) {
store.add(Object.assign(Object.assign({}, record), { _cid }));
store.add({ ...record, _cid });
},
update(record) {
return store.put(Object.assign(Object.assign({}, record), { _cid }));
return store.put({ ...record, _cid });
},

@@ -654,0 +658,0 @@ delete(id) {

@@ -101,3 +101,3 @@ "use strict";

Object.entries(params.filters).forEach(([key, value]) => {
const recordValue = utils_1.getDeepKey(record, key);
const recordValue = (0, utils_1.getDeepKey)(record, key);
if (Array.isArray(value)) {

@@ -149,6 +149,6 @@ if (!value.includes(recordValue)) {

// To support rollback, we take a copy of all the records in the store.
const originalRecords = Object.assign({}, this._records);
const originalRecords = { ...this._records };
// If an error occurs, we rollback the store to the original state.
const rollback = () => {
this._records = Object.assign({}, originalRecords);
this._records = { ...originalRecords };
};

@@ -155,0 +155,0 @@ // `execute` needs to return the return value of the provided callback.

@@ -29,4 +29,4 @@ "use strict";

];
const cleanLocal = (r) => utils_2.omitKeys(r, fieldsToClean);
return utils_1.deepEqual(cleanLocal(a), cleanLocal(b));
const cleanLocal = (r) => (0, utils_2.omitKeys)(r, fieldsToClean);
return (0, utils_1.deepEqual)(cleanLocal(a), cleanLocal(b));
}

@@ -81,6 +81,6 @@ exports.recordsEqual = recordsEqual;

get errors() {
return this._lists["errors"];
return this._lists.errors;
}
get conflicts() {
return this._lists["conflicts"];
return this._lists.conflicts;
}

@@ -165,3 +165,3 @@ get skipped() {

generate() {
return uuid_1.v4();
return (0, uuid_1.v4)();
},

@@ -195,3 +195,3 @@ validate(id) {

function markStatus(record, status) {
return Object.assign(Object.assign({}, record), { _status: status });
return { ...record, _status: status };
}

@@ -226,3 +226,3 @@ function markDeleted(record) {

// Apply remote changes on local record.
const synced = Object.assign(Object.assign({}, local), markSynced(remote));
const synced = { ...local, ...markSynced(remote) };
// With pull only, we don't need to compare records since we override them.

@@ -266,3 +266,3 @@ if (strategy === Collection.strategy.PULL_ONLY) {

type: "conflicts",
data: { type: "incoming", local: local, remote: remote },
data: { type: "incoming", local, remote },
};

@@ -489,3 +489,3 @@ }

}
return utils_1.waterfall(transformers.map((transformer) => {
return (0, utils_1.waterfall)(transformers.map((transformer) => {
return (record) => transformer.encode(record);

@@ -506,3 +506,3 @@ }), record);

}
return utils_1.waterfall(transformers.reverse().map((transformer) => {
return (0, utils_1.waterfall)(transformers.reverse().map((transformer) => {
return (record) => transformer.decode(record);

@@ -550,5 +550,9 @@ }), record);

}
const newRecord = Object.assign(Object.assign({}, record), { id: options.synced || options.useRecordId
const newRecord = {
...record,
id: options.synced || options.useRecordId
? record.id
: this.idSchema.generate(record), _status: options.synced ? "synced" : "created" });
: this.idSchema.generate(record),
_status: options.synced ? "synced" : "created",
};
if (!this.idSchema.validate(newRecord.id)) {

@@ -594,9 +598,6 @@ return reject(`Invalid Id: ${newRecord.id}`);

}
return this.execute((txn) => {
var _a, _b;
return txn.update(record, {
synced: (_a = options.synced) !== null && _a !== void 0 ? _a : false,
patch: (_b = options.patch) !== null && _b !== void 0 ? _b : false,
});
}, {
return this.execute((txn) => txn.update(record, {
synced: options.synced ?? false,
patch: options.patch ?? false,
}), {
preloadIds: [record.id],

@@ -703,3 +704,3 @@ });

async list(params = {}, options = { includeDeleted: false }) {
params = Object.assign({ order: "-last_modified", filters: {} }, params);
params = { order: "-last_modified", filters: {}, ...params };
const results = await this.db.list(params);

@@ -817,3 +818,3 @@ let data = results;

// We can get "null" from the remote side if we got a conflict
// and there is no remote version available; see kinto-http.js
// and there is no remote version available; see src/http
// batch.js:aggregate.

@@ -891,3 +892,7 @@ transaction.delete(conflict.local.id);

// Records that were synced become «created».
transaction.update(Object.assign(Object.assign({}, record), { last_modified: undefined, _status: "created" }));
transaction.update({
...record,
last_modified: undefined,
_status: "created",
});
}

@@ -943,3 +948,8 @@ });

: await this.db.getLastModified();
options = Object.assign({ strategy: Collection.strategy.MANUAL, lastModified: since, headers: {} }, options);
options = {
strategy: Collection.strategy.MANUAL,
lastModified: since,
headers: {},
...options,
};
// Optionally ignore some records when pulling for changes.

@@ -959,3 +969,6 @@ // (avoid redownloading our own changes on last step of #sync())

if (options.expectedTimestamp) {
filters = Object.assign(Object.assign({}, filters), { _expected: options.expectedTimestamp });
filters = {
...filters,
_expected: options.expectedTimestamp,
};
}

@@ -1011,3 +1024,3 @@ // First fetch remote changes from the server

}
return utils_1.waterfall(this.hooks[hookName].map((hook) => {
return (0, utils_1.waterfall)(this.hooks[hookName].map((hook) => {
return (record) => {

@@ -1074,3 +1087,3 @@ const result = hook(payload, this);

// Store outgoing errors into sync result object
syncResultObject.add("errors", synced.errors.map((e) => (Object.assign(Object.assign({}, e), { type: "outgoing" }))));
syncResultObject.add("errors", synced.errors.map((e) => ({ ...e, type: "outgoing" })));
// Store outgoing conflicts into sync result object

@@ -1084,3 +1097,3 @@ const conflicts = [];

// We can get "null" from the remote side if we got a conflict
// and there is no remote version available; see kinto-http.js
// and there is no remote version available; see src/http
// batch.js:aggregate.

@@ -1094,3 +1107,6 @@ const realRemote = remote && (await this._decodeRecord("remote", remote));

// to server (published) or deleted records that were never pushed (skipped).
const missingRemotely = synced.skipped.map((r) => (Object.assign(Object.assign({}, r), { deleted: true })));
const missingRemotely = synced.skipped.map((r) => ({
...r,
deleted: true,
}));
// For created and updated records, the last_modified coming from server

@@ -1121,3 +1137,3 @@ // will be stored locally.

const localKeys = [...RECORD_FIELDS_TO_CLEAN, ...this.localFields];
return utils_2.omitKeys(record, localKeys);
return (0, utils_2.omitKeys)(record, localKeys);
}

@@ -1144,9 +1160,11 @@ /**

_resolveRaw(conflict, resolution) {
const resolved = Object.assign(Object.assign({}, resolution), {
const resolved = {
...resolution,
// Ensure local record has the latest authoritative timestamp
last_modified: conflict.remote && conflict.remote.last_modified });
last_modified: conflict.remote && conflict.remote.last_modified,
};
// If the resolution object is strictly equal to the
// remote record, then we can mark it as synced locally.
// Otherwise, mark it as updated (so that the resolution is pushed).
const synced = utils_1.deepEqual(resolved, conflict.remote);
const synced = (0, utils_1.deepEqual)(resolved, conflict.remote);
return markStatus(resolved, synced ? "synced" : "updated");

@@ -1186,4 +1204,7 @@ }

}) {
var _a, _b;
options = Object.assign(Object.assign({}, options), { bucket: options.bucket || this.bucket, collection: options.collection || this.name });
options = {
...options,
bucket: options.bucket || this.bucket,
collection: options.collection || this.name,
};
const previousRemote = this.api.remote;

@@ -1229,3 +1250,7 @@ if (options.remote) {

// Avoid redownloading our own changes during the last pull.
const pullOpts = Object.assign(Object.assign({}, options), { lastModified, exclude: result.published });
const pullOpts = {
...options,
lastModified,
exclude: result.published,
};
await this.pullChanges(client, result, pullOpts);

@@ -1241,3 +1266,3 @@ }

catch (e) {
(_a = this.events) === null || _a === void 0 ? void 0 : _a.emit("sync:error", Object.assign(Object.assign({}, options), { error: e }));
this.events?.emit("sync:error", { ...options, error: e });
throw e;

@@ -1249,3 +1274,3 @@ }

}
(_b = this.events) === null || _b === void 0 ? void 0 : _b.emit("sync:success", Object.assign(Object.assign({}, options), { result }));
this.events?.emit("sync:success", { ...options, result });
return result;
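
The guarded event emits follow the same pattern: the `_a`/`_b` temporaries disappear because the output can use optional chaining directly. A side-by-side sketch (names assumed):

```js
// Equivalent forms of the guarded emit, before and after the target bump.
const events = undefined; // e.g. no emitter was passed in options

// 14.x-style emit:
var _a;
(_a = events) === null || _a === void 0 ? void 0 : _a.emit("sync:success", { ok: true });

// 15.x-style emit:
events?.emit("sync:success", { ok: true });
```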

@@ -1316,3 +1341,6 @@ }

: undefined;
const metadata = await client.getData(Object.assign(Object.assign({}, query), { headers }));
const metadata = await client.getData({
...query,
headers,
});
return this.db.saveMetadata(metadata);

@@ -1346,9 +1374,11 @@ }

emitEvents() {
var _a, _b;
for (const { action, payload } of this._events) {
(_a = this.collection.events) === null || _a === void 0 ? void 0 : _a.emit(action, payload);
this.collection.events?.emit(action, payload);
}
if (this._events.length > 0) {
const targets = this._events.map(({ action, payload }) => (Object.assign({ action }, payload)));
(_b = this.collection.events) === null || _b === void 0 ? void 0 : _b.emit("change", { targets });
const targets = this._events.map(({ action, payload }) => ({
action,
...payload,
}));
this.collection.events?.emit("change", { targets });
}

@@ -1446,3 +1476,3 @@ this._events = [];

return {
data: Object.assign({ id }, existing),
data: { id, ...existing },
deleted: !!existing,

@@ -1502,3 +1532,3 @@ permissions: {},

}
const newRecord = options.patch ? Object.assign(Object.assign({}, oldRecord), record) : record;
const newRecord = options.patch ? { ...oldRecord, ...record } : record;
const updated = this._updateRaw(oldRecord, newRecord, options);

@@ -1518,3 +1548,3 @@ this.adapterTransaction.update(updated);

_updateRaw(oldRecord, newRecord, { synced = false } = {}) {
const updated = Object.assign({}, newRecord);
const updated = { ...newRecord };
// Make sure to never loose the existing timestamp.

@@ -1521,0 +1551,0 @@ if (oldRecord && oldRecord.last_modified && !updated.last_modified) {

"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {

@@ -25,4 +29,5 @@ if (k2 === undefined) k2 = k;

Object.defineProperty(exports, "__esModule", { value: true });
exports.getDeepKey = exports.AbstractBaseAdapter = exports.BaseAdapter = exports.KintoBase = void 0;
const kinto_http_1 = __importDefault(require("kinto-http"));
exports.getDeepKey = exports.AbstractBaseAdapter = exports.BaseAdapter = exports.KintoBase = exports.KintoClient = void 0;
const http_1 = __importDefault(require("./http"));
exports.KintoClient = http_1.default;
const base_1 = __importStar(require("./adapters/base"));

@@ -50,3 +55,3 @@ exports.BaseAdapter = base_1.default;

get ApiClass() {
return kinto_http_1.default;
return http_1.default;
}

@@ -59,5 +64,5 @@ constructor(options = {}) {

};
super(Object.assign(Object.assign({}, defaults), options));
super({ ...defaults, ...options });
}
}
exports.default = Kinto;

@@ -14,4 +14,28 @@ "use strict";

*/
/* eslint-disable @typescript-eslint/no-unused-vars */
class KintoBase {
/**
* Provides a public access to the base adapter class. Users can create a
* custom DB adapter by extending {@link BaseAdapter}.
*
* @type {Object}
*/
static get adapters() {
return {
BaseAdapter: base_1.default,
};
}
/**
* Synchronization strategies. Available strategies are:
*
* - `MANUAL`: Conflicts will be reported in a dedicated array.
* - `SERVER_WINS`: Conflicts are resolved using remote data.
* - `CLIENT_WINS`: Conflicts are resolved using local data.
*
* @type {Object}
*/
static get syncStrategy() {
return collection_1.default.strategy;
}
/**
* Constructor.

@@ -38,3 +62,3 @@ *

};
this._options = Object.assign(Object.assign({}, defaults), options);
this._options = { ...defaults, ...options };
if (!this._options.adapter) {

@@ -50,25 +74,2 @@ throw new Error("No adapter provided");

}
/**
* Provides a public access to the base adapter class. Users can create a
* custom DB adapter by extending {@link BaseAdapter}.
*
* @type {Object}
*/
static get adapters() {
return {
BaseAdapter: base_1.default,
};
}
/**
* Synchronization strategies. Available strategies are:
*
* - `MANUAL`: Conflicts will be reported in a dedicated array.
* - `SERVER_WINS`: Conflicts are resolved using remote data.
* - `CLIENT_WINS`: Conflicts are resolved using local data.
*
* @type {Object}
*/
static get syncStrategy() {
return collection_1.default.strategy;
}
get ApiClass() {

@@ -82,3 +83,3 @@ throw new Error("ApiClass() must be implemented by subclasses.");

get api() {
const { events, headers, remote, requestMode, retry, timeout, } = this._options;
const { events, headers, remote, requestMode, retry, timeout } = this._options;
if (!this._api) {

@@ -111,3 +112,6 @@ this._api = new this.ApiClass(remote, {

}
const { bucket, events, adapter, adapterOptions } = Object.assign(Object.assign({}, this._options), options);
const { bucket, events, adapter, adapterOptions } = {
...this._options,
...options,
};
const { idSchema, remoteTransformers, hooks, localFields } = options;

@@ -114,0 +118,0 @@ return new collection_1.default(bucket, collName, this, {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getDeepKey = exports.transformSubObjectFilters = exports.arrayEqual = exports.omitKeys = exports.deepEqual = exports.waterfall = exports.filterObjects = exports.filterObject = exports.sortObjects = exports.RE_RECORD_ID = void 0;
exports.obscureAuthorizationHeader = exports.addEndpointOptions = exports.cleanUndefinedProperties = exports.createFormData = exports.extractFileInfo = exports.parseDataURL = exports.isObject = exports.nobatch = exports.capable = exports.support = exports.checkVersion = exports.qsify = exports.toDataBody = exports.delay = exports.partition = exports.getDeepKey = exports.transformSubObjectFilters = exports.arrayEqual = exports.omitKeys = exports.deepEqual = exports.waterfall = exports.filterObjects = exports.filterObject = exports.sortObjects = exports.RE_RECORD_ID = void 0;
exports.RE_RECORD_ID = /^[a-zA-Z0-9][a-zA-Z0-9_-]*$/;

@@ -129,3 +129,3 @@ /**

function omitKeys(obj, keys = []) {
const result = Object.assign({}, obj);
const result = { ...obj };
for (const key of keys) {

@@ -158,5 +158,3 @@ delete result[key];

}
else {
return (acc[cv] = {});
}
return (acc[cv] = {});
}, nestedFiltersObj);

@@ -185,4 +183,323 @@ }

}
return result !== null && result !== void 0 ? result : undefined;
return result ?? undefined;
}
exports.getDeepKey = getDeepKey;
/**
* Chunks an array into n pieces.
*
* @private
* @param {Array} array
* @param {Number} n
* @return {Array}
*/
function partition(array, n) {
if (n <= 0) {
return [array];
}
return array.reduce((acc, x, i) => {
if (i === 0 || i % n === 0) {
acc.push([x]);
}
else {
acc[acc.length - 1].push(x);
}
return acc;
}, []);
}
exports.partition = partition;
/**
* Returns a Promise always resolving after the specified amount in milliseconds.
*
* @return Promise<void>
*/
function delay(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
exports.delay = delay;
/**
* Always returns a resource data object from the provided argument.
*
* @private
* @param {Object|String} resource
* @return {Object}
*/
function toDataBody(resource) {
if (isObject(resource)) {
return resource;
}
if (typeof resource === "string") {
return { id: resource };
}
throw new Error("Invalid argument.");
}
exports.toDataBody = toDataBody;
/**
* Transforms an object into an URL query string, stripping out any undefined
* values.
*
* @param {Object} obj
* @return {String}
*/
function qsify(obj) {
const encode = (v) => encodeURIComponent(typeof v === "boolean" ? String(v) : v);
const stripped = cleanUndefinedProperties(obj);
return Object.keys(stripped)
.map((k) => {
const ks = encode(k) + "=";
if (Array.isArray(stripped[k])) {
return ks + stripped[k].map((v) => encode(v)).join(",");
}
return ks + encode(stripped[k]);
})
.join("&");
}
exports.qsify = qsify;
/**
* Checks if a version is within the provided range.
*
* @param {String} version The version to check.
* @param {String} minVersion The minimum supported version (inclusive).
* @param {String} maxVersion The minimum supported version (exclusive).
* @throws {Error} If the version is outside of the provided range.
*/
function checkVersion(version, minVersion, maxVersion) {
const extract = (str) => str.split(".").map((x) => parseInt(x, 10));
const [verMajor, verMinor] = extract(version);
const [minMajor, minMinor] = extract(minVersion);
const [maxMajor, maxMinor] = extract(maxVersion);
const checks = [
verMajor < minMajor,
verMajor === minMajor && verMinor < minMinor,
verMajor > maxMajor,
verMajor === maxMajor && verMinor >= maxMinor,
];
if (checks.some((x) => x)) {
throw new Error(`Version ${version} doesn't satisfy ${minVersion} <= x < ${maxVersion}`);
}
}
exports.checkVersion = checkVersion;
/**
* Generates a decorator function ensuring a version check is performed against
* the provided requirements before executing it.
*
* @param {String} min The required min version (inclusive).
* @param {String} max The required max version (inclusive).
* @return {Function}
*/
function support(min, max) {
return function (
// @ts-ignore
target, key, descriptor) {
const fn = descriptor.value;
return {
configurable: true,
get() {
const wrappedMethod = (...args) => {
// "this" is the current instance which its method is decorated.
const client = this.client ? this.client : this;
return client
.fetchHTTPApiVersion()
.then((version) => checkVersion(version, min, max))
.then(() => fn.apply(this, args));
};
Object.defineProperty(this, key, {
value: wrappedMethod,
configurable: true,
writable: true,
});
return wrappedMethod;
},
};
};
}
exports.support = support;
/**
* Generates a decorator function ensuring that the specified capabilities are
* available on the server before executing it.
*
* @param {Array<String>} capabilities The required capabilities.
* @return {Function}
*/
function capable(capabilities) {
return function (
// @ts-ignore
target, key, descriptor) {
const fn = descriptor.value;
return {
configurable: true,
get() {
const wrappedMethod = (...args) => {
// "this" is the current instance which its method is decorated.
const client = this.client ? this.client : this;
return client
.fetchServerCapabilities()
.then((available) => {
const missing = capabilities.filter((c) => !(c in available));
if (missing.length) {
const missingStr = missing.join(", ");
throw new Error(`Required capabilities ${missingStr} not present on server`);
}
})
.then(() => fn.apply(this, args));
};
Object.defineProperty(this, key, {
value: wrappedMethod,
configurable: true,
writable: true,
});
return wrappedMethod;
},
};
};
}
exports.capable = capable;
/**
* Generates a decorator function ensuring an operation is not performed from
* within a batch request.
*
* @param {String} message The error message to throw.
* @return {Function}
*/
function nobatch(message) {
return function (
// @ts-ignore
target, key, descriptor) {
const fn = descriptor.value;
return {
configurable: true,
get() {
const wrappedMethod = (...args) => {
// "this" is the current instance which its method is decorated.
if (this._isBatch) {
throw new Error(message);
}
return fn.apply(this, args);
};
Object.defineProperty(this, key, {
value: wrappedMethod,
configurable: true,
writable: true,
});
return wrappedMethod;
},
};
};
}
exports.nobatch = nobatch;
/**
* Returns true if the specified value is an object (i.e. not an array nor null).
* @param {Object} thing The value to inspect.
* @return {bool}
*/
function isObject(thing) {
return typeof thing === "object" && thing !== null && !Array.isArray(thing);
}
exports.isObject = isObject;
/**
* Parses a data url.
* @param {String} dataURL The data url.
* @return {Object}
*/
function parseDataURL(dataURL) {
const regex = /^data:(.*);base64,(.*)/;
const match = dataURL.match(regex);
if (!match) {
throw new Error(`Invalid data-url: ${String(dataURL).substring(0, 32)}...`);
}
const props = match[1];
const base64 = match[2];
const [type, ...rawParams] = props.split(";");
const params = rawParams.reduce((acc, param) => {
const [key, value] = param.split("=");
return { ...acc, [key]: value };
}, {});
return { ...params, type, base64 };
}
exports.parseDataURL = parseDataURL;
/**
* Extracts file information from a data url.
* @param {String} dataURL The data url.
* @return {Object}
*/
function extractFileInfo(dataURL) {
const { name, type, base64 } = parseDataURL(dataURL);
const binary = atob(base64);
const array = [];
for (let i = 0; i < binary.length; i++) {
array.push(binary.charCodeAt(i));
}
const blob = new Blob([new Uint8Array(array)], { type });
return { blob, name };
}
exports.extractFileInfo = extractFileInfo;
/**
* Creates a FormData instance from a data url and an existing JSON response
* body.
* @param {String} dataURL The data url.
* @param {Object} body The response body.
* @param {Object} [options={}] The options object.
* @param {Object} [options.filename] Force attachment file name.
* @return {FormData}
*/
function createFormData(dataURL, body, options = {}) {
const { filename = "untitled" } = options;
const { blob, name } = extractFileInfo(dataURL);
const formData = new FormData();
formData.append("attachment", blob, name || filename);
for (const property in body) {
if (typeof body[property] !== "undefined") {
formData.append(property, JSON.stringify(body[property]));
}
}
return formData;
}
exports.createFormData = createFormData;
/**
* Clones an object with all its undefined keys removed.
* @private
*/
function cleanUndefinedProperties(obj) {
const result = {};
for (const key in obj) {
if (typeof obj[key] !== "undefined") {
result[key] = obj[key];
}
}
return result;
}
exports.cleanUndefinedProperties = cleanUndefinedProperties;
/**
* Handle common query parameters for Kinto requests.
*
* @param {String} [path] The endpoint base path.
* @param {Array} [options.fields] Fields to limit the
* request to.
* @param {Object} [options.query={}] Additional query arguments.
*/
function addEndpointOptions(path, options = {}) {
const query = { ...options.query };
if (options.fields) {
query._fields = options.fields;
}
const queryString = qsify(query);
if (queryString) {
return path + "?" + queryString;
}
return path;
}
exports.addEndpointOptions = addEndpointOptions;
/**
* Replace authorization header with an obscured version
*/
function obscureAuthorizationHeader(headers) {
const h = new Headers(headers);
if (h.has("authorization")) {
h.set("authorization", "**** (suppressed)");
}
const obscuredHeaders = {};
for (const [header, value] of h.entries()) {
obscuredHeaders[header] = value;
}
return obscuredHeaders;
}
exports.obscureAuthorizationHeader = obscureAuthorizationHeader;
import BaseAdapter, { StorageProxy } from "./adapters/base";
import KintoBase from "./KintoBase";
import { Collection as KintoCollection } from "kinto-http";
import { Collection as KintoCollection } from "./http";
import { KintoRepresentation, IdSchema, RemoteTransformer, AvailableHook, Hooks, UndefiendKintoRepresentation, UpdateRepresentation, WithOptional, RecordStatus, SyncResult, KintoError, Conflict, Emitter, CollectionSyncOptions } from "./types";

@@ -122,3 +122,3 @@ /**

*/
get api(): import("kinto-http").default;
get api(): import("./http").default;
/**

@@ -458,3 +458,3 @@ * The collection name.

*/
cleanLocalFields(record: B): Partial<B>;
cleanLocalFields(record: B): Omit<B, string>;
/**

@@ -461,0 +461,0 @@ * Resolves a conflict, updating local record according to proposed

@@ -73,6 +73,6 @@ import BaseAdapter from "./adapters/base";

get errors() {
return this._lists["errors"];
return this._lists.errors;
}
get conflicts() {
return this._lists["conflicts"];
return this._lists.conflicts;
}

@@ -183,3 +183,3 @@ get skipped() {

function markStatus(record, status) {
return Object.assign(Object.assign({}, record), { _status: status });
return { ...record, _status: status };
}

@@ -214,3 +214,3 @@ function markDeleted(record) {

// Apply remote changes on local record.
const synced = Object.assign(Object.assign({}, local), markSynced(remote));
const synced = { ...local, ...markSynced(remote) };
// With pull only, we don't need to compare records since we override them.

@@ -254,3 +254,3 @@ if (strategy === Collection.strategy.PULL_ONLY) {

type: "conflicts",
data: { type: "incoming", local: local, remote: remote },
data: { type: "incoming", local, remote },
};

@@ -536,5 +536,9 @@ }

}
const newRecord = Object.assign(Object.assign({}, record), { id: options.synced || options.useRecordId
const newRecord = {
...record,
id: options.synced || options.useRecordId
? record.id
: this.idSchema.generate(record), _status: options.synced ? "synced" : "created" });
: this.idSchema.generate(record),
_status: options.synced ? "synced" : "created",
};
if (!this.idSchema.validate(newRecord.id)) {

@@ -580,9 +584,6 @@ return reject(`Invalid Id: ${newRecord.id}`);

}
return this.execute((txn) => {
var _a, _b;
return txn.update(record, {
synced: (_a = options.synced) !== null && _a !== void 0 ? _a : false,
patch: (_b = options.patch) !== null && _b !== void 0 ? _b : false,
});
}, {
return this.execute((txn) => txn.update(record, {
synced: options.synced ?? false,
patch: options.patch ?? false,
}), {
preloadIds: [record.id],

@@ -689,3 +690,3 @@ });

async list(params = {}, options = { includeDeleted: false }) {
params = Object.assign({ order: "-last_modified", filters: {} }, params);
params = { order: "-last_modified", filters: {}, ...params };
const results = await this.db.list(params);

@@ -803,3 +804,3 @@ let data = results;

// We can get "null" from the remote side if we got a conflict
// and there is no remote version available; see kinto-http.js
// and there is no remote version available; see src/http
// batch.js:aggregate.

@@ -877,3 +878,7 @@ transaction.delete(conflict.local.id);

// Records that were synced become «created».
transaction.update(Object.assign(Object.assign({}, record), { last_modified: undefined, _status: "created" }));
transaction.update({
...record,
last_modified: undefined,
_status: "created",
});
}

@@ -929,3 +934,8 @@ });

: await this.db.getLastModified();
options = Object.assign({ strategy: Collection.strategy.MANUAL, lastModified: since, headers: {} }, options);
options = {
strategy: Collection.strategy.MANUAL,
lastModified: since,
headers: {},
...options,
};
// Optionally ignore some records when pulling for changes.

@@ -945,3 +955,6 @@ // (avoid redownloading our own changes on last step of #sync())

if (options.expectedTimestamp) {
filters = Object.assign(Object.assign({}, filters), { _expected: options.expectedTimestamp });
filters = {
...filters,
_expected: options.expectedTimestamp,
};
}

@@ -1059,3 +1072,3 @@ // First fetch remote changes from the server

// Store outgoing errors into sync result object
syncResultObject.add("errors", synced.errors.map((e) => (Object.assign(Object.assign({}, e), { type: "outgoing" }))));
syncResultObject.add("errors", synced.errors.map((e) => ({ ...e, type: "outgoing" })));
// Store outgoing conflicts into sync result object

@@ -1069,3 +1082,3 @@ const conflicts = [];

// We can get "null" from the remote side if we got a conflict
// and there is no remote version available; see kinto-http.js
// and there is no remote version available; see src/http
// batch.js:aggregate.

@@ -1079,3 +1092,6 @@ const realRemote = remote && (await this._decodeRecord("remote", remote));

// to server (published) or deleted records that were never pushed (skipped).
const missingRemotely = synced.skipped.map((r) => (Object.assign(Object.assign({}, r), { deleted: true })));
const missingRemotely = synced.skipped.map((r) => ({
...r,
deleted: true,
}));
// For created and updated records, the last_modified coming from server

@@ -1128,5 +1144,7 @@ // will be stored locally.

_resolveRaw(conflict, resolution) {
const resolved = Object.assign(Object.assign({}, resolution), {
const resolved = {
...resolution,
// Ensure local record has the latest authoritative timestamp
last_modified: conflict.remote && conflict.remote.last_modified });
last_modified: conflict.remote && conflict.remote.last_modified,
};
// If the resolution object is strictly equal to the

@@ -1170,4 +1188,7 @@ // remote record, then we can mark it as synced locally.

}) {
var _a, _b;
options = Object.assign(Object.assign({}, options), { bucket: options.bucket || this.bucket, collection: options.collection || this.name });
options = {
...options,
bucket: options.bucket || this.bucket,
collection: options.collection || this.name,
};
const previousRemote = this.api.remote;

@@ -1213,3 +1234,7 @@ if (options.remote) {

// Avoid redownloading our own changes during the last pull.
const pullOpts = Object.assign(Object.assign({}, options), { lastModified, exclude: result.published });
const pullOpts = {
...options,
lastModified,
exclude: result.published,
};
await this.pullChanges(client, result, pullOpts);

@@ -1225,3 +1250,3 @@ }

catch (e) {
(_a = this.events) === null || _a === void 0 ? void 0 : _a.emit("sync:error", Object.assign(Object.assign({}, options), { error: e }));
this.events?.emit("sync:error", { ...options, error: e });
throw e;

@@ -1233,3 +1258,3 @@ }

}
(_b = this.events) === null || _b === void 0 ? void 0 : _b.emit("sync:success", Object.assign(Object.assign({}, options), { result }));
this.events?.emit("sync:success", { ...options, result });
return result;

@@ -1300,3 +1325,6 @@ }

: undefined;
const metadata = await client.getData(Object.assign(Object.assign({}, query), { headers }));
const metadata = await client.getData({
...query,
headers,
});
return this.db.saveMetadata(metadata);

@@ -1329,9 +1357,11 @@ }

emitEvents() {
var _a, _b;
for (const { action, payload } of this._events) {
(_a = this.collection.events) === null || _a === void 0 ? void 0 : _a.emit(action, payload);
this.collection.events?.emit(action, payload);
}
if (this._events.length > 0) {
const targets = this._events.map(({ action, payload }) => (Object.assign({ action }, payload)));
(_b = this.collection.events) === null || _b === void 0 ? void 0 : _b.emit("change", { targets });
const targets = this._events.map(({ action, payload }) => ({
action,
...payload,
}));
this.collection.events?.emit("change", { targets });
}

@@ -1429,3 +1459,3 @@ this._events = [];

return {
data: Object.assign({ id }, existing),
data: { id, ...existing },
deleted: !!existing,

@@ -1485,3 +1515,3 @@ permissions: {},

}
const newRecord = options.patch ? Object.assign(Object.assign({}, oldRecord), record) : record;
const newRecord = options.patch ? { ...oldRecord, ...record } : record;
const updated = this._updateRaw(oldRecord, newRecord, options);

@@ -1501,3 +1531,3 @@ this.adapterTransaction.update(updated);

_updateRaw(oldRecord, newRecord, { synced = false } = {}) {
const updated = Object.assign({}, newRecord);
const updated = { ...newRecord };
// Make sure to never loose the existing timestamp.

@@ -1504,0 +1534,0 @@ if (oldRecord && oldRecord.last_modified && !updated.last_modified) {

@@ -1,2 +0,2 @@

import Api from "kinto-http";
import KintoClient from "./http";
import BaseAdapter, { AbstractBaseAdapter } from "./adapters/base";

@@ -25,6 +25,6 @@ import IDB from "./adapters/IDB";

};
get ApiClass(): typeof Api;
get ApiClass(): typeof KintoClient;
constructor(options?: KintoBaseOptions);
}
export type { StorageProxy, RecordStatus, KintoBaseOptions, Collection, CollectionSyncOptions, Conflict, };
export { KintoBase, BaseAdapter, AbstractBaseAdapter, getDeepKey };
export { KintoClient, KintoBase, BaseAdapter, AbstractBaseAdapter, getDeepKey };

@@ -1,2 +0,2 @@

import Api from "kinto-http";
import KintoClient from "./http";
import BaseAdapter, { AbstractBaseAdapter } from "./adapters/base";

@@ -20,3 +20,3 @@ import IDB from "./adapters/IDB";

get ApiClass() {
return Api;
return KintoClient;
}

@@ -29,5 +29,5 @@ constructor(options = {}) {

};
super(Object.assign(Object.assign({}, defaults), options));
super({ ...defaults, ...options });
}
}
export { KintoBase, BaseAdapter, AbstractBaseAdapter, getDeepKey };
export { KintoClient, KintoBase, BaseAdapter, AbstractBaseAdapter, getDeepKey };
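
The public entry point now re-exports the bundled HTTP client, so code that previously installed `kinto-http` separately can import `KintoClient` from `kinto` itself. A hedged usage sketch, assuming the default export used in the README example further down; the exact import style depends on your module setup:

```js
import Kinto, { KintoClient } from "kinto";

// Offline-first store, as before:
const kinto = new Kinto({ remote: "https://demo.kinto-storage.org/v1/" });
const posts = kinto.collection("posts");

// Direct HTTP client, now shipped inside the package instead of kinto-http:
const client = new KintoClient("https://demo.kinto-storage.org/v1/");
```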

@@ -1,2 +0,2 @@

import Api from "kinto-http";
import Api from "./http";
import Collection from "./collection";

@@ -3,0 +3,0 @@ import BaseAdapter from "./adapters/base";

@@ -9,4 +9,28 @@ import Collection from "./collection";

*/
/* eslint-disable @typescript-eslint/no-unused-vars */
export default class KintoBase {
/**
* Provides a public access to the base adapter class. Users can create a
* custom DB adapter by extending {@link BaseAdapter}.
*
* @type {Object}
*/
static get adapters() {
return {
BaseAdapter,
};
}
/**
* Synchronization strategies. Available strategies are:
*
* - `MANUAL`: Conflicts will be reported in a dedicated array.
* - `SERVER_WINS`: Conflicts are resolved using remote data.
* - `CLIENT_WINS`: Conflicts are resolved using local data.
*
* @type {Object}
*/
static get syncStrategy() {
return Collection.strategy;
}
/**
* Constructor.

@@ -33,3 +57,3 @@ *

};
this._options = Object.assign(Object.assign({}, defaults), options);
this._options = { ...defaults, ...options };
if (!this._options.adapter) {

@@ -45,25 +69,2 @@ throw new Error("No adapter provided");

}
/**
* Provides a public access to the base adapter class. Users can create a
* custom DB adapter by extending {@link BaseAdapter}.
*
* @type {Object}
*/
static get adapters() {
return {
BaseAdapter: BaseAdapter,
};
}
/**
* Synchronization strategies. Available strategies are:
*
* - `MANUAL`: Conflicts will be reported in a dedicated array.
* - `SERVER_WINS`: Conflicts are resolved using remote data.
* - `CLIENT_WINS`: Conflicts are resolved using local data.
*
* @type {Object}
*/
static get syncStrategy() {
return Collection.strategy;
}
get ApiClass() {

@@ -77,3 +78,3 @@ throw new Error("ApiClass() must be implemented by subclasses.");

get api() {
const { events, headers, remote, requestMode, retry, timeout, } = this._options;
const { events, headers, remote, requestMode, retry, timeout } = this._options;
if (!this._api) {

@@ -106,3 +107,6 @@ this._api = new this.ApiClass(remote, {

}
const { bucket, events, adapter, adapterOptions } = Object.assign(Object.assign({}, this._options), options);
const { bucket, events, adapter, adapterOptions } = {
...this._options,
...options,
};
const { idSchema, remoteTransformers, hooks, localFields } = options;

@@ -109,0 +113,0 @@ return new Collection(bucket, collName, this, {

@@ -1,6 +0,119 @@

import { KintoIdObject, Permission } from "kinto-http";
import Collection from "./collection";
export declare type $TSFixMe = any;
export declare type WithOptional<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>;
export declare type WithRequired<T, K extends keyof T> = T & Required<Pick<T, K>>;
export type $TSFixMe = any;
export type WithOptional<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>;
export type WithRequired<T, K extends keyof T> = T & Required<Pick<T, K>>;
export type HttpMethod = "GET" | "POST" | "PUT" | "PATCH" | "DELETE" | "HEAD";
export interface KintoRequest {
method?: HttpMethod;
path: string;
headers: Record<string, unknown>;
body?: any;
}
export interface KintoIdObject {
id: string;
[key: string]: unknown;
}
export interface KintoObject extends KintoIdObject {
last_modified: number;
}
export type Permission = "bucket:create" | "read" | "write" | "collection:create" | "group:create" | "record:create";
export interface User {
id: string;
principals: string[];
bucket: string;
}
export interface ServerCapability {
description: string;
url: string;
version?: string;
[key: string]: unknown;
}
export interface ServerSettings {
readonly: boolean;
batch_max_requests: number;
}
export interface HelloResponse {
project_name: string;
project_version: string;
http_api_version: string;
project_docs: string;
url: string;
settings: ServerSettings;
user?: User;
capabilities: {
[key: string]: ServerCapability;
};
}
export interface OperationResponse<T = KintoObject> {
status: number;
path: string;
body: {
data: T;
};
headers: Record<string, string>;
}
export interface BatchResponse {
responses: OperationResponse[];
}
export interface DataResponse<T> {
data: T;
}
export type MappableObject = {
[key in string | number]: unknown;
};
export interface KintoResponse<T = unknown> {
data: KintoObject & T;
permissions: {
[key in Permission]?: string[];
};
}
export interface HistoryEntry<T> {
action: "create" | "update" | "delete";
collection_id: string;
date: string;
id: string;
last_modified: number;
record_id: string;
resource_name: string;
target: KintoResponse<T>;
timestamp: number;
uri: string;
user_id: string;
}
export interface PermissionData {
bucket_id: string;
collection_id?: string;
id: string;
permissions: Permission[];
resource_name: string;
uri: string;
}
export interface Attachment {
filename: string;
hash: string;
location: string;
mimetype: string;
size: number;
}
export interface Group extends KintoObject {
members: string[];
}
export interface Emitter {
emit(type: string, event?: any): void;
on(type: string, handler: (event?: any) => void): void;
off(type: string, handler: (event?: any) => void): void;
}
export interface FetchHeaders {
keys(): IterableIterator<string> | string[];
entries(): IterableIterator<[string, string]> | [string, string][];
get(name: string): string | null;
has(name: string): boolean;
}
export interface FetchResponse {
status: number;
statusText: string;
text(): Promise<string>;
headers: FetchHeaders;
}
export type FetchFunction = (input: RequestInfo, init?: RequestInit | undefined) => Promise<FetchResponse>;
export interface IdSchema {

@@ -14,4 +127,4 @@ generate(record?: any): string;

}
export declare type AvailableHook = "incoming-changes";
export declare type Hooks<T extends {
export type AvailableHook = "incoming-changes";
export type Hooks<T extends {
id: string;

@@ -32,7 +145,7 @@ [key: string]: unknown;

}
export declare type UndefiendKintoRepresentation<T> = WithOptional<KintoRepresentation<T>, "data">;
export type UndefiendKintoRepresentation<T> = WithOptional<KintoRepresentation<T>, "data">;
export interface UpdateRepresentation<T = unknown> extends KintoRepresentation<T> {
oldRecord: KintoIdObject & T;
}
export declare type RecordStatus = "created" | "updated" | "deleted" | "synced";
export type RecordStatus = "created" | "updated" | "deleted" | "synced";
export interface Conflict<T> {

@@ -99,3 +212,3 @@ type: "incoming" | "outgoing";

}
export declare type Change<T> = CreatedChange<T> | UpdatedChange<T> | DeletedChange<T> | ResolvedChange | ErrorChange | PublishedChange | ConflictsChange<T> | SkippedChange<T> | VoidChange;
export type Change<T> = CreatedChange<T> | UpdatedChange<T> | DeletedChange<T> | ResolvedChange | ErrorChange | PublishedChange | ConflictsChange<T> | SkippedChange<T> | VoidChange;
export interface Emitter {

@@ -102,0 +215,0 @@ emit(type: string, event?: any): void;

export {};

@@ -31,4 +31,6 @@ export declare const RE_RECORD_ID: RegExp;

*/
export declare function filterObjects<T>(filters: {
export declare function filterObjects<T extends {
[key: string]: any;
}>(filters: {
[key: string]: any;
}, list: T[]): T[];

@@ -61,4 +63,4 @@ /**

export declare function omitKeys<T extends {
[key: string]: any;
}>(obj: T, keys?: string[]): Partial<T>;
[key: string]: unknown;
}, K extends string>(obj: T, keys?: K[]): Omit<T, K>;
export declare function arrayEqual(a: unknown[], b: unknown[]): boolean;
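
`omitKeys` keeps the same runtime behavior, but its declared return type narrows from `Partial<T>` to `Omit<T, K>`, and `cleanLocalFields` similarly changes from `Partial<B>` to `Omit<B, string>`. A small usage sketch using the implementation shown elsewhere in this diff:

```js
// Runtime behavior is unchanged; only the declaration file gets more precise.
function omitKeys(obj, keys = []) {
  const result = { ...obj };
  for (const key of keys) {
    delete result[key];
  }
  return result;
}

const record = { id: "1", title: "post", _status: "synced", last_modified: 42 };
console.log(omitKeys(record, ["_status", "last_modified"])); // { id: "1", title: "post" }
```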

@@ -74,1 +76,145 @@ export declare function transformSubObjectFilters(filtersObj: {

export declare function getDeepKey(obj: any, key: string): unknown;
/**
* Chunks an array into n pieces.
*
* @private
* @param {Array} array
* @param {Number} n
* @return {Array}
*/
export declare function partition<T>(array: T[], n: number): T[][];
/**
* Returns a Promise always resolving after the specified amount in milliseconds.
*
* @return Promise<void>
*/
export declare function delay(ms: number): Promise<void>;
interface Entity {
id: string;
}
/**
* Always returns a resource data object from the provided argument.
*
* @private
* @param {Object|String} resource
* @return {Object}
*/
export declare function toDataBody<T extends Entity>(resource: T | string): Entity;
/**
* Transforms an object into an URL query string, stripping out any undefined
* values.
*
* @param {Object} obj
* @return {String}
*/
export declare function qsify(obj: {
[key: string]: any;
}): string;
/**
* Checks if a version is within the provided range.
*
* @param {String} version The version to check.
* @param {String} minVersion The minimum supported version (inclusive).
* @param {String} maxVersion The minimum supported version (exclusive).
* @throws {Error} If the version is outside of the provided range.
*/
export declare function checkVersion(version: string, minVersion: string, maxVersion: string): void;
type DecoratorReturn = (target: any, key: string, descriptor: TypedPropertyDescriptor<(...args: any[]) => any>) => {
configurable: boolean;
get(): (...args: any) => Promise<any>;
};
/**
* Generates a decorator function ensuring a version check is performed against
* the provided requirements before executing it.
*
* @param {String} min The required min version (inclusive).
* @param {String} max The required max version (inclusive).
* @return {Function}
*/
export declare function support(min: string, max: string): DecoratorReturn;
/**
* Generates a decorator function ensuring that the specified capabilities are
* available on the server before executing it.
*
* @param {Array<String>} capabilities The required capabilities.
* @return {Function}
*/
export declare function capable(capabilities: string[]): DecoratorReturn;
/**
* Generates a decorator function ensuring an operation is not performed from
* within a batch request.
*
* @param {String} message The error message to throw.
* @return {Function}
*/
export declare function nobatch(message: string): DecoratorReturn;
/**
* Returns true if the specified value is an object (i.e. not an array nor null).
* @param {Object} thing The value to inspect.
* @return {bool}
*/
export declare function isObject(thing: unknown): boolean;
interface TypedDataURL {
type: string;
base64: string;
[key: string]: string;
}
/**
* Parses a data url.
* @param {String} dataURL The data url.
* @return {Object}
*/
export declare function parseDataURL(dataURL: string): TypedDataURL;
/**
* Extracts file information from a data url.
* @param {String} dataURL The data url.
* @return {Object}
*/
export declare function extractFileInfo(dataURL: string): {
blob: Blob;
name: string;
};
/**
* Creates a FormData instance from a data url and an existing JSON response
* body.
* @param {String} dataURL The data url.
* @param {Object} body The response body.
* @param {Object} [options={}] The options object.
* @param {Object} [options.filename] Force attachment file name.
* @return {FormData}
*/
export declare function createFormData(dataURL: string, body: {
[key: string]: any;
}, options?: {
filename?: string;
}): FormData;
/**
* Clones an object with all its undefined keys removed.
* @private
*/
export declare function cleanUndefinedProperties(obj: {
[key: string]: any;
}): {
[key: string]: any;
};
/**
* Handle common query parameters for Kinto requests.
*
* @param {String} [path] The endpoint base path.
* @param {Array} [options.fields] Fields to limit the
* request to.
* @param {Object} [options.query={}] Additional query arguments.
*/
export declare function addEndpointOptions(path: string, options?: {
fields?: string[];
query?: {
[key: string]: string;
};
}): string;
/**
* Replace authorization header with an obscured version
*/
export declare function obscureAuthorizationHeader(headers: HeadersInit): {
[key: string]: string;
};
export {};

@@ -121,3 +121,3 @@ export const RE_RECORD_ID = /^[a-zA-Z0-9][a-zA-Z0-9_-]*$/;

export function omitKeys(obj, keys = []) {
const result = Object.assign({}, obj);
const result = { ...obj };
for (const key of keys) {

@@ -148,5 +148,3 @@ delete result[key];

}
else {
return (acc[cv] = {});
}
return (acc[cv] = {});
}, nestedFiltersObj);

@@ -174,3 +172,307 @@ }

}
return result !== null && result !== void 0 ? result : undefined;
return result ?? undefined;
}
/**
* Chunks an array into n pieces.
*
* @private
* @param {Array} array
* @param {Number} n
* @return {Array}
*/
export function partition(array, n) {
if (n <= 0) {
return [array];
}
return array.reduce((acc, x, i) => {
if (i === 0 || i % n === 0) {
acc.push([x]);
}
else {
acc[acc.length - 1].push(x);
}
return acc;
}, []);
}
/**
* Returns a Promise always resolving after the specified amount in milliseconds.
*
* @return Promise<void>
*/
export function delay(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
/**
* Always returns a resource data object from the provided argument.
*
* @private
* @param {Object|String} resource
* @return {Object}
*/
export function toDataBody(resource) {
if (isObject(resource)) {
return resource;
}
if (typeof resource === "string") {
return { id: resource };
}
throw new Error("Invalid argument.");
}
/**
* Transforms an object into an URL query string, stripping out any undefined
* values.
*
* @param {Object} obj
* @return {String}
*/
export function qsify(obj) {
const encode = (v) => encodeURIComponent(typeof v === "boolean" ? String(v) : v);
const stripped = cleanUndefinedProperties(obj);
return Object.keys(stripped)
.map((k) => {
const ks = encode(k) + "=";
if (Array.isArray(stripped[k])) {
return ks + stripped[k].map((v) => encode(v)).join(",");
}
return ks + encode(stripped[k]);
})
.join("&");
}
/**
* Checks if a version is within the provided range.
*
* @param {String} version The version to check.
* @param {String} minVersion The minimum supported version (inclusive).
* @param {String} maxVersion The minimum supported version (exclusive).
* @throws {Error} If the version is outside of the provided range.
*/
export function checkVersion(version, minVersion, maxVersion) {
const extract = (str) => str.split(".").map((x) => parseInt(x, 10));
const [verMajor, verMinor] = extract(version);
const [minMajor, minMinor] = extract(minVersion);
const [maxMajor, maxMinor] = extract(maxVersion);
const checks = [
verMajor < minMajor,
verMajor === minMajor && verMinor < minMinor,
verMajor > maxMajor,
verMajor === maxMajor && verMinor >= maxMinor,
];
if (checks.some((x) => x)) {
throw new Error(`Version ${version} doesn't satisfy ${minVersion} <= x < ${maxVersion}`);
}
}
/**
* Generates a decorator function ensuring a version check is performed against
* the provided requirements before executing it.
*
* @param {String} min The required min version (inclusive).
* @param {String} max The required max version (inclusive).
* @return {Function}
*/
export function support(min, max) {
return function (
// @ts-ignore
target, key, descriptor) {
const fn = descriptor.value;
return {
configurable: true,
get() {
const wrappedMethod = (...args) => {
// "this" is the current instance which its method is decorated.
const client = this.client ? this.client : this;
return client
.fetchHTTPApiVersion()
.then((version) => checkVersion(version, min, max))
.then(() => fn.apply(this, args));
};
Object.defineProperty(this, key, {
value: wrappedMethod,
configurable: true,
writable: true,
});
return wrappedMethod;
},
};
};
}
/**
* Generates a decorator function ensuring that the specified capabilities are
* available on the server before executing it.
*
* @param {Array<String>} capabilities The required capabilities.
* @return {Function}
*/
export function capable(capabilities) {
return function (
// @ts-ignore
target, key, descriptor) {
const fn = descriptor.value;
return {
configurable: true,
get() {
const wrappedMethod = (...args) => {
// "this" is the current instance which its method is decorated.
const client = this.client ? this.client : this;
return client
.fetchServerCapabilities()
.then((available) => {
const missing = capabilities.filter((c) => !(c in available));
if (missing.length) {
const missingStr = missing.join(", ");
throw new Error(`Required capabilities ${missingStr} not present on server`);
}
})
.then(() => fn.apply(this, args));
};
Object.defineProperty(this, key, {
value: wrappedMethod,
configurable: true,
writable: true,
});
return wrappedMethod;
},
};
};
}
/**
* Generates a decorator function ensuring an operation is not performed from
* within a batch request.
*
* @param {String} message The error message to throw.
* @return {Function}
*/
export function nobatch(message) {
return function (
// @ts-ignore
target, key, descriptor) {
const fn = descriptor.value;
return {
configurable: true,
get() {
const wrappedMethod = (...args) => {
// "this" is the current instance which its method is decorated.
if (this._isBatch) {
throw new Error(message);
}
return fn.apply(this, args);
};
Object.defineProperty(this, key, {
value: wrappedMethod,
configurable: true,
writable: true,
});
return wrappedMethod;
},
};
};
}
/**
* Returns true if the specified value is an object (i.e. not an array nor null).
* @param {Object} thing The value to inspect.
* @return {bool}
*/
export function isObject(thing) {
return typeof thing === "object" && thing !== null && !Array.isArray(thing);
}
/**
* Parses a data url.
* @param {String} dataURL The data url.
* @return {Object}
*/
export function parseDataURL(dataURL) {
const regex = /^data:(.*);base64,(.*)/;
const match = dataURL.match(regex);
if (!match) {
throw new Error(`Invalid data-url: ${String(dataURL).substring(0, 32)}...`);
}
const props = match[1];
const base64 = match[2];
const [type, ...rawParams] = props.split(";");
const params = rawParams.reduce((acc, param) => {
const [key, value] = param.split("=");
return { ...acc, [key]: value };
}, {});
return { ...params, type, base64 };
}
/**
* Extracts file information from a data url.
* @param {String} dataURL The data url.
* @return {Object}
*/
export function extractFileInfo(dataURL) {
const { name, type, base64 } = parseDataURL(dataURL);
const binary = atob(base64);
const array = [];
for (let i = 0; i < binary.length; i++) {
array.push(binary.charCodeAt(i));
}
const blob = new Blob([new Uint8Array(array)], { type });
return { blob, name };
}
/**
* Creates a FormData instance from a data url and an existing JSON response
* body.
* @param {String} dataURL The data url.
* @param {Object} body The response body.
* @param {Object} [options={}] The options object.
* @param {Object} [options.filename] Force attachment file name.
* @return {FormData}
*/
export function createFormData(dataURL, body, options = {}) {
const { filename = "untitled" } = options;
const { blob, name } = extractFileInfo(dataURL);
const formData = new FormData();
formData.append("attachment", blob, name || filename);
for (const property in body) {
if (typeof body[property] !== "undefined") {
formData.append(property, JSON.stringify(body[property]));
}
}
return formData;
}
/**
* Clones an object with all its undefined keys removed.
* @private
*/
export function cleanUndefinedProperties(obj) {
const result = {};
for (const key in obj) {
if (typeof obj[key] !== "undefined") {
result[key] = obj[key];
}
}
return result;
}
/**
* Handle common query parameters for Kinto requests.
*
* @param {String} [path] The endpoint base path.
* @param {Array} [options.fields] Fields to limit the
* request to.
* @param {Object} [options.query={}] Additional query arguments.
*/
export function addEndpointOptions(path, options = {}) {
const query = { ...options.query };
if (options.fields) {
query._fields = options.fields;
}
const queryString = qsify(query);
if (queryString) {
return path + "?" + queryString;
}
return path;
}
/**
* Replace authorization header with an obscured version
*/
export function obscureAuthorizationHeader(headers) {
const h = new Headers(headers);
if (h.has("authorization")) {
h.set("authorization", "**** (suppressed)");
}
const obscuredHeaders = {};
for (const [header, value] of h.entries()) {
obscuredHeaders[header] = value;
}
return obscuredHeaders;
}
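
With `kinto-http` folded in, its helper functions (`partition`, `delay`, `qsify`, `checkVersion`, the `support`/`capable`/`nobatch` decorators, and the data-URL and header utilities) now live in kinto's own `utils` module, in both the CommonJS and ES builds shown above. A hedged usage sketch of two of them; the deep import path is an assumption, so check the published package layout:

```js
// Assumed import path, for illustration only.
import { partition, qsify } from "kinto/lib/utils";

console.log(partition([1, 2, 3, 4, 5], 2));
// -> [[1, 2], [3, 4], [5]]

console.log(qsify({ _since: 42, _fields: ["id", "title"], skip: undefined }));
// -> "_since=42&_fields=id,title"  (undefined values are stripped)
```
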
{
"name": "kinto",
"version": "14.0.2",
"version": "15.0.0",
"description": "An Offline-First JavaScript client for Kinto.",

@@ -16,3 +16,3 @@ "main": "lib/cjs/index.js",

"build": "npm run build:es && npm run build:cjs",
"build:es": "tsc -p .",
"build:es": "tsc -p . --outDir ./lib/ --declaration true",
"build:cjs": "tsc -p . --outDir lib/cjs --module commonjs --declaration false",

@@ -25,4 +25,6 @@ "build:test-browser": "rollup --environment BROWSER_TESTING -c",

"demo": "npm run build-demo && http-server demo",
"dist": "cross-env NODE_ENV=production rollup -c",
"dist:dev": "rollup -c",
"dist": "npx cross-env NODE_ENV=production rollup -c && npm run dist:fx && npm run dist:fx:format",
"dist:dev": "npx rollup -c && npm run dist:fx",
"dist:fx": "npx ts-node --skip-project bin/dist-fx.ts",
"dist:fx:format": "npx prettier --config fx-src/.prettierrc.js dist/ --write",
"lint": "eslint \"src/**/*.{js,ts}\" \"test/**/*.{js,ts}\"",

@@ -132,2 +134,6 @@ "publish-demo": "npm run dist-prod && cp dist/kinto.js demo/kinto.js && gh-pages -d demo",

"url": "https://github.com/agawish"
},
{
"name": "Matt Boris",
"url": "https://github.com/matt-boris"
}

@@ -144,24 +150,27 @@ ],

"engines": {
"node": ">=10"
"node": ">=16"
},
"dependencies": {
"btoa": "^1.1.2",
"kinto-http": "^5.0.0-alpha.3",
"uuid": "^8.0.0"
"btoa": "^1.2.1",
"uuid": "^9.0.0"
},
"devDependencies": {
"@rollup/plugin-multi-entry": "^4.0.0",
"@rollup/plugin-replace": "^2.3.2",
"@rollup/plugin-commonjs": "^25.0.1",
"@rollup/plugin-multi-entry": "^6.0.0",
"@rollup/plugin-node-resolve": "^15.0.1",
"@rollup/plugin-replace": "^5.0.1",
"@rollup/plugin-terser": "^0.4.0",
"@rollup/plugin-typescript": "^11.0.0",
"@types/btoa": "^1.2.3",
"@types/node-fetch": "^2.5.6",
"@types/shelljs": "^0.8.6",
"@types/sinon": "^9.0.0",
"@types/uuid": "^8.0.0",
"@typescript-eslint/eslint-plugin": "^2.2.0",
"@typescript-eslint/parser": "^2.2.0",
"@types/node-fetch": "^2.5.12",
"@types/shelljs": "^0.8.9",
"@types/sinon": "^10.0.6",
"@types/uuid": "^9.0.1",
"@typescript-eslint/eslint-plugin": "^5.4.0",
"@typescript-eslint/parser": "^5.4.0",
"atob": "^2.1.2",
"co-task": "^1.0.0",
"coveralls": "^3.0.0",
"cross-env": "^7.0.0",
"esdoc": "^1.0.1",
"coveralls": "^3.1.1",
"cross-env": "^7.0.3",
"esdoc": "^1.1.0",
"esdoc-accessor-plugin": "^1.0.0",

@@ -175,29 +184,29 @@ "esdoc-ecmascript-proposal-plugin": "^1.0.0",

"esdoc-unexported-identifier-plugin": "^1.0.0",
"eslint": "^6.1.0",
"eslint-config-prettier": "^6.0.0",
"estraverse-fb": "^1.3.1",
"fake-indexeddb": "3.1.2",
"form-data": "^3.0.0",
"gh-pages": "^3.0.0",
"http-server": "^0.12.0",
"intern": "^4.8.4",
"eslint": "^8.3.0",
"eslint-config-prettier": "^8.3.0",
"estraverse-fb": "^1.3.2",
"fake-indexeddb": "^4.0.1",
"form-data": "^4.0.0",
"gh-pages": "^5.0.0",
"http-server": "^14.0.0",
"intern": "^4.10.0",
"kinto-node-test-server": "^2.0.0",
"node-fetch": "^2.6.0",
"nyc": "^15.0.0",
"open-cli": "^6.0.1",
"prettier": "^2.0.5",
"rollup": "^2.0.3",
"rollup-plugin-commonjs": "^10.0.2",
"rollup-plugin-node-builtins": "^2.1.2",
"rollup-plugin-node-resolve": "^5.2.0",
"rollup-plugin-terser": "^7.0.0",
"rollup-plugin-typescript": "^1.0.1",
"shx": "^0.3.2",
"sinon": "^9.0.0",
"source-map-support": "^0.5.13",
"ts-node": "^9.0.0",
"tslib": "^2.0.0",
"typescript": "^4.0.2",
"uglifyify": "^5.0.0"
"mitt": "^3.0.0",
"node-fetch": "^2.6.6",
"nyc": "^15.1.0",
"open-cli": "^7.0.1",
"prettier": "^2.4.1",
"rollup": "^3.19.1",
"rollup-plugin-polyfill-node": "^0.12.0",
"shx": "^0.3.3",
"sinon": "^15.0.1",
"source-map-support": "^0.5.21",
"ts-node": "^10.4.0",
"tslib": "^2.3.1",
"typescript": "^5.0.3",
"uglifyify": "^5.0.2"
},
"overrides": {
"ts-node": "$ts-node"
}
}
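
The manifest reflects the same consolidation: `kinto-http` disappears from `dependencies`, `uuid` moves to v9, the supported Node range becomes `>=16`, and the Rollup/ESLint/TypeScript toolchain is brought up to date. For a consumer, the upgrade roughly looks like this (hypothetical manifest fragment, versions as shown above):

```js
// A consumer's package.json after upgrading; kinto-http no longer needs to be
// installed separately because the HTTP client ships inside kinto 15.
const consumerManifest = {
  engines: { node: ">=16" },
  dependencies: {
    kinto: "^15.0.0",
    // "kinto-http": "^5.0.0-alpha.3"  <-- safe to remove
  },
};
console.log(JSON.stringify(consumerManifest, null, 2));
```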

@@ -9,3 +9,3 @@ # Kinto.js

> Note: If you're looking for a pure HTTP js client for Kinto, check out [kinto-http.js](https://github.com/Kinto/kinto-http.js).
> Note: This library also includes a pure JS HTTP client for Kinto. You can learn more in [the docs](https://kintojs.readthedocs.io/en/latest/http/).

@@ -15,7 +15,7 @@ The idea is to persist data locally in the browser by default, then synchronizing them with the server explicitly when connectivity is guaranteed:

```js
const kinto = new Kinto({remote: "https://kinto.dev.mozaws.net/v1/"});
const kinto = new Kinto({ remote: "https://demo.kinto-storage.org/v1/" });
const posts = kinto.collection("posts");
// Create and store a new post in the browser local database
await posts.create({title: "first post"});
await posts.create({ title: "first post" });
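
// Hedged continuation of the example: publish local changes and import remote
// ones (sync() is the collection method kinto.js documents for this step).
await posts.sync();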

@@ -26,3 +26,2 @@ // Publish all local data to the server, import remote changes

## Documentation

@@ -29,0 +28,0 @@

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet
