pouchdb-adapter-utils
Comparing version 6.0.3 to 6.0.4
lib/index.js
@@ -5,441 +5,9 @@ 'use strict';
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var pouchdbUtils = require('pouchdb-utils');
var pouchdbErrors = require('pouchdb-errors');
var pouchdbMerge = require('pouchdb-merge');
var pouchdbBinaryUtils = require('pouchdb-binary-utils');
var pouchdbMd5 = require('pouchdb-md5');
var pouchdbCollections = require('pouchdb-collections');
var lie = _interopDefault(require('lie'));
var getArguments = _interopDefault(require('argsarray'));
var debug = _interopDefault(require('debug'));
var events = require('events');
var inherits = _interopDefault(require('inherits'));
var crypto = _interopDefault(require('crypto'));
// most of this is borrowed from lodash.isPlainObject:
// https://github.com/fis-components/lodash.isplainobject/
// blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js
var funcToString = Function.prototype.toString;
var objectCtorString = funcToString.call(Object);
var log = debug('pouchdb:api');
// like underscore/lodash _.pick()
function pick(obj, arr) {
var res = {};
for (var i = 0, len = arr.length; i < len; i++) {
var prop = arr[i];
if (prop in obj) {
res[prop] = obj[prop];
}
}
return res;
}
// in Node of course this is false
function isChromeApp() {
return false;
}
// in Node of course this is false
function hasLocalStorage() {
return false;
}
inherits(Changes, events.EventEmitter);
/* istanbul ignore next */
function attachBrowserEvents(self) {
if (isChromeApp()) {
chrome.storage.onChanged.addListener(function (e) {
// make sure it's event addressed to us
if (e.db_name != null) {
//object only has oldValue, newValue members
self.emit(e.dbName.newValue);
}
});
} else if (hasLocalStorage()) {
if (typeof addEventListener !== 'undefined') {
addEventListener("storage", function (e) {
self.emit(e.key);
});
} else { // old IE
window.attachEvent("storage", function (e) {
self.emit(e.key);
});
}
}
}
function Changes() {
events.EventEmitter.call(this);
this._listeners = {};
attachBrowserEvents(this);
}
Changes.prototype.addListener = function (dbName, id, db, opts) {
/* istanbul ignore if */
if (this._listeners[id]) {
return;
}
var self = this;
var inprogress = false;
function eventFunction() {
/* istanbul ignore if */
if (!self._listeners[id]) {
return;
}
if (inprogress) {
inprogress = 'waiting';
return;
}
inprogress = true;
var changesOpts = pick(opts, [
'style', 'include_docs', 'attachments', 'conflicts', 'filter',
'doc_ids', 'view', 'since', 'query_params', 'binary'
]);
/* istanbul ignore next */
function onError() {
inprogress = false;
}
db.changes(changesOpts).on('change', function (c) {
if (c.seq > opts.since && !opts.cancelled) {
opts.since = c.seq;
opts.onChange(c);
}
}).on('complete', function () {
if (inprogress === 'waiting') {
setTimeout(function (){
eventFunction();
},0);
}
inprogress = false;
}).on('error', onError);
}
this._listeners[id] = eventFunction;
this.on(dbName, eventFunction);
};
Changes.prototype.removeListener = function (dbName, id) {
/* istanbul ignore if */
if (!(id in this._listeners)) {
return;
}
events.EventEmitter.prototype.removeListener.call(this, dbName,
this._listeners[id]);
delete this._listeners[id];
};
/* istanbul ignore next */
Changes.prototype.notifyLocalWindows = function (dbName) {
//do a useless change on a storage thing
//in order to get other windows's listeners to activate
if (isChromeApp()) {
chrome.storage.local.set({dbName: dbName});
} else if (hasLocalStorage()) {
localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
}
};
Changes.prototype.notify = function (dbName) {
this.emit(dbName);
this.notifyLocalWindows(dbName);
};
inherits(PouchError, Error);
function PouchError(opts) {
Error.call(this, opts.reason);
this.status = opts.status;
this.name = opts.error;
this.message = opts.reason;
this.error = true;
}
PouchError.prototype.toString = function () {
return JSON.stringify({
status: this.status,
name: this.name,
message: this.message,
reason: this.reason
});
};
var UNAUTHORIZED = new PouchError({
status: 401,
error: 'unauthorized',
reason: "Name or password is incorrect."
});
var MISSING_BULK_DOCS = new PouchError({
status: 400,
error: 'bad_request',
reason: "Missing JSON list of 'docs'"
});
var MISSING_DOC = new PouchError({
status: 404,
error: 'not_found',
reason: 'missing'
});
var REV_CONFLICT = new PouchError({
status: 409,
error: 'conflict',
reason: 'Document update conflict'
});
var INVALID_ID = new PouchError({
status: 400,
error: 'bad_request',
reason: '_id field must contain a string'
});
var MISSING_ID = new PouchError({
status: 412,
error: 'missing_id',
reason: '_id is required for puts'
});
var RESERVED_ID = new PouchError({
status: 400,
error: 'bad_request',
reason: 'Only reserved document ids may start with underscore.'
});
var NOT_OPEN = new PouchError({
status: 412,
error: 'precondition_failed',
reason: 'Database not open'
});
var UNKNOWN_ERROR = new PouchError({
status: 500,
error: 'unknown_error',
reason: 'Database encountered an unknown error'
});
var BAD_ARG = new PouchError({
status: 500,
error: 'badarg',
reason: 'Some query argument is invalid'
});
var INVALID_REQUEST = new PouchError({
status: 400,
error: 'invalid_request',
reason: 'Request was invalid'
});
var QUERY_PARSE_ERROR = new PouchError({
status: 400,
error: 'query_parse_error',
reason: 'Some query parameter is invalid'
});
var DOC_VALIDATION = new PouchError({
status: 500,
error: 'doc_validation',
reason: 'Bad special document member'
});
var BAD_REQUEST = new PouchError({
status: 400,
error: 'bad_request',
reason: 'Something wrong with the request'
});
var NOT_AN_OBJECT = new PouchError({
status: 400,
error: 'bad_request',
reason: 'Document must be a JSON object'
});
var DB_MISSING = new PouchError({
status: 404,
error: 'not_found',
reason: 'Database not found'
});
var IDB_ERROR = new PouchError({
status: 500,
error: 'indexed_db_went_bad',
reason: 'unknown'
});
var WSQ_ERROR = new PouchError({
status: 500,
error: 'web_sql_went_bad',
reason: 'unknown'
});
var LDB_ERROR = new PouchError({
status: 500,
error: 'levelDB_went_went_bad',
reason: 'unknown'
});
var FORBIDDEN = new PouchError({
status: 403,
error: 'forbidden',
reason: 'Forbidden by design doc validate_doc_update function'
});
var INVALID_REV = new PouchError({
status: 400,
error: 'bad_request',
reason: 'Invalid rev format'
});
var FILE_EXISTS = new PouchError({
status: 412,
error: 'file_exists',
reason: 'The database could not be created, the file already exists.'
});
var MISSING_STUB = new PouchError({
status: 412,
error: 'missing_stub'
});
var INVALID_URL = new PouchError({
status: 413,
error: 'invalid_url',
reason: 'Provided URL is invalid'
});
function createError(error, reason) {
function CustomPouchError(reason) {
// inherit error properties from our parent error manually
// so as to allow proper JSON parsing.
/* jshint ignore:start */
for (var p in error) {
if (typeof error[p] !== 'function') {
this[p] = error[p];
}
}
/* jshint ignore:end */
if (reason !== undefined) {
this.reason = reason;
}
}
CustomPouchError.prototype = PouchError.prototype;
return new CustomPouchError(reason);
}
// Determine id an ID is valid
// - invalid IDs begin with an underescore that does not begin '_design' or
// '_local'
// - any other string value is a valid id
// Returns the specific error object for each case
function invalidIdError(id) {
var err;
if (!id) {
err = createError(MISSING_ID);
} else if (typeof id !== 'string') {
err = createError(INVALID_ID);
} else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) {
err = createError(RESERVED_ID);
}
if (err) {
throw err;
}
}
function parseDesignDocFunctionName(s) {
if (!s) {
return null;
}
var parts = s.split('/');
if (parts.length === 2) {
return parts;
}
if (parts.length === 1) {
return [s, s];
}
return null;
}
function normalizeDesignDocFunctionName(s) {
var normalized = parseDesignDocFunctionName(s);
return normalized ? normalized.join('/') : null;
}
// BEGIN Math.uuid.js
/*!
Math.uuid.js (v1.4)
http://www.broofa.com
mailto:robert@broofa.com
Copyright (c) 2010 Robert Kieffer
Dual licensed under the MIT and GPL licenses.
*/
/*
* Generate a random uuid.
*
* USAGE: Math.uuid(length, radix)
* length - the desired number of characters
* radix - the number of allowable values for each character.
*
* EXAMPLES:
* // No arguments - returns RFC4122, version 4 ID
* >>> Math.uuid()
* "92329D39-6F5C-4520-ABFC-AAB64544E172"
*
* // One argument - returns ID of the specified length
* >>> Math.uuid(15) // 15 character ID (default base=62)
* "VcydxgltxrVZSTV"
*
* // Two arguments - returns ID of the specified length, and radix.
* // (Radix must be <= 62)
* >>> Math.uuid(8, 2) // 8 character ID (base=2)
* "01001010"
* >>> Math.uuid(8, 10) // 8 character ID (base=10)
* "47473046"
* >>> Math.uuid(8, 16) // 8 character ID (base=16)
* "098F4D35"
*/
var chars = (
'0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' +
'abcdefghijklmnopqrstuvwxyz'
).split('');
function getValue(radix) {
return 0 | Math.random() * radix;
}
function uuid(len, radix) {
radix = radix || chars.length;
var out = '';
var i = -1;
if (len) {
// Compact form
while (++i < len) {
out += chars[getValue(radix)];
}
return out;
}
// rfc4122, version 4 form
// Fill in random data. At i==19 set the high bits of clock sequence as
// per rfc4122, sec. 4.1.5
while (++i < 36) {
switch (i) {
case 8:
case 13:
case 18:
case 23:
out += '-';
break;
case 19:
out += chars[(getValue(16) & 0x3) | 0x8];
break;
default:
out += chars[getValue(16)];
}
}
return out;
}
function toObject(array) {
@@ -486,3 +54,3 @@ return array.reduce(function (obj, item) {
if (!/^\d+\-./.test(rev)) {
return createError(INVALID_REV);
return pouchdbErrors.createError(pouchdbErrors.INVALID_REV);
}
@@ -528,5 +96,5 @@ var idx = rev.indexOf('-');
if (!doc._id) {
doc._id = uuid();
doc._id = pouchdbUtils.uuid();
}
newRevId = uuid(32, 16).toLowerCase();
newRevId = pouchdbUtils.uuid(32, 16).toLowerCase();
if (doc._rev) {
@@ -569,3 +137,3 @@ revInfo = parseRevisionInfo(doc._rev);
invalidIdError(doc._id);
pouchdbUtils.invalidIdError(doc._id);
@@ -580,4 +148,4 @@ doc._rev = nRevNum + '-' + newRevId;
if (specialKey && !reservedWords[key]) {
var error = createError(DOC_VALIDATION, key);
error.message = DOC_VALIDATION.message + ': ' + key;
var error = pouchdbErrors.createError(pouchdbErrors.DOC_VALIDATION, key);
error.message = pouchdbErrors.DOC_VALIDATION.message + ': ' + key;
throw error;
@@ -594,407 +162,7 @@ } else if (specialKey && !dataWords[key]) {
// We fetch all leafs of the revision tree, and sort them based on tree length
// and whether they were deleted, undeleted documents with the longest revision
// tree (most edits) win
// The final sort algorithm is slightly documented in a sidebar here:
// http://guide.couchdb.org/draft/conflicts.html
function winningRev(metadata) {
var winningId;
var winningPos;
var winningDeleted;
var toVisit = metadata.rev_tree.slice();
var node;
while ((node = toVisit.pop())) {
var tree = node.ids;
var branches = tree[2];
var pos = node.pos;
if (branches.length) { // non-leaf
for (var i = 0, len = branches.length; i < len; i++) {
toVisit.push({pos: pos + 1, ids: branches[i]});
}
continue;
}
var deleted = !!tree[1].deleted;
var id = tree[0];
// sort by deleted, then pos, then id
if (!winningId || (winningDeleted !== deleted ? winningDeleted :
winningPos !== pos ? winningPos < pos : winningId < id)) {
winningId = id;
winningPos = pos;
winningDeleted = deleted;
}
}
return winningPos + '-' + winningId;
}
// Pretty much all below can be combined into a higher order function to
// traverse revisions
// The return value from the callback will be passed as context to all
// children of that node
function traverseRevTree(revs, callback) {
var toVisit = revs.slice();
var node;
while ((node = toVisit.pop())) {
var pos = node.pos;
var tree = node.ids;
var branches = tree[2];
var newCtx =
callback(branches.length === 0, pos, tree[0], node.ctx, tree[1]);
for (var i = 0, len = branches.length; i < len; i++) {
toVisit.push({pos: pos + 1, ids: branches[i], ctx: newCtx});
}
}
}
// build up a list of all the paths to the leafs in this revision tree
function rootToLeaf(revs) {
var paths = [];
var toVisit = revs.slice();
var node;
while ((node = toVisit.pop())) {
var pos = node.pos;
var tree = node.ids;
var id = tree[0];
var opts = tree[1];
var branches = tree[2];
var isLeaf = branches.length === 0;
var history = node.history ? node.history.slice() : [];
history.push({id: id, opts: opts});
if (isLeaf) {
paths.push({pos: (pos + 1 - history.length), ids: history});
}
for (var i = 0, len = branches.length; i < len; i++) {
toVisit.push({pos: pos + 1, ids: branches[i], history: history});
}
}
return paths.reverse();
}
// for a better overview of what this is doing, read:
// https://github.com/apache/couchdb-couch/blob/master/src/couch_key_tree.erl
//
// But for a quick intro, CouchDB uses a revision tree to store a documents
// history, A -> B -> C, when a document has conflicts, that is a branch in the
// tree, A -> (B1 | B2 -> C), We store these as a nested array in the format
//
// KeyTree = [Path ... ]
// Path = {pos: position_from_root, ids: Tree}
// Tree = [Key, Opts, [Tree, ...]], in particular single node: [Key, []]
function sortByPos$1(a, b) {
return a.pos - b.pos;
}
// classic binary search
function binarySearch(arr, item, comparator) {
var low = 0;
var high = arr.length;
var mid;
while (low < high) {
mid = (low + high) >>> 1;
if (comparator(arr[mid], item) < 0) {
low = mid + 1;
} else {
high = mid;
}
}
return low;
}
// assuming the arr is sorted, insert the item in the proper place
function insertSorted(arr, item, comparator) {
var idx = binarySearch(arr, item, comparator);
arr.splice(idx, 0, item);
}
// Turn a path as a flat array into a tree with a single branch.
// If any should be stemmed from the beginning of the array, that's passed
// in as the second argument
function pathToTree(path, numStemmed) {
var root;
var leaf;
for (var i = numStemmed, len = path.length; i < len; i++) {
var node = path[i];
var currentLeaf = [node.id, node.opts, []];
if (leaf) {
leaf[2].push(currentLeaf);
leaf = currentLeaf;
} else {
root = leaf = currentLeaf;
}
}
return root;
}
// compare the IDs of two trees
function compareTree(a, b) {
return a[0] < b[0] ? -1 : 1;
}
// Merge two trees together
// The roots of tree1 and tree2 must be the same revision
function mergeTree(in_tree1, in_tree2) {
var queue = [{tree1: in_tree1, tree2: in_tree2}];
var conflicts = false;
while (queue.length > 0) {
var item = queue.pop();
var tree1 = item.tree1;
var tree2 = item.tree2;
if (tree1[1].status || tree2[1].status) {
tree1[1].status =
(tree1[1].status === 'available' ||
tree2[1].status === 'available') ? 'available' : 'missing';
}
for (var i = 0; i < tree2[2].length; i++) {
if (!tree1[2][0]) {
conflicts = 'new_leaf';
tree1[2][0] = tree2[2][i];
continue;
}
var merged = false;
for (var j = 0; j < tree1[2].length; j++) {
if (tree1[2][j][0] === tree2[2][i][0]) {
queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
merged = true;
}
}
if (!merged) {
conflicts = 'new_branch';
insertSorted(tree1[2], tree2[2][i], compareTree);
}
}
}
return {conflicts: conflicts, tree: in_tree1};
}
function doMerge(tree, path, dontExpand) {
var restree = [];
var conflicts = false;
var merged = false;
var res;
if (!tree.length) {
return {tree: [path], conflicts: 'new_leaf'};
}
for (var i = 0, len = tree.length; i < len; i++) {
var branch = tree[i];
if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) {
// Paths start at the same position and have the same root, so they need
// merged
res = mergeTree(branch.ids, path.ids);
restree.push({pos: branch.pos, ids: res.tree});
conflicts = conflicts || res.conflicts;
merged = true;
} else if (dontExpand !== true) {
// The paths start at a different position, take the earliest path and
// traverse up until it as at the same point from root as the path we
// want to merge. If the keys match we return the longer path with the
// other merged After stemming we dont want to expand the trees
var t1 = branch.pos < path.pos ? branch : path;
var t2 = branch.pos < path.pos ? path : branch;
var diff = t2.pos - t1.pos;
var candidateParents = [];
var trees = [];
trees.push({ids: t1.ids, diff: diff, parent: null, parentIdx: null});
while (trees.length > 0) {
var item = trees.pop();
if (item.diff === 0) {
if (item.ids[0] === t2.ids[0]) {
candidateParents.push(item);
}
continue;
}
var elements = item.ids[2];
for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) {
trees.push({
ids: elements[j],
diff: item.diff - 1,
parent: item.ids,
parentIdx: j
});
}
}
var el = candidateParents[0];
if (!el) {
restree.push(branch);
} else {
res = mergeTree(el.ids, t2.ids);
el.parent[2][el.parentIdx] = res.tree;
restree.push({pos: t1.pos, ids: t1.ids});
conflicts = conflicts || res.conflicts;
merged = true;
}
} else {
restree.push(branch);
}
}
// We didnt find
if (!merged) {
restree.push(path);
}
restree.sort(sortByPos$1);
return {
tree: restree,
conflicts: conflicts || 'internal_node'
};
}
// To ensure we dont grow the revision tree infinitely, we stem old revisions
function stem(tree, depth) {
// First we break out the tree into a complete list of root to leaf paths
var paths = rootToLeaf(tree);
var maybeStem = {};
var result;
for (var i = 0, len = paths.length; i < len; i++) {
// Then for each path, we cut off the start of the path based on the
// `depth` to stem to, and generate a new set of flat trees
var path = paths[i];
var stemmed = path.ids;
var numStemmed = Math.max(0, stemmed.length - depth);
var stemmedNode = {
pos: path.pos + numStemmed,
ids: pathToTree(stemmed, numStemmed)
};
for (var s = 0; s < numStemmed; s++) {
var rev = (path.pos + s) + '-' + stemmed[s].id;
maybeStem[rev] = true;
}
// Then we remerge all those flat trees together, ensuring that we dont
// connect trees that would go beyond the depth limit
if (result) {
result = doMerge(result, stemmedNode, true).tree;
} else {
result = [stemmedNode];
}
}
traverseRevTree(result, function (isLeaf, pos, revHash) {
// some revisions may have been removed in a branch but not in another
delete maybeStem[pos + '-' + revHash];
});
return {
tree: result,
revs: Object.keys(maybeStem)
};
}
function merge(tree, path, depth) {
var newTree = doMerge(tree, path);
var stemmed = stem(newTree.tree, depth);
return {
tree: stemmed.tree,
stemmedRevs: stemmed.revs,
conflicts: newTree.conflicts
};
}
// return true if a rev exists in the rev tree, false otherwise
function revExists(revs, rev) {
var toVisit = revs.slice();
var splitRev = rev.split('-');
var targetPos = parseInt(splitRev[0], 10);
var targetId = splitRev[1];
var node;
while ((node = toVisit.pop())) {
if (node.pos === targetPos && node.ids[0] === targetId) {
return true;
}
var branches = node.ids[2];
for (var i = 0, len = branches.length; i < len; i++) {
toVisit.push({pos: node.pos + 1, ids: branches[i]});
}
}
return false;
}
function getTrees(node) {
return node.ids;
}
// check if a specific revision of a doc has been deleted
// - metadata: the metadata object from the doc store
// - rev: (optional) the revision to check. defaults to winning revision
function isDeleted(metadata, rev) {
if (!rev) {
rev = winningRev(metadata);
}
var id = rev.substring(rev.indexOf('-') + 1);
var toVisit = metadata.rev_tree.map(getTrees);
var tree;
while ((tree = toVisit.pop())) {
if (tree[0] === id) {
return !!tree[1].deleted;
}
toVisit = toVisit.concat(tree[2]);
}
}
function isLocalId(id) {
return (/^_local/).test(id);
}
function thisAtob(str) {
var base64 = new Buffer(str, 'base64');
// Node.js will just skip the characters it can't decode instead of
// throwing an exception
if (base64.toString('base64') !== str) {
throw new Error("attachment is not a valid base64 string");
}
return base64.toString('binary');
}
function thisBtoa(str) {
return new Buffer(str, 'binary').toString('base64');
}
function typedBuffer(binString, buffType, type) {
// buffType is either 'binary' or 'base64'
var buff = new Buffer(binString, buffType);
buff.type = type; // non-standard, but used for consistency with the browser
return buff;
}
function binStringToBluffer(binString, type) {
return typedBuffer(binString, 'binary', type);
}
function blobToBase64(blobOrBuffer, callback) {
callback(blobOrBuffer.toString('base64'));
}
// not used in Node, but here for completeness
function blobToBase64$1(blobOrBuffer, callback) {
callback(blobOrBuffer.toString('binary'));
}
function binaryMd5(data, callback) {
var base64 = crypto.createHash('md5').update(data, 'binary').digest('base64');
callback(base64);
}
function parseBase64(data) {
try {
return thisAtob(data);
return pouchdbBinaryUtils.atob(data);
} catch (e) {
var err = createError(BAD_ARG,
var err = pouchdbErrors.createError(pouchdbErrors.BAD_ARG,
'Attachment is not a valid base64 string');
@@ -1013,9 +181,9 @@ return {error: err};
if (blobType === 'blob') {
att.data = binStringToBluffer(asBinary, att.content_type);
att.data = pouchdbBinaryUtils.binaryStringToBlobOrBuffer(asBinary, att.content_type);
} else if (blobType === 'base64') {
att.data = thisBtoa(asBinary);
att.data = pouchdbBinaryUtils.btoa(asBinary);
} else { // binary
att.data = asBinary;
}
binaryMd5(asBinary, function (result) {
pouchdbMd5.binaryMd5(asBinary, function (result) {
att.digest = 'md5-' + result;
@@ -1027,3 +195,3 @@ callback();
function preprocessBlob(att, blobType, callback) {
binaryMd5(att.data, function (md5) {
pouchdbMd5.binaryMd5(att.data, function (md5) {
att.digest = 'md5-' + md5;
@@ -1033,3 +201,3 @@ // size is for blobs (browser), length is for buffers (node)
if (blobType === 'binary') {
blobToBase64$1(att.data, function (binString) {
pouchdbBinaryUtils.blobOrBufferToBinaryString(att.data, function (binString) {
att.data = binString;
@@ -1039,3 +207,3 @@ callback();
} else if (blobType === 'base64') {
blobToBase64(att.data, function (b64) {
pouchdbBinaryUtils.blobOrBufferToBase64(att.data, function (b64) {
att.data = b64;
@@ -1110,3 +278,3 @@ callback();
if (revExists(prev.rev_tree, docInfo.metadata.rev)) {
if (pouchdbMerge.revExists(prev.rev_tree, docInfo.metadata.rev)) {
results[i] = docInfo;
@@ -1117,7 +285,7 @@ return cb();
// sometimes this is pre-calculated. historically not always
var previousWinningRev = prev.winningRev || winningRev(prev);
var previousWinningRev = prev.winningRev || pouchdbMerge.winningRev(prev);
var previouslyDeleted = 'deleted' in prev ? prev.deleted :
isDeleted(prev, previousWinningRev);
pouchdbMerge.isDeleted(prev, previousWinningRev);
var deleted = 'deleted' in docInfo.metadata ? docInfo.metadata.deleted :
isDeleted(docInfo.metadata);
pouchdbMerge.isDeleted(docInfo.metadata);
var isRoot = /^1-/.test(docInfo.metadata.rev);
@@ -1132,3 +300,3 @@
var merged = merge(prev.rev_tree, docInfo.metadata.rev_tree[0], revLimit);
var merged = pouchdbMerge.merge(prev.rev_tree, docInfo.metadata.rev_tree[0], revLimit);
@@ -1140,3 +308,3 @@ var inConflict = newEdits && (((previouslyDeleted && deleted) ||
if (inConflict) {
var err = createError(REV_CONFLICT);
var err = pouchdbErrors.createError(pouchdbErrors.REV_CONFLICT);
results[i] = err;
@@ -1155,4 +323,4 @@ return cb();
// recalculate
var winningRev$$ = winningRev(docInfo.metadata);
var winningRevIsDeleted = isDeleted(docInfo.metadata, winningRev$$);
var winningRev = pouchdbMerge.winningRev(docInfo.metadata);
var winningRevIsDeleted = pouchdbMerge.isDeleted(docInfo.metadata, winningRev);
@@ -1165,3 +333,3 @@ // calculate the total number of documents that were added/removed,
var newRevIsDeleted;
if (newRev === winningRev$$) {
if (newRev === winningRev) {
// if the new rev is the same as the winning rev, we can reuse that value
@@ -1171,48 +339,9 @@ newRevIsDeleted = winningRevIsDeleted;
// if they're not the same, then we need to recalculate
newRevIsDeleted = isDeleted(docInfo.metadata, newRev);
newRevIsDeleted = pouchdbMerge.isDeleted(docInfo.metadata, newRev);
}
writeDoc(docInfo, winningRev$$, winningRevIsDeleted, newRevIsDeleted,
writeDoc(docInfo, winningRev, winningRevIsDeleted, newRevIsDeleted,
true, delta, i, cb);
}
// based on https://github.com/montagejs/collections
function mangle(key) {
return '$' + key;
}
function unmangle(key) {
return key.substring(1);
}
function LazyMap() {
this.store = {};
}
LazyMap.prototype.get = function (key) {
var mangled = mangle(key);
return this.store[mangled];
};
LazyMap.prototype.set = function (key, value) {
var mangled = mangle(key);
this.store[mangled] = value;
return true;
};
LazyMap.prototype.has = function (key) {
var mangled = mangle(key);
return mangled in this.store;
};
LazyMap.prototype.delete = function (key) {
var mangled = mangle(key);
var res = mangled in this.store;
delete this.store[mangled];
return res;
};
LazyMap.prototype.forEach = function (cb) {
var keys = Object.keys(this.store);
for (var i = 0, len = keys.length; i < len; i++) {
var key = keys[i];
var value = this.store[key];
key = unmangle(key);
cb(value, key);
}
};
function rootIsMissing(docInfo) {
@@ -1230,6 +359,6 @@ return docInfo.metadata.rev_tree[0].ids[1].status === 'missing';
// Cant insert new deleted documents
var winningRev$$ = winningRev(docInfo.metadata);
var deleted = isDeleted(docInfo.metadata, winningRev$$);
var winningRev = pouchdbMerge.winningRev(docInfo.metadata);
var deleted = pouchdbMerge.isDeleted(docInfo.metadata, winningRev);
if ('was_delete' in opts && deleted) {
results[resultsIdx] = createError(MISSING_DOC, 'deleted');
results[resultsIdx] = pouchdbErrors.createError(pouchdbErrors.MISSING_DOC, 'deleted');
return callback();
@@ -1242,3 +371,3 @@ }
if (inConflict) {
var err = createError(REV_CONFLICT);
var err = pouchdbErrors.createError(pouchdbErrors.REV_CONFLICT);
results[resultsIdx] = err;
@@ -1250,3 +379,3 @@ return callback();
writeDoc(docInfo, winningRev$$, deleted, deleted, false,
writeDoc(docInfo, winningRev, deleted, deleted, false,
delta, resultsIdx, callback);
@@ -1256,3 +385,3 @@ }
var newEdits = opts.new_edits;
var idsToDocs = new LazyMap();
var idsToDocs = new pouchdbCollections.Map();
@@ -1270,3 +399,3 @@ var docsDone = 0;
if (currentDoc._id && isLocalId(currentDoc._id)) {
if (currentDoc._id && pouchdbMerge.isLocalId(currentDoc._id)) {
var fun = currentDoc._deleted ? '_removeLocal' : '_putLocal';
@@ -1311,3 +440,3 @@ api[fun](currentDoc, {ctx: tx}, function (err, res) {
// Ensure stemming applies to new writes as well
var merged = merge([], currentDoc.metadata.rev_tree[0], revLimit);
var merged = pouchdbMerge.merge([], currentDoc.metadata.rev_tree[0], revLimit);
currentDoc.metadata.rev_tree = merged.tree;
@@ -1322,7 +451,7 @@ currentDoc.stemmedRevs = merged.stemmedRevs || [];
exports.invalidIdError = invalidIdError;
exports.isDeleted = isDeleted;
exports.isLocalId = isLocalId;
exports.normalizeDdocFunctionName = normalizeDesignDocFunctionName;
exports.parseDdocFunctionName = parseDesignDocFunctionName;
exports.invalidIdError = pouchdbUtils.invalidIdError;
exports.isDeleted = pouchdbMerge.isDeleted;
exports.isLocalId = pouchdbMerge.isLocalId;
exports.normalizeDdocFunctionName = pouchdbUtils.normalizeDdocFunctionName;
exports.parseDdocFunctionName = pouchdbUtils.parseDdocFunctionName;
exports.parseDoc = parseDoc;
@@ -1329,0 +458,0 @@ exports.preprocessAttachments = preprocessAttachments;
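The net effect of this diff is that 6.0.4 drops the inlined helper code shown above and re-exports the equivalent functions from the split pouchdb-* packages (pouchdb-errors, pouchdb-merge, pouchdb-binary-utils, pouchdb-md5, pouchdb-collections, pouchdb-utils), so the public API of pouchdb-adapter-utils stays the same. As a minimal, illustrative sketch of using the exports listed above — the sample ids and the commented results are assumptions based on the code in this diff, not output from a test run:

var adapterUtils = require('pouchdb-adapter-utils');

// isLocalId just tests for the _local prefix (see isLocalId in the diff)
adapterUtils.isLocalId('_local/last_seq'); // true (assumed)
adapterUtils.isLocalId('mydoc');           // false (assumed)

// invalidIdError throws for ids that start with an underscore but are not
// _design/ or _local/ (see invalidIdError in the diff)
try {
  adapterUtils.invalidIdError('_bad_id');  // '_bad_id' is a made-up example id
} catch (err) {
  console.log(err.status, err.name);       // expected: 400 'bad_request'
}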
package.json
{
"name": "pouchdb-adapter-utils", | ||
"version": "6.0.3", | ||
"version": "6.0.4", | ||
"description": "Utilities for PouchDB adapters.", | ||
@@ -16,9 +16,9 @@ "main": "./lib/index.js", | ||
"dependencies": { | ||
"pouchdb-binary-utils": "6.0.3", | ||
"pouchdb-collections": "6.0.3", | ||
"pouchdb-errors": "6.0.3", | ||
"pouchdb-merge": "6.0.3", | ||
"pouchdb-md5": "6.0.3", | ||
"pouchdb-utils": "6.0.3" | ||
"pouchdb-binary-utils": "6.0.4", | ||
"pouchdb-collections": "6.0.4", | ||
"pouchdb-errors": "6.0.4", | ||
"pouchdb-merge": "6.0.4", | ||
"pouchdb-md5": "6.0.4", | ||
"pouchdb-utils": "6.0.4" | ||
} | ||
} |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
+ Added pouchdb-binary-utils@6.0.4 (transitive)
+ Added pouchdb-collections@6.0.4 (transitive)
+ Added pouchdb-errors@6.0.4 (transitive)
+ Added pouchdb-md5@6.0.4 (transitive)
+ Added pouchdb-merge@6.0.4 (transitive)
+ Added pouchdb-promise@6.0.4 (transitive)
+ Added pouchdb-utils@6.0.4 (transitive)
- Removed pouchdb-binary-utils@6.0.3 (transitive)
- Removed pouchdb-collections@6.0.3 (transitive)
- Removed pouchdb-errors@6.0.3 (transitive)
- Removed pouchdb-md5@6.0.3 (transitive)
- Removed pouchdb-merge@6.0.3 (transitive)
- Removed pouchdb-promise@6.0.3 (transitive)
- Removed pouchdb-utils@6.0.3 (transitive)
Updated pouchdb-binary-utils@6.0.4
Updated pouchdb-collections@6.0.4
Updated pouchdb-errors@6.0.4
Updated pouchdb-md5@6.0.4
Updated pouchdb-merge@6.0.4
Updated pouchdb-utils@6.0.4
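To pick up this patch set and the matching 6.0.4 transitive dependencies listed above, a typical upgrade (assuming npm as the package manager) would be:

npm install pouchdb-adapter-utils@6.0.4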