New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

@sanity/mutator

Package Overview
Dependencies
Maintainers
7
Versions
1471
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@sanity/mutator - npm Package Compare versions

Comparing version 0.112.0 to 0.113.0-alpha.f6d0f98d

30

lib/document/BufferedDocument.js

@@ -144,7 +144,7 @@ 'use strict';

}
(0, _debug2.default)('Committing local changes'
(0, _debug2.default)('Committing local changes');
// Collect current staged mutations into a commit and ...
);this.commits.push(new Commit([this.buffer.purge()])
this.commits.push(new Commit([this.buffer.purge()]));
// ... clear the table for the next commit.
);this.buffer = new _SquashingBuffer2.default(this.LOCAL);
this.buffer = new _SquashingBuffer2.default(this.LOCAL);
this.performCommits();

@@ -189,10 +189,10 @@ }

(0, _debug2.default)('Commit succeeded');
docResponder.success
docResponder.success();
// Keep running the committer until no more commits
();_this2._cycleCommitter();
_this2._cycleCommitter();
},
failure: function failure() {
(0, _debug2.default)('Commit failed'
(0, _debug2.default)('Commit failed');
// Re-stage commit
);commit.tries += 1;
commit.tries += 1;
if (_this2.LOCAL !== null) {

@@ -203,6 +203,6 @@ // Only schedule this commit for a retry if the document still exists, to avoid looping

}
docResponder.failure
docResponder.failure();
// Retry
// TODO: Be able to _not_ retry if failure is permanent
();setTimeout(function () {
setTimeout(function () {
return _this2._cycleCommitter();

@@ -227,7 +227,7 @@ }, 1000);

value: function handleDocumentDeleted() {
(0, _debug2.default)('Document deleted'
(0, _debug2.default)('Document deleted');
// If the document was just deleted, fire the onDelete event with the absolutely latest version of the document
// before someone deleted it so that the client may revive the document in the last state the user saw it, should
// she so desire.
);if (this.LOCAL !== null && this.onDelete) {
if (this.LOCAL !== null && this.onDelete) {
this.onDelete(this.LOCAL);

@@ -251,6 +251,6 @@ }

}
(0, _debug2.default)('Document mutated from remote with local changes'
(0, _debug2.default)('Document mutated from remote with local changes');
// If there are local edits, and the document was deleted, we need to purge those local edits now
);if (this.document.EDGE === null) {
if (this.document.EDGE === null) {
this.handleDocumentDeleted();

@@ -274,5 +274,5 @@ }

}, this.document.EDGE);
this.LOCAL = this.buffer.rebase(this.LOCAL
this.LOCAL = this.buffer.rebase(this.LOCAL);
// Copy over rev, since we don't care if it changed, we only care about the content
);if (oldLocal !== null && this.LOCAL !== null) {
if (oldLocal !== null && this.LOCAL !== null) {
oldLocal._rev = this.LOCAL._rev;

@@ -279,0 +279,0 @@ }

@@ -238,6 +238,6 @@ 'use strict';

this.HEAD = mut.apply(this.HEAD
this.HEAD = mut.apply(this.HEAD);
// Eliminate from incoming set
);this.incoming = this.incoming.filter(function (m) {
this.incoming = this.incoming.filter(function (m) {
return m.transactionId != mut.transactionId;

@@ -307,6 +307,6 @@ });

}
(0, _debug2.default)('The mutation was not the upcoming mutation, scrubbing. Pending: ' + this.pending.length + ', Submitted: ' + this.submitted.length
(0, _debug2.default)('The mutation was not the upcoming mutation, scrubbing. Pending: ' + this.pending.length + ', Submitted: ' + this.submitted.length);
// The mutation was not the upcoming mutation, so we'll have to check everything to
// see if we have an out of order situation
);this.submitted = this.submitted.filter(function (mut) {
this.submitted = this.submitted.filter(function (mut) {
return mut.transactionId != txnId;

@@ -317,6 +317,6 @@ });

});
(0, _debug2.default)('After scrubbing: Pending: ' + this.pending.length + ', Submitted: ' + this.submitted.length
(0, _debug2.default)('After scrubbing: Pending: ' + this.pending.length + ', Submitted: ' + this.submitted.length);
// Whether we had it or not we have either a reordering, or an unexpected mutation
// so must rebase
);return true;
return true;
}

@@ -359,5 +359,5 @@ }, {

return mutation.transactionId != pendingTxnId;
}
});
// Rebase to revert document to what it looked like before the failed mutation
);this.rebase();
this.rebase();
}

@@ -368,5 +368,5 @@ }, {

var oldEdge = this.EDGE;
this.EDGE = _Mutation2.default.applyAll(this.HEAD, this.submitted.concat(this.pending)
this.EDGE = _Mutation2.default.applyAll(this.HEAD, this.submitted.concat(this.pending));
// Copy over rev, since we don't care if it changed, we only care about the content
);if (oldEdge !== null && this.EDGE !== null) {
if (oldEdge !== null && this.EDGE !== null) {
oldEdge._rev = this.EDGE._rev;

@@ -373,0 +373,0 @@ }

@@ -153,6 +153,6 @@ 'use strict';

}
(0, _debug2.default)('Unoptimizable mutation detected, purging optimization buffer'
(0, _debug2.default)('Unoptimizable mutation detected, purging optimization buffer');
// console.log("Unoptimizable operation, stashing", JSON.stringify(op))
// Un-optimisable operations causes everything to be stashed
);this.staged.push(op);
this.staged.push(op);
this.stashStagedOperations();

@@ -175,5 +175,5 @@ }

// we won't optimise
var matches = (0, _extractWithPath2.default)(path, this.PRESTAGE
var matches = (0, _extractWithPath2.default)(path, this.PRESTAGE);
// If we are not overwriting exactly one key, this cannot be optimised, so we bail
);if (matches.length !== 1) {
if (matches.length !== 1) {
// console.log('Not optimisable because match count is != 1', JSON.stringify(matches))

@@ -211,5 +211,5 @@ return false;

// operation touching this path in the buffer.
var canonicalPath = (0, _arrayToJSONMatchPath2.default)(match.path
var canonicalPath = (0, _arrayToJSONMatchPath2.default)(match.path);
// Store this operation, overwriting any previous operations touching this same path
);if (op) {
if (op) {
this.setOperations[canonicalPath] = op;

@@ -216,0 +216,0 @@ } else {

@@ -169,6 +169,6 @@ 'use strict';

}
}
});
// If there are recursive terms, we need to add a lead for every descendant ...
);if (this.hasRecursives()) {
if (this.hasRecursives()) {
// The recursives matcher will have no active set, only inherit recursives from this

@@ -175,0 +175,0 @@ var recursivesMatcher = new Matcher([], this);

@@ -58,5 +58,5 @@ 'use strict';

value: function consume() {
var result = this.peek
var result = this.peek();
// console.log("consumed", result)
();this.i += 1;
this.i += 1;
return result;

@@ -70,5 +70,5 @@ }

value: function probe(pattern) {
var token = this.peek
var token = this.peek();
// console.log("Probing", token, "for", pattern)
();if (!token) {
if (!token) {
// console.log(" -> nay", token)

@@ -260,5 +260,5 @@ return null;

while (expr) {
terms.push(expr
terms.push(expr);
// End of union?
);if (this.match({ type: 'paren', symbol: ']' })) {
if (this.match({ type: 'paren', symbol: ']' })) {
break;

@@ -265,0 +265,0 @@ }

@@ -78,5 +78,5 @@ 'use strict';

function minIndex(targets, accessor) {
var result = (0, _min3.default)((0, _util.targetsToIndicies)(targets, accessor)
var result = (0, _min3.default)((0, _util.targetsToIndicies)(targets, accessor));
// Ranges may be zero-length and not turn up in indices
);targets.forEach(function (target) {
targets.forEach(function (target) {
if (target.isRange()) {

@@ -95,5 +95,5 @@ var _target$expandRange = target.expandRange(),

function maxIndex(targets, accessor) {
var result = (0, _max3.default)((0, _util.targetsToIndicies)(targets, accessor)
var result = (0, _max3.default)((0, _util.targetsToIndicies)(targets, accessor));
// Ranges may be zero-length and not turn up in indices
);targets.forEach(function (target) {
targets.forEach(function (target) {
if (target.isRange()) {

@@ -100,0 +100,0 @@ var _target$expandRange2 = target.expandRange(),

@@ -110,3 +110,3 @@ 'use strict';

}
}
});
// Each time we run the matcher, we might also get a delivery. This means that a

@@ -117,3 +117,3 @@ // term in the jsonpath terminated here and the patch should be applied. The delivery

// to do its work, so here we just pass those to the patch and we're done.
);if (delivery) {
if (delivery) {
var patch = delivery.payload;

@@ -120,0 +120,0 @@ result = patch.apply(delivery.targets, result);

{
"name": "@sanity/mutator",
"version": "0.112.0",
"version": "0.113.0-alpha.f6d0f98d",
"description": "A set of models to make it easier to utilize the powerful real time collaborative features of Sanity",

@@ -21,12 +21,12 @@ "main": "lib/index.js",

"devDependencies": {
"babel-cli": "^6.24.0",
"babel-cli": "^6.24.1",
"babel-core": "^6.24.0",
"babel-eslint": "^7.2.1",
"babel-eslint": "^7.2.3",
"babel-plugin-lodash": "^3.2.11",
"babel-plugin-syntax-flow": "^6.18.0",
"babel-plugin-transform-flow-strip-types": "^6.22.0",
"eslint": "^3.19.0",
"eslint-config-sanity": "^2.1.4",
"eslint": "^4.6.1",
"eslint-config-sanity": "^3.0.1",
"eslint-plugin-flowtype-errors": "^3.0.3",
"eslint-plugin-import": "^2.2.0",
"eslint-plugin-import": "^2.3.0",
"flow-bin": "^0.42.0",

@@ -33,0 +33,0 @@ "rimraf": "^2.6.1",

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc