isomorphic-git
Comparing version 0.0.24 to 0.0.25
@@ -20,3 +20,2 @@ 'use strict';
var buffer = require('buffer');
var pako = _interopDefault(require('pako'));
var shasum = _interopDefault(require('shasum'));
@@ -497,40 +496,2 @@ var _slicedToArray = _interopDefault(require('babel-runtime/helpers/slicedToArray'));
// @flow
function wrapObject(_ref /*: {type: string, object: Buffer} */) {
var type = _ref.type,
object = _ref.object;
var buffer$$1 = buffer.Buffer.concat([buffer.Buffer.from(type + ' '), buffer.Buffer.from(object.byteLength.toString()), buffer.Buffer.from([0]), buffer.Buffer.from(object)]);
var oid = shasum(buffer$$1);
return {
oid: oid,
file: buffer.Buffer.from(pako.deflate(buffer$$1))
};
}
function unwrapObject(_ref2 /*: {oid: string, file: Buffer} */) {
var oid = _ref2.oid,
file = _ref2.file;
var inflated = buffer.Buffer.from(pako.inflate(file));
if (oid) {
var sha = shasum(inflated);
if (sha !== oid) {
throw new Error('SHA check failed! Expected ' + oid + ', computed ' + sha);
}
}
var s = inflated.indexOf(32); // first space
var i = inflated.indexOf(0); // first null value
var type = inflated.slice(0, s).toString('utf8'); // get type of object
var length = inflated.slice(s + 1, i).toString('utf8'); // get length of object
var actualLength = inflated.length - (i + 1);
// verify length
if (parseInt(length) !== actualLength) {
throw new Error('Length mismatch: expected ' + length + ' bytes but got ' + actualLength + ' instead.');
}
return {
type: type,
object: buffer.Buffer.from(inflated.slice(i + 1))
};
}
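Editor's note: wrapObject and unwrapObject (moved onto the GitObject model in 0.0.25, see below) implement git's loose-object encoding: the payload is prefixed with an ASCII header "<type> <byte length>\0", the SHA-1 of that uncompressed buffer becomes the oid, and the whole thing is zlib-deflated for storage. A minimal round-trip sketch, using Node's built-in zlib and crypto in place of pako and shasum (those substitutions are mine, not the library's):
const zlib = require('zlib');
const crypto = require('crypto');
function wrapSketch(type, object) {
  // "<type> <length>\0<payload>" is what git hashes, not the raw content
  const wrapped = Buffer.concat([Buffer.from(type + ' ' + object.byteLength + '\0'), object]);
  const oid = crypto.createHash('sha1').update(wrapped).digest('hex');
  return { oid, file: zlib.deflateSync(wrapped) };
}
const { oid, file } = wrapSketch('blob', Buffer.from('hello world\n'));
const inflated = zlib.inflateSync(file); // 'blob 12\u0000hello world\n'
console.log(oid, inflated.slice(inflated.indexOf(0) + 1).toString());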
var GitObjectManager = function () {
@@ -544,7 +505,7 @@ function GitObjectManager() {
value: function () {
var _ref4 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee(_ref3 /*: {gitdir: string, oid: string} */) {
var gitdir = _ref3.gitdir,
oid = _ref3.oid;
var _ref2 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee(_ref /*: {gitdir: string, oid: string} */) {
var gitdir = _ref.gitdir,
oid = _ref.oid;
var file, text, _unwrapObject, type, object;
var file, text, _GitObject$unwrap, type, object;
@@ -583,3 +544,3 @@ return _regeneratorRuntime.wrap(function _callee$(_context) {
case 12:
_unwrapObject = unwrapObject({ oid: oid, file: file }), type = _unwrapObject.type, object = _unwrapObject.object;
_GitObject$unwrap = models_js.GitObject.unwrap({ oid: oid, file: file }), type = _GitObject$unwrap.type, object = _GitObject$unwrap.object;
return _context.abrupt('return', { type: type, object: object });
@@ -596,3 +557,3 @@
function read$$1(_x) {
return _ref4.apply(this, arguments);
return _ref2.apply(this, arguments);
}
@@ -605,6 +566,6 @@
value: function () {
var _ref6 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee2(_ref5) {
var gitdir = _ref5.gitdir,
type = _ref5.type,
object = _ref5.object;
var _ref4 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee2(_ref3) {
var gitdir = _ref3.gitdir,
type = _ref3.type,
object = _ref3.object;
var buffer$$1, oid;
@@ -628,3 +589,3 @@ return _regeneratorRuntime.wrap(function _callee2$(_context2) {
function hash(_x2) {
return _ref6.apply(this, arguments);
return _ref4.apply(this, arguments);
}
@@ -637,8 +598,13 @@
value: function () {
var _ref8 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee3(_ref7) {
var gitdir = _ref7.gitdir,
type = _ref7.type,
object = _ref7.object;
var _ref6 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee3(_ref5 /*: {
gitdir: string,
type: string,
object: Buffer
} */
) {
var gitdir = _ref5.gitdir,
type = _ref5.type,
object = _ref5.object;
var _wrapObject, file, oid, filepath;
var _GitObject$wrap, file, oid, filepath;
@@ -649,3 +615,3 @@ return _regeneratorRuntime.wrap(function _callee3$(_context3) {
case 0:
_wrapObject = wrapObject({ type: type, object: object }), file = _wrapObject.file, oid = _wrapObject.oid;
_GitObject$wrap = models_js.GitObject.wrap({ type: type, object: object }), file = _GitObject$wrap.file, oid = _GitObject$wrap.oid;
filepath = gitdir + '/objects/' + oid.slice(0, 2) + '/' + oid.slice(2);
@@ -680,8 +646,7 @@ // Don't overwrite existing git objects - this helps avoid EPERM errors.
function write$$1(_x3) {
return _ref8.apply(this, arguments);
return _ref6.apply(this, arguments);
}
return write$$1;
}() /*: {gitdir: string, type: string, object: Buffer} */
}()
}]);
@@ -738,3 +703,3 @@
_ref4 = _slicedToArray(_ref3, 2);
key = _ref4[0];
value = _ref4[1];
@@ -741,0 +706,0 @@
@@ -22,2 +22,4 @@ 'use strict';
var _set = _interopDefault(require('lodash/set'));
var pako = _interopDefault(require('pako'));
var shasum = _interopDefault(require('shasum'));
var BufferCursor = _interopDefault(require('buffercursor'));
@@ -27,6 +29,7 @@ var pad = _interopDefault(require('pad'));
var _toConsumableArray = _interopDefault(require('babel-runtime/helpers/toConsumableArray'));
var _Object$assign = _interopDefault(require('babel-runtime/core-js/object/assign'));
var _Array$from = _interopDefault(require('babel-runtime/core-js/array/from'));
var _Map = _interopDefault(require('babel-runtime/core-js/map'));
var _Symbol$iterator = _interopDefault(require('babel-runtime/core-js/symbol/iterator'));
var _Map = _interopDefault(require('babel-runtime/core-js/map'));
var sortby = _interopDefault(require('lodash/sortBy'));
var shasum = _interopDefault(require('shasum'));
@@ -598,2 +601,52 @@ // @flow
var GitObject = function () {
function GitObject() {
_classCallCheck(this, GitObject);
}
_createClass(GitObject, null, [{
key: 'wrap',
value: function wrap(_ref /*: {type: string, object: Buffer} */) {
var type = _ref.type,
object = _ref.object;
var buffer$$1 = buffer.Buffer.concat([buffer.Buffer.from(type + ' '), buffer.Buffer.from(object.byteLength.toString()), buffer.Buffer.from([0]), buffer.Buffer.from(object)]);
var oid = shasum(buffer$$1);
return {
oid: oid,
file: buffer.Buffer.from(pako.deflate(buffer$$1))
};
}
}, {
key: 'unwrap',
value: function unwrap(_ref2 /*: {oid: string, file: Buffer} */) {
var oid = _ref2.oid,
file = _ref2.file;
var inflated = buffer.Buffer.from(pako.inflate(file));
if (oid) {
var sha = shasum(inflated);
if (sha !== oid) {
throw new Error('SHA check failed! Expected ' + oid + ', computed ' + sha);
}
}
var s = inflated.indexOf(32); // first space
var i = inflated.indexOf(0); // first null value
var type = inflated.slice(0, s).toString('utf8'); // get type of object
var length = inflated.slice(s + 1, i).toString('utf8'); // get length of object
var actualLength = inflated.length - (i + 1);
// verify length
if (parseInt(length) !== actualLength) {
throw new Error('Length mismatch: expected ' + length + ' bytes but got ' + actualLength + ' instead.');
}
return {
type: type,
object: buffer.Buffer.from(inflated.slice(i + 1))
};
}
}]);
return GitObject;
}();
// @flow
@@ -748,2 +801,166 @@ /**
function parseIDX(buffer$$1) {
var reader = new BufferCursor(buffer$$1);
var magic = reader.slice(4).toString('hex');
// Check for IDX v2 magic number
if (magic !== 'ff744f63') {
return; // undefined
}
var version = reader.readUInt32BE();
if (version !== 2) {
throw new Error('Unable to read version ' + version + ' packfile IDX. (Only version 2 supported)');
}
// Verify checksums
var shaComputed = shasum(buffer$$1.slice(0, -20));
var shaClaimed = buffer$$1.slice(-20).toString('hex');
if (shaClaimed !== shaComputed) {
throw new Error('Invalid checksum in IDX buffer: expected ' + shaClaimed + ' but saw ' + shaComputed);
}
if (buffer$$1.byteLength > 2048 * 1024 * 1024) {
throw new Error('To keep implementation simple, I haven\'t implemented the layer 5 feature needed to support packfiles > 2GB in size.');
}
var fanout = [];
for (var i = 0; i < 256; i++) {
fanout.push(reader.readUInt32BE());
}
var size = fanout[255];
// For now we'll parse the whole thing. We can optimize later if we need to.
var hashes = [];
for (var _i = 0; _i < size; _i++) {
hashes.push(reader.slice(20).toString('hex'));
}
var crcs = new _Map();
for (var _i2 = 0; _i2 < size; _i2++) {
crcs.set(hashes[_i2], reader.readUInt32BE());
}
var offsets = new _Map();
for (var _i3 = 0; _i3 < size; _i3++) {
offsets.set(hashes[_i3], reader.readUInt32BE());
}
var packfileSha = reader.slice(20).toString('hex');
// This part is gratuitous, but since we lack a good way to unzip arbitrary streams with extra data at the end in the browser...
var lengths = _Array$from(offsets);
lengths.sort(function (a, b) {
return a[1] - b[1];
}); // List objects in order by offset
var sizes = new _Map();
var slices = new _Map();
for (var _i4 = 0; _i4 < size - 1; _i4++) {
sizes.set(lengths[_i4][0], lengths[_i4 + 1][1] - lengths[_i4][1]);
slices.set(lengths[_i4][0], [lengths[_i4][1], lengths[_i4 + 1][1]]);
}
slices.set(lengths[size - 1][0], [lengths[size - 1][1], undefined]);
return { size: size, fanout: fanout, hashes: hashes, crcs: crcs, packfileSha: packfileSha, slices: slices };
}
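Editor's note: the 256-entry fanout table is what makes IDX lookups fast. Entry k holds the count of objects whose oid begins with a byte <= k, so fanout[255] is the total object count and [fanout[k - 1], fanout[k]) brackets the sorted hashes starting with byte k. A hedged sketch of a lookup against the object parseIDX returns (the helper name is mine):
function findHashIndex(idx, oid) {
  const firstByte = parseInt(oid.slice(0, 2), 16);
  const lo = firstByte === 0 ? 0 : idx.fanout[firstByte - 1];
  const hi = idx.fanout[firstByte];
  // Linear scan for clarity; hashes are sorted, so real code would bisect.
  for (let i = lo; i < hi; i++) {
    if (idx.hashes[i] === oid) return i;
  }
  return -1;
}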
var GitPackfile = function () {
function GitPackfile(_ref) {
var size = _ref.size,
fanout = _ref.fanout,
hashes = _ref.hashes,
crcs = _ref.crcs,
packfileSha = _ref.packfileSha,
slices = _ref.slices,
pack = _ref.pack;
_classCallCheck(this, GitPackfile);
// Compare checksums
var shaClaimed = pack.slice(-20).toString('hex');
if (packfileSha !== shaClaimed) {
throw new Error('Invalid packfile shasum in IDX buffer: expected ' + packfileSha + ' but saw ' + shaClaimed);
}
_Object$assign(this, {
size: size,
fanout: fanout,
hashes: hashes,
crcs: crcs,
packfileSha: packfileSha,
slices: slices,
pack: pack
});
}
_createClass(GitPackfile, [{
key: 'read',
value: function () {
var _ref3 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee(_ref2 /*: {oid: string} */) {
var _pack;
var oid = _ref2.oid;
var raw;
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
if (this.slices.has(oid)) {
_context.next = 2;
break;
}
return _context.abrupt('return', null);
case 2:
raw = (_pack = this.pack).slice.apply(_pack, _toConsumableArray(this.slices.get(oid)));
/*
- The header is followed by number of object entries, each of
which looks like this:
(undeltified representation)
n-byte type and length (3-bit type, (n-1)*7+4-bit length)
compressed data
(deltified representation)
n-byte type and length (3-bit type, (n-1)*7+4-bit length)
20-byte base object name if OBJ_REF_DELTA or a negative relative
offset from the delta object's position in the pack if this
is an OBJ_OFS_DELTA object
compressed delta data
Observation: length of each object is encoded in a variable
length format and is not constrained to 32-bit or anything.
*/
case 3:
case 'end':
return _context.stop();
}
}
}, _callee, this);
}));
function read(_x) {
return _ref3.apply(this, arguments);
}
return read;
}()
}], [{
key: 'fromIDX',
value: function () {
var _ref5 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee2(_ref4) {
var idx = _ref4.idx,
pack = _ref4.pack;
return _regeneratorRuntime.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
return _context2.abrupt('return', new GitPackfile(_extends({ pack: pack }, parseIDX(idx))));
case 1:
case 'end':
return _context2.stop();
}
}
}, _callee2, this);
}));
function fromIDX(_x2) {
return _ref5.apply(this, arguments);
}
return fromIDX;
}()
}]);
return GitPackfile;
}();
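Editor's note: a usage sketch for the new GitPackfile model, assuming the .idx/.pack pair is read straight from a repository on disk (the paths and the openPack helper are mine). Note that in this version read() stops after computing the raw slice — there is no return statement after the format comment — so it resolves to undefined for oids it knows about; inflating and delta resolution are evidently still to come.
const fs = require('fs');
const { GitPackfile } = require('isomorphic-git/dist/for-node/models.js');
async function openPack(prefix) {
  const idx = fs.readFileSync(prefix + '.idx');
  const pack = fs.readFileSync(prefix + '.pack');
  const p = await GitPackfile.fromIDX({ idx, pack }); // throws if the checksums disagree
  console.log(p.size + ' objects, pack sha ' + p.packfileSha);
  return p;
}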
// @flow
@@ -1238,5 +1455,7 @@ /*::
exports.GitConfig = GitConfig;
exports.GitObject = GitObject;
exports.GitPktLine = GitPktLine;
exports.GitPackfile = GitPackfile;
exports.GitIndex = GitIndex;
exports.GitTree = GitTree;
//# sourceMappingURL=models.js.map
@@ -540,3 +540,3 @@ 'use strict';
var name = "isomorphic-git";
var version = "0.0.24";
var version = "0.0.25";
var description = "A pure JavaScript implementation of git for node and browsers!";
@@ -558,3 +558,3 @@ var main = "dist/for-node/";
var dependencies = { "async-lock": "^1.0.0", "await-stream-ready": "^1.0.1", "babel-runtime": "^6.26.0", "buffer": "^5.0.7", "buffer-peek-stream": "^1.0.1", "buffercursor": "0.0.12", "gartal": "^1.1.2", "git-apply-delta": "0.0.7", "git-list-pack": "0.0.10", "ignore": "^3.3.6", "ini": "^1.3.4", "marky": "^1.2.0", "minimisted": "^2.0.0", "openpgp": "^2.5.10", "pad": "^2.0.1", "pako": "^1.0.5", "pify": "^3.0.0", "shasum": "^1.0.2", "simple-concat": "^1.0.0", "simple-get": "^2.7.0", "thru": "git+https://github.com/wmhilton-contrib/thru.git#master" };
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "husky": "^0.14.3", "jest": "^21.2.1", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^1.7.1", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^7.0.1", "rollup": "^0.50.0", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watchify": "^3.9.0" };
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "husky": "^0.14.3", "jest": "^21.2.1", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^1.7.1", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^7.0.1", "rollup": "^0.51.6", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watchify": "^3.9.0" };
var ava = { "source": ["dist/for-node/*"] };
@@ -561,0 +561,0 @@ var browserify = { "transform": ["browserify-shim"] };
@@ -834,2 +834,17 @@ import path from 'path';
// @flow
/*::
import type { Stats } from 'fs'
import type { CacheEntry } from '../models/GitIndex'
*/
function cacheIsStale({ entry, stats /*: {
entry: CacheEntry,
stats: Stats
} */
}) {
// Comparison based on the description in Paragraph 4 of
// https://www.kernel.org/pub/software/scm/git/docs/technical/racy-git.txt
return entry.mode !== stats.mode || entry.mtime.valueOf() !== stats.mtime.valueOf() || entry.ctime.valueOf() !== stats.ctime.valueOf() || entry.uid !== stats.uid || entry.gid !== stats.gid || entry.ino !== stats.ino >> 0 || entry.size !== stats.size;
}
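Editor's note: cacheIsStale is the racy-git guard: if any stat field recorded in the index entry disagrees with the file's current stat, the cached oid is discarded and the file is re-hashed (the >> 0 truncates the inode to a 32-bit integer, matching how the index serializes it). A sketch with made-up values:
const entry = {
  mode: 0o100644, mtime: new Date(1510000000000), ctime: new Date(1510000000000),
  uid: 1000, gid: 1000, ino: 42, size: 12
};
const stats = Object.assign({}, entry, { mtime: new Date(1510000005000) });
cacheIsStale({ entry, stats }); // true: mtime moved, so fall back to hashing
cacheIsStale({ entry, stats: entry }); // false: every field still matches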
async function getOidAtPath({
@@ -939,2 +954,8 @@ gitdir,
}
} else if (indexEntry !== null && !cacheIsStale({ entry: indexEntry, stats })) {
if (indexEntry.oid === treeOid) {
return 'unmodified';
} else {
return 'modified';
}
} else {
@@ -985,3 +1006,3 @@ let object = await read(path.join(workdir, pathname));
var version$1 = "0.0.24";
var version$1 = "0.0.25";
@@ -988,0 +1009,0 @@ function version() {
@@ -1,2 +0,2 @@
import { GitConfig, GitIndex, GitPktLine } from './models.js';
import { GitConfig, GitIndex, GitObject, GitPktLine } from './models.js';
import { exists, pkg, read, write } from './utils.js';
@@ -7,3 +7,2 @@ import path from 'path';
import { Buffer } from 'buffer';
import pako from 'pako';
import shasum from 'shasum';
@@ -149,34 +148,2 @@ import simpleGet from 'simple-get';
// @flow
function wrapObject({ type, object /*: {type: string, object: Buffer} */ }) {
let buffer$$1 = Buffer.concat([Buffer.from(type + ' '), Buffer.from(object.byteLength.toString()), Buffer.from([0]), Buffer.from(object)]);
let oid = shasum(buffer$$1);
return {
oid,
file: Buffer.from(pako.deflate(buffer$$1))
};
}
function unwrapObject({ oid, file /*: {oid: string, file: Buffer} */ }) {
let inflated = Buffer.from(pako.inflate(file));
if (oid) {
let sha = shasum(inflated);
if (sha !== oid) {
throw new Error(`SHA check failed! Expected ${oid}, computed ${sha}`);
}
}
let s = inflated.indexOf(32); // first space
let i = inflated.indexOf(0); // first null value
let type = inflated.slice(0, s).toString('utf8'); // get type of object
let length = inflated.slice(s + 1, i).toString('utf8'); // get length of object
let actualLength = inflated.length - (i + 1);
// verify length
if (parseInt(length) !== actualLength) {
throw new Error(`Length mismatch: expected ${length} bytes but got ${actualLength} instead.`);
}
return {
type,
object: Buffer.from(inflated.slice(i + 1))
};
}
class GitObjectManager {
@@ -194,3 +161,3 @@ static async read({ gitdir, oid /*: {gitdir: string, oid: string} */ }) {
}
let { type, object } = unwrapObject({ oid, file });
let { type, object } = GitObject.unwrap({ oid, file });
return { type, object };
@@ -205,4 +172,12 @@ }
static async write({ gitdir, type, object }) /*: Promise<string> */{
let { file, oid } = wrapObject({ type, object });
static async write({
gitdir,
type,
object /*: {
gitdir: string,
type: string,
object: Buffer
} */
}) /*: Promise<string> */{
let { file, oid } = GitObject.wrap({ type, object });
let filepath = `${gitdir}/objects/${oid.slice(0, 2)}/${oid.slice(2)}`;
@@ -214,3 +189,3 @@ // Don't overwrite existing git objects - this helps avoid EPERM errors.
return oid;
} /*: {gitdir: string, type: string, object: Buffer} */
}
}
@@ -217,0 +192,0 @@
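Editor's note: the write path shards loose objects on the first two hex digits of the oid, which keeps .git/objects a shallow two-level tree, and the existence check noted in the hunk above skips rewriting: since the filename is the content hash, a file that already exists already holds identical bytes, and leaving it alone sidesteps EPERM on read-only object files. For example (the hash and path below are invented for illustration):
const oid = '0155eb4229851634a0f03eb265b69f5a2d56f341'; // example hash, mine
const gitdir = '/tmp/demo/.git'; // example path, mine
const filepath = `${gitdir}/objects/${oid.slice(0, 2)}/${oid.slice(2)}`;
// => '/tmp/demo/.git/objects/01/55eb4229851634a0f03eb265b69f5a2d56f341'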
@@ -6,2 +6,4 @@ import { Buffer } from 'buffer';
import set from 'lodash/set';
import pako from 'pako';
import shasum from 'shasum';
import BufferCursor from 'buffercursor';
@@ -11,3 +13,2 @@ import pad from 'pad';
import sortby from 'lodash/sortBy';
import shasum from 'shasum';
@@ -271,2 +272,35 @@ var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key$$1 in source) { if (Object.prototype.hasOwnProperty.call(source, key$$1)) { target[key$$1] = source[key$$1]; } } } return target; };
class GitObject {
static wrap({ type, object /*: {type: string, object: Buffer} */ }) {
let buffer$$1 = Buffer.concat([Buffer.from(type + ' '), Buffer.from(object.byteLength.toString()), Buffer.from([0]), Buffer.from(object)]);
let oid = shasum(buffer$$1);
return {
oid,
file: Buffer.from(pako.deflate(buffer$$1))
};
}
static unwrap({ oid, file /*: {oid: string, file: Buffer} */ }) {
let inflated = Buffer.from(pako.inflate(file));
if (oid) {
let sha = shasum(inflated);
if (sha !== oid) {
throw new Error(`SHA check failed! Expected ${oid}, computed ${sha}`);
}
}
let s = inflated.indexOf(32); // first space
let i = inflated.indexOf(0); // first null value
let type = inflated.slice(0, s).toString('utf8'); // get type of object
let length = inflated.slice(s + 1, i).toString('utf8'); // get length of object
let actualLength = inflated.length - (i + 1);
// verify length
if (parseInt(length) !== actualLength) {
throw new Error(`Length mismatch: expected ${length} bytes but got ${actualLength} instead.`);
}
return {
type,
object: Buffer.from(inflated.slice(i + 1))
};
}
}
// @flow
@@ -368,2 +402,97 @@ /**
var _extends$1 = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key$$1 in source) { if (Object.prototype.hasOwnProperty.call(source, key$$1)) { target[key$$1] = source[key$$1]; } } } return target; };
function parseIDX(buffer$$1) {
let reader = new BufferCursor(buffer$$1);
let magic = reader.slice(4).toString('hex');
// Check for IDX v2 magic number
if (magic !== 'ff744f63') {
return; // undefined
}
let version = reader.readUInt32BE();
if (version !== 2) {
throw new Error(`Unable to read version ${version} packfile IDX. (Only version 2 supported)`);
}
// Verify checksums
let shaComputed = shasum(buffer$$1.slice(0, -20));
let shaClaimed = buffer$$1.slice(-20).toString('hex');
if (shaClaimed !== shaComputed) {
throw new Error(`Invalid checksum in IDX buffer: expected ${shaClaimed} but saw ${shaComputed}`);
}
if (buffer$$1.byteLength > 2048 * 1024 * 1024) {
throw new Error(`To keep implementation simple, I haven't implemented the layer 5 feature needed to support packfiles > 2GB in size.`);
}
let fanout = [];
for (let i = 0; i < 256; i++) {
fanout.push(reader.readUInt32BE());
}
let size = fanout[255];
// For now we'll parse the whole thing. We can optimize later if we need to.
let hashes = [];
for (let i = 0; i < size; i++) {
hashes.push(reader.slice(20).toString('hex'));
}
let crcs = new Map();
for (let i = 0; i < size; i++) {
crcs.set(hashes[i], reader.readUInt32BE());
}
let offsets = new Map();
for (let i = 0; i < size; i++) {
offsets.set(hashes[i], reader.readUInt32BE());
}
let packfileSha = reader.slice(20).toString('hex');
// This part is gratuitous, but since we lack a good way to unzip arbitrary streams with extra data at the end in the browser...
let lengths = Array.from(offsets);
lengths.sort((a, b) => a[1] - b[1]); // List objects in order by offset
let sizes = new Map();
let slices = new Map();
for (let i = 0; i < size - 1; i++) {
sizes.set(lengths[i][0], lengths[i + 1][1] - lengths[i][1]);
slices.set(lengths[i][0], [lengths[i][1], lengths[i + 1][1]]);
}
slices.set(lengths[size - 1][0], [lengths[size - 1][1], undefined]);
return { size, fanout, hashes, crcs, packfileSha, slices };
}
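Editor's note: the offset sort at the end of parseIDX exists because the IDX records only where each object starts in the .pack; an object's end is simply the next object's start. Sorting the (oid, offset) pairs by offset lets each entry borrow its neighbor's start as its end, and the last slice's end is left undefined so Buffer#slice runs to the end of the pack (which still includes the trailing 20-byte checksum; a stricter reader would trim it). A toy illustration with invented oids:
const offsets = new Map([['A', 12], ['B', 1200], ['C', 340]]);
const lengths = Array.from(offsets).sort((a, b) => a[1] - b[1]);
// lengths => [['A', 12], ['C', 340], ['B', 1200]]
// slices  => A: [12, 340], C: [340, 1200], B: [1200, undefined]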
class GitPackfile {
constructor({ size, fanout, hashes, crcs, packfileSha, slices, pack }) {
// Compare checksums
let shaClaimed = pack.slice(-20).toString('hex');
if (packfileSha !== shaClaimed) {
throw new Error(`Invalid packfile shasum in IDX buffer: expected ${packfileSha} but saw ${shaClaimed}`);
}
Object.assign(this, {
size,
fanout,
hashes,
crcs,
packfileSha,
slices,
pack
});
}
static async fromIDX({ idx, pack }) {
return new GitPackfile(_extends$1({ pack }, parseIDX(idx)));
}
async read({ oid /*: {oid: string} */ }) {
if (!this.slices.has(oid)) return null;
let raw = this.pack.slice(...this.slices.get(oid));
/*
- The header is followed by number of object entries, each of
which looks like this:
(undeltified representation)
n-byte type and length (3-bit type, (n-1)*7+4-bit length)
compressed data
(deltified representation)
n-byte type and length (3-bit type, (n-1)*7+4-bit length)
20-byte base object name if OBJ_REF_DELTA or a negative relative
offset from the delta object's position in the pack if this
is an OBJ_OFS_DELTA object
compressed delta data
Observation: length of each object is encoded in a variable
length format and is not constrained to 32-bit or anything.
*/
}
}
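Editor's note: a hedged sketch of decoding the per-object header that the comment inside read() quotes from the pack format docs: the first byte carries the type in bits 4-6 and the low 4 bits of the length, and while the high bit is set each following byte contributes 7 more length bits. This decoder is mine, not part of the diff:
function readObjectHeader(buf) {
  let byte = buf[0];
  const type = (byte >> 4) & 0b111; // 1=commit, 2=tree, 3=blob, 4=tag, 6=ofs-delta, 7=ref-delta
  let length = byte & 0b1111;
  let shift = 4;
  let i = 1;
  while (byte & 0x80) {
    // continuation bit set: next byte holds 7 more length bits
    byte = buf[i++];
    length |= (byte & 0x7f) << shift;
    shift += 7;
  }
  return { type, length, bytesRead: i };
}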
// @flow
@@ -660,2 +789,2 @@ /*::
export { GitCommit, GitConfig, GitPktLine, GitIndex, GitTree };
export { GitCommit, GitConfig, GitObject, GitPktLine, GitPackfile, GitIndex, GitTree };
@@ -187,3 +187,3 @@ import systemfs from 'fs';
var name = "isomorphic-git";
var version = "0.0.24";
var version = "0.0.25";
var description = "A pure JavaScript implementation of git for node and browsers!";
@@ -205,3 +205,3 @@ var main = "dist/for-node/";
var dependencies = { "async-lock": "^1.0.0", "await-stream-ready": "^1.0.1", "babel-runtime": "^6.26.0", "buffer": "^5.0.7", "buffer-peek-stream": "^1.0.1", "buffercursor": "0.0.12", "gartal": "^1.1.2", "git-apply-delta": "0.0.7", "git-list-pack": "0.0.10", "ignore": "^3.3.6", "ini": "^1.3.4", "marky": "^1.2.0", "minimisted": "^2.0.0", "openpgp": "^2.5.10", "pad": "^2.0.1", "pako": "^1.0.5", "pify": "^3.0.0", "shasum": "^1.0.2", "simple-concat": "^1.0.0", "simple-get": "^2.7.0", "thru": "git+https://github.com/wmhilton-contrib/thru.git#master" };
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "husky": "^0.14.3", "jest": "^21.2.1", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^1.7.1", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^7.0.1", "rollup": "^0.50.0", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watchify": "^3.9.0" };
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "husky": "^0.14.3", "jest": "^21.2.1", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^1.7.1", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^7.0.1", "rollup": "^0.51.6", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watchify": "^3.9.0" };
var ava = { "source": ["dist/for-node/*"] };
@@ -208,0 +208,0 @@ var browserify = { "transform": ["browserify-shim"] };
@@ -840,2 +840,17 @@ 'use strict';
// @flow
/*::
import type { Stats } from 'fs'
import type { CacheEntry } from '../models/GitIndex'
*/
function cacheIsStale({ entry, stats /*: {
entry: CacheEntry,
stats: Stats
} */
}) {
// Comparison based on the description in Paragraph 4 of
// https://www.kernel.org/pub/software/scm/git/docs/technical/racy-git.txt
return entry.mode !== stats.mode || entry.mtime.valueOf() !== stats.mtime.valueOf() || entry.ctime.valueOf() !== stats.ctime.valueOf() || entry.uid !== stats.uid || entry.gid !== stats.gid || entry.ino !== stats.ino >> 0 || entry.size !== stats.size;
}
async function getOidAtPath({
@@ -945,2 +960,8 @@ gitdir,
}
} else if (indexEntry !== null && !cacheIsStale({ entry: indexEntry, stats })) {
if (indexEntry.oid === treeOid) {
return 'unmodified';
} else {
return 'modified';
}
} else {
@@ -991,3 +1012,3 @@ let object = await utils_js.read(path.join(workdir, pathname));
var version$1 = "0.0.24";
var version$1 = "0.0.25";
@@ -994,0 +1015,0 @@ function version() {
@@ -13,3 +13,2 @@ 'use strict';
var buffer = require('buffer');
var pako = _interopDefault(require('pako'));
var shasum = _interopDefault(require('shasum'));
@@ -155,34 +154,2 @@ var simpleGet = _interopDefault(require('simple-get'));
// @flow
function wrapObject({ type, object /*: {type: string, object: Buffer} */ }) {
let buffer$$1 = buffer.Buffer.concat([buffer.Buffer.from(type + ' '), buffer.Buffer.from(object.byteLength.toString()), buffer.Buffer.from([0]), buffer.Buffer.from(object)]);
let oid = shasum(buffer$$1);
return {
oid,
file: buffer.Buffer.from(pako.deflate(buffer$$1))
};
}
function unwrapObject({ oid, file /*: {oid: string, file: Buffer} */ }) {
let inflated = buffer.Buffer.from(pako.inflate(file));
if (oid) {
let sha = shasum(inflated);
if (sha !== oid) {
throw new Error(`SHA check failed! Expected ${oid}, computed ${sha}`);
}
}
let s = inflated.indexOf(32); // first space
let i = inflated.indexOf(0); // first null value
let type = inflated.slice(0, s).toString('utf8'); // get type of object
let length = inflated.slice(s + 1, i).toString('utf8'); // get length of object
let actualLength = inflated.length - (i + 1);
// verify length
if (parseInt(length) !== actualLength) {
throw new Error(`Length mismatch: expected ${length} bytes but got ${actualLength} instead.`);
}
return {
type,
object: buffer.Buffer.from(inflated.slice(i + 1))
};
}
class GitObjectManager {
@@ -200,3 +167,3 @@ static async read({ gitdir, oid /*: {gitdir: string, oid: string} */ }) {
}
let { type, object } = unwrapObject({ oid, file });
let { type, object } = models_js.GitObject.unwrap({ oid, file });
return { type, object };
@@ -211,4 +178,12 @@ }
static async write({ gitdir, type, object }) /*: Promise<string> */{
let { file, oid } = wrapObject({ type, object });
static async write({
gitdir,
type,
object /*: {
gitdir: string,
type: string,
object: Buffer
} */
}) /*: Promise<string> */{
let { file, oid } = models_js.GitObject.wrap({ type, object });
let filepath = `${gitdir}/objects/${oid.slice(0, 2)}/${oid.slice(2)}`;
@@ -220,3 +195,3 @@ // Don't overwrite existing git objects - this helps avoid EPERM errors.
return oid;
} /*: {gitdir: string, type: string, object: Buffer} */
}
}
@@ -223,0 +198,0 @@
@@ -12,2 +12,4 @@ 'use strict';
var set = _interopDefault(require('lodash/set'));
var pako = _interopDefault(require('pako'));
var shasum = _interopDefault(require('shasum'));
var BufferCursor = _interopDefault(require('buffercursor'));
@@ -17,3 +19,2 @@ var pad = _interopDefault(require('pad'));
var sortby = _interopDefault(require('lodash/sortBy'));
var shasum = _interopDefault(require('shasum'));
@@ -277,2 +278,35 @@ var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key$$1 in source) { if (Object.prototype.hasOwnProperty.call(source, key$$1)) { target[key$$1] = source[key$$1]; } } } return target; };
class GitObject {
static wrap({ type, object /*: {type: string, object: Buffer} */ }) {
let buffer$$1 = buffer.Buffer.concat([buffer.Buffer.from(type + ' '), buffer.Buffer.from(object.byteLength.toString()), buffer.Buffer.from([0]), buffer.Buffer.from(object)]);
let oid = shasum(buffer$$1);
return {
oid,
file: buffer.Buffer.from(pako.deflate(buffer$$1))
};
}
static unwrap({ oid, file /*: {oid: string, file: Buffer} */ }) {
let inflated = buffer.Buffer.from(pako.inflate(file));
if (oid) {
let sha = shasum(inflated);
if (sha !== oid) {
throw new Error(`SHA check failed! Expected ${oid}, computed ${sha}`);
}
}
let s = inflated.indexOf(32); // first space
let i = inflated.indexOf(0); // first null value
let type = inflated.slice(0, s).toString('utf8'); // get type of object
let length = inflated.slice(s + 1, i).toString('utf8'); // get length of object
let actualLength = inflated.length - (i + 1);
// verify length
if (parseInt(length) !== actualLength) {
throw new Error(`Length mismatch: expected ${length} bytes but got ${actualLength} instead.`);
}
return {
type,
object: buffer.Buffer.from(inflated.slice(i + 1))
};
}
}
// @flow
@@ -374,2 +408,97 @@ /**
var _extends$1 = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key$$1 in source) { if (Object.prototype.hasOwnProperty.call(source, key$$1)) { target[key$$1] = source[key$$1]; } } } return target; };
function parseIDX(buffer$$1) {
let reader = new BufferCursor(buffer$$1);
let magic = reader.slice(4).toString('hex');
// Check for IDX v2 magic number
if (magic !== 'ff744f63') {
return; // undefined
}
let version = reader.readUInt32BE();
if (version !== 2) {
throw new Error(`Unable to read version ${version} packfile IDX. (Only version 2 supported)`);
}
// Verify checksums
let shaComputed = shasum(buffer$$1.slice(0, -20));
let shaClaimed = buffer$$1.slice(-20).toString('hex');
if (shaClaimed !== shaComputed) {
throw new Error(`Invalid checksum in IDX buffer: expected ${shaClaimed} but saw ${shaComputed}`);
}
if (buffer$$1.byteLength > 2048 * 1024 * 1024) {
throw new Error(`To keep implementation simple, I haven't implemented the layer 5 feature needed to support packfiles > 2GB in size.`);
}
let fanout = [];
for (let i = 0; i < 256; i++) {
fanout.push(reader.readUInt32BE());
}
let size = fanout[255];
// For now we'll parse the whole thing. We can optimize later if we need to.
let hashes = [];
for (let i = 0; i < size; i++) {
hashes.push(reader.slice(20).toString('hex'));
}
let crcs = new Map();
for (let i = 0; i < size; i++) {
crcs.set(hashes[i], reader.readUInt32BE());
}
let offsets = new Map();
for (let i = 0; i < size; i++) {
offsets.set(hashes[i], reader.readUInt32BE());
}
let packfileSha = reader.slice(20).toString('hex');
// This part is gratuitous, but since we lack a good way to unzip arbitrary streams with extra data at the end in the browser...
let lengths = Array.from(offsets);
lengths.sort((a, b) => a[1] - b[1]); // List objects in order by offset
let sizes = new Map();
let slices = new Map();
for (let i = 0; i < size - 1; i++) {
sizes.set(lengths[i][0], lengths[i + 1][1] - lengths[i][1]);
slices.set(lengths[i][0], [lengths[i][1], lengths[i + 1][1]]);
}
slices.set(lengths[size - 1][0], [lengths[size - 1][1], undefined]);
return { size, fanout, hashes, crcs, packfileSha, slices };
}
class GitPackfile {
constructor({ size, fanout, hashes, crcs, packfileSha, slices, pack }) {
// Compare checksums
let shaClaimed = pack.slice(-20).toString('hex');
if (packfileSha !== shaClaimed) {
throw new Error(`Invalid packfile shasum in IDX buffer: expected ${packfileSha} but saw ${shaClaimed}`);
}
Object.assign(this, {
size,
fanout,
hashes,
crcs,
packfileSha,
slices,
pack
});
}
static async fromIDX({ idx, pack }) {
return new GitPackfile(_extends$1({ pack }, parseIDX(idx)));
}
async read({ oid /*: {oid: string} */ }) {
if (!this.slices.has(oid)) return null;
let raw = this.pack.slice(...this.slices.get(oid));
/*
- The header is followed by number of object entries, each of
which looks like this:
(undeltified representation)
n-byte type and length (3-bit type, (n-1)*7+4-bit length)
compressed data
(deltified representation)
n-byte type and length (3-bit type, (n-1)*7+4-bit length)
20-byte base object name if OBJ_REF_DELTA or a negative relative
offset from the delta object's position in the pack if this
is an OBJ_OFS_DELTA object
compressed delta data
Observation: length of each object is encoded in a variable
length format and is not constrained to 32-bit or anything.
*/
}
}
// @flow
@@ -668,4 +797,6 @@ /*::
exports.GitConfig = GitConfig;
exports.GitObject = GitObject;
exports.GitPktLine = GitPktLine;
exports.GitPackfile = GitPackfile;
exports.GitIndex = GitIndex;
exports.GitTree = GitTree;
@@ -193,3 +193,3 @@ 'use strict';
var name = "isomorphic-git";
var version = "0.0.24";
var version = "0.0.25";
var description = "A pure JavaScript implementation of git for node and browsers!";
@@ -211,3 +211,3 @@ var main = "dist/for-node/";
var dependencies = { "async-lock": "^1.0.0", "await-stream-ready": "^1.0.1", "babel-runtime": "^6.26.0", "buffer": "^5.0.7", "buffer-peek-stream": "^1.0.1", "buffercursor": "0.0.12", "gartal": "^1.1.2", "git-apply-delta": "0.0.7", "git-list-pack": "0.0.10", "ignore": "^3.3.6", "ini": "^1.3.4", "marky": "^1.2.0", "minimisted": "^2.0.0", "openpgp": "^2.5.10", "pad": "^2.0.1", "pako": "^1.0.5", "pify": "^3.0.0", "shasum": "^1.0.2", "simple-concat": "^1.0.0", "simple-get": "^2.7.0", "thru": "git+https://github.com/wmhilton-contrib/thru.git#master" };
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "husky": "^0.14.3", "jest": "^21.2.1", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^1.7.1", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^7.0.1", "rollup": "^0.50.0", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watchify": "^3.9.0" };
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "husky": "^0.14.3", "jest": "^21.2.1", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^1.7.1", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^7.0.1", "rollup": "^0.51.6", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watchify": "^3.9.0" };
var ava = { "source": ["dist/for-node/*"] };
@@ -214,0 +214,0 @@ var browserify = { "transform": ["browserify-shim"] };
{
"name": "isomorphic-git",
"version": "0.0.24",
"version": "0.0.25",
"description": "A pure JavaScript implementation of git for node and browsers!",
@@ -90,3 +90,3 @@ "main": "dist/for-node/",
"prettier-standard": "^7.0.1",
"rollup": "^0.50.0",
"rollup": "^0.51.6",
"rollup-plugin-babel": "^3.0.2",
@@ -93,0 +93,0 @@ "rollup-plugin-json": "^2.3.0",