isomorphic-git
Comparing version 0.0.33 to 0.0.34
cli.js
@@ -11,7 +11,13 @@ #!/usr/bin/env node | ||
// What's the command? | ||
let cmd = `git.${command}({fs, dir: '.', ${JSON.stringify(opts).slice(1)})` | ||
let cmd = `>> git.${command}({fs, dir: '.', ${JSON.stringify(opts).slice(1)})` | ||
console.log(cmd) | ||
let result = await git[command](Object.assign({ fs, dir: '.' }, opts)) | ||
if (result === undefined) return | ||
console.log(JSON.stringify(result, null, 2)) | ||
try { | ||
let result = await git[command](Object.assign({ fs, dir: '.' }, opts)) | ||
if (result === undefined) return | ||
console.log(JSON.stringify(result, null, 2)) | ||
} catch (err) { | ||
process.stderr.write(err.message + '\n') | ||
console.log(err) | ||
process.exit(1) | ||
} | ||
}) |
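// For illustration only: a hypothetical invocation of the CLI wrapper above.
// The binary name `isogit` and the `fetch` sub-command are assumed for the
// example; they are not taken from this diff.
//
//   $ isogit fetch --remote=origin
//   >> git.fetch({fs, dir: '.', "remote":"origin"})
//
// The command name and flags are forwarded to the matching function on the
// `git` API object; with this release, any rejection is caught, err.message is
// written to stderr, the error object is logged, and the process exits with
// status 1 instead of leaving an unhandled rejection.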
@@ -29,2 +29,4 @@ 'use strict'; | ||
var marky = _interopDefault(require('marky')); | ||
var pify = _interopDefault(require('pify')); | ||
var concat = _interopDefault(require('simple-concat')); | ||
var utils_js = require('./utils.js'); | ||
@@ -120,5 +122,17 @@ | ||
blob: 48, | ||
tag: 64 | ||
tag: 64, | ||
ofs_delta: 96, | ||
ref_delta: 112 | ||
/** | ||
* | ||
* If there were no errors, then there will be no `errors` property. | ||
* There can be a mix of `ok` messages and `errors` messages. | ||
* | ||
* @typedef {Object} PushResponse | ||
* @property {Array<string>} [ok] - The first item is "unpack" if the overall operation was successful. The remaining items are the names of refs that were updated successfully. | ||
* @property {Array<string>} [errors] - If the overall operation threw an error, the first item will be "unpack {Overall error message}". The remaining items are individual refs that failed to be updated in the format "{ref name} {error message}". | ||
*/ | ||
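// Illustrative values only (invented, not taken from a real server), showing
// the PushResponse shape documented above:
//   a fully successful push:  { ok: ['unpack', 'refs/heads/master'] }
//   a rejected ref update:    { ok: ['unpack'],
//                               errors: ['refs/heads/master some error message'] }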
/** | ||
* Push a branch | ||
@@ -130,8 +144,8 @@ * | ||
* @param {string} [args.gitdir=path.join(dir, '.git')] - The path to the [git directory](index.html#dir-vs-gitdir) | ||
* @param {string} [args.ref] - Which branch to push. By default this is the currently checked out branch of the repository. | ||
* @param {string} [args.ref=undefined] - Which branch to push. By default this is the currently checked out branch of the repository. | ||
* @param {string} [args.remote='origin'] - If URL is not specified, determines which remote to use. | ||
* @param {string} [args.url] - The URL of the remote git server. The default is the value set in the git config for that remote. | ||
* @param {string} [args.authUsername] - The username to use with Basic Auth | ||
* @param {string} [args.authPassword] - The password to use with Basic Auth | ||
* @returns {Promise<void>} - Resolves successfully when push completes | ||
* @param {string} [args.url=undefined] - The URL of the remote git server. The default is the value set in the git config for that remote. | ||
* @param {string} [args.authUsername=undefined] - The username to use with Basic Auth | ||
* @param {string} [args.authPassword=undefined] - The password to use with Basic Auth | ||
* @returns {Promise<PushResponse>} - Resolves successfully when push completes with a detailed description of the operation from the server. | ||
* | ||
@@ -244,5 +258,3 @@ * @example | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
var listCommits = function () { | ||
@@ -576,5 +588,3 @@ var _ref4 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee3(_ref3) { | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
var listObjects = function () { | ||
@@ -789,5 +799,3 @@ var _ref9 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee5(_ref8) { | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
var pack = function () { | ||
@@ -1065,3 +1073,3 @@ var _ref13 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee6(_ref12) { | ||
// Github uses all ref-deltas when I fetch packfiles instead of all ofs-deltas. Nice! | ||
capabilities = 'multi_ack_detailed no-done side-band-64k thin-pack agent=git/' + utils_js.pkg.name + '@' + utils_js.pkg.version + (relative ? ' deepen-relative' : ''); | ||
capabilities = 'multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/' + utils_js.pkg.name + '@' + utils_js.pkg.version + (relative ? ' deepen-relative' : ''); | ||
packstream = new stream.PassThrough(); | ||
@@ -1328,3 +1336,3 @@ | ||
onprogress = _ref.onprogress; | ||
var response; | ||
var response, packfile, packfileSha; | ||
return _regeneratorRuntime.wrap(function _callee$(_context) { | ||
@@ -1352,5 +1360,11 @@ while (1) { | ||
_context.next = 5; | ||
return unpack({ fs: fs, gitdir: gitdir, inputStream: response.packfile, onprogress: onprogress }); | ||
return pify(concat)(response.packfile); | ||
case 5: | ||
packfile = _context.sent; | ||
packfileSha = packfile.slice(-20).toString('hex'); | ||
_context.next = 9; | ||
return fs.write(path.join(gitdir, 'objects/pack/pack-' + packfileSha + '.pack'), packfile); | ||
case 9: | ||
case 'end': | ||
@@ -1357,0 +1371,0 @@ return _context.stop(); |
@@ -519,2 +519,4 @@ 'use strict'; | ||
var PackfileCache = new _Map(); | ||
/** @ignore */ | ||
@@ -534,3 +536,3 @@ var GitObjectManager = function () { | ||
var fs, file, text, _GitObject$unwrap, type, object; | ||
var fs, file, getExternalRefDelta, list, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, filename, p, idxName, idx, pack, _pack, text, _GitObject$unwrap, type, object; | ||
@@ -542,2 +544,4 @@ return _regeneratorRuntime.wrap(function _callee$(_context) { | ||
fs = new models_js.FileSystem(_fs); | ||
// Look for it in the loose object directory. | ||
_context.next = 3; | ||
@@ -550,14 +554,161 @@ return fs.read(gitdir + '/objects/' + oid.slice(0, 2) + '/' + oid.slice(2)); | ||
if (file) { | ||
_context.next = 13; | ||
_context.next = 65; | ||
break; | ||
} | ||
_context.next = 7; | ||
// Curry the current read method so that the packfile un-deltification | ||
// process can acquire external ref-deltas. | ||
getExternalRefDelta = function getExternalRefDelta(oid) { | ||
return GitObjectManager.read({ fs: _fs, gitdir: gitdir, oid: oid }); | ||
}; | ||
// Iterate through all the .pack files | ||
_context.next = 8; | ||
return fs.readdir(path.join(gitdir, '/objects/pack')); | ||
case 8: | ||
list = _context.sent; | ||
list = list.filter(function (x) { | ||
return x.endsWith('.pack'); | ||
}); | ||
_iteratorNormalCompletion = true; | ||
_didIteratorError = false; | ||
_iteratorError = undefined; | ||
_context.prev = 13; | ||
_iterator = _getIterator(list); | ||
case 15: | ||
if (_iteratorNormalCompletion = (_step = _iterator.next()).done) { | ||
_context.next = 51; | ||
break; | ||
} | ||
filename = _step.value; | ||
// Try to get the packfile from the in-memory cache | ||
p = PackfileCache.get(filename); | ||
if (p) { | ||
_context.next = 40; | ||
break; | ||
} | ||
// If not there, load it from a .idx file | ||
idxName = filename.replace(/pack$/, 'idx'); | ||
_context.next = 22; | ||
return fs.exists(gitdir + '/objects/pack/' + idxName); | ||
case 22: | ||
if (!_context.sent) { | ||
_context.next = 31; | ||
break; | ||
} | ||
_context.next = 25; | ||
return fs.read(gitdir + '/objects/pack/' + idxName); | ||
case 25: | ||
idx = _context.sent; | ||
_context.next = 28; | ||
return models_js.GitPackIndex.fromIdx({ idx: idx, getExternalRefDelta: getExternalRefDelta }); | ||
case 28: | ||
p = _context.sent; | ||
_context.next = 39; | ||
break; | ||
case 31: | ||
_context.next = 33; | ||
return fs.read(gitdir + '/objects/pack/' + filename); | ||
case 33: | ||
pack = _context.sent; | ||
_context.next = 36; | ||
return models_js.GitPackIndex.fromPack({ pack: pack, getExternalRefDelta: getExternalRefDelta }); | ||
case 36: | ||
p = _context.sent; | ||
_context.next = 39; | ||
return fs.write(gitdir + '/objects/pack/' + idxName, p.toBuffer()); | ||
case 39: | ||
PackfileCache.set(filename, p); | ||
case 40: | ||
if (!p.hashes.includes(oid)) { | ||
_context.next = 48; | ||
break; | ||
} | ||
if (p.pack) { | ||
_context.next = 47; | ||
break; | ||
} | ||
_context.next = 44; | ||
return fs.read(gitdir + '/objects/pack/' + filename); | ||
case 44: | ||
_pack = _context.sent; | ||
_context.next = 47; | ||
return p.load({ pack: _pack }); | ||
case 47: | ||
return _context.abrupt('return', p.read({ oid: oid, getExternalRefDelta: getExternalRefDelta })); | ||
case 48: | ||
_iteratorNormalCompletion = true; | ||
_context.next = 15; | ||
break; | ||
case 51: | ||
_context.next = 57; | ||
break; | ||
case 53: | ||
_context.prev = 53; | ||
_context.t0 = _context['catch'](13); | ||
_didIteratorError = true; | ||
_iteratorError = _context.t0; | ||
case 57: | ||
_context.prev = 57; | ||
_context.prev = 58; | ||
if (!_iteratorNormalCompletion && _iterator.return) { | ||
_iterator.return(); | ||
} | ||
case 60: | ||
_context.prev = 60; | ||
if (!_didIteratorError) { | ||
_context.next = 63; | ||
break; | ||
} | ||
throw _iteratorError; | ||
case 63: | ||
return _context.finish(60); | ||
case 64: | ||
return _context.finish(57); | ||
case 65: | ||
if (file) { | ||
_context.next = 71; | ||
break; | ||
} | ||
_context.next = 68; | ||
return fs.read(gitdir + '/shallow', { encoding: 'utf8' }); | ||
case 7: | ||
case 68: | ||
text = _context.sent; | ||
if (!(text !== null && text.includes(oid))) { | ||
_context.next = 12; | ||
_context.next = 71; | ||
break; | ||
@@ -568,10 +719,15 @@ } | ||
case 12: | ||
case 71: | ||
if (file) { | ||
_context.next = 73; | ||
break; | ||
} | ||
throw new Error('Failed to read git object with oid ' + oid); | ||
case 13: | ||
case 73: | ||
_GitObject$unwrap = models_js.GitObject.unwrap({ oid: oid, file: file }), type = _GitObject$unwrap.type, object = _GitObject$unwrap.object; | ||
return _context.abrupt('return', { type: type, object: object }); | ||
case 15: | ||
case 75: | ||
case 'end': | ||
@@ -581,3 +737,3 @@ return _context.stop(); | ||
} | ||
}, _callee, this); | ||
}, _callee, this, [[13, 53, 57, 65], [58,, 60, 64]]); | ||
})); | ||
@@ -834,2 +990,16 @@ | ||
// Update files | ||
// TODO: For large repos with a history of thousands of pull requests | ||
// (i.e. gitlab-ce) it would be vastly more efficient to write them | ||
// to .git/packed-refs. | ||
// The trick is to make sure we a) don't write a packed ref that is | ||
// already shadowed by a loose ref and b) don't lose any refs already | ||
// in packed-refs. Doing this efficiently may be difficult. A | ||
// solution that might work is | ||
// a) load the current packed-refs file | ||
// b) add actualRefsToWrite, overriding the existing values if present | ||
// c) enumerate all the loose refs currently in .git/refs/remotes/${remote} | ||
// d) overwrite their value with the new value. | ||
// Examples of refs we need to avoid writing in loose format for efficiency's sake | ||
// are .git/refs/remotes/origin/refs/remotes/remote_mirror_3059 | ||
// and .git/refs/remotes/origin/refs/merge-requests | ||
_context.next = 58; | ||
@@ -1391,3 +1561,4 @@ break; | ||
var _ref5 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee4(stream$$1 /*: ReadableStream */) { | ||
var service, res; | ||
var service, _ref6, packetlines, packfile, result, response, read, line, lines, _iteratorNormalCompletion2, _didIteratorError2, _iteratorError2, _iterator2, _step2, _line, status, refAndMessage; | ||
return _regeneratorRuntime.wrap(function _callee4$(_context4) { | ||
@@ -1399,9 +1570,126 @@ while (1) { | ||
_context4.next = 3; | ||
return this.stream({ stream: stream$$1, service: service }); | ||
return this.stream({ | ||
stream: stream$$1, | ||
service: service | ||
}); | ||
case 3: | ||
res = _context4.sent; | ||
return _context4.abrupt('return', res); | ||
_ref6 = _context4.sent; | ||
packetlines = _ref6.packetlines; | ||
packfile = _ref6.packfile; | ||
_context4.next = 8; | ||
return pify(concat)(packfile); | ||
case 5: | ||
case 8: | ||
packfile = _context4.sent; | ||
_context4.next = 11; | ||
return pify(concat)(packetlines); | ||
case 11: | ||
packetlines = _context4.sent; | ||
result = {}; | ||
// Parse the response! | ||
// I'm combining the side-band-64k and regular streams | ||
// because Github returns the first line in the sideband while | ||
// git-http-server returns it without the sideband. | ||
response = ''; | ||
read = models_js.GitPktLine.reader(packfile); | ||
_context4.next = 17; | ||
return read(); | ||
case 17: | ||
line = _context4.sent; | ||
case 18: | ||
if (!(line !== null && line !== true)) { | ||
_context4.next = 25; | ||
break; | ||
} | ||
response += line.toString('utf8') + '\n'; | ||
_context4.next = 22; | ||
return read(); | ||
case 22: | ||
line = _context4.sent; | ||
_context4.next = 18; | ||
break; | ||
case 25: | ||
response += packetlines.toString('utf8'); | ||
lines = response.toString('utf8').split('\n'); | ||
// We're expecting "unpack {unpack-result}" | ||
line = lines.shift(); | ||
if (line.startsWith('unpack ')) { | ||
_context4.next = 30; | ||
break; | ||
} | ||
throw new Error('Unparsable response from server! Expected \'unpack ok\' or \'unpack [error message]\' but got \'' + line + '\''); | ||
case 30: | ||
if (line === 'unpack ok') { | ||
result.ok = ['unpack']; | ||
} else { | ||
result.errors = [line.trim()]; | ||
} | ||
_iteratorNormalCompletion2 = true; | ||
_didIteratorError2 = false; | ||
_iteratorError2 = undefined; | ||
_context4.prev = 34; | ||
for (_iterator2 = _getIterator(lines); !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { | ||
_line = _step2.value; | ||
status = _line.slice(0, 2); | ||
refAndMessage = _line.slice(3); | ||
if (status === 'ok') { | ||
result.ok = result.ok || []; | ||
result.ok.push(refAndMessage); | ||
} else if (status === 'ng') { | ||
result.errors = result.errors || []; | ||
result.errors.push(refAndMessage); | ||
} | ||
} | ||
_context4.next = 42; | ||
break; | ||
case 38: | ||
_context4.prev = 38; | ||
_context4.t0 = _context4['catch'](34); | ||
_didIteratorError2 = true; | ||
_iteratorError2 = _context4.t0; | ||
case 42: | ||
_context4.prev = 42; | ||
_context4.prev = 43; | ||
if (!_iteratorNormalCompletion2 && _iterator2.return) { | ||
_iterator2.return(); | ||
} | ||
case 45: | ||
_context4.prev = 45; | ||
if (!_didIteratorError2) { | ||
_context4.next = 48; | ||
break; | ||
} | ||
throw _iteratorError2; | ||
case 48: | ||
return _context4.finish(45); | ||
case 49: | ||
return _context4.finish(42); | ||
case 50: | ||
console.log(result); | ||
return _context4.abrupt('return', result); | ||
case 52: | ||
case 'end': | ||
@@ -1411,3 +1699,3 @@ return _context4.stop(); | ||
} | ||
}, _callee4, this); | ||
}, _callee4, this, [[34, 38, 42, 50], [43,, 45, 49]]); | ||
})); | ||
@@ -1424,3 +1712,3 @@ | ||
value: function () { | ||
var _ref6 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee5(stream$$1 /*: ReadableStream */) { | ||
var _ref7 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee5(stream$$1 /*: ReadableStream */) { | ||
var service, res; | ||
@@ -1448,3 +1736,3 @@ return _regeneratorRuntime.wrap(function _callee5$(_context5) { | ||
function pull(_x3) { | ||
return _ref6.apply(this, arguments); | ||
return _ref7.apply(this, arguments); | ||
} | ||
@@ -1457,6 +1745,6 @@ | ||
value: function () { | ||
var _ref8 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee7(_ref7) { | ||
var _stream = _ref7.stream, | ||
service = _ref7.service; | ||
var headers, res, read, packetlines, packfile, progress, nextBit; | ||
var _ref9 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee7(_ref8) { | ||
var _stream = _ref8.stream, | ||
service = _ref8.service; | ||
var headers, res, data, read, packetlines, packfile, progress, nextBit; | ||
return _regeneratorRuntime.wrap(function _callee7$(_context7) { | ||
@@ -1484,13 +1772,10 @@ while (1) { | ||
res = _context7.sent; | ||
_context7.next = 10; | ||
return pify(concat)(res); | ||
if (!(service === 'git-receive-pack')) { | ||
_context7.next = 10; | ||
break; | ||
} | ||
case 10: | ||
data = _context7.sent; | ||
return _context7.abrupt('return', res); | ||
case 10: | ||
// Parse the response! | ||
read = models_js.GitPktLine.streamReader(res); | ||
read = models_js.GitPktLine.reader(data); | ||
// And now for the ridiculous side-band-64k protocol | ||
@@ -1504,3 +1789,3 @@ | ||
nextBit = function () { | ||
var _ref9 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee6() { | ||
var _ref10 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee6() { | ||
var line, error; | ||
@@ -1518,6 +1803,14 @@ return _regeneratorRuntime.wrap(function _callee6$(_context6) { | ||
if (!(line === null)) { | ||
_context6.next = 8; | ||
_context6.next = 5; | ||
break; | ||
} | ||
return _context6.abrupt('return', nextBit()); | ||
case 5: | ||
if (!(line === true)) { | ||
_context6.next = 10; | ||
break; | ||
} | ||
packetlines.end(); | ||
@@ -1528,18 +1821,18 @@ progress.end(); | ||
case 8: | ||
case 10: | ||
_context6.t0 = line[0]; | ||
_context6.next = _context6.t0 === 1 ? 11 : _context6.t0 === 2 ? 13 : _context6.t0 === 3 ? 15 : 19; | ||
_context6.next = _context6.t0 === 1 ? 13 : _context6.t0 === 2 ? 15 : _context6.t0 === 3 ? 17 : 21; | ||
break; | ||
case 11: | ||
case 13: | ||
// pack data | ||
packfile.write(line.slice(1)); | ||
return _context6.abrupt('break', 20); | ||
return _context6.abrupt('break', 22); | ||
case 13: | ||
case 15: | ||
// progress message | ||
progress.write(line.slice(1)); | ||
return _context6.abrupt('break', 20); | ||
return _context6.abrupt('break', 22); | ||
case 15: | ||
case 17: | ||
// fatal error message just before stream aborts | ||
@@ -1552,7 +1845,7 @@ error = line.slice(1); | ||
case 19: | ||
case 21: | ||
// Not part of the side-band-64k protocol | ||
packetlines.write(line.slice(0)); | ||
case 20: | ||
case 22: | ||
// Careful not to blow up the stack. | ||
@@ -1562,3 +1855,3 @@ // I think Promises in a tail-call position should be OK. | ||
case 21: | ||
case 23: | ||
case 'end': | ||
@@ -1572,3 +1865,3 @@ return _context6.stop(); | ||
return function nextBit() { | ||
return _ref9.apply(this, arguments); | ||
return _ref10.apply(this, arguments); | ||
}; | ||
@@ -1584,3 +1877,3 @@ }(); | ||
case 17: | ||
case 18: | ||
case 'end': | ||
@@ -1594,11 +1887,7 @@ return _context7.stop(); | ||
function stream$$1(_x4) { | ||
return _ref8.apply(this, arguments); | ||
return _ref9.apply(this, arguments); | ||
} | ||
return stream$$1; | ||
}() /*: { | ||
stream: ReadableStream, | ||
service: string | ||
} */ | ||
}() | ||
}]); | ||
@@ -1605,0 +1894,0 @@ |
@@ -136,4 +136,4 @@ 'use strict'; | ||
var files = ["dist", "cli.js"]; | ||
var dependencies = { "async-lock": "^1.0.0", "await-stream-ready": "^1.0.1", "babel-runtime": "^6.26.0", "buffer": "^5.0.7", "buffer-peek-stream": "^1.0.1", "buffercursor": "0.0.12", "gartal": "^1.1.2", "git-apply-delta": "0.0.7", "git-list-pack": "0.0.10", "ignore": "^3.3.6", "ini": "^1.3.4", "marky": "^1.2.0", "minimisted": "^2.0.0", "openpgp": "^2.5.10", "pad": "^2.0.1", "pako": "^1.0.5", "pify": "^3.0.0", "shasum": "^1.0.2", "simple-concat": "^1.0.0", "simple-get": "^2.7.0", "through2": "^2.0.3" }; | ||
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "esdoc": "^1.0.4", "esdoc-ecmascript-proposal-plugin": "^1.0.0", "esdoc-importpath-plugin": "^1.0.1", "esdoc-standard-plugin": "^1.0.0", "husky": "^0.14.3", "jest": "^21.2.1", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^1.7.1", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^7.0.3", "rollup": "^0.51.6", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watch": "^1.0.2", "watchify": "^3.9.0", "semantic-release": "^8.2.0" }; | ||
var dependencies = { "async-lock": "^1.0.0", "await-stream-ready": "^1.0.1", "babel-runtime": "^6.26.0", "buffer": "^5.0.7", "buffer-peek-stream": "^1.0.1", "buffercursor": "0.0.12", "crc": "^3.5.0", "gartal": "^1.1.2", "git-apply-delta": "0.0.7", "git-list-pack": "0.0.10", "ignore": "^3.3.6", "ini": "^1.3.4", "marky": "^1.2.0", "minimisted": "^2.0.0", "openpgp": "^2.5.10", "pad": "^2.0.1", "pako": "^1.0.5", "pify": "^3.0.0", "shasum": "^1.0.2", "simple-concat": "^1.0.0", "simple-get": "^2.7.0", "through2": "^2.0.3" }; | ||
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "esdoc": "^1.0.4", "esdoc-ecmascript-proposal-plugin": "^1.0.0", "esdoc-importpath-plugin": "^1.0.1", "esdoc-standard-plugin": "^1.0.0", "husky": "^0.14.3", "jest": "^22.0.4", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^2.0.0", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^8.0.0", "rollup": "^0.53.0", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "semantic-release": "^8.2.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watch": "^1.0.2", "watchify": "^3.9.0" }; | ||
var ava = { "source": ["dist/for-node/*"] }; | ||
@@ -140,0 +140,0 @@ var browserify = { "transform": ["browserify-shim"] }; |
import path from 'path'; | ||
import { GitConfigManager, GitIgnoreManager, GitIndexManager, GitObjectManager, GitRefManager, GitRemoteHTTP, GitShallowManager } from './managers.js'; | ||
import { FileSystem, GitCommit, GitPktLine, GitTree, SignedGitCommit } from './models.js'; | ||
import { FileSystem, GitCommit, GitPackIndex, GitPktLine, GitTree, SignedGitCommit } from './models.js'; | ||
import { Buffer } from 'buffer'; | ||
@@ -11,2 +11,4 @@ import { PassThrough } from 'stream'; | ||
import marky from 'marky'; | ||
import pify from 'pify'; | ||
import concat from 'simple-concat'; | ||
import { flatFileListToDirectoryStructure, pkg } from './utils.js'; | ||
@@ -188,3 +190,5 @@ import pad from 'pad'; | ||
}); | ||
await unpack({ fs, gitdir, inputStream: response.packfile, onprogress }); | ||
let packfile = await pify(concat)(response.packfile); | ||
let packfileSha = packfile.slice(-20).toString('hex'); | ||
await fs.write(path.join(gitdir, `objects/pack/pack-${packfileSha}.pack`), packfile); | ||
} | ||
@@ -255,3 +259,3 @@ | ||
// Github uses all ref-deltas when I fetch packfiles instead of all ofs-deltas. Nice! | ||
const capabilities = `multi_ack_detailed no-done side-band-64k thin-pack agent=git/${pkg.name}@${pkg.version}${relative ? ' deepen-relative' : ''}`; | ||
const capabilities = `multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/${pkg.name}@${pkg.version}${relative ? ' deepen-relative' : ''}`; | ||
let packstream = new PassThrough(); | ||
@@ -307,23 +311,2 @@ packstream.write(GitPktLine.encode(`want ${want} ${capabilities}\n`)); | ||
const types = { | ||
1: 'commit', | ||
2: 'tree', | ||
3: 'blob', | ||
4: 'tag', | ||
6: 'ofs-delta', | ||
7: 'ref-delta' | ||
}; | ||
function parseVarInt(buffer$$1 /*: Buffer */) { | ||
let n = 0; | ||
for (var i = 0; i < buffer$$1.byteLength; i++) { | ||
n = (buffer$$1[i] & 0b01111111) + (n << 7); | ||
if ((buffer$$1[i] & 0b10000000) === 0) { | ||
if (i !== buffer$$1.byteLength - 1) throw new Error('Invalid varint buffer'); | ||
return n; | ||
} | ||
} | ||
throw new Error('Invalid varint buffer'); | ||
} | ||
/** | ||
@@ -338,119 +321,2 @@ * @ignore | ||
*/ | ||
async function unpack({ | ||
dir, | ||
gitdir = path.join(dir, '.git'), | ||
fs: _fs, | ||
inputStream, | ||
onprogress | ||
}) { | ||
const fs = new FileSystem(_fs); | ||
return new Promise(function (resolve, reject) { | ||
// Read header | ||
peek(inputStream, 12, (err, data, inputStream) => { | ||
if (err) return reject(err); | ||
let iden = data.slice(0, 4).toString('utf8'); | ||
if (iden !== 'PACK') { | ||
throw new Error(`Packfile started with '${iden}'. Expected 'PACK'`); | ||
} | ||
let ver = data.slice(4, 8).toString('hex'); | ||
if (ver !== '00000002') { | ||
throw new Error(`Unknown packfile version '${ver}'. Expected 00000002.`); | ||
} | ||
// Read a 4 byte (32-bit) int | ||
let numObjects = data.readInt32BE(8); | ||
if (onprogress !== undefined) { | ||
onprogress({ loaded: 0, total: numObjects, lengthComputable: true }); | ||
} | ||
if (numObjects === 0) return resolve(); | ||
// And on our merry way | ||
let totalTime = 0; | ||
let totalApplyDeltaTime = 0; | ||
let totalWriteFileTime = 0; | ||
let totalReadFileTime = 0; | ||
let offsetMap = new Map(); | ||
inputStream.pipe(listpack()).pipe(through2.obj(async ({ data, type, reference, offset, num }, enc, next) => { | ||
type = types[type]; | ||
marky.mark(`${type} #${num} ${data.length}B`); | ||
if (type === 'ref-delta') { | ||
let oid = Buffer.from(reference).toString('hex'); | ||
try { | ||
marky.mark(`readFile`); | ||
let { object, type } = await GitObjectManager.read({ | ||
fs, | ||
gitdir, | ||
oid | ||
}); | ||
totalReadFileTime += marky.stop(`readFile`).duration; | ||
marky.mark(`applyDelta`); | ||
let result = applyDelta(data, object); | ||
totalApplyDeltaTime += marky.stop(`applyDelta`).duration; | ||
marky.mark(`writeFile`); | ||
let newoid = await GitObjectManager.write({ | ||
fs, | ||
gitdir, | ||
type, | ||
object: result | ||
}); | ||
totalWriteFileTime += marky.stop(`writeFile`).duration; | ||
// console.log(`${type} ${newoid} ref-delta ${oid}`) | ||
offsetMap.set(offset, newoid); | ||
} catch (err) { | ||
throw new Error(`Could not find object ${reference} ${oid} that is referenced by a ref-delta object in packfile at byte offset ${offset}.`); | ||
} | ||
} else if (type === 'ofs-delta') { | ||
// Note: this might be not working because offsets might not be | ||
// guaranteed to be on object boundaries? In which case we'd need | ||
// to write the packfile to disk first, I think. | ||
// For now I've "solved" it by simply not advertising ofs-delta as a capability | ||
// during the HTTP request, so Github will only send ref-deltas not ofs-deltas. | ||
let absoluteOffset = offset - parseVarInt(reference); | ||
let referenceOid = offsetMap.get(absoluteOffset); | ||
// console.log(`${offset} ofs-delta ${absoluteOffset} ${referenceOid}`) | ||
let { type, object } = await GitObjectManager.read({ | ||
fs, | ||
gitdir, | ||
oid: referenceOid | ||
}); | ||
let result = applyDelta(data, object); | ||
let oid = await GitObjectManager.write({ | ||
fs, | ||
gitdir, | ||
type, | ||
object: result | ||
}); | ||
// console.log(`${offset} ${type} ${oid} ofs-delta ${referenceOid}`) | ||
offsetMap.set(offset, oid); | ||
} else { | ||
marky.mark(`writeFile`); | ||
let oid = await GitObjectManager.write({ | ||
fs, | ||
gitdir, | ||
type, | ||
object: data | ||
}); | ||
totalWriteFileTime += marky.stop(`writeFile`).duration; | ||
// console.log(`${offset} ${type} ${oid}`) | ||
offsetMap.set(offset, oid); | ||
} | ||
if (onprogress !== undefined) { | ||
onprogress({ | ||
loaded: numObjects - num, | ||
total: numObjects, | ||
lengthComputable: true | ||
}); | ||
} | ||
let perfentry = marky.stop(`${type} #${num} ${data.length}B`); | ||
totalTime += perfentry.duration; | ||
if (num === 0) { | ||
console.log(`Total time unpacking objects: ${totalTime}`); | ||
console.log(`Total time applying deltas: ${totalApplyDeltaTime}`); | ||
console.log(`Total time reading files: ${totalReadFileTime}`); | ||
console.log(`Total time writing files: ${totalWriteFileTime}`); | ||
return resolve(); | ||
} | ||
next(null); | ||
})).on('error', reject).on('finish', resolve); | ||
}); | ||
}); | ||
} | ||
@@ -917,5 +783,17 @@ async function writeTreeToDisk({ gitdir, dir, index, prefix, tree, fs }) { | ||
blob: 0b0110000, | ||
tag: 0b1000000 | ||
tag: 0b1000000, | ||
ofs_delta: 0b1100000, | ||
ref_delta: 0b1110000 | ||
/** | ||
* | ||
* If there were no errors, then there will be no `errors` property. | ||
* There can be a mix of `ok` messages and `errors` messages. | ||
* | ||
* @typedef {Object} PushResponse | ||
* @property {Array<string>} [ok] - The first item is "unpack" if the overall operation was successful. The remaining items are the names of refs that were updated successfully. | ||
* @property {Array<string>} [errors] - If the overall operation threw an error, the first item will be "unpack {Overall error message}". The remaining items are individual refs that failed to be updated in the format "{ref name} {error message}". | ||
*/ | ||
/** | ||
* Push a branch | ||
@@ -927,8 +805,8 @@ * | ||
* @param {string} [args.gitdir=path.join(dir, '.git')] - The path to the [git directory](index.html#dir-vs-gitdir) | ||
* @param {string} [args.ref] - Which branch to push. By default this is the currently checked out branch of the repository. | ||
* @param {string} [args.ref=undefined] - Which branch to push. By default this is the currently checked out branch of the repository. | ||
* @param {string} [args.remote='origin'] - If URL is not specified, determines which remote to use. | ||
* @param {string} [args.url] - The URL of the remote git server. The default is the value set in the git config for that remote. | ||
* @param {string} [args.authUsername] - The username to use with Basic Auth | ||
* @param {string} [args.authPassword] - The password to use with Basic Auth | ||
* @returns {Promise<void>} - Resolves successfully when push completes | ||
* @param {string} [args.url=undefined] - The URL of the remote git server. The default is the value set in the git config for that remote. | ||
* @param {string} [args.authUsername=undefined] - The username to use with Basic Auth | ||
* @param {string} [args.authPassword=undefined] - The password to use with Basic Auth | ||
* @returns {Promise<PushResponse>} - Resolves successfully when push completes with a detailed description of the operation from the server. | ||
* | ||
@@ -991,5 +869,3 @@ * @example | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function listCommits({ | ||
@@ -1042,5 +918,3 @@ dir, | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function listObjects({ | ||
@@ -1084,5 +958,3 @@ dir, | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function pack({ | ||
@@ -1432,2 +1304,28 @@ dir, | ||
export { add, clone, checkout, commit, fetch, init, listFiles, listBranches, log, push, remove, config, verify, status, findRoot, version }; | ||
/** | ||
* Create the .idx file for a given .pack file | ||
* | ||
* @param {Object} args - Arguments object | ||
* @param {FSModule} args.fs - The filesystem holding the git repo | ||
* @param {string} args.dir - The path to the [working tree](index.html#dir-vs-gitdir) directory | ||
* @param {string} [args.gitdir=path.join(dir, '.git')] - The path to the [git directory](index.html#dir-vs-gitdir) | ||
* @param {string} args.filepath - The path to the .pack file to index. | ||
* @returns {Promise<void>} - Resolves successfully once the .idx file been written. | ||
* | ||
* @example | ||
* let repo = {fs, dir: '.'} | ||
* await indexPack({...repo, filepath: 'pack-9cbd243a1caa4cb4bef976062434a958d82721a9.pack'}) | ||
*/ | ||
async function indexPack({ | ||
dir, | ||
gitdir = path.join(dir, '.git'), | ||
fs: _fs, | ||
filepath | ||
}) { | ||
const fs = new FileSystem(_fs); | ||
const pack = await fs.read(path.join(dir, filepath)); | ||
const idx = await GitPackIndex.fromPack({ pack }); | ||
await fs.write(filepath.replace(/\.pack$/, '.idx'), idx.toBuffer()); | ||
} | ||
export { add, clone, checkout, commit, fetch, init, listFiles, listBranches, log, push, remove, config, verify, status, findRoot, version, indexPack }; |
@@ -14,2 +14,4 @@ import path from 'path'; | ||
import marky from 'marky'; | ||
import pify from 'pify'; | ||
import concat from 'simple-concat'; | ||
import './utils.js'; | ||
@@ -51,5 +53,17 @@ | ||
blob: 0b0110000, | ||
tag: 0b1000000 | ||
tag: 0b1000000, | ||
ofs_delta: 0b1100000, | ||
ref_delta: 0b1110000 | ||
/** | ||
* | ||
* If there were no errors, then there will be no `errors` property. | ||
* There can be a mix of `ok` messages and `errors` messages. | ||
* | ||
* @typedef {Object} PushResponse | ||
* @property {Array<string>} [ok] - The first item is "unpack" if the overall operation was successful. The remaining items are the names of refs that were updated successfully. | ||
* @property {Array<string>} [errors] - If the overall operation threw an error, the first item will be "unpack {Overall error message}". The remaining items are individual refs that failed to be updated in the format "{ref name} {error message}". | ||
*/ | ||
/** | ||
* Push a branch | ||
@@ -61,8 +75,8 @@ * | ||
* @param {string} [args.gitdir=path.join(dir, '.git')] - The path to the [git directory](index.html#dir-vs-gitdir) | ||
* @param {string} [args.ref] - Which branch to push. By default this is the currently checked out branch of the repository. | ||
* @param {string} [args.ref=undefined] - Which branch to push. By default this is the currently checked out branch of the repository. | ||
* @param {string} [args.remote='origin'] - If URL is not specified, determines which remote to use. | ||
* @param {string} [args.url] - The URL of the remote git server. The default is the value set in the git config for that remote. | ||
* @param {string} [args.authUsername] - The username to use with Basic Auth | ||
* @param {string} [args.authPassword] - The password to use with Basic Auth | ||
* @returns {Promise<void>} - Resolves successfully when push completes | ||
* @param {string} [args.url=undefined] - The URL of the remote git server. The default is the value set in the git config for that remote. | ||
* @param {string} [args.authUsername=undefined] - The username to use with Basic Auth | ||
* @param {string} [args.authPassword=undefined] - The password to use with Basic Auth | ||
* @returns {Promise<PushResponse>} - Resolves successfully when push completes with a detailed description of the operation from the server. | ||
* | ||
@@ -81,5 +95,3 @@ * @example | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function listCommits({ | ||
@@ -132,5 +144,3 @@ dir, | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function listObjects({ | ||
@@ -174,5 +184,3 @@ dir, | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function pack({ | ||
@@ -179,0 +187,0 @@ dir, |
@@ -1,2 +0,2 @@ | ||
import { FileSystem, GitConfig, GitIndex, GitObject, GitPktLine } from './models.js'; | ||
import { FileSystem, GitConfig, GitIndex, GitObject, GitPackIndex, GitPktLine } from './models.js'; | ||
import path from 'path'; | ||
@@ -156,2 +156,4 @@ import AsyncLock from 'async-lock'; | ||
const PackfileCache = new Map(); | ||
/** @ignore */ | ||
@@ -161,12 +163,54 @@ class GitObjectManager { | ||
const fs = new FileSystem(_fs); | ||
// Look for it in the loose object directory. | ||
let file = await fs.read(`${gitdir}/objects/${oid.slice(0, 2)}/${oid.slice(2)}`); | ||
// Check to see if it's in a packfile. | ||
if (!file) { | ||
// Check to see if it's in shallow commits. | ||
// Curry the current read method so that the packfile un-deltification | ||
// process can acquire external ref-deltas. | ||
const getExternalRefDelta = oid => GitObjectManager.read({ fs: _fs, gitdir, oid }); | ||
// Iterate through all the .pack files | ||
let list = await fs.readdir(path.join(gitdir, '/objects/pack')); | ||
list = list.filter(x => x.endsWith('.pack')); | ||
for (let filename of list) { | ||
// Try to get the packfile from the in-memory cache | ||
let p = PackfileCache.get(filename); | ||
if (!p) { | ||
// If not there, load it from a .idx file | ||
const idxName = filename.replace(/pack$/, 'idx'); | ||
if (await fs.exists(`${gitdir}/objects/pack/${idxName}`)) { | ||
const idx = await fs.read(`${gitdir}/objects/pack/${idxName}`); | ||
p = await GitPackIndex.fromIdx({ idx, getExternalRefDelta }); | ||
} else { | ||
// If the .idx file isn't available, generate one. | ||
const pack = await fs.read(`${gitdir}/objects/pack/${filename}`); | ||
p = await GitPackIndex.fromPack({ pack, getExternalRefDelta }); | ||
// Save .idx file | ||
await fs.write(`${gitdir}/objects/pack/${idxName}`, p.toBuffer()); | ||
} | ||
PackfileCache.set(filename, p); | ||
} | ||
// console.log(p) | ||
// If the packfile DOES have the oid we're looking for... | ||
if (p.hashes.includes(oid)) { | ||
// Make sure the packfile is loaded in memory | ||
if (!p.pack) { | ||
const pack = await fs.read(`${gitdir}/objects/pack/${filename}`); | ||
await p.load({ pack }); | ||
} | ||
// Get the resolved git object from the packfile | ||
return p.read({ oid, getExternalRefDelta }); | ||
} | ||
} | ||
} | ||
// Check to see if it's in shallow commits. | ||
if (!file) { | ||
let text = await fs.read(`${gitdir}/shallow`, { encoding: 'utf8' }); | ||
if (text !== null && text.includes(oid)) { | ||
throw new Error(`Failed to read git object with oid ${oid} because it is a shallow commit`); | ||
} else { | ||
throw new Error(`Failed to read git object with oid ${oid}`); | ||
} | ||
} | ||
// Finally | ||
if (!file) { | ||
throw new Error(`Failed to read git object with oid ${oid}`); | ||
} | ||
let { type, object } = GitObject.unwrap({ oid, file }); | ||
@@ -221,2 +265,16 @@ return { type, object }; | ||
// Update files | ||
// TODO: For large repos with a history of thousands of pull requests | ||
// (i.e. gitlab-ce) it would be vastly more efficient to write them | ||
// to .git/packed-refs. | ||
// The trick is to make sure we a) don't write a packed ref that is | ||
// already shadowed by a loose ref and b) don't lose any refs already | ||
// in packed-refs. Doing this efficiently may be difficult. A | ||
// solution that might work is | ||
// a) load the current packed-refs file | ||
// b) add actualRefsToWrite, overriding the existing values if present | ||
// c) enumerate all the loose refs currently in .git/refs/remotes/${remote} | ||
// d) overwrite their value with the new value. | ||
// Examples of refs we need to avoid writing in loose format for efficiency's sake | ||
// are .git/refs/remotes/origin/refs/remotes/remote_mirror_3059 | ||
// and .git/refs/remotes/origin/refs/merge-requests | ||
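// A minimal sketch of the approach described in the TODO above, kept as a
// comment because it is not implemented here. `readPackedRefs` and
// `writePackedRefs` are hypothetical helpers; only the merge order mirrors
// steps (a)-(d).
//
//   let packed = await readPackedRefs({ fs, gitdir })              // (a) Map<ref, oid>
//   for (let [ref, value] of actualRefsToWrite) {
//     packed.set(ref, value.trim())                                // (b) override
//   }
//   let loose = await fs.readdir(`${gitdir}/refs/remotes/${remote}`) // (c)
//   for (let name of loose) {
//     let ref = `refs/remotes/${remote}/${name}`
//     if (packed.has(ref)) {
//       await fs.write(`${gitdir}/${ref}`, packed.get(ref) + '\n')   // (d)
//     }
//   }
//   await writePackedRefs({ fs, gitdir, refs: packed })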
const normalizeValue = value => value.trim() + '\n'; | ||
@@ -376,4 +434,47 @@ for (let [key, value] of actualRefsToWrite) { | ||
const service = 'git-receive-pack'; | ||
let res = await this.stream({ stream: stream$$1, service }); | ||
return res; | ||
let { packetlines, packfile } = await this.stream({ | ||
stream: stream$$1, | ||
service | ||
}); | ||
// TODO: Someday, maybe we will make this a streaming parser. | ||
packfile = await pify(concat)(packfile); | ||
packetlines = await pify(concat)(packetlines); | ||
let result = {}; | ||
// Parse the response! | ||
// I'm combining the side-band-64k and regular streams | ||
// because Github returns the first line in the sideband while | ||
// git-http-server returns it without the sideband. | ||
let response = ''; | ||
let read = GitPktLine.reader(packfile); | ||
let line = await read(); | ||
while (line !== null && line !== true) { | ||
response += line.toString('utf8') + '\n'; | ||
line = await read(); | ||
} | ||
response += packetlines.toString('utf8'); | ||
let lines = response.toString('utf8').split('\n'); | ||
// We're expecting "unpack {unpack-result}" | ||
line = lines.shift(); | ||
if (!line.startsWith('unpack ')) { | ||
throw new Error(`Unparsable response from server! Expected 'unpack ok' or 'unpack [error message]' but got '${line}'`); | ||
} | ||
if (line === 'unpack ok') { | ||
result.ok = ['unpack']; | ||
} else { | ||
result.errors = [line.trim()]; | ||
} | ||
for (let line of lines) { | ||
let status = line.slice(0, 2); | ||
let refAndMessage = line.slice(3); | ||
if (status === 'ok') { | ||
result.ok = result.ok || []; | ||
result.ok.push(refAndMessage); | ||
} else if (status === 'ng') { | ||
result.errors = result.errors || []; | ||
result.errors.push(refAndMessage); | ||
} | ||
} | ||
console.log(result); | ||
return result; | ||
} | ||
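// Worked example of the parsing above (ref names and message are invented):
//
//   server report-status text, after pkt-line framing is removed:
//     unpack ok
//     ok refs/heads/master
//     ng refs/heads/next failed to lock
//
//   parsed result returned to push():
//     { ok: ['unpack', 'refs/heads/master'],
//       errors: ['refs/heads/next failed to lock'] }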
@@ -402,6 +503,5 @@ async pull(stream$$1 /*: ReadableStream */) { | ||
}); | ||
// Don't try to parse git pushes for now. | ||
if (service === 'git-receive-pack') return res; | ||
let data = await pify(concat)(res); | ||
// Parse the response! | ||
let read = GitPktLine.streamReader(res); | ||
let read = GitPktLine.reader(data); | ||
// And now for the ridiculous side-band-64k protocol | ||
@@ -414,4 +514,6 @@ let packetlines = new PassThrough(); | ||
let line = await read(); | ||
// Skip over flush packets | ||
if (line === null) return nextBit(); | ||
// A made up convention to signal there's no more to read. | ||
if (line === null) { | ||
if (line === true) { | ||
packetlines.end(); | ||
@@ -452,8 +554,5 @@ progress.end(); | ||
}; | ||
} /*: { | ||
stream: ReadableStream, | ||
service: string | ||
} */ | ||
} | ||
} | ||
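// For reference, how a single side-band-64k pkt-line is framed, matching the
// channel handling in stream() above (the progress text is invented):
//
//   '001a' + '\x02' + 'Counting objects: 10\n'
//    ^ 4 hex digits: total length including these 4 bytes
//             ^ channel byte: 1 = pack data, 2 = progress, 3 = fatal error
//
// Channel 1 payloads are written to the packfile stream, channel 2 to the
// progress stream, and lines without a recognized channel byte fall through
// to the packetlines stream.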
export { GitConfigManager, GitShallowManager, GitIndexManager, GitIgnoreManager, GitObjectManager, GitRefManager, GitRemoteHTTP }; |
import path from 'path'; | ||
import pify from 'pify'; | ||
import { sleep } from './utils.js'; | ||
import { Buffer } from 'buffer'; | ||
import { Buffer as Buffer$1 } from 'buffer'; | ||
import { key, message, sign, util } from 'openpgp/dist/openpgp.min.js'; | ||
@@ -15,2 +15,7 @@ import ini from 'ini'; | ||
import { readBytes } from 'gartal'; | ||
import applyDelta from 'git-apply-delta'; | ||
import listpack from 'git-list-pack'; | ||
import crc32 from 'crc/lib/crc32.js'; | ||
import { PassThrough } from 'stream'; | ||
import marky from 'marky'; | ||
import sortby from 'lodash/sortBy'; | ||
@@ -112,2 +117,12 @@ | ||
/** | ||
* Read a directory without throwing an error if the directory doesn't exist | ||
*/ | ||
async readdir(filepath) { | ||
try { | ||
return await this._readdir(filepath); | ||
} catch (err) { | ||
return []; | ||
} | ||
} | ||
/** | ||
* Return a flat list of all the files nested inside a directory | ||
@@ -219,3 +234,3 @@ * | ||
this._commit = commit; | ||
} else if (Buffer.isBuffer(commit)) { | ||
} else if (Buffer$1.isBuffer(commit)) { | ||
this._commit = commit.toString('utf8'); | ||
@@ -241,3 +256,3 @@ } else if (typeof commit === 'object') { | ||
toObject() { | ||
return Buffer.from(this._commit, 'utf8'); | ||
return Buffer$1.from(this._commit, 'utf8'); | ||
} | ||
@@ -301,3 +316,3 @@ | ||
} | ||
if (obj.parent) { | ||
if (obj.parent && obj.parent.length) { | ||
headers += 'parent'; | ||
@@ -448,12 +463,17 @@ for (let p of obj.parent) { | ||
class GitObject { | ||
static hash({ type, object }) /*: Promise<string> */{ | ||
let buffer$$1 = Buffer$1.concat([Buffer$1.from(`${type} ${object.byteLength.toString()}\0`), Buffer$1.from(object)]); | ||
let oid = shasum(buffer$$1); | ||
return oid; | ||
} | ||
static wrap({ type, object /*: {type: string, object: Buffer} */ }) { | ||
let buffer$$1 = Buffer.concat([Buffer.from(type + ' '), Buffer.from(object.byteLength.toString()), Buffer.from([0]), Buffer.from(object)]); | ||
let buffer$$1 = Buffer$1.concat([Buffer$1.from(`${type} ${object.byteLength.toString()}\0`), object]); | ||
let oid = shasum(buffer$$1); | ||
return { | ||
oid, | ||
file: Buffer.from(pako.deflate(buffer$$1)) | ||
file: Buffer$1.from(pako.deflate(buffer$$1)) | ||
}; | ||
} | ||
static unwrap({ oid, file /*: {oid: string, file: Buffer} */ }) { | ||
let inflated = Buffer.from(pako.inflate(file)); | ||
let inflated = Buffer$1.from(pako.inflate(file)); | ||
if (oid) { | ||
@@ -476,3 +496,3 @@ let sha = shasum(inflated); | ||
type, | ||
object: Buffer.from(inflated.slice(i + 1)) | ||
object: Buffer$1.from(inflated.slice(i + 1)) | ||
}; | ||
@@ -540,3 +560,3 @@ } | ||
static flush() { | ||
return Buffer.from('0000', 'utf8'); | ||
return Buffer$1.from('0000', 'utf8'); | ||
} | ||
@@ -546,7 +566,7 @@ | ||
if (typeof line === 'string') { | ||
line = Buffer.from(line); | ||
line = Buffer$1.from(line); | ||
} | ||
let length = line.length + 4; | ||
let hexlength = pad(4, length.toString(16), '0'); | ||
return Buffer.concat([Buffer.from(hexlength, 'utf8'), line]); | ||
return Buffer$1.concat([Buffer$1.from(hexlength, 'utf8'), line]); | ||
} | ||
@@ -563,7 +583,14 @@ | ||
} | ||
static streamReader(stream /*: ReadableStream */) { | ||
static streamReader(stream$$1 /*: ReadableStream */) { | ||
let done = false; | ||
stream$$1.on('end', () => { | ||
console.log('THE END I TELL YOU...'); | ||
done = true; | ||
}); | ||
stream$$1.resume(); | ||
return async function read() { | ||
if (done) return null; | ||
let hexlength, length, bytes; | ||
try { | ||
hexlength = await readBytes(stream, 4); | ||
hexlength = await readBytes(stream$$1, 4); | ||
} catch (err) { | ||
@@ -577,3 +604,3 @@ // No more file to read | ||
// otherwise return the packet content | ||
bytes = await readBytes(stream, length - 4); | ||
bytes = await readBytes(stream$$1, length - 4); | ||
return bytes; | ||
@@ -584,97 +611,395 @@ }; | ||
var _extends$1 = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key$$1 in source) { if (Object.prototype.hasOwnProperty.call(source, key$$1)) { target[key$$1] = source[key$$1]; } } } return target; }; | ||
function buffer2stream(buffer$$1) { | ||
let stream$$1 = new PassThrough(); | ||
stream$$1.end(buffer$$1); | ||
return stream$$1; | ||
} | ||
function parseIDX(buffer$$1) { | ||
let reader = new BufferCursor(buffer$$1); | ||
let magic = reader.slice(4).toString('hex'); | ||
// Check for IDX v2 magic number | ||
if (magic !== 'ff744f63') { | ||
return; // undefined | ||
} | ||
let version = reader.readUInt32BE(); | ||
if (version !== 2) { | ||
throw new Error(`Unable to read version ${version} packfile IDX. (Only version 2 supported)`); | ||
} | ||
// Verify checksums | ||
let shaComputed = shasum(buffer$$1.slice(0, -20)); | ||
let shaClaimed = buffer$$1.slice(-20).toString('hex'); | ||
if (shaClaimed !== shaComputed) { | ||
throw new Error(`Invalid checksum in IDX buffer: expected ${shaClaimed} but saw ${shaComputed}`); | ||
} | ||
if (buffer$$1.byteLength > 2048 * 1024 * 1024) { | ||
throw new Error(`To keep implementation simple, I haven't implemented the layer 5 feature needed to support packfiles > 2GB in size.`); | ||
} | ||
let fanout = []; | ||
for (let i = 0; i < 256; i++) { | ||
fanout.push(reader.readUInt32BE()); | ||
} | ||
let size = fanout[255]; | ||
// For now we'll parse the whole thing. We can optimize later if we need to. | ||
let hashes = []; | ||
for (let i = 0; i < size; i++) { | ||
hashes.push(reader.slice(20).toString('hex')); | ||
} | ||
let crcs = new Map(); | ||
for (let i = 0; i < size; i++) { | ||
crcs.set(hashes[i], reader.readUInt32BE()); | ||
} | ||
let offsets = new Map(); | ||
for (let i = 0; i < size; i++) { | ||
offsets.set(hashes[i], reader.readUInt32BE()); | ||
} | ||
let packfileSha = reader.slice(20).toString('hex'); | ||
// This part is gratuitous, but since we lack a good way to unzip arbitrary streams with extra data at the end in the browser... | ||
let lengths = Array.from(offsets); | ||
lengths.sort((a, b) => a[1] - b[1]); // List objects in order by offset | ||
let sizes = new Map(); | ||
let slices = new Map(); | ||
for (let i = 0; i < size - 1; i++) { | ||
sizes.set(lengths[i][0], lengths[i + 1][1] - lengths[i][1]); | ||
slices.set(lengths[i][0], [lengths[i][1], lengths[i + 1][1]]); | ||
} | ||
slices.set(lengths[size - 1][0], [lengths[size - 1][1], undefined]); | ||
return { size, fanout, hashes, crcs, packfileSha, slices }; | ||
function decodeVarInt(reader) { | ||
let bytes = []; | ||
let byte = 0; | ||
let multibyte = 0; | ||
do { | ||
byte = reader.readUInt8(); | ||
// We keep bits 6543210 | ||
const lastSeven = byte & 0b01111111; | ||
bytes.push(lastSeven); | ||
// Whether the next byte is part of the variable-length encoded number | ||
// is encoded in bit 7 | ||
multibyte = byte & 0b10000000; | ||
} while (multibyte); | ||
// Now that all the bytes are in big-endian order, | ||
// alternate shifting the bits left by 7 and OR-ing the next byte. | ||
// And... do a weird increment-by-one thing that I don't quite understand. | ||
return bytes.reduce((a, b) => a + 1 << 7 | b, -1); | ||
} | ||
// I'm pretty much copying this one from the git C source code, | ||
// because it makes no sense. | ||
function otherVarIntDecode(reader, startWith) { | ||
let result = startWith; | ||
let shift = 4; | ||
let byte = null; | ||
do { | ||
byte = reader.readUInt8(); | ||
result |= (byte & 0b01111111) << shift; | ||
shift += 7; | ||
} while (byte & 0b10000000); | ||
return result; | ||
} | ||
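// Worked example of decodeVarInt (byte values invented), as used for the
// negative relative offset of an ofs-delta object:
//
//   input bytes: 0x91, 0x2e
//   0x91 = 0b10010001 -> continuation bit set,   low 7 bits = 17
//   0x2e = 0b00101110 -> continuation bit clear, low 7 bits = 46
//   reduce, starting from -1:
//     ((-1 + 1) << 7) | 17 = 17
//     ((17 + 1) << 7) | 46 = 2304 + 46 = 2350
//   so the delta's base object starts 2350 bytes before this object's offset.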
/** @ignore */ | ||
class GitPackfile { | ||
constructor({ size, fanout, hashes, crcs, packfileSha, slices, pack }) { | ||
// Compare checksums | ||
let shaClaimed = pack.slice(-20).toString('hex'); | ||
if (packfileSha !== shaClaimed) { | ||
throw new Error(`Invalid packfile shasum in IDX buffer: expected ${packfileSha} but saw ${shaClaimed}`); | ||
class GitPackIndex { | ||
constructor(stuff) { | ||
Object.assign(this, stuff); | ||
this.offsetCache = {}; | ||
} | ||
static async fromIdx({ idx, getExternalRefDelta }) { | ||
let reader = new BufferCursor(idx); | ||
let magic = reader.slice(4).toString('hex'); | ||
// Check for IDX v2 magic number | ||
if (magic !== 'ff744f63') { | ||
return; // undefined | ||
} | ||
Object.assign(this, { | ||
size, | ||
fanout, | ||
let version = reader.readUInt32BE(); | ||
if (version !== 2) { | ||
throw new Error(`Unable to read version ${version} packfile IDX. (Only version 2 supported)`); | ||
} | ||
// Verify checksums | ||
let shaComputed = shasum(idx.slice(0, -20)); | ||
let shaClaimed = idx.slice(-20).toString('hex'); | ||
if (shaClaimed !== shaComputed) { | ||
throw new Error(`Invalid checksum in IDX buffer: expected ${shaClaimed} but saw ${shaComputed}`); | ||
} | ||
if (idx.byteLength > 2048 * 1024 * 1024) { | ||
throw new Error(`To keep implementation simple, I haven't implemented the layer 5 feature needed to support packfiles > 2GB in size.`); | ||
} | ||
let fanout = []; | ||
for (let i = 0; i < 256; i++) { | ||
fanout.push(reader.readUInt32BE()); | ||
} | ||
let size = fanout[255]; | ||
// For now we'll parse the whole thing. We can optimize later if we need to. | ||
let hashes = []; | ||
for (let i = 0; i < size; i++) { | ||
hashes.push(reader.slice(20).toString('hex')); | ||
} | ||
let crcs = {}; | ||
for (let i = 0; i < size; i++) { | ||
crcs[hashes[i]] = reader.readUInt32BE(); | ||
} | ||
let offsets = {}; | ||
for (let i = 0; i < size; i++) { | ||
offsets[hashes[i]] = reader.readUInt32BE(); | ||
} | ||
let packfileSha = reader.slice(20).toString('hex'); | ||
return new GitPackIndex({ | ||
hashes, | ||
crcs, | ||
offsets, | ||
packfileSha, | ||
slices, | ||
pack | ||
getExternalRefDelta | ||
}); | ||
} | ||
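// Summary of the .idx (version 2) layout that fromIdx() reads and toBuffer()
// writes, taken directly from the code:
//
//   magic number      4 bytes       ff 74 4f 63
//   version           4 bytes       00 00 00 02
//   fanout table    256 x 4 bytes   cumulative object counts by first hash byte
//   object hashes     N x 20 bytes  sorted SHA-1s
//   CRC-32 values     N x 4 bytes
//   pack offsets      N x 4 bytes
//   packfile SHA-1   20 bytes
//   idx SHA-1        20 bytes       checksum of everything above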
static async fromIDX({ idx, pack }) { | ||
return new GitPackfile(_extends$1({ pack }, parseIDX(idx))); | ||
static async fromPack({ pack, getExternalRefDelta }) { | ||
const listpackTypes = { | ||
1: 'commit', | ||
2: 'tree', | ||
3: 'blob', | ||
4: 'tag', | ||
6: 'ofs-delta', | ||
7: 'ref-delta' | ||
}; | ||
let offsetToObject = {}; | ||
// Older packfiles do NOT use the shasum of the pack itself, | ||
// so it is recommended to just use whatever bytes are in the trailer. | ||
// Source: https://github.com/git/git/commit/1190a1acf800acdcfd7569f87ac1560e2d077414 | ||
// let packfileSha = shasum(pack.slice(0, -20)) | ||
let packfileSha = pack.slice(-20).toString('hex'); | ||
let hashes = []; | ||
let crcs = {}; | ||
let offsets = {}; | ||
let totalObjectCount = null; | ||
let lastPercent = null; | ||
let times = { | ||
hash: 0, | ||
readSlice: 0, | ||
offsets: 0, | ||
crcs: 0, | ||
sort: 0 | ||
}; | ||
let histogram = { | ||
commit: 0, | ||
tree: 0, | ||
blob: 0, | ||
tag: 0, | ||
'ofs-delta': 0, | ||
'ref-delta': 0 | ||
}; | ||
let bytesProcessed = 0; | ||
console.log('Indexing objects'); | ||
console.log(`percent\tmilliseconds\tkilobytes\tbytesProcessed\tcommits\ttrees\tblobs\ttags\tofs-deltas\tref-deltas`); | ||
marky.mark('total'); | ||
marky.mark('offsets'); | ||
marky.mark('percent'); | ||
await new Promise((resolve, reject) => { | ||
buffer2stream(pack).pipe(listpack()).on('data', async ({ data, type, reference, offset, num }) => { | ||
if (totalObjectCount === null) totalObjectCount = num; | ||
let percent = Math.floor((totalObjectCount - num) * 100 / totalObjectCount); | ||
if (percent !== lastPercent) { | ||
console.log(`${percent}%\t${Math.floor(marky.stop('percent').duration)}\t${Math.floor(process.memoryUsage().rss / 1000)}\t${bytesProcessed}\t${histogram.commit}\t${histogram.tree}\t${histogram.blob}\t${histogram.tag}\t${histogram['ofs-delta']}\t${histogram['ref-delta']}`); | ||
histogram = { | ||
commit: 0, | ||
tree: 0, | ||
blob: 0, | ||
tag: 0, | ||
'ofs-delta': 0, | ||
'ref-delta': 0 | ||
}; | ||
bytesProcessed = 0; | ||
marky.mark('percent'); | ||
} | ||
lastPercent = percent; | ||
// Change type from a number to a meaningful string | ||
type = listpackTypes[type]; | ||
histogram[type]++; | ||
bytesProcessed += data.byteLength; | ||
if (['commit', 'tree', 'blob', 'tag'].includes(type)) { | ||
offsetToObject[offset] = { | ||
type, | ||
offset | ||
}; | ||
} else if (type === 'ofs-delta') { | ||
offsetToObject[offset] = { | ||
type, | ||
offset | ||
}; | ||
} else if (type === 'ref-delta') { | ||
offsetToObject[offset] = { | ||
type, | ||
offset | ||
}; | ||
} | ||
if (num === 0) resolve(); | ||
}); | ||
}); | ||
times['offsets'] = Math.floor(marky.stop('offsets').duration); | ||
console.log('Computing CRCs'); | ||
marky.mark('crcs'); | ||
// We need to know the lengths of the slices to compute the CRCs. | ||
let offsetArray = Object.keys(offsetToObject).map(Number); | ||
for (let [i, start] of offsetArray.entries()) { | ||
let end = i + 1 === offsetArray.length ? pack.byteLength - 20 : offsetArray[i + 1]; | ||
let o = offsetToObject[start]; | ||
let crc = crc32(pack.slice(start, end)); | ||
o.end = end; | ||
o.crc = crc; | ||
} | ||
times['crcs'] = Math.floor(marky.stop('crcs').duration); | ||
// We don't have the hashes yet. But we can generate them using the .readSlice function! | ||
const p = new GitPackIndex({ | ||
pack, | ||
packfileSha, | ||
crcs, | ||
hashes, | ||
offsets, | ||
getExternalRefDelta | ||
}); | ||
// Resolve deltas and compute the oids | ||
console.log('Resolving deltas'); | ||
console.log(`percent2\tmilliseconds2\tkilobytes2\tcallsToReadSlice\tcallsToGetExternal`); | ||
marky.mark('percent'); | ||
lastPercent = null; | ||
let count = 0; | ||
let callsToReadSlice = 0; | ||
let callsToGetExternal = 0; | ||
let timeByDepth = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; | ||
let objectsByDepth = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; | ||
for (let offset in offsetToObject) { | ||
offset = Number(offset); | ||
let percent = Math.floor(count++ * 100 / totalObjectCount); | ||
if (percent !== lastPercent) { | ||
console.log(`${percent}%\t${Math.floor(marky.stop('percent').duration)}\t${Math.floor(process.memoryUsage().rss / 1000)}\t${callsToReadSlice}\t${callsToGetExternal}`); | ||
marky.mark('percent'); | ||
callsToReadSlice = 0; | ||
callsToGetExternal = 0; | ||
} | ||
lastPercent = percent; | ||
let o = offsetToObject[offset]; | ||
if (o.oid) continue; | ||
try { | ||
p.readDepth = 0; | ||
p.externalReadDepth = 0; | ||
marky.mark('readSlice'); | ||
let { type, object } = await p.readSlice({ start: offset }); | ||
let time = marky.stop('readSlice').duration; | ||
times.readSlice += time; | ||
callsToReadSlice += p.readDepth; | ||
callsToGetExternal += p.externalReadDepth; | ||
timeByDepth[p.readDepth] += time; | ||
objectsByDepth[p.readDepth] += 1; | ||
marky.mark('hash'); | ||
let oid = GitObject.hash({ type, object }); | ||
times.hash += marky.stop('hash').duration; | ||
o.oid = oid; | ||
hashes.push(oid); | ||
offsets[oid] = offset; | ||
crcs[oid] = o.crc; | ||
} catch (err) { | ||
console.log('ERROR', err); | ||
continue; | ||
} | ||
} | ||
marky.mark('sort'); | ||
hashes.sort(); | ||
times['sort'] = Math.floor(marky.stop('sort').duration); | ||
let totalElapsedTime = marky.stop('total').duration; | ||
times.hash = Math.floor(times.hash); | ||
times.readSlice = Math.floor(times.readSlice); | ||
times.misc = Math.floor(Object.values(times).reduce((a, b) => a - b, totalElapsedTime)); | ||
console.log(Object.keys(times).join('\t')); | ||
console.log(Object.values(times).join('\t')); | ||
console.log('by depth:'); | ||
console.log([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11].join('\t')); | ||
console.log(objectsByDepth.slice(0, 12).join('\t')); | ||
console.log(timeByDepth.map(Math.floor).slice(0, 12).join('\t')); | ||
// CONTINUE HERE: Probably we need an LRU cache to speed up deltas. | ||
// We could plot a histogram of oids to see how many oids we need to cache to have a big impact. | ||
return p; | ||
} | ||
toBuffer() { | ||
let buffers = []; | ||
let write = (str, encoding) => { | ||
buffers.push(Buffer.from(str, encoding)); | ||
}; | ||
// Write out IDX v2 magic number | ||
write('ff744f63', 'hex'); | ||
// Write out version number 2 | ||
write('00000002', 'hex'); | ||
// Write fanout table | ||
let fanoutBuffer = new BufferCursor(Buffer.alloc(256 * 4)); | ||
for (let i = 0; i < 256; i++) { | ||
let count = 0; | ||
for (let hash of this.hashes) { | ||
if (parseInt(hash.slice(0, 2), 16) <= i) count++; | ||
} | ||
fanoutBuffer.writeUInt32BE(count); | ||
} | ||
buffers.push(fanoutBuffer.buffer); | ||
// Write out hashes | ||
for (let hash of this.hashes) { | ||
write(hash, 'hex'); | ||
} | ||
// Write out crcs | ||
let crcsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4)); | ||
for (let hash of this.hashes) { | ||
crcsBuffer.writeUInt32BE(this.crcs[hash]); | ||
} | ||
buffers.push(crcsBuffer.buffer); | ||
// Write out offsets | ||
let offsetsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4)); | ||
for (let hash of this.hashes) { | ||
offsetsBuffer.writeUInt32BE(this.offsets[hash]); | ||
} | ||
buffers.push(offsetsBuffer.buffer); | ||
// Write out packfile checksum | ||
write(this.packfileSha, 'hex'); | ||
// Write out shasum | ||
let totalBuffer = Buffer.concat(buffers); | ||
let sha = shasum(totalBuffer); | ||
let shaBuffer = Buffer.alloc(20); | ||
shaBuffer.write(sha, 'hex'); | ||
return Buffer.concat([totalBuffer, shaBuffer]); | ||
} | ||
async load({ pack }) { | ||
this.pack = pack; | ||
} | ||
async unload() { | ||
this.pack = null; | ||
} | ||
async read({ oid /*: {oid: string} */ }) { | ||
if (!this.slices.has(oid)) return null; | ||
let raw = this.pack.slice(...this.slices.get(oid)); | ||
console.log(raw); | ||
/* | ||
- The header is followed by number of object entries, each of | ||
which looks like this: | ||
(undeltified representation) | ||
n-byte type and length (3-bit type, (n-1)*7+4-bit length) | ||
compressed data | ||
(deltified representation) | ||
n-byte type and length (3-bit type, (n-1)*7+4-bit length) | ||
20-byte base object name if OBJ_REF_DELTA or a negative relative | ||
offset from the delta object's position in the pack if this | ||
is an OBJ_OFS_DELTA object | ||
compressed delta data | ||
Observation: length of each object is encoded in a variable | ||
length format and is not constrained to 32-bit or anything. | ||
*/ | ||
if (!this.offsets[oid]) { | ||
if (this.getExternalRefDelta) { | ||
this.externalReadDepth++; | ||
return this.getExternalRefDelta(oid); | ||
} else { | ||
throw new Error(`Could not read object ${oid} from packfile`); | ||
} | ||
} | ||
let start = this.offsets[oid]; | ||
return this.readSlice({ start }); | ||
} | ||
async readSlice({ start }) { | ||
if (this.offsetCache[start]) return this.offsetCache[start]; | ||
this.readDepth++; | ||
const types = { | ||
0b0010000: 'commit', | ||
0b0100000: 'tree', | ||
0b0110000: 'blob', | ||
0b1000000: 'tag', | ||
0b1100000: 'ofs_delta', | ||
0b1110000: 'ref_delta' | ||
}; | ||
if (!this.pack) { | ||
throw new Error('Tried to read from a GitPackIndex with no packfile loaded into memory'); | ||
} | ||
let raw = this.pack.slice(start); | ||
let reader = new BufferCursor(raw); | ||
let byte = reader.readUInt8(); | ||
// Object type is encoded in bits 654 | ||
let btype = byte & 0b1110000; | ||
let type = types[btype]; | ||
if (type === undefined) { | ||
throw new Error('Unrecognized type: 0b' + btype.toString(2)); | ||
} | ||
// The length encoding gets complicated. | ||
// Last four bits of length is encoded in bits 3210 | ||
let lastFour = byte & 0b1111; | ||
let length = lastFour; | ||
// Whether the next byte is part of the variable-length encoded number | ||
// is encoded in bit 7 | ||
let multibyte = byte & 0b10000000; | ||
if (multibyte) { | ||
length = otherVarIntDecode(reader, lastFour); | ||
} | ||
let base = null; | ||
let object = null; | ||
// Handle deltified objects | ||
if (type === 'ofs_delta') { | ||
let offset = decodeVarInt(reader); | ||
let baseOffset = start - offset; | ||
({ object: base, type } = await this.readSlice({ start: baseOffset })); | ||
} | ||
if (type === 'ref_delta') { | ||
let oid = reader.slice(20).toString('hex'); | ||
({ object: base, type } = await this.read({ oid })); | ||
} | ||
// Handle undeltified objects | ||
let buffer$$1 = raw.slice(reader.tell()); | ||
object = Buffer.from(pako.inflate(buffer$$1)); | ||
// Assert that the object length is as expected. | ||
if (object.byteLength !== length) { | ||
throw new Error(`Packfile told us object would have length ${length} but it had length ${object.byteLength}`); | ||
} | ||
if (base) { | ||
object = applyDelta(object, base); | ||
} | ||
// Cache the result based on depth. | ||
if (this.readDepth > 3) { | ||
// hand tuned for speed / memory usage tradeoff | ||
this.offsetCache[start] = { type, object }; | ||
} | ||
return { type, object }; | ||
} | ||
} | ||
@@ -787,3 +1112,3 @@ | ||
this._dirty = false; | ||
if (Buffer.isBuffer(index)) { | ||
if (Buffer$1.isBuffer(index)) { | ||
this._entries = parseBuffer(index); | ||
@@ -853,3 +1178,3 @@ } else if (index === null) { | ||
toObject() { | ||
let header = Buffer.alloc(12); | ||
let header = Buffer$1.alloc(12); | ||
let writer = new BufferCursor(header); | ||
@@ -859,6 +1184,6 @@ writer.write('DIRC', 4, 'utf8'); | ||
writer.writeUInt32BE(this.entries.length); | ||
let body = Buffer.concat(this.entries.map(entry => { | ||
let body = Buffer$1.concat(this.entries.map(entry => { | ||
// the fixed length + the filename + at least one null char => align by 8 | ||
let length = Math.ceil((62 + entry.path.length + 1) / 8) * 8; | ||
let written = Buffer.alloc(length); | ||
let written = Buffer$1.alloc(length); | ||
let writer = new BufferCursor(written); | ||
@@ -886,5 +1211,5 @@ let ctimeMilliseconds = entry.ctime.valueOf(); | ||
})); | ||
let main = Buffer.concat([header, body]); | ||
let main = Buffer$1.concat([header, body]); | ||
let sum = shasum(main); | ||
return Buffer.concat([main, Buffer.from(sum, 'hex')]); | ||
return Buffer$1.concat([main, Buffer$1.from(sum, 'hex')]); | ||
} | ||
@@ -945,3 +1270,3 @@ } | ||
constructor(entries /*: any */) { | ||
if (Buffer.isBuffer(entries)) { | ||
if (Buffer$1.isBuffer(entries)) { | ||
this._entries = parseBuffer$1(entries); | ||
@@ -961,9 +1286,9 @@ } else if (Array.isArray(entries)) { | ||
toObject() { | ||
return Buffer.concat(this._entries.map(entry => { | ||
let mode = Buffer.from(entry.mode.replace(/^0/, '')); | ||
let space = Buffer.from(' '); | ||
let path$$1 = Buffer.from(entry.path, { encoding: 'utf8' }); | ||
let nullchar = Buffer.from([0]); | ||
let oid = Buffer.from(entry.oid.match(/../g).map(n => parseInt(n, 16))); | ||
return Buffer.concat([mode, space, path$$1, nullchar, oid]); | ||
return Buffer$1.concat(this._entries.map(entry => { | ||
let mode = Buffer$1.from(entry.mode.replace(/^0/, '')); | ||
let space = Buffer$1.from(' '); | ||
let path$$1 = Buffer$1.from(entry.path, { encoding: 'utf8' }); | ||
let nullchar = Buffer$1.from([0]); | ||
let oid = Buffer$1.from(entry.oid.match(/../g).map(n => parseInt(n, 16))); | ||
return Buffer$1.concat([mode, space, path$$1, nullchar, oid]); | ||
})); | ||
@@ -981,2 +1306,2 @@ } | ||
export { FileSystem, GitCommit, SignedGitCommit, GitConfig, GitObject, GitPktLine, GitPackfile, GitIndex, GitTree }; | ||
export { FileSystem, GitCommit, SignedGitCommit, GitConfig, GitObject, GitPktLine, GitPackIndex, GitIndex, GitTree }; |
@@ -84,4 +84,4 @@ import path from 'path'; | ||
var files = ["dist", "cli.js"]; | ||
var dependencies = { "async-lock": "^1.0.0", "await-stream-ready": "^1.0.1", "babel-runtime": "^6.26.0", "buffer": "^5.0.7", "buffer-peek-stream": "^1.0.1", "buffercursor": "0.0.12", "gartal": "^1.1.2", "git-apply-delta": "0.0.7", "git-list-pack": "0.0.10", "ignore": "^3.3.6", "ini": "^1.3.4", "marky": "^1.2.0", "minimisted": "^2.0.0", "openpgp": "^2.5.10", "pad": "^2.0.1", "pako": "^1.0.5", "pify": "^3.0.0", "shasum": "^1.0.2", "simple-concat": "^1.0.0", "simple-get": "^2.7.0", "through2": "^2.0.3" }; | ||
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "esdoc": "^1.0.4", "esdoc-ecmascript-proposal-plugin": "^1.0.0", "esdoc-importpath-plugin": "^1.0.1", "esdoc-standard-plugin": "^1.0.0", "husky": "^0.14.3", "jest": "^21.2.1", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^1.7.1", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^7.0.3", "rollup": "^0.51.6", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watch": "^1.0.2", "watchify": "^3.9.0", "semantic-release": "^8.2.0" }; | ||
var dependencies = { "async-lock": "^1.0.0", "await-stream-ready": "^1.0.1", "babel-runtime": "^6.26.0", "buffer": "^5.0.7", "buffer-peek-stream": "^1.0.1", "buffercursor": "0.0.12", "crc": "^3.5.0", "gartal": "^1.1.2", "git-apply-delta": "0.0.7", "git-list-pack": "0.0.10", "ignore": "^3.3.6", "ini": "^1.3.4", "marky": "^1.2.0", "minimisted": "^2.0.0", "openpgp": "^2.5.10", "pad": "^2.0.1", "pako": "^1.0.5", "pify": "^3.0.0", "shasum": "^1.0.2", "simple-concat": "^1.0.0", "simple-get": "^2.7.0", "through2": "^2.0.3" }; | ||
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "esdoc": "^1.0.4", "esdoc-ecmascript-proposal-plugin": "^1.0.0", "esdoc-importpath-plugin": "^1.0.1", "esdoc-standard-plugin": "^1.0.0", "husky": "^0.14.3", "jest": "^22.0.4", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^2.0.0", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^8.0.0", "rollup": "^0.53.0", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "semantic-release": "^8.2.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watch": "^1.0.2", "watchify": "^3.9.0" }; | ||
var ava = { "source": ["dist/for-node/*"] }; | ||
@@ -88,0 +88,0 @@ var browserify = { "transform": ["browserify-shim"] }; |
@@ -17,2 +17,4 @@ 'use strict'; | ||
var marky = _interopDefault(require('marky')); | ||
var pify = _interopDefault(require('pify')); | ||
var concat = _interopDefault(require('simple-concat')); | ||
var utils_js = require('./utils.js'); | ||
@@ -194,3 +196,5 @@ var pad = _interopDefault(require('pad')); | ||
}); | ||
await unpack({ fs, gitdir, inputStream: response.packfile, onprogress }); | ||
let packfile = await pify(concat)(response.packfile); | ||
let packfileSha = packfile.slice(-20).toString('hex'); | ||
await fs.write(path.join(gitdir, `objects/pack/pack-${packfileSha}.pack`), packfile); | ||
} | ||
@@ -261,3 +265,3 @@ | ||
// Github uses all ref-deltas when I fetch packfiles instead of all ofs-deltas. Nice! | ||
const capabilities = `multi_ack_detailed no-done side-band-64k thin-pack agent=git/${utils_js.pkg.name}@${utils_js.pkg.version}${relative ? ' deepen-relative' : ''}`; | ||
const capabilities = `multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/${utils_js.pkg.name}@${utils_js.pkg.version}${relative ? ' deepen-relative' : ''}`; | ||
let packstream = new stream.PassThrough(); | ||
@@ -313,23 +317,2 @@ packstream.write(models_js.GitPktLine.encode(`want ${want} ${capabilities}\n`)); | ||
const types = { | ||
1: 'commit', | ||
2: 'tree', | ||
3: 'blob', | ||
4: 'tag', | ||
6: 'ofs-delta', | ||
7: 'ref-delta' | ||
}; | ||
function parseVarInt(buffer$$1 /*: Buffer */) { | ||
let n = 0; | ||
for (var i = 0; i < buffer$$1.byteLength; i++) { | ||
n = (buffer$$1[i] & 0b01111111) + (n << 7); | ||
if ((buffer$$1[i] & 0b10000000) === 0) { | ||
if (i !== buffer$$1.byteLength - 1) throw new Error('Invalid varint buffer'); | ||
return n; | ||
} | ||
} | ||
throw new Error('Invalid varint buffer'); | ||
} | ||
/** | ||
@@ -344,119 +327,2 @@ * @ignore | ||
*/ | ||
async function unpack({ | ||
dir, | ||
gitdir = path.join(dir, '.git'), | ||
fs: _fs, | ||
inputStream, | ||
onprogress | ||
}) { | ||
const fs = new models_js.FileSystem(_fs); | ||
return new Promise(function (resolve, reject) { | ||
// Read header | ||
peek(inputStream, 12, (err, data, inputStream) => { | ||
if (err) return reject(err); | ||
let iden = data.slice(0, 4).toString('utf8'); | ||
if (iden !== 'PACK') { | ||
throw new Error(`Packfile started with '${iden}'. Expected 'PACK'`); | ||
} | ||
let ver = data.slice(4, 8).toString('hex'); | ||
if (ver !== '00000002') { | ||
throw new Error(`Unknown packfile version '${ver}'. Expected 00000002.`); | ||
} | ||
// Read a 4 byte (32-bit) int | ||
let numObjects = data.readInt32BE(8); | ||
if (onprogress !== undefined) { | ||
onprogress({ loaded: 0, total: numObjects, lengthComputable: true }); | ||
} | ||
if (numObjects === 0) return resolve(); | ||
// And on our merry way | ||
let totalTime = 0; | ||
let totalApplyDeltaTime = 0; | ||
let totalWriteFileTime = 0; | ||
let totalReadFileTime = 0; | ||
let offsetMap = new Map(); | ||
inputStream.pipe(listpack()).pipe(through2.obj(async ({ data, type, reference, offset, num }, enc, next) => { | ||
type = types[type]; | ||
marky.mark(`${type} #${num} ${data.length}B`); | ||
if (type === 'ref-delta') { | ||
let oid = buffer.Buffer.from(reference).toString('hex'); | ||
try { | ||
marky.mark(`readFile`); | ||
let { object, type } = await managers_js.GitObjectManager.read({ | ||
fs, | ||
gitdir, | ||
oid | ||
}); | ||
totalReadFileTime += marky.stop(`readFile`).duration; | ||
marky.mark(`applyDelta`); | ||
let result = applyDelta(data, object); | ||
totalApplyDeltaTime += marky.stop(`applyDelta`).duration; | ||
marky.mark(`writeFile`); | ||
let newoid = await managers_js.GitObjectManager.write({ | ||
fs, | ||
gitdir, | ||
type, | ||
object: result | ||
}); | ||
totalWriteFileTime += marky.stop(`writeFile`).duration; | ||
// console.log(`${type} ${newoid} ref-delta ${oid}`) | ||
offsetMap.set(offset, newoid); | ||
} catch (err) { | ||
throw new Error(`Could not find object ${reference} ${oid} that is referenced by a ref-delta object in packfile at byte offset ${offset}.`); | ||
} | ||
} else if (type === 'ofs-delta') { | ||
// Note: this might be not working because offsets might not be | ||
// guaranteed to be on object boundaries? In which case we'd need | ||
// to write the packfile to disk first, I think. | ||
// For now I've "solved" it by simply not advertising ofs-delta as a capability | ||
// during the HTTP request, so Github will only send ref-deltas not ofs-deltas. | ||
let absoluteOffset = offset - parseVarInt(reference); | ||
let referenceOid = offsetMap.get(absoluteOffset); | ||
// console.log(`${offset} ofs-delta ${absoluteOffset} ${referenceOid}`) | ||
let { type, object } = await managers_js.GitObjectManager.read({ | ||
fs, | ||
gitdir, | ||
oid: referenceOid | ||
}); | ||
let result = applyDelta(data, object); | ||
let oid = await managers_js.GitObjectManager.write({ | ||
fs, | ||
gitdir, | ||
type, | ||
object: result | ||
}); | ||
// console.log(`${offset} ${type} ${oid} ofs-delta ${referenceOid}`) | ||
offsetMap.set(offset, oid); | ||
} else { | ||
marky.mark(`writeFile`); | ||
let oid = await managers_js.GitObjectManager.write({ | ||
fs, | ||
gitdir, | ||
type, | ||
object: data | ||
}); | ||
totalWriteFileTime += marky.stop(`writeFile`).duration; | ||
// console.log(`${offset} ${type} ${oid}`) | ||
offsetMap.set(offset, oid); | ||
} | ||
if (onprogress !== undefined) { | ||
onprogress({ | ||
loaded: numObjects - num, | ||
total: numObjects, | ||
lengthComputable: true | ||
}); | ||
} | ||
let perfentry = marky.stop(`${type} #${num} ${data.length}B`); | ||
totalTime += perfentry.duration; | ||
if (num === 0) { | ||
console.log(`Total time unpacking objects: ${totalTime}`); | ||
console.log(`Total time applying deltas: ${totalApplyDeltaTime}`); | ||
console.log(`Total time reading files: ${totalReadFileTime}`); | ||
console.log(`Total time writing files: ${totalWriteFileTime}`); | ||
return resolve(); | ||
} | ||
next(null); | ||
})).on('error', reject).on('finish', resolve); | ||
}); | ||
}); | ||
} | ||
@@ -923,5 +789,17 @@ async function writeTreeToDisk({ gitdir, dir, index, prefix, tree, fs }) { | ||
blob: 0b0110000, | ||
tag: 0b1000000 | ||
tag: 0b1000000, | ||
ofs_delta: 0b1100000, | ||
ref_delta: 0b1110000 | ||
/** | ||
* | ||
* If there were no errors, then there will be no `errors` property. | ||
* There can be a mix of `ok` messages and `errors` messages. | ||
* | ||
* @typedef {Object} PushResponse | ||
* @property {Array<string>} [ok] - The first item is "unpack" if the overall operation was successful. The remaining items are the names of refs that were updated successfully. | ||
 * @property {Array<string>} [errors] - If the overall operation threw an error, the first item will be "unpack {Overall error message}". The remaining items are individual refs that failed to be updated in the format "{ref name} {error message}". | ||
*/ | ||
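/*
 * Illustrative only (these literals are not taken from the package source): under the
 * PushResponse typedef above, a fully accepted push and a partially rejected push would
 * be reported roughly as
 *   { ok: ['unpack', 'refs/heads/master'] }
 *   { ok: ['unpack'], errors: ['refs/heads/master {error message}'] }
 */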
/** | ||
* Push a branch | ||
@@ -933,8 +811,8 @@ * | ||
* @param {string} [args.gitdir=path.join(dir, '.git')] - The path to the [git directory](index.html#dir-vs-gitdir) | ||
* @param {string} [args.ref] - Which branch to push. By default this is the currently checked out branch of the repository. | ||
* @param {string} [args.ref=undefined] - Which branch to push. By default this is the currently checked out branch of the repository. | ||
* @param {string} [args.remote='origin'] - If URL is not specified, determines which remote to use. | ||
* @param {string} [args.url] - The URL of the remote git server. The default is the value set in the git config for that remote. | ||
* @param {string} [args.authUsername] - The username to use with Basic Auth | ||
* @param {string} [args.authPassword] - The password to use with Basic Auth | ||
* @returns {Promise<void>} - Resolves successfully when push completes | ||
* @param {string} [args.url=undefined] - The URL of the remote git server. The default is the value set in the git config for that remote. | ||
* @param {string} [args.authUsername=undefined] - The username to use with Basic Auth | ||
* @param {string} [args.authPassword=undefined] - The password to use with Basic Auth | ||
* @returns {Promise<PushResponse>} - Resolves successfully when push completes with a detailed description of the operation from the server. | ||
* | ||
@@ -997,5 +875,3 @@ * @example | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function listCommits({ | ||
@@ -1048,5 +924,3 @@ dir, | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function listObjects({ | ||
@@ -1090,5 +964,3 @@ dir, | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function pack({ | ||
@@ -1438,2 +1310,28 @@ dir, | ||
/** | ||
* Create the .idx file for a given .pack file | ||
* | ||
* @param {Object} args - Arguments object | ||
* @param {FSModule} args.fs - The filesystem holding the git repo | ||
* @param {string} args.dir - The path to the [working tree](index.html#dir-vs-gitdir) directory | ||
* @param {string} [args.gitdir=path.join(dir, '.git')] - The path to the [git directory](index.html#dir-vs-gitdir) | ||
* @param {string} args.filepath - The path to the .pack file to index. | ||
* @returns {Promise<void>} - Resolves successfully once the .idx file been written. | ||
* | ||
* @example | ||
* let repo = {fs, dir: '.'} | ||
* await indexPack({...repo, filepath: 'pack-9cbd243a1caa4cb4bef976062434a958d82721a9.pack'}) | ||
*/ | ||
async function indexPack({ | ||
dir, | ||
gitdir = path.join(dir, '.git'), | ||
fs: _fs, | ||
filepath | ||
}) { | ||
const fs = new models_js.FileSystem(_fs); | ||
const pack = await fs.read(path.join(dir, filepath)); | ||
const idx = await models_js.GitPackIndex.fromPack({ pack }); | ||
await fs.write(filepath.replace(/\.pack$/, '.idx'), idx.toBuffer()); | ||
} | ||
exports.add = add; | ||
@@ -1455,1 +1353,2 @@ exports.clone = clone; | ||
exports.version = version; | ||
exports.indexPack = indexPack; |
@@ -20,2 +20,4 @@ 'use strict'; | ||
var marky = _interopDefault(require('marky')); | ||
var pify = _interopDefault(require('pify')); | ||
var concat = _interopDefault(require('simple-concat')); | ||
require('./utils.js'); | ||
@@ -57,5 +59,17 @@ | ||
blob: 0b0110000, | ||
tag: 0b1000000 | ||
tag: 0b1000000, | ||
ofs_delta: 0b1100000, | ||
ref_delta: 0b1110000 | ||
/** | ||
* | ||
* If there were no errors, then there will be no `errors` property. | ||
* There can be a mix of `ok` messages and `errors` messages. | ||
* | ||
* @typedef {Object} PushResponse | ||
* @property {Array<string>} [ok] - The first item is "unpack" if the overall operation was successful. The remaining items are the names of refs that were updated successfully. | ||
 * @property {Array<string>} [errors] - If the overall operation threw an error, the first item will be "unpack {Overall error message}". The remaining items are individual refs that failed to be updated in the format "{ref name} {error message}". | ||
*/ | ||
/** | ||
* Push a branch | ||
@@ -67,8 +81,8 @@ * | ||
* @param {string} [args.gitdir=path.join(dir, '.git')] - The path to the [git directory](index.html#dir-vs-gitdir) | ||
* @param {string} [args.ref] - Which branch to push. By default this is the currently checked out branch of the repository. | ||
* @param {string} [args.ref=undefined] - Which branch to push. By default this is the currently checked out branch of the repository. | ||
* @param {string} [args.remote='origin'] - If URL is not specified, determines which remote to use. | ||
* @param {string} [args.url] - The URL of the remote git server. The default is the value set in the git config for that remote. | ||
* @param {string} [args.authUsername] - The username to use with Basic Auth | ||
* @param {string} [args.authPassword] - The password to use with Basic Auth | ||
* @returns {Promise<void>} - Resolves successfully when push completes | ||
* @param {string} [args.url=undefined] - The URL of the remote git server. The default is the value set in the git config for that remote. | ||
* @param {string} [args.authUsername=undefined] - The username to use with Basic Auth | ||
* @param {string} [args.authPassword=undefined] - The password to use with Basic Auth | ||
* @returns {Promise<PushResponse>} - Resolves successfully when push completes with a detailed description of the operation from the server. | ||
* | ||
@@ -87,5 +101,3 @@ * @example | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function listCommits({ | ||
@@ -138,5 +150,3 @@ dir, | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function listObjects({ | ||
@@ -180,5 +190,3 @@ dir, | ||
/** | ||
* @ignore | ||
*/ | ||
/** @ignore */ | ||
async function pack({ | ||
@@ -185,0 +193,0 @@ dir, |
@@ -162,2 +162,4 @@ 'use strict'; | ||
const PackfileCache = new Map(); | ||
/** @ignore */ | ||
@@ -167,12 +169,54 @@ class GitObjectManager { | ||
const fs = new models_js.FileSystem(_fs); | ||
// Look for it in the loose object directory. | ||
let file = await fs.read(`${gitdir}/objects/${oid.slice(0, 2)}/${oid.slice(2)}`); | ||
// Check to see if it's in a packfile. | ||
if (!file) { | ||
// Check to see if it's in shallow commits. | ||
// Curry the current read method so that the packfile un-deltification | ||
// process can acquire external ref-deltas. | ||
const getExternalRefDelta = oid => GitObjectManager.read({ fs: _fs, gitdir, oid }); | ||
// Iterate through all the .pack files | ||
let list = await fs.readdir(path.join(gitdir, '/objects/pack')); | ||
list = list.filter(x => x.endsWith('.pack')); | ||
for (let filename of list) { | ||
// Try to get the packfile from the in-memory cache | ||
let p = PackfileCache.get(filename); | ||
if (!p) { | ||
// If not there, load it from a .idx file | ||
const idxName = filename.replace(/pack$/, 'idx'); | ||
if (await fs.exists(`${gitdir}/objects/pack/${idxName}`)) { | ||
const idx = await fs.read(`${gitdir}/objects/pack/${idxName}`); | ||
p = await models_js.GitPackIndex.fromIdx({ idx, getExternalRefDelta }); | ||
} else { | ||
// If the .idx file isn't available, generate one. | ||
const pack = await fs.read(`${gitdir}/objects/pack/${filename}`); | ||
p = await models_js.GitPackIndex.fromPack({ pack, getExternalRefDelta }); | ||
// Save .idx file | ||
await fs.write(`${gitdir}/objects/pack/${idxName}`, p.toBuffer()); | ||
} | ||
PackfileCache.set(filename, p); | ||
} | ||
// console.log(p) | ||
// If the packfile DOES have the oid we're looking for... | ||
if (p.hashes.includes(oid)) { | ||
// Make sure the packfile is loaded in memory | ||
if (!p.pack) { | ||
const pack = await fs.read(`${gitdir}/objects/pack/${filename}`); | ||
await p.load({ pack }); | ||
} | ||
// Get the resolved git object from the packfile | ||
return p.read({ oid, getExternalRefDelta }); | ||
} | ||
} | ||
} | ||
// Check to see if it's in shallow commits. | ||
if (!file) { | ||
let text = await fs.read(`${gitdir}/shallow`, { encoding: 'utf8' }); | ||
if (text !== null && text.includes(oid)) { | ||
throw new Error(`Failed to read git object with oid ${oid} because it is a shallow commit`); | ||
} else { | ||
throw new Error(`Failed to read git object with oid ${oid}`); | ||
} | ||
} | ||
// Finally | ||
if (!file) { | ||
throw new Error(`Failed to read git object with oid ${oid}`); | ||
} | ||
let { type, object } = models_js.GitObject.unwrap({ oid, file }); | ||
@@ -227,2 +271,16 @@ return { type, object }; | ||
// Update files | ||
// TODO: For large repos with a history of thousands of pull requests | ||
// (i.e. gitlab-ce) it would be vastly more efficient to write them | ||
// to .git/packed-refs. | ||
// The trick is to make sure we a) don't write a packed ref that is | ||
// already shadowed by a loose ref and b) don't lose any refs already | ||
// in packed-refs. Doing this efficiently may be difficult. A | ||
// solution that might work is | ||
// a) load the current packed-refs file | ||
// b) add actualRefsToWrite, overriding the existing values if present | ||
// c) enumerate all the loose refs currently in .git/refs/remotes/${remote} | ||
// d) overwrite their value with the new value. | ||
// Examples of refs we need to avoid writing in loose format for efficiency's sake | ||
// are .git/refs/remotes/origin/refs/remotes/remote_mirror_3059 | ||
// and .git/refs/remotes/origin/refs/merge-requests | ||
const normalizeValue = value => value.trim() + '\n'; | ||
@@ -382,4 +440,47 @@ for (let [key, value] of actualRefsToWrite) { | ||
const service = 'git-receive-pack'; | ||
let res = await this.stream({ stream: stream$$1, service }); | ||
return res; | ||
let { packetlines, packfile } = await this.stream({ | ||
stream: stream$$1, | ||
service | ||
}); | ||
// TODO: Someday, maybe we will make this a streaming parser. | ||
packfile = await pify(concat)(packfile); | ||
packetlines = await pify(concat)(packetlines); | ||
let result = {}; | ||
// Parse the response! | ||
// I'm combining the side-band-64k and regular streams | ||
// because Github returns the first line in the sideband while | ||
// git-http-server returns it without the sideband. | ||
let response = ''; | ||
let read = models_js.GitPktLine.reader(packfile); | ||
let line = await read(); | ||
while (line !== null && line !== true) { | ||
response += line.toString('utf8') + '\n'; | ||
line = await read(); | ||
} | ||
response += packetlines.toString('utf8'); | ||
let lines = response.toString('utf8').split('\n'); | ||
// We're expecting "unpack {unpack-result}" | ||
line = lines.shift(); | ||
if (!line.startsWith('unpack ')) { | ||
throw new Error(`Unparsable response from server! Expected 'unpack ok' or 'unpack [error message]' but got '${line}'`); | ||
} | ||
if (line === 'unpack ok') { | ||
result.ok = ['unpack']; | ||
} else { | ||
result.errors = [line.trim()]; | ||
} | ||
for (let line of lines) { | ||
let status = line.slice(0, 2); | ||
let refAndMessage = line.slice(3); | ||
if (status === 'ok') { | ||
result.ok = result.ok || []; | ||
result.ok.push(refAndMessage); | ||
} else if (status === 'ng') { | ||
result.errors = result.errors || []; | ||
result.errors.push(refAndMessage); | ||
} | ||
} | ||
console.log(result); | ||
return result; | ||
} | ||
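// Illustrative sketch of the report-status text the loop above consumes (assumed
// typical server output, not captured from this package): a response body such as
//   unpack ok
//   ok refs/heads/master
//   ng refs/heads/topic non-fast-forward
// would yield { ok: ['unpack', 'refs/heads/master'], errors: ['refs/heads/topic non-fast-forward'] }.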
@@ -408,6 +509,5 @@ async pull(stream$$1 /*: ReadableStream */) { | ||
}); | ||
// Don't try to parse git pushes for now. | ||
if (service === 'git-receive-pack') return res; | ||
let data = await pify(concat)(res); | ||
// Parse the response! | ||
let read = models_js.GitPktLine.streamReader(res); | ||
let read = models_js.GitPktLine.reader(data); | ||
// And now for the ridiculous side-band-64k protocol | ||
@@ -420,4 +520,6 @@ let packetlines = new stream.PassThrough(); | ||
let line = await read(); | ||
// Skip over flush packets | ||
if (line === null) return nextBit(); | ||
// A made up convention to signal there's no more to read. | ||
if (line === null) { | ||
if (line === true) { | ||
packetlines.end(); | ||
@@ -458,6 +560,3 @@ progress.end(); | ||
}; | ||
} /*: { | ||
stream: ReadableStream, | ||
service: string | ||
} */ | ||
} | ||
} | ||
@@ -464,0 +563,0 @@ |
@@ -21,2 +21,7 @@ 'use strict'; | ||
var gartal = require('gartal'); | ||
var applyDelta = _interopDefault(require('git-apply-delta')); | ||
var listpack = _interopDefault(require('git-list-pack')); | ||
var crc32 = _interopDefault(require('crc/lib/crc32.js')); | ||
var stream = require('stream'); | ||
var marky = _interopDefault(require('marky')); | ||
var sortby = _interopDefault(require('lodash/sortBy')); | ||
@@ -118,2 +123,12 @@ | ||
/** | ||
* Read a directory without throwing an error if the directory doesn't exist | ||
*/ | ||
async readdir(filepath) { | ||
try { | ||
return await this._readdir(filepath); | ||
} catch (err) { | ||
return []; | ||
} | ||
} | ||
/** | ||
* Return a flat list of all the files nested inside a directory | ||
@@ -305,3 +320,3 @@ * | ||
} | ||
if (obj.parent) { | ||
if (obj.parent && obj.parent.length) { | ||
headers += 'parent'; | ||
@@ -452,4 +467,9 @@ for (let p of obj.parent) { | ||
class GitObject { | ||
static hash({ type, object }) /*: Promise<string> */{ | ||
let buffer$$1 = buffer.Buffer.concat([buffer.Buffer.from(`${type} ${object.byteLength.toString()}\0`), buffer.Buffer.from(object)]); | ||
let oid = shasum(buffer$$1); | ||
return oid; | ||
} | ||
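// Worked example (assumed, not from the package tests): for the 12-byte blob
// 'hello world\n', the wrapped buffer is 'blob 12\0hello world\n', and its SHA-1
// is the familiar git object id 3b18e512dba79e4c8300dd08aeb37f8e728b8dad, the same
// value `git hash-object --stdin` prints for that input.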
static wrap({ type, object /*: {type: string, object: Buffer} */ }) { | ||
let buffer$$1 = buffer.Buffer.concat([buffer.Buffer.from(type + ' '), buffer.Buffer.from(object.byteLength.toString()), buffer.Buffer.from([0]), buffer.Buffer.from(object)]); | ||
let buffer$$1 = buffer.Buffer.concat([buffer.Buffer.from(`${type} ${object.byteLength.toString()}\0`), object]); | ||
let oid = shasum(buffer$$1); | ||
@@ -564,7 +584,14 @@ return { | ||
} | ||
static streamReader(stream /*: ReadableStream */) { | ||
static streamReader(stream$$1 /*: ReadableStream */) { | ||
let done = false; | ||
stream$$1.on('end', () => { | ||
console.log('THE END I TELL YOU...'); | ||
done = true; | ||
}); | ||
stream$$1.resume(); | ||
return async function read() { | ||
if (done) return null; | ||
let hexlength, length, bytes; | ||
try { | ||
hexlength = await gartal.readBytes(stream, 4); | ||
hexlength = await gartal.readBytes(stream$$1, 4); | ||
} catch (err) { | ||
@@ -578,3 +605,3 @@ // No more file to read | ||
// otherwise return the packet content | ||
bytes = await gartal.readBytes(stream, length - 4); | ||
bytes = await gartal.readBytes(stream$$1, length - 4); | ||
return bytes; | ||
@@ -585,97 +612,395 @@ }; | ||
var _extends$1 = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key$$1 in source) { if (Object.prototype.hasOwnProperty.call(source, key$$1)) { target[key$$1] = source[key$$1]; } } } return target; }; | ||
function buffer2stream(buffer$$1) { | ||
let stream$$1 = new stream.PassThrough(); | ||
stream$$1.end(buffer$$1); | ||
return stream$$1; | ||
} | ||
function parseIDX(buffer$$1) { | ||
let reader = new BufferCursor(buffer$$1); | ||
let magic = reader.slice(4).toString('hex'); | ||
// Check for IDX v2 magic number | ||
if (magic !== 'ff744f63') { | ||
return; // undefined | ||
} | ||
let version = reader.readUInt32BE(); | ||
if (version !== 2) { | ||
throw new Error(`Unable to read version ${version} packfile IDX. (Only version 2 supported)`); | ||
} | ||
// Verify checksums | ||
let shaComputed = shasum(buffer$$1.slice(0, -20)); | ||
let shaClaimed = buffer$$1.slice(-20).toString('hex'); | ||
if (shaClaimed !== shaComputed) { | ||
throw new Error(`Invalid checksum in IDX buffer: expected ${shaClaimed} but saw ${shaComputed}`); | ||
} | ||
if (buffer$$1.byteLength > 2048 * 1024 * 1024) { | ||
throw new Error(`To keep implementation simple, I haven't implemented the layer 5 feature needed to support packfiles > 2GB in size.`); | ||
} | ||
let fanout = []; | ||
for (let i = 0; i < 256; i++) { | ||
fanout.push(reader.readUInt32BE()); | ||
} | ||
let size = fanout[255]; | ||
// For now we'll parse the whole thing. We can optimize later if we need to. | ||
let hashes = []; | ||
for (let i = 0; i < size; i++) { | ||
hashes.push(reader.slice(20).toString('hex')); | ||
} | ||
let crcs = new Map(); | ||
for (let i = 0; i < size; i++) { | ||
crcs.set(hashes[i], reader.readUInt32BE()); | ||
} | ||
let offsets = new Map(); | ||
for (let i = 0; i < size; i++) { | ||
offsets.set(hashes[i], reader.readUInt32BE()); | ||
} | ||
let packfileSha = reader.slice(20).toString('hex'); | ||
// This part is gratuitous, but since we lack a good way to unzip arbitrary streams with extra bytes at the end in the browser... | ||
let lengths = Array.from(offsets); | ||
lengths.sort((a, b) => a[1] - b[1]); // List objects in order by offset | ||
let sizes = new Map(); | ||
let slices = new Map(); | ||
for (let i = 0; i < size - 1; i++) { | ||
sizes.set(lengths[i][0], lengths[i + 1][1] - lengths[i][1]); | ||
slices.set(lengths[i][0], [lengths[i][1], lengths[i + 1][1]]); | ||
} | ||
slices.set(lengths[size - 1][0], [lengths[size - 1][1], undefined]); | ||
return { size, fanout, hashes, crcs, packfileSha, slices }; | ||
function decodeVarInt(reader) { | ||
let bytes = []; | ||
let byte = 0; | ||
let multibyte = 0; | ||
do { | ||
byte = reader.readUInt8(); | ||
// We keep bits 6543210 | ||
const lastSeven = byte & 0b01111111; | ||
bytes.push(lastSeven); | ||
// Whether the next byte is part of the variable-length encoded number | ||
// is encoded in bit 7 | ||
multibyte = byte & 0b10000000; | ||
} while (multibyte); | ||
// Now that all the bytes are in big-endian order, | ||
// alternate shifting the bits left by 7 and OR-ing the next byte. | ||
// And... do a weird increment-by-one thing that I don't quite understand. | ||
return bytes.reduce((a, b) => a + 1 << 7 | b, -1); | ||
} | ||
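// Worked example (illustrative): the two-byte sequence 0b10000001 0b00000000 decodes
// to 256 rather than 128, because each continuation byte adds one before shifting.
// That bias is what keeps longer encodings from overlapping the offsets that shorter
// encodings can already represent (one byte covers 0-127, two bytes start at 128).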
// I'm pretty much copying this one from the git C source code, | ||
// because it makes no sense. | ||
function otherVarIntDecode(reader, startWith) { | ||
let result = startWith; | ||
let shift = 4; | ||
let byte = null; | ||
do { | ||
byte = reader.readUInt8(); | ||
result |= (byte & 0b01111111) << shift; | ||
shift += 7; | ||
} while (byte & 0b10000000); | ||
return result; | ||
} | ||
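// Worked example (illustrative): a header byte 0b10011111 sets the multibyte flag
// (bit 7), encodes type 0b001 = commit in bits 6-4, and carries 0b1111 as the low
// length bits; if the following byte is 0b00000001, otherVarIntDecode(reader, 0b1111)
// returns 0b1111 | (1 << 4) = 31, the object's inflated length.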
/** @ignore */ | ||
class GitPackfile { | ||
constructor({ size, fanout, hashes, crcs, packfileSha, slices, pack }) { | ||
// Compare checksums | ||
let shaClaimed = pack.slice(-20).toString('hex'); | ||
if (packfileSha !== shaClaimed) { | ||
throw new Error(`Invalid packfile shasum in IDX buffer: expected ${packfileSha} but saw ${shaClaimed}`); | ||
class GitPackIndex { | ||
constructor(stuff) { | ||
Object.assign(this, stuff); | ||
this.offsetCache = {}; | ||
} | ||
static async fromIdx({ idx, getExternalRefDelta }) { | ||
let reader = new BufferCursor(idx); | ||
let magic = reader.slice(4).toString('hex'); | ||
// Check for IDX v2 magic number | ||
if (magic !== 'ff744f63') { | ||
return; // undefined | ||
} | ||
Object.assign(this, { | ||
size, | ||
fanout, | ||
let version = reader.readUInt32BE(); | ||
if (version !== 2) { | ||
throw new Error(`Unable to read version ${version} packfile IDX. (Only version 2 supported)`); | ||
} | ||
// Verify checksums | ||
let shaComputed = shasum(idx.slice(0, -20)); | ||
let shaClaimed = idx.slice(-20).toString('hex'); | ||
if (shaClaimed !== shaComputed) { | ||
throw new Error(`Invalid checksum in IDX buffer: expected ${shaClaimed} but saw ${shaComputed}`); | ||
} | ||
if (idx.byteLength > 2048 * 1024 * 1024) { | ||
throw new Error(`To keep implementation simple, I haven't implemented the layer 5 feature needed to support packfiles > 2GB in size.`); | ||
} | ||
let fanout = []; | ||
for (let i = 0; i < 256; i++) { | ||
fanout.push(reader.readUInt32BE()); | ||
} | ||
let size = fanout[255]; | ||
// For now we'll parse the whole thing. We can optimize later if we need to. | ||
let hashes = []; | ||
for (let i = 0; i < size; i++) { | ||
hashes.push(reader.slice(20).toString('hex')); | ||
} | ||
let crcs = {}; | ||
for (let i = 0; i < size; i++) { | ||
crcs[hashes[i]] = reader.readUInt32BE(); | ||
} | ||
let offsets = {}; | ||
for (let i = 0; i < size; i++) { | ||
offsets[hashes[i]] = reader.readUInt32BE(); | ||
} | ||
let packfileSha = reader.slice(20).toString('hex'); | ||
return new GitPackIndex({ | ||
hashes, | ||
crcs, | ||
offsets, | ||
packfileSha, | ||
slices, | ||
pack | ||
getExternalRefDelta | ||
}); | ||
} | ||
static async fromIDX({ idx, pack }) { | ||
return new GitPackfile(_extends$1({ pack }, parseIDX(idx))); | ||
static async fromPack({ pack, getExternalRefDelta }) { | ||
const listpackTypes = { | ||
1: 'commit', | ||
2: 'tree', | ||
3: 'blob', | ||
4: 'tag', | ||
6: 'ofs-delta', | ||
7: 'ref-delta' | ||
}; | ||
let offsetToObject = {}; | ||
// Older packfiles do NOT use the shasum of the pack itself, | ||
// so it is recommended to just use whatever bytes are in the trailer. | ||
// Source: https://github.com/git/git/commit/1190a1acf800acdcfd7569f87ac1560e2d077414 | ||
// let packfileSha = shasum(pack.slice(0, -20)) | ||
let packfileSha = pack.slice(-20).toString('hex'); | ||
let hashes = []; | ||
let crcs = {}; | ||
let offsets = {}; | ||
let totalObjectCount = null; | ||
let lastPercent = null; | ||
let times = { | ||
hash: 0, | ||
readSlice: 0, | ||
offsets: 0, | ||
crcs: 0, | ||
sort: 0 | ||
}; | ||
let histogram = { | ||
commit: 0, | ||
tree: 0, | ||
blob: 0, | ||
tag: 0, | ||
'ofs-delta': 0, | ||
'ref-delta': 0 | ||
}; | ||
let bytesProcessed = 0; | ||
console.log('Indexing objects'); | ||
console.log(`percent\tmilliseconds\tkilobytes\tbytesProcessed\tcommits\ttrees\tblobs\ttags\tofs-deltas\tref-deltas`); | ||
marky.mark('total'); | ||
marky.mark('offsets'); | ||
marky.mark('percent'); | ||
await new Promise((resolve, reject) => { | ||
buffer2stream(pack).pipe(listpack()).on('data', async ({ data, type, reference, offset, num }) => { | ||
if (totalObjectCount === null) totalObjectCount = num; | ||
let percent = Math.floor((totalObjectCount - num) * 100 / totalObjectCount); | ||
if (percent !== lastPercent) { | ||
console.log(`${percent}%\t${Math.floor(marky.stop('percent').duration)}\t${Math.floor(process.memoryUsage().rss / 1000)}\t${bytesProcessed}\t${histogram.commit}\t${histogram.tree}\t${histogram.blob}\t${histogram.tag}\t${histogram['ofs-delta']}\t${histogram['ref-delta']}`); | ||
histogram = { | ||
commit: 0, | ||
tree: 0, | ||
blob: 0, | ||
tag: 0, | ||
'ofs-delta': 0, | ||
'ref-delta': 0 | ||
}; | ||
bytesProcessed = 0; | ||
marky.mark('percent'); | ||
} | ||
lastPercent = percent; | ||
// Change type from a number to a meaningful string | ||
type = listpackTypes[type]; | ||
histogram[type]++; | ||
bytesProcessed += data.byteLength; | ||
if (['commit', 'tree', 'blob', 'tag'].includes(type)) { | ||
offsetToObject[offset] = { | ||
type, | ||
offset | ||
}; | ||
} else if (type === 'ofs-delta') { | ||
offsetToObject[offset] = { | ||
type, | ||
offset | ||
}; | ||
} else if (type === 'ref-delta') { | ||
offsetToObject[offset] = { | ||
type, | ||
offset | ||
}; | ||
} | ||
if (num === 0) resolve(); | ||
}); | ||
}); | ||
times['offsets'] = Math.floor(marky.stop('offsets').duration); | ||
console.log('Computing CRCs'); | ||
marky.mark('crcs'); | ||
// We need to know the lengths of the slices to compute the CRCs. | ||
let offsetArray = Object.keys(offsetToObject).map(Number); | ||
for (let [i, start] of offsetArray.entries()) { | ||
let end = i + 1 === offsetArray.length ? pack.byteLength - 20 : offsetArray[i + 1]; | ||
let o = offsetToObject[start]; | ||
let crc = crc32(pack.slice(start, end)); | ||
o.end = end; | ||
o.crc = crc; | ||
} | ||
times['crcs'] = Math.floor(marky.stop('crcs').duration); | ||
// We don't have the hashes yet. But we can generate them using the .readSlice function! | ||
const p = new GitPackIndex({ | ||
pack, | ||
packfileSha, | ||
crcs, | ||
hashes, | ||
offsets, | ||
getExternalRefDelta | ||
}); | ||
// Resolve deltas and compute the oids | ||
console.log('Resolving deltas'); | ||
console.log(`percent2\tmilliseconds2\tkilobytes2\tcallsToReadSlice\tcallsToGetExternal`); | ||
marky.mark('percent'); | ||
lastPercent = null; | ||
let count = 0; | ||
let callsToReadSlice = 0; | ||
let callsToGetExternal = 0; | ||
let timeByDepth = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; | ||
let objectsByDepth = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; | ||
for (let offset in offsetToObject) { | ||
offset = Number(offset); | ||
let percent = Math.floor(count++ * 100 / totalObjectCount); | ||
if (percent !== lastPercent) { | ||
console.log(`${percent}%\t${Math.floor(marky.stop('percent').duration)}\t${Math.floor(process.memoryUsage().rss / 1000)}\t${callsToReadSlice}\t${callsToGetExternal}`); | ||
marky.mark('percent'); | ||
callsToReadSlice = 0; | ||
callsToGetExternal = 0; | ||
} | ||
lastPercent = percent; | ||
let o = offsetToObject[offset]; | ||
if (o.oid) continue; | ||
try { | ||
p.readDepth = 0; | ||
p.externalReadDepth = 0; | ||
marky.mark('readSlice'); | ||
let { type, object } = await p.readSlice({ start: offset }); | ||
let time = marky.stop('readSlice').duration; | ||
times.readSlice += time; | ||
callsToReadSlice += p.readDepth; | ||
callsToGetExternal += p.externalReadDepth; | ||
timeByDepth[p.readDepth] += time; | ||
objectsByDepth[p.readDepth] += 1; | ||
marky.mark('hash'); | ||
let oid = GitObject.hash({ type, object }); | ||
times.hash += marky.stop('hash').duration; | ||
o.oid = oid; | ||
hashes.push(oid); | ||
offsets[oid] = offset; | ||
crcs[oid] = o.crc; | ||
} catch (err) { | ||
console.log('ERROR', err); | ||
continue; | ||
} | ||
} | ||
marky.mark('sort'); | ||
hashes.sort(); | ||
times['sort'] = Math.floor(marky.stop('sort').duration); | ||
let totalElapsedTime = marky.stop('total').duration; | ||
times.hash = Math.floor(times.hash); | ||
times.readSlice = Math.floor(times.readSlice); | ||
times.misc = Math.floor(Object.values(times).reduce((a, b) => a - b, totalElapsedTime)); | ||
console.log(Object.keys(times).join('\t')); | ||
console.log(Object.values(times).join('\t')); | ||
console.log('by depth:'); | ||
console.log([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11].join('\t')); | ||
console.log(objectsByDepth.slice(0, 12).join('\t')); | ||
console.log(timeByDepth.map(Math.floor).slice(0, 12).join('\t')); | ||
// CONTINUE HERE: Probably we need an LRU cache to speed up deltas. | ||
// We could plot a histogram of oids to see how many oids we need to cache to have a big impact. | ||
return p; | ||
} | ||
toBuffer() { | ||
let buffers = []; | ||
let write = (str, encoding) => { | ||
buffers.push(Buffer.from(str, encoding)); | ||
}; | ||
// Write out IDX v2 magic number | ||
write('ff744f63', 'hex'); | ||
// Write out version number 2 | ||
write('00000002', 'hex'); | ||
// Write fanout table | ||
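// (Entry i of the fanout table is the count of object ids whose first byte is <= i,
// so the last entry, fanout[255], equals the total number of objects in the index.)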
let fanoutBuffer = new BufferCursor(Buffer.alloc(256 * 4)); | ||
for (let i = 0; i < 256; i++) { | ||
let count = 0; | ||
for (let hash of this.hashes) { | ||
if (parseInt(hash.slice(0, 2), 16) <= i) count++; | ||
} | ||
fanoutBuffer.writeUInt32BE(count); | ||
} | ||
buffers.push(fanoutBuffer.buffer); | ||
// Write out hashes | ||
for (let hash of this.hashes) { | ||
write(hash, 'hex'); | ||
} | ||
// Write out crcs | ||
let crcsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4)); | ||
for (let hash of this.hashes) { | ||
crcsBuffer.writeUInt32BE(this.crcs[hash]); | ||
} | ||
buffers.push(crcsBuffer.buffer); | ||
// Write out offsets | ||
let offsetsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4)); | ||
for (let hash of this.hashes) { | ||
offsetsBuffer.writeUInt32BE(this.offsets[hash]); | ||
} | ||
buffers.push(offsetsBuffer.buffer); | ||
// Write out packfile checksum | ||
write(this.packfileSha, 'hex'); | ||
// Write out shasum | ||
let totalBuffer = Buffer.concat(buffers); | ||
let sha = shasum(totalBuffer); | ||
let shaBuffer = Buffer.alloc(20); | ||
shaBuffer.write(sha, 'hex'); | ||
return Buffer.concat([totalBuffer, shaBuffer]); | ||
} | ||
async load({ pack }) { | ||
this.pack = pack; | ||
} | ||
async unload() { | ||
this.pack = null; | ||
} | ||
async read({ oid /*: {oid: string} */ }) { | ||
if (!this.slices.has(oid)) return null; | ||
let raw = this.pack.slice(...this.slices.get(oid)); | ||
console.log(raw); | ||
/* | ||
- The header is followed by number of object entries, each of | ||
which looks like this: | ||
(undeltified representation) | ||
n-byte type and length (3-bit type, (n-1)*7+4-bit length) | ||
compressed data | ||
(deltified representation) | ||
n-byte type and length (3-bit type, (n-1)*7+4-bit length) | ||
20-byte base object name if OBJ_REF_DELTA or a negative relative | ||
offset from the delta object's position in the pack if this | ||
is an OBJ_OFS_DELTA object | ||
compressed delta data | ||
Observation: length of each object is encoded in a variable | ||
length format and is not constrained to 32-bit or anything. | ||
*/ | ||
if (!this.offsets[oid]) { | ||
if (this.getExternalRefDelta) { | ||
this.externalReadDepth++; | ||
return this.getExternalRefDelta(oid); | ||
} else { | ||
throw new Error(`Could not read object ${oid} from packfile`); | ||
} | ||
} | ||
let start = this.offsets[oid]; | ||
return this.readSlice({ start }); | ||
} | ||
async readSlice({ start }) { | ||
if (this.offsetCache[start]) return this.offsetCache[start]; | ||
this.readDepth++; | ||
const types = { | ||
0b0010000: 'commit', | ||
0b0100000: 'tree', | ||
0b0110000: 'blob', | ||
0b1000000: 'tag', | ||
0b1100000: 'ofs_delta', | ||
0b1110000: 'ref_delta' | ||
}; | ||
if (!this.pack) { | ||
throw new Error('Tried to read from a GitPackIndex with no packfile loaded into memory'); | ||
} | ||
let raw = this.pack.slice(start); | ||
let reader = new BufferCursor(raw); | ||
let byte = reader.readUInt8(); | ||
// Object type is encoded in bits 654 | ||
let btype = byte & 0b1110000; | ||
let type = types[btype]; | ||
if (type === undefined) { | ||
throw new Error('Unrecognized type: 0b' + btype.toString(2)); | ||
} | ||
// The length encoding gets complicated. | ||
// Last four bits of length is encoded in bits 3210 | ||
let lastFour = byte & 0b1111; | ||
let length = lastFour; | ||
// Whether the next byte is part of the variable-length encoded number | ||
// is encoded in bit 7 | ||
let multibyte = byte & 0b10000000; | ||
if (multibyte) { | ||
length = otherVarIntDecode(reader, lastFour); | ||
} | ||
let base = null; | ||
let object = null; | ||
// Handle deltified objects | ||
if (type === 'ofs_delta') { | ||
let offset = decodeVarInt(reader); | ||
let baseOffset = start - offset; | ||
({ object: base, type } = await this.readSlice({ start: baseOffset })); | ||
} | ||
if (type === 'ref_delta') { | ||
let oid = reader.slice(20).toString('hex'); | ||
({ object: base, type } = await this.read({ oid })); | ||
} | ||
// Handle undeltified objects | ||
let buffer$$1 = raw.slice(reader.tell()); | ||
object = Buffer.from(pako.inflate(buffer$$1)); | ||
// Assert that the object length is as expected. | ||
if (object.byteLength !== length) { | ||
throw new Error(`Packfile told us object would have length ${length} but it had length ${object.byteLength}`); | ||
} | ||
if (base) { | ||
object = applyDelta(object, base); | ||
} | ||
// Cache the result based on depth. | ||
if (this.readDepth > 3) { | ||
// hand tuned for speed / memory usage tradeoff | ||
this.offsetCache[start] = { type, object }; | ||
} | ||
return { type, object }; | ||
} | ||
} | ||
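// A minimal usage sketch (illustrative only; `pack` is assumed to be a Buffer holding a
// complete .pack file, and `fs`/`packPath` are hypothetical helpers):
//
//   const idx = await GitPackIndex.fromPack({ pack })
//   await fs.write(packPath.replace(/\.pack$/, '.idx'), idx.toBuffer()) // persist the .idx
//   const { type, object } = await idx.read({ oid: idx.hashes[0] })     // read an object back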
@@ -982,4 +1307,4 @@ | ||
exports.GitPktLine = GitPktLine; | ||
exports.GitPackfile = GitPackfile; | ||
exports.GitPackIndex = GitPackIndex; | ||
exports.GitIndex = GitIndex; | ||
exports.GitTree = GitTree; |
@@ -90,4 +90,4 @@ 'use strict'; | ||
var files = ["dist", "cli.js"]; | ||
var dependencies = { "async-lock": "^1.0.0", "await-stream-ready": "^1.0.1", "babel-runtime": "^6.26.0", "buffer": "^5.0.7", "buffer-peek-stream": "^1.0.1", "buffercursor": "0.0.12", "gartal": "^1.1.2", "git-apply-delta": "0.0.7", "git-list-pack": "0.0.10", "ignore": "^3.3.6", "ini": "^1.3.4", "marky": "^1.2.0", "minimisted": "^2.0.0", "openpgp": "^2.5.10", "pad": "^2.0.1", "pako": "^1.0.5", "pify": "^3.0.0", "shasum": "^1.0.2", "simple-concat": "^1.0.0", "simple-get": "^2.7.0", "through2": "^2.0.3" }; | ||
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "esdoc": "^1.0.4", "esdoc-ecmascript-proposal-plugin": "^1.0.0", "esdoc-importpath-plugin": "^1.0.1", "esdoc-standard-plugin": "^1.0.0", "husky": "^0.14.3", "jest": "^21.2.1", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^1.7.1", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^7.0.3", "rollup": "^0.51.6", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watch": "^1.0.2", "watchify": "^3.9.0", "semantic-release": "^8.2.0" }; | ||
var dependencies = { "async-lock": "^1.0.0", "await-stream-ready": "^1.0.1", "babel-runtime": "^6.26.0", "buffer": "^5.0.7", "buffer-peek-stream": "^1.0.1", "buffercursor": "0.0.12", "crc": "^3.5.0", "gartal": "^1.1.2", "git-apply-delta": "0.0.7", "git-list-pack": "0.0.10", "ignore": "^3.3.6", "ini": "^1.3.4", "marky": "^1.2.0", "minimisted": "^2.0.0", "openpgp": "^2.5.10", "pad": "^2.0.1", "pako": "^1.0.5", "pify": "^3.0.0", "shasum": "^1.0.2", "simple-concat": "^1.0.0", "simple-get": "^2.7.0", "through2": "^2.0.3" }; | ||
var devDependencies = { "babel-plugin-external-helpers": "^6.22.0", "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", "babel-plugin-transform-object-rest-spread": "^6.23.0", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.6.0", "babel-preset-flow": "^6.23.0", "ban-sensitive-files": "^1.9.0", "browserfs": "^1.4.3", "browserify": "^14.4.0", "browserify-shim": "^3.8.14", "codecov": "^3.0.0", "doctoc": "^1.3.0", "esdoc": "^1.0.4", "esdoc-ecmascript-proposal-plugin": "^1.0.0", "esdoc-importpath-plugin": "^1.0.1", "esdoc-standard-plugin": "^1.0.0", "husky": "^0.14.3", "jest": "^22.0.4", "jest-fixtures": "^0.6.0", "jsonfile": "^4.0.0", "karma": "^2.0.0", "karma-browserify": "^5.1.1", "karma-chrome-launcher": "^2.2.0", "karma-firefox-launcher": "^1.0.1", "karma-sauce-launcher": "^1.2.0", "karma-tap": "^3.1.1", "lodash": "^4.17.4", "nock": "^9.0.17", "npm-run-all": "^4.1.1", "nps": "^5.7.1", "nps-utils": "^1.4.0", "parse-header-stream": "^1.1.1", "prettier-standard": "^8.0.0", "rollup": "^0.53.0", "rollup-plugin-babel": "^3.0.2", "rollup-plugin-json": "^2.3.0", "semantic-release": "^8.2.0", "standard": "^10.0.3", "stream-equal": "^1.0.1", "tape": "^4.8.0", "uglify-es": "^3.1.2", "watch": "^1.0.2", "watchify": "^3.9.0" }; | ||
var ava = { "source": ["dist/for-node/*"] }; | ||
@@ -94,0 +94,0 @@ var browserify = { "transform": ["browserify-shim"] }; |
@@ -1,1 +0,1 @@ | ||
{"name":"isomorphic-git","version":"0.0.33","description":"A pure JavaScript implementation of git for node and browsers!","typings":"./src/index.d.ts","main":"dist/for-node/","browser":"dist/for-browserify/","module":"dist/for-future/","unpkg":"dist/bundle.umd.min.js","bin":{"isogit":"./cli.js"},"engines":{"node":">=7.6.0"},"scripts":{"start":"nps","test":"nps test","precommit":"nps format toc","semantic-release":"semantic-release pre && npm publish && semantic-release post"},"repository":{"type":"git","url":"https://github.com/wmhilton/isomorphic-git.git"},"keywords":["git"],"author":"William Hilton <wmhilton@gmail.com>","license":"Unlicense","bugs":{"url":"https://github.com/wmhilton/isomorphic-git/issues"},"homepage":"https://github.com/wmhilton/isomorphic-git#readme","files":["dist","cli.js"],"dependencies":{"async-lock":"^1.0.0","await-stream-ready":"^1.0.1","babel-runtime":"^6.26.0","buffer":"^5.0.7","buffer-peek-stream":"^1.0.1","buffercursor":"0.0.12","gartal":"^1.1.2","git-apply-delta":"0.0.7","git-list-pack":"0.0.10","ignore":"^3.3.6","ini":"^1.3.4","marky":"^1.2.0","minimisted":"^2.0.0","openpgp":"^2.5.10","pad":"^2.0.1","pako":"^1.0.5","pify":"^3.0.0","shasum":"^1.0.2","simple-concat":"^1.0.0","simple-get":"^2.7.0","through2":"^2.0.3"},"devDependencies":{"babel-plugin-external-helpers":"^6.22.0","babel-plugin-transform-es2015-modules-commonjs":"^6.24.1","babel-plugin-transform-object-rest-spread":"^6.23.0","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.6.0","babel-preset-flow":"^6.23.0","ban-sensitive-files":"^1.9.0","browserfs":"^1.4.3","browserify":"^14.4.0","browserify-shim":"^3.8.14","codecov":"^3.0.0","doctoc":"^1.3.0","esdoc":"^1.0.4","esdoc-ecmascript-proposal-plugin":"^1.0.0","esdoc-importpath-plugin":"^1.0.1","esdoc-standard-plugin":"^1.0.0","husky":"^0.14.3","jest":"^21.2.1","jest-fixtures":"^0.6.0","jsonfile":"^4.0.0","karma":"^1.7.1","karma-browserify":"^5.1.1","karma-chrome-launcher":"^2.2.0","karma-firefox-launcher":"^1.0.1","karma-sauce-launcher":"^1.2.0","karma-tap":"^3.1.1","lodash":"^4.17.4","nock":"^9.0.17","npm-run-all":"^4.1.1","nps":"^5.7.1","nps-utils":"^1.4.0","parse-header-stream":"^1.1.1","prettier-standard":"^7.0.3","rollup":"^0.51.6","rollup-plugin-babel":"^3.0.2","rollup-plugin-json":"^2.3.0","standard":"^10.0.3","stream-equal":"^1.0.1","tape":"^4.8.0","uglify-es":"^3.1.2","watch":"^1.0.2","watchify":"^3.9.0","semantic-release":"^8.2.0"},"ava":{"source":["dist/for-node/*"]},"browserify":{"transform":["browserify-shim"]},"browserify-shim":{"fs":"global:fs"},"testling":{"files":"testling/basic-test.js","browsers":["chrome/latest","firefox/latest","ie/latest"]},"jest":{"testPathIgnorePatterns":["__helpers__"],"testEnvironment":"node"}} | ||
{"name":"isomorphic-git","version":"0.0.34","description":"A pure JavaScript implementation of git for node and browsers!","typings":"./src/index.d.ts","main":"dist/for-node/","browser":"dist/for-browserify/","module":"dist/for-future/","unpkg":"dist/bundle.umd.min.js","bin":{"isogit":"./cli.js"},"engines":{"node":">=7.6.0"},"scripts":{"start":"nps","test":"nps test","precommit":"nps format toc","semantic-release":"semantic-release pre && npm publish && semantic-release post"},"repository":{"type":"git","url":"https://github.com/wmhilton/isomorphic-git.git"},"keywords":["git"],"author":"William Hilton <wmhilton@gmail.com>","license":"Unlicense","bugs":{"url":"https://github.com/wmhilton/isomorphic-git/issues"},"homepage":"https://github.com/wmhilton/isomorphic-git#readme","files":["dist","cli.js"],"dependencies":{"async-lock":"^1.0.0","await-stream-ready":"^1.0.1","babel-runtime":"^6.26.0","buffer":"^5.0.7","buffer-peek-stream":"^1.0.1","buffercursor":"0.0.12","crc":"^3.5.0","gartal":"^1.1.2","git-apply-delta":"0.0.7","git-list-pack":"0.0.10","ignore":"^3.3.6","ini":"^1.3.4","marky":"^1.2.0","minimisted":"^2.0.0","openpgp":"^2.5.10","pad":"^2.0.1","pako":"^1.0.5","pify":"^3.0.0","shasum":"^1.0.2","simple-concat":"^1.0.0","simple-get":"^2.7.0","through2":"^2.0.3"},"devDependencies":{"babel-plugin-external-helpers":"^6.22.0","babel-plugin-transform-es2015-modules-commonjs":"^6.24.1","babel-plugin-transform-object-rest-spread":"^6.23.0","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.6.0","babel-preset-flow":"^6.23.0","ban-sensitive-files":"^1.9.0","browserfs":"^1.4.3","browserify":"^14.4.0","browserify-shim":"^3.8.14","codecov":"^3.0.0","doctoc":"^1.3.0","esdoc":"^1.0.4","esdoc-ecmascript-proposal-plugin":"^1.0.0","esdoc-importpath-plugin":"^1.0.1","esdoc-standard-plugin":"^1.0.0","husky":"^0.14.3","jest":"^22.0.4","jest-fixtures":"^0.6.0","jsonfile":"^4.0.0","karma":"^2.0.0","karma-browserify":"^5.1.1","karma-chrome-launcher":"^2.2.0","karma-firefox-launcher":"^1.0.1","karma-sauce-launcher":"^1.2.0","karma-tap":"^3.1.1","lodash":"^4.17.4","nock":"^9.0.17","npm-run-all":"^4.1.1","nps":"^5.7.1","nps-utils":"^1.4.0","parse-header-stream":"^1.1.1","prettier-standard":"^8.0.0","rollup":"^0.53.0","rollup-plugin-babel":"^3.0.2","rollup-plugin-json":"^2.3.0","semantic-release":"^8.2.0","standard":"^10.0.3","stream-equal":"^1.0.1","tape":"^4.8.0","uglify-es":"^3.1.2","watch":"^1.0.2","watchify":"^3.9.0"},"ava":{"source":["dist/for-node/*"]},"browserify":{"transform":["browserify-shim"]},"browserify-shim":{"fs":"global:fs"},"testling":{"files":"testling/basic-test.js","browsers":["chrome/latest","firefox/latest","ie/latest"]},"jest":{"testPathIgnorePatterns":["__helpers__"],"testEnvironment":"node"}} |
Diffs of the remaining dist files are too big to display or not supported yet.
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
+ Added crc@^3.5.0
+ Added crc@3.8.0 (transitive)
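Version 0.0.34 pulls in crc as a new runtime dependency. The snippet below is a minimal sketch of how that library's CRC-32 routine is typically called; the packfile-index use case and all variable names are illustrative assumptions and are not taken from the isomorphic-git source.

// Minimal sketch of the `crc` package API (illustrative; not isomorphic-git code).
const crc = require('crc')

// crc.crc32 accepts a string or Buffer and returns an unsigned 32-bit integer.
const data = Buffer.from('example packed object data')
const checksum = crc.crc32(data)

// Git's packfile index (.idx v2) stores a CRC-32 per object as a 4-byte big-endian value.
const bytes = Buffer.alloc(4)
bytes.writeUInt32BE(checksum, 0)
console.log(checksum.toString(16), bytes)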