isomorphic-git - npm Package Compare versions

Comparing version 0.0.28 to 0.0.29

dist/for-browserify/models.js

@@ -12,4 +12,6 @@ 'use strict';

var _createClass = _interopDefault(require('babel-runtime/helpers/createClass'));
var _Map = _interopDefault(require('babel-runtime/core-js/map'));
var path = _interopDefault(require('path'));
var pify = _interopDefault(require('pify'));
var utils_js = require('./utils.js');
var _getIterator = _interopDefault(require('babel-runtime/core-js/get-iterator'));

@@ -21,2 +23,5 @@ var _extends = _interopDefault(require('babel-runtime/helpers/extends'));

var buffer = require('buffer');
var _Object$getPrototypeOf = _interopDefault(require('babel-runtime/core-js/object/get-prototype-of'));
var _possibleConstructorReturn = _interopDefault(require('babel-runtime/helpers/possibleConstructorReturn'));
var _inherits = _interopDefault(require('babel-runtime/helpers/inherits'));
var openpgp = require('openpgp/dist/openpgp.min.js');

@@ -35,6 +40,6 @@ var _Object$keys = _interopDefault(require('babel-runtime/core-js/object/keys'));

var _Array$from = _interopDefault(require('babel-runtime/core-js/array/from'));
var _Map = _interopDefault(require('babel-runtime/core-js/map'));
var _Symbol$iterator = _interopDefault(require('babel-runtime/core-js/symbol/iterator'));
var sortby = _interopDefault(require('lodash/sortBy'));
var delayedReleases = new _Map();
/**

@@ -51,2 +56,3 @@ * This is just a collection of helper functions really. At least that's how it started.

this._mkdir = pify(fs.mkdir.bind(fs));
this._rmdir = pify(fs.rmdir.bind(fs));
this._unlink = pify(fs.unlink.bind(fs));

@@ -432,2 +438,119 @@ this._stat = pify(fs.stat.bind(fs));

}()
}, {
key: 'lock',
value: function () {
var _ref9 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee9(filename) {
var triesLeft = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 3;
return _regeneratorRuntime.wrap(function _callee9$(_context9) {
while (1) {
switch (_context9.prev = _context9.next) {
case 0:
if (!delayedReleases.has(filename)) {
_context9.next = 4;
break;
}
clearTimeout(delayedReleases.get(filename));
delayedReleases.delete(filename);
return _context9.abrupt('return');
case 4:
if (!(triesLeft === 0)) {
_context9.next = 6;
break;
}
throw new Error('Unable to acquire lockfile \'' + filename + '\'. Exhausted tries.');
case 6:
_context9.prev = 6;
_context9.next = 9;
return this.mkdir(filename + '.lock');
case 9:
_context9.next = 18;
break;
case 11:
_context9.prev = 11;
_context9.t0 = _context9['catch'](6);
if (!(_context9.t0.code === 'EEXIST')) {
_context9.next = 18;
break;
}
_context9.next = 16;
return utils_js.sleep(100);
case 16:
_context9.next = 18;
return this.lock(filename, triesLeft - 1);
case 18:
case 'end':
return _context9.stop();
}
}
}, _callee9, this, [[6, 11]]);
}));
function lock(_x14) {
return _ref9.apply(this, arguments);
}
return lock;
}()
}, {
key: 'unlock',
value: function () {
var _ref10 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee11(filename) {
var _this3 = this;
var delayRelease = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 50;
return _regeneratorRuntime.wrap(function _callee11$(_context11) {
while (1) {
switch (_context11.prev = _context11.next) {
case 0:
if (!delayedReleases.has(filename)) {
_context11.next = 2;
break;
}
throw new Error('Cannot double-release lockfile');
case 2:
// Basically, we lie and say it was deleted ASAP.
// But really we wait a bit to see if you want to acquire it again.
delayedReleases.set(filename, setTimeout(_asyncToGenerator(_regeneratorRuntime.mark(function _callee10() {
return _regeneratorRuntime.wrap(function _callee10$(_context10) {
while (1) {
switch (_context10.prev = _context10.next) {
case 0:
delayedReleases.delete(filename);
_context10.next = 3;
return _this3._rmdir(filename + '.lock');
case 3:
case 'end':
return _context10.stop();
}
}
}, _callee10, _this3);
})), delayRelease));
case 3:
case 'end':
return _context11.stop();
}
}
}, _callee11, this);
}));
function unlock(_x16) {
return _ref10.apply(this, arguments);
}
return unlock;
}()
}]);

@@ -638,3 +761,111 @@

}
}], [{
key: 'fromPayloadSignature',
value: function fromPayloadSignature(_ref) {
var payload = _ref.payload,
signature = _ref.signature;
var headers = GitCommit.justHeaders(payload);
var message$$1 = GitCommit.justMessage(payload);
var commit = normalize(headers + '\ngpgsig' + indent(signature) + '\n' + message$$1);
return new GitCommit(commit);
}
}, {
key: 'from',
value: function from(commit) {
return new GitCommit(commit);
}
}, {
key: 'justMessage',
value: function justMessage(commit) {
return normalize(commit.slice(commit.indexOf('\n\n') + 2));
}
}, {
key: 'justHeaders',
value: function justHeaders(commit) {
return commit.slice(0, commit.indexOf('\n\n'));
}
}, {
key: 'renderHeaders',
value: function renderHeaders(obj) {
var headers = '';
if (obj.tree) {
headers += 'tree ' + obj.tree + '\n';
} else {
headers += 'tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904\n'; // the null tree
}
if (obj.parent) {
headers += 'parent';
var _iteratorNormalCompletion3 = true;
var _didIteratorError3 = false;
var _iteratorError3 = undefined;
try {
for (var _iterator3 = _getIterator(obj.parent), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
var p = _step3.value;
headers += ' ' + p;
}
} catch (err) {
_didIteratorError3 = true;
_iteratorError3 = err;
} finally {
try {
if (!_iteratorNormalCompletion3 && _iterator3.return) {
_iterator3.return();
}
} finally {
if (_didIteratorError3) {
throw _iteratorError3;
}
}
}
headers += '\n';
}
var author = obj.author;
headers += 'author ' + author.name + ' <' + author.email + '> ' + author.timestamp + ' ' + formatTimezoneOffset(author.timezoneOffset) + '\n';
var committer = obj.committer || obj.author;
headers += 'committer ' + committer.name + ' <' + committer.email + '> ' + committer.timestamp + ' ' + formatTimezoneOffset(committer.timezoneOffset) + '\n';
if (obj.gpgsig) {
headers += 'gpgsig' + indent(obj.gpgsig);
}
return headers;
}
}, {
key: 'render',
value: function render(obj) {
return GitCommit.renderHeaders(obj) + '\n' + normalize(obj.message);
}
}]);
return GitCommit;
}();
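
For orientation, renderHeaders above assembles the canonical git commit header block: a tree line (falling back to the well-known empty-tree hash), optional parent lines, then author and committer lines carrying epoch timestamps and timezone offsets. A minimal standalone sketch of that format (not the library code; the names and values below are illustrative):

// Minimal sketch of the header block renderHeaders produces (illustrative only).
function formatTz(minutes) {
  // git writes '+hhmm' / '-hhmm'; sign handling here is simplified for offset 0
  const sign = minutes > 0 ? '-' : '+';
  const abs = Math.abs(minutes);
  return sign + String(Math.trunc(abs / 60)).padStart(2, '0') + String(abs % 60).padStart(2, '0');
}
function renderHeadersSketch({ tree, parent = [], author }) {
  let headers = 'tree ' + (tree || '4b825dc642cb6eb9a060e54bf8d69288fbee4904') + '\n'; // the null tree
  if (parent.length > 0) headers += 'parent' + parent.map(p => ' ' + p).join('') + '\n';
  headers += `author ${author.name} <${author.email}> ${author.timestamp} ${formatTz(author.timezoneOffset)}\n`;
  headers += `committer ${author.name} <${author.email}> ${author.timestamp} ${formatTz(author.timezoneOffset)}\n`;
  return headers;
}
console.log(renderHeadersSketch({
  parent: [],
  author: { name: 'Alice', email: 'alice@example.com', timestamp: 1500000000, timezoneOffset: 0 }
}));
// tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904
// author Alice <alice@example.com> 1500000000 +0000
// committer Alice <alice@example.com> 1500000000 +0000
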
function normalize$1(str) {
// remove all <CR>
str = str.replace(/\r/g, '');
// no extra newlines up front
str = str.replace(/^\n+/, '');
// and a single newline at the end
str = str.replace(/\n+$/, '') + '\n';
return str;
}
function indent$1(str) {
return str.trim().split('\n').map(function (x) {
return ' ' + x;
}).join('\n') + '\n';
}
var SignedGitCommit = function (_GitCommit) {
_inherits(SignedGitCommit, _GitCommit);
function SignedGitCommit() {
_classCallCheck(this, SignedGitCommit);
return _possibleConstructorReturn(this, (SignedGitCommit.__proto__ || _Object$getPrototypeOf(SignedGitCommit)).apply(this, arguments));
}
_createClass(SignedGitCommit, [{
key: 'sign',

@@ -666,4 +897,4 @@ value: function () {

// renormalize the line endings to the one true line-ending
signature = normalize(signature);
signedCommit = headers + '\n' + 'gpgsig' + indent(signature) + '\n' + message$$1;
signature = normalize$1(signature);
signedCommit = headers + '\n' + 'gpgsig' + indent$1(signature) + '\n' + message$$1;
// return a new commit object

@@ -747,83 +978,10 @@

}], [{
key: 'fromPayloadSignature',
value: function fromPayloadSignature(_ref5) {
var payload = _ref5.payload,
signature = _ref5.signature;
var headers = GitCommit.justHeaders(payload);
var message$$1 = GitCommit.justMessage(payload);
var commit = normalize(headers + '\ngpgsig' + indent(signature) + '\n' + message$$1);
return new GitCommit(commit);
}
}, {
key: 'from',
value: function from(commit) {
return new GitCommit(commit);
return new SignedGitCommit(commit);
}
}, {
key: 'justMessage',
value: function justMessage(commit) {
return normalize(commit.slice(commit.indexOf('\n\n') + 2));
}
}, {
key: 'justHeaders',
value: function justHeaders(commit) {
return commit.slice(0, commit.indexOf('\n\n'));
}
}, {
key: 'renderHeaders',
value: function renderHeaders(obj) {
var headers = '';
if (obj.tree) {
headers += 'tree ' + obj.tree + '\n';
} else {
headers += 'tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904\n'; // the null tree
}
if (obj.parent) {
headers += 'parent';
var _iteratorNormalCompletion3 = true;
var _didIteratorError3 = false;
var _iteratorError3 = undefined;
try {
for (var _iterator3 = _getIterator(obj.parent), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
var p = _step3.value;
headers += ' ' + p;
}
} catch (err) {
_didIteratorError3 = true;
_iteratorError3 = err;
} finally {
try {
if (!_iteratorNormalCompletion3 && _iterator3.return) {
_iterator3.return();
}
} finally {
if (_didIteratorError3) {
throw _iteratorError3;
}
}
}
headers += '\n';
}
var author = obj.author;
headers += 'author ' + author.name + ' <' + author.email + '> ' + author.timestamp + ' ' + formatTimezoneOffset(author.timezoneOffset) + '\n';
var committer = obj.committer || obj.author;
headers += 'committer ' + committer.name + ' <' + committer.email + '> ' + committer.timestamp + ' ' + formatTimezoneOffset(committer.timezoneOffset) + '\n';
if (obj.gpgsig) {
headers += 'gpgsig' + indent(obj.gpgsig);
}
return headers;
}
}, {
key: 'render',
value: function render(obj) {
return GitCommit.renderHeaders(obj) + '\n' + normalize(obj.message);
}
}]);
return GitCommit;
}();
return SignedGitCommit;
}(GitCommit);
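
Note the one substantive change in this block: SignedGitCommit.from now returns new SignedGitCommit(commit) instead of new GitCommit(commit). That matters because the inherited static constructs the base class by name, so the resulting object would lack sign() and the other signing helpers. A tiny illustration of the pitfall, using hypothetical classes:

// Hypothetical illustration of why the static from() override is needed.
class Base { constructor(x) { this.x = x; } static from(x) { return new Base(x); } }
class Sub extends Base { sign() { return 'signed:' + this.x; } }
console.log(Sub.from('c') instanceof Sub);   // false: inherited from() built a Base
class Sub2 extends Base {
  static from(x) { return new Sub2(x); }     // override constructs the subclass
  sign() { return 'signed:' + this.x; }
}
console.log(Sub2.from('c') instanceof Sub2); // true: sign() is now available
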

@@ -1860,2 +2018,3 @@ var complexKeys = ['remote', 'branch'];

exports.GitCommit = GitCommit;
exports.SignedGitCommit = SignedGitCommit;
exports.GitConfig = GitConfig;

@@ -1862,0 +2021,0 @@ exports.GitObject = GitObject;

@@ -7,83 +7,9 @@ 'use strict';

var _regeneratorRuntime = _interopDefault(require('babel-runtime/regenerator'));
var _asyncToGenerator = _interopDefault(require('babel-runtime/helpers/asyncToGenerator'));
var pify = _interopDefault(require('pify'));
var _Promise = _interopDefault(require('babel-runtime/core-js/promise'));
var _getIterator = _interopDefault(require('babel-runtime/core-js/get-iterator'));
var _Map = _interopDefault(require('babel-runtime/core-js/map'));
var path = _interopDefault(require('path'));
var _regeneratorRuntime = _interopDefault(require('babel-runtime/regenerator'));
var _Promise = _interopDefault(require('babel-runtime/core-js/promise'));
var _asyncToGenerator = _interopDefault(require('babel-runtime/helpers/asyncToGenerator'));
var fs;
var fs$1 = function () {
return fs;
};
function setfs(newFs) {
fs = newFs;
}
var rm = function () {
var _ref = _asyncToGenerator(_regeneratorRuntime.mark(function _callee(filepath) {
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.prev = 0;
_context.next = 3;
return pify(fs$1().unlink)(filepath);
case 3:
_context.next = 9;
break;
case 5:
_context.prev = 5;
_context.t0 = _context['catch'](0);
if (!(_context.t0.code !== 'ENOENT')) {
_context.next = 9;
break;
}
throw _context.t0;
case 9:
case 'end':
return _context.stop();
}
}
}, _callee, this, [[0, 5]]);
}));
return function rm(_x) {
return _ref.apply(this, arguments);
};
}();
// An async exists variant
var exists = function () {
var _ref = _asyncToGenerator(_regeneratorRuntime.mark(function _callee(file, options) {
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
return _context.abrupt('return', new _Promise(function (resolve, reject) {
fs$1().stat(file, function (err, stats) {
if (err) return err.code === 'ENOENT' ? resolve(false) : reject(err);
resolve(true);
});
}));
case 1:
case 'end':
return _context.stop();
}
}
}, _callee, this);
}));
return function exists(_x, _x2) {
return _ref.apply(this, arguments);
};
}();
// @flow

@@ -191,278 +117,2 @@ /*::

// @flow
// This is modeled after the lockfile strategy used by the git source code.
var delayedReleases = new _Map();
var lock = function () {
var _ref = _asyncToGenerator(_regeneratorRuntime.mark(function _callee(filename /*: string */
) {
var triesLeft /*: number */ = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 3;
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
if (!delayedReleases.has(filename)) {
_context.next = 4;
break;
}
clearTimeout(delayedReleases.get(filename));
delayedReleases.delete(filename);
return _context.abrupt('return');
case 4:
if (!(triesLeft === 0)) {
_context.next = 6;
break;
}
throw new Error('Unable to acquire lockfile \'' + filename + '\'. Exhausted tries.');
case 6:
_context.prev = 6;
_context.next = 9;
return pify(fs$1().mkdir)(filename + '.lock');
case 9:
_context.next = 18;
break;
case 11:
_context.prev = 11;
_context.t0 = _context['catch'](6);
if (!(_context.t0.code === 'EEXIST')) {
_context.next = 18;
break;
}
_context.next = 16;
return sleep(100);
case 16:
_context.next = 18;
return lock(filename, triesLeft - 1);
case 18:
case 'end':
return _context.stop();
}
}
}, _callee, this, [[6, 11]]);
}));
return function lock(_x2) {
return _ref.apply(this, arguments);
};
}();
var unlock = function () {
var _ref2 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee3(filename /*: string */
) {
var _this = this;
return _regeneratorRuntime.wrap(function _callee3$(_context3) {
while (1) {
switch (_context3.prev = _context3.next) {
case 0:
if (!delayedReleases.has(filename)) {
_context3.next = 2;
break;
}
throw new Error('Cannot double-release lockfile');
case 2:
// Basically, we lie and say it was deleted ASAP.
// But really we wait a bit to see if you want to acquire it again.
delayedReleases.set(filename, setTimeout(_asyncToGenerator(_regeneratorRuntime.mark(function _callee2() {
return _regeneratorRuntime.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
delayedReleases.delete(filename);
_context2.next = 3;
return pify(fs$1().rmdir)(filename + '.lock');
case 3:
case 'end':
return _context2.stop();
}
}
}, _callee2, _this);
}))));
case 3:
case 'end':
return _context3.stop();
}
}
}, _callee3, this);
}));
return function unlock(_x4) {
return _ref2.apply(this, arguments);
};
}();
// @flow
var mkdir = function () {
var _ref = _asyncToGenerator(_regeneratorRuntime.mark(function _callee(dirpath /*: string */) {
var parent;
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.prev = 0;
_context.next = 3;
return pify(fs$1().mkdir)(dirpath);
case 3:
return _context.abrupt('return');
case 6:
_context.prev = 6;
_context.t0 = _context['catch'](0);
if (!(_context.t0 === null)) {
_context.next = 10;
break;
}
return _context.abrupt('return');
case 10:
if (!(_context.t0.code === 'EEXIST')) {
_context.next = 12;
break;
}
return _context.abrupt('return');
case 12:
if (!(_context.t0.code === 'ENOENT')) {
_context.next = 20;
break;
}
parent = path.dirname(dirpath);
// Check to see if we've gone too far
if (!(parent === '.' || parent === '/' || parent === dirpath)) {
_context.next = 16;
break;
}
throw _context.t0;
case 16:
_context.next = 18;
return mkdir(parent);
case 18:
_context.next = 20;
return mkdir(dirpath);
case 20:
case 'end':
return _context.stop();
}
}
}, _callee, this, [[0, 6]]);
}));
return function mkdir(_x) {
return _ref.apply(this, arguments);
};
}();
var mkdirs = function () {
var _ref2 = _asyncToGenerator(_regeneratorRuntime.mark(function _callee2(dirlist /*: string[] */) {
return _regeneratorRuntime.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
return _context2.abrupt('return', _Promise.all(dirlist.map(mkdir)));
case 1:
case 'end':
return _context2.stop();
}
}
}, _callee2, this);
}));
return function mkdirs(_x2) {
return _ref2.apply(this, arguments);
};
}();
// An async readFile variant that returns null instead of throwing errors
var read = function () {
var _ref = _asyncToGenerator(_regeneratorRuntime.mark(function _callee(file, options) {
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
return _context.abrupt('return', new _Promise(function (resolve, reject) {
fs$1().readFile(file, options, function (err, file) {
return err ? resolve(null) : resolve(file);
});
}));
case 1:
case 'end':
return _context.stop();
}
}
}, _callee, this);
}));
return function read(_x, _x2) {
return _ref.apply(this, arguments);
};
}();
// @flow
// An async writeFile variant that automatically creates missing directories,
// and returns null instead of throwing errors.
var write = function () {
var _ref = _asyncToGenerator(_regeneratorRuntime.mark(function _callee(filepath /*: string */
, contents /*: string|Buffer */
) {
var options /*: Object */ = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.prev = 0;
_context.next = 3;
return pify(fs$1().writeFile)(filepath, contents, options);
case 3:
return _context.abrupt('return');
case 6:
_context.prev = 6;
_context.t0 = _context['catch'](0);
_context.next = 10;
return mkdir(path.dirname(filepath));
case 10:
_context.next = 12;
return pify(fs$1().writeFile)(filepath, contents, options);
case 12:
case 'end':
return _context.stop();
}
}
}, _callee, this, [[0, 6]]);
}));
return function write(_x2, _x3) {
return _ref.apply(this, arguments);
};
}();
var name = "isomorphic-git";

@@ -632,13 +282,4 @@ var version = "0.0.0-development";

exports.rm = rm;
exports.exists = exists;
exports.flatFileListToDirectoryStructure = flatFileListToDirectoryStructure;
exports.fs = fs$1;
exports.setfs = setfs;
exports.lock = lock;
exports.unlock = unlock;
exports.mkdirs = mkdirs;
exports.read = read;
exports.sleep = sleep;
exports.write = write;
exports.pkg = _package$1;

@@ -645,0 +286,0 @@ exports.oauth2 = oauth2;

import path from 'path';
import { GitConfigManager, GitIgnoreManager, GitIndexManager, GitObjectManager, GitRefManager, GitRemoteHTTP, GitShallowManager } from './managers.js';
import { FileSystem, GitCommit, GitPktLine, GitTree } from './models.js';
import { FileSystem, GitCommit, GitPktLine, GitTree, SignedGitCommit } from './models.js';
import { Buffer } from 'buffer';

@@ -11,3 +11,3 @@ import { PassThrough } from 'stream';

import marky from 'marky';
import { flatFileListToDirectoryStructure, fs, pkg, setfs } from './utils.js';
import { flatFileListToDirectoryStructure, pkg } from './utils.js';
import pad from 'pad';

@@ -31,9 +31,9 @@ import pako from 'pako';

async function add({ gitdir, workdir, fs: _fs }, { filepath }) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
const type = 'blob';
const object = await fs$$1.read(path.join(workdir, filepath));
const object = await fs.read(path.join(workdir, filepath));
if (object === null) throw new Error(`Could not read file '${filepath}'`);
const oid = await GitObjectManager.write({ fs: fs$$1, gitdir, type, object });
await GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
let stats = await fs$$1._lstat(path.join(workdir, filepath));
const oid = await GitObjectManager.write({ fs, gitdir, type, object });
await GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
let stats = await fs._lstat(path.join(workdir, filepath));
index.insert({ filepath, stats, oid });

@@ -55,8 +55,8 @@ });

async function init({ gitdir, fs: _fs }) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
let folders = ['hooks', 'info', 'objects/info', 'objects/pack', 'refs/heads', 'refs/tags'];
folders = folders.map(dir => gitdir + '/' + dir);
await fs$$1.mkdirs(folders);
await fs$$1.write(gitdir + '/config', '[core]\n' + '\trepositoryformatversion = 0\n' + '\tfilemode = false\n' + '\tbare = false\n' + '\tlogallrefupdates = true\n' + '\tsymlinks = false\n' + '\tignorecase = true\n');
await fs$$1.write(gitdir + '/HEAD', 'ref: refs/heads/master\n');
await fs.mkdirs(folders);
await fs.write(gitdir + '/config', '[core]\n' + '\trepositoryformatversion = 0\n' + '\tfilemode = false\n' + '\tbare = false\n' + '\tlogallrefupdates = true\n' + '\tsymlinks = false\n' + '\tignorecase = true\n');
await fs.write(gitdir + '/HEAD', 'ref: refs/heads/master\n');
}
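
The directory list and the two writes above fully determine what init leaves on disk. A small sketch of the resulting layout (the gitdir value is illustrative):

// Illustrative: the skeleton init() creates for gitdir = '/tmp/demo/.git'.
const gitdir = '/tmp/demo/.git';
const skeleton = [
  ...['hooks', 'info', 'objects/info', 'objects/pack', 'refs/heads', 'refs/tags']
    .map(dir => gitdir + '/' + dir + '/'),   // empty directories from fs.mkdirs
  gitdir + '/config',                        // the [core] defaults written above
  gitdir + '/HEAD'                           // 'ref: refs/heads/master\n'
];
console.log(skeleton.join('\n'));
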

@@ -90,5 +90,5 @@

async function config({ gitdir, fs: _fs }, args) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
let { path: path$$1, value } = args;
const config = await GitConfigManager.get({ fs: fs$$1, gitdir });
const config = await GitConfigManager.get({ fs, gitdir });
// This carefully distinguishes between

@@ -103,3 +103,3 @@ // 1) there is no 'value' argument (do a "get")

await config.set(path$$1, value);
await GitConfigManager.save({ fs: fs$$1, gitdir, config });
await GitConfigManager.save({ fs, gitdir, config });
}

@@ -132,3 +132,3 @@ }

*/
async function fetch({ gitdir, fs: fs$$1 }, {
async function fetch({ gitdir, fs }, {
ref = 'HEAD',

@@ -147,3 +147,3 @@ remote,

gitdir,
fs: fs$$1
fs
}, {

@@ -160,3 +160,3 @@ ref,

});
await unpack({ fs: fs$$1, gitdir }, { inputStream: response.packfile, onprogress });
await unpack({ fs, gitdir }, { inputStream: response.packfile, onprogress });
}

@@ -175,3 +175,3 @@

}) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
if (depth !== null) {

@@ -185,3 +185,3 @@ if (Number.isNaN(parseInt(depth))) {

if (url === undefined) {
url = await config({ fs: fs$$1, gitdir }, {
url = await config({ fs, gitdir }, {
path: `remote.${remote}.url`

@@ -212,3 +212,3 @@ });

await GitRefManager.updateRemoteRefs({
fs: fs$$1,
fs,
gitdir,

@@ -220,3 +220,3 @@ remote,

let want = await GitRefManager.resolve({
fs: fs$$1,
fs,
gitdir,

@@ -230,3 +230,3 @@ ref: `refs/remotes/${remote}/${ref}`

packstream.write(GitPktLine.encode(`want ${want} ${capabilities}\n`));
let oids = await GitShallowManager.read({ fs: fs$$1, gitdir });
let oids = await GitShallowManager.read({ fs, gitdir });
if (oids.size > 0 && remoteHTTP.capabilities.has('shallow')) {

@@ -249,3 +249,3 @@ for (let oid of oids) {

try {
have = await GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
have = await GitRefManager.resolve({ fs, gitdir, ref });
} catch (err) {}

@@ -266,3 +266,3 @@ if (have) {

oids.add(oid);
await GitShallowManager.write({ fs: fs$$1, gitdir, oids });
await GitShallowManager.write({ fs, gitdir, oids });
} else if (line.startsWith('unshallow')) {

@@ -274,3 +274,3 @@ let oid = line.slice(-41).trim();

oids.delete(oid);
await GitShallowManager.write({ fs: fs$$1, gitdir, oids });
await GitShallowManager.write({ fs, gitdir, oids });
}
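
The want/shallow lines above travel in git's pkt-line framing, which is what GitPktLine.encode produces: each line gets a four-digit lowercase-hex length prefix that counts the prefix itself plus the payload, and '0000' is the flush packet. A minimal sketch of that framing (not the library code; it assumes ASCII payloads for simplicity):

// Minimal pkt-line framing sketch (assumes ASCII payloads).
function pktLine(payload) {
  const length = payload.length + 4;                 // 4-byte hex header counts itself
  return length.toString(16).padStart(4, '0') + payload;
}
const flush = '0000';                                // flush-pkt: no length, no payload
console.log(pktLine('want 49ea4f7... multi_ack\n'));
// -> '001ewant 49ea4f7... multi_ack\n' (real oids are 40 hex chars, so real lengths differ)
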

@@ -311,3 +311,3 @@ next(null, data);

async function unpack({ gitdir, fs: _fs }, { inputStream, onprogress }) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
return new Promise(function (resolve, reject) {

@@ -345,3 +345,3 @@ // Read header

let { object, type } = await GitObjectManager.read({
fs: fs$$1,
fs,
gitdir,

@@ -356,3 +356,3 @@ oid

let newoid = await GitObjectManager.write({
fs: fs$$1,
fs,
gitdir,

@@ -378,3 +378,3 @@ type,

let { type, object } = await GitObjectManager.read({
fs: fs$$1,
fs,
gitdir,

@@ -385,3 +385,3 @@ oid: referenceOid

let oid = await GitObjectManager.write({
fs: fs$$1,
fs,
gitdir,

@@ -396,3 +396,3 @@ type,

let oid = await GitObjectManager.write({
fs: fs$$1,
fs,
gitdir,

@@ -428,6 +428,6 @@ type,

async function writeTreeToDisk({ gitdir, workdir, index, prefix, tree, fs: fs$$1 }) {
async function writeTreeToDisk({ gitdir, workdir, index, prefix, tree, fs }) {
for (let entry of tree) {
let { type, object } = await GitObjectManager.read({
fs: fs$$1,
fs,
gitdir,

@@ -440,4 +440,4 @@ oid: entry.oid

case 'blob':
await fs$$1.write(filepath, object);
let stats = await fs$$1._lstat(filepath);
await fs.write(filepath, object);
let stats = await fs._lstat(filepath);
index.insert({

@@ -457,3 +457,3 @@ filepath: entrypath,

tree,
fs: fs$$1
fs
});

@@ -480,3 +480,3 @@ break;

async function checkout({ workdir, gitdir, fs: _fs }, { remote, ref }) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
// Get tree oid

@@ -488,3 +488,3 @@ let oid;

remoteRef = await GitRefManager.resolve({
fs: fs$$1,
fs,
gitdir,

@@ -498,5 +498,5 @@ ref: `${remote}/HEAD`,

}
oid = await GitRefManager.resolve({ fs: fs$$1, gitdir, ref: remoteRef });
oid = await GitRefManager.resolve({ fs, gitdir, ref: remoteRef });
// Make the remote ref our own!
await fs$$1.write(`${gitdir}/refs/heads/${ref}`, oid + '\n');
await fs.write(`${gitdir}/refs/heads/${ref}`, oid + '\n');
} else {

@@ -506,5 +506,5 @@ if (ref === undefined) {

}
oid = await GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
oid = await GitRefManager.resolve({ fs, gitdir, ref });
}
let commit = await GitObjectManager.read({ fs: fs$$1, gitdir, oid });
let commit = await GitObjectManager.read({ fs, gitdir, oid });
if (commit.type !== 'commit') {

@@ -516,7 +516,7 @@ throw new Error(`Unexpected type: ${commit.type}`);

// Get top-level tree
let { type, object } = await GitObjectManager.read({ fs: fs$$1, gitdir, oid: sha });
let { type, object } = await GitObjectManager.read({ fs, gitdir, oid: sha });
if (type !== 'tree') throw new Error(`Unexpected type: ${type}`);
let tree = GitTree.from(object);
// Acquire a lock on the index
await GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
await GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
// TODO: Big optimization possible here.

@@ -528,3 +528,3 @@ // Instead of deleting and rewriting everything, only delete files

try {
await fs$$1.rm(path.join(workdir, entry.path));
await fs.rm(path.join(workdir, entry.path));
} catch (err) {}

@@ -534,5 +534,5 @@ }

// Write files. TODO: Write them atomically
await writeTreeToDisk({ fs: fs$$1, gitdir, workdir, index, prefix: '', tree });
await writeTreeToDisk({ fs, gitdir, workdir, index, prefix: '', tree });
// Update HEAD TODO: Handle non-branch cases
fs$$1.write(`${gitdir}/HEAD`, `ref: refs/heads/${ref}`);
fs.write(`${gitdir}/HEAD`, `ref: refs/heads/${ref}`);
});

@@ -565,3 +565,3 @@ }

*/
async function clone({ workdir, gitdir, fs: fs$$1 = fs() }, {
async function clone({ workdir, gitdir, fs: _fs }, {
url,

@@ -578,9 +578,9 @@ remote,

}) {
setfs(fs$$1);
const fs = new FileSystem(_fs);
remote = remote || 'origin';
await init({ gitdir, fs: fs$$1 });
await init({ gitdir, fs });
// Add remote
await config({
gitdir,
fs: fs$$1
fs
}, {

@@ -593,3 +593,3 @@ path: `remote.${remote}.url`,

gitdir,
fs: fs$$1
fs
}, {

@@ -610,3 +610,3 @@ ref,

gitdir,
fs: fs$$1
fs
}, {

@@ -618,3 +618,3 @@ ref,

async function constructTree({ fs: fs$$1, gitdir, inode }) /*: string */{
async function constructTree({ fs, gitdir, inode }) /*: string */{
// use depth first traversal

@@ -625,3 +625,3 @@ let children = inode.children;

inode.metadata.mode = '040000';
inode.metadata.oid = await constructTree({ fs: fs$$1, gitdir, inode });
inode.metadata.oid = await constructTree({ fs, gitdir, inode });
}

@@ -637,3 +637,3 @@ }

let oid = await GitObjectManager.write({
fs: fs$$1,
fs,
gitdir,

@@ -673,10 +673,10 @@ type: 'tree',

async function commit({ gitdir, fs: _fs }, { message, author, committer, privateKeys }) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
// Fill in missing arguments with default values
if (author === undefined) author = {};
if (author.name === undefined) {
author.name = await config({ fs: fs$$1, gitdir, path: 'user.name' });
author.name = await config({ fs, gitdir, path: 'user.name' });
}
if (author.email === undefined) {
author.email = await config({ fs: fs$$1, gitdir, path: 'user.email' });
author.email = await config({ fs, gitdir, path: 'user.email' });
}

@@ -687,8 +687,8 @@ committer = committer || author;

let oid;
await GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
await GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
const inode = flatFileListToDirectoryStructure(index.entries);
const treeRef = await constructTree({ fs: fs$$1, gitdir, inode });
const treeRef = await constructTree({ fs, gitdir, inode });
let parents;
try {
let parent = await GitRefManager.resolve({ fs: fs$$1, gitdir, ref: 'HEAD' });
let parent = await GitRefManager.resolve({ fs, gitdir, ref: 'HEAD' });
parents = [parent];

@@ -699,3 +699,3 @@ } catch (err) {

}
let comm = GitCommit.from({
let comm = SignedGitCommit.from({
tree: treeRef,

@@ -721,3 +721,3 @@ parent: parents,

oid = await GitObjectManager.write({
fs: fs$$1,
fs,
gitdir,

@@ -729,3 +729,3 @@ type: 'commit',

const branch = await GitRefManager.resolve({
fs: fs$$1,
fs,
gitdir,

@@ -735,3 +735,3 @@ ref: 'HEAD',

});
await fs$$1.write(path.join(gitdir, branch), oid + '\n');
await fs.write(path.join(gitdir, branch), oid + '\n');
});

@@ -752,5 +752,5 @@ return oid;

async function listFiles({ gitdir, fs: _fs }) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
let filenames;
await GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
await GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
filenames = index.entries.map(x => x.path);

@@ -772,6 +772,6 @@ });

async function listBranches({ gitdir, fs: _fs }) {
const fs$$1 = new FileSystem(_fs);
let files = await fs$$1.readdirDeep(`${gitdir}/refs/heads`);
const fs = new FileSystem(_fs);
let files = await fs.readdirDeep(`${gitdir}/refs/heads`);
files = files.map(x => x.replace(`${gitdir}/refs/heads/`, ''));
let text = await fs$$1.read(`${gitdir}/packed-refs`, { encoding: 'utf8' });
let text = await fs.read(`${gitdir}/packed-refs`, { encoding: 'utf8' });
if (text) {

@@ -826,3 +826,3 @@ let refs = text.trim().split('\n').filter(x => x.includes('refs/heads')).map(x => x.replace(/^.+ refs\/heads\//, '').trim()).filter(x => !files.includes(x)); // remove duplicates

}) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
let sinceTimestamp = since === undefined ? undefined : Math.floor(since.valueOf() / 1000);

@@ -832,4 +832,4 @@ // TODO: In the future, we may want to have an API where we return a

let commits = [];
let start = await GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
let { type, object } = await GitObjectManager.read({ fs: fs$$1, gitdir, oid: start });
let start = await GitRefManager.resolve({ fs, gitdir, ref });
let { type, object } = await GitObjectManager.read({ fs, gitdir, oid: start });
if (type !== 'commit') {

@@ -846,3 +846,3 @@ throw new Error(`The given ref ${ref} did not resolve to a commit but to a ${type}`);

try {
gitobject = await GitObjectManager.read({ fs: fs$$1, gitdir, oid });
gitobject = await GitObjectManager.read({ fs, gitdir, oid });
} catch (err) {

@@ -901,10 +901,10 @@ commits.push({

};async function push({ gitdir, fs: _fs }, { ref, remote, url, authUsername, authPassword }) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
// TODO: Figure out how pushing tags works. (This only works for branches.)
remote = remote || 'origin';
if (url === undefined) {
url = await config({ fs: fs$$1, gitdir }, { path: `remote.${remote}.url` });
url = await config({ fs, gitdir }, { path: `remote.${remote}.url` });
}
let fullRef = ref.startsWith('refs/') ? ref : `refs/heads/${ref}`;
let oid = await GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
let oid = await GitRefManager.resolve({ fs, gitdir, ref });
let httpRemote = new GitRemoteHTTP(url);

@@ -918,7 +918,7 @@ if (authUsername !== undefined && authPassword !== undefined) {

await httpRemote.preparePush();
let commits = await listCommits({ fs: fs$$1, gitdir }, {
let commits = await listCommits({ fs, gitdir }, {
start: [oid],
finish: httpRemote.refs.values()
});
let objects = await listObjects({ fs: fs$$1, gitdir }, { oids: commits });
let objects = await listObjects({ fs, gitdir }, { oids: commits });
let packstream = new PassThrough();

@@ -928,3 +928,3 @@ let oldoid = httpRemote.refs.get(fullRef) || '0000000000000000000000000000000000000000';

packstream.write(GitPktLine.flush());
pack({ fs: fs$$1, gitdir }, {
pack({ fs, gitdir }, {
oids: [...objects],

@@ -941,7 +941,7 @@ outputStream: packstream

async function listCommits({ gitdir, fs: _fs }, { start, finish }) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
let startingSet = new Set();
let finishingSet = new Set();
for (let ref of start) {
startingSet.add((await GitRefManager.resolve({ fs: fs$$1, gitdir, ref })));
startingSet.add((await GitRefManager.resolve({ fs, gitdir, ref })));
}

@@ -951,3 +951,3 @@ for (let ref of finish) {

try {
let oid = await GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
let oid = await GitRefManager.resolve({ fs, gitdir, ref });
finishingSet.add(oid);

@@ -963,3 +963,3 @@ } catch (err) {}

visited.add(oid);
let { type, object } = await GitObjectManager.read({ fs: fs$$1, gitdir, oid });
let { type, object } = await GitObjectManager.read({ fs, gitdir, oid });
if (type !== 'commit') {

@@ -989,3 +989,3 @@ throw new Error(`Expected type commit but type is ${type}`);

}) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
let visited /*: Set<string> */ = new Set();

@@ -998,3 +998,3 @@

visited.add(oid);
let { type, object } = await GitObjectManager.read({ fs: fs$$1, gitdir, oid });
let { type, object } = await GitObjectManager.read({ fs, gitdir, oid });
if (type === 'commit') {

@@ -1027,3 +1027,3 @@ let commit = GitCommit.from(object);

async function pack({ gitdir, fs: _fs }, { oids, outputStream }) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
let hash = crypto.createHash('sha1');

@@ -1068,3 +1068,3 @@ function write(chunk, enc) {

for (let oid of oids) {
let { type, object } = await GitObjectManager.read({ fs: fs$$1, gitdir, oid });
let { type, object } = await GitObjectManager.read({ fs, gitdir, oid });
writeObject({ write, object, stype: type });

@@ -1093,4 +1093,4 @@ }

async function remove({ gitdir, fs: _fs }, { filepath }) {
const fs$$1 = new FileSystem(_fs);
await GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
const fs = new FileSystem(_fs);
await GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
index.delete({ filepath });

@@ -1105,9 +1105,9 @@ });

async function verify({ gitdir, fs: _fs }, { ref, publicKeys }) {
const fs$$1 = new FileSystem(_fs);
const oid = await GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
const { type, object } = await GitObjectManager.read({ fs: fs$$1, gitdir, oid });
const fs = new FileSystem(_fs);
const oid = await GitRefManager.resolve({ fs, gitdir, ref });
const { type, object } = await GitObjectManager.read({ fs, gitdir, oid });
if (type !== 'commit') {
throw new Error(`git.verify() was expecting a ref type 'commit' but got type '${type}'`);
}
let commit = GitCommit.from(object);
let commit = SignedGitCommit.from(object);
let author = commit.headers().author;

@@ -1140,3 +1140,3 @@ let keys = await commit.listSigningKeys();

async function getOidAtPath({ fs: fs$$1, gitdir, tree, path: path$$1 }) {
async function getOidAtPath({ fs, gitdir, tree, path: path$$1 }) {
if (typeof path$$1 === 'string') path$$1 = path$$1.split('/');

@@ -1150,3 +1150,3 @@ let dirname = path$$1.shift();

let { type, object } = await GitObjectManager.read({
fs: fs$$1,
fs,
gitdir,

@@ -1157,3 +1157,3 @@ oid: entry.oid

let tree = GitTree.from(object);
return getOidAtPath({ fs: fs$$1, gitdir, tree, path: path$$1 });
return getOidAtPath({ fs, gitdir, tree, path: path$$1 });
}

@@ -1168,9 +1168,9 @@ if (type === 'blob') {

async function getHeadTree({ fs: fs$$1, gitdir }) {
async function getHeadTree({ fs, gitdir }) {
// Get the tree from the HEAD commit.
let oid = await GitRefManager.resolve({ fs: fs$$1, gitdir, ref: 'HEAD' });
let { object: cobject } = await GitObjectManager.read({ fs: fs$$1, gitdir, oid });
let oid = await GitRefManager.resolve({ fs, gitdir, ref: 'HEAD' });
let { object: cobject } = await GitObjectManager.read({ fs, gitdir, oid });
let commit = GitCommit.from(cobject);
let { object: tobject } = await GitObjectManager.read({
fs: fs$$1,
fs,
gitdir,

@@ -1211,3 +1211,3 @@ oid: commit.parseHeaders().tree

async function status({ workdir, gitdir, fs: _fs }, { filepath }) {
const fs$$1 = new FileSystem(_fs);
const fs = new FileSystem(_fs);
let ignored = await GitIgnoreManager.isIgnored({

@@ -1217,3 +1217,3 @@ gitdir,

filepath,
fs: fs$$1
fs
});

@@ -1223,5 +1223,5 @@ if (ignored) {

}
let headTree = await getHeadTree({ fs: fs$$1, gitdir });
let headTree = await getHeadTree({ fs, gitdir });
let treeOid = await getOidAtPath({
fs: fs$$1,
fs,
gitdir,

@@ -1233,3 +1233,3 @@ tree: headTree,

// Acquire a lock on the index
await GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
await GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
for (let entry of index) {

@@ -1244,3 +1244,3 @@ if (entry.path === filepath) {

try {
stats = await fs$$1._lstat(path.join(workdir, filepath));
stats = await fs._lstat(path.join(workdir, filepath));
} catch (err) {

@@ -1260,3 +1260,3 @@ if (err.code !== 'ENOENT') {

} else {
let object = await fs$$1.read(path.join(workdir, filepath));
let object = await fs.read(path.join(workdir, filepath));
let workdirOid = await GitObjectManager.hash({

@@ -1330,8 +1330,8 @@ gitdir,

async function findRoot({ fs: _fs }, { filepath }) {
const fs$$1 = new FileSystem(_fs);
return _findRoot(fs$$1, filepath);
const fs = new FileSystem(_fs);
return _findRoot(fs, filepath);
}
async function _findRoot(fs$$1, filepath) {
if (await fs$$1.exists(path.join(filepath, '.git'))) {
async function _findRoot(fs, filepath) {
if (await fs.exists(path.join(filepath, '.git'))) {
return filepath;

@@ -1341,3 +1341,3 @@ } else {

if (parent === filepath) throw new Error('Unable to find git root');
return _findRoot(fs$$1, parent);
return _findRoot(fs, parent);
}
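
_findRoot recurses upward until path.dirname stops changing, i.e. it has hit the filesystem root. A standalone sketch of the same walk-up (not the library code; `exists` here is a stand-in async predicate):

// Standalone walk-up sketch; `exists` is any async (path) => boolean predicate.
const path = require('path');
async function findRootSketch(exists, filepath) {
  if (await exists(path.join(filepath, '.git'))) return filepath;        // found the repo root
  const parent = path.dirname(filepath);
  if (parent === filepath) throw new Error('Unable to find git root');   // reached '/'
  return findRootSketch(exists, parent);
}
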

@@ -1407,9 +1407,8 @@ }

*/
constructor({ fs: fs$$1, dir, workdir, gitdir }) {
if (fs$$1) {
constructor({ fs, dir, workdir, gitdir }) {
if (fs) {
/**
* @type {FSModule}
*/
this.fs = fs$$1;
setfs(fs$$1);
this.fs = fs;
}

@@ -1416,0 +1415,0 @@ if (dir) {

import path from 'path';
import pify from 'pify';
import { sleep } from './utils.js';
import { Buffer } from 'buffer';

@@ -15,2 +16,3 @@ import { key, message, sign, util } from 'openpgp/dist/openpgp.min.js';

const delayedReleases = new Map();
/**

@@ -25,2 +27,3 @@ * This is just a collection of helper functions really. At least that's how it started.

this._mkdir = pify(fs.mkdir.bind(fs));
this._rmdir = pify(fs.rmdir.bind(fs));
this._unlink = pify(fs.unlink.bind(fs));

@@ -127,2 +130,34 @@ this._stat = pify(fs.stat.bind(fs));

}
async lock(filename, triesLeft = 3) {
// check to see if we still have it
if (delayedReleases.has(filename)) {
clearTimeout(delayedReleases.get(filename));
delayedReleases.delete(filename);
return;
}
if (triesLeft === 0) {
throw new Error(`Unable to acquire lockfile '${filename}'. Exhausted tries.`);
}
try {
await this.mkdir(`${filename}.lock`);
} catch (err) {
if (err.code === 'EEXIST') {
await sleep(100);
await this.lock(filename, triesLeft - 1);
}
}
}
async unlock(filename, delayRelease = 50) {
if (delayedReleases.has(filename)) {
throw new Error('Cannot double-release lockfile');
}
// Basically, we lie and say it was deleted ASAP.
// But really we wait a bit to see if you want to acquire it again.
delayedReleases.set(filename, setTimeout(async () => {
delayedReleases.delete(filename);
await this._rmdir(`${filename}.lock`);
}, delayRelease));
}
}
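
The class methods above encode the whole lockfile protocol: mkdir of '<filename>.lock' is atomic, so EEXIST means someone else holds the lock and we retry after 100 ms; unlock only schedules the rmdir 50 ms later, so an immediate re-lock can cancel the release and skip a filesystem round-trip. A standalone sketch of the same mkdir-as-mutex idea (not the library code, and without the delayed-release optimization):

// Standalone mkdir-as-mutex sketch using Node's promise-based fs.
const fsp = require('fs').promises;
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

async function withLock(filename, fn, triesLeft = 3) {
  try {
    await fsp.mkdir(filename + '.lock');            // atomic; EEXIST if already held
  } catch (err) {
    if (err.code === 'EEXIST' && triesLeft > 0) {
      await sleep(100);                             // back off, then retry
      return withLock(filename, fn, triesLeft - 1);
    }
    throw err;
  }
  try {
    return await fn();                              // critical section
  } finally {
    await fsp.rmdir(filename + '.lock');            // release
  }
}
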

@@ -305,3 +340,22 @@

}
}
function normalize$1(str) {
// remove all <CR>
str = str.replace(/\r/g, '');
// no extra newlines up front
str = str.replace(/^\n+/, '');
// and a single newline at the end
str = str.replace(/\n+$/, '') + '\n';
return str;
}
function indent$1(str) {
return str.trim().split('\n').map(x => ' ' + x).join('\n') + '\n';
}
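
normalize$1 and indent$1 exist for the gpgsig header: the ASCII-armored signature is folded into the commit by prefixing every line with a single space (git's continuation-line convention), and normalization pins the text to LF endings with exactly one trailing newline so object hashes stay stable. An illustration, with the signature body abbreviated:

// Illustrative: folding an armored signature under the 'gpgsig' header.
const indent = str => str.trim().split('\n').map(x => ' ' + x).join('\n') + '\n';
const signature = '-----BEGIN PGP SIGNATURE-----\n(base64 lines)\n-----END PGP SIGNATURE-----\n';
console.log('gpgsig' + indent(signature));
// gpgsig -----BEGIN PGP SIGNATURE-----
//  (base64 lines)
//  -----END PGP SIGNATURE-----
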
class SignedGitCommit extends GitCommit {
static from(commit) {
return new SignedGitCommit(commit);
}
async sign(privateKeys /*: string */) {

@@ -319,4 +373,4 @@ let commit = this.withoutSignature();

// renormalize the line endings to the one true line-ending
signature = normalize(signature);
let signedCommit = headers + '\n' + 'gpgsig' + indent(signature) + '\n' + message$$1;
signature = normalize$1(signature);
let signedCommit = headers + '\n' + 'gpgsig' + indent$1(signature) + '\n' + message$$1;
// return a new commit object

@@ -905,2 +959,2 @@ return GitCommit.from(signedCommit);

export { FileSystem, GitCommit, GitConfig, GitObject, GitPktLine, GitPackfile, GitIndex, GitTree };
export { FileSystem, GitCommit, SignedGitCommit, GitConfig, GitObject, GitPktLine, GitPackfile, GitIndex, GitTree };

@@ -1,30 +0,3 @@

import pify from 'pify';
import path from 'path';
var fs;
var fs$1 = function () {
return fs;
};
function setfs(newFs) {
fs = newFs;
}
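
This module-level fs variable and setfs are the heart of what the release removes: the utilities used to read a process-wide global, so callers had to remember to call setfs first. Throughout the new code every command instead receives fs explicitly and wraps it with const fs = new FileSystem(_fs). A hedged before/after sketch (paths illustrative):

// Before (implicit global, order-dependent):
//   setfs(require('fs'));
//   await rm('/tmp/repo/.git/index.lock');
// After (explicit dependency, per call; FileSystem comes from models.js):
//   const fs = new FileSystem(require('fs'));
//   await fs.rm('/tmp/repo/.git/index.lock');
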
async function rm(filepath) {
try {
await pify(fs$1().unlink)(filepath);
} catch (err) {
if (err.code !== 'ENOENT') throw err;
}
}
// An async exists variant
async function exists(file, options) {
return new Promise(function (resolve, reject) {
fs$1().stat(file, (err, stats) => {
if (err) return err.code === 'ENOENT' ? resolve(false) : reject(err);
resolve(true);
});
});
}
// @flow

@@ -91,89 +64,2 @@ /*::

// @flow
// This is modeled after the lockfile strategy used by the git source code.
const delayedReleases = new Map();
async function lock(filename /*: string */
, triesLeft /*: number */ = 3) {
// check to see if we still have it
if (delayedReleases.has(filename)) {
clearTimeout(delayedReleases.get(filename));
delayedReleases.delete(filename);
return;
}
if (triesLeft === 0) {
throw new Error(`Unable to acquire lockfile '${filename}'. Exhausted tries.`);
}
try {
await pify(fs$1().mkdir)(`${filename}.lock`);
} catch (err) {
if (err.code === 'EEXIST') {
await sleep(100);
await lock(filename, triesLeft - 1);
}
}
}
async function unlock(filename /*: string */
, delayRelease /*: number */ = 50) {
if (delayedReleases.has(filename)) {
throw new Error('Cannot double-release lockfile');
}
// Basically, we lie and say it was deleted ASAP.
// But really we wait a bit to see if you want to acquire it again.
delayedReleases.set(filename, setTimeout(async () => {
delayedReleases.delete(filename);
await pify(fs$1().rmdir)(`${filename}.lock`);
}));
}
// @flow
async function mkdir(dirpath /*: string */) {
try {
await pify(fs$1().mkdir)(dirpath);
return;
} catch (err) {
// If err is null then operation succeeded!
if (err === null) return;
// If the directory already exists, that's OK!
if (err.code === 'EEXIST') return;
// If we got a "no such file or directory error" backup and try again.
if (err.code === 'ENOENT') {
let parent = path.dirname(dirpath);
// Check to see if we've gone too far
if (parent === '.' || parent === '/' || parent === dirpath) throw err;
// Infinite recursion, what could go wrong?
await mkdir(parent);
await mkdir(dirpath);
}
}
}
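
The ENOENT branch gives mkdir its mkdir -p behavior: when the parent is missing it recurses one level up, creates the parent chain, then retries the original path. A standalone sketch of the same bottom-up recursion (not the library code):

// Standalone bottom-up mkdir -p sketch.
const fsp2 = require('fs').promises;
const nodePath = require('path');
async function mkdirp(dirpath) {
  try {
    await fsp2.mkdir(dirpath);
  } catch (err) {
    if (err.code === 'EEXIST') return;                  // already there: fine
    if (err.code !== 'ENOENT') throw err;
    const parent = nodePath.dirname(dirpath);
    if (parent === '.' || parent === '/' || parent === dirpath) throw err; // gone too far
    await mkdirp(parent);                               // create missing parents first
    await fsp2.mkdir(dirpath);                          // then retry the original path
  }
}
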
async function mkdirs(dirlist /*: string[] */) {
return Promise.all(dirlist.map(mkdir));
}
// An async readFile variant that returns null instead of throwing errors
async function read(file, options) {
return new Promise(function (resolve, reject) {
fs$1().readFile(file, options, (err, file) => err ? resolve(null) : resolve(file));
});
}
// @flow
// An async writeFile variant that automatically creates missing directories,
// and returns null instead of throwing errors.
async function write(filepath /*: string */
, contents /*: string|Buffer */
, options /*: Object */ = {}) {
try {
await pify(fs$1().writeFile)(filepath, contents, options);
return;
} catch (err) {
// Hmm. Let's try mkdirp and try again.
await mkdir(path.dirname(filepath));
await pify(fs$1().writeFile)(filepath, contents, options);
}
}
var name = "isomorphic-git";

@@ -343,2 +229,2 @@ var version = "0.0.0-development";

export { rm, exists, flatFileListToDirectoryStructure, fs$1 as fs, setfs, lock, unlock, mkdirs, read, sleep, write, _package$1 as pkg, oauth2, auth };
export { flatFileListToDirectoryStructure, sleep, _package$1 as pkg, oauth2, auth };

@@ -36,9 +36,9 @@ 'use strict';

async function add({ gitdir, workdir, fs: _fs }, { filepath }) {
const fs$$1 = new models_js.FileSystem(_fs);
const fs = new models_js.FileSystem(_fs);
const type = 'blob';
const object = await fs$$1.read(path.join(workdir, filepath));
const object = await fs.read(path.join(workdir, filepath));
if (object === null) throw new Error(`Could not read file '${filepath}'`);
const oid = await managers_js.GitObjectManager.write({ fs: fs$$1, gitdir, type, object });
await managers_js.GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
let stats = await fs$$1._lstat(path.join(workdir, filepath));
const oid = await managers_js.GitObjectManager.write({ fs, gitdir, type, object });
await managers_js.GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
let stats = await fs._lstat(path.join(workdir, filepath));
index.insert({ filepath, stats, oid });

@@ -60,8 +60,8 @@ });

async function init({ gitdir, fs: _fs }) {
const fs$$1 = new models_js.FileSystem(_fs);
const fs = new models_js.FileSystem(_fs);
let folders = ['hooks', 'info', 'objects/info', 'objects/pack', 'refs/heads', 'refs/tags'];
folders = folders.map(dir => gitdir + '/' + dir);
await fs$$1.mkdirs(folders);
await fs$$1.write(gitdir + '/config', '[core]\n' + '\trepositoryformatversion = 0\n' + '\tfilemode = false\n' + '\tbare = false\n' + '\tlogallrefupdates = true\n' + '\tsymlinks = false\n' + '\tignorecase = true\n');
await fs$$1.write(gitdir + '/HEAD', 'ref: refs/heads/master\n');
await fs.mkdirs(folders);
await fs.write(gitdir + '/config', '[core]\n' + '\trepositoryformatversion = 0\n' + '\tfilemode = false\n' + '\tbare = false\n' + '\tlogallrefupdates = true\n' + '\tsymlinks = false\n' + '\tignorecase = true\n');
await fs.write(gitdir + '/HEAD', 'ref: refs/heads/master\n');
}

@@ -95,5 +95,5 @@

async function config({ gitdir, fs: _fs }, args) {
const fs$$1 = new models_js.FileSystem(_fs);
const fs = new models_js.FileSystem(_fs);
let { path: path$$1, value } = args;
const config = await managers_js.GitConfigManager.get({ fs: fs$$1, gitdir });
const config = await managers_js.GitConfigManager.get({ fs, gitdir });
// This carefully distinguishes between

@@ -108,3 +108,3 @@ // 1) there is no 'value' argument (do a "get")

await config.set(path$$1, value);
await managers_js.GitConfigManager.save({ fs: fs$$1, gitdir, config });
await managers_js.GitConfigManager.save({ fs, gitdir, config });
}

@@ -137,3 +137,3 @@ }

*/
async function fetch({ gitdir, fs: fs$$1 }, {
async function fetch({ gitdir, fs }, {
ref = 'HEAD',

@@ -152,3 +152,3 @@ remote,

gitdir,
fs: fs$$1
fs
}, {

@@ -165,3 +165,3 @@ ref,

});
await unpack({ fs: fs$$1, gitdir }, { inputStream: response.packfile, onprogress });
await unpack({ fs, gitdir }, { inputStream: response.packfile, onprogress });
}

@@ -180,3 +180,3 @@

}) {
const fs$$1 = new models_js.FileSystem(_fs);
const fs = new models_js.FileSystem(_fs);
if (depth !== null) {

@@ -190,3 +190,3 @@ if (Number.isNaN(parseInt(depth))) {

if (url === undefined) {
url = await config({ fs: fs$$1, gitdir }, {
url = await config({ fs, gitdir }, {
path: `remote.${remote}.url`

@@ -217,3 +217,3 @@ });

await managers_js.GitRefManager.updateRemoteRefs({
fs: fs$$1,
fs,
gitdir,

@@ -225,3 +225,3 @@ remote,

let want = await managers_js.GitRefManager.resolve({
fs: fs$$1,
fs,
gitdir,

@@ -235,3 +235,3 @@ ref: `refs/remotes/${remote}/${ref}`

packstream.write(models_js.GitPktLine.encode(`want ${want} ${capabilities}\n`));
let oids = await managers_js.GitShallowManager.read({ fs: fs$$1, gitdir });
let oids = await managers_js.GitShallowManager.read({ fs, gitdir });
if (oids.size > 0 && remoteHTTP.capabilities.has('shallow')) {

@@ -254,3 +254,3 @@ for (let oid of oids) {

try {
have = await managers_js.GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
have = await managers_js.GitRefManager.resolve({ fs, gitdir, ref });
} catch (err) {}

@@ -271,3 +271,3 @@ if (have) {

oids.add(oid);
await managers_js.GitShallowManager.write({ fs: fs$$1, gitdir, oids });
await managers_js.GitShallowManager.write({ fs, gitdir, oids });
} else if (line.startsWith('unshallow')) {

@@ -279,3 +279,3 @@ let oid = line.slice(-41).trim();

oids.delete(oid);
await managers_js.GitShallowManager.write({ fs: fs$$1, gitdir, oids });
await managers_js.GitShallowManager.write({ fs, gitdir, oids });
}

@@ -316,3 +316,3 @@ next(null, data);

async function unpack({ gitdir, fs: _fs }, { inputStream, onprogress }) {
const fs$$1 = new models_js.FileSystem(_fs);
const fs = new models_js.FileSystem(_fs);
return new Promise(function (resolve, reject) {

@@ -350,3 +350,3 @@ // Read header

let { object, type } = await managers_js.GitObjectManager.read({
fs: fs$$1,
fs,
gitdir,

@@ -361,3 +361,3 @@ oid

let newoid = await managers_js.GitObjectManager.write({
fs: fs$$1,
fs,
gitdir,

@@ -383,3 +383,3 @@ type,

let { type, object } = await managers_js.GitObjectManager.read({
fs: fs$$1,
fs,
gitdir,

@@ -390,3 +390,3 @@ oid: referenceOid

let oid = await managers_js.GitObjectManager.write({
fs: fs$$1,
fs,
gitdir,

@@ -401,3 +401,3 @@ type,

let oid = await managers_js.GitObjectManager.write({
fs: fs$$1,
fs,
gitdir,

@@ -433,6 +433,6 @@ type,

async function writeTreeToDisk({ gitdir, workdir, index, prefix, tree, fs: fs$$1 }) {
async function writeTreeToDisk({ gitdir, workdir, index, prefix, tree, fs }) {
for (let entry of tree) {
let { type, object } = await managers_js.GitObjectManager.read({
fs: fs$$1,
fs,
gitdir,

@@ -445,4 +445,4 @@ oid: entry.oid

case 'blob':
await fs$$1.write(filepath, object);
let stats = await fs$$1._lstat(filepath);
await fs.write(filepath, object);
let stats = await fs._lstat(filepath);
index.insert({

@@ -462,3 +462,3 @@ filepath: entrypath,

tree,
fs: fs$$1
fs
});

@@ -485,3 +485,3 @@ break;

async function checkout({ workdir, gitdir, fs: _fs }, { remote, ref }) {
const fs$$1 = new models_js.FileSystem(_fs);
const fs = new models_js.FileSystem(_fs);
// Get tree oid

@@ -493,3 +493,3 @@ let oid;

remoteRef = await managers_js.GitRefManager.resolve({
fs: fs$$1,
fs,
gitdir,

@@ -503,5 +503,5 @@ ref: `${remote}/HEAD`,

}
oid = await managers_js.GitRefManager.resolve({ fs: fs$$1, gitdir, ref: remoteRef });
oid = await managers_js.GitRefManager.resolve({ fs, gitdir, ref: remoteRef });
// Make the remote ref our own!
await fs$$1.write(`${gitdir}/refs/heads/${ref}`, oid + '\n');
await fs.write(`${gitdir}/refs/heads/${ref}`, oid + '\n');
} else {

@@ -511,5 +511,5 @@ if (ref === undefined) {

}
oid = await managers_js.GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
oid = await managers_js.GitRefManager.resolve({ fs, gitdir, ref });
}
let commit = await managers_js.GitObjectManager.read({ fs: fs$$1, gitdir, oid });
let commit = await managers_js.GitObjectManager.read({ fs, gitdir, oid });
if (commit.type !== 'commit') {

@@ -521,7 +521,7 @@ throw new Error(`Unexpected type: ${commit.type}`);

// Get top-level tree
let { type, object } = await managers_js.GitObjectManager.read({ fs: fs$$1, gitdir, oid: sha });
let { type, object } = await managers_js.GitObjectManager.read({ fs, gitdir, oid: sha });
if (type !== 'tree') throw new Error(`Unexpected type: ${type}`);
let tree = models_js.GitTree.from(object);
// Acquire a lock on the index
await managers_js.GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
await managers_js.GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
// TODO: Big optimization possible here.

@@ -533,3 +533,3 @@ // Instead of deleting and rewriting everything, only delete files

try {
await fs$$1.rm(path.join(workdir, entry.path));
await fs.rm(path.join(workdir, entry.path));
} catch (err) {}

@@ -539,5 +539,5 @@ }

// Write files. TODO: Write them atomically
await writeTreeToDisk({ fs: fs$$1, gitdir, workdir, index, prefix: '', tree });
await writeTreeToDisk({ fs, gitdir, workdir, index, prefix: '', tree });
// Update HEAD TODO: Handle non-branch cases
fs$$1.write(`${gitdir}/HEAD`, `ref: refs/heads/${ref}`);
fs.write(`${gitdir}/HEAD`, `ref: refs/heads/${ref}`);
});

@@ -570,3 +570,3 @@ }

*/
async function clone({ workdir, gitdir, fs: fs$$1 = utils_js.fs() }, {
async function clone({ workdir, gitdir, fs: _fs }, {
url,

@@ -583,9 +583,9 @@ remote,

}) {
utils_js.setfs(fs$$1);
const fs = new models_js.FileSystem(_fs);
remote = remote || 'origin';
await init({ gitdir, fs: fs$$1 });
await init({ gitdir, fs });
// Add remote
await config({
gitdir,
fs: fs$$1
fs
}, {

@@ -598,3 +598,3 @@ path: `remote.${remote}.url`,

gitdir,
fs: fs$$1
fs
}, {

@@ -615,3 +615,3 @@ ref,

gitdir,
fs: fs$$1
fs
}, {

@@ -623,3 +623,3 @@ ref,

async function constructTree({ fs: fs$$1, gitdir, inode }) /*: string */{
async function constructTree({ fs, gitdir, inode }) /*: string */{
// use depth first traversal

@@ -630,3 +630,3 @@ let children = inode.children;

inode.metadata.mode = '040000';
inode.metadata.oid = await constructTree({ fs: fs$$1, gitdir, inode });
inode.metadata.oid = await constructTree({ fs, gitdir, inode });
}

@@ -642,3 +642,3 @@ }

let oid = await managers_js.GitObjectManager.write({
fs: fs$$1,
fs,
gitdir,

@@ -678,10 +678,10 @@ type: 'tree',

async function commit({ gitdir, fs: _fs }, { message, author, committer, privateKeys }) {
const fs$$1 = new models_js.FileSystem(_fs);
const fs = new models_js.FileSystem(_fs);
// Fill in missing arguments with default values
if (author === undefined) author = {};
if (author.name === undefined) {
author.name = await config({ fs: fs$$1, gitdir, path: 'user.name' });
author.name = await config({ fs, gitdir, path: 'user.name' });
}
if (author.email === undefined) {
author.email = await config({ fs: fs$$1, gitdir, path: 'user.email' });
author.email = await config({ fs, gitdir, path: 'user.email' });
}

@@ -692,8 +692,8 @@ committer = committer || author;

let oid;
await managers_js.GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
await managers_js.GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
const inode = utils_js.flatFileListToDirectoryStructure(index.entries);
const treeRef = await constructTree({ fs: fs$$1, gitdir, inode });
const treeRef = await constructTree({ fs, gitdir, inode });
let parents;
try {
let parent = await managers_js.GitRefManager.resolve({ fs: fs$$1, gitdir, ref: 'HEAD' });
let parent = await managers_js.GitRefManager.resolve({ fs, gitdir, ref: 'HEAD' });
parents = [parent];

@@ -704,3 +704,3 @@ } catch (err) {

}
let comm = models_js.GitCommit.from({
let comm = models_js.SignedGitCommit.from({
tree: treeRef,

@@ -726,3 +726,3 @@ parent: parents,

oid = await managers_js.GitObjectManager.write({
fs: fs$$1,
fs,
gitdir,

@@ -734,3 +734,3 @@ type: 'commit',

const branch = await managers_js.GitRefManager.resolve({
fs: fs$$1,
fs,
gitdir,

@@ -740,3 +740,3 @@ ref: 'HEAD',

});
await fs$$1.write(path.join(gitdir, branch), oid + '\n');
await fs.write(path.join(gitdir, branch), oid + '\n');
});

@@ -757,5 +757,5 @@ return oid;

async function listFiles({ gitdir, fs: _fs }) {
const fs$$1 = new models_js.FileSystem(_fs);
const fs = new models_js.FileSystem(_fs);
let filenames;
await managers_js.GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
await managers_js.GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
filenames = index.entries.map(x => x.path);

@@ -777,6 +777,6 @@ });

async function listBranches({ gitdir, fs: _fs }) {
const fs$$1 = new models_js.FileSystem(_fs);
let files = await fs$$1.readdirDeep(`${gitdir}/refs/heads`);
const fs = new models_js.FileSystem(_fs);
let files = await fs.readdirDeep(`${gitdir}/refs/heads`);
files = files.map(x => x.replace(`${gitdir}/refs/heads/`, ''));
let text = await fs$$1.read(`${gitdir}/packed-refs`, { encoding: 'utf8' });
let text = await fs.read(`${gitdir}/packed-refs`, { encoding: 'utf8' });
if (text) {

@@ -831,3 +831,3 @@ let refs = text.trim().split('\n').filter(x => x.includes('refs/heads')).map(x => x.replace(/^.+ refs\/heads\//, '').trim()).filter(x => !files.includes(x)); // remove duplicates

}) {
-const fs$$1 = new models_js.FileSystem(_fs);
+const fs = new models_js.FileSystem(_fs);
let sinceTimestamp = since === undefined ? undefined : Math.floor(since.valueOf() / 1000);

@@ -837,4 +837,4 @@ // TODO: In the future, we may want to have an API where we return a

let commits = [];
-let start = await managers_js.GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
-let { type, object } = await managers_js.GitObjectManager.read({ fs: fs$$1, gitdir, oid: start });
+let start = await managers_js.GitRefManager.resolve({ fs, gitdir, ref });
+let { type, object } = await managers_js.GitObjectManager.read({ fs, gitdir, oid: start });
if (type !== 'commit') {

@@ -851,3 +851,3 @@ throw new Error(`The given ref ${ref} did not resolve to a commit but to a ${type}`);

try {
-gitobject = await managers_js.GitObjectManager.read({ fs: fs$$1, gitdir, oid });
+gitobject = await managers_js.GitObjectManager.read({ fs, gitdir, oid });
} catch (err) {

@@ -906,10 +906,10 @@ commits.push({

};async function push({ gitdir, fs: _fs }, { ref, remote, url, authUsername, authPassword }) {
-const fs$$1 = new models_js.FileSystem(_fs);
+const fs = new models_js.FileSystem(_fs);
// TODO: Figure out how pushing tags works. (This only works for branches.)
remote = remote || 'origin';
if (url === undefined) {
-url = await config({ fs: fs$$1, gitdir }, { path: `remote.${remote}.url` });
+url = await config({ fs, gitdir }, { path: `remote.${remote}.url` });
}
let fullRef = ref.startsWith('refs/') ? ref : `refs/heads/${ref}`;
-let oid = await managers_js.GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
+let oid = await managers_js.GitRefManager.resolve({ fs, gitdir, ref });
let httpRemote = new managers_js.GitRemoteHTTP(url);

@@ -923,7 +923,7 @@ if (authUsername !== undefined && authPassword !== undefined) {

await httpRemote.preparePush();
-let commits = await listCommits({ fs: fs$$1, gitdir }, {
+let commits = await listCommits({ fs, gitdir }, {
start: [oid],
finish: httpRemote.refs.values()
});
-let objects = await listObjects({ fs: fs$$1, gitdir }, { oids: commits });
+let objects = await listObjects({ fs, gitdir }, { oids: commits });
let packstream = new stream.PassThrough();
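
Read in sequence, the push() hunks show: default the remote to 'origin', look up its URL in config, resolve the local ref to an oid, negotiate with the server through GitRemoteHTTP, compute the commits the server lacks via listCommits, expand them to every reachable object via listObjects, and stream a packfile through the PassThrough. A hedged usage sketch (credentials and env var names are illustrative):

// Sketch only — option names come from the signature above.
await push({ gitdir: '/path/to/repo/.git', fs: require('fs') }, {
  ref: 'master',
  remote: 'origin', // the default when omitted
  authUsername: process.env.GIT_USERNAME, // both must be set for auth to apply
  authPassword: process.env.GIT_PASSWORD
});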

@@ -933,3 +933,3 @@ let oldoid = httpRemote.refs.get(fullRef) || '0000000000000000000000000000000000000000';

packstream.write(models_js.GitPktLine.flush());
-pack({ fs: fs$$1, gitdir }, {
+pack({ fs, gitdir }, {
oids: [...objects],

@@ -946,7 +946,7 @@ outputStream: packstream

async function listCommits({ gitdir, fs: _fs }, { start, finish }) {
-const fs$$1 = new models_js.FileSystem(_fs);
+const fs = new models_js.FileSystem(_fs);
let startingSet = new Set();
let finishingSet = new Set();
for (let ref of start) {
-startingSet.add((await managers_js.GitRefManager.resolve({ fs: fs$$1, gitdir, ref })));
+startingSet.add((await managers_js.GitRefManager.resolve({ fs, gitdir, ref })));
}
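
listCommits, as push() uses it above, walks the commit graph from the start oids and stops at anything reachable from finish — roughly `git rev-list start ^finish`. A sketch with names borrowed from the push() hunk:

// Sketch: commits reachable from the local tip but not from any remote ref.
let newCommits = await listCommits({ fs, gitdir }, {
  start: [oid], // local tips to walk from
  finish: httpRemote.refs.values() // oids the remote already has
});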

@@ -956,3 +956,3 @@ for (let ref of finish) {

try {
-let oid = await managers_js.GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
+let oid = await managers_js.GitRefManager.resolve({ fs, gitdir, ref });
finishingSet.add(oid);

@@ -968,3 +968,3 @@ } catch (err) {}

visited.add(oid);
-let { type, object } = await managers_js.GitObjectManager.read({ fs: fs$$1, gitdir, oid });
+let { type, object } = await managers_js.GitObjectManager.read({ fs, gitdir, oid });
if (type !== 'commit') {

@@ -994,3 +994,3 @@ throw new Error(`Expected type commit but type is ${type}`);

}) {
-const fs$$1 = new models_js.FileSystem(_fs);
+const fs = new models_js.FileSystem(_fs);
let visited /*: Set<string> */ = new Set();

@@ -1003,3 +1003,3 @@

visited.add(oid);
-let { type, object } = await managers_js.GitObjectManager.read({ fs: fs$$1, gitdir, oid });
+let { type, object } = await managers_js.GitObjectManager.read({ fs, gitdir, oid });
if (type === 'commit') {

@@ -1032,3 +1032,3 @@ let commit = models_js.GitCommit.from(object);

async function pack({ gitdir, fs: _fs }, { oids, outputStream }) {
-const fs$$1 = new models_js.FileSystem(_fs);
+const fs = new models_js.FileSystem(_fs);
let hash = crypto.createHash('sha1');

@@ -1073,3 +1073,3 @@ function write(chunk, enc) {

for (let oid of oids) {
-let { type, object } = await managers_js.GitObjectManager.read({ fs: fs$$1, gitdir, oid });
+let { type, object } = await managers_js.GitObjectManager.read({ fs, gitdir, oid });
writeObject({ write, object, stype: type });

@@ -1098,4 +1098,4 @@ }

async function remove({ gitdir, fs: _fs }, { filepath }) {
-const fs$$1 = new models_js.FileSystem(_fs);
-await managers_js.GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
+const fs = new models_js.FileSystem(_fs);
+await managers_js.GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
index.delete({ filepath });
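
In the lines shown, remove() operates purely on the index: under the index lock it deletes the entry for the given path. A one-line sketch:

// Sketch: unstage README.md by dropping its index entry.
await remove({ gitdir: '/path/to/repo/.git', fs: require('fs') }, { filepath: 'README.md' });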

@@ -1110,9 +1110,9 @@ });

async function verify({ gitdir, fs: _fs }, { ref, publicKeys }) {
-const fs$$1 = new models_js.FileSystem(_fs);
-const oid = await managers_js.GitRefManager.resolve({ fs: fs$$1, gitdir, ref });
-const { type, object } = await managers_js.GitObjectManager.read({ fs: fs$$1, gitdir, oid });
+const fs = new models_js.FileSystem(_fs);
+const oid = await managers_js.GitRefManager.resolve({ fs, gitdir, ref });
+const { type, object } = await managers_js.GitObjectManager.read({ fs, gitdir, oid });
if (type !== 'commit') {
throw new Error(`git.verify() was expecting a ref type 'commit' but got type '${type}'`);
}
-let commit = models_js.GitCommit.from(object);
+let commit = models_js.SignedGitCommit.from(object);
let author = commit.headers().author;
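
verify() resolves the ref, insists the object is a commit, and — the substantive change in this hunk — parses it as a SignedGitCommit so the gpgsig header can be checked against the supplied keys. A hedged sketch (the armored key is illustrative and the return value is not visible in this excerpt):

// Sketch only:
const armoredKey = '-----BEGIN PGP PUBLIC KEY BLOCK-----\n...'; // illustrative placeholder
await verify({ gitdir: '/path/to/repo/.git', fs: require('fs') }, {
  ref: 'HEAD',
  publicKeys: armoredKey
});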

@@ -1145,3 +1145,3 @@ let keys = await commit.listSigningKeys();

-async function getOidAtPath({ fs: fs$$1, gitdir, tree, path: path$$1 }) {
+async function getOidAtPath({ fs, gitdir, tree, path: path$$1 }) {
if (typeof path$$1 === 'string') path$$1 = path$$1.split('/');

@@ -1155,3 +1155,3 @@ let dirname = path$$1.shift();

let { type, object } = await managers_js.GitObjectManager.read({
-fs: fs$$1,
+fs,
gitdir,

@@ -1162,3 +1162,3 @@ oid: entry.oid

let tree = models_js.GitTree.from(object);
-return getOidAtPath({ fs: fs$$1, gitdir, tree, path: path$$1 });
+return getOidAtPath({ fs, gitdir, tree, path: path$$1 });
}
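
getOidAtPath consumes one path segment per call: it shifts the leading directory off the split path (the `path$$1.shift()` visible in the hunk header), finds the matching tree entry, reads that object, and recurses until it reaches the requested entry. Sketched together with getHeadTree, defined just below:

// Sketch: resolve the oid recorded at HEAD for a given file path.
let headTree = await getHeadTree({ fs, gitdir });
let oidAtHead = await getOidAtPath({ fs, gitdir, tree: headTree, path: 'src/index.js' });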

@@ -1173,9 +1173,9 @@ if (type === 'blob') {

-async function getHeadTree({ fs: fs$$1, gitdir }) {
+async function getHeadTree({ fs, gitdir }) {
// Get the tree from the HEAD commit.
-let oid = await managers_js.GitRefManager.resolve({ fs: fs$$1, gitdir, ref: 'HEAD' });
-let { object: cobject } = await managers_js.GitObjectManager.read({ fs: fs$$1, gitdir, oid });
+let oid = await managers_js.GitRefManager.resolve({ fs, gitdir, ref: 'HEAD' });
+let { object: cobject } = await managers_js.GitObjectManager.read({ fs, gitdir, oid });
let commit = models_js.GitCommit.from(cobject);
let { object: tobject } = await managers_js.GitObjectManager.read({
-fs: fs$$1,
+fs,
gitdir,

@@ -1216,3 +1216,3 @@ oid: commit.parseHeaders().tree

async function status({ workdir, gitdir, fs: _fs }, { filepath }) {
-const fs$$1 = new models_js.FileSystem(_fs);
+const fs = new models_js.FileSystem(_fs);
let ignored = await managers_js.GitIgnoreManager.isIgnored({

@@ -1222,3 +1222,3 @@ gitdir,

filepath,
-fs: fs$$1
+fs
});

@@ -1228,5 +1228,5 @@ if (ignored) {

}
-let headTree = await getHeadTree({ fs: fs$$1, gitdir });
+let headTree = await getHeadTree({ fs, gitdir });
let treeOid = await getOidAtPath({
-fs: fs$$1,
+fs,
gitdir,

@@ -1238,3 +1238,3 @@ tree: headTree,

// Acquire a lock on the index
-await managers_js.GitIndexManager.acquire({ fs: fs$$1, filepath: `${gitdir}/index` }, async function (index) {
+await managers_js.GitIndexManager.acquire({ fs, filepath: `${gitdir}/index` }, async function (index) {
for (let entry of index) {

@@ -1249,3 +1249,3 @@ if (entry.path === filepath) {

try {
-stats = await fs$$1._lstat(path.join(workdir, filepath));
+stats = await fs._lstat(path.join(workdir, filepath));
} catch (err) {

@@ -1265,3 +1265,3 @@ if (err.code !== 'ENOENT') {

} else {
-let object = await fs$$1.read(path.join(workdir, filepath));
+let object = await fs.read(path.join(workdir, filepath));
let workdirOid = await managers_js.GitObjectManager.hash({

@@ -1335,8 +1335,8 @@ gitdir,

async function findRoot({ fs: _fs }, { filepath }) {
-const fs$$1 = new models_js.FileSystem(_fs);
-return _findRoot(fs$$1, filepath);
+const fs = new models_js.FileSystem(_fs);
+return _findRoot(fs, filepath);
}
-async function _findRoot(fs$$1, filepath) {
-if (await fs$$1.exists(path.join(filepath, '.git'))) {
+async function _findRoot(fs, filepath) {
+if (await fs.exists(path.join(filepath, '.git'))) {
return filepath;

@@ -1346,3 +1346,3 @@ } else {

if (parent === filepath) throw new Error('Unable to find git root');
-return _findRoot(fs$$1, parent);
+return _findRoot(fs, parent);
}
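
_findRoot climbs toward the filesystem root one path.dirname() at a time, returning the first directory that contains a .git entry and throwing once the parent stops changing. For example:

// Sketch: locate the repository root from a nested directory.
let root = await findRoot({ fs: require('fs') }, { filepath: '/home/alice/project/src/utils' });
// → '/home/alice/project', assuming that directory contains a .git folder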

@@ -1412,9 +1412,8 @@ }

*/
-constructor({ fs: fs$$1, dir, workdir, gitdir }) {
-if (fs$$1) {
+constructor({ fs, dir, workdir, gitdir }) {
+if (fs) {
/**
* @type {FSModule}
*/
-this.fs = fs$$1;
-utils_js.setfs(fs$$1);
+this.fs = fs;
}

@@ -1421,0 +1420,0 @@ if (dir) {

@@ -9,2 +9,3 @@ 'use strict';

var pify = _interopDefault(require('pify'));
+var utils_js = require('./utils.js');
var buffer = require('buffer');

@@ -22,2 +23,3 @@ var openpgp = require('openpgp/dist/openpgp.min.js');

+const delayedReleases = new Map();
/**

@@ -32,2 +34,3 @@ * This is just a collection of helper functions really. At least that's how it started.

this._mkdir = pify(fs.mkdir.bind(fs));
+this._rmdir = pify(fs.rmdir.bind(fs));
this._unlink = pify(fs.unlink.bind(fs));

@@ -134,2 +137,34 @@ this._stat = pify(fs.stat.bind(fs));

}
+async lock(filename, triesLeft = 3) {
+// check to see if we still have it
+if (delayedReleases.has(filename)) {
+clearTimeout(delayedReleases.get(filename));
+delayedReleases.delete(filename);
+return;
+}
+if (triesLeft === 0) {
+throw new Error(`Unable to acquire lockfile '${filename}'. Exhausted tries.`);
+}
+try {
+await this.mkdir(`${filename}.lock`);
+} catch (err) {
+if (err.code === 'EEXIST') {
+await utils_js.sleep(100);
+await this.lock(filename, triesLeft - 1);
+}
+}
+}
+async unlock(filename, delayRelease = 50) {
+if (delayedReleases.has(filename)) {
+throw new Error('Cannot double-release lockfile');
+}
+// Basically, we lie and say it was deleted ASAP.
+// But really we wait a bit to see if you want to acquire it again.
+delayedReleases.set(filename, setTimeout(async () => {
+delayedReleases.delete(filename);
+await this._rmdir(`${filename}.lock`);
+}, delayRelease));
+}
}
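
These new instance methods move the lockfile logic onto FileSystem itself. The strategy leans on mkdir being atomic: the first caller to create `filename.lock` wins, EEXIST means the lock is held, and lock() retries up to three times with a 100 ms sleep in between. unlock() deliberately delays the rmdir by delayRelease milliseconds (50 by default) so that an immediate re-acquire — the delayedReleases check at the top of lock() — can skip the filesystem round-trip entirely. A hedged usage sketch:

// Sketch: serialize access to the index file with the new methods.
const fs = new FileSystem(require('fs'));
await fs.lock('/path/to/repo/.git/index'); // creates index.lock, retrying on EEXIST
try {
  // ... read, modify, and rewrite the index ...
} finally {
  await fs.unlock('/path/to/repo/.git/index'); // schedules removal of index.lock
}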

@@ -312,3 +347,22 @@

}
}
+function normalize$1(str) {
+// remove all <CR>
+str = str.replace(/\r/g, '');
+// no extra newlines up front
+str = str.replace(/^\n+/, '');
+// and a single newline at the end
+str = str.replace(/\n+$/, '') + '\n';
+return str;
+}
+function indent$1(str) {
+return str.trim().split('\n').map(x => ' ' + x).join('\n') + '\n';
+}
+class SignedGitCommit extends GitCommit {
+static from(commit) {
+return new SignedGitCommit(commit);
+}
+async sign(privateKeys /*: string */) {

@@ -326,4 +380,4 @@ let commit = this.withoutSignature();

// renormalize the line endings to the one true line-ending
-signature = normalize(signature);
-let signedCommit = headers + '\n' + 'gpgsig' + indent(signature) + '\n' + message$$1;
+signature = normalize$1(signature);
+let signedCommit = headers + '\n' + 'gpgsig' + indent$1(signature) + '\n' + message$$1;
// return a new commit object
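
The net effect of normalize$1/indent$1 is git's multi-line header encoding: the armored signature is newline-normalized, then every line gets a single leading space so it parses as a continuation of the gpgsig header. Illustratively, the signed payload ends up shaped like this (hashes and signature body abbreviated):

tree 29ff16c9c14e2652b22f8b78bb08a5a07930c147
parent 221182d7e9f4c1f2c968cdfa2b4a04e2e7709c30
author Alice <alice@example.com> 1515000000 -0500
committer Alice <alice@example.com> 1515000000 -0500
gpgsig -----BEGIN PGP SIGNATURE-----
 ...each armored line prefixed with one space...
 -----END PGP SIGNATURE-----

Initial commit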

@@ -914,2 +968,3 @@ return GitCommit.from(signedCommit);

exports.GitCommit = GitCommit;
+exports.SignedGitCommit = SignedGitCommit;
exports.GitConfig = GitConfig;

@@ -916,0 +971,0 @@ exports.GitObject = GitObject;

@@ -7,31 +7,4 @@ 'use strict';

var pify = _interopDefault(require('pify'));
var path = _interopDefault(require('path'));
-var fs;
-var fs$1 = function () {
-return fs;
-};
-function setfs(newFs) {
-fs = newFs;
-}
-async function rm(filepath) {
-try {
-await pify(fs$1().unlink)(filepath);
-} catch (err) {
-if (err.code !== 'ENOENT') throw err;
-}
-}
-// An async exists variant
-async function exists(file, options) {
-return new Promise(function (resolve, reject) {
-fs$1().stat(file, (err, stats) => {
-if (err) return err.code === 'ENOENT' ? resolve(false) : reject(err);
-resolve(true);
-});
-});
-}
// @flow

@@ -98,89 +71,2 @@ /*::

-// @flow
-// This is modeled after the lockfile strategy used by the git source code.
-const delayedReleases = new Map();
-async function lock(filename /*: string */
-, triesLeft /*: number */ = 3) {
-// check to see if we still have it
-if (delayedReleases.has(filename)) {
-clearTimeout(delayedReleases.get(filename));
-delayedReleases.delete(filename);
-return;
-}
-if (triesLeft === 0) {
-throw new Error(`Unable to acquire lockfile '${filename}'. Exhausted tries.`);
-}
-try {
-await pify(fs$1().mkdir)(`${filename}.lock`);
-} catch (err) {
-if (err.code === 'EEXIST') {
-await sleep(100);
-await lock(filename, triesLeft - 1);
-}
-}
-}
-async function unlock(filename /*: string */
-, delayRelease /*: number */ = 50) {
-if (delayedReleases.has(filename)) {
-throw new Error('Cannot double-release lockfile');
-}
-// Basically, we lie and say it was deleted ASAP.
-// But really we wait a bit to see if you want to acquire it again.
-delayedReleases.set(filename, setTimeout(async () => {
-delayedReleases.delete(filename);
-await pify(fs$1().rmdir)(`${filename}.lock`);
-}));
-}
-// @flow
-async function mkdir(dirpath /*: string */) {
-try {
-await pify(fs$1().mkdir)(dirpath);
-return;
-} catch (err) {
-// If err is null then operation succeeded!
-if (err === null) return;
-// If the directory already exists, that's OK!
-if (err.code === 'EEXIST') return;
-// If we got a "no such file or directory error" backup and try again.
-if (err.code === 'ENOENT') {
-let parent = path.dirname(dirpath);
-// Check to see if we've gone too far
-if (parent === '.' || parent === '/' || parent === dirpath) throw err;
-// Infinite recursion, what could go wrong?
-await mkdir(parent);
-await mkdir(dirpath);
-}
-}
-}
-async function mkdirs(dirlist /*: string[] */) {
-return Promise.all(dirlist.map(mkdir));
-}
-// An async readFile variant that returns null instead of throwing errors
-async function read(file, options) {
-return new Promise(function (resolve, reject) {
-fs$1().readFile(file, options, (err, file) => err ? resolve(null) : resolve(file));
-});
-}
-// @flow
-// An async writeFile variant that automatically creates missing directories,
-// and returns null instead of throwing errors.
-async function write(filepath /*: string */
-, contents /*: string|Buffer */
-, options /*: Object */ = {}) {
-try {
-await pify(fs$1().writeFile)(filepath, contents, options);
-return;
-} catch (err) {
-// Hmm. Let's try mkdirp and try again.
-await mkdir(path.dirname(filepath));
-await pify(fs$1().writeFile)(filepath, contents, options);
-}
-}
var name = "isomorphic-git";

@@ -350,15 +236,6 @@ var version = "0.0.0-development";

-exports.rm = rm;
-exports.exists = exists;
exports.flatFileListToDirectoryStructure = flatFileListToDirectoryStructure;
-exports.fs = fs$1;
-exports.setfs = setfs;
-exports.lock = lock;
-exports.unlock = unlock;
-exports.mkdirs = mkdirs;
-exports.read = read;
exports.sleep = sleep;
-exports.write = write;
exports.pkg = _package$1;
exports.oauth2 = oauth2;
exports.auth = auth;

@@ -1,1 +1,1 @@

{"name":"isomorphic-git","version":"0.0.28","description":"A pure JavaScript implementation of git for node and browsers!","typings":"./src/index.d.ts","main":"dist/for-node/","browser":"dist/for-browserify/","module":"dist/for-future/","unpkg":"dist/bundle.umd.min.js","bin":{"isogit":"./cli.js"},"engines":{"node":">=7.6.0"},"scripts":{"start":"nps","test":"nps test","precommit":"nps format toc","semantic-release":"semantic-release pre && npm publish && semantic-release post"},"repository":{"type":"git","url":"https://github.com/wmhilton/isomorphic-git.git"},"keywords":["git"],"author":"William Hilton <wmhilton@gmail.com>","license":"Unlicense","bugs":{"url":"https://github.com/wmhilton/isomorphic-git/issues"},"homepage":"https://github.com/wmhilton/isomorphic-git#readme","files":["dist","cli.js"],"dependencies":{"async-lock":"^1.0.0","await-stream-ready":"^1.0.1","babel-runtime":"^6.26.0","buffer":"^5.0.7","buffer-peek-stream":"^1.0.1","buffercursor":"0.0.12","gartal":"^1.1.2","git-apply-delta":"0.0.7","git-list-pack":"0.0.10","ignore":"^3.3.6","ini":"^1.3.4","marky":"^1.2.0","minimisted":"^2.0.0","openpgp":"^2.5.10","pad":"^2.0.1","pako":"^1.0.5","pify":"^3.0.0","shasum":"^1.0.2","simple-concat":"^1.0.0","simple-get":"^2.7.0","through2":"^2.0.3"},"devDependencies":{"babel-plugin-external-helpers":"^6.22.0","babel-plugin-transform-es2015-modules-commonjs":"^6.24.1","babel-plugin-transform-object-rest-spread":"^6.23.0","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.6.0","babel-preset-flow":"^6.23.0","ban-sensitive-files":"^1.9.0","browserfs":"^1.4.3","browserify":"^14.4.0","browserify-shim":"^3.8.14","codecov":"^3.0.0","doctoc":"^1.3.0","esdoc":"^1.0.4","esdoc-ecmascript-proposal-plugin":"^1.0.0","esdoc-importpath-plugin":"^1.0.1","esdoc-standard-plugin":"^1.0.0","husky":"^0.14.3","jest":"^21.2.1","jest-fixtures":"^0.6.0","jsonfile":"^4.0.0","karma":"^1.7.1","karma-browserify":"^5.1.1","karma-chrome-launcher":"^2.2.0","karma-firefox-launcher":"^1.0.1","karma-sauce-launcher":"^1.2.0","karma-tap":"^3.1.1","lodash":"^4.17.4","nock":"^9.0.17","npm-run-all":"^4.1.1","nps":"^5.7.1","nps-utils":"^1.4.0","parse-header-stream":"^1.1.1","prettier-standard":"^7.0.3","rollup":"^0.51.6","rollup-plugin-babel":"^3.0.2","rollup-plugin-json":"^2.3.0","standard":"^10.0.3","stream-equal":"^1.0.1","tape":"^4.8.0","uglify-es":"^3.1.2","watch":"^1.0.2","watchify":"^3.9.0","semantic-release":"^8.2.0"},"ava":{"source":["dist/for-node/*"]},"browserify":{"transform":["browserify-shim"]},"browserify-shim":{"fs":"global:fs"},"testling":{"files":"testling/basic-test.js","browsers":["chrome/latest","firefox/latest","ie/latest"]},"jest":{"testPathIgnorePatterns":["__helpers__"],"testEnvironment":"node"}}
{"name":"isomorphic-git","version":"0.0.29","description":"A pure JavaScript implementation of git for node and browsers!","typings":"./src/index.d.ts","main":"dist/for-node/","browser":"dist/for-browserify/","module":"dist/for-future/","unpkg":"dist/bundle.umd.min.js","bin":{"isogit":"./cli.js"},"engines":{"node":">=7.6.0"},"scripts":{"start":"nps","test":"nps test","precommit":"nps format toc","semantic-release":"semantic-release pre && npm publish && semantic-release post"},"repository":{"type":"git","url":"https://github.com/wmhilton/isomorphic-git.git"},"keywords":["git"],"author":"William Hilton <wmhilton@gmail.com>","license":"Unlicense","bugs":{"url":"https://github.com/wmhilton/isomorphic-git/issues"},"homepage":"https://github.com/wmhilton/isomorphic-git#readme","files":["dist","cli.js"],"dependencies":{"async-lock":"^1.0.0","await-stream-ready":"^1.0.1","babel-runtime":"^6.26.0","buffer":"^5.0.7","buffer-peek-stream":"^1.0.1","buffercursor":"0.0.12","gartal":"^1.1.2","git-apply-delta":"0.0.7","git-list-pack":"0.0.10","ignore":"^3.3.6","ini":"^1.3.4","marky":"^1.2.0","minimisted":"^2.0.0","openpgp":"^2.5.10","pad":"^2.0.1","pako":"^1.0.5","pify":"^3.0.0","shasum":"^1.0.2","simple-concat":"^1.0.0","simple-get":"^2.7.0","through2":"^2.0.3"},"devDependencies":{"babel-plugin-external-helpers":"^6.22.0","babel-plugin-transform-es2015-modules-commonjs":"^6.24.1","babel-plugin-transform-object-rest-spread":"^6.23.0","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.6.0","babel-preset-flow":"^6.23.0","ban-sensitive-files":"^1.9.0","browserfs":"^1.4.3","browserify":"^14.4.0","browserify-shim":"^3.8.14","codecov":"^3.0.0","doctoc":"^1.3.0","esdoc":"^1.0.4","esdoc-ecmascript-proposal-plugin":"^1.0.0","esdoc-importpath-plugin":"^1.0.1","esdoc-standard-plugin":"^1.0.0","husky":"^0.14.3","jest":"^21.2.1","jest-fixtures":"^0.6.0","jsonfile":"^4.0.0","karma":"^1.7.1","karma-browserify":"^5.1.1","karma-chrome-launcher":"^2.2.0","karma-firefox-launcher":"^1.0.1","karma-sauce-launcher":"^1.2.0","karma-tap":"^3.1.1","lodash":"^4.17.4","nock":"^9.0.17","npm-run-all":"^4.1.1","nps":"^5.7.1","nps-utils":"^1.4.0","parse-header-stream":"^1.1.1","prettier-standard":"^7.0.3","rollup":"^0.51.6","rollup-plugin-babel":"^3.0.2","rollup-plugin-json":"^2.3.0","standard":"^10.0.3","stream-equal":"^1.0.1","tape":"^4.8.0","uglify-es":"^3.1.2","watch":"^1.0.2","watchify":"^3.9.0","semantic-release":"^8.2.0"},"ava":{"source":["dist/for-node/*"]},"browserify":{"transform":["browserify-shim"]},"browserify-shim":{"fs":"global:fs"},"testling":{"files":"testling/basic-test.js","browsers":["chrome/latest","firefox/latest","ie/latest"]},"jest":{"testPathIgnorePatterns":["__helpers__"],"testEnvironment":"node"}}

@@ -44,3 +44,3 @@ # isomorphic-git ![node version](https://img.shields.io/node/v/isomorphic-git.svg) [![Build Status](https://travis-ci.org/wmhilton/isomorphic-git.svg?branch=master)](https://travis-ci.org/wmhilton/isomorphic-git) [![codecov](https://codecov.io/gh/wmhilton/isomorphic-git/branch/master/graph/badge.svg)](https://codecov.io/gh/wmhilton/isomorphic-git) [![dependencies](https://david-dm.org/wmhilton/isomorphic-git/status.svg)](https://david-dm.org/wmhilton/isomorphic-git) [![Known Vulnerabilities](https://snyk.io/test/github/wmhilton/isomorphic-git/badge.svg)](https://snyk.io/test/github/wmhilton/isomorphic-git)

So rather than relying on the 'fs' module, `isomorphic-git` is BYOFS (Bring Your Own File System).
-The `git.utils.setfs( fs )` line tells git what module to use for file system operations.
+When creating a new Git object, you pass it the fs module to use.
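
Concretely, this pairs with the constructor change earlier in the diff (`constructor({ fs, dir, workdir, gitdir })`). A sketch, assuming the class is what the package exports — the export shape is not visible in this excerpt:

// Sketch only — 'Git' as the export name is an assumption.
const Git = require('isomorphic-git');
const repo = new Git({ fs: require('fs'), dir: '.' });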

@@ -47,0 +47,0 @@ If you're only using `isomorphic-git` in Node, you can just use the native `fs` module.

