Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

tiny-commit-walker

Package Overview
Dependencies
Maintainers
1
Versions
15
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

tiny-commit-walker - npm Package Compare versions

Comparing version 1.1.0 to 1.1.1

16

dist/commit.d.ts
import { Packs } from './pack';
export interface AuthorOrCommitter {
name: string;
email: string;
date: Date;
timezoneOffset: number;
export interface Author {
readonly name: string;
readonly email: string;
readonly date: Date;
readonly timezoneOffset: number;
}
export interface Committer extends Author {
}
export declare class Commit {

@@ -22,4 +24,4 @@ readonly gitDir: string;

private _parseAuthorOrCommitter(type);
readonly author: AuthorOrCommitter;
readonly committer: AuthorOrCommitter;
readonly author: Author;
readonly committer: Author;
readonly hasParents: boolean;

@@ -26,0 +28,0 @@ readonly isMergeCommit: boolean;

@@ -98,8 +98,10 @@ "use strict";

if (s.startsWith('object')) {
return yield Commit.readCommit(gitDir, s.match(/[a-f0-9]{40}/)[0], packs);
return yield Commit.readCommit(gitDir, getCommitHashFromAnnotatedTag(s), packs);
}
const data = `commit ${body.length}\u0000${body.toString('utf8')}`;
return new Commit(gitDir, hash, data, packs);
return new Commit(gitDir, hash, createCommitData(body), packs);
}
catch (e) { }
catch (e) {
if (e instanceof InvalidAnnotatedTagError)
throw e;
}
}

@@ -109,3 +111,3 @@ const deflatedData = yield readFileAsync(getObjectPath(gitDir, hash));

if (data.startsWith('tag')) {
return yield Commit.readCommit(gitDir, data.match(/[a-f0-9]{40}/)[0], packs);
return yield Commit.readCommit(gitDir, getCommitHashFromAnnotatedTag(data), packs);
}

@@ -121,8 +123,10 @@ return new Commit(gitDir, hash, data, packs);

if (s.startsWith('object')) {
return Commit.readCommitSync(gitDir, s.match(/[a-f0-9]{40}/)[0], packs);
return Commit.readCommitSync(gitDir, getCommitHashFromAnnotatedTag(s), packs);
}
const data = `commit ${body.length}\u0000${body.toString('utf8')}`;
return new Commit(gitDir, hash, data, packs);
return new Commit(gitDir, hash, createCommitData(body), packs);
}
catch (e) { }
catch (e) {
if (e instanceof InvalidAnnotatedTagError)
throw e;
}
}

@@ -132,3 +136,3 @@ const deflatedData = fs.readFileSync(getObjectPath(gitDir, hash));

if (data.startsWith('tag')) {
return Commit.readCommitSync(gitDir, data.match(/[a-f0-9]{40}/)[0], packs);
return Commit.readCommitSync(gitDir, getCommitHashFromAnnotatedTag(data), packs);
}

@@ -139,5 +143,39 @@ return new Commit(gitDir, hash, data, packs);

exports.Commit = Commit;
// Build the loose-object text for a commit: a "commit <size>" header,
// a NUL separator, then the raw body decoded as UTF-8.
function createCommitData(body) {
    const header = `commit ${body.length}`;
    const text = body.toString('utf8');
    return header + '\u0000' + text;
}
// Map a 40-char object hash to its loose-object path:
// <gitDir>/objects/<first 2 chars>/<remaining 38 chars>.
// Input that is not exactly 40 characters passes through unsplit.
function getObjectPath(gitDir, hash) {
    const relative = hash.replace(/^(.{2})(.{38})$/, `$1${path.sep}$2`);
    return path.join(gitDir, 'objects', relative);
}
// Strip the "<type> <size>\0" header from a decoded git object string and
// return everything after the first NUL byte.
function getGitObjectBody(s) {
    let cursor = 0;
    while (s[cursor] !== '\u0000') {
        cursor += 1;
    }
    return s.slice(cursor + 1);
}
/**
 * Thrown when an annotated tag object references something other than a
 * commit (e.g. a tag pointing at a blob or tree).
 */
class InvalidAnnotatedTagError extends Error {
    constructor(objectType) {
        // Pass the final message straight to super() instead of calling
        // super(objectType) and then overwriting `this.message` afterwards.
        super(`Invalid git object type: ${objectType}`);
        // Without this, the error reports as a generic "Error" in logs/stacks.
        this.name = 'InvalidAnnotatedTagError';
    }
}
// Extract the target commit hash from an annotated-tag body.
// Accepts either a full git object ("tag <size>\0<body>") or the bare body.
// Throws InvalidAnnotatedTagError when the tag does not point at a commit.
function getCommitHashFromAnnotatedTag(tagBody) {
    const body = tagBody.startsWith('tag') ? getGitObjectBody(tagBody) : tagBody;
    let objectHash;
    let objectType;
    for (const line of body.split('\n')) {
        // A blank line terminates the header section; the message follows.
        if (!line.length) {
            break;
        }
        const [key, value] = line.split(/\s/);
        if (key === 'object') {
            objectHash = value;
        }
        else if (key === 'type') {
            objectType = value;
        }
    }
    if (objectType !== 'commit') {
        throw new InvalidAnnotatedTagError(objectType);
    }
    return objectHash;
}
//# sourceMappingURL=commit.js.map
/// <reference types="node" />
export declare function readBaseOffset(buff: Buffer, offset: number): number[];
export declare function patchDelta(src: Buffer, delta: Buffer): Buffer;

@@ -0,0 +0,0 @@ "use strict";

export * from './repo';
export * from './commit';

@@ -0,0 +0,0 @@ "use strict";

/// <reference types="node" />
export interface PackedIndex {
offset: number;
fileIndex: number;
readonly offset: number;
readonly fileIndex: number;
}

@@ -17,2 +17,6 @@ export declare type PackedIndexMap = Map<string, PackedIndex>;

static initializeSync(gitDir: string): Packs;
private _getPackedIndexFromCache(hash);
private _getPackedObjectBufferFromCach(idx);
private _setPackedObjectBuffrToCache(idx, buff);
private _getPackFilePath(idx);
unpackGitObject(hash: string): Promise<Buffer>;

@@ -19,0 +23,0 @@ unpackGitObjectSync(hash: string): Buffer;

@@ -58,5 +58,5 @@ "use strict";

let index = 255 * 4;
if (idxFileBuffer.readUInt32BE(0) === 0xff744f63 && idxFileBuffer.readUInt32BE(4) === 2) {
idxVersion = 2;
index += 2 * 4;
if (idxFileBuffer.readUInt32BE(0) === 0xff744f63) {
idxVersion = idxFileBuffer.readUInt32BE(4);
index += 8;
}

@@ -72,3 +72,4 @@ else {

for (let i = 0; i < n; i++) {
const hash = idxFileBuffer.slice(index + i * 20, index + (i + 1) * 20).toString('hex');
const hash = readHash(idxFileBuffer, index);
index += 20;
let offset = idxFileBuffer.readUInt32BE(off32);

@@ -86,4 +87,5 @@ off32 += 4;

for (let i = 0; i < n; i++) {
const offset = idxFileBuffer.readUInt32BE(index + 24 * i);
const hash = idxFileBuffer.slice(index + 24 * i + 4, index + 24 * (i + 1)).toString('hex');
const offset = idxFileBuffer.readUInt32BE(index);
const hash = readHash(idxFileBuffer, index += 4);
index += 20;
map.set(hash, { offset, fileIndex });

@@ -172,15 +174,26 @@ }

}
// Look up the pack-index entry (offset + pack file index) for `hash` in the
// preloaded packedIndexMap; throws when the hash is in no pack index.
_getPackedIndexFromCache(hash) {
const idx = this.packedIndexMap.get(hash);
if (!idx) {
throw new Error(`${hash} is not found.`);
}
return idx;
}
// Fetch a previously unpacked object buffer from the in-memory cache,
// keyed by "<fileIndex>:<offset>". Returns undefined on a cache miss.
// NOTE(review): method name is missing the trailing "e" ("...Cach") — kept
// as-is because sibling code calls it by this exact name.
_getPackedObjectBufferFromCach(idx) {
return this._packedObjectCache.get(`${idx.fileIndex}:${idx.offset}`);
}
// Store an unpacked object buffer in the cache under "<fileIndex>:<offset>".
// NOTE(review): "Buffr" typo in the name — kept as-is because sibling code
// calls it by this exact name.
_setPackedObjectBuffrToCache(idx, buff) {
return this._packedObjectCache.set(`${idx.fileIndex}:${idx.offset}`, buff);
}
// Resolve the on-disk .pack file path for a packed-index entry.
_getPackFilePath(idx) {
return path.join(this.packDir, this.packFileNames[idx.fileIndex] + '.pack');
}
unpackGitObject(hash) {
return __awaiter(this, void 0, void 0, function* () {
const idx = this.packedIndexMap.get(hash);
if (!idx) {
throw new Error(`${hash} is not found.`);
}
const key = `${idx.fileIndex}:${idx.offset}`;
let dst = this._packedObjectCache.get(key);
const idx = this._getPackedIndexFromCache(hash);
let dst = this._getPackedObjectBufferFromCach(idx);
if (dst) {
return dst;
}
const filePath = path.join(this.packDir, this.packFileNames[idx.fileIndex] + '.pack');
const fd = yield openFileAsync(filePath, 'r');
const fd = yield openFileAsync(this._getPackFilePath(idx), 'r');
try {

@@ -199,13 +212,8 @@ dst = yield this._unpackGitObject(fd, idx);

unpackGitObjectSync(hash) {
const idx = this.packedIndexMap.get(hash);
if (!idx) {
throw new Error(`${hash} is not found.`);
}
const key = `${idx.fileIndex}:${idx.offset}`;
let dst = this._packedObjectCache.get(key);
const idx = this._getPackedIndexFromCache(hash);
let dst = this._getPackedObjectBufferFromCach(idx);
if (dst) {
return dst;
}
const filePath = path.join(this.packDir, this.packFileNames[idx.fileIndex] + '.pack');
const fd = fs.openSync(filePath, 'r');
const fd = fs.openSync(this._getPackFilePath(idx), 'r');
try {

@@ -224,4 +232,3 @@ dst = this._unpackGitObjectSync(fd, idx);

return __awaiter(this, void 0, void 0, function* () {
const key = `${idx.fileIndex}:${idx.offset}`;
let dst = this._packedObjectCache.get(key);
let dst = this._getPackedObjectBufferFromCach(idx);
if (dst) {

@@ -231,12 +238,10 @@ return dst;

const head = Buffer.alloc(32);
yield readAsync(fd, head, 0, 32, idx.offset);
yield readAsync(fd, head, 0, head.length, idx.offset);
const po = new PackedObject(idx, head);
switch (po.type) {
case ObjectTypeEnum.COMMIT:
case ObjectTypeEnum.TREE:
case ObjectTypeEnum.BLOB:
// case ObjectTypeEnum.TREE:
// case ObjectTypeEnum.BLOB:
case ObjectTypeEnum.TAG: {
const buff = Buffer.alloc(po.size * 2 + 32);
yield readAsync(fd, buff, 0, buff.length, idx.offset + po.offset);
dst = yield inflateAsync(buff);
dst = yield inf(fd, idx, po);
break;

@@ -252,3 +257,3 @@ }

}
this._packedObjectCache.set(key, dst);
this._setPackedObjectBuffrToCache(idx, dst);
return dst;

@@ -258,4 +263,3 @@ });

_unpackGitObjectSync(fd, idx) {
const key = `${idx.fileIndex}:${idx.offset}`;
let dst = this._packedObjectCache.get(key);
let dst = this._getPackedObjectBufferFromCach(idx);
if (dst) {

@@ -265,12 +269,10 @@ return dst;

const head = Buffer.alloc(32);
fs.readSync(fd, head, 0, 32, idx.offset);
fs.readSync(fd, head, 0, head.length, idx.offset);
const po = new PackedObject(idx, head);
switch (po.type) {
case ObjectTypeEnum.COMMIT:
case ObjectTypeEnum.TREE:
case ObjectTypeEnum.BLOB:
// case ObjectTypeEnum.TREE:
// case ObjectTypeEnum.BLOB:
case ObjectTypeEnum.TAG: {
const buff = Buffer.alloc(po.size * 2 + 32);
fs.readSync(fd, buff, 0, buff.length, idx.offset + po.offset);
dst = zlib.inflateSync(buff);
dst = infSync(fd, idx, po);
break;

@@ -286,3 +288,3 @@ }

}
this._packedObjectCache.set(key, dst);
this._setPackedObjectBuffrToCache(idx, dst);
return dst;

@@ -302,10 +304,6 @@ }

else {
const hash = head.slice(po.offset, po.offset += 20).toString('hex');
src = (yield this.unpackGitObject(hash));
src = (yield this.unpackGitObject(readHash(head, po.offset)));
po.offset += 20;
}
const buff = Buffer.alloc(po.size * 2 + 32);
yield readAsync(fd, buff, 0, buff.length, idx.offset + po.offset);
const delta = yield inflateAsync(buff);
const dst = delta_1.patchDelta(src, delta);
return dst;
return delta_1.patchDelta(src, yield inf(fd, idx, po));
});

@@ -324,13 +322,40 @@ }

else {
const hash = head.slice(po.offset, po.offset += 20).toString('hex');
src = this.unpackGitObjectSync(hash);
src = this.unpackGitObjectSync(readHash(head, po.offset));
po.offset += 20;
}
const buff = Buffer.alloc(po.size * 2 + 32);
fs.readSync(fd, buff, 0, buff.length, idx.offset + po.offset);
const delta = zlib.inflateSync(buff);
const dst = delta_1.patchDelta(src, delta);
return dst;
return delta_1.patchDelta(src, infSync(fd, idx, po));
}
}
exports.Packs = Packs;
// Read the 20 raw SHA-1 bytes starting at `offset` and render them as a
// 40-character lowercase hex string.
function readHash(buff, offset) {
    const raw = buff.slice(offset, offset + 20);
    return raw.toString('hex');
}
// Synchronously inflate a packed object located at idx.offset + po.offset.
// The inflated size is not known exactly up front, so start with a guessed
// window and grow it by 128 bytes whenever zlib reports Z_BUF_ERROR (-5),
// i.e. the deflate stream did not fit in the buffer we read.
function infSync(fd, idx, po, size = po.size * 2 + 32) {
    for (let window = size; ; window += 128) {
        const buff = Buffer.allocUnsafe(window);
        fs.readSync(fd, buff, 0, buff.length, idx.offset + po.offset);
        try {
            return zlib.inflateSync(buff);
        }
        catch (e) {
            // Anything other than Z_BUF_ERROR is a real failure.
            if (e.errno !== -5) {
                throw e;
            }
        }
    }
}
// Async counterpart of infSync: inflate a packed object at
// idx.offset + po.offset, growing the read window by 128 bytes whenever
// zlib reports Z_BUF_ERROR (-5) until the whole deflate stream fits.
function inf(fd, idx, po, size = po.size * 2 + 32) {
    return __awaiter(this, void 0, void 0, function* () {
        for (let window = size; ; window += 128) {
            const buff = Buffer.allocUnsafe(window);
            yield readAsync(fd, buff, 0, buff.length, idx.offset + po.offset);
            try {
                return yield inflateAsync(buff);
            }
            catch (e) {
                // Anything other than Z_BUF_ERROR is a real failure.
                if (e.errno !== -5) {
                    throw e;
                }
            }
        }
    });
}
//# sourceMappingURL=pack.js.map
import { Commit } from './commit';
import { Packs } from "./pack";
export declare type REFS_DIR = 'heads' | 'tags' | 'remotes';
export declare type BRANCH_DIR = 'heads' | 'remotes';
export declare type StringMap = Map<string, string>;
export interface Branch {
name: string;
commit: Commit;
export declare type RefMap = Map<string, Ref>;
export declare class Ref {
readonly name: string;
private _gitDir;
private _hash;
private _packs;
private _commit;
constructor(name: string, _gitDir: string, _hash: string, _packs: Packs);
readonly commit: Commit;
}
export interface Tag {
name: string;
commit: Commit;
}
export interface HEAD {
type: 'branch' | 'commit';
branch?: Branch;
commit?: Commit;
readonly type: 'branch' | 'commit';
readonly branch?: Ref;
readonly commit?: Commit;
}

@@ -21,3 +24,5 @@ export declare class Repository {

private _refs;
private _refMaps;
private _packs;
private _refsDir;
constructor(gitDir: string);

@@ -34,2 +39,3 @@ /**

private _initRefsSync();
private _initRefMaps();
/**

@@ -53,7 +59,7 @@ * Read the information in `.git/HEAD`.

readHeadSync(): HEAD;
private _readBranchesOrTags(dir);
private _readBranchesOrTagsSync(dir);
private _findCommitFromRefs(dir, name);
private _readCommitByBranchOrTag(dir, name);
private _readCommitByBranchOrTagSync(dir, name);
private _readRefs(dir);
private _readRefsSync(dir);
private _findRef(dir, name);
private _readRef(dir, name);
private _readRefSync(dir, name);
/**

@@ -69,7 +75,7 @@ * Read branches.

*/
readBranches(dirs?: BRANCH_DIR[] | BRANCH_DIR): Promise<Branch[]>;
readBranches(dirs?: BRANCH_DIR[] | BRANCH_DIR): Promise<Ref[]>;
/**
* Read branches sync.
*/
readBranchesSync(dirs?: BRANCH_DIR[] | BRANCH_DIR): Branch[];
readBranchesSync(dirs?: BRANCH_DIR[] | BRANCH_DIR): Ref[];
/**

@@ -95,7 +101,7 @@ * Read a commit by branch name.

*/
readTags(): Promise<Tag[]>;
readTags(): Promise<Ref[]>;
/**
* Read tags sync.
*/
readTagsSync(): Tag[];
readTagsSync(): Ref[];
/**

@@ -102,0 +108,0 @@ * Read a commit by tag name.

@@ -19,2 +19,17 @@ "use strict";

const readDirAsync = promisify(fs.readdir);
class Ref {
constructor(name, _gitDir, _hash, _packs) {
this.name = name;
this._gitDir = _gitDir;
this._hash = _hash;
this._packs = _packs;
}
get commit() {
if (this._commit) {
return this._commit;
}
return this._commit = commit_1.Commit.readCommitSync(this._gitDir, this._hash, this._packs);
}
}
exports.Ref = Ref;
const processings = {};

@@ -24,2 +39,4 @@ class Repository {

this.gitDir = gitDir;
this._refMaps = {};
this._refsDir = path.join(gitDir, 'refs');
}

@@ -86,3 +103,3 @@ /**

try {
const names = yield readDirAsync(path.join(this.gitDir, 'refs', dir));
const names = yield readDirAsync(path.join(this._refsDir, dir));
for (let i = 0; i < names.length; i++) {

@@ -92,3 +109,3 @@ const name = names[i];

continue;
const hash = (yield readFileAsync(path.join(this.gitDir, 'refs', dir, name), 'utf8')).trim();
const hash = (yield readFileAsync(path.join(this._refsDir, dir, name), 'utf8')).trim();
map.set(prefix + name, hash);

@@ -101,10 +118,7 @@ }

const readCommits = (map) => __awaiter(this, void 0, void 0, function* () {
const brachOrTags = [];
const refs = [];
for (const [name, hash] of map.entries()) {
brachOrTags[brachOrTags.length] = {
name,
commit: yield commit_1.Commit.readCommit(this.gitDir, hash, this._packs)
};
refs[refs.length] = new Ref(name, this.gitDir, hash, this._packs);
}
return brachOrTags;
return refs;
});

@@ -115,3 +129,3 @@ const branchMap = yield createMap('heads');

try {
const dirs = yield readDirAsync(path.join(this.gitDir, 'refs', 'remotes'));
const dirs = yield readDirAsync(path.join(this._refsDir, 'remotes'));
for (let i = 0; i < dirs.length; i++) {

@@ -127,3 +141,3 @@ const dir = dirs[i];

try {
const s = yield readFileAsync(path.join(this.gitDir, 'info', 'refs'), 'utf8');
const s = yield readFileAsync(path.join(this.gitDir, 'packed-refs'), 'utf8');
addInfoRefs(s, branchMap, tagMap, remoteBranchMap);

@@ -138,2 +152,3 @@ }

this._refs = { heads, tags, remotes };
this._initRefMaps();
delete processings[this.gitDir];

@@ -155,6 +170,8 @@ });

try {
const names = fs.readdirSync(path.join(this.gitDir, 'refs', dir));
const names = fs.readdirSync(path.join(this._refsDir, dir));
for (let i = 0; i < names.length; i++) {
const name = names[i];
const hash = fs.readFileSync(path.join(this.gitDir, 'refs', dir, name), 'utf8').trim();
if (name === 'HEAD')
continue;
const hash = fs.readFileSync(path.join(this._refsDir, dir, name), 'utf8').trim();
map.set(prefix + name, hash);

@@ -167,10 +184,7 @@ }

const readCommits = (map) => {
const brachOrTags = [];
const refs = [];
for (const [name, hash] of map.entries()) {
brachOrTags[brachOrTags.length] = {
name,
commit: commit_1.Commit.readCommitSync(this.gitDir, hash, this._packs)
};
refs[refs.length] = new Ref(name, this.gitDir, hash, this._packs);
}
return brachOrTags;
return refs;
};

@@ -181,3 +195,3 @@ const branchMap = createMap('heads');

try {
const dirs = fs.readdirSync(path.join(this.gitDir, 'refs', 'remotes'));
const dirs = fs.readdirSync(path.join(this._refsDir, 'remotes'));
for (let i = 0; i < dirs.length; i++) {

@@ -193,3 +207,3 @@ const dir = dirs[i];

try {
const s = fs.readFileSync(path.join(this.gitDir, 'info', 'refs'), 'utf8');
const s = fs.readFileSync(path.join(this.gitDir, 'packed-refs'), 'utf8');
addInfoRefs(s, branchMap, tagMap, remoteBranchMap);

@@ -203,3 +217,11 @@ }

};
this._initRefMaps();
}
// Rebuild the name → Ref lookup maps in this._refMaps from the ref arrays
// in this._refs, one Map per category key (e.g. heads / tags / remotes),
// enabling O(1) lookups in _findRef instead of linear scans.
_initRefMaps() {
Object.keys(this._refs).forEach((k) => {
const m = new Map();
this._refs[k].forEach(ref => m.set(ref.name, ref));
this._refMaps[k] = m;
});
}
/**

@@ -228,6 +250,3 @@ * Read the information in `.git/HEAD`.

type: 'branch',
branch: {
name,
commit: yield this.readCommitByBranch(name),
}
branch: yield this._readRef('heads', name),
};

@@ -255,6 +274,3 @@ }

type: 'branch',
branch: {
name,
commit: this.readCommitByBranchSync(name),
}
branch: this._readRefSync('heads', name),
};

@@ -269,3 +285,3 @@ }

}
_readBranchesOrTags(dir) {
_readRefs(dir) {
return __awaiter(this, void 0, void 0, function* () {

@@ -277,3 +293,3 @@ if (!this._refs)

}
_readBranchesOrTagsSync(dir) {
_readRefsSync(dir) {
if (!this._refs)

@@ -283,19 +299,19 @@ this._initRefsSync();

}
_findCommitFromRefs(dir, name) {
const branchOrTag = this._refs[dir].find(o => o.name === name);
if (!branchOrTag)
_findRef(dir, name) {
const ref = this._refMaps[dir].get(name);
if (!ref)
throw new Error(`refs/${dir}/${name} is not found.`);
return branchOrTag.commit;
return ref;
}
_readCommitByBranchOrTag(dir, name) {
_readRef(dir, name) {
return __awaiter(this, void 0, void 0, function* () {
if (!this._refs)
yield this._initRefs();
return this._findCommitFromRefs(dir, name);
return this._findRef(dir, name);
});
}
_readCommitByBranchOrTagSync(dir, name) {
_readRefSync(dir, name) {
if (!this._refs)
this._initRefsSync();
return this._findCommitFromRefs(dir, name);
return this._findRef(dir, name);
}

@@ -318,3 +334,3 @@ /**

return yield Promise
.all(dirs.map((dir) => __awaiter(this, void 0, void 0, function* () { return yield this._readBranchesOrTags(dir); })))
.all(dirs.map((dir) => __awaiter(this, void 0, void 0, function* () { return yield this._readRefs(dir); })))
.then(results => Array.prototype.concat.apply([], results));

@@ -330,3 +346,3 @@ });

}
return Array.prototype.concat.apply([], dirs.map(dir => this._readBranchesOrTagsSync(dir)));
return Array.prototype.concat.apply([], dirs.map(dir => this._readRefsSync(dir)));
}

@@ -344,6 +360,6 @@ /**

try {
return yield this._readCommitByBranchOrTag('heads', branchName);
return (yield this._readRef('heads', branchName)).commit;
}
catch (e) {
return yield this._readCommitByBranchOrTag('remotes', branchName);
return (yield this._readRef('remotes', branchName)).commit;
}

@@ -357,6 +373,6 @@ });

try {
return this._readCommitByBranchOrTagSync('heads', branchName);
return this._readRefSync('heads', branchName).commit;
}
catch (e) {
return this._readCommitByBranchOrTagSync('remotes', branchName);
return this._readRefSync('remotes', branchName).commit;
}

@@ -373,3 +389,3 @@ }

return __awaiter(this, void 0, void 0, function* () {
return yield this._readBranchesOrTags('tags');
return yield this._readRefs('tags');
});

@@ -381,3 +397,3 @@ }

readTagsSync() {
return this._readBranchesOrTagsSync('tags');
return this._readRefsSync('tags');
}

@@ -393,3 +409,3 @@ /**

return __awaiter(this, void 0, void 0, function* () {
return this._readCommitByBranchOrTag('tags', tagName);
return (yield this._readRef('tags', tagName)).commit;
});

@@ -401,3 +417,3 @@ }

readCommitByTagSync(tagName) {
return this._readCommitByBranchOrTagSync('tags', tagName);
return this._readRefSync('tags', tagName).commit;
}

@@ -409,27 +425,20 @@ }

const lines = s.trim().split('\n').forEach((line) => {
const [hash, ref] = line.split(/\s+/);
let name = ref.split('/').pop();
if (/^refs\/heads\//.test(ref)) {
if (branchMap.has(name))
return;
branchMap.set(name, hash);
const m = line.match(/([a-f\d]{40}) refs\/(heads|remotes|tags)\/(.+)/);
console.log(m, line);
if (!m)
return;
const [, hash, type, name] = m;
switch (type) {
case 'heads':
!branchMap.has(name) && branchMap.set(name, hash);
break;
case 'remotes':
!remoteBranchMap.has(name) && remoteBranchMap.set(name, hash);
break;
case 'tags':
!tagMap.has(name) && tagMap.set(name, hash);
break;
}
else if (/^refs\/remotes\//.test(ref)) {
name = ref.slice('refs/remotes/'.length);
if (remoteBranchMap.has(name) || name.split('/').pop() === 'HEAD')
return;
remoteBranchMap.set(name, hash);
}
else {
if (name.endsWith('^{}')) {
name = name.slice(0, -3);
}
_tagMap.set(name, hash);
}
});
for (const [name, hash] of _tagMap.entries()) {
if (!tagMap.has(name))
tagMap.set(name, hash);
}
}
//# sourceMappingURL=repo.js.map
module.exports = require('./dist');
{
"name": "tiny-commit-walker",
"version": "1.1.0",
"version": "1.1.1",
"description": "tiny commit walker",

@@ -5,0 +5,0 @@ "main": "index.js",

@@ -0,0 +0,0 @@ # Tiny commit walker

@@ -0,0 +0,0 @@ {

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc