Comparing version 0.5.2 to 0.6.0
@@ -1,3 +0,1 @@ | ||
const logger = require('../../lib/logger.js'); | ||
exports.command = 'create <name>'; | ||
@@ -14,2 +12,3 @@ exports.desc = 'Create a holobranch named <name>'; | ||
exports.handler = async function createBranch ({ name, template }) { | ||
const logger = require('../../lib/logger.js'); | ||
const { Repo } = require('../../lib'); | ||
@@ -16,0 +15,0 @@ |
@@ -1,3 +0,1 @@ | ||
const logger = require('../lib/logger.js'); | ||
exports.command = 'init'; | ||
@@ -12,2 +10,3 @@ exports.desc = 'Initialize hologit for current repository'; | ||
exports.handler = async function init ({ name = null }) { | ||
const logger = require('../lib/logger.js'); | ||
const { Repo } = require('../lib'); | ||
@@ -14,0 +13,0 @@ const path = require('path'); |
@@ -1,3 +0,1 @@ | ||
const logger = require('../../lib/logger.js'); | ||
exports.command = 'export-tree <treeish>'; | ||
@@ -7,2 +5,3 @@ exports.desc = 'Export given <treeish> to current index and working tree'; | ||
exports.handler = async function exportTree ({ treeish }) { | ||
const logger = require('../../lib/logger.js'); | ||
const hololib = require('../../lib'); | ||
@@ -9,0 +8,0 @@ |
@@ -1,3 +0,1 @@ | ||
const logger = require('../../lib/logger.js'); | ||
exports.command = 'merge-trees <treeish-base> <treeish-input>'; | ||
@@ -22,2 +20,3 @@ exports.desc = 'Merge <treeish-input> into <treeish-base>'; | ||
exports.handler = async function mergeTrees ({ treeishBase, treeishInput, method }) { | ||
const logger = require('../lib/logger.js'); | ||
const hololib = require('../../lib'); | ||
@@ -24,0 +23,0 @@ const git = await hololib.getGit(); |
@@ -1,3 +0,1 @@ | ||
const logger = require('../lib/logger.js'); | ||
exports.command = 'project <holobranch>'; | ||
@@ -20,5 +18,10 @@ exports.desc = 'Projects holobranch named <holobranch> and outputs resulting tree hash'; | ||
'working': { | ||
decribe: 'Set to use the (possibly uncommited) contents of the working tree', | ||
describe: 'Set to use the (possibly uncommited) contents of the working tree', | ||
type: 'boolean', | ||
default: false | ||
}, | ||
'lens': { | ||
describe: 'Whether to apply lensing to the composite tree', | ||
type: 'boolean', | ||
default: true | ||
} | ||
@@ -43,2 +46,3 @@ }; | ||
ref = 'HEAD', | ||
lens = true, | ||
working = false, | ||
@@ -49,5 +53,5 @@ debug = false, | ||
}) { | ||
const hab = await require('hab-client').requireVersion('>=0.62'); | ||
const logger = require('../lib/logger.js'); | ||
const handlebars = require('handlebars'); | ||
const { Repo } = require('../lib'); | ||
const { Repo, Projection } = require('../lib'); | ||
const mkdirp = require('mz-modules/mkdirp'); | ||
@@ -67,10 +71,13 @@ const path = require('path'); | ||
// load .holo info | ||
// load holorepo | ||
const repo = await Repo.getFromEnvironment({ ref, working }); | ||
// instantiate projection | ||
const projection = new Projection(repo.getBranch(holobranch)); | ||
// read holobranch mappings | ||
holobranch = repo.getBranch(holobranch); | ||
logger.info('reading mappings from holobranch:', holobranch); | ||
const mappings = await holobranch.getMappings(); | ||
logger.info('reading mappings from holobranch:', projection.branch); | ||
const mappings = await projection.branch.getMappings(); | ||
@@ -125,4 +132,2 @@ | ||
logger.info('compositing tree...'); | ||
const outputTree = await repo.createTree(); | ||
for (const mapping of sortedMappings) { | ||
@@ -141,3 +146,3 @@ const { layer, root, files, output, holosource } = await mapping.getCachedConfig(); | ||
// merge source into target | ||
const targetTree = output == '.' ? outputTree : await outputTree.getSubtree(output, true); | ||
const targetTree = output == '.' ? projection.output : await projection.output.getSubtree(output, true); | ||
await targetTree.merge(sourceTree, { | ||
@@ -152,213 +157,66 @@ files: files | ||
logger.info('writing output tree before lensing...'); | ||
const outputTreeHashBeforeLensing = await outputTree.write(); | ||
const outputTreeHashBeforeLensing = await projection.output.write(); | ||
logger.info('output tree before lensing:', outputTreeHashBeforeLensing); | ||
} | ||
// read lens tree from output | ||
const lensFiles = {}; | ||
const holoTree = await outputTree.getSubtree('.holo'); | ||
if (holoTree) { | ||
const lensesTree = await holoTree.getSubtree('lenses'); | ||
if (lens) { | ||
// read lenses | ||
const lenses = await projection.getLenses(); | ||
if (lensesTree) { | ||
const lensesTreeChildren = await lensesTree.getChildren(); | ||
for (const lensName in lensesTreeChildren) { | ||
lensFiles[lensName] = lensesTreeChildren[lensName]; | ||
} | ||
// apply lenses | ||
const scratchRoot = path.join('/hab/cache/holo', repo.gitDir.substr(1).replace(/\//g, '--'), projection.branch.name); | ||
holoTree.deleteChild('lenses'); | ||
} | ||
} | ||
for (const lens of lenses) { | ||
const { | ||
input: { | ||
root: inputRoot, | ||
files: inputFiles | ||
}, | ||
output: { | ||
root: outputRoot, | ||
merge: outputMerge | ||
} | ||
} = await lens.getCachedConfig(); | ||
// build tree of matching files to input to lens | ||
logger.info(`building input tree for lens ${lens.name} from ${inputRoot == '.' ? '' : (path.join(inputRoot, '.')+'/')}{${inputFiles}}`); | ||
const { hash: specHash, ref: specRef } = await lens.buildSpec(await lens.buildInputTree()); | ||
// read lenses | ||
const lenses = []; | ||
const lensesByName = {}; | ||
const lensNameFromPathRe = /^([^\/]+)\.toml$/; | ||
for (const lensPath in lensFiles) { | ||
const lensFile = lensFiles[lensPath]; | ||
// check for existing output tree | ||
let outputTreeHash = await repo.resolveRef(`${specRef}^{tree}`); | ||
if (!lensFile || !lensFile.isBlob) { | ||
continue; | ||
} | ||
// TODO: use a Configurable class to instantiate and load | ||
const name = lensPath.replace(lensNameFromPathRe, '$1'); | ||
const config = TOML.parse(await repo.git.catFile({ p: true }, lensFile.hash)); | ||
if (!config.hololens || !config.hololens.package) { | ||
throw new Error(`lens config missing hololens.package: ${lensPath}`); | ||
} | ||
if (!config.input || !config.input.files) { | ||
throw new Error(`lens config missing input.files: ${lensPath}`); | ||
} | ||
// parse and normalize lens config | ||
const hololens = config.hololens; | ||
hololens.package = hololens.package; | ||
hololens.command = hololens.command || 'lens-tree {{ input }}'; | ||
// parse and normalize input config | ||
const input = {}; | ||
input.files = typeof config.input.files == 'string' ? [config.input.files] : config.input.files; | ||
input.root = config.input.root || '.'; | ||
if (config.input.before) { | ||
input.before = | ||
typeof config.input.before == 'string' | ||
? [config.input.before] | ||
: config.input.before; | ||
} | ||
if (config.input.after) { | ||
input.after = | ||
typeof config.input.after == 'string' | ||
? [config.input.after] | ||
: config.input.after; | ||
} | ||
// parse and normalize output config | ||
const output = {}; | ||
output.root = config.output && config.output.root || input.root; | ||
output.merge = config.output && config.output.merge || 'overlay'; | ||
lenses.push(lensesByName[name] = { name, hololens, input, output }); | ||
} | ||
// compile edges formed by before/after requirements | ||
const lensEdges = []; | ||
for (const lens of lenses) { | ||
if (lens.input.after) { | ||
for (const afterLens of lens.input.after) { | ||
lensEdges.push([lensesByName[afterLens], lens]); | ||
// apply lens if existing tree not found | ||
if (outputTreeHash) { | ||
logger.info(`found existing output tree matching holospec(${specHash})`); | ||
} else { | ||
outputTreeHash = await lens.execute(specHash); | ||
} | ||
} | ||
if (lens.input.before) { | ||
for (const beforeLens of lens.input.before) { | ||
lensEdges.push([lens, lensesByName[beforeLens]]); | ||
} | ||
} | ||
} | ||
// apply lense output to main output tree | ||
logger.info(`merging lens output tree(${outputTreeHash}) into /${outputRoot != '.' ? outputRoot+'/' : ''}`); | ||
// sort specs by before/after requirements | ||
const sortedLenses = toposort.array(lenses, lensEdges); | ||
const lensedTree = await repo.createTreeFromRef(outputTreeHash); | ||
const lensTargetStack = await projection.output.getSubtree(outputRoot, true, true); | ||
const lensTargetTree = lensTargetStack.pop(); | ||
// apply lenses | ||
const scratchRoot = path.join('/hab/cache/holo', repo.gitDir.substr(1).replace(/\//g, '--'), holobranch.name); | ||
for (const lens of sortedLenses) { | ||
// build tree of matching files to input to lens | ||
logger.info(`building input tree for lens ${lens.name} from ${lens.input.root == '.' ? '' : (path.join(lens.input.root, '.')+'/')}{${lens.input.files}}`); | ||
const lensInputTree = repo.git.createTree(); | ||
const lensInputRoot = lens.input.root == '.' ? outputTree : await outputTree.getSubtree(lens.input.root); | ||
// merge input root into tree with any filters applied | ||
await lensInputTree.merge(lensInputRoot, { | ||
files: lens.input.files | ||
}); | ||
const lensInputTreeHash = await lensInputTree.write(); | ||
// execute lens via habitat | ||
let pkgPath; | ||
try { | ||
pkgPath = await hab.pkg('path', lens.hololens.package); | ||
} catch (err) { | ||
if (err.code != 1) { | ||
throw err; | ||
} | ||
// try to install package | ||
logger.info('installing package for', lens.hololens.package); | ||
await hab.pkg('install', lens.hololens.package); | ||
pkgPath = await hab.pkg('path', lens.hololens.package); | ||
} | ||
// trim path to leave just fully-qualified ident | ||
lens.hololens.package = pkgPath.substr(10); | ||
// build and hash spec | ||
const spec = { | ||
hololens: sortKeys(lens.hololens, { deep: true }), | ||
input: lensInputTreeHash | ||
}; | ||
const specToml = TOML.stringify(spec); | ||
const specHash = await repo.git.BlobObject.write(specToml, repo.git); | ||
const specRef = `refs/holo/specs/${specHash.substr(0, 2)}/${specHash.substr(2)}`; | ||
// check for existing output tree | ||
let lensedTreeHash = await repo.git.revParse(`${specRef}^{tree}`, { $nullOnError: true }); | ||
// apply lens if existing tree not found | ||
if (lensedTreeHash) { | ||
logger.info(`found existing output tree matching holospec(${specHash})`); | ||
} else { | ||
// assign scratch directory for lens | ||
const scratchPath = `${scratchRoot}/${lens.name}`; | ||
await mkdirp(scratchPath); | ||
// compile and execute command | ||
const command = handlebars.compile(lens.hololens.command)(spec); | ||
logger.info('executing lens %s: %s', lens.hololens.package, command); | ||
lensedTreeHash = await hab.pkg('exec', lens.hololens.package, ...shellParse(command), { | ||
$env: Object.assign( | ||
squish({ | ||
hololens: spec.hololens | ||
}, { seperator: '_', modifyKey: 'uppercase' }), | ||
{ | ||
HOLOSPEC: specHash, | ||
GIT_DIR: repo.gitDir, | ||
GIT_WORK_TREE: scratchPath, | ||
GIT_INDEX_FILE: `${scratchPath}.index` | ||
} | ||
) | ||
await lensTargetTree.merge(lensedTree, { | ||
mode: outputMerge | ||
}); | ||
if (!repo.git.isHash(lensedTreeHash)) { | ||
throw new Error(`lens "${command}" did not return hash: ${lensedTreeHash}`); | ||
} | ||
// save spec output | ||
await repo.git.updateRef(specRef, lensedTreeHash); | ||
} | ||
// apply lense output to main output tree | ||
logger.info(`merging lens output tree(${lensedTreeHash}) into /${lens.output.root != '.' ? lens.output.root+'/' : ''}`); | ||
// strip .holo/ from output | ||
logger.info('stripping .holo/ tree from output tree...'); | ||
projection.output.deleteChild('.holo'); | ||
} else { | ||
const holoTree = await projection.output.getSubtree('.holo'); | ||
const lensedTree = await repo.git.createTreeFromRef(lensedTreeHash); | ||
const lensTargetStack = await outputTree.getSubtree(lens.output.root, true, true); | ||
const lensTargetTree = lensTargetStack.pop(); | ||
await lensTargetTree.merge(lensedTree, { | ||
mode: lens.output.merge | ||
}); | ||
if (lensTargetTree !== outputTree && lensTargetTree.dirty) { | ||
// mark parents of lens target | ||
for (const parent of lensTargetStack) { | ||
parent.dirty = true; | ||
for (const childName in await holoTree.getChildren()) { | ||
if (childName != 'lenses') { | ||
holoTree.deleteChild(childName); | ||
} | ||
@@ -369,10 +227,5 @@ } | ||
// strip .holo/ from output | ||
logger.info('stripping .holo/ tree from output tree...'); | ||
outputTree.deleteChild('.holo'); | ||
// write tree | ||
logger.info('writing final output tree...'); | ||
const rootTreeHash = await outputTree.write(); | ||
const rootTreeHash = await projection.output.write(); | ||
@@ -391,3 +244,3 @@ | ||
p: parentHash, | ||
m: commitMessage || `Projected ${holobranch.name} from ${await git.describe({ always: true, tags: true })}` | ||
m: commitMessage || `Projected ${projection.branch.name} from ${await git.describe({ always: true, tags: true })}` | ||
}, | ||
@@ -394,0 +247,0 @@ rootTreeHash |
@@ -1,3 +0,1 @@ | ||
const logger = require('../lib/logger.js'); | ||
exports.command = 'watch'; | ||
@@ -7,2 +5,3 @@ exports.desc = 'Watch the current working tree and automatically update projection'; | ||
exports.handler = async function watch (options) { | ||
const logger = require('../lib/logger.js'); | ||
const fs = require('mz/fs'); | ||
@@ -9,0 +8,0 @@ const watchman = require('fb-watchman'); |
{ | ||
"compilerOptions": { | ||
"target": "es6" | ||
"target": "es2018", | ||
"checkJs": true, | ||
"noUnusedLocals": true, | ||
"module": "commonjs", | ||
"moduleResolution": "node" | ||
}, | ||
@@ -5,0 +9,0 @@ "exclude": [ |
@@ -1,4 +0,1 @@ | ||
const path = require('path'); | ||
const Configurable = require('./Configurable.js'); | ||
@@ -65,2 +62,3 @@ const Mapping = require('./Mapping.js'); | ||
const treePath = `.holo/branches/${this.name}`; | ||
const mappingPathRe = /^(.+)\.toml$/; | ||
const mappings = new Map(); | ||
@@ -76,3 +74,3 @@ | ||
for (const mappingPath of tree) { | ||
const mappingPathMatches = mappingPath.match(/^(.+)\.toml$/); | ||
const mappingPathMatches = mappingPath.match(mappingPathRe); | ||
@@ -79,0 +77,0 @@ // skip any file not ending in .toml |
@@ -5,2 +5,5 @@ exports.Git = require('./Git.js'); | ||
exports.Source = require('./Source.js'); | ||
exports.Lens = require('./Lens.js'); | ||
exports.Projection = require('./Projection.js'); | ||
exports.Studio = require('./Studio.js'); | ||
@@ -7,0 +10,0 @@ exports.BlobObject = require('./BlobObject.js'); |
@@ -77,7 +77,4 @@ const path = require('path'); | ||
const { hash } = await SpecObject.write(this.repo, 'source', data); | ||
const spec = { | ||
hash, | ||
ref: `refs/holo/sources/${hash}`, | ||
...await SpecObject.write(this.repo, 'source', data), | ||
data | ||
@@ -84,0 +81,0 @@ }; |
@@ -13,5 +13,13 @@ const sortKeys = require('sort-keys'); | ||
holospec[kind] = sortKeys(data, { deep: true }); | ||
return super.write(repo, TOML.stringify({ holospec })); | ||
const { hash } = await super.write(repo, TOML.stringify({ holospec })); | ||
return { | ||
hash, | ||
ref: SpecObject.buildRef(kind, hash) | ||
}; | ||
} | ||
static buildRef (kind, hash) { | ||
return `refs/holo/${kind}/${hash.substr(0, 2)}/${hash.substr(2)}`; | ||
} | ||
} | ||
@@ -18,0 +26,0 @@ |
@@ -54,6 +54,7 @@ const path = require('path'); | ||
constructor (repo, { hash = EMPTY_TREE_HASH } = {}) { | ||
constructor (repo, { hash = EMPTY_TREE_HASH, parent = null } = {}) { | ||
this.repo = repo; | ||
this.dirty = false; | ||
this.hash = hash; | ||
this.parent = parent; | ||
this._children = {}; | ||
@@ -77,2 +78,16 @@ this._baseChildren = null; | ||
markDirty () { | ||
if (this.dirty) { | ||
return; | ||
} | ||
this.dirty = true; | ||
let parent = this.parent; | ||
while (parent) { | ||
parent.dirty = true; | ||
parent = parent.parent; | ||
} | ||
} | ||
async _loadBaseChildren (preloadChildren = false) { | ||
@@ -161,3 +176,3 @@ if (!this.hash || this.hash == EMPTY_TREE_HASH) { | ||
this._children[childName] = null; | ||
this.dirty = true; | ||
this.markDirty(); | ||
} | ||
@@ -193,4 +208,3 @@ } | ||
nextTree = tree._children[subtreeName] = new TreeObject(this.repo, { }); | ||
nextTree = tree._children[subtreeName] = new TreeObject(this.repo, { parent: this }); | ||
for (const parent of parents) { | ||
@@ -340,3 +354,3 @@ parent.dirty = true; | ||
this._children[childName] = inputChild; | ||
this.dirty = true; | ||
this.markDirty(); | ||
continue; | ||
@@ -353,3 +367,3 @@ } | ||
// finish merging its decendents into an empty tree and skip if it stays empty | ||
baseChild = new TreeObject(this.repo); | ||
baseChild = new TreeObject(this.repo, { parent: this }); | ||
await baseChild.merge(inputChild, options, childPath); | ||
@@ -359,3 +373,3 @@ | ||
this._children[childName] = baseChild; | ||
this.dirty = true; | ||
this.markDirty(); | ||
} | ||
@@ -367,4 +381,4 @@ | ||
if (!inputChild.dirty) { | ||
this._children[childName] = new TreeObject(this.repo, { hash: inputChild.hash }); | ||
this.dirty = true; | ||
this._children[childName] = new TreeObject(this.repo, { hash: inputChild.hash, parent: this }); | ||
this.markDirty(); | ||
continue; | ||
@@ -374,4 +388,4 @@ } | ||
// create an empty tree to merge input into | ||
baseChild = this._children[childName] = new TreeObject(this.repo); | ||
this.dirty = true; | ||
baseChild = this._children[childName] = new TreeObject(this.repo, { parent: this }); | ||
this.markDirty(); | ||
baseChildEmpty = true; | ||
@@ -388,3 +402,3 @@ } | ||
if (baseChild.dirty) { | ||
this.dirty = true; | ||
this.markDirty(); | ||
} | ||
@@ -391,0 +405,0 @@ }); |
{ | ||
"name": "hologit", | ||
"version": "0.5.2", | ||
"version": "0.6.0", | ||
"description": "Hologit automates the projection of layered composite file trees based on flat, declarative plans", | ||
@@ -12,2 +12,3 @@ "repository": "https://github.com/EmergencePlatform/hologit", | ||
"@iarna/toml": "^2.0.0", | ||
"dockerode": "^2.5.7", | ||
"fb-watchman": "^2.0.0", | ||
@@ -14,0 +15,0 @@ "git-client": "^1.1.1", |
Network access
Supply chain risk: This module accesses the network.
Found 2 instances in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 2 instances in 1 package
96059
38
2166
16
4
2
+ Added dockerode@^2.5.7
+ Added JSONStream@1.3.2 (transitive)
+ Added bl@1.2.3 (transitive)
+ Added buffer-alloc@1.2.0 (transitive)
+ Added buffer-alloc-unsafe@1.1.0 (transitive)
+ Added buffer-fill@1.0.0 (transitive)
+ Added buffer-from@1.1.2 (transitive)
+ Added chownr@1.1.4 (transitive)
+ Added concat-stream@1.6.2 (transitive)
+ Added core-util-is@1.0.3 (transitive)
+ Added debug@3.2.7 (transitive)
+ Added docker-modem@1.0.9 (transitive)
+ Added dockerode@2.5.8 (transitive)
+ Added fs-constants@1.0.0 (transitive)
+ Added isarray@0.0.1, 1.0.0 (transitive)
+ Added jsonparse@1.3.1 (transitive)
+ Added process-nextick-args@2.0.1 (transitive)
+ Added pump@1.0.3 (transitive)
+ Added readable-stream@1.0.34, 2.3.8 (transitive)
+ Added safe-buffer@5.1.2 (transitive)
+ Added split-ca@1.0.1 (transitive)
+ Added string_decoder@0.10.31, 1.1.1 (transitive)
+ Added tar-fs@1.16.4 (transitive)
+ Added tar-stream@1.6.2 (transitive)
+ Added through@2.3.8 (transitive)
+ Added to-buffer@1.1.1 (transitive)
+ Added typedarray@0.0.6 (transitive)
+ Added util-deprecate@1.0.2 (transitive)
+ Added xtend@4.0.2 (transitive)