@antora/content-aggregator
Comparing version 3.0.0-alpha.8 to 3.0.0-alpha.9
'use strict'
if (!Promise.allSettled) require('./promise-all-settled-polyfill')
const camelCaseKeys = require('camelcase-keys')
@@ -17,3 +19,3 @@ const { createHash } = require('crypto')
const { NotFoundError, ObjectTypeError, UnknownTransportError, UrlParseError } = git.Errors
const invariably = { false: () => false, void: () => undefined, emptyArray: () => [] }
const invariably = { true: () => true, false: () => false, void: () => undefined, emptyArray: () => [] }
const { makeRe: makePicomatchRx } = require('picomatch')
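The `invariably` constant now also provides a `true` helper. These constant-returning functions are passed as the second argument to `.then()` throughout this diff, replacing trailing `.catch()` calls. A minimal standalone sketch (the `exists` helper is hypothetical, not part of the package) of why the two forms differ:

const { promises: fsp } = require('fs')
const invariably = { true: () => true, false: () => false }

// .then(onResolved, onRejected): the rejection handler only covers the original
// promise, so an error thrown inside onResolved is not silently swallowed.
const exists = (path) => fsp.access(path).then(invariably.true, invariably.false)

// .then(onResolved).catch(onRejected): the catch would also swallow errors
// thrown by onResolved, which is usually not what's intended.
exists('.').then((result) => console.log(result)) // => true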
@@ -93,44 +95,66 @@ const matcher = require('matcher')
const { branches, editUrl, tags, sources } = playbook.content
const { cacheDir, fetch, quiet } = playbook.runtime
return ensureCacheDir(cacheDir, startDir).then((resolvedCacheDir) => {
const gitPlugins = loadGitPlugins(
Object.assign({ ensureGitSuffix: true }, playbook.git),
playbook.network || {},
startDir
)
const sourcesByUrl = sources.reduce(
(accum, source) => accum.set(source.url, [...(accum.get(source.url) || []), source]),
new Map()
)
const sourceDefaults = { branches, editUrl, tags }
const { cacheDir: requestedCacheDir, fetch, quiet } = playbook.runtime
return ensureCacheDir(requestedCacheDir, startDir).then((cacheDir) => {
const gitConfig = Object.assign({ ensureGitSuffix: true }, playbook.git)
const gitPlugins = loadGitPlugins(gitConfig, playbook.network || {}, startDir)
const fetchConcurrency = Math.max(gitConfig.fetchConcurrency || Infinity, 1)
const sourcesByUrl = sources.reduce((accum, source) => {
return accum.set(source.url, [...(accum.get(source.url) || []), Object.assign({}, sourceDefaults, source)])
}, new Map())
const progress = !quiet && createProgress(sourcesByUrl.keys(), process.stdout)
return Promise.all(
[...sourcesByUrl.entries()].map(([url, sources]) =>
loadRepository(url, {
cacheDir: resolvedCacheDir,
gitPlugins,
fetchTags: tagsSpecified(sources, tags),
progress,
fetch,
startDir,
}).then(({ repo, authStatus }) =>
Promise.all(
sources.map((source) => {
source = Object.assign({ branches, editUrl, tags }, source)
// NOTE if repository is managed (has a url property), we can assume the remote name is origin
// TODO if the repo has no remotes, then remoteName should be undefined
const remoteName = repo.url ? 'origin' : source.remote || 'origin'
return collectFilesFromSource(source, repo, remoteName, authStatus)
})
)
)
)
.then(buildAggregate)
.catch((err) => {
progress && progress.terminate()
throw err
})
const loadOpts = { cacheDir, fetch, gitPlugins, progress, startDir }
return collectFiles(sourcesByUrl, loadOpts, fetchConcurrency).then(buildAggregate, (err) => {
progress && progress.terminate()
throw err
})
})
}
async function collectFiles (sourcesByUrl, loadOpts, concurrency) {
const tasks = [...sourcesByUrl.entries()].map(([url, sources]) => [
() => loadRepository(url, Object.assign({ fetchTags: tagsSpecified(sources) }, loadOpts)),
({ repo, authStatus }) =>
Promise.all(
sources.map((source) => {
// NOTE if repository is managed (has a url property), we can assume the remote name is origin
// TODO if the repo has no remotes, then remoteName should be undefined
const remoteName = repo.url ? 'origin' : source.remote || 'origin'
return collectFilesFromSource(source, repo, remoteName, authStatus)
})
),
])
let rejected, started
const startedContinuations = []
const recordRejection = (err) => {
throw (rejected = true) && err
}
const runTask = (primary, continuation, idx) =>
primary().then((value) => {
if (!rejected) startedContinuations[idx] = continuation(value).catch(recordRejection)
}, recordRejection)
if (tasks.length > concurrency) {
started = []
const pending = []
for (const [primary, continuation] of tasks) {
const current = runTask(primary, continuation, started.length).finally(() =>
pending.splice(pending.indexOf(current), 1)
)
started.push(current)
if (pending.push(current) < concurrency) continue
if (await Promise.race(pending).then(invariably.true, invariably.false)) continue
break
}
} else {
started = tasks.map(([primary, continuation], idx) => runTask(primary, continuation, idx))
}
return Promise.allSettled(started).then((outcomes) =>
Promise.allSettled(startedContinuations).then((continuationOutcomes) => {
const rejection = outcomes.push(...continuationOutcomes) && outcomes.find(({ status }) => status === 'rejected')
if (rejection) throw rejection.reason
return continuationOutcomes.map(({ value }) => value)
})
)
}
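The new `collectFiles` function caps how many repositories are loaded at once, based on the `fetchConcurrency` value read from `playbook.git` above. The bounded-concurrency pattern it relies on, a sliding pool of pending promises drained with `Promise.race`, can be illustrated in isolation. The following is a simplified standalone sketch with hypothetical task names, not the package's implementation:

async function runWithLimit (taskFns, limit) {
  const results = []
  const pending = []
  for (const taskFn of taskFns) {
    // start the task and remove it from the pool once it settles
    const current = taskFn().finally(() => pending.splice(pending.indexOf(current), 1))
    results.push(current)
    // once the pool is full, wait for any pending task to settle before starting another
    if (pending.push(current) >= limit) await Promise.race(pending).catch(() => undefined)
  }
  return Promise.allSettled(results)
}

// usage: at most 2 of the 5 tasks run at the same time
const makeTask = (n) => () => new Promise((resolve) => setTimeout(resolve, 100, n))
runWithLimit([1, 2, 3, 4, 5].map(makeTask), 2).then((outcomes) => console.log(outcomes.length)) // => 5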
function buildAggregate (componentVersionBuckets) {
@@ -153,8 +177,5 @@ return [
async function loadRepository (url, opts) {
let dir
let repo
let authStatus
let authStatus, dir, repo
if (~url.indexOf(':') && GIT_URI_DETECTOR_RX.test(url)) {
let displayUrl
let credentials
let credentials, displayUrl
;({ displayUrl, url, credentials } = extractCredentials(url))
@@ -200,3 +221,3 @@ const { cacheDir, fetch, fetchTags, gitPlugins, progress } = opts
})
.catch(async (cloneErr) => {
.catch((cloneErr) => {
// FIXME triggering the error handler here causes assertion problems in the test suite
@@ -424,8 +445,4 @@ //if (fetchOpts.onProgress) fetchOpts.onProgress.finish(cloneErr)
const cwd = ospath.join(worktreePath, startPath)
return fsp
.stat(cwd)
.catch(() => {
throw new Error(`the start path '${startPath}' does not exist`)
})
.then((stat) => {
return fsp.stat(cwd).then(
(stat) => {
if (!stat.isDirectory()) throw new Error(`the start path '${startPath}' is not a directory`)
@@ -448,5 +465,9 @@ return new Promise((resolve, reject) =>
.pipe(relativizeFiles())
.pipe(collectFiles(resolve))
.pipe(collectDataFromStream(resolve))
)
})
},
() => {
throw new Error(`the start path '${startPath}' does not exist`)
}
)
}
@@ -480,7 +501,7 @@
function collectFiles (done) {
function collectDataFromStream (done) {
const accum = []
return map(
(file, enc, next) => {
accum.push(file)
(obj, _, next) => {
accum.push(obj)
next()
@@ -503,9 +524,9 @@ },
function getGitTreeAtStartPath (repo, oid, startPath) {
return git
.readTree(Object.assign({ oid, filepath: startPath }, repo))
.catch((err) => {
return git.readTree(Object.assign({ oid, filepath: startPath }, repo)).then(
(result) => Object.assign(result, { dirname: startPath }),
(err) => {
const m = err instanceof ObjectTypeError && err.data.expected === 'tree' ? 'is not a directory' : 'does not exist'
throw new Error(`the start path '${startPath}' ${m}`)
})
.then((result) => Object.assign(result, { dirname: startPath }))
}
)
}
@@ -529,5 +550,7 @@
visitGitTree(this, repo, root, filter, start)
.then(() => this.emit('end'))
// NOTE if error is thrown, promises already being resolved won't halt
.catch((err) => this.emit('error', err))
.then(
() => this.emit('end'),
(err) => this.emit('error', err)
)
)
@@ -555,4 +578,11 @@ },
reads.push(
readGitSymlink(repo, root, parent, entry, following)
.catch((err) => {
readGitSymlink(repo, root, parent, entry, following).then(
(target) => {
if (target.type === 'tree') {
return visitGitTree(emitter, repo, root, filter, target, vfilePath, new Set(following).add(entry.oid))
} else if (target.type === 'blob' && filterVerdict === true && (mode = FILE_MODES[target.mode])) {
emitter.emit('entry', Object.assign({ mode, oid: target.oid, path: vfilePath }, repo))
}
},
(err) => {
// NOTE this error could be caught after promise chain has already been rejected
@@ -565,10 +595,4 @@ if (err instanceof NotFoundError) {
throw err
})
.then((target) => {
if (target.type === 'tree') {
return visitGitTree(emitter, repo, root, filter, target, vfilePath, new Set(following).add(entry.oid))
} else if (target.type === 'blob' && filterVerdict === true && (mode = FILE_MODES[target.mode])) {
emitter.emit('entry', Object.assign({ mode, oid: target.oid, path: vfilePath }, repo))
}
})
}
)
)
@@ -905,6 +929,3 @@ } else if ((mode = FILE_MODES[entry.mode])) {
function isDirectory (url) {
return fsp
.stat(url)
.then((stat) => stat.isDirectory())
.catch(invariably.false)
return fsp.stat(url).then((stat) => stat.isDirectory(), invariably.false)
}
@@ -942,7 +963,4 @@
function tagsSpecified (sources, defaultTags) {
return ~sources.findIndex((source) => {
const tags = source.tags || defaultTags || []
return Array.isArray(tags) ? tags.length : true
})
function tagsSpecified (sources) {
return sources.some(({ tags }) => tags && (Array.isArray(tags) ? tags.length : true))
}
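The reworked `tagsSpecified` no longer takes a `defaultTags` fallback because the source defaults (`branches`, `editUrl`, `tags`) are now merged into each source up front via `sourceDefaults` in `aggregateContent` above. A standalone sketch of the new check with hypothetical source data:

const tagsSpecified = (sources) => sources.some(({ tags }) => tags && (Array.isArray(tags) ? tags.length : true))

console.log(tagsSpecified([{ url: 'https://example.org/repo.git', tags: ['v*'] }])) // => true
console.log(tagsSpecified([{ url: 'https://example.org/repo.git', tags: [] }, { url: 'https://example.org/other.git' }])) // => false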
@@ -985,13 +1003,12 @@
const cacheDir = ospath.join(baseCacheDir, CONTENT_CACHE_FOLDER)
return fsp
.mkdir(cacheDir, { recursive: true })
.then(() => cacheDir)
.catch((err) => {
return fsp.mkdir(cacheDir, { recursive: true }).then(
() => cacheDir,
(err) => {
throw Object.assign(err, { message: `Failed to create content cache directory: ${cacheDir}; ${err.message}` })
})
}
)
}
function transformGitCloneError (err, displayUrl) {
let wrappedMsg
let trimMessage
let wrappedMsg, trimMessage
if (HTTP_ERROR_CODE_RX.test(err.code)) {
@@ -1042,4 +1059,3 @@ switch (err.data.statusCode) {
.readdir((worktreesDir = ospath.join(repo.dir, '.git', 'worktrees')))
.catch(invariably.emptyArray)
.then((worktreeNames) => matcher(worktreeNames, [...patterns]))
.then((worktreeNames) => matcher(worktreeNames, [...patterns]), invariably.emptyArray)
.then((worktreeNames) =>
@@ -1046,0 +1062,0 @@ worktreeNames.length
@@ -5,3 +5,3 @@ 'use strict'
COMPONENT_DESC_FILENAME: 'antora.yml',
CONTENT_CACHE_FOLDER: 'content/2',
CONTENT_CACHE_FOLDER: 'content',
CONTENT_SRC_GLOB: '**/*[!~]',
@@ -8,0 +8,0 @@ CONTENT_SRC_OPTS: { follow: true, nomount: true, nosort: true, nounique: true, removeBOM: false, uniqueBy: (m) => m },
@@ -5,2 +5,3 @@ 'use strict'
const expandPath = require('@antora/expand-path-helper')
const invariably = { void: () => undefined }
const { promises: fsp } = require('fs')
@@ -13,8 +14,6 @@ const ospath = require('path')
this.urls = {}
if ((this.contents = (config = config || {}).contents)) {
if ((this.contents = (config = config || {}).contents) || !config.path) {
this.path = undefined
} else if (config.path) {
} else {
this.path = expandPath(config.path, { dot: startDir })
} else {
this.path = undefined
}
@@ -33,6 +32,3 @@ return this
} else if (this.path) {
contentsPromise = fsp
.access(this.path)
.then(() => fsp.readFile(this.path, 'utf8'))
.catch(() => undefined)
contentsPromise = fsp.access(this.path).then(() => fsp.readFile(this.path, 'utf8'), invariably.void)
} else {
@@ -51,4 +47,3 @@ const homeGitCredentialsPath = ospath.join(homedir(), '.git-credentials')
.access(xdgConfigGitCredentialsPath)
.then(() => fsp.readFile(xdgConfigGitCredentialsPath, 'utf8'))
.catch(() => undefined)
.then(() => fsp.readFile(xdgConfigGitCredentialsPath, 'utf8'), invariably.void)
)
@@ -55,0 +50,0 @@ }
@@ -29,3 +29,3 @@ 'use strict'
module.exports = ({ httpProxy, httpsProxy, noProxy }, userAgent) => {
if (httpsProxy || httpProxy) {
if ((httpsProxy || httpProxy) && noProxy !== '*') {
const { HttpProxyAgent, HttpsProxyAgent } = require('hpagent')
@@ -32,0 +32,0 @@ const shouldProxy = require('should-proxy')
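The added `noProxy !== '*'` guard means a wildcard no-proxy setting now disables the proxy agents entirely, even when `httpProxy` or `httpsProxy` is set. A minimal standalone sketch of that decision (the helper name is hypothetical, not the module's actual export):

// Returns true if proxy agents should be created for the given network settings.
// Assumes the same semantics as the guard above: a literal '*' opts out of proxying.
function shouldUseProxyAgents ({ httpProxy, httpsProxy, noProxy }) {
  return Boolean(httpsProxy || httpProxy) && noProxy !== '*'
}

console.log(shouldUseProxyAgents({ httpsProxy: 'http://proxy.example.org:3128' })) // => true
console.log(shouldUseProxyAgents({ httpsProxy: 'http://proxy.example.org:3128', noProxy: '*' })) // => false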
@@ -127,6 +127,7 @@ 'use strict'
.readTree(Object.assign({ oid: treeOid, filepath: '' }, repo))
.then(({ tree: entries }) =>
entries.map(({ type, oid, path: name }) => ({ name, oid, isDirectory: invariably[type === 'tree'] }))
.then(
({ tree: entries }) =>
entries.map(({ type, oid, path: name }) => ({ name, oid, isDirectory: invariably[type === 'tree'] })),
invariably.emptyArray
)
.catch(invariably.emptyArray)
}
@@ -151,6 +152,3 @@
function retrievePathFs (base, { path }, subpath) {
return fsp
.access(base + '/' + joinPath(path, subpath))
.then(invariably.true)
.catch(invariably.false)
return fsp.access(base + '/' + joinPath(path, subpath)).then(invariably.true, invariably.false)
}
@@ -157,0 +155,0 @@
{
"name": "@antora/content-aggregator",
"version": "3.0.0-alpha.8",
"version": "3.0.0-alpha.9",
"description": "Fetches and aggregates content from distributed sources for use in an Antora documentation pipeline.",
@@ -26,3 +26,3 @@ "license": "MPL-2.0",
"hpagent": "~0.1.0",
"isomorphic-git": "~1.9",
"isomorphic-git": "~1.10",
"js-yaml": "~4.1",
@@ -54,3 +54,3 @@ "matcher": "~4.0",
],
"gitHead": "2e5695bea11fb5719989c329c97e66d36e29659f"
"gitHead": "a504d6889819b548e8a5416a7194cbb6f9a93e93"
}
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
+ Added isomorphic-git@1.10.5 (transitive)
- Removed isomorphic-git@1.9.2 (transitive)
Updated isomorphic-git@~1.10