@antora/content-aggregator
Advanced tools
Comparing version 3.1.5 to 3.1.6
@@ -8,3 +8,2 @@ 'use strict' | ||
const deepClone = require('./deep-clone') | ||
const deepFlatten = require('./deep-flatten') | ||
const EventEmitter = require('events') | ||
@@ -97,3 +96,6 @@ const expandPath = require('@antora/expand-path-helper') | ||
const gitPlugins = loadGitPlugins(gitConfig, playbook.network || {}, startDir) | ||
const fetchConcurrency = Math.max(gitConfig.fetchConcurrency || Infinity, 1) | ||
const concurrency = { | ||
fetch: Math.max(gitConfig.fetchConcurrency || Infinity, 1), | ||
read: Math.max(gitConfig.readConcurrency || Infinity, 1), | ||
} | ||
const sourcesByUrl = sources.reduce((accum, source) => { | ||
@@ -105,3 +107,3 @@ return accum.set(source.url, [...(accum.get(source.url) || []), Object.assign({}, sourceDefaults, source)]) | ||
const loadOpts = { cacheDir, fetch, gitPlugins, progress, startDir, refPatternCache } | ||
return collectFiles(sourcesByUrl, loadOpts, fetchConcurrency).then(buildAggregate, (err) => { | ||
return collectFiles(sourcesByUrl, loadOpts, concurrency).then(buildAggregate, (err) => { | ||
progress && progress.terminate() | ||
@@ -113,54 +115,37 @@ throw err | ||
async function collectFiles (sourcesByUrl, loadOpts, concurrency) { | ||
const tasks = [...sourcesByUrl.entries()].map(([url, sources]) => [ | ||
() => loadRepository(url, Object.assign({ fetchTags: tagsSpecified(sources) }, loadOpts)), | ||
({ repo, authStatus }) => | ||
Promise.all( | ||
sources.map((source) => { | ||
// NOTE if repository is managed (has a url property), we can assume the remote name is origin | ||
// TODO if the repo has no remotes, then remoteName should be undefined | ||
const remoteName = repo.url ? 'origin' : source.remote || 'origin' | ||
return collectFilesFromSource(source, repo, remoteName, authStatus) | ||
}) | ||
), | ||
]) | ||
let rejection, started | ||
const startedContinuations = [] | ||
const recordRejection = (err) => { | ||
rejection = err | ||
} | ||
const runTask = (primary, continuation, idx) => | ||
primary().then((value) => { | ||
if (!rejection) startedContinuations[idx] = continuation(value).catch(recordRejection) | ||
}, recordRejection) | ||
if (tasks.length > concurrency) { | ||
started = [] | ||
const pending = [] | ||
for (const [primary, continuation] of tasks) { | ||
const current = runTask(primary, continuation, started.length).finally(() => | ||
pending.splice(pending.indexOf(current), 1) | ||
async function collectFiles (sourcesByUrl, loadOpts, concurrency, fetchedUrls) { | ||
const loadTasks = [...sourcesByUrl.entries()].map(([url, sources]) => { | ||
const loadOptsForUrl = Object.assign({}, loadOpts) | ||
if (loadOpts.fetch && fetchedUrls && fetchedUrls.length && fetchedUrls.includes(url)) loadOptsForUrl.fetch = false | ||
if (tagsSpecified(sources)) loadOptsForUrl.fetchTags = true | ||
return loadRepository.bind(null, url, loadOptsForUrl, { url, sources }) | ||
}) | ||
return gracefulPromiseAllWithLimit(loadTasks, concurrency.fetch).then(([results, rejections]) => { | ||
if (rejections.length) { | ||
if (concurrency.fetch > 1 && rejections.every(({ recoverable }) => recoverable)) { | ||
if (loadOpts.progress) loadOpts.progress.terminate() // reset cursor position and allow it be reused | ||
const msg0 = 'An unexpected error occurred while concurrently fetching content sources.' | ||
const msg1 = 'Retrying with git.fetch_concurrency value of 1.' | ||
logger.warn(msg0 + ' ' + msg1) | ||
const fulfilledUrls = results.map((it) => it && it.repo.url && it.url).filter((it) => it) | ||
return collectFiles(sourcesByUrl, loadOpts, Object.assign(concurrency, { fetch: 1 }), fulfilledUrls) | ||
} | ||
throw rejections[0] | ||
} | ||
return Promise.all( | ||
results.map(({ repo, authStatus, sources }) => | ||
selectStartPathsForRepository(repo, authStatus, sources).then((startPaths) => | ||
collectFilesFromStartPaths.bind(null, startPaths, repo, authStatus) | ||
) | ||
) | ||
started.push(current) | ||
if (pending.push(current) < concurrency) continue | ||
await Promise.race(pending) | ||
if (rejection) break | ||
} | ||
} else { | ||
started = tasks.map(([primary, continuation], idx) => runTask(primary, continuation, idx)) | ||
} | ||
return Promise.all(started).then(() => | ||
Promise.all(startedContinuations).then((result) => { | ||
if (rejection) throw rejection | ||
return result | ||
}) | ||
) | ||
).then((collectTasks) => promiseAllWithLimit(collectTasks, concurrency.read)) | ||
}) | ||
} | ||
function buildAggregate (componentVersionBuckets) { | ||
return [ | ||
...deepFlatten(componentVersionBuckets) | ||
.reduce((accum, batch) => { | ||
const key = batch.version + '@' + batch.name | ||
const entry = accum.get(key) | ||
if (!entry) return accum.set(key, batch) | ||
const entries = Object.assign(new Map(), { accum: [] }) | ||
for (const batchesForOrigin of componentVersionBuckets) { | ||
for (const batch of batchesForOrigin) { | ||
let key, entry | ||
if ((entry = entries.get((key = batch.version + '@' + batch.name)))) { | ||
const { files, origins } = batch | ||
@@ -170,9 +155,11 @@ ;(batch.files = entry.files).push(...files) | ||
Object.assign(entry, batch) | ||
return accum | ||
}, new Map()) | ||
.values(), | ||
] | ||
} else { | ||
entries.set(key, batch).accum.push(batch) | ||
} | ||
} | ||
} | ||
return entries.accum | ||
} | ||
async function loadRepository (url, opts) { | ||
async function loadRepository (url, opts, result = {}) { | ||
let authStatus, dir, repo | ||
@@ -187,2 +174,3 @@ const cache = { [REF_PATTERN_CACHE_KEY]: opts.refPatternCache } | ||
repo = { cache, dir, fs, gitdir: dir, noCheckout: true, url } | ||
const { credentialManager } = gitPlugins | ||
const validStateFile = ospath.join(dir, VALID_STATE_FILENAME) | ||
@@ -197,4 +185,3 @@ try { | ||
.then(() => { | ||
const credentialManager = gitPlugins.credentialManager | ||
authStatus = credentials ? 'auth-embedded' : credentialManager.status({ url }) ? 'auth-required' : undefined | ||
authStatus = identifyAuthStatus(credentialManager, credentials, url) | ||
return git.setConfig(Object.assign({ path: 'remote.origin.private', value: authStatus }, repo)) | ||
@@ -220,10 +207,9 @@ }) | ||
.then(() => { | ||
const credentialManager = gitPlugins.credentialManager | ||
authStatus = credentials ? 'auth-embedded' : credentialManager.status({ url }) ? 'auth-required' : undefined | ||
authStatus = identifyAuthStatus(credentialManager, credentials, url) | ||
return git.setConfig(Object.assign({ path: 'remote.origin.private', value: authStatus }, repo)) | ||
}) | ||
.catch((cloneErr) => { | ||
// FIXME triggering the error handler here causes assertion problems in the test suite | ||
//if (fetchOpts.onProgress) fetchOpts.onProgress.finish(cloneErr) | ||
throw transformGitCloneError(cloneErr, displayUrl) | ||
if (fetchOpts.onProgress) fetchOpts.onProgress.finish(cloneErr) | ||
const authRequested = credentialManager.status({ url }) === 'requested' | ||
throw transformGitCloneError(cloneErr, displayUrl, authRequested) | ||
}) | ||
@@ -245,3 +231,3 @@ .then(() => fsp.writeFile(validStateFile, '').catch(invariably.void)) | ||
} | ||
return { repo, authStatus } | ||
return Object.assign(result, { repo, authStatus }) | ||
} | ||
@@ -268,15 +254,29 @@ | ||
async function collectFilesFromSource (source, repo, remoteName, authStatus) { | ||
const originUrl = repo.url || (await resolveRemoteUrl(repo, remoteName)) | ||
return selectReferences(source, repo, remoteName).then((refs) => { | ||
if (!refs.length) { | ||
const { url, branches, tags, startPath, startPaths } = source | ||
async function selectStartPathsForRepository (repo, authStatus, sources) { | ||
const startPaths = [] | ||
const originUrls = {} | ||
for (const source of sources) { | ||
const { version, editUrl } = source | ||
// NOTE if repository is managed (has a url property), we can assume the remote name is origin | ||
// TODO if the repo has no remotes, then remoteName should be undefined | ||
const remoteName = repo.url ? 'origin' : source.remote || 'origin' | ||
const originUrl = repo.url || (originUrls[remoteName] ||= await resolveRemoteUrl(repo, remoteName)) | ||
const refs = await selectReferences(source, repo, remoteName) | ||
if (refs.length) { | ||
for (const ref of refs) { | ||
for (const startPath of await selectStartPaths(source, repo, remoteName, ref)) { | ||
startPaths.push({ startPath, ref, originUrl, editUrl, version }) | ||
} | ||
} | ||
} else { | ||
const { url, branches, tags } = source | ||
const startPathInfo = | ||
'startPaths' in source ? { 'start paths': startPaths || undefined } : { 'start path': startPath || undefined } | ||
'startPaths' in source | ||
? { 'start paths': source.startPaths || undefined } | ||
: { 'start path': source.startPath || undefined } | ||
const sourceInfo = yaml.dump({ url, branches, tags, ...startPathInfo }, { flowLevel: 1 }).trimRight() | ||
logger.info(`No matching references found for content source entry (${sourceInfo.replace(NEWLINE_RX, ' | ')})`) | ||
return [] | ||
} | ||
return Promise.all(refs.map((it) => collectFilesFromReference(source, repo, remoteName, authStatus, it, originUrl))) | ||
}) | ||
} | ||
return startPaths | ||
} | ||
@@ -410,6 +410,5 @@ | ||
async function collectFilesFromReference (source, repo, remoteName, authStatus, ref, originUrl) { | ||
async function selectStartPaths (source, repo, remoteName, ref) { | ||
const url = repo.url | ||
const displayUrl = url || repo.dir | ||
const { version, editUrl } = source | ||
const worktreePath = ref.head | ||
@@ -434,13 +433,18 @@ if (!worktreePath) { | ||
} | ||
return Promise.all( | ||
startPaths.map((startPath) => | ||
collectFilesFromStartPath(startPath, repo, authStatus, ref, worktreePath, originUrl, editUrl, version) | ||
) | ||
) | ||
return startPaths | ||
} | ||
const startPath = cleanStartPath(coerceToString(source.startPath)) | ||
return collectFilesFromStartPath(startPath, repo, authStatus, ref, worktreePath, originUrl, editUrl, version) | ||
return [cleanStartPath(coerceToString(source.startPath))] | ||
} | ||
function collectFilesFromStartPath (startPath, repo, authStatus, ref, worktreePath, originUrl, editUrl, version) { | ||
// Reads the component version buckets for every selected start path of a
// single repository, one start path at a time (read concurrency across
// repositories is throttled by the caller, not here).
//
// startPaths - entries of { startPath, ref, originUrl, editUrl, version }
// repo       - the repository handle whose git object cache is released afterwards
// authStatus - the authentication status recorded for this repository's origin
//
// Returns a Promise that resolves to an array of buckets, one per start path.
async function collectFilesFromStartPaths (startPaths, repo, authStatus) {
  const collected = []
  for (const { startPath, ref, originUrl, editUrl, version } of startPaths) {
    const bucket = await collectFilesFromStartPath(startPath, repo, authStatus, ref, originUrl, editUrl, version)
    collected.push(bucket)
  }
  // drop the per-repository git object cache now that all reads are done
  repo.cache = undefined
  return collected
}
function collectFilesFromStartPath (startPath, repo, authStatus, ref, originUrl, editUrl, version) { | ||
const worktreePath = ref.head | ||
const origin = computeOrigin(originUrl, authStatus, repo.gitdir, ref, startPath, worktreePath, editUrl) | ||
@@ -525,22 +529,21 @@ return (worktreePath ? readFilesFromWorktree(origin) : readFilesFromGitTree(repo, ref.oid, startPath)) | ||
function readFilesFromGitTree (repo, oid, startPath) { | ||
return git | ||
.readTree(Object.assign({ oid }, repo)) | ||
.then((root) => | ||
getGitTreeAtStartPath(repo, oid, startPath).then((start) => | ||
srcGitTree(repo, Object.assign(root, { dirname: '' }), start) | ||
) | ||
) | ||
return git.readTree(Object.assign({ oid }, repo)).then((root) => { | ||
Object.assign(root, { dirname: '' }) | ||
return startPath | ||
? getGitTreeAtStartPath(repo, oid, startPath).then((start) => { | ||
Object.assign(start, { dirname: startPath }) | ||
return srcGitTree(repo, root, start) | ||
}) | ||
: srcGitTree(repo, root) | ||
}) | ||
} | ||
function getGitTreeAtStartPath (repo, oid, startPath) { | ||
return git.readTree(Object.assign({ oid, filepath: startPath }, repo)).then( | ||
(result) => Object.assign(result, { dirname: startPath }), | ||
(err) => { | ||
const m = err instanceof ObjectTypeError && err.data.expected === 'tree' ? 'is not a directory' : 'does not exist' | ||
throw new Error(`the start path '${startPath}' ${m}`) | ||
} | ||
) | ||
return git.readTree(Object.assign({ oid, filepath: startPath }, repo)).catch((err) => { | ||
const m = err instanceof ObjectTypeError && err.data.expected === 'tree' ? 'is not a directory' : 'does not exist' | ||
throw new Error(`the start path '${startPath}' ${m}`) | ||
}) | ||
} | ||
function srcGitTree (repo, root, start) { | ||
function srcGitTree (repo, root, start = root) { | ||
return new Promise((resolve, reject) => { | ||
@@ -832,3 +835,2 @@ const files = [] | ||
if (err) { | ||
// TODO could use progressBar.interrupt() to replace bar with message instead | ||
this.chars.incomplete = '?' | ||
@@ -863,2 +865,6 @@ this.update(0) | ||
// Classifies how a repository URL was authenticated.
//
// Returns 'auth-embedded' when credentials were embedded in the URL,
// 'auth-required' when the credential manager reports credentials were used
// for this URL, and undefined when no authentication was involved.
function identifyAuthStatus (credentialManager, credentials, url) {
  if (credentials) return 'auth-embedded'
  return credentialManager.status({ url }) ? 'auth-required' : undefined
}
/** | ||
@@ -975,4 +981,4 @@ * Generates a safe, unique folder name for a git URL. | ||
function transformGitCloneError (err, displayUrl) { | ||
let wrappedMsg, trimMessage | ||
function transformGitCloneError (err, displayUrl, authRequested) { | ||
let wrappedMsg, recoverable, trimMessage | ||
if (HTTP_ERROR_CODE_RX.test(err.code)) { | ||
@@ -986,7 +992,9 @@ switch (err.data.statusCode) { | ||
case 404: | ||
wrappedMsg = 'Content repository not found' | ||
wrappedMsg = authRequested | ||
? 'Content repository not found or credentials were rejected' | ||
: 'Content repository not found' | ||
break | ||
default: | ||
wrappedMsg = err.message | ||
trimMessage = true | ||
recoverable = trimMessage = true | ||
} | ||
@@ -999,10 +1007,10 @@ } else if (err instanceof UrlParseError || err instanceof UnknownTransportError) { | ||
wrappedMsg = `${err.name}: ${err.message}` | ||
trimMessage = true | ||
recoverable = trimMessage = true | ||
} | ||
if (trimMessage) { | ||
wrappedMsg = ~(wrappedMsg = wrappedMsg.trimRight()).indexOf('. ') ? wrappedMsg : wrappedMsg.replace(/\.$/, '') | ||
wrappedMsg = ~(wrappedMsg = wrappedMsg.trimEnd()).indexOf('. ') ? wrappedMsg : wrappedMsg.replace(/\.$/, '') | ||
} | ||
const errWrapper = new Error(`${wrappedMsg} (url: ${displayUrl})`) | ||
errWrapper.stack += `\nCaused by: ${err.stack || 'unknown'}` | ||
return errWrapper | ||
return recoverable ? Object.assign(errWrapper, { recoverable }) : errWrapper | ||
} | ||
@@ -1055,3 +1063,3 @@ | ||
.readFile(ospath.join(gitdir, 'gitdir'), 'utf8') | ||
.then((contents) => ({ branch, dir: ospath.dirname(contents.trimRight()) })) | ||
.then((contents) => ({ branch, dir: ospath.dirname(contents.trimEnd()) })) | ||
) | ||
@@ -1070,2 +1078,35 @@ }) | ||
// Runs task functions with at most `limit` in flight at once, collecting
// rejections instead of failing fast.
//
// tasks - an array of zero-argument functions, each returning a Promise
// limit - the maximum number of tasks allowed in flight (default: Infinity)
//
// A rejected task leaves `undefined` at its slot in the results array and its
// error is appended to the rejections array. Once any rejection has been
// recorded, no further tasks are launched (tasks already in flight still run
// to completion). Resolves to the pair [results, rejections].
async function gracefulPromiseAllWithLimit (tasks, limit = Infinity) {
  const rejections = []
  const captureRejection = (err) => {
    rejections.push(err)
    return undefined // rejected slots resolve to undefined in the results
  }
  const launched = []
  if (tasks.length <= limit) {
    // everything fits under the limit; launch all tasks immediately
    for (const task of tasks) launched.push(task().catch(captureRejection))
  } else {
    const inFlight = []
    for (const task of tasks) {
      const promise = task()
        .catch(captureRejection)
        .finally(() => {
          inFlight.splice(inFlight.indexOf(promise), 1)
        })
      launched.push(promise)
      inFlight.push(promise)
      if (inFlight.length < limit) continue
      await Promise.race(inFlight)
      // stop launching new work as soon as any task has failed
      if (rejections.length) break
    }
  }
  const results = await Promise.all(launched)
  return [results, rejections]
}
// Runs task functions with at most `limit` in flight at once, failing fast on
// the first rejection (like Promise.all).
//
// tasks - an array of zero-argument functions, each returning a Promise
// limit - the maximum number of tasks allowed in flight (default: Infinity)
//
// Resolves to the array of results in task order, or rejects with the first
// error encountered.
async function promiseAllWithLimit (tasks, limit = Infinity) {
  // fast path: no throttling required, launch everything at once
  if (tasks.length <= limit) return Promise.all(tasks.map((task) => task()))
  const launched = []
  const inFlight = []
  for (const task of tasks) {
    const promise = task().finally(() => {
      inFlight.splice(inFlight.indexOf(promise), 1)
    })
    launched.push(promise)
    inFlight.push(promise)
    // once the limit is reached, wait for a slot to open before continuing
    if (inFlight.length >= limit) await Promise.race(inFlight)
  }
  return Promise.all(launched)
}
module.exports = aggregateContent |
{ | ||
"name": "@antora/content-aggregator", | ||
"version": "3.1.5", | ||
"version": "3.1.6", | ||
"description": "Fetches and aggregates content from distributed sources for use in an Antora documentation pipeline.", | ||
@@ -32,3 +32,3 @@ "license": "MPL-2.0", | ||
"@antora/expand-path-helper": "~2.0", | ||
"@antora/logger": "3.1.5", | ||
"@antora/logger": "3.1.6", | ||
"@antora/user-require-helper": "~2.0", | ||
@@ -35,0 +35,0 @@ "braces": "~3.0", |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
68429
1537
+ Added@antora/logger@3.1.6(transitive)
- Removed@antora/logger@3.1.5(transitive)
Updated@antora/logger@3.1.6