🚨 Active Supply Chain Attack: node-ipc Package Compromised. Learn More
Socket
Book a DemoSign in
Socket

@existdb/xst

Package Overview
Dependencies
Maintainers
3
Versions
34
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@existdb/xst - npm Package Compare versions

Comparing version
3.0.0
to
3.1.0
+57
-25
commands/get.js
import { resolve, join, posix, dirname, basename } from 'node:path'
import { writeFileSync, statSync, existsSync, mkdirSync } from 'node:fs'
import { statSync, existsSync, mkdirSync } from 'node:fs'
import { writeFile } from 'node:fs/promises'
import { connect } from '@existdb/node-exist'
import Bottleneck from 'bottleneck'

@@ -17,2 +19,4 @@ /**

* @prop {String[]} exclude filter items
* @prop {Number} threads How many resources should be downloaded at the same time
* @prop {Number} mintime How long a download should take at least
*/

@@ -61,6 +65,3 @@

}
const serializationOptionNames = [
'insert-final-newline',
'omit-xml-declaration'
]
const serializationOptionNames = ['insert-final-newline', 'omit-xml-declaration']

@@ -76,3 +77,3 @@ const serializationDefaults = {

const serializationOptions = serializationDefaults
serializationOptionNames.forEach(o => {
serializationOptionNames.forEach((o) => {
if (o in options) {

@@ -89,2 +90,3 @@ serializationOptions[o] = options[o]

* @param {NodeExist.BoundModules} db NodeExist client
* @param {GetOptions} options
* @param {Boolean} verbose

@@ -107,3 +109,3 @@ * @param {ResourceInfo} resource

const localPath = join(directory, localName)
await writeFileSync(localPath, fileContents)
await writeFile(localPath, fileContents)

@@ -123,7 +125,9 @@ if (verbose) {

* @param {NodeExist} db NodeExist client
* @param {GetOptions} options
* @param {boolean} verbose
* @param {String} collection
* @param {String} baseCollection
* @param {Bottleneck} limiter
*/
async function downloadCollection (db, options, collection, baseCollection, directory) {
async function downloadCollection (db, options, collection, baseCollection, directory, limiter) {
const absCollection = posix.join(baseCollection, collection)

@@ -143,9 +147,18 @@ const { verbose } = options

const targetDir = posix.join(directory, collection)
await collectionMeta.documents.forEach(
async resource => downloadResource(db, options, resource, targetDir, absCollection))
// Download all documents. Do this in parallel, but not everything at once. Pool that work so we don't take down the
// server
await Promise.all(
collectionMeta.documents.map(async (resource) => {
await limiter.schedule(() => downloadResource(db, options, resource, targetDir, absCollection))
})
)
// recursive (optional?)
await collectionMeta.collections.forEach(
async collection => downloadCollection(db, options, collection, absCollection, targetDir))
// There should always be fewer collections than resources, so no need for pooling. Go over them one by one. No need
// to do this in parallel
for (const collection of collectionMeta.collections) {
await downloadCollection(db, options, collection, absCollection, targetDir, limiter)
}
return true

@@ -199,18 +212,14 @@ } catch (e) {

// read parameters
// const start = Date.now()
// const start = Date.now()
const root = resolve(target)
if (options.verbose) {
console.log('Downloading:', source, 'to', root)
console.log('Server:',
(db.client.isSecure ? 'https' : 'http') + '://' + db.client.options.host + ':' + db.client.options.port,
'(v' + options.version + ')'
)
console.log('User:', db.client.options.basic_auth.user)
console.error('Downloading:', source, 'to', root)
if (options.include.length > 1 || options.include[0] !== '**') {
console.log('Include:\n', ...options.include, '\n')
console.error('Include:\n', ...options.include, '\n')
}
if (options.exclude.length) {
console.log('Exclude:\n', ...options.exclude, '\n')
console.error('Exclude:\n', ...options.exclude, '\n')
}
console.error(`Downloading up to ${options.threads} resources at a time`)
}

@@ -271,6 +280,9 @@

const limiter = new Bottleneck({
maxConcurrent: options.threads,
minTime: options.mintime
})
// download collection into a folder
return await downloadCollection(db, options,
posix.basename(info.name),
posix.dirname(info.name), root)
return await downloadCollection(db, options, posix.basename(info.name), posix.dirname(info.name), root, limiter)
}

@@ -313,2 +325,14 @@

})
.option('t', {
alias: 'threads',
describe: 'The maximum number of concurrent threads that will be used to download data',
type: 'number',
default: 4
})
.option('m', {
alias: 'mintime',
describe: 'The minimum time each download will take',
type: 'number',
default: 0
})
.nargs({ i: 1, e: 1 })

@@ -321,4 +345,12 @@ }

}
const { source } = argv
const { threads, mintime, source } = argv
if (typeof mintime !== 'number' || mintime < 0) {
throw Error('Invalid value for option "mintime"; must be an integer greater than or equal to zero.')
}
if (typeof threads !== 'number' || threads <= 0) {
throw Error('Invalid value for option "threads"; must be an integer greater than zero.')
}
const target = argv.target ? argv.target : '.'

@@ -325,0 +357,0 @@

{
"name": "@existdb/xst",
"version": "3.0.0",
"version": "3.1.0",
"description": "Command line tool to interact with exist-db instances",

@@ -61,3 +61,3 @@ "main": "cli.js",

"c8": "^10.1.3",
"conventional-changelog-conventionalcommits": "^8.0.0",
"conventional-changelog-conventionalcommits": "^9.0.0",
"semantic-release": "^24.2.3",

@@ -64,0 +64,0 @@ "standard": "^17.0.0",