New Case Study:See how Anthropic automated 95% of dependency reviews with Socket.Learn More
Socket
Sign inDemoInstall
Socket

@discoveryjs/cli

Package Overview
Dependencies
Maintainers
3
Versions
71
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@discoveryjs/cli - npm Package Compare versions

Comparing version 2.0.0-beta.4 to 2.0.0-beta.5

16

CHANGELOG.md

@@ -0,8 +1,20 @@

## 2.0.0-beta.5 (11-02-2021)
- Fixed crash on temporary cache files cleanup and related improvements
- Reworked model data cache settings:
- `cacheTtl` can take a cron expression as a value
- `cacheBgUpdate` can take a boolean (enabled or disabled) and `"only"` value. When `"only"` is specified, manual cache reset is not available
- Background updates are scheduled based on the `cacheTtl` setting
- Added `warnings` field on model config normalization, which contains all issues found in the config, if any
- Renamed `--cleanup` build option into `--clean`
- Added `--cache-check-ttl` option for build and archive commands
- Added passing `meta` from model's config to model's context
## 2.0.0-beta.4 (08-02-2021)
- Add `--tmpdir` option to customise a dir for data cache temp files
- Added `--tmpdir` option to customise a dir for data cache temp files
## 2.0.0-beta.3 (05-02-2021)
- Fix bundling in `@discoveryjs/discovery` itself
- Fixed bundling in `@discoveryjs/discovery` itself

@@ -9,0 +21,0 @@ ## 2.0.0-beta.2 (05-02-2021)

6

lib/build.js

@@ -124,3 +124,3 @@ const path = require('path');

async function build(options, config, configFile) {
const cacheDispatcher = createCacheDispatcher(config.models, { cache: options.cache, cachedir: options.cachedir, tmpdir: options.tmpdir });
const cacheDispatcher = createCacheDispatcher(config.models, { configFile, ...options });
const outputDir = options.output;

@@ -179,3 +179,3 @@ const outputFiles = new Map();

await utils.process('Build bundles', () => bundle(config, options).then(bundles => {
await utils.process('Build bundles', () => bundle(config, options, null, { cacheDispatcher }).then(bundles => {
for (const file of bundles.outputFiles) {

@@ -227,3 +227,3 @@ const filepath = !/index(-loader)?\.(js|css)$/.test(file.path)

if (options.cleanup && fs.existsSync(options.output)) {
if (options.clean && fs.existsSync(options.output)) {
await utils.process(`Clean up dest dir before write (${options.output})`, () =>

@@ -230,0 +230,0 @@ fs.readdirSync(options.output).forEach(name => {

const fs = require('fs');
const path = require('path');
const os = require('os');
const chalk = require('chalk');

@@ -34,3 +33,3 @@ const { stringifyStream } = require('@discoveryjs/json-ext');

const tmpCacheFilename = path.join(path.resolve(tmpdir || os.tmpdir()), path.basename(cacheDispatcher.genModelCacheFilename(slug)) + '.tmp');
const tmpCacheFilename = path.join(cacheDispatcher.tmpdir, path.basename(cacheDispatcher.genModelCacheTempFilename(slug)));
let startTime = Date.now();

@@ -37,0 +36,0 @@

@@ -26,3 +26,7 @@ const path = require('path');

'Disable data caching'
],
checkCacheTtl: [
'--check-cache-ttl',
'Check data cache TTL before using it, option enforces to used actual (according to TTL) data only'
]
};
const path = require('path');
const url = require('url');
const cronstrue = require('cronstrue');
const express = require('express');

@@ -18,10 +19,2 @@ const chalk = require('chalk');

const DISABLED = chalk.yellow('disabled');
const ROUTE_DATA = '/data.json';
const ROUTE_DROP_CACHE = '/drop-cache';
const ROUTE_MODEL_BUILD = '/gen/build.zip';
const defaultRoutes = {
[ROUTE_DATA]: (req, res) => res.json(null),
[ROUTE_DROP_CACHE]: (req, res) => res.status(403).send('Feature is disabled for the model'),
[ROUTE_MODEL_BUILD]: (req, res) => res.status(403).send('Feature is disabled for the model')
};

@@ -57,3 +50,3 @@ function ensureTrailingSlash(req, res, next) {

function createModelRouter(modelConfig, options, config, addBeforeReadyTask, cacheDispatcher, routes = {}) {
function createModelRouter(modelConfig, options, config, addBeforeReadyTask, cacheDispatcher) {
const { slug } = modelConfig;

@@ -66,2 +59,8 @@ const modelCache = cacheDispatcher.getModelCacheInfo(slug);

if (modelConfig.warnings && modelConfig.warnings.length) {
for (const msg of modelConfig.warnings) {
utils.println(chalk.bgYellow.black('WARNING'), msg);
}
}
if (modelConfig.routers && modelConfig.routers.length) {

@@ -77,9 +76,18 @@ utils.process('Extend with custom routers', () => {

// set up routes
Object.keys(defaultRoutes).forEach(path =>
router.get(path, routes[path] || defaultRoutes[path])
);
if (modelConfig.data) {
router.get('/data.json', modelDataHandler(modelConfig, options, cacheDispatcher));
}
if (modelCache.manualReset) {
router.get('/drop-cache', modelDropDataCacheHandler(modelConfig, cacheDispatcher));
}
if (options.modelDownload && modelConfig.download) {
router.get(modelConfig.download, modelDownloadHandler(modelConfig, options));
}
// assets
if (options.prebuild) {
router.use(express.static(
config.mode === 'single' ? options.prebuild : path.join(options.prebuild, slug)
// FIXME: should prebuild in the root dir when single model mode
// config.mode === 'single' ? options.prebuild :
path.join(options.prebuild, slug)
));

@@ -92,3 +100,3 @@ } else {

router.get('/', generate('/model.html', modelConfig, options, config));
addAssetRoutes(router, 'model', config, options, modelConfig);
addAssetRoutes(router, 'model', config, options, modelConfig, cacheDispatcher);
}

@@ -100,10 +108,14 @@ });

if (modelCache.cache && ROUTE_DATA in routes) {
if (modelCache.cache && modelConfig.data) {
utils.sectionStart('Cache:');
utils.println(`File: ${path.relative(process.cwd(), modelCache.cache)}`);
utils.println(`TTL: ${typeof modelCache.ttl === 'string'
? `${modelCache.ttl} (${cronstrue.toString(modelCache.ttl, { verbose: true, use24HourTimeFormat: true }).replace(/^./, m => m.toLowerCase())})`
: modelCache.ttl
? utils.prettyDuration(modelCache.ttl, true)
: 'forever'
}`);
utils.println(`Background update: ${JSON.stringify(modelCache.bgUpdate)}`);
utils.println(`Manual reset: ${JSON.stringify(modelCache.manualReset)}`);
if (modelCache.bgUpdate) {
utils.println(`Background update every ${utils.prettyDuration(modelCache.bgUpdate, true)}`);
}
if (options.warmup) {

@@ -178,7 +190,3 @@ utils.process('Warming up', () => {

() => utils.sortModels(config.models).map(modelConfig =>
createModelRouter(modelConfig, options, config, beforeReady.add, cacheDispatcher, {
[ROUTE_DATA]: modelDataHandler(modelConfig, options, cacheDispatcher),
[ROUTE_DROP_CACHE]: options.modelResetCache && modelDropDataCacheHandler(modelConfig, cacheDispatcher),
[ROUTE_MODEL_BUILD]: options.modelDownload && modelDownloadHandler(modelConfig, options)
})
createModelRouter(modelConfig, options, config, beforeReady.add, cacheDispatcher)
)

@@ -206,3 +214,3 @@ );

} else {
addAssetRoutes(app, 'index', config, options, null);
addAssetRoutes(app, 'index', config, options, null, cacheDispatcher);
}

@@ -225,16 +233,3 @@ });

beforeReady.add('Start data cache sync and background updates', () => {
const stat = cacheDispatcher.startBgUpdatesAndSync();
const agg = new Map();
for (const { slug, interval } of stat) {
agg.set(interval, (agg.get(interval) || []).concat(slug));
}
// output stat aggregated
for (const [interval, slugs] of [...agg.entries()].sort(([a], [b]) => a - b)) {
utils.logMsg('every',
chalk.green(utils.prettyDuration(interval).padStart(5)) + ' ',
slugs.map(slug => chalk.cyan(slug)).join(', ')
);
}
cacheDispatcher.startBgUpdatesAndSync();
});

@@ -241,0 +236,0 @@ }

@@ -15,3 +15,3 @@ const path = require('path');

module.exports = function addAssetRouters(router, name, config, options, modelConfig) {
module.exports = function addAssetRouters(router, name, config, options, modelConfig, cacheDispatcher) {
let currentBundles = new Map();

@@ -28,3 +28,6 @@ const getAsset = async (type) => {

sourcemap: true
}, entrypoint => path.basename(entrypoint) === filename);
}, {
cacheDispatcher,
filter: entrypoint => path.basename(entrypoint) === filename
});

@@ -31,0 +34,0 @@ currentBundles.set(filename, currentBundle);

@@ -5,4 +5,4 @@ const { logSlugMsg} = require('../shared/utils');

return (req, res) => {
logSlugMsg(slug, 'Force cache update');
cacheDispatcher.write(slug, true);
logSlugMsg(slug, 'Enforce cache update');
cacheDispatcher.reset(slug);

@@ -9,0 +9,0 @@ res.status(200).send('OK');

@@ -25,5 +25,5 @@ const utils = require('../shared/utils');

args.push('--no-data');
// args.push('--cleanup');
// args.push('--clean');
return utils.runScript(command.build, args);
};

@@ -5,3 +5,2 @@ const fs = require('fs');

const gen = require('./gen');
const resolve = require('resolve');
const discoveryDir = require('./discovery-dir');

@@ -19,3 +18,3 @@ const staticSrc = path.join(__dirname, '../static');

module.exports = async function(config, options, esbuildConfig, filter) {
module.exports = async function(config, options, esbuildConfig, { cacheDispatcher, filter } = {}) {
const outputDir = options.output;

@@ -34,3 +33,3 @@ const files = new Map();

files.set(`${jsLoaderRef}`, () => fs.readFileSync(staticSrc + '/model-loader.js'));
files.set(`${modelConfig.slug}/setup.js`, () => gen['/gen/setup.js'](modelConfig, options, config));
files.set(`${modelConfig.slug}/setup.js`, () => gen['/gen/setup.js'](modelConfig, options, config, cacheDispatcher));
files.set(`${modelConfig.slug}/prepare.js`, () => modelConfig.prepare);

@@ -55,3 +54,3 @@ files.set(`${modelConfig.slug}/extensions.js`, () => selectAssets(modelConfig)

files.set(`${jsLoaderRef}`, () => fs.readFileSync(staticSrc + '/index-loader.js'));
files.set('index/setup.js', () => gen['/gen/setup.js'](null, options, config));
files.set('index/setup.js', () => gen['/gen/setup.js'](null, options, config, cacheDispatcher));
files.set('index/extensions.js', () => selectAssets(config)

@@ -58,0 +57,0 @@ .filter(fn => /\.[tj]sx?/.test(path.extname(fn)))

const fs = require('fs');
const os = require('os');
const path = require('path');
const chalk = require('chalk');
const cron = require('cron-parser');
const esbuild = require('esbuild');
const crypto = require('crypto');
const { logError, logSlugMsg, logSlugError, runScript, prettyDuration, serializeErrorForClient } = require('./utils');
const { logError, logMsg, logSlugMsg, logSlugError, runScript, prettyDuration, serializeErrorForClient } = require('./utils');
const command = require('./commands');
const matchCacheFilename = /(?:^|\/)\.discoveryjs\.(.*?)\.(\d+)\.?([a-f0-9]+)?\.cache$/;
const OBSOLETE_CACHE_CHECK_INTERVAL = 60 * 1000; // 1 min
const TMP_CACHE_CLEAN_INTERVAL = 60 * 60 * 1000;
const TMP_CACHE_FILE_TTL = 60 * 60 * 1000; // 1 hour
const TMP_CACHE_EXTNAME = '.discoveryjs-cache-tmp';

@@ -45,2 +48,64 @@ function getDataHash(modelConfig) {

// Returns the timestamp of the previous cache "slot" for a model, or null
// when TTL checks are disabled (falsy / non string-or-number ttl).
// - `ttl` as a string is treated as a cron expression: returns the previous
//   scheduled tick (UTC) relative to the last cache's timestamp (or now).
// - `ttl` as a positive number is a duration in ms: returns the later of the
//   last cache timestamp and `Date.now() - ttl`.
// `cacheInfo` may be supplied by the caller to avoid re-reading the cache dir;
// when omitted, the model's last cache is looked up and may be absent (null).
function getPrevModelCacheTimestamp(modelCache, cacheInfo) {
    const { ttl } = modelCache;

    switch (typeof ttl) {
        case 'string': {
            cacheInfo = cacheInfo || getModelLastCache(modelCache);

            const options = {
                // guard against a missing cache: getModelLastCache() may return null
                currentDate: (cacheInfo && cacheInfo.timestamp) || Date.now(),
                utc: true
            };

            return cron.parseExpression(ttl, options).prev().getTime();
        }

        case 'number': {
            if (ttl > 0) {
                cacheInfo = cacheInfo || getModelLastCache(modelCache);

                if (cacheInfo) {
                    return Math.max(cacheInfo.timestamp, Date.now() - ttl);
                }

                return Date.now();
            }
            // ttl <= 0 means TTL check is disabled – fall through to null
        }
    }

    return null;
}
// Returns the timestamp when a model's cache should next be (re)generated,
// or null when TTL checks are disabled (falsy / non string-or-number ttl).
// - `ttl` as a string is treated as a cron expression: returns the next
//   scheduled tick (UTC) relative to the last cache's timestamp (or now).
// - `ttl` as a positive number is a duration in ms: returns
//   `lastCache.timestamp + ttl`, or now when no cache exists yet.
// `cacheInfo` may be supplied by the caller to avoid re-reading the cache dir;
// when omitted, the model's last cache is looked up and may be absent (null).
function getNextModelCacheTimestamp(modelCache, cacheInfo) {
    const { ttl } = modelCache;

    switch (typeof ttl) {
        case 'string': {
            cacheInfo = cacheInfo || getModelLastCache(modelCache);

            const options = {
                // guard against a missing cache: getModelLastCache() may return null
                currentDate: (cacheInfo && cacheInfo.timestamp) || Date.now(),
                utc: true
            };

            return cron.parseExpression(ttl, options).next().getTime();
        }

        case 'number': {
            if (ttl > 0) {
                cacheInfo = cacheInfo || getModelLastCache(modelCache);

                if (cacheInfo) {
                    return cacheInfo.timestamp + ttl;
                }

                return Date.now();
            }
            // ttl <= 0 means TTL check is disabled – fall through to null
        }
    }

    return null;
}
function getCacheFileInfo(filename, stat = fs.statSync(filename)) {

@@ -75,5 +140,11 @@ const [, slug, timestamp, hash] = filename.match(matchCacheFilename) || [];

function getModelActualCache({ slug, file, ttl, hash }) {
const [cacheCandidate] = getCaches(path.dirname(file), [slug])[slug] || [];
// Returns the most recent on-disk cache descriptor for the model,
// or null when no cache file exists for its slug.
function getModelLastCache({ slug, file }) {
    const modelCaches = getCaches(path.dirname(file), [slug])[slug];

    return (modelCaches && modelCaches[0]) || null;
}
function getModelActualCache(modelCache, checkTtl) {
const { hash } = modelCache;
const cacheCandidate = getModelLastCache(modelCache);
if (!cacheCandidate) {

@@ -83,7 +154,2 @@ return null;

// out of date
if (ttl && ttl < Date.now() - cacheCandidate.timestamp) {
return null;
}
// cache hash doesn't match to model's hash

@@ -94,2 +160,10 @@ if (hash && cacheCandidate.hash !== hash) {

// out of date
if (checkTtl) {
const timestamp = getPrevModelCacheTimestamp(modelCache, cacheCandidate);
if (timestamp && timestamp > cacheCandidate.timestamp) {
return null;
}
}
return cacheCandidate;

@@ -150,3 +224,17 @@ };

function createCacheDispatcher(models, { configFile, cache = true, cachedir, tmpdir, bgUpdate, prettyData, cachePersistent }) {
function createCacheDispatcher(models, options) {
const {
configFile,
cache = true,
checkCacheTtl: checkTtl = true,
bgUpdate,
modelResetCache,
prettyData,
cachePersistent
} = options;
let {
cachedir,
tmpdir
} = options;
const modelSlugs = [];

@@ -162,2 +250,3 @@ const cacheBySlug = new Map();

ttl: model.cacheTtl || false,
manualReset: modelResetCache && model.cacheBgUpdate !== 'only',
bgUpdate: (bgUpdate && model.cacheBgUpdate) || false,

@@ -217,3 +306,3 @@ bgUpdateTimer: null,

if (model.cache) {
if (model.cache && model.data) {
const modelCache = createModelCache(model);

@@ -259,2 +348,3 @@ cacheBySlug.set(slug, modelCache);

ttl: modelCache.ttl,
manualReset: modelCache.manualReset,
bgUpdate: modelCache.bgUpdate,

@@ -279,3 +369,2 @@ bgUpdateScheduled: modelCache.bgUpdateScheduled,

// get cache requests
const writeCacheRequest = new Map();
const readCache = (slug) => {

@@ -285,3 +374,7 @@ const modelCache = cacheBySlug.get(slug);

if (!modelCache) {
return Promise.resolve();
if (modelSlugs.includes(slug)) {
return Promise.resolve(null);
}
return Promise.reject(`No model with slug "${slug}" is found`);
}

@@ -292,3 +385,3 @@

if (!modelCache.read.ignoreActual) {
const actualCache = getModelActualCache(modelCache);
const actualCache = getModelActualCache(modelCache, checkTtl);

@@ -303,2 +396,4 @@ if (actualCache) {

};
const writeCacheRequest = new Map();
let writeCacheLast = Promise.resolve(); // to avoid data cache updates in parallel (out of resources)
const writeCache = (slug, ignoreActual) => {

@@ -308,3 +403,3 @@ const modelCache = cacheBySlug.get(slug);

if (!modelCache) {
return Promise.resolve();
return Promise.reject(`No model with slug "${slug}" is found`);
}

@@ -316,3 +411,6 @@

const startTime = Date.now();
const cache = runCacheCommand(slug, { configFile, cachedir, tmpdir, prettyData, hash: modelCache.hash })
const cache = writeCacheLast
.then(() =>
runCacheCommand(slug, { configFile, cachedir, tmpdir, prettyData, hash: modelCache.hash })
)
.finally(() => {

@@ -324,7 +422,7 @@ modelCache.write.lastTime = Date.now() - startTime;

modelCache.write.writes++;
cache.then(() => {
writeCacheLast = cache.then(cacheInfo => {
modelCache.read.ignoreActual = false; // remove ignore on success only
if (modelCache.bgUpdateTimer) {
scheduleBgUpdate(modelCache); // re-schedule bg update on success only
scheduleBgUpdate(modelCache, cacheInfo); // re-schedule bg update on success only
}

@@ -335,2 +433,6 @@ }, (error) => {

modelCache.write.errors++;
}).catch((error) => {
logSlugError(slug, 'Finalize cache generation error:', error);
}).finally(() => {
logMsg(`Data cache updating queue${writeCacheRequest.size ? ': ' + writeCacheRequest.size + ' left' : ' is empty'}`);
});

@@ -350,3 +452,3 @@

let cleanupObsoleteCachesTimer = null;
const scheduleBgUpdate = (modelCache, init) => {
const scheduleBgUpdate = (modelCache, cacheInfo) => {
const { slug, bgUpdate, bgUpdateTimer } = modelCache;

@@ -358,4 +460,23 @@

!init && logSlugMsg(slug, `${bgUpdateTimer ? 'Re-schedule' : 'Schedule'} background data cache update in ${prettyDuration(bgUpdate, true)}`);
modelCache.bgUpdateScheduled = new Date(Date.now() + bgUpdate);
if (!bgUpdate) {
return;
}
const nextTimestamp = getNextModelCacheTimestamp(modelCache, cacheInfo);
if (nextTimestamp === null) {
return;
}
const awaitTime = Math.max(0, nextTimestamp - Date.now());
const awaitDate = new Date(Date.now() + awaitTime);
const awaitTimeHuman = !awaitTime
? 'asap'
: `in ${
prettyDuration(awaitTime, { secondsDecimalDigits: 0, spaces: true })
} (${awaitDate.toISOString().replace(/[TZ]/g, ' ')}GMT)`;
logSlugMsg(slug, `${bgUpdateTimer ? 'Re-schedule' : 'Schedule'} background data cache update ${awaitTimeHuman}`);
modelCache.bgUpdateScheduled = awaitDate;
modelCache.bgUpdateTimer = setTimeout(

@@ -365,18 +486,19 @@ () => {

logSlugMsg(slug, 'Start background data cache update');
logSlugMsg(slug, 'Queue background data cache update');
modelCache.bgUpdateTimer = null;
writeCache(slug)
.then(
() => logSlugMsg(slug, `Background data cache update done in ${prettyDuration(Date.now() - bgUpdateStartTime)}`),
(error) => logSlugError(slug, 'Background data cache update error:', error)
)
.then(() => {
.then((cacheInfo) => {
logSlugMsg(slug, `Background data cache update done in ${prettyDuration(Date.now() - bgUpdateStartTime)}`);
// make sure that modelCache is still actual descriptor
if (modelCache === cacheBySlug.get(slug)) {
scheduleBgUpdate(modelCache);
scheduleBgUpdate(modelCache, cacheInfo);
}
});
})
.catch((error) =>
logSlugError(slug, 'Background data cache update error:', error)
);
},
bgUpdate
awaitTime
);

@@ -387,15 +509,2 @@ };

const cleanupObsoleteCaches = () => {
// clean *.tmp files
fs.readdir(cachedir, (err, files) => {
for (const file of files) {
const filePath = path.join(cachedir, file);
const { mtime } = fs.statSync(filePath);
if (path.extname(file) === '.tmp' && Date.now() - Date.parse(mtime) > TMP_CACHE_CLEAN_INTERVAL) {
fs.unlink(path.join(cachedir, file), (err) => {
err && logError(err);
});
}
}
});
for (const fsModelCaches of Object.values(getCaches(cachedir, modelSlugs))) {

@@ -414,4 +523,24 @@ for (const { slug, file } of fsModelCaches.slice(2)) {

}
// clean *.tmp files
for (const file of fs.readdirSync(tmpdir)) {
if (path.extname(file) !== TMP_CACHE_EXTNAME) {
continue;
}
const filePath = path.join(tmpdir, file);
const { mtime } = fs.statSync(filePath);
if (Date.now() - Date.parse(mtime) > TMP_CACHE_FILE_TTL) {
fs.unlink(path.join(tmpdir, file), (err) => {
err && logError(err);
});
}
}
};
// normalize paths
cachedir = path.resolve(process.cwd(), cachedir);
tmpdir = path.resolve(process.cwd(), tmpdir || os.tmpdir());
// add models

@@ -424,2 +553,4 @@ models.forEach(addModel);

cachedir,
tmpdir,
checkTtl,
get used() {

@@ -429,3 +560,15 @@ return cacheBySlug.size > 0;

read: readCache,
write: writeCache,
reset(slug) {
const modelCache = cacheBySlug.get(slug);
if (!modelCache) {
return Promise.reject(`No model with slug "${slug}" is found`);
}
if (!modelCache.manualReset) {
return Promise.reject(`No reset cache for "${slug}" model is enabled`);
}
return writeCache(slug, true);
},
addModel,

@@ -437,11 +580,16 @@ removeModel,

},
genModelCacheTempFilename(slug) {
return genModelCacheFilename(cacheBySlug.get(slug)) + TMP_CACHE_EXTNAME;
},
getModelActualCache(slug) {
return cacheBySlug.has(slug)
? getModelActualCache(cacheBySlug.get(slug))
? getModelActualCache(cacheBySlug.get(slug), checkTtl)
: null;
},
warmup(slug) {
return getModelActualCache(cacheBySlug.get(slug))
return getModelActualCache(cacheBySlug.get(slug), false)
? Promise.resolve()
: writeCache(slug).then(() => {});
: writeCache(slug).catch(() => {
// avoid uncaught rejection warnings
});
},

@@ -456,6 +604,7 @@ startBgUpdatesAndSync() {

if (modelCache.bgUpdate) {
scheduleBgUpdate(modelCache, true);
scheduleBgUpdate(modelCache);
stat.push({
slug: modelCache.slug,
interval: modelCache.bgUpdate
scheduled: modelCache.bgUpdateScheduled
});

@@ -462,0 +611,0 @@ }

const fs = require('fs');
const path = require('path');
const cron = require('cron-validator');
const discoveryDir = require('./discovery-dir');

@@ -18,2 +19,6 @@ const { parseDuration } = require('./utils');

// De-duplicates an array, preserving first-occurrence order.
function unique(arr) {
    const seen = new Set();
    const result = [];

    for (const value of arr) {
        if (!seen.has(value)) {
            seen.add(value);
            result.push(value);
        }
    }

    return result;
}
function resolveFilename(filepath, basedir) {

@@ -77,10 +82,12 @@ return require.resolve(path.resolve(basedir, filepath));

function normalizeModelConfig(modelConfig, basedir) {
const warnings = [];
let {
basedir: modelBasedir,
routers,
data,
prepare,
view,
routers,
cacheTtl,
cacheBgUpdate
cache,
cacheTtl = 0, // 0 – TTL check is disabled
cacheBgUpdate = false
} = modelConfig || {};

@@ -109,16 +116,44 @@

// cache
cache = Boolean(cache);
// cacheTtl
cacheTtl = typeof cacheTtl === 'string'
? parseDuration(cacheTtl)
: cacheTtl || 0; // 0 – TTL check is disabled
if (typeof cacheTtl === 'string') {
const duration = parseDuration(cacheTtl);
if (duration !== null) {
cacheTtl = duration;
} else if (!cron.isValidCron(cacheTtl)) {
warnings.push(`Bad cron expression in modelConfig.cacheTtl: "${cacheTtl}"`);
cacheTtl = 0;
}
} else if (!isFinite(cacheTtl) || !Number.isInteger(cacheTtl) || cacheTtl < 0) {
warnings.push(`Bad duration value in modelConfig.cacheTtl: "${cacheTtl}"`);
cacheTtl = 0;
}
// cacheBgUpdate
if (typeof cacheBgUpdate === 'string') {
cacheBgUpdate = parseDuration(cacheBgUpdate);
if (typeof cacheBgUpdate !== 'boolean' && cacheBgUpdate !== 'only') {
warnings.push(`Bad value for modelConfig.cacheBgUpdate (should be boolean or "only"): ${cacheBgUpdate}`);
cacheBgUpdate = false;
}
// validation
if (modelConfig.view.libs) {
warnings.push('modelConfig.view.libs is not supported anymore, use require() or ES6 import expressions instead');
}
if (modelConfig.extendRouter) {
warnings.push('modelConfig.extendRouter is not supported anymore, use modelConfig.routes instead');
}
if (cacheBgUpdate && !cacheTtl) {
warnings.push('modelConfig.cacheBgUpdate is enabled, but modelConfig.cacheTtl is not set (cacheBgUpdate setting is ignored)');
cacheBgUpdate = false;
}
return {
name: 'Untitled model',
cache: undefined,
...modelConfig,
...stripKeys(modelConfig || {}, ['slug', 'basedir']),
data,

@@ -128,4 +163,6 @@ prepare,

routers,
cache,
cacheTtl,
cacheBgUpdate
cacheBgUpdate,
warnings
};

@@ -199,17 +236,11 @@ }

if (!model || model === slug) {
const [resolvedConfig, modelBasedir] = resolveModelConfig(result.models[slug], configBasedir);
const modelConfig = {
slug, // for a position
...modelBaseConfig,
...normalizeModelConfig(...resolveModelConfig(result.models[slug], configBasedir)),
slug // for a value
slug,
...normalizeModelConfig({
...modelBaseConfig,
...resolvedConfig
}, modelBasedir)
};
if (modelConfig.view.libs) {
console.error(`[${slug}] modelConfig.view.libs is not supported anymore, use require() or ES6 import expressions instead`);
}
if (modelConfig.extendRouter) {
console.error(`[${slug}] modelConfig.extendRouter is not supported anymore, use modelConfig.routes instead`);
}
modelConfig.darkmode = 'darkmode' in modelConfig

@@ -225,14 +256,12 @@ ? modelConfig.darkmode

modelConfig.routers = [
modelConfig.routers = unique([
...modelBaseConfig.routers,
...modelConfig.routers
];
]);
modelConfig.view.assets = [
modelConfig.view.assets = unique([
...modelBaseConfig.view.assets,
...modelConfig.view.assets
];
]);
modelConfig.cache = Boolean(modelConfig.cache);
res.push(modelConfig);

@@ -239,0 +268,0 @@ }

@@ -49,7 +49,20 @@ const path = require('path');

function prepareModel({ name, slug, cache, meta, download, darkmode }) {
function prepareModel(modelConfig, cacheDispatcher) {
const {
name,
slug,
meta,
download,
darkmode
} = modelConfig;
const {
cache,
manualReset: cacheReset
} = cacheDispatcher.getModelCacheInfo(slug);
return {
name,
slug,
cache: Boolean(cache),
cache,
cacheReset,
download,

@@ -62,3 +75,3 @@ darkmode,

module.exports = {
'/gen/setup.js': function(modelConfig, options, config, data = 'data.json') {
'/gen/setup.js': function(modelConfig, options, config, cacheDispatcher) {
let setup = {

@@ -68,9 +81,9 @@ name: config.name,

darkmode: config.darkmode,
data: modelConfig && modelConfig.data ? data : null
data: modelConfig && modelConfig.data ? 'data.json' : null
};
if (modelConfig) {
setup.model = prepareModel(modelConfig);
setup.model = prepareModel(modelConfig, cacheDispatcher);
} else {
setup.models = Array.isArray(config.models) ? config.models.map(model => prepareModel(model)) : [];
setup.models = Array.isArray(config.models) ? config.models.map(model => prepareModel(model, cacheDispatcher)) : [];
}

@@ -77,0 +90,0 @@

@@ -15,3 +15,5 @@ /* eslint-env browser */

download: model.download,
cache: model.cache
cache: model.cache,
cacheReset: model.cacheReset,
meta: model.meta
}

@@ -18,0 +20,0 @@ : null

@@ -13,2 +13,3 @@ /* eslint-env browser */

app.apply(extensions);
context = { ...context, meta: options.meta };

@@ -25,3 +26,3 @@ if (MODEL_DOWNLOAD && options.download) {

if (MODEL_RESET_CACHE && options.cache) {
if (MODEL_RESET_CACHE && options.cacheReset) {
app.nav.menu.append({

@@ -48,3 +49,4 @@ name: 'drop-cache',

app.context = context;
return app.renderPage();
};
{
"name": "@discoveryjs/cli",
"version": "2.0.0-beta.4",
"version": "2.0.0-beta.5",
"description": "CLI tools to serve & build projects based on Discovery.js",

@@ -36,3 +36,6 @@ "author": "Roman Dvornov <rdvornov@gmail.com> (https://github.com/lahmatiy)",

"clap": "^2.0.1",
"esbuild": "^0.8.42",
"cron-parser": "^3.1.0",
"cron-validator": "^1.2.0",
"cronstrue": "^1.109.0",
"esbuild": "^0.8.44",
"express": "^4.17.1",

@@ -39,0 +42,0 @@ "mime": "^2.4.4",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc