New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

dev-dep-tool

Package Overview
Dependencies
Maintainers
1
Versions
60
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

dev-dep-tool - npm Package Compare versions

Comparing version 0.4.1 to 0.4.2-dev.0

bin/function.js

72

bin/checkOutdated/collectDependency.js

@@ -1,48 +0,24 @@

var _path = require("path");
var _fs = require("fs");
var _Object = require("dr-js/library/common/mutable/Object");
var _File = require("dr-js/library/node/file/File");
var _Directory = require("dr-js/library/node/file/Directory");
// Read one package.json and feed each of its dependency maps to `collect`,
// tagging every entry with the file's path relative to `pathInput`.
const loadPackage = (pathInput, path, collect) => {
  const packageSource = (0, _path.relative)(pathInput, path);
  const parsed = JSON.parse((0, _fs.readFileSync)(path, "utf8"));
  for (const key of [ "dependencies", "devDependencies", "peerDependencies", "optionalDependencies" ]) {
    const dependencyObject = parsed[ key ];
    if (dependencyObject) collect(dependencyObject, packageSource);
  }
};
exports.collectDependency = (async pathInput => {
const {collect, getResult} = (() => {
let packageInfoMap = {};
let dependencyMap = {};
return {
collect: (dependencyObject, source) => Object.entries(dependencyObject).forEach(([name, version]) => {
if (packageInfoMap[name]) return console.warn(`[collect] dropped duplicate package: ${name} at ${source} with version: ${version}, checking: ${packageInfoMap[name].version}`);
packageInfoMap[name] = {
name,
version,
source
};
dependencyMap[name] = version;
}),
getResult: () => {
(0, _Object.objectSortKey)(dependencyMap);
const result = {
packageInfoMap,
dependencyMap
};
packageInfoMap = {};
dependencyMap = {};
return result;
}
};
})();
const isDirectory = (await (0, _File.getPathStat)(pathInput)).isDirectory();
isDirectory ? (await (0, _Directory.getFileList)(pathInput)).filter(path => path.endsWith("package.json")).forEach(path => loadPackage(pathInput, path, collect)) : loadPackage(pathInput, pathInput, collect);
const {packageInfoMap, dependencyMap} = getResult();
return {
isDirectory,
packageInfoMap,
dependencyMap
};
});
// NOTE(review): minified build artifact (new-version side of the diff for bin/checkOutdated/collectDependency.js) — generated output; edit the un-minified source, not this.
var e=require("path")
var r=require("fs")
var t=require("dr-js/library/common/mutable/Object")
var c=require("dr-js/library/node/file/File")
var n=require("dr-js/library/node/file/Directory")
const a=(t,c,n)=>{const a=(0,e.relative)(t,c)
const{dependencies:o,devDependencies:i,peerDependencies:s,optionalDependencies:p}=JSON.parse((0,r.readFileSync)(c,"utf8"))
o&&n(o,a)
i&&n(i,a)
s&&n(s,a)
p&&n(p,a)}
exports.collectDependency=(async e=>{const{collect:r,getResult:o}=(()=>{let e={}
let r={}
return{collect:(t,c)=>Object.entries(t).forEach(([t,n])=>{if(e[t])return console.warn(`[collect] dropped duplicate package: ${t} at ${c} with version: ${n}, checking: ${e[t].version}`)
e[t]={name:t,version:n,source:c}
r[t]=n}),getResult:()=>{(0,t.objectSortKey)(r)
const c={packageInfoMap:e,dependencyMap:r}
e={}
r={}
return c}}})()
const i=(await(0,c.getPathStat)(e)).isDirectory()
i?(await(0,n.getFileList)(e)).filter(e=>e.endsWith("package.json")).forEach(t=>a(e,t,r)):a(e,e,r)
const{packageInfoMap:s,dependencyMap:p}=o()
return{isDirectory:i,packageInfoMap:s,dependencyMap:p}})

@@ -1,16 +0,10 @@

var _path = require("path");
var _fs = require("fs");
var _Modify = require("dr-js/library/node/file/Modify");
var _collectDependency = require("./collectDependency");
var _logResult = require("./logResult");
// Entry for `--check-outdated`: gather all declared dependencies under
// `pathInput`, run `npm outdated` against them, and exit the process with the
// number of outdated packages as the exit code (0 = everything up to date).
exports.doCheckOutdated = (async ({pathInput, pathTemp = (0, _path.resolve)(pathInput, "check-outdated-gitignore")}) => {
console.log(`[checkOutdated] checking '${pathInput}'`);
const {isDirectory, packageInfoMap, dependencyMap} = await (0, _collectDependency.collectDependency)(pathInput);
// directory input: write the merged dependency map into a throw-away
// package.json inside a temp dir so `npm outdated` can scan it;
// file input: run directly in the file's own directory
const outdatedCount = isDirectory ? await (0, _Modify.withTempDirectory)(pathTemp, async () => {
(0, _fs.writeFileSync)((0, _path.resolve)(pathTemp, "package.json"), JSON.stringify({
dependencies: dependencyMap
}));
return (0, _logResult.logCheckOutdatedResult)(packageInfoMap, pathTemp);
}) : await (0, _logResult.logCheckOutdatedResult)(packageInfoMap, (0, _path.dirname)(pathInput));
process.exit(outdatedCount);
});
// NOTE(review): minified build artifact (new-version side of the diff for bin/checkOutdated/index.js) — generated output; edit the un-minified source, not this.
var e=require("path")
var t=require("fs")
var r=require("dr-js/library/node/file/Modify")
var a=require("./collectDependency")
var c=require("./logResult")
exports.doCheckOutdated=(async({pathInput:o,pathTemp:i=(0,e.resolve)(o,"check-outdated-gitignore")})=>{console.log(`[checkOutdated] checking '${o}'`)
const{isDirectory:d,packageInfoMap:n,dependencyMap:s}=await(0,a.collectDependency)(o)
const l=d?await(0,r.withTempDirectory)(i,async()=>{(0,t.writeFileSync)((0,e.resolve)(i,"package.json"),JSON.stringify({dependencies:s}))
return(0,c.logCheckOutdatedResult)(n,i)}):await(0,c.logCheckOutdatedResult)(n,(0,e.dirname)(o))
process.exit(l)})

@@ -1,53 +0,29 @@

var _format = require("dr-js/library/common/format");
var _function = require("dr-js/library/common/function");
var _SemVer = require("dr-js/library/common/module/SemVer");
var _Run = require("dr-js/library/node/system/Run");
// Matches ANSI color/control escape sequences so npm's colorized output can be
// stripped before parsing. Uses \x1b: the original \033 octal escape inside a
// regex is legacy Annex B syntax and is rejected in unicode-mode regexes.
const REGEXP_ANSI_ESCAPE_CODE = /\x1b\[[0-9;]*[a-zA-Z]/g;
// Captures the `name current wanted latest` columns of an `npm outdated` table row.
const REGEXP_NPM_OUTDATED_OUTPUT = /(\S+)\s+\S+\s+(\S+)\s+(\S+)/;
// Comparator for result rows [name, version, target, source]:
// order by source path first, then by package name.
const sortTableRow = ([nameA, , , sourceA], [nameB, , , sourceB]) => {
  if (sourceA !== sourceB) return sourceA.localeCompare(sourceB);
  return nameA.localeCompare(nameB);
};
// Render a 4-column table with " | " separators: first column right-aligned,
// the rest left-aligned.
const formatPadTable = table => {
  const padFuncList = [ "R", "L", "L", "L" ];
  return (0, _format.padTable)({ table, cellPad: " | ", padFuncList });
};
// Run `npm outdated` in `pathPackage`, parse its table output, and log two
// tables: SAME (already at the target version) to stdout, OUTDATED to stderr.
// Resolves to the number of outdated packages.
exports.logCheckOutdatedResult = (async (packageInfoMap, pathPackage) => (async (packageInfoMap, npmOutdatedOutputString) => {
const sameTable = [];
const outdatedTable = [];
npmOutdatedOutputString.split("\n").forEach(outputLine => {
// strip ANSI color codes, then pull name/wanted/latest columns from the row
const [, name, versionWanted, versionLatest] = REGEXP_NPM_OUTDATED_OUTPUT.exec(outputLine.replace(REGEXP_ANSI_ESCAPE_CODE, "")) || [];
if (!packageInfoMap[name]) return;
const {version, source} = packageInfoMap[name];
// target whichever of wanted/latest compares newer
const versionTarget = (0, _SemVer.compareSemVer)(versionWanted, versionLatest) <= 0 ? versionLatest : versionWanted;
const rowList = [ name, version, versionTarget, source ];
// endsWith lets range prefixes like "^1.2.3" count as matching target "1.2.3"
version.endsWith(versionTarget) ? sameTable.push(rowList) : outdatedTable.push(rowList);
});
const total = sameTable.length + outdatedTable.length;
sameTable.sort(sortTableRow);
sameTable.length && console.log(`SAME[${sameTable.length}/${total}]:\n${formatPadTable(sameTable)}`);
outdatedTable.sort(sortTableRow);
outdatedTable.length && console.error(`OUTDATED[${outdatedTable.length}/${total}]:\n${formatPadTable(outdatedTable)}`);
return outdatedTable.length;
})(packageInfoMap, await (async pathPackage => {
// spawn `npm outdated` quietly and capture its stdout
const {promise: runPromise, subProcess, stdoutBufferPromise} = (0, _Run.runQuiet)({
command: "npm",
argList: [ "--no-update-notifier", "outdated" ],
option: {
cwd: pathPackage
}
});
// `npm outdated` exits non-zero when anything is outdated, so both resolve
// and reject of the run count as completion; only the 42s timer rejects
const {promise, resolve, reject} = (0, _function.createInsideOutPromise)();
runPromise.then(resolve, resolve);
setTimeout(reject, 42e3);
const {code, signal} = await promise.catch(async () => {
console.warn("[checkNpmOutdated] timeout");
// on Windows kill the whole process tree; elsewhere a plain kill suffices
"win32" === process.platform ? (0, _Run.runSync)({
command: "taskkill",
argList: [ "-F", "-T", "-PID", subProcess.pid ],
option: {
stdio: "ignore"
}
}) : subProcess.kill();
throw new Error("[checkNpmOutdated] timeout");
});
return (await stdoutBufferPromise).toString();
})(pathPackage)));
// NOTE(review): minified build artifact (new-version side of the diff for bin/checkOutdated/logResult.js) — generated output; edit the un-minified source, not this.
var e=require("dr-js/library/common/format")
var t=require("dr-js/library/common/function")
var o=require("dr-js/library/common/module/SemVer")
var r=require("dr-js/library/node/system/Run")
const s=/\033\[[0-9;]*[a-zA-Z]/g
const n=/(\S+)\s+\S+\s+(\S+)\s+(\S+)/
const c=([e,,,t],[o,,,r])=>t!==r?t.localeCompare(r):e.localeCompare(o)
const a=t=>(0,e.padTable)({table:t,cellPad:" | ",padFuncList:["R","L","L","L"]})
exports.logCheckOutdatedResult=(async(e,i)=>(async(e,t)=>{const r=[]
const i=[]
t.split("\n").forEach(t=>{const[,c,a,l]=n.exec(t.replace(s,""))||[]
if(!e[c])return
const{version:u,source:d}=e[c]
const m=(0,o.compareSemVer)(a,l)<=0?l:a
const p=[c,u,m,d]
u.endsWith(m)?r.push(p):i.push(p)})
const l=r.length+i.length
r.sort(c)
r.length&&console.log(`SAME[${r.length}/${l}]:\n${a(r)}`)
i.sort(c)
i.length&&console.error(`OUTDATED[${i.length}/${l}]:\n${a(i)}`)
return i.length})(e,await(async e=>{const{promise:o,subProcess:s,stdoutBufferPromise:n}=(0,r.runQuiet)({command:"npm",argList:["--no-update-notifier","outdated"],option:{cwd:e}})
const{promise:c,resolve:a,reject:i}=(0,t.createInsideOutPromise)()
o.then(a,a)
setTimeout(i,42e3)
const{code:l,signal:u}=await c.catch(async()=>{console.warn("[checkNpmOutdated] timeout")
"win32"===process.platform?(0,r.runSync)({command:"taskkill",argList:["-F","-T","-PID",s.pid],option:{stdio:"ignore"}}):s.kill()
throw new Error("[checkNpmOutdated] timeout")})
return(await n).toString()})(i)))
#!/usr/bin/env node
var _option = require("./option");
var _checkOutdated = require("./checkOutdated");
var _pack = require("./pack");
var _package = require("../package.json");
// CLI entry: parse options, print version/usage when no mode flag is given,
// otherwise dispatch to check-outdated and/or pack.
// Exit codes: 1 = option-parse failure, 2 = mode execution failure.
(async () => {
const {getSingleOption, getSingleOptionOptional} = await (0, _option.parseOption)();
const isCheckOutdated = getSingleOptionOptional("check-outdated");
const isPack = getSingleOptionOptional("pack");
// no mode flag: show version JSON, or usage (full usage only with --help)
if (!isCheckOutdated && !isPack) return getSingleOptionOptional("version") ? console.log(JSON.stringify({
packageName: _package.name,
packageVersion: _package.version
}, null, " ")) : console.log((0, _option.formatUsage)(null, getSingleOptionOptional("help") ? null : "simple"));
await (async ({isCheckOutdated, isPack}, {getSingleOption, getSingleOptionOptional}) => {
isCheckOutdated && await (0, _checkOutdated.doCheckOutdated)({
pathInput: getSingleOption("path-input"),
pathTemp: getSingleOptionOptional("path-temp")
});
isPack && await (0, _pack.doPack)({
pathInput: getSingleOption("path-input"),
pathOutput: getSingleOption("path-output"),
outputName: getSingleOptionOptional("output-name"),
outputVersion: getSingleOptionOptional("output-version"),
outputDescription: getSingleOptionOptional("output-description"),
isPublish: getSingleOptionOptional("publish"),
isPublishDev: getSingleOptionOptional("publish-dev")
});
})({
isCheckOutdated,
isPack
}, {
getSingleOption,
getSingleOptionOptional
}).catch(error => {
// mode-level failure: report and exit 2
console.warn("[Error]", error.stack || error);
process.exit(2);
});
})().catch(error => {
// option-parse failure: print usage alongside the error and exit 1
console.warn((0, _option.formatUsage)(error.stack || error, "simple"));
process.exit(1);
});
// NOTE(review): minified build artifact (new-version side of the diff for bin/index.js; adds the step-package-version mode) — generated output; edit the un-minified source, not this.
var t=require("./checkOutdated")
var e=require("./pack")
var a=require("./stepPackageVersion")
var i=require("./option")
var o=require("../package.json");(async()=>{const p=await(0,i.parseOption)()
const{getOptionOptional:s}=p
const n=s("check-outdated")
const c=s("pack")
const r=s("step-package-version")
if(!n&&!c&&!r)return s("version")?console.log(JSON.stringify({packageName:o.name,packageVersion:o.version},null," ")):console.log((0,i.formatUsage)(null,s("help")?null:"simple"))
await(async({isCheckOutdated:i,isPack:o,isStepPackageVersion:p},{getOptionOptional:s,getSingleOption:n,getSingleOptionOptional:c})=>{i&&await(0,t.doCheckOutdated)({pathInput:n("path-input"),pathTemp:c("path-temp")})
o&&await(0,e.doPack)({pathInput:n("path-input"),pathOutput:n("path-output"),outputName:c("output-name"),outputVersion:c("output-version"),outputDescription:c("output-description"),isPublish:s("publish"),isPublishDev:s("publish-dev")})
p&&await(0,a.doStepPackageVersion)({pathInput:c("path-input")||".",isSortKey:s("sort-key"),isGitCommit:s("git-commit")})})({isCheckOutdated:n,isPack:c,isStepPackageVersion:r},p).catch(t=>{console.warn("[Error]",t.stack||t)
process.exit(2)})})().catch(t=>{console.warn((0,i.formatUsage)(t.stack||t,"simple"))
process.exit(1)})

@@ -1,67 +0,7 @@

var _preset = require("dr-js/library/common/module/Option/preset");
var _Option = require("dr-js/library/node/module/Option");
// CLI option schema: two mode flags (--check-outdated / --pack) carry their
// sub-options in extendFormatList; values may also come from "dev-dep-*"
// environment variables or a config JSON (prefixENV / prefixJSON).
const {SingleString, SinglePath, BooleanFlag, Config} = _Option.ConfigPresetNode;
const OPTION_CONFIG = {
prefixENV: "dev-dep",
prefixJSON: "dev-dep",
formatList: [ Config, {
...BooleanFlag,
name: "help",
shortName: "h"
}, {
...BooleanFlag,
name: "version",
shortName: "v"
}, {
// path-input is shared by both modes, hence required only when one is set
...SinglePath,
optional: (0, _preset.getOptionalFormatFlag)("check-outdated", "pack"),
name: "path-input",
shortName: "i",
description: "path to 'package.json', or directory with 'package.json' inside"
}, {
...BooleanFlag,
name: "check-outdated",
shortName: "C",
extendFormatList: [ {
...SinglePath,
optional: !0,
name: "path-temp"
} ]
}, {
...BooleanFlag,
name: "pack",
shortName: "P",
extendFormatList: [ {
...SinglePath,
name: "path-output",
shortName: "o",
description: "output path"
}, {
...SingleString,
optional: !0,
name: "output-name",
description: "output package name"
}, {
...SingleString,
optional: !0,
name: "output-version",
description: "output package version"
}, {
...SingleString,
optional: !0,
name: "output-description",
description: "output package description"
}, {
...BooleanFlag,
name: "publish",
description: "run npm publish"
}, {
...BooleanFlag,
name: "publish-dev",
description: "run npm publish-dev"
} ]
} ]
};
const {parseOption, formatUsage} = (0, _Option.prepareOption)(OPTION_CONFIG);
exports.formatUsage = formatUsage;
exports.parseOption = parseOption;
// NOTE(review): minified build artifact (new-version side of the diff for bin/option.js; adds the step-package-version flag group) — generated output; edit the un-minified source, not this.
var e=require("dr-js/library/common/module/Option/preset")
var t=require("dr-js/library/node/module/Option")
const{SingleString:o,SinglePath:a,BooleanFlag:p,Config:r}=t.ConfigPresetNode
const i={prefixENV:"dev-dep",prefixJSON:"dev-dep",formatList:[r,{...p,name:"help",shortName:"h"},{...p,name:"version",shortName:"v"},{...a,optional:(0,e.getOptionalFormatFlag)("check-outdated","pack"),name:"path-input",shortName:"i",description:"path to 'package.json', or directory with 'package.json' inside"},{...p,name:"check-outdated",shortName:"C",extendFormatList:[{...a,optional:!0,name:"path-temp"}]},{...p,name:"pack",shortName:"P",extendFormatList:[{...a,name:"path-output",shortName:"o",description:"output path"},{...o,optional:!0,name:"output-name",description:"output package name"},{...o,optional:!0,name:"output-version",description:"output package version"},{...o,optional:!0,name:"output-description",description:"output package description"},{...p,name:"publish",description:"run npm publish"},{...p,name:"publish-dev",description:"run npm publish-dev"}]},{...p,name:"step-package-version",shortName:"S",description:"step up package version (expect '0.0.0-dev.0-local.0' format)",extendFormatList:[{...p,name:"sort-key",shortName:"K"},{...p,name:"git-commit",shortName:"G"}]}]}
const{parseOption:n,formatUsage:s}=(0,t.prepareOption)(i)
exports.formatUsage=s
exports.parseOption=n

@@ -1,63 +0,34 @@

var _path = require("path");
var _fs = require("fs");
var _child_process = require("child_process");
var _format = require("dr-js/library/common/format");
var _Object = require("dr-js/library/common/mutable/Object");
var _File = require("dr-js/library/node/file/File");
var _Modify = require("dr-js/library/node/file/Modify");
var _Run = require("dr-js/library/node/system/Run");
// Recursively load a package "recipe": `packagePath` is a package.json (or a
// directory containing one) whose IMPORT list pulls in other recipes, while
// EXPORT/INSTALL lists accumulate [source, target] file pairs and all other
// keys deep-merge into the output package.json. `loadedSet` guards against
// circular IMPORTs. NOTE: reassigns the `packagePath` parameter for .json inputs.
const loadPackage = (packagePath, packageInfo = {
packageJSON: {},
exportFilePairList: [],
installFilePairList: []
}, loadedSet = new Set()) => {
const packageFile = packagePath.endsWith(".json") ? packagePath : (0, _path.join)(packagePath, "package.json");
packagePath.endsWith(".json") && (packagePath = (0, _path.dirname)(packagePath));
if (loadedSet.has(packageFile)) return packageInfo;
loadedSet.add(packageFile);
// require() reads and caches the JSON recipe file
const {IMPORT: importList, EXPORT: exportList, INSTALL: installList, ...mergePackageJSON} = require(packageFile);
const {packageJSON, exportFilePairList, installFilePairList} = packageInfo;
// depth-first: imported recipes are merged before this package's own keys —
// presumably so this package's values override imports; verify objectMergeDeep semantics
importList && importList.forEach(importPackagePath => loadPackage((0, _path.resolve)(packagePath, importPackagePath), packageInfo, loadedSet));
console.log(`[loadPackage] load: ${packageFile}`);
installList && installList.forEach(filePath => installFilePairList.push(parseResourcePath(filePath, packagePath)));
exportList && exportList.forEach(filePath => exportFilePairList.push(parseResourcePath(filePath, packagePath)));
mergePackageJSON && (0, _Object.objectMergeDeep)(packageJSON, mergePackageJSON);
return packageInfo;
};
// Normalize an EXPORT/INSTALL entry into a [sourceAbsolute, targetRelative]
// pair: string entries keep their own path as target, {from, to} entries remap it.
const parseResourcePath = (resourcePath, packagePath) => {
  if (typeof resourcePath === "object") return [ (0, _path.resolve)(packagePath, resourcePath.from), resourcePath.to ];
  return [ (0, _path.resolve)(packagePath, resourcePath), resourcePath ];
};
// dependency-type keys whose contents get alphabetically sorted in the output
const PACKAGE_KEY_SORT_REQUIRED = [ "dependencies", "devDependencies", "peerDependencies", "optionalDependencies", "bundledDependencies" ];
// canonical key order for the generated package.json (unlisted keys sort last)
const PACKAGE_KEY_ORDER = [ "private", "name", "version", "description", "author", "contributors", "license", "keywords", "repository", "homepage", "bugs", "os", "cpu", "engines", "engineStrict", "preferGlobal", "main", "bin", "man", "files", "directories", "scripts", "config", "publishConfig", ...PACKAGE_KEY_SORT_REQUIRED, "sideEffects" ];
// Entry for `--pack`: assemble an output package from the recipe at
// `pathInput` (merged package.json + copied export/install files), generate a
// README, run `npm pack`, and optionally `npm publish` the tarball.
exports.doPack = (async ({pathInput, pathOutput, outputName, outputVersion, outputDescription, isPublish, isPublishDev}) => {
const pathOutputInstall = (0, _path.resolve)(pathOutput, "install");
const {packageJSON, exportFilePairList, installFilePairList} = loadPackage(pathInput);
// CLI overrides for the merged package.json
outputName && (packageJSON.name = outputName);
outputVersion && (packageJSON.version = outputVersion);
outputDescription && (packageJSON.description = outputDescription);
// best-effort reset of the output directory
await _Modify.modify.delete(pathOutput).catch(() => {});
await (0, _File.createDirectory)(pathOutput);
await (0, _File.createDirectory)(pathOutputInstall);
// write package.json with sorted dependency maps and canonical key order
await (async (packageJSON, path) => {
PACKAGE_KEY_SORT_REQUIRED.forEach(key => {
packageJSON[key] && (0, _Object.objectSortKey)(packageJSON[key]);
});
const jsonFileStringList = Object.keys(packageJSON).sort((a, b) => PACKAGE_KEY_ORDER.indexOf(a) - PACKAGE_KEY_ORDER.indexOf(b)).map(key => (0, _format.stringIndentLine)(`${JSON.stringify(key)}: ${JSON.stringify(packageJSON[key], null, 2)}`));
const packageBuffer = Buffer.from(`{\n${jsonFileStringList.join(",\n")}\n}\n`);
(0, _fs.writeFileSync)(path, packageBuffer);
console.log(`[writePackageJSON] ${path} [${(0, _format.binary)(packageBuffer.length)}B]`);
})(packageJSON, (0, _path.join)(pathOutput, "package.json"));
// generate a minimal README with npm/size badges
(0, _fs.writeFileSync)((0, _path.join)(pathOutput, "README.md"), [ `# ${packageJSON.name}\n`, "[![i:npm]][l:npm]", "[![i:size]][l:size]", "", `${packageJSON.description}`, "", `[i:npm]: https://img.shields.io/npm/v/${packageJSON.name}.svg?colorB=blue`, `[l:npm]: https://npm.im/${packageJSON.name}`, `[i:size]: https://packagephobia.now.sh/badge?p=${packageJSON.name}`, `[l:size]: https://packagephobia.now.sh/result?p=${packageJSON.name}` ].join("\n"));
for (const [source, targetRelative] of exportFilePairList) await _Modify.modify.copy(source, (0, _path.join)(pathOutput, targetRelative));
for (const [source, targetRelative] of installFilePairList) await _Modify.modify.copy(source, (0, _path.join)(pathOutputInstall, targetRelative));
(0, _child_process.execSync)("npm --no-update-notifier pack", {
cwd: pathOutput,
stdio: "inherit",
shell: !0
});
const outputFileName = `${packageJSON.name}-${packageJSON.version}.tgz`;
const outputFilePath = (0, _path.join)(pathOutput, outputFileName);
console.log(`done pack: ${outputFileName} [${(0, _format.binary)((0, _fs.statSync)(outputFilePath).size)}B]`);
// publish-dev tags the release as "dev" instead of "latest"
(isPublish || isPublishDev) && (0, _Run.runSync)({
command: "npm",
argList: [ "publish", outputFilePath, "--tag", isPublishDev ? "dev" : "latest" ]
});
});
// NOTE(review): minified build artifact (new-version side of the diff for bin/pack.js) — generated output; edit the un-minified source, not this.
var e=require("path")
var a=require("fs")
var r=require("child_process")
var i=require("dr-js/library/common/format")
var t=require("dr-js/library/common/mutable/Object")
var o=require("dr-js/library/node/file/File")
var s=require("dr-js/library/node/file/Modify")
var n=require("dr-js/library/node/system/Run")
var c=require("./function")
const l=(a,r={packageJSON:{},exportFilePairList:[],installFilePairList:[]},i=new Set)=>{const{packageFile:o,packagePath:s}=(0,c.formatPackagePath)(a)
if(i.has(o))return r
i.add(o)
const{IMPORT:n,EXPORT:u,INSTALL:d,...m}=require(o)
const{packageJSON:h,exportFilePairList:v,installFilePairList:f}=r
n&&n.forEach(a=>l((0,e.resolve)(s,a),r,i))
console.log(`[loadPackage] load: ${o}`)
d&&d.forEach(e=>f.push(p(e,s)))
u&&u.forEach(e=>v.push(p(e,s)))
m&&(0,t.objectMergeDeep)(h,m)
return r}
const p=(a,r)=>"object"==typeof a?[(0,e.resolve)(r,a.from),a.to]:[(0,e.resolve)(r,a),a]
exports.doPack=(async({pathInput:t,pathOutput:p,pathOutputInstall:u=(0,e.resolve)(p,"install"),outputName:d,outputVersion:m,outputDescription:h,isPublish:v,isPublishDev:f})=>{const{packageJSON:y,exportFilePairList:g,installFilePairList:b}=l(t)
d&&(y.name=d)
m&&(y.version=m)
h&&(y.description=h)
await s.modify.delete(p).catch(()=>{})
await(0,o.createDirectory)(p)
await(0,o.createDirectory)(u)
await(0,c.writePackageJSON)({path:(0,e.resolve)(p,"package.json"),packageJSON:y});(0,a.writeFileSync)((0,e.resolve)(p,"README.md"),[`# ${y.name}\n`,"[![i:npm]][l:npm]","[![i:size]][l:size]","",`${y.description}`,"",`[i:npm]: https://img.shields.io/npm/v/${y.name}.svg?colorB=blue`,`[l:npm]: https://npm.im/${y.name}`,`[i:size]: https://packagephobia.now.sh/badge?p=${y.name}`,`[l:size]: https://packagephobia.now.sh/result?p=${y.name}`].join("\n"))
for(const[a,r]of g)await s.modify.copy(a,(0,e.resolve)(p,r))
for(const[a,r]of b)await s.modify.copy(a,(0,e.resolve)(u,r));(0,r.execSync)("npm --no-update-notifier pack",{cwd:p,stdio:"inherit",shell:!0})
const P=`${y.name}-${y.version}.tgz`
const k=(0,e.resolve)(p,P)
console.log(`done pack: ${P} [${(0,i.binary)((0,a.statSync)(k).size)}B]`);(v||f)&&(0,n.runSync)({command:"npm",argList:["publish",k,"--tag",f?"dev":"latest"]})})

@@ -1,86 +0,46 @@

var _assert = require("assert");
var _child_process = require("child_process");
var _fs = require("fs");
var _format = require("dr-js/library/common/format");
var _File = require("dr-js/library/node/file/File");
var _Directory = require("dr-js/library/node/file/Directory");
var _Run = require("dr-js/library/node/system/Run");
var _Modify = require("dr-js/library/node/file/Modify");
var _main = require("./main");
// Reset the output directory and seed it with a publish-ready package.json
// (selected keys dropped) plus copied root files; README.md is truncated at
// the NON_PACKAGE_CONTENT marker. Returns the pruned packageJSON object.
exports.initOutput = (async ({fromRoot, fromOutput, deleteKeyList = [ "private", "scripts", "devDependencies" ], copyPathList = [ "LICENSE", "README.md" ], logger: {padLog, log}}) => {
padLog("reset output");
// best-effort delete: ignore failure when the directory does not exist yet
await _Modify.modify.delete(fromOutput()).catch(() => {});
await (0, _File.createDirectory)(fromOutput());
padLog("init output package.json");
const packageJSON = require(fromRoot("package.json"));
for (const deleteKey of deleteKeyList) {
delete packageJSON[deleteKey];
log(`dropped key: ${deleteKey}`);
}
(0, _fs.writeFileSync)(fromOutput("package.json"), JSON.stringify(packageJSON));
padLog("init output file");
for (const copyPath of copyPathList) if ("README.md" === copyPath) {
// keep only the content above the NON_PACKAGE_CONTENT marker
(0, _fs.writeFileSync)(fromOutput(copyPath), (0, _fs.readFileSync)(fromRoot(copyPath)).toString().split("[//]: # (NON_PACKAGE_CONTENT)")[0].trim());
log(`copied: ${copyPath} (with NON_PACKAGE_CONTENT trimmed)`);
} else {
await _Modify.modify.copy(fromRoot(copyPath), fromOutput(copyPath));
log(`copied: ${copyPath}`);
}
return packageJSON;
});
// Run `npm pack` in the output directory, move the resulting tarball to the
// repo root, log its size, and return its root path.
exports.packOutput = (async ({fromRoot, fromOutput, logger: {padLog, log}}) => {
padLog("run pack output");
(0, _child_process.execSync)("npm --no-update-notifier pack", {
cwd: fromOutput(),
stdio: _main.__VERBOSE__ ? "inherit" : [ "ignore", "ignore" ],
shell: !0
});
log("move to root path");
const packageJSON = require(fromOutput("package.json"));
// npm strips the scope "@" and turns "/" into "-" in the tarball name
const packName = `${packageJSON.name.replace(/^@/, "").replace("/", "-")}-${packageJSON.version}.tgz`;
await _Modify.modify.move(fromOutput(packName), fromRoot(packName));
padLog(`pack size: ${(0, _format.binary)((0, _fs.statSync)(fromRoot(packName)).size)}B`);
return fromRoot(packName);
});
// Smoke-test the packed bin: run `node bin --version` in the output directory
// and assert its output mentions every expected string (by default the
// package name and version).
exports.verifyOutputBinVersion = (async ({fromOutput, packageJSON, matchStringList = [ packageJSON.name, packageJSON.version ], logger: {padLog, log}}) => {
padLog("verify output bin working");
const outputBinTest = (0, _child_process.execSync)("node bin --version", {
cwd: fromOutput(),
stdio: "pipe",
shell: !0
}).toString();
log(`bin test output: ${outputBinTest}`);
for (const testString of matchStringList) (0, _assert.ok)(outputBinTest.includes(testString), `should output contain: ${testString}`);
});
exports.verifyNoGitignore = (async ({path, logger: {padLog}}) => {
padLog("verify no gitignore file left");
const badFileList = (await (0, _Directory.getFileList)(path)).filter(path => path.includes("gitignore"));
badFileList.length && console.error(`found gitignore file:\n - ${badFileList.join("\n - ")}`);
(0, _assert.ok)(!badFileList.length, `${badFileList.length} gitignore file found`);
});
// Derive publish mode from the CLI flag list; "publish-dev" implies publishing.
const getPublishFlag = flagList => {
  const isDev = flagList.includes("publish-dev");
  const isPublish = isDev || flagList.includes("publish");
  return { isPublish, isDev };
};
exports.getPublishFlag = getPublishFlag;
// Version gate for publishing: plain x.y.z for the "latest" tag,
// x.y.z-dev.N for the "dev" tag.
const REGEXP_PUBLISH_VERSION = /^\d+\.\d+\.\d+$/;
const REGEXP_PUBLISH_VERSION_DEV = /^\d+\.\d+\.\d+-dev\.\d+$/;
const checkPublishVersion = ({isDev, version}) => (isDev ? REGEXP_PUBLISH_VERSION_DEV : REGEXP_PUBLISH_VERSION).test(version);
exports.checkPublishVersion = checkPublishVersion;
// Publish the packed tarball when a publish flag is present: validates the
// tarball path and the version format for the chosen tag, then runs
// `npm publish` with "dev" or "latest" tag plus any extra args.
exports.publishOutput = (async ({flagList, packageJSON, pathPackagePack, extraArgs = [], logger}) => {
const {isPublish, isDev} = getPublishFlag(flagList);
if (!isPublish) return logger.padLog("skipped publish output, no flag found");
if (!pathPackagePack || !pathPackagePack.endsWith(".tgz")) throw new Error(`[publishOutput] invalid pathPackagePack: ${pathPackagePack}`);
// dev publishes require a "-dev.N" version, stable publishes a bare x.y.z
if (!checkPublishVersion({
isDev,
version: packageJSON.version
})) throw new Error(`[publishOutput] invalid version: ${packageJSON.version}, isDev: ${isDev}`);
logger.padLog(`${isDev ? "publish-dev" : "publish"}: ${packageJSON.version}`);
(0, _Run.runSync)({
command: "npm",
argList: [ "--no-update-notifier", "publish", pathPackagePack, "--tag", isDev ? "dev" : "latest", ...extraArgs ]
});
});
// NOTE(review): minified build artifact (new-version side of the diff for the output-helper module) — generated output; edit the un-minified source, not this.
var e=require("assert")
var t=require("child_process")
var i=require("fs")
var r=require("dr-js/library/common/format")
var o=require("dr-js/library/node/file/File")
var s=require("dr-js/library/node/file/Directory")
var n=require("dr-js/library/node/system/Run")
var a=require("dr-js/library/node/file/Modify")
var p=require("./main")
exports.initOutput=(async({fromRoot:e,fromOutput:t,deleteKeyList:r=["private","scripts","devDependencies"],copyPathList:s=["LICENSE","README.md"],logger:{padLog:n,log:p}})=>{n("reset output")
await a.modify.delete(t()).catch(()=>{})
await(0,o.createDirectory)(t())
n("init output package.json")
const u=require(e("package.json"))
for(const e of r){delete u[e]
p(`dropped key: ${e}`)}(0,i.writeFileSync)(t("package.json"),JSON.stringify(u))
n("init output file")
for(const r of s)if("README.md"===r){(0,i.writeFileSync)(t(r),(0,i.readFileSync)(e(r)).toString().split("[//]: # (NON_PACKAGE_CONTENT)")[0].trim())
p(`copied: ${r} (with NON_PACKAGE_CONTENT trimmed)`)}else{await a.modify.copy(e(r),t(r))
p(`copied: ${r}`)}return u})
exports.packOutput=(async({fromRoot:e,fromOutput:o,logger:{padLog:s,log:n}})=>{s("run pack output");(0,t.execSync)("npm --no-update-notifier pack",{cwd:o(),stdio:p.__VERBOSE__?"inherit":["ignore","ignore"],shell:!0})
n("move to root path")
const u=require(o("package.json"))
const c=`${u.name.replace(/^@/,"").replace("/","-")}-${u.version}.tgz`
await a.modify.move(o(c),e(c))
s(`pack size: ${(0,r.binary)((0,i.statSync)(e(c)).size)}B`)
return e(c)})
exports.verifyOutputBinVersion=(async({fromOutput:i,packageJSON:r,matchStringList:o=[r.name,r.version],logger:{padLog:s,log:n}})=>{s("verify output bin working")
const a=(0,t.execSync)("node bin --version",{cwd:i(),stdio:"pipe",shell:!0}).toString()
n(`bin test output: ${a}`)
for(const t of o)(0,e.ok)(a.includes(t),`should output contain: ${t}`)})
exports.verifyNoGitignore=(async({path:t,logger:{padLog:i}})=>{i("verify no gitignore file left")
const r=(await(0,s.getFileList)(t)).filter(e=>e.includes("gitignore"))
r.length&&console.error(`found gitignore file:\n - ${r.join("\n - ")}`);(0,e.ok)(!r.length,`${r.length} gitignore file found`)})
const u=e=>{const t=e.includes("publish-dev")
return{isPublish:t||e.includes("publish"),isDev:t}}
exports.getPublishFlag=u
const c=({isDev:e,version:t})=>e?d.test(t):l.test(t)
exports.checkPublishVersion=c
const l=/^\d+\.\d+\.\d+$/
const d=/^\d+\.\d+\.\d+-dev\.\d+$/
exports.publishOutput=(async({flagList:e,packageJSON:t,pathPackagePack:i,extraArgs:r=[],logger:o})=>{const{isPublish:s,isDev:a}=u(e)
if(!s)return o.padLog("skipped publish output, no flag found")
if(!i||!i.endsWith(".tgz"))throw new Error(`[publishOutput] invalid pathPackagePack: ${i}`)
if(!c({isDev:a,version:t.version}))throw new Error(`[publishOutput] invalid version: ${t.version}, isDev: ${a}`)
o.padLog(`${a?"publish-dev":"publish"}: ${t.version}`);(0,n.runSync)({command:"npm",argList:["--no-update-notifier","publish",i,"--tag",a?"dev":"latest",...r]})})

@@ -1,16 +0,6 @@

var _child_process = require("child_process");
// Run a shell command synchronously and return its stdout as a string;
// on any failure, warn and return "" instead of throwing.
const tryExec = (command, option) => {
  let output = "";
  try {
    output = (0, _child_process.execSync)(command, option).toString();
  } catch (error) {
    console.warn(`[tryExec] failed for: ${command}, error: ${error}`);
  }
  return output;
};
exports.tryExec = tryExec;
exports.getGitBranch = (() => tryExec("git symbolic-ref --short HEAD", {
stdio: "pipe"
}).replace("\n", "").trim());
exports.getGitCommitHash = (() => tryExec('git log -1 --format="%H"', {
stdio: "pipe"
}).replace("\n", "").trim());
// NOTE(review): minified build artifact (new-version side of the diff for the exec helper) — generated output; edit the un-minified source, not this.
var e=require("child_process")
const t=(t,r)=>{try{return(0,e.execSync)(t,r).toString()}catch(e){console.warn(`[tryExec] failed for: ${t}, error: ${e}`)
return""}}
exports.tryExec=t
exports.getGitBranch=(()=>t("git symbolic-ref --short HEAD",{stdio:"pipe"}).replace("\n","").trim())
exports.getGitCommitHash=(()=>t('git log -1 --format="%H"',{stdio:"pipe"}).replace("\n","").trim())

@@ -1,53 +0,27 @@

// "fileName" -> "FileName": upper-case only the first character.
const toExportName = name => name.charAt(0).toUpperCase() + name.slice(1);
// True when the name starts with an ASCII uppercase letter
// (such files are treated as namespace-style modules).
const isFirstUpperCase = name => /^[A-Z]/.test(name);
// For each route in sourceRouteMap, generate the source text of an index.js:
// sub-directories and Uppercase files become `import * as X` namespace
// re-exports; lowercase files get their names re-exported directly (unless
// the route also has directories, which forces namespace style throughout).
// Returns a map of "route/index.js" -> script text.
exports.generateIndexScript = (({sourceRouteMap}) => {
const indexScriptMap = {};
Object.values(sourceRouteMap).forEach(({routeList, directoryList, fileList}) => {
const textList = [];
const importList = [];
directoryList.forEach(name => {
const exportName = toExportName(name);
textList.push(`import * as ${exportName} from './${name}'`);
importList.push(exportName);
});
// NOTE(review): `.map` is used purely for side effects here — forEach would be clearer
fileList.map(({name, exportList}) => {
if (directoryList.length || isFirstUpperCase(name)) {
const exportName = toExportName(name);
textList.push(`import * as ${exportName} from './${name}'`);
importList.push(exportName);
} else textList.push(`export { ${exportList.join(", ")} } from './${name}'`);
});
importList.length && textList.push(`export { ${importList.join(", ")} }`);
indexScriptMap[[ ...routeList, "index.js" ].join("/")] = textList.join("\n");
});
return indexScriptMap;
});
// Special keys used inside the export-info tree built below.
exports.HOIST_LIST_KEY = "~hoist";
exports.EXPORT_LIST_KEY = "~export";
exports.EXPORT_HOIST_LIST_KEY = "~export-hoist";
// Build a nested map of route -> export info: directories and Uppercase files
// appear under their export name; lowercase files record their exports under
// "~hoist" and accumulate them into the parent's "~export-hoist" list.
// Every file also gets its own node keyed by full route with "~export".
exports.generateExportInfo = (({sourceRouteMap}) => {
const exportInfoMap = {};
// get-or-create the node for a route
const getExportInfo = (...routeList) => {
const key = routeList.join("/");
exportInfoMap[key] || (exportInfoMap[key] = {});
return exportInfoMap[key];
};
Object.values(sourceRouteMap).forEach(({routeList, directoryList, fileList}) => {
const exportInfo = getExportInfo(...routeList);
directoryList.forEach(name => {
exportInfo[toExportName(name)] = getExportInfo(...routeList, name);
});
// NOTE(review): `.map` is used purely for side effects here — forEach would be clearer
fileList.map(({name, exportList}) => {
if (directoryList.length || isFirstUpperCase(name)) exportInfo[toExportName(name)] = {
"~export": exportList
}; else {
exportInfo[name] = {
"~hoist": exportList
};
exportInfo["~export-hoist"] = [ ...exportInfo["~export-hoist"] || [], ...exportList ];
}
getExportInfo(...routeList, name)["~export"] = exportList;
});
});
return exportInfoMap;
});
// NOTE(review): minified build artifact (new-version side of the diff for the index/export generator) — generated output; edit the un-minified source, not this.
const t=t=>`${t.charAt(0).toUpperCase()}${t.slice(1)}`
const e=t=>/[A-Z]/.test(t.charAt(0))
exports.generateIndexScript=(({sourceRouteMap:o})=>{const r={}
Object.values(o).forEach(({routeList:o,directoryList:s,fileList:p})=>{const n=[]
const i=[]
s.forEach(e=>{const o=t(e)
n.push(`import * as ${o} from './${e}'`)
i.push(o)})
p.map(({name:o,exportList:r})=>{if(s.length||e(o)){const e=t(o)
n.push(`import * as ${e} from './${o}'`)
i.push(e)}else n.push(`export { ${r.join(", ")} } from './${o}'`)})
i.length&&n.push(`export { ${i.join(", ")} }`)
r[[...o,"index.js"].join("/")]=n.join("\n")})
return r})
exports.HOIST_LIST_KEY="~hoist"
exports.EXPORT_LIST_KEY="~export"
exports.EXPORT_HOIST_LIST_KEY="~export-hoist"
exports.generateExportInfo=(({sourceRouteMap:o})=>{const r={}
const s=(...t)=>{const e=t.join("/")
r[e]||(r[e]={})
return r[e]}
Object.values(o).forEach(({routeList:o,directoryList:r,fileList:p})=>{const n=s(...o)
r.forEach(e=>{n[t(e)]=s(...o,e)})
p.map(({name:p,exportList:i})=>{if(r.length||e(p))n[t(p)]={"~export":i}
else{n[p]={"~hoist":i}
n["~export-hoist"]=[...n["~export-hoist"]||[],...i]}s(...o,p)["~export"]=i})})
return r})

@@ -1,70 +0,31 @@

var _path = require("path");
var _fs = require("fs");
var _parser = require("@babel/parser");
var _compare = require("dr-js/library/common/compare");
var _Object = require("dr-js/library/common/mutable/Object");
var _File = require("dr-js/library/node/file/File");
var _Directory = require("dr-js/library/node/file/Directory");
// Create a stateful parser that walks source files, collects their named
// exports (via @babel/parser), and groups them by directory route.
// Returns {parseExport, getSourceRouteMap}; getSourceRouteMap drains the
// accumulated state (sorted) and resets it for reuse.
const createExportParser = ({parserPluginList, logger}) => {
let sourceRouteMap = {};
// Return (creating on demand) the record for a directory route.
const getRoute = routeList => {
const key = routeList.join("/");
sourceRouteMap[key] || (sourceRouteMap[key] = {
routeList,
directoryList: [],
fileList: []
});
return sourceRouteMap[key];
};
return {
// Record one path: directories are listed under their parent route;
// ".js" files are parsed for `ExportNamedDeclaration` names; anything
// else is skipped with a dev log.
parseExport: async path => {
const fileStat = await (0, _File.getPathStat)(path);
const routeList = path.split(_path.sep);
const name = routeList.pop();
if (fileStat.isDirectory()) {
logger.devLog(`[directory] ${path}`);
getRoute(routeList).directoryList.push(name);
} else if (fileStat.isFile() && name.endsWith(".js")) {
// Inline IIFE: parse the file as an ES module and flatten every named
// export (both declarations and `export { ... }` specifier lists).
const exportList = ((fileString, sourceFilename, parserPluginList) => {
const exportNodeList = (0, _parser.parse)(fileString, {
sourceFilename,
sourceType: "module",
plugins: parserPluginList || [ "objectRestSpread", "classProperties", "exportDefaultFrom", "exportNamespaceFrom", "jsx" ]
}).program.body.filter(({type}) => "ExportNamedDeclaration" === type);
return [].concat(...exportNodeList.map(({specifiers, declaration}) => declaration ? declaration.declarations ? declaration.declarations.map(({id: {name}}) => name) : [ declaration.id.name ] : specifiers.map(({exported: {name}}) => name)));
})((0, _fs.readFileSync)(path, {
encoding: "utf8"
}), path, parserPluginList);
logger.devLog(`[file] ${path}`);
if (!exportList.length) return;
// ".js" suffix dropped from the stored file name
getRoute(routeList).fileList.push({
name: name.slice(0, -3),
exportList
});
logger.devLog(`  export [${exportList.length}]: ${exportList.join(", ")}`);
} else logger.devLog(`[skipped] ${path} (${(0, _File.getPathTypeFromStat)(fileStat)})`);
},
// Sort everything deterministically, hand back the map, then reset the
// internal accumulator so the parser can be reused.
getSourceRouteMap: () => {
const result = (sourceRouteMap => {
Object.values(sourceRouteMap).forEach(({routeList, directoryList, fileList}) => {
directoryList.sort(_compare.compareString);
fileList.sort(({name: a}, {name: b}) => (0, _compare.compareString)(a, b));
fileList.forEach(({exportList}) => exportList.sort(_compare.compareString));
});
(0, _Object.objectSortKey)(sourceRouteMap);
return sourceRouteMap;
})(sourceRouteMap);
sourceRouteMap = {};
return result;
}
};
};
exports.createExportParser = createExportParser;
exports.collectSourceRouteMap = (async ({pathRootList = [], pathInfoFilter = info => !0, logger}) => {
const {parseExport, getSourceRouteMap} = createExportParser({
logger
});
const parseWalkExport = info => pathInfoFilter(info) && parseExport(info.path);
for (const pathRoot of pathRootList) await (0, _Directory.walkDirectoryInfoTree)(await (0, _Directory.getDirectoryInfoTree)(pathRoot), parseWalkExport);
return getSourceRouteMap();
});
var e=require("path")
var r=require("fs")
var t=require("@babel/parser")
var o=require("dr-js/library/common/compare")
var a=require("dr-js/library/common/mutable/Object")
var i=require("dr-js/library/node/file/File")
var s=require("dr-js/library/node/file/Directory")
// Minified build of the parseInfo module (see the readable version above).
// c = createExportParser({parserPluginList, logger}); n = sourceRouteMap state;
// p = getRoute (create-on-demand route record).
const c=({parserPluginList:s,logger:c})=>{let n={}
const p=e=>{const r=e.join("/")
n[r]||(n[r]={routeList:e,directoryList:[],fileList:[]})
return n[r]}
// parseExport: record a directory, or babel-parse a ".js" file for named exports.
return{parseExport:async o=>{const a=await(0,i.getPathStat)(o)
const n=o.split(e.sep)
const l=n.pop()
if(a.isDirectory()){c.devLog(`[directory] ${o}`)
p(n).directoryList.push(l)}else if(a.isFile()&&l.endsWith(".js")){const e=((e,r,o)=>{const a=(0,t.parse)(e,{sourceFilename:r,sourceType:"module",plugins:o||["objectRestSpread","classProperties","exportDefaultFrom","exportNamespaceFrom","jsx"]}).program.body.filter(({type:e})=>"ExportNamedDeclaration"===e)
return[].concat(...a.map(({specifiers:e,declaration:r})=>r?r.declarations?r.declarations.map(({id:{name:e}})=>e):[r.id.name]:e.map(({exported:{name:e}})=>e)))})((0,r.readFileSync)(o,{encoding:"utf8"}),o,s)
c.devLog(`[file] ${o}`)
if(!e.length)return
p(n).fileList.push({name:l.slice(0,-3),exportList:e})
// getSourceRouteMap: sort routes/files/exports, return the map, reset state.
c.devLog(`  export [${e.length}]: ${e.join(", ")}`)}else c.devLog(`[skipped] ${o} (${(0,i.getPathTypeFromStat)(a)})`)},getSourceRouteMap:()=>{const e=(e=>{Object.values(e).forEach(({routeList:e,directoryList:r,fileList:t})=>{r.sort(o.compareString)
t.sort(({name:e},{name:r})=>(0,o.compareString)(e,r))
t.forEach(({exportList:e})=>e.sort(o.compareString))});(0,a.objectSortKey)(e)
return e})(n)
n={}
return e}}}
exports.createExportParser=c
// collectSourceRouteMap: walk each root directory tree, parsing filtered paths.
exports.collectSourceRouteMap=(async({pathRootList:e=[],pathInfoFilter:r=(e=>!0),logger:t})=>{const{parseExport:o,getSourceRouteMap:a}=c({logger:t})
const i=e=>r(e)&&o(e.path)
for(const r of e)await(0,s.walkDirectoryInfoTree)(await(0,s.getDirectoryInfoTree)(r),i)
return a()})

@@ -1,38 +0,22 @@

var _path = require("path");
var _function = require("dr-js/library/node/file/function");
var _generateInfo = require("./generateInfo");
// Render a markdown link from header text to its GitHub-style anchor:
// lowercase, punctuation stripped, whitespace -> dashes, trailing dashes dropped.
const getMarkdownHeaderLink = text => {
  const anchor = text.trim()
    .toLowerCase()
    .replace(/[^\w\- ]+/g, "")
    .replace(/\s/g, "-")
    .replace(/-+$/, "");
  return `[${text}](#${anchor})`;
};
// Public export of the header-link helper defined above.
exports.getMarkdownHeaderLink = getMarkdownHeaderLink;
const REGEXP_MARKDOWN_HEADER = /^#{1,6}(.+?)#*$/gm;
exports.autoAppendMarkdownHeaderLink = ((...markdownStringList) => {
const headerTextList = (string => {
const headerTextList = [];
let result;
for (;result = REGEXP_MARKDOWN_HEADER.exec(string); ) {
const headerText = result[1].trim();
headerText && headerTextList.push(headerText);
}
return headerTextList;
})(markdownStringList.join("\n"));
return headerTextList.length ? [ ...headerTextList.map(text => `* ${getMarkdownHeaderLink(text)}`), "", ...markdownStringList ] : [];
});
const escapeMarkdownLink = name => name.replace(/_/g, "\\_");
exports.escapeMarkdownLink = escapeMarkdownLink;
const renderMarkdownFileLink = path => `📄 [${escapeMarkdownLink(path)}](${path})`;
exports.renderMarkdownFileLink = renderMarkdownFileLink;
exports.renderMarkdownDirectoryLink = (path => `📁 [${escapeMarkdownLink(path).replace(/\/*$/, "/")}](${path})`);
exports.renderMarkdownExportPath = (({exportInfoMap, rootPath}) => Object.entries(exportInfoMap).reduce((textList, [path, value]) => {
value[_generateInfo.EXPORT_LIST_KEY] && textList.push(`+ ${renderMarkdownFileLink(`${(0, _function.toPosixPath)((0, _path.relative)(rootPath, path))}.js`)}`, ` - ${value[_generateInfo.EXPORT_LIST_KEY].map(text => `\`${text}\``).join(", ")}`);
return textList;
}, []));
// Recursively render the nested export-info tree as an indented markdown list.
// "~hoist" entries are skipped (their names already live in the parent's
// "~export-hoist" list); export-list keys render as inline-code name lists.
const renderMarkdownExportTree = ({exportInfo, routeList}) => {
  const textList = [];
  for (const [ key, value ] of Object.entries(exportInfo)) {
    if (key === _generateInfo.HOIST_LIST_KEY) continue;
    if (key === _generateInfo.EXPORT_LIST_KEY || key === _generateInfo.EXPORT_HOIST_LIST_KEY) {
      textList.push(`- ${value.map(text => `\`${text}\``).join(", ")}`);
      continue;
    }
    // nested route: recurse, indenting the child lines
    const childTextList = renderMarkdownExportTree({
      exportInfo: value,
      routeList: [ ...routeList, key ]
    });
    if (childTextList.length) textList.push(`- **${key}**`, ...childTextList.map(text => `  ${text}`));
  }
  return textList;
};
exports.renderMarkdownExportTree = renderMarkdownExportTree;
var e=require("path")
var r=require("dr-js/library/node/file/function")
var o=require("./generateInfo")
// Minified build of the markdown module (readable version above).
// t = getMarkdownHeaderLink: header text -> GitHub-style anchor link.
const t=e=>`[${e}](#${e.trim().toLowerCase().replace(/[^\w\- ]+/g,"").replace(/\s/g,"-").replace(/-+$/,"")})`
exports.getMarkdownHeaderLink=t
// n = header-line regex; used by autoAppendMarkdownHeaderLink below.
const n=/^#{1,6}(.+?)#*$/gm
exports.autoAppendMarkdownHeaderLink=((...e)=>{const r=(e=>{const r=[]
let o
for(;o=n.exec(e);){const e=o[1].trim()
e&&r.push(e)}return r})(e.join("\n"))
return r.length?[...r.map(e=>`* ${t(e)}`),"",...e]:[]})
// a = escapeMarkdownLink; p = renderMarkdownFileLink.
const a=e=>e.replace(/_/g,"\\_")
exports.escapeMarkdownLink=a
const p=e=>`📄 [${a(e)}](${e})`
exports.renderMarkdownFileLink=p
exports.renderMarkdownDirectoryLink=(e=>`📁 [${a(e).replace(/\/*$/,"/")}](${e})`)
// renderMarkdownExportPath: file-link + export-name lines per exporting route.
exports.renderMarkdownExportPath=(({exportInfoMap:t,rootPath:n})=>Object.entries(t).reduce((t,[a,s])=>{s[o.EXPORT_LIST_KEY]&&t.push(`+ ${p(`${(0,r.toPosixPath)((0,e.relative)(n,a))}.js`)}`,`  - ${s[o.EXPORT_LIST_KEY].map(e=>`\`${e}\``).join(", ")}`)
return t},[]))
// s = renderMarkdownExportTree (recursive nested-list renderer).
const s=({exportInfo:e,routeList:r})=>Object.entries(e).reduce((e,[t,n])=>{if(t===o.HOIST_LIST_KEY);else if(t===o.EXPORT_LIST_KEY||t===o.EXPORT_HOIST_LIST_KEY)e.push(`- ${n.map(e=>`\`${e}\``).join(", ")}`)
else{const o=s({exportInfo:n,routeList:[...r,t]})
o.length&&e.push(`- **${t}**`,...o.map(e=>`  ${e}`))}return e},[])
exports.renderMarkdownExportTree=s

@@ -1,10 +0,8 @@

var _Directory = require("dr-js/library/node/file/Directory");
// Identity path resolver used when the caller supplies none.
const DEFAULT_RESOLVE_PATH = path => path;
// Recursively list files under every entry of `pathList` (each resolved via
// `resolvePath`), concatenated in input order; optionally filtered.
const getFileListFromPathList = async (pathList = [], resolvePath = DEFAULT_RESOLVE_PATH, filterFile) => {
  let resultFileList = [];
  // sequential on purpose: keeps output order stable per input order
  for (const path of pathList) {
    const fileList = await (0, _Directory.getFileList)(resolvePath(path));
    resultFileList = resultFileList.concat(fileList);
  }
  if (filterFile) resultFileList = resultFileList.filter(filterFile);
  return resultFileList;
};
exports.getFileListFromPathList = getFileListFromPathList;
// Same listing, defaulting the filter to non-test ".js" files.
exports.getScriptFileListFromPathList = (async (pathList = [], resolvePath = DEFAULT_RESOLVE_PATH, filterFile = (path => path.endsWith(".js") && !path.endsWith(".test.js"))) => getFileListFromPathList(pathList, resolvePath, filterFile));
var t=require("dr-js/library/node/file/Directory")
// Minified build of the fileList module, re-expanded for readability.
// (`t` is the dr-js Directory require bound above this block.)
const DEFAULT_RESOLVE_PATH = path => path
// Concatenate recursive file listings of every path; optional final filter.
const getFileListFromPathList = async (pathList = [], resolvePath = DEFAULT_RESOLVE_PATH, filterFile) => {
  let resultFileList = []
  for (const path of pathList) resultFileList = resultFileList.concat(await (0, t.getFileList)(resolvePath(path)))
  if (filterFile) resultFileList = resultFileList.filter(filterFile)
  return resultFileList
}
exports.getFileListFromPathList = getFileListFromPathList
// Same listing, defaulting the filter to non-test ".js" files.
exports.getScriptFileListFromPathList = (async (pathList = [], resolvePath = DEFAULT_RESOLVE_PATH, filterFile = (path => path.endsWith(".js") && !path.endsWith(".test.js"))) => getFileListFromPathList(pathList, resolvePath, filterFile))

@@ -1,20 +0,13 @@

var _fs = require("fs");
var _format = require("dr-js/library/common/format");
exports.wrapFileProcessor = (({processor, logger: {log, devLog}}) => async filePath => {
const inputString = (0, _fs.readFileSync)(filePath, "utf8");
const outputString = await processor(inputString, filePath);
if (inputString === outputString) {
devLog(`process skipped ${filePath}`);
return 0;
}
const {size: inputSize} = (0, _fs.statSync)(filePath);
outputString ? (0, _fs.writeFileSync)(filePath, outputString) : (0, _fs.unlinkSync)(filePath);
const {size: outputSize} = outputString ? (0, _fs.statSync)(filePath) : {
size: 0
};
const sizeChange = outputSize - inputSize;
devLog(`∆${(outputSize / inputSize).toFixed(2)}(${(0, _format.binary)(sizeChange)}B)`, `${(0, _format.binary)(inputSize)}B → ${(0, _format.binary)(outputSize)}B`, `${filePath}`);
return outputSize - inputSize;
});
exports.fileProcessorBabel = (inputString => inputString.replace(/['"]use strict['"];?\s*/g, "").replace(/Object\.defineProperty\(exports,\s*['"]__esModule['"],\s*{\s*value:\s*(true|!0)\s*}\)[;,]?\s*/g, "").replace(/(exports\.\w+\s*=\s*)+(undefined|void 0)[;,]?\s*/g, "").replace(/[\n\r]{2,}/g, "\n").replace(/^[\n\r]+/, ""));
exports.fileProcessorWebpack = (inputString => inputString.replace(/function\s*\(\)\s*{\s*return\s+([\w$]+(?:\.[\w$]+)?)\s*}([\s;)\]])/g, "()=>$1$2"));
var e=require("fs")
var r=require("dr-js/library/common/format")
// Minified build of the fileProcessor module (readable version above).
// wrapFileProcessor: run `s` (processor) over a file's content, write back or
// unlink on empty output, return the byte-size delta (0 when unchanged).
exports.wrapFileProcessor=(({processor:s,logger:{log:o,devLog:t}})=>async o=>{const c=(0,e.readFileSync)(o,"utf8")
const i=await s(c,o)
if(c===i){t(`process skipped ${o}`)
return 0}const{size:n}=(0,e.statSync)(o)
i?(0,e.writeFileSync)(o,i):(0,e.unlinkSync)(o)
const{size:a}=i?(0,e.statSync)(o):{size:0}
const p=a-n
t(`∆${(a/n).toFixed(2)}(${(0,r.binary)(p)}B)`,`${(0,r.binary)(n)}B → ${(0,r.binary)(a)}B`,`${o}`)
return a-n})
// fileProcessorBabel: strip babel CommonJS boilerplate and collapse blank lines.
exports.fileProcessorBabel=(e=>e.replace(/['"]use strict['"];?\s*/g,"").replace(/Object\.defineProperty\(exports,\s*['"]__esModule['"],\s*{\s*value:\s*(true|!0)\s*}\)[;,]?\s*/g,"").replace(/(exports\.\w+\s*=\s*)+(undefined|void 0)[;,]?\s*/g,"").replace(/[\n\r]{2,}/g,"\n").replace(/^[\n\r]+/,""))
// fileProcessorWebpack: shorten `function () { return x }` wrappers to arrows.
exports.fileProcessorWebpack=(e=>e.replace(/function\s*\(\)\s*{\s*return\s+([\w$]+(?:\.[\w$]+)?)\s*}([\s;)\]])/g,"()=>$1$2"))

@@ -1,38 +0,19 @@

var _time = require("dr-js/library/common/time");
var _format = require("dr-js/library/common/format");
var _main = require("./main");
// No-op used to silence log levels.
const EMPTY_FUNC = () => {};
// Build a console logger with step timing. The title is chained through the
// "__DEV_LOGGER_TITLE__" env key so nested tool invocations show lineage.
// quiet mode demotes each level (padLog->stepLog, stepLog/log->devLog, devLog off).
exports.getLogger = ((title = "dev-dep", quiet = !1, padWidth = 120) => {
const envTitle = (0, _main.loadEnvKey)("__DEV_LOGGER_TITLE__");
title = envTitle ? `${title}|${envTitle}` : title;
(0, _main.saveEnvKey)("__DEV_LOGGER_TITLE__", title);
const startTime = (0, _time.clock)();
let prevTime = (0, _time.clock)();
// stepLog prints the delta since the previous step (and advances prevTime).
const stepLog = (...args) => console.log(`- (+${(() => {
const time = (0, _time.clock)();
const stepTime = time - prevTime;
prevTime = time;
return (0, _format.time)(stepTime);
})()}) ${args.join(" ")}`);
const log = (...args) => console.log(`- ${args.join(" ")}`);
// devLog only prints when __DEV_VERBOSE__ is set.
const devLog = _main.__VERBOSE__ ? log : EMPTY_FUNC;
return quiet ? {
padLog: stepLog,
stepLog: devLog,
log: devLog,
devLog: EMPTY_FUNC
} : {
// padLog prints a "## title ----- [title|elapsed]" banner padded to padWidth.
padLog: (...args) => {
const start = `## ${args.join(" ")} `;
const end = ` [${title}|${(() => {
const time = (0, _time.clock)();
prevTime = time;
return (0, _format.time)(time - startTime);
})()}]`;
console.log(`\n${start.padEnd(padWidth - end.length, "-")}${end}`);
},
stepLog,
log,
devLog
};
});
var o=require("dr-js/library/common/time")
var e=require("dr-js/library/common/format")
var t=require("./main")
// Minified build of the logger module (readable version above).
// r = EMPTY_FUNC no-op; getLogger chains the title through env and returns
// {padLog, stepLog, log, devLog}, demoted when quiet (c) is true.
const r=()=>{}
exports.getLogger=((n="dev-dep",c=!1,s=120)=>{const l=(0,t.loadEnvKey)("__DEV_LOGGER_TITLE__")
n=l?`${n}|${l}`:n;(0,t.saveEnvKey)("__DEV_LOGGER_TITLE__",n)
const g=(0,o.clock)()
let _=(0,o.clock)()
const i=(...t)=>console.log(`- (+${(()=>{const t=(0,o.clock)()
const r=t-_
_=t
return(0,e.time)(r)})()}) ${t.join(" ")}`)
const d=(...o)=>console.log(`- ${o.join(" ")}`)
const a=t.__VERBOSE__?d:r
return c?{padLog:i,stepLog:a,log:a,devLog:r}:{padLog:(...t)=>{const r=`## ${t.join(" ")} `
const c=` [${n}|${(()=>{const t=(0,o.clock)()
_=t
return(0,e.time)(t-g)})()}]`
console.log(`\n${r.padEnd(s-c.length,"-")}${c}`)},stepLog:i,log:d,devLog:a}})

@@ -1,37 +0,19 @@

var _time = require("dr-js/library/common/time");
var _format = require("dr-js/library/common/format");
// Read process.env[key] as JSON; null when the key is missing or holds
// malformed JSON (JSON.parse of undefined throws and is caught here).
const loadEnvKey = key => {
  const raw = process.env[key];
  try {
    return JSON.parse(raw);
  } catch (error) {
    return null;
  }
};
exports.loadEnvKey = loadEnvKey;
// Best-effort: store a JSON-serialized value into process.env[key];
// serialization failures are deliberately ignored.
function saveEnvKey(key, value) {
  try {
    process.env[key] = JSON.stringify(value);
  } catch (error) {}
}
exports.saveEnvKey = saveEnvKey;
// Load a value from the env (falling back to defaultValue), then write the
// chosen value back so child processes inherit it.
function syncEnvKey(key, defaultValue) {
  const value = loadEnvKey(key) || defaultValue;
  saveEnvKey(key, value);
  return value;
}
exports.syncEnvKey = syncEnvKey;
// Verbose flag: sticky across child tooling via the env, or a "verbose" argv.
const __VERBOSE__ = syncEnvKey("__DEV_VERBOSE__", process.argv.includes("verbose"));
exports.__VERBOSE__ = __VERBOSE__;
// Return the first entry of `flagList` that also appears in `checkFlagList`
// (undefined when none match).
const checkFlag = (flagList, checkFlagList) => {
  for (const flag of flagList) {
    if (checkFlagList.includes(flag)) return flag;
  }
  return undefined;
};
// Export the flag matcher plus a process.argv-bound convenience wrapper.
exports.checkFlag = checkFlag;
exports.argvFlag = ((...checkFlagList) => checkFlag(process.argv, checkFlagList));
exports.runMain = ((main, logger, ...args) => {
const startTime = (0, _time.clock)();
new Promise(resolve => resolve(main(logger, ...args))).then(() => {
logger.padLog(`done in ${(0, _format.time)((0, _time.clock)() - startTime)}`);
}, error => {
console.warn(error);
logger.padLog(`error after ${(0, _format.time)((0, _time.clock)() - startTime)}: ${error}`);
process.exit(-1);
});
});
var e=require("dr-js/library/common/time")
var r=require("dr-js/library/common/format")
const o=e=>{try{return JSON.parse(process.env[e])}catch(e){return null}}
exports.loadEnvKey=o
const s=(e,r)=>{try{process.env[e]=JSON.stringify(r)}catch(e){}}
exports.saveEnvKey=s
const t=(e,r)=>{const t=o(e)||r
s(e,t)
return t}
exports.syncEnvKey=t
const n=t("__DEV_VERBOSE__",process.argv.includes("verbose"))
exports.__VERBOSE__=n
const c=(e,r)=>e.find(e=>r.includes(e))
exports.checkFlag=c
exports.argvFlag=((...e)=>c(process.argv,e))
exports.runMain=((o,s,...t)=>{const n=(0,e.clock)()
new Promise(e=>e(o(s,...t))).then(()=>{s.padLog(`done in ${(0,r.time)((0,e.clock)()-n)}`)},o=>{console.warn(o)
s.padLog(`error after ${(0,r.time)((0,e.clock)()-n)}: ${o}`)
process.exit(-1)})})

@@ -1,89 +0,29 @@

var _path = require("path");
var _fs = require("fs");
var _terser = (obj = require("terser")) && obj.__esModule ? obj : {
default: obj
};
var obj;
var _time = require("dr-js/library/common/time");
var _format = require("dr-js/library/common/format");
var _main = require("./main");
exports.getTerserOption = (({isDevelopment = !1, isModule = !1} = {}) => {
return {
ecma: 8,
toplevel: !0,
parse: {
ecma: 8
},
compress: {
ecma: 8,
toplevel: !0,
join_vars: !1,
sequences: !1,
global_defs: {
"process.env.NODE_ENV": isDevelopment ? "development" : "production",
__DEV__: Boolean(isDevelopment)
}
},
mangle: !isModule && {
toplevel: !0
},
output: isModule ? {
ecma: 8,
beautify: !0,
indent_level: 2,
width: 240
} : {
ecma: 8,
beautify: !1,
semicolons: !1
},
sourceMap: !1
};
});
// Minify one file in place with terser. Returns byte sizes and clock marks
// so the caller can report per-file stats; throws when terser reports an error.
const minifyWithTerser = ({filePath, option, logger}) => {
  const timeStart = (0, _time.clock)();
  const sourceText = (0, _fs.readFileSync)(filePath, {
    encoding: "utf8"
  });
  const {error, code: outputText} = _terser.default.minify(sourceText, option);
  if (error) {
    logger.padLog(`[minifyWithTerser] failed to minify file: ${filePath}`);
    throw error;
  }
  (0, _fs.writeFileSync)(filePath, outputText);
  const timeEnd = (0, _time.clock)();
  return {
    sizeSource: Buffer.byteLength(sourceText),
    sizeOutput: Buffer.byteLength(outputText),
    timeStart,
    timeEnd
  };
};
exports.minifyWithTerser = minifyWithTerser;
// Minify each file in `fileList` in place, accumulating size/time totals and
// logging a padded result table (per-file rows only when __DEV_VERBOSE__).
// Returns the total byte-size delta (negative when files shrank).
exports.minifyFileListWithTerser = (async ({fileList, option, rootPath = "", logger}) => {
logger.padLog(`minify ${fileList.length} file with terser`);
const table = [];
let totalTimeStart = (0, _time.clock)();
let totalSizeSource = 0;
let totalSizeDelta = 0;
for (const filePath of fileList) {
const {sizeSource, sizeOutput, timeStart, timeEnd} = minifyWithTerser({
filePath,
option,
logger
});
const sizeDelta = sizeOutput - sizeSource;
totalSizeSource += sizeSource;
totalSizeDelta += sizeDelta;
_main.__VERBOSE__ && table.push([ `∆ ${(100 * sizeDelta / sizeSource).toFixed(2)}% (${(0, _format.binary)(sizeDelta)}B)`, (0, _format.time)(timeEnd - timeStart), `${(0, _path.relative)(rootPath, filePath)}` ]);
}
// separator row before the totals row (verbose mode only)
_main.__VERBOSE__ && table.push([ "--", "--", "--" ]);
table.push([ `∆ ${(100 * totalSizeDelta / totalSizeSource).toFixed(2)}% (${(0, _format.binary)(totalSizeDelta)}B)`, (0, _format.time)((0, _time.clock)() - totalTimeStart), `TOTAL of ${fileList.length} file (${(0, _format.binary)(totalSizeSource)}B)` ]);
logger.log(`result:\n  ${(0, _format.padTable)({
table,
padFuncList: [ "L", "R", "L" ],
cellPad: " | ",
rowPad: "\n  "
})}`);
return totalSizeDelta;
});
var e=require("path")
var t=require("fs")
var r=(i=require("terser"))&&i.__esModule?i:{default:i}
var i
var o=require("dr-js/library/common/time")
var s=require("dr-js/library/common/format")
var n=require("./main")
// Minified build of the minify module (readable version above).
// getTerserOption: assemble the terser.minify option object.
exports.getTerserOption=(({isDevelopment:e=!1,isModule:t=!1}={})=>{return{ecma:8,toplevel:!0,parse:{ecma:8},compress:{ecma:8,toplevel:!0,join_vars:!1,sequences:!1,global_defs:{"process.env.NODE_ENV":e?"development":"production",__DEV__:Boolean(e)}},mangle:!t&&{toplevel:!0},output:t?{ecma:8,beautify:!0,indent_level:2,width:240}:{ecma:8,beautify:!1,semicolons:!1},sourceMap:!1}})
// l = minifyWithTerser: minify one file in place, return sizes + clock marks.
const l=({filePath:e,option:i,logger:s})=>{const n=(0,o.clock)()
const l=(0,t.readFileSync)(e,{encoding:"utf8"})
const{error:a,code:c}=r.default.minify(l,i)
if(a){s.padLog(`[minifyWithTerser] failed to minify file: ${e}`)
throw a}(0,t.writeFileSync)(e,c)
const u=(0,o.clock)()
return{sizeSource:Buffer.byteLength(l),sizeOutput:Buffer.byteLength(c),timeStart:n,timeEnd:u}}
exports.minifyWithTerser=l
// minifyFileListWithTerser: loop the list, accumulate totals, log a table.
exports.minifyFileListWithTerser=(async({fileList:t,option:r,rootPath:i="",logger:a})=>{a.padLog(`minify ${t.length} file with terser`)
const c=[]
let u=(0,o.clock)()
let p=0
let f=0
for(const o of t){const{sizeSource:t,sizeOutput:u,timeStart:m,timeEnd:d}=l({filePath:o,option:r,logger:a})
const y=u-t
p+=t
f+=y
n.__VERBOSE__&&c.push([`∆ ${(100*y/t).toFixed(2)}% (${(0,s.binary)(y)}B)`,(0,s.time)(d-m),`${(0,e.relative)(i,o)}`])}n.__VERBOSE__&&c.push(["--","--","--"])
c.push([`∆ ${(100*f/p).toFixed(2)}% (${(0,s.binary)(f)}B)`,(0,s.time)((0,o.clock)()-u),`TOTAL of ${t.length} file (${(0,s.binary)(p)}B)`])
a.log(`result:\n  ${(0,s.padTable)({table:c,padFuncList:["L","R","L"],cellPad:" | ",rowPad:"\n  "})}`)
return f})

@@ -1,92 +0,46 @@

var _path = require("path");
var _fs = require("fs");
var _webpack = (obj = require("webpack")) && obj.__esModule ? obj : {
default: obj
};
var obj;
var _format = require("dr-js/library/common/format");
var _File = require("dr-js/library/node/file/File");
var _ExitListener = require("dr-js/library/node/system/ExitListener");
var _main = require("./main");
// Wrap webpack's (error, stats) callback: route hard errors to onError,
// print stats errors/warnings to the console, and treat stats-level errors
// as failures too; otherwise hand the stats to onStats.
const getStatsCheck = (onError, onStats) => (error, statsData) => {
  if (error) return onError(error);
  if (statsData.hasErrors() || statsData.hasWarnings()) {
    const {errors = [], warnings = []} = statsData.toJson();
    for (const message of errors) console.error(message);
    for (const message of warnings) console.warn(message);
    if (statsData.hasErrors()) return onError(new Error("webpack stats Error"));
  }
  onStats(statsData);
};
// Join the truthy tags with commas; falsy tags are dropped.
const joinTag = (...tagList) => tagList.filter(tag => Boolean(tag)).join(",");
// Run (or watch) a webpack compile for `config`, logging per-asset stats.
// One-shot mode optionally dumps the stats profile and asset map to files;
// watch mode keeps recompiling and logs on process exit.
exports.compileWithWebpack = (async ({config, isWatch, profileOutput, assetMapOutput, logger}) => {
const {log} = logger;
if (profileOutput) {
isWatch && console.warn("[watch] warning: skipped generate profileOutput");
config.profile = !0;
}
const compiler = (0, _webpack.default)(config);
// Pretty-print one stats object, or each entry of a multi-config stats list.
const logStats = ((isWatch, {padLog, log}) => {
const logSingleStats = ({compilation: {assets = {}, chunks = []}, startTime, endTime}) => {
startTime && endTime && padLog(`[${isWatch ? "watch" : "compile"}] time: ${(0, _format.time)(endTime - startTime)}`);
const table = [];
Object.entries(assets).forEach(([name, sourceInfo]) => table.push([ "asset", name, `${(0, _format.binary)(sourceInfo.size())}B`, joinTag(sourceInfo.emitted && "emitted") ]));
// chunk rows only when __DEV_VERBOSE__ is set
_main.__VERBOSE__ && chunks.forEach(chunk => table.push([ "chunk", chunk.name || chunk.id, `${(0, _format.binary)(chunk.modulesSize())}B`, joinTag(chunk.canBeInitial() && "initial", chunk.hasRuntime() && "entry", chunk.rendered && "rendered") ]));
log(`output:\n  ${(0, _format.padTable)({
table,
padFuncList: [ "L", "R", "R", "L" ],
cellPad: " | ",
rowPad: "\n  "
})}`);
};
return statsData => {
if (statsData.compilation) return logSingleStats(statsData);
if (statsData.stats) return statsData.stats.map(logSingleStats);
console.warn("[getLogStats] unexpected statData", statsData);
throw new Error("[getLogStats] unexpected statData");
};
})(isWatch, logger);
if (!isWatch) {
log("[compile] start");
const statsData = await new Promise((resolve, reject) => compiler.run(getStatsCheck(reject, resolve)));
logStats(statsData);
// statsData.toJson() is computed lazily, once, shared by both dumps below
let statsDataObject;
const getStatsDataObject = () => {
void 0 === statsDataObject && (statsDataObject = statsData.toJson());
return statsDataObject;
};
profileOutput && (0, _fs.writeFileSync)(profileOutput, JSON.stringify(getStatsDataObject()));
profileOutput && log(`[compile] generated profileOutput at: ${profileOutput}`);
assetMapOutput && (0, _fs.writeFileSync)(assetMapOutput, JSON.stringify(getStatsDataObject().assetsByChunkName || {}));
assetMapOutput && log(`[compile] generated assetMapOutput at: ${assetMapOutput}`);
return statsData;
}
log("[watch] start");
compiler.watch({
aggregateTimeout: 512,
poll: void 0
}, getStatsCheck(error => log(`error: ${error}`), logStats));
(0, _ExitListener.addExitListenerSync)(exitState => log(`[watch] exit with state: ${JSON.stringify(exitState)}`));
});
exports.commonFlag = (async ({argvFlag, fromRoot, profileOutput = fromRoot(".temp-gitignore/profile-stat.json"), assetMapOutput = "", logger: {log}}) => {
const mode = argvFlag("development", "production") || "production";
const isWatch = Boolean(argvFlag("watch"));
const isProduction = "production" === mode;
argvFlag("profile") || (profileOutput = null);
profileOutput && await (0, _File.createDirectory)((0, _path.dirname)(profileOutput));
assetMapOutput && await (0, _File.createDirectory)((0, _path.dirname)(assetMapOutput));
log(`compile flag: ${JSON.stringify({
mode,
isWatch,
isProduction,
profileOutput,
assetMapOutput
}, null, " ")}`);
return {
mode,
isWatch,
isProduction,
profileOutput,
assetMapOutput
};
});
var e=require("path")
var t=require("fs")
var r=(o=require("webpack"))&&o.__esModule?o:{default:o}
var o
var a=require("dr-js/library/common/format")
var i=require("dr-js/library/node/file/File")
var n=require("dr-js/library/node/system/ExitListener")
var s=require("./main")
// Minified build of the webpack module (readable version above).
// c = getStatsCheck: wrap webpack's (error, stats) callback, routing errors.
const c=(e,t)=>(r,o)=>{if(r)return e(r)
if(o.hasErrors()||o.hasWarnings()){const{errors:t=[],warnings:r=[]}=o.toJson()
t.forEach(e=>console.error(e))
r.forEach(e=>console.warn(e))
if(o.hasErrors())return e(new Error("webpack stats Error"))}t(o)}
// u = joinTag: comma-join the truthy tags.
const u=(...e)=>e.filter(Boolean).join(",")
// compileWithWebpack: one-shot or watch compile with stats table logging.
exports.compileWithWebpack=(async({config:e,isWatch:o,profileOutput:i,assetMapOutput:p,logger:l})=>{const{log:d}=l
if(i){o&&console.warn("[watch] warning: skipped generate profileOutput")
e.profile=!0}const m=(0,r.default)(e)
const f=((e,{padLog:t,log:r})=>{const o=({compilation:{assets:o={},chunks:i=[]},startTime:n,endTime:c})=>{n&&c&&t(`[${e?"watch":"compile"}] time: ${(0,a.time)(c-n)}`)
const p=[]
Object.entries(o).forEach(([e,t])=>p.push(["asset",e,`${(0,a.binary)(t.size())}B`,u(t.emitted&&"emitted")]))
s.__VERBOSE__&&i.forEach(e=>p.push(["chunk",e.name||e.id,`${(0,a.binary)(e.modulesSize())}B`,u(e.canBeInitial()&&"initial",e.hasRuntime()&&"entry",e.rendered&&"rendered")]))
r(`output:\n  ${(0,a.padTable)({table:p,padFuncList:["L","R","R","L"],cellPad:" | ",rowPad:"\n  "})}`)}
return e=>{if(e.compilation)return o(e)
if(e.stats)return e.stats.map(o)
console.warn("[getLogStats] unexpected statData",e)
throw new Error("[getLogStats] unexpected statData")}})(o,l)
if(!o){d("[compile] start")
const e=await new Promise((e,t)=>m.run(c(t,e)))
f(e)
let r
const o=()=>{void 0===r&&(r=e.toJson())
return r}
i&&(0,t.writeFileSync)(i,JSON.stringify(o()))
i&&d(`[compile] generated profileOutput at: ${i}`)
p&&(0,t.writeFileSync)(p,JSON.stringify(o().assetsByChunkName||{}))
p&&d(`[compile] generated assetMapOutput at: ${p}`)
return e}d("[watch] start")
m.watch({aggregateTimeout:512,poll:void 0},c(e=>d(`error: ${e}`),f));(0,n.addExitListenerSync)(e=>d(`[watch] exit with state: ${JSON.stringify(e)}`))})
// commonFlag: parse shared mode/watch/profile flags, pre-create output dirs.
exports.commonFlag=(async({argvFlag:t,fromRoot:r,profileOutput:o=r(".temp-gitignore/profile-stat.json"),assetMapOutput:a="",logger:{log:n}})=>{const s=t("development","production")||"production"
const c=Boolean(t("watch"))
const u="production"===s
t("profile")||(o=null)
o&&await(0,i.createDirectory)((0,e.dirname)(o))
a&&await(0,i.createDirectory)((0,e.dirname)(a))
n(`compile flag: ${JSON.stringify({mode:s,isWatch:c,isProduction:u,profileOutput:o,assetMapOutput:a},null,"  ")}`)
return{mode:s,isWatch:c,isProduction:u,profileOutput:o,assetMapOutput:a}})

@@ -1,1 +0,1 @@

{"name":"dev-dep-tool","version":"0.4.1","author":"dr-js","license":"MIT","description":"Provide common package devDependencies","keywords":["Dr","Dr-js","JavaScript"],"repository":"github:dr-js/dev-dep","bin":"bin/index.js","engines":{"node":"^8.11 || ^9.11 || >=10.9","npm":">=6"},"dependencies":{"dr-js":"^0.17.0 || ^0.17.1-dev.0"},"sideEffects":false}
{"name":"dev-dep-tool","version":"0.4.2-dev.0","description":"Provide common package devDependencies","author":"dr-js","license":"MIT","keywords":["Dr","Dr-js","JavaScript"],"repository":"github:dr-js/dev-dep","bin":"bin/index.js","engines":{"node":"^8.11 || ^9.11 || >=10.9","npm":">=6"},"dependencies":{"dr-js":"0.18.0-dev.1"},"sideEffects":false}
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc