dev-dep-tool (Advanced tools)
Package diff: comparing version 0.3.3-dev.1 to 0.4.0-dev.0
@@ -1,24 +0,48 @@ | ||
var e=require("path") | ||
var r=require("fs") | ||
var t=require("dr-js/library/common/mutable/Object") | ||
var c=require("dr-js/library/node/file/File") | ||
var n=require("dr-js/library/node/file/Directory") | ||
const a=(t,c,n)=>{const a=(0,e.relative)(t,c) | ||
const{dependencies:o,devDependencies:i,peerDependencies:s,optionalDependencies:p}=JSON.parse((0,r.readFileSync)(c,"utf8")) | ||
o&&n(o,a) | ||
i&&n(i,a) | ||
s&&n(s,a) | ||
p&&n(p,a)} | ||
exports.collectDependency=(async e=>{const{collect:r,getResult:o}=(()=>{let e={} | ||
let r={} | ||
return{collect:(t,c)=>Object.entries(t).forEach(([t,n])=>{if(e[t])return console.warn(`[collect] dropped duplicate package: ${t} at ${c} with version: ${n}, checking: ${e[t].version}`) | ||
e[t]={name:t,version:n,source:c} | ||
r[t]=n}),getResult:()=>{(0,t.objectSortKey)(r) | ||
const c={packageInfoMap:e,dependencyMap:r} | ||
e={} | ||
r={} | ||
return c}}})() | ||
const i=(await(0,c.getPathStat)(e)).isDirectory() | ||
i?(await(0,n.getFileList)(e)).filter(e=>e.endsWith("package.json")).forEach(t=>a(e,t,r)):a(e,e,r) | ||
const{packageInfoMap:s,dependencyMap:p}=o() | ||
return{isDirectory:i,packageInfoMap:s,dependencyMap:p}}) | ||
var _path = require("path"); | ||
var _fs = require("fs"); | ||
var _Object = require("dr-js/library/common/mutable/Object"); | ||
var _File = require("dr-js/library/node/file/File"); | ||
var _Directory = require("dr-js/library/node/file/Directory"); | ||
// Read one `package.json` file and feed every dependency map it declares
// (dependencies / devDependencies / peerDependencies / optionalDependencies)
// into the `collect` callback, tagged with the file path relative to `pathInput`.
const loadPackage = (pathInput, path, collect) => {
  const packageSource = (0, _path.relative)(pathInput, path);
  const packageJSON = JSON.parse((0, _fs.readFileSync)(path, "utf8"));
  for (const key of [ "dependencies", "devDependencies", "peerDependencies", "optionalDependencies" ]) {
    const dependencyObject = packageJSON[ key ];
    if (dependencyObject) collect(dependencyObject, packageSource);
  }
};
exports.collectDependency = (async pathInput => { | ||
const {collect, getResult} = (() => { | ||
let packageInfoMap = {}; | ||
let dependencyMap = {}; | ||
return { | ||
collect: (dependencyObject, source) => Object.entries(dependencyObject).forEach(([name, version]) => { | ||
if (packageInfoMap[name]) return console.warn(`[collect] dropped duplicate package: ${name} at ${source} with version: ${version}, checking: ${packageInfoMap[name].version}`); | ||
packageInfoMap[name] = { | ||
name, | ||
version, | ||
source | ||
}; | ||
dependencyMap[name] = version; | ||
}), | ||
getResult: () => { | ||
(0, _Object.objectSortKey)(dependencyMap); | ||
const result = { | ||
packageInfoMap, | ||
dependencyMap | ||
}; | ||
packageInfoMap = {}; | ||
dependencyMap = {}; | ||
return result; | ||
} | ||
}; | ||
})(); | ||
const isDirectory = (await (0, _File.getPathStat)(pathInput)).isDirectory(); | ||
isDirectory ? (await (0, _Directory.getFileList)(pathInput)).filter(path => path.endsWith("package.json")).forEach(path => loadPackage(pathInput, path, collect)) : loadPackage(pathInput, pathInput, collect); | ||
const {packageInfoMap, dependencyMap} = getResult(); | ||
return { | ||
isDirectory, | ||
packageInfoMap, | ||
dependencyMap | ||
}; | ||
}); |
@@ -1,10 +0,16 @@ | ||
var e=require("path") | ||
var t=require("fs") | ||
var r=require("dr-js/library/node/file/Modify") | ||
var a=require("./collectDependency") | ||
var c=require("./logResult") | ||
exports.doCheckOutdated=(async({pathInput:o,pathTemp:i=(0,e.resolve)(o,"check-outdated-gitignore")})=>{console.log(`[checkOutdated] checking '${o}'`) | ||
const{isDirectory:d,packageInfoMap:n,dependencyMap:s}=await(0,a.collectDependency)(o) | ||
const l=d?await(0,r.withTempDirectory)(i,async()=>{(0,t.writeFileSync)((0,e.resolve)(i,"package.json"),JSON.stringify({dependencies:s})) | ||
return(0,c.logCheckOutdatedResult)(n,i)}):await(0,c.logCheckOutdatedResult)(n,(0,e.dirname)(o)) | ||
process.exit(l)}) | ||
var _path = require("path"); | ||
var _fs = require("fs"); | ||
var _Modify = require("dr-js/library/node/file/Modify"); | ||
var _collectDependency = require("./collectDependency"); | ||
var _logResult = require("./logResult"); | ||
exports.doCheckOutdated = (async ({pathInput, pathTemp = (0, _path.resolve)(pathInput, "check-outdated-gitignore")}) => { | ||
console.log(`[checkOutdated] checking '${pathInput}'`); | ||
const {isDirectory, packageInfoMap, dependencyMap} = await (0, _collectDependency.collectDependency)(pathInput); | ||
const outdatedCount = isDirectory ? await (0, _Modify.withTempDirectory)(pathTemp, async () => { | ||
(0, _fs.writeFileSync)((0, _path.resolve)(pathTemp, "package.json"), JSON.stringify({ | ||
dependencies: dependencyMap | ||
})); | ||
return (0, _logResult.logCheckOutdatedResult)(packageInfoMap, pathTemp); | ||
}) : await (0, _logResult.logCheckOutdatedResult)(packageInfoMap, (0, _path.dirname)(pathInput)); | ||
process.exit(outdatedCount); | ||
}); |
@@ -1,29 +0,53 @@ | ||
var e=require("dr-js/library/common/format") | ||
var t=require("dr-js/library/common/function") | ||
var o=require("dr-js/library/common/module/SemVer") | ||
var r=require("dr-js/library/node/system/Run") | ||
const s=/\033\[[0-9;]*[a-zA-Z]/g | ||
const n=/(\S+)\s+\S+\s+(\S+)\s+(\S+)/ | ||
const c=([e,,,t],[o,,,r])=>t!==r?t.localeCompare(r):e.localeCompare(o) | ||
const a=t=>(0,e.padTable)({table:t,cellPad:" | ",padFuncList:["R","L","L","L"]}) | ||
exports.logCheckOutdatedResult=(async(e,i)=>(async(e,t)=>{const r=[] | ||
const i=[] | ||
t.split("\n").forEach(t=>{const[,c,a,l]=n.exec(t.replace(s,""))||[] | ||
if(!e[c])return | ||
const{version:u,source:d}=e[c] | ||
const m=(0,o.compareSemVer)(a,l)<=0?l:a | ||
const p=[c,u,m,d] | ||
u.endsWith(m)?r.push(p):i.push(p)}) | ||
const l=r.length+i.length | ||
r.sort(c) | ||
r.length&&console.log(`SAME[${r.length}/${l}]:\n${a(r)}`) | ||
i.sort(c) | ||
i.length&&console.error(`OUTDATED[${i.length}/${l}]:\n${a(i)}`) | ||
return i.length})(e,await(async e=>{const{promise:o,subProcess:s,stdoutBufferPromise:n}=(0,r.runQuiet)({command:"npm",argList:["outdated"],option:{cwd:e}}) | ||
const{promise:c,resolve:a,reject:i}=(0,t.createInsideOutPromise)() | ||
o.then(a,a) | ||
setTimeout(i,42e3) | ||
const{code:l,signal:u}=await c.catch(async()=>{console.warn("[checkNpmOutdated] timeout") | ||
"win32"===process.platform?(0,r.runSync)({command:"taskkill",argList:["-F","-T","-PID",s.pid],option:{stdio:"ignore"}}):s.kill() | ||
throw new Error("[checkNpmOutdated] timeout")}) | ||
return(await n).toString()})(i))) | ||
var _format = require("dr-js/library/common/format"); | ||
var _function = require("dr-js/library/common/function"); | ||
var _SemVer = require("dr-js/library/common/module/SemVer"); | ||
var _Run = require("dr-js/library/node/system/Run"); | ||
// Matches ANSI escape sequences (colors/cursor control) so they can be
// stripped from `npm outdated` output before parsing.
// FIX: use `\x1b` for ESC — the legacy octal escape `\033` is a deprecated
// Annex B regex feature and a SyntaxError under the `u` flag.
const REGEXP_ANSI_ESCAPE_CODE = /\x1b\[[0-9;]*[a-zA-Z]/g;
// Captures [ , name, versionWanted, versionLatest ] from an `npm outdated`
// table row (the second column, "current", is skipped).
const REGEXP_NPM_OUTDATED_OUTPUT = /(\S+)\s+\S+\s+(\S+)\s+(\S+)/;
// Sort table rows by source package.json path first, then by package name.
const sortTableRow = ([nameA, , , sourceA], [nameB, , , sourceB]) => sourceA !== sourceB ? sourceA.localeCompare(sourceB) : nameA.localeCompare(nameB);
// Render a 4-column table with " | " cell separators; padFuncList supplies
// the per-column pad function for dr-js padTable.
const formatPadTable = table => (0, _format.padTable)({
  table,
  cellPad: " | ",
  padFuncList: [ "R", "L", "L", "L" ]
});
// Run `npm outdated` under `pathPackage`, parse its table output, and log two
// sorted tables: "SAME" (the pinned version already ends with the target
// version) and "OUTDATED" (an update is available).
// Resolves to the outdated count, which the caller uses as the exit code.
exports.logCheckOutdatedResult = (async (packageInfoMap, pathPackage) => (async (packageInfoMap, npmOutdatedOutputString) => {
  const sameTable = [];
  const outdatedTable = [];
  npmOutdatedOutputString.split("\n").forEach(outputLine => {
    // strip ANSI color codes, then pull [ name, wanted, latest ] from the row
    const [, name, versionWanted, versionLatest] = REGEXP_NPM_OUTDATED_OUTPUT.exec(outputLine.replace(REGEXP_ANSI_ESCAPE_CODE, "")) || [];
    // skip header lines / packages that were not collected
    if (!packageInfoMap[name]) return;
    const {version, source} = packageInfoMap[name];
    // pick the greater of wanted/latest as the upgrade target
    // (assumes compareSemVer(a, b) <= 0 means a <= b — dr-js SemVer contract)
    const versionTarget = (0, _SemVer.compareSemVer)(versionWanted, versionLatest) <= 0 ? versionLatest : versionWanted;
    const rowList = [ name, version, versionTarget, source ];
    // `endsWith` so range prefixes like "^" or "~" in the pinned version still match
    version.endsWith(versionTarget) ? sameTable.push(rowList) : outdatedTable.push(rowList);
  });
  const total = sameTable.length + outdatedTable.length;
  sameTable.sort(sortTableRow);
  sameTable.length && console.log(`SAME[${sameTable.length}/${total}]:\n${formatPadTable(sameTable)}`);
  outdatedTable.sort(sortTableRow);
  outdatedTable.length && console.error(`OUTDATED[${outdatedTable.length}/${total}]:\n${formatPadTable(outdatedTable)}`);
  return outdatedTable.length;
})(packageInfoMap, await (async pathPackage => {
  // spawn `npm outdated` quietly and capture stdout; abort after 42s
  const {promise: runPromise, subProcess, stdoutBufferPromise} = (0, _Run.runQuiet)({
    command: "npm",
    argList: [ "--no-update-notifier", "outdated" ],
    option: {
      cwd: pathPackage
    }
  });
  const {promise, resolve, reject} = (0, _function.createInsideOutPromise)();
  // settle on either success or failure of the npm run; the timeout below rejects
  runPromise.then(resolve, resolve);
  setTimeout(reject, 42e3);
  const {code, signal} = await promise.catch(async () => {
    console.warn("[checkNpmOutdated] timeout");
    // on Windows a plain kill() leaves child processes alive, so use taskkill -T
    "win32" === process.platform ? (0, _Run.runSync)({
      command: "taskkill",
      argList: [ "-F", "-T", "-PID", subProcess.pid ],
      option: {
        stdio: "ignore"
      }
    }) : subProcess.kill();
    throw new Error("[checkNpmOutdated] timeout");
  });
  return (await stdoutBufferPromise).toString();
})(pathPackage)));
#!/usr/bin/env node | ||
var t=require("./option") | ||
var e=require("./checkOutdated") | ||
var a=require("./pack") | ||
var i=require("../package.json");(async()=>{const{getSingleOption:o,getSingleOptionOptional:p}=await(0,t.parseOption)() | ||
const n=p("check-outdated") | ||
const s=p("pack") | ||
n||s?await(async({isCheckOutdated:t,isPack:i},{getSingleOption:o,getSingleOptionOptional:p})=>{t&&await(0,e.doCheckOutdated)({pathInput:o("path-input"),pathTemp:p("path-temp")}) | ||
i&&await(0,a.doPack)({pathInput:o("path-input"),pathOutput:o("path-output"),outputName:p("output-name"),outputVersion:p("output-version"),outputDescription:p("output-description"),isPublish:p("publish"),isPublishDev:p("publish-dev")})})({isCheckOutdated:n,isPack:s},{getSingleOption:o,getSingleOptionOptional:p}).catch(t=>{console.warn("[Error]",t.stack||t) | ||
process.exit(2)}):p("version")?console.log(JSON.stringify({packageName:i.name,packageVersion:i.version},null," ")):console.log((0,t.formatUsage)(null,p("help")?null:"simple"))})().catch(e=>{console.warn((0,t.formatUsage)(e.stack||e,"simple")) | ||
process.exit(1)}) | ||
var _option = require("./option"); | ||
var _checkOutdated = require("./checkOutdated"); | ||
var _pack = require("./pack"); | ||
var _package = require("../package.json"); | ||
// CLI entry point: parse options, then either run the selected mode(s)
// (`check-outdated` and/or `pack`) or print version/usage information.
(async () => {
  const {getSingleOption, getSingleOptionOptional} = await (0, _option.parseOption)();
  const isCheckOutdated = getSingleOptionOptional("check-outdated");
  const isPack = getSingleOptionOptional("pack");
  // no mode flag: `--version` prints package name/version as JSON, otherwise
  // print usage (`--help` gets the full text, anything else the "simple" variant)
  if (!isCheckOutdated && !isPack) return getSingleOptionOptional("version") ? console.log(JSON.stringify({
    packageName: _package.name,
    packageVersion: _package.version
  }, null, " ")) : console.log((0, _option.formatUsage)(null, getSingleOptionOptional("help") ? null : "simple"));
  await (async ({isCheckOutdated, isPack}, {getSingleOption, getSingleOptionOptional}) => {
    // both modes may run in one invocation: check-outdated first, then pack
    isCheckOutdated && await (0, _checkOutdated.doCheckOutdated)({
      pathInput: getSingleOption("path-input"),
      pathTemp: getSingleOptionOptional("path-temp")
    });
    isPack && await (0, _pack.doPack)({
      pathInput: getSingleOption("path-input"),
      pathOutput: getSingleOption("path-output"),
      outputName: getSingleOptionOptional("output-name"),
      outputVersion: getSingleOptionOptional("output-version"),
      outputDescription: getSingleOptionOptional("output-description"),
      isPublish: getSingleOptionOptional("publish"),
      isPublishDev: getSingleOptionOptional("publish-dev")
    });
  })({
    isCheckOutdated,
    isPack
  }, {
    getSingleOption,
    getSingleOptionOptional
  }).catch(error => {
    // mode execution failed: exit 2 (option-parse failures exit 1 below)
    console.warn("[Error]", error.stack || error);
    process.exit(2);
  });
})().catch(error => {
  console.warn((0, _option.formatUsage)(error.stack || error, "simple"));
  process.exit(1);
});
@@ -1,7 +0,67 @@ | ||
var e=require("dr-js/library/common/module/Option/preset") | ||
var t=require("dr-js/library/node/module/Option") | ||
const{SingleString:o,SinglePath:a,BooleanFlag:p,Config:i}=t.ConfigPresetNode | ||
const n={prefixENV:"dev-dep",prefixJSON:"dev-dep",formatList:[i,{...p,name:"help",shortName:"h"},{...p,name:"version",shortName:"v"},{...a,optional:(0,e.getOptionalFormatFlag)("check-outdated","pack"),name:"path-input",shortName:"i",description:"path to 'package.json', or directory with 'package.json' inside"},{...p,name:"check-outdated",shortName:"C",extendFormatList:[{...a,optional:!0,name:"path-temp"}]},{...p,name:"pack",shortName:"P",extendFormatList:[{...a,name:"path-output",shortName:"o",description:"output path"},{...o,optional:!0,name:"output-name",description:"output package name"},{...o,optional:!0,name:"output-version",description:"output package version"},{...o,optional:!0,name:"output-description",description:"output package description"},{...p,name:"publish",description:"run npm publish"},{...p,name:"publish-dev",description:"run npm publish-dev"}]}]} | ||
const{parseOption:r,formatUsage:s}=(0,t.prepareOption)(n) | ||
exports.formatUsage=s | ||
exports.parseOption=r | ||
var _preset = require("dr-js/library/common/module/Option/preset"); | ||
var _Option = require("dr-js/library/node/module/Option"); | ||
const {SingleString, SinglePath, BooleanFlag, Config} = _Option.ConfigPresetNode;
// Option schema: values can come from the CLI, `dev-dep-*` ENV variables, or
// a JSON config file. Each FORMAT_* const below is one top-level option.
const FORMAT_HELP = { ...BooleanFlag, name: "help", shortName: "h" };
const FORMAT_VERSION = { ...BooleanFlag, name: "version", shortName: "v" };
// `path-input` is required only when `check-outdated` or `pack` is selected
const FORMAT_PATH_INPUT = {
  ...SinglePath,
  optional: (0, _preset.getOptionalFormatFlag)("check-outdated", "pack"),
  name: "path-input",
  shortName: "i",
  description: "path to 'package.json', or directory with 'package.json' inside"
};
const FORMAT_CHECK_OUTDATED = {
  ...BooleanFlag,
  name: "check-outdated",
  shortName: "C",
  extendFormatList: [
    { ...SinglePath, optional: true, name: "path-temp" }
  ]
};
const FORMAT_PACK = {
  ...BooleanFlag,
  name: "pack",
  shortName: "P",
  extendFormatList: [
    { ...SinglePath, name: "path-output", shortName: "o", description: "output path" },
    { ...SingleString, optional: true, name: "output-name", description: "output package name" },
    { ...SingleString, optional: true, name: "output-version", description: "output package version" },
    { ...SingleString, optional: true, name: "output-description", description: "output package description" },
    { ...BooleanFlag, name: "publish", description: "run npm publish" },
    { ...BooleanFlag, name: "publish-dev", description: "run npm publish-dev" }
  ]
};
const OPTION_CONFIG = {
  prefixENV: "dev-dep",
  prefixJSON: "dev-dep",
  formatList: [ Config, FORMAT_HELP, FORMAT_VERSION, FORMAT_PATH_INPUT, FORMAT_CHECK_OUTDATED, FORMAT_PACK ]
};
const {parseOption, formatUsage} = (0, _Option.prepareOption)(OPTION_CONFIG);
exports.formatUsage = formatUsage;
exports.parseOption = parseOption;
103
bin/pack.js
@@ -1,40 +0,63 @@ | ||
var e=require("path") | ||
var i=require("fs") | ||
var r=require("child_process") | ||
var o=require("dr-js/library/common/format") | ||
var n=require("dr-js/library/common/mutable/Object") | ||
var t=require("dr-js/library/node/file/File") | ||
var s=require("dr-js/library/node/file/Modify") | ||
var a=require("dr-js/library/node/system/Run") | ||
const c=(i,r=(()=>({packageJSON:{},exportFilePairList:[],installFilePairList:[]}))(),o=new Set)=>{const t=i.endsWith(".json")?i:(0,e.join)(i,"package.json") | ||
i.endsWith(".json")&&(i=(0,e.dirname)(i)) | ||
if(o.has(t))return r | ||
o.add(t) | ||
const{IMPORT:s,EXPORT:a,INSTALL:p,...d}=require(t) | ||
const{packageJSON:u,exportFilePairList:f,installFilePairList:y}=r | ||
s&&s.forEach(n=>c((0,e.resolve)(i,n),r,o)) | ||
console.log(`[loadPackage] load: ${t}`) | ||
p&&p.forEach(e=>y.push(l(e,i))) | ||
a&&a.forEach(e=>f.push(l(e,i))) | ||
d&&(0,n.objectMergeDeep)(u,d) | ||
return r} | ||
const l=(i,r)=>"object"==typeof i?[(0,e.resolve)(r,i.from),i.to]:[(0,e.resolve)(r,i),i] | ||
const p=["dependencies","devDependencies","peerDependencies","optionalDependencies","bundledDependencies"] | ||
const d=["private","name","version","description","author","contributors","license","keywords","repository","homepage","bugs","os","cpu","engines","engineStrict","preferGlobal","main","bin","man","files","directories","scripts","config","publishConfig",...p,"sideEffects"] | ||
exports.doPack=(async({pathInput:l,pathOutput:u,outputName:f,outputVersion:y,outputDescription:m,isPublish:g,isPublishDev:b})=>{const h=(0,e.resolve)(u,"install") | ||
const{packageJSON:j,exportFilePairList:v,installFilePairList:P}=c(l) | ||
f&&(j.name=f) | ||
y&&(j.version=y) | ||
m&&(j.description=m) | ||
await s.modify.delete(u).catch(()=>{}) | ||
await(0,t.createDirectory)(u) | ||
await(0,t.createDirectory)(h) | ||
await(async(e,r)=>{p.forEach(i=>{e[i]&&(0,n.objectSortKey)(e[i])}) | ||
const t=Object.keys(e).sort((e,i)=>d.indexOf(e)-d.indexOf(i)).map(i=>(0,o.stringIndentLine)(`${JSON.stringify(i)}: ${JSON.stringify(e[i],null,2)}`)) | ||
const s=Buffer.from(`{\n${t.join(",\n")}\n}\n`);(0,i.writeFileSync)(r,s) | ||
console.log(`[writePackageJSON] ${r} [${(0,o.binary)(s.length)}B]`)})(j,(0,e.join)(u,"package.json")) | ||
for(const[i,r]of v)await s.modify.copy(i,(0,e.join)(u,r)) | ||
for(const[i,r]of P)await s.modify.copy(i,(0,e.join)(h,r));(0,r.execSync)("npm pack",{cwd:u,stdio:"inherit",shell:!0}) | ||
const O=`${j.name}-${j.version}.tgz` | ||
const S=(0,e.join)(u,O) | ||
console.log(`done pack: ${O} [${(0,o.binary)((0,i.statSync)(S).size)}B]`);(g||b)&&(0,a.runSync)({command:"npm",argList:["publish",S,"--tag",b?"dev":"latest"]})}) | ||
var _path = require("path"); | ||
var _fs = require("fs"); | ||
var _child_process = require("child_process"); | ||
var _format = require("dr-js/library/common/format"); | ||
var _Object = require("dr-js/library/common/mutable/Object"); | ||
var _File = require("dr-js/library/node/file/File"); | ||
var _Modify = require("dr-js/library/node/file/Modify"); | ||
var _Run = require("dr-js/library/node/system/Run"); | ||
// Recursively load a package file (or a directory containing `package.json`)
// and accumulate it into `packageInfo`: IMPORT entries pull in other package
// files first (depth-first), EXPORT/INSTALL entries become [from, to] copy
// pairs, and every remaining key is deep-merged into the combined packageJSON.
// `loadedSet` prevents loading the same file twice on diamond imports.
const loadPackage = (packagePath, packageInfo = {
  packageJSON: {},
  exportFilePairList: [],
  installFilePairList: []
}, loadedSet = new Set()) => {
  const packageFile = packagePath.endsWith(".json") ? packagePath : (0, _path.join)(packagePath, "package.json");
  // normalize packagePath to the package's directory for resolving entries
  packagePath.endsWith(".json") && (packagePath = (0, _path.dirname)(packagePath));
  if (loadedSet.has(packageFile)) return packageInfo;
  loadedSet.add(packageFile);
  // NOTE: `require` caches the parsed JSON — edits during one run are not re-read
  const {IMPORT: importList, EXPORT: exportList, INSTALL: installList, ...mergePackageJSON} = require(packageFile);
  const {packageJSON, exportFilePairList, installFilePairList} = packageInfo;
  // imports merge first so this file's own keys win in objectMergeDeep below
  importList && importList.forEach(importPackagePath => loadPackage((0, _path.resolve)(packagePath, importPackagePath), packageInfo, loadedSet));
  console.log(`[loadPackage] load: ${packageFile}`);
  installList && installList.forEach(filePath => installFilePairList.push(parseResourcePath(filePath, packagePath)));
  exportList && exportList.forEach(filePath => exportFilePairList.push(parseResourcePath(filePath, packagePath)));
  mergePackageJSON && (0, _Object.objectMergeDeep)(packageJSON, mergePackageJSON);
  return packageInfo;
};
// Resolve an EXPORT/INSTALL entry to a [ absoluteSourcePath, relativeTargetPath ]
// pair. An entry is either a plain relative path string (copied under the same
// name) or a { from, to } object for renaming on copy.
const parseResourcePath = (resourcePath, packagePath) => {
  if (typeof resourcePath === "object") return [ (0, _path.resolve)(packagePath, resourcePath.from), resourcePath.to ];
  return [ (0, _path.resolve)(packagePath, resourcePath), resourcePath ];
};
// dependency maps whose keys get sorted before writing the output package.json
const PACKAGE_KEY_SORT_REQUIRED = [ "dependencies", "devDependencies", "peerDependencies", "optionalDependencies", "bundledDependencies" ];
// canonical key order for the generated package.json (unknown keys sort first,
// since indexOf returns -1 for them)
const PACKAGE_KEY_ORDER = [ "private", "name", "version", "description", "author", "contributors", "license", "keywords", "repository", "homepage", "bugs", "os", "cpu", "engines", "engineStrict", "preferGlobal", "main", "bin", "man", "files", "directories", "scripts", "config", "publishConfig", ...PACKAGE_KEY_SORT_REQUIRED, "sideEffects" ];
// Build a publishable package under `pathOutput` from the merged package info
// at `pathInput`: generate package.json and a badge README.md, copy export and
// install file pairs, run `npm pack`, and optionally `npm publish` the tarball
// (tag "dev" for isPublishDev, "latest" for isPublish).
exports.doPack = (async ({pathInput, pathOutput, outputName, outputVersion, outputDescription, isPublish, isPublishDev}) => {
  const pathOutputInstall = (0, _path.resolve)(pathOutput, "install");
  const {packageJSON, exportFilePairList, installFilePairList} = loadPackage(pathInput);
  // optional overrides for the generated package.json
  outputName && (packageJSON.name = outputName);
  outputVersion && (packageJSON.version = outputVersion);
  outputDescription && (packageJSON.description = outputDescription);
  // best-effort reset of the output directory (ignore "not found")
  await _Modify.modify.delete(pathOutput).catch(() => {});
  await (0, _File.createDirectory)(pathOutput);
  await (0, _File.createDirectory)(pathOutputInstall);
  // write package.json with sorted dependency maps and conventional key order
  await (async (packageJSON, path) => {
    PACKAGE_KEY_SORT_REQUIRED.forEach(key => {
      packageJSON[key] && (0, _Object.objectSortKey)(packageJSON[key]);
    });
    const jsonFileStringList = Object.keys(packageJSON).sort((a, b) => PACKAGE_KEY_ORDER.indexOf(a) - PACKAGE_KEY_ORDER.indexOf(b)).map(key => (0, _format.stringIndentLine)(`${JSON.stringify(key)}: ${JSON.stringify(packageJSON[key], null, 2)}`));
    const packageBuffer = Buffer.from(`{\n${jsonFileStringList.join(",\n")}\n}\n`);
    (0, _fs.writeFileSync)(path, packageBuffer);
    console.log(`[writePackageJSON] ${path} [${(0, _format.binary)(packageBuffer.length)}B]`);
  })(packageJSON, (0, _path.join)(pathOutput, "package.json"));
  // generate a minimal README with npm version and install-size badges
  (0, _fs.writeFileSync)((0, _path.join)(pathOutput, "README.md"), [ `# ${packageJSON.name}\n`, "[![i:npm]][l:npm]", "[![i:size]][l:size]", "", `${packageJSON.description}`, "", `[i:npm]: https://img.shields.io/npm/v/${packageJSON.name}.svg?colorB=blue`, `[l:npm]: https://www.npmjs.com/package/${packageJSON.name}`, `[i:size]: https://packagephobia.now.sh/badge?p=${packageJSON.name}`, `[l:size]: https://packagephobia.now.sh/result?p=${packageJSON.name}` ].join("\n"));
  // copy collected files: exports into the package root, installs into install/
  for (const [source, targetRelative] of exportFilePairList) await _Modify.modify.copy(source, (0, _path.join)(pathOutput, targetRelative));
  for (const [source, targetRelative] of installFilePairList) await _Modify.modify.copy(source, (0, _path.join)(pathOutputInstall, targetRelative));
  (0, _child_process.execSync)("npm --no-update-notifier pack", {
    cwd: pathOutput,
    stdio: "inherit",
    shell: !0
  });
  const outputFileName = `${packageJSON.name}-${packageJSON.version}.tgz`;
  const outputFilePath = (0, _path.join)(pathOutput, outputFileName);
  console.log(`done pack: ${outputFileName} [${(0, _format.binary)((0, _fs.statSync)(outputFilePath).size)}B]`);
  (isPublish || isPublishDev) && (0, _Run.runSync)({
    command: "npm",
    argList: [ "publish", outputFilePath, "--tag", isPublishDev ? "dev" : "latest" ]
  });
});
@@ -1,46 +0,86 @@ | ||
var e=require("assert") | ||
var t=require("child_process") | ||
var i=require("fs") | ||
var r=require("dr-js/library/common/format") | ||
var o=require("dr-js/library/node/file/File") | ||
var s=require("dr-js/library/node/file/Directory") | ||
var n=require("dr-js/library/node/system/Run") | ||
var a=require("dr-js/library/node/file/Modify") | ||
var u=require("./__utils__") | ||
exports.initOutput=(async({fromRoot:e,fromOutput:t,deleteKeyList:r=["private","scripts","devDependencies"],copyPathList:s=["LICENSE","README.md"],logger:{padLog:n,log:u}})=>{n("reset output") | ||
await a.modify.delete(t()).catch(()=>{}) | ||
await(0,o.createDirectory)(t()) | ||
n("init output package.json") | ||
const p=require(e("package.json")) | ||
for(const e of r){delete p[e] | ||
u(`dropped key: ${e}`)}(0,i.writeFileSync)(t("package.json"),JSON.stringify(p)) | ||
n("init output file") | ||
for(const r of s)if("README.md"===r){(0,i.writeFileSync)(t(r),(0,i.readFileSync)(e(r)).toString().split("[//]: # (NON_PACKAGE_CONTENT)")[0].trim()) | ||
u(`copied: ${r} (with NON_PACKAGE_CONTENT trimmed)`)}else{await a.modify.copy(e(r),t(r)) | ||
u(`copied: ${r}`)}return p}) | ||
exports.packOutput=(async({fromRoot:e,fromOutput:o,logger:{padLog:s,log:n}})=>{s("run pack output");(0,t.execSync)("npm pack",{cwd:o(),stdio:u.__VERBOSE__?"inherit":["ignore","ignore"],shell:!0}) | ||
n("move to root path") | ||
const p=require(o("package.json")) | ||
const c=`${p.name.replace(/^@/,"").replace("/","-")}-${p.version}.tgz` | ||
await a.modify.move(o(c),e(c)) | ||
s(`pack size: ${(0,r.binary)((0,i.statSync)(e(c)).size)}B`) | ||
return e(c)}) | ||
exports.verifyOutputBinVersion=(async({fromOutput:i,packageJSON:r,matchStringList:o=[r.name,r.version],logger:{padLog:s,log:n}})=>{s("verify output bin working") | ||
const a=(0,t.execSync)("node bin --version",{cwd:i(),stdio:"pipe",shell:!0}).toString() | ||
n(`bin test output: ${a}`) | ||
for(const t of o)(0,e.ok)(a.includes(t),`should output contain: ${t}`)}) | ||
exports.verifyNoGitignore=(async({path:t,logger:{padLog:i}})=>{i("verify no gitignore file left") | ||
const r=(await(0,s.getFileList)(t)).filter(e=>e.includes("gitignore")) | ||
r.length&&console.error(`found gitignore file:\n - ${r.join("\n - ")}`);(0,e.ok)(!r.length,`${r.length} gitignore file found`)}) | ||
const p=e=>{const t=e.includes("publish-dev") | ||
return{isPublish:t||e.includes("publish"),isDev:t}} | ||
exports.getPublishFlag=p | ||
const c=({isDev:e,version:t})=>e?d.test(t):l.test(t) | ||
exports.checkPublishVersion=c | ||
const l=/^\d+\.\d+\.\d+$/ | ||
const d=/^\d+\.\d+\.\d+-dev\.\d+$/ | ||
exports.publishOutput=(async({flagList:e,packageJSON:t,pathPackagePack:i,extraArgs:r=[],logger:o})=>{const{isPublish:s,isDev:a}=p(e) | ||
if(!s)return o.padLog("skipped publish output, no flag found") | ||
if(!i||!i.endsWith(".tgz"))throw new Error(`[publishOutput] invalid pathPackagePack: ${i}`) | ||
if(!c({isDev:a,version:t.version}))throw new Error(`[publishOutput] invalid version: ${t.version}, isDev: ${a}`) | ||
o.padLog(`${a?"publish-dev":"publish"}: ${t.version}`);(0,n.runSync)({command:"npm",argList:["publish",i,"--tag",a?"dev":"latest",...r]})}) | ||
var _assert = require("assert"); | ||
var _child_process = require("child_process"); | ||
var _fs = require("fs"); | ||
var _format = require("dr-js/library/common/format"); | ||
var _File = require("dr-js/library/node/file/File"); | ||
var _Directory = require("dr-js/library/node/file/Directory"); | ||
var _Run = require("dr-js/library/node/system/Run"); | ||
var _Modify = require("dr-js/library/node/file/Modify"); | ||
var _main = require("./main"); | ||
// Prepare a clean output build directory: reset it, write a trimmed
// package.json (keys in `deleteKeyList` removed), and copy files from
// `copyPathList` — README.md is truncated at the NON_PACKAGE_CONTENT marker.
// Returns the trimmed packageJSON object.
exports.initOutput = (async ({fromRoot, fromOutput, deleteKeyList = [ "private", "scripts", "devDependencies" ], copyPathList = [ "LICENSE", "README.md" ], logger: {padLog, log}}) => {
  padLog("reset output");
  // ignore delete failure (output dir may not exist yet)
  await _Modify.modify.delete(fromOutput()).catch(() => {});
  await (0, _File.createDirectory)(fromOutput());
  padLog("init output package.json");
  const packageJSON = require(fromRoot("package.json"));
  for (const deleteKey of deleteKeyList) {
    delete packageJSON[deleteKey];
    log(`dropped key: ${deleteKey}`);
  }
  (0, _fs.writeFileSync)(fromOutput("package.json"), JSON.stringify(packageJSON));
  padLog("init output file");
  for (const copyPath of copyPathList) if ("README.md" === copyPath) {
    // keep only the package-facing part of the README
    (0, _fs.writeFileSync)(fromOutput(copyPath), (0, _fs.readFileSync)(fromRoot(copyPath)).toString().split("[//]: # (NON_PACKAGE_CONTENT)")[0].trim());
    log(`copied: ${copyPath} (with NON_PACKAGE_CONTENT trimmed)`);
  } else {
    await _Modify.modify.copy(fromRoot(copyPath), fromOutput(copyPath));
    log(`copied: ${copyPath}`);
  }
  return packageJSON;
});
// Run `npm pack` inside the output directory and move the generated tarball
// to the project root. Returns the tarball's root-relative path.
exports.packOutput = (async ({fromRoot, fromOutput, logger: {padLog, log}}) => {
  padLog("run pack output");
  (0, _child_process.execSync)("npm --no-update-notifier pack", {
    cwd: fromOutput(),
    // only show npm output in verbose mode
    stdio: _main.__VERBOSE__ ? "inherit" : [ "ignore", "ignore" ],
    shell: !0
  });
  log("move to root path");
  const packageJSON = require(fromOutput("package.json"));
  // npm pack tarball naming: scope "@" is dropped and "/" becomes "-"
  const packName = `${packageJSON.name.replace(/^@/, "").replace("/", "-")}-${packageJSON.version}.tgz`;
  await _Modify.modify.move(fromOutput(packName), fromRoot(packName));
  padLog(`pack size: ${(0, _format.binary)((0, _fs.statSync)(fromRoot(packName)).size)}B`);
  return fromRoot(packName);
});
exports.verifyOutputBinVersion = (async ({fromOutput, packageJSON, matchStringList = [ packageJSON.name, packageJSON.version ], logger: {padLog, log}}) => { | ||
padLog("verify output bin working"); | ||
const outputBinTest = (0, _child_process.execSync)("node bin --version", { | ||
cwd: fromOutput(), | ||
stdio: "pipe", | ||
shell: !0 | ||
}).toString(); | ||
log(`bin test output: ${outputBinTest}`); | ||
for (const testString of matchStringList) (0, _assert.ok)(outputBinTest.includes(testString), `should output contain: ${testString}`); | ||
}); | ||
exports.verifyNoGitignore = (async ({path, logger: {padLog}}) => { | ||
padLog("verify no gitignore file left"); | ||
const badFileList = (await (0, _Directory.getFileList)(path)).filter(path => path.includes("gitignore")); | ||
badFileList.length && console.error(`found gitignore file:\n - ${badFileList.join("\n - ")}`); | ||
(0, _assert.ok)(!badFileList.length, `${badFileList.length} gitignore file found`); | ||
}); | ||
// Derive publish mode from a CLI flag list: "publish-dev" implies a dev
// publish, and either "publish" or "publish-dev" enables publishing at all.
const getPublishFlag = flagList => {
  const isDev = flagList.includes("publish-dev");
  const isPublish = isDev || flagList.includes("publish");
  return { isPublish, isDev };
};
exports.getPublishFlag = getPublishFlag;
// a normal publish requires a plain `x.y.z` version; a dev publish `x.y.z-dev.n`
const REGEXP_PUBLISH_VERSION = /^\d+\.\d+\.\d+$/;
const REGEXP_PUBLISH_VERSION_DEV = /^\d+\.\d+\.\d+-dev\.\d+$/;
const checkPublishVersion = ({isDev, version}) => (isDev ? REGEXP_PUBLISH_VERSION_DEV : REGEXP_PUBLISH_VERSION).test(version);
exports.checkPublishVersion = checkPublishVersion;
// Publish the packed tarball when a publish flag is present in `flagList`.
// Validates the tarball path and that the version format matches the channel
// (plain `x.y.z` for latest, `x.y.z-dev.n` for the dev tag) before running
// `npm publish`; extra npm arguments can be appended via `extraArgs`.
exports.publishOutput = (async ({flagList, packageJSON, pathPackagePack, extraArgs = [], logger}) => {
  const {isPublish, isDev} = getPublishFlag(flagList);
  // no publish flag: log and skip silently (not an error)
  if (!isPublish) return logger.padLog("skipped publish output, no flag found");
  if (!pathPackagePack || !pathPackagePack.endsWith(".tgz")) throw new Error(`[publishOutput] invalid pathPackagePack: ${pathPackagePack}`);
  if (!checkPublishVersion({
    isDev,
    version: packageJSON.version
  })) throw new Error(`[publishOutput] invalid version: ${packageJSON.version}, isDev: ${isDev}`);
  logger.padLog(`${isDev ? "publish-dev" : "publish"}: ${packageJSON.version}`);
  (0, _Run.runSync)({
    command: "npm",
    argList: [ "--no-update-notifier", "publish", pathPackagePack, "--tag", isDev ? "dev" : "latest", ...extraArgs ]
  });
});
@@ -1,6 +0,16 @@ | ||
var e=require("child_process") | ||
const t=(t,r)=>{try{return(0,e.execSync)(t,r).toString()}catch(e){console.warn(`[tryExec] failed for: ${t}, error: ${e}`) | ||
return""}} | ||
exports.tryExec=t | ||
exports.getGitBranch=(()=>t("git symbolic-ref --short HEAD",{stdio:"pipe"}).replace("\n","").trim()) | ||
exports.getGitCommitHash=(()=>t('git log -1 --format="%H"',{stdio:"pipe"}).replace("\n","").trim()) | ||
var _child_process = require("child_process"); | ||
// Run `command` synchronously via execSync and return its stdout as a string.
// A failing command is logged with a warning and yields "" instead of throwing.
const tryExec = (command, option) => {
  let outputString;
  try {
    outputString = (0, _child_process.execSync)(command, option).toString();
  } catch (error) {
    console.warn(`[tryExec] failed for: ${command}, error: ${error}`);
    outputString = "";
  }
  return outputString;
};
exports.tryExec = tryExec;
// current git branch name; "" when HEAD is detached or this is not a git repo
exports.getGitBranch = (() => tryExec("git symbolic-ref --short HEAD", {
  stdio: "pipe"
}).replace("\n", "").trim());
// full commit hash of HEAD; "" when the git command fails
exports.getGitCommitHash = (() => tryExec('git log -1 --format="%H"', {
  stdio: "pipe"
}).replace("\n", "").trim());
@@ -1,27 +0,53 @@ | ||
const t=t=>`${t.charAt(0).toUpperCase()}${t.slice(1)}` | ||
const e=t=>/[A-Z]/.test(t.charAt(0)) | ||
exports.generateIndexScript=(({sourceRouteMap:o})=>{const r={} | ||
Object.values(o).forEach(({routeList:o,directoryList:s,fileList:p})=>{const n=[] | ||
const i=[] | ||
s.forEach(e=>{const o=t(e) | ||
n.push(`import * as ${o} from './${e}'`) | ||
i.push(o)}) | ||
p.map(({name:o,exportList:r})=>{if(s.length||e(o)){const e=t(o) | ||
n.push(`import * as ${e} from './${o}'`) | ||
i.push(e)}else n.push(`export { ${r.join(", ")} } from './${o}'`)}) | ||
i.length&&n.push(`export { ${i.join(", ")} }`) | ||
r[[...o,"index.js"].join("/")]=n.join("\n")}) | ||
return r}) | ||
exports.HOIST_LIST_KEY="~hoist" | ||
exports.EXPORT_LIST_KEY="~export" | ||
exports.EXPORT_HOIST_LIST_KEY="~export-hoist" | ||
exports.generateExportInfo=(({sourceRouteMap:o})=>{const r={} | ||
const s=(...t)=>{const e=t.join("/") | ||
r[e]||(r[e]={}) | ||
return r[e]} | ||
Object.values(o).forEach(({routeList:o,directoryList:r,fileList:p})=>{const n=s(...o) | ||
r.forEach(e=>{n[t(e)]=s(...o,e)}) | ||
p.map(({name:p,exportList:i})=>{if(r.length||e(p))n[t(p)]={"~export":i} | ||
else{n[p]={"~hoist":i} | ||
n["~export-hoist"]=[...n["~export-hoist"]||[],...i]}s(...o,p)["~export"]=i})}) | ||
return r}) | ||
// capitalize the first character: `fileName` -> `FileName`
const toExportName = (name) => `${name.charAt(0).toUpperCase()}${name.slice(1)}`;
// true when the first character is an upper-case letter
const isFirstUpperCase = (name) => /[A-Z]/.test(name.charAt(0));
exports.generateIndexScript = (({sourceRouteMap}) => { | ||
const indexScriptMap = {}; | ||
Object.values(sourceRouteMap).forEach(({routeList, directoryList, fileList}) => { | ||
const textList = []; | ||
const importList = []; | ||
directoryList.forEach(name => { | ||
const exportName = toExportName(name); | ||
textList.push(`import * as ${exportName} from './${name}'`); | ||
importList.push(exportName); | ||
}); | ||
fileList.map(({name, exportList}) => { | ||
if (directoryList.length || isFirstUpperCase(name)) { | ||
const exportName = toExportName(name); | ||
textList.push(`import * as ${exportName} from './${name}'`); | ||
importList.push(exportName); | ||
} else textList.push(`export { ${exportList.join(", ")} } from './${name}'`); | ||
}); | ||
importList.length && textList.push(`export { ${importList.join(", ")} }`); | ||
indexScriptMap[[ ...routeList, "index.js" ].join("/")] = textList.join("\n"); | ||
}); | ||
return indexScriptMap; | ||
}); | ||
// marker keys used inside export-info objects ("~" prefix avoids clashing
// with real directory/file names)
exports.HOIST_LIST_KEY = "~hoist";
exports.EXPORT_LIST_KEY = "~export";
exports.EXPORT_HOIST_LIST_KEY = "~export-hoist";
exports.generateExportInfo = (({sourceRouteMap}) => { | ||
const exportInfoMap = {}; | ||
const getExportInfo = (...routeList) => { | ||
const key = routeList.join("/"); | ||
exportInfoMap[key] || (exportInfoMap[key] = {}); | ||
return exportInfoMap[key]; | ||
}; | ||
Object.values(sourceRouteMap).forEach(({routeList, directoryList, fileList}) => { | ||
const exportInfo = getExportInfo(...routeList); | ||
directoryList.forEach(name => { | ||
exportInfo[toExportName(name)] = getExportInfo(...routeList, name); | ||
}); | ||
fileList.map(({name, exportList}) => { | ||
if (directoryList.length || isFirstUpperCase(name)) exportInfo[toExportName(name)] = { | ||
"~export": exportList | ||
}; else { | ||
exportInfo[name] = { | ||
"~hoist": exportList | ||
}; | ||
exportInfo["~export-hoist"] = [ ...exportInfo["~export-hoist"] || [], ...exportList ]; | ||
} | ||
getExportInfo(...routeList, name)["~export"] = exportList; | ||
}); | ||
}); | ||
return exportInfoMap; | ||
}); |
@@ -1,31 +0,70 @@ | ||
var e=require("path") | ||
var r=require("fs") | ||
var t=require("@babel/parser") | ||
var o=require("dr-js/library/common/compare") | ||
var a=require("dr-js/library/common/mutable/Object") | ||
var i=require("dr-js/library/node/file/File") | ||
var s=require("dr-js/library/node/file/Directory") | ||
const c=({babylonPluginList:s,parserPluginList:c=s,logger:n})=>{let p={} | ||
const l=e=>{const r=e.join("/") | ||
p[r]||(p[r]={routeList:e,directoryList:[],fileList:[]}) | ||
return p[r]} | ||
return{parseExport:async o=>{const a=await(0,i.getPathStat)(o) | ||
const s=o.split(e.sep) | ||
const p=s.pop() | ||
if(a.isDirectory()){n.devLog(`[directory] ${o}`) | ||
l(s).directoryList.push(p)}else if(a.isFile()&&p.endsWith(".js")){const e=((e,r,o)=>{const a=(0,t.parse)(e,{sourceFilename:r,sourceType:"module",plugins:o||["objectRestSpread","classProperties","exportDefaultFrom","exportNamespaceFrom","jsx"]}).program.body.filter(({type:e})=>"ExportNamedDeclaration"===e) | ||
return[].concat(...a.map(({specifiers:e,declaration:r})=>r?r.declarations?r.declarations.map(({id:{name:e}})=>e):[r.id.name]:e.map(({exported:{name:e}})=>e)))})((0,r.readFileSync)(o,{encoding:"utf8"}),o,c) | ||
n.devLog(`[file] ${o}`) | ||
if(!e.length)return | ||
l(s).fileList.push({name:p.slice(0,-3),exportList:e}) | ||
n.devLog(` export [${e.length}]: ${e.join(", ")}`)}else n.devLog(`[skipped] ${o} (${(0,i.getPathTypeFromStat)(a)})`)},getSourceRouteMap:()=>{const e=(e=>{Object.values(e).forEach(({routeList:e,directoryList:r,fileList:t})=>{r.sort(o.compareString) | ||
t.sort(({name:e},{name:r})=>(0,o.compareString)(e,r)) | ||
t.forEach(({exportList:e})=>e.sort(o.compareString))});(0,a.objectSortKey)(e) | ||
return e})(p) | ||
p={} | ||
return e}}} | ||
exports.createExportParser=c | ||
exports.collectSourceRouteMap=(async({pathRootList:e=[],pathInfoFilter:r=(e=>!0),logger:t})=>{const{parseExport:o,getSourceRouteMap:a}=c({logger:t}) | ||
const i=e=>r(e)&&o(e.path) | ||
for(const r of e)await(0,s.walkDirectoryInfoTree)(await(0,s.getDirectoryInfoTree)(r),i) | ||
return a()}) | ||
var _path = require("path"); | ||
var _fs = require("fs"); | ||
var _parser = require("@babel/parser"); | ||
var _compare = require("dr-js/library/common/compare"); | ||
var _Object = require("dr-js/library/common/mutable/Object"); | ||
var _File = require("dr-js/library/node/file/File"); | ||
var _Directory = require("dr-js/library/node/file/Directory"); | ||
// default plugin set for parsing modern ES module source
const DEFAULT_PARSER_PLUGIN_LIST = [ "objectRestSpread", "classProperties", "exportDefaultFrom", "exportNamespaceFrom", "jsx" ];
// Parse `fileString` as an ES module and list the names from every
// `ExportNamedDeclaration` (export const/let/function/class, export { ... }).
const parseFileExportList = (fileString, sourceFilename, parserPluginList) => {
  const resultAST = (0, _parser.parse)(fileString, {
    sourceFilename,
    sourceType: "module",
    plugins: parserPluginList || DEFAULT_PARSER_PLUGIN_LIST
  });
  const exportNodeList = resultAST.program.body.filter(({type}) => type === "ExportNamedDeclaration");
  const exportList = [];
  for (const {specifiers, declaration} of exportNodeList) {
    if (!declaration) for (const {exported: {name}} of specifiers) exportList.push(name);
    else if (declaration.declarations) for (const {id: {name}} of declaration.declarations) exportList.push(name);
    else exportList.push(declaration.id.name);
  }
  return exportList;
};
// Stateful collector: feed paths to `parseExport`, then drain the sorted
// route -> { routeList, directoryList, fileList } map via `getSourceRouteMap`.
const createExportParser = ({babylonPluginList, parserPluginList = babylonPluginList, logger}) => {
  let sourceRouteMap = {};
  // get-or-create the record for a routeList
  const touchRoute = (routeList) => {
    const key = routeList.join("/");
    if (!sourceRouteMap[key]) sourceRouteMap[key] = { routeList, directoryList: [], fileList: [] };
    return sourceRouteMap[key];
  };
  const parseExport = async (path) => {
    const pathStat = await (0, _File.getPathStat)(path);
    const routeList = path.split(_path.sep);
    const name = routeList.pop();
    if (pathStat.isDirectory()) {
      logger.devLog(`[directory] ${path}`);
      touchRoute(routeList).directoryList.push(name);
    } else if (pathStat.isFile() && name.endsWith(".js")) {
      const exportList = parseFileExportList((0, _fs.readFileSync)(path, { encoding: "utf8" }), path, parserPluginList);
      logger.devLog(`[file] ${path}`);
      if (!exportList.length) return; // nothing exported, skip the record
      touchRoute(routeList).fileList.push({ name: name.slice(0, -3), exportList }); // drop ".js"
      logger.devLog(` export [${exportList.length}]: ${exportList.join(", ")}`);
    } else logger.devLog(`[skipped] ${path} (${(0, _File.getPathTypeFromStat)(pathStat)})`);
  };
  const getSourceRouteMap = () => {
    for (const {directoryList, fileList} of Object.values(sourceRouteMap)) {
      directoryList.sort(_compare.compareString);
      fileList.sort(({name: a}, {name: b}) => (0, _compare.compareString)(a, b));
      fileList.forEach(({exportList}) => exportList.sort(_compare.compareString));
    }
    (0, _Object.objectSortKey)(sourceRouteMap);
    const result = sourceRouteMap;
    sourceRouteMap = {}; // reset so the parser can be reused
    return result;
  };
  return { parseExport, getSourceRouteMap };
};
exports.createExportParser = createExportParser; | ||
exports.collectSourceRouteMap = (async ({pathRootList = [], pathInfoFilter = info => !0, logger}) => { | ||
const {parseExport, getSourceRouteMap} = createExportParser({ | ||
logger | ||
}); | ||
const parseWalkExport = info => pathInfoFilter(info) && parseExport(info.path); | ||
for (const pathRoot of pathRootList) await (0, _Directory.walkDirectoryInfoTree)(await (0, _Directory.getDirectoryInfoTree)(pathRoot), parseWalkExport); | ||
return getSourceRouteMap(); | ||
}); |
@@ -1,15 +0,24 @@ | ||
var e=require("path") | ||
var r=require("dr-js/library/node/file/function") | ||
var o=require("./generateInfo") | ||
exports.getMarkdownHeaderLink=(e=>`[${e}](#${e.trim().toLowerCase().replace(/[^\w\- ]+/g,"").replace(/\s/g,"-").replace(/-+$/,"")})`) | ||
const t=e=>e.replace(/_/g,"\\_") | ||
exports.escapeMarkdownLink=t | ||
const n=e=>`📄 [${t(e)}](${e})` | ||
exports.renderMarkdownFileLink=n | ||
exports.renderMarkdownDirectoryLink=(e=>`📁 [${t(e).replace(/\/*$/,"/")}](${e})`) | ||
exports.renderMarkdownExportPath=(({exportInfoMap:t,rootPath:a})=>Object.entries(t).reduce((t,[p,s])=>{s[o.EXPORT_LIST_KEY]&&t.push(`+ ${n(`${(0,r.toPosixPath)((0,e.relative)(a,p))}.js`)}`,` - ${s[o.EXPORT_LIST_KEY].map(e=>`\`${e}\``).join(", ")}`) | ||
return t},[])) | ||
const a=({exportInfo:e,routeList:r})=>Object.entries(e).reduce((e,[t,n])=>{if(t===o.HOIST_LIST_KEY);else if(t===o.EXPORT_LIST_KEY||t===o.EXPORT_HOIST_LIST_KEY)e.push(`- ${n.map(e=>`\`${e}\``).join(", ")}`) | ||
else{const o=a({exportInfo:n,routeList:[...r,t]}) | ||
o.length&&e.push(`- **${t}**`,...o.map(e=>` ${e}`))}return e},[]) | ||
exports.renderMarkdownExportTree=a | ||
var _path = require("path"); | ||
var _function = require("dr-js/library/node/file/function"); | ||
var _generateInfo = require("./generateInfo"); | ||
exports.getMarkdownHeaderLink = (text => `[${text}](#${text.trim().toLowerCase().replace(/[^\w\- ]+/g, "").replace(/\s/g, "-").replace(/-+$/, "")})`); | ||
const escapeMarkdownLink = name => name.replace(/_/g, "\\_"); | ||
exports.escapeMarkdownLink = escapeMarkdownLink; | ||
const renderMarkdownFileLink = path => `📄 [${escapeMarkdownLink(path)}](${path})`; | ||
exports.renderMarkdownFileLink = renderMarkdownFileLink; | ||
exports.renderMarkdownDirectoryLink = (path => `📁 [${escapeMarkdownLink(path).replace(/\/*$/, "/")}](${path})`); | ||
exports.renderMarkdownExportPath = (({exportInfoMap, rootPath}) => Object.entries(exportInfoMap).reduce((textList, [path, value]) => { | ||
value[_generateInfo.EXPORT_LIST_KEY] && textList.push(`+ ${renderMarkdownFileLink(`${(0, _function.toPosixPath)((0, _path.relative)(rootPath, path))}.js`)}`, ` - ${value[_generateInfo.EXPORT_LIST_KEY].map(text => `\`${text}\``).join(", ")}`); | ||
return textList; | ||
}, [])); | ||
// Recursively render the export-info tree as nested markdown bullets.
// `~hoist` entries are skipped; `~export` / `~export-hoist` become name lists.
const renderMarkdownExportTree = ({exportInfo, routeList}) => {
  const textList = [];
  for (const [key, value] of Object.entries(exportInfo)) {
    if (key === _generateInfo.HOIST_LIST_KEY) continue;
    if (key === _generateInfo.EXPORT_LIST_KEY || key === _generateInfo.EXPORT_HOIST_LIST_KEY) {
      textList.push(`- ${value.map((text) => `\`${text}\``).join(", ")}`);
      continue;
    }
    const childTextList = renderMarkdownExportTree({ exportInfo: value, routeList: [ ...routeList, key ] });
    if (childTextList.length) textList.push(`- **${key}**`, ...childTextList.map((text) => ` ${text}`));
  }
  return textList;
};
exports.renderMarkdownExportTree = renderMarkdownExportTree;
@@ -1,13 +0,20 @@ | ||
var e=require("fs") | ||
var r=require("dr-js/library/common/format") | ||
exports.wrapFileProcessor=(({processor:s,logger:{log:o,devLog:t}})=>async o=>{const c=(0,e.readFileSync)(o,"utf8") | ||
const i=await s(c,o) | ||
if(c===i){t(`process skipped ${o}`) | ||
return 0}const{size:n}=(0,e.statSync)(o) | ||
i?(0,e.writeFileSync)(o,i):(0,e.unlinkSync)(o) | ||
const{size:a}=i?(0,e.statSync)(o):{size:0} | ||
const p=a-n | ||
t(`∆${(a/n).toFixed(2)}(${(0,r.binary)(p)}B)`,`${(0,r.binary)(n)}B → ${(0,r.binary)(a)}B`,`${o}`) | ||
return a-n}) | ||
exports.fileProcessorBabel=(e=>e.replace(/['"]use strict['"];?\s*/g,"").replace(/Object\.defineProperty\(exports,\s*['"]__esModule['"],\s*{\s*value:\s*(true|!0)\s*}\)[;,]?\s*/g,"").replace(/(exports\.\w+\s*=\s*)+(undefined|void 0)[;,]?\s*/g,"").replace(/[\n\r]{2,}/g,"\n").replace(/^[\n\r]+/,"")) | ||
exports.fileProcessorWebpack=(e=>e.replace(/function\s*\(\)\s*{\s*return\s+([\w$]{1,3})\s*}/g,"()=>$1")) | ||
var _fs = require("fs"); | ||
var _format = require("dr-js/library/common/format"); | ||
exports.wrapFileProcessor = (({processor, logger: {log, devLog}}) => async filePath => { | ||
const inputString = (0, _fs.readFileSync)(filePath, "utf8"); | ||
const outputString = await processor(inputString, filePath); | ||
if (inputString === outputString) { | ||
devLog(`process skipped ${filePath}`); | ||
return 0; | ||
} | ||
const {size: inputSize} = (0, _fs.statSync)(filePath); | ||
outputString ? (0, _fs.writeFileSync)(filePath, outputString) : (0, _fs.unlinkSync)(filePath); | ||
const {size: outputSize} = outputString ? (0, _fs.statSync)(filePath) : { | ||
size: 0 | ||
}; | ||
const sizeChange = outputSize - inputSize; | ||
devLog(`∆${(outputSize / inputSize).toFixed(2)}(${(0, _format.binary)(sizeChange)}B)`, `${(0, _format.binary)(inputSize)}B → ${(0, _format.binary)(outputSize)}B`, `${filePath}`); | ||
return outputSize - inputSize; | ||
}); | ||
exports.fileProcessorBabel = (inputString => inputString.replace(/['"]use strict['"];?\s*/g, "").replace(/Object\.defineProperty\(exports,\s*['"]__esModule['"],\s*{\s*value:\s*(true|!0)\s*}\)[;,]?\s*/g, "").replace(/(exports\.\w+\s*=\s*)+(undefined|void 0)[;,]?\s*/g, "").replace(/[\n\r]{2,}/g, "\n").replace(/^[\n\r]+/, "")); | ||
exports.fileProcessorWebpack = (inputString => inputString.replace(/function\s*\(\)\s*{\s*return\s+([\w$]{1,3})\s*}/g, "()=>$1")); |
@@ -1,19 +0,38 @@ | ||
var o=require("dr-js/library/common/time") | ||
var e=require("dr-js/library/common/format") | ||
var t=require("./__utils__") | ||
const r=()=>{} | ||
exports.getLogger=((n="dev-dep",c=!1,s=120)=>{const _=(0,t.loadEnvKey)("__DEV_LOGGER_TITLE__") | ||
n=_?`${n}|${_}`:n;(0,t.saveEnvKey)("__DEV_LOGGER_TITLE__",n) | ||
const l=(0,o.clock)() | ||
let g=(0,o.clock)() | ||
const i=(...t)=>console.log(`- (+${(()=>{const t=(0,o.clock)() | ||
const r=t-g | ||
g=t | ||
return(0,e.time)(r)})()}) ${t.join(" ")}`) | ||
const d=(...o)=>console.log(`- ${o.join(" ")}`) | ||
const a=t.__VERBOSE__?d:r | ||
return c?{padLog:i,stepLog:a,log:a,devLog:r}:{padLog:(...t)=>{const r=`## ${t.join(" ")} ` | ||
const c=` [${n}|${(()=>{const t=(0,o.clock)() | ||
g=t | ||
return(0,e.time)(t-l)})()}]` | ||
console.log(`\n${r.padEnd(s-c.length,"-")}${c}`)},stepLog:i,log:d,devLog:a}}) | ||
var _time = require("dr-js/library/common/time"); | ||
var _format = require("dr-js/library/common/format"); | ||
var _main = require("./main"); | ||
const EMPTY_FUNC = () => {}; | ||
exports.getLogger = ((title = "dev-dep", quiet = !1, padWidth = 120) => { | ||
const envTitle = (0, _main.loadEnvKey)("__DEV_LOGGER_TITLE__"); | ||
title = envTitle ? `${title}|${envTitle}` : title; | ||
(0, _main.saveEnvKey)("__DEV_LOGGER_TITLE__", title); | ||
const startTime = (0, _time.clock)(); | ||
let prevTime = (0, _time.clock)(); | ||
const stepLog = (...args) => console.log(`- (+${(() => { | ||
const time = (0, _time.clock)(); | ||
const stepTime = time - prevTime; | ||
prevTime = time; | ||
return (0, _format.time)(stepTime); | ||
})()}) ${args.join(" ")}`); | ||
const log = (...args) => console.log(`- ${args.join(" ")}`); | ||
const devLog = _main.__VERBOSE__ ? log : EMPTY_FUNC; | ||
return quiet ? { | ||
padLog: stepLog, | ||
stepLog: devLog, | ||
log: devLog, | ||
devLog: EMPTY_FUNC | ||
} : { | ||
padLog: (...args) => { | ||
const start = `## ${args.join(" ")} `; | ||
const end = ` [${title}|${(() => { | ||
const time = (0, _time.clock)(); | ||
prevTime = time; | ||
return (0, _format.time)(time - startTime); | ||
})()}]`; | ||
console.log(`\n${start.padEnd(padWidth - end.length, "-")}${end}`); | ||
}, | ||
stepLog, | ||
log, | ||
devLog | ||
}; | ||
}); |
@@ -1,39 +0,92 @@ | ||
var e=require("path") | ||
var t=require("fs") | ||
var r=function(e){return e&&e.__esModule?e:{default:e}}(require("webpack")) | ||
var o=require("dr-js/library/common/format") | ||
var i=require("dr-js/library/node/file/File") | ||
var a=require("dr-js/library/node/system/ExitListener") | ||
var n=require("./__utils__") | ||
const s=(e,t)=>(r,o)=>{if(r)return e(r) | ||
if(o.hasErrors()||o.hasWarnings()){const{errors:t=[],warnings:r=[]}=o.toJson() | ||
t.forEach(e=>console.error(e)) | ||
r.forEach(e=>console.warn(e)) | ||
if(o.hasErrors())return e(new Error("webpack stats Error"))}t(o)} | ||
const c=(...e)=>e.filter(Boolean).join(",") | ||
exports.compileWithWebpack=(async({config:e,isWatch:i,profileOutput:l,logger:u})=>{const{log:p}=u | ||
if(l){i&&console.warn("[watch] warning: skipped generate profileOutput") | ||
e.profile=!0}const d=(0,r.default)(e) | ||
const f=((e,{padLog:t,log:r})=>{const i=({compilation:{assets:i={},chunks:a=[]},startTime:s,endTime:l})=>{s&&l&&t(`[${e?"watch":"compile"}] time: ${(0,o.time)(l-s)}`) | ||
const u=[] | ||
Object.entries(i).forEach(([e,t])=>u.push(["asset",e,`${(0,o.binary)(t.size())}B`,c(t.emitted&&"emitted")])) | ||
n.__VERBOSE__&&a.forEach(e=>u.push(["chunk",e.name||e.id,`${(0,o.binary)(e.modulesSize())}B`,c(e.canBeInitial()&&"initial",e.hasRuntime()&&"entry",e.rendered&&"rendered")])) | ||
r(`output:\n ${(0,o.padTable)({table:u,padFuncList:["L","R","R","L"],cellPad:" | ",rowPad:"\n "})}`)} | ||
return e=>{if(e.compilation)return i(e) | ||
if(e.stats)return e.stats.map(i) | ||
console.warn("[getLogStats] unexpected statData",e) | ||
throw new Error("[getLogStats] unexpected statData")}})(i,u) | ||
if(!i){p("[compile] start") | ||
const e=await new Promise((e,t)=>d.run(s(t,e))) | ||
f(e) | ||
l&&(0,t.writeFileSync)(l,JSON.stringify(e.toJson())) | ||
l&&p(`[compile] generated profileOutput at: ${l}`) | ||
return e}p("[watch] start") | ||
d.watch({aggregateTimeout:512,poll:void 0},s(e=>p(`error: ${e}`),f));(0,a.addExitListenerSync)(e=>p(`[watch] exit with state: ${JSON.stringify(e)}`))}) | ||
exports.commonFlag=(async({argvFlag:t,fromRoot:r,profileOutput:o=r(".temp-gitignore/profile-stat.json"),logger:{log:a}})=>{const n=t("development","production")||"production" | ||
const s=Boolean(t("watch")) | ||
const c="production"===n | ||
t("profile")||(o=null) | ||
o&&await(0,i.createDirectory)((0,e.dirname)(o)) | ||
a(`compile flag: ${JSON.stringify({mode:n,isWatch:s,isProduction:c,profileOutput:o},null," ")}`) | ||
return{mode:n,isWatch:s,isProduction:c,profileOutput:o}}) | ||
var _path = require("path"); | ||
var _fs = require("fs"); | ||
// CommonJS/ESM interop for webpack's default export
var _webpack = (function (moduleObject) {
  return moduleObject && moduleObject.__esModule ? moduleObject : { default: moduleObject };
})(require("webpack"));
var _format = require("dr-js/library/common/format"); | ||
var _File = require("dr-js/library/node/file/File"); | ||
var _ExitListener = require("dr-js/library/node/system/ExitListener"); | ||
var _main = require("./main"); | ||
// Adapt webpack's `(error, stats)` callback: hard errors and stats errors go
// to `onError`; warnings are printed; otherwise `onStats` gets the stats object.
const getStatsCheck = (onError, onStats) => (error, statsData) => {
  if (error) return onError(error);
  const hasError = statsData.hasErrors();
  if (hasError || statsData.hasWarnings()) {
    const {errors = [], warnings = []} = statsData.toJson();
    for (const message of errors) console.error(message);
    for (const message of warnings) console.warn(message);
    if (hasError) return onError(new Error("webpack stats Error"));
  }
  onStats(statsData);
};
// join only the truthy tag strings: joinTag("a", false, "b") === "a,b"
const joinTag = (...tagList) => tagList.filter(Boolean).join(",");
exports.compileWithWebpack = (async ({config, isWatch, profileOutput, assetMapOutput, logger}) => { | ||
const {log} = logger; | ||
if (profileOutput) { | ||
isWatch && console.warn("[watch] warning: skipped generate profileOutput"); | ||
config.profile = !0; | ||
} | ||
const compiler = (0, _webpack.default)(config); | ||
const logStats = ((isWatch, {padLog, log}) => { | ||
const logSingleStats = ({compilation: {assets = {}, chunks = []}, startTime, endTime}) => { | ||
startTime && endTime && padLog(`[${isWatch ? "watch" : "compile"}] time: ${(0, _format.time)(endTime - startTime)}`); | ||
const table = []; | ||
Object.entries(assets).forEach(([name, sourceInfo]) => table.push([ "asset", name, `${(0, _format.binary)(sourceInfo.size())}B`, joinTag(sourceInfo.emitted && "emitted") ])); | ||
_main.__VERBOSE__ && chunks.forEach(chunk => table.push([ "chunk", chunk.name || chunk.id, `${(0, _format.binary)(chunk.modulesSize())}B`, joinTag(chunk.canBeInitial() && "initial", chunk.hasRuntime() && "entry", chunk.rendered && "rendered") ])); | ||
log(`output:\n ${(0, _format.padTable)({ | ||
table, | ||
padFuncList: [ "L", "R", "R", "L" ], | ||
cellPad: " | ", | ||
rowPad: "\n " | ||
})}`); | ||
}; | ||
return statsData => { | ||
if (statsData.compilation) return logSingleStats(statsData); | ||
if (statsData.stats) return statsData.stats.map(logSingleStats); | ||
console.warn("[getLogStats] unexpected statData", statsData); | ||
throw new Error("[getLogStats] unexpected statData"); | ||
}; | ||
})(isWatch, logger); | ||
if (!isWatch) { | ||
log("[compile] start"); | ||
const statsData = await new Promise((resolve, reject) => compiler.run(getStatsCheck(reject, resolve))); | ||
logStats(statsData); | ||
let statsDataObject; | ||
const getStatsDataObject = () => { | ||
void 0 === statsDataObject && (statsDataObject = statsData.toJson()); | ||
return statsDataObject; | ||
}; | ||
profileOutput && (0, _fs.writeFileSync)(profileOutput, JSON.stringify(getStatsDataObject())); | ||
profileOutput && log(`[compile] generated profileOutput at: ${profileOutput}`); | ||
assetMapOutput && (0, _fs.writeFileSync)(assetMapOutput, JSON.stringify(getStatsDataObject().assetsByChunkName || {})); | ||
assetMapOutput && log(`[compile] generated assetMapOutput at: ${assetMapOutput}`); | ||
return statsData; | ||
} | ||
log("[watch] start"); | ||
compiler.watch({ | ||
aggregateTimeout: 512, | ||
poll: void 0 | ||
}, getStatsCheck(error => log(`error: ${error}`), logStats)); | ||
(0, _ExitListener.addExitListenerSync)(exitState => log(`[watch] exit with state: ${JSON.stringify(exitState)}`)); | ||
}); | ||
exports.commonFlag = (async ({argvFlag, fromRoot, profileOutput = fromRoot(".temp-gitignore/profile-stat.json"), assetMapOutput = "", logger: {log}}) => { | ||
const mode = argvFlag("development", "production") || "production"; | ||
const isWatch = Boolean(argvFlag("watch")); | ||
const isProduction = "production" === mode; | ||
argvFlag("profile") || (profileOutput = null); | ||
profileOutput && await (0, _File.createDirectory)((0, _path.dirname)(profileOutput)); | ||
assetMapOutput && await (0, _File.createDirectory)((0, _path.dirname)(assetMapOutput)); | ||
log(`compile flag: ${JSON.stringify({ | ||
mode, | ||
isWatch, | ||
isProduction, | ||
profileOutput, | ||
assetMapOutput | ||
}, null, " ")}`); | ||
return { | ||
mode, | ||
isWatch, | ||
isProduction, | ||
profileOutput, | ||
assetMapOutput | ||
}; | ||
}); |
@@ -1,1 +0,1 @@ | ||
{"name":"dev-dep-tool","version":"0.3.3-dev.1","author":"dr-js","license":"MIT","description":"Provide common package devDependencies","keywords":["Dr","Dr-js","JavaScript"],"repository":"github:dr-js/dev-dep","bin":"bin/index.js","engines":{"node":"^8.11 || ^9.11 || >=10.9","npm":">=6"},"dependencies":{"dr-js":"0.16.3-dev.2 || ^0.16.3-dev.0 || ^0.16.2"},"sideEffects":false} | ||
{"name":"dev-dep-tool","version":"0.4.0-dev.0","author":"dr-js","license":"MIT","description":"Provide common package devDependencies","keywords":["Dr","Dr-js","JavaScript"],"repository":"github:dr-js/dev-dep","bin":"bin/index.js","engines":{"node":"^8.11 || ^9.11 || >=10.9","npm":">=6"},"dependencies":{"dr-js":"0.17.0-dev.3 || ^0.16.3-dev.0 || ^0.16.2"},"sideEffects":false} |
@@ -15,3 +15,23 @@ # dev-dep-tool | ||
[l:size]: https://packagephobia.now.sh/result?p=dev-dep-tool | ||
[i:lint]: https://img.shields.io/badge/code_style-standard-yellow.svg | ||
[l:lint]: https://standardjs.com | ||
[i:lint]: https://img.shields.io/badge/code_style-standard_ES6+-yellow.svg | ||
[l:lint]: https://standardjs.com | ||
- [![i:p-b]][l:p-b] | ||
- [![i:p-br]][l:p-br] | ||
- [![i:p-w]][l:p-w] | ||
- [![i:p-wr]][l:p-wr] | ||
- [![i:p-wrp]][l:p-wrp] | ||
- [![i:p-wrsc]][l:p-wrsc] | ||
[i:p-b]: https://img.shields.io/badge/dev--dep-babel-yellow.svg | ||
[l:p-b]: https://www.npmjs.com/package/dev-dep-babel | ||
[i:p-br]: https://img.shields.io/badge/dev--dep-babel--react-yellow.svg | ||
[l:p-br]: https://www.npmjs.com/package/dev-dep-babel-react | ||
[i:p-w]: https://img.shields.io/badge/dev--dep-web-blue.svg | ||
[l:p-w]: https://www.npmjs.com/package/dev-dep-web | ||
[i:p-wr]: https://img.shields.io/badge/dev--dep-web--react-blue.svg | ||
[l:p-wr]: https://www.npmjs.com/package/dev-dep-web-react | ||
[i:p-wrp]: https://img.shields.io/badge/dev--dep-web--react--postcss-blue.svg | ||
[l:p-wrp]: https://www.npmjs.com/package/dev-dep-web-react-postcss | ||
[i:p-wrsc]: https://img.shields.io/badge/dev--dep-web--react--styled--components-blue.svg | ||
[l:p-wrsc]: https://www.npmjs.com/package/dev-dep-web-react-styled-components |
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
Minified code
Quality: This package contains minified code. This may be harmless in some cases where minified code is included in packaged libraries, however packages on npm should not minify code.
Found 2 instances in 1 package
40156
800
2
36
4