dev-dep-tool
Comparing version 0.0.14-dev.7 to 0.0.14-dev.8
bin/checkOutdated.js
@@ -1,72 +0,27 @@
var _path = require("path");
var _fs = require("fs");
var _File = require("dr-js/library/node/file/File");
var _Directory = require("dr-js/library/node/file/Directory");
var _Modify = require("dr-js/library/node/file/Modify");
var _logResult = require("./logResult");

const loadPackage = (pathInput, path, collectDependency) => {
  const packageSource = (0, _path.relative)(pathInput, path);
  const {
    dependencies,
    devDependencies,
    peerDependencies,
    optionalDependencies
  } = JSON.parse((0, _fs.readFileSync)(path, 'utf8'));
  dependencies && collectDependency(dependencies, packageSource);
  devDependencies && collectDependency(devDependencies, packageSource);
  peerDependencies && collectDependency(peerDependencies, packageSource);
  optionalDependencies && collectDependency(optionalDependencies, packageSource);
};

const withPathTemp = async ({
  pathTemp,
  packageInfoMap,
  dependencyMap
}) => {
  await (0, _File.createDirectory)(pathTemp);
  (0, _fs.writeFileSync)((0, _path.resolve)(pathTemp, 'package.json'), JSON.stringify({
    dependencies: dependencyMap
  }));
  let result, resultError;
  try {
    result = await (0, _logResult.logCheckOutdatedResult)(packageInfoMap, pathTemp);
  } catch (error) {
    resultError = error;
  }
  await _Modify.modify.delete(pathTemp);
  if (resultError) throw resultError;
  return result;
};

const doCheckOutdated = async ({
  pathInput,
  pathTemp
}) => {
  const packageInfoMap = {};
  const dependencyMap = {};
  const collectDependency = (dependencyObject, source) => Object.entries(dependencyObject).forEach(([name, version]) => {
    if (packageInfoMap[name]) return console.warn(`[collectDependency] dropped duplicate package: ${name} at ${source} with version: ${version}, checking: ${packageInfoMap[name].version}`);
    packageInfoMap[name] = {
      name,
      version,
      source
    };
    dependencyMap[name] = version;
  });
  let outdatedCount;
  if ((await (0, _File.getPathType)(pathInput)) === _File.FILE_TYPE.Directory) {
    if (!pathTemp) pathTemp = (0, _path.resolve)(pathInput, 'check-outdated-gitignore');
    console.log(`[checkOutdated] create and checking '${pathTemp}'`);
    const fileList = await (0, _Directory.getFileList)(pathInput);
    fileList.filter(path => path.endsWith('package.json')).forEach(path => loadPackage(pathInput, path, collectDependency));
    outdatedCount = await withPathTemp({
      pathTemp,
      packageInfoMap,
      dependencyMap
    });
  } else {
    console.log(`[checkOutdated] direct checking '${pathInput}'`);
    loadPackage(pathInput, pathInput, collectDependency);
    outdatedCount = await (0, _logResult.logCheckOutdatedResult)(packageInfoMap, (0, _path.dirname)(pathInput));
  }
  process.exit(outdatedCount);
};
exports.doCheckOutdated = doCheckOutdated;

var e=require("path")
var t=require("fs")
var r=require("dr-js/library/node/file/File")
var a=require("dr-js/library/node/file/Directory")
var i=require("dr-js/library/node/file/Modify")
var c=require("./logResult")
const n=(r,a,i)=>{const c=(0,e.relative)(r,a)
const{dependencies:n,devDependencies:o,peerDependencies:d,optionalDependencies:s}=JSON.parse((0,t.readFileSync)(a,"utf8"))
n&&i(n,c)
o&&i(o,c)
d&&i(d,c)
s&&i(s,c)}
exports.doCheckOutdated=(async({pathInput:o,pathTemp:d})=>{const s={}
const p={}
const l=(e,t)=>Object.entries(e).forEach(([e,r])=>{if(s[e])return console.warn(`[collectDependency] dropped duplicate package: ${e} at ${t} with version: ${r}, checking: ${s[e].version}`)
s[e]={name:e,version:r,source:t}
p[e]=r})
let u
if(await(0,r.getPathType)(o)===r.FILE_TYPE.Directory){d||(d=(0,e.resolve)(o,"check-outdated-gitignore"))
console.log(`[checkOutdated] create and checking '${d}'`);(await(0,a.getFileList)(o)).filter(e=>e.endsWith("package.json")).forEach(e=>n(o,e,l))
u=await(async({pathTemp:a,packageInfoMap:n,dependencyMap:o})=>{await(0,r.createDirectory)(a);(0,t.writeFileSync)((0,e.resolve)(a,"package.json"),JSON.stringify({dependencies:o}))
let d,s
try{d=await(0,c.logCheckOutdatedResult)(n,a)}catch(e){s=e}await i.modify.delete(a)
if(s)throw s
return d})({pathTemp:d,packageInfoMap:s,dependencyMap:p})}else{console.log(`[checkOutdated] direct checking '${o}'`)
n(o,o,l)
u=await(0,c.logCheckOutdatedResult)(s,(0,e.dirname)(o))}process.exit(u)})
bin/logResult.js
@@ -1,49 +0,24 @@
var _child_process = require("child_process");
var _format = require("dr-js/library/common/format");
var _semver = require("./semver");

const checkNpmOutdated = pathPackage => {
  const {
    stdout,
    status,
    signal,
    error
  } = (0, _child_process.spawnSync)('npm', ['outdated'], {
    cwd: pathPackage,
    stdio: 'pipe',
    shell: true
  });
  if (error) throw error;
  return stdout.toString();
};

const processResult = async (packageInfoMap, npmOutdatedOutputString) => {
  const sameTable = [];
  const outdatedTable = [];
  npmOutdatedOutputString.split('\n').forEach(outputLine => {
    const [, name, versionWanted, versionLatest] = REGEXP_NPM_OUTDATED_OUTPUT.exec(outputLine.replace(REGEXP_ANSI_ESCAPE_CODE, '')) || [];
    if (!packageInfoMap[name]) return;
    const versionTarget = (0, _semver.compareSemver)(versionWanted, versionLatest) <= 0 ? versionLatest : versionWanted;
    const {
      version,
      source
    } = packageInfoMap[name];
    const rowList = [name, version, versionTarget, source];
    version.endsWith(versionTarget) ? sameTable.push(rowList) : outdatedTable.push(rowList);
  });
  const total = sameTable.length + outdatedTable.length;
  sameTable.sort(sortTableRow);
  sameTable.length && console.log(`SAME[${sameTable.length}/${total}]:\n${formatPadTable(sameTable)}`);
  outdatedTable.sort(sortTableRow);
  outdatedTable.length && console.log(`OUTDATED[${outdatedTable.length}/${total}]:\n${formatPadTable(outdatedTable)}`);
  return outdatedTable.length;
};

const REGEXP_ANSI_ESCAPE_CODE = /\033\[[0-9;]*[a-zA-Z]/g;
const REGEXP_NPM_OUTDATED_OUTPUT = /(\S+)\s+\S+\s+(\S+)\s+(\S+)/;
const sortTableRow = ([nameA,,, sourceA], [nameB,,, sourceB]) => sourceA !== sourceB ? sourceA.localeCompare(sourceB) : nameA.localeCompare(nameB);
const PAD_FUNC_LIST = [(name, maxWidth) => ` ${name.padStart(maxWidth)}`, undefined, undefined, source => source];
const formatPadTable = table => (0, _format.padTable)({
  table,
  cellPad: ' | ',
  padFuncList: PAD_FUNC_LIST
});

const logCheckOutdatedResult = async (packageInfoMap, pathPackage) => processResult(packageInfoMap, checkNpmOutdated(pathPackage));
exports.logCheckOutdatedResult = logCheckOutdatedResult;

var e=require("child_process")
var t=require("dr-js/library/common/format")
var o=require("./semver")
const s=/\033\[[0-9;]*[a-zA-Z]/g
const r=/(\S+)\s+\S+\s+(\S+)\s+(\S+)/
const n=([e,,,t],[o,,,s])=>t!==s?t.localeCompare(s):e.localeCompare(o)
const c=[(e,t)=>` ${e.padStart(t)}`,void 0,void 0,e=>e]
const l=e=>(0,t.padTable)({table:e,cellPad:" | ",padFuncList:c})
exports.logCheckOutdatedResult=(async(t,c)=>(async(e,t)=>{const c=[]
const a=[]
t.split("\n").forEach(t=>{const[,n,l,d]=r.exec(t.replace(s,""))||[]
if(!e[n])return
const i=(0,o.compareSemver)(l,d)<=0?d:l
const{version:u,source:p}=e[n]
const h=[n,u,i,p]
u.endsWith(i)?c.push(h):a.push(h)})
const d=c.length+a.length
c.sort(n)
c.length&&console.log(`SAME[${c.length}/${d}]:\n${l(c)}`)
a.sort(n)
a.length&&console.log(`OUTDATED[${a.length}/${d}]:\n${l(a)}`)
return a.length})(t,(t=>{const{stdout:o,status:s,signal:r,error:n}=(0,e.spawnSync)("npm",["outdated"],{cwd:t,stdio:"pipe",shell:!0})
if(n)throw n
return o.toString()})(c)))
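For reference, the parse step above consumes the table lines `npm outdated` prints (Package / Current / Wanted / Latest / Location, with Current skipped by the regexp). A minimal sketch of the capture behavior; the sample line below is assumed, and real npm output formatting may vary:

const REGEXP_NPM_OUTDATED_OUTPUT = /(\S+)\s+\S+\s+(\S+)\s+(\S+)/
// columns: name, (current, skipped), wanted, latest
const [, name, versionWanted, versionLatest] = REGEXP_NPM_OUTDATED_OUTPUT.exec('dr-js  0.11.3  0.12.0  0.12.0  dev-dep-tool') || []
console.log(name, versionWanted, versionLatest) // dr-js 0.12.0 0.12.0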
bin/semver.js
@@ -1,16 +0,6 @@
const REGEXP_SEMVER = /(\d+)\.(\d+)\.(\d+)(.*)/;

const parseSemver = versionString => {
  const [, major, minor, patch, label = ''] = REGEXP_SEMVER.exec(versionString);
  return {
    major,
    minor,
    patch,
    label
  };
};

const compareSemver = (stringA, stringB) => {
  const a = parseSemver(stringA);
  const b = parseSemver(stringB);
  return parseInt(a.major) - parseInt(b.major) || parseInt(a.minor) - parseInt(b.minor) || parseInt(a.patch) - parseInt(b.patch) || a.label.localeCompare(b.label);
};
exports.compareSemver = compareSemver;

const e=/(\d+)\.(\d+)\.(\d+)(.*)/
const r=r=>{const[,t,o,a,n=""]=e.exec(r)
return{major:t,minor:o,patch:a,label:n}}
exports.compareSemver=((e,t)=>{const o=r(e)
const a=r(t)
return parseInt(o.major)-parseInt(a.major)||parseInt(o.minor)-parseInt(a.minor)||parseInt(o.patch)-parseInt(a.patch)||o.label.localeCompare(a.label)})
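A quick usage sketch for compareSemver (the require path is assumed from the section above): each numeric part is compared with parseInt, so the comparison is numeric rather than lexicographic, and any remaining label falls back to localeCompare:

const { compareSemver } = require('./semver') // path assumed
console.log(compareSemver('1.2.3', '1.10.0') < 0) // true: 2 < 10 numerically
console.log(compareSemver('0.0.14-dev.7', '0.0.14-dev.8') < 0) // true: label '-dev.7' < '-dev.8'
// note the label fallback is string-based, so '-dev.10' would sort before '-dev.9'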
bin/index.js
#!/usr/bin/env node
var _option = require("./option");
var _checkOutdated = require("./checkOutdated");
var _pack = require("./pack");
var _package = require("../package.json");

const main = async () => {
  const {
    getSingleOption,
    getSingleOptionOptional
  } = await (0, _option.parseOption)();
  try {
    if (getSingleOptionOptional('version')) return console.log(JSON.stringify({
      packageName: _package.name,
      packageVersion: _package.version
    }, null, ' '));
    const isCheckOutdated = getSingleOptionOptional('check-outdated');
    const isPack = getSingleOptionOptional('pack');
    isCheckOutdated && (await (0, _checkOutdated.doCheckOutdated)({
      pathInput: getSingleOption('path-input'),
      pathTemp: getSingleOptionOptional('path-temp')
    }));
    isPack && (await (0, _pack.doPack)({
      pathInput: getSingleOption('path-input'),
      pathOutput: getSingleOption('path-output'),
      outputName: getSingleOptionOptional('output-name'),
      outputVersion: getSingleOptionOptional('output-version'),
      outputDescription: getSingleOptionOptional('output-description'),
      isPublish: getSingleOptionOptional('publish'),
      isPublishDev: getSingleOptionOptional('publish-dev')
    }));
    isCheckOutdated || isPack || console.log((0, _option.formatUsage)());
  } catch (error) {
    console.warn(`[Error]`, error);
    process.exit(2);
  }
};

main().catch(error => {
  console.warn((0, _option.formatUsage)(error.stack || error.message || error.toString()));
  process.exit(1);
});

var t=require("./option")
var e=require("./checkOutdated")
var a=require("./pack")
var o=require("../package.json");(async()=>{const{getSingleOption:p,getSingleOptionOptional:i}=await(0,t.parseOption)()
try{if(i("version"))return console.log(JSON.stringify({packageName:o.name,packageVersion:o.version},null," "))
const n=i("check-outdated")
const r=i("pack")
n&&await(0,e.doCheckOutdated)({pathInput:p("path-input"),pathTemp:i("path-temp")})
r&&await(0,a.doPack)({pathInput:p("path-input"),pathOutput:p("path-output"),outputName:i("output-name"),outputVersion:i("output-version"),outputDescription:i("output-description"),isPublish:i("publish"),isPublishDev:i("publish-dev")})
n||r||console.log((0,t.formatUsage)())}catch(t){console.warn("[Error]",t)
process.exit(2)}})().catch(e=>{console.warn((0,t.formatUsage)(e.stack||e.message||e.toString()))
process.exit(1)})
bin/option.js
@@ -1,74 +0,8 @@
var _parser = require("dr-js/library/common/module/Option/parser");
var _preset = require("dr-js/library/common/module/Option/preset");
var _Option = require("dr-js/library/node/module/Option");

const {
  SingleString,
  BooleanFlag,
  Config
} = _preset.ConfigPreset;

const OPTION_CONFIG = {
  prefixENV: 'dev-dep',
  formatList: [Config, Object.assign({}, BooleanFlag, {
    name: 'help',
    shortName: 'h'
  }), Object.assign({}, BooleanFlag, {
    name: 'version',
    shortName: 'v'
  }), Object.assign({}, SingleString, {
    isPath: true,
    optional: (0, _preset.getOptionalFormatFlag)('check-outdated', 'pack'),
    name: 'path-input',
    shortName: 'i',
    description: `path to 'package.json', or directory with 'package.json' inside`
  }), Object.assign({}, BooleanFlag, {
    name: 'check-outdated',
    shortName: 'C',
    extendFormatList: [Object.assign({}, SingleString, {
      isPath: true,
      optional: true,
      name: 'path-temp'
    })]
  }), Object.assign({}, BooleanFlag, {
    name: 'pack',
    shortName: 'P',
    extendFormatList: [Object.assign({}, SingleString, {
      isPath: true,
      name: 'path-output',
      shortName: 'o',
      description: `output path`
    }), Object.assign({}, SingleString, {
      optional: true,
      name: 'output-name',
      description: `output package name`
    }), Object.assign({}, SingleString, {
      optional: true,
      name: 'output-version',
      description: `output package version`
    }), Object.assign({}, SingleString, {
      optional: true,
      name: 'output-description',
      description: `output package description`
    }), Object.assign({}, BooleanFlag, {
      name: 'publish',
      description: `run npm publish`
    }), Object.assign({}, BooleanFlag, {
      name: 'publish-dev',
      description: `run npm publish-dev`
    })]
  })]
};

const {
  parseCLI,
  parseENV,
  parseJSON,
  processOptionMap,
  formatUsage
} = (0, _parser.createOptionParser)(OPTION_CONFIG);
exports.formatUsage = formatUsage;

const parseOption = async () => (0, _Option.createOptionGetter)((await (0, _Option.parseOptionMap)({
  parseCLI,
  parseENV,
  parseJSON,
  processOptionMap
})));
exports.parseOption = parseOption;

var e=require("dr-js/library/common/module/Option/parser")
var t=require("dr-js/library/common/module/Option/preset")
var a=require("dr-js/library/node/module/Option")
const{SingleString:s,BooleanFlag:o,Config:i}=t.ConfigPreset
const p={prefixENV:"dev-dep",formatList:[i,Object.assign({},o,{name:"help",shortName:"h"}),Object.assign({},o,{name:"version",shortName:"v"}),Object.assign({},s,{isPath:!0,optional:(0,t.getOptionalFormatFlag)("check-outdated","pack"),name:"path-input",shortName:"i",description:"path to 'package.json', or directory with 'package.json' inside"}),Object.assign({},o,{name:"check-outdated",shortName:"C",extendFormatList:[Object.assign({},s,{isPath:!0,optional:!0,name:"path-temp"})]}),Object.assign({},o,{name:"pack",shortName:"P",extendFormatList:[Object.assign({},s,{isPath:!0,name:"path-output",shortName:"o",description:"output path"}),Object.assign({},s,{optional:!0,name:"output-name",description:"output package name"}),Object.assign({},s,{optional:!0,name:"output-version",description:"output package version"}),Object.assign({},s,{optional:!0,name:"output-description",description:"output package description"}),Object.assign({},o,{name:"publish",description:"run npm publish"}),Object.assign({},o,{name:"publish-dev",description:"run npm publish-dev"})]})]}
const{parseCLI:r,parseENV:n,parseJSON:c,processOptionMap:m,formatUsage:u}=(0,e.createOptionParser)(p)
exports.formatUsage=u
exports.parseOption=(async()=>(0,a.createOptionGetter)(await(0,a.parseOptionMap)({parseCLI:r,parseENV:n,parseJSON:c,processOptionMap:m})))
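Going by OPTION_CONFIG above, a typical invocation (assuming the published bin name `dev-dep-tool` from the package.json at the end of this diff) would look like `dev-dep-tool --check-outdated --path-input .` or `dev-dep-tool -P -i package.json -o output --publish-dev`; with `prefixENV: 'dev-dep'`, the same options should also be accepted from correspondingly prefixed environment variables.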
bin/pack.js
@@ -1,92 +0,47 @@
var _path = require("path");
var _fs = require("fs");
var _child_process = require("child_process");
var _format = require("dr-js/library/common/format");
var _utils__ = require("dr-js/library/common/data/__utils__");
var _File = require("dr-js/library/node/file/File");
var _Modify = require("dr-js/library/node/file/Modify");

function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }

const GET_INITIAL_PACKAGE_INFO = () => ({
  packageJSON: {},
  exportFilePairList: [],
  installFilePairList: []
});

const loadPackage = (packagePath, packageInfo = GET_INITIAL_PACKAGE_INFO(), loadedSet = new Set()) => {
  const packageFile = packagePath.endsWith('.json') ? packagePath : (0, _path.join)(packagePath, 'package.json');
  if (packagePath.endsWith('.json')) packagePath = (0, _path.dirname)(packagePath);
  if (loadedSet.has(packageFile)) return packageInfo;
  loadedSet.add(packageFile);
  const _require = require(packageFile),
    {
      IMPORT: importList,
      EXPORT: exportList,
      INSTALL: installList
    } = _require,
    mergePackageJSON = _objectWithoutProperties(_require, ["IMPORT", "EXPORT", "INSTALL"]);
  const {
    packageJSON,
    exportFilePairList,
    installFilePairList
  } = packageInfo;
  importList && importList.forEach(importPackagePath => loadPackage((0, _path.resolve)(packagePath, importPackagePath), packageInfo, loadedSet));
  console.log(`[loadPackage] load: ${packageFile}`);
  installList && installList.forEach(filePath => installFilePairList.push(parseResourcePath(filePath, packagePath)));
  exportList && exportList.forEach(filePath => exportFilePairList.push(parseResourcePath(filePath, packagePath)));
  mergePackageJSON && (0, _utils__.objectMergeDeep)(packageJSON, mergePackageJSON);
  return packageInfo;
};

const parseResourcePath = (resourcePath, packagePath) => typeof resourcePath === 'object' ? [(0, _path.resolve)(packagePath, resourcePath.from), resourcePath.to] : [(0, _path.resolve)(packagePath, resourcePath), resourcePath];

const PACKAGE_KEY_SORT_REQUIRED = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies', 'bundledDependencies'];
const PACKAGE_KEY_ORDER = ['private', 'name', 'version', 'description', 'author', 'contributors', 'license', 'keywords', 'repository', 'homepage', 'bugs', 'os', 'cpu', 'engines', 'engineStrict', 'preferGlobal', 'main', 'bin', 'man', 'files', 'directories', 'scripts', 'config', 'publishConfig', ...PACKAGE_KEY_SORT_REQUIRED];

const writePackageJSON = async (packageJSON, path) => {
  PACKAGE_KEY_SORT_REQUIRED.forEach(key => {
    packageJSON[key] && (0, _utils__.objectSortKey)(packageJSON[key]);
  });
  const jsonFileStringList = Object.keys(packageJSON).sort((a, b) => PACKAGE_KEY_ORDER.indexOf(a) - PACKAGE_KEY_ORDER.indexOf(b)).map(key => (0, _format.stringIndentLine)(`${JSON.stringify(key)}: ${JSON.stringify(packageJSON[key], null, 2)}`));
  const packageBuffer = Buffer.from(`{\n${jsonFileStringList.join(',\n')}\n}\n`);
  (0, _fs.writeFileSync)(path, packageBuffer);
  console.log(`[writePackageJSON] ${path} [${(0, _format.binary)(packageBuffer.length)}B]`);
};

const doPack = async ({
  pathInput,
  pathOutput,
  outputName,
  outputVersion,
  outputDescription,
  isPublish,
  isPublishDev
}) => {
  const pathOutputInstall = (0, _path.resolve)(pathOutput, 'install');
  const {
    packageJSON,
    exportFilePairList,
    installFilePairList
  } = loadPackage(pathInput);
  if (outputName) packageJSON.name = outputName;
  if (outputVersion) packageJSON.version = outputVersion;
  if (outputDescription) packageJSON.description = outputDescription;
  await _Modify.modify.delete(pathOutput).catch(() => {});
  await (0, _File.createDirectory)(pathOutput);
  await (0, _File.createDirectory)(pathOutputInstall);
  await writePackageJSON(packageJSON, (0, _path.join)(pathOutput, 'package.json'));
  for (const [source, targetRelative] of exportFilePairList) await _Modify.modify.copy(source, (0, _path.join)(pathOutput, targetRelative));
  for (const [source, targetRelative] of installFilePairList) await _Modify.modify.copy(source, (0, _path.join)(pathOutputInstall, targetRelative));
  (0, _child_process.execSync)('npm pack', {
    cwd: pathOutput,
    stdio: 'inherit',
    shell: true
  });
  const outputFileName = `${packageJSON.name}-${packageJSON.version}.tgz`;
  console.log(`done pack: ${outputFileName} [${(0, _format.binary)((0, _fs.statSync)((0, _path.join)(pathOutput, outputFileName)).size)}B]`);
  isPublish && (0, _child_process.execSync)('npm publish', {
    cwd: pathOutput,
    stdio: 'inherit',
    shell: true
  });
  isPublishDev && (0, _child_process.execSync)('npm publish --tag dev', {
    cwd: pathOutput,
    stdio: 'inherit',
    shell: true
  });
};
exports.doPack = doPack;

var e=require("path")
var i=require("fs")
var r=require("child_process")
var o=require("dr-js/library/common/format")
var t=require("dr-js/library/common/data/__utils__")
var n=require("dr-js/library/node/file/File")
var s=require("dr-js/library/node/file/Modify")
const a=(i,r={packageJSON:{},exportFilePairList:[],installFilePairList:[]},o=new Set)=>{const n=i.endsWith(".json")?i:(0,e.join)(i,"package.json")
i.endsWith(".json")&&(i=(0,e.dirname)(i))
if(o.has(n))return r
o.add(n)
const s=require(n),{IMPORT:l,EXPORT:p,INSTALL:d}=s,u=function(e,i){if(null==e)return{}
var r={}
var o=Object.keys(e)
var t,n
for(n=0;n<o.length;n++){t=o[n]
i.indexOf(t)>=0||(r[t]=e[t])}if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e)
for(n=0;n<s.length;n++){t=s[n]
i.indexOf(t)>=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}}return r}(s,["IMPORT","EXPORT","INSTALL"])
const{packageJSON:f,exportFilePairList:y,installFilePairList:h}=r
l&&l.forEach(t=>a((0,e.resolve)(i,t),r,o))
console.log(`[loadPackage] load: ${n}`)
d&&d.forEach(e=>h.push(c(e,i)))
p&&p.forEach(e=>y.push(c(e,i)))
u&&(0,t.objectMergeDeep)(f,u)
return r}
const c=(i,r)=>"object"==typeof i?[(0,e.resolve)(r,i.from),i.to]:[(0,e.resolve)(r,i),i]
const l=["dependencies","devDependencies","peerDependencies","optionalDependencies","bundledDependencies"]
const p=["private","name","version","description","author","contributors","license","keywords","repository","homepage","bugs","os","cpu","engines","engineStrict","preferGlobal","main","bin","man","files","directories","scripts","config","publishConfig",...l]
exports.doPack=(async({pathInput:c,pathOutput:d,outputName:u,outputVersion:f,outputDescription:y,isPublish:h,isPublishDev:b})=>{const g=(0,e.resolve)(d,"install")
const{packageJSON:m,exportFilePairList:v,installFilePairList:j}=a(c)
u&&(m.name=u)
f&&(m.version=f)
y&&(m.description=y)
await s.modify.delete(d).catch(()=>{})
await(0,n.createDirectory)(d)
await(0,n.createDirectory)(g)
await(async(e,r)=>{l.forEach(i=>{e[i]&&(0,t.objectSortKey)(e[i])})
const n=Object.keys(e).sort((e,i)=>p.indexOf(e)-p.indexOf(i)).map(i=>(0,o.stringIndentLine)(`${JSON.stringify(i)}: ${JSON.stringify(e[i],null,2)}`))
const s=Buffer.from(`{\n${n.join(",\n")}\n}\n`);(0,i.writeFileSync)(r,s)
console.log(`[writePackageJSON] ${r} [${(0,o.binary)(s.length)}B]`)})(m,(0,e.join)(d,"package.json"))
for(const[i,r]of v)await s.modify.copy(i,(0,e.join)(d,r))
for(const[i,r]of j)await s.modify.copy(i,(0,e.join)(g,r));(0,r.execSync)("npm pack",{cwd:d,stdio:"inherit",shell:!0})
const O=`${m.name}-${m.version}.tgz`
console.log(`done pack: ${O} [${(0,o.binary)((0,i.statSync)((0,e.join)(d,O)).size)}B]`)
h&&(0,r.execSync)("npm publish",{cwd:d,stdio:"inherit",shell:!0})
b&&(0,r.execSync)("npm publish --tag dev",{cwd:d,stdio:"inherit",shell:!0})})
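The IMPORT/EXPORT/INSTALL keys destructured in loadPackage come from the input package.json itself; a hypothetical input (all names and paths illustrative) could look like:

const samplePackageJSON = {
  IMPORT: [ '../base' ], // more package.json files to load and merge first, resolved recursively
  EXPORT: [ 'README.md', { from: 'source/index.js', to: 'index.js' } ], // copied into pathOutput
  INSTALL: [ 'script/install.js' ], // copied into `${pathOutput}/install`
  name: 'sample-package', // every remaining key is deep-merged into the output package.json
  version: '0.0.1'
}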
@@ -1,34 +0,16 @@
const loadEnvKey = key => {
  try {
    return JSON.parse(process.env[key]);
  } catch (error) {
    return null;
  }
};
exports.loadEnvKey = loadEnvKey;

const saveEnvKey = (key, value) => {
  try {
    process.env[key] = JSON.stringify(value);
  } catch (error) {}
};
exports.saveEnvKey = saveEnvKey;

const syncEnvKey = (key, defaultValue) => {
  const value = loadEnvKey(key) || defaultValue;
  saveEnvKey(key, value);
  return value;
};
exports.syncEnvKey = syncEnvKey;

const __VERBOSE__ = syncEnvKey('__DEV_VERBOSE__', process.argv.includes('verbose'));
exports.__VERBOSE__ = __VERBOSE__;

const checkFlag = (flagList, checkFlagList) => flagList.find(flag => checkFlagList.includes(flag));
exports.checkFlag = checkFlag;

const argvFlag = (...checkFlagList) => checkFlag(process.argv, checkFlagList);
exports.argvFlag = argvFlag;

const runMain = (main, logger, ...args) => main(logger, ...args).then(() => {
  logger.padLog(`done`);
}, error => {
  logger.padLog(`error`);
  console.warn(error);
  process.exit(-1);
});
exports.runMain = runMain;

const e=e=>{try{return JSON.parse(process.env[e])}catch(e){return null}}
exports.loadEnvKey=e
const r=(e,r)=>{try{process.env[e]=JSON.stringify(r)}catch(e){}}
exports.saveEnvKey=r
const s=(s,o)=>{const t=e(s)||o
r(s,t)
return t}
exports.syncEnvKey=s
const o=s("__DEV_VERBOSE__",process.argv.includes("verbose"))
exports.__VERBOSE__=o
const t=(e,r)=>e.find(e=>r.includes(e))
exports.checkFlag=t
exports.argvFlag=((...e)=>t(process.argv,e))
exports.runMain=((e,r,...s)=>e(r,...s).then(()=>{r.padLog("done")},e=>{r.padLog("error")
console.warn(e)
process.exit(-1)}))
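A minimal sketch of runMain usage (the require path is assumed): it runs the async main, then calls `padLog('done')` on success, or logs the error and exits with -1:

const { runMain, argvFlag } = require('./__utils__') // path assumed
runMain(
  async (logger) => { logger.padLog(`verbose: ${Boolean(argvFlag('verbose'))}`) },
  { padLog: (...args) => console.log('##', ...args) } // any logger with padLog works here
)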
@@ -1,73 +0,29 @@
var _fs = require("fs");
var _child_process = require("child_process");
var _format = require("dr-js/library/common/format");
var _File = require("dr-js/library/node/file/File");
var _Modify = require("dr-js/library/node/file/Modify");

const initOutput = async ({
  fromRoot,
  fromOutput,
  logger,
  deleteKeyList = ['private', 'scripts', 'engines', 'devDependencies'],
  copyPathList = ['LICENSE', 'README.md']
}) => {
  logger.padLog('reset output');
  await _Modify.modify.delete(fromOutput()).catch(() => {});
  await (0, _File.createDirectory)(fromOutput());
  logger.padLog(`init output package.json`);
  const packageJSON = require(fromRoot('package.json'));
  for (const deleteKey of deleteKeyList) delete packageJSON[deleteKey];
  logger.log(`dropped ${JSON.stringify(deleteKeyList)} from package.json`);
  (0, _fs.writeFileSync)(fromOutput('package.json'), JSON.stringify(packageJSON));
  logger.padLog(`init output file from root: ${JSON.stringify(copyPathList)}`);
  for (const copyPath of copyPathList) await _Modify.modify.copy(fromRoot(copyPath), fromOutput(copyPath));
  return packageJSON;
};
exports.initOutput = initOutput;

const packOutput = async ({
  fromRoot,
  fromOutput,
  logger
}) => {
  const execOptionOutput = {
    cwd: fromOutput(),
    stdio: 'inherit',
    shell: true
  };
  logger.padLog('run pack output');
  (0, _child_process.execSync)('npm pack', execOptionOutput);
  logger.padLog('move to root path');
  const packageJSON = require(fromOutput('package.json'));
  const packName = `${packageJSON.name}-${packageJSON.version}.tgz`;
  await _Modify.modify.move(fromOutput(packName), fromRoot(packName));
  logger.log(`pack size: ${(0, _format.binary)((0, _fs.statSync)(fromRoot(packName)).size)}B`);
};
exports.packOutput = packOutput;

const publishOutput = async ({
  flagList,
  packageJSON,
  fromOutput,
  onPublish = () => (0, _child_process.execSync)('npm publish --tag latest', {
    cwd: fromOutput(),
    stdio: 'inherit',
    shell: true
  }),
  onPublishDev = () => (0, _child_process.execSync)('npm publish --tag dev', {
    cwd: fromOutput(),
    stdio: 'inherit',
    shell: true
  }),
  logger
}) => {
  if (flagList.includes('publish-dev')) {
    logger.padLog(`publish-dev: ${packageJSON.version}`);
    if (!REGEXP_PUBLISH_VERSION_DEV.test(packageJSON.version)) throw new Error(`[publish-dev] invalid version: ${packageJSON.version}`);
    await onPublishDev();
  } else if (flagList.includes('publish')) {
    logger.padLog(`publish: ${packageJSON.version}`);
    if (!REGEXP_PUBLISH_VERSION.test(packageJSON.version)) throw new Error(`[publish] invalid version: ${packageJSON.version}`);
    await onPublish();
  }
};
exports.publishOutput = publishOutput;

const REGEXP_PUBLISH_VERSION = /^\d+\.\d+\.\d+$/;
const REGEXP_PUBLISH_VERSION_DEV = /^\d+\.\d+\.\d+-dev\.\d+$/;

var e=require("fs")
var t=require("child_process")
var i=require("dr-js/library/common/format")
var o=require("dr-js/library/node/file/File")
var r=require("dr-js/library/node/file/Modify")
exports.initOutput=(async({fromRoot:t,fromOutput:i,logger:s,deleteKeyList:a=["private","scripts","engines","devDependencies"],copyPathList:n=["LICENSE","README.md"]})=>{s.padLog("reset output")
await r.modify.delete(i()).catch(()=>{})
await(0,o.createDirectory)(i())
s.padLog("init output package.json")
const p=require(t("package.json"))
for(const e of a)delete p[e]
s.log(`dropped ${JSON.stringify(a)} from package.json`);(0,e.writeFileSync)(i("package.json"),JSON.stringify(p))
s.padLog(`init output file from root: ${JSON.stringify(n)}`)
for(const e of n)await r.modify.copy(t(e),i(e))
return p})
exports.packOutput=(async({fromRoot:o,fromOutput:s,logger:a})=>{const n={cwd:s(),stdio:"inherit",shell:!0}
a.padLog("run pack output");(0,t.execSync)("npm pack",n)
a.padLog("move to root path")
const p=require(s("package.json"))
const u=`${p.name}-${p.version}.tgz`
await r.modify.move(s(u),o(u))
a.log(`pack size: ${(0,i.binary)((0,e.statSync)(o(u)).size)}B`)})
exports.publishOutput=(async({flagList:e,packageJSON:i,fromOutput:o,onPublish:r=(()=>(0,t.execSync)("npm publish --tag latest",{cwd:o(),stdio:"inherit",shell:!0})),onPublishDev:n=(()=>(0,t.execSync)("npm publish --tag dev",{cwd:o(),stdio:"inherit",shell:!0})),logger:p})=>{if(e.includes("publish-dev")){p.padLog(`publish-dev: ${i.version}`)
if(!a.test(i.version))throw new Error(`[publish-dev] invalid version: ${i.version}`)
await n()}else if(e.includes("publish")){p.padLog(`publish: ${i.version}`)
if(!s.test(i.version))throw new Error(`[publish] invalid version: ${i.version}`)
await r()}})
const s=/^\d+\.\d+\.\d+$/
const a=/^\d+\.\d+\.\d+-dev\.\d+$/
@@ -1,20 +0,6 @@
const {
  execSync
} = require('child_process');

const tryExec = (command, option) => {
  try {
    return execSync(command, option).toString();
  } catch (error) {
    console.warn(`[tryExec] failed for: ${command}, error: ${error}`);
    return '';
  }
};
exports.tryExec = tryExec;

const getGitBranch = () => tryExec('git symbolic-ref --short HEAD', {
  stdio: 'pipe'
}).replace('\n', '');
exports.getGitBranch = getGitBranch;

const getGitCommitHash = () => tryExec('git log -1 --format="%H"', {
  stdio: 'pipe'
}).replace('\n', '');
exports.getGitCommitHash = getGitCommitHash;

const{execSync:e}=require("child_process")
const t=(t,r)=>{try{return e(t,r).toString()}catch(e){console.warn(`[tryExec] failed for: ${t}, error: ${e}`)
return""}}
exports.tryExec=t
exports.getGitBranch=(()=>t("git symbolic-ref --short HEAD",{stdio:"pipe"}).replace("\n",""))
exports.getGitCommitHash=(()=>t('git log -1 --format="%H"',{stdio:"pipe"}).replace("\n",""))
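Usage sketch (the require path is assumed); since tryExec swallows errors and returns '', both getters degrade to an empty string outside a git checkout:

const { getGitBranch, getGitCommitHash } = require('./exec') // path assumed
console.log(`branch: ${getGitBranch()}, commit: ${getGitCommitHash()}`)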
@@ -1,87 +0,28 @@
const toExportName = name => `${name[0].toUpperCase()}${name.slice(1)}`;
const isLeadingUpperCase = name => name.charAt(0) >= 'A' && name.charAt(0) <= 'Z';
const compareFileName = ({
  name: A
}, {
  name: B
}) => (isLeadingUpperCase(A) ? A.charCodeAt(0) - 255 : A.charCodeAt(0)) - (isLeadingUpperCase(B) ? B.charCodeAt(0) - 255 : B.charCodeAt(0));

const generateIndexScript = ({
  sourceRouteMap
}) => {
  const indexScriptMap = {};
  Object.values(sourceRouteMap).forEach(({
    routeList,
    directoryList,
    fileList
  }) => {
    const textList = [];
    const importList = [];
    directoryList.forEach(name => {
      const exportName = toExportName(name);
      textList.push(`import * as ${exportName} from './${name}'`);
      importList.push(exportName);
    });
    fileList.sort(compareFileName).map(({
      name,
      exportList
    }) => {
      const shouldMergeExport = directoryList.length || isLeadingUpperCase(name);
      if (shouldMergeExport) {
        const exportName = toExportName(name);
        textList.push(`import * as ${exportName} from './${name}'`);
        importList.push(exportName);
      } else {
        textList.push(`export { ${exportList.join(', ')} } from './${name}'`);
      }
    });
    importList.length && textList.push(`export { ${importList.join(', ')} }`);
    indexScriptMap[[...routeList, 'index.js'].join('/')] = textList.join('\n');
  });
  return indexScriptMap;
};
exports.generateIndexScript = generateIndexScript;

const HOIST_LIST_KEY = '@@|hoist';
exports.HOIST_LIST_KEY = HOIST_LIST_KEY;
const EXPORT_LIST_KEY = '@@|export';
exports.EXPORT_LIST_KEY = EXPORT_LIST_KEY;
const EXPORT_HOIST_LIST_KEY = '@@|export-hoist';
exports.EXPORT_HOIST_LIST_KEY = EXPORT_HOIST_LIST_KEY;

const generateExportInfo = ({
  sourceRouteMap
}) => {
  const exportInfoMap = {};
  const getExportInfo = (...routeList) => {
    const key = routeList.join('/');
    if (!exportInfoMap[key]) exportInfoMap[key] = {};
    return exportInfoMap[key];
  };
  Object.values(sourceRouteMap).forEach(({
    routeList,
    directoryList,
    fileList
  }) => {
    const exportInfo = getExportInfo(...routeList);
    directoryList.forEach(name => {
      exportInfo[toExportName(name)] = getExportInfo(...routeList, name);
    });
    fileList.sort(compareFileName).map(({
      name,
      exportList
    }) => {
      const shouldMergeExport = directoryList.length || isLeadingUpperCase(name);
      if (shouldMergeExport) {
        exportInfo[toExportName(name)] = {
          [EXPORT_LIST_KEY]: exportList
        };
      } else {
        exportInfo[name] = {
          [HOIST_LIST_KEY]: exportList
        };
        exportInfo[EXPORT_HOIST_LIST_KEY] = [...(exportInfo[EXPORT_HOIST_LIST_KEY] || []), ...exportList];
      }
      getExportInfo(...routeList, name)[EXPORT_LIST_KEY] = exportList;
    });
  });
  return exportInfoMap;
};
exports.generateExportInfo = generateExportInfo;

const t=t=>`${t[0].toUpperCase()}${t.slice(1)}`
const e=t=>t.charAt(0)>="A"&&t.charAt(0)<="Z"
const o=({name:t},{name:o})=>(e(t)?t.charCodeAt(0)-255:t.charCodeAt(0))-(e(o)?o.charCodeAt(0)-255:o.charCodeAt(0))
exports.generateIndexScript=(({sourceRouteMap:r})=>{const s={}
Object.values(r).forEach(({routeList:r,directoryList:p,fileList:n})=>{const c=[]
const i=[]
p.forEach(e=>{const o=t(e)
c.push(`import * as ${o} from './${e}'`)
i.push(o)})
n.sort(o).map(({name:o,exportList:r})=>{if(p.length||e(o)){const e=t(o)
c.push(`import * as ${e} from './${o}'`)
i.push(e)}else c.push(`export { ${r.join(", ")} } from './${o}'`)})
i.length&&c.push(`export { ${i.join(", ")} }`)
s[[...r,"index.js"].join("/")]=c.join("\n")})
return s})
exports.HOIST_LIST_KEY="@@|hoist"
exports.EXPORT_LIST_KEY="@@|export"
exports.EXPORT_HOIST_LIST_KEY="@@|export-hoist"
exports.generateExportInfo=(({sourceRouteMap:r})=>{const s={}
const p=(...t)=>{const e=t.join("/")
s[e]||(s[e]={})
return s[e]}
Object.values(r).forEach(({routeList:r,directoryList:s,fileList:n})=>{const c=p(...r)
s.forEach(e=>{c[t(e)]=p(...r,e)})
n.sort(o).map(({name:o,exportList:n})=>{if(s.length||e(o))c[t(o)]={"@@|export":n}
else{c[o]={"@@|hoist":n}
c["@@|export-hoist"]=[...c["@@|export-hoist"]||[],...n]}p(...r,o)["@@|export"]=n})})
return s})
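To make the shape concrete, a minimal sketch of generateIndexScript input and output (route and file names are illustrative, the require path is assumed):

const { generateIndexScript } = require('./generateIndexScript') // path assumed
const indexScriptMap = generateIndexScript({
  sourceRouteMap: {
    'source': {
      routeList: [ 'source' ],
      directoryList: [ 'common' ],
      fileList: [ { name: 'env', exportList: [ 'getEnv' ] } ]
    }
  }
})
console.log(indexScriptMap[ 'source/index.js' ])
// import * as Common from './common'
// import * as Env from './env'
// export { Common, Env }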
@@ -1,65 +0,17 @@
var _path = require("path");
var _fs = require("fs");
var _babylon = require("babylon");
var _File = require("dr-js/library/node/file/File");

const getExportListFromParsedAST = (fileString, sourceFilename, babylonPluginList) => {
  const resultAST = (0, _babylon.parse)(fileString, {
    sourceFilename,
    sourceType: 'module',
    plugins: babylonPluginList || ['objectRestSpread', 'classProperties', 'exportDefaultFrom', 'exportNamespaceFrom', 'jsx']
  });
  const exportNodeList = resultAST.program.body.filter(({
    type
  }) => type === 'ExportNamedDeclaration');
  return [].concat(...exportNodeList.map(({
    specifiers,
    declaration
  }) => declaration ? declaration.declarations.map(({
    id: {
      name
    }
  }) => name) : specifiers.map(({
    exported: {
      name
    }
  }) => name)));
};

const createExportParser = ({
  babylonPluginList,
  logger
}) => {
  const sourceRouteMap = {};
  const getRoute = routeList => {
    const key = routeList.join('/');
    if (!sourceRouteMap[key]) sourceRouteMap[key] = {
      routeList,
      directoryList: [],
      fileList: []
    };
    return sourceRouteMap[key];
  };
  return {
    parseExport: async path => {
      const fileType = await (0, _File.getPathType)(path);
      const routeList = path.split(_path.sep);
      const name = routeList.pop();
      if (_File.FILE_TYPE.Directory === fileType) {
        getRoute(routeList).directoryList.push(name);
        logger.devLog(`[directory] ${path}`);
      } else if (_File.FILE_TYPE.File === fileType && name.endsWith('.js') && !name.endsWith('.test.js')) {
        const fileString = (0, _fs.readFileSync)(path, {
          encoding: 'utf8'
        });
        const exportList = getExportListFromParsedAST(fileString, path, babylonPluginList);
        getRoute(routeList).fileList.push({
          name: name.slice(0, -3),
          exportList
        });
        logger.devLog(`[file] ${path}`);
        logger.devLog(`  export [${exportList.length}]: ${exportList.join(', ')}`);
      } else logger.devLog(`[skipped] ${path} (${fileType})`);
    },
    getSourceRouteMap: () => sourceRouteMap
  };
};
exports.createExportParser = createExportParser;

var e=require("path")
var r=require("fs")
var t=require("babylon")
var o=require("dr-js/library/node/file/File")
exports.createExportParser=(({babylonPluginList:s,logger:i})=>{const a={}
const p=e=>{const r=e.join("/")
a[r]||(a[r]={routeList:e,directoryList:[],fileList:[]})
return a[r]}
return{parseExport:async a=>{const n=await(0,o.getPathType)(a)
const c=a.split(e.sep)
const l=c.pop()
if(o.FILE_TYPE.Directory===n){p(c).directoryList.push(l)
i.devLog(`[directory] ${a}`)}else if(o.FILE_TYPE.File===n&&l.endsWith(".js")&&!l.endsWith(".test.js")){const e=((e,r,o)=>{const s=(0,t.parse)(e,{sourceFilename:r,sourceType:"module",plugins:o||["objectRestSpread","classProperties","exportDefaultFrom","exportNamespaceFrom","jsx"]}).program.body.filter(({type:e})=>"ExportNamedDeclaration"===e)
return[].concat(...s.map(({specifiers:e,declaration:r})=>r?r.declarations.map(({id:{name:e}})=>e):e.map(({exported:{name:e}})=>e)))})((0,r.readFileSync)(a,{encoding:"utf8"}),a,s)
p(c).fileList.push({name:l.slice(0,-3),exportList:e})
i.devLog(`[file] ${a}`)
i.devLog(`  export [${e.length}]: ${e.join(", ")}`)}else i.devLog(`[skipped] ${a} (${n})`)},getSourceRouteMap:()=>a}})
@@ -1,29 +0,12 @@
var _fs = require("fs");
var _format = require("dr-js/library/common/format");

const wrapFileProcessor = ({
  processor,
  logger
}) => async filePath => {
  const inputString = (0, _fs.readFileSync)(filePath, 'utf8');
  const outputString = await processor(inputString, filePath);
  if (inputString === outputString) {
    logger.log(`process skipped ${filePath}`);
    return 0;
  }
  const {
    size: inputSize
  } = (0, _fs.statSync)(filePath);
  outputString ? (0, _fs.writeFileSync)(filePath, outputString) : (0, _fs.unlinkSync)(filePath);
  const {
    size: outputSize
  } = outputString ? (0, _fs.statSync)(filePath) : {
    size: 0
  };
  logger.devLog(`∆${(outputSize / inputSize).toFixed(2)}(${(0, _format.binary)(outputSize - inputSize)}B)`, `${(0, _format.binary)(inputSize)}B → ${(0, _format.binary)(outputSize)}B`, `${filePath}`);
  return outputSize - inputSize;
};
exports.wrapFileProcessor = wrapFileProcessor;

const fileProcessorBabel = inputString => inputString.replace(/['"]use strict['"];?\s*/g, '').replace(/Object\.defineProperty\(exports,\s*['"]__esModule['"],\s*{\s*value:\s*(true|!0)\s*}\);?\s*/g, '').replace(/(exports\.\w+\s*=\s*)+(undefined|void 0);?\s*/g, '').replace(/[\n\r]{2,}/g, '\n').replace(/^[\n\r]+/, '');
exports.fileProcessorBabel = fileProcessorBabel;

const fileProcessorWebpack = inputString => inputString.replace(/function\(\){return\s*([\w$]+)}/g, '()=>$1');
exports.fileProcessorWebpack = fileProcessorWebpack;

var e=require("fs")
var r=require("dr-js/library/common/format")
exports.wrapFileProcessor=(({processor:s,logger:o})=>async t=>{const c=(0,e.readFileSync)(t,"utf8")
const i=await s(c,t)
if(c===i){o.log(`process skipped ${t}`)
return 0}const{size:n}=(0,e.statSync)(t)
i?(0,e.writeFileSync)(t,i):(0,e.unlinkSync)(t)
const{size:a}=i?(0,e.statSync)(t):{size:0}
o.devLog(`∆${(a/n).toFixed(2)}(${(0,r.binary)(a-n)}B)`,`${(0,r.binary)(n)}B → ${(0,r.binary)(a)}B`,`${t}`)
return a-n})
exports.fileProcessorBabel=(e=>e.replace(/['"]use strict['"];?\s*/g,"").replace(/Object\.defineProperty\(exports,\s*['"]__esModule['"],\s*{\s*value:\s*(true|!0)\s*}\);?\s*/g,"").replace(/(exports\.\w+\s*=\s*)+(undefined|void 0);?\s*/g,"").replace(/[\n\r]{2,}/g,"\n").replace(/^[\n\r]+/,""))
exports.fileProcessorWebpack=(e=>e.replace(/function\(\){return\s*([\w$]+)}/g,"()=>$1"))
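fileProcessorBabel only strips babel boilerplate via the regexps above; a before/after sketch (input constructed by hand, require path assumed):

const { fileProcessorBabel } = require('./fileProcessor') // path assumed
const input = [
  '"use strict";',
  'Object.defineProperty(exports, "__esModule", { value: true });',
  'exports.answer = undefined;',
  '',
  'const answer = 42;',
  'exports.answer = answer;'
].join('\n')
console.log(fileProcessorBabel(input))
// const answer = 42;
// exports.answer = answer;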
@@ -1,15 +0,6 @@
var _utils__ = require("./__utils__");

const getLogger = (title = 'dev-dep', padWidth = 160) => {
  const envTitle = (0, _utils__.loadEnvKey)('__DEV_LOGGER_TITLE__');
  title = envTitle ? `${title}|${envTitle}` : title;
  (0, _utils__.saveEnvKey)('__DEV_LOGGER_TITLE__', title);
  const padTitle = ` [${title}]`;
  const log = (...args) => console.log(`- ${args.join(' ')}`);
  const devLog = _utils__.__VERBOSE__ ? log : () => {};
  return {
    padLog: (...args) => console.log(`\n## ${args.join(' ')} `.padEnd(padWidth - padTitle.length, '-') + padTitle),
    log,
    devLog
  };
};
exports.getLogger = getLogger;

var e=require("./__utils__")
exports.getLogger=((o="dev-dep",_=160)=>{const t=(0,e.loadEnvKey)("__DEV_LOGGER_TITLE__")
o=t?`${o}|${t}`:o;(0,e.saveEnvKey)("__DEV_LOGGER_TITLE__",o)
const n=` [${o}]`
const r=(...e)=>console.log(`- ${e.join(" ")}`)
return{padLog:(...e)=>console.log(`\n## ${e.join(" ")} `.padEnd(_-n.length,"-")+n),log:r,devLog:e.__VERBOSE__?r:()=>{}}})
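A usage sketch for getLogger (require path assumed, width shortened for display):

const { getLogger } = require('./logger') // path assumed
const logger = getLogger('demo', 60)
logger.log('plain line') // - plain line
logger.padLog('step') // prints roughly: "## step -------------------------------------------- [demo]"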
@@ -1,68 +0,15 @@
var _fs = require("fs");
var _uglifyEs = _interopRequireDefault(require("uglify-es"));
var _time = require("dr-js/library/common/time");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const ecma = 8;
const toplevel = true;

const MODULE_OPTION = {
  ecma,
  toplevel,
  parse: {
    ecma
  },
  compress: {
    ecma,
    toplevel,
    join_vars: false,
    sequences: false
  },
  mangle: false,
  output: {
    ecma,
    beautify: true,
    indent_level: 2,
    width: 240
  },
  sourceMap: false
};
exports.MODULE_OPTION = MODULE_OPTION;

const LIBRARY_OPTION = Object.assign({}, MODULE_OPTION, {
  mangle: {
    toplevel
  },
  output: {
    ecma,
    beautify: false,
    semicolons: false
  }
});
exports.LIBRARY_OPTION = LIBRARY_OPTION;

const minifyWithUglifyEs = ({
  filePath,
  option,
  logger
}) => {
  const timeStart = (0, _time.clock)();
  const scriptSource = (0, _fs.readFileSync)(filePath, {
    encoding: 'utf8'
  });
  const {
    error,
    code: scriptOutput
  } = _uglifyEs.default.minify(scriptSource, option);
  if (error) {
    logger.log(`[minifyWithUglifyEs] failed to minify file: ${filePath}`);
    throw error;
  }
  (0, _fs.writeFileSync)(filePath, scriptOutput);
  const timeEnd = (0, _time.clock)();
  const sizeSource = Buffer.byteLength(scriptSource);
  const sizeOutput = Buffer.byteLength(scriptOutput);
  return {
    sizeSource,
    sizeOutput,
    timeStart,
    timeEnd
  };
};
exports.minifyWithUglifyEs = minifyWithUglifyEs;

var e=require("fs")
var t=(i=require("uglify-es"))&&i.__esModule?i:{default:i}
var i
var o=require("dr-js/library/common/time")
const r={ecma:8,toplevel:!0,parse:{ecma:8},compress:{ecma:8,toplevel:!0,join_vars:!1,sequences:!1},mangle:!1,output:{ecma:8,beautify:!0,indent_level:2,width:240},sourceMap:!1}
exports.MODULE_OPTION=r
const s=Object.assign({},r,{mangle:{toplevel:!0},output:{ecma:8,beautify:!1,semicolons:!1}})
exports.LIBRARY_OPTION=s
exports.minifyWithUglifyEs=(({filePath:i,option:r,logger:s})=>{const l=(0,o.clock)()
const n=(0,e.readFileSync)(i,{encoding:"utf8"})
const{error:c,code:a}=t.default.minify(n,r)
if(c){s.log(`[minifyWithUglifyEs] failed to minify file: ${i}`)
throw c}(0,e.writeFileSync)(i,a)
const u=(0,o.clock)()
return{sizeSource:Buffer.byteLength(n),sizeOutput:Buffer.byteLength(a),timeStart:l,timeEnd:u}})
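A minify usage sketch (require path, file path, and logger are illustrative); the file is rewritten in place, and the return value carries sizes plus clock timestamps for reporting:

const { minifyWithUglifyEs, LIBRARY_OPTION } = require('./minify') // path assumed
const stat = minifyWithUglifyEs({
  filePath: './output/index.js', // illustrative
  option: LIBRARY_OPTION,
  logger: { log: console.log }
})
console.log(`${stat.sizeSource}B -> ${stat.sizeOutput}B in ${(stat.timeEnd - stat.timeStart).toFixed(2)}ms`)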
@@ -1,45 +0,26 @@
var _webpack = _interopRequireDefault(require("webpack"));
var _format = require("dr-js/library/common/format");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const compileWithWebpack = async ({
  config,
  isWatch,
  logger
}) => {
  const compiler = (0, _webpack.default)(config);
  const logStats = stats => {
    const {
      compilation: {
        assets
      },
      startTime,
      endTime
    } = stats;
    isWatch && logger.padLog(`watch`);
    Object.entries(assets).forEach(([name, sourceInfo]) => sourceInfo.emitted && logger.log(`emitted asset: ${name} [${(0, _format.binary)(sourceInfo.size())}B]`));
    logger.log(`compile time: ${(0, _format.time)(endTime - startTime)}`);
  };
  if (isWatch) {
    compiler.watch({
      aggregateTimeout: 300,
      poll: undefined
    }, getStatsCheck(error => logger.log(`error: ${error}`), logStats));
  } else {
    const stats = await new Promise((resolve, reject) => compiler.run(getStatsCheck(reject, resolve)));
    logStats(stats);
  }
};
exports.compileWithWebpack = compileWithWebpack;

const getStatsCheck = (onError, onStats) => (error, stats) => {
  if (error) return onError(error);
  if (stats.hasErrors() || stats.hasWarnings()) {
    const {
      errors = [],
      warnings = []
    } = stats.toJson();
    errors.forEach(message => console.error(message));
    warnings.forEach(message => console.warn(message));
    if (stats.hasErrors()) return onError(new Error('webpack stats Error'));
  }
  onStats(stats);
};

var e=require("fs")
var t=(r=require("webpack"))&&r.__esModule?r:{default:r}
var r
var o=require("dr-js/library/common/format")
var s=require("dr-js/library/node/system/ProcessExitListener")
const a=(e,t)=>(r,o)=>{if(r)return e(r)
if(o.hasErrors()||o.hasWarnings()){const{errors:t=[],warnings:r=[]}=o.toJson()
t.forEach(e=>console.error(e))
r.forEach(e=>console.warn(e))
if(o.hasErrors())return e(new Error("webpack stats Error"))}t(o)}
exports.compileWithWebpack=(async({config:r,isWatch:i,profileOutput:n,logger:c})=>{n&&i&&c.log("[watch] warning: skipped generate profileOutput")
n&&(r.profile=!0)
const l=(0,t.default)(r)
const p=((e,t)=>{const r=({compilation:{assets:r},startTime:s,endTime:a})=>{e&&t.padLog("watch")
Object.entries(r).forEach(([e,r])=>r.emitted&&t.log(`emitted asset: ${e} [${(0,o.binary)(r.size())}B]`))
s&&a&&t.log(`compile time: ${(0,o.time)(a-s)}`)}
return e=>{if(e.compilation)r(e)
else{if(!e.stats){console.warn("[getLogStats] unexpected statData",e)
throw new Error("[getLogStats] unexpected statData")}e.stats.map(r)}}})(i,c)
if(i){c.log("[watch] start")
const{eventType:e,code:t}=await new Promise(e=>{l.watch({aggregateTimeout:300,poll:void 0},a(e=>c.log(`error: ${e}`),p));(0,s.setProcessExitListener)({listenerSync:e})})
c.log(`[watch] exit with eventType: ${e}, code: ${t}`)
return null}const u=await new Promise((e,t)=>l.run(a(t,e)))
p(u)
if(n){(0,e.writeFileSync)(n,JSON.stringify(u.toJson()))
c.log(`generated profileOutput: ${n}`)}return u})
package.json
@@ -1,1 +0,1 @@
{"name":"dev-dep-tool","version":"0.0.14-dev.7","author":"dr-js","license":"MIT","description":"Provide common package devDependencies","keywords":["Dr","Dr-js","JavaScript"],"homepage":"https://github.com/dr-js/dev-dep#readme","repository":{"type":"git","url":"git+https://github.com/dr-js/dev-dep.git"},"bugs":{"url":"https://github.com/dr-js/dev-dep/issues"},"bin":"bin/index.js","dependencies":{"dr-js":"^0.12.0-dev.2"}}
{"name":"dev-dep-tool","version":"0.0.14-dev.8","author":"dr-js","license":"MIT","description":"Provide common package devDependencies","keywords":["Dr","Dr-js","JavaScript"],"repository":"https://github.com/dr-js/dev-dep.git","homepage":"https://github.com/dr-js/dev-dep#readme","bugs":"https://github.com/dr-js/dev-dep/issues","bin":"bin/index.js","sideEffects":false,"dependencies":{"dr-js":"^0.12.0"}}
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
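(In this diff the flagged dynamic require is presumably the computed `require(packageFile)` call in bin/pack.js above, which loads a package.json from a path resolved only at runtime.)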
Minified code
Quality: This package contains minified code. This may be harmless in some cases where minified code is included in packaged libraries, however packages on npm should not minify code.
Found 2 instances in 1 package
No bug tracker
Maintenance: Package does not have a linked bug tracker in package.json.
Found 1 instance in 1 package
No repository
Supply chain risk: Package does not have a linked source code repository. Without this field, a package will have no reference to the location of the source code used to generate the package.
Found 1 instance in 1 package
Shell access
Supply chain risk: This module accesses the system shell. Accessing the system shell increases the risk of executing arbitrary code.
Found 1 instance in 1 package
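(The shell usage corresponds to the `spawnSync('npm', ['outdated'], { ..., shell: true })` call in the outdated check and the `execSync('npm pack' / 'npm publish', { ..., shell: true })` calls in the pack step above.)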
Updated: dr-js@^0.12.0