@arcsine/nodesh
Comparing version 1.5.0 to 1.6.0
Changelog
-----------------------------
## 1.6.0
Added gzip/gunzip support
Added support for glob patterns in file matching
General improvements
## 1.5.0
@@ -5,0 +10,0 @@ $tokens now extracts by pattern rather than by separator.
@@ -1,1 +0,1 @@
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),require("./global"),require("./patch");const path=require("path"),fs=require("fs"),stream_1=require("stream"),register_1=require("./util/register"),helper_1=require("./helper"),stream_2=require("./util/stream");function initialize(){const a=fs.readdirSync(path.resolve(__dirname,"operator")).filter(a=>!a.endsWith(".d.ts")).map(a=>path.resolve(__dirname,"operator",a)).map(require).reduce((a,b)=>[...a,...Object.values(b)],[]);register_1.RegisterUtil.registerOperators(a,Object),register_1.RegisterUtil.registerAsyncable(Object),register_1.RegisterUtil.properties({$iterable(){return stream_2.StreamUtil.readStream(this)}},stream_1.Readable.prototype),register_1.RegisterUtil.properties({async*$iterable(){yield this}},String.prototype);const b=Object.getOwnPropertyDescriptors(helper_1.GlobalHelpers);if(delete b.prototype,Object.defineProperties(globalThis,b),10<parseInt(process.version.replace(/^v/i,"").split(".")[0],10)){const{constructor:a}=async function*(){}();register_1.RegisterUtil.registerThenable(a)}}initialize(); | ||
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),require("./global"),require("./patch");const path=require("path"),fs=require("fs"),stream_1=require("stream"),register_1=require("./util/register"),helper_1=require("./helper"),stream_2=require("./util/stream");function initialize(){const a=fs.readdirSync(path.resolve(__dirname,"operator")).filter(a=>!a.endsWith(".d.ts")).map(a=>path.resolve(__dirname,"operator",a)).map(require).reduce((a,b)=>[...a,...Object.values(b)],[]);register_1.RegisterUtil.registerOperators(a,Object),register_1.RegisterUtil.registerAsyncable(Object),register_1.RegisterUtil.properties({$iterable(){return stream_2.StreamUtil.readStream(this)}},stream_1.Readable.prototype),register_1.RegisterUtil.properties({async*$iterable(){yield this}},String.prototype),register_1.RegisterUtil.properties({async*$iterable(){yield this}},Buffer.prototype);const b=Object.getOwnPropertyDescriptors(helper_1.GlobalHelpers);if(delete b.prototype,Object.defineProperties(globalThis,b),10<parseInt(process.version.replace(/^v/i,"").split(".")[0],10)){const{constructor:a}=async function*(){}();register_1.RegisterUtil.registerThenable(a)}}initialize(); |
@@ -39,2 +39,23 @@ /// <reference types="node" />
$prompt(this: AsyncIterable<string>, input?: Readable, output?: Writable): $AsyncIterable<string>;
/**
 * Compresses inbound binary/text data into a compressed stream of Buffers
 *
 * @example
 * __filename
 *   .$read() // Read current file
 *   .$gzip() // Compress
 *   .$write(`${__filename}.gz`) // Store
 */
$gzip(this: AsyncIterable<string | Buffer>): $AsyncIterable<Buffer>;
/**
 * Decompresses inbound gzip'd data into an uncompressed stream.
 *
 * @example
 * 'Hello World'
 *   .$gzip() // Compress
 *   .$gunzip() // Decompress
 *   .$stdout // Prints 'Hello World'
 */
$gunzip<T extends Buffer>(this: AsyncIterable<T>, mode: 'text'): $AsyncIterable<string>;
$gunzip<T extends Buffer>(this: AsyncIterable<T>, mode: 'binary'): $AsyncIterable<Buffer>;
$gunzip<T extends Buffer>(this: AsyncIterable<T>): $AsyncIterable<Buffer>;
}
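Taken together, the new declarations enable chains like the following. A minimal sketch based on the doc-comment examples above; it assumes `$read` forwards its options (including `mode`) to the underlying stream reader, as the minified source suggests:

```js
// Round-trip a file through gzip: read, compress, write alongside the source.
__filename
  .$read()                     // stream the current file
  .$gzip()                     // compress into a stream of Buffers
  .$write(`${__filename}.gz`); // persist the compressed output

// Later, inflate it back to text via the 'text' overload of $gunzip.
`${__filename}.gz`
  .$read({ mode: 'binary' })   // read raw Buffers
  .$gunzip('text')             // decompress and decode to strings
  .$stdout;                    // print the original contents
```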
@@ -1,1 +0,1 @@
"use strict";Object.defineProperty(exports,"__esModule",{value:!0});const readline=require("readline");class DataOperators{$json(a=!0){return(a?this.$toString():this).$map(a=>JSON.parse(a))}$csv(a){return this.$columns({names:a,sep:/,/})}async*$prompt(a=process.stdin,b=process.stdout){let c;try{c=readline.createInterface({input:a,output:b});for await(const a of this)yield await new Promise(b=>c.question(`${a}\n`,b))}finally{c.close()}}}exports.DataOperators=DataOperators; | ||
"use strict";Object.defineProperty(exports,"__esModule",{value:!0});const readline=require("readline"),zlib=require("zlib"),stream_1=require("../util/stream");class DataOperators{$json(a=!0){return(a?this.$toString():this).$map(a=>JSON.parse(a))}$csv(a){return this.$columns({names:a,sep:/,/})}async*$prompt(a=process.stdin,b=process.stdout){let c;try{c=readline.createInterface({input:a,output:b});for await(const a of this)yield await new Promise(b=>c.question(`${a}\n`,b))}finally{c.close()}}async*$gzip(){const a=zlib.createGzip(),b=this.$stream("binary");yield*stream_1.StreamUtil.readStream(b.pipe(a),{mode:"binary"})}async*$gunzip(a){const b=zlib.createGunzip(),c=this.$stream("binary"),d=c.pipe(b);yield*stream_1.StreamUtil.readStream(d,{mode:a})}}exports.DataOperators=DataOperators; |
@@ -40,3 +40,4 @@ import * as fs from 'fs';
* input sequence type:
* * A `string` which represents a suffix search on file names (e.g. `.csv`)
* * A `string` which represents a file extension (e.g. `.csv`). Will match all files recursively.
* * A `string` which represents a glob pattern search on file names (e.g. `**\/*.csv`).
* * A `RegExp` which represents a file pattern to search on (e.g. `/path\/sub\/.*[.]js/`)
@@ -43,0 +44,0 @@ *
@@ -1,1 +0,1 @@
"use strict";Object.defineProperty(exports,"__esModule",{value:!0});const path=require("path"),stream_1=require("../util/stream"),file_1=require("../util/file");class FileOperators{async*$read(a={}){for await(const b of this)yield*stream_1.StreamUtil.readStream(b,a)}async*$dir(a={base:process.cwd()}){a.base=path.resolve(process.cwd(),a.base||"");for await(const b of this){const c="string"==typeof b?a=>a.endsWith(b):a=>b.test(a);yield*file_1.FileUtil.scanDir({testFile:c},a.base).$map(b=>a.full?b:b.file)}}}exports.FileOperators=FileOperators; | ||
"use strict";Object.defineProperty(exports,"__esModule",{value:!0});const path=require("path"),picomatch=require("picomatch"),stream_1=require("../util/stream"),file_1=require("../util/file");class FileOperators{async*$read(a={}){for await(const b of this)yield*stream_1.StreamUtil.readStream(b,a)}async*$dir(a={base:process.cwd()}){a.base=path.resolve(process.cwd(),a.base||"");for await(const b of this){const c="string"==typeof b?/^[.][a-zA-Z0-9]+$/.test(b)?picomatch(`**/*${b}`):picomatch(b):b.test.bind(b);yield*file_1.FileUtil.scanDir({testFile:a=>c(a)},a.base).$map(b=>a.full?b:b.file)}}}exports.FileOperators=FileOperators; |
@@ -30,3 +30,3 @@ /// <reference types="node" />
*/
static trackStream<T extends Readable | Writable>(stream: T): Promise<any>;
static trackStream<T extends Readable | Writable>(stream: T, withTimer?: boolean): Promise<any>;
/**
@@ -33,0 +33,0 @@ * Get writable stream
@@ -1,1 +0,1 @@
"use strict";Object.defineProperty(exports,"__esModule",{value:!0});const readline=require("readline"),fs=require("fs"),stream_1=require("stream"),async_1=require("./async"),text_1=require("./text");class MemoryStream extends stream_1.Writable{constructor(){super(...arguments),this.store=[]}_write(a,b,c){this.store.push(a),c()}getText(){return Buffer.concat(this.store).toString("utf8")}}class StreamUtil{static memoryWritable(){return new MemoryStream}static toStream(a,b){const c=stream_1.Readable.from(async function*(){for await(const c of a){if(c===void 0){yield;continue}c instanceof Buffer?b&&"binary"!==b?yield text_1.TextUtil.toLine(c):yield c:b&&"text"!==b?yield Buffer.from(text_1.TextUtil.toText(c),"utf8"):yield text_1.TextUtil.toLine(c)}}());return c}static async*readStream(a,b={}){var c;const d=(c=b.mode,null!==c&&void 0!==c?c:"text"),e="string"==typeof a?fs.createReadStream(a,{encoding:"utf8"}):a,f=this.trackStream(e);if("raw"===d){return void(yield{stream:a,completed:f})}const g="text"===d?readline.createInterface(e):e;let h,i=!1,j=[];const k=async a=>{var b;("string"==typeof a||a instanceof Buffer)&&j.push(a),null===(b=h)||void 0===b?void 0:b.resolve(null)};for(g.on("close",()=>k(i=!0)),"text"===d?g.on("line",a=>k(text_1.TextUtil.toText(a))):g.on("data",a=>k("string"==typeof a?Buffer.from(a,"utf8"):a));!i;)await(h=async_1.AsyncUtil.resolvablePromise()),!b.singleValue&&j.length&&(yield*j,j=[]);b.singleValue&&(j="text"===d?[j.join("\n")]:[Buffer.concat(j)]),j.length&&(yield*j),e.destroyed||e.destroy(),await f}static trackStream(a){return async_1.AsyncUtil.trackWithTimer(new Promise((b,c)=>{a.on("finish",b),a.on("end",b),a.on("close",b),a.on("error",c)}))}static getWritable(a){return"string"!=typeof a&&"write"in a?a:fs.createWriteStream(a,{flags:"w",autoClose:!0})}}exports.StreamUtil=StreamUtil; | ||
"use strict";Object.defineProperty(exports,"__esModule",{value:!0});const readline=require("readline"),fs=require("fs"),stream_1=require("stream"),async_1=require("./async"),text_1=require("./text");class MemoryStream extends stream_1.Writable{constructor(){super(...arguments),this.store=[]}_write(a,b,c){this.store.push(a),c()}getText(){return Buffer.concat(this.store).toString("utf8")}}class StreamUtil{static memoryWritable(){return new MemoryStream}static toStream(a,b){const c=stream_1.Readable.from(async function*(){for await(const c of a){if(c===void 0){yield;continue}c instanceof Buffer?b&&"binary"!==b?yield text_1.TextUtil.toLine(c):yield c:b&&"text"!==b?yield Buffer.from(text_1.TextUtil.toText(c),"utf8"):yield text_1.TextUtil.toLine(c)}}());return c}static async*readStream(a,b={}){var c;const d=(c=b.mode,null!==c&&void 0!==c?c:"text"),e="string"==typeof a?fs.createReadStream(a,{encoding:"utf8"}):a,f=this.trackStream(e);if("raw"===d)return void(yield{stream:e,completed:f});const g="text"===d?readline.createInterface(e):e;let h,i=!1,j=[];const k=async a=>{var b;("string"==typeof a||a instanceof Buffer)&&j.push(a),null===(b=h)||void 0===b?void 0:b.resolve(null)};for(g.on("end",()=>k(i=!0)),g.on("close",()=>k(i=!0)),"text"===d?g.on("line",a=>k(text_1.TextUtil.toText(a))):g.on("data",a=>k("string"==typeof a?Buffer.from(a,"utf8"):a));!i;)await(h=async_1.AsyncUtil.resolvablePromise()),!b.singleValue&&j.length&&(yield*j,j=[]);b.singleValue&&(j="text"===d?[j.join("\n")]:[Buffer.concat(j)]),j.length&&(yield*j),e.destroyed||e.destroy(),await f}static trackStream(a,b=!0){const c=new Promise((b,c)=>{a.on("finish",b),a.on("end",b),a.on("close",b),a.on("error",c)});return b?async_1.AsyncUtil.trackWithTimer(c):c}static getWritable(a){return"string"!=typeof a&&"write"in a?a:fs.createWriteStream(a,{flags:"w",autoClose:!0})}}exports.StreamUtil=StreamUtil; |
{
"name": "@arcsine/nodesh",
"version": "1.5.0",
"version": "1.6.0",
"description": "A node-based library, providing Unix shell-like functionality",
@@ -45,3 +45,5 @@ "scripts": {
},
"dependencies": {}
"dependencies": {
"picomatch": "^2.2.1"
}
}
@@ -6,3 +6,3 @@ <h1>


@@ -518,3 +518,4 @@ Nodesh is an `npm` package aimed at providing shell-like operations/simplicity within the node ecosystem. The goal is to make working with files/folders, http requests, and transformations as easy as possible. The library is built upon the async generation constructs within ecmascript as well as stream constructs within the node ecosystem. This means the performance is iterative and real-time, the same way piping works in a Unix shell.
input sequence type (see the sketch after this list):
* A `string` which represents a suffix search on file names (e.g. `.csv`)
* A `string` which represents a file extension (e.g. `.csv`). Will match all files recursively.
* A `string` which represents a glob pattern search on file names (e.g. `**\/*.csv`).
* A `RegExp` which represents a file pattern to search on (e.g. `/path\/sub\/.*[.]js/`)
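A minimal usage sketch of the three input forms described above (paths are illustrative; the `RegExp` form assumes operators are available on `RegExp` instances via the library's global `Object` registration):

```js
// Extension form: every .csv file under the current directory, recursively.
'.csv'.$dir().$stdout;

// Glob form: the same result, written as an explicit picomatch pattern.
'**/*.csv'.$dir().$stdout;

// RegExp form: .js files whose path matches the pattern.
/path\/sub\/.*[.]js/.$dir().$stdout;
```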
@@ -521,0 +522,0 @@
@@ -6,3 +6,3 @@ <h1>


@@ -9,0 +9,0 @@ Nodesh is an `npm` package aimed at providing shell-like operations/simplicity within the node ecosystem. The goal is to make working with files/folders, http requests, and transformations as easy as possible. The library is built upon the async generation constructs within ecmascript as well as stream constructs within the node ecosystem. This means the performance is iterative and real-time, the same way piping works in a Unix shell.
Minified code
Quality: This package contains minified code. This may be harmless in some cases where minified code is included in packaged libraries; however, packages on npm should not minify code.
Found 2 instances in 1 package
+ Added picomatch@^2.2.1
+ Added picomatch@2.3.1 (transitive)