@jsonic/multisource
Comparing version 0.0.6 to 0.0.7
@@ -1,4 +0,13 @@
import { Jsonic, Plugin, Context } from 'jsonic';
declare type Resolver = (path: string, jsonic: Jsonic, ctx: Context, opts: any) => any;
import { Plugin, Context } from 'jsonic';
import { makeMemResolver } from './resolver/mem';
import { makeFileResolver } from './resolver/file';
interface Resolution {
path: string;
full: string;
base: string;
src?: string;
}
declare type Resolver = (path: string, ctx?: Context) => Resolution;
declare const TOP: unique symbol;
declare let MultiSource: Plugin;
export { MultiSource, Resolver };
export { MultiSource, Resolver, Resolution, TOP, Context, makeFileResolver, makeMemResolver, };

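The resolver contract changes in 0.0.7: a resolver now takes only the path and an optional parse Context, and returns a Resolution record instead of the parsed value. Below is a minimal sketch of a custom resolver written against the new declarations above; the `sources` map and the `memResolver` name are illustrative, not part of the package.

```ts
import { Jsonic } from 'jsonic'
import { MultiSource, Resolver, Resolution } from '@jsonic/multisource'

// Hypothetical in-memory source map, for illustration only.
const sources: Record<string, string> = { '/cfg': 'x:1' }

const memResolver: Resolver = (path: string): Resolution => ({
  path,               // original (possibly relative) path
  full: path,         // normalized full path
  base: '/',          // current base path
  src: sources[path], // undefined when nothing resolves
})

const j = Jsonic.make().use(MultiSource, { resolver: memResolver })
// j('a:@"/cfg"') would be expected to parse as { a: { x: 1 } }
```
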
@@ -5,4 +5,9 @@ /* Copyright (c) 2021 Richard Rodger, MIT License */
Object.defineProperty(exports, "__esModule", { value: true });
exports.MultiSource = void 0;
exports.makeMemResolver = exports.makeFileResolver = exports.TOP = exports.MultiSource = void 0;
const jsonic_1 = require("jsonic");
// TODO: get package sub file refs working with ts
const mem_1 = require("./resolver/mem");
Object.defineProperty(exports, "makeMemResolver", { enumerable: true, get: function () { return mem_1.makeMemResolver; } });
const file_1 = require("./resolver/file");
Object.defineProperty(exports, "makeFileResolver", { enumerable: true, get: function () { return file_1.makeFileResolver; } });
//import { Json } from './json'
@@ -16,4 +21,5 @@ //import { Csv } from './csv'
markchar: '@',
basepath: '.',
};
const TOP = Symbol('TOP');
exports.TOP = TOP;
let MultiSource = function multisource(jsonic) {
@@ -39,53 +45,35 @@ let popts = jsonic_1.util.deep({}, DEFAULTS, jsonic.options.plugin.multisource);
let ST = jsonic.token.ST;
let TX = jsonic.token.TX;
let AT = jsonic.token(tn);
jsonic.rule('val', (rs) => {
rs.def.open.push({ s: [AT, ST] }, { s: [AT, TX] });
rs.def.open.push({ s: [AT, ST] });
let orig_bc = rs.def.bc;
rs.def.bc = function (rule, ctx) {
if (rule.open[0] && AT === rule.open[0].tin) {
// TODO: test TX=foo/bar as @"foo/bar" works but @foo/bar does not!
// let filepath = rule.open[1].val
// let fullpath = Path.resolve(ctx.meta.basepath || popts.basepath, filepath)
// let filedesc = Path.parse(fullpath)
// let basepath = filedesc.dir
// let file_ext = filedesc.ext.toLowerCase()
// console.log('MS res meta', ctx.meta)
let val = undefined;
let path = rule.open[1].val;
let val = resolver(path, jsonic, ctx, popts);
// if ('.js' === file_ext) {
// val = require(fullpath)
// if ('function' === typeof val) {
// val = val({ fullpath, filepath, rule, ctx })
// }
// }
// // Manually load file contents
// else {
// let partial_ctx = {
// root: ctx.root
// }
// let content: string
// if ('.jsonic' === file_ext) {
// content = Fs.readFileSync(fullpath).toString()
// val = jsonic(
// content,
// { basepath: basepath, fileName: fullpath },
// partial_ctx)
// }
// /*
// if ('.json' === file_ext) {
// content = Fs.readFileSync(fullpath).toString()
// val = json(content, { fileName: fullpath }, partial_ctx)
// }
// else if ('.csv' === file_ext) {
// content = Fs.readFileSync(fullpath).toString()
// val = csv(content, {}, partial_ctx)
// }
// */
// else {
// return {
// err: 'multifile_unsupported_file',
// path: fullpath,
// }
// }
// }
let res = resolver(path, ctx);
if (null != res.src) {
let msmeta = ctx.meta.multisource || {};
let meta = {
...ctx.meta,
multisource: {
...msmeta,
path: res.full
}
};
// console.log('MSMETA path', path, res.full)
val = jsonic(res.src, meta);
if (msmeta.deps) {
let depmap = msmeta.deps;
let parent = (msmeta.path || TOP);
if (null != parent) {
(depmap[parent] = depmap[parent] || {})[res.full] = {
tar: parent,
src: res.full,
wen: Date.now()
};
}
}
}
rule.open[0].val = val;
@@ -92,0 +80,0 @@ }

import { Resolver } from '../multisource';
declare const FileResolver: Resolver;
export { FileResolver };
declare function makeFileResolver(): Resolver;
export { makeFileResolver };

@@ -6,20 +6,32 @@ "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FileResolver = void 0;
exports.makeFileResolver = void 0;
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const FileResolver = (path, jsonic, ctx, opts) => {
// TODO: needs more thought
let basepath = ctx.meta.basepath || opts.basepath;
let fullpath = path_1.default.resolve(basepath, path);
let filedesc = path_1.default.parse(fullpath);
let filebase = filedesc.dir;
// let file_ext = filedesc.ext.toLowerCase()
let content = fs_1.default.readFileSync(fullpath).toString();
let partial_ctx = {
root: ctx.root
function makeFileResolver() {
return function FileResolver(path, ctx) {
let msmeta = ctx && ctx.meta && ctx.meta.multisource || {};
let popts = ctx && ctx.opts && ctx.opts &&
ctx.opts.plugin && ctx.opts.plugin.multisource || {};
let basefile = null == msmeta.path ?
null == popts.path ?
path : popts.path : msmeta.path;
let fstats = fs_1.default.statSync(basefile);
let basepath = basefile;
if (fstats.isFile()) {
let basedesc = path_1.default.parse(basefile);
basepath = basedesc.dir;
}
let isabsolute = path_1.default.isAbsolute(path);
let fullpath = isabsolute ? path :
(null == basepath ? path : path_1.default.resolve(basepath, path));
let src = fs_1.default.readFileSync(fullpath).toString();
return {
path: path,
full: fullpath,
base: basepath,
src,
};
};
let val = jsonic(content, { basepath: filebase, fileName: fullpath }, partial_ctx);
return val;
};
exports.FileResolver = FileResolver;
}
exports.makeFileResolver = makeFileResolver;
//# sourceMappingURL=file.js.map

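As the compiled resolver above shows, makeFileResolver now derives its base path from meta.multisource.path (or the plugin's path option) rather than from a basepath plugin option. A hedged usage sketch, mirroring the new tests; './config.jsonic' is an illustrative file name that is assumed to exist next to the calling module:

```ts
import { Jsonic } from 'jsonic'
import { MultiSource, makeFileResolver } from '@jsonic/multisource'

const j = Jsonic.make().use(MultiSource, {
  resolver: makeFileResolver(),
})

// Relative @"..." references are resolved against meta.multisource.path.
const out = j('a:1,b:@"./config.jsonic"', {
  multisource: { path: __dirname },
})
// out.b would hold the parsed contents of ./config.jsonic
```
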
@@ -16,13 +16,71 @@ "use strict";
const file_1 = require("../resolver/file");
const mem_1 = require("../resolver/mem");
describe('multisource', function () {
it('happy', () => {
let j0 = jsonic_1.Jsonic.make().use(multisource_1.MultiSource, {
basepath: __dirname,
resolver: file_1.FileResolver,
resolver: mem_1.makeMemResolver({
'/a': 'b:1',
}),
});
expect(j0('a:1,b:@"./t01.jsonic"')).equals({ a: 1, b: { c: 2 } });
expect(j0('a:1,b:@"./t02.jsonic",c:3'))
expect(j0('c:1')).equals({ c: 1 });
expect(j0('c:@"/a"')).equals({ c: { b: 1 } });
expect(j0('x:y:1, x:z:2')).equals({ x: { y: 1, z: 2 } });
});
it('file', () => {
let r0 = file_1.makeFileResolver();
let j0 = jsonic_1.Jsonic.make().use(multisource_1.MultiSource, {
resolver: r0,
});
let deps = {};
expect(j0('a:1,b:@"./t01.jsonic"', { multisource: { path: __dirname, deps } }))
.equals({ a: 1, b: { c: 2 } });
//console.dir(deps, { depth: null })
deps = {};
expect(j0('a:1,b:@"./t02.jsonic",c:3', { multisource: { path: __dirname, deps } }))
.equals({ a: 1, b: { d: 2, e: { f: 4 } }, c: 3 });
//console.dir(deps, { depth: null })
});
it('mem', () => {
let r0 = mem_1.makeMemResolver({
'/a': 'a:1',
'/b': 'b:@"c",',
'/b/c': 'c:@"/a"',
'/d': 'd:4',
});
let j0 = jsonic_1.Jsonic.make().use(multisource_1.MultiSource, {
resolver: r0,
});
let deps = {};
expect(j0('q:11,x:@"a",k:@"d",y:@"b",z:@"/b/c",w:22', { multisource: { deps } }))
.equals({
q: 11,
x: { a: 1 },
k: { d: 4 },
y: { b: { c: { a: 1 } } },
z: { c: { a: 1 } },
w: 22,
});
//console.dir(deps, { depth: null })
expect(remove(deps, 'wen')).equal({
'/b/c': { '/a': { tar: '/b/c', src: '/a' } },
'/b': { '/b/c': { tar: '/b', src: '/b/c' } },
[multisource_1.TOP]: {
'/a': { tar: multisource_1.TOP, src: '/a' },
'/d': { tar: multisource_1.TOP, src: '/d' },
'/b': { tar: multisource_1.TOP, src: '/b' },
'/b/c': { tar: multisource_1.TOP, src: '/b/c' }
}
});
});
});
function remove(o, k) {
if (null != o && 'object' === typeof (o)) {
delete o[k];
remove(o[multisource_1.TOP], k);
for (let p in o) {
remove(o[p], k);
}
}
return o;
}
//# sourceMappingURL=multisource.test.js.map

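The new tests also exercise dependency tracking: an empty deps object passed through the parse meta is filled with a map keyed by the depending target (or the exported TOP symbol for the root document). A rough sketch of the shape, based on the mem-resolver test above; the '/a' and '/b' sources are illustrative:

```ts
import { Jsonic } from 'jsonic'
import { MultiSource, makeMemResolver, TOP } from '@jsonic/multisource'

const j = Jsonic.make().use(MultiSource, {
  resolver: makeMemResolver({ '/a': 'a:1', '/b': 'b:@"/a"' }),
})

// deps is filled in place with:
// { [target full path | TOP]: { [source full path]: { tar, src, wen } } }
const deps = {}
j('x:@"/b"', { multisource: { deps } })
// deps[TOP]['/b']  -> { tar: TOP, src: '/b', wen: <timestamp> }
// deps['/b']['/a'] -> { tar: '/b', src: '/a', wen: <timestamp> }
```
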
@@ -6,2 +6,7 @@ /* Copyright (c) 2021 Richard Rodger, MIT License */
import { Jsonic, Plugin, Rule, RuleSpec, Context, util } from 'jsonic'
// TODO: get package sub file refs working with ts
import { makeMemResolver } from './resolver/mem'
import { makeFileResolver } from './resolver/file'
//import { Json } from './json'
@@ -16,17 +21,41 @@ //import { Csv } from './csv'
let DEFAULTS = {
markchar: '@',
basepath: '.',
}
type Resolver = (
path: string,
jsonic: Jsonic,
ctx: Context,
opts: any
) => any
interface Meta {
path?: string // Base path for this parse run.
deps?: DependencyMap // Provide an empty object to be filled.
}
interface Resolution {
path: string // Original path (possibly relative)
full: string // Normalized full path
base: string // Current base path
src?: string // Undefined if no resolution
}
type Resolver = (path: string, ctx?: Context) => Resolution
interface Dependency {
tar: string | typeof TOP, // Target that depends on source (src).
src: string, // Source that target (tar) depends on.
wen: number, // Time of resolution.
}
type DependencyMap = {
[tar_full_path: string]: {
[src_full_path: string]: Dependency
}
}
const TOP = Symbol('TOP')
let MultiSource: Plugin = function multisource(jsonic: Jsonic) {
@@ -56,3 +85,2 @@ let popts = util.deep({}, DEFAULTS, jsonic.options.plugin.multisource)
let ST = jsonic.token.ST
let TX = jsonic.token.TX
let AT = jsonic.token(tn)
@@ -63,4 +91,3 @@
rs.def.open.push(
{ s: [AT, ST] },
{ s: [AT, TX] }
{ s: [AT, ST] }, // NOTE: must use strings to specify path: @"...path..."
)
@@ -72,55 +99,37 @@
// TODO: test TX=foo/bar as @"foo/bar" works but @foo/bar does not!
// let filepath = rule.open[1].val
// let fullpath = Path.resolve(ctx.meta.basepath || popts.basepath, filepath)
// let filedesc = Path.parse(fullpath)
// let basepath = filedesc.dir
// let file_ext = filedesc.ext.toLowerCase()
// console.log('MS res meta', ctx.meta)
let val: any = undefined
let path = rule.open[1].val
let val = resolver(path, jsonic, ctx, popts)
let res = resolver(path, ctx)
// if ('.js' === file_ext) {
// val = require(fullpath)
// if ('function' === typeof val) {
// val = val({ fullpath, filepath, rule, ctx })
// }
// }
if (null != res.src) {
let msmeta: Meta = ctx.meta.multisource || {}
let meta = {
...ctx.meta,
multisource: {
...msmeta,
path: res.full
}
}
// // Manually load file contents
// else {
// let partial_ctx = {
// root: ctx.root
// }
// console.log('MSMETA path', path, res.full)
// let content: string
// if ('.jsonic' === file_ext) {
// content = Fs.readFileSync(fullpath).toString()
// val = jsonic(
// content,
// { basepath: basepath, fileName: fullpath },
// partial_ctx)
// }
val = jsonic(res.src, meta)
// /*
// if ('.json' === file_ext) {
// content = Fs.readFileSync(fullpath).toString()
// val = json(content, { fileName: fullpath }, partial_ctx)
// }
if (msmeta.deps) {
let depmap = (msmeta.deps as DependencyMap)
let parent = (msmeta.path || TOP) as string
if (null != parent) {
(depmap[parent] = depmap[parent] || {})[res.full] = {
tar: parent,
src: res.full,
wen: Date.now()
}
}
}
// else if ('.csv' === file_ext) {
// content = Fs.readFileSync(fullpath).toString()
// val = csv(content, {}, partial_ctx)
// }
// */
}
// else {
// return {
// err: 'multifile_unsupported_file',
// path: fullpath,
// }
// }
// }
rule.open[0].val = val
@@ -135,2 +144,15 @@ }
export { MultiSource, Resolver }
export {
MultiSource,
Resolver,
Resolution,
TOP,
// Re-exported from jsonic for convenience
Context,
// TODO: remove for better tree shaking
makeFileResolver,
makeMemResolver,
}

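Note the rule change above: only the `[AT, ST]` (string token) open alternative remains, so a reference must now be written as a quoted string after the mark character. A small sketch assuming the default markchar '@' and an illustrative '/part' source:

```ts
import { Jsonic } from 'jsonic'
import { MultiSource, makeMemResolver } from '@jsonic/multisource'

const j = Jsonic.make().use(MultiSource, {
  resolver: makeMemResolver({ '/part': 'y:2' }),
})

j('b:@"/part"')  // quoted path: matched by the [AT, ST] rule, expected { b: { y: 2 } }
// j('b:@/part') // unquoted: the [AT, TX] alternative was removed in 0.0.7
```
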
{
"name": "@jsonic/multisource",
"version": "0.0.6",
"version": "0.0.7",
"description": "",
@@ -33,3 +33,3 @@ "main": "dist/multisource.js",
"@hapi/code": "^8.0.3",
"@hapi/lab": "^24.2.0",
"@hapi/lab": "^24.2.1",
"lab-transform-typescript": "^3.0.1",
@@ -36,0 +36,0 @@ "typescript": "^4.2.4"

@@ -5,35 +5,41 @@
import { Jsonic, Context } from 'jsonic'
import { Resolver } from '../multisource'
const FileResolver: Resolver = (
path: string,
jsonic: Jsonic,
ctx: Context,
opts: any
) => {
import { Context } from 'jsonic'
import { Resolver, Resolution } from '../multisource'
// TODO: needs more thought
let basepath = ctx.meta.basepath || opts.basepath
let fullpath = Path.resolve(basepath, path)
let filedesc = Path.parse(fullpath)
let filebase = filedesc.dir
// let file_ext = filedesc.ext.toLowerCase()
function makeFileResolver(): Resolver {
let content = Fs.readFileSync(fullpath).toString()
return function FileResolver(path: string, ctx?: Context): Resolution {
let msmeta = ctx && ctx.meta && ctx.meta.multisource || {}
let popts = ctx && ctx.opts && ctx.opts &&
ctx.opts.plugin && ctx.opts.plugin.multisource || {}
let partial_ctx = {
root: ctx.root
}
let basefile =
null == msmeta.path ?
null == popts.path ?
path : popts.path : msmeta.path
let fstats = Fs.statSync(basefile)
let basepath = basefile
let val = jsonic(
content,
{ basepath: filebase, fileName: fullpath },
partial_ctx
)
if (fstats.isFile()) {
let basedesc = Path.parse(basefile)
basepath = basedesc.dir
}
return val
let isabsolute = Path.isAbsolute(path)
let fullpath = isabsolute ? path :
(null == basepath ? path : Path.resolve(basepath, path))
let src = Fs.readFileSync(fullpath).toString()
return {
path: path,
full: fullpath,
base: basepath,
src,
}
}
}
@@ -43,3 +49,3 @@
export {
FileResolver
makeFileResolver
}

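The base-path selection in the FileResolver source above amounts to: prefer meta.multisource.path, then the plugin's path option, then the reference itself; when that base is a file, use its directory; absolute references bypass the base entirely. A small sketch of that ordering using Node's path module only (the '/etc/app/config.jsonic' and './extra.jsonic' paths are illustrative, not from the diff):

```ts
import Path from 'path'

const basefile = '/etc/app/config.jsonic'  // e.g. meta.multisource.path
const basepath = Path.parse(basefile).dir  // '/etc/app' (when basefile is a file)

Path.resolve(basepath, './extra.jsonic')   // '/etc/app/extra.jsonic'
Path.isAbsolute('/other.jsonic')           // true, so such a reference is read as-is
```
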
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package