+12
-15
@@ -1,22 +0,19 @@ | ||
| Sorcery version <%= version %> | ||
| ===================================== | ||
| # Sorcery version <%= version %> | ||
| Usage: | ||
| sorcery [options] | ||
| Usage: | ||
| sorcery [options] | ||
| Options: | ||
| -h, --help Show help message | ||
| -v, --version Show version | ||
| -i, --input <file|folder> Input file | ||
| -o, --output <file|folder> Output file (if absent, will overwrite input) | ||
| -d, --datauri Append map as a data URI, rather than separate file | ||
| -x, --excludeContent Don't populate the sourcesContent array | ||
| Options: | ||
| -h, --help Show help message | ||
| -v, --version Show version | ||
| -i, --input <file|folder> Input file | ||
| -o, --output <file|folder> Output file (if absent, will overwrite input) | ||
| -d, --datauri Append map as a data URI, rather than separate file | ||
| -x, --excludeContent Don't populate the sourcesContent array | ||
| Example: | ||
| Example: | ||
| sorcery --input some/generated/code.min.js | ||
| sorcery --input tmp --output dist | ||
| For more information visit https://github.com/Rich-Harris/sorcery | ||
| For more information visit https://github.com/Rich-Harris/sorcery |
+10
-8
@@ -1,13 +0,15 @@ | ||
| var fs = require( 'fs' ), | ||
| path = require( 'path' ); | ||
| import fs from 'node:fs'; | ||
| import path from 'node:path'; | ||
| module.exports = function ( stream ) { | ||
| fs.readFile( path.join( __dirname, 'help.md' ), function ( err, result ) { | ||
| export default function (stream) { | ||
| fs.readFile(path.join(__dirname, 'help.md'), function (err, result) { | ||
| var help; | ||
| if ( err ) throw err; | ||
| if (err) throw err; | ||
| help = result.toString().replace( '<%= version %>', require( '../package.json' ).version ); | ||
| ( stream || process.stderr ).write( '\n' + help + '\n' ); | ||
| help = result | ||
| .toString() | ||
| .replace('<%= version %>', require('../package.json').version); | ||
| (stream || process.stderr).write('\n' + help + '\n'); | ||
| }); | ||
| }; | ||
| } |
+21
-30
| { | ||
| "name": "sorcery", | ||
| "description": "Resolve a chain of sourcemaps back to the original source", | ||
| "version": "0.11.1", | ||
| "version": "1.0.0", | ||
| "author": "Rich Harris", | ||
| "repository": "https://github.com/Rich-Harris/sorcery", | ||
| "main": "dist/sorcery.cjs.js", | ||
| "jsnext:main": "dist/sorcery.es6.js", | ||
| "type": "module", | ||
| "exports": { | ||
| ".": { | ||
| "types": "./types/index.d.ts", | ||
| "default": "./src/index.js" | ||
| } | ||
| }, | ||
| "license": "MIT", | ||
| "dependencies": { | ||
| "buffer-crc32": "^1.0.0", | ||
| "@jridgewell/sourcemap-codec": "^1.4.14", | ||
| "minimist": "^1.2.0", | ||
| "sander": "^0.5.0", | ||
| "@jridgewell/sourcemap-codec": "^1.4.14" | ||
| "tiny-glob": "^0.2.9" | ||
| }, | ||
| "devDependencies": { | ||
| "@rollup/plugin-buble": "^1.0.3", | ||
| "@rollup/plugin-commonjs": "^26.0.1", | ||
| "@rollup/plugin-node-resolve": "^15.2.3", | ||
| "buble": "^0.20.0", | ||
| "codecov.io": "^0.1.6", | ||
| "@types/node": "^20.14.2", | ||
| "coffee-script": "^1.10.0", | ||
| "eslint": "^2.8.0", | ||
| "glob": "^7.0.3", | ||
| "istanbul": "^0.4.3", | ||
| "less": "^2.6.1", | ||
| "mocha": "^2.4.5", | ||
| "promise-map-series": "^0.2.2", | ||
| "remap-istanbul": "^0.6.3", | ||
| "rollup": "^4.18.0", | ||
| "prettier": "^3.3.2", | ||
| "rimraf": "^5.0.7", | ||
| "source-map": "^0.5.3", | ||
| "source-map-support": "^0.4.0", | ||
| "typescript": "^5.4.5", | ||
| "uglify-js": "^2.6.2" | ||
@@ -39,19 +34,15 @@ }, | ||
| "scripts": { | ||
| "build": "rm -rf dist && rollup -c rollup.config.mjs", | ||
| "pretest": "npm run build", | ||
| "build": "tsc", | ||
| "prepare-tests": "node test/samples/prepare-tests.js", | ||
| "test": "mocha --compilers js:buble/register", | ||
| "prepublish": "npm test", | ||
| "lint": "eslint src", | ||
| "pretest-coverage": "npm run build", | ||
| "test-coverage": "rm -rf coverage/* && istanbul cover --report json node_modules/.bin/_mocha -- -u exports -R spec test/test.js", | ||
| "posttest-coverage": "remap-istanbul -i coverage/coverage-final.json -o coverage/coverage-remapped.json -b dist && remap-istanbul -i coverage/coverage-final.json -o coverage/coverage-remapped.lcov -t lcovonly -b dist && remap-istanbul -i coverage/coverage-final.json -o coverage/coverage-remapped -t html -b dist", | ||
| "ci": "npm run test-coverage && codecov < coverage/coverage-remapped.lcov" | ||
| "prepublishOnly": "pnpm test && pnpm build", | ||
| "lint": "prettier .", | ||
| "format": "prettier --write .", | ||
| "test": "node test/sorcery.test.js" | ||
| }, | ||
| "files": [ | ||
| "src/", | ||
| "dist/", | ||
| "bin/", | ||
| "README.md" | ||
| ] | ||
| ], | ||
| "packageManager": "pnpm@9.3.0" | ||
| } |
+11
-13
@@ -5,3 +5,3 @@ # sorcery.js | ||
| But if you have more than one transformation - say you want to transpile your JavaScript, concatenate several files into one, and minify the result - it gets a little trickier. Each intermediate step needs to be able to both *ingest* a sourcemap and *generate* one, all the time pointing back to the original source. | ||
| But if you have more than one transformation - say you want to transpile your JavaScript, concatenate several files into one, and minify the result - it gets a little trickier. Each intermediate step needs to be able to both _ingest_ a sourcemap and _generate_ one, all the time pointing back to the original source. | ||
@@ -14,3 +14,2 @@ Most compilers don't do that. ([UglifyJS](https://github.com/mishoo/UglifyJS2) is an honourable exception.) So when you fire up devtools, instead of looking at the original source you find yourself looking at the final intermediate step in the chain of transformations. | ||
| ## Usage | ||
@@ -27,5 +26,5 @@ | ||
| ```js | ||
| var sorcery = require( 'sorcery' ); | ||
| import * as sorcery from 'sorcery'; | ||
| sorcery.load( 'some/generated/code.min.js' ).then( function ( chain ) { | ||
| sorcery.load('some/generated/code.min.js').then(function (chain) { | ||
| // generate a flattened sourcemap | ||
@@ -43,10 +42,10 @@ var map = chain.apply(); // { version: 3, file: 'code.min.js', ... } | ||
| // returns a Promise | ||
| chain.write( 'output.js' ); | ||
| chain.write('output.js'); | ||
| // write to a new file but use an absolute path for the | ||
| // sourceMappingURL | ||
| chain.write( 'output.js', { absolutePath: true }); | ||
| chain.write('output.js', { absolutePath: true }); | ||
| // write to a new file, but append the flattened sourcemap as a data URI | ||
| chain.write( 'output.js', { inline: true }); | ||
| chain.write('output.js', { inline: true }); | ||
@@ -61,9 +60,9 @@ // overwrite the existing file | ||
| // one-based, column numbers are always zero-based. It's daft, I know. | ||
| var loc = chain.trace( x, y ); | ||
| var loc = chain.trace(x, y); | ||
| }); | ||
| // You can also use sorcery synchronously: | ||
| var chain = sorcery.loadSync( 'some/generated/code.min.js' ); | ||
| var chain = sorcery.loadSync('some/generated/code.min.js'); | ||
| var map = chain.apply(); | ||
| var loc = chain.trace( x, y ); | ||
| var loc = chain.trace(x, y); | ||
| chain.writeSync(); | ||
@@ -76,4 +75,4 @@ ``` | ||
| * `content` - a map of `filename: contents` pairs. `filename` will be resolved against the current working directory if needs be | ||
| * `sourcemaps` - a map of `filename: sourcemap` pairs, where `filename` is the name of the file the sourcemap is related to. This will override any `sourceMappingURL` comments in the file itself. | ||
| - `content` - a map of `filename: contents` pairs. `filename` will be resolved against the current working directory if needs be | ||
| - `sourcemaps` - a map of `filename: sourcemap` pairs, where `filename` is the name of the file the sourcemap is related to. This will override any `sourceMappingURL` comments in the file itself. | ||
@@ -138,5 +137,4 @@ For example: | ||
| ## License | ||
| MIT |
+94
-71
@@ -1,3 +0,4 @@ | ||
| import { basename, dirname, extname, relative, resolve } from 'path'; | ||
| import { writeFile, writeFileSync } from 'sander'; | ||
| import { basename, dirname, extname, relative, resolve } from 'node:path'; | ||
| import * as fs from 'node:fs'; | ||
| import { writeFile } from 'node:fs/promises'; | ||
| import { encode } from '@jridgewell/sourcemap-codec'; | ||
@@ -8,19 +9,24 @@ import SourceMap from './SourceMap.js'; | ||
| const SOURCEMAP_COMMENT = new RegExp( `\n*(?:` + | ||
| `\\/\\/[@#]\\s*${SOURCEMAPPING_URL}=([^\n]+)|` + // js | ||
| `\\/\\*#?\\s*${SOURCEMAPPING_URL}=([^'"]+)\\s\\*\\/)` + // css | ||
| '\\s*$', 'g' ); | ||
| const SOURCEMAP_COMMENT = new RegExp( | ||
| `\n*(?:` + | ||
| `\\/\\/[@#]\\s*${SOURCEMAPPING_URL}=([^\n]+)|` + // js | ||
| `\\/\\*#?\\s*${SOURCEMAPPING_URL}=([^'"]+)\\s\\*\\/)` + // css | ||
| '\\s*$', | ||
| 'g' | ||
| ); | ||
| export default function Chain ( node, sourcesContentByPath ) { | ||
| this.node = node; | ||
| this.sourcesContentByPath = sourcesContentByPath; | ||
| export default class Chain { | ||
| constructor(node, sourcesContentByPath) { | ||
| this.node = node; | ||
| this.sourcesContentByPath = sourcesContentByPath; | ||
| this._stats = {}; | ||
| } | ||
| this._stats = {}; | ||
| } | ||
| Chain.prototype = { | ||
| stat () { | ||
| stat() { | ||
| return { | ||
| selfDecodingTime: this._stats.decodingTime / 1e6, | ||
| totalDecodingTime: ( this._stats.decodingTime + tally( this.node.sources, 'decodingTime' ) ) / 1e6, | ||
| totalDecodingTime: | ||
| (this._stats.decodingTime + tally(this.node.sources, 'decodingTime')) / | ||
| 1e6, | ||
@@ -32,18 +38,19 @@ encodingTime: this._stats.encodingTime / 1e6, | ||
| }; | ||
| }, | ||
| } | ||
| apply ( options = {} ) { | ||
| apply(options = {}) { | ||
| let allNames = []; | ||
| let allSources = []; | ||
| const applySegment = ( segment, result ) => { | ||
| if ( segment.length < 4 ) return; | ||
| const applySegment = (segment, result) => { | ||
| if (segment.length < 4) return; | ||
| const traced = this.node.sources[ segment[1] ].trace( // source | ||
| const traced = this.node.sources[segment[1]].trace( | ||
| // source | ||
| segment[2], // source code line | ||
| segment[3], // source code column | ||
| this.node.map.names[ segment[4] ] | ||
| this.node.map.names[segment[4]] | ||
| ); | ||
| if ( !traced ) { | ||
| if (!traced) { | ||
| this._stats.untraceable += 1; | ||
@@ -53,6 +60,6 @@ return; | ||
| let sourceIndex = allSources.indexOf( traced.source ); | ||
| if ( !~sourceIndex ) { | ||
| let sourceIndex = allSources.indexOf(traced.source); | ||
| if (!~sourceIndex) { | ||
| sourceIndex = allSources.length; | ||
| allSources.push( traced.source ); | ||
| allSources.push(traced.source); | ||
| } | ||
@@ -67,7 +74,7 @@ | ||
| if ( traced.name ) { | ||
| let nameIndex = allNames.indexOf( traced.name ); | ||
| if ( !~nameIndex ) { | ||
| if (traced.name) { | ||
| let nameIndex = allNames.indexOf(traced.name); | ||
| if (!~nameIndex) { | ||
| nameIndex = allNames.length; | ||
| allNames.push( traced.name ); | ||
| allNames.push(traced.name); | ||
| } | ||
@@ -78,3 +85,3 @@ | ||
| result[ result.length ] = newSegment; | ||
| result[result.length] = newSegment; | ||
| }; | ||
@@ -86,16 +93,16 @@ | ||
| let i = this.node.mappings.length; | ||
| let resolved = new Array( i ); | ||
| let resolved = new Array(i); | ||
| let j, line, result; | ||
| while ( i-- ) { | ||
| while (i--) { | ||
| line = this.node.mappings[i]; | ||
| resolved[i] = result = []; | ||
| for ( j = 0; j < line.length; j += 1 ) { | ||
| applySegment( line[j], result ); | ||
| for (j = 0; j < line.length; j += 1) { | ||
| applySegment(line[j], result); | ||
| } | ||
| } | ||
| let tracingTime = process.hrtime( tracingStart ); | ||
| let tracingTime = process.hrtime(tracingStart); | ||
| this._stats.tracingTime = 1e9 * tracingTime[0] + tracingTime[1]; | ||
@@ -105,4 +112,4 @@ | ||
| let encodingStart = process.hrtime(); | ||
| let mappings = encode( resolved ); | ||
| let encodingTime = process.hrtime( encodingStart ); | ||
| let mappings = encode(resolved); | ||
| let encodingTime = process.hrtime(encodingStart); | ||
| this._stats.encodingTime = 1e9 * encodingTime[0] + encodingTime[1]; | ||
@@ -113,16 +120,20 @@ | ||
| return new SourceMap({ | ||
| file: basename( this.node.file ), | ||
| sources: allSources.map( source => slash( relative( options.base || dirname( this.node.file ), source ) ) ), | ||
| sourcesContent: allSources.map( source => includeContent ? this.sourcesContentByPath[ source ] : null ), | ||
| file: basename(this.node.file), | ||
| sources: allSources.map((source) => | ||
| slash(relative(options.base || dirname(this.node.file), source)) | ||
| ), | ||
| sourcesContent: allSources.map((source) => | ||
| includeContent ? this.sourcesContentByPath[source] : null | ||
| ), | ||
| names: allNames, | ||
| mappings | ||
| }); | ||
| }, | ||
| } | ||
| trace ( oneBasedLineIndex, zeroBasedColumnIndex ) { | ||
| return this.node.trace( oneBasedLineIndex - 1, zeroBasedColumnIndex, null ); | ||
| }, | ||
| trace(oneBasedLineIndex, zeroBasedColumnIndex) { | ||
| return this.node.trace(oneBasedLineIndex - 1, zeroBasedColumnIndex, null); | ||
| } | ||
| write ( dest, options ) { | ||
| if ( typeof dest !== 'string' ) { | ||
| write(dest, options) { | ||
| if (typeof dest !== 'string') { | ||
| options = dest; | ||
@@ -134,15 +145,19 @@ dest = this.node.file; | ||
| const { resolved, content, map } = processWriteOptions( dest, this, options ); | ||
| const { resolved, content, map } = processWriteOptions(dest, this, options); | ||
| let promises = [ writeFile( resolved, content ) ]; | ||
| try { | ||
| fs.mkdirSync(dirname(resolved), { recursive: true }); | ||
| } catch {} | ||
| if ( !options.inline ) { | ||
| promises.push( writeFile( resolved + '.map', map.toString() ) ); | ||
| let promises = [writeFile(resolved, content)]; | ||
| if (!options.inline) { | ||
| promises.push(writeFile(resolved + '.map', map.toString())); | ||
| } | ||
| return Promise.all( promises ); | ||
| }, | ||
| return Promise.all(promises); | ||
| } | ||
| writeSync ( dest, options ) { | ||
| if ( typeof dest !== 'string' ) { | ||
| writeSync(dest, options) { | ||
| if (typeof dest !== 'string') { | ||
| options = dest; | ||
@@ -154,24 +169,32 @@ dest = this.node.file; | ||
| const { resolved, content, map } = processWriteOptions( dest, this, options ); | ||
| const { resolved, content, map } = processWriteOptions(dest, this, options); | ||
| writeFileSync( resolved, content ); | ||
| try { | ||
| fs.mkdirSync(dirname(resolved), { recursive: true }); | ||
| } catch {} | ||
| if ( !options.inline ) { | ||
| writeFileSync( resolved + '.map', map.toString() ); | ||
| fs.writeFileSync(resolved, content); | ||
| if (!options.inline) { | ||
| fs.writeFileSync(resolved + '.map', map.toString()); | ||
| } | ||
| } | ||
| }; | ||
| } | ||
| function processWriteOptions ( dest, chain, options ) { | ||
| const resolved = resolve( dest ); | ||
| function processWriteOptions(dest, chain, options) { | ||
| const resolved = resolve(dest); | ||
| const map = chain.apply({ | ||
| includeContent: options.includeContent, | ||
| base: options.base ? resolve( options.base ) : dirname( resolved ) | ||
| base: options.base ? resolve(options.base) : dirname(resolved) | ||
| }); | ||
| const url = options.inline ? map.toUrl() : ( options.absolutePath ? resolved : basename( resolved ) ) + '.map'; | ||
| const url = options.inline | ||
| ? map.toUrl() | ||
| : (options.absolutePath ? resolved : basename(resolved)) + '.map'; | ||
| // TODO shouldn't url be relative? | ||
| const content = chain.node.content.replace( SOURCEMAP_COMMENT, '' ) + sourcemapComment( url, resolved ); | ||
| const content = | ||
| chain.node.content.replace(SOURCEMAP_COMMENT, '') + | ||
| sourcemapComment(url, resolved); | ||
@@ -181,13 +204,13 @@ return { resolved, content, map }; | ||
| function tally ( nodes, stat ) { | ||
| return nodes.reduce( ( total, node ) => { | ||
| return total + node._stats[ stat ]; | ||
| }, 0 ); | ||
| function tally(nodes, stat) { | ||
| return nodes.reduce((total, node) => { | ||
| return total + node._stats[stat]; | ||
| }, 0); | ||
| } | ||
| function sourcemapComment ( url, dest ) { | ||
| const ext = extname( dest ); | ||
| url = encodeURI( url ); | ||
| function sourcemapComment(url, dest) { | ||
| const ext = extname(dest); | ||
| url = encodeURI(url); | ||
| if ( ext === '.css' ) { | ||
| if (ext === '.css') { | ||
| return `\n/*# ${SOURCEMAPPING_URL}=${url} */\n`; | ||
@@ -194,0 +217,0 @@ } |
+42
-16
@@ -1,20 +0,46 @@ | ||
| import { resolve } from 'path'; | ||
| import { resolve } from 'node:path'; | ||
| import Node from './Node.js'; | ||
| import Chain from './Chain.js'; | ||
| export function load ( file, options ) { | ||
| const { node, sourcesContentByPath, sourceMapByPath } = init( file, options ); | ||
| /** | ||
| * @param {string} file | ||
| * @param {{ | ||
| * content?: Record<string, string>; | ||
| * sourcemaps?: Record<string, any>; | ||
| * }} options | ||
| * @returns | ||
| */ | ||
| export function load(file, options) { | ||
| const { node, sourcesContentByPath, sourceMapByPath } = init(file, options); | ||
| return node.load( sourcesContentByPath, sourceMapByPath ) | ||
| .then( () => node.isOriginalSource ? null : new Chain( node, sourcesContentByPath ) ); | ||
| return node | ||
| .load(sourcesContentByPath, sourceMapByPath) | ||
| .then(() => | ||
| node.isOriginalSource ? null : new Chain(node, sourcesContentByPath) | ||
| ); | ||
| } | ||
| export function loadSync ( file, options = {} ) { | ||
| const { node, sourcesContentByPath, sourceMapByPath } = init( file, options ); | ||
| /** | ||
| * @param {string} file | ||
| * @param {{ | ||
| * content?: Record<string, string>; | ||
| * sourcemaps?: Record<string, any>; | ||
| * }} options | ||
| * @returns | ||
| */ | ||
| export function loadSync(file, options = {}) { | ||
| const { node, sourcesContentByPath, sourceMapByPath } = init(file, options); | ||
| node.loadSync( sourcesContentByPath, sourceMapByPath ); | ||
| return node.isOriginalSource ? null : new Chain( node, sourcesContentByPath ); | ||
| node.loadSync(sourcesContentByPath, sourceMapByPath); | ||
| return node.isOriginalSource ? null : new Chain(node, sourcesContentByPath); | ||
| } | ||
| function init ( file, options = {} ) { | ||
| /** | ||
| * @param {string} file | ||
| * @param {{ | ||
| * content?: Record<string, string>; | ||
| * sourcemaps?: Record<string, any>; | ||
| * }} options | ||
| */ | ||
| function init(file, options = {}) { | ||
| const node = new Node({ file }); | ||
@@ -25,11 +51,11 @@ | ||
| if ( options.content ) { | ||
| Object.keys( options.content ).forEach( key => { | ||
| sourcesContentByPath[ resolve( key ) ] = options.content[ key ]; | ||
| if (options.content) { | ||
| Object.keys(options.content).forEach((key) => { | ||
| sourcesContentByPath[resolve(key)] = options.content[key]; | ||
| }); | ||
| } | ||
| if ( options.sourcemaps ) { | ||
| Object.keys( options.sourcemaps ).forEach( key => { | ||
| sourceMapByPath[ resolve( key ) ] = options.sourcemaps[ key ]; | ||
| if (options.sourcemaps) { | ||
| Object.keys(options.sourcemaps).forEach((key) => { | ||
| sourceMapByPath[resolve(key)] = options.sourcemaps[key]; | ||
| }); | ||
@@ -36,0 +62,0 @@ } |
+95
-70
@@ -1,36 +0,43 @@ | ||
| import { dirname, resolve } from 'path'; | ||
| import { readFile, readFileSync, Promise } from 'sander'; | ||
| import { dirname, resolve } from 'node:path'; | ||
| import { readFileSync } from 'node:fs'; | ||
| import { readFile } from 'node:fs/promises'; | ||
| import { decode } from '@jridgewell/sourcemap-codec'; | ||
| import getMap from './utils/getMap.js'; | ||
| export default function Node ({ file, content }) { | ||
| this.file = file ? resolve( file ) : null; | ||
| this.content = content || null; // sometimes exists in sourcesContent, sometimes doesn't | ||
| export default class Node { | ||
| /** | ||
| * @param {{ | ||
| * file?: string; | ||
| * content?: string; | ||
| * }} opts | ||
| */ | ||
| constructor({ file, content }) { | ||
| this.file = file ? resolve(file) : null; | ||
| this.content = content || null; // sometimes exists in sourcesContent, sometimes doesn't | ||
| if ( !this.file && this.content === null ) { | ||
| throw new Error( 'A source must specify either file or content' ); | ||
| } | ||
| if (!this.file && this.content === null) { | ||
| throw new Error('A source must specify either file or content'); | ||
| } | ||
| // these get filled in later | ||
| this.map = null; | ||
| this.mappings = null; | ||
| this.sources = null; | ||
| this.isOriginalSource = null; | ||
| // these get filled in later | ||
| this.map = null; | ||
| this.mappings = null; | ||
| this.sources = null; | ||
| this.isOriginalSource = null; | ||
| this._stats = { | ||
| decodingTime: 0, | ||
| encodingTime: 0, | ||
| tracingTime: 0, | ||
| this._stats = { | ||
| decodingTime: 0, | ||
| encodingTime: 0, | ||
| tracingTime: 0, | ||
| untraceable: 0 | ||
| }; | ||
| } | ||
| untraceable: 0 | ||
| }; | ||
| } | ||
| Node.prototype = { | ||
| load ( sourcesContentByPath, sourceMapByPath ) { | ||
| return getContent( this, sourcesContentByPath ).then( content => { | ||
| this.content = sourcesContentByPath[ this.file ] = content; | ||
| load(sourcesContentByPath, sourceMapByPath) { | ||
| return getContent(this, sourcesContentByPath).then((content) => { | ||
| this.content = sourcesContentByPath[this.file] = content; | ||
| return getMap( this, sourceMapByPath ).then( map => { | ||
| if ( !map ) return null; | ||
| return getMap(this, sourceMapByPath).then((map) => { | ||
| if (!map) return null; | ||
@@ -40,4 +47,4 @@ this.map = map; | ||
| let decodingStart = process.hrtime(); | ||
| this.mappings = decode( map.mappings ); | ||
| let decodingTime = process.hrtime( decodingStart ); | ||
| this.mappings = decode(map.mappings); | ||
| let decodingTime = process.hrtime(decodingStart); | ||
| this._stats.decodingTime = 1e9 * decodingTime[0] + decodingTime[1]; | ||
@@ -47,7 +54,10 @@ | ||
| const sourceRoot = resolve( dirname( this.file || '' ), map.sourceRoot || '' ); | ||
| const sourceRoot = resolve( | ||
| dirname(this.file || ''), | ||
| map.sourceRoot || '' | ||
| ); | ||
| this.sources = map.sources.map( ( source, i ) => { | ||
| this.sources = map.sources.map((source, i) => { | ||
| return new Node({ | ||
| file: source ? resolve( sourceRoot, source ) : null, | ||
| file: source ? resolve(sourceRoot, source) : null, | ||
| content: sourcesContent[i] | ||
@@ -57,43 +67,50 @@ }); | ||
| const promises = this.sources.map( node => node.load( sourcesContentByPath, sourceMapByPath ) ); | ||
| return Promise.all( promises ); | ||
| const promises = this.sources.map((node) => | ||
| node.load(sourcesContentByPath, sourceMapByPath) | ||
| ); | ||
| return Promise.all(promises); | ||
| }); | ||
| }); | ||
| }, | ||
| } | ||
| loadSync ( sourcesContentByPath, sourceMapByPath ) { | ||
| if ( !this.content ) { | ||
| if ( !sourcesContentByPath[ this.file ] ) { | ||
| sourcesContentByPath[ this.file ] = readFileSync( this.file, { encoding: 'utf-8' }); | ||
| loadSync(sourcesContentByPath, sourceMapByPath) { | ||
| if (!this.content) { | ||
| if (!sourcesContentByPath[this.file]) { | ||
| sourcesContentByPath[this.file] = readFileSync(this.file, { | ||
| encoding: 'utf-8' | ||
| }); | ||
| } | ||
| this.content = sourcesContentByPath[ this.file ]; | ||
| this.content = sourcesContentByPath[this.file]; | ||
| } else { | ||
| sourcesContentByPath[ this.file ] = this.content; | ||
| sourcesContentByPath[this.file] = this.content; | ||
| } | ||
| const map = getMap( this, sourceMapByPath, true ); | ||
| const map = getMap(this, sourceMapByPath, true); | ||
| let sourcesContent; | ||
| if ( !map ) { | ||
| if (!map) { | ||
| this.isOriginalSource = true; | ||
| } else { | ||
| this.map = map; | ||
| this.mappings = decode( map.mappings ); | ||
| this.mappings = decode(map.mappings); | ||
| sourcesContent = map.sourcesContent || []; | ||
| const sourceRoot = resolve( dirname( this.file || '' ), map.sourceRoot || '' ); | ||
| const sourceRoot = resolve( | ||
| dirname(this.file || ''), | ||
| map.sourceRoot || '' | ||
| ); | ||
| this.sources = map.sources.map( ( source, i ) => { | ||
| this.sources = map.sources.map((source, i) => { | ||
| const node = new Node({ | ||
| file: resolve( sourceRoot, source ), | ||
| file: resolve(sourceRoot, source), | ||
| content: sourcesContent[i] | ||
| }); | ||
| node.loadSync( sourcesContentByPath, sourceMapByPath ); | ||
| node.loadSync(sourcesContentByPath, sourceMapByPath); | ||
| return node; | ||
| }); | ||
| } | ||
| }, | ||
| } | ||
@@ -106,3 +123,3 @@ /** | ||
| segment as found in `this` | ||
| * @param {string || null} - if specified, the name that should be | ||
| * @param {string | null} name - if specified, the name that should be | ||
| (eventually) returned, as it is closest to the generated code | ||
@@ -113,9 +130,9 @@ * @returns {object} | ||
| @property {number} column - the zero-based column index | ||
| @property {string || null} name - the name corresponding | ||
| @property {string | null} name - the name corresponding | ||
| to the segment being traced | ||
| */ | ||
| trace ( lineIndex, columnIndex, name ) { | ||
| trace(lineIndex, columnIndex, name) { | ||
| // If this node doesn't have a source map, we have | ||
| // to assume it is the original source | ||
| if ( this.isOriginalSource ) { | ||
| if (this.isOriginalSource) { | ||
| return { | ||
@@ -131,21 +148,21 @@ source: this.file, | ||
| // the intermediate file corresponds to in *its* source | ||
| const segments = this.mappings[ lineIndex ]; | ||
| const segments = this.mappings[lineIndex]; | ||
| if ( !segments || segments.length === 0 ) { | ||
| if (!segments || segments.length === 0) { | ||
| return null; | ||
| } | ||
| if ( columnIndex != null ) { | ||
| if (columnIndex != null) { | ||
| let len = segments.length; | ||
| let i; | ||
| for ( i = 0; i < len; i += 1 ) { | ||
| for (i = 0; i < len; i += 1) { | ||
| let generatedCodeColumn = segments[i][0]; | ||
| if ( generatedCodeColumn > columnIndex ) { | ||
| if (generatedCodeColumn > columnIndex) { | ||
| break; | ||
| } | ||
| if ( generatedCodeColumn === columnIndex ) { | ||
| if ( segments[i].length < 4 ) return null; | ||
| if (generatedCodeColumn === columnIndex) { | ||
| if (segments[i].length < 4) return null; | ||
@@ -157,4 +174,8 @@ let sourceFileIndex = segments[i][1]; | ||
| let parent = this.sources[ sourceFileIndex ]; | ||
| return parent.trace( sourceCodeLine, sourceCodeColumn, this.map.names[ nameIndex ] || name ); | ||
| let parent = this.sources[sourceFileIndex]; | ||
| return parent.trace( | ||
| sourceCodeLine, | ||
| sourceCodeColumn, | ||
| this.map.names[nameIndex] || name | ||
| ); | ||
| } | ||
@@ -169,17 +190,21 @@ } | ||
| let parent = this.sources[ sourceFileIndex ]; | ||
| return parent.trace( sourceCodeLine, null, this.map.names[ nameIndex ] || name ); | ||
| let parent = this.sources[sourceFileIndex]; | ||
| return parent.trace( | ||
| sourceCodeLine, | ||
| null, | ||
| this.map.names[nameIndex] || name | ||
| ); | ||
| } | ||
| }; | ||
| } | ||
| function getContent ( node, sourcesContentByPath ) { | ||
| if ( node.file in sourcesContentByPath ) { | ||
| node.content = sourcesContentByPath[ node.file ]; | ||
| function getContent(node, sourcesContentByPath) { | ||
| if (node.file in sourcesContentByPath) { | ||
| node.content = sourcesContentByPath[node.file]; | ||
| } | ||
| if ( !node.content ) { | ||
| return readFile( node.file, { encoding: 'utf-8' }); | ||
| if (!node.content) { | ||
| return readFile(node.file, 'utf-8'); | ||
| } | ||
| return Promise.resolve( node.content ); | ||
| return Promise.resolve(node.content); | ||
| } |
+17
-15
| import btoa from './utils/btoa.js'; | ||
| export default function SourceMap ( properties ) { | ||
| this.version = 3; | ||
| export default class SourceMap { | ||
| constructor(properties) { | ||
| this.version = 3; | ||
| this.file = properties.file; | ||
| this.sources = properties.sources; | ||
| this.sourcesContent = properties.sourcesContent; | ||
| this.names = properties.names; | ||
| this.mappings = properties.mappings; | ||
| } | ||
| this.file = properties.file; | ||
| this.sources = properties.sources; | ||
| this.sourcesContent = properties.sourcesContent; | ||
| this.names = properties.names; | ||
| this.mappings = properties.mappings; | ||
| } | ||
| SourceMap.prototype = { | ||
| toString () { | ||
| return JSON.stringify( this ); | ||
| }, | ||
| toString() { | ||
| return JSON.stringify(this); | ||
| } | ||
| toUrl () { | ||
| return 'data:application/json;charset=utf-8;base64,' + btoa( this.toString() ); | ||
| toUrl() { | ||
| return ( | ||
| 'data:application/json;charset=utf-8;base64,' + btoa(this.toString()) | ||
| ); | ||
| } | ||
| }; | ||
| } |
@@ -6,4 +6,4 @@ /** | ||
| */ | ||
| export default function atob ( base64 ) { | ||
| return new Buffer( base64, 'base64' ).toString( 'utf8' ); | ||
| } | ||
| export default function atob(base64) { | ||
| return Buffer.from(base64, 'base64').toString('utf8'); | ||
| } |
@@ -6,4 +6,4 @@ /** | ||
| */ | ||
| export default function btoa ( str ) { | ||
| return new Buffer( str ).toString( 'base64' ); | ||
| } | ||
| export default function btoa(str) { | ||
| return Buffer.from(str).toString('base64'); | ||
| } |
+9
-12
@@ -1,21 +0,18 @@ | ||
| import { Promise } from 'sander'; | ||
| import getMapFromUrl from './getMapFromUrl.js'; | ||
| import getSourceMappingUrl from './getSourceMappingUrl.js'; | ||
| export default function getMap ( node, sourceMapByPath, sync ) { | ||
| if ( node.file in sourceMapByPath ) { | ||
| const map = sourceMapByPath[ node.file ]; | ||
| return sync ? map : Promise.resolve( map ); | ||
| } | ||
| export default function getMap(node, sourceMapByPath, sync) { | ||
| if (node.file in sourceMapByPath) { | ||
| const map = sourceMapByPath[node.file]; | ||
| return sync ? map : Promise.resolve(map); | ||
| } else { | ||
| const url = getSourceMappingUrl(node.content); | ||
| else { | ||
| const url = getSourceMappingUrl( node.content ); | ||
| if ( !url ) { | ||
| if (!url) { | ||
| node.isOriginalSource = true; | ||
| return sync ? null : Promise.resolve( null ); | ||
| return sync ? null : Promise.resolve(null); | ||
| } | ||
| return getMapFromUrl( url, node.file, sync ); | ||
| return getMapFromUrl(url, node.file, sync); | ||
| } | ||
| } |
@@ -1,11 +0,11 @@ | ||
import { dirname, resolve } from 'node:path';
import { readFile, readFileSync } from 'node:fs';
import atob from './atob.js';
import SOURCEMAPPING_URL from './sourceMappingURL.js';

/**
 * Parses sourcemap JSON, wrapping parse failures in a descriptive error.
 * @param {string} json - raw JSON text
 * @param {string} url - where the JSON came from (used in the error message)
 * @returns {object} the parsed sourcemap
 * @throws {Error} when the JSON is malformed
 */
function parseJSON(json, url) {
	try {
		return JSON.parse(json);
	} catch (err) {
		throw new Error(`Could not parse sourcemap (${url}): ${err.message}`);
	}
}

/**
 * Turns a sourceMappingURL into a sourcemap
 * @param {string} url - the sourceMappingURL. Can be a base64-encoded data URI
 * @param {string} base - the URL against which relative URLs are resolved
 * @param {boolean} sync - if `true`, return the sourcemap directly,
 *   otherwise return a promise for it
 * @returns {object|Promise<object>} a version 3 sourcemap
 */
export default function getMapFromUrl(url, base, sync) {
	if (/^data:/.test(url)) {
		// TODO beef this up
		const match = /base64,(.+)$/.exec(url);

		if (!match) {
			throw new Error(`${SOURCEMAPPING_URL} is not base64-encoded`);
		}

		const json = atob(match[1]);
		const map = parseJSON(json, `data URI in ${base}`);
		return sync ? map : Promise.resolve(map);
	}

	// relative URLs are resolved against the file that referenced the map
	url = resolve(dirname(base), decodeURI(url));

	if (sync) {
		return parseJSON(readFileSync(url, { encoding: 'utf-8' }), url);
	}

	// Read asynchronously so the async API does not block the event loop
	// (wrapping readFileSync in Promise.resolve would still read synchronously).
	return new Promise((fulfil, reject) => {
		readFile(url, { encoding: 'utf-8' }, (err, json) => {
			if (err) {
				reject(err);
				return;
			}
			try {
				fulfil(parseJSON(json, url));
			} catch (parseError) {
				reject(parseError);
			}
		});
	});
}
import SOURCEMAPPING_URL from './sourceMappingURL.js';

/**
 * Extracts the sourceMappingURL from a code string.
 * @param {string} str - the generated code to scan
 * @returns {string|null} the URL, or null when no marker is present
 */
export default function getSourceMappingUrl(str) {
	// assume we want the last occurrence
	const index = str.lastIndexOf(`${SOURCEMAPPING_URL}=`);

	if (index === -1) {
		return null;
	}

	// Skip past "sourceMappingURL=" by deriving the offset from the constant
	// instead of hard-coding 17, so this stays correct if the marker changes.
	const substring = str.substring(index + SOURCEMAPPING_URL.length + 1);
	const match = /^[^\r\n]+/.exec(substring);

	let url = match ? match[0] : null;

	// possibly a better way to do this, but we don't want to exclude whitespace
	// from the sourceMappingURL because it might not have been correctly encoded
	if (url && url.slice(-2) === '*/') {
		url = url.slice(0, -2).trim();
	}

	return url;
}
@@ -1,5 +0,3 @@ | ||
/**
 * Normalises Windows path separators to forward slashes.
 * Non-string inputs are returned unchanged.
 * @param {*} path - candidate path
 * @returns {*} the normalised string, or the input as-is
 */
export default function slash(path) {
	if (typeof path !== 'string') {
		return path;
	}
	return path.replace(/\\/g, '/');
}
| 'use strict'; | ||
| var path = require('path'); | ||
| var sander = require('sander'); | ||
// --- Inlined sourcemap "mappings" codec: shared tables and text decoder ---

// Character codes used as segment (',') and line (';') separators.
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
// Base64 alphabet; the two tables map 6-bit values <-> ASCII codes.
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
    const c = chars.charCodeAt(i);
    intToChar[i] = c;
    charToInt[c] = i;
}
// Provide a fallback for older environments.
// Prefer native TextDecoder; otherwise Buffer (Node); finally a per-char loop.
const td = typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
        ? {
            decode(buf) {
                const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                return out.toString();
            },
        }
        : {
            decode(buf) {
                let out = '';
                for (let i = 0; i < buf.length; i++) {
                    out += String.fromCharCode(buf[i]);
                }
                return out;
            },
        };
/**
 * Decodes a VLQ "mappings" string into arrays of segments, one array per
 * generated line. `state` carries the five running fields (genColumn,
 * sourcesIndex, sourceLine, sourceColumn, namesIndex), which the format
 * delta-encodes across segments.
 */
function decode(mappings) {
    const state = new Int32Array(5);
    const decoded = [];
    let index = 0;
    do {
        const semi = indexOf(mappings, index);
        const line = [];
        let sorted = true;
        let lastCol = 0;
        state[0] = 0; // genColumn resets per line; other fields carry over
        for (let i = index; i < semi; i++) {
            let seg;
            i = decodeInteger(mappings, i, state, 0); // genColumn
            const col = state[0];
            if (col < lastCol)
                sorted = false;
            lastCol = col;
            if (hasMoreVlq(mappings, i, semi)) {
                i = decodeInteger(mappings, i, state, 1); // sourcesIndex
                i = decodeInteger(mappings, i, state, 2); // sourceLine
                i = decodeInteger(mappings, i, state, 3); // sourceColumn
                if (hasMoreVlq(mappings, i, semi)) {
                    i = decodeInteger(mappings, i, state, 4); // namesIndex
                    seg = [col, state[1], state[2], state[3], state[4]];
                }
                else {
                    seg = [col, state[1], state[2], state[3]];
                }
            }
            else {
                seg = [col];
            }
            line.push(seg);
        }
        // Segments are not guaranteed to be ordered by column; sort only when
        // an out-of-order column was actually seen.
        if (!sorted)
            sort(line);
        decoded.push(line);
        index = semi + 1;
    } while (index <= mappings.length);
    return decoded;
}
// Position of the next ';' (line separator) at or after `index`,
// or the string length when no further separator exists.
function indexOf(mappings, index) {
    const found = mappings.indexOf(';', index);
    return found < 0 ? mappings.length : found;
}
/**
 * Reads one base64 VLQ value starting at `pos`, adds the decoded delta into
 * `state[j]`, and returns the position just past the value.
 */
function decodeInteger(mappings, pos, state, j) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = mappings.charCodeAt(pos++);
        integer = charToInt[c];
        value |= (integer & 31) << shift; // low 5 bits are payload
        shift += 5;
    } while (integer & 32); // bit 6 set => continuation
    const shouldNegate = value & 1; // lowest bit carries the sign
    value >>>= 1;
    if (shouldNegate) {
        value = -0x80000000 | -value; // force the sign bit for negative values
    }
    state[j] += value;
    return pos;
}
// True when another VLQ value follows before the line/segment boundary.
function hasMoreVlq(mappings, i, length) {
    if (i >= length)
        return false;
    return mappings.charCodeAt(i) !== comma;
}
// Sorts a line's segments in place, ascending by generated column.
function sort(line) {
    line.sort(sortComparator);
}

// Ascending comparator on generated column (segment[0]).
function sortComparator(left, right) {
    return left[0] - right[0];
}
/**
 * Re-encodes decoded mapping lines into a VLQ "mappings" string. Output is
 * accumulated into a Uint8Array and flushed through `td` in chunks to avoid
 * building one huge intermediate string.
 */
function encode(decoded) {
    const state = new Int32Array(5);
    const bufLength = 1024 * 16;
    const subLength = bufLength - 36;
    const buf = new Uint8Array(bufLength);
    const sub = buf.subarray(0, subLength);
    let pos = 0;
    let out = '';
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0) {
            if (pos === bufLength) {
                out += td.decode(buf);
                pos = 0;
            }
            buf[pos++] = semicolon;
        }
        if (line.length === 0)
            continue;
        state[0] = 0; // genColumn restarts per line
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            // We can push up to 5 ints, each int can take at most 7 chars, and we
            // may push a comma.
            if (pos > subLength) {
                out += td.decode(sub);
                buf.copyWithin(0, subLength, pos);
                pos -= subLength;
            }
            if (j > 0)
                buf[pos++] = comma;
            pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
            if (segment.length === 1)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
            pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
            pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
            if (segment.length === 4)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
        }
    }
    return out + td.decode(buf.subarray(0, pos));
}
// Writes segment[j] - state[j] as a base64 VLQ into buf, updates state[j],
// and returns the new write position.
function encodeInteger(buf, pos, state, segment, j) {
    const next = segment[j];
    let num = next - state[j];
    state[j] = next;
    num = num < 0 ? (-num << 1) | 1 : num << 1; // sign goes in the low bit
    do {
        let clamped = num & 0b011111;
        num >>>= 5;
        if (num > 0)
            clamped |= 0b100000; // continuation bit
        buf[pos++] = intToChar[clamped];
    } while (num > 0);
    return pos;
}
/**
 * Decodes a base64 string
 * @param {string} base64 - the string to decode
 * @returns {string} the decoded utf8 text
 */
function atob ( base64 ) {
	// Buffer.from replaces the deprecated (and removed) `new Buffer(...)`.
	return Buffer.from( base64, 'base64' ).toString( 'utf8' );
}
// this looks ridiculous, but it prevents sourcemap tooling from mistaking
// this for an actual sourceMappingURL
var SOURCEMAPPING_URL = 'sourceMa';
SOURCEMAPPING_URL += 'ppingURL';

var SOURCEMAPPING_URL$1 = SOURCEMAPPING_URL; // bundler alias for the constant
// Parses sourcemap JSON; failures are re-thrown with the map's origin so the
// user can tell which file was unreadable.
function parseJSON ( json, url ) {
	var parsed;
	try {
		parsed = JSON.parse( json );
	} catch ( err ) {
		throw new Error( ("Could not parse sourcemap (" + url + "): " + (err.message)) );
	}
	return parsed;
}
/**
 * Turns a sourceMappingURL into a sourcemap
 * @param {string} url - the sourceMappingURL. Can be a
   base64-encoded data URI
 * @param {string} base - the URL against which relative URLS
   should be resolved
 * @param {boolean} sync - if `true`, return the sourcemap directly,
   otherwise return a promise for it
 * @returns {object} - a version 3 sourcemap
 */
function getMapFromUrl ( url, base, sync ) {
	if ( /^data:/.test( url ) ) { // TODO beef this up
		var match = /base64,(.+)$/.exec( url );

		if ( !match ) {
			throw new Error( (SOURCEMAPPING_URL$1 + " is not base64-encoded") );
		}

		var json = atob( match[1] );
		var map = parseJSON( json, ("data URI in " + base) );
		return sync ? map : sander.Promise.resolve( map );
	}

	// relative URLs are resolved against the file that referenced the map
	url = path.resolve( path.dirname( base ), decodeURI( url ) );

	if ( sync ) {
		return parseJSON( sander.readFileSync( url, { encoding: 'utf-8' }), url );
	} else {
		return sander.readFile( url, { encoding: 'utf-8' }).then( function (json) { return parseJSON( json, url ); } );
	}
}
/**
 * Finds the last sourceMappingURL comment in a code string.
 * @param {string} str - generated code
 * @returns {string|null} the URL, or null when none is present
 */
function getSourceMappingUrl ( str ) {
	var index, substring, url, match;

	// assume we want the last occurence
	index = str.lastIndexOf( (SOURCEMAPPING_URL$1 + "=") );

	if ( index === -1 ) {
		return null;
	}

	// 17 === 'sourceMappingURL='.length — skip past the marker itself
	substring = str.substring( index + 17 );
	match = /^[^\r\n]+/.exec( substring );

	url = match ? match[0] : null;

	// possibly a better way to do this, but we don't want to exclude whitespace
	// from the sourceMappingURL because it might not have been correctly encoded
	if ( url && url.slice( -2 ) === '*/' ) {
		url = url.slice( 0, -2 ).trim();
	}

	return url;
}
/**
 * Resolves a node's sourcemap: an explicitly supplied map (sourceMapByPath)
 * wins over the sourceMappingURL embedded in the node's content. Marks the
 * node as original source when neither exists.
 */
function getMap ( node, sourceMapByPath, sync ) {
	if ( node.file in sourceMapByPath ) {
		var map = sourceMapByPath[ node.file ];
		return sync ? map : sander.Promise.resolve( map );
	}

	else {
		var url = getSourceMappingUrl( node.content );

		if ( !url ) {
			node.isOriginalSource = true;
			return sync ? null : sander.Promise.resolve( null );
		}

		return getMapFromUrl( url, node.file, sync );
	}
}
/**
 * A Node represents one file in a sourcemap chain.
 * @param {object} ref
 * @param {string} [ref.file] - path to the file (resolved to absolute)
 * @param {string} [ref.content] - the file's content, when already known
 * @throws {Error} when neither file nor content is given
 */
function Node (ref) {
	var file = ref.file;
	var content = ref.content;

	this.file = file ? path.resolve( file ) : null;
	this.content = content || null; // sometimes exists in sourcesContent, sometimes doesn't

	if ( !this.file && this.content === null ) {
		throw new Error( 'A source must specify either file or content' );
	}

	// these get filled in later
	this.map = null;
	this.mappings = null;
	this.sources = null;
	this.isOriginalSource = null;

	// per-node timing/diagnostic counters (nanoseconds / counts)
	this._stats = {
		decodingTime: 0,
		encodingTime: 0,
		tracingTime: 0,
		untraceable: 0
	};
}
Node.prototype = {
	// Asynchronously loads this node's content and map, then recursively loads
	// every source the map references. Results are memoised in the two caches.
	load: function load ( sourcesContentByPath, sourceMapByPath ) {
		var this$1$1 = this;

		return getContent( this, sourcesContentByPath ).then( function (content) {
			this$1$1.content = sourcesContentByPath[ this$1$1.file ] = content;

			return getMap( this$1$1, sourceMapByPath ).then( function (map) {
				if ( !map ) { return null; }

				this$1$1.map = map;

				// time the VLQ decode for stat() reporting
				var decodingStart = process.hrtime();
				this$1$1.mappings = decode( map.mappings );
				var decodingTime = process.hrtime( decodingStart );
				this$1$1._stats.decodingTime = 1e9 * decodingTime[0] + decodingTime[1];

				var sourcesContent = map.sourcesContent || [];

				// sources are resolved against sourceRoot, which is itself
				// resolved against this file's directory
				var sourceRoot = path.resolve( path.dirname( this$1$1.file || '' ), map.sourceRoot || '' );

				this$1$1.sources = map.sources.map( function ( source, i ) {
					return new Node({
						file: source ? path.resolve( sourceRoot, source ) : null,
						content: sourcesContent[i]
					});
				});

				var promises = this$1$1.sources.map( function (node) { return node.load( sourcesContentByPath, sourceMapByPath ); } );
				return sander.Promise.all( promises );
			});
		});
	},

	// Synchronous counterpart of load().
	loadSync: function loadSync ( sourcesContentByPath, sourceMapByPath ) {
		if ( !this.content ) {
			if ( !sourcesContentByPath[ this.file ] ) {
				sourcesContentByPath[ this.file ] = sander.readFileSync( this.file, { encoding: 'utf-8' });
			}

			this.content = sourcesContentByPath[ this.file ];
		}

		var map = getMap( this, sourceMapByPath, true );
		var sourcesContent;

		if ( !map ) {
			this.isOriginalSource = true;
		} else {
			this.map = map;
			this.mappings = decode( map.mappings );

			sourcesContent = map.sourcesContent || [];

			var sourceRoot = path.resolve( path.dirname( this.file || '' ), map.sourceRoot || '' );

			this.sources = map.sources.map( function ( source, i ) {
				var node = new Node({
					file: path.resolve( sourceRoot, source ),
					content: sourcesContent[i]
				});

				node.loadSync( sourcesContentByPath, sourceMapByPath );
				return node;
			});
		}
	},

	/**
	 * Traces a segment back to its origin
	 * @param {number} lineIndex - the zero-based line index of the
	   segment as found in `this`
	 * @param {number} columnIndex - the zero-based column index of the
	   segment as found in `this`
	 * @param {string || null} - if specified, the name that should be
	   (eventually) returned, as it is closest to the generated code
	 * @returns {object}
	     @property {string} source - the filepath of the source
	     @property {number} line - the one-based line index
	     @property {number} column - the zero-based column index
	     @property {string || null} name - the name corresponding
	     to the segment being traced
	 */
	trace: function trace ( lineIndex, columnIndex, name ) {
		// If this node doesn't have a source map, we have
		// to assume it is the original source
		if ( this.isOriginalSource ) {
			return {
				source: this.file,
				line: lineIndex + 1,
				column: columnIndex || 0,
				name: name
			};
		}

		// Otherwise, we need to figure out what this position in
		// the intermediate file corresponds to in *its* source
		var segments = this.mappings[ lineIndex ];

		if ( !segments || segments.length === 0 ) {
			return null;
		}

		if ( columnIndex != null ) {
			var len = segments.length;
			var i;

			for ( i = 0; i < len; i += 1 ) {
				var generatedCodeColumn = segments[i][0];

				// segments are ordered by generated column, so we can stop early
				if ( generatedCodeColumn > columnIndex ) {
					break;
				}

				if ( generatedCodeColumn === columnIndex ) {
					// a 1-field segment has no source position to trace into
					if ( segments[i].length < 4 ) { return null; }

					var sourceFileIndex$1 = segments[i][1];
					var sourceCodeLine$1 = segments[i][2];
					var sourceCodeColumn = segments[i][3];
					var nameIndex$1 = segments[i][4];

					var parent$1 = this.sources[ sourceFileIndex$1 ];
					return parent$1.trace( sourceCodeLine$1, sourceCodeColumn, this.map.names[ nameIndex$1 ] || name );
				}
			}
		}

		// fall back to a line mapping
		var sourceFileIndex = segments[0][1];
		var sourceCodeLine = segments[0][2];
		var nameIndex = segments[0][4];

		var parent = this.sources[ sourceFileIndex ];
		return parent.trace( sourceCodeLine, null, this.map.names[ nameIndex ] || name );
	}
};
// Returns a promise for the node's content: cached value when present,
// otherwise read from disk.
function getContent ( node, sourcesContentByPath ) {
	if ( node.file in sourcesContentByPath ) {
		node.content = sourcesContentByPath[ node.file ];
	}

	if ( !node.content ) {
		return sander.readFile( node.file, { encoding: 'utf-8' });
	}

	return sander.Promise.resolve( node.content );
}
/**
 * Encodes a string as base64
 * @param {string} str - the string to encode
 * @returns {string}
 */
function btoa ( str ) {
	// Buffer.from replaces the deprecated (and removed) `new Buffer(...)`.
	return Buffer.from( str ).toString( 'base64' );
}
/**
 * Container for a version 3 sourcemap.
 * @param {object} properties - file, sources, sourcesContent, names, mappings
 */
function SourceMap ( properties ) {
	this.version = 3;

	// assignment order matters: it fixes the key order JSON.stringify emits
	this.file = properties.file;
	this.sources = properties.sources;
	this.sourcesContent = properties.sourcesContent;
	this.names = properties.names;
	this.mappings = properties.mappings;
}

SourceMap.prototype = {
	// Serialises the map as JSON.
	toString: function toString () {
		return JSON.stringify( this );
	},

	// Returns the map as a base64 data URI, suitable for inlining.
	toUrl: function toUrl () {
		return 'data:application/json;charset=utf-8;base64,' + btoa( this.toString() );
	}
};
// Normalises Windows path separators to forward slashes; any non-string
// input is returned unchanged.
function slash ( path ) {
	if ( typeof path !== 'string' ) { return path; }
	return path.replace( /\\/g, '/' );
}
// Matches a trailing sourceMappingURL comment in either JS (`//# ...`) or
// CSS (`/*# ... */`) form, so an existing comment can be stripped before a
// new one is appended.
var SOURCEMAP_COMMENT = new RegExp( "\n*(?:" +
	"\\/\\/[@#]\\s*" + SOURCEMAPPING_URL$1 + "=([^\n]+)|" + // js
	"\\/\\*#?\\s*" + SOURCEMAPPING_URL$1 + "=([^'\"]+)\\s\\*\\/)" + // css
	'\\s*$', 'g' );
/**
 * A Chain ties a loaded Node graph to the content cache and exposes the
 * apply/trace/write operations that flatten the sourcemap chain.
 * @param {Node} node - root node of the sourcemap chain
 * @param {object} sourcesContentByPath - file contents keyed by resolved path
 */
function Chain ( node, sourcesContentByPath ) {
	this.node = node;
	this.sourcesContentByPath = sourcesContentByPath;

	// Initialise every counter that stat() reads and apply() increments.
	// With a bare `{}`, `_stats.untraceable += 1` and the divisions in stat()
	// operate on undefined and produce NaN.
	this._stats = {
		decodingTime: 0,
		encodingTime: 0,
		tracingTime: 0,
		untraceable: 0
	};
}
Chain.prototype = {
	// Returns timing/diagnostic numbers, converted from nanoseconds to ms.
	// NOTE(review): relies on the _stats fields having been initialised to
	// numbers; the constructor creates `_stats = {}`, so before apply() runs
	// these divisions operate on undefined — confirm intended.
	stat: function stat () {
		return {
			selfDecodingTime: this._stats.decodingTime / 1e6,
			totalDecodingTime: ( this._stats.decodingTime + tally( this.node.sources, 'decodingTime' ) ) / 1e6,
			encodingTime: this._stats.encodingTime / 1e6,
			tracingTime: this._stats.tracingTime / 1e6,
			untraceable: this._stats.untraceable
		};
	},

	// Flattens the chain: traces every mapping of the root node back to its
	// ultimate origin and produces a single SourceMap.
	apply: function apply ( options ) {
		var this$1$1 = this;
		if ( options === void 0 ) options = {};

		var allNames = [];
		var allSources = [];

		// Traces one segment and appends the remapped segment to `result`;
		// untraceable segments are counted and dropped.
		var applySegment = function ( segment, result ) {
			if ( segment.length < 4 ) { return; }

			var traced = this$1$1.node.sources[ segment[1] ].trace( // source
				segment[2], // source code line
				segment[3], // source code column
				this$1$1.node.map.names[ segment[4] ]
			);

			if ( !traced ) {
				this$1$1._stats.untraceable += 1;
				return;
			}

			// intern the source path
			var sourceIndex = allSources.indexOf( traced.source );
			if ( !~sourceIndex ) {
				sourceIndex = allSources.length;
				allSources.push( traced.source );
			}

			var newSegment = [
				segment[0], // generated code column
				sourceIndex,
				traced.line - 1,
				traced.column
			];

			if ( traced.name ) {
				// intern the name
				var nameIndex = allNames.indexOf( traced.name );
				if ( !~nameIndex ) {
					nameIndex = allNames.length;
					allNames.push( traced.name );
				}

				newSegment[4] = nameIndex;
			}

			result[ result.length ] = newSegment;
		};

		// Trace mappings
		var tracingStart = process.hrtime();

		var i = this.node.mappings.length;
		var resolved = new Array( i );

		var j, line, result;

		while ( i-- ) {
			line = this.node.mappings[i];
			resolved[i] = result = [];

			for ( j = 0; j < line.length; j += 1 ) {
				applySegment( line[j], result );
			}
		}

		var tracingTime = process.hrtime( tracingStart );
		this._stats.tracingTime = 1e9 * tracingTime[0] + tracingTime[1];

		// Encode mappings
		var encodingStart = process.hrtime();
		var mappings = encode( resolved );
		var encodingTime = process.hrtime( encodingStart );
		this._stats.encodingTime = 1e9 * encodingTime[0] + encodingTime[1];

		var includeContent = options.includeContent !== false;

		return new SourceMap({
			file: path.basename( this.node.file ),
			sources: allSources.map( function (source) { return slash( path.relative( options.base || path.dirname( this$1$1.node.file ), source ) ); } ),
			sourcesContent: allSources.map( function (source) { return includeContent ? this$1$1.sourcesContentByPath[ source ] : null; } ),
			names: allNames,
			mappings: mappings
		});
	},

	// Traces a (1-based line, 0-based column) position in the generated file
	// back to its original location.
	trace: function trace ( oneBasedLineIndex, zeroBasedColumnIndex ) {
		return this.node.trace( oneBasedLineIndex - 1, zeroBasedColumnIndex, null );
	},

	// Writes the generated file (and, unless options.inline, its .map) to
	// dest; dest may be omitted, in which case the node's own file is used.
	write: function write ( dest, options ) {
		if ( typeof dest !== 'string' ) {
			options = dest;
			dest = this.node.file;
		}

		options = options || {};

		var ref = processWriteOptions( dest, this, options );
		var resolved = ref.resolved;
		var content = ref.content;
		var map = ref.map;

		var promises = [ sander.writeFile( resolved, content ) ];

		if ( !options.inline ) {
			promises.push( sander.writeFile( resolved + '.map', map.toString() ) );
		}

		return Promise.all( promises );
	},

	// Synchronous counterpart of write().
	writeSync: function writeSync ( dest, options ) {
		if ( typeof dest !== 'string' ) {
			options = dest;
			dest = this.node.file;
		}

		options = options || {};

		var ref = processWriteOptions( dest, this, options );
		var resolved = ref.resolved;
		var content = ref.content;
		var map = ref.map;

		sander.writeFileSync( resolved, content );

		if ( !options.inline ) {
			sander.writeFileSync( resolved + '.map', map.toString() );
		}
	}
};
// Computes everything write/writeSync need: the resolved destination path,
// the flattened map, and the content with its sourcemap comment replaced.
function processWriteOptions ( dest, chain, options ) {
	var resolved = path.resolve( dest );

	var map = chain.apply({
		includeContent: options.includeContent,
		base: options.base ? path.resolve( options.base ) : path.dirname( resolved )
	});

	// either a data URI (inline) or a reference to the .map file
	var url = options.inline ? map.toUrl() : ( options.absolutePath ? resolved : path.basename( resolved ) ) + '.map';

	// TODO shouldn't url be relative?
	var content = chain.node.content.replace( SOURCEMAP_COMMENT, '' ) + sourcemapComment( url, resolved );

	return { resolved: resolved, content: content, map: map };
}
// Sums a single named stat across a list of nodes.
function tally ( nodes, stat ) {
	var total = 0;
	for ( var i = 0; i < nodes.length; i += 1 ) {
		total += nodes[i]._stats[ stat ];
	}
	return total;
}
// Builds the trailing sourcemap comment appropriate for the destination's
// file type: a CSS block comment for .css, a JS line comment otherwise.
function sourcemapComment ( url, dest ) {
	var ext = path.extname( dest );
	url = encodeURI( url );

	if ( ext === '.css' ) {
		return ("\n/*# " + SOURCEMAPPING_URL$1 + "=" + url + " */\n");
	}

	return ("\n//# " + SOURCEMAPPING_URL$1 + "=" + url + "\n");
}
/**
 * Asynchronously loads `file` and its whole sourcemap chain.
 * @param {string} file - path of the generated file
 * @param {object} [options] - content/sourcemaps overrides keyed by path
 * @returns {Promise} resolves to a Chain, or null when the file has no map
 */
function load ( file, options ) {
	var ref = init( file, options );
	var node = ref.node;
	var sourcesContentByPath = ref.sourcesContentByPath;
	var sourceMapByPath = ref.sourceMapByPath;

	return node.load( sourcesContentByPath, sourceMapByPath )
		.then( function () { return node.isOriginalSource ? null : new Chain( node, sourcesContentByPath ); } );
}

/**
 * Synchronous counterpart of load().
 * @returns {Chain|null}
 */
function loadSync ( file, options ) {
	if ( options === void 0 ) options = {};

	var ref = init( file, options );
	var node = ref.node;
	var sourcesContentByPath = ref.sourcesContentByPath;
	var sourceMapByPath = ref.sourceMapByPath;

	node.loadSync( sourcesContentByPath, sourceMapByPath );
	return node.isOriginalSource ? null : new Chain( node, sourcesContentByPath );
}

// Builds the root node plus the two caches, seeding them with any
// caller-supplied content/sourcemaps (keys resolved to absolute paths).
function init ( file, options ) {
	if ( options === void 0 ) options = {};

	var node = new Node({ file: file });

	var sourcesContentByPath = {};
	var sourceMapByPath = {};

	if ( options.content ) {
		Object.keys( options.content ).forEach( function (key) {
			sourcesContentByPath[ path.resolve( key ) ] = options.content[ key ];
		});
	}

	if ( options.sourcemaps ) {
		Object.keys( options.sourcemaps ).forEach( function (key) {
			sourceMapByPath[ path.resolve( key ) ] = options.sourcemaps[ key ];
		});
	}

	return { node: node, sourcesContentByPath: sourcesContentByPath, sourceMapByPath: sourceMapByPath };
}

exports.load = load;
exports.loadSync = loadSync;
| import { resolve, dirname, basename, relative, extname } from 'path'; | ||
| import { Promise as Promise$1, readFileSync, readFile, writeFile, writeFileSync } from 'sander'; | ||
// ============================================================================
// ESM build: second inlined copy of the sourcemap "mappings" codec (the same
// code also appears in the CommonJS bundle earlier in this file).
// ============================================================================

// line/segment separator codes and base64 alphabet lookup tables
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
    const c = chars.charCodeAt(i);
    intToChar[i] = c;
    charToInt[c] = i;
}
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
        ? {
            decode(buf) {
                const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                return out.toString();
            },
        }
        : {
            decode(buf) {
                let out = '';
                for (let i = 0; i < buf.length; i++) {
                    out += String.fromCharCode(buf[i]);
                }
                return out;
            },
        };
// Decodes a VLQ "mappings" string into per-line arrays of segments; `state`
// carries the five delta-encoded fields across segments.
function decode(mappings) {
    const state = new Int32Array(5);
    const decoded = [];
    let index = 0;
    do {
        const semi = indexOf(mappings, index);
        const line = [];
        let sorted = true;
        let lastCol = 0;
        state[0] = 0; // genColumn resets per line
        for (let i = index; i < semi; i++) {
            let seg;
            i = decodeInteger(mappings, i, state, 0); // genColumn
            const col = state[0];
            if (col < lastCol)
                sorted = false;
            lastCol = col;
            if (hasMoreVlq(mappings, i, semi)) {
                i = decodeInteger(mappings, i, state, 1); // sourcesIndex
                i = decodeInteger(mappings, i, state, 2); // sourceLine
                i = decodeInteger(mappings, i, state, 3); // sourceColumn
                if (hasMoreVlq(mappings, i, semi)) {
                    i = decodeInteger(mappings, i, state, 4); // namesIndex
                    seg = [col, state[1], state[2], state[3], state[4]];
                }
                else {
                    seg = [col, state[1], state[2], state[3]];
                }
            }
            else {
                seg = [col];
            }
            line.push(seg);
        }
        if (!sorted)
            sort(line);
        decoded.push(line);
        index = semi + 1;
    } while (index <= mappings.length);
    return decoded;
}
// Position of the next ';' at/after `index`, or end of string.
function indexOf(mappings, index) {
    const idx = mappings.indexOf(';', index);
    return idx === -1 ? mappings.length : idx;
}
// Reads one VLQ value into state[j]; returns the position just past it.
function decodeInteger(mappings, pos, state, j) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = mappings.charCodeAt(pos++);
        integer = charToInt[c];
        value |= (integer & 31) << shift;
        shift += 5;
    } while (integer & 32);
    const shouldNegate = value & 1;
    value >>>= 1;
    if (shouldNegate) {
        value = -0x80000000 | -value;
    }
    state[j] += value;
    return pos;
}
// True when another VLQ value follows before the line/segment boundary.
function hasMoreVlq(mappings, i, length) {
    if (i >= length)
        return false;
    return mappings.charCodeAt(i) !== comma;
}
function sort(line) {
    line.sort(sortComparator);
}
function sortComparator(a, b) {
    return a[0] - b[0];
}
// Re-encodes decoded mapping lines into a VLQ "mappings" string, buffering
// output bytes and flushing them through `td` in chunks.
function encode(decoded) {
    const state = new Int32Array(5);
    const bufLength = 1024 * 16;
    const subLength = bufLength - 36;
    const buf = new Uint8Array(bufLength);
    const sub = buf.subarray(0, subLength);
    let pos = 0;
    let out = '';
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0) {
            if (pos === bufLength) {
                out += td.decode(buf);
                pos = 0;
            }
            buf[pos++] = semicolon;
        }
        if (line.length === 0)
            continue;
        state[0] = 0;
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            // We can push up to 5 ints, each int can take at most 7 chars, and we
            // may push a comma.
            if (pos > subLength) {
                out += td.decode(sub);
                buf.copyWithin(0, subLength, pos);
                pos -= subLength;
            }
            if (j > 0)
                buf[pos++] = comma;
            pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
            if (segment.length === 1)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
            pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
            pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
            if (segment.length === 4)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
        }
    }
    return out + td.decode(buf.subarray(0, pos));
}
// Writes segment[j] - state[j] as a base64 VLQ; returns the new position.
function encodeInteger(buf, pos, state, segment, j) {
    const next = segment[j];
    let num = next - state[j];
    state[j] = next;
    num = num < 0 ? (-num << 1) | 1 : num << 1;
    do {
        let clamped = num & 0b011111;
        num >>>= 5;
        if (num > 0)
            clamped |= 0b100000;
        buf[pos++] = intToChar[clamped];
    } while (num > 0);
    return pos;
}
/**
 * Decodes a base64 string
 * @param {string} base64 - the string to decode
 * @returns {string}
 */
function atob ( base64 ) {
	// Buffer.from replaces the deprecated (and removed) `new Buffer(...)`.
	return Buffer.from( base64, 'base64' ).toString( 'utf8' );
}
// this looks ridiculous, but it prevents sourcemap tooling from mistaking
// this for an actual sourceMappingURL
var SOURCEMAPPING_URL = 'sourceMa';
SOURCEMAPPING_URL += 'ppingURL';

var SOURCEMAPPING_URL$1 = SOURCEMAPPING_URL; // bundler alias for the constant

// Parses sourcemap JSON, including the map's origin in any parse error.
function parseJSON ( json, url ) {
	try {
		return JSON.parse( json );
	} catch ( err ) {
		throw new Error( ("Could not parse sourcemap (" + url + "): " + (err.message)) );
	}
}
/**
 * Turns a sourceMappingURL into a sourcemap
 * @param {string} url - the sourceMappingURL. Can be a
   base64-encoded data URI
 * @param {string} base - the URL against which relative URLS
   should be resolved
 * @param {boolean} sync - if `true`, return the sourcemap directly,
   otherwise return a promise for it
 * @returns {object} - a version 3 sourcemap
 */
function getMapFromUrl ( url, base, sync ) {
	if ( /^data:/.test( url ) ) { // TODO beef this up
		var match = /base64,(.+)$/.exec( url );

		if ( !match ) {
			throw new Error( (SOURCEMAPPING_URL$1 + " is not base64-encoded") );
		}

		var json = atob( match[1] );
		var map = parseJSON( json, ("data URI in " + base) );
		return sync ? map : Promise$1.resolve( map );
	}

	// relative URLs are resolved against the file that referenced the map
	url = resolve( dirname( base ), decodeURI( url ) );

	if ( sync ) {
		return parseJSON( readFileSync( url, { encoding: 'utf-8' }), url );
	} else {
		return readFile( url, { encoding: 'utf-8' }).then( function (json) { return parseJSON( json, url ); } );
	}
}
/**
 * Finds the last sourceMappingURL comment in a code string.
 * @param {string} str - generated code
 * @returns {string|null} the URL, or null when none is present
 */
function getSourceMappingUrl ( str ) {
	var index, substring, url, match;

	// assume we want the last occurence
	index = str.lastIndexOf( (SOURCEMAPPING_URL$1 + "=") );

	if ( index === -1 ) {
		return null;
	}

	// 17 === 'sourceMappingURL='.length — skip past the marker itself
	substring = str.substring( index + 17 );
	match = /^[^\r\n]+/.exec( substring );

	url = match ? match[0] : null;

	// possibly a better way to do this, but we don't want to exclude whitespace
	// from the sourceMappingURL because it might not have been correctly encoded
	if ( url && url.slice( -2 ) === '*/' ) {
		url = url.slice( 0, -2 ).trim();
	}

	return url;
}
/**
 * Resolves the sourcemap for a node: a map supplied up-front via
 * `sourceMapByPath` wins; otherwise the node's content is scanned for a
 * sourceMappingURL. Nodes with no map are flagged as original sources.
 * @param {object} node - the node whose map is wanted
 * @param {object} sourceMapByPath - user-supplied maps, keyed by file path
 * @param {boolean} sync - if `true`, return values directly instead of promises
 * @returns {object|Promise|null}
 */
function getMap ( node, sourceMapByPath, sync ) {
	if ( node.file in sourceMapByPath ) {
		var suppliedMap = sourceMapByPath[ node.file ];
		return sync ? suppliedMap : Promise$1.resolve( suppliedMap );
	}

	var url = getSourceMappingUrl( node.content );

	if ( !url ) {
		// no map to follow — this node is a leaf of the chain
		node.isOriginalSource = true;
		return sync ? null : Promise$1.resolve( null );
	}

	return getMapFromUrl( url, node.file, sync );
}
/**
 * One file in a sourcemap chain. Either `file` or `content` must be
 * supplied; the map-related fields are populated later by `load`/`loadSync`.
 * @param {object} ref
 * @param {string} [ref.file] - path to the file on disk
 * @param {string} [ref.content] - the file's contents, if already known
 * @throws {Error} if neither file nor content is given
 */
function Node ( ref ) {
	this.file = ref.file ? resolve( ref.file ) : null;
	this.content = ref.content || null; // sometimes exists in sourcesContent, sometimes doesn't

	if ( !this.file && this.content === null ) {
		throw new Error( 'A source must specify either file or content' );
	}

	// these get filled in later
	this.map = null;
	this.mappings = null;
	this.sources = null;
	this.isOriginalSource = null;

	this._stats = {
		decodingTime: 0,
		encodingTime: 0,
		tracingTime: 0,
		untraceable: 0
	};
}
Node.prototype = {
	/**
	 * Asynchronously reads this node's content and sourcemap, then
	 * recursively loads every node referenced by the map's `sources`.
	 * @param {object} sourcesContentByPath - cache of file contents, keyed by path
	 * @param {object} sourceMapByPath - user-supplied sourcemaps, keyed by path
	 * @returns {Promise} resolves once the chain beneath this node is loaded
	 */
	load: function load ( sourcesContentByPath, sourceMapByPath ) {
		var this$1$1 = this;

		return getContent( this, sourcesContentByPath ).then( function (content) {
			this$1$1.content = sourcesContentByPath[ this$1$1.file ] = content;

			// NOTE(review): getMap's third `sync` argument is omitted here,
			// so the promise-returning path is taken
			return getMap( this$1$1, sourceMapByPath ).then( function (map) {
				if ( !map ) { return null; }

				this$1$1.map = map;

				// time the decoding of the mappings string for stat reporting
				var decodingStart = process.hrtime();
				this$1$1.mappings = decode( map.mappings );
				var decodingTime = process.hrtime( decodingStart );
				this$1$1._stats.decodingTime = 1e9 * decodingTime[0] + decodingTime[1];

				var sourcesContent = map.sourcesContent || [];

				// relative `sources` entries resolve against sourceRoot, which
				// is itself resolved against this file's directory
				var sourceRoot = resolve( dirname( this$1$1.file || '' ), map.sourceRoot || '' );

				this$1$1.sources = map.sources.map( function ( source, i ) {
					return new Node({
						file: source ? resolve( sourceRoot, source ) : null,
						content: sourcesContent[i]
					});
				});

				// load all parent sources in parallel
				var promises = this$1$1.sources.map( function (node) { return node.load( sourcesContentByPath, sourceMapByPath ); } );
				return Promise$1.all( promises );
			});
		});
	},

	/**
	 * Synchronous counterpart of `load`: reads content and map with
	 * blocking I/O and recurses into the sources.
	 * @param {object} sourcesContentByPath - cache of file contents, keyed by path
	 * @param {object} sourceMapByPath - user-supplied sourcemaps, keyed by path
	 */
	loadSync: function loadSync ( sourcesContentByPath, sourceMapByPath ) {
		if ( !this.content ) {
			if ( !sourcesContentByPath[ this.file ] ) {
				sourcesContentByPath[ this.file ] = readFileSync( this.file, { encoding: 'utf-8' });
			}

			this.content = sourcesContentByPath[ this.file ];
		}

		var map = getMap( this, sourceMapByPath, true );
		var sourcesContent;

		if ( !map ) {
			this.isOriginalSource = true;
		} else {
			this.map = map;
			this.mappings = decode( map.mappings );

			sourcesContent = map.sourcesContent || [];

			var sourceRoot = resolve( dirname( this.file || '' ), map.sourceRoot || '' );

			this.sources = map.sources.map( function ( source, i ) {
				// NOTE(review): unlike `load`, `source` is not null-checked
				// before resolving — confirm null sources cannot occur here
				var node = new Node({
					file: resolve( sourceRoot, source ),
					content: sourcesContent[i]
				});

				node.loadSync( sourcesContentByPath, sourceMapByPath );
				return node;
			});
		}
	},

	/**
	 * Traces a segment back to its origin.
	 * @param {number} lineIndex - the zero-based line index of the segment
	 *   as found in `this`
	 * @param {number} columnIndex - the zero-based column index of the
	 *   segment as found in `this`
	 * @param {string|null} name - if specified, the name that should be
	 *   (eventually) returned, as it is closest to the generated code
	 * @returns {object|null}
	 *   @property {string} source - the filepath of the source
	 *   @property {number} line - the ONE-based line index
	 *   @property {number} column - the zero-based column index
	 *   @property {string|null} name - the name corresponding to the
	 *     segment being traced
	 */
	trace: function trace ( lineIndex, columnIndex, name ) {
		// If this node doesn't have a source map, we have
		// to assume it is the original source
		if ( this.isOriginalSource ) {
			return {
				source: this.file,
				line: lineIndex + 1,
				column: columnIndex || 0,
				name: name
			};
		}

		// Otherwise, we need to figure out what this position in
		// the intermediate file corresponds to in *its* source
		var segments = this.mappings[ lineIndex ];

		if ( !segments || segments.length === 0 ) {
			return null;
		}

		if ( columnIndex != null ) {
			var len = segments.length;
			var i;

			for ( i = 0; i < len; i += 1 ) {
				var generatedCodeColumn = segments[i][0];

				// segments are ordered by generated column, so once we pass
				// the target column there can be no exact match
				if ( generatedCodeColumn > columnIndex ) {
					break;
				}

				if ( generatedCodeColumn === columnIndex ) {
					// segments with fewer than 4 fields carry no source info
					if ( segments[i].length < 4 ) { return null; }

					var sourceFileIndex$1 = segments[i][1];
					var sourceCodeLine$1 = segments[i][2];
					var sourceCodeColumn = segments[i][3];
					var nameIndex$1 = segments[i][4];

					// recurse into the parent source
					var parent$1 = this.sources[ sourceFileIndex$1 ];
					return parent$1.trace( sourceCodeLine$1, sourceCodeColumn, this.map.names[ nameIndex$1 ] || name );
				}
			}
		}

		// fall back to a line mapping
		var sourceFileIndex = segments[0][1];
		var sourceCodeLine = segments[0][2];
		var nameIndex = segments[0][4];

		var parent = this.sources[ sourceFileIndex ];
		return parent.trace( sourceCodeLine, null, this.map.names[ nameIndex ] || name );
	}
};
/**
 * Returns a node's content as a promise, preferring a value already cached
 * in `sourcesContentByPath`, then the content already on the node, and
 * finally the file on disk.
 * @param {object} node
 * @param {object} sourcesContentByPath - cache of file contents, keyed by path
 * @returns {Promise<string>}
 */
function getContent ( node, sourcesContentByPath ) {
	if ( node.file in sourcesContentByPath ) {
		node.content = sourcesContentByPath[ node.file ];
	}

	// if content is still missing, fall back to reading the file itself
	return node.content
		? Promise$1.resolve( node.content )
		: readFile( node.file, { encoding: 'utf-8' });
}
/**
 * Encodes a string as base64
 * @param {string} str - the string to encode
 * @returns {string} the base64-encoded result
 */
function btoa ( str ) {
	// Buffer.from replaces the deprecated `new Buffer(...)` constructor;
	// utf8 is made explicit (it was the implicit default before).
	return Buffer.from( str, 'utf8' ).toString( 'base64' );
}
/**
 * A version 3 sourcemap object. Holds the standard fields; serialisation
 * lives on the prototype (`toString`/`toUrl`).
 * @param {object} properties - file, sources, sourcesContent, names, mappings
 */
function SourceMap ( properties ) {
	// always emit a version 3 map
	this.version = 3;

	// copy the standard fields in their canonical order
	[ 'file', 'sources', 'sourcesContent', 'names', 'mappings' ].forEach( function ( key ) {
		this[ key ] = properties[ key ];
	}, this );
}
SourceMap.prototype = {
	/**
	 * Serialises the map as JSON.
	 * @returns {string}
	 */
	toString: function toString () {
		return JSON.stringify( this );
	},

	/**
	 * Returns the map as a base64-encoded `data:` URI, suitable for
	 * appending inline to the generated file.
	 * @returns {string}
	 */
	toUrl: function toUrl () {
		return 'data:application/json;charset=utf-8;base64,' + btoa( this.toString() );
	}
};
/**
 * Converts Windows-style backslashes in a path to forward slashes.
 * Non-string values pass through untouched.
 * @param {*} path
 * @returns {*} the normalised string, or the input unchanged
 */
function slash ( path ) {
	if ( typeof path !== 'string' ) {
		return path;
	}

	return path.replace( /\\/g, '/' );
}
// Matches a trailing sourceMappingURL comment (and any preceding blank
// lines) so it can be stripped before a fresh comment is appended:
//   //[@#] sourceMappingURL=...    (JS style)
//   /*# sourceMappingURL=... */    (CSS style)
// NOTE(review): the CSS branch requires a whitespace character before the
// closing `*/` — confirm that is intentional.
var SOURCEMAP_COMMENT = new RegExp( "\n*(?:" +
	"\\/\\/[@#]\\s*" + SOURCEMAPPING_URL$1 + "=([^\n]+)|" + // js
	"\\/\\*#?\\s*" + SOURCEMAPPING_URL$1 + "=([^'\"]+)\\s\\*\\/)" + // css
	'\\s*$', 'g' );
/**
 * A loaded sourcemap chain, rooted at `node` (the generated file).
 * @param {object} node - the root Node
 * @param {object} sourcesContentByPath - cache of file contents, keyed by path
 */
function Chain ( node, sourcesContentByPath ) {
	this.node = node;
	this.sourcesContentByPath = sourcesContentByPath;

	// Initialise every counter that `apply`/`stat` touch. Previously this
	// was `{}`, so `untraceable += 1` produced NaN on the first untraceable
	// segment and `stat()` reported NaN for decoding time.
	this._stats = {
		decodingTime: 0,
		encodingTime: 0,
		tracingTime: 0,
		untraceable: 0
	};
}
Chain.prototype = {
	/**
	 * Reports timings (converted from nanoseconds to milliseconds) gathered
	 * while tracing/encoding, plus the number of untraceable segments.
	 * @returns {object}
	 */
	stat: function stat () {
		return {
			// NOTE(review): decodingTime is recorded on Node._stats during
			// load, not on this Chain's own _stats — confirm selfDecodingTime
			// is populated as intended
			selfDecodingTime: this._stats.decodingTime / 1e6,
			totalDecodingTime: ( this._stats.decodingTime + tally( this.node.sources, 'decodingTime' ) ) / 1e6,

			encodingTime: this._stats.encodingTime / 1e6,
			tracingTime: this._stats.tracingTime / 1e6,

			untraceable: this._stats.untraceable
		};
	},

	/**
	 * Flattens the chain into a single v3 SourceMap: every segment of the
	 * root node's mappings is traced back to its ultimate origin.
	 * @param {object} [options]
	 * @param {boolean} [options.includeContent] - include sourcesContent
	 *   unless explicitly `false`
	 * @param {string} [options.base] - base directory for relative source paths
	 * @returns {SourceMap}
	 */
	apply: function apply ( options ) {
		var this$1$1 = this;
		if ( options === void 0 ) options = {};

		var allNames = [];
		var allSources = [];

		// Traces one decoded segment to its origin and appends the rebuilt
		// segment (indices into allSources/allNames) to `result`
		var applySegment = function ( segment, result ) {
			// segments with fewer than 4 fields carry no source info
			if ( segment.length < 4 ) { return; }

			var traced = this$1$1.node.sources[ segment[1] ].trace( // source
				segment[2], // source code line
				segment[3], // source code column
				this$1$1.node.map.names[ segment[4] ]
			);

			if ( !traced ) {
				this$1$1._stats.untraceable += 1;
				return;
			}

			// register the traced source file, deduplicated
			var sourceIndex = allSources.indexOf( traced.source );
			if ( !~sourceIndex ) {
				sourceIndex = allSources.length;
				allSources.push( traced.source );
			}

			var newSegment = [
				segment[0], // generated code column
				sourceIndex,
				traced.line - 1,
				traced.column
			];

			if ( traced.name ) {
				// register the traced name, deduplicated
				var nameIndex = allNames.indexOf( traced.name );
				if ( !~nameIndex ) {
					nameIndex = allNames.length;
					allNames.push( traced.name );
				}

				newSegment[4] = nameIndex;
			}

			result[ result.length ] = newSegment;
		};

		// Trace mappings
		var tracingStart = process.hrtime();

		var i = this.node.mappings.length;
		var resolved = new Array( i );

		var j, line, result;

		// iterate lines in reverse; order is irrelevant since each line is
		// resolved independently into its own slot
		while ( i-- ) {
			line = this.node.mappings[i];
			resolved[i] = result = [];

			for ( j = 0; j < line.length; j += 1 ) {
				applySegment( line[j], result );
			}
		}

		var tracingTime = process.hrtime( tracingStart );
		this._stats.tracingTime = 1e9 * tracingTime[0] + tracingTime[1];

		// Encode mappings
		var encodingStart = process.hrtime();
		var mappings = encode( resolved );
		var encodingTime = process.hrtime( encodingStart );
		this._stats.encodingTime = 1e9 * encodingTime[0] + encodingTime[1];

		var includeContent = options.includeContent !== false;

		return new SourceMap({
			file: basename( this.node.file ),
			sources: allSources.map( function (source) { return slash( relative( options.base || dirname( this$1$1.node.file ), source ) ); } ),
			sourcesContent: allSources.map( function (source) { return includeContent ? this$1$1.sourcesContentByPath[ source ] : null; } ),
			names: allNames,
			mappings: mappings
		});
	},

	/**
	 * Traces a single position in the generated file back to its origin.
	 * @param {number} oneBasedLineIndex - ONE-based line in the generated file
	 * @param {number} zeroBasedColumnIndex - zero-based column
	 * @returns {object|null} { source, line, column, name }, or null
	 */
	trace: function trace ( oneBasedLineIndex, zeroBasedColumnIndex ) {
		return this.node.trace( oneBasedLineIndex - 1, zeroBasedColumnIndex, null );
	},

	/**
	 * Asynchronously writes the generated file (with a refreshed
	 * sourceMappingURL comment) and, unless `options.inline`, a sibling
	 * `.map` file.
	 * @param {string} [dest] - destination; defaults to the node's own file
	 * @param {object} [options] - inline / absolutePath / includeContent / base
	 * @returns {Promise}
	 */
	write: function write ( dest, options ) {
		// `dest` is optional — support write(options)
		if ( typeof dest !== 'string' ) {
			options = dest;
			dest = this.node.file;
		}

		options = options || {};

		var ref = processWriteOptions( dest, this, options );
		var resolved = ref.resolved;
		var content = ref.content;
		var map = ref.map;

		var promises = [ writeFile( resolved, content ) ];

		if ( !options.inline ) {
			promises.push( writeFile( resolved + '.map', map.toString() ) );
		}

		return Promise.all( promises );
	},

	/**
	 * Synchronous counterpart of `write`.
	 * @param {string} [dest] - destination; defaults to the node's own file
	 * @param {object} [options] - inline / absolutePath / includeContent / base
	 */
	writeSync: function writeSync ( dest, options ) {
		// `dest` is optional — support writeSync(options)
		if ( typeof dest !== 'string' ) {
			options = dest;
			dest = this.node.file;
		}

		options = options || {};

		var ref = processWriteOptions( dest, this, options );
		var resolved = ref.resolved;
		var content = ref.content;
		var map = ref.map;

		writeFileSync( resolved, content );

		if ( !options.inline ) {
			writeFileSync( resolved + '.map', map.toString() );
		}
	}
};
/**
 * Computes everything `write`/`writeSync` need: the resolved destination
 * path, the rewritten file content (old sourcemap comment stripped, fresh
 * one appended) and the flattened map itself.
 * @param {string} dest - destination path
 * @param {Chain} chain
 * @param {object} options - inline / absolutePath / includeContent / base
 * @returns {{ resolved: string, content: string, map: SourceMap }}
 */
function processWriteOptions ( dest, chain, options ) {
	var resolved = resolve( dest );

	var map = chain.apply({
		includeContent: options.includeContent,
		base: options.base ? resolve( options.base ) : dirname( resolved )
	});

	// inline maps become a data URI; otherwise point at a sibling `.map` file
	var url;
	if ( options.inline ) {
		url = map.toUrl();
	} else {
		url = ( options.absolutePath ? resolved : basename( resolved ) ) + '.map';
	}

	// TODO shouldn't url be relative?
	var stripped = chain.node.content.replace( SOURCEMAP_COMMENT, '' );
	var content = stripped + sourcemapComment( url, resolved );

	return { resolved: resolved, content: content, map: map };
}
/**
 * Sums a named stat across a list of nodes.
 * @param {Array<object>} nodes - objects carrying a `_stats` record
 * @param {string} stat - which stat to total, e.g. 'decodingTime'
 * @returns {number} the total (0 for an empty list)
 */
function tally ( nodes, stat ) {
	var total = 0;

	for ( var i = 0; i < nodes.length; i += 1 ) {
		total += nodes[i]._stats[ stat ];
	}

	return total;
}
/**
 * Builds the sourceMappingURL comment to append to a written file, using
 * CSS comment syntax for `.css` destinations and JS syntax otherwise.
 * @param {string} url - the (unencoded) sourcemap URL or data URI
 * @param {string} dest - destination file path, used to pick the comment style
 * @returns {string}
 */
function sourcemapComment ( url, dest ) {
	url = encodeURI( url );

	if ( extname( dest ) === '.css' ) {
		return "\n/*# " + SOURCEMAPPING_URL$1 + "=" + url + " */\n";
	}

	return "\n//# " + SOURCEMAPPING_URL$1 + "=" + url + "\n";
}
/**
 * Loads a file and all of its sourcemap ancestors asynchronously.
 * @param {string} file - path of the generated file
 * @param {object} [options] - content/sourcemaps overrides
 * @returns {Promise<Chain|null>} a Chain, or null if the file has no sourcemap
 */
function load ( file, options ) {
	var ref = init( file, options );

	return ref.node.load( ref.sourcesContentByPath, ref.sourceMapByPath ).then( function () {
		// a file with no map of its own yields no chain
		return ref.node.isOriginalSource ? null : new Chain( ref.node, ref.sourcesContentByPath );
	});
}
/**
 * Synchronous counterpart of `load`.
 * @param {string} file - path of the generated file
 * @param {object} [options] - content/sourcemaps overrides
 * @returns {Chain|null} a Chain, or null if the file has no sourcemap
 */
function loadSync ( file, options ) {
	if ( options === void 0 ) options = {};

	var ref = init( file, options );
	ref.node.loadSync( ref.sourcesContentByPath, ref.sourceMapByPath );

	// a file with no map of its own yields no chain
	return ref.node.isOriginalSource ? null : new Chain( ref.node, ref.sourcesContentByPath );
}
/**
 * Creates the root Node for `file` plus lookup tables seeded from the
 * user-supplied `content` and `sourcemaps` options, with keys resolved to
 * absolute paths.
 * @param {string} file
 * @param {object} [options]
 * @returns {{ node: Node, sourcesContentByPath: object, sourceMapByPath: object }}
 */
function init ( file, options ) {
	if ( options === void 0 ) options = {};

	var node = new Node({ file: file });

	var sourcesContentByPath = {};
	var sourceMapByPath = {};

	var content = options.content || {};
	Object.keys( content ).forEach( function ( key ) {
		sourcesContentByPath[ resolve( key ) ] = content[ key ];
	});

	var sourcemaps = options.sourcemaps || {};
	Object.keys( sourcemaps ).forEach( function ( key ) {
		sourceMapByPath[ resolve( key ) ] = sourcemaps[ key ];
	});

	return { node: node, sourcesContentByPath: sourcesContentByPath, sourceMapByPath: sourceMapByPath };
}
| export { load, loadSync }; |
Sorry, the diff of this file is not supported yet
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Long strings
Supply chain riskContains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
Long strings
Supply chain riskContains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
No v1
QualityPackage is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package
3
-25%8
-52.94%1
-50%3
-50%Yes
NaN23335
-63.7%16
-11.11%512
-68.22%134
-1.47%1
Infinity%+ Added
+ Added
+ Added
+ Added
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed