fast-sourcemap-concat
Comparing version 1.2.3 to 1.2.4
lib/coder.js
The prototype-based ES5 implementation (`var`, `Coder.prototype.*`) becomes a strict-mode ES6 class using `const`/`let`; the delta-encoding logic itself is unchanged. The 1.2.4 version reads:

'use strict';

const vlq = require('./vlq');

const FIELDS = ['generatedColumn', 'source', 'originalLine', 'originalColumn', 'name'];

class Coder {
  decode(mapping) {
    let value = this.rawDecode(mapping);
    let output = {};

    for (let i = 0; i < FIELDS.length; i++) {
      let field = FIELDS[i];
      let prevField = 'prev_' + field;
      if (value.hasOwnProperty(field)) {
        output[field] = value[field];
        if (typeof this[prevField] !== 'undefined') {
          output[field] += this[prevField];
        }
        this[prevField] = output[field];
      }
    }
    return output;
  }

  encode(value) {
    let output = '';
    for (let i = 0; i < FIELDS.length; i++) {
      let field = FIELDS[i];
      if (value.hasOwnProperty(field)) {
        let prevField = 'prev_' + field;
        let valueField = value[field];
        if (typeof this[prevField] !== 'undefined') {
          output += vlq.encode(valueField - this[prevField]);
        } else {
          output += vlq.encode(valueField);
        }
        this[prevField] = valueField;
      }
    }
    return output;
  }

  resetColumn() {
    this.prev_generatedColumn = 0;
  }

  adjustLine(n) {
    this.prev_originalLine += n;
  }

  rawDecode(mapping) {
    let buf = {rest: 0};
    let output = {};
    let fieldIndex = 0;
    while (fieldIndex < FIELDS.length && buf.rest < mapping.length) {
      vlq.decode(mapping, buf.rest, buf);
      output[FIELDS[fieldIndex]] = buf.value;
      fieldIndex++;
    }
    return output;
  }

  copy() {
    let c = new Coder();
    let key;
    for (let i = 0; i < FIELDS.length; i++) {
      key = 'prev_' + FIELDS[i];
      c[key] = this[key];
    }
    return c;
  }

  serialize() {
    let output = '';
    for (let i = 0; i < FIELDS.length; i++) {
      let valueField = this['prev_' + FIELDS[i]] || 0;
      output += vlq.encode(valueField);
    }
    return output;
  }

  add(other) {
    this._combine(other, function(a, b) { return a + b; });
  }

  subtract(other) {
    this._combine(other, function(a, b) { return a - b; });
  }

  _combine(other, operation) {
    let key;
    for (let i = 0; i < FIELDS.length; i++) {
      key = 'prev_' + FIELDS[i];
      this[key] = operation((this[key] || 0), other[key] || 0);
    }
  }

  debug(mapping) {
    let buf = {rest: 0};
    let output = [];
    let fieldIndex = 0;
    while (fieldIndex < FIELDS.length && buf.rest < mapping.length) {
      vlq.decode(mapping, buf.rest, buf);
      output.push(buf.value);
      fieldIndex++;
    }
    return output.join('.');
  }
}

module.exports = Coder;
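For context, a minimal sketch of how this stateful delta coder behaves. The segment values are illustrative, and the module is internal rather than public API:

const Coder = require('fast-sourcemap-concat/lib/coder');

let decoder = new Coder();
// 'AACA' holds four VLQ deltas: generatedColumn +0, source +0,
// originalLine +1, originalColumn +0.
decoder.decode('AACA'); // => { generatedColumn: 0, source: 0, originalLine: 1, originalColumn: 0 }
decoder.decode('AACA'); // => originalLine is now 2 -- the prev_* state accumulates

// Encoding is the inverse: a fresh Coder emits absolute values, and
// subsequent calls emit deltas against the previous segment.
let encoder = new Coder();
encoder.encode({ generatedColumn: 0, source: 0, originalLine: 1, originalColumn: 0 }); // => 'AACA'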
lib/index.js

The entry point gets the same treatment:

'use strict';

const SourceMap = require('./source-map');

module.exports = SourceMap;
lib/source-map.js

The same modernization is applied here, with three substantive changes: the rsvp dependency is dropped in favor of the native Promise in end(), _resolveFile now uses path.isAbsolute() instead of testing for a leading '/', and the old call-without-new guard is gone, so SourceMap must now be constructed with new. The 1.2.4 version reads:

'use strict';

const fs = require('fs-extra');
const srcURL = require('source-map-url');
const path = require('path');
const MemoryStreams = require('memory-streams');
const Coder = require('./coder');
const crypto = require('crypto');
const chalk = require('chalk');
const EOL = require('os').EOL;
const validator = require('sourcemap-validator');
const logger = require('heimdalljs-logger')('fast-sourcemap-concat:');

class SourceMap {
  constructor(opts) {
    if (!opts || (!opts.outputFile && (!opts.mapURL || !opts.file))) {
      throw new Error("Must specify at least outputFile or mapURL and file");
    }
    if (opts.mapFile && !opts.mapURL) {
      throw new Error("must specify the mapURL when setting a custom mapFile");
    }
    this.baseDir = opts.baseDir;
    this.outputFile = opts.outputFile;
    this.cache = opts.cache;
    this.mapFile = opts.mapFile || (this.outputFile && this.outputFile.replace(/\.js$/, '') + '.map');
    this.mapURL = opts.mapURL || (this.mapFile && path.basename(this.mapFile));
    this.mapCommentType = opts.mapCommentType || 'line';
    this._initializeStream();

    this.id = opts.pluginId;

    this.content = {
      version: 3,
      sources: [],
      sourcesContent: [],
      names: [],
      mappings: ''
    };
    if (opts.sourceRoot) {
      this.content.sourceRoot = opts.sourceRoot;
    }
    this.content.file = opts.file || path.basename(opts.outputFile);
    this.encoder = new Coder();

    // Keep track of what column we're currently outputting in the
    // concatenated sourcecode file. Notice that we don't track line
    // though -- line is implicit in this.content.mappings.
    this.column = 0;

    // Keep track of how many lines worth of mappings we've output into
    // the concatenated sourcemap. We use this to correct broken input
    // sourcemaps that don't match the length of their sourcecode.
    this.linesMapped = 0;

    this._sizes = {};
  }

  _resolveFile(filename) {
    if (this.baseDir && !path.isAbsolute(filename)) {
      filename = path.join(this.baseDir, filename);
    }
    return filename;
  }

  _initializeStream() {
    if (this.outputFile) {
      fs.mkdirpSync(path.dirname(this.outputFile));
      this.stream = fs.createWriteStream(this.outputFile);
    } else {
      this.stream = new MemoryStreams.WritableStream();
    }
  }
  addFile(filename) {
    let source = ensurePosix(fs.readFileSync(this._resolveFile(filename), 'utf-8'));
    this._sizes[filename] = source.length;

    return this.addFileSource(filename, source);
  }
  addFileSource(filename, source, inputSrcMap) {
    let url;

    if (source.length === 0) {
      return;
    }

    if (srcURL.existsIn(source)) {
      url = srcURL.getFrom(source);
      source = srcURL.removeFrom(source);
    }

    if (this.content.mappings.length > 0 && !/[;,]$/.test(this.content.mappings)) {
      this.content.mappings += ',';
    }

    if (typeof inputSrcMap === 'string') {
      inputSrcMap = JSON.parse(inputSrcMap);
    }

    if (inputSrcMap === undefined && url) {
      inputSrcMap = this._resolveSourcemap(filename, url);
    }

    let valid = true;

    if (inputSrcMap) {
      try {
        // TODO: don't stringify here
        validator(source, JSON.stringify(inputSrcMap));
      } catch (e) {
        logger.error(' invalid sourcemap for: %s', filename);
        if (typeof e === 'object' && e !== null) {
          logger.error(' error: ', e.message);
        }
        valid = false;
      }
    }

    if (inputSrcMap && valid) {
      let haveLines = countNewLines(source);
      source = this._addMap(filename, inputSrcMap, source, haveLines);
    } else {
      logger.info('generating new map: %s', filename);
      this.content.sources.push(filename);
      this.content.sourcesContent.push(source);
      this._generateNewMap(source);
    }

    this.stream.write(source);
  }
  _cacheEncoderResults(key, operations, filename) {
    let encoderState = this.encoder.copy();
    let initialLinesMapped = this.linesMapped;
    let cacheEntry = this.cache[key];
    let finalState;

    if (cacheEntry) {
      // The cache contains the encoder-differential for our file. So
      // this updates encoderState to the final value our encoder will
      // have after processing the file.
      encoderState.decode(cacheEntry.encoder);
      // We provide that final value as a performance hint.
      operations.call(this, {
        encoder: encoderState,
        lines: cacheEntry.lines
      });
      logger.info('encoder cache hit: %s', filename);
    } else {
      // Run the operations with no hint because we don't have one yet.
      operations.call(this);
      // Then store the encoder differential in the cache.
      finalState = this.encoder.copy();
      finalState.subtract(encoderState);
      this.cache[key] = {
        encoder: finalState.serialize(),
        lines: this.linesMapped - initialLinesMapped
      };
      logger.info('encoder cache prime: %s', filename);
    }
  }
  // This is useful for things like separators that you're appending to
  // your JS file that don't need to have their own source mapping, but
  // will alter the line numbering for subsequent files.
  addSpace(source) {
    this.stream.write(source);
    let lineCount = countNewLines(source);
    if (lineCount === 0) {
      this.column += source.length;
    } else {
      this.column = 0;
      let mappings = this.content.mappings;
      for (let i = 0; i < lineCount; i++) {
        mappings += ';';
      }
      this.content.mappings = mappings;
    }
  }
  _generateNewMap(source) {
    let mappings = this.content.mappings;
    let lineCount = countNewLines(source);

    mappings += this.encoder.encode({
      generatedColumn: this.column,
      source: this.content.sources.length - 1,
      originalLine: 0,
      originalColumn: 0
    });

    if (lineCount === 0) {
      // no newline in the source. Keep outputting one big line.
      this.column += source.length;
    } else {
      // end the line
      this.column = 0;
      this.encoder.resetColumn();
      mappings += ';';
      this.encoder.adjustLine(lineCount - 1);
    }

    // For the remainder of the lines (if any), we're just following
    // one-to-one.
    for (let i = 0; i < lineCount - 1; i++) {
      mappings += 'AACA;';
    }
    this.linesMapped += lineCount;
    this.content.mappings = mappings;
  }
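  // Note: 'AACA' above is four VLQ deltas -- A = +0 generated column,
  // A = +0 source index, C = +1 original line, A = +0 original column --
  // and ';' ends the generated line, so each 'AACA;' maps one generated
  // line straight onto the next original line at column 0.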
  _resolveSourcemap(filename, url) {
    let srcMap;
    let match = /^data:.+?;base64,/.exec(url);

    try {
      if (match) {
        srcMap = new Buffer(url.slice(match[0].length), 'base64');
      } else if (this.baseDir && url.slice(0, 1) === '/') {
        srcMap = fs.readFileSync(
          path.join(this.baseDir, url),
          'utf8'
        );
      } else {
        srcMap = fs.readFileSync(
          path.join(path.dirname(this._resolveFile(filename)), url),
          'utf8'
        );
      }
      return JSON.parse(srcMap);
    } catch (err) {
      this._warn('Warning: ignoring input sourcemap for ' + filename + ' because ' + err.message);
    }
  }
  _addMap(filename, srcMap, source) {
    let initialLinesMapped = this.linesMapped;
    let haveLines = countNewLines(source);
    let self = this;

    if (this.cache) {
      this._cacheEncoderResults(hash(JSON.stringify(srcMap)), function(cacheHint) {
        self._assimilateExistingMap(filename, srcMap, cacheHint);
      }, filename);
    } else {
      this._assimilateExistingMap(filename, srcMap);
    }

    while (this.linesMapped - initialLinesMapped < haveLines) {
      // This cleans up after upstream sourcemaps that are too short
      // for their sourcecode so they don't break the rest of our
      // mapping. Coffeescript does this.
      this.content.mappings += ';';
      this.linesMapped++;
    }
    while (haveLines < this.linesMapped - initialLinesMapped) {
      // Likewise, this cleans up after upstream sourcemaps that are
      // too long for their sourcecode.
      source += "\n";
      haveLines++;
    }
    return source;
  }
  _assimilateExistingMap(filename, srcMap, cacheHint) {
    let content = this.content;
    let sourcesOffset = content.sources.length;
    let namesOffset = content.names.length;

    content.sources = content.sources.concat(this._resolveSources(srcMap));
    content.sourcesContent = content.sourcesContent.concat(this._resolveSourcesContent(srcMap, filename));
    while (content.sourcesContent.length > content.sources.length) {
      content.sourcesContent.pop();
    }
    while (content.sourcesContent.length < content.sources.length) {
      content.sourcesContent.push(null);
    }
    content.names = content.names.concat(srcMap.names);
    this._scanMappings(srcMap, sourcesOffset, namesOffset, cacheHint);
  }
  _resolveSources(srcMap) {
    let baseDir = this.baseDir;
    if (!baseDir) {
      return srcMap.sources;
    }
    return srcMap.sources.map(function(src) {
      return src.replace(baseDir, '');
    });
  }
  _resolveSourcesContent(srcMap, filename) {
    if (srcMap.sourcesContent) {
      // Upstream srcmap already had inline content, so easy.
      return srcMap.sourcesContent;
    } else {
      // Look for original sources relative to our input source filename.
      return srcMap.sources.map(function(source) {
        let fullPath;
        if (path.isAbsolute(source)) {
          fullPath = source;
        } else {
          fullPath = path.join(path.dirname(this._resolveFile(filename)), source);
        }
        return ensurePosix(fs.readFileSync(fullPath, 'utf-8'));
      }.bind(this));
    }
  }
  _scanMappings(srcMap, sourcesOffset, namesOffset, cacheHint) {
    let mappings = this.content.mappings;
    let decoder = new Coder();
    let inputMappings = srcMap.mappings;
    let pattern = /^([;,]*)([^;,]*)/;
    let continuation = /^([;,]*)((?:AACA;)+)/;
    let initialMappedLines = this.linesMapped;
    let match;
    let lines;

    while (inputMappings.length > 0) {
      match = pattern.exec(inputMappings);

      // If the entry was preceded by separators, copy them through.
      if (match[1]) {
        mappings += match[1];
        lines = match[1].replace(/,/g, '').length;
        if (lines > 0) {
          this.linesMapped += lines;
          this.encoder.resetColumn();
          decoder.resetColumn();
        }
      }

      // Re-encode the entry.
      if (match[2]) {
        let value = decoder.decode(match[2]);
        value.generatedColumn += this.column;
        this.column = 0;
        if (sourcesOffset && value.hasOwnProperty('source')) {
          value.source += sourcesOffset;
          decoder.prev_source += sourcesOffset;
          sourcesOffset = 0;
        }
        if (namesOffset && value.hasOwnProperty('name')) {
          value.name += namesOffset;
          decoder.prev_name += namesOffset;
          namesOffset = 0;
        }
        mappings += this.encoder.encode(value);
      }

      inputMappings = inputMappings.slice(match[0].length);

      // Once we've applied any offsets, we can try to jump ahead.
      if (!sourcesOffset && !namesOffset) {
        if (cacheHint) {
          // Our cacheHint tells us what our final encoder state will be
          // after processing this file. And since we've got nothing
          // left ahead that needs rewriting, we can just copy the
          // remaining mappings over and jump to the final encoder
          // state.
          mappings += inputMappings;
          inputMappings = '';
          this.linesMapped = initialMappedLines + cacheHint.lines;
          this.encoder = cacheHint.encoder;
        }

        if ((match = continuation.exec(inputMappings))) {
          // This is a significant optimization, especially when we're
          // doing simple line-for-line concatenations.
          lines = match[2].length / 5;
          this.encoder.adjustLine(lines);
          this.encoder.resetColumn();
          decoder.adjustLine(lines);
          decoder.resetColumn();
          this.linesMapped += lines + match[1].replace(/,/g, '').length;
          mappings += match[0];
          inputMappings = inputMappings.slice(match[0].length);
        }
      }
    }

    // ensure we always reset the column. This is to ensure we remain resilient
    // to invalid input.
    this.encoder.resetColumn();

    this.content.mappings = mappings;
  }
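  // Note on the continuation fast path above: each 'AACA;' is exactly 5
  // characters, so a run like 'AACA;AACA;AACA;' (15 chars) advances
  // match[2].length / 5 = 3 lines in one step instead of decoding and
  // re-encoding every segment.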
  writeConcatStatsSync(outputPath, content) {
    fs.mkdirpSync(path.dirname(outputPath));
    fs.writeFileSync(outputPath, JSON.stringify(content, null, 2));
  }
  end(cb, thisArg) {
    let stream = this.stream;
    let sourceMap = this;

    return new Promise(function(resolve, reject) {
      stream.on('error', function(error) {
        stream.on('close', function() {
          reject(error);
        });
        stream.end();
      });

      let error, success;
      try {
        if (cb) {
          cb.call(thisArg, sourceMap);
        }

        if (process.env.CONCAT_STATS) {
          let outputPath = process.cwd() + '/concat-stats-for/' + sourceMap.id + '-' + path.basename(sourceMap.outputFile) + '.json';

          sourceMap.writeConcatStatsSync(
            outputPath,
            {
              outputFile: sourceMap.outputFile,
              sizes: sourceMap._sizes
            }
          );
        }

        if (sourceMap.mapCommentType === 'line') {
          stream.write('//# sourceMappingURL=' + sourceMap.mapURL + '\n');
        } else {
          stream.write('/*# sourceMappingURL=' + sourceMap.mapURL + ' */\n');
        }

        if (sourceMap.mapFile) {
          fs.mkdirpSync(path.dirname(sourceMap.mapFile));
          fs.writeFileSync(sourceMap.mapFile, JSON.stringify(sourceMap.content));
        }
        success = true;
      } catch (e) {
        success = false;
        error = e;
      } finally {
        if (sourceMap.outputFile) {
          stream.on('close', function() {
            if (success) {
              resolve();
            } else {
              reject(error);
            }
          });
          stream.end();
        } else {
          resolve({
            code: stream.toString(),
            map: sourceMap.content
          });
        }
      }
    });
  }
  _warn(msg) {
    console.warn(chalk.yellow(msg));
  }
}

module.exports = SourceMap;

function countNewLines(src) {
  let newlinePattern = /(\r?\n)/g;
  let count = 0;
  while (newlinePattern.exec(src)) {
    count++;
  }
  return count;
}

(The helpers ensurePosix and hash are referenced above but fall outside the hunks shown in this view.)
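A minimal usage sketch of the 1.2.4 API; the file names here are hypothetical, and note that end() now returns a native Promise rather than an RSVP one:

const SourceMap = require('fast-sourcemap-concat');

let map = new SourceMap({ outputFile: 'out/combined.js' });
map.addFile('src/a.js');   // hypothetical input file
map.addSpace('\n');        // unmapped separator; still advances line numbering
map.addFile('src/b.js');   // hypothetical input file
map.end().then(function() {
  // out/combined.js now ends with a sourceMappingURL comment, and
  // out/combined.map holds the merged map.
});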
lib/vlq.js

Also modernized; it simply re-exports the base64 VLQ codec from the source-map package:

'use strict';

const vlq = require('source-map/lib/source-map/base64-vlq');

module.exports = vlq;
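For reference, a few sample values under this scheme (the lowest bit carries the sign and each base64 character carries five payload bits, per the standard sourcemap VLQ spec):

vlq.encode(0);  // => 'A'
vlq.encode(1);  // => 'C'
vlq.encode(-1); // => 'D'
vlq.encode(16); // => 'gB' -- values above 15 spill into a continuation character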
package.json

 {
   "name": "fast-sourcemap-concat",
-  "version": "1.2.3",
+  "version": "1.2.4",
   "description": "Concatenate files while generating or propagating sourcemaps.",
@@ -22,3 +22,2 @@ "main": "lib/index.js",
   "mkdirp": "^0.5.0",
-  "rsvp": "^3.0.14",
   "source-map": "^0.4.2",
@@ -31,6 +30,9 @@ "source-map-url": "^0.3.0",
   "mocha": "^2.0.1",
-  "mocha-jshint": "0.0.9",
+  "mocha-eslint": "^4.1.0",
   "rimraf": "^2.2.8",
   "sinon": "^1.12.2"
 },
+  "engines": {
+    "node": ">= 4"
+  }
 }
- Removed rsvp@^3.0.14
- Removed rsvp@3.6.2 (transitive)