scss-tokenizer
Comparing version 0.3.0 to 0.4.0
 'use strict';
-exports.__esModule = true;
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
@@ -5,0 +7,0 @@ var _input = require('./input');
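This first hunk, repeated at the top of every compiled file below, is a Babel preamble change only: the loose-mode exports.__esModule = true; flag from 0.3.0 becomes the spec-style Object.defineProperty form in 0.4.0, consistent with the babel-preset-es2015 to babel-preset-env swap in package.json at the end of this diff. A minimal sketch of the kind of ES-module source both outputs are compiled from (hypothetical, not the package's actual file):

// Any file with an ES export triggers the __esModule preamble when
// compiled to CommonJS; only the preamble's form changed between presets.
export default function tokenize(input) {
    return [];
}
// 0.3.0 (loose):  exports.__esModule = true;
// 0.4.0 (spec):   Object.defineProperty(exports, "__esModule", { value: true });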
 'use strict';
-exports.__esModule = true;
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
 var _previousMap = require('./previous-map');
@@ -50,5 +54,8 @@
-Input.prototype.mapResolve = function mapResolve(file) {
-    return _path2.default.resolve(this.map.consumer().sourceRoot || '.', file);
-};
+_createClass(Input, [{
+    key: 'mapResolve',
+    value: function mapResolve(file) {
+        return _path2.default.resolve(this.map.consumer().sourceRoot || '.', file);
+    }
+}]);
@@ -55,0 +62,0 @@ return Input;
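The Input hunk is the same recompilation at class level: a method the 0.3.0 output assigned directly onto Input.prototype is now installed through the _createClass helper, which defines it with a property descriptor (non-enumerable, writable, configurable). A sketch of class-syntax source that yields both outputs, assuming the package's ES source uses a class (a reconstruction, not the actual file):

import path from 'path';

class Input {
    // 0.3.0's preset compiles this to `Input.prototype.mapResolve = ...`;
    // 0.4.0's compiles it to a _createClass descriptor entry instead.
    mapResolve(file) {
        return path.resolve(this.map.consumer().sourceRoot || '.', file);
    }
}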
 'use strict';
-exports.__esModule = true;
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
 var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };
+var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
 var _jsBase = require('js-base64');
@@ -37,66 +41,74 @@
-PreviousMap.prototype.consumer = function consumer() {
-    if (!this.consumerCache) {
-        this.consumerCache = new _sourceMap2.default.SourceMapConsumer(this.text);
-    }
-    return this.consumerCache;
-};
-PreviousMap.prototype.withContent = function withContent() {
-    return !!(this.consumer().sourcesContent && this.consumer().sourcesContent.length > 0);
-};
-PreviousMap.prototype.startWith = function startWith(string, start) {
-    if (!string) return false;
-    return string.substr(0, start.length) === start;
-};
-PreviousMap.prototype.loadAnnotation = function loadAnnotation(css) {
-    var match = css.match(/\/\*\s*# sourceMappingURL=(.*)\s*\*\//);
-    if (match) this.annotation = match[1].trim();
-};
-PreviousMap.prototype.decodeInline = function decodeInline(text) {
-    var uri = 'data:application/json,';
-    var base64 = 'data:application/json;base64,';
-    if (this.startWith(text, uri)) {
-        return decodeURIComponent(text.substr(uri.length));
-    } else if (this.startWith(text, base64)) {
-        return _jsBase.Base64.decode(text.substr(base64.length));
-    } else {
-        var encoding = text.match(/data:application\/json;([^,]+),/)[1];
-        throw new Error('Unsupported source map encoding ' + encoding);
-    }
-};
-PreviousMap.prototype.loadMap = function loadMap(file, prev) {
-    if (prev === false) return false;
-    if (prev) {
-        if (typeof prev === 'string') {
-            return prev;
-        } else if (prev instanceof _sourceMap2.default.SourceMapConsumer) {
-            return _sourceMap2.default.SourceMapGenerator.fromSourceMap(prev).toString();
-        } else if (prev instanceof _sourceMap2.default.SourceMapGenerator) {
-            return prev.toString();
-        } else if ((typeof prev === 'undefined' ? 'undefined' : _typeof(prev)) === 'object' && prev.mappings) {
-            return JSON.stringify(prev);
-        } else {
-            throw new Error('Unsupported previous source map format: ' + prev.toString());
-        }
-    } else if (this.inline) {
-        return this.decodeInline(this.annotation);
-    } else if (this.annotation) {
-        var map = this.annotation;
-        if (file) map = _path2.default.join(_path2.default.dirname(file), map);
-        this.root = _path2.default.dirname(map);
-        if (_fs2.default.existsSync && _fs2.default.existsSync(map)) {
-            return _fs2.default.readFileSync(map, 'utf-8').toString().trim();
-        } else {
-            return false;
-        }
-    }
-};
+_createClass(PreviousMap, [{
+    key: 'consumer',
+    value: function consumer() {
+        if (!this.consumerCache) {
+            this.consumerCache = new _sourceMap2.default.SourceMapConsumer(this.text);
+        }
+        return this.consumerCache;
+    }
+}, {
+    key: 'withContent',
+    value: function withContent() {
+        return !!(this.consumer().sourcesContent && this.consumer().sourcesContent.length > 0);
+    }
+}, {
+    key: 'startWith',
+    value: function startWith(string, start) {
+        if (!string) return false;
+        return string.substr(0, start.length) === start;
+    }
+}, {
+    key: 'loadAnnotation',
+    value: function loadAnnotation(css) {
+        var match = css.match(/\/\*\s*# sourceMappingURL=(.*)\s*\*\//);
+        if (match) this.annotation = match[1].trim();
+    }
+}, {
+    key: 'decodeInline',
+    value: function decodeInline(text) {
+        var uri = 'data:application/json,';
+        var base64 = 'data:application/json;base64,';
+        if (this.startWith(text, uri)) {
+            return decodeURIComponent(text.substr(uri.length));
+        } else if (this.startWith(text, base64)) {
+            return _jsBase.Base64.decode(text.substr(base64.length));
+        } else {
+            var encoding = text.match(/data:application\/json;([^,]+),/)[1];
+            throw new Error('Unsupported source map encoding ' + encoding);
+        }
+    }
+}, {
+    key: 'loadMap',
+    value: function loadMap(file, prev) {
+        if (prev === false) return false;
+        if (prev) {
+            if (typeof prev === 'string') {
+                return prev;
+            } else if (prev instanceof _sourceMap2.default.SourceMapConsumer) {
+                return _sourceMap2.default.SourceMapGenerator.fromSourceMap(prev).toString();
+            } else if (prev instanceof _sourceMap2.default.SourceMapGenerator) {
+                return prev.toString();
+            } else if ((typeof prev === 'undefined' ? 'undefined' : _typeof(prev)) === 'object' && prev.mappings) {
+                return JSON.stringify(prev);
+            } else {
+                throw new Error('Unsupported previous source map format: ' + prev.toString());
+            }
+        } else if (this.inline) {
+            return this.decodeInline(this.annotation);
+        } else if (this.annotation) {
+            var map = this.annotation;
+            if (file) map = _path2.default.join(_path2.default.dirname(file), map);
+            this.root = _path2.default.dirname(map);
+            if (_fs2.default.existsSync && _fs2.default.existsSync(map)) {
+                return _fs2.default.readFileSync(map, 'utf-8').toString().trim();
+            } else {
+                return false;
+            }
+        }
+    }
+}]);
@@ -103,0 +115,0 @@ return PreviousMap;
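Nothing in loadMap's logic changes in this hunk, only its packaging inside _createClass; it still accepts false to disable maps, a map string, a SourceMapConsumer, a SourceMapGenerator, or a plain object carrying mappings. A hedged usage sketch (the (css, opts) constructor shape is an assumption, postcss-style, and is not shown in this diff):

var PreviousMap = require('scss-tokenizer/lib/previous-map').default;
var SourceMap = require('source-map');

var prev = new PreviousMap('a { }', {}); // assumed constructor shape

prev.loadMap('app.scss', false);                              // -> false, maps disabled
prev.loadMap('app.scss', '{"version":3,"mappings":""}');      // string passes through
prev.loadMap('app.scss', new SourceMap.SourceMapGenerator()); // generator -> toString()
prev.loadMap('app.scss', { version: 3, mappings: 'AAAA' });   // object -> JSON.stringify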
 'use strict';
-exports.__esModule = true;
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
 exports.default = tokenize;
@@ -33,3 +35,3 @@
-function tokenize(input, l, p) {
+function tokenize(input, l, p, o) {
     var tokens = [];
@@ -53,3 +55,3 @@ var css = input.css.valueOf();
     var length = css.length;
-    var offset = -1;
+    var offset = o || -1;
     var line = l || 1;
@@ -120,8 +122,12 @@ var pos = p || 0;
-    var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1),
+    var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1, offset),
         t = _tokenizeInterpolant.tokens,
-        _p = _tokenizeInterpolant.pos;
+        _l = _tokenizeInterpolant.line,
+        _p = _tokenizeInterpolant.pos,
+        _o = _tokenizeInterpolant.offset;
     tokens = tokens.concat(t);
     next = _p;
+    line = _l;
+    offset = _o;
@@ -128,0 +134,0 @@ pos = next;
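This hunk establishes the pattern every file below repeats: each sub-tokenizer now receives the running offset (the index of the last newline seen) as an extra argument and reports line and offset back alongside tokens and pos, so columns stay correct across constructs that span newlines. A sketch of the 0.4.0 calling convention; the module paths and the Input constructor shape are assumptions inferred from the imports above:

var Input = require('scss-tokenizer/lib/input').default;
var tokenizeInterpolant = require('scss-tokenizer/lib/tokenize-interpolant').default;

var input = new Input('#{$color} red');            // assumed constructor shape
var result = tokenizeInterpolant(input, 1, 2, -1); // start just past '#{', as `next + 1` above
// 0.3.0 returned { tokens, pos }; 0.4.0 also threads state back:
// result.line   -- the line the sub-tokenizer stopped on
// result.offset -- index of the last newline, so column = pos - offset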
 'use strict';
-exports.__esModule = true;
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
 exports.default = tokenize;
@@ -54,3 +56,3 @@
-function tokenize(input, l, p) {
+function tokenize(input, l, p, o) {
     var tokens = [];
@@ -75,3 +77,3 @@ var css = input.css.valueOf();
    var length = css.length;
-    var offset = -1;
+    var offset = o || -1;
     var line = l || 1;
@@ -132,9 +134,4 @@ var pos = p || 0;
 case closeCurly:
-    if (inInterpolant) {
-        inInterpolant = false;
-        tokens.push(['endInterpolant', '}', line, pos - offset]);
-    } else {
-        break loop;
-    }
-    break;
+    tokens.push(['endInterpolant', '}', line, pos - offset]);
+    break loop;
@@ -171,8 +168,12 @@ case comma:
-    var _tokenizeString = (0, _tokenizeString3.default)(input, line, next, quote),
+    var _tokenizeString = (0, _tokenizeString3.default)(input, line, next, offset, quote),
        t = _tokenizeString.tokens,
-        _p = _tokenizeString.pos;
+        _l = _tokenizeString.line,
+        _p = _tokenizeString.pos,
+        _o = _tokenizeString.offset;
     tokens = tokens.concat(t);
     next = _p;
+    line = _l;
+    offset = _o;
@@ -211,12 +212,12 @@ pos = next;
-    var _tokenizeComment = (0, _tokenizeComment3.default)(input, line, next + 1),
+    var _tokenizeComment = (0, _tokenizeComment3.default)(input, line, next + 1, offset),
        _t = _tokenizeComment.tokens,
-        _l = _tokenizeComment.line,
+        _l2 = _tokenizeComment.line,
        _p2 = _tokenizeComment.pos,
-        o = _tokenizeComment.offset;
+        _o2 = _tokenizeComment.offset;
     tokens = tokens.concat(_t);
     next = _p2;
-    line = _l;
-    offset = o;
+    line = _l2;
+    offset = _o2;
@@ -307,3 +308,3 @@ pos = next;
-    return { tokens: tokens, pos: pos };
+    return { tokens: tokens, line: line, pos: pos, offset: offset };
 }
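The simplified closeCurly case above also makes the token shape visible: every token is a [type, value, line, column] array, with the column computed as pos - offset. A sketch using the package's main entry, assuming it still re-exports tokenize as documented in the README:

// Sketch, assuming the main entry exposes tokenize as in prior releases.
var scss = require('scss-tokenizer');

var tokens = scss.tokenize('a { color: #{$c}; }');
// Each entry is a [type, value, line, column] tuple; the interpolant close
// above, for instance, is pushed as ['endInterpolant', '}', line, pos - offset].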
 'use strict';
-exports.__esModule = true;
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
 exports.default = tokenize;
@@ -36,3 +38,3 @@
-function tokenize(input, l, p, quote) {
+function tokenize(input, l, p, o, quote) {
     var tokens = [];
@@ -56,3 +58,3 @@ var css = input.css.valueOf();
     var length = css.length;
-    var offset = -1;
+    var offset = o || -1;
     var line = l || 1;
@@ -102,8 +104,12 @@ var pos = p || 0;
-    var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1),
+    var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1, offset),
        t = _tokenizeInterpolant.tokens,
-        _p = _tokenizeInterpolant.pos;
+        _l = _tokenizeInterpolant.line,
+        _p = _tokenizeInterpolant.pos,
+        _o = _tokenizeInterpolant.offset;
     tokens = tokens.concat(t);
     next = _p;
+    line = _l;
+    offset = _o;
@@ -132,3 +138,3 @@ pos = next;
-    return { tokens: tokens, pos: pos };
+    return { tokens: tokens, line: line, pos: pos, offset: offset };
 }
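The string sub-tokenizer is widened the same way, with quote shifted to the fifth position to make room for offset. A hedged call-site sketch (the module path is inferred from the _tokenizeString3 import naming, and the Input constructor shape is assumed as before):

var Input = require('scss-tokenizer/lib/input').default;
var tokenizeString = require('scss-tokenizer/lib/tokenize-string').default;

// 0.3.0: tokenizeString(input, line, pos, quote)
// 0.4.0: tokenizeString(input, line, pos, offset, quote)
var result = tokenizeString(new Input('"hi #{$name}"'), 1, 1, -1, '"');
// result now carries { tokens, line, pos, offset } like the other sub-tokenizers.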
 'use strict';
-exports.__esModule = true;
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
 exports.default = tokenize;
@@ -168,8 +170,12 @@
-    var _tokenizeString = (0, _tokenizeString3.default)(input, line, next, quote),
+    var _tokenizeString = (0, _tokenizeString3.default)(input, line, next, offset, quote),
        t = _tokenizeString.tokens,
-        _p = _tokenizeString.pos;
+        _l = _tokenizeString.line,
+        _p = _tokenizeString.pos,
+        o = _tokenizeString.offset;
     tokens = tokens.concat(t);
     next = _p;
+    line = _l;
+    offset = o;
@@ -208,12 +214,12 @@ pos = next;
-    var _tokenizeComment = (0, _tokenizeComment3.default)(input, line, next + 1),
+    var _tokenizeComment = (0, _tokenizeComment3.default)(input, line, next + 1, offset),
        _t = _tokenizeComment.tokens,
-        _l = _tokenizeComment.line,
+        _l2 = _tokenizeComment.line,
        _p2 = _tokenizeComment.pos,
-        o = _tokenizeComment.offset;
+        _o = _tokenizeComment.offset;
     tokens = tokens.concat(_t);
     next = _p2;
-    line = _l;
-    offset = o;
+    line = _l2;
+    offset = _o;
@@ -246,8 +252,12 @@ pos = next;
-    var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1),
+    var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1, offset),
        _t2 = _tokenizeInterpolant.tokens,
-        _p3 = _tokenizeInterpolant.pos;
+        _l3 = _tokenizeInterpolant.line,
+        _p3 = _tokenizeInterpolant.pos,
+        _o2 = _tokenizeInterpolant.offset;
     tokens = tokens.concat(_t2);
     next = _p3;
+    line = _l3;
+    offset = _o2;
@@ -254,0 +264,0 @@ pos = next;
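Across all three sub-tokenizers the column arithmetic is identical: offset holds the string index of the most recent newline, so a token's 1-based column is pos - offset. A toy model of that bookkeeping (plain JavaScript illustrating the invariant, not package code):

// Mirrors the tokenizer's line/offset updates; offset starts at -1 so
// columns on the first line are simply pos + 1.
var css = 'a {\n  color: #{$c};\n}';
var line = 1;
var offset = -1;

for (var pos = 0; pos < css.length; pos++) {
    if (css.charCodeAt(pos) === 10) { // '\n'
        line += 1;
        offset = pos; // column numbering restarts after each newline
    }
}
// A token at string index `pos` gets column `pos - offset` on line `line`.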
 {
     "name": "scss-tokenizer",
-    "version": "0.3.0",
+    "version": "0.4.0",
     "description": "A tokenzier for Sass' SCSS syntax",
@@ -33,14 +33,14 @@ "main": "index.js",
     "dependencies": {
-        "js-base64": "^2.4.3",
-        "source-map": "^0.7.1"
+        "js-base64": "^2.4.9",
+        "source-map": "^0.7.3"
     },
     "devDependencies": {
         "babel-cli": "^6.26.0",
-        "babel-core": "^6.26.0",
-        "babel-jest": "^22.4.3",
-        "babel-preset-es2015": "^6.24.1",
-        "glob": "^7.1.2",
-        "jest": "^22.4.3",
-        "sass-spec": "^3.5.1"
+        "babel-core": "^6.26.3",
+        "babel-jest": "^23.6.0",
+        "babel-preset-env": "^1.7.0",
+        "glob": "^7.1.3",
+        "jest": "^23.6.0",
+        "sass-spec": "3.5.1"
     }
 }
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Updated js-base64@^2.4.9
Updated source-map@^0.7.3