Comparing version 0.0.21 to 0.0.22
@@ -5,3 +5,3 @@ { | ||
"description": "Port of YUI CSS Compressor to NodeJS", | ||
"version": "0.0.21", | ||
"version": "0.0.22", | ||
"keywords": [ | ||
@@ -8,0 +8,0 @@ "css", |
@@ -36,2 +36,3 @@ UglifyCSS is a port of [YUI Compressor](https://github.com/yui/yuicompressor) to [NodeJS](http://nodejs.org) for its CSS part. Its name is a reference to the awesome [UglifyJS](https://github.com/mishoo/UglifyJS) but UglifyCSS is not a CSS parser. Like YUI CSS Compressor, it applies many regexp replacements. Note that a [port to JavaScript](https://github.com/yui/ycssmin) is also available in the YUI Compressor repository. | ||
* `--cute-comments` preserves newlines within and around preserved comments | ||
* `--convert-urls d` converts relative urls according to the `d` directory | ||
@@ -68,4 +69,2 @@ If no file name is specified, input is read from stdin. | ||
See also [test.js](https://github.com/fmarcia/UglifyCSS/blob/master/test.js). | ||
### License | ||
@@ -72,0 +71,0 @@ |
@@ -30,3 +30,9 @@ /** | ||
var fs = require('fs');
var path = require('path');
// Separator used inside css urls — urls always use "/" regardless of platform.
var SEP = "/";
// Platform filesystem separator ("/" on posix, "\\" on windows).
var PATH_SEP = path.sep;
// Pick the resolve flavor that matches the platform separator, so resolved
// paths can be split/joined with PATH_SEP consistently.
var pathResolve = PATH_SEP === "/" ? path.posix.resolve : path.win32.resolve;
var defaultOptions = { | ||
@@ -36,6 +42,115 @@ maxLineLen: 0, | ||
uglyComments: false, | ||
cuteComments: false | ||
cuteComments: false, | ||
convertUrls: "" | ||
}; | ||
/**
 * Utility method to convert relative urls and replace them with tokens before
 * we start compressing. It must be called *after* extractDataUrls so that
 * data urls are already protected by preservation tokens.
 *
 * @private
 * @function convertRelativeUrls
 * @param {String} css The input css
 * @param {Object} options Options; reads options.source (split path of the
 *        input file's directory) and options.target (split path of the
 *        directory urls should become relative to)
 * @param {Array} preservedTokens The global array of tokens to preserve
 * @returns {String} The processed css
 */
function convertRelativeUrls(css, options, preservedTokens) {

    var maxIndex = css.length - 1,
        appendIndex = 0,
        startIndex,
        endIndex,
        terminator,
        foundTerminator,
        sb = [],
        m,
        preserver,
        token,
        url,
        file,
        target,
        pattern = /(url\s*\()\s*(["']?)/g;

    // Since we need to account for non-base64 data urls, we need to handle
    // ' and ) being part of the data string. Hence switching to indexOf,
    // to determine whether or not we have matching string terminators and
    // handling sb appends directly, instead of using matcher.append* methods.
    while ((m = pattern.exec(css)) !== null) {

        startIndex = m.index + m[1].length; // "url(".length()
        terminator = m[2];                  // ', " or empty (not quoted)

        if (terminator.length === 0) {
            terminator = ")";
        }

        foundTerminator = false;
        endIndex = pattern.lastIndex - 1;

        while (foundTerminator === false && endIndex + 1 <= maxIndex) {
            endIndex = css.indexOf(terminator, endIndex + 1);

            // endIndex == 0 doesn't really apply here
            if ((endIndex > 0) && (css.charAt(endIndex - 1) !== '\\')) {
                foundTerminator = true;
                if (")" !== terminator) {
                    endIndex = css.indexOf(")", endIndex);
                }
            }
        }

        // Enough searching, start moving stuff over to the buffer
        sb.push(css.substring(appendIndex, m.index));

        if (foundTerminator) {

            token = css.substring(startIndex, endIndex).replace(/(^\s*|\s*$)/g, "");

            // Skip urls already replaced by a preservation token (e.g. data
            // urls extracted by extractDataUrls), keeping them verbatim.
            // BUG FIX: the original compared token.slice(0, 18) against the
            // 19-character prefix "___PRESERVED_TOKEN_", which could never
            // match; it also continued without rewinding appendIndex, which
            // would have duplicated the segment before the skipped match.
            if (token.slice(0, 19) === "___PRESERVED_TOKEN_") {
                appendIndex = m.index;
                continue;
            }

            if (terminator === "'" || terminator === '"') {
                token = token.slice(1, -1);
            } else if (terminator === ")") {
                terminator = ""; // unquoted url: emit it unquoted as well
            }

            // build path of detected urls:
            target = options.target.slice();
            token = token.split(SEP).join(PATH_SEP); // assuming urls in css use "/"
            url = pathResolve(options.source.join(PATH_SEP), token).split(PATH_SEP);
            file = url.pop();

            // remove common part of both paths
            while (target[0] === url[0]) {
                target.shift();
                url.shift();
            }

            // one ".." per directory left in the target path
            target.fill("..");

            url = terminator + target.concat(url, file).join(SEP) + terminator;

            preservedTokens.push(url);
            preserver = "url(___PRESERVED_TOKEN_" + (preservedTokens.length - 1) + "___)";
            sb.push(preserver);

            appendIndex = endIndex + 1;

        } else {
            // No end terminator found, re-add the whole match. Should we throw/warn here?
            sb.push(css.substring(m.index, pattern.lastIndex));
            appendIndex = pattern.lastIndex;
        }
    }

    sb.push(css.substring(appendIndex));

    return sb.join("");
}
/** | ||
* Utility method to replace all data urls with tokens before we start | ||
@@ -323,2 +438,5 @@ * compressing, to avoid performance issues running some of the subsequent | ||
content = extractDataUrls(content, preservedTokens); | ||
if (options.convertUrls) { | ||
content = convertRelativeUrls(content, options, preservedTokens); | ||
} | ||
content = collectComments(content, comments); | ||
@@ -672,2 +790,8 @@ | ||
options = options || defaultOptions; | ||
if (options.convertUrls) { | ||
options.target = pathResolve(process.cwd(), options.convertUrls).split(PATH_SEP); | ||
} | ||
// process files | ||
@@ -679,2 +803,6 @@ for (index = 0; index < nFiles; index += 1) { | ||
if (content.length) { | ||
if (options.convertUrls) { | ||
options.source = pathResolve(process.cwd(), filename).split(PATH_SEP); | ||
options.source.pop(); | ||
} | ||
uglies.push(processString(content, options)); | ||
@@ -681,0 +809,0 @@ } |
Sorry, the diff of this file is not supported yet
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
1
36135
7
693
72
1