i18next-parser
Advanced tools
Comparing version 1.0.0-beta2 to 1.0.0-beta21
@@ -5,3 +5,3 @@ #!/usr/bin/env node | ||
var fs = require('fs') | ||
var i18nTransform = require('../dist').default | ||
var i18nTransform = require('../dist/transform') | ||
var path = require('path') | ||
@@ -69,3 +69,3 @@ var pkg = require('../package.json') | ||
console.log(' Input: '.yellow + args.join(', ')) | ||
console.log(' Output: '.yellow + program.output) | ||
console.log(' Output: '.yellow + output) | ||
if (!program.silent) { | ||
@@ -94,3 +94,6 @@ console.log() | ||
.on('error', function (message, region) { | ||
console.log(' [error] '.red + message + ': ' + region.trim()) | ||
if (typeof region === 'string') { | ||
message += ': ' + region.trim() | ||
} | ||
console.log(' [error] '.red + message) | ||
}) | ||
@@ -104,2 +107,2 @@ .on('finish', function () { | ||
) | ||
.pipe(vfs.dest(output)) | ||
.pipe(vfs.dest(process.cwd())) |
# Changelog | ||
## 1.0.0-beta2 - latest | ||
## 1.0.0-beta - latest | ||
- See [release](https://github.com/i18next/i18next-parser/releases/tag/1.0.0-beta2) | ||
- The changelog for the beta can be found in the [releases](https://github.com/i18next/i18next-parser/releases) | ||
## 1.0.0-beta1 | ||
- See [release](https://github.com/i18next/i18next-parser/releases/tag/1.0.0-beta1) | ||
## 0.13.0 | ||
@@ -12,0 +8,0 @@ |
@@ -1,8 +0,14 @@ | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });exports.populateHash = exports.mergeHashes = exports.dotPathToHash = undefined;var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) {return typeof obj;} : function (obj) {return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;};var _lodash = require('lodash');var _lodash2 = _interopRequireDefault(_lodash);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };} | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) {return typeof obj;} : function (obj) {return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;};function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;} /** | ||
* Take an entry for the Parser and turn it into a hash, | ||
* turning the key path 'foo.bar' into an hash {foo: {bar: ""}} | ||
* The generated hash can be merged with an optional `target`. | ||
* @returns An `{ target, duplicate }` object. `target` is the hash that | ||
* was passed as an argument or a new hash if none was passed. `duplicate` | ||
* indicates whether the entry already existed in the `target` hash. | ||
*/ | ||
function dotPathToHash(entry) {var target = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; | ||
var path = entry.key; | ||
var separator = options.separator || '.'; | ||
var newValue = entry.defaultValue || options.value || ''; | ||
// Takes a `path` of the form 'foo.bar' and | ||
// turn it into a hash {foo: {bar: ""}}. | ||
// The generated hash can be attached to an | ||
// optional `hash`. | ||
function dotPathToHash(path) {var separator = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '.';var value = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : '';var target = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; | ||
if (path.endsWith(separator)) { | ||
@@ -12,25 +18,41 @@ path = path.slice(0, -separator.length); | ||
var result = {}; | ||
var segments = path.split(separator); | ||
segments.reduce(function (hash, segment, index) { | ||
if (index === segments.length - 1) { | ||
hash[segment] = value; | ||
} else | ||
{ | ||
hash[segment] = {}; | ||
var inner = target; | ||
for (var i = 0; i < segments.length - 1; i += 1) { | ||
var segment = segments[i]; | ||
if (segment) { | ||
if (inner[segment] === undefined) { | ||
inner[segment] = {}; | ||
} | ||
inner = inner[segment]; | ||
} | ||
return hash[segment]; | ||
}, result); | ||
} | ||
return _lodash2.default.merge(target, result); | ||
var lastSegment = segments[segments.length - 1]; | ||
var oldValue = inner[lastSegment]; | ||
var duplicate = oldValue !== undefined; | ||
var conflict = oldValue !== undefined && oldValue !== newValue; | ||
inner[lastSegment] = newValue; | ||
return { target: target, duplicate: duplicate, conflict: conflict }; | ||
} | ||
// Takes a `source` hash and make sure its value | ||
// are pasted in the `target` hash, if the target | ||
// hash has the corresponding key (or if keepRemoved is true). | ||
// If not, the value is added to an `old` hash. | ||
function mergeHashes(source) {var target = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};var old = arguments[2];var keepRemoved = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false; | ||
old = old || {}; | ||
Object.keys(source).forEach(function (key) { | ||
/** | ||
* Takes a `source` hash and makes sure its value | ||
* is pasted in the `target` hash, if the target | ||
* hash has the corresponding key (or if `keepRemoved` is true). | ||
* @returns An `{ old, new, mergeCount, pullCount, oldCount }` object. | ||
* `old` is a hash of values that have not been merged into `target`. | ||
* `new` is `target`. `mergeCount` is the number of keys merged into | ||
* `new`, `pullCount` is the number of context and plural keys added to | ||
* `new` and `oldCount` is the number of keys that were either added to `old` or | ||
* `new` (if `keepRemoved` is true and `target` didn't have the corresponding | ||
* key). | ||
*/ | ||
function mergeHashes(source, target) {var keepRemoved = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false; | ||
var old = {}; | ||
var mergeCount = 0; | ||
var pullCount = 0; | ||
var oldCount = 0; | ||
for (var key in source) { | ||
var hasNestedEntries = | ||
@@ -46,58 +68,79 @@ _typeof(target[key]) === 'object' && !Array.isArray(target[key]); | ||
target[key] = nested.new; | ||
old[key] = nested.old; | ||
mergeCount += nested.mergeCount; | ||
pullCount += nested.pullCount; | ||
if (Object.keys(nested.old).length) { | ||
old[key] = nested.old; | ||
oldCount += nested.oldCount; | ||
} | ||
} else | ||
if (target[key] !== undefined) { | ||
if (typeof source[key] === 'string' || Array.isArray(source[key])) { | ||
target[key] = source[key]; | ||
{ | ||
if (target[key] !== undefined) { | ||
if ( | ||
typeof source[key] === 'string' || | ||
Array.isArray(source[key])) | ||
{ | ||
target[key] = source[key]; | ||
mergeCount += 1; | ||
} else { | ||
old[key] = source[key]; | ||
oldCount += 1; | ||
} | ||
} else | ||
{ | ||
old[key] = source[key]; | ||
} | ||
} else | ||
{ | ||
// support for plural in keys | ||
var pluralMatch = /_plural(_\d+)?$/.test(key); | ||
var singularKey = key.replace(/_plural(_\d+)?$/, ''); | ||
// support for plural in keys | ||
var pluralRegex = /_plural(_\d+)?$/; | ||
var pluralMatch = pluralRegex.test(key); | ||
var singularKey = key.replace(pluralRegex, ''); | ||
// support for context in keys | ||
var contextMatch = /_([^_]+)?$/.test(singularKey); | ||
var rawKey = singularKey.replace(/_([^_]+)?$/, ''); | ||
// support for context in keys | ||
var contextRegex = /_([^_]+)?$/; | ||
var contextMatch = contextRegex.test(singularKey); | ||
var rawKey = singularKey.replace(contextRegex, ''); | ||
if ( | ||
contextMatch && target[rawKey] !== undefined || | ||
pluralMatch && target[singularKey] !== undefined) | ||
{ | ||
target[key] = source[key]; | ||
} else | ||
if (keepRemoved) { | ||
target[key] = source[key]; | ||
old[key] = source[key]; | ||
} else | ||
{ | ||
old[key] = source[key]; | ||
if ( | ||
contextMatch && target[rawKey] !== undefined || | ||
pluralMatch && target[singularKey] !== undefined) | ||
{ | ||
target[key] = source[key]; | ||
pullCount += 1; | ||
} else { | ||
if (keepRemoved) { | ||
target[key] = source[key]; | ||
} else { | ||
old[key] = source[key]; | ||
} | ||
oldCount += 1; | ||
} | ||
} | ||
} | ||
}); | ||
} | ||
return { old: old, new: target }; | ||
return { old: old, new: target, mergeCount: mergeCount, pullCount: pullCount, oldCount: oldCount }; | ||
} | ||
// Takes a `target` hash and replace its empty | ||
// values with the `source` hash ones if they | ||
// exist | ||
function populateHash(source) {var target = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; | ||
Object.keys(source).forEach(function (key) { | ||
if (target[key] !== undefined) { | ||
if (_typeof(source[key]) === 'object') { | ||
target[key] = populateHash(source[key], target[key]); | ||
} else | ||
if (target[key] === '') { | ||
target[key] = source[key]; | ||
} | ||
/** | ||
* Merge `source` into `target` by merging nested dictionaries. | ||
*/ | ||
function transferValues(source, target) { | ||
for (var key in source) { | ||
var sourceValue = source[key]; | ||
var targetValue = target[key]; | ||
if ( | ||
(typeof sourceValue === 'undefined' ? 'undefined' : _typeof(sourceValue)) === 'object' && | ||
(typeof targetValue === 'undefined' ? 'undefined' : _typeof(targetValue)) === 'object' && | ||
!Array.isArray(sourceValue)) | ||
{ | ||
transferValues(sourceValue, targetValue); | ||
} else | ||
{ | ||
target[key] = sourceValue; | ||
} | ||
}); | ||
} | ||
}var | ||
return target; | ||
}exports. | ||
ParsingError = function (_Error) {_inherits(ParsingError, _Error); | ||
function ParsingError(message) {_classCallCheck(this, ParsingError);var _this = _possibleConstructorReturn(this, (ParsingError.__proto__ || Object.getPrototypeOf(ParsingError)).call(this, | ||
message)); | ||
_this.name = 'ParsingError';return _this; | ||
}return ParsingError;}(Error);exports. | ||
@@ -108,2 +151,3 @@ | ||
mergeHashes = mergeHashes;exports. | ||
populateHash = populateHash; | ||
transferValues = transferValues;exports. | ||
ParsingError = ParsingError; |
@@ -1,206 +0,4 @@ | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _helpers = require('./helpers'); | ||
var _stream = require('stream'); | ||
var _lodash = require('lodash');var _lodash2 = _interopRequireDefault(_lodash); | ||
var _eol = require('eol');var _eol2 = _interopRequireDefault(_eol); | ||
var _fs = require('fs');var _fs2 = _interopRequireDefault(_fs); | ||
var _parser = require('./parser');var _parser2 = _interopRequireDefault(_parser); | ||
var _path = require('path');var _path2 = _interopRequireDefault(_path); | ||
var _vinyl = require('vinyl');var _vinyl2 = _interopRequireDefault(_vinyl); | ||
var _yamljs = require('yamljs');var _yamljs2 = _interopRequireDefault(_yamljs);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
i18nTransform = function (_Transform) {_inherits(i18nTransform, _Transform); | ||
function i18nTransform() {var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};_classCallCheck(this, i18nTransform); | ||
options.objectMode = true;var _this = _possibleConstructorReturn(this, (i18nTransform.__proto__ || Object.getPrototypeOf(i18nTransform)).call(this, | ||
options)); | ||
_this.defaults = { | ||
contextSeparator: '_', | ||
createOldLibraries: true, | ||
defaultNamespace: 'translation', | ||
defaultValue: '', | ||
extension: '.json', | ||
filename: '$NAMESPACE', | ||
indentation: 2, | ||
keepRemoved: false, | ||
keySeparator: '.', | ||
lexers: {}, | ||
lineEnding: 'auto', | ||
locales: ['en', 'fr'], | ||
namespaceSeparator: ':', | ||
output: 'locales', | ||
sort: false }; | ||
_this.options = _extends({}, _this.defaults, options); | ||
_this.entries = []; | ||
_this.parser = new _parser2.default(_this.options.lexers); | ||
_this.parser.on('error', function (error) {return _this.emit('error', error);}); | ||
_this.parser.on('warning', function (warning) {return _this.emit('warning', warning);}); | ||
_this.localeRegex = /\$LOCALE/g; | ||
_this.namespaceRegex = /\$NAMESPACE/g;return _this; | ||
}_createClass(i18nTransform, [{ key: '_transform', value: function _transform( | ||
file, encoding, done) {var _this2 = this; | ||
var content = void 0; | ||
if (file.isBuffer()) { | ||
content = file.contents; | ||
} else | ||
{ | ||
content = _fs2.default.readFileSync(file.path, encoding); | ||
} | ||
this.emit('reading', file); | ||
var extenstion = _path2.default.extname(file.path).substring(1); | ||
var entries = this.parser.parse(content, extenstion); | ||
entries.forEach(function (entry) { | ||
var key = entry.key; | ||
var parts = key.split(_this2.options.namespaceSeparator); | ||
if (parts.length > 1) { | ||
entry.namespace = parts.shift(); | ||
} else | ||
{ | ||
entry.namespace = _this2.options.defaultNamespace; | ||
} | ||
key = parts.join(_this2.options.namespaceSeparator); | ||
key = key.replace(/\\('|"|`)/g, '$1'); | ||
key = key.replace(/\\n/g, '\n'); | ||
key = key.replace(/\\r/g, '\r'); | ||
key = key.replace(/\\t/g, '\t'); | ||
key = key.replace(/\\\\/g, '\\'); | ||
entry.key = entry.namespace + _this2.options.keySeparator + key; | ||
_this2.addEntry(entry); | ||
}); | ||
done(); | ||
} }, { key: '_flush', value: function _flush( | ||
done) {var _this3 = this; | ||
var catalog = {}; | ||
if (this.options.sort) { | ||
this.entries = this.entries.sort(function (a, b) {return a.key.localeCompare(b.key);}); | ||
} | ||
this.entries.forEach(function (entry) { | ||
catalog = (0, _helpers.dotPathToHash)( | ||
entry.key, | ||
_this3.options.keySeparator, | ||
entry.defaultValue || _this3.options.defaultValue, | ||
catalog); | ||
}); | ||
this.options.locales.forEach(function (locale) { | ||
var outputPath = _path2.default.resolve(_this3.options.output, locale); | ||
Object.keys(catalog).forEach(function (namespace) { | ||
var filename = _this3.options.filename; | ||
filename = filename.replace(_this3.localeRegex, locale); | ||
filename = filename.replace(_this3.namespaceRegex, namespace); | ||
var extension = _this3.options.extension; | ||
extension = extension.replace(_this3.localeRegex, locale); | ||
extension = extension.replace(_this3.namespaceRegex, namespace); | ||
var oldFilename = filename + '_old' + extension; | ||
filename += extension; | ||
var namespacePath = _path2.default.resolve(outputPath, filename); | ||
var namespaceOldPath = _path2.default.resolve(outputPath, oldFilename); | ||
var newCatalog = void 0; | ||
var existingCatalog = _this3.getCatalog(namespacePath); | ||
var oldCatalog = _this3.getCatalog(namespaceOldPath); | ||
// merges existing translations with the new ones | ||
var _mergeHashes = (0, _helpers.mergeHashes)( | ||
existingCatalog, | ||
catalog[namespace], | ||
null, | ||
_this3.options.keepRemoved),newKeys = _mergeHashes.new,oldKeys = _mergeHashes.old; | ||
// restore old translations if the key is empty | ||
newCatalog = (0, _helpers.populateHash)(oldCatalog, newKeys); | ||
// add keys from the current catalog that are no longer used | ||
oldCatalog = _lodash2.default.extend(oldCatalog, oldKeys); | ||
// push files back to the stream | ||
_this3.pushFile(namespacePath, newCatalog); | ||
if (_this3.options.createOldLibraries) { | ||
_this3.pushFile(namespaceOldPath, oldCatalog); | ||
} | ||
}); | ||
}); | ||
done(); | ||
} }, { key: 'addEntry', value: function addEntry( | ||
entry) { | ||
var existing = this.entries.filter(function (x) {return x.key === entry.key;})[0]; | ||
if (!existing) { | ||
this.entries.push(entry); | ||
} else | ||
{ | ||
existing = _extends({}, existing, entry); | ||
} | ||
if (entry.context) { | ||
var contextEntry = Object.assign({}, entry); | ||
delete contextEntry.context; | ||
contextEntry.key += this.options.contextSeparator + entry.context; | ||
this.addEntry(contextEntry); | ||
} | ||
} }, { key: 'getCatalog', value: function getCatalog( | ||
path) { | ||
var content = void 0; | ||
try { | ||
content = JSON.parse(_fs2.default.readFileSync(path)); | ||
} | ||
catch (error) { | ||
if (error.code !== 'ENOENT') { | ||
this.emit('error', error); | ||
} | ||
content = {}; | ||
} | ||
return content; | ||
} }, { key: 'pushFile', value: function pushFile( | ||
path, contents) { | ||
var text = void 0; | ||
if (path.endsWith('yml')) { | ||
text = _yamljs2.default.stringify(contents, null, this.options.indentation); | ||
} else | ||
{ | ||
text = JSON.stringify(contents, null, this.options.indentation) + '\n'; | ||
} | ||
if (this.options.lineEnding === 'auto') { | ||
text = _eol2.default.auto(text); | ||
} else | ||
if (lineEnding === '\r\n' || lineEnding === 'crlf') { | ||
text = _eol2.default.crlf(text); | ||
} else | ||
if (lineEnding === '\r' || lineEnding === 'cr') { | ||
text = _eol2.default.cr(text); | ||
} else | ||
{ | ||
// Defaults to LF, aka \n | ||
text = _eol2.default.lf(text); | ||
} | ||
var file = new _vinyl2.default({ | ||
path: path, | ||
contents: Buffer.from(text) }); | ||
this.push(file); | ||
} }]);return i18nTransform;}(_stream.Transform);exports.default = i18nTransform; | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _broccoli = require('./broccoli');Object.defineProperty(exports, 'broccoli', { enumerable: true, get: function get() {return _interopRequireDefault(_broccoli).default;} });var _parser = require('./parser');Object.defineProperty(exports, 'parser', { enumerable: true, get: function get() {return _interopRequireDefault(_parser). | ||
default;} });var _transform = require('./transform');Object.defineProperty(exports, 'transform', { enumerable: true, get: function get() {return _interopRequireDefault(_transform). | ||
default;} });Object.defineProperty(exports, 'gulp', { enumerable: true, get: function get() {return _interopRequireDefault(_transform). | ||
default;} });function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };} |
@@ -1,2 +0,2 @@ | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _events = require('events');var _events2 = _interopRequireDefault(_events);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _events = require('events');var _events2 = _interopRequireDefault(_events);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
@@ -8,25 +8,4 @@ BaseLexer = function (_EventEmitter) {_inherits(BaseLexer, _EventEmitter); | ||
_this.functions = options.functions || ['t'];return _this; | ||
}_createClass(BaseLexer, [{ key: 'populateKeysFromArguments', value: function populateKeysFromArguments( | ||
}_createClass(BaseLexer, [{ key: 'validateString', value: function validateString( | ||
args) { | ||
var firstArgument = args.arguments[0]; | ||
var secondArgument = args.arguments[1]; | ||
var isKeyString = this.validateString(firstArgument); | ||
var isDefaultValueString = this.validateString(secondArgument); | ||
if (!isKeyString) { | ||
this.emit('warning', 'Key is not a string litteral: ' + firstArgument); | ||
} else | ||
{ | ||
var result = _extends({}, | ||
args.options, { | ||
key: firstArgument.slice(1, -1) }); | ||
if (isDefaultValueString) { | ||
result.defaultValue = secondArgument.slice(1, -1); | ||
} | ||
this.keys.push(result); | ||
} | ||
} }, { key: 'validateString', value: function validateString( | ||
string) { | ||
@@ -78,2 +57,2 @@ var regex = new RegExp('^' + BaseLexer.stringPattern + '$', 'i'); | ||
} }]);return BaseLexer;}(_events2.default);exports.default = BaseLexer; | ||
} }]);return BaseLexer;}(_events2.default);exports.default = BaseLexer;module.exports = exports['default']; |
@@ -1,2 +0,2 @@ | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
@@ -39,2 +39,23 @@ HandlebarsLexer = function (_BaseLexer) {_inherits(HandlebarsLexer, _BaseLexer); | ||
return result; | ||
} }, { key: 'populateKeysFromArguments', value: function populateKeysFromArguments( | ||
args) { | ||
var firstArgument = args.arguments[0]; | ||
var secondArgument = args.arguments[1]; | ||
var isKeyString = this.validateString(firstArgument); | ||
var isDefaultValueString = this.validateString(secondArgument); | ||
if (!isKeyString) { | ||
this.emit('warning', 'Key is not a string literal: ' + firstArgument); | ||
} else | ||
{ | ||
var result = _extends({}, | ||
args.options, { | ||
key: firstArgument.slice(1, -1) }); | ||
if (isDefaultValueString) { | ||
result.defaultValue = secondArgument.slice(1, -1); | ||
} | ||
this.keys.push(result); | ||
} | ||
} }, { key: 'createFunctionRegex', value: function createFunctionRegex() | ||
@@ -44,3 +65,3 @@ | ||
var functionPattern = this.functionPattern(); | ||
var curlyPattern = '(?:{{)' + functionPattern + '\\s+(.*)(?:}})'; | ||
var curlyPattern = '(?:{{)' + functionPattern + '\\s+(.*?)(?:}})'; | ||
var parenthesisPattern = '(?:\\()' + functionPattern + '\\s+(.*)(?:\\))'; | ||
@@ -65,2 +86,2 @@ var pattern = curlyPattern + '|' + parenthesisPattern; | ||
return this.argumentsRegex; | ||
} }]);return HandlebarsLexer;}(_baseLexer2.default);exports.default = HandlebarsLexer; | ||
} }]);return HandlebarsLexer;}(_baseLexer2.default);exports.default = HandlebarsLexer;module.exports = exports['default']; |
@@ -1,2 +0,3 @@ | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer); | ||
var _cheerio = require('cheerio');var _cheerio2 = _interopRequireDefault(_cheerio);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
@@ -8,67 +9,36 @@ HTMLLexer = function (_BaseLexer) {_inherits(HTMLLexer, _BaseLexer); | ||
_this.attr = options.attr || 'data-i18n'; | ||
_this.optionAttr = options.optionAttr || 'data-i18n-options'; | ||
_this.optionAttr = options.optionAttr || 'data-i18n-options';return _this; | ||
}_createClass(HTMLLexer, [{ key: 'extract', value: function extract( | ||
_this.createAttributeRegex(); | ||
_this.createOptionAttributeRegex();return _this; | ||
} | ||
content) {var _this2 = this; | ||
var that = this; | ||
var $ = _cheerio2.default.load(content); | ||
$('[' + that.attr + ']').each(function (index, node) { | ||
var $node = _cheerio2.default.load(node); | ||
// TODO rewrite to support the BaseLexer.extract() | ||
_createClass(HTMLLexer, [{ key: 'extract', value: function extract(content) {var _this2 = this; | ||
var matches = void 0; | ||
var regex = new RegExp( | ||
'<([A-Z][A-Z0-9]*)([^>]*\\s' + this.attr + '[^>]*)>(?:(.*?)<\\/\\1>)?', | ||
'gi');var _loop = function _loop() { | ||
// the attribute can hold multiple keys | ||
var keys = node.attribs[that.attr].split(';'); | ||
var options = node.attribs[that.optionAttr]; | ||
if (options) { | ||
try { | ||
options = JSON.parse(options); | ||
} finally | ||
{} | ||
}var _iteratorNormalCompletion = true;var _didIteratorError = false;var _iteratorError = undefined;try { | ||
for (var _iterator = keys[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {var key = _step.value; | ||
// remove any leading [] in the key | ||
key = key.replace(/^\[[a-zA-Z0-9_-]*\]/, ''); | ||
var attrs = _this2.parseAttributes(matches[2]); | ||
// if empty grab innerHTML from regex | ||
key = key || $node.text(); | ||
// the attribute can hold multiple keys | ||
var keys = attrs.keys.split(';'); | ||
keys.forEach(function (key) { | ||
// remove any leading [] in the key | ||
key = key.replace(/^\[[a-zA-Z0-9_-]*\]/, ''); | ||
if (key) { | ||
_this2.keys.push(_extends({}, options, { key: key })); | ||
} | ||
}} catch (err) {_didIteratorError = true;_iteratorError = err;} finally {try {if (!_iteratorNormalCompletion && _iterator.return) {_iterator.return();}} finally {if (_didIteratorError) {throw _iteratorError;}}} | ||
}); | ||
// if empty grab innerHTML from regex | ||
key = key || matches[3]; | ||
if (key) { | ||
_this2.keys.push(_extends({}, attrs.options, { key: key })); | ||
} | ||
});};while (matches = regex.exec(content)) {_loop(); | ||
} | ||
return this.keys; | ||
} }, { key: 'createAttributeRegex', value: function createAttributeRegex() | ||
{ | ||
var pattern = '(?:' + this.attr + ')(?:\\s*=\\s*(' + _baseLexer2.default.stringPattern + ')|$|\\s)'; | ||
this.attrRegex = new RegExp(pattern, 'i'); | ||
return this.attrRegex; | ||
} }, { key: 'createOptionAttributeRegex', value: function createOptionAttributeRegex() | ||
{ | ||
var pattern = '(?:' + this.optionAttr + ')(?:\\s*=\\s*(' + _baseLexer2.default.stringPattern + '))?'; | ||
this.optionAttrRegex = new RegExp(pattern, 'i'); | ||
return this.optionAttrRegex; | ||
} }, { key: 'parseAttributes', value: function parseAttributes( | ||
args) { | ||
var result = { keys: '', options: {} }; | ||
this.attrRegex.lastIndex = 0; | ||
var keysMatch = this.attrRegex.exec(args); | ||
if (keysMatch && keysMatch[1]) { | ||
result.keys = keysMatch[1].slice(1, -1); | ||
} | ||
this.optionAttrRegex.lastIndex = 0; | ||
var optionsMatch = this.optionAttrRegex.exec(args); | ||
if (optionsMatch && optionsMatch[1]) { | ||
try { | ||
result.options = JSON.parse(optionsMatch[1].slice(1, -1)); | ||
} finally | ||
{} | ||
} | ||
return result; | ||
} }]);return HTMLLexer;}(_baseLexer2.default);exports.default = HTMLLexer; | ||
} }]);return HTMLLexer;}(_baseLexer2.default);exports.default = HTMLLexer;module.exports = exports['default']; |
@@ -1,2 +0,4 @@ | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _acornJsx = require('acorn-jsx');var acorn = _interopRequireWildcard(_acornJsx); | ||
var _walk = require('acorn/dist/walk');var walk = _interopRequireWildcard(_walk); | ||
var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _interopRequireWildcard(obj) {if (obj && obj.__esModule) {return obj;} else {var newObj = {};if (obj != null) {for (var key in obj) {if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key];}}newObj.default = obj;return newObj;}}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
@@ -7,150 +9,94 @@ JavascriptLexer = function (_BaseLexer) {_inherits(JavascriptLexer, _BaseLexer); | ||
_this.acornOptions = _extends({ sourceType: 'module' }, options.acorn); | ||
_this.functions = options.functions || ['t']; | ||
_this.createFunctionRegex(); | ||
_this.createArgumentsRegex(); | ||
_this.createHashRegex();return _this; | ||
_this.attr = options.attr || 'i18nKey';return _this; | ||
}_createClass(JavascriptLexer, [{ key: 'extract', value: function extract( | ||
content) { | ||
var matches = void 0; | ||
var that = this; | ||
while (matches = this.functionRegex.exec(content)) { | ||
var args = this.parseArguments(matches[1] || matches[2]); | ||
this.populateKeysFromArguments(args); | ||
} | ||
walk.simple( | ||
acorn.parse(content, this.acornOptions), | ||
{ | ||
CallExpression: function CallExpression(node) { | ||
that.expressionExtractor.call(that, node); | ||
} }); | ||
return this.keys; | ||
} }, { key: 'parseArguments', value: function parseArguments( | ||
} }, { key: 'expressionExtractor', value: function expressionExtractor( | ||
args) { | ||
var matches = void 0; | ||
var result = { | ||
arguments: [], | ||
options: {} }; | ||
node) { | ||
var entry = {}; | ||
var isTranslationFunction = | ||
node.callee && ( | ||
this.functions.includes(node.callee.name) || | ||
node.callee.property && this.functions.includes(node.callee.property.name)); | ||
while (matches = this.argumentsRegex.exec(args)) { | ||
var arg = matches[1]; | ||
if (arg.startsWith('{')) { | ||
var optionMatches = void 0; | ||
while (optionMatches = this.hashRegex.exec(args)) { | ||
var key = optionMatches[2]; | ||
var value = optionMatches[3]; | ||
if (this.validateString(value)) { | ||
result.options[key] = value.slice(1, -1); | ||
} | ||
if (isTranslationFunction) { | ||
var keyArgument = node.arguments.shift(); | ||
if (keyArgument && keyArgument.type === 'Literal') { | ||
entry.key = keyArgument.value; | ||
} else | ||
if (keyArgument && keyArgument.type === 'BinaryExpression') { | ||
var concatenatedString = this.concatenateString(keyArgument); | ||
if (!concatenatedString) { | ||
this.emit('warning', 'Key is not a string literal: ' + keyArgument.name); | ||
return; | ||
} | ||
entry.key = concatenatedString; | ||
} else | ||
{ | ||
arg = this.concatenateString(arg); | ||
if (keyArgument.type === 'Identifier') { | ||
this.emit('warning', 'Key is not a string literal: ' + keyArgument.name); | ||
} | ||
return; | ||
} | ||
result.arguments.push(arg); | ||
var optionsArgument = node.arguments.shift(); | ||
if (optionsArgument && optionsArgument.type === 'Literal') { | ||
entry.defaultValue = optionsArgument.value; | ||
} else | ||
if (optionsArgument && optionsArgument.type === 'ObjectExpression') {var _iteratorNormalCompletion = true;var _didIteratorError = false;var _iteratorError = undefined;try { | ||
for (var _iterator = optionsArgument.properties[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {var p = _step.value; | ||
entry[p.key.name || p.key.value] = p.value.value; | ||
}} catch (err) {_didIteratorError = true;_iteratorError = err;} finally {try {if (!_iteratorNormalCompletion && _iterator.return) {_iterator.return();}} finally {if (_didIteratorError) {throw _iteratorError;}}} | ||
} | ||
this.keys.push(entry); | ||
} | ||
return result; | ||
} }, { key: 'concatenateString', value: function concatenateString( | ||
string) {var _this2 = this; | ||
string = string.trim(); | ||
var matches = void 0; | ||
var containsVariable = false; | ||
var parts = []; | ||
var quotationMark = string.charAt(0) === '"' ? '"' : "'"; | ||
binaryExpression) {var string = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; | ||
if (binaryExpression.operator !== '+') { | ||
return; | ||
} | ||
var regex = new RegExp(JavascriptLexer.concatenatedSegmentPattern, 'gi'); | ||
while (matches = regex.exec(string)) { | ||
var match = matches[0].trim(); | ||
if (match !== '+') { | ||
parts.push(match); | ||
} | ||
if (binaryExpression.left.type === 'BinaryExpression') { | ||
string += this.concatenateString(binaryExpression.left, string); | ||
} else | ||
if (binaryExpression.left.type === 'Literal') { | ||
string += binaryExpression.left.value; | ||
} else | ||
{ | ||
return; | ||
} | ||
var result = parts.reduce(function (concatenatedString, x) { | ||
x = x && x.trim(); | ||
if (_this2.validateString(x)) { | ||
concatenatedString += x.slice(1, -1); | ||
} else | ||
{ | ||
containsVariable = true; | ||
} | ||
return concatenatedString; | ||
}, ''); | ||
if (!result || containsVariable) { | ||
return string; | ||
if (binaryExpression.right.type === 'BinaryExpression') { | ||
string += this.concatenateString(binaryExpression.right, string); | ||
} else | ||
if (binaryExpression.right.type === 'Literal') { | ||
string += binaryExpression.right.value; | ||
} else | ||
{ | ||
return quotationMark + result + quotationMark; | ||
return; | ||
} | ||
} }, { key: 'createFunctionRegex', value: function createFunctionRegex() | ||
{ | ||
var pattern = | ||
'(?:\\W|^)' + | ||
this.functionPattern() + '\\s*\\(\\s*' + | ||
JavascriptLexer.stringOrVariableOrHashPattern + | ||
'\\s*\\)'; | ||
this.functionRegex = new RegExp(pattern, 'gi'); | ||
return this.functionRegex; | ||
} }, { key: 'createArgumentsRegex', value: function createArgumentsRegex() | ||
{ | ||
var pattern = | ||
'(' + | ||
[ | ||
JavascriptLexer.concatenatedArgumentPattern, | ||
JavascriptLexer.hashPattern]. | ||
join('|') + | ||
')' + | ||
'(?:\\s*,\\s*)?'; | ||
this.argumentsRegex = new RegExp(pattern, 'gi'); | ||
return this.argumentsRegex; | ||
} }, { key: 'createHashRegex', value: function createHashRegex() | ||
{ | ||
var pattern = | ||
'(?:(\'|")?(' + | ||
['context', 'defaultValue'].join('|') + | ||
')\\1)' + | ||
'(?:\\s*:\\s*)' + | ||
'(' + _baseLexer2.default.stringPattern + ')'; | ||
this.hashRegex = new RegExp(pattern, 'gi'); | ||
return this.hashRegex; | ||
} }], [{ key: 'concatenatedSegmentPattern', get: function get() {return [_baseLexer2.default.singleQuotePattern, _baseLexer2.default.doubleQuotePattern, _baseLexer2.default.backQuotePattern, _baseLexer2.default.variablePattern, '(?:\\s*\\+\\s*)' // support for concatenation via + | ||
].join('|');} }, { key: 'concatenatedArgumentPattern', get: function get() {return '(' + '(?:' + JavascriptLexer.concatenatedSegmentPattern + ')+' + ')';} }, { key: 'hashPattern', get: function get() {return '(\\{[^}]*\\})';} }, { key: 'stringOrVariableOrHashPattern', get: function get() {return '(' + '(' + '(?:' + [JavascriptLexer.concatenatedArgumentPattern, JavascriptLexer.hashPattern].join('|') + ')' + '(?:\\s*,\\s*)?' + ')+' + ')';} }]);return JavascriptLexer;}(_baseLexer2.default);exports.default = JavascriptLexer; | ||
return string; | ||
} }]);return JavascriptLexer;}(_baseLexer2.default);exports.default = JavascriptLexer;module.exports = exports['default']; |
@@ -5,2 +5,3 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _events = require('events');var _events2 = _interopRequireDefault(_events); | ||
var _javascriptLexer = require('./lexers/javascript-lexer');var _javascriptLexer2 = _interopRequireDefault(_javascriptLexer); | ||
var _jsxLexer = require('./lexers/jsx-lexer');var _jsxLexer2 = _interopRequireDefault(_jsxLexer); | ||
var _path = require('path');var _path2 = _interopRequireDefault(_path);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;} | ||
@@ -16,2 +17,3 @@ | ||
js: ['JavascriptLexer'], | ||
jsx: ['JsxLexer'], | ||
mjs: ['JavascriptLexer'], | ||
@@ -25,3 +27,4 @@ | ||
HTMLLexer: _htmlLexer2.default, | ||
JavascriptLexer: _javascriptLexer2.default };var | ||
JavascriptLexer: _javascriptLexer2.default, | ||
JsxLexer: _jsxLexer2.default };var | ||
@@ -32,3 +35,9 @@ | ||
options)); | ||
_this.lexers = _extends({}, lexers, options);return _this; | ||
_this.options = options; | ||
if (options.reactNamespace) { | ||
lexers.js = lexers.jsx; | ||
} | ||
_this.lexers = _extends({}, lexers, options.lexers);return _this; | ||
}_createClass(Parser, [{ key: 'parse', value: function parse( | ||
@@ -38,28 +47,27 @@ | ||
var keys = []; | ||
var lexers = this.lexers[extension] || this.lexers.default; | ||
var lexers = this.lexers[extension] || this.lexers.default;var _iteratorNormalCompletion = true;var _didIteratorError = false;var _iteratorError = undefined;try { | ||
lexers.forEach(function (lexerConfig) { | ||
var lexerName = void 0; | ||
var lexerOptions = void 0; | ||
for (var _iterator = lexers[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {var lexerConfig = _step.value; | ||
var lexerName = void 0; | ||
var lexerOptions = void 0; | ||
if (typeof lexerConfig === 'string') { | ||
lexerName = lexerConfig; | ||
lexerOptions = {}; | ||
} else | ||
{ | ||
lexerName = lexerConfig.lexer; | ||
delete lexerConfig.lexer; | ||
lexerOptions = lexerConfig; | ||
} | ||
if (typeof lexerConfig === 'string') { | ||
lexerName = lexerConfig; | ||
lexerOptions = {}; | ||
} else | ||
{ | ||
lexerName = lexerConfig.lexer; | ||
lexerOptions = lexerConfig; | ||
} | ||
if (!lexersMap[lexerName]) { | ||
_this2.emit('error', new Error('Lexer \'' + lexerName + '\' does not exist')); | ||
} | ||
if (!lexersMap[lexerName]) { | ||
this.emit('error', new Error('Lexer \'' + lexerName + '\' does not exist')); | ||
} | ||
var Lexer = new lexersMap[lexerName](lexerOptions); | ||
Lexer.on('warning', function (warning) {return _this2.emit('warning', warning);}); | ||
keys = keys.concat(Lexer.extract(content)); | ||
}); | ||
var Lexer = new lexersMap[lexerName](lexerOptions); | ||
Lexer.on('warning', function (warning) {return _this2.emit('warning', warning);}); | ||
keys = keys.concat(Lexer.extract(content)); | ||
}} catch (err) {_didIteratorError = true;_iteratorError = err;} finally {try {if (!_iteratorNormalCompletion && _iterator.return) {_iterator.return();}} finally {if (_didIteratorError) {throw _iteratorError;}}} | ||
return keys; | ||
} }]);return Parser;}(_events2.default);exports.default = Parser; | ||
} }]);return Parser;}(_events2.default);exports.default = Parser;module.exports = exports['default']; |
@@ -46,5 +46,22 @@ # Contribute | ||
cd test | ||
i18next 'templating/**/*' -o manual | ||
i18next test/**/*.html -c i18next-parser.config.js | ||
``` | ||
To test gulp: | ||
``` | ||
yarn global add gulp@next | ||
cd test | ||
gulp i18next | ||
``` | ||
To test broccoli: | ||
``` | ||
yarn global add broccoli-cli | ||
cd test | ||
rm -rf dist && broccoli build dist | ||
``` | ||
## `0.x` vs `1.x` | ||
@@ -58,1 +75,9 @@ | ||
I will not maintain the old version but will welcome bug fixes as PRs. | ||
## Deploy | ||
- `yarn watch` | ||
- update `package.json` version | ||
- create commit and add version tag | ||
- `npm publish --tag next` (skip next tag if not in beta) | ||
- create a github release |
@@ -5,5 +5,5 @@ { | ||
"name": "i18next-parser", | ||
"version": "1.0.0-beta2", | ||
"version": "1.0.0-beta21", | ||
"license": "MIT", | ||
"main": "src/index.js", | ||
"main": "dist/index.js", | ||
"bin": { | ||
@@ -14,3 +14,3 @@ "i18next": "./bin/cli.js" | ||
"test": "mocha --require babel-register --require babel-polyfill 'test/**/*.test.js'", | ||
"watch": "babel src -d dist -w" | ||
"watch": "./node_modules/babel-cli/bin/babel.js src -d dist -w" | ||
}, | ||
@@ -22,2 +22,6 @@ "repository": { | ||
"dependencies": { | ||
"acorn": "^5.5.3", | ||
"acorn-jsx": "^4.1.1", | ||
"broccoli-plugin": "^1.3.0", | ||
"cheerio": "^1.0.0-rc.2", | ||
"colors": "~1.2.0-rc0", | ||
@@ -27,4 +31,5 @@ "commander": "~2.9.0", | ||
"eol": "^0.9.1", | ||
"fs-extra": "^6.0.1", | ||
"gulp-sort": "^2.0.0", | ||
"lodash": "~4.17.4", | ||
"rsvp": "^4.8.2", | ||
"through2": "~2.0.3", | ||
@@ -40,2 +45,3 @@ "vinyl": "~2.0.1", | ||
"babel-cli": "^6.26.0", | ||
"babel-plugin-add-module-exports": "^0.2.1", | ||
"babel-plugin-transform-object-rest-spread": "^6.26.0", | ||
@@ -45,3 +51,6 @@ "babel-polyfill": "^6.26.0", | ||
"babel-register": "^6.26.0", | ||
"broccoli": "^1.1.4", | ||
"broccoli-funnel": "^2.0.1", | ||
"chai": "^4.1.2", | ||
"gulp": "^4.0.0", | ||
"mocha": "^5.0.0" | ||
@@ -48,0 +57,0 @@ }, |
100
README.md
@@ -5,4 +5,5 @@ # i18next Parser [![Build Status](https://travis-ci.org/i18next/i18next-parser.svg?branch=master)](https://travis-ci.org/i18next/i18next-parser) | ||
When translating an application, maintaining the catalog by hand is painful. This package automate the process. Don't let the name fool you, it was originally built with i18next in mind but it works well with other i18n libraries. | ||
When translating an application, maintaining the translation catalog by hand is painful. This package parses your code and automates this process. | ||
If you want to make this process even less painful, I invite you to check [Locize](https://locize.com/). And if you use this package and like it, supporting me on [Patreon](https://www.patreon.com/karelledru) would mean a great deal! (disclaimer: Locize is supporting this project on Patreon). |  |
@@ -13,11 +14,10 @@ | ||
- Choose your weapon: A CLI, a standalone parser or a stream transform | ||
- Three built in lexers: Javascript, HTML and Handlebars | ||
- 4 built in lexers: Javascript, JSX, HTML and Handlebars | ||
- Creates one catalog file per locale and per namespace | ||
- Backs up the old keys your code doesn't use anymore in `namespace_old.json` catalog. | ||
- Restores keys from the `_old` file if the one in the translation file is empty. | ||
- Backs up the old keys your code doesn't use anymore in `namespace_old.json` catalog | ||
- Restores keys from the `_old` file if the one in the translation file is empty | ||
- Supports i18next features: | ||
- **Context**: keys of the form `key_context` | ||
- **Plural**: keys of the form `key_plural` and `key_plural_0` | ||
- Behind the hood, it's a stream transform (so it works with gulp) | ||
- Supports es6 template strings (in addition to single/double quoted strings) with ${expression} placeholders | ||
- Tested on Node 6+ | ||
@@ -53,3 +53,3 @@ ## DISCLAMER: `1.0.0-beta` | ||
yarn add -D i18next-parser@next | ||
npm install --save-dev i18next-parser | ||
npm install --save-dev i18next-parser@next | ||
``` | ||
@@ -60,11 +60,11 @@ | ||
```javascript | ||
const i18next = require('i18next-parser'); | ||
const i18nextParser = require('i18next-parser').gulp; | ||
gulp.task('i18next', function() { | ||
gulp.src('app/**') | ||
.pipe(i18next({ | ||
.pipe(new i18nextParser({ | ||
locales: ['en', 'de'], | ||
output: '../locales' | ||
output: 'locales' | ||
})) | ||
.pipe(gulp.dest('locales')); | ||
.pipe(gulp.dest('./')); | ||
}); | ||
@@ -75,27 +75,58 @@ ``` | ||
### Broccoli | ||
Save the package to your devDependencies: | ||
``` | ||
yarn add -D i18next-parser@next | ||
npm install --save-dev i18next-parser@next | ||
``` | ||
[Broccoli.js](https://github.com/broccolijs/broccoli) defines itself as a fast, reliable asset pipeline, supporting constant-time rebuilds and compact build definitions. | ||
```javascript | ||
const Funnel = require('broccoli-funnel') | ||
const i18nextParser = require('i18next-parser').broccoli; | ||
const appRoot = 'broccoli' | ||
let i18n = new Funnel(appRoot, { | ||
files: ['handlebars.hbs', 'javascript.js'], | ||
annotation: 'i18next-parser' | ||
}) | ||
i18n = new i18nextParser([i18n], { | ||
output: 'broccoli/locales' | ||
}) | ||
module.exports = i18n | ||
``` | ||
## Options | ||
Option | Description | Default | ||
Option | Description | Default | ||
---------------------- | ----------------------------------------------------- | --- | ||
**contextSeparator** | Key separator used in your translation keys | `_` | ||
**createOldLibraries** | Save the \_old files | `true` | ||
**defaultNamespace** | Default namespace used in your i18next config | `translation` | ||
**defaultValue** | Default value to give to empty keys | `''` | ||
**extension** | Edit the extension of the locale files | `.json` | ||
**filename** | Edit the filename of the locale files | `'$NAMESPACE'` | ||
**indentation** | Indentation of the catalog files | `2` | ||
**keepRemoved** | Keep keys from the catalog that are no longer in code | `false` | ||
**keySeparator** | Key separator used in your translation keys | `.` | ||
**lexers** | See below for details | `{}` | ||
**lineEnding** | Control the line ending. See options at [eol](https://github.com/ryanve/eol) | `auto` | ||
**locales** | An array of the locales in your applications | `['en','fr']` | ||
**namespaceSeparator** | Namespace separator used in your translation keys | `:` | ||
**output** | Where to write the locale files relative to the base | `locales` | ||
**reactNamespace** | For react file, extract the [defaultNamespace](https://react.i18next.com/components/translate-hoc.html) | `false` (`true` for `.jsx` files) | ||
**sort** | Whether or not to sort the catalog | `false` | ||
**contextSeparator**    | Context separator used in your translation keys       | `_` |  |
**createOldCatalogs** | Save the \_old files | `true` | ||
**defaultNamespace** | Default namespace used in your i18next config | `translation` | ||
**defaultValue** | Default value to give to empty keys | `''` | ||
**extension** <sup>1</sup>| Extension of the catalogs                              | `.json` |  |
**filename** <sup>1</sup>| Filename of the catalogs                              | `'$NAMESPACE'` |  |
**indentation** | Indentation of the catalog files | `2` | ||
**keepRemoved** | Keep keys from the catalog that are no longer in code | `false` | ||
**keySeparator** <sup>2</sup>| Key separator used in your translation keys           | `.` |  |
**lexers** | See below for details | `{}` | ||
**lineEnding** | Control the line ending. See options at [eol](https://github.com/ryanve/eol) | `auto` | ||
**locales** | An array of the locales in your applications | `['en','fr']` | ||
**namespaceSeparator** <sup>2</sup>| Namespace separator used in your translation keys     | `:` |  |
**output** | Where to write the locale files relative to the base | `locales` | ||
**reactNamespace** <sup>3</sup>| For react file, extract the [defaultNamespace](https://react.i18next.com/components/translate-hoc.html) | `false` |  |
**sort** | Whether or not to sort the catalog | `false` | ||
**verbose** | Display info about the parsing including some stats | `false` | ||
### Catalog filenames | ||
1. Both `filename` and `extension` options support injection of `$LOCALE` and `$NAMESPACE` variables. The file output is JSON by default; if you want YAML, the `extension` must end with `yml`. |  |
2. If you want to use plain English keys, separators such as `.` and `:` will conflict. You might want to set `keySeparator: false` and `namespaceSeparator: false`. That way, `t('Status: Loading...')` will not be interpreted as containing a namespace and three key separators, for instance. |  |
3. If the file being parsed has a `.jsx` extension, this option is ignored and the namespace is extracted. |  |
Both `filename` and `extension` options support injection of `$LOCALE` and `$NAMESPACE` variables. | ||
@@ -106,3 +137,3 @@ ### Lexers | ||
``` | ||
```js | ||
{ | ||
@@ -127,3 +158,3 @@ lexers: { | ||
``` | ||
```js | ||
{ | ||
@@ -137,3 +168,3 @@ lexers: { | ||
], | ||
... | ||
// ... | ||
} | ||
@@ -161,2 +192,3 @@ } | ||
**functions** | Array of functions to match | `['t']` | ||
**acorn** | Options to pass to acorn | `{}` | ||
@@ -189,4 +221,4 @@ **`JsxLexer` options** | ||
Any contribution is welcome. Please [read the guidelines](doc/development.md) first. | ||
Any contribution is welcome. Please [read the guidelines](docs/development.md) first. | ||
Thanks a lot to all the previous [contributors](https://github.com/i18next/i18next-parser/graphs/contributors). |
@@ -1,8 +0,14 @@ | ||
import _ from 'lodash' | ||
/**
 * Take an entry for the Parser and turn it into a hash,
 * turning the key path 'foo.bar' into an hash {foo: {bar: ""}}
 * The generated hash can be merged with an optional `target`.
 * @param {Object} entry - `{ key, defaultValue }` produced by the Parser.
 * @param {Object} [target={}] - Hash to merge into (mutated in place).
 * @param {Object} [options={}] - `{ separator, value }` overrides.
 * @returns An `{ target, duplicate, conflict }` object. `target` is the
 * hash that was passed as an argument or a new hash if none was passed.
 * `duplicate` indicates whether the entry already existed in the `target`
 * hash; `conflict` indicates it existed with a different value.
 */
function dotPathToHash(entry, target = {}, options = {}) {
  let path = entry.key
  const separator = options.separator || '.'
  const newValue = entry.defaultValue || options.value || ''

  // ignore a trailing separator so 'foo.' behaves like 'foo'
  if (path.endsWith(separator)) {
    path = path.slice(0, -separator.length)
  }

  const segments = path.split(separator)
  let inner = target
  for (let i = 0; i < segments.length - 1; i += 1) {
    const segment = segments[i]
    // empty segments (from duplicated separators like 'one..two') are skipped
    if (segment) {
      if (inner[segment] === undefined) {
        inner[segment] = {}
      }
      inner = inner[segment]
    }
  }

  const lastSegment = segments[segments.length - 1]
  const oldValue = inner[lastSegment]
  const duplicate = oldValue !== undefined
  const conflict = oldValue !== undefined && oldValue !== newValue
  inner[lastSegment] = newValue

  return { target, duplicate, conflict }
}
// Takes a `source` hash and make sure its value | ||
// are pasted in the `target` hash, if the target | ||
// hash has the corresponding key (or if keepRemoved is true). | ||
// If not, the value is added to an `old` hash. | ||
function mergeHashes(source, target = {}, old, keepRemoved = false) { | ||
old = old || {} | ||
Object.keys(source).forEach(key => { | ||
/** | ||
* Takes a `source` hash and makes sure its value | ||
* is pasted in the `target` hash, if the target | ||
* hash has the corresponding key (or if `keepRemoved` is true). | ||
* @returns An `{ old, new, mergeCount, pullCount, oldCount }` object. | ||
* `old` is a hash of values that have not been merged into `target`. | ||
* `new` is `target`. `mergeCount` is the number of keys merged into | ||
* `new`, `pullCount` is the number of context and plural keys added to | ||
* `new` and `oldCount` is the number of keys that were either added to `old` or | ||
* `new` (if `keepRemoved` is true and `target` didn't have the corresponding | ||
* key). | ||
*/ | ||
function mergeHashes(source, target, keepRemoved = false) { | ||
let old = {} | ||
let mergeCount = 0 | ||
let pullCount = 0 | ||
let oldCount = 0 | ||
for (const key in source) { | ||
const hasNestedEntries = | ||
@@ -46,64 +68,86 @@ typeof target[key] === 'object' && !Array.isArray(target[key]) | ||
) | ||
target[key] = nested.new | ||
old[key] = nested.old | ||
} | ||
else if (target[key] !== undefined) { | ||
if (typeof source[key] === 'string' || Array.isArray(source[key])) { | ||
target[key] = source[key] | ||
mergeCount += nested.mergeCount | ||
pullCount += nested.pullCount | ||
if (Object.keys(nested.old).length) { | ||
old[key] = nested.old | ||
oldCount += nested.oldCount | ||
} | ||
else { | ||
old[key] = source[key] | ||
} | ||
} | ||
else { | ||
// support for plural in keys | ||
const pluralMatch = /_plural(_\d+)?$/.test(key) | ||
const singularKey = key.replace(/_plural(_\d+)?$/, '') | ||
// support for context in keys | ||
const contextMatch = /_([^_]+)?$/.test(singularKey) | ||
const rawKey = singularKey.replace(/_([^_]+)?$/, '') | ||
if ( | ||
(contextMatch && target[rawKey] !== undefined) || | ||
(pluralMatch && target[singularKey] !== undefined) | ||
) { | ||
target[key] = source[key] | ||
if (target[key] !== undefined) { | ||
if ( | ||
typeof source[key] === 'string' || | ||
Array.isArray(source[key]) | ||
) { | ||
target[key] = source[key] | ||
mergeCount += 1 | ||
} else { | ||
old[key] = source[key] | ||
oldCount += 1 | ||
} | ||
} | ||
else if (keepRemoved) { | ||
target[key] = source[key] | ||
old[key] = source[key] | ||
} | ||
else { | ||
old[key] = source[key] | ||
// support for plural in keys | ||
const pluralRegex = /_plural(_\d+)?$/; | ||
const pluralMatch = pluralRegex.test(key) | ||
const singularKey = key.replace(pluralRegex, '') | ||
// support for context in keys | ||
const contextRegex = /_([^_]+)?$/; | ||
const contextMatch = contextRegex.test(singularKey) | ||
const rawKey = singularKey.replace(contextRegex, '') | ||
if ( | ||
(contextMatch && target[rawKey] !== undefined) || | ||
(pluralMatch && target[singularKey] !== undefined) | ||
) { | ||
target[key] = source[key] | ||
pullCount += 1 | ||
} else { | ||
if (keepRemoved) { | ||
target[key] = source[key] | ||
} else { | ||
old[key] = source[key] | ||
} | ||
oldCount += 1 | ||
} | ||
} | ||
} | ||
}) | ||
} | ||
return { old, new: target } | ||
return { old, new: target, mergeCount, pullCount, oldCount } | ||
} | ||
// Takes a `target` hash and replace its empty | ||
// values with the `source` hash ones if they | ||
// exist | ||
function populateHash(source, target = {}) { | ||
Object.keys(source).forEach(key => { | ||
if (target[key] !== undefined) { | ||
if (typeof source[key] === 'object') { | ||
target[key] = populateHash(source[key], target[key]) | ||
} | ||
else if (target[key] === '') { | ||
target[key] = source[key] | ||
} | ||
/**
 * Merge `source` into `target` by recursively merging nested dictionaries.
 * Leaf values (strings, arrays, etc.) from `source` overwrite the
 * corresponding `target` entries; `target` is mutated in place.
 * @param {Object} source - Hash to copy values from.
 * @param {Object} target - Hash that receives the values (mutated).
 */
function transferValues(source, target) {
  for (const key in source) {
    const sourceValue = source[key]
    const targetValue = target[key]
    // NOTE(review): `typeof null === 'object'`, so a null target value
    // would be recursed into — assumed not to occur in catalogs; confirm.
    if (
      typeof sourceValue === 'object' &&
      typeof targetValue === 'object' &&
      !Array.isArray(sourceValue)
    ) {
      transferValues(sourceValue, targetValue)
    }
    else {
      target[key] = sourceValue
    }
  }
}
return target | ||
/**
 * Error thrown by lexers when source content cannot be parsed
 * (see the JSX lexer's `nodeToString`/`parseAcornPayload`).
 * Carries a distinct `name` so consumers can tell it apart from
 * generic `Error` instances.
 */
class ParsingError extends Error {
  constructor(message) {
    super(message);
    this.name = 'ParsingError';
  }
}
export { | ||
dotPathToHash, | ||
mergeHashes, | ||
populateHash | ||
transferValues, | ||
ParsingError | ||
} |
224
src/index.js
@@ -1,220 +0,4 @@ | ||
import { dotPathToHash, mergeHashes, populateHash } from './helpers' | ||
import { Transform } from 'stream' | ||
import _ from 'lodash' | ||
import eol from 'eol' | ||
import fs from 'fs' | ||
import Parser from './parser' | ||
import path from 'path' | ||
import VirtualFile from 'vinyl' | ||
import YAML from 'yamljs' | ||
import BaseLexer from './lexers/base-lexer'; | ||
export default class i18nTransform extends Transform { | ||
constructor(options = {}) { | ||
options.objectMode = true | ||
super(options) | ||
this.defaults = { | ||
contextSeparator: '_', | ||
createOldLibraries: true, | ||
defaultNamespace: 'translation', | ||
defaultValue: '', | ||
extension: '.json', | ||
filename: '$NAMESPACE', | ||
indentation: 2, | ||
keepRemoved: false, | ||
keySeparator: '.', | ||
lexers: {}, | ||
lineEnding: 'auto', | ||
locales: ['en', 'fr'], | ||
namespaceSeparator: ':', | ||
output: 'locales', | ||
reactNamespace: false, | ||
sort: false | ||
} | ||
this.options = { ...this.defaults, ...options } | ||
this.entries = [] | ||
this.parser = new Parser(this.options) | ||
this.parser.on('error', error => this.emit('error', error)) | ||
this.parser.on('warning', warning => this.emit('warning', warning)) | ||
this.localeRegex = /\$LOCALE/g | ||
this.namespaceRegex = /\$NAMESPACE/g | ||
} | ||
_transform(file, encoding, done) { | ||
let content | ||
if (file.isBuffer()) { | ||
content = file.contents.toString('utf8') | ||
} | ||
else { | ||
content = fs.readFileSync(file.path, encoding) | ||
} | ||
this.emit('reading', file) | ||
const extension = path.extname(file.path).substring(1) | ||
const entries = this.parser.parse(content, extension) | ||
entries.forEach(entry => { | ||
let key = entry.key | ||
const parts = key.split(this.options.namespaceSeparator) | ||
if (parts.length > 1) { | ||
entry.namespace = parts.shift() | ||
} | ||
else if (extension === 'jsx' || this.options.reactNamespace) { | ||
entry.namespace = this.grabReactNamespace(content) | ||
} | ||
entry.namespace = entry.namespace || this.options.defaultNamespace | ||
key = parts.join(this.options.namespaceSeparator) | ||
key = key.replace(/\\('|"|`)/g, '$1') | ||
key = key.replace(/\\n/g, '\n') | ||
key = key.replace(/\\r/g, '\r') | ||
key = key.replace(/\\t/g, '\t') | ||
key = key.replace(/\\\\/g, '\\') | ||
entry.key = entry.namespace + this.options.keySeparator + key | ||
this.addEntry(entry) | ||
}) | ||
done() | ||
} | ||
_flush(done) { | ||
let catalog = {} | ||
if (this.options.sort) { | ||
this.entries = this.entries.sort((a, b) => a.key.localeCompare(b.key)) | ||
} | ||
this.entries.forEach(entry => { | ||
catalog = dotPathToHash( | ||
entry.key, | ||
this.options.keySeparator, | ||
entry.defaultValue || this.options.defaultValue, | ||
catalog | ||
) | ||
}) | ||
this.options.locales.forEach(locale => { | ||
const outputPath = path.resolve(this.options.output, locale) | ||
Object.keys(catalog).forEach(namespace => { | ||
let filename = this.options.filename | ||
filename = filename.replace(this.localeRegex, locale) | ||
filename = filename.replace(this.namespaceRegex, namespace) | ||
let extension = this.options.extension | ||
extension = extension.replace(this.localeRegex, locale) | ||
extension = extension.replace(this.namespaceRegex, namespace) | ||
const oldFilename = filename + '_old' + extension | ||
filename += extension | ||
const namespacePath = path.resolve(outputPath, filename) | ||
const namespaceOldPath = path.resolve(outputPath, oldFilename) | ||
let newCatalog | ||
let existingCatalog = this.getCatalog(namespacePath) | ||
let oldCatalog = this.getCatalog(namespaceOldPath) | ||
// merges existing translations with the new ones | ||
const { new: newKeys, old: oldKeys } = mergeHashes( | ||
existingCatalog, | ||
catalog[namespace], | ||
null, | ||
this.options.keepRemoved | ||
) | ||
// restore old translations if the key is empty | ||
newCatalog = populateHash(oldCatalog, newKeys) | ||
// add keys from the current catalog that are no longer used | ||
oldCatalog = _.extend(oldCatalog, oldKeys) | ||
// push files back to the stream | ||
this.pushFile(namespacePath, newCatalog) | ||
if (this.options.createOldLibraries) { | ||
this.pushFile(namespaceOldPath, oldCatalog) | ||
} | ||
}) | ||
}) | ||
done() | ||
} | ||
addEntry(entry) { | ||
let existing = this.entries.filter(x => x.key === entry.key)[0] | ||
if (!existing) { | ||
this.entries.push(entry) | ||
} | ||
else { | ||
existing = { ...existing, ...entry } | ||
} | ||
if (entry.context) { | ||
const contextEntry = Object.assign({}, entry) | ||
delete contextEntry.context | ||
contextEntry.key += this.options.contextSeparator + entry.context | ||
this.addEntry(contextEntry) | ||
} | ||
} | ||
getCatalog(path) { | ||
let content | ||
try { | ||
content = JSON.parse( fs.readFileSync( path ) ) | ||
} | ||
catch (error) { | ||
if (error.code !== 'ENOENT') { | ||
this.emit('error', error) | ||
} | ||
content = {} | ||
} | ||
return content | ||
} | ||
pushFile(path, contents) { | ||
let text | ||
if (path.endsWith('yml')) { | ||
text = YAML.stringify(contents, null, this.options.indentation) | ||
} | ||
else { | ||
text = JSON.stringify(contents, null, this.options.indentation) + '\n' | ||
} | ||
if (this.options.lineEnding === 'auto') { | ||
text = eol.auto(text) | ||
} | ||
else if (lineEnding === '\r\n' || lineEnding === 'crlf') { | ||
text = eol.crlf(text) | ||
} | ||
else if (lineEnding === '\r' || lineEnding === 'cr') { | ||
text = eol.cr(text) | ||
} | ||
else { | ||
// Defaults to LF, aka \n | ||
text = eol.lf(text) | ||
} | ||
const file = new VirtualFile({ | ||
path, | ||
contents: Buffer.from(text) | ||
}) | ||
this.push(file) | ||
} | ||
grabReactNamespace(content) { | ||
const reactTranslateRegex = new RegExp( | ||
'translate\\((?:\\s*\\[?\\s*)(' + BaseLexer.stringPattern + ')' | ||
) | ||
const translateMatches = content.match(reactTranslateRegex) | ||
if (translateMatches) { | ||
return translateMatches[1].slice(1, -1) | ||
} | ||
} | ||
} | ||
export { default as broccoli } from './broccoli' | ||
export { default as parser } from './parser' | ||
export { default as transform } from './transform' | ||
export { default as gulp } from './transform' |
@@ -10,23 +10,2 @@ import EventEmitter from 'events' | ||
populateKeysFromArguments(args) { | ||
const firstArgument = args.arguments[0] | ||
const secondArgument = args.arguments[1] | ||
const isKeyString = this.validateString(firstArgument) | ||
const isDefaultValueString = this.validateString(secondArgument) | ||
if (!isKeyString) { | ||
this.emit('warning', `Key is not a string litteral: ${firstArgument}`) | ||
} | ||
else { | ||
const result = { | ||
...args.options, | ||
key: firstArgument.slice(1, -1) | ||
} | ||
if (isDefaultValueString) { | ||
result.defaultValue = secondArgument.slice(1, -1) | ||
} | ||
this.keys.push(result) | ||
} | ||
} | ||
validateString(string) { | ||
@@ -33,0 +12,0 @@ const regex = new RegExp('^' + BaseLexer.stringPattern + '$', 'i') |
@@ -41,5 +41,26 @@ import BaseLexer from './base-lexer' | ||
populateKeysFromArguments(args) { | ||
const firstArgument = args.arguments[0] | ||
const secondArgument = args.arguments[1] | ||
const isKeyString = this.validateString(firstArgument) | ||
const isDefaultValueString = this.validateString(secondArgument) | ||
if (!isKeyString) { | ||
this.emit('warning', `Key is not a string literal: ${firstArgument}`) | ||
} | ||
else { | ||
const result = { | ||
...args.options, | ||
key: firstArgument.slice(1, -1) | ||
} | ||
if (isDefaultValueString) { | ||
result.defaultValue = secondArgument.slice(1, -1) | ||
} | ||
this.keys.push(result) | ||
} | ||
} | ||
createFunctionRegex() { | ||
const functionPattern = this.functionPattern() | ||
const curlyPattern = '(?:{{)' + functionPattern + '\\s+(.*)(?:}})' | ||
const curlyPattern = '(?:{{)' + functionPattern + '\\s+(.*?)(?:}})' | ||
const parenthesisPattern = '(?:\\()' + functionPattern + '\\s+(.*)(?:\\))' | ||
@@ -46,0 +67,0 @@ const pattern = curlyPattern + '|' + parenthesisPattern |
import BaseLexer from './base-lexer' | ||
import cheerio from 'cheerio' | ||
@@ -9,21 +10,22 @@ export default class HTMLLexer extends BaseLexer { | ||
this.optionAttr = options.optionAttr || 'data-i18n-options' | ||
this.createAttributeRegex() | ||
this.createOptionAttributeRegex() | ||
} | ||
// TODO rewrite to support the BaseLexer.extract() | ||
extract(content) { | ||
let matches | ||
const regex = new RegExp( | ||
'<([A-Z][A-Z0-9]*)([^>]*\\s' + this.attr + '[^>]*)>(?:((?:\\s|.)*?)<\\/\\1>)?', | ||
'gi' | ||
) | ||
const that = this | ||
const $ = cheerio.load(content) | ||
$(`[${that.attr}]`).each((index, node) => { | ||
const $node = cheerio.load(node) | ||
while (matches = regex.exec(content)) { | ||
const attrs = this.parseAttributes(matches[2]) | ||
// the attribute can hold multiple keys | ||
const keys = node.attribs[that.attr].split(';') | ||
let options = node.attribs[that.optionAttr] | ||
// the attribute can hold multiple keys | ||
const keys = attrs.keys.split(';') | ||
keys.forEach(key => { | ||
if (options) { | ||
try { | ||
options = JSON.parse(options) | ||
} | ||
finally {} | ||
} | ||
for (let key of keys) { | ||
// remove any leading [] in the key | ||
@@ -33,44 +35,12 @@ key = key.replace(/^\[[a-zA-Z0-9_-]*\]/, '') | ||
// if empty grab innerHTML from regex | ||
key = key || matches[3] | ||
key = key || $node.text() | ||
if (key) { | ||
this.keys.push({ ...attrs.options, key }) | ||
this.keys.push({ ...options, key }) | ||
} | ||
}) | ||
} | ||
} | ||
}) | ||
return this.keys | ||
} | ||
createAttributeRegex() { | ||
const pattern = '(?:' + this.attr + ')(?:\\s*=\\s*(' + BaseLexer.stringPattern + ')|$|\\s)' | ||
this.attrRegex = new RegExp(pattern, 'i') | ||
return this.attrRegex | ||
} | ||
createOptionAttributeRegex() { | ||
const pattern = '(?:' + this.optionAttr + ')(?:\\s*=\\s*(' + BaseLexer.stringPattern + '))?' | ||
this.optionAttrRegex = new RegExp(pattern, 'i') | ||
return this.optionAttrRegex | ||
} | ||
parseAttributes(args) { | ||
const result = { keys: '', options: {} } | ||
this.attrRegex.lastIndex = 0 | ||
let keysMatch = this.attrRegex.exec(args) | ||
if (keysMatch && keysMatch[1]) { | ||
result.keys = keysMatch[1].slice(1, -1) | ||
} | ||
this.optionAttrRegex.lastIndex = 0 | ||
const optionsMatch = this.optionAttrRegex.exec(args) | ||
if (optionsMatch && optionsMatch[1]) { | ||
try { | ||
result.options = JSON.parse(optionsMatch[1].slice(1, -1)) | ||
} | ||
finally {} | ||
} | ||
return result | ||
} | ||
} |
@@ -0,1 +1,3 @@ | ||
import * as acorn from 'acorn-jsx' | ||
import * as walk from 'acorn/dist/walk' | ||
import BaseLexer from './base-lexer' | ||
@@ -7,16 +9,18 @@ | ||
this.acornOptions = { sourceType: 'module', ...options.acorn } | ||
this.functions = options.functions || ['t'] | ||
this.createFunctionRegex() | ||
this.createArgumentsRegex() | ||
this.createHashRegex() | ||
this.attr = options.attr || 'i18nKey' | ||
} | ||
extract(content) { | ||
let matches | ||
const that = this | ||
while (matches = this.functionRegex.exec(content)) { | ||
const args = this.parseArguments(matches[1] || matches[2]) | ||
this.populateKeysFromArguments(args) | ||
} | ||
walk.simple( | ||
acorn.parse(content, this.acornOptions), | ||
{ | ||
CallExpression(node) { | ||
that.expressionExtractor.call(that, node) | ||
} | ||
} | ||
) | ||
@@ -26,132 +30,75 @@ return this.keys | ||
parseArguments(args) { | ||
let matches | ||
const result = { | ||
arguments: [], | ||
options: {} | ||
} | ||
while (matches = this.argumentsRegex.exec(args)) { | ||
let arg = matches[1] | ||
expressionExtractor(node) { | ||
const entry = {} | ||
const isTranslationFunction = ( | ||
node.callee && ( | ||
this.functions.includes(node.callee.name) || | ||
node.callee.property && this.functions.includes(node.callee.property.name) | ||
) | ||
) | ||
if (isTranslationFunction) { | ||
const keyArgument = node.arguments.shift() | ||
if (arg.startsWith('{')) { | ||
let optionMatches | ||
while (optionMatches = this.hashRegex.exec(args)) { | ||
const key = optionMatches[2] | ||
let value = optionMatches[3] | ||
if (this.validateString(value)) { | ||
result.options[key] = value.slice(1, -1) | ||
} | ||
if (keyArgument && keyArgument.type === 'Literal') { | ||
entry.key = keyArgument.value | ||
} | ||
else if (keyArgument && keyArgument.type === 'BinaryExpression') { | ||
const concatenatedString = this.concatenateString(keyArgument) | ||
if (!concatenatedString) { | ||
this.emit('warning', `Key is not a string literal: ${keyArgument.name}`) | ||
return | ||
} | ||
entry.key = concatenatedString | ||
} | ||
else { | ||
arg = this.concatenateString(arg) | ||
if (keyArgument.type === 'Identifier') { | ||
this.emit('warning', `Key is not a string literal: ${keyArgument.name}`) | ||
} | ||
return | ||
} | ||
result.arguments.push(arg) | ||
} | ||
return result | ||
} | ||
concatenateString(string) { | ||
string = string.trim() | ||
let matches | ||
let containsVariable = false | ||
const parts = [] | ||
const quotationMark = string.charAt(0) === '"' ? '"' : "'" | ||
const regex = new RegExp(JavascriptLexer.concatenatedSegmentPattern, 'gi') | ||
while(matches = regex.exec(string)) { | ||
const match = matches[0].trim() | ||
if (match !== '+') { | ||
parts.push(match) | ||
} | ||
} | ||
const optionsArgument = node.arguments.shift() | ||
const result = parts.reduce((concatenatedString, x) => { | ||
x = x && x.trim() | ||
if (this.validateString(x)) { | ||
concatenatedString += x.slice(1, -1) | ||
if (optionsArgument && optionsArgument.type === 'Literal') { | ||
entry.defaultValue = optionsArgument.value | ||
} | ||
else { | ||
containsVariable = true | ||
else if (optionsArgument && optionsArgument.type === 'ObjectExpression') { | ||
for (const p of optionsArgument.properties) { | ||
entry[p.key.name || p.key.value] = p.value.value | ||
} | ||
} | ||
return concatenatedString | ||
}, '') | ||
if (!result || containsVariable) { | ||
return string | ||
this.keys.push(entry) | ||
} | ||
else { | ||
return quotationMark + result + quotationMark | ||
} | ||
} | ||
static get concatenatedSegmentPattern() { | ||
return [ | ||
BaseLexer.singleQuotePattern, | ||
BaseLexer.doubleQuotePattern, | ||
BaseLexer.backQuotePattern, | ||
BaseLexer.variablePattern, | ||
'(?:\\s*\\+\\s*)' // support for concatenation via + | ||
].join('|') | ||
} | ||
concatenateString(binaryExpression, string = '') { | ||
if (binaryExpression.operator !== '+') { | ||
return | ||
} | ||
static get concatenatedArgumentPattern() { | ||
return '(' + '(?:' + JavascriptLexer.concatenatedSegmentPattern + ')+' + ')' | ||
} | ||
if (binaryExpression.left.type === 'BinaryExpression') { | ||
string += this.concatenateString(binaryExpression.left, string) | ||
} | ||
else if (binaryExpression.left.type === 'Literal') { | ||
string += binaryExpression.left.value | ||
} | ||
else { | ||
return | ||
} | ||
static get hashPattern() { | ||
return '(\\{[^}]*\\})' | ||
} | ||
if (binaryExpression.right.type === 'BinaryExpression') { | ||
string += this.concatenateString(binaryExpression.right, string) | ||
} | ||
else if (binaryExpression.right.type === 'Literal') { | ||
string += binaryExpression.right.value | ||
} | ||
else { | ||
return | ||
} | ||
static get stringOrVariableOrHashPattern() { | ||
return ( | ||
'(' + | ||
'(' + | ||
'(?:' + | ||
[ | ||
JavascriptLexer.concatenatedArgumentPattern, | ||
JavascriptLexer.hashPattern | ||
].join('|') + | ||
')' + | ||
'(?:\\s*,\\s*)?' + | ||
')+' + | ||
')' | ||
) | ||
return string | ||
} | ||
createFunctionRegex() { | ||
const pattern = ( | ||
'(?:\\W|^)' + | ||
this.functionPattern() + '\\s*\\(\\s*' + | ||
JavascriptLexer.stringOrVariableOrHashPattern + | ||
'\\s*\\)' | ||
) | ||
this.functionRegex = new RegExp(pattern, 'gi') | ||
return this.functionRegex | ||
} | ||
createArgumentsRegex() { | ||
const pattern = ( | ||
'(' + | ||
[ | ||
JavascriptLexer.concatenatedArgumentPattern, | ||
JavascriptLexer.hashPattern | ||
].join('|') + | ||
')' + | ||
'(?:\\s*,\\s*)?' | ||
) | ||
this.argumentsRegex = new RegExp(pattern, 'gi') | ||
return this.argumentsRegex | ||
} | ||
createHashRegex() { | ||
const pattern = ( | ||
'(?:(\'|")?(' + | ||
['context', 'defaultValue'].join('|') + | ||
')\\1)' + | ||
'(?:\\s*:\\s*)' + | ||
'(' + BaseLexer.stringPattern + ')' | ||
) | ||
this.hashRegex = new RegExp(pattern, 'gi') | ||
return this.hashRegex | ||
} | ||
} |
@@ -1,51 +0,160 @@ | ||
import HTMLLexer from './html-lexer' | ||
import * as acorn from 'acorn-jsx' | ||
import * as walk from 'acorn/dist/walk' | ||
import JavascriptLexer from './javascript-lexer' | ||
export default class JsxLexer extends HTMLLexer { | ||
// Walker visitor table extending acorn's base walker with the JSX node
// types emitted by acorn-jsx; without these handlers `walk.simple` throws
// on unknown node types.
const JSXParserExtension = Object.assign({}, walk.base, {
  JSXText(node, st, c) {
    // Leaf node: nothing to descend into; the handler exists only so the
    // walker recognizes the node type.
  },
  JSXEmptyExpression(node, st, c) {
    // Leaf node: same as JSXText — present only to satisfy the walker.
  },
  JSXElement(node, st, c) {
    // Descend into the opening element's attributes, then each child.
    node.openingElement.attributes.forEach(attr => c(attr, st, attr.type))
    node.children.forEach(child => c(child, st, child.type))
  },
  JSXExpressionContainer(node, st, c) {
    // Descend into the wrapped expression ({expr} in JSX).
    c(node.expression, st, node.expression.type)
  },
  JSXAttribute(node, st, c) {
    // Bare attributes (e.g. <Trans count />) have a null value; skip them.
    if (node.value !== null) {
      c(node.value, st, node.value.type)
    }
  },
  JSXSpreadAttribute(node, st, c) {
    // Descend into the spread argument ({...props}).
    c(node.argument, st, node.argument.type)
  }
})
export default class JsxLexer extends JavascriptLexer { | ||
constructor(options = {}) { | ||
options.attr = options.attr || 'i18nKey' | ||
super(options) | ||
this.acornOptions = { sourceType: 'module', plugins: { jsx: true }, ...options.acorn } | ||
} | ||
extract(content) { | ||
this.extractInterpolate(content) | ||
this.extractTrans(content) | ||
const that = this | ||
walk.simple( | ||
acorn.parse(content, this.acornOptions), | ||
{ | ||
CallExpression(node) { | ||
that.expressionExtractor.call(that, node) | ||
}, | ||
JSXElement(node) { | ||
const element = node.openingElement | ||
if (element.name.name === "Trans") { | ||
const entry = {} | ||
const defaultValue = that.nodeToString.call(that, node, content) | ||
for (const attr of element.attributes) { | ||
if (attr.name.name === that.attr) { | ||
entry.key = attr.value.value | ||
} | ||
} | ||
if (defaultValue !== '') { | ||
entry.defaultValue = defaultValue | ||
if (!entry.key) | ||
entry.key = entry.defaultValue | ||
} | ||
if (entry.key) | ||
that.keys.push(entry) | ||
} | ||
else if (element.name.name === "Interpolate") { | ||
const entry = {} | ||
for (const attr of element.attributes) { | ||
if (attr.name.name === that.attr) { | ||
entry.key = attr.value.value | ||
} | ||
} | ||
if (entry.key) | ||
that.keys.push(entry) | ||
} | ||
} | ||
}, | ||
JSXParserExtension | ||
) | ||
return this.keys | ||
} | ||
extractInterpolate(content) { | ||
let matches | ||
const regex = new RegExp( | ||
'<Interpolate([^>]*\\s' + this.attr + '[^>]*)\\/?>', | ||
'gi' | ||
) | ||
nodeToString(ast, string) { | ||
const children = this.parseAcornPayload(ast.children, string) | ||
while (matches = regex.exec(content)) { | ||
const attrs = this.parseAttributes(matches[1]) | ||
const key = attrs.keys | ||
if (key) { | ||
this.keys.push({ ...attrs.options, key }) | ||
const elemsToString = children => children.map((child, index) => { | ||
switch(child.type) { | ||
case 'text': return child.content | ||
case 'js': return `<${index}>${child.content}</${index}>` | ||
case 'tag': return `<${index}>${elemsToString(child.children)}</${index}>` | ||
default: throw new ParsingError('Unknown parsed content: ' + child.type) | ||
} | ||
} | ||
}).join('') | ||
return this.keys | ||
return elemsToString(children) | ||
} | ||
extractTrans(content) { | ||
let matches | ||
const regex = new RegExp( | ||
'<Trans([^>]*\\s' + this.attr + '[^>]*)>(?:((?:\\s|.)*?)<\\/Trans>)?', | ||
'gi' | ||
) | ||
parseAcornPayload(children, originalString) { | ||
return children.map(child => { | ||
if (child.type === 'JSXText') { | ||
return { | ||
type: 'text', | ||
content: child.value.replace(/^(?:\s*(\n|\r)\s*)?(.*)(?:\s*(\n|\r)\s*)?$/, '$2') | ||
} | ||
} | ||
else if (child.type === 'JSXElement') { | ||
return { | ||
type: 'tag', | ||
children: this.parseAcornPayload(child.children, originalString) | ||
} | ||
} | ||
else if (child.type === 'JSXExpressionContainer') { | ||
// strip empty expressions | ||
if (child.expression.type === 'JSXEmptyExpression') | ||
return { | ||
type: 'text', | ||
content: '' | ||
} | ||
while (matches = regex.exec(content)) { | ||
const attrs = this.parseAttributes(matches[1]) | ||
const key = attrs.keys | ||
if (key) { | ||
attrs.options.defaultValue = attrs.options.defaultValue || matches[2].trim() | ||
this.keys.push({ ...attrs.options, key }) | ||
// strip properties from ObjectExpressions | ||
// annoying (and who knows how many other exceptions we'll need to write) but necessary | ||
else if (child.expression.type === 'ObjectExpression') { | ||
// i18next-react only accepts two props, any random single prop, and a format prop | ||
// for our purposes, format prop is always ignored | ||
let nonFormatProperties = child.expression.properties.filter(prop => prop.key.name !== 'format') | ||
// more than one property throw a warning in i18next-react, but still works as a key | ||
if (nonFormatProperties.length > 1) { | ||
this.emit('warning', `The passed in object contained more than one variable - the object should look like {{ value, format }} where format is optional.`) | ||
return { | ||
type: 'text', | ||
content: '' | ||
} | ||
} | ||
return { | ||
type: 'js', | ||
content: `{{${nonFormatProperties[0].key.name}}}` | ||
} | ||
} | ||
// slice on the expression so that we ignore comments around it | ||
return { | ||
type: 'js', | ||
content: `{${originalString.slice(child.expression.start, child.expression.end)}}` | ||
} | ||
} | ||
} | ||
return this.keys | ||
else { | ||
throw new ParsingError('Unknown ast element when parsing jsx: ' + child.type) | ||
} | ||
}).filter(child => child.type !== 'text' || child.content) | ||
} | ||
} |
@@ -16,3 +16,3 @@ import EventEmitter from 'events' | ||
js: ['JavascriptLexer'], | ||
jsx: ['JavascriptLexer', 'JsxLexer'], | ||
jsx: ['JsxLexer'], | ||
mjs: ['JavascriptLexer'], | ||
@@ -34,2 +34,7 @@ | ||
this.options = options | ||
if (options.reactNamespace) { | ||
lexers.js = lexers.jsx | ||
} | ||
this.lexers = { ...lexers, ...options.lexers } | ||
@@ -42,3 +47,3 @@ } | ||
lexers.forEach(lexerConfig => { | ||
for (const lexerConfig of lexers) { | ||
let lexerName | ||
@@ -53,3 +58,2 @@ let lexerOptions | ||
lexerName = lexerConfig.lexer | ||
delete lexerConfig.lexer | ||
lexerOptions = lexerConfig | ||
@@ -65,3 +69,3 @@ } | ||
keys = keys.concat(Lexer.extract(content)) | ||
}) | ||
} | ||
@@ -68,0 +72,0 @@ return keys |
import { assert } from 'chai' | ||
import { dotPathToHash } from '../../src/helpers' | ||
describe('dotPathToHash helper function', function () { | ||
it('creates an object from a string path', function (done) { | ||
const res = dotPathToHash('one') | ||
assert.deepEqual(res, { one: '' }) | ||
describe('dotPathToHash helper function', () => { | ||
it('creates an object from a string path', (done) => { | ||
const { target, duplicate } = dotPathToHash({ key: 'one' }) | ||
assert.deepEqual(target, { one: '' }) | ||
assert.equal(duplicate, false) | ||
done() | ||
}) | ||
it('ignores trailing separator', function (done) { | ||
const res = dotPathToHash('one..', '..') | ||
assert.deepEqual(res, { one: '' }) | ||
it('ignores trailing separator', (done) => { | ||
const { target } = dotPathToHash( | ||
{ key: 'one.' }, | ||
{}, | ||
{ separator: '.' } | ||
) | ||
assert.deepEqual(target, { one: '' }) | ||
done() | ||
}) | ||
it('ignores duplicated separator', function (done) { | ||
const res = dotPathToHash('one..two', '..') | ||
assert.deepEqual(res, { one: { two: '' } }) | ||
it('ignores duplicated separator', (done) => { | ||
const { target } = dotPathToHash( | ||
{ key: 'one..two' } | ||
) | ||
assert.deepEqual(target, { one: { two: '' } }) | ||
done() | ||
}) | ||
it('use provided default value', function (done) { | ||
const res = dotPathToHash('one', null, 'myDefaultValue') | ||
assert.deepEqual(res, { one: 'myDefaultValue' }) | ||
it('handles a target hash', (done) => { | ||
const { target, duplicate } = dotPathToHash( | ||
{ key: 'one.two.three' }, | ||
{ one: { twenty: '' } } | ||
) | ||
assert.deepEqual(target, { one: { two: { three: '' }, twenty: '' } }) | ||
assert.equal(duplicate, false) | ||
done() | ||
}) | ||
it('use provided default value', function (done) { | ||
const res = dotPathToHash('one', null, 'myDefaultValue') | ||
assert.deepEqual(res, { one: 'myDefaultValue' }) | ||
it('handles a `defaultValue` option', (done) => { | ||
const { target } = dotPathToHash( | ||
{ key: 'one' }, | ||
{}, | ||
{ value: 'myDefaultValue' } | ||
) | ||
assert.deepEqual(target, { one: 'myDefaultValue' }) | ||
done() | ||
}) | ||
it('handles a target hash', function (done) { | ||
const res = dotPathToHash('one.two.three', '.', '', { one: { twenty: '' } }) | ||
assert.deepEqual(res, { one: { two: { three: '' }, twenty: '' } }) | ||
it('handles a `separator` option', (done) => { | ||
const { target } = dotPathToHash( | ||
{ key: 'one_two_three.' }, | ||
{}, | ||
{ separator: '_' } | ||
) | ||
assert.deepEqual(target, { one: { two: { 'three.': '' } } }) | ||
done() | ||
}) | ||
it('handles a different separator', function (done) { | ||
const res = dotPathToHash('one_two_three.', '_') | ||
assert.deepEqual(res, { one: { two: { 'three.': '' } } }) | ||
it('detects duplicate keys with the same value', (done) => { | ||
const { target, duplicate, conflict } = dotPathToHash( | ||
{ key: 'one.two.three' }, | ||
{ one: { two: { three: '' } } }, | ||
) | ||
assert.deepEqual(target, { one: { two: { three: '' } } }) | ||
assert.equal(duplicate, true) | ||
assert.equal(conflict, false) | ||
done() | ||
}) | ||
it('detects and overwrites duplicate keys with different values', (done) => { | ||
const { target, duplicate, conflict } = dotPathToHash( | ||
{ key: 'one.two.three', defaultValue: 'new' }, | ||
{ one: { two: { three: 'old' } } }, | ||
) | ||
assert.deepEqual(target, { one: { two: { three: 'new' } } }) | ||
assert.equal(duplicate, true) | ||
assert.equal(conflict, true) | ||
done() | ||
}) | ||
}) |
import { assert } from 'chai' | ||
import { mergeHashes } from '../../src/helpers' | ||
describe('mergeHashes helper function', function () { | ||
it('replaces empty `target` keys with `source`', function (done) { | ||
describe('mergeHashes helper function', () => { | ||
it('replaces empty `target` keys with `source`', (done) => { | ||
const source = { key1: 'value1' } | ||
@@ -12,6 +12,9 @@ const target = { key1: '' } | ||
assert.deepEqual(res.old, {}) | ||
assert.strictEqual(res.mergeCount, 1) | ||
assert.strictEqual(res.pullCount, 0) | ||
assert.strictEqual(res.oldCount, 0) | ||
done() | ||
}) | ||
it('does not replaces empty `target` keys with `source` if it is a hash', function (done) { | ||
it('does not replace empty `target` keys with `source` if it is a hash', (done) => { | ||
const source = { key1: { key11: 'value1'} } | ||
@@ -23,6 +26,9 @@ const target = { key1: '' } | ||
assert.deepEqual(res.old, { key1: { key11: 'value1' } }) | ||
assert.strictEqual(res.mergeCount, 0) | ||
assert.strictEqual(res.pullCount, 0) | ||
assert.strictEqual(res.oldCount, 1) | ||
done() | ||
}) | ||
}) | ||
it('keeps `target` keys not in `source`', function (done) { | ||
it('keeps `target` keys not in `source`', (done) => { | ||
const source = { key1: 'value1' } | ||
@@ -34,6 +40,9 @@ const target = { key1: '', key2: '' } | ||
assert.deepEqual(res.old, {}) | ||
assert.strictEqual(res.mergeCount, 1) | ||
assert.strictEqual(res.pullCount, 0) | ||
assert.strictEqual(res.oldCount, 0) | ||
done() | ||
}) | ||
it('stores into `old` the keys from `source` that are not in `target`', function (done) { | ||
it('stores into `old` the keys from `source` that are not in `target`', (done) => { | ||
const source = { key1: 'value1', key2: 'value2' } | ||
@@ -45,16 +54,22 @@ const target = { key1: '' } | ||
assert.deepEqual(res.old, { key2: 'value2' }) | ||
assert.strictEqual(res.mergeCount, 1) | ||
assert.strictEqual(res.pullCount, 0) | ||
assert.strictEqual(res.oldCount, 1) | ||
done() | ||
}) | ||
it('copies `source` keys to `target` regardless of presence when keepRemoved is enabled', function (done) { | ||
it('copies `source` keys to `target` regardless of presence when `keepRemoved` is enabled', (done) => { | ||
const source = { key1: 'value1', key2: 'value2' } | ||
const target = { key1: '', key3: '' } | ||
const res = mergeHashes(source, target, null, true) | ||
const res = mergeHashes(source, target, true) | ||
assert.deepEqual(res.new, { key1: 'value1', key2: 'value2', key3: '' }) | ||
assert.deepEqual(res.old, { key2: 'value2' }) | ||
assert.deepEqual(res.old, { }) | ||
assert.strictEqual(res.mergeCount, 1) | ||
assert.strictEqual(res.pullCount, 0) | ||
assert.strictEqual(res.oldCount, 1) | ||
done() | ||
}) | ||
it('restores plural keys when the singular one exists', function (done) { | ||
it('restores plural keys when the singular one exists', (done) => { | ||
const source = { key1: '', key1_plural: 'value1' } | ||
@@ -66,6 +81,9 @@ const target = { key1: '' } | ||
assert.deepEqual(res.old, {}) | ||
assert.strictEqual(res.mergeCount, 1) | ||
assert.strictEqual(res.pullCount, 1) | ||
assert.strictEqual(res.oldCount, 0) | ||
done() | ||
}) | ||
it('does not restores plural keys when the singular one does not', function (done) { | ||
it('does not restore plural keys when the singular one does not', (done) => { | ||
const source = { key1: '', key1_plural: 'value1' } | ||
@@ -77,6 +95,9 @@ const target = { key2: '' } | ||
assert.deepEqual(res.old, { key1: '', key1_plural: 'value1' }) | ||
assert.strictEqual(res.mergeCount, 0) | ||
assert.strictEqual(res.pullCount, 0) | ||
assert.strictEqual(res.oldCount, 2) | ||
done() | ||
}) | ||
it('restores context keys when the singular one exists', function (done) { | ||
it('restores context keys when the singular one exists', (done) => { | ||
const source = { key1: '', key1_context: 'value1' } | ||
@@ -88,6 +109,9 @@ const target = { key1: '' } | ||
assert.deepEqual(res.old, {}) | ||
assert.strictEqual(res.mergeCount, 1) | ||
assert.strictEqual(res.pullCount, 1) | ||
assert.strictEqual(res.oldCount, 0) | ||
done() | ||
}) | ||
it('does not restores context keys when the singular one does not', function (done) { | ||
it('does not restore context keys when the singular one does not', (done) => { | ||
const source = { key1: '', key1_context: 'value1' } | ||
@@ -99,6 +123,9 @@ const target = { key2: '' } | ||
assert.deepEqual(res.old, { key1: '', key1_context: 'value1' }) | ||
assert.strictEqual(res.mergeCount, 0) | ||
assert.strictEqual(res.pullCount, 0) | ||
assert.strictEqual(res.oldCount, 2) | ||
done() | ||
}) | ||
it('works with deep objects', function (done) { | ||
it('works with deep objects', (done) => { | ||
const source = { | ||
@@ -113,2 +140,5 @@ key1: 'value1', | ||
key23: 'value23' | ||
}, | ||
key4: { | ||
key41: 'value41' | ||
} | ||
@@ -126,3 +156,6 @@ } | ||
}, | ||
key3: '' | ||
key3: '', | ||
key4: { | ||
key41: 'value41' | ||
} | ||
} | ||
@@ -142,3 +175,6 @@ | ||
}, | ||
key3: '' | ||
key3: '', | ||
key4: { | ||
key41: 'value41' | ||
} | ||
} | ||
@@ -157,6 +193,9 @@ | ||
assert.deepEqual(res.old, expected_old) | ||
assert.strictEqual(res.mergeCount, 4) | ||
assert.strictEqual(res.pullCount, 0) | ||
assert.strictEqual(res.oldCount, 2) | ||
done() | ||
}) | ||
it('leaves arrays of values (multiline) untouched', function (done) { | ||
it('leaves arrays of values (multiline) untouched', (done) => { | ||
const source = { key1: ['Line one.', 'Line two.'] } | ||
@@ -167,4 +206,8 @@ const target = { key1: '' } | ||
assert.deepEqual(res.new, { key1: ['Line one.', 'Line two.'] }) | ||
assert.deepEqual(res.old, {}) | ||
assert.strictEqual(res.mergeCount, 1) | ||
assert.strictEqual(res.pullCount, 0) | ||
assert.strictEqual(res.oldCount, 0) | ||
done() | ||
}) | ||
}) |
@@ -12,2 +12,9 @@ import { assert } from 'chai' | ||
it('extracts multiple keys on a single line', (done) => { | ||
const Lexer = new HandlebarsLexer() | ||
const content = '<p>{{t "first"}} {{t "second"}}</p>' | ||
assert.deepEqual(Lexer.extract(content), [{ key: 'first' }, { key: 'second' }]) | ||
done() | ||
}) | ||
it('extracts the second argument as defaultValue', (done) => { | ||
@@ -14,0 +21,0 @@ const Lexer = new HandlebarsLexer() |
@@ -81,49 +81,2 @@ import { assert } from 'chai' | ||
}) | ||
describe('parseAttributes()', () => { | ||
it('extracts attribute value from string', (done) => { | ||
const Lexer = new HTMLLexer() | ||
assert.deepEqual( | ||
Lexer.parseAttributes('title="" bla data-i18n="key1"', 'data-i18n'), | ||
{ | ||
keys: 'key1', | ||
options: {} | ||
} | ||
) | ||
done() | ||
}) | ||
it('extracts json strings too', (done) => { | ||
const Lexer = new HTMLLexer() | ||
assert.deepEqual( | ||
Lexer.parseAttributes( | ||
'data-i18n="key1;key2" data-i18n-options=\'{"defaultValue": "bla"}\'', | ||
'data-i18n-options' | ||
), | ||
{ | ||
keys: 'key1;key2', | ||
options: { | ||
defaultValue: 'bla' | ||
} | ||
} | ||
) | ||
done() | ||
}) | ||
it('supports multiline', (done) => { | ||
const Lexer = new HTMLLexer() | ||
assert.deepEqual( | ||
Lexer.parseAttributes( | ||
'title=""\n bla\n data-i18n="first"\n data-i18n-options=\'{"defaultValue": "bar"}\'' | ||
), | ||
{ | ||
keys: 'first', | ||
options: { | ||
defaultValue: 'bar' | ||
} | ||
} | ||
) | ||
done() | ||
}) | ||
}) | ||
}) |
@@ -14,3 +14,3 @@ import { assert } from 'chai' | ||
const Lexer = new JavascriptLexer() | ||
const content = 'i18n.t("first" "bla")' | ||
const content = 'i18n.t("first", "bla")' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
@@ -31,2 +31,11 @@ { key: 'first', defaultValue: 'bla' } | ||
it('extracts the defaultValue/context on multiple lines', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
const content = 'i18n.t("first", {\ndefaultValue: "foo",\n context: \'bar\'})' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first', defaultValue: 'foo', context: 'bar' } | ||
]) | ||
done() | ||
}) | ||
it('extracts the defaultValue/context options with quotation marks', (done) => { | ||
@@ -41,2 +50,11 @@ const Lexer = new JavascriptLexer() | ||
it('extracts the defaultValue/context options with interpolated value', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
const content = 'i18n.t("first", {context: "foo", "defaultValue": \'{{var}} bla\'})' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first', defaultValue: '{{var}} bla', context: 'foo' } | ||
]) | ||
done() | ||
}) | ||
it('supports multiline and concatenation', (done) => { | ||
@@ -58,4 +76,4 @@ const Lexer = new JavascriptLexer() | ||
const Lexer = new JavascriptLexer() | ||
const js = "import './yolo.js' t('first')" | ||
assert.deepEqual(Lexer.extract(js), [{ key: 'first' }]) | ||
const js = "ttt('first')" | ||
assert.deepEqual(Lexer.extract(js), []) | ||
done() | ||
@@ -73,58 +91,2 @@ }) | ||
}) | ||
describe('concatenateString()', () => { | ||
it('concatenates strings', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
assert.equal(Lexer.concatenateString('"foo" + \'bar\''), '"foobar"') | ||
done() | ||
}) | ||
it('returns the original string if it contains variables', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
assert.equal(Lexer.concatenateString('"foo" + bar'), '"foo" + bar') | ||
done() | ||
}) | ||
it('returns the original string if it contains backquote string', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
assert.equal(Lexer.concatenateString('"foo" + `bar`'), '"foo" + `bar`') | ||
done() | ||
}) | ||
}) | ||
describe('parseArguments()', () => { | ||
it('matches string arguments', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
const args = '"first", "bla"' | ||
assert.deepEqual(Lexer.parseArguments(args), { | ||
arguments: ['"first"', '"bla"'], | ||
options: {} | ||
}) | ||
done() | ||
}) | ||
it('matches variable arguments', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
const args = 'first bla' | ||
assert.deepEqual(Lexer.parseArguments(args), { | ||
arguments: ['first', 'bla'], | ||
options: {} | ||
}) | ||
done() | ||
}) | ||
it('matches concatenated arguments and concatenate when possible', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
const args = "'first' + asd, 'bla' + 'asd', foo+bar+baz" | ||
assert.deepEqual(Lexer.parseArguments(args), { | ||
arguments: [ | ||
"'first' + asd", | ||
"'blaasd'", // string got concatenated! | ||
'foo+bar+baz' | ||
], | ||
options: {} | ||
}) | ||
done() | ||
}) | ||
}) | ||
}) |
@@ -5,7 +5,7 @@ import { assert } from 'chai' | ||
describe('JsxLexer', () => { | ||
describe('extractInterpolate', () => { | ||
describe('<Interpolate>', () => { | ||
it('extracts keys from i18nKey attributes', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Interpolate i18nKey="first" />' | ||
assert.deepEqual(Lexer.extractInterpolate(content), [ | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first' } | ||
@@ -17,7 +17,7 @@ ]) | ||
describe('Trans', () => { | ||
it('extracts keys from i18nKey attributes', (done) => { | ||
describe('<Trans>', () => { | ||
it('extracts keys from i18nKey attributes from closing tags', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans i18nKey="first" count={count}>Yo</Trans>' | ||
assert.deepEqual(Lexer.extractTrans(content), [ | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first', defaultValue: 'Yo' } | ||
@@ -27,3 +27,83 @@ ]) | ||
}) | ||
it('extracts keys from user-defined key attributes from closing tags', (done) => { | ||
const Lexer = new JsxLexer({ attr: "myIntlKey" }) | ||
const content = '<Trans myIntlKey="first" count={count}>Yo</Trans>' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first', defaultValue: 'Yo' } | ||
]) | ||
done() | ||
}) | ||
it('extracts keys from i18nKey attributes from self-closing tags', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans i18nKey="first" count={count} />' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first' } | ||
]) | ||
done() | ||
}) | ||
it('extracts keys from user-defined key attributes from self-closing tags', (done) => { | ||
const Lexer = new JsxLexer({ attr: "myIntlKey" }) | ||
const content = '<Trans myIntlKey="first" count={count} />' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first' } | ||
]) | ||
done() | ||
}) | ||
it('extracts keys from Trans elements without an i18nKey', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans count={count}>Yo</Trans>' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'Yo', defaultValue: 'Yo' } | ||
]) | ||
done() | ||
}) | ||
it('extracts keys from Trans elements and ignores values of expressions and spaces', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans count={count}>{{ key: property }}</Trans>' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: '<0>{{key}}</0>', defaultValue: '<0>{{key}}</0>' } | ||
]) | ||
done() | ||
}) | ||
it('invalid interpolation gets stripped', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans count={count}>before{{ key1, key2 }}after</Trans>' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'beforeafter', defaultValue: 'beforeafter' } | ||
]) | ||
done() | ||
}) | ||
it('doesn\'t add a blank key for self-closing or empty tags', (done) => { | ||
const Lexer = new JsxLexer() | ||
const emptyTag = '<Trans count={count}></Trans>' | ||
assert.deepEqual(Lexer.extract(emptyTag), []) | ||
const selfClosing = '<Trans count={count}/>' | ||
assert.deepEqual(Lexer.extract(selfClosing), []) | ||
done() | ||
}) | ||
it('erases tags from content', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans>a<b test={"</b>"}>c<c>z</c></b>{d}<br stuff={y}/></Trans>' | ||
assert.equal(Lexer.extract(content)[0].defaultValue, 'a<1>c<1>z</1></1><2>{d}</2><3></3>') | ||
done() | ||
}) | ||
it('erases comment expressions', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans>{/* some comment */}Some Content</Trans>' | ||
assert.equal(Lexer.extract(content)[0].defaultValue, 'Some Content') | ||
done() | ||
}) | ||
}) | ||
}) |
{ | ||
"first": "first", | ||
"second": "" | ||
"second": "", | ||
"third": "third" | ||
} |
import { assert } from 'chai' | ||
import Vinyl from 'vinyl' | ||
import fs from 'fs' | ||
import i18nTransform from '../src/index' | ||
import i18nTransform from '../src/transform' | ||
import path from 'path' | ||
@@ -10,3 +10,3 @@ | ||
describe('parser', () => { | ||
it('parses globally on multiple lines', done => { | ||
it('parses globally on multiple lines', (done) => { | ||
let result | ||
@@ -16,3 +16,3 @@ const i18nextParser = new i18nTransform() | ||
contents: Buffer.from( | ||
"asd t('first') t('second') \n asd t('third') ad t('fourth')" | ||
"t('first'); t('second') \n t('third'); t('fourth');" | ||
), | ||
@@ -33,3 +33,3 @@ path: 'file.js' | ||
it('parses multiline function calls', done => { | ||
it('parses multiline function calls', (done) => { | ||
let result | ||
@@ -39,3 +39,3 @@ const i18nextParser = new i18nTransform() | ||
contents: Buffer.from( | ||
"asd t(\n 'first'\n) t('second') \n asd t(\n\n'third')" | ||
"t(\n 'first'\n)\n t('second'); t(\n\n'third')" | ||
), | ||
@@ -58,7 +58,7 @@ path: 'file.js' | ||
it('creates context keys', done => { | ||
it('creates context keys', (done) => { | ||
let result | ||
const i18nextParser = new i18nTransform() | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('first', {context: 'female'})"), | ||
contents: Buffer.from("t('first', {context: 'female'})"), | ||
path: 'file.js' | ||
@@ -83,3 +83,3 @@ }) | ||
it('parses html files', done => { | ||
it('parses html files', (done) => { | ||
let result | ||
@@ -99,3 +99,4 @@ const i18nextParser = new i18nTransform() | ||
fifth: 'bar', | ||
sixth: '' | ||
sixth: '', | ||
selfClosing: '' | ||
} | ||
@@ -115,3 +116,3 @@ | ||
it('parses handlebars files', done => { | ||
it('parses handlebars files', (done) => { | ||
let result | ||
@@ -152,3 +153,3 @@ const i18nextParser = new i18nTransform() | ||
it('parses javascript files', done => { | ||
it('parses javascript files', (done) => { | ||
let result | ||
@@ -165,3 +166,3 @@ const i18nextParser = new i18nTransform() | ||
second: 'defaultValue', | ||
third: 'defaultValue', | ||
third: '{{var}} defaultValue', | ||
fourth: '' | ||
@@ -183,3 +184,3 @@ } | ||
it('parses react files', done => { | ||
it('parses react files', (done) => { | ||
let result | ||
@@ -196,4 +197,13 @@ const i18nextParser = new i18nTransform() | ||
second: '', | ||
third: 'Hello <strong title={t(\'fourth\')}>{{name}}</strong>, you have {{count}} unread message. <Link to="/msgs">Go to messages</Link>.', | ||
fourth: '' | ||
third: { | ||
first: 'Hello <1><0>{{name}}</0></1>, you have <3>{{count}}</3> unread message. <5>Go to messages</5>.', | ||
second: ' <1>Hello,</1> this shouldn\'t be trimmed.', | ||
third: '<0>Hello,</0>this should be trimmed.<2> and this shoudln\'t</2>' | ||
}, | ||
fourth: '', | ||
fifth: '', | ||
bar: '', | ||
foo: '', | ||
"This should be part of the value and the key": "This should be part of the value and the key", | ||
"don't split <1>{{on}}</1>": "don't split <1>{{on}}</1>" | ||
} | ||
@@ -215,3 +225,3 @@ | ||
it('creates two files per namespace and per locale', done => { | ||
it('creates one file per namespace and per locale', (done) => { | ||
let results = [] | ||
@@ -224,3 +234,3 @@ const i18nextParser = new i18nTransform({ | ||
contents: Buffer.from( | ||
"asd t('ns1:first') t('second') \n asd t('ns2:third') ad t('fourth')" | ||
"t('ns1:first'); t('second') \n t('ns2:third'); t('fourth')" | ||
), | ||
@@ -236,26 +246,17 @@ path: 'file.js' | ||
'en/default.json', | ||
'en/default_old.json', | ||
'en/ns1.json', | ||
'en/ns1_old.json', | ||
'en/ns2.json', | ||
'en/ns2_old.json', | ||
'de/default.json', | ||
'de/default_old.json', | ||
'de/ns1.json', | ||
'de/ns1_old.json', | ||
'de/ns2.json', | ||
'de/ns2_old.json', | ||
'fr/default.json', | ||
'fr/default_old.json', | ||
'fr/ns1.json', | ||
'fr/ns1_old.json', | ||
'fr/ns2.json', | ||
'fr/ns2_old.json' | ||
'fr/ns2.json' | ||
] | ||
let length = expectedFiles.length | ||
expectedFiles.forEach(filename => { | ||
for (const filename of expectedFiles) { | ||
assert.include(results, path.normalize(filename)) | ||
if (!--length) done() | ||
}) | ||
} | ||
}) | ||
@@ -266,3 +267,3 @@ | ||
it('handles escaped single and double quotes', done => { | ||
it('handles escaped single and double quotes', (done) => { | ||
let result | ||
@@ -272,3 +273,3 @@ const i18nextParser = new i18nTransform() | ||
contents: Buffer.from( | ||
'asd t(\'escaped \\\'single quotes\\\'\') t("escaped \\"double quotes\\"")' | ||
't(\'escaped \\\'single quotes\\\'\'); t("escaped \\"double quotes\\"")' | ||
), | ||
@@ -293,3 +294,3 @@ path: 'file.js' | ||
it('handles escaped characters', done => { | ||
it('handles escaped characters', (done) => { | ||
let result | ||
@@ -299,3 +300,3 @@ const i18nextParser = new i18nTransform() | ||
contents: Buffer.from( | ||
"asd t('escaped backslash\\\\ newline\\n\\r tab\\t')" | ||
"t('escaped backslash\\\\ newline\\n\\r tab\\t')" | ||
), | ||
@@ -319,7 +320,7 @@ path: 'file.js' | ||
it('returns buffers', done => { | ||
it('returns buffers', (done) => { | ||
const i18nextParser = new i18nTransform() | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from( | ||
"asd t('first') t('second') \n asd t('third') ad t('fourth')" | ||
"t('first')" | ||
), | ||
@@ -337,7 +338,7 @@ path: 'file.js' | ||
it('retrieves values in existing catalog', done => { | ||
let result | ||
it('retrieves values in existing catalog and creates old catalog', (done) => { | ||
let result, resultOld | ||
const i18nextParser = new i18nTransform({ output: 'test/locales' }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('test_merge:first') t('test_merge:second')"), | ||
contents: Buffer.from("t('test_merge:first'); t('test_merge:second')"), | ||
path: 'file.js' | ||
@@ -350,5 +351,9 @@ }) | ||
} | ||
else if (file.relative.endsWith(path.normalize('en/test_merge_old.json'))) { | ||
resultOld = JSON.parse(file.contents) | ||
} | ||
}) | ||
i18nextParser.once('end', () => { | ||
assert.deepEqual(result, { first: 'first', second: '' }) | ||
assert.deepEqual(resultOld, { third: 'third' }) | ||
done() | ||
@@ -360,3 +365,3 @@ }) | ||
it('does not leak values between locales', done => { | ||
it('does not leak values between locales', (done) => { | ||
let resultEN | ||
@@ -366,3 +371,3 @@ let resultFR | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('test_leak:first') t('test_leak:second')"), | ||
contents: Buffer.from("t('test_leak:first'); t('test_leak:second')"), | ||
path: 'file.js' | ||
@@ -388,7 +393,7 @@ }) | ||
it('retrieves context values in existing catalog', done => { | ||
it('retrieves context values in existing catalog', (done) => { | ||
let result | ||
const i18nextParser = new i18nTransform({ output: 'test/locales' }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('test_context:first')"), | ||
contents: Buffer.from("t('test_context:first')"), | ||
path: 'file.js' | ||
@@ -416,3 +421,59 @@ }) | ||
it('retrieves plural values in existing catalog', done => { | ||
it('saves unused translations in the old catalog', (done) => { | ||
const i18nextParser = new i18nTransform({ output: 'test/locales' }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("t('test_old:parent.third', 'third'), t('test_old:fourth', 'fourth')"), | ||
path: 'file.js' | ||
}) | ||
const expectedResult = { parent: { third: 'third' }, fourth: 'fourth' } | ||
const expectedResultOld = { parent: { first: 'first', some: 'some' }, second: 'second', other: 'other' } | ||
let result, resultOld; | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith(path.normalize('en/test_old.json'))) { | ||
result = JSON.parse(file.contents) | ||
} | ||
else if (file.relative.endsWith(path.normalize('en/test_old_old.json'))) { | ||
resultOld = JSON.parse(file.contents) | ||
} | ||
}) | ||
i18nextParser.once('end', () => { | ||
assert.deepEqual(result, expectedResult) | ||
assert.deepEqual(resultOld, expectedResultOld) | ||
done() | ||
}) | ||
i18nextParser.end(fakeFile) | ||
}) | ||
it('restores translations from the old catalog', (done) => { | ||
const i18nextParser = new i18nTransform({ output: 'test/locales' }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("t('test_old:parent.some', 'random'), t('test_old:other', 'random')"), | ||
path: 'file.js' | ||
}) | ||
const expectedResult = { parent: { some: 'some' }, other: 'other' } | ||
const expectedResultOld = { parent: { first: 'first' }, second: 'second' } | ||
let result, resultOld; | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith(path.normalize('en/test_old.json'))) { | ||
result = JSON.parse(file.contents) | ||
} | ||
else if (file.relative.endsWith(path.normalize('en/test_old_old.json'))) { | ||
resultOld = JSON.parse(file.contents) | ||
} | ||
}) | ||
i18nextParser.once('end', () => { | ||
assert.deepEqual(result, expectedResult) | ||
assert.deepEqual(resultOld, expectedResultOld) | ||
done() | ||
}) | ||
i18nextParser.end(fakeFile) | ||
}) | ||
it('retrieves plural values in existing catalog', (done) => { | ||
let result | ||
@@ -422,3 +483,3 @@ const i18nextParser = new i18nTransform({ output: 'test/locales' }) | ||
contents: Buffer.from( | ||
"asd t('test_plural:first') t('test_plural:second')" | ||
"t('test_plural:first'); t('test_plural:second')" | ||
), | ||
@@ -449,7 +510,7 @@ path: 'file.js' | ||
it('retrieves plural and context values in existing catalog', done => { | ||
it('retrieves plural and context values in existing catalog', (done) => { | ||
let result | ||
const i18nextParser = new i18nTransform({ output: 'test/locales' }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('test_context_plural:first')"), | ||
contents: Buffer.from("t('test_context_plural:first')"), | ||
path: 'file.js' | ||
@@ -478,4 +539,4 @@ }) | ||
describe('options', () => { | ||
it('handles filename and extension with $LOCALE and $NAMESPACE var', done => { | ||
let results = [] | ||
it('handles filename and extension with $LOCALE and $NAMESPACE var', (done) => { | ||
let result | ||
const i18nextParser = new i18nTransform({ | ||
@@ -488,3 +549,3 @@ locales: ['en'], | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('fourth')"), | ||
contents: Buffer.from("t('fourth')"), | ||
path: 'file.js' | ||
@@ -494,15 +555,7 @@ }) | ||
i18nextParser.on('data', file => { | ||
results.push(file.relative.replace(/locales[\\\/]/, '')) | ||
result = file.relative.replace(/locales[\\\/]/, '') | ||
}) | ||
i18nextParser.on('end', () => { | ||
const expectedFiles = [ | ||
'en/p-en-default.en.i18n', | ||
'en/p-en-default_old.en.i18n' | ||
] | ||
let length = expectedFiles.length | ||
expectedFiles.forEach(filename => { | ||
assert.include(results, path.normalize(filename)) | ||
if (!--length) done() | ||
}) | ||
assert.strictEqual(result, path.normalize('en/p-en-default.en.i18n')) | ||
done() | ||
}) | ||
@@ -513,3 +566,3 @@ | ||
it('handles custom namespace and key separators', done => { | ||
it('handles custom namespace and key separators', (done) => { | ||
let result | ||
@@ -522,3 +575,3 @@ const i18nextParser = new i18nTransform({ | ||
contents: Buffer.from( | ||
"asd t('test_separators?first') t('test_separators?second-third')" | ||
"t('test_separators?first'); t('test_separators?second-third')" | ||
), | ||
@@ -541,9 +594,33 @@ path: 'file.js' | ||
it('supports a defaultValue', done => { | ||
it('handles disabling namespace and key separators', (done) => { | ||
let result | ||
const i18nextParser = new i18nTransform({ | ||
namespaceSeparator: false, | ||
keySeparator: false | ||
}) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("t('Status: loading...')"), | ||
path: 'file.js' | ||
}) | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
} | ||
}) | ||
i18nextParser.once('end', () => { | ||
assert.deepEqual(result, { 'Status: loading...': '' }) | ||
done() | ||
}) | ||
i18nextParser.end(fakeFile) | ||
}) | ||
it('supports a defaultValue', (done) => { | ||
let result | ||
const i18nextParser = new i18nTransform({ | ||
defaultValue: 'NOT_TRANSLATED' | ||
}) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('first')"), | ||
contents: Buffer.from("t('first')"), | ||
path: 'file.js' | ||
@@ -565,9 +642,96 @@ }) | ||
it('supports outputing to yml', done => { | ||
it('supports a lineEnding', (done) => { | ||
let result | ||
const i18nextParser = new i18nTransform({ | ||
lineEnding: '\r\n' | ||
}) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("t('first')"), | ||
path: 'file.js' | ||
}) | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = file.contents.toString() | ||
} | ||
}) | ||
i18nextParser.once('end', () => { | ||
assert.equal(result, '{\r\n "first": ""\r\n}\r\n') | ||
done() | ||
}) | ||
i18nextParser.end(fakeFile) | ||
}) | ||
it('supports a lineEnding', (done) => { | ||
let result | ||
const i18nextParser = new i18nTransform({ | ||
lineEnding: '\r\n' | ||
}) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("t('first')"), | ||
path: 'file.js' | ||
}) | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = file.contents.toString() | ||
} | ||
}) | ||
i18nextParser.once('end', () => { | ||
assert.equal(result, '{\r\n "first": ""\r\n}\r\n') | ||
done() | ||
}) | ||
i18nextParser.end(fakeFile) | ||
}) | ||
it('parses Trans if reactNamespace is true', (done) => { | ||
let result | ||
const i18nextParser = new i18nTransform({ | ||
reactNamespace: true | ||
}) | ||
const fakeFile = new Vinyl({ | ||
contents: fs.readFileSync( | ||
path.resolve(__dirname, 'templating/react.jsx') | ||
), | ||
path: 'react.js' | ||
}) | ||
const expected = { | ||
first: '', | ||
second: '', | ||
third: { | ||
first: 'Hello <1><0>{{name}}</0></1>, you have <3>{{count}}</3> unread message. <5>Go to messages</5>.', | ||
second: ' <1>Hello,</1> this shouldn\'t be trimmed.', | ||
third: '<0>Hello,</0>this should be trimmed.<2> and this shoudln\'t</2>' | ||
}, | ||
fourth: '', | ||
fifth: '', | ||
bar: '', | ||
foo: '', | ||
"This should be part of the value and the key": "This should be part of the value and the key", | ||
"don't split <1>{{on}}</1>": "don't split <1>{{on}}</1>" | ||
} | ||
i18nextParser.on('data', file => { | ||
// support for a default Namespace | ||
if (file.relative.endsWith(path.normalize('en/react.json'))) { | ||
result = JSON.parse(file.contents) | ||
} | ||
}) | ||
i18nextParser.on('end', () => { | ||
assert.deepEqual(result, expected) | ||
done() | ||
}) | ||
i18nextParser.end(fakeFile) | ||
}) | ||
it('supports outputing to yml', (done) => { | ||
let result | ||
const i18nextParser = new i18nTransform({ | ||
extension: '.yml' | ||
}) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('first')"), | ||
contents: Buffer.from("t('first')"), | ||
path: 'file.js' | ||
@@ -589,3 +753,3 @@ }) | ||
it('supports an indentation option', done => { | ||
it('supports an indentation option', (done) => { | ||
let result | ||
@@ -596,3 +760,3 @@ const i18nextParser = new i18nTransform({ | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('first')"), | ||
contents: Buffer.from("t('first')"), | ||
path: 'file.js' | ||
@@ -614,12 +778,11 @@ }) | ||
it('handles skipping the old catalog with createOldLibraries=false', done => { | ||
it('handles skipping the old catalog with createOldCatalogs=false', (done) => { | ||
let results = [] | ||
const i18nextParser = new i18nTransform({ | ||
locales: ['en', 'de', 'fr'], | ||
defaultNamespace: 'default', | ||
createOldLibraries: false | ||
createOldCatalogs: false | ||
}) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from( | ||
"asd t('ns1:first') t('second') \n asd t('ns2:third') ad t('fourth')" | ||
"t('ns1:first'); t('second') \n t('fourth')" | ||
), | ||
@@ -636,16 +799,12 @@ path: 'file.js' | ||
'en/ns1.json', | ||
'en/ns2.json', | ||
'de/default.json', | ||
'de/ns1.json', | ||
'de/ns2.json', | ||
'fr/default.json', | ||
'fr/ns1.json', | ||
'fr/ns2.json' | ||
'fr/ns1.json' | ||
] | ||
let length = expectedFiles.length | ||
expectedFiles.forEach(filename => { | ||
assert.equal(results.length, expectedFiles.length) | ||
for (const filename of expectedFiles) { | ||
assert.include(results, path.normalize(filename)) | ||
if (!--length) done() | ||
}) | ||
} | ||
}) | ||
@@ -657,3 +816,3 @@ | ||
describe('lexers', () => { | ||
it('support custom lexers options', done => { | ||
it('support custom lexers options', (done) => { | ||
let result | ||
@@ -671,3 +830,3 @@ const i18nextParser = new i18nTransform({ | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd bla('first') _e('second')"), | ||
contents: Buffer.from("bla('first'); _e('second')"), | ||
path: 'file.js' | ||
@@ -691,3 +850,3 @@ }) | ||
describe('sort', () => { | ||
it('does not sort by default', done => { | ||
it('does not sort by default', (done) => { | ||
let result | ||
@@ -697,3 +856,3 @@ const i18nextParser = new i18nTransform() | ||
contents: Buffer.from( | ||
"asd t('ccc') t('aaa') t('bbb.bbb') t('bbb.aaa')" | ||
"t('ccc'); t('aaa'); t('bbb.bbb'); t('bbb.aaa')" | ||
), | ||
@@ -717,3 +876,3 @@ path: 'file.js' | ||
it('supports sort as an option', done => { | ||
it('supports sort as an option', (done) => { | ||
let result | ||
@@ -725,3 +884,3 @@ const i18nextParser = new i18nTransform({ | ||
contents: Buffer.from( | ||
"asd t('ccc') t('aaa') t('bbb.bbb') t('bbb.aaa')" | ||
"t('ccc'); t('aaa'); t('bbb.bbb'); t('bbb.aaa')" | ||
), | ||
@@ -748,3 +907,3 @@ path: 'file.js' | ||
describe('events', () => { | ||
it('emits a `reading` event', done => { | ||
it('emits a `reading` event', (done) => { | ||
let result | ||
@@ -767,3 +926,3 @@ const i18nextParser = new i18nTransform() | ||
it('emits a `error` event if the catalog is not valid json', done => { | ||
it('emits a `error` event if the catalog is not valid json', (done) => { | ||
const i18nextParser = new i18nTransform({ output: 'test/locales' }) | ||
@@ -782,3 +941,3 @@ const fakeFile = new Vinyl({ | ||
it('emits an `error` if a lexer does not exist', done => { | ||
it('emits an `error` if a lexer does not exist', (done) => { | ||
const results = [] | ||
@@ -798,3 +957,3 @@ const i18nextParser = new i18nTransform({ lexers: { js: ['fakeLexer'] } }) | ||
it('emits a `warning` event if a key contains a variable', done => { | ||
it('emits a `warning` event if a key contains a variable', (done) => { | ||
const i18nextParser = new i18nTransform({ output: 'test/locales' }) | ||
@@ -807,3 +966,3 @@ const fakeFile = new Vinyl({ | ||
i18nextParser.on('warning', message => { | ||
assert.equal(message, 'Key is not a string litteral: variable') | ||
assert.equal(message, 'Key is not a string literal: variable') | ||
done() | ||
@@ -813,3 +972,17 @@ }) | ||
}) | ||
it('emits a `warning` event if a react value contains two variables', (done) => { | ||
const i18nextParser = new i18nTransform({ output: 'test/locales' }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from('<Trans>{{ key1, key2 }}</Trans>'), | ||
path: 'file.js' | ||
}) | ||
i18nextParser.on('warning', message => { | ||
assert.equal(message, 'The passed in object contained more than one variable - the object should look like {{ value, format }} where format is optional.') | ||
done() | ||
}) | ||
i18nextParser.end(fakeFile) | ||
}) | ||
}) | ||
}) |
@@ -0,9 +1,17 @@ | ||
import bla from 'bla'; | ||
notRelated() | ||
i18n.t('first') | ||
i18n.t('second', 'defaultValue') | ||
i18n.t('third', {defaultValue: 'defaultValue'}) | ||
i18n.t('third', { | ||
defaultValue: '{{var}} defaultValue' | ||
}) | ||
i18n.t( | ||
'fou' + | ||
'rth' | ||
'rt' + | ||
'h' | ||
) | ||
i18n.t('not picked' + variable, {foo: bar}, 'bla' + 'asd', {}, foo+bar+baz ) | ||
if (true) { | ||
i18n.t('not picked' + variable, {foo: bar}, 'bla' + 'asd', {}, foo+bar+baz ) | ||
} | ||
i18n.t(variable, {foo: bar}, 'bla' + 'asd', {}, foo+bar+baz ) |
import React from 'react' | ||
import { translate } from 'react-i18next' | ||
import { translate, Trans, Interpolate } from 'react-i18next' | ||
const bar = () => ( | ||
<div> | ||
<span><Trans i18nKey="bar"></Trans></span> | ||
</div> | ||
); | ||
const foo = () => ( | ||
<div> | ||
<span><Trans i18nKey="foo" /></span> | ||
</div> | ||
); | ||
class Test extends React.Component { | ||
@@ -11,5 +23,19 @@ render () { | ||
<Interpolate i18nKey="second" value="some thing" component={interpolateComponent} /> | ||
<Trans i18nKey="third" count={count}> | ||
Hello <strong title={t('fourth')}>{{name}}</strong>, you have {{count}} unread message. <Link to="/msgs">Go to messages</Link>. | ||
<Trans i18nKey="third.first" count={count}> | ||
Hello <strong title={t('fourth')}>{{name}}</strong>, you have {{count}} unread message. <Link to="/msgs">Go to messages</Link>. | ||
</Trans> | ||
<span><Trans i18nKey="fifth" count={count} /></span> | ||
<Trans i18nKey="third.second"> <b>Hello,</b> this shouldn't be trimmed.</Trans> | ||
<Trans i18nKey="third.third"> | ||
<b>Hello,</b> | ||
this should be trimmed. | ||
<i> and this shoudln't</i> | ||
</Trans> | ||
<Trans> | ||
This should be part of the value and the key | ||
{/* this shouldn't */} | ||
</Trans> | ||
<Trans> | ||
don't split {{ on: this }} | ||
</Trans> | ||
</div> | ||
@@ -16,0 +42,0 @@ ) |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
452567
75
3657
215
15
11
7
+ Addedacorn@^5.5.3
+ Addedacorn-jsx@^4.1.1
+ Addedbroccoli-plugin@^1.3.0
+ Addedcheerio@^1.0.0-rc.2
+ Addedfs-extra@^6.0.1
+ Addedrsvp@^4.8.2
+ Addedacorn@5.7.4(transitive)
+ Addedacorn-jsx@4.1.1(transitive)
+ Addedboolbase@1.0.0(transitive)
+ Addedbroccoli-plugin@1.3.1(transitive)
+ Addedcall-bound@1.0.3(transitive)
+ Addedcheerio@1.0.0(transitive)
+ Addedcheerio-select@2.1.0(transitive)
+ Addedcss-select@5.1.0(transitive)
+ Addedcss-what@6.1.0(transitive)
+ Addeddom-serializer@2.0.0(transitive)
+ Addeddomelementtype@2.3.0(transitive)
+ Addeddomhandler@5.0.3(transitive)
+ Addeddomutils@3.1.0(transitive)
+ Addedencoding-sniffer@0.2.0(transitive)
+ Addedentities@4.5.0(transitive)
+ Addedfs-extra@6.0.1(transitive)
+ Addedhtmlparser2@9.1.0(transitive)
+ Addediconv-lite@0.6.3(transitive)
+ Addedjsonfile@4.0.0(transitive)
+ Addedmktemp@0.4.0(transitive)
+ Addednth-check@2.1.1(transitive)
+ Addedobject.assign@4.1.7(transitive)
+ Addedparse5@7.2.1(transitive)
+ Addedparse5-htmlparser2-tree-adapter@7.1.0(transitive)
+ Addedparse5-parser-stream@7.1.2(transitive)
+ Addedpromise-map-series@0.2.3(transitive)
+ Addedquick-temp@0.1.8(transitive)
+ Addedrimraf@2.7.1(transitive)
+ Addedrsvp@3.6.24.8.5(transitive)
+ Addedsafer-buffer@2.1.2(transitive)
+ Addedsprintf-js@1.1.3(transitive)
+ Addedsymlink-or-copy@1.3.1(transitive)
+ Addedunderscore.string@3.3.6(transitive)
+ Addedundici@6.21.0(transitive)
+ Addeduniversalify@0.1.2(transitive)
+ Addedwhatwg-encoding@3.1.1(transitive)
+ Addedwhatwg-mimetype@4.0.0(transitive)
- Removedlodash@~4.17.4
- Removedlodash@4.17.21(transitive)
- Removedobject.assign@4.1.5(transitive)