i18next-parser - npm Package Compare versions

Comparing version 1.0.5 to 1.0.6

.prettierignore


CHANGELOG.md
# Changelog
# 1.0.5 - latest
# 1.0.6 - latest
- Add support for `customValueTemplate` #211
- Add Prettier
# 1.0.5
- Add support for the `skipDefaultValues` option #216

@@ -6,0 +11,0 @@
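For orientation, here is a minimal sketch of how the two options called out in the changelog above are passed to the parser, modeled on the gulp example that appears further down in this diff. The globs, locales, and the `description` property are illustrative assumptions, not part of the release itself.

```js
const gulp = require('gulp')
const i18nextParser = require('i18next-parser')

gulp.task('i18next', function () {
  return gulp
    .src(['src/**/*.js'])
    .pipe(
      new i18nextParser({
        locales: ['en'],
        output: 'locales/$LOCALE/$NAMESPACE.json',
        // added in 1.0.5 (#216): set to true to write empty strings
        // instead of the default values found in the source
        skipDefaultValues: false,
        // added in 1.0.6 (#211): write each key as an object built
        // from this template instead of a plain string
        customValueTemplate: {
          message: '${defaultValue}',
          description: '${description}',
        },
      })
    )
    .pipe(gulp.dest('./'))
})
```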


dist/broccoli.js

@@ -24,3 +24,4 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _colors = require('colors');var _colors2 = _interopRequireDefault(_colors);

_vinylFs2.default.src(_this2.inputPaths.map(function (x) {return x + '/**/*.{js,hbs}';})).
_vinylFs2.default.
src(_this2.inputPaths.map(function (x) {return x + '/**/*.{js,hbs}';})).
pipe((0, _gulpSort2.default)()).

@@ -36,5 +37,3 @@ pipe(

on('data', function (file) {
files.push(
_fsExtra2.default.outputFile(file.path, file.contents));
files.push(_fsExtra2.default.outputFile(file.path, file.contents));
if (!this.options.silent) {

@@ -41,0 +40,0 @@ console.log(' [write] '.green + file.path);

@@ -22,4 +22,12 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) {return typeof obj;} : function (obj) {return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;}; /**

var newValue = entry.defaultValue || options.value || '';
if (options.skipDefaultValues) {
newValue = '';
}
if (options.useKeysAsDefaultValue) {
newValue = entry.key.substring(entry.key.indexOf(separator) + separator.length, entry.key.length);
newValue = entry.key.substring(
entry.key.indexOf(separator) + separator.length,
entry.key.length);
}

@@ -53,4 +61,19 @@

var duplicate = oldValue !== undefined || conflict !== false;
inner[lastSegment] = newValue;
if (options.customValueTemplate) {
inner[lastSegment] = {};
var entries = Object.entries(options.customValueTemplate);
entries.forEach(function (valueEntry) {
if (valueEntry[1] === '${defaultValue}') {
inner[lastSegment][valueEntry[0]] = newValue;
} else {
inner[lastSegment][valueEntry[0]] =
entry[valueEntry[1].replace(/\${(\w+)}/, '$1')] || '';
}
});
} else {
inner[lastSegment] = newValue;
}
return { target: target, duplicate: duplicate, conflict: conflict };

@@ -93,9 +116,5 @@ }

}
} else
{
} else {
if (target[key] !== undefined) {
if (
typeof source[key] === 'string' ||
Array.isArray(source[key]))
{
if (typeof source[key] === 'string' || Array.isArray(source[key])) {
target[key] = source[key];

@@ -107,4 +126,3 @@ mergeCount += 1;

}
} else
{
} else {
// support for plural in keys

@@ -154,4 +172,3 @@ var pluralRegex = /(_plural)|(_\d+)$/;

transferValues(sourceValue, targetValue);
} else
{
} else {
target[key] = sourceValue;

@@ -162,5 +179,2 @@ }

dotPathToHash = dotPathToHash;exports.
mergeHashes = mergeHashes;exports.
transferValues = transferValues;
dotPathToHash = dotPathToHash;exports.mergeHashes = mergeHashes;exports.transferValues = transferValues;

@@ -38,6 +38,3 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _events = require('events');var _events2 = _interopRequireDefault(_events);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var

'(?:' +
[
BaseLexer.singleQuotePattern,
BaseLexer.doubleQuotePattern].
join('|') +
[BaseLexer.singleQuotePattern, BaseLexer.doubleQuotePattern].join('|') +
')');

@@ -44,0 +41,0 @@

@@ -49,4 +49,3 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var

this.emit('warning', 'Key is not a string literal: ' + firstArgument);
} else
{
} else {
var result = _extends({},

@@ -78,3 +77,5 @@ args.options, {

_baseLexer2.default.variablePattern +
'(?:=' + _baseLexer2.default.stringOrVariablePattern + ')?' +
'(?:=' +
_baseLexer2.default.stringOrVariablePattern +
')?' +
')' +

@@ -81,0 +82,0 @@ '|' +

@@ -25,4 +25,4 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);

options = JSON.parse(options);
} finally
{}
} finally {
}
}var _iteratorNormalCompletion = true;var _didIteratorError = false;var _iteratorError = undefined;try {

@@ -29,0 +29,0 @@

@@ -29,3 +29,7 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) {return typeof obj;} : function (obj) {return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);

var sourceFile = ts.createSourceFile(filename, content, ts.ScriptTarget.Latest);
var sourceFile = ts.createSourceFile(
filename,
content,
ts.ScriptTarget.Latest);
parseTree(sourceFile);

@@ -41,5 +45,5 @@

node.expression.text && this.functions.includes(node.expression.text) ||
node.expression.name && this.functions.includes(node.expression.name.text);
node.expression.name &&
this.functions.includes(node.expression.name.text);
if (isTranslationFunction) {

@@ -50,14 +54,21 @@ var keyArgument = node.arguments.shift();

entry.key = keyArgument.text;
} else
if (keyArgument && keyArgument.kind === ts.SyntaxKind.BinaryExpression) {
} else if (
keyArgument &&
keyArgument.kind === ts.SyntaxKind.BinaryExpression)
{
var concatenatedString = this.concatenateString(keyArgument);
if (!concatenatedString) {
this.emit('warning', 'Key is not a string literal: ' + keyArgument.text);
this.emit(
'warning', 'Key is not a string literal: ' +
keyArgument.text);
return null;
}
entry.key = concatenatedString;
} else
{
} else {
if (keyArgument.kind === ts.SyntaxKind.Identifier) {
this.emit('warning', 'Key is not a string literal: ' + keyArgument.text);
this.emit(
'warning', 'Key is not a string literal: ' +
keyArgument.text);
}

@@ -68,9 +79,13 @@

var optionsArgument = node.arguments.shift();
if (optionsArgument && optionsArgument.kind === ts.SyntaxKind.StringLiteral) {
if (
optionsArgument &&
optionsArgument.kind === ts.SyntaxKind.StringLiteral)
{
entry.defaultValue = optionsArgument.text;
} else
if (optionsArgument && optionsArgument.kind === ts.SyntaxKind.ObjectLiteralExpression) {var _iteratorNormalCompletion = true;var _didIteratorError = false;var _iteratorError = undefined;try {
} else if (
optionsArgument &&
optionsArgument.kind === ts.SyntaxKind.ObjectLiteralExpression)
{var _iteratorNormalCompletion = true;var _didIteratorError = false;var _iteratorError = undefined;try {
for (var _iterator = optionsArgument.properties[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {var p = _step.value;

@@ -94,3 +109,6 @@ entry[p.name.text] = p.initializer && p.initializer.text || '';

if (node.expression.escapedText === 'useTranslation' && node.arguments.length) {
if (
node.expression.escapedText === 'useTranslation' &&
node.arguments.length)
{
this.defaultNamespace = node.arguments[0].text;

@@ -109,7 +127,5 @@ }

string += this.concatenateString(binaryExpression.left, string);
} else
if (binaryExpression.left.kind === ts.SyntaxKind.StringLiteral) {
} else if (binaryExpression.left.kind === ts.SyntaxKind.StringLiteral) {
string += binaryExpression.left.text;
} else
{
} else {
return;

@@ -120,7 +136,5 @@ }

string += this.concatenateString(binaryExpression.right, string);
} else
if (binaryExpression.right.kind === ts.SyntaxKind.StringLiteral) {
} else if (binaryExpression.right.kind === ts.SyntaxKind.StringLiteral) {
string += binaryExpression.right.text;
} else
{
} else {
return;

@@ -127,0 +141,0 @@ }

@@ -8,4 +8,10 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _javascriptLexer = require('./javascript-lexer');var _javascriptLexer2 = _interopRequireDefault(_javascriptLexer);

_this.transSupportBasicHtmlNodes = options.transSupportBasicHtmlNodes || false;
_this.transKeepBasicHtmlNodesFor = options.transKeepBasicHtmlNodesFor || ['br', 'strong', 'i', 'p'];return _this;
_this.transSupportBasicHtmlNodes =
options.transSupportBasicHtmlNodes || false;
_this.transKeepBasicHtmlNodesFor = options.transKeepBasicHtmlNodesFor || [
'br',
'strong',
'i',
'p'];return _this;
}_createClass(JsxLexer, [{ key: 'extract', value: function extract(

@@ -38,3 +44,7 @@

var sourceFile = ts.createSourceFile(filename, content, ts.ScriptTarget.Latest);
var sourceFile = ts.createSourceFile(
filename,
content,
ts.ScriptTarget.Latest);
parseTree(sourceFile);

@@ -49,3 +59,5 @@

var getPropValue = function getPropValue(node, tagName) {
var attribute = node.attributes.properties.find(function (attr) {return attr.name.text === tagName;});
var attribute = node.attributes.properties.find(
function (attr) {return attr.name.text === tagName;});
return attribute && attribute.initializer.text;

@@ -56,3 +68,3 @@ };

if (tagNode.tagName.text === "Trans") {
if (tagNode.tagName.text === 'Trans') {
var entry = {};

@@ -76,5 +88,18 @@ entry.key = getKey(tagNode);

tagNode.attributes.properties.forEach(function (property) {
if ([_this3.attr, 'ns'].includes(property.name.text)) {
return;
}
if (property.initializer.expression) {
entry[
property.name.text] = '{' +
property.initializer.expression.text + '}';
} else {
entry[property.name.text] = property.initializer.text;
}
});
return entry.key ? entry : null;
} else
if (tagNode.tagName.text === "Interpolate") {
} else if (tagNode.tagName.text === 'Interpolate') {
var _entry = {};

@@ -89,18 +114,24 @@ _entry.key = getKey(tagNode);

var elemsToString = function elemsToString(children) {return children.map(function (child, index) {
switch (child.type) {
case 'js':
case 'text':
return child.content;
case 'tag':
var elementName =
child.isBasic &&
_this4.transSupportBasicHtmlNodes &&
_this4.transKeepBasicHtmlNodesFor.includes(child.name) ?
child.name :
index;
return '<' + elementName + '>' + elemsToString(child.children) + '</' + elementName + '>';
default:throw new Error('Unknown parsed content: ' + child.type);}
var elemsToString = function elemsToString(children) {return (
children.
map(function (child, index) {
switch (child.type) {
case 'js':
case 'text':
return child.content;
case 'tag':
var elementName =
child.isBasic &&
_this4.transSupportBasicHtmlNodes &&
_this4.transKeepBasicHtmlNodesFor.includes(child.name) ?
child.name :
index;
return '<' + elementName + '>' + elemsToString(
child.children) + '</' +
elementName + '>';
default:
throw new Error('Unknown parsed content: ' + child.type);}
}).join('');};
}).
join(''));};

@@ -111,10 +142,15 @@ return elemsToString(children);

{var _this5 = this;var children = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : [];var sourceText = arguments[1];
return children.map(function (child) {
return children.
map(function (child) {
if (child.kind === ts.SyntaxKind.JsxText) {
return {
type: 'text',
content: child.text.replace(/(^(\n|\r)\s*)|((\n|\r)\s*$)/g, '').replace(/(\n|\r)\s*/g, ' ') };
content: child.text.
replace(/(^(\n|\r)\s*)|((\n|\r)\s*$)/g, '').
replace(/(\n|\r)\s*/g, ' ') };
} else
if (child.kind === ts.SyntaxKind.JsxElement || child.kind === ts.SyntaxKind.JsxSelfClosingElement) {
} else if (
child.kind === ts.SyntaxKind.JsxElement ||
child.kind === ts.SyntaxKind.JsxSelfClosingElement)
{
var element = child.openingElement || child;

@@ -129,4 +165,3 @@ var name = element.tagName.escapedText;

} else
if (child.kind === ts.SyntaxKind.JsxExpression) {
} else if (child.kind === ts.SyntaxKind.JsxExpression) {
// strip empty expressions

@@ -138,5 +173,3 @@ if (!child.expression) {

} else
if (child.expression.kind === ts.SyntaxKind.StringLiteral) {
} else if (child.expression.kind === ts.SyntaxKind.StringLiteral) {
return {

@@ -150,12 +183,19 @@ type: 'text',

// annoying (and who knows how many other exceptions we'll need to write) but necessary
else if (child.expression.kind === ts.SyntaxKind.ObjectLiteralExpression) {
else if (
child.expression.kind === ts.SyntaxKind.ObjectLiteralExpression)
{
// i18next-react only accepts two props, any random single prop, and a format prop
// for our purposes, format prop is always ignored
var nonFormatProperties = child.expression.properties.filter(function (prop) {return prop.name.text !== 'format';});
var nonFormatProperties = child.expression.properties.filter(
function (prop) {return prop.name.text !== 'format';});
// more than one property throw a warning in i18next-react, but still works as a key
if (nonFormatProperties.length > 1) {
_this5.emit('warning', 'The passed in object contained more than one variable - the object should look like {{ value, format }} where format is optional.');
_this5.emit(
'warning', 'The passed in object contained more than one variable - the object should look like {{ value, format }} where format is optional.');
return {

@@ -176,9 +216,12 @@ type: 'text',

type: 'js',
content: '{' + sourceText.slice(child.expression.pos, child.expression.end) + '}' };
content: '{' + sourceText.slice(
child.expression.pos,
child.expression.end) + '}' };
} else
{
} else {
throw new Error('Unknown ast element when parsing jsx: ' + child.kind);
}
}).filter(function (child) {return child.type !== 'text' || child.content;});
}).
filter(function (child) {return child.type !== 'text' || child.content;});
} }]);return JsxLexer;}(_javascriptLexer2.default);exports.default = JsxLexer;module.exports = exports['default'];

@@ -18,3 +18,4 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);

var compiledTemplate = require('vue-template-compiler').compile(content).render;
var compiledTemplate = require('vue-template-compiler').compile(content).
render;
var Lexer2 = new _javascriptLexer2.default({ functions: this.functions });

@@ -21,0 +22,0 @@ Lexer2.on('warning', function (warning) {return _this2.emit('warning', warning);});

@@ -56,7 +56,9 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _path = require('path');var _path2 = _interopRequireDefault(_path);

if (typeof lexerConfig === 'string' || typeof lexerConfig === 'function') {
if (
typeof lexerConfig === 'string' ||
typeof lexerConfig === 'function')
{
lexerName = lexerConfig;
lexerOptions = {};
} else
{
} else {
lexerName = lexerConfig.lexer;

@@ -69,4 +71,3 @@ lexerOptions = lexerConfig;

Lexer = lexerName;
} else
{
} else {
if (!lexersMap[lexerName]) {

@@ -73,0 +74,0 @@ this.emit('error', new Error('Lexer \'' + lexerName + '\' does not exist'));

@@ -42,3 +42,5 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _helpers = require('./helpers');

useKeysAsDefaultValue: false,
verbose: false };
verbose: false,
skipDefaultValues: false,
customValueTemplate: null };

@@ -83,4 +85,3 @@

content = file.contents.toString('utf8');
} else
{
} else {
content = _fs2.default.readFileSync(file.path, encoding);

@@ -105,4 +106,3 @@ }

entry.namespace = parts.shift();
} else
if (extension === 'jsx' || this.options.reactNamespace) {
} else if (extension === 'jsx' || this.options.reactNamespace) {
entry.namespace = this.grabReactNamespace(content);

@@ -139,13 +139,11 @@ }

var transformEntry = function transformEntry(entry, suffix) {var _dotPathToHash =
(0, _helpers.dotPathToHash)(
entry,
catalog,
{
(0, _helpers.dotPathToHash)(entry, catalog, {
suffix: suffix,
separator: _this2.options.keySeparator,
value: _this2.options.defaultValue,
useKeysAsDefaultValue: _this2.options.useKeysAsDefaultValue }),duplicate = _dotPathToHash.duplicate,conflict = _dotPathToHash.conflict;
useKeysAsDefaultValue: _this2.options.useKeysAsDefaultValue,
skipDefaultValues: _this2.options.skipDefaultValues,
customValueTemplate: _this2.options.customValueTemplate }),duplicate = _dotPathToHash.duplicate,conflict = _dotPathToHash.conflict;
if (duplicate) {

@@ -156,4 +154,3 @@ uniqueCount -= 1;

_this2.warn(warning);
} else
if (conflict === 'value') {
} else if (conflict === 'value') {
var _warning = 'Found same keys with different values: ' + entry.key;

@@ -187,4 +184,7 @@ _this2.warn(_warning);

var namespaceOldPath = _path2.default.join(parsedNamespacePath.dir, parsedNamespacePath.name + '_old' + parsedNamespacePath.ext);
var namespaceOldPath = _path2.default.join(
parsedNamespacePath.dir,
parsedNamespacePath.name + '_old' + parsedNamespacePath.ext);
var existingCatalog = _this2.getCatalog(namespacePath);

@@ -195,2 +195,6 @@ var existingOldCatalog = _this2.getCatalog(namespaceOldPath);

var _mergeHashes =
(0, _helpers.mergeHashes)(

@@ -203,4 +207,7 @@ existingCatalog,

// restore old translations
var _mergeHashes2 = (0, _helpers.mergeHashes)(existingOldCatalog, newCatalog),oldCatalog = _mergeHashes2.old,restoreCount = _mergeHashes2.mergeCount;
var _mergeHashes2 = (0, _helpers.mergeHashes)(
existingOldCatalog,
newCatalog),oldCatalog = _mergeHashes2.old,restoreCount = _mergeHashes2.mergeCount;
// backup unused translations

@@ -211,3 +218,5 @@ (0, _helpers.transferValues)(oldKeys, oldCatalog);

console.log('[' + locale + '] ' + namespace + '\n');
console.log('Unique keys: ' + uniqueCount + ' (' + countWithPlurals + ' with plurals)');
console.log('Unique keys: ' +
uniqueCount + ' (' + countWithPlurals + ' with plurals)');
var addCount = countWithPlurals - mergeCount;

@@ -244,4 +253,3 @@ console.log('Added keys: ' + addCount);

this.entries.push(contextEntry);
} else
{
} else {
this.entries.push(entry);

@@ -256,9 +264,7 @@ }

content = _yamljs2.default.parse(_fs2.default.readFileSync(path).toString());
} else
{
} else {
content = JSON.parse(_fs2.default.readFileSync(path));
}
return content;
}
catch (error) {
} catch (error) {
if (error.code !== 'ENOENT') {

@@ -276,4 +282,3 @@ this.emit('error', error);

text = _yamljs2.default.stringify(contents, null, this.options.indentation);
} else
{
} else {
text = JSON.stringify(contents, null, this.options.indentation) + '\n';

@@ -284,10 +289,13 @@ }

text = _eol2.default.auto(text);
} else
if (this.options.lineEnding === '\r\n' || this.options.lineEnding === 'crlf') {
} else if (
this.options.lineEnding === '\r\n' ||
this.options.lineEnding === 'crlf')
{
text = _eol2.default.crlf(text);
} else
if (this.options.lineEnding === '\r' || this.options.lineEnding === 'cr') {
} else if (
this.options.lineEnding === '\r' ||
this.options.lineEnding === 'cr')
{
text = _eol2.default.cr(text);
} else
{
} else {
// Defaults to LF, aka \n

@@ -294,0 +302,0 @@ text = _eol2.default.lf(text);

@@ -65,3 +65,10 @@ # Contribute

## Deploy
- `yarn build`
- update `package.json` version
- create commit and add version tag
- `npm publish`
## `0.x` vs `1.x`

@@ -75,9 +82,1 @@

I will not maintain the old version but will welcome bug fixes as PRs.
## Deploy
- `yarn watch`
- update `package.json` version
- create commit and add version tag
- `npm publish --tag next` (skip next tag if not in beta)
- create a github release

@@ -5,3 +5,3 @@ {

"name": "i18next-parser",
"version": "1.0.5",
"version": "1.0.6",
"license": "MIT",

@@ -14,3 +14,4 @@ "main": "dist/index.js",

"test": "mocha -r babel-register -r babel-polyfill --recursive test/*.test.js test/**/*.test.js",
"watch": "babel src -d dist -w"
"watch": "babel src -d dist -w",
"build": "prettier --write --no-semi --single-quote \"{src,test}/**/*.js\" && babel src -d dist"
},

@@ -57,2 +58,3 @@ "repository": {

"mocha": "^5.0.0",
"prettier": "2.0.5",
"sinon": "^6.2.0"

@@ -59,0 +61,0 @@ },

@@ -190,2 +190,13 @@ # i18next Parser [![Build Status](https://travis-ci.org/i18next/i18next-parser.svg?branch=master)](https://travis-ci.org/i18next/i18next-parser)

// Display info about the parsing including some stats
customValueTemplate: null,
// If you wish to customize the value output as an object, you can set your own format.
// ${defaultValue} is the default value you set in your translation function.
// Any other custom property will be automatically extracted.
//
// Example:
// {
// message: "${defaultValue}",
// description: "${maxLength}", // t('my-key', {maxLength: 150})
// }
}

@@ -192,0 +203,0 @@ ```
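For context, this is roughly the catalog entry the template documented above would produce for a call like `t('my-key', 'My default text', { maxLength: 150 })`; the key and values are taken from the comment's own example and are illustrative only.

```json
{
  "my-key": {
    "message": "My default text",
    "description": "150"
  }
}
```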

@@ -24,39 +24,38 @@ import colors from 'colors'

vfs.src(this.inputPaths.map(x => x + '/**/*.{js,hbs}'))
.pipe(sort())
.pipe(
new i18nTransform(this.options)
.on('reading', function (file) {
if (!this.options.silent) {
console.log(' [read] '.green + file.path)
}
count++
})
.on('data', function (file) {
files.push(
fse.outputFile(file.path, file.contents)
)
if (!this.options.silent) {
console.log(' [write] '.green + file.path)
}
})
.on('error', function (message, region) {
if (typeof region === 'string') {
message += ': ' + region.trim()
}
console.log(' [error] '.red + message)
})
.on('finish', function () {
if (!this.options.silent) {
console.log()
}
console.log(' Stats: '.yellow + count + ' files were parsed')
vfs
.src(this.inputPaths.map((x) => x + '/**/*.{js,hbs}'))
.pipe(sort())
.pipe(
new i18nTransform(this.options)
.on('reading', function (file) {
if (!this.options.silent) {
console.log(' [read] '.green + file.path)
}
count++
})
.on('data', function (file) {
files.push(fse.outputFile(file.path, file.contents))
if (!this.options.silent) {
console.log(' [write] '.green + file.path)
}
})
.on('error', function (message, region) {
if (typeof region === 'string') {
message += ': ' + region.trim()
}
console.log(' [error] '.red + message)
})
.on('finish', function () {
if (!this.options.silent) {
console.log()
}
console.log(' Stats: '.yellow + count + ' files were parsed')
Promise.all(files).then(() => {
resolve(files)
})
})
)
Promise.all(files).then(() => {
resolve(files)
})
})
)
})
}
}

@@ -24,7 +24,10 @@ /**

if (options.skipDefaultValues) {
newValue = ""
newValue = ''
}
if (options.useKeysAsDefaultValue) {
newValue = entry.key.substring(entry.key.indexOf(separator) + separator.length, entry.key.length)
newValue = entry.key.substring(
entry.key.indexOf(separator) + separator.length,
entry.key.length
)
}

@@ -52,10 +55,25 @@

const lastSegment = segments[segments.length - 1];
const oldValue = inner[lastSegment];
const lastSegment = segments[segments.length - 1]
const oldValue = inner[lastSegment]
if (oldValue !== undefined && oldValue !== newValue) {
conflict = typeof oldValue !== typeof newValue ? 'key' : 'value';
conflict = typeof oldValue !== typeof newValue ? 'key' : 'value'
}
const duplicate = oldValue !== undefined || conflict !== false
inner[lastSegment] = newValue
if (options.customValueTemplate) {
inner[lastSegment] = {}
const entries = Object.entries(options.customValueTemplate)
entries.forEach((valueEntry) => {
if (valueEntry[1] === '${defaultValue}') {
inner[lastSegment][valueEntry[0]] = newValue
} else {
inner[lastSegment][valueEntry[0]] =
entry[valueEntry[1].replace(/\${(\w+)}/, '$1')] || ''
}
})
} else {
inner[lastSegment] = newValue
}
return { target, duplicate, conflict }
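The new `customValueTemplate` branch above can be exercised directly through `dotPathToHash`. A minimal sketch, using the call shape from the tests in this diff (the import path and the `description` property are illustrative):

```js
import { dotPathToHash } from '../src/helpers'

const entry = {
  key: 'home.title',
  defaultValue: 'Hello',
  description: 'Greeting shown on the home page',
}

const { target } = dotPathToHash(entry, {}, {
  separator: '.',
  customValueTemplate: {
    message: '${defaultValue}', // resolved from the entry's default value
    description: '${description}', // any other ${prop} is read off the entry
  },
})

// target => { home: { title: { message: 'Hello',
//                              description: 'Greeting shown on the home page' } } }
```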

@@ -98,9 +116,5 @@ }

}
}
else {
} else {
if (target[key] !== undefined) {
if (
typeof source[key] === 'string' ||
Array.isArray(source[key])
) {
if (typeof source[key] === 'string' || Array.isArray(source[key])) {
target[key] = source[key]

@@ -112,6 +126,5 @@ mergeCount += 1

}
}
else {
} else {
// support for plural in keys
const pluralRegex = /(_plural)|(_\d+)$/;
const pluralRegex = /(_plural)|(_\d+)$/
const pluralMatch = pluralRegex.test(key)

@@ -121,3 +134,3 @@ const singularKey = key.replace(pluralRegex, '')

// support for context in keys
const contextRegex = /_([^_]+)?$/;
const contextRegex = /_([^_]+)?$/
const contextMatch = contextRegex.test(singularKey)

@@ -160,13 +173,8 @@ const rawKey = singularKey.replace(contextRegex, '')

transferValues(sourceValue, targetValue)
} else {
target[key] = sourceValue
}
else {
target[key] = sourceValue;
}
}
}
export {
dotPathToHash,
mergeHashes,
transferValues
}
export { dotPathToHash, mergeHashes, transferValues }

@@ -38,6 +38,3 @@ import EventEmitter from 'events'

'(?:' +
[
BaseLexer.singleQuotePattern,
BaseLexer.doubleQuotePattern
].join('|') +
[BaseLexer.singleQuotePattern, BaseLexer.doubleQuotePattern].join('|') +
')'

@@ -53,3 +50,3 @@ )

BaseLexer.doubleQuotePattern,
BaseLexer.variablePattern
BaseLexer.variablePattern,
].join('|') +

@@ -56,0 +53,0 @@ ')'

@@ -16,3 +16,3 @@ import BaseLexer from './base-lexer'

while (matches = this.functionRegex.exec(content)) {
while ((matches = this.functionRegex.exec(content))) {
const args = this.parseArguments(matches[1] || matches[2])

@@ -29,5 +29,5 @@ this.populateKeysFromArguments(args)

arguments: [],
options: {}
options: {},
}
while (matches = this.argumentsRegex.exec(args)) {
while ((matches = this.argumentsRegex.exec(args))) {
const arg = matches[1]

@@ -51,7 +51,6 @@ const parts = arg.split('=')

this.emit('warning', `Key is not a string literal: ${firstArgument}`)
}
else {
} else {
const result = {
...args.options,
key: firstArgument.slice(1, -1)
key: firstArgument.slice(1, -1),
}

@@ -78,8 +77,10 @@ if (isDefaultValueString) {

'(' +
'(?:' +
BaseLexer.variablePattern +
'(?:=' + BaseLexer.stringOrVariablePattern + ')?' +
')' +
'|' +
BaseLexer.stringPattern +
'(?:' +
BaseLexer.variablePattern +
'(?:=' +
BaseLexer.stringOrVariablePattern +
')?' +
')' +
'|' +
BaseLexer.stringPattern +
')'

@@ -86,0 +87,0 @@ this.argumentsRegex = new RegExp(pattern, 'gi')

@@ -25,4 +25,4 @@ import BaseLexer from './base-lexer'

options = JSON.parse(options)
} finally {
}
finally {}
}

@@ -29,0 +29,0 @@

@@ -29,3 +29,7 @@ import BaseLexer from './base-lexer'

const sourceFile = ts.createSourceFile(filename, content, ts.ScriptTarget.Latest)
const sourceFile = ts.createSourceFile(
filename,
content,
ts.ScriptTarget.Latest
)
parseTree(sourceFile)

@@ -41,5 +45,5 @@

(node.expression.text && this.functions.includes(node.expression.text)) ||
(node.expression.name && this.functions.includes(node.expression.name.text))
(node.expression.name &&
this.functions.includes(node.expression.name.text))
if (isTranslationFunction) {

@@ -50,14 +54,21 @@ const keyArgument = node.arguments.shift()

entry.key = keyArgument.text
}
else if (keyArgument && keyArgument.kind === ts.SyntaxKind.BinaryExpression) {
} else if (
keyArgument &&
keyArgument.kind === ts.SyntaxKind.BinaryExpression
) {
const concatenatedString = this.concatenateString(keyArgument)
if (!concatenatedString) {
this.emit('warning', `Key is not a string literal: ${keyArgument.text}`)
this.emit(
'warning',
`Key is not a string literal: ${keyArgument.text}`
)
return null
}
entry.key = concatenatedString
}
else {
} else {
if (keyArgument.kind === ts.SyntaxKind.Identifier) {
this.emit('warning', `Key is not a string literal: ${keyArgument.text}`)
this.emit(
'warning',
`Key is not a string literal: ${keyArgument.text}`
)
}

@@ -68,11 +79,15 @@

const optionsArgument = node.arguments.shift()
if (optionsArgument && optionsArgument.kind === ts.SyntaxKind.StringLiteral) {
if (
optionsArgument &&
optionsArgument.kind === ts.SyntaxKind.StringLiteral
) {
entry.defaultValue = optionsArgument.text
}
else if (optionsArgument && optionsArgument.kind === ts.SyntaxKind.ObjectLiteralExpression) {
} else if (
optionsArgument &&
optionsArgument.kind === ts.SyntaxKind.ObjectLiteralExpression
) {
for (const p of optionsArgument.properties) {
entry[p.name.text] = p.initializer && p.initializer.text || ''
entry[p.name.text] = (p.initializer && p.initializer.text) || ''
}

@@ -94,3 +109,6 @@ }

if(node.expression.escapedText === 'useTranslation' && node.arguments.length) {
if (
node.expression.escapedText === 'useTranslation' &&
node.arguments.length
) {
this.defaultNamespace = node.arguments[0].text

@@ -109,7 +127,5 @@ }

string += this.concatenateString(binaryExpression.left, string)
}
else if (binaryExpression.left.kind === ts.SyntaxKind.StringLiteral) {
} else if (binaryExpression.left.kind === ts.SyntaxKind.StringLiteral) {
string += binaryExpression.left.text
}
else {
} else {
return

@@ -120,7 +136,5 @@ }

string += this.concatenateString(binaryExpression.right, string)
}
else if (binaryExpression.right.kind === ts.SyntaxKind.StringLiteral) {
} else if (binaryExpression.right.kind === ts.SyntaxKind.StringLiteral) {
string += binaryExpression.right.text
}
else {
} else {
return

@@ -127,0 +141,0 @@ }

@@ -7,5 +7,11 @@ import JavascriptLexer from './javascript-lexer'

super(options)
this.transSupportBasicHtmlNodes = options.transSupportBasicHtmlNodes || false
this.transKeepBasicHtmlNodesFor = options.transKeepBasicHtmlNodesFor || ['br', 'strong', 'i', 'p']
this.transSupportBasicHtmlNodes =
options.transSupportBasicHtmlNodes || false
this.transKeepBasicHtmlNodesFor = options.transKeepBasicHtmlNodesFor || [
'br',
'strong',
'i',
'p',
]
}

@@ -38,3 +44,7 @@

const sourceFile = ts.createSourceFile(filename, content, ts.ScriptTarget.Latest)
const sourceFile = ts.createSourceFile(
filename,
content,
ts.ScriptTarget.Latest
)
parseTree(sourceFile)

@@ -49,3 +59,5 @@

const getPropValue = (node, tagName) => {
const attribute = node.attributes.properties.find(attr => attr.name.text === tagName)
const attribute = node.attributes.properties.find(
(attr) => attr.name.text === tagName
)
return attribute && attribute.initializer.text

@@ -56,3 +68,3 @@ }

if (tagNode.tagName.text === "Trans") {
if (tagNode.tagName.text === 'Trans') {
const entry = {}

@@ -76,5 +88,18 @@ entry.key = getKey(tagNode)

tagNode.attributes.properties.forEach((property) => {
if ([this.attr, 'ns'].includes(property.name.text)) {
return
}
if (property.initializer.expression) {
entry[
property.name.text
] = `{${property.initializer.expression.text}}`
} else {
entry[property.name.text] = property.initializer.text
}
})
return entry.key ? entry : null
}
else if (tagNode.tagName.text === "Interpolate") {
} else if (tagNode.tagName.text === 'Interpolate') {
const entry = {}

@@ -89,18 +114,24 @@ entry.key = getKey(tagNode)

const elemsToString = (children) => children.map((child, index) => {
switch(child.type) {
case 'js':
case 'text':
return child.content
case 'tag':
const elementName =
child.isBasic &&
this.transSupportBasicHtmlNodes &&
this.transKeepBasicHtmlNodesFor.includes(child.name)
? child.name
: index
return `<${elementName}>${elemsToString(child.children)}</${elementName}>`
default: throw new Error('Unknown parsed content: ' + child.type)
}
}).join('')
const elemsToString = (children) =>
children
.map((child, index) => {
switch (child.type) {
case 'js':
case 'text':
return child.content
case 'tag':
const elementName =
child.isBasic &&
this.transSupportBasicHtmlNodes &&
this.transKeepBasicHtmlNodesFor.includes(child.name)
? child.name
: index
return `<${elementName}>${elemsToString(
child.children
)}</${elementName}>`
default:
throw new Error('Unknown parsed content: ' + child.type)
}
})
.join('')

@@ -111,71 +142,83 @@ return elemsToString(children)

parseChildren(children = [], sourceText) {
return children.map(child => {
if (child.kind === ts.SyntaxKind.JsxText) {
return {
type: 'text',
content: child.text.replace(/(^(\n|\r)\s*)|((\n|\r)\s*$)/g, '').replace(/(\n|\r)\s*/g, ' ')
}
}
else if (child.kind === ts.SyntaxKind.JsxElement || child.kind === ts.SyntaxKind.JsxSelfClosingElement) {
const element = child.openingElement || child
const name = element.tagName.escapedText
const isBasic = !element.attributes.properties.length
return {
type: 'tag',
children: this.parseChildren(child.children, sourceText),
name,
isBasic
}
}
else if (child.kind === ts.SyntaxKind.JsxExpression) {
// strip empty expressions
if (!child.expression) {
return children
.map((child) => {
if (child.kind === ts.SyntaxKind.JsxText) {
return {
type: 'text',
content: ''
content: child.text
.replace(/(^(\n|\r)\s*)|((\n|\r)\s*$)/g, '')
.replace(/(\n|\r)\s*/g, ' '),
}
}
else if (child.expression.kind === ts.SyntaxKind.StringLiteral) {
} else if (
child.kind === ts.SyntaxKind.JsxElement ||
child.kind === ts.SyntaxKind.JsxSelfClosingElement
) {
const element = child.openingElement || child
const name = element.tagName.escapedText
const isBasic = !element.attributes.properties.length
return {
type: 'text',
content: child.expression.text
type: 'tag',
children: this.parseChildren(child.children, sourceText),
name,
isBasic,
}
}
} else if (child.kind === ts.SyntaxKind.JsxExpression) {
// strip empty expressions
if (!child.expression) {
return {
type: 'text',
content: '',
}
} else if (child.expression.kind === ts.SyntaxKind.StringLiteral) {
return {
type: 'text',
content: child.expression.text,
}
}
// strip properties from ObjectExpressions
// annoying (and who knows how many other exceptions we'll need to write) but necessary
else if (child.expression.kind === ts.SyntaxKind.ObjectLiteralExpression) {
// i18next-react only accepts two props, any random single prop, and a format prop
// for our purposes, format prop is always ignored
// strip properties from ObjectExpressions
// annoying (and who knows how many other exceptions we'll need to write) but necessary
else if (
child.expression.kind === ts.SyntaxKind.ObjectLiteralExpression
) {
// i18next-react only accepts two props, any random single prop, and a format prop
// for our purposes, format prop is always ignored
let nonFormatProperties = child.expression.properties.filter(prop => prop.name.text !== 'format')
let nonFormatProperties = child.expression.properties.filter(
(prop) => prop.name.text !== 'format'
)
// more than one property throw a warning in i18next-react, but still works as a key
if (nonFormatProperties.length > 1) {
this.emit('warning', `The passed in object contained more than one variable - the object should look like {{ value, format }} where format is optional.`)
// more than one property throw a warning in i18next-react, but still works as a key
if (nonFormatProperties.length > 1) {
this.emit(
'warning',
`The passed in object contained more than one variable - the object should look like {{ value, format }} where format is optional.`
)
return {
type: 'text',
content: '',
}
}
return {
type: 'text',
content: ''
type: 'js',
content: `{{${nonFormatProperties[0].name.text}}}`,
}
}
// slice on the expression so that we ignore comments around it
return {
type: 'js',
content: `{{${nonFormatProperties[0].name.text}}}`
content: `{${sourceText.slice(
child.expression.pos,
child.expression.end
)}}`,
}
} else {
throw new Error('Unknown ast element when parsing jsx: ' + child.kind)
}
// slice on the expression so that we ignore comments around it
return {
type: 'js',
content: `{${sourceText.slice(child.expression.pos, child.expression.end)}}`
}
}
else {
throw new Error('Unknown ast element when parsing jsx: ' + child.kind)
}
}).filter(child => child.type !== 'text' || child.content)
})
.filter((child) => child.type !== 'text' || child.content)
}
}

@@ -15,8 +15,9 @@ import BaseLexer from './base-lexer'

const Lexer = new JavascriptLexer()
Lexer.on('warning', warning => this.emit('warning', warning))
Lexer.on('warning', (warning) => this.emit('warning', warning))
keys = keys.concat(Lexer.extract(content))
const compiledTemplate = require('vue-template-compiler').compile(content).render
const compiledTemplate = require('vue-template-compiler').compile(content)
.render
const Lexer2 = new JavascriptLexer({ functions: this.functions })
Lexer2.on('warning', warning => this.emit('warning', warning))
Lexer2.on('warning', (warning) => this.emit('warning', warning))
keys = keys.concat(Lexer2.extract(compiledTemplate))

@@ -23,0 +24,0 @@

@@ -24,3 +24,3 @@ import path from 'path'

default: ['JavascriptLexer']
default: ['JavascriptLexer'],
}

@@ -33,3 +33,3 @@

JsxLexer,
VueLexer
VueLexer,
}

@@ -58,7 +58,9 @@

if (typeof lexerConfig === 'string' || typeof lexerConfig === 'function') {
if (
typeof lexerConfig === 'string' ||
typeof lexerConfig === 'function'
) {
lexerName = lexerConfig
lexerOptions = {}
}
else {
} else {
lexerName = lexerConfig.lexer

@@ -68,7 +70,6 @@ lexerOptions = lexerConfig

let Lexer;
if(typeof lexerName === 'function') {
let Lexer
if (typeof lexerName === 'function') {
Lexer = lexerName
}
else {
} else {
if (!lexersMap[lexerName]) {

@@ -82,3 +83,3 @@ this.emit('error', new Error(`Lexer '${lexerName}' does not exist`))

const lexer = new Lexer(lexerOptions)
lexer.on('warning', warning => this.emit('warning', warning))
lexer.on('warning', (warning) => this.emit('warning', warning))
keys = keys.concat(lexer.extract(content, filename))

@@ -85,0 +86,0 @@ }

@@ -43,3 +43,4 @@ import { dotPathToHash, mergeHashes, transferValues } from './helpers'

verbose: false,
skipDefaultValues: false
skipDefaultValues: false,
customValueTemplate: null,
}

@@ -57,4 +58,4 @@

this.parser = new Parser(this.options)
this.parser.on('error', error => this.error(error))
this.parser.on('warning', warning => this.warn(warning))
this.parser.on('error', (error) => this.error(error))
this.parser.on('warning', (warning) => this.warn(warning))

@@ -64,3 +65,3 @@ this.localeRegex = /\$LOCALE/g

i18next.init();
i18next.init()
}

@@ -86,4 +87,3 @@

content = file.contents.toString('utf8')
}
else {
} else {
content = fs.readFileSync(file.path, encoding)

@@ -108,4 +108,3 @@ }

entry.namespace = parts.shift()
}
else if (extension === 'jsx' || this.options.reactNamespace) {
} else if (extension === 'jsx' || this.options.reactNamespace) {
entry.namespace = this.grabReactNamespace(content)

@@ -142,13 +141,10 @@ }

const transformEntry = (entry, suffix) => {
const { duplicate, conflict } = dotPathToHash(
entry,
catalog,
{
suffix,
separator: this.options.keySeparator,
value: this.options.defaultValue,
useKeysAsDefaultValue: this.options.useKeysAsDefaultValue,
skipDefaultValues: this.options.skipDefaultValues,
}
)
const { duplicate, conflict } = dotPathToHash(entry, catalog, {
suffix,
separator: this.options.keySeparator,
value: this.options.defaultValue,
useKeysAsDefaultValue: this.options.useKeysAsDefaultValue,
skipDefaultValues: this.options.skipDefaultValues,
customValueTemplate: this.options.customValueTemplate,
})

@@ -160,4 +156,3 @@ if (duplicate) {

this.warn(warning)
}
else if (conflict === 'value') {
} else if (conflict === 'value') {
const warning = `Found same keys with different values: ${entry.key}`

@@ -191,3 +186,6 @@ this.warn(warning)

const namespaceOldPath = path.join(parsedNamespacePath.dir, `${parsedNamespacePath.name}_old${parsedNamespacePath.ext}`)
const namespaceOldPath = path.join(
parsedNamespacePath.dir,
`${parsedNamespacePath.name}_old${parsedNamespacePath.ext}`
)

@@ -198,11 +196,18 @@ let existingCatalog = this.getCatalog(namespacePath)

// merges existing translations with the new ones
const { new: newCatalog, old: oldKeys, mergeCount, oldCount } =
mergeHashes(
existingCatalog,
catalog[namespace],
this.options.keepRemoved
)
const {
new: newCatalog,
old: oldKeys,
mergeCount,
oldCount,
} = mergeHashes(
existingCatalog,
catalog[namespace],
this.options.keepRemoved
)
// restore old translations
const { old: oldCatalog, mergeCount: restoreCount } = mergeHashes(existingOldCatalog, newCatalog)
const { old: oldCatalog, mergeCount: restoreCount } = mergeHashes(
existingOldCatalog,
newCatalog
)

@@ -214,3 +219,5 @@ // backup unused translations

console.log(`[${locale}] ${namespace}\n`)
console.log(`Unique keys: ${uniqueCount} (${countWithPlurals} with plurals)`)
console.log(
`Unique keys: ${uniqueCount} (${countWithPlurals} with plurals)`
)
const addCount = countWithPlurals - mergeCount

@@ -247,4 +254,3 @@ console.log(`Added keys: ${addCount}`)

this.entries.push(contextEntry)
}
else {
} else {
this.entries.push(entry)

@@ -259,9 +265,7 @@ }

content = YAML.parse(fs.readFileSync(path).toString())
}
else {
} else {
content = JSON.parse(fs.readFileSync(path))
}
return content
}
catch (error) {
} catch (error) {
if (error.code !== 'ENOENT') {

@@ -279,4 +283,3 @@ this.emit('error', error)

text = YAML.stringify(contents, null, this.options.indentation)
}
else {
} else {
text = JSON.stringify(contents, null, this.options.indentation) + '\n'

@@ -287,10 +290,13 @@ }

text = eol.auto(text)
}
else if (this.options.lineEnding === '\r\n' || this.options.lineEnding === 'crlf') {
} else if (
this.options.lineEnding === '\r\n' ||
this.options.lineEnding === 'crlf'
) {
text = eol.crlf(text)
}
else if (this.options.lineEnding === '\r' || this.options.lineEnding === 'cr') {
} else if (
this.options.lineEnding === '\r' ||
this.options.lineEnding === 'cr'
) {
text = eol.cr(text)
}
else {
} else {
// Defaults to LF, aka \n

@@ -302,3 +308,3 @@ text = eol.lf(text)

path,
contents: Buffer.from(text)
contents: Buffer.from(text),
})

@@ -305,0 +311,0 @@ this.push(file)

@@ -8,9 +8,9 @@ const Funnel = require('broccoli-funnel')

files: ['handlebars.hbs', 'javascript.js'],
annotation: 'i18next-parser'
annotation: 'i18next-parser',
})
i18n = new i18nextParser([i18n], {
output: 'broccoli/locales/$LOCALE/$NAMESPACE.json'
output: 'broccoli/locales/$LOCALE/$NAMESPACE.json',
})
module.exports = i18n

@@ -1,11 +0,14 @@

const gulp = require('gulp');
const i18next = require('../dist/index');
const gulp = require('gulp')
const i18next = require('../dist/index')
gulp.task('i18next', function() {
return gulp.src(['templating/*'])
.pipe(new i18next({
gulp.task('i18next', function () {
return gulp
.src(['templating/*'])
.pipe(
new i18next({
locales: ['en', 'fr'],
output: 'gulp/locales/$LOCALE/$NAMESPACE.json'
}))
.pipe(gulp.dest('./'));
});
output: 'gulp/locales/$LOCALE/$NAMESPACE.json',
})
)
.pipe(gulp.dest('./'))
})

@@ -13,7 +13,3 @@ import { assert } from 'chai'

it('ignores trailing separator', (done) => {
const { target } = dotPathToHash(
{ key: 'one.' },
{},
{ separator: '.' }
)
const { target } = dotPathToHash({ key: 'one.' }, {}, { separator: '.' })
assert.deepEqual(target, { one: '' })

@@ -24,5 +20,3 @@ done()

it('ignores duplicated separator', (done) => {
const { target } = dotPathToHash(
{ key: 'one..two' }
)
const { target } = dotPathToHash({ key: 'one..two' })
assert.deepEqual(target, { one: { two: '' } })

@@ -33,7 +27,3 @@ done()

it('supports custom separator', (done) => {
const { target } = dotPathToHash(
{ key: 'one-two' },
{},
{ separator: '-' }
)
const { target } = dotPathToHash({ key: 'one-two' }, {}, { separator: '-' })
assert.deepEqual(target, { one: { two: '' } })

@@ -86,3 +76,3 @@ done()

{ key: 'one.two.three' },
{ one: { two: { three: '' } } },
{ one: { two: { three: '' } } }
)

@@ -98,3 +88,3 @@ assert.deepEqual(target, { one: { two: { three: '' } } })

{ key: 'one.two.three', defaultValue: 'new' },
{ one: { two: { three: 'old' } } },
{ one: { two: { three: 'old' } } }
)

@@ -110,3 +100,3 @@ assert.deepEqual(target, { one: { two: { three: 'new' } } })

{ key: 'one', defaultValue: 'bla' },
{ one: { two: { three: 'bla' } } },
{ one: { two: { three: 'bla' } } }
)

@@ -122,3 +112,3 @@ assert.deepEqual(target, { one: 'bla' })

{ key: 'one.two.three', defaultValue: 'bla' },
{ one: 'bla' },
{ one: 'bla' }
)

@@ -125,0 +115,0 @@ assert.deepEqual(target, { one: { two: { three: 'bla' } } })

@@ -8,3 +8,3 @@ import { assert } from 'chai'

const target = { key1: '' }
const res = mergeHashes(source, target)
const res = mergeHashes(source, target)

@@ -20,5 +20,5 @@ assert.deepEqual(res.new, { key1: 'value1' })

it('does not replace empty `target` keys with `source` if it is a hash', (done) => {
const source = { key1: { key11: 'value1'} }
const source = { key1: { key11: 'value1' } }
const target = { key1: '' }
const res = mergeHashes(source, target)
const res = mergeHashes(source, target)

@@ -36,3 +36,3 @@ assert.deepEqual(res.new, { key1: '' })

const target = { key1: '', key2: '' }
const res = mergeHashes(source, target)
const res = mergeHashes(source, target)

@@ -50,3 +50,3 @@ assert.deepEqual(res.new, { key1: 'value1', key2: '' })

const target = { key1: '' }
const res = mergeHashes(source, target)
const res = mergeHashes(source, target)

@@ -64,6 +64,6 @@ assert.deepEqual(res.new, { key1: 'value1' })

const target = { key1: '', key3: '' }
const res = mergeHashes(source, target, true)
const res = mergeHashes(source, target, true)
assert.deepEqual(res.new, { key1: 'value1', key2: 'value2', key3: '' })
assert.deepEqual(res.old, { })
assert.deepEqual(res.old, {})
assert.strictEqual(res.mergeCount, 1)

@@ -78,3 +78,3 @@ assert.strictEqual(res.pullCount, 0)

const target = { key1: '' }
const res = mergeHashes(source, target)
const res = mergeHashes(source, target)

@@ -92,3 +92,3 @@ assert.deepEqual(res.new, { key1: '', key1_plural: 'value1' })

const target = { key2: '' }
const res = mergeHashes(source, target)
const res = mergeHashes(source, target)

@@ -106,3 +106,3 @@ assert.deepEqual(res.new, { key2: '' })

const target = { key1: '' }
const res = mergeHashes(source, target)
const res = mergeHashes(source, target)

@@ -120,3 +120,3 @@ assert.deepEqual(res.new, { key1: '', key1_context: 'value1' })

const target = { key2: '' }
const res = mergeHashes(source, target)
const res = mergeHashes(source, target)

@@ -133,29 +133,29 @@ assert.deepEqual(res.new, { key2: '' })

const source = {
key1: 'value1',
key2: {
key21: 'value21',
key22: {
key221: 'value221',
key222: 'value222'
},
key23: 'value23'
key1: 'value1',
key2: {
key21: 'value21',
key22: {
key221: 'value221',
key222: 'value222',
},
key4: {
key41: 'value41'
}
key23: 'value23',
},
key4: {
key41: 'value41',
},
}
const target = {
key1: '',
key2: {
key21: '',
key22: {
key222: '',
key223: ''
},
key24: ''
key1: '',
key2: {
key21: '',
key22: {
key222: '',
key223: '',
},
key3: '',
key4: {
key41: 'value41'
}
key24: '',
},
key3: '',
key4: {
key41: 'value41',
},
}

@@ -166,24 +166,24 @@

const expected_target = {
key1: 'value1',
key2: {
key21: 'value21',
key22: {
key222: 'value222',
key223: ''
},
key24: ''
key1: 'value1',
key2: {
key21: 'value21',
key22: {
key222: 'value222',
key223: '',
},
key3: '',
key4: {
key41: 'value41'
}
key24: '',
},
key3: '',
key4: {
key41: 'value41',
},
}
const expected_old = {
key2: {
key22: {
key221: 'value221'
},
key23: 'value23'
}
key2: {
key22: {
key221: 'value221',
},
key23: 'value23',
},
}

@@ -202,3 +202,3 @@

const target = { key1: '' }
const res = mergeHashes(source, target)
const res = mergeHashes(source, target)

@@ -205,0 +205,0 @@ assert.deepEqual(res.new, { key1: ['Line one.', 'Line two.'] })

@@ -28,5 +28,9 @@ import { assert } from 'chai'

assert.deepEqual(target, { key0: 'value0', key1: 'value1', key2: { key20: 'value20_old', key21: 'value21' } })
assert.deepEqual(target, {
key0: 'value0',
key1: 'value1',
key2: { key20: 'value20_old', key21: 'value21' },
})
done()
})
})
module.exports = {
output: 'manual/$LOCALE/$NAMESPACE.json'
};
output: 'manual/$LOCALE/$NAMESPACE.json',
}

@@ -15,3 +15,6 @@ import { assert } from 'chai'

const content = '<p>{{t "first"}} {{t "second"}}</p>'
assert.deepEqual(Lexer.extract(content), [{ key: 'first' }, { key: 'second' }])
assert.deepEqual(Lexer.extract(content), [
{ key: 'first' },
{ key: 'second' },
])
done()

@@ -24,3 +27,3 @@ })

assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'bla' }
{ key: 'first', defaultValue: 'bla' },
])

@@ -34,3 +37,3 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'bla' }
{ key: 'first', defaultValue: 'bla' },
])

@@ -59,3 +62,3 @@ done()

{ key: 'first' },
{ key: 'second' }
{ key: 'second' },
])

@@ -65,2 +68,11 @@ done()

it('extracts custom options', (done) => {
const Lexer = new HandlebarsLexer()
const content = '<p>{{t "first" description="bla"}}</p>'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first', description: 'bla' },
])
done()
})
describe('parseArguments()', () => {

@@ -72,3 +84,3 @@ it('matches string arguments', (done) => {

arguments: ['"first"', '"bla"'],
options: {}
options: {},
})

@@ -83,3 +95,3 @@ done()

arguments: ['first', 'bla'],
options: {}
options: {},
})

@@ -95,4 +107,4 @@ done()

options: {
first: 'bla'
}
first: 'bla',
},
})

@@ -109,3 +121,3 @@ done()

// empty!
}
},
})

@@ -126,8 +138,8 @@ done()

"fifth='bla'",
'"sixth"'
'"sixth"',
],
options: {
'third-one': 'bla bla',
fifth: 'bla'
}
fifth: 'bla',
},
})

@@ -134,0 +146,0 @@ done()

@@ -10,3 +10,3 @@ import { assert } from 'chai'

{ key: 'first' },
{ key: 'second' }
{ key: 'second' },
])

@@ -21,3 +21,3 @@ done()

{ key: 'first' },
{ key: 'second' }
{ key: 'second' },
])

@@ -32,3 +32,3 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'bla' }
{ key: 'first', defaultValue: 'bla' },
])

@@ -53,3 +53,3 @@ done()

{ key: 'fourth' },
{ key: 'first', defaultValue: 'bar' }
{ key: 'first', defaultValue: 'bar' },
])

@@ -71,3 +71,3 @@ done()

{ key: 'first' },
{ key: 'second' }
{ key: 'second' },
])

@@ -82,6 +82,16 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'bar' }
{ key: 'first', defaultValue: 'bar' },
])
done()
})
it('extracts custom options', (done) => {
const Lexer = new HTMLLexer()
const content =
'<p data-i18n="first" data-i18n-options=\'{"description": "bla"}\'>first</p>'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first', description: 'bla' },
])
done()
})
})

@@ -17,3 +17,3 @@ import { assert } from 'chai'

assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'bla' }
{ key: 'first', defaultValue: 'bla' },
])

@@ -27,3 +27,3 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'foo', context: 'bar' }
{ key: 'first', defaultValue: 'foo', context: 'bar' },
])

@@ -35,5 +35,6 @@ done()

const Lexer = new JavascriptLexer()
const content = 'i18n.t("first", {\ndefaultValue: "foo",\n context: \'bar\'})'
const content =
'i18n.t("first", {\ndefaultValue: "foo",\n context: \'bar\'})'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'foo', context: 'bar' }
{ key: 'first', defaultValue: 'foo', context: 'bar' },
])

@@ -47,3 +48,3 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'bla', context: 'foo' }
{ key: 'first', defaultValue: 'bla', context: 'foo' },
])

@@ -55,5 +56,6 @@ done()

const Lexer = new JavascriptLexer()
const content = 'i18n.t("first", {context: "foo", "defaultValue": \'{{var}} bla\'})'
const content =
'i18n.t("first", {context: "foo", "defaultValue": \'{{var}} bla\'})'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: '{{var}} bla', context: 'foo' }
{ key: 'first', defaultValue: '{{var}} bla', context: 'foo' },
])

@@ -72,3 +74,4 @@ done()

const Lexer = new JavascriptLexer()
const js = "// FIX this doesn't work and this t is all alone\nt('first')\nt = () => {}"
const js =
"// FIX this doesn't work and this t is all alone\nt('first')\nt = () => {}"
assert.deepEqual(Lexer.extract(js), [{ key: 'first' }])

@@ -90,3 +93,3 @@ done()

{ key: 'first' },
{ key: 'second' }
{ key: 'second' },
])

@@ -105,6 +108,5 @@ done()

const Lexer = new JavascriptLexer()
const content = 'const data = { text: t("foo"), ...rest }; const { text, ...more } = data;'
assert.deepEqual(Lexer.extract(content), [
{ key: 'foo' }
])
const content =
'const data = { text: t("foo"), ...rest }; const { text, ...more } = data;'
assert.deepEqual(Lexer.extract(content), [{ key: 'foo' }])
done()

@@ -123,5 +125,3 @@ })

const content = '@decorator() class Test { test() { t("foo") } }'
assert.deepEqual(Lexer.extract(content), [
{ key: 'foo' }
])
assert.deepEqual(Lexer.extract(content), [{ key: 'foo' }])
done()

@@ -145,5 +145,18 @@ })

const content = 'const {t} = useTranslation("foo"); t("bar", {ns: "baz"});'
assert.deepEqual(Lexer.extract(content), [{ namespace: 'baz', key: 'bar', ns: 'baz' }])
assert.deepEqual(Lexer.extract(content), [
{ namespace: 'baz', key: 'bar', ns: 'baz' },
])
})
it('extracts custom options', () => {
const Lexer = new JavascriptLexer()
const content = 'i18n.t("headline", {description: "Fantastic key!"});'
assert.deepEqual(Lexer.extract(content), [
{
key: 'headline',
description: 'Fantastic key!',
},
])
})
})

@@ -10,5 +10,3 @@ import { assert, expect } from 'chai'

const content = '<Interpolate i18nKey="first" />'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first' }
])
assert.deepEqual(Lexer.extract(content), [{ key: 'first' }])
done()

@@ -23,3 +21,3 @@ })

assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'Yo' }
{ key: 'first', defaultValue: 'Yo', count: '{count}' },
])

@@ -30,6 +28,6 @@ done()

it('extracts keys from user-defined key attributes from closing tags', (done) => {
const Lexer = new JsxLexer({ attr: "myIntlKey" })
const Lexer = new JsxLexer({ attr: 'myIntlKey' })
const content = '<Trans myIntlKey="first" count={count}>Yo</Trans>'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'Yo' }
{ key: 'first', defaultValue: 'Yo', count: '{count}' },
])

@@ -43,3 +41,3 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: 'first' }
{ key: 'first', count: '{count}' },
])

@@ -50,6 +48,6 @@ done()

it('extracts keys from user-defined key attributes from self-closing tags', (done) => {
const Lexer = new JsxLexer({ attr: "myIntlKey" })
const Lexer = new JsxLexer({ attr: 'myIntlKey' })
const content = '<Trans myIntlKey="first" count={count} />'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first' }
{ key: 'first', count: '{count}' },
])

@@ -59,2 +57,11 @@ done()

it('extracts custom attributes', (done) => {
const Lexer = new JsxLexer()
const content = '<Trans customAttribute="Youpi">Yo</Trans>'
assert.deepEqual(Lexer.extract(content), [
{ key: 'Yo', defaultValue: 'Yo', customAttribute: 'Youpi' },
])
done()
})
it('extracts keys from Trans elements without an i18nKey', (done) => {

@@ -64,3 +71,3 @@ const Lexer = new JsxLexer()

assert.deepEqual(Lexer.extract(content), [
{ key: 'Yo', defaultValue: 'Yo' }
{ key: 'Yo', defaultValue: 'Yo', count: '{count}' },
])

@@ -74,3 +81,3 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: '{{key}}', defaultValue: '{{key}}' }
{ key: '{{key}}', defaultValue: '{{key}}', count: '{count}' },
])

@@ -84,3 +91,3 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: 'beforeafter', defaultValue: 'beforeafter' }
{ key: 'beforeafter', defaultValue: 'beforeafter', count: '{count}' },
])

@@ -90,3 +97,3 @@ done()

it('doesn\'t add a blank key for self-closing or empty tags', (done) => {
it("doesn't add a blank key for self-closing or empty tags", (done) => {
const Lexer = new JsxLexer()

@@ -105,4 +112,8 @@

const Lexer = new JsxLexer()
const content = '<Trans>a<b test={"</b>"}>c<c>z</c></b>{d}<br stuff={y}/></Trans>'
assert.equal(Lexer.extract(content)[0].defaultValue, 'a<1>c<1>z</1></1>{d}<3></3>')
const content =
'<Trans>a<b test={"</b>"}>c<c>z</c></b>{d}<br stuff={y}/></Trans>'
assert.equal(
Lexer.extract(content)[0].defaultValue,
'a<1>c<1>z</1></1>{d}<3></3>'
)
done()

@@ -121,5 +132,3 @@ })

const content = '<><Trans i18nKey="first" /></>'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first' }
])
assert.deepEqual(Lexer.extract(content), [{ key: 'first' }])
done()

@@ -131,3 +140,6 @@ })

const content = `<Trans>Some{' '}Interpolated {'Content'}</Trans>`
assert.equal(Lexer.extract(content)[0].defaultValue, 'Some Interpolated Content')
assert.equal(
Lexer.extract(content)[0].defaultValue,
'Some Interpolated Content'
)
done()

@@ -139,6 +151,7 @@ })

const content = `<Trans ns="foo">bar</Trans>`
assert.deepEqual(Lexer.extract(content), [{ key: 'bar', defaultValue: 'bar', namespace: 'foo' }])
assert.deepEqual(Lexer.extract(content), [
{ key: 'bar', defaultValue: 'bar', namespace: 'foo' },
])
done()
})
})

@@ -150,5 +163,3 @@

const content = '<Interpolate i18nKey="first" someVar={foo() as bar} />'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first' }
])
assert.deepEqual(Lexer.extract(content), [{ key: 'first' }])
})

@@ -160,5 +171,3 @@

const content = '<Interpolate i18nKey="first" />'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first' }
])
assert.deepEqual(Lexer.extract(content), [{ key: 'first' }])
done()

@@ -173,3 +182,3 @@ })

assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'Yo' }
{ key: 'first', defaultValue: 'Yo', count: '{count}' },
])

@@ -180,6 +189,6 @@ done()

it('extracts keys from user-defined key attributes from closing tags', (done) => {
const Lexer = new JsxLexer({ attr: "myIntlKey" })
const Lexer = new JsxLexer({ attr: 'myIntlKey' })
const content = '<Trans myIntlKey="first" count={count}>Yo</Trans>'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first', defaultValue: 'Yo' }
{ key: 'first', defaultValue: 'Yo', count: '{count}' },
])

@@ -193,3 +202,3 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: 'first' }
{ key: 'first', count: '{count}' },
])

@@ -200,6 +209,6 @@ done()

it('extracts keys from user-defined key attributes from self-closing tags', (done) => {
const Lexer = new JsxLexer({ attr: "myIntlKey" })
const Lexer = new JsxLexer({ attr: 'myIntlKey' })
const content = '<Trans myIntlKey="first" count={count} />'
assert.deepEqual(Lexer.extract(content), [
{ key: 'first' }
{ key: 'first', count: '{count}' },
])

@@ -213,3 +222,3 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: 'Yo', defaultValue: 'Yo' }
{ key: 'Yo', defaultValue: 'Yo', count: '{count}' },
])

@@ -223,3 +232,3 @@ done()

assert.deepEqual(Lexer.extract(content), [
{ key: '{{key}}', defaultValue: '{{key}}' }
{ key: '{{key}}', defaultValue: '{{key}}', count: '{count}' },
])

@@ -231,5 +240,6 @@ done()

const Lexer = new JsxLexer()
const content = '<Trans count={count}>before{{ key1, key2 }}after</Trans>'
const content =
'<Trans count={count}>before{{ key1, key2 }}after</Trans>'
assert.deepEqual(Lexer.extract(content), [
{ key: 'beforeafter', defaultValue: 'beforeafter' }
{ key: 'beforeafter', defaultValue: 'beforeafter', count: '{count}' },
])

@@ -239,3 +249,3 @@ done()

it('doesn\'t add a blank key for self-closing or empty tags', (done) => {
it("doesn't add a blank key for self-closing or empty tags", (done) => {
const Lexer = new JsxLexer()

@@ -254,4 +264,8 @@

const Lexer = new JsxLexer()
const content = '<Trans>a<b test={"</b>"}>c<c>z</c></b>{d}<br stuff={y}/></Trans>'
assert.equal(Lexer.extract(content)[0].defaultValue, 'a<1>c<1>z</1></1>{d}<3></3>')
const content =
'<Trans>a<b test={"</b>"}>c<c>z</c></b>{d}<br stuff={y}/></Trans>'
assert.equal(
Lexer.extract(content)[0].defaultValue,
'a<1>c<1>z</1></1>{d}<3></3>'
)
done()

@@ -258,0 +272,0 @@ })

@@ -10,3 +10,6 @@ import { assert } from 'chai'

"{ mounted() { this.$i18n.t('second'); } }</script>"
assert.deepEqual(Lexer.extract(content), [{ key: 'second' },{ key: 'first' }])
assert.deepEqual(Lexer.extract(content), [
{ key: 'second' },
{ key: 'first' },
])
done()

@@ -21,9 +24,12 @@ })

"{test: 'interpol'}); } }</script>"
assert.deepEqual(Lexer.extract(content), [{
key: 'second {test}',
test: 'interpol'
}, {
key: 'first {test}',
test: 'station'
}])
assert.deepEqual(Lexer.extract(content), [
{
key: 'second {test}',
test: 'interpol',
},
{
key: 'first {test}',
test: 'station',
},
])
done()

@@ -37,11 +43,26 @@ })

"{ mounted() { this.$i18n.t('second', {count: 2}); } }</script>"
assert.deepEqual(Lexer.extract(content), [{
key: 'second',
count: '2'
}, {
key: 'first',
count: '5'
}])
assert.deepEqual(Lexer.extract(content), [
{
key: 'second',
count: '2',
},
{
key: 'first',
count: '5',
},
])
done()
})
it('extracts custom options', (done) => {
const Lexer = new VueLexer()
const content =
"<template><p>{{ $t('first', {description: 'test'}) }}</p><template><script>export default " +
"{ mounted() { this.$i18n.t('second'); } }</script>"
assert.deepEqual(Lexer.extract(content), [
{ key: 'second' },
{ key: 'first', description: 'test' },
])
done()
})
})

@@ -18,5 +18,5 @@ import { assert } from 'chai'

),
path: 'file.js'
path: 'file.js',
})
i18nextParser.once('data', file => {
i18nextParser.once('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -37,9 +37,7 @@ result = JSON.parse(file.contents)

const fakeFile = new Vinyl({
contents: Buffer.from(
"t(\n 'first'\n)\n t('second'); t(\n\n'third')"
),
path: 'file.js'
contents: Buffer.from("t(\n 'first'\n)\n t('second'); t(\n\n'third')"),
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -62,6 +60,6 @@ result = JSON.parse(file.contents)

contents: Buffer.from("t('first', {context: 'female'})"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -73,3 +71,3 @@ result = JSON.parse(file.contents)

assert.deepEqual(result, {
first_female: ''
first_female: '',
})

@@ -89,3 +87,3 @@ done()

),
path: 'file.html'
path: 'file.html',
})

@@ -99,6 +97,6 @@ const expected = {

sixth: '',
selfClosing: ''
selfClosing: '',
}
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -122,3 +120,3 @@ result = JSON.parse(file.contents)

),
path: 'file.hbs'
path: 'file.hbs',
})

@@ -132,6 +130,6 @@ const expected = {

sixth: '',
seventh: 'defaultValue'
seventh: 'defaultValue',
}
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -156,3 +154,3 @@ result = JSON.parse(file.contents)

),
path: 'file.js'
path: 'file.js',
})

@@ -163,6 +161,6 @@ const expected = {

third: '{{var}} defaultValue',
fourth: ''
fourth: '',
}
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -187,3 +185,3 @@ result = JSON.parse(file.contents)

),
path: 'react.jsx'
path: 'react.jsx',
})

@@ -194,15 +192,20 @@ const expected = {

third: {
first: 'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
second: ' <1>Hello,</1> this shouldn\'t be trimmed.',
third: '<0>Hello,</0>this should be trimmed.<2> and this shoudln\'t</2>'
first:
'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
first_plural:
'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
second: " <1>Hello,</1> this shouldn't be trimmed.",
third: "<0>Hello,</0>this should be trimmed.<2> and this shoudln't</2>",
},
fourth: '',
fifth: '',
fifth_plural: '',
bar: '',
foo: '',
"This should be part of the value and the key": "This should be part of the value and the key",
"don't split {{on}}": "don't split {{on}}"
'This should be part of the value and the key':
'This should be part of the value and the key',
"don't split {{on}}": "don't split {{on}}",
}
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
// support for a default Namespace

@@ -228,3 +231,3 @@ if (file.relative.endsWith(path.normalize('en/react.json'))) {

),
path: 'typescript.tsx'
path: 'typescript.tsx',
})

@@ -235,15 +238,20 @@ const expected = {

third: {
first: 'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
second: ' <1>Hello,</1> this shouldn\'t be trimmed.',
third: '<0>Hello,</0>this should be trimmed.<2> and this shoudln\'t</2>'
first:
'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
first_plural:
'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
second: " <1>Hello,</1> this shouldn't be trimmed.",
third: "<0>Hello,</0>this should be trimmed.<2> and this shoudln't</2>",
},
fourth: '',
fifth: '',
fifth_plural: '',
bar: '',
foo: '',
"This should be part of the value and the key": "This should be part of the value and the key",
"don't split {{on}}": "don't split {{on}}"
'This should be part of the value and the key':
'This should be part of the value and the key',
"don't split {{on}}": "don't split {{on}}",
}
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -265,3 +273,3 @@ result = JSON.parse(file.contents)

locales: ['en', 'de', 'fr'],
defaultNamespace: 'default'
defaultNamespace: 'default',
})

@@ -272,6 +280,6 @@ const fakeFile = new Vinyl({

),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
results.push(file.relative.replace(/locales[\//\\]/, ''))

@@ -289,3 +297,3 @@ })

'fr/ns1.json',
'fr/ns2.json'
'fr/ns2.json',
]

@@ -310,6 +318,6 @@ let length = expectedFiles.length

),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -333,9 +341,7 @@ result = JSON.parse(file.contents)

const fakeFile = new Vinyl({
contents: Buffer.from(
"t('escaped backslash\\\\ newline\\n\\r tab\\t')"
),
path: 'file.js'
contents: Buffer.from("t('escaped backslash\\\\ newline\\n\\r tab\\t')"),
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -357,9 +363,7 @@ result = JSON.parse(file.contents)

const fakeFile = new Vinyl({
contents: Buffer.from(
"t('first')"
),
path: 'file.js'
contents: Buffer.from("t('first')"),
path: 'file.js',
})
i18nextParser.once('data', file => {
i18nextParser.once('data', (file) => {
assert(file.isBuffer())

@@ -374,13 +378,16 @@ done()

let result, resultOld
const i18nextParser = new i18nTransform({ output: 'test/locales/$LOCALE/$NAMESPACE.json' })
const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/$NAMESPACE.json',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('test_merge:first'); t('test_merge:second')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(path.normalize('en/test_merge.json'))) {
result = JSON.parse(file.contents)
}
else if (file.relative.endsWith(path.normalize('en/test_merge_old.json'))) {
} else if (
file.relative.endsWith(path.normalize('en/test_merge_old.json'))
) {
resultOld = JSON.parse(file.contents)

@@ -401,9 +408,11 @@ }

let resultFR
const i18nextParser = new i18nTransform({ output: 'test/locales/$LOCALE/$NAMESPACE.json' })
const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/$NAMESPACE.json',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('test_leak:first'); t('test_leak:second')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(path.normalize('en/test_leak.json'))) {

@@ -427,6 +436,8 @@ resultEN = JSON.parse(file.contents)

let result
const i18nextParser = new i18nTransform({ output: 'test/locales/$LOCALE/$NAMESPACE.json' })
const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/$NAMESPACE.json',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('test_context:first')"),
path: 'file.js'
path: 'file.js',
})

@@ -437,6 +448,6 @@

first_context1: 'first context1',
first_context2: ''
first_context2: '',
}
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(path.normalize('en/test_context.json'))) {

@@ -455,17 +466,26 @@ result = JSON.parse(file.contents)

it('saves unused translations in the old catalog', (done) => {
const i18nextParser = new i18nTransform({ output: 'test/locales/$LOCALE/$NAMESPACE.json' })
const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/$NAMESPACE.json',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('test_old:parent.third', 'third'), t('test_old:fourth', 'fourth')"),
path: 'file.js'
contents: Buffer.from(
"t('test_old:parent.third', 'third'), t('test_old:fourth', 'fourth')"
),
path: 'file.js',
})
const expectedResult = { parent: { third: 'third' }, fourth: 'fourth' }
const expectedResultOld = { parent: { first: 'first', some: 'some' }, second: 'second', other: 'other' }
const expectedResultOld = {
parent: { first: 'first', some: 'some' },
second: 'second',
other: 'other',
}
let result, resultOld;
i18nextParser.on('data', file => {
let result, resultOld
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(path.normalize('en/test_old.json'))) {
result = JSON.parse(file.contents)
}
else if (file.relative.endsWith(path.normalize('en/test_old_old.json'))) {
} else if (
file.relative.endsWith(path.normalize('en/test_old_old.json'))
) {
resultOld = JSON.parse(file.contents)

@@ -484,6 +504,10 @@ }

it('restores translations from the old catalog', (done) => {
const i18nextParser = new i18nTransform({ output: 'test/locales/$LOCALE/$NAMESPACE.json' })
const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/$NAMESPACE.json',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('test_old:parent.some', 'random'), t('test_old:other', 'random')"),
path: 'file.js'
contents: Buffer.from(
"t('test_old:parent.some', 'random'), t('test_old:other', 'random')"
),
path: 'file.js',
})

@@ -494,8 +518,9 @@

let result, resultOld;
i18nextParser.on('data', file => {
let result, resultOld
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(path.normalize('en/test_old.json'))) {
result = JSON.parse(file.contents)
}
else if (file.relative.endsWith(path.normalize('en/test_old_old.json'))) {
} else if (
file.relative.endsWith(path.normalize('en/test_old_old.json'))
) {
resultOld = JSON.parse(file.contents)

@@ -515,8 +540,8 @@ }

let result
const i18nextParser = new i18nTransform({ output: 'test/locales/$LOCALE/$NAMESPACE.json' })
const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/$NAMESPACE.json',
})
const fakeFile = new Vinyl({
contents: Buffer.from(
"t('test_plural:first'); t('test_plural:second')"
),
path: 'file.js'
contents: Buffer.from("t('test_plural:first'); t('test_plural:second')"),
path: 'file.js',
})

@@ -529,6 +554,6 @@

second_0: 'second plural 0',
second_12: 'second plural 12'
second_12: 'second plural 12',
}
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(path.normalize('en/test_plural.json'))) {

@@ -548,6 +573,8 @@ result = JSON.parse(file.contents)

let result
const i18nextParser = new i18nTransform({ output: 'test/locales/$LOCALE/$NAMESPACE.json' })
const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/$NAMESPACE.json',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('test_context_plural:first')"),
path: 'file.js'
path: 'file.js',
})

@@ -560,4 +587,6 @@

i18nextParser.on('data', file => {
if (file.relative.endsWith(path.normalize('en/test_context_plural.json'))) {
i18nextParser.on('data', (file) => {
if (
file.relative.endsWith(path.normalize('en/test_context_plural.json'))
) {
result = JSON.parse(file.contents)

@@ -580,10 +609,10 @@ }

defaultNamespace: 'default',
output: 'locales/$LOCALE/p-$LOCALE-$NAMESPACE.$LOCALE.i18n'
output: 'locales/$LOCALE/p-$LOCALE-$NAMESPACE.$LOCALE.i18n',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('fourth')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
result = file.relative.replace(/locales[\\\/]/, '')

@@ -603,3 +632,3 @@ })

namespaceSeparator: '?',
keySeparator: '-'
keySeparator: '-',
})

@@ -610,6 +639,6 @@ const fakeFile = new Vinyl({

),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(path.normalize('en/test_separators.json'))) {

@@ -631,10 +660,10 @@ result = JSON.parse(file.contents)

namespaceSeparator: false,
keySeparator: false
keySeparator: false,
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('Status: loading...')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -655,10 +684,10 @@ result = JSON.parse(file.contents)

const i18nextParser = new i18nTransform({
defaultValue: 'NOT_TRANSLATED'
defaultValue: 'NOT_TRANSLATED',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('first')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -679,10 +708,10 @@ result = JSON.parse(file.contents)

const i18nextParser = new i18nTransform({
lineEnding: '\r\n'
lineEnding: '\r\n',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('first')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -703,10 +732,10 @@ result = file.contents.toString()

const i18nextParser = new i18nTransform({
lineEnding: '\r\n'
lineEnding: '\r\n',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('first')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -727,3 +756,3 @@ result = file.contents.toString()

const i18nextParser = new i18nTransform({
reactNamespace: true
reactNamespace: true,
})

@@ -734,3 +763,3 @@ const fakeFile = new Vinyl({

),
path: 'react.js'
path: 'react.js',
})

@@ -741,15 +770,21 @@ const expected = {

third: {
first: 'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
second: ' <1>Hello,</1> this shouldn\'t be trimmed.',
third: '<0>Hello,</0>this should be trimmed.<2> and this shoudln\'t</2>'
first:
'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
first_plural:
'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
second: " <1>Hello,</1> this shouldn't be trimmed.",
third:
"<0>Hello,</0>this should be trimmed.<2> and this shoudln't</2>",
},
fourth: '',
fifth: '',
fifth_plural: '',
bar: '',
foo: '',
"This should be part of the value and the key": "This should be part of the value and the key",
"don't split {{on}}": "don't split {{on}}"
'This should be part of the value and the key':
'This should be part of the value and the key',
"don't split {{on}}": "don't split {{on}}",
}
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
// support for a default Namespace

@@ -776,6 +811,6 @@ if (file.relative.endsWith(path.normalize('en/react.json'))) {

transSupportBasicHtmlNodes: true,
transKeepBasicHtmlNodesFor: ['strong', 'b']
}
]
}
transKeepBasicHtmlNodesFor: ['strong', 'b'],
},
],
},
})

@@ -786,3 +821,3 @@ const fakeFile = new Vinyl({

),
path: 'react.jsx'
path: 'react.jsx',
})

@@ -793,15 +828,21 @@ const expected = {

third: {
first: 'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
second: ' <b>Hello,</b> this shouldn\'t be trimmed.',
third: '<b>Hello,</b>this should be trimmed.<2> and this shoudln\'t</2>'
first:
'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
first_plural:
'Hello <1>{{name}}</1>, you have {{count}} unread message. <5>Go to messages</5>.',
second: " <b>Hello,</b> this shouldn't be trimmed.",
third:
"<b>Hello,</b>this should be trimmed.<2> and this shoudln't</2>",
},
fourth: '',
fifth: '',
fifth_plural: '',
bar: '',
foo: '',
"This should be part of the value and the key": "This should be part of the value and the key",
"don't split {{on}}": "don't split {{on}}"
'This should be part of the value and the key':
'This should be part of the value and the key',
"don't split {{on}}": "don't split {{on}}",
}
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
// support for a default Namespace

@@ -820,14 +861,13 @@ if (file.relative.endsWith(path.normalize('en/react.json'))) {

it('supports outputing to yml', (done) => {
let result
const i18nextParser = new i18nTransform({
output: 'locales/$LOCALE/$NAMESPACE.yml'
output: 'locales/$LOCALE/$NAMESPACE.yml',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('first')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(path.normalize('en/translation.yml'))) {

@@ -848,10 +888,10 @@ result = file.contents.toString('utf8')

const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/test.yml'
output: 'test/locales/$LOCALE/test.yml',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('first')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(path.normalize('en/test.yml'))) {

@@ -872,10 +912,10 @@ result = file.contents.toString('utf8')

const i18nextParser = new i18nTransform({
indentation: 6
indentation: 6,
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('first')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -886,3 +926,6 @@ result = file.contents.toString('utf8')

i18nextParser.once('end', () => {
assert.deepEqual(result.replace(/\r\n/g, '\n').split('\n')[1], ' "first": ""')
assert.deepEqual(
result.replace(/\r\n/g, '\n').split('\n')[1],
' "first": ""'
)
done()

@@ -898,12 +941,10 @@ })

defaultNamespace: 'default',
createOldCatalogs: false
createOldCatalogs: false,
})
const fakeFile = new Vinyl({
contents: Buffer.from(
"t('ns1:first'); t('second') \n t('fourth')"
),
path: 'file.js'
contents: Buffer.from("t('ns1:first'); t('second') \n t('fourth')"),
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
results.push(file.relative.replace(/locales[\\\/]/, ''))

@@ -916,3 +957,3 @@ })

'fr/default.json',
'fr/ns1.json'
'fr/ns1.json',
]

@@ -940,6 +981,6 @@ let length = expectedFiles.length

),
path: 'file.js'
path: 'file.js',
})
i18nextParser.once('data', file => {
i18nextParser.once('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -955,4 +996,4 @@ result = JSON.parse(file.contents)

six: {
seven: 'six.seven'
}
seven: 'six.seven',
},
})

@@ -969,6 +1010,6 @@ done()

contents: Buffer.from("t('test {{count}}', { count: 1 })"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -981,3 +1022,3 @@ result = JSON.parse(file.contents)

'test {{count}}': '',
'test {{count}}_plural': ''
'test {{count}}_plural': '',
})

@@ -995,6 +1036,6 @@ done()

contents: Buffer.from("t('test {{count}}', { count: 1 })"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(arLibraryPath)) {

@@ -1026,6 +1067,6 @@ result = JSON.parse(file.contents)

contents: Buffer.from("t('test {{count}}', { count: 1 })"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -1038,3 +1079,3 @@ result = JSON.parse(file.contents)

'test {{count}}': 'test {{count}}',
'test {{count}}_plural': 'test {{count}}'
'test {{count}}_plural': 'test {{count}}',
})

@@ -1051,10 +1092,10 @@ done()

useKeysAsDefaultValue: true,
locales: ['ar']
locales: ['ar'],
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('test {{count}}', { count: 1 })"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(arLibraryPath)) {

@@ -1079,6 +1120,6 @@ result = JSON.parse(file.contents)

it("supports skipDefaultValues option", (done) => {
it('supports skipDefaultValues option', (done) => {
let result
const i18nextParser = new i18nTransform({
skipDefaultValues: true
skipDefaultValues: true,
})

@@ -1089,8 +1130,8 @@

"t('headline1', 'There will be a headline here.') \n" +
"t('headline2', {defaultValue: 'Another Headline here'}})",
"t('headline2', {defaultValue: 'Another Headline here'}})"
),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
result = JSON.parse(file.contents)

@@ -1101,4 +1142,4 @@ })

assert.deepEqual(result, {
'headline1': '',
'headline2': '',
headline1: '',
headline2: '',
})

@@ -1110,4 +1151,42 @@

i18nextParser.end(fakeFile)
});
})
it('supports customValueTemplate option', (done) => {
let result
const i18nextParser = new i18nTransform({
customValueTemplate: {
message: '${defaultValue}',
description: '${max}',
},
})
const fakeFile = new Vinyl({
contents: Buffer.from(
"t('test'); t('salt', {defaultValue: 'salty', max: 150})"
),
path: 'file.js',
})
i18nextParser.on('data', (file) => {
result = JSON.parse(file.contents)
})
i18nextParser.once('end', () => {
assert.deepEqual(result, {
test: {
message: '',
description: '',
},
salt: {
message: 'salty',
description: '150',
},
})
done()
})
i18nextParser.end(fakeFile)
})
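Editor's note — an illustrative sketch, not part of the diff: the customValueTemplate test above turns each catalog value into an object, where the ${defaultValue} placeholder resolves to the entry's default value and any other ${name} placeholder is filled from the matching property on the extracted entry (max in the second t() call). A minimal configuration along the lines of that test:

const i18nextParser = new i18nTransform({
  customValueTemplate: {
    message: '${defaultValue}',
    description: '${max}',
  },
})
// with "t('salt', {defaultValue: 'salty', max: 150})" the catalog entry becomes:
// salt: { message: 'salty', description: '150' }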
describe('lexers', () => {

@@ -1121,13 +1200,13 @@ it('support custom lexers options', (done) => {

lexer: 'JavascriptLexer',
functions: ['bla', '_e']
}
]
}
functions: ['bla', '_e'],
},
],
},
})
const fakeFile = new Vinyl({
contents: Buffer.from("bla('first'); _e('second')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -1148,6 +1227,6 @@ result = JSON.parse(file.contents)

extract(content) {
return content.split(';').map(key => ({ key }))
return content.split(';').map((key) => ({ key }))
}
on() { }
on() {}
}

@@ -1158,11 +1237,11 @@

lexers: {
js: [CustomLexer]
}
js: [CustomLexer],
},
})
const fakeFile = new Vinyl({
contents: Buffer.from("first;second"),
path: 'file.js'
contents: Buffer.from('first;second'),
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -1183,10 +1262,10 @@ result = JSON.parse(file.contents)

constructor(options) {
this.delimiter = options.delimiter;
this.delimiter = options.delimiter
}
extract(content) {
return content.split(this.delimiter).map(key => ({ key }))
return content.split(this.delimiter).map((key) => ({ key }))
}
on() { }
on() {}
}
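Editor's note — an illustrative sketch, not part of the diff: a custom lexer class like the one above is registered through the lexers option, and any extra properties in its entry are handed to the constructor as options (the hunk below forwards delimiter this way):

const i18nextParser = new i18nTransform({
  lexers: {
    js: [
      {
        lexer: CustomLexer, // the class defined above
        delimiter: '@', // received in the constructor as options.delimiter
      },
    ],
  },
})
// a file containing 'first@second' then yields the keys 'first' and 'second'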

@@ -1200,13 +1279,13 @@

lexer: CustomLexer,
delimiter: '@'
}
]
}
delimiter: '@',
},
],
},
})
const fakeFile = new Vinyl({
contents: Buffer.from("first@second"),
path: 'file.js'
contents: Buffer.from('first@second'),
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -1233,6 +1312,6 @@ result = JSON.parse(file.contents)

),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -1254,3 +1333,3 @@ result = JSON.parse(file.contents)

const i18nextParser = new i18nTransform({
sort: true
sort: true,
})

@@ -1261,6 +1340,6 @@ const fakeFile = new Vinyl({

),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('data', file => {
i18nextParser.on('data', (file) => {
if (file.relative.endsWith(enLibraryPath)) {

@@ -1287,6 +1366,6 @@ result = JSON.parse(file.contents)

contents: Buffer.from('content'),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('reading', file => {
i18nextParser.on('reading', (file) => {
result = file.path

@@ -1302,9 +1381,11 @@ })

it('emits a `error` event if the catalog is not valid json', (done) => {
const i18nextParser = new i18nTransform({ output: 'test/locales/$LOCALE/$NAMESPACE.json' })
const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/$NAMESPACE.json',
})
const fakeFile = new Vinyl({
contents: Buffer.from("t('test_invalid:content')"),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('error', error => {
i18nextParser.on('error', (error) => {
assert.equal(error.message.startsWith('Unexpected token /'), true)

@@ -1320,6 +1401,6 @@ done()

contents: Buffer.from('content'),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('error', error => {
i18nextParser.on('error', (error) => {
assert.equal(error.message, "Lexer 'fakeLexer' does not exist")

@@ -1332,9 +1413,11 @@ done()

it('emits a `warning` event if a key contains a variable', (done) => {
const i18nextParser = new i18nTransform({ output: 'test/locales/$LOCALE/$NAMESPACE.json' })
const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/$NAMESPACE.json',
})
const fakeFile = new Vinyl({
contents: Buffer.from('t(variable)'),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('warning', message => {
i18nextParser.on('warning', (message) => {
assert.equal(message, 'Key is not a string literal: variable')

@@ -1347,10 +1430,15 @@ done()

it('emits a `warning` event if a react value contains two variables', (done) => {
const i18nextParser = new i18nTransform({ output: 'test/locales/$LOCALE/$NAMESPACE.json' })
const i18nextParser = new i18nTransform({
output: 'test/locales/$LOCALE/$NAMESPACE.json',
})
const fakeFile = new Vinyl({
contents: Buffer.from('<Trans>{{ key1, key2 }}</Trans>'),
path: 'file.js'
path: 'file.js',
})
i18nextParser.on('warning', message => {
assert.equal(message, 'The passed in object contained more than one variable - the object should look like {{ value, format }} where format is optional.')
i18nextParser.on('warning', (message) => {
assert.equal(
message,
'The passed in object contained more than one variable - the object should look like {{ value, format }} where format is optional.'
)
done()

@@ -1357,0 +1445,0 @@ })
