i18next-parser
Comparing version 1.0.0-beta12 to 1.0.0-beta13
@@ -92,3 +92,6 @@ #!/usr/bin/env node | ||
.on('error', function (message, region) { | ||
console.log(' [error] '.red + message + ': ' + region.trim()) | ||
if (typeof region === 'string') { | ||
message += ': ' + region.trim() | ||
} | ||
console.log(' [error] '.red + message) | ||
}) | ||
@@ -95,0 +98,0 @@ .on('finish', function () { |
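The hunk above changes the CLI's error handler so the offending region is only appended to the message when it is actually a string. A minimal sketch of the resulting handler, assuming the transform stream is held in a variable named `parser` and the `colors` dependency has been loaded so `.red` is available on strings:

    parser.on('error', function (message, region) {
      // Only append the region when it is a string, so a missing or
      // non-string region no longer crashes on `.trim()`.
      if (typeof region === 'string') {
        message += ': ' + region.trim()
      }
      console.log(' [error] '.red + message)
    })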
# Changelog | ||
## 1.0.0-beta12 - latest | ||
## 1.0.0-beta13 - latest | ||
@@ -5,0 +5,0 @@ - The changelog for the beta can be found in the [releases](https://github.com/i18next/i18next-parser/releases) |
@@ -1,2 +0,4 @@ | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _acornJsx = require('acorn-jsx');var acorn = _interopRequireWildcard(_acornJsx); | ||
var _walk = require('acorn/dist/walk');var walk = _interopRequireWildcard(_walk); | ||
var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _interopRequireWildcard(obj) {if (obj && obj.__esModule) {return obj;} else {var newObj = {};if (obj != null) {for (var key in obj) {if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key];}}newObj.default = obj;return newObj;}}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
@@ -7,150 +9,94 @@ JavascriptLexer = function (_BaseLexer) {_inherits(JavascriptLexer, _BaseLexer); | ||
_this.acornOptions = _extends({ sourceType: 'module' }, options.acorn); | ||
_this.functions = options.functions || ['t']; | ||
_this.createFunctionRegex(); | ||
_this.createArgumentsRegex(); | ||
_this.createHashRegex();return _this; | ||
_this.attr = options.attr || 'i18nKey';return _this; | ||
}_createClass(JavascriptLexer, [{ key: 'extract', value: function extract( | ||
content) { | ||
var matches = void 0; | ||
var that = this; | ||
while (matches = this.functionRegex.exec(content)) { | ||
var args = this.parseArguments(matches[1] || matches[2]); | ||
this.populateKeysFromArguments(args); | ||
} | ||
walk.simple( | ||
acorn.parse(content, this.acornOptions), | ||
{ | ||
CallExpression: function CallExpression(node) { | ||
that.expressionExtractor.call(that, node); | ||
} }); | ||
return this.keys; | ||
} }, { key: 'parseArguments', value: function parseArguments( | ||
} }, { key: 'expressionExtractor', value: function expressionExtractor( | ||
args) { | ||
var matches = void 0; | ||
var result = { | ||
arguments: [], | ||
options: {} }; | ||
node) { | ||
var entry = {}; | ||
var isTranslationFunction = | ||
node.callee && ( | ||
this.functions.includes(node.callee.name) || | ||
node.callee.property && this.functions.includes(node.callee.property.name)); | ||
while (matches = this.argumentsRegex.exec(args)) { | ||
var arg = matches[1]; | ||
if (arg.startsWith('{')) { | ||
var optionMatches = void 0; | ||
while (optionMatches = this.hashRegex.exec(args)) { | ||
var key = optionMatches[2]; | ||
var value = optionMatches[3]; | ||
if (this.validateString(value)) { | ||
result.options[key] = value.slice(1, -1); | ||
} | ||
if (isTranslationFunction) { | ||
var keyArgument = node.arguments.shift(); | ||
if (keyArgument && keyArgument.type === 'Literal') { | ||
entry.key = keyArgument.value; | ||
} else | ||
if (keyArgument && keyArgument.type === 'BinaryExpression') { | ||
var concatenatedString = this.concatenateString(keyArgument); | ||
if (!concatenatedString) { | ||
this.emit('warning', 'Key is not a string litteral: ' + keyArgument.name); | ||
return; | ||
} | ||
entry.key = concatenatedString; | ||
} else | ||
{ | ||
arg = this.concatenateString(arg); | ||
if (keyArgument.type === 'Identifier') { | ||
this.emit('warning', 'Key is not a string litteral: ' + keyArgument.name); | ||
} | ||
return; | ||
} | ||
result.arguments.push(arg); | ||
var optionsArgument = node.arguments.shift(); | ||
if (optionsArgument && optionsArgument.type === 'Literal') { | ||
entry.defaultValue = optionsArgument.value; | ||
} else | ||
if (optionsArgument && optionsArgument.type === 'ObjectExpression') { | ||
optionsArgument.properties.forEach(function (p) { | ||
entry[p.key.name || p.key.value] = p.value.value; | ||
}); | ||
} | ||
this.keys.push(entry); | ||
} | ||
return result; | ||
} }, { key: 'concatenateString', value: function concatenateString( | ||
string) {var _this2 = this; | ||
string = string.trim(); | ||
var matches = void 0; | ||
var containsVariable = false; | ||
var parts = []; | ||
var quotationMark = string.charAt(0) === '"' ? '"' : "'"; | ||
binaryExpression) {var string = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; | ||
if (binaryExpression.operator !== '+') { | ||
return; | ||
} | ||
var regex = new RegExp(JavascriptLexer.concatenatedSegmentPattern, 'gi'); | ||
while (matches = regex.exec(string)) { | ||
var match = matches[0].trim(); | ||
if (match !== '+') { | ||
parts.push(match); | ||
} | ||
if (binaryExpression.left.type === 'BinaryExpression') { | ||
string += this.concatenateString(binaryExpression.left, string); | ||
} else | ||
if (binaryExpression.left.type === 'Literal') { | ||
string += binaryExpression.left.value; | ||
} else | ||
{ | ||
return; | ||
} | ||
var result = parts.reduce(function (concatenatedString, x) { | ||
x = x && x.trim(); | ||
if (_this2.validateString(x)) { | ||
concatenatedString += x.slice(1, -1); | ||
} else | ||
{ | ||
containsVariable = true; | ||
} | ||
return concatenatedString; | ||
}, ''); | ||
if (!result || containsVariable) { | ||
return string; | ||
if (binaryExpression.right.type === 'BinaryExpression') { | ||
string += this.concatenateString(binaryExpression.right, string); | ||
} else | ||
if (binaryExpression.right.type === 'Literal') { | ||
string += binaryExpression.right.value; | ||
} else | ||
{ | ||
return quotationMark + result + quotationMark; | ||
return; | ||
} | ||
} }, { key: 'createFunctionRegex', value: function createFunctionRegex() | ||
{ | ||
var pattern = | ||
'(?:\\W|^)' + | ||
this.functionPattern() + '\\s*\\(\\s*' + | ||
JavascriptLexer.stringOrVariableOrHashPattern + | ||
'\\s*\\)'; | ||
this.functionRegex = new RegExp(pattern, 'gi'); | ||
return this.functionRegex; | ||
} }, { key: 'createArgumentsRegex', value: function createArgumentsRegex() | ||
{ | ||
var pattern = | ||
'(' + | ||
[ | ||
JavascriptLexer.concatenatedArgumentPattern, | ||
JavascriptLexer.hashPattern]. | ||
join('|') + | ||
')' + | ||
'(?:\\s*,\\s*)?'; | ||
this.argumentsRegex = new RegExp(pattern, 'gi'); | ||
return this.argumentsRegex; | ||
} }, { key: 'createHashRegex', value: function createHashRegex() | ||
{ | ||
var pattern = | ||
'(?:(\'|")?(' + | ||
['context', 'defaultValue'].join('|') + | ||
')\\1)' + | ||
'(?:\\s*:\\s*)' + | ||
'(' + _baseLexer2.default.stringPattern + ')'; | ||
this.hashRegex = new RegExp(pattern, 'gi'); | ||
return this.hashRegex; | ||
} }], [{ key: 'concatenatedSegmentPattern', get: function get() {return [_baseLexer2.default.singleQuotePattern, _baseLexer2.default.doubleQuotePattern, _baseLexer2.default.backQuotePattern, _baseLexer2.default.variablePattern, '(?:\\s*\\+\\s*)' // support for concatenation via + | ||
].join('|');} }, { key: 'concatenatedArgumentPattern', get: function get() {return '(' + '(?:' + JavascriptLexer.concatenatedSegmentPattern + ')+' + ')';} }, { key: 'hashPattern', get: function get() {return '(\\{.*\\})';} }, { key: 'stringOrVariableOrHashPattern', get: function get() {return '(' + '(' + '(?:' + [JavascriptLexer.concatenatedArgumentPattern, JavascriptLexer.hashPattern].join('|') + ')' + '(?:\\s*,\\s*)?' + ')+' + ')';} }]);return JavascriptLexer;}(_baseLexer2.default);exports.default = JavascriptLexer;module.exports = exports['default']; | ||
return string; | ||
} }]);return JavascriptLexer;}(_baseLexer2.default);exports.default = JavascriptLexer;module.exports = exports['default']; |
'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _acornJsx = require('acorn-jsx');var acorn = _interopRequireWildcard(_acornJsx); | ||
var _assert = require('assert');var _assert2 = _interopRequireDefault(_assert); | ||
var _htmlLexer = require('./html-lexer');var _htmlLexer2 = _interopRequireDefault(_htmlLexer); | ||
var _baseLexer = require('./base-lexer');var _baseLexer2 = _interopRequireDefault(_baseLexer); | ||
var _helpers = require('../helpers');function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _interopRequireWildcard(obj) {if (obj && obj.__esModule) {return obj;} else {var newObj = {};if (obj != null) {for (var key in obj) {if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key];}}newObj.default = obj;return newObj;}}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;}var | ||
var _walk = require('acorn/dist/walk');var walk = _interopRequireWildcard(_walk); | ||
var _javascriptLexer = require('./javascript-lexer');var _javascriptLexer2 = _interopRequireDefault(_javascriptLexer);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}function _interopRequireWildcard(obj) {if (obj && obj.__esModule) {return obj;} else {var newObj = {};if (obj != null) {for (var key in obj) {if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key];}}newObj.default = obj;return newObj;}}function _classCallCheck(instance, Constructor) {if (!(instance instanceof Constructor)) {throw new TypeError("Cannot call a class as a function");}}function _possibleConstructorReturn(self, call) {if (!self) {throw new ReferenceError("this hasn't been initialised - super() hasn't been called");}return call && (typeof call === "object" || typeof call === "function") ? call : self;}function _inherits(subClass, superClass) {if (typeof superClass !== "function" && superClass !== null) {throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);}subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } });if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;} | ||
JsxLexer = function (_HTMLLexer) {_inherits(JsxLexer, _HTMLLexer); | ||
function JsxLexer() {var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};_classCallCheck(this, JsxLexer); | ||
options.attr = options.attr || 'i18nKey';return _possibleConstructorReturn(this, (JsxLexer.__proto__ || Object.getPrototypeOf(JsxLexer)).call(this, | ||
var JSXParserExtension = Object.assign({}, walk.base, { | ||
JSXText: function JSXText(node, st, c) { | ||
// We need this catch, but we don't need the catch to do anything. | ||
}, | ||
JSXElement: function JSXElement(node, st, c) { | ||
node.openingElement.attributes.forEach(function (attr) {return c(attr, st, attr.type);}); | ||
node.children.forEach(function (child) {return c(child, st, child.type);}); | ||
}, | ||
JSXExpressionContainer: function JSXExpressionContainer(node, st, c) { | ||
c(node.expression, st, node.expression.type); | ||
}, | ||
JSXAttribute: function JSXAttribute(node, st, c) { | ||
if (node.value !== null) { | ||
c(node.value, st, node.value.type); | ||
} | ||
}, | ||
JSXSpreadAttribute: function JSXSpreadAttribute(node, st, c) { | ||
c(node.argument, st, node.argument.type); | ||
} });var | ||
JsxLexer = function (_JavascriptLexer) {_inherits(JsxLexer, _JavascriptLexer); | ||
function JsxLexer() {var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};_classCallCheck(this, JsxLexer);var _this = _possibleConstructorReturn(this, (JsxLexer.__proto__ || Object.getPrototypeOf(JsxLexer)).call(this, | ||
options)); | ||
_this.acornOptions = _extends({ sourceType: 'module', plugins: { jsx: true } }, options.acorn);return _this; | ||
}_createClass(JsxLexer, [{ key: 'extract', value: function extract( | ||
content) { | ||
this.extractInterpolate(content); | ||
this.extractTrans(content); | ||
return this.keys; | ||
} }, { key: 'extractInterpolate', value: function extractInterpolate( | ||
var that = this; | ||
content) { | ||
var matches = void 0; | ||
var regex = new RegExp( | ||
'<Interpolate([^>]*\\s' + this.attr + '[^>]*)\\/?>', | ||
'gi'); | ||
walk.simple( | ||
acorn.parse(content, this.acornOptions), | ||
{ | ||
CallExpression: function CallExpression(node) { | ||
that.expressionExtractor.call(that, node); | ||
}, | ||
JSXElement: function JSXElement(node) { | ||
var element = node.openingElement; | ||
if (element.name.name === "Trans") { | ||
var entry = {}; | ||
var defaultValue = that.nodeToString.call(that, node, content); | ||
element.attributes.forEach(function (attr) { | ||
if (attr.name.name === that.attr) { | ||
entry.key = attr.value.value; | ||
} | ||
}); | ||
while (matches = regex.exec(content)) { | ||
var attrs = this.parseAttributes(matches[1]); | ||
var key = attrs.keys; | ||
if (key) { | ||
this.keys.push(_extends({}, attrs.options, { key: key })); | ||
} | ||
} | ||
if (defaultValue !== '') { | ||
entry.defaultValue = defaultValue; | ||
return this.keys; | ||
} }, { key: 'extractTrans', value: function extractTrans( | ||
if (!entry.key) | ||
entry.key = entry.defaultValue; | ||
} | ||
content) { | ||
var matches = void 0; | ||
var closingTagPattern = '(?:<Trans([^>]*\\s' + this.attr + '[^>]*?)\\/>)'; | ||
var selfClosingTagPattern = '(?:<Trans([^>]*\\s' + this.attr + '[^>]*?)>((?:\\s|.)*?)<\\/Trans>)'; | ||
var regex = new RegExp( | ||
[closingTagPattern, selfClosingTagPattern].join('|'), | ||
'gi'); | ||
if (entry.key) | ||
that.keys.push(entry); | ||
} else | ||
if (element.name.name === "Interpolate") { | ||
var _entry = {}; | ||
while (matches = regex.exec(content)) { | ||
var attrs = this.parseAttributes(matches[1] || matches[2]); | ||
var key = attrs.keys; | ||
element.attributes.forEach(function (attr) { | ||
if (attr.name.name === that.attr) { | ||
_entry.key = attr.value.value; | ||
} | ||
}); | ||
if (matches[3] && !attrs.options.defaultValue) { | ||
attrs.options.defaultValue = this.eraseTags(matches[0]).replace(/\s+/g, ' '); | ||
} | ||
if (_entry.key) | ||
that.keys.push(_entry); | ||
} | ||
} }, | ||
if (key) { | ||
this.keys.push(_extends({}, attrs.options, { key: key })); | ||
} | ||
} | ||
JSXParserExtension); | ||
return this.keys; | ||
} | ||
} }, { key: 'nodeToString', value: function nodeToString( | ||
/** | ||
* Recursively convert html tags and js injections to tags with the child index in it | ||
* @param {string} string | ||
* | ||
* @returns string | ||
*/ }, { key: 'eraseTags', value: function eraseTags( | ||
string) { | ||
var acornAst = acorn.parse(string, { plugins: { jsx: true } }); | ||
var acornTransAst = acornAst.body[0].expression; | ||
var children = this.parseAcornPayload(acornTransAst.children, string); | ||
ast, string) { | ||
var children = this.parseAcornPayload(ast.children, string); | ||
@@ -78,3 +93,3 @@ var elemsToString = function elemsToString(children) {return children.map(function (child, index) { | ||
case 'tag':return '<' + index + '>' + elemsToString(child.children) + '</' + index + '>'; | ||
default:throw new _helpers.ParsingError('Unknown parsed content: ' + child.type);} | ||
default:throw new ParsingError('Unknown parsed content: ' + child.type);} | ||
@@ -84,9 +99,4 @@ }).join('');}; | ||
return elemsToString(children); | ||
} | ||
} }, { key: 'parseAcornPayload', value: function parseAcornPayload( | ||
/** | ||
* Simplify the bulky AST given by Acorn | ||
* @param {*} children An array of elements contained inside an html tag | ||
* @param {string} originalString The original string being parsed | ||
*/ }, { key: 'parseAcornPayload', value: function parseAcornPayload( | ||
children, originalString) {var _this2 = this; | ||
@@ -113,5 +123,5 @@ return children.map(function (child) { | ||
{ | ||
throw new _helpers.ParsingError('Unknown ast element when parsing jsx: ' + child.type); | ||
throw new ParsingError('Unknown ast element when parsing jsx: ' + child.type); | ||
} | ||
}).filter(function (child) {return child.type !== 'text' || child.content;}); | ||
} }]);return JsxLexer;}(_htmlLexer2.default);exports.default = JsxLexer;module.exports = exports['default']; | ||
} }]);return JsxLexer;}(_javascriptLexer2.default);exports.default = JsxLexer;module.exports = exports['default']; |
@@ -16,3 +16,3 @@ 'use strict';Object.defineProperty(exports, "__esModule", { value: true });var _extends = Object.assign || function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;};var _createClass = function () {function defineProperties(target, props) {for (var i = 0; i < props.length; i++) {var descriptor = props[i];descriptor.enumerable = descriptor.enumerable || false;descriptor.configurable = true;if ("value" in descriptor) descriptor.writable = true;Object.defineProperty(target, descriptor.key, descriptor);}}return function (Constructor, protoProps, staticProps) {if (protoProps) defineProperties(Constructor.prototype, protoProps);if (staticProps) defineProperties(Constructor, staticProps);return Constructor;};}();var _events = require('events');var _events2 = _interopRequireDefault(_events); | ||
js: ['JavascriptLexer'], | ||
jsx: ['JavascriptLexer', 'JsxLexer'], | ||
jsx: ['JsxLexer'], | ||
mjs: ['JavascriptLexer'], | ||
@@ -56,3 +56,2 @@ | ||
lexerName = lexerConfig.lexer; | ||
delete lexerConfig.lexer; | ||
lexerOptions = lexerConfig; | ||
@@ -59,0 +58,0 @@ } |
@@ -5,3 +5,3 @@ { | ||
"name": "i18next-parser", | ||
"version": "1.0.0-beta12", | ||
"version": "1.0.0-beta13", | ||
"license": "MIT", | ||
@@ -21,2 +21,3 @@ "main": "dist/index.js", | ||
"dependencies": { | ||
"acorn": "^5.5.3", | ||
"acorn-jsx": "^4.1.1", | ||
@@ -23,0 +24,0 @@ "colors": "~1.2.0-rc0", |
@@ -5,3 +5,3 @@ # i18next Parser [![Build Status](https://travis-ci.org/i18next/i18next-parser.svg?branch=master)](https://travis-ci.org/i18next/i18next-parser) | ||
When translating an application, maintaining the catalog by hand is painful. This package automate the process. Don't let the name fool you, it was originally built with i18next in mind but it works well with other i18n libraries. | ||
When translating an application, maintaining the translation catalog by hand is painful. This package automates this process. | ||
@@ -13,11 +13,10 @@ | ||
- Choose your weapon: A CLI, a standalone parser or a stream transform | ||
- Three built in lexers: Javascript, HTML and Handlebars | ||
- 4 built in lexers: Javascript, JSX, HTML and Handlebars | ||
- Creates one catalog file per locale and per namespace | ||
- Backs up the old keys your code doesn't use anymore in `namespace_old.json` catalog. | ||
- Restores keys from the `_old` file if the one in the translation file is empty. | ||
- Backs up the old keys your code doesn't use anymore in `namespace_old.json` catalog | ||
- Restores keys from the `_old` file if the one in the translation file is empty | ||
- Supports i18next features: | ||
- **Context**: keys of the form `key_context` | ||
- **Plural**: keys of the form `key_plural` and `key_plural_0` | ||
- Behind the hood, it's a stream transform (so it works with gulp) | ||
- Supports es6 template strings (in addition to single/double quoted strings) with ${expression} placeholders | ||
- Tested on Node 6+ | ||
@@ -156,2 +155,3 @@ ## DISCLAMER: `1.0.0-beta` | ||
**functions** | Array of functions to match | `['t']` | ||
**acorn** | Options to pass to acorn | `{}` | ||
@@ -158,0 +158,0 @@ **`JsxLexer` options** |
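The README's options table now documents an `acorn` entry for the `JavascriptLexer`. A hypothetical configuration sketch combining the options visible in this diff (`functions`, `attr`, `acorn`) with the per-extension lexer config handling shown in `src/lexers/index.js`; the `ecmaVersion` value is only an example:

    const options = {
      lexers: {
        // each entry is a lexer name or a config object whose `lexer` field
        // picks the lexer and whose remaining fields become its options
        js: [{ lexer: 'JavascriptLexer', functions: ['t', '_e'], acorn: { ecmaVersion: 2018 } }],
        jsx: [{ lexer: 'JsxLexer', attr: 'i18nKey' }]
      }
    }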
@@ -0,1 +1,3 @@ | ||
import * as acorn from 'acorn-jsx' | ||
import * as walk from 'acorn/dist/walk' | ||
import BaseLexer from './base-lexer' | ||
@@ -7,16 +9,18 @@ | ||
this.acornOptions = { sourceType: 'module', ...options.acorn } | ||
this.functions = options.functions || ['t'] | ||
this.createFunctionRegex() | ||
this.createArgumentsRegex() | ||
this.createHashRegex() | ||
this.attr = options.attr || 'i18nKey' | ||
} | ||
extract(content) { | ||
let matches | ||
const that = this | ||
while (matches = this.functionRegex.exec(content)) { | ||
const args = this.parseArguments(matches[1] || matches[2]) | ||
this.populateKeysFromArguments(args) | ||
} | ||
walk.simple( | ||
acorn.parse(content, this.acornOptions), | ||
{ | ||
CallExpression(node) { | ||
that.expressionExtractor.call(that, node) | ||
} | ||
} | ||
) | ||
@@ -26,132 +30,75 @@ return this.keys | ||
parseArguments(args) { | ||
let matches | ||
const result = { | ||
arguments: [], | ||
options: {} | ||
} | ||
while (matches = this.argumentsRegex.exec(args)) { | ||
let arg = matches[1] | ||
expressionExtractor(node) { | ||
const entry = {} | ||
const isTranslationFunction = ( | ||
node.callee && ( | ||
this.functions.includes(node.callee.name) || | ||
node.callee.property && this.functions.includes(node.callee.property.name) | ||
) | ||
) | ||
if (isTranslationFunction) { | ||
const keyArgument = node.arguments.shift() | ||
if (arg.startsWith('{')) { | ||
let optionMatches | ||
while (optionMatches = this.hashRegex.exec(args)) { | ||
const key = optionMatches[2] | ||
let value = optionMatches[3] | ||
if (this.validateString(value)) { | ||
result.options[key] = value.slice(1, -1) | ||
} | ||
if (keyArgument && keyArgument.type === 'Literal') { | ||
entry.key = keyArgument.value | ||
} | ||
else if (keyArgument && keyArgument.type === 'BinaryExpression') { | ||
const concatenatedString = this.concatenateString(keyArgument) | ||
if (!concatenatedString) { | ||
this.emit('warning', `Key is not a string litteral: ${keyArgument.name}`) | ||
return | ||
} | ||
entry.key = concatenatedString | ||
} | ||
else { | ||
arg = this.concatenateString(arg) | ||
if (keyArgument.type === 'Identifier') { | ||
this.emit('warning', `Key is not a string litteral: ${keyArgument.name}`) | ||
} | ||
return | ||
} | ||
result.arguments.push(arg) | ||
} | ||
return result | ||
} | ||
concatenateString(string) { | ||
string = string.trim() | ||
let matches | ||
let containsVariable = false | ||
const parts = [] | ||
const quotationMark = string.charAt(0) === '"' ? '"' : "'" | ||
const regex = new RegExp(JavascriptLexer.concatenatedSegmentPattern, 'gi') | ||
while(matches = regex.exec(string)) { | ||
const match = matches[0].trim() | ||
if (match !== '+') { | ||
parts.push(match) | ||
} | ||
} | ||
const optionsArgument = node.arguments.shift() | ||
const result = parts.reduce((concatenatedString, x) => { | ||
x = x && x.trim() | ||
if (this.validateString(x)) { | ||
concatenatedString += x.slice(1, -1) | ||
if (optionsArgument && optionsArgument.type === 'Literal') { | ||
entry.defaultValue = optionsArgument.value | ||
} | ||
else { | ||
containsVariable = true | ||
else if (optionsArgument && optionsArgument.type === 'ObjectExpression') { | ||
optionsArgument.properties.forEach(p => { | ||
entry[p.key.name || p.key.value] = p.value.value | ||
}) | ||
} | ||
return concatenatedString | ||
}, '') | ||
if (!result || containsVariable) { | ||
return string | ||
this.keys.push(entry) | ||
} | ||
else { | ||
return quotationMark + result + quotationMark | ||
} | ||
} | ||
static get concatenatedSegmentPattern() { | ||
return [ | ||
BaseLexer.singleQuotePattern, | ||
BaseLexer.doubleQuotePattern, | ||
BaseLexer.backQuotePattern, | ||
BaseLexer.variablePattern, | ||
'(?:\\s*\\+\\s*)' // support for concatenation via + | ||
].join('|') | ||
} | ||
concatenateString(binaryExpression, string = '') { | ||
if (binaryExpression.operator !== '+') { | ||
return | ||
} | ||
static get concatenatedArgumentPattern() { | ||
return '(' + '(?:' + JavascriptLexer.concatenatedSegmentPattern + ')+' + ')' | ||
} | ||
if (binaryExpression.left.type === 'BinaryExpression') { | ||
string += this.concatenateString(binaryExpression.left, string) | ||
} | ||
else if (binaryExpression.left.type === 'Literal') { | ||
string += binaryExpression.left.value | ||
} | ||
else { | ||
return | ||
} | ||
static get hashPattern() { | ||
return '(\\{.*\\})' | ||
} | ||
if (binaryExpression.right.type === 'BinaryExpression') { | ||
string += this.concatenateString(binaryExpression.right, string) | ||
} | ||
else if (binaryExpression.right.type === 'Literal') { | ||
string += binaryExpression.right.value | ||
} | ||
else { | ||
return | ||
} | ||
static get stringOrVariableOrHashPattern() { | ||
return ( | ||
'(' + | ||
'(' + | ||
'(?:' + | ||
[ | ||
JavascriptLexer.concatenatedArgumentPattern, | ||
JavascriptLexer.hashPattern | ||
].join('|') + | ||
')' + | ||
'(?:\\s*,\\s*)?' + | ||
')+' + | ||
')' | ||
) | ||
return string | ||
} | ||
createFunctionRegex() { | ||
const pattern = ( | ||
'(?:\\W|^)' + | ||
this.functionPattern() + '\\s*\\(\\s*' + | ||
JavascriptLexer.stringOrVariableOrHashPattern + | ||
'\\s*\\)' | ||
) | ||
this.functionRegex = new RegExp(pattern, 'gi') | ||
return this.functionRegex | ||
} | ||
createArgumentsRegex() { | ||
const pattern = ( | ||
'(' + | ||
[ | ||
JavascriptLexer.concatenatedArgumentPattern, | ||
JavascriptLexer.hashPattern | ||
].join('|') + | ||
')' + | ||
'(?:\\s*,\\s*)?' | ||
) | ||
this.argumentsRegex = new RegExp(pattern, 'gi') | ||
return this.argumentsRegex | ||
} | ||
createHashRegex() { | ||
const pattern = ( | ||
'(?:(\'|")?(' + | ||
['context', 'defaultValue'].join('|') + | ||
')\\1)' + | ||
'(?:\\s*:\\s*)' + | ||
'(' + BaseLexer.stringPattern + ')' | ||
) | ||
this.hashRegex = new RegExp(pattern, 'gi') | ||
return this.hashRegex | ||
} | ||
} |
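The rewritten `JavascriptLexer` parses files with acorn and walks the AST instead of matching regexes, so keys, default values and option objects are read from `CallExpression` nodes. An illustrative run, with the expected entries inferred from the test fixture further down; the import path assumes a local checkout:

    import JavascriptLexer from './src/lexers/javascript-lexer'

    const lexer = new JavascriptLexer()
    lexer.on('warning', msg => console.warn(msg)) // e.g. non-literal keys

    const keys = lexer.extract(`
      i18n.t('first')
      i18n.t('second', 'defaultValue')
      i18n.t('third', { defaultValue: '{{var}} defaultValue', context: 'bar' })
      i18n.t('fou' + 'rth')
      i18n.t(someVariable)
    `)
    // keys ≈ [
    //   { key: 'first' },
    //   { key: 'second', defaultValue: 'defaultValue' },
    //   { key: 'third', defaultValue: '{{var}} defaultValue', context: 'bar' },
    //   { key: 'fourth' }
    // ]
    // the call with a variable key only emits a warning and is not extracted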
import * as acorn from 'acorn-jsx' | ||
import assert from 'assert' | ||
import HTMLLexer from './html-lexer' | ||
import BaseLexer from './base-lexer' | ||
import { ParsingError } from '../helpers' | ||
import * as walk from 'acorn/dist/walk' | ||
import JavascriptLexer from './javascript-lexer' | ||
export default class JsxLexer extends HTMLLexer { | ||
const JSXParserExtension = Object.assign({}, walk.base, { | ||
JSXText(node, st, c) { | ||
// We need this catch, but we don't need the catch to do anything. | ||
}, | ||
JSXElement(node, st, c) { | ||
node.openingElement.attributes.forEach(attr => c(attr, st, attr.type)) | ||
node.children.forEach(child => c(child, st, child.type)) | ||
}, | ||
JSXExpressionContainer(node, st, c) { | ||
c(node.expression, st, node.expression.type) | ||
}, | ||
JSXAttribute(node, st, c) { | ||
if (node.value !== null) { | ||
c(node.value, st, node.value.type) | ||
} | ||
}, | ||
JSXSpreadAttribute(node, st, c) { | ||
c(node.argument, st, node.argument.type) | ||
} | ||
}) | ||
export default class JsxLexer extends JavascriptLexer { | ||
constructor(options = {}) { | ||
options.attr = options.attr || 'i18nKey' | ||
super(options) | ||
this.acornOptions = { sourceType: 'module', plugins: { jsx: true }, ...options.acorn } | ||
} | ||
extract(content) { | ||
this.extractInterpolate(content) | ||
this.extractTrans(content) | ||
return this.keys | ||
} | ||
const that = this | ||
extractInterpolate(content) { | ||
let matches | ||
const regex = new RegExp( | ||
'<Interpolate([^>]*\\s' + this.attr + '[^>]*)\\/?>', | ||
'gi' | ||
) | ||
walk.simple( | ||
acorn.parse(content, this.acornOptions), | ||
{ | ||
CallExpression(node) { | ||
that.expressionExtractor.call(that, node) | ||
}, | ||
JSXElement(node) { | ||
const element = node.openingElement | ||
if (element.name.name === "Trans") { | ||
const entry = {} | ||
const defaultValue = that.nodeToString.call(that, node, content) | ||
while (matches = regex.exec(content)) { | ||
const attrs = this.parseAttributes(matches[1]) | ||
const key = attrs.keys | ||
if (key) { | ||
this.keys.push({ ...attrs.options, key }) | ||
} | ||
} | ||
element.attributes.forEach(attr => { | ||
if (attr.name.name === that.attr) { | ||
entry.key = attr.value.value | ||
} | ||
}) | ||
return this.keys | ||
} | ||
if (defaultValue !== '') { | ||
entry.defaultValue = defaultValue | ||
extractTrans(content) { | ||
let matches | ||
const closingTagPattern = '(?:<Trans([^>]*\\s' + this.attr + '[^>]*?)\\/>)' | ||
const selfClosingTagPattern = '(?:<Trans([^>]*\\s' + this.attr + '[^>]*?)>((?:\\s|.)*?)<\\/Trans>)' | ||
const regex = new RegExp( | ||
[closingTagPattern, selfClosingTagPattern].join('|'), | ||
'gi' | ||
) | ||
if (!entry.key) | ||
entry.key = entry.defaultValue | ||
} | ||
while (matches = regex.exec(content)) { | ||
const attrs = this.parseAttributes(matches[1] || matches[2]) | ||
const key = attrs.keys | ||
if (entry.key) | ||
that.keys.push(entry) | ||
} | ||
if (matches[3] && !attrs.options.defaultValue) { | ||
attrs.options.defaultValue = this.eraseTags(matches[0]).replace(/\s+/g, ' ') | ||
} | ||
else if (element.name.name === "Interpolate") { | ||
const entry = {} | ||
if (key) { | ||
this.keys.push({ ...attrs.options, key }) | ||
} | ||
} | ||
element.attributes.forEach(attr => { | ||
if (attr.name.name === that.attr) { | ||
entry.key = attr.value.value | ||
} | ||
}) | ||
if (entry.key) | ||
that.keys.push(entry) | ||
} | ||
} | ||
}, | ||
JSXParserExtension | ||
) | ||
return this.keys | ||
} | ||
/** | ||
* Recursively convert html tags and js injections to tags with the child index in it | ||
* @param {string} string | ||
* | ||
* @returns string | ||
*/ | ||
eraseTags(string) { | ||
const acornAst = acorn.parse(string, {plugins: {jsx: true}}) | ||
const acornTransAst = acornAst.body[0].expression | ||
const children = this.parseAcornPayload(acornTransAst.children, string) | ||
nodeToString(ast, string) { | ||
const children = this.parseAcornPayload(ast.children, string) | ||
@@ -85,7 +100,2 @@ const elemsToString = children => children.map((child, index) => { | ||
/** | ||
* Simplify the bulky AST given by Acorn | ||
* @param {*} children An array of elements contained inside an html tag | ||
* @param {string} originalString The original string being parsed | ||
*/ | ||
parseAcornPayload(children, originalString) { | ||
@@ -92,0 +102,0 @@ return children.map(child => { |
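`JsxLexer` now extends `JavascriptLexer` and walks the acorn-jsx AST with the `JSXParserExtension` walker above, so `<Trans>` and `<Interpolate>` elements and plain `t()` calls are all handled by a single `extract()` pass. A usage sketch, with expected results taken from the JsxLexer tests further down (the import path assumes a local checkout):

    import JsxLexer from './src/lexers/jsx-lexer'

    // Children of <Trans> become the defaultValue; without an i18nKey
    // attribute the default value is reused as the key.
    new JsxLexer().extract('<Trans i18nKey="first" count={count}>Yo</Trans>')
    // => [{ key: 'first', defaultValue: 'Yo' }]

    new JsxLexer().extract('<Trans count={count}>Yo</Trans>')
    // => [{ key: 'Yo', defaultValue: 'Yo' }]

    // Nested tags and expressions are replaced by numbered placeholders:
    new JsxLexer().extract('<Trans>a<b test={"</b>"}>c<c>z</c></b>{d}<br stuff={y}/></Trans>')
    // => [{ key: 'a<1>c<1>z</1></1><2>{d}</2><3></3>',
    //      defaultValue: 'a<1>c<1>z</1></1><2>{d}</2><3></3>' }]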
@@ -16,3 +16,3 @@ import EventEmitter from 'events' | ||
js: ['JavascriptLexer'], | ||
jsx: ['JavascriptLexer', 'JsxLexer'], | ||
jsx: ['JsxLexer'], | ||
mjs: ['JavascriptLexer'], | ||
@@ -56,3 +56,2 @@ | ||
lexerName = lexerConfig.lexer | ||
delete lexerConfig.lexer | ||
lexerOptions = lexerConfig | ||
@@ -59,0 +58,0 @@ } |
@@ -14,3 +14,3 @@ import { assert } from 'chai' | ||
const Lexer = new JavascriptLexer() | ||
const content = 'i18n.t("first" "bla")' | ||
const content = 'i18n.t("first", "bla")' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
@@ -31,2 +31,11 @@ { key: 'first', defaultValue: 'bla' } | ||
it('extracts the defaultValue/context on multiple lines', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
const content = 'i18n.t("first", {\ndefaultValue: "foo",\n context: \'bar\'})' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first', defaultValue: 'foo', context: 'bar' } | ||
]) | ||
done() | ||
}) | ||
it('extracts the defaultValue/context options with quotation marks', (done) => { | ||
@@ -66,4 +75,4 @@ const Lexer = new JavascriptLexer() | ||
const Lexer = new JavascriptLexer() | ||
const js = "import './yolo.js' t('first')" | ||
assert.deepEqual(Lexer.extract(js), [{ key: 'first' }]) | ||
const js = "ttt('first')" | ||
assert.deepEqual(Lexer.extract(js), []) | ||
done() | ||
@@ -81,58 +90,2 @@ }) | ||
}) | ||
describe('concatenateString()', () => { | ||
it('concatenates strings', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
assert.equal(Lexer.concatenateString('"foo" + \'bar\''), '"foobar"') | ||
done() | ||
}) | ||
it('returns the original string if it contains variables', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
assert.equal(Lexer.concatenateString('"foo" + bar'), '"foo" + bar') | ||
done() | ||
}) | ||
it('returns the original string if it contains backquote string', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
assert.equal(Lexer.concatenateString('"foo" + `bar`'), '"foo" + `bar`') | ||
done() | ||
}) | ||
}) | ||
describe('parseArguments()', () => { | ||
it('matches string arguments', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
const args = '"first", "bla"' | ||
assert.deepEqual(Lexer.parseArguments(args), { | ||
arguments: ['"first"', '"bla"'], | ||
options: {} | ||
}) | ||
done() | ||
}) | ||
it('matches variable arguments', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
const args = 'first bla' | ||
assert.deepEqual(Lexer.parseArguments(args), { | ||
arguments: ['first', 'bla'], | ||
options: {} | ||
}) | ||
done() | ||
}) | ||
it('matches concatenated arguments and concatenate when possible', (done) => { | ||
const Lexer = new JavascriptLexer() | ||
const args = "'first' + asd, 'bla' + 'asd', foo+bar+baz" | ||
assert.deepEqual(Lexer.parseArguments(args), { | ||
arguments: [ | ||
"'first' + asd", | ||
"'blaasd'", // string got concatenated! | ||
'foo+bar+baz' | ||
], | ||
options: {} | ||
}) | ||
done() | ||
}) | ||
}) | ||
}) |
@@ -5,7 +5,7 @@ import { assert } from 'chai' | ||
describe('JsxLexer', () => { | ||
describe('extractInterpolate', () => { | ||
describe('<Interpolate>', () => { | ||
it('extracts keys from i18nKey attributes', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Interpolate i18nKey="first" />' | ||
assert.deepEqual(Lexer.extractInterpolate(content), [ | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first' } | ||
@@ -17,7 +17,7 @@ ]) | ||
describe('Trans', () => { | ||
describe('<Trans>', () => { | ||
it('extracts keys from i18nKey attributes from closing tags', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans i18nKey="first" count={count}>Yo</Trans>' | ||
assert.deepEqual(Lexer.extractTrans(content), [ | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first', defaultValue: 'Yo' } | ||
@@ -28,6 +28,15 @@ ]) | ||
it('extracts keys from user-defined key attributes from closing tags', (done) => { | ||
const Lexer = new JsxLexer({ attr: "myIntlKey" }) | ||
const content = '<Trans myIntlKey="first" count={count}>Yo</Trans>' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first', defaultValue: 'Yo' } | ||
]) | ||
done() | ||
}) | ||
it('extracts keys from i18nKey attributes from self-closing tags', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans i18nKey="first" count={count} />' | ||
assert.deepEqual(Lexer.extractTrans(content), [ | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first' } | ||
@@ -37,9 +46,37 @@ ]) | ||
}) | ||
}) | ||
describe('eraseTags()', () => { | ||
it('extracts keys from user-defined key attributes from self-closing tags', (done) => { | ||
const Lexer = new JsxLexer({ attr: "myIntlKey" }) | ||
const content = '<Trans myIntlKey="first" count={count} />' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'first' } | ||
]) | ||
done() | ||
}) | ||
it('extracts keys from Trans elements without an i18nKey', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans count={count}>Yo</Trans>' | ||
assert.deepEqual(Lexer.extract(content), [ | ||
{ key: 'Yo', defaultValue: 'Yo' } | ||
]) | ||
done() | ||
}) | ||
it('doesn\'t add a blank key for self-closing or empty tags', (done) => { | ||
const Lexer = new JsxLexer() | ||
const emptyTag = '<Trans count={count}></Trans>' | ||
assert.deepEqual(Lexer.extract(emptyTag), []) | ||
const selfClosing = '<Trans count={count}/>' | ||
assert.deepEqual(Lexer.extract(selfClosing), []) | ||
done() | ||
}) | ||
it('erases tags from content', (done) => { | ||
const Lexer = new JsxLexer() | ||
const content = '<Trans>a<b test={"</b>"}>c<c>z</c></b>{d}<br stuff={y}/></Trans>' | ||
assert.equal(Lexer.eraseTags(content), 'a<1>c<1>z</1></1><2>{d}</2><3></3>') | ||
assert.equal(Lexer.extract(content)[0].defaultValue, 'a<1>c<1>z</1></1><2>{d}</2><3></3>') | ||
done() | ||
@@ -46,0 +83,0 @@ }) |
@@ -15,3 +15,3 @@ import { assert } from 'chai' | ||
contents: Buffer.from( | ||
"asd t('first') t('second') \n asd t('third') ad t('fourth')" | ||
"t('first'); t('second') \n t('third'); t('fourth');" | ||
), | ||
@@ -37,3 +37,3 @@ path: 'file.js' | ||
contents: Buffer.from( | ||
"asd t(\n 'first'\n) t('second') \n asd t(\n\n'third')" | ||
"t(\n 'first'\n)\n t('second'); t(\n\n'third')" | ||
), | ||
@@ -60,3 +60,3 @@ path: 'file.js' | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('first', {context: 'female'})"), | ||
contents: Buffer.from("t('first', {context: 'female'})"), | ||
path: 'file.js' | ||
@@ -159,3 +159,3 @@ }) | ||
second: 'defaultValue', | ||
third: 'defaultValue', | ||
third: '{{var}} defaultValue', | ||
fourth: '' | ||
@@ -222,3 +222,3 @@ } | ||
contents: Buffer.from( | ||
"asd t('ns1:first') t('second') \n asd t('ns2:third') ad t('fourth')" | ||
"t('ns1:first'); t('second') \n t('ns2:third'); t('fourth')" | ||
), | ||
@@ -268,3 +268,3 @@ path: 'file.js' | ||
contents: Buffer.from( | ||
'asd t(\'escaped \\\'single quotes\\\'\') t("escaped \\"double quotes\\"")' | ||
't(\'escaped \\\'single quotes\\\'\'); t("escaped \\"double quotes\\"")' | ||
), | ||
@@ -294,3 +294,3 @@ path: 'file.js' | ||
contents: Buffer.from( | ||
"asd t('escaped backslash\\\\ newline\\n\\r tab\\t')" | ||
"t('escaped backslash\\\\ newline\\n\\r tab\\t')" | ||
), | ||
@@ -318,3 +318,3 @@ path: 'file.js' | ||
contents: Buffer.from( | ||
"asd t('first') t('second') \n asd t('third') ad t('fourth')" | ||
"t('first')" | ||
), | ||
@@ -336,3 +336,3 @@ path: 'file.js' | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('test_merge:first') t('test_merge:second')"), | ||
contents: Buffer.from("t('test_merge:first'); t('test_merge:second')"), | ||
path: 'file.js' | ||
@@ -359,3 +359,3 @@ }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('test_leak:first') t('test_leak:second')"), | ||
contents: Buffer.from("t('test_leak:first'); t('test_leak:second')"), | ||
path: 'file.js' | ||
@@ -385,3 +385,3 @@ }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('test_context:first')"), | ||
contents: Buffer.from("t('test_context:first')"), | ||
path: 'file.js' | ||
@@ -414,3 +414,3 @@ }) | ||
contents: Buffer.from( | ||
"asd t('test_plural:first') t('test_plural:second')" | ||
"t('test_plural:first'); t('test_plural:second')" | ||
), | ||
@@ -445,3 +445,3 @@ path: 'file.js' | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('test_context_plural:first')"), | ||
contents: Buffer.from("t('test_context_plural:first')"), | ||
path: 'file.js' | ||
@@ -479,3 +479,3 @@ }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('fourth')"), | ||
contents: Buffer.from("t('fourth')"), | ||
path: 'file.js' | ||
@@ -511,3 +511,3 @@ }) | ||
contents: Buffer.from( | ||
"asd t('test_separators?first') t('test_separators?second-third')" | ||
"t('test_separators?first'); t('test_separators?second-third')" | ||
), | ||
@@ -537,3 +537,3 @@ path: 'file.js' | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('Status: loading...')"), | ||
contents: Buffer.from("t('Status: loading...')"), | ||
path: 'file.js' | ||
@@ -561,3 +561,3 @@ }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('first')"), | ||
contents: Buffer.from("t('first')"), | ||
path: 'file.js' | ||
@@ -624,3 +624,3 @@ }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('first')"), | ||
contents: Buffer.from("t('first')"), | ||
path: 'file.js' | ||
@@ -648,3 +648,3 @@ }) | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd t('first')"), | ||
contents: Buffer.from("t('first')"), | ||
path: 'file.js' | ||
@@ -674,3 +674,3 @@ }) | ||
contents: Buffer.from( | ||
"asd t('ns1:first') t('second') \n asd ad t('fourth')" | ||
"t('ns1:first'); t('second') \n t('fourth')" | ||
), | ||
@@ -716,3 +716,3 @@ path: 'file.js' | ||
const fakeFile = new Vinyl({ | ||
contents: Buffer.from("asd bla('first') _e('second')"), | ||
contents: Buffer.from("bla('first'); _e('second')"), | ||
path: 'file.js' | ||
@@ -741,3 +741,3 @@ }) | ||
contents: Buffer.from( | ||
"asd t('ccc') t('aaa') t('bbb.bbb') t('bbb.aaa')" | ||
"t('ccc'); t('aaa'); t('bbb.bbb'); t('bbb.aaa')" | ||
), | ||
@@ -768,3 +768,3 @@ path: 'file.js' | ||
contents: Buffer.from( | ||
"asd t('ccc') t('aaa') t('bbb.bbb') t('bbb.aaa')" | ||
"t('ccc'); t('aaa'); t('bbb.bbb'); t('bbb.aaa')" | ||
), | ||
@@ -771,0 +771,0 @@ path: 'file.js' |
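These tests drive the whole transform with in-memory Vinyl files. A minimal harness in the same style, assuming the transform class is imported as `i18nextParser` the way these tests do (check the package's README for the exact import path in your setup):

    const Vinyl = require('vinyl')

    const parser = new i18nextParser({ locales: ['en'] })
    parser.on('data', file => {
      // one catalog file per locale and namespace, e.g. en/translation.json
      console.log(file.path, file.contents.toString())
    })
    parser.end(new Vinyl({
      contents: Buffer.from("t('first'); t('second', 'defaultValue')"),
      path: 'file.js'
    }))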
@@ -0,9 +1,17 @@ | ||
import bla from 'bla'; | ||
notRelated() | ||
i18n.t('first') | ||
i18n.t('second', 'defaultValue') | ||
i18n.t('third', {defaultValue: 'defaultValue'}) | ||
i18n.t('third', { | ||
defaultValue: '{{var}} defaultValue' | ||
}) | ||
i18n.t( | ||
'fou' + | ||
'rth' | ||
'rt' + | ||
'h' | ||
) | ||
i18n.t('not picked' + variable, {foo: bar}, 'bla' + 'asd', {}, foo+bar+baz ) | ||
if (true) { | ||
i18n.t('not picked' + variable, {foo: bar}, 'bla' + 'asd', {}, foo+bar+baz ) | ||
} | ||
i18n.t(variable, {foo: bar}, 'bla' + 'asd', {}, foo+bar+baz ) |
+ Added acorn@^5.5.3