i18next-parser
Advanced tools
Comparing version 1.0.0-beta1 to 1.0.0-beta2
# Changelog | ||
## 1.0.0-beta2 - latest | ||
- See [release](https://github.com/i18next/i18next-parser/releases/tag/1.0.0-beta2) | ||
## 1.0.0-beta1 | ||
- See [release](https://github.com/i18next/i18next-parser/releases/tag/1.0.0-beta1) | ||
## 0.13.0 | ||
## 0.13.0 - latest | ||
- Support `defaultValue` option along the translation key (#68) | ||
@@ -10,0 +14,0 @@ |
@@ -38,3 +38,3 @@ # Contribute | ||
``` | ||
mocha --require babel-register --require babel-polyfill test/**/*.js | ||
yarn test | ||
``` | ||
@@ -41,0 +41,0 @@ |
@@ -5,3 +5,3 @@ { | ||
"name": "i18next-parser", | ||
"version": "1.0.0-beta1", | ||
"version": "1.0.0-beta2", | ||
"license": "MIT", | ||
@@ -13,3 +13,3 @@ "main": "src/index.js", | ||
"scripts": { | ||
"test": "mocha --require babel-register --require babel-polyfill test/**/*.test.js", | ||
"test": "mocha --require babel-register --require babel-polyfill 'test/**/*.test.js'", | ||
"watch": "babel src -d dist -w" | ||
@@ -16,0 +16,0 @@ }, |
@@ -22,5 +22,5 @@ # i18next Parser [![Build Status](https://travis-ci.org/i18next/i18next-parser.svg?branch=master)](https://travis-ci.org/i18next/i18next-parser) | ||
## `1.x` | ||
## DISCLAIMER: `1.0.0-beta` | ||
`1.x` is currently in beta. It is a deep rewrite of this package that solves many issues, the main one being that it was slowly becoming unmaintainable. The [migration](docs/migration.md) contains all the breaking changes. If you rely on a `0.x.x` version, you can still find the old documentation on its dedicated [branch](https://github.com/i18next/i18next-parser/tree/0.x.x). | ||
`1.x` is currently in beta. You can follow the pre-releases [here](https://github.com/i18next/i18next-parser/releases). It is a deep rewrite of this package that solves many issues, the main one being that it was slowly becoming unmaintainable. The [migration](docs/migration.md) contains all the breaking changes. Everything that follows is related to `1.x`. If you rely on a `0.x.x` version, you can still find the old documentation on its dedicated [branch](https://github.com/i18next/i18next-parser/tree/0.x.x). | ||
@@ -35,4 +35,4 @@ | ||
``` | ||
yarn global add i18next-parser | ||
npm install -g i18next-parser | ||
yarn global add i18next-parser@next | ||
npm install -g i18next-parser@next | ||
i18next 'app/**/*.{js,hbs}' 'lib/**/*.{js,hbs}' [-oc] | ||
@@ -52,3 +52,3 @@ ``` | ||
``` | ||
yarn add -D i18next-parser | ||
yarn add -D i18next-parser@next | ||
npm install --save-dev i18next-parser | ||
@@ -93,2 +93,3 @@ ``` | ||
**output** | Where to write the locale files relative to the base | `locales` | ||
**reactNamespace** | For react file, extract the [defaultNamespace](https://react.i18next.com/components/translate-hoc.html) | `false` (`true` for `.jsx` files) | ||
**sort** | Whether or not to sort the catalog | `false` | ||
@@ -114,2 +115,3 @@ | ||
js: ['JavascriptLexer'], | ||
jsx: ['JavascriptLexer', 'JsxLexer'], | ||
mjs: ['JavascriptLexer'], | ||
@@ -157,4 +159,10 @@ | ||
**`JsxLexer` options** | ||
Option | Description | Default | ||
------------- | ---------------------- | ------- | ||
**attr** | Attribute for the keys | `i18nKey` | ||
## Events | ||
@@ -161,0 +169,0 @@ |
@@ -10,2 +10,3 @@ import { dotPathToHash, mergeHashes, populateHash } from './helpers' | ||
import YAML from 'yamljs' | ||
import BaseLexer from './lexers/base-lexer'; | ||
@@ -32,2 +33,3 @@ export default class i18nTransform extends Transform { | ||
output: 'locales', | ||
reactNamespace: false, | ||
sort: false | ||
@@ -39,3 +41,3 @@ } | ||
this.parser = new Parser(this.options.lexers) | ||
this.parser = new Parser(this.options) | ||
this.parser.on('error', error => this.emit('error', error)) | ||
@@ -51,3 +53,3 @@ this.parser.on('warning', warning => this.emit('warning', warning)) | ||
if (file.isBuffer()) { | ||
content = file.contents | ||
content = file.contents.toString('utf8') | ||
} | ||
@@ -60,4 +62,4 @@ else { | ||
const extenstion = path.extname(file.path).substring(1) | ||
const entries = this.parser.parse(content, extenstion) | ||
const extension = path.extname(file.path).substring(1) | ||
const entries = this.parser.parse(content, extension) | ||
@@ -71,5 +73,6 @@ entries.forEach(entry => { | ||
} | ||
else { | ||
entry.namespace = this.options.defaultNamespace | ||
else if (extension === 'jsx' || this.options.reactNamespace) { | ||
entry.namespace = this.grabReactNamespace(content) | ||
} | ||
entry.namespace = entry.namespace || this.options.defaultNamespace | ||
@@ -213,2 +216,12 @@ key = parts.join(this.options.namespaceSeparator) | ||
} | ||
grabReactNamespace(content) { | ||
const reactTranslateRegex = new RegExp( | ||
'translate\\((?:\\s*\\[?\\s*)(' + BaseLexer.stringPattern + ')' | ||
) | ||
const translateMatches = content.match(reactTranslateRegex) | ||
if (translateMatches) { | ||
return translateMatches[1].slice(1, -1) | ||
} | ||
} | ||
} |
@@ -18,3 +18,3 @@ import BaseLexer from './base-lexer' | ||
const regex = new RegExp( | ||
'<([A-Z][A-Z0-9]*)([^>]*\\s' + this.attr + '[^>]*)>(?:(.*?)<\\/\\1>)?', | ||
'<([A-Z][A-Z0-9]*)([^>]*\\s' + this.attr + '[^>]*)>(?:((?:\\s|.)*?)<\\/\\1>)?', | ||
'gi' | ||
@@ -21,0 +21,0 @@ ) |
@@ -5,2 +5,3 @@ import EventEmitter from 'events' | ||
import JavascriptLexer from './lexers/javascript-lexer' | ||
import JsxLexer from './lexers/jsx-lexer' | ||
import path from 'path' | ||
@@ -16,2 +17,3 @@ | ||
js: ['JavascriptLexer'], | ||
jsx: ['JavascriptLexer', 'JsxLexer'], | ||
mjs: ['JavascriptLexer'], | ||
@@ -25,3 +27,4 @@ | ||
HTMLLexer, | ||
JavascriptLexer | ||
JavascriptLexer, | ||
JsxLexer | ||
} | ||
@@ -32,3 +35,4 @@ | ||
super(options) | ||
this.lexers = { ...lexers, ...options } | ||
this.options = options | ||
this.lexers = { ...lexers, ...options.lexers } | ||
} | ||
@@ -35,0 +39,0 @@ |
@@ -7,2 +7,4 @@ import { assert } from 'chai' | ||
const enLibraryPath = path.normalize('en/translation.json') | ||
describe('parser', () => { | ||
@@ -19,3 +21,3 @@ it('parses globally on multiple lines', done => { | ||
i18nextParser.once('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -42,3 +44,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -64,3 +66,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -99,3 +101,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -135,3 +137,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -165,3 +167,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -178,2 +180,32 @@ } | ||
it('parses react files', done => { | ||
let result | ||
const i18nextParser = new i18nTransform() | ||
const fakeFile = new Vinyl({ | ||
contents: fs.readFileSync( | ||
path.resolve(__dirname, 'templating/react.jsx') | ||
), | ||
path: 'react.jsx' | ||
}) | ||
const expected = { | ||
first: '', | ||
second: '', | ||
third: 'Hello <strong title={t(\'fourth\')}>{{name}}</strong>, you have {{count}} unread message. <Link to="/msgs">Go to messages</Link>.', | ||
fourth: '' | ||
} | ||
i18nextParser.on('data', file => { | ||
// support for a default Namespace | ||
if (file.relative.endsWith(path.normalize('en/react.json'))) { | ||
result = JSON.parse(file.contents) | ||
} | ||
}) | ||
i18nextParser.on('end', () => { | ||
assert.deepEqual(result, expected) | ||
done() | ||
}) | ||
i18nextParser.end(fakeFile) | ||
}) | ||
it('creates two files per namespace and per locale', done => { | ||
@@ -193,3 +225,3 @@ let results = [] | ||
i18nextParser.on('data', file => { | ||
results.push(file.relative.replace('locales/', '')) | ||
results.push(file.relative.replace(/locales[\//\\]/, '')) | ||
}) | ||
@@ -220,3 +252,3 @@ i18nextParser.on('end', () => { | ||
expectedFiles.forEach(filename => { | ||
assert.include(results, filename) | ||
assert.include(results, path.normalize(filename)) | ||
if (!--length) done() | ||
@@ -240,3 +272,3 @@ }) | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -266,3 +298,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -306,3 +338,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/test_merge.json')) { | ||
if (file.relative.endsWith(path.normalize('en/test_merge.json'))) { | ||
result = JSON.parse(file.contents) | ||
@@ -329,6 +361,6 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/test_leak.json')) { | ||
if (file.relative.endsWith(path.normalize('en/test_leak.json'))) { | ||
resultEN = JSON.parse(file.contents) | ||
} | ||
if (file.relative.endsWith('fr/test_leak.json')) { | ||
if (file.relative.endsWith(path.normalize('fr/test_leak.json'))) { | ||
resultFR = JSON.parse(file.contents) | ||
@@ -361,3 +393,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/test_context.json')) { | ||
if (file.relative.endsWith(path.normalize('en/test_context.json'))) { | ||
result = JSON.parse(file.contents) | ||
@@ -393,3 +425,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/test_plural.json')) { | ||
if (file.relative.endsWith(path.normalize('en/test_plural.json'))) { | ||
result = JSON.parse(file.contents) | ||
@@ -421,3 +453,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/test_context_plural.json')) { | ||
if (file.relative.endsWith(path.normalize('en/test_context_plural.json'))) { | ||
result = JSON.parse(file.contents) | ||
@@ -449,3 +481,3 @@ } | ||
i18nextParser.on('data', file => { | ||
results.push(file.relative.replace('locales/', '')) | ||
results.push(file.relative.replace(/locales[\\\/]/, '')) | ||
}) | ||
@@ -460,3 +492,3 @@ i18nextParser.on('end', () => { | ||
expectedFiles.forEach(filename => { | ||
assert.include(results, filename) | ||
assert.include(results, path.normalize(filename)) | ||
if (!--length) done() | ||
@@ -483,3 +515,3 @@ }) | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/test_separators.json')) { | ||
if (file.relative.endsWith(path.normalize('en/test_separators.json'))) { | ||
result = JSON.parse(file.contents) | ||
@@ -507,3 +539,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -531,3 +563,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.yml')) { | ||
if (file.relative.endsWith(path.normalize('en/translation.yml'))) { | ||
result = file.contents.toString('utf8') | ||
@@ -537,3 +569,3 @@ } | ||
i18nextParser.once('end', () => { | ||
assert.equal(result, 'first: ""\n') | ||
assert.equal(result.replace(/\r\n/g, '\n'), 'first: ""\n') | ||
done() | ||
@@ -556,3 +588,3 @@ }) | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = file.contents.toString('utf8') | ||
@@ -562,3 +594,3 @@ } | ||
i18nextParser.once('end', () => { | ||
assert.deepEqual(result.split('\n')[1], ' "first": ""') | ||
assert.deepEqual(result.replace(/\r\n/g, '\n').split('\n')[1], ' "first": ""') | ||
done() | ||
@@ -585,3 +617,3 @@ }) | ||
i18nextParser.on('data', file => { | ||
results.push(file.relative.replace('locales/', '')) | ||
results.push(file.relative.replace(/locales[\\\/]/, '')) | ||
}) | ||
@@ -603,3 +635,3 @@ i18nextParser.on('end', () => { | ||
expectedFiles.forEach(filename => { | ||
assert.include(results, filename) | ||
assert.include(results, path.normalize(filename)) | ||
if (!--length) done() | ||
@@ -631,3 +663,3 @@ }) | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -657,3 +689,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -684,3 +716,3 @@ } | ||
i18nextParser.on('data', file => { | ||
if (file.relative.endsWith('en/translation.json')) { | ||
if (file.relative.endsWith(enLibraryPath)) { | ||
result = JSON.parse(file.contents) | ||
@@ -727,3 +759,3 @@ } | ||
i18nextParser.on('error', error => { | ||
assert.equal(error.message, 'Unexpected token / in JSON at position 0') | ||
assert.equal(error.message.startsWith('Unexpected token /'), true) | ||
done() | ||
@@ -730,0 +762,0 @@ }) |
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
201158
49
2884
183
0