Comparing version 0.5.0 to 0.5.1
{ | ||
"name": "lexing", | ||
"version": "0.5.0", | ||
"version": "0.5.1", | ||
"description": "Regex-based lexer", | ||
@@ -17,2 +17,3 @@ "keywords": [ | ||
"devDependencies": { | ||
"babel-core": "^5.0.0", | ||
"mocha": "*", | ||
@@ -19,0 +20,0 @@ "typescript": "next" |
@@ -134,2 +134,2 @@ # lexing | ||
Copyright 2015 Christopher Brown. [MIT Licensed](http://opensource.org/licenses/MIT). | ||
Copyright 2015 Christopher Brown. [MIT Licensed](http://chbrown.github.io/licenses/MIT/#2015). |
@@ -1,115 +0,131 @@ | ||
/// <reference path="../type_declarations/DefinitelyTyped/node/node.d.ts" /> | ||
/// <reference path="../type_declarations/DefinitelyTyped/mocha/mocha.d.ts" /> | ||
var assert = require('assert'); | ||
var lexing = require('../index'); | ||
import assert from 'assert'; | ||
import {describe, it} from 'mocha'; | ||
import * as lexing from '../'; | ||
var Token = lexing.Token; | ||
/**
 * Drain `iterable` by repeatedly calling `.next()`, collecting every item
 * (including the terminal one) until an item with `name === 'EOF'` appears.
 *
 * The diff had interleaved the old compiled-JS and new TS versions of this
 * function, leaving the loop's `return`/closing braces detached; this is the
 * single coherent implementation.
 *
 * @param {{next(): {name: string}}} iterable - token source with a `next()` method
 * @returns {Array<{name: string}>} all items read, ending with the EOF token
 */
function readToEOF(iterable) {
  var items = [];
  while (true) {
    var item = iterable.next();
    items.push(item);
    // Loose equality kept from the original; token names are always strings.
    if (item.name == 'EOF') {
      break;
    }
  }
  return items;
}
// Tokenizer suite: verifies that lexing.Tokenizer turns a raw string into
// WORD tokens, skips whitespace, and terminates with an EOF token.
// The diff view had duplicated the ES5 and arrow-function versions of this
// suite back-to-back; this is the single deduplicated version.
describe('tokenizer', function() {
  var tokenizer = new lexing.Tokenizer([
    // End of input -> explicit EOF token so consumers know when to stop.
    [/^$/, () => Token('EOF')],
    // Whitespace -> null, i.e. emit nothing.
    [/^\s+/, () => null],
    // Runs of word characters become WORD tokens carrying the matched text.
    [/^\w+/, match => Token('WORD', match[0])],
  ]);
  it('should lex simple string', function() {
    var input_iterable = new lexing.StringIterator('This is a simple sentence');
    var output_iterable = tokenizer.map(input_iterable);
    var expected_tokens = [
      Token('WORD', 'This'),
      Token('WORD', 'is'),
      Token('WORD', 'a'),
      Token('WORD', 'simple'),
      Token('WORD', 'sentence'),
      Token('EOF'),
    ];
    var actual_tokens = readToEOF(output_iterable);
    assert.deepEqual(actual_tokens, expected_tokens);
  });
});
// Combiner suite: verifies that lexing.Combiner collapses START/END delimited
// runs of tokens into a single composite token (STRING joins values into one
// string; ARRAY collects values into a plain array), including nesting.
// The diff view had scrambled this suite — it-blocks were duplicated and one
// body was split across the span; this is the single deduplicated version.
describe('combiner', function() {
  var combiner = new lexing.Combiner([
    // STRING: concatenate the contained token values into one string.
    ['STRING', tokens => Token('STRING', tokens.map(token => token.value).join(''))],
    // ARRAY: collect the contained token values into a plain Array.
    ['ARRAY', tokens => Token('ARRAY', tokens.map(token => token.value))],
  ]);
  it('should join a START/END delimited STRING in a series of tokens', function() {
    var input_iterable = new lexing.ArrayIterator([
      Token('WORD', 'BT'),
      Token('START', 'STRING'),
      Token('CHAR', 'A'),
      Token('CHAR', 'b'),
      Token('CHAR', 'c'),
      Token('END', 'STRING'),
      Token('WORD', 'ET'),
      Token('EOF'),
    ]);
    var output_iterable = combiner.map(input_iterable);
    var expected_tokens = [
      Token('WORD', 'BT'),
      Token('STRING', 'Abc'),
      Token('WORD', 'ET'),
      Token('EOF'),
    ];
    var actual_tokens = readToEOF(output_iterable);
    assert.deepEqual(actual_tokens, expected_tokens);
  });
  it('should combine a START/END delimited ARRAY as a basic Array', function() {
    var input_iterable = new lexing.ArrayIterator([
      Token('WORD', 'BT'),
      Token('START', 'ARRAY'),
      Token('CHAR', 'A'),
      Token('CHAR', 'b'),
      Token('CHAR', 'c'),
      Token('END', 'ARRAY'),
      Token('WORD', 'ET'),
      Token('EOF'),
    ]);
    var output_iterable = combiner.map(input_iterable);
    var expected_tokens = [
      Token('WORD', 'BT'),
      Token('ARRAY', ['A', 'b', 'c']),
      Token('WORD', 'ET'),
      Token('EOF'),
    ];
    var actual_tokens = readToEOF(output_iterable);
    assert.deepEqual(actual_tokens, expected_tokens);
  });
  it('should join strings nested inside an array', function() {
    var input_iterable = new lexing.ArrayIterator([
      Token('WORD', 'BT'),
      Token('START', 'ARRAY'),
      Token('START', 'STRING'),
      Token('CHAR', 'A'),
      Token('CHAR', 'b'),
      Token('CHAR', 'c'),
      Token('END', 'STRING'),
      Token('NUMBER', 10),
      Token('START', 'STRING'),
      Token('CHAR', 'D'),
      Token('CHAR', 'e'),
      Token('CHAR', 'f'),
      Token('END', 'STRING'),
      Token('NUMBER', 20),
      Token('END', 'ARRAY'),
      Token('WORD', 'ET'),
      Token('EOF'),
    ]);
    var output_iterable = combiner.map(input_iterable);
    var expected_tokens = [
      Token('WORD', 'BT'),
      // Nested STRINGs are joined first, then folded into the outer ARRAY.
      Token('ARRAY', ['Abc', 10, 'Def', 20]),
      Token('WORD', 'ET'),
      Token('EOF'),
    ];
    var actual_tokens = readToEOF(output_iterable);
    assert.deepEqual(actual_tokens, expected_tokens);
  });
});
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
42630
3
6
947