very-small-parser npm package: compare versions

Comparing version 1.0.0 to 1.1.0

esm/html/HtmlParser.js

 import { Parser } from '../Parser';
-import { first } from '../util';
+import { first, loop0 } from '../util';
 export class HtmlParser extends Parser {

@@ -35,29 +35,10 @@ first;
         const children = this.parse(src);
-        const root = {
-            type: 'root',
-            children,
-            len: src.length,
-        };
+        const root = { type: 'root', children, len: src.length };
         return root;
     }
     parseFragment(src) {
-        const children = [];
-        const end = src.length;
-        let remaining = src;
-        let length = 0;
-        while (length < end) {
-            const tok = this.first(this, remaining);
-            if (!tok)
-                break;
-            children.push(tok);
-            length += tok.len || 0;
-            remaining = remaining.slice(tok.len);
-        }
-        const root = {
-            type: 'root',
-            children,
-            len: length,
-        };
+        const [children, len] = loop0(this, this.first, src);
+        const root = { type: 'root', children, len };
         return root;
     }
 }
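
Both hunks above keep the same result shape; only the object construction is compacted and the hand-written scan loop is replaced by loop0. A rough sketch of that shape, inferred from the diff (RootNode and the unknown[] element type are illustrative placeholders, not the package's published typings):

// Sketch only, inferred from the diff above; names are placeholders.
interface RootNode {
  type: 'root';
  children: unknown[]; // tokens collected by the tokenizer loop
  len: number;         // number of source characters consumed
}
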
 import * as markdown from './markdown';
 import * as html from './html';
-export { markdown, html };
+import * as css from './css';
+export { markdown, html, css };

 import * as markdown from './markdown';
 import * as html from './html';
-export { markdown, html };
+import * as css from './css';
+export { markdown, html, css };
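
The two diffs above add a css namespace export alongside markdown and html. A minimal consumer-side sketch, assuming only the re-exports shown here; the APIs inside each namespace are not part of this diff:

// Assumes only the re-exports shown above; per-namespace APIs are not shown in this diff.
import { markdown, html, css } from 'very-small-parser';
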
 import type { IParser, IToken, TTokenizer } from './types';
 export declare const token: <T extends IToken>(value: string, type: T["type"], children?: any, overrides?: Partial<T>, len?: number) => T;
+export declare const loop0: <T extends IToken, P extends IParser<T>>(parser: P, tokenizer: TTokenizer<T, P>, src: string) => [tokens: T[], length: number];
 export declare const loop: <T extends IToken, P extends IParser<T>>(parser: P, tokenizer: TTokenizer<T, P>, src: string) => T[];
 export declare const first: <T extends IToken, P extends IParser<T>>(tokenizers: TTokenizer<T, P>[]) => TTokenizer<T, P>;
 export declare const regexParser: <T extends IToken>(type: T["type"], reg: RegExp, childrenMatchIndex: number) => TTokenizer<T>;

@@ -12,3 +12,3 @@ const isTest = process.env.NODE_ENV !== 'production';
 };
-export const loop = (parser, tokenizer, src) => {
+export const loop0 = (parser, tokenizer, src) => {
     const children = [];

@@ -20,12 +20,11 @@ const end = src.length;
         const tok = tokenizer(parser, remaining);
-        if (tok) {
-            children.push(tok);
-            length += tok.len || 0;
-            remaining = remaining.slice(tok.len);
-        }
-        else if (!children.length)
-            return [];
+        if (!tok)
+            break;
+        children.push(tok);
+        length += tok.len || 0;
+        remaining = remaining.slice(tok.len);
     }
-    return children;
+    return [children, length];
 };
+export const loop = (parser, tokenizer, src) => loop0(parser, tokenizer, src)[0];
 export const first = (tokenizers) => {

@@ -32,0 +31,0 @@ const length = tokenizers.length;
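
The util hunk above renames the scanning loop to loop0, makes it return the consumed length alongside the tokens, and rebuilds loop as a thin wrapper that discards the length. A simplified TypeScript sketch of that contract; Token and Tokenizer are stand-ins for the package's IToken / TTokenizer generics:

// Simplified sketch of the refactor above; Token and Tokenizer are placeholder types.
type Token = { len: number };
type Tokenizer<P> = (parser: P, src: string) => Token | undefined;

const loop0 = <P>(parser: P, tokenizer: Tokenizer<P>, src: string): [Token[], number] => {
  const tokens: Token[] = [];
  let remaining = src;
  let length = 0;
  while (length < src.length) {
    const tok = tokenizer(parser, remaining);
    if (!tok) break;                  // stop at the first position the tokenizer cannot handle
    tokens.push(tok);
    length += tok.len || 0;
    remaining = remaining.slice(tok.len);
  }
  return [tokens, length];            // callers now also get the consumed length
};

// loop keeps its old signature by dropping the length.
const loop = <P>(parser: P, tokenizer: Tokenizer<P>, src: string): Token[] =>
  loop0(parser, tokenizer, src)[0];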

@@ -37,27 +37,8 @@ "use strict";
         const children = this.parse(src);
-        const root = {
-            type: 'root',
-            children,
-            len: src.length,
-        };
+        const root = { type: 'root', children, len: src.length };
         return root;
     }
     parseFragment(src) {
-        const children = [];
-        const end = src.length;
-        let remaining = src;
-        let length = 0;
-        while (length < end) {
-            const tok = this.first(this, remaining);
-            if (!tok)
-                break;
-            children.push(tok);
-            length += tok.len || 0;
-            remaining = remaining.slice(tok.len);
-        }
-        const root = {
-            type: 'root',
-            children,
-            len: length,
-        };
+        const [children, len] = (0, util_1.loop0)(this, this.first, src);
+        const root = { type: 'root', children, len };
         return root;

@@ -64,0 +45,0 @@ }

 import * as markdown from './markdown';
 import * as html from './html';
-export { markdown, html };
+import * as css from './css';
+export { markdown, html, css };

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.html = exports.markdown = void 0;
+exports.css = exports.html = exports.markdown = void 0;
 const tslib_1 = require("tslib");

@@ -9,1 +9,3 @@ const markdown = tslib_1.__importStar(require("./markdown"));
 exports.html = html;
+const css = tslib_1.__importStar(require("./css"));
+exports.css = css;
 import type { IParser, IToken, TTokenizer } from './types';
 export declare const token: <T extends IToken>(value: string, type: T["type"], children?: any, overrides?: Partial<T>, len?: number) => T;
+export declare const loop0: <T extends IToken, P extends IParser<T>>(parser: P, tokenizer: TTokenizer<T, P>, src: string) => [tokens: T[], length: number];
 export declare const loop: <T extends IToken, P extends IParser<T>>(parser: P, tokenizer: TTokenizer<T, P>, src: string) => T[];
 export declare const first: <T extends IToken, P extends IParser<T>>(tokenizers: TTokenizer<T, P>[]) => TTokenizer<T, P>;
 export declare const regexParser: <T extends IToken>(type: T["type"], reg: RegExp, childrenMatchIndex: number) => TTokenizer<T>;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.regexParser = exports.first = exports.loop = exports.token = void 0;
exports.regexParser = exports.first = exports.loop = exports.loop0 = exports.token = void 0;
const isTest = process.env.NODE_ENV !== 'production';

@@ -16,3 +16,3 @@ const token = (value, type, children, overrides, len = value.length) => {

exports.token = token;
const loop = (parser, tokenizer, src) => {
const loop0 = (parser, tokenizer, src) => {
const children = [];

@@ -24,12 +24,12 @@ const end = src.length;

const tok = tokenizer(parser, remaining);
if (tok) {
children.push(tok);
length += tok.len || 0;
remaining = remaining.slice(tok.len);
}
else if (!children.length)
return [];
if (!tok)
break;
children.push(tok);
length += tok.len || 0;
remaining = remaining.slice(tok.len);
}
return children;
return [children, length];
};
exports.loop0 = loop0;
const loop = (parser, tokenizer, src) => (0, exports.loop0)(parser, tokenizer, src)[0];
exports.loop = loop;

@@ -36,0 +36,0 @@ const first = (tokenizers) => {

 {
   "name": "very-small-parser",
-  "version": "1.0.0",
+  "version": "1.1.0",
   "description": "A very small Markdown, HTML, and CSS parser.",

@@ -27,5 +27,7 @@ "author": {
   ],
-  "main": "lib/index.js",
+  "main": "dist/index.js",
   "types": "lib/index.d.ts",
   "typings": "lib/index.d.ts",
+  "module": "dist/module.js",
+  "unpkg": "dist/index.js",
   "files": [

@@ -35,3 +37,4 @@ "LICENSE",
     "lib/",
-    "esm/"
+    "esm/",
+    "dist/"
   ],

@@ -43,6 +46,8 @@ "scripts": {
     "lint:fix": "biome lint --apply ./src",
-    "clean": "npx rimraf@5.0.5 lib es6 es2019 es2020 esm typedocs coverage gh-pages yarn-error.log",
+    "clean": "npx rimraf@5.0.5 lib dist es6 es2019 es2020 esm typedocs coverage gh-pages yarn-error.log",
     "build:es2020": "tsc --project tsconfig.build.json --module commonjs --target es2020 --outDir lib",
     "build:esm": "tsc --project tsconfig.build.json --module ESNext --target ESNEXT --outDir esm",
-    "build:all": "npx concurrently@8.2.2 \"yarn build:es2020\" \"yarn build:esm\"",
+    "build:dist:mod": "NODE_ENV=production webpack --config ./webpack.config.js && npx rimraf@5.0.5 dist/index.html",
+    "build:dist:cjs": "NODE_ENV=production BUILD_TARGET=cjs webpack --config ./webpack.config.js && npx rimraf@5.0.5 dist/index.html",
+    "build:all": "npx concurrently@8.2.2 \"yarn build:es2020\" \"yarn build:esm && yarn build:dist:mod && yarn build:dist:cjs\"",
     "build": "yarn build:es2020",

@@ -59,6 +64,10 @@ "test": "jest",
     "config-galore": "^1.0.0",
+    "html-webpack-plugin": "^5.6.3",
     "jest": "^29.7.0",
     "ts-jest": "^29.2.5",
+    "ts-loader": "^9.5.1",
     "tslib": "^2.8.1",
-    "typescript": "^5.7.2"
+    "typescript": "^5.7.2",
+    "webpack": "^5.96.1",
+    "webpack-cli": "^5.1.4"
   },

@@ -65,0 +74,0 @@ "jest": {
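
The manifest hunks point "main" at a new webpack CommonJS bundle in dist/, add "module" and "unpkg" entries for the bundled module and CDN builds, publish dist/ in "files", and pull in webpack, webpack-cli, ts-loader, and html-webpack-plugin to produce those bundles. A consumer-side sketch, under the assumption that the dist/ CJS bundle mirrors the named exports shown in the CommonJS index diff above:

// Assumption: the webpack CJS bundle re-exports the same namespaces as lib/index.js.
const { markdown, html, css } = require('very-small-parser');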
