@getlang/parser
Comparing version 0.0.9 to 0.0.10
import type { Token as MooToken } from 'moo'; | ||
import type { TypeInfo } from '@getlang/utils'; | ||
type Token = Omit<MooToken, 'toString'>; | ||
declare enum NodeKind { | ||
export declare enum NodeKind { | ||
Program = "Program", | ||
@@ -11,5 +12,7 @@ ExtractStmt = "ExtractStmt", | ||
RequestStmt = "RequestStmt", | ||
RequestExpr = "RequestExpr", | ||
TemplateExpr = "TemplateExpr", | ||
LiteralExpr = "LiteralExpr", | ||
IdentifierExpr = "IdentifierExpr", | ||
SelectorExpr = "SelectorExpr", | ||
ModifierExpr = "ModifierExpr", | ||
@@ -19,3 +22,2 @@ FunctionExpr = "FunctionExpr", | ||
ObjectLiteralExpr = "ObjectLiteralExpr", | ||
DrillExpr = "DrillExpr", | ||
SliceExpr = "SliceExpr" | ||
@@ -68,2 +70,6 @@ } | ||
kind: NodeKind.RequestStmt; | ||
request: Expr; | ||
}; | ||
type RequestExpr = { | ||
kind: NodeKind.RequestExpr; | ||
method: Token; | ||
@@ -74,2 +80,3 @@ url: Expr; | ||
body?: Expr; | ||
typeInfo?: TypeInfo; | ||
}; | ||
@@ -79,2 +86,3 @@ type LiteralExpr = { | ||
value: Token; | ||
typeInfo?: TypeInfo; | ||
}; | ||
@@ -84,2 +92,3 @@ type TemplateExpr = { | ||
elements: Expr[]; | ||
typeInfo?: TypeInfo; | ||
}; | ||
@@ -90,6 +99,16 @@ type IdentifierExpr = { | ||
isUrlComponent: boolean; | ||
typeInfo?: TypeInfo; | ||
}; | ||
type SelectorExpr = { | ||
kind: NodeKind.SelectorExpr; | ||
selector: Expr; | ||
expand: boolean; | ||
context?: Expr; | ||
typeInfo?: TypeInfo; | ||
}; | ||
type ModifierExpr = { | ||
kind: NodeKind.ModifierExpr; | ||
value: Token; | ||
context?: Expr; | ||
typeInfo?: TypeInfo; | ||
}; | ||
@@ -99,2 +118,4 @@ type FunctionExpr = { | ||
body: Stmt[]; | ||
context?: Expr; | ||
typeInfo?: TypeInfo; | ||
}; | ||
@@ -105,2 +126,4 @@ type ModuleCallExpr = { | ||
args: Expr; | ||
context?: Expr; | ||
typeInfo?: TypeInfo; | ||
}; | ||
@@ -110,17 +133,15 @@ type ObjectLiteralExpr = { | ||
entries: ObjectEntry[]; | ||
context?: Expr; | ||
typeInfo?: TypeInfo; | ||
}; | ||
type DrillExpr = { | ||
kind: NodeKind.DrillExpr; | ||
target: Expr | 'context'; | ||
bit: Expr; | ||
expand: boolean; | ||
}; | ||
type SliceExpr = { | ||
kind: NodeKind.SliceExpr; | ||
slice: Token; | ||
context?: Expr; | ||
typeInfo?: TypeInfo; | ||
}; | ||
type Stmt = Program | ExtractStmt | AssignmentStmt | DeclImportStmt | DeclInputsStmt | InputDeclStmt | RequestStmt; | ||
type Expr = TemplateExpr | LiteralExpr | IdentifierExpr | ModifierExpr | FunctionExpr | ModuleCallExpr | ObjectLiteralExpr | DrillExpr | SliceExpr; | ||
type Expr = RequestExpr | TemplateExpr | LiteralExpr | IdentifierExpr | SelectorExpr | ModifierExpr | FunctionExpr | ModuleCallExpr | ObjectLiteralExpr | SliceExpr; | ||
type Node = Stmt | Expr; | ||
declare const t: { | ||
export declare const t: { | ||
program: (body: Stmt[]) => Program; | ||
@@ -132,14 +153,14 @@ assignmentStmt: (name: Token, value: Expr, optional: boolean) => AssignmentStmt; | ||
extractStmt: (value: Expr) => ExtractStmt; | ||
requestStmt: (method: Token, url: Expr, headers: RequestEntry[], blocks: RequestBlocks, body: Expr) => RequestStmt; | ||
drillExpr: (target: Expr | "context", bit: Expr, expand: boolean) => DrillExpr; | ||
functionExpr: (body: Stmt[]) => FunctionExpr; | ||
requestStmt: (request: RequestExpr) => RequestStmt; | ||
requestExpr: (method: Token, url: Expr, headers: RequestEntry[], blocks: RequestBlocks, body: Expr) => RequestExpr; | ||
functionExpr: (body: Stmt[], context?: Expr) => FunctionExpr; | ||
identifierExpr: (value: Token) => IdentifierExpr; | ||
literalExpr: (value: Token) => LiteralExpr; | ||
modifierExpr: (value: Token) => ModifierExpr; | ||
moduleCallExpr: (name: Token, args?: Expr) => ModuleCallExpr; | ||
objectLiteralExpr: (entries: ObjectEntry[]) => ObjectLiteralExpr; | ||
sliceExpr: (slice: Token) => SliceExpr; | ||
selectorExpr: (selector: Expr, expand: boolean, context?: Expr) => SelectorExpr; | ||
modifierExpr: (value: Token, context?: Expr) => ModifierExpr; | ||
moduleCallExpr: (name: Token, args?: Expr, context?: Expr) => ModuleCallExpr; | ||
objectLiteralExpr: (entries: ObjectEntry[], context?: Expr) => ObjectLiteralExpr; | ||
sliceExpr: (slice: Token, context?: Expr) => SliceExpr; | ||
templateExpr: (elements: Expr[]) => TemplateExpr; | ||
}; | ||
export type { Token, Program, DeclInputsStmt, Node, Stmt, Expr }; | ||
export { NodeKind, t }; | ||
export type { Token, Program, DeclInputsStmt, Node, Stmt, Expr, RequestExpr }; |
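The 0.0.10 AST drops `DrillExpr` in favor of a dedicated `SelectorExpr` plus an optional `context` field on most expressions, and splits requests into a `RequestStmt` that wraps a `RequestExpr`. A minimal sketch of building the new shapes with the `t` builders follows; the `ast` import path, the `tok` helper, and the placeholder `RequestBlocks` value are assumptions for illustration, not part of the package's documented API.

```ts
import { t, NodeKind, type Token } from '@getlang/parser/ast' // path assumed

// Hypothetical helper: fakes the moo Token fields the builders expect.
const tok = (text: string, value = text): Token => ({
  type: 'token', text, value, offset: 0, lineBreaks: 0, line: 1, col: 1,
})

// `h1` selected from a `div.title` context (previously a DrillExpr chain).
const title = t.selectorExpr(
  t.templateExpr([t.literalExpr(tok('h1'))]),
  /* expand */ false,
  /* context */ t.selectorExpr(t.templateExpr([t.literalExpr(tok('div.title'))]), false),
)

// Requests are now statements that wrap a RequestExpr node.
const req = t.requestStmt(
  t.requestExpr(
    tok('GET'),
    t.templateExpr([t.literalExpr(tok('https://example.com'))]),
    [],                 // headers: RequestEntry[]
    {} as any,          // blocks: RequestBlocks -- exact shape not shown in this diff
    t.templateExpr([]), // body placeholder
  ),
)

console.log(title.kind === NodeKind.SelectorExpr, req.request.kind === NodeKind.RequestExpr) // true true
```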
@@ -1,2 +0,2 @@ | ||
var NodeKind; | ||
export var NodeKind; | ||
(function (NodeKind) { | ||
@@ -10,5 +10,7 @@ NodeKind["Program"] = "Program"; | ||
NodeKind["RequestStmt"] = "RequestStmt"; | ||
NodeKind["RequestExpr"] = "RequestExpr"; | ||
NodeKind["TemplateExpr"] = "TemplateExpr"; | ||
NodeKind["LiteralExpr"] = "LiteralExpr"; | ||
NodeKind["IdentifierExpr"] = "IdentifierExpr"; | ||
NodeKind["SelectorExpr"] = "SelectorExpr"; | ||
NodeKind["ModifierExpr"] = "ModifierExpr"; | ||
@@ -18,3 +20,2 @@ NodeKind["FunctionExpr"] = "FunctionExpr"; | ||
NodeKind["ObjectLiteralExpr"] = "ObjectLiteralExpr"; | ||
NodeKind["DrillExpr"] = "DrillExpr"; | ||
NodeKind["SliceExpr"] = "SliceExpr"; | ||
@@ -50,4 +51,8 @@ })(NodeKind || (NodeKind = {})); | ||
}); | ||
const requestStmt = (method, url, headers, blocks, body) => ({ | ||
const requestStmt = (request) => ({ | ||
kind: NodeKind.RequestStmt, | ||
request, | ||
}); | ||
const requestExpr = (method, url, headers, blocks, body) => ({ | ||
kind: NodeKind.RequestExpr, | ||
method, | ||
@@ -59,11 +64,6 @@ url, | ||
}); | ||
const drillExpr = (target, bit, expand) => ({ | ||
kind: NodeKind.DrillExpr, | ||
target, | ||
bit, | ||
expand, | ||
}); | ||
const functionExpr = (body) => ({ | ||
const functionExpr = (body, context) => ({ | ||
kind: NodeKind.FunctionExpr, | ||
body, | ||
context, | ||
}); | ||
@@ -79,18 +79,28 @@ const identifierExpr = (value) => ({ | ||
}); | ||
const modifierExpr = (value) => ({ | ||
const selectorExpr = (selector, expand, context) => ({ | ||
kind: NodeKind.SelectorExpr, | ||
selector, | ||
expand, | ||
context, | ||
}); | ||
const modifierExpr = (value, context) => ({ | ||
kind: NodeKind.ModifierExpr, | ||
value, | ||
context, | ||
}); | ||
const objectLiteralExpr = (entries) => ({ | ||
const objectLiteralExpr = (entries, context) => ({ | ||
kind: NodeKind.ObjectLiteralExpr, | ||
entries, | ||
context, | ||
}); | ||
const moduleCallExpr = (name, args = objectLiteralExpr([])) => ({ | ||
const moduleCallExpr = (name, args = objectLiteralExpr([]), context) => ({ | ||
kind: NodeKind.ModuleCallExpr, | ||
name, | ||
args, | ||
context, | ||
}); | ||
const sliceExpr = (slice) => ({ | ||
const sliceExpr = (slice, context) => ({ | ||
kind: NodeKind.SliceExpr, | ||
slice, | ||
context, | ||
}); | ||
@@ -101,3 +111,3 @@ const templateExpr = (elements) => ({ | ||
}); | ||
const t = { | ||
export const t = { | ||
program, | ||
@@ -112,6 +122,7 @@ // STATEMENTS | ||
// EXPRESSIONS | ||
drillExpr, | ||
requestExpr, | ||
functionExpr, | ||
identifierExpr, | ||
literalExpr, | ||
selectorExpr, | ||
modifierExpr, | ||
@@ -123,2 +134,1 @@ moduleCallExpr, | ||
}; | ||
export { NodeKind, t }; |
@@ -1,4 +0,4 @@ | ||
export declare const analyzeSlice: (source: string) => { | ||
export declare const analyzeSlice: (_source: string, includeDeps: boolean) => { | ||
source: string; | ||
deps: string[]; | ||
}; |
@@ -8,12 +8,15 @@ import { parse } from 'acorn'; | ||
]; | ||
export const analyzeSlice = (source) => { | ||
const ast = parse(source, { | ||
export const analyzeSlice = (_source, includeDeps) => { | ||
const ast = parse(_source, { | ||
ecmaVersion: 'latest', | ||
allowReturnOutsideFunction: true, | ||
}); | ||
let src = source; | ||
let source = _source; | ||
// auto-insert the return statement | ||
if (ast.body.length === 1 && ast.body[0]?.type !== 'ReturnStatement') { | ||
src = `return ${src}`; | ||
source = `return ${source}`; | ||
} | ||
if (!includeDeps) { | ||
return { source, deps: [] }; | ||
} | ||
// detect globals and load them from context | ||
@@ -23,8 +26,11 @@ const deps = detect(ast) | ||
.filter(id => !browserGlobals.includes(id)); | ||
if (deps.includes('$')) { | ||
return { source, deps: [] }; | ||
} | ||
if (deps.length) { | ||
const contextVars = deps.join(', '); | ||
const loadContext = `const { ${contextVars} } = $\n`; | ||
src = loadContext + src; | ||
source = loadContext + source; | ||
} | ||
return { source: src, deps }; | ||
return { source, deps }; | ||
}; |
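A usage sketch of the reworked `analyzeSlice`, inferred directly from the logic above; the deep import path is an assumption.

```ts
import { analyzeSlice } from '@getlang/parser/desugar/slice' // path assumed

// A bare expression gets an auto-inserted `return`, and its free identifiers
// are loaded from the `$` context object when includeDeps is true.
analyzeSlice('foo.bar', true)
// => { source: 'const { foo } = $\nreturn foo.bar', deps: ['foo'] }

// With includeDeps = false only the `return` insertion happens.
analyzeSlice('foo.bar', false)
// => { source: 'return foo.bar', deps: [] }

// Slices that already reference `$` directly opt out of dependency injection.
analyzeSlice('$.length', true)
// => { source: 'return $.length', deps: [] }
```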
@@ -1,3 +0,6 @@ | ||
import { type t } from '../ast'; | ||
type RequestStmt = ReturnType<typeof t.requestStmt>; | ||
import { type TypeInfo } from '@getlang/utils'; | ||
import type { Node } from '../ast'; | ||
import { type RequestExpr } from '../ast'; | ||
export declare function getTypeInfo(node: Node | undefined, msg?: string): TypeInfo; | ||
export declare function getModTypeInfo(mod: string): TypeInfo; | ||
export declare function createToken(text: string, value?: string): { | ||
@@ -11,3 +14,2 @@ text: string; | ||
}; | ||
export declare function getContentMod(req: RequestStmt): "json" | "html" | "js"; | ||
export {}; | ||
export declare function getContentMod(req: RequestExpr): "html" | "js" | "json"; |
@@ -0,2 +1,25 @@ | ||
import { Type, ValueTypeError } from '@getlang/utils'; | ||
import { NodeKind } from '../ast'; | ||
const modTypeMap = { | ||
html: { type: Type.Html }, | ||
json: { type: Type.Unknown }, | ||
js: { type: Type.Js }, | ||
headers: { type: Type.Headers }, | ||
cookies: { type: Type.Cookies }, | ||
link: { type: Type.Unknown }, | ||
}; | ||
export function getTypeInfo(node, msg) { | ||
if (node && 'typeInfo' in node && node.typeInfo) { | ||
return node.typeInfo; | ||
} | ||
const errMsg = msg ?? `Failed to locate type info for node: ${node?.kind}`; | ||
throw new ValueTypeError(errMsg); | ||
} | ||
export function getModTypeInfo(mod) { | ||
const typeInfo = modTypeMap[mod]; | ||
if (typeInfo) { | ||
return typeInfo; | ||
} | ||
throw new ValueTypeError(`Failed to locate type info for modifier: ${mod}`); | ||
} | ||
export function createToken(text, value = text) { | ||
@@ -3,0 +26,0 @@ return { |
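The new type helpers are small lookups over the modifier map shown above; a quick sketch of what they resolve to (import path assumed):

```ts
import { Type } from '@getlang/utils'
import { getModTypeInfo } from '@getlang/parser/desugar/utils' // path assumed

getModTypeInfo('html')    // => { type: Type.Html }
getModTypeInfo('cookies') // => { type: Type.Cookies }
getModTypeInfo('json')    // => { type: Type.Unknown } (JSON bodies stay untyped)
getModTypeInfo('nope')    // throws ValueTypeError('Failed to locate type info for modifier: nope')
```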
import { doc } from 'prettier'; | ||
import { NodeKind } from './ast'; | ||
import { visit } from './visitor'; | ||
// NOTE: avoid using template interpolation with prettier.Doc | ||
// as the Doc may be a Doc array or Doc command | ||
const { builders: { group, indent, join, line, hardline, softline, ifBreak }, printer, } = doc; | ||
const unwrapHead = (expr) => { | ||
if (expr.kind === NodeKind.DrillExpr) { | ||
if (expr.target === 'context') { | ||
return expr.bit; | ||
} | ||
if (expr.target.kind === NodeKind.DrillExpr) { | ||
return unwrapHead(expr.target); | ||
} | ||
return expr.target; | ||
} | ||
return expr; | ||
}; | ||
const unwrapTail = (expr) => { | ||
if (expr.kind === NodeKind.DrillExpr) { | ||
return expr.bit; | ||
} | ||
return expr; | ||
}; | ||
const printVisitor = { | ||
@@ -41,13 +25,3 @@ Program(node) { | ||
}, | ||
InputDeclStmt(node) { | ||
const parts = [node.id.text]; | ||
if (node.optional) { | ||
parts.push('?'); | ||
} | ||
if (node.defaultValue) { | ||
parts.push(' = ', node.defaultValue); | ||
} | ||
return group(parts); | ||
}, | ||
RequestStmt(node) { | ||
RequestExpr(node) { | ||
const parts = [node.method.value, ' ', node.url]; | ||
@@ -69,20 +43,30 @@ for (const h of node.headers) { | ||
}, | ||
AssignmentStmt(node, orig) { | ||
const idTarget = unwrapHead(orig.value).kind === NodeKind.IdentifierExpr; | ||
const parts = ['set ', node.name.value]; | ||
InputDeclStmt(node) { | ||
const parts = [node.id.text]; | ||
if (node.optional) { | ||
parts.push('?'); | ||
} | ||
const sep = idTarget ? ' = $' : ' = '; | ||
parts.push(sep, node.value); | ||
if (node.defaultValue) { | ||
parts.push(' = ', node.defaultValue); | ||
} | ||
return group(parts); | ||
}, | ||
ExtractStmt(node, orig) { | ||
const head = unwrapHead(orig.value); | ||
const punct = head.kind === NodeKind.IdentifierExpr ? ['$'] : []; | ||
return group(['extract ', ...punct, node.value]); | ||
RequestStmt(node) { | ||
return node.request; | ||
}, | ||
AssignmentStmt(node) { | ||
return group([ | ||
'set ', | ||
node.name.value, | ||
node.optional ? '?' : '', | ||
' = ', | ||
node.value, | ||
]); | ||
}, | ||
ExtractStmt(node) { | ||
return group(['extract ', node.value]); | ||
}, | ||
ObjectLiteralExpr(node, orig) { | ||
const shorthand = []; | ||
const shouldBreak = orig.entries.some(e => unwrapTail(e.value).kind === NodeKind.TemplateExpr); | ||
const shouldBreak = orig.entries.some(e => e.value.kind === NodeKind.SelectorExpr); | ||
const entries = node.entries.map((entry, i) => { | ||
@@ -100,7 +84,3 @@ const origEntry = orig.entries[i]; | ||
// value | ||
const head = unwrapHead(origEntry.value); | ||
let value = entry.value; | ||
if (head.kind === NodeKind.IdentifierExpr) { | ||
value = ['$', value]; | ||
} | ||
const value = entry.value; | ||
let shValue = entry.value; | ||
@@ -119,5 +99,6 @@ if (Array.isArray(shValue) && | ||
const sep = ifBreak(line, [',', line]); | ||
return group(['{', indent([line, join(sep, inner)]), line, '}'], { | ||
const obj = group(['{', indent([line, join(sep, inner)]), line, '}'], { | ||
shouldBreak, | ||
}); | ||
return node.context ? [node.context, indent([line, '-> ', obj])] : obj; | ||
}, | ||
@@ -130,15 +111,19 @@ TemplateExpr(node, orig) { | ||
} | ||
if (origEl.kind === NodeKind.LiteralExpr) { | ||
else if (origEl.kind === NodeKind.LiteralExpr) { | ||
return el; | ||
} | ||
if (origEl.kind !== NodeKind.IdentifierExpr) { | ||
else if (origEl.kind !== NodeKind.IdentifierExpr) { | ||
throw new Error(`Unexpected template node: ${origEl?.kind}`); | ||
} | ||
// strip the leading `$` character | ||
if (typeof el !== 'string' && !Array.isArray(el)) { | ||
throw new Error(`Unsupported template node: ${el.type} command`); | ||
} | ||
let ret = el.slice(1); | ||
const nextEl = node.elements[i + 1]; | ||
let elDoc = el; | ||
if (typeof nextEl === 'string' && /^\w/.test(nextEl)) { | ||
// use ${id} syntax to delineate against next element in template | ||
elDoc = `{${el}}`; | ||
ret = ['{', ret, '}']; | ||
} | ||
return origEl.isUrlComponent ? `:${elDoc}` : `$${elDoc}`; | ||
return [origEl.isUrlComponent ? ':' : '$', ret]; | ||
}); | ||
@@ -150,15 +135,15 @@ }, | ||
IdentifierExpr(node) { | ||
return node.value.value; | ||
return ['$', node.value.value]; | ||
}, | ||
DrillExpr(node) { | ||
if (node.target === 'context') { | ||
return node.bit; | ||
SelectorExpr(node) { | ||
if (!node.context) { | ||
const arrow = node.expand ? '=> ' : ''; | ||
return [arrow, node.selector]; | ||
} | ||
const arrow = node.expand ? '=> ' : '-> '; | ||
return [node.target, indent([line, arrow, node.bit])]; | ||
// return group([node.target, line, arrow, node.bit]) | ||
// return fill([node.target, line, arrow, node.bit]) | ||
return [node.context, indent([line, arrow, node.selector])]; | ||
}, | ||
ModifierExpr(node) { | ||
return `@${node.value.value}`; | ||
const mod = ['@', node.value.value]; | ||
return node.context ? [node.context, indent([line, '-> ', mod])] : mod; | ||
}, | ||
@@ -169,3 +154,3 @@ SliceExpr(node) { | ||
const lines = value.split('\n'); | ||
return group([ | ||
const slice = group([ | ||
quot, | ||
@@ -176,5 +161,12 @@ indent([softline, join(hardline, lines)]), | ||
]); | ||
return node.context ? [node.context, indent([line, '-> ', slice])] : slice; | ||
}, | ||
FunctionExpr(node) { | ||
return ['(', indent(node.body.flatMap(x => [hardline, x])), hardline, ')']; | ||
const fn = [ | ||
'(', | ||
indent(node.body.flatMap(x => [hardline, x])), | ||
hardline, | ||
')', | ||
]; | ||
return node.context ? [node.context, indent([line, '-> ', fn])] : fn; | ||
}, | ||
@@ -181,0 +173,0 @@ }; |
@@ -0,195 +1,197 @@ | ||
import { RootScope, Type, invariant, QuerySyntaxError, } from '@getlang/utils'; | ||
import { NodeKind, t } from './ast'; | ||
import { visit } from './visitor'; | ||
import { analyzeSlice } from './desugar/slice'; | ||
import { createToken, getContentMod } from './desugar/utils'; | ||
class Scope { | ||
contextStack = []; | ||
parsedResponses = new Map(); | ||
constructor(initialContext) { | ||
if (initialContext) { | ||
this.contextStack.push(initialContext); | ||
} | ||
import { insertParsers } from './desugar/parsers'; | ||
import { createToken, getContentMod, getTypeInfo, getModTypeInfo, } from './desugar/utils'; | ||
export function desugar(ast) { | ||
invariant(ast.kind === NodeKind.Program, new QuerySyntaxError(`Non-program AST node provided: ${ast}`)); | ||
const scope = new RootScope(); | ||
const parsers = new Map(); | ||
function requireContext() { | ||
invariant(scope.context, new QuerySyntaxError('Unable to locate active context')); | ||
return scope.context; | ||
} | ||
pushContext(context) { | ||
this.contextStack.push(context); | ||
if (context.kind === NodeKind.RequestStmt) { | ||
this.parsedResponses.set(context, {}); | ||
function inferContext(_mod) { | ||
const context = requireContext(); | ||
if (context.kind !== NodeKind.RequestExpr) { | ||
return t.identifierExpr(createToken('')); | ||
} | ||
const mod = _mod ?? getContentMod(context); | ||
const [mods, index] = parsers.get(context) ?? [new Set(), parsers.size]; | ||
mods.add(mod); | ||
parsers.set(context, [mods, index]); | ||
const id = `__${mod}_${index}`; | ||
scope.vars[id] ??= { | ||
...t.modifierExpr(createToken(mod)), | ||
typeInfo: getModTypeInfo(mod), | ||
}; | ||
return t.identifierExpr(createToken(id)); | ||
} | ||
popContext() { | ||
this.contextStack.pop(); | ||
} | ||
get context() { | ||
return this.contextStack.at(-1); | ||
} | ||
hasRequest(req) { | ||
return this.parsedResponses.has(req); | ||
} | ||
getParsedRequestId(req, mod = getContentMod(req)) { | ||
const parsed = this.parsedResponses.get(req); | ||
if (!parsed) { | ||
throw new SyntaxError('Request cannot be parsed, does not exist in scope'); | ||
function contextual(context, itemContext, visit, cb) { | ||
if (context) { | ||
const ctype = getTypeInfo(context); | ||
if (ctype.type === Type.List) { | ||
scope.pushContext(context); | ||
const fn = visit(t.functionExpr([t.extractStmt(itemContext)], t.identifierExpr(createToken('')))); | ||
invariant(fn.kind === NodeKind.FunctionExpr, new QuerySyntaxError('Failed to create item context')); | ||
scope.popContext(); | ||
const typeInfo = { type: Type.List, of: getTypeInfo(fn) }; | ||
return { ...fn, context, typeInfo }; | ||
} | ||
} | ||
const pos = [...this.parsedResponses.keys()].indexOf(req); | ||
const name = `__${mod}_${pos}`; | ||
parsed[mod] = name; | ||
return name; | ||
context && scope.pushContext(context); | ||
const node = cb(); | ||
context && scope.popContext(); | ||
return node; | ||
} | ||
finalize() { | ||
const parserStmts = new Map(); | ||
for (const [req, parsed] of this.parsedResponses.entries()) { | ||
const stmts = Object.entries(parsed).map(([mod, id]) => { | ||
const contextId = t.identifierExpr(createToken('')); | ||
let target; | ||
let field = 'body'; | ||
if (mod === 'cookies') { | ||
target = t.drillExpr(contextId, t.templateExpr([t.literalExpr(createToken('headers'))]), false); | ||
field = 'set-cookie'; | ||
} | ||
else { | ||
target = contextId; | ||
if (mod === 'headers') { | ||
field = 'headers'; | ||
const visitor = { | ||
LiteralExpr(node) { | ||
return { ...node, typeInfo: { type: Type.Unknown } }; | ||
}, | ||
TemplateExpr(node) { | ||
return { ...node, typeInfo: { type: Type.Unknown } }; | ||
}, | ||
IdentifierExpr(node) { | ||
const id = node.value.value; | ||
const typeInfo = getTypeInfo(scope.vars[id], `Failed to find type info for variable '${id}'`); | ||
return { ...node, typeInfo }; | ||
}, | ||
FunctionExpr: { | ||
enter(node, visit) { | ||
let context = node.context && visit(node.context); | ||
if (context) { | ||
const ctype = getTypeInfo(context); | ||
if (ctype.type === Type.List) { | ||
// item context | ||
context = { ...context, typeInfo: ctype.of }; | ||
} | ||
} | ||
let expr = t.drillExpr(target, t.templateExpr([t.literalExpr(createToken(field))]), false); | ||
if (mod !== 'headers') { | ||
expr = t.drillExpr(expr, t.modifierExpr(createToken(`@${mod}`, mod)), false); | ||
} | ||
const optional = mod === 'cookies'; | ||
return t.assignmentStmt(createToken(id), expr, optional); | ||
}); | ||
parserStmts.set(req, stmts); | ||
} | ||
return { parserStmts }; | ||
} | ||
} | ||
class ScopeStack { | ||
stack = []; | ||
newScope() { | ||
let initialContext; | ||
if (this.stack.length) { | ||
initialContext = this.current.context; | ||
} | ||
const scope = new Scope(initialContext); | ||
this.stack.push(scope); | ||
} | ||
finalizeScope() { | ||
const scope = this.stack.pop(); | ||
if (scope) { | ||
return scope.finalize(); | ||
} | ||
throw new SyntaxError('Attempted to finalize scope on an empty stack'); | ||
} | ||
get current() { | ||
const scope = this.stack.at(-1); | ||
if (!scope) { | ||
throw new SyntaxError('Scope not found'); | ||
} | ||
return scope; | ||
} | ||
} | ||
const appendTrailing = (body, map) => { | ||
return body.flatMap(stmt => { | ||
const withTrailing = [stmt, ...(map.get(stmt) || [])]; | ||
return withTrailing; | ||
}); | ||
}; | ||
export function desugar(ast) { | ||
if (!(ast.kind === NodeKind.Program)) { | ||
throw new SyntaxError(`Non-program AST node provided: ${ast}`); | ||
} | ||
const scopes = new ScopeStack(); | ||
let disableSliceAnalysis = false; | ||
const visitor = { | ||
Program: { | ||
enter() { | ||
scopes.newScope(); | ||
return undefined; | ||
scope.push(context); | ||
const body = node.body.map(stmt => visit(stmt)); | ||
const extracted = scope.pop(); | ||
return { | ||
...node, | ||
body: insertParsers(body, parsers), | ||
typeInfo: getTypeInfo(extracted), | ||
}; | ||
}, | ||
exit(node) { | ||
const final = scopes.finalizeScope(); | ||
const body = appendTrailing(node.body, final?.parserStmts); | ||
return { ...node, body }; | ||
}, | ||
ObjectLiteralExpr: { | ||
enter(node, visit) { | ||
const context = node.context && visit(node.context); | ||
const itemContext = t.objectLiteralExpr(node.entries); | ||
return contextual(context, itemContext, visit, () => { | ||
const entries = node.entries.map(e => ({ | ||
key: visit(e.key), | ||
value: visit(e.value), | ||
optional: e.optional, | ||
})); | ||
return { | ||
...node, | ||
entries, | ||
typeInfo: { type: Type.Unknown }, | ||
}; | ||
}); | ||
}, | ||
}, | ||
FunctionExpr: { | ||
enter() { | ||
scopes.newScope(); | ||
return undefined; | ||
SelectorExpr: { | ||
enter(node, visit) { | ||
const context = visit(node.context ?? inferContext()); | ||
const itemContext = t.selectorExpr(node.selector, node.expand); | ||
return contextual(context, itemContext, visit, () => { | ||
const ctype = getTypeInfo(context); | ||
const typeInfo = node.expand | ||
? { type: Type.List, of: ctype } | ||
: ctype; | ||
return { ...node, context, typeInfo }; | ||
}); | ||
}, | ||
exit(node) { | ||
scopes.finalizeScope(); | ||
return node; | ||
}, | ||
ModifierExpr: { | ||
enter(node, visit) { | ||
const mod = node.value.value; | ||
const context = visit(node.context ?? inferContext(mod)); | ||
const itemContext = t.modifierExpr(node.value); | ||
const onRequest = !node.context && scope.context?.kind === NodeKind.RequestExpr; | ||
return contextual(context, itemContext, visit, () => { | ||
// if request context, replace modifier with parser identifier | ||
return onRequest | ||
? context | ||
: { ...node, context, typeInfo: getModTypeInfo(mod) }; | ||
}); | ||
}, | ||
}, | ||
RequestStmt(node) { | ||
scopes.current.pushContext(node); | ||
return node; | ||
}, | ||
DrillExpr: { | ||
// typeinfo is lost / resets back to Type.Unknown | ||
SliceExpr: { | ||
enter(node, visit) { | ||
let target; | ||
let bit = node.bit; | ||
if (node.target === 'context') { | ||
const { context } = scopes.current; | ||
if (!context) { | ||
throw new SyntaxError('Drill unable to locate active context'); | ||
const context = node.context && visit(node.context); | ||
const itemContext = t.sliceExpr(node.slice); | ||
return contextual(context, itemContext, visit, () => { | ||
const stat = analyzeSlice(node.slice.value, !context); | ||
const slice = createToken(stat.source); | ||
const typeInfo = { type: Type.Unknown }; | ||
if (stat.deps.length === 0) { | ||
return { | ||
...t.sliceExpr(slice, context), | ||
typeInfo, | ||
}; | ||
} | ||
if (context.kind === NodeKind.RequestStmt) { | ||
const bit = node.bit; | ||
const isModifier = bit.kind === NodeKind.ModifierExpr; | ||
const mod = isModifier ? bit.value.value : undefined; | ||
const scope = scopes.stack.find(x => x.hasRequest(context)); | ||
if (!scope) { | ||
throw new SyntaxError('Encountered orphan context'); | ||
} | ||
const id = scope.getParsedRequestId(context, mod); | ||
target = t.identifierExpr(createToken(id)); | ||
if (isModifier) { | ||
// replace the context drill with identifier for the | ||
// parsed value, essentially removing the modifier expr | ||
return target; | ||
} | ||
} | ||
else { | ||
target = t.identifierExpr(createToken('')); | ||
} | ||
} | ||
else { | ||
target = visit(node.target); | ||
} | ||
scopes.current.pushContext(target); | ||
disableSliceAnalysis = bit.kind === NodeKind.SliceExpr; | ||
bit = visit(node.bit); | ||
disableSliceAnalysis = false; | ||
scopes.current.popContext(); | ||
return t.drillExpr(target, bit, node.expand); | ||
const contextEntries = stat.deps.map(id => ({ | ||
key: t.literalExpr(createToken(id)), | ||
value: t.identifierExpr(createToken(id)), | ||
optional: false, | ||
})); | ||
return { | ||
...node, | ||
slice, | ||
context: t.objectLiteralExpr(contextEntries), | ||
typeInfo, | ||
}; | ||
}); | ||
}, | ||
}, | ||
SliceExpr(node) { | ||
if (disableSliceAnalysis) { | ||
// a context for the slice has already been defined | ||
// avoid desugar, which may have already been run | ||
return node; | ||
} | ||
const stat = analyzeSlice(node.slice.value); | ||
const slice = t.sliceExpr(createToken(stat.source)); | ||
if (!stat.deps.length) { | ||
return slice; | ||
} | ||
const contextEntries = stat.deps.map(id => ({ | ||
key: t.literalExpr(createToken(id)), | ||
value: t.identifierExpr(createToken(id)), | ||
optional: false, | ||
})); | ||
const context = t.objectLiteralExpr(contextEntries); | ||
return t.drillExpr(context, slice, false); | ||
// typeinfo is lost / resets back to Type.Unknown | ||
ModuleCallExpr(node) { | ||
return { ...node, typeInfo: { type: Type.Unknown } }; | ||
}, | ||
RequestExpr(node) { | ||
return { | ||
...node, | ||
typeInfo: { | ||
type: Type.Struct, | ||
schema: { | ||
status: { type: Type.Unknown }, | ||
headers: { type: Type.Headers }, | ||
body: { type: Type.Unknown }, | ||
}, | ||
}, | ||
}; | ||
}, | ||
InputDeclStmt(node) { | ||
scope.vars[node.id.value] = { | ||
...t.identifierExpr(node.id), | ||
typeInfo: { type: Type.Unknown }, | ||
}; | ||
return node; | ||
}, | ||
RequestStmt(node) { | ||
scope.pushContext(node.request); | ||
return node; | ||
}, | ||
AssignmentStmt(node) { | ||
scope.vars[node.name.value] = node.value; | ||
return node; | ||
}, | ||
ExtractStmt(node) { | ||
scope.extracted = node.value; | ||
return node; | ||
}, | ||
Program(node) { | ||
return { ...node, body: insertParsers(node.body, parsers) }; | ||
}, | ||
}; | ||
const simplified = visit(ast, visitor); | ||
if (simplified.kind !== NodeKind.Program) { | ||
throw new SyntaxError('Desugar encountered unexpected error'); | ||
} | ||
invariant(simplified.kind === NodeKind.Program, new QuerySyntaxError('Desugar encountered unexpected error')); | ||
return simplified; | ||
} |
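The rewritten desugar pass threads explicit contexts and type info through the tree and names inferred response parsers `__<mod>_<index>` (e.g. `__html_0` for the first HTML response). A rough sketch of the intended flow, assuming the package's public entry re-exports `parse` and `desugar` (the index module is not part of this diff):

```ts
import { parse, desugar } from '@getlang/parser' // assumed public entry

const program = desugar(parse(`
GET https://example.com

extract h1
`))

// After desugaring, the bare `h1` selector carries an inferred context: an
// identifier such as `__html_0` that insertParsers() (not shown in this diff)
// is expected to bind to the parsed response body. Every expression also gains
// a `typeInfo` field, e.g. { type: Type.Unknown } for literals and templates.
console.log(program.kind) // 'Program'
```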
@@ -5,471 +5,129 @@ // Generated automatically by nearley, version 2.20.1 | ||
// @ts-ignore | ||
function id(d) { | ||
return d[0]; | ||
} | ||
function id(d) { return d[0]; } | ||
import lexer from './parse/lexer'; | ||
import * as p from './parse/parse'; | ||
; | ||
; | ||
; | ||
; | ||
const grammar = { | ||
Lexer: lexer, | ||
ParserRules: [ | ||
{ | ||
name: 'program', | ||
symbols: ['_', 'header', 'statements', '_'], | ||
postprocess: p.program, | ||
}, | ||
{ name: 'header$ebnf$1', symbols: [] }, | ||
{ | ||
name: 'header$ebnf$1$subexpression$1$subexpression$1', | ||
symbols: ['import'], | ||
}, | ||
{ | ||
name: 'header$ebnf$1$subexpression$1$subexpression$1', | ||
symbols: ['inputs'], | ||
}, | ||
{ | ||
name: 'header$ebnf$1$subexpression$1', | ||
symbols: ['header$ebnf$1$subexpression$1$subexpression$1', 'line_sep'], | ||
}, | ||
{ | ||
name: 'header$ebnf$1', | ||
symbols: ['header$ebnf$1', 'header$ebnf$1$subexpression$1'], | ||
postprocess: d => d[0].concat([d[1]]), | ||
}, | ||
{ name: 'header', symbols: ['header$ebnf$1'], postprocess: p.header }, | ||
{ name: 'statements$ebnf$1', symbols: [] }, | ||
{ | ||
name: 'statements$ebnf$1$subexpression$1', | ||
symbols: ['line_sep', 'statement'], | ||
}, | ||
{ | ||
name: 'statements$ebnf$1', | ||
symbols: ['statements$ebnf$1', 'statements$ebnf$1$subexpression$1'], | ||
postprocess: d => d[0].concat([d[1]]), | ||
}, | ||
{ | ||
name: 'statements', | ||
symbols: ['statement', 'statements$ebnf$1'], | ||
postprocess: p.statements, | ||
}, | ||
{ name: 'statement$subexpression$1', symbols: ['request'] }, | ||
{ name: 'statement$subexpression$1', symbols: ['assignment'] }, | ||
{ name: 'statement$subexpression$1', symbols: ['extract'] }, | ||
{ | ||
name: 'statement', | ||
symbols: ['statement$subexpression$1'], | ||
postprocess: p.idd, | ||
}, | ||
{ | ||
name: 'import', | ||
symbols: [ | ||
{ literal: 'import' }, | ||
'__', | ||
lexer.has('identifier') ? { type: 'identifier' } : identifier, | ||
], | ||
postprocess: p.declImport, | ||
}, | ||
{ name: 'inputs$ebnf$1', symbols: [] }, | ||
{ | ||
name: 'inputs$ebnf$1$subexpression$1', | ||
symbols: ['_', { literal: ',' }, '_', 'input_decl'], | ||
}, | ||
{ | ||
name: 'inputs$ebnf$1', | ||
symbols: ['inputs$ebnf$1', 'inputs$ebnf$1$subexpression$1'], | ||
postprocess: d => d[0].concat([d[1]]), | ||
}, | ||
{ | ||
name: 'inputs', | ||
symbols: [ | ||
{ literal: 'inputs' }, | ||
'__', | ||
{ literal: '{' }, | ||
'_', | ||
'input_decl', | ||
'inputs$ebnf$1', | ||
'_', | ||
{ literal: '}' }, | ||
], | ||
postprocess: p.declInputs, | ||
}, | ||
{ name: 'assignment$ebnf$1', symbols: [{ literal: '?' }], postprocess: id }, | ||
{ name: 'assignment$ebnf$1', symbols: [], postprocess: () => null }, | ||
{ | ||
name: 'assignment', | ||
symbols: [ | ||
{ literal: 'set' }, | ||
'__', | ||
lexer.has('identifier') ? { type: 'identifier' } : identifier, | ||
'assignment$ebnf$1', | ||
'_', | ||
{ literal: '=' }, | ||
'_', | ||
'drill', | ||
], | ||
postprocess: p.assignment, | ||
}, | ||
{ | ||
name: 'extract', | ||
symbols: [{ literal: 'extract' }, '__', 'drill'], | ||
postprocess: p.extract, | ||
}, | ||
{ name: 'input_decl$ebnf$1', symbols: [{ literal: '?' }], postprocess: id }, | ||
{ name: 'input_decl$ebnf$1', symbols: [], postprocess: () => null }, | ||
{ | ||
name: 'input_decl$ebnf$2$subexpression$1', | ||
symbols: ['_', { literal: '=' }, '_', 'input_default'], | ||
}, | ||
{ | ||
name: 'input_decl$ebnf$2', | ||
symbols: ['input_decl$ebnf$2$subexpression$1'], | ||
postprocess: id, | ||
}, | ||
{ name: 'input_decl$ebnf$2', symbols: [], postprocess: () => null }, | ||
{ | ||
name: 'input_decl', | ||
symbols: [ | ||
lexer.has('identifier') ? { type: 'identifier' } : identifier, | ||
'input_decl$ebnf$1', | ||
'input_decl$ebnf$2', | ||
], | ||
postprocess: p.inputDecl, | ||
}, | ||
{ name: 'input_default', symbols: ['slice'], postprocess: id }, | ||
{ | ||
name: 'request$ebnf$1$subexpression$1', | ||
symbols: ['line_sep', 'request_block'], | ||
}, | ||
{ | ||
name: 'request$ebnf$1', | ||
symbols: ['request$ebnf$1$subexpression$1'], | ||
postprocess: id, | ||
}, | ||
{ name: 'request$ebnf$1', symbols: [], postprocess: () => null }, | ||
{ name: 'request$ebnf$2', symbols: [] }, | ||
{ | ||
name: 'request$ebnf$2$subexpression$1', | ||
symbols: ['line_sep', 'request_block_named'], | ||
}, | ||
{ | ||
name: 'request$ebnf$2', | ||
symbols: ['request$ebnf$2', 'request$ebnf$2$subexpression$1'], | ||
postprocess: d => d[0].concat([d[1]]), | ||
}, | ||
{ | ||
name: 'request$ebnf$3$subexpression$1', | ||
symbols: ['line_sep', 'request_block_body'], | ||
}, | ||
{ | ||
name: 'request$ebnf$3', | ||
symbols: ['request$ebnf$3$subexpression$1'], | ||
postprocess: id, | ||
}, | ||
{ name: 'request$ebnf$3', symbols: [], postprocess: () => null }, | ||
{ | ||
name: 'request', | ||
symbols: [ | ||
lexer.has('request_verb') ? { type: 'request_verb' } : request_verb, | ||
'template', | ||
'request$ebnf$1', | ||
'request$ebnf$2', | ||
'request$ebnf$3', | ||
], | ||
postprocess: p.request, | ||
}, | ||
{ | ||
name: 'request_block_named', | ||
symbols: [ | ||
lexer.has('request_block_name') | ||
? { type: 'request_block_name' } | ||
: request_block_name, | ||
'line_sep', | ||
'request_block', | ||
], | ||
postprocess: p.requestBlockNamed, | ||
}, | ||
{ | ||
name: 'request_block_body', | ||
symbols: [ | ||
lexer.has('request_block_body') | ||
? { type: 'request_block_body' } | ||
: request_block_body, | ||
'template', | ||
lexer.has('request_block_body_end') | ||
? { type: 'request_block_body_end' } | ||
: request_block_body_end, | ||
], | ||
postprocess: p.requestBlockBody, | ||
}, | ||
{ name: 'request_block$ebnf$1', symbols: [] }, | ||
{ | ||
name: 'request_block$ebnf$1$subexpression$1', | ||
symbols: ['line_sep', 'request_entry'], | ||
}, | ||
{ | ||
name: 'request_block$ebnf$1', | ||
symbols: ['request_block$ebnf$1', 'request_block$ebnf$1$subexpression$1'], | ||
postprocess: d => d[0].concat([d[1]]), | ||
}, | ||
{ | ||
name: 'request_block', | ||
symbols: ['request_entry', 'request_block$ebnf$1'], | ||
postprocess: p.requestBlock, | ||
}, | ||
{ | ||
name: 'request_entry$ebnf$1$subexpression$1', | ||
symbols: ['__', 'template'], | ||
}, | ||
{ | ||
name: 'request_entry$ebnf$1', | ||
symbols: ['request_entry$ebnf$1$subexpression$1'], | ||
postprocess: id, | ||
}, | ||
{ name: 'request_entry$ebnf$1', symbols: [], postprocess: () => null }, | ||
{ | ||
name: 'request_entry', | ||
symbols: ['template', { literal: ':' }, 'request_entry$ebnf$1'], | ||
postprocess: p.requestEntry, | ||
}, | ||
{ | ||
name: 'drill', | ||
symbols: [ | ||
'drill', | ||
'_', | ||
lexer.has('drill_arrow') ? { type: 'drill_arrow' } : drill_arrow, | ||
'_', | ||
'expression', | ||
], | ||
postprocess: p.drill, | ||
}, | ||
{ | ||
name: 'drill$ebnf$1$subexpression$1', | ||
symbols: [ | ||
lexer.has('drill_arrow') ? { type: 'drill_arrow' } : drill_arrow, | ||
'_', | ||
], | ||
}, | ||
{ | ||
name: 'drill$ebnf$1', | ||
symbols: ['drill$ebnf$1$subexpression$1'], | ||
postprocess: id, | ||
}, | ||
{ name: 'drill$ebnf$1', symbols: [], postprocess: () => null }, | ||
{ | ||
name: 'drill', | ||
symbols: ['drill$ebnf$1', 'expression'], | ||
postprocess: p.drillContext, | ||
}, | ||
{ name: 'expression$subexpression$1', symbols: ['template'] }, | ||
{ name: 'expression$subexpression$1', symbols: ['slice'] }, | ||
{ name: 'expression$subexpression$1', symbols: ['modifier'] }, | ||
{ | ||
name: 'expression', | ||
symbols: ['expression$subexpression$1'], | ||
postprocess: p.idd, | ||
}, | ||
{ name: 'expression$subexpression$2', symbols: ['object'] }, | ||
{ name: 'expression$subexpression$2', symbols: ['function'] }, | ||
{ name: 'expression$subexpression$2', symbols: ['module_call'] }, | ||
{ | ||
name: 'expression', | ||
symbols: ['expression$subexpression$2'], | ||
postprocess: p.idd, | ||
}, | ||
{ name: 'expression', symbols: ['id_expr'], postprocess: p.identifier }, | ||
{ | ||
name: 'function', | ||
symbols: [{ literal: '(' }, '_', 'statements', '_', { literal: ')' }], | ||
postprocess: p.fn, | ||
}, | ||
{ name: 'module_call$ebnf$1$subexpression$1', symbols: ['object', '_'] }, | ||
{ | ||
name: 'module_call$ebnf$1', | ||
symbols: ['module_call$ebnf$1$subexpression$1'], | ||
postprocess: id, | ||
}, | ||
{ name: 'module_call$ebnf$1', symbols: [], postprocess: () => null }, | ||
{ | ||
name: 'module_call', | ||
symbols: [ | ||
'id_expr', | ||
{ literal: '(' }, | ||
'module_call$ebnf$1', | ||
{ literal: ')' }, | ||
], | ||
postprocess: p.moduleCall, | ||
}, | ||
{ name: 'object$ebnf$1', symbols: [] }, | ||
{ | ||
name: 'object$ebnf$1$subexpression$1$ebnf$1$subexpression$1', | ||
symbols: ['_', { literal: ',' }], | ||
}, | ||
{ | ||
name: 'object$ebnf$1$subexpression$1$ebnf$1', | ||
symbols: ['object$ebnf$1$subexpression$1$ebnf$1$subexpression$1'], | ||
postprocess: id, | ||
}, | ||
{ | ||
name: 'object$ebnf$1$subexpression$1$ebnf$1', | ||
symbols: [], | ||
postprocess: () => null, | ||
}, | ||
{ | ||
name: 'object$ebnf$1$subexpression$1', | ||
symbols: ['object_entry', 'object$ebnf$1$subexpression$1$ebnf$1', '_'], | ||
}, | ||
{ | ||
name: 'object$ebnf$1', | ||
symbols: ['object$ebnf$1', 'object$ebnf$1$subexpression$1'], | ||
postprocess: d => d[0].concat([d[1]]), | ||
}, | ||
{ | ||
name: 'object', | ||
symbols: [{ literal: '{' }, '_', 'object$ebnf$1', { literal: '}' }], | ||
postprocess: p.object, | ||
}, | ||
{ | ||
name: 'object_entry$ebnf$1', | ||
symbols: [{ literal: '?' }], | ||
postprocess: id, | ||
}, | ||
{ name: 'object_entry$ebnf$1', symbols: [], postprocess: () => null }, | ||
{ | ||
name: 'object_entry', | ||
symbols: [ | ||
lexer.has('identifier') ? { type: 'identifier' } : identifier, | ||
'object_entry$ebnf$1', | ||
{ literal: ':' }, | ||
'_', | ||
'drill', | ||
], | ||
postprocess: p.objectEntry, | ||
}, | ||
{ | ||
name: 'object_entry$ebnf$2', | ||
symbols: [{ literal: '?' }], | ||
postprocess: id, | ||
}, | ||
{ name: 'object_entry$ebnf$2', symbols: [], postprocess: () => null }, | ||
{ | ||
name: 'object_entry', | ||
symbols: [ | ||
lexer.has('identifier') ? { type: 'identifier' } : identifier, | ||
'object_entry$ebnf$2', | ||
], | ||
postprocess: p.objectEntryShorthandDrill, | ||
}, | ||
{ | ||
name: 'object_entry$ebnf$3', | ||
symbols: [{ literal: '?' }], | ||
postprocess: id, | ||
}, | ||
{ name: 'object_entry$ebnf$3', symbols: [], postprocess: () => null }, | ||
{ | ||
name: 'object_entry', | ||
symbols: ['id_expr', 'object_entry$ebnf$3'], | ||
postprocess: p.objectEntryShorthandIdent, | ||
}, | ||
{ | ||
name: 'template$ebnf$1$subexpression$1', | ||
symbols: [lexer.has('literal') ? { type: 'literal' } : literal], | ||
}, | ||
{ | ||
name: 'template$ebnf$1$subexpression$1', | ||
symbols: [lexer.has('interpvar') ? { type: 'interpvar' } : interpvar], | ||
}, | ||
{ name: 'template$ebnf$1$subexpression$1', symbols: ['interp_expr'] }, | ||
{ name: 'template$ebnf$1', symbols: ['template$ebnf$1$subexpression$1'] }, | ||
{ | ||
name: 'template$ebnf$1$subexpression$2', | ||
symbols: [lexer.has('literal') ? { type: 'literal' } : literal], | ||
}, | ||
{ | ||
name: 'template$ebnf$1$subexpression$2', | ||
symbols: [lexer.has('interpvar') ? { type: 'interpvar' } : interpvar], | ||
}, | ||
{ name: 'template$ebnf$1$subexpression$2', symbols: ['interp_expr'] }, | ||
{ | ||
name: 'template$ebnf$1', | ||
symbols: ['template$ebnf$1', 'template$ebnf$1$subexpression$2'], | ||
postprocess: d => d[0].concat([d[1]]), | ||
}, | ||
{ name: 'template', symbols: ['template$ebnf$1'], postprocess: p.template }, | ||
{ | ||
name: 'interp_expr', | ||
symbols: [ | ||
{ literal: '${' }, | ||
'_', | ||
lexer.has('identifier') ? { type: 'identifier' } : identifier, | ||
'_', | ||
{ literal: '}' }, | ||
], | ||
postprocess: p.interpExpr, | ||
}, | ||
{ | ||
name: 'slice', | ||
symbols: [lexer.has('slice') ? { type: 'slice' } : slice], | ||
postprocess: p.slice, | ||
}, | ||
{ | ||
name: 'modifier', | ||
symbols: [lexer.has('modifier') ? { type: 'modifier' } : modifier], | ||
postprocess: p.modifier, | ||
}, | ||
{ | ||
name: 'id_expr', | ||
symbols: [ | ||
lexer.has('identifier_expr') | ||
? { type: 'identifier_expr' } | ||
: identifier_expr, | ||
], | ||
postprocess: id, | ||
}, | ||
{ name: 'line_sep$ebnf$1', symbols: [] }, | ||
{ | ||
name: 'line_sep$ebnf$1$subexpression$1', | ||
symbols: [lexer.has('ws') ? { type: 'ws' } : ws], | ||
}, | ||
{ | ||
name: 'line_sep$ebnf$1$subexpression$1', | ||
symbols: [lexer.has('comment') ? { type: 'comment' } : comment], | ||
}, | ||
{ | ||
name: 'line_sep$ebnf$1', | ||
symbols: ['line_sep$ebnf$1', 'line_sep$ebnf$1$subexpression$1'], | ||
postprocess: d => d[0].concat([d[1]]), | ||
}, | ||
{ | ||
name: 'line_sep', | ||
symbols: ['line_sep$ebnf$1', lexer.has('nl') ? { type: 'nl' } : nl, '_'], | ||
postprocess: p.ws, | ||
}, | ||
{ name: '__$ebnf$1', symbols: ['ws'] }, | ||
{ | ||
name: '__$ebnf$1', | ||
symbols: ['__$ebnf$1', 'ws'], | ||
postprocess: d => d[0].concat([d[1]]), | ||
}, | ||
{ name: '__', symbols: ['__$ebnf$1'], postprocess: p.ws }, | ||
{ name: '_$ebnf$1', symbols: [] }, | ||
{ | ||
name: '_$ebnf$1', | ||
symbols: ['_$ebnf$1', 'ws'], | ||
postprocess: d => d[0].concat([d[1]]), | ||
}, | ||
{ name: '_', symbols: ['_$ebnf$1'], postprocess: p.ws }, | ||
{ | ||
name: 'ws$subexpression$1', | ||
symbols: [lexer.has('ws') ? { type: 'ws' } : ws], | ||
}, | ||
{ | ||
name: 'ws$subexpression$1', | ||
symbols: [lexer.has('comment') ? { type: 'comment' } : comment], | ||
}, | ||
{ | ||
name: 'ws$subexpression$1', | ||
symbols: [lexer.has('nl') ? { type: 'nl' } : nl], | ||
}, | ||
{ name: 'ws', symbols: ['ws$subexpression$1'], postprocess: p.ws }, | ||
{ "name": "program", "symbols": ["_", "header", "statements", "_"], "postprocess": p.program }, | ||
{ "name": "header$ebnf$1", "symbols": [] }, | ||
{ "name": "header$ebnf$1$subexpression$1$subexpression$1", "symbols": ["import"] }, | ||
{ "name": "header$ebnf$1$subexpression$1$subexpression$1", "symbols": ["inputs"] }, | ||
{ "name": "header$ebnf$1$subexpression$1", "symbols": ["header$ebnf$1$subexpression$1$subexpression$1", "line_sep"] }, | ||
{ "name": "header$ebnf$1", "symbols": ["header$ebnf$1", "header$ebnf$1$subexpression$1"], "postprocess": (d) => d[0].concat([d[1]]) }, | ||
{ "name": "header", "symbols": ["header$ebnf$1"], "postprocess": p.header }, | ||
{ "name": "statements$ebnf$1", "symbols": [] }, | ||
{ "name": "statements$ebnf$1$subexpression$1", "symbols": ["line_sep", "statement"] }, | ||
{ "name": "statements$ebnf$1", "symbols": ["statements$ebnf$1", "statements$ebnf$1$subexpression$1"], "postprocess": (d) => d[0].concat([d[1]]) }, | ||
{ "name": "statements", "symbols": ["statement", "statements$ebnf$1"], "postprocess": p.statements }, | ||
{ "name": "statement$subexpression$1", "symbols": ["request"] }, | ||
{ "name": "statement$subexpression$1", "symbols": ["assignment"] }, | ||
{ "name": "statement$subexpression$1", "symbols": ["extract"] }, | ||
{ "name": "statement", "symbols": ["statement$subexpression$1"], "postprocess": p.idd }, | ||
{ "name": "import", "symbols": [{ "literal": "import" }, "__", (lexer.has("identifier") ? { type: "identifier" } : identifier)], "postprocess": p.declImport }, | ||
{ "name": "inputs$ebnf$1", "symbols": [] }, | ||
{ "name": "inputs$ebnf$1$subexpression$1", "symbols": ["_", { "literal": "," }, "_", "input_decl"] }, | ||
{ "name": "inputs$ebnf$1", "symbols": ["inputs$ebnf$1", "inputs$ebnf$1$subexpression$1"], "postprocess": (d) => d[0].concat([d[1]]) }, | ||
{ "name": "inputs", "symbols": [{ "literal": "inputs" }, "__", { "literal": "{" }, "_", "input_decl", "inputs$ebnf$1", "_", { "literal": "}" }], "postprocess": p.declInputs }, | ||
{ "name": "assignment$ebnf$1", "symbols": [{ "literal": "?" }], "postprocess": id }, | ||
{ "name": "assignment$ebnf$1", "symbols": [], "postprocess": () => null }, | ||
{ "name": "assignment", "symbols": [{ "literal": "set" }, "__", (lexer.has("identifier") ? { type: "identifier" } : identifier), "assignment$ebnf$1", "_", { "literal": "=" }, "_", "drill"], "postprocess": p.assignment }, | ||
{ "name": "extract", "symbols": [{ "literal": "extract" }, "__", "drill"], "postprocess": p.extract }, | ||
{ "name": "input_decl$ebnf$1", "symbols": [{ "literal": "?" }], "postprocess": id }, | ||
{ "name": "input_decl$ebnf$1", "symbols": [], "postprocess": () => null }, | ||
{ "name": "input_decl$ebnf$2$subexpression$1", "symbols": ["_", { "literal": "=" }, "_", "input_default"] }, | ||
{ "name": "input_decl$ebnf$2", "symbols": ["input_decl$ebnf$2$subexpression$1"], "postprocess": id }, | ||
{ "name": "input_decl$ebnf$2", "symbols": [], "postprocess": () => null }, | ||
{ "name": "input_decl", "symbols": [(lexer.has("identifier") ? { type: "identifier" } : identifier), "input_decl$ebnf$1", "input_decl$ebnf$2"], "postprocess": p.inputDecl }, | ||
{ "name": "input_default", "symbols": ["slice"], "postprocess": id }, | ||
{ "name": "request$ebnf$1$subexpression$1", "symbols": ["line_sep", "request_block"] }, | ||
{ "name": "request$ebnf$1", "symbols": ["request$ebnf$1$subexpression$1"], "postprocess": id }, | ||
{ "name": "request$ebnf$1", "symbols": [], "postprocess": () => null }, | ||
{ "name": "request$ebnf$2", "symbols": [] }, | ||
{ "name": "request$ebnf$2$subexpression$1", "symbols": ["line_sep", "request_block_named"] }, | ||
{ "name": "request$ebnf$2", "symbols": ["request$ebnf$2", "request$ebnf$2$subexpression$1"], "postprocess": (d) => d[0].concat([d[1]]) }, | ||
{ "name": "request$ebnf$3$subexpression$1", "symbols": ["line_sep", "request_block_body"] }, | ||
{ "name": "request$ebnf$3", "symbols": ["request$ebnf$3$subexpression$1"], "postprocess": id }, | ||
{ "name": "request$ebnf$3", "symbols": [], "postprocess": () => null }, | ||
{ "name": "request", "symbols": [(lexer.has("request_verb") ? { type: "request_verb" } : request_verb), "template", "request$ebnf$1", "request$ebnf$2", "request$ebnf$3"], "postprocess": p.request }, | ||
{ "name": "request_block_named", "symbols": [(lexer.has("request_block_name") ? { type: "request_block_name" } : request_block_name), "line_sep", "request_block"], "postprocess": p.requestBlockNamed }, | ||
{ "name": "request_block_body", "symbols": [(lexer.has("request_block_body") ? { type: "request_block_body" } : request_block_body), "template", (lexer.has("request_block_body_end") ? { type: "request_block_body_end" } : request_block_body_end)], "postprocess": p.requestBlockBody }, | ||
{ "name": "request_block$ebnf$1", "symbols": [] }, | ||
{ "name": "request_block$ebnf$1$subexpression$1", "symbols": ["line_sep", "request_entry"] }, | ||
{ "name": "request_block$ebnf$1", "symbols": ["request_block$ebnf$1", "request_block$ebnf$1$subexpression$1"], "postprocess": (d) => d[0].concat([d[1]]) }, | ||
{ "name": "request_block", "symbols": ["request_entry", "request_block$ebnf$1"], "postprocess": p.requestBlock }, | ||
{ "name": "request_entry$ebnf$1$subexpression$1", "symbols": ["__", "template"] }, | ||
{ "name": "request_entry$ebnf$1", "symbols": ["request_entry$ebnf$1$subexpression$1"], "postprocess": id }, | ||
{ "name": "request_entry$ebnf$1", "symbols": [], "postprocess": () => null }, | ||
{ "name": "request_entry", "symbols": ["template", { "literal": ":" }, "request_entry$ebnf$1"], "postprocess": p.requestEntry }, | ||
{ "name": "drill", "symbols": ["drill", "_", (lexer.has("drill_arrow") ? { type: "drill_arrow" } : drill_arrow), "_", "expression"], "postprocess": p.drill }, | ||
{ "name": "drill$ebnf$1$subexpression$1", "symbols": [(lexer.has("drill_arrow") ? { type: "drill_arrow" } : drill_arrow), "_"] }, | ||
{ "name": "drill$ebnf$1", "symbols": ["drill$ebnf$1$subexpression$1"], "postprocess": id }, | ||
{ "name": "drill$ebnf$1", "symbols": [], "postprocess": () => null }, | ||
{ "name": "drill", "symbols": ["drill$ebnf$1", "expression"], "postprocess": p.drillContext }, | ||
{ "name": "expression$subexpression$1", "symbols": ["template"] }, | ||
{ "name": "expression$subexpression$1", "symbols": ["slice"] }, | ||
{ "name": "expression$subexpression$1", "symbols": ["modifier"] }, | ||
{ "name": "expression", "symbols": ["expression$subexpression$1"], "postprocess": p.idd }, | ||
{ "name": "expression$subexpression$2", "symbols": ["object"] }, | ||
{ "name": "expression$subexpression$2", "symbols": ["function"] }, | ||
{ "name": "expression$subexpression$2", "symbols": ["module_call"] }, | ||
{ "name": "expression", "symbols": ["expression$subexpression$2"], "postprocess": p.idd }, | ||
{ "name": "expression", "symbols": ["id_expr"], "postprocess": p.identifier }, | ||
{ "name": "function", "symbols": [{ "literal": "(" }, "_", "statements", "_", { "literal": ")" }], "postprocess": p.fn }, | ||
{ "name": "module_call$ebnf$1$subexpression$1", "symbols": ["object", "_"] }, | ||
{ "name": "module_call$ebnf$1", "symbols": ["module_call$ebnf$1$subexpression$1"], "postprocess": id }, | ||
{ "name": "module_call$ebnf$1", "symbols": [], "postprocess": () => null }, | ||
{ "name": "module_call", "symbols": ["id_expr", { "literal": "(" }, "module_call$ebnf$1", { "literal": ")" }], "postprocess": p.moduleCall }, | ||
{ "name": "object$ebnf$1", "symbols": [] }, | ||
{ "name": "object$ebnf$1$subexpression$1$ebnf$1$subexpression$1", "symbols": ["_", { "literal": "," }] }, | ||
{ "name": "object$ebnf$1$subexpression$1$ebnf$1", "symbols": ["object$ebnf$1$subexpression$1$ebnf$1$subexpression$1"], "postprocess": id }, | ||
{ "name": "object$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null }, | ||
{ "name": "object$ebnf$1$subexpression$1", "symbols": ["object_entry", "object$ebnf$1$subexpression$1$ebnf$1", "_"] }, | ||
{ "name": "object$ebnf$1", "symbols": ["object$ebnf$1", "object$ebnf$1$subexpression$1"], "postprocess": (d) => d[0].concat([d[1]]) }, | ||
{ "name": "object", "symbols": [{ "literal": "{" }, "_", "object$ebnf$1", { "literal": "}" }], "postprocess": p.object }, | ||
{ "name": "object_entry$ebnf$1", "symbols": [{ "literal": "?" }], "postprocess": id }, | ||
{ "name": "object_entry$ebnf$1", "symbols": [], "postprocess": () => null }, | ||
{ "name": "object_entry", "symbols": [(lexer.has("identifier") ? { type: "identifier" } : identifier), "object_entry$ebnf$1", { "literal": ":" }, "_", "drill"], "postprocess": p.objectEntry }, | ||
{ "name": "object_entry$ebnf$2", "symbols": [{ "literal": "?" }], "postprocess": id }, | ||
{ "name": "object_entry$ebnf$2", "symbols": [], "postprocess": () => null }, | ||
{ "name": "object_entry", "symbols": [(lexer.has("identifier") ? { type: "identifier" } : identifier), "object_entry$ebnf$2"], "postprocess": p.objectEntryShorthandSelect }, | ||
{ "name": "object_entry$ebnf$3", "symbols": [{ "literal": "?" }], "postprocess": id }, | ||
{ "name": "object_entry$ebnf$3", "symbols": [], "postprocess": () => null }, | ||
{ "name": "object_entry", "symbols": ["id_expr", "object_entry$ebnf$3"], "postprocess": p.objectEntryShorthandIdent }, | ||
{ "name": "template$ebnf$1$subexpression$1", "symbols": [(lexer.has("literal") ? { type: "literal" } : literal)] }, | ||
{ "name": "template$ebnf$1$subexpression$1", "symbols": [(lexer.has("interpvar") ? { type: "interpvar" } : interpvar)] }, | ||
{ "name": "template$ebnf$1$subexpression$1", "symbols": ["interp_expr"] }, | ||
{ "name": "template$ebnf$1", "symbols": ["template$ebnf$1$subexpression$1"] }, | ||
{ "name": "template$ebnf$1$subexpression$2", "symbols": [(lexer.has("literal") ? { type: "literal" } : literal)] }, | ||
{ "name": "template$ebnf$1$subexpression$2", "symbols": [(lexer.has("interpvar") ? { type: "interpvar" } : interpvar)] }, | ||
{ "name": "template$ebnf$1$subexpression$2", "symbols": ["interp_expr"] }, | ||
{ "name": "template$ebnf$1", "symbols": ["template$ebnf$1", "template$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) }, | ||
{ "name": "template", "symbols": ["template$ebnf$1"], "postprocess": p.template }, | ||
{ "name": "interp_expr", "symbols": [{ "literal": "${" }, "_", (lexer.has("identifier") ? { type: "identifier" } : identifier), "_", { "literal": "}" }], "postprocess": p.interpExpr }, | ||
{ "name": "slice", "symbols": [(lexer.has("slice") ? { type: "slice" } : slice)], "postprocess": p.slice }, | ||
{ "name": "modifier", "symbols": [(lexer.has("modifier") ? { type: "modifier" } : modifier)], "postprocess": p.modifier }, | ||
{ "name": "id_expr", "symbols": [(lexer.has("identifier_expr") ? { type: "identifier_expr" } : identifier_expr)], "postprocess": id }, | ||
{ "name": "line_sep$ebnf$1", "symbols": [] }, | ||
{ "name": "line_sep$ebnf$1$subexpression$1", "symbols": [(lexer.has("ws") ? { type: "ws" } : ws)] }, | ||
{ "name": "line_sep$ebnf$1$subexpression$1", "symbols": [(lexer.has("comment") ? { type: "comment" } : comment)] }, | ||
{ "name": "line_sep$ebnf$1", "symbols": ["line_sep$ebnf$1", "line_sep$ebnf$1$subexpression$1"], "postprocess": (d) => d[0].concat([d[1]]) }, | ||
{ "name": "line_sep", "symbols": ["line_sep$ebnf$1", (lexer.has("nl") ? { type: "nl" } : nl), "_"], "postprocess": p.ws }, | ||
{ "name": "__$ebnf$1", "symbols": ["ws"] }, | ||
{ "name": "__$ebnf$1", "symbols": ["__$ebnf$1", "ws"], "postprocess": (d) => d[0].concat([d[1]]) }, | ||
{ "name": "__", "symbols": ["__$ebnf$1"], "postprocess": p.ws }, | ||
{ "name": "_$ebnf$1", "symbols": [] }, | ||
{ "name": "_$ebnf$1", "symbols": ["_$ebnf$1", "ws"], "postprocess": (d) => d[0].concat([d[1]]) }, | ||
{ "name": "_", "symbols": ["_$ebnf$1"], "postprocess": p.ws }, | ||
{ "name": "ws$subexpression$1", "symbols": [(lexer.has("ws") ? { type: "ws" } : ws)] }, | ||
{ "name": "ws$subexpression$1", "symbols": [(lexer.has("comment") ? { type: "comment" } : comment)] }, | ||
{ "name": "ws$subexpression$1", "symbols": [(lexer.has("nl") ? { type: "nl" } : nl)] }, | ||
{ "name": "ws", "symbols": ["ws$subexpression$1"], "postprocess": p.ws } | ||
], | ||
ParserStart: 'program', | ||
ParserStart: "program", | ||
}; | ||
export default grammar; |
import nearley from 'nearley'; | ||
import { QuerySyntaxError } from '@getlang/utils'; | ||
import grammar from './grammar'; | ||
@@ -16,3 +17,3 @@ import lexer from './parse/lexer'; | ||
if (typeof e === 'object' && e && 'token' in e) { | ||
throw new SyntaxError(lexer.formatError(e.token, 'SyntaxError: Invalid token')); | ||
throw new QuerySyntaxError(lexer.formatError(e.token, 'SyntaxError: Invalid token')); | ||
} | ||
@@ -26,6 +27,6 @@ throw e; | ||
case 0: | ||
throw new SyntaxError('Unexpected end of input'); | ||
throw new QuerySyntaxError('Unexpected end of input'); | ||
default: | ||
throw new SyntaxError('Unexpected parsing error'); | ||
throw new QuerySyntaxError('Unexpected parsing error'); | ||
} | ||
} |
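Parse failures are now surfaced as `QuerySyntaxError` from `@getlang/utils` rather than the built-in `SyntaxError`. A hedged sketch, assuming the package root exports a `parse()` wrapper around this module and using a deliberately invalid input:

```ts
import { QuerySyntaxError } from '@getlang/utils'
import { parse } from '@getlang/parser' // assumed public entry

try {
  parse('%%% not getlang %%%') // illustrative invalid input
} catch (e) {
  console.log(e instanceof QuerySyntaxError) // true -- no longer a bare SyntaxError
}
```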
@@ -0,1 +1,2 @@ | ||
import { invariant, QuerySyntaxError } from '@getlang/utils'; | ||
import { until } from './templates'; | ||
@@ -6,5 +7,3 @@ const getSliceValue = (text, places = 1) => { | ||
const firstIdx = lines.findIndex(x => x.trim().length); | ||
if (firstIdx === -1) { | ||
throw new SyntaxError('Slice must contain source'); | ||
} | ||
invariant(firstIdx !== -1, new QuerySyntaxError('Slice must contain source')); | ||
lines = lines.slice(firstIdx); | ||
@@ -11,0 +10,0 @@ const indent = lines[0]?.match(/^\s*/)?.[0].length || 0; |
@@ -8,11 +8,3 @@ import moo from 'moo'; | ||
const requestBlockNames = ['query', 'cookies', 'json', 'form']; | ||
const modifiers = [ | ||
'html', | ||
'json', | ||
'js', | ||
'cookies', | ||
'link', | ||
'headers', | ||
'cookies', | ||
]; | ||
const modifiers = ['html', 'json', 'js', 'cookies', 'link', 'headers']; | ||
const keywordsObj = Object.fromEntries(keywords.map(k => [`kw_${k}`, k])); | ||
@@ -19,0 +11,0 @@ const exprOpeners = { |
@@ -19,3 +19,3 @@ type PP = nearley.Postprocessor; | ||
export declare const objectEntry: PP; | ||
export declare const objectEntryShorthandDrill: PP; | ||
export declare const objectEntryShorthandSelect: PP; | ||
export declare const objectEntryShorthandIdent: PP; | ||
@@ -22,0 +22,0 @@ export declare const drill: PP; |
@@ -0,1 +1,2 @@ | ||
import { invariant, QuerySyntaxError } from '@getlang/utils'; | ||
import { t, NodeKind } from '../ast/ast'; | ||
@@ -38,3 +39,3 @@ export const program = ([, maybeHeader, body]) => { | ||
} | ||
return t.requestStmt(method, url, headers, blocks, body); | ||
return t.requestStmt(t.requestExpr(method, url, headers, blocks, body)); | ||
}; | ||
@@ -74,6 +75,6 @@ export const requestBlockNamed = ([name, , entries]) => ({ name, entries }); | ||
}); | ||
export const objectEntryShorthandDrill = ([identifier, optional]) => { | ||
export const objectEntryShorthandSelect = ([identifier, optional]) => { | ||
const value = t.templateExpr([t.literalExpr(identifier)]); | ||
const drill = t.drillExpr('context', value, false); | ||
return objectEntry([identifier, optional, null, null, drill]); | ||
const selector = t.selectorExpr(value, false); | ||
return objectEntry([identifier, optional, null, null, selector]); | ||
}; | ||
@@ -84,13 +85,20 @@ export const objectEntryShorthandIdent = ([identifier, optional]) => { | ||
}; | ||
export const drill = ([target, , arrow, , bit]) => { | ||
const expandingSelectors = [NodeKind.TemplateExpr, NodeKind.IdentifierExpr]; | ||
export const drill = ([context, , arrow, , bit]) => { | ||
const expand = arrow.value.startsWith('='); | ||
return t.drillExpr(target, bit, expand); | ||
if (expandingSelectors.includes(bit.kind)) { | ||
return t.selectorExpr(bit, expand, context); | ||
} | ||
invariant(!expand, new QuerySyntaxError('Wide arrow drill requires selector on RHS')); | ||
invariant('context' in bit, new QuerySyntaxError('Invalid drill value')); | ||
bit.context = context; | ||
return bit; | ||
}; | ||
const contextSelectors = [NodeKind.TemplateExpr, NodeKind.ModifierExpr]; | ||
export const drillContext = ([arrow, expr]) => { | ||
if (!contextSelectors.includes(expr.kind)) { | ||
return expr; | ||
const expand = arrow?.[0].value === '=>'; | ||
if (expr.kind === NodeKind.TemplateExpr) { | ||
return t.selectorExpr(expr, expand); | ||
} | ||
const expand = arrow?.[0].value === '=>'; | ||
return t.drillExpr('context', expr, expand); | ||
invariant(!expand, new QuerySyntaxError('Wide arrow drill requires selector on RHS')); | ||
return expr; | ||
}; | ||
@@ -124,3 +132,3 @@ export const identifier = ([id]) => { | ||
default: | ||
throw new SyntaxError(`Unkown template element: ${token.type}`); | ||
throw new QuerySyntaxError(`Unkown template element: ${token.type}`); | ||
} | ||
@@ -127,0 +135,0 @@ }); |
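The drill postprocessors now dispatch on the right-hand side instead of always building a `DrillExpr`. An illustrative summary of the mapping they imply (not verbatim library output):

```ts
// How surface drills map onto the new AST, per the postprocessors above:
//
//   a -> b        (template/identifier RHS) => t.selectorExpr(b, /* expand */ false, /* context */ a)
//   a => b        (template/identifier RHS) => t.selectorExpr(b, true, a)
//   a -> @json    (modifier, slice, ... RHS) => the RHS node itself, with .context = a
//   a => @json                               => QuerySyntaxError('Wide arrow drill requires selector on RHS')
//
// A leading arrow with no explicit target (drillContext) produces a context-less
// SelectorExpr for templates, and `=>` is rejected on non-selector expressions.
```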
{ | ||
"name": "@getlang/parser", | ||
"version": "0.0.9", | ||
"version": "0.0.10", | ||
"license": "Apache-2.0", | ||
@@ -18,2 +18,3 @@ "type": "module", | ||
}, | ||
"homepage": "https://getlang.dev", | ||
"scripts": { | ||
@@ -24,3 +25,3 @@ "compile": "nearleyc parse/getlang.ne -o grammar.ts", | ||
"dependencies": { | ||
"@getlang/utils": "^0.0.7", | ||
"@getlang/utils": "^0.0.8", | ||
"acorn": "^8.12.0", | ||
@@ -27,0 +28,0 @@ "acorn-globals": "^7.0.1", |
+ Added @getlang/utils@0.0.8 (transitive)
- Removed @getlang/utils@0.0.7 (transitive)
Updated @getlang/utils@^0.0.8