diff --git a/apps/parser/generated/cst-types.ts b/apps/parser/generated/cst-types.ts
index 89c0d6c..2601430 100644
--- a/apps/parser/generated/cst-types.ts
+++ b/apps/parser/generated/cst-types.ts
@@ -29,6 +29,8 @@ export type StatementCstChildren = {
body?: BodyCstNode[];
DO?: IToken[];
WHILE?: IToken[];
+ LPAREN?: IToken[];
+ RPAREN?: IToken[];
FINALLY?: IToken[];
};
@@ -78,7 +80,6 @@ export interface ExpressionCstNode extends CstNode {
export type ExpressionCstChildren = {
value: ValueCstNode[];
PostFix?: IToken[];
- CmpAsgn?: IToken[];
BinOp?: IToken[];
expression?: ExpressionCstNode[];
};
@@ -107,9 +108,9 @@ export type ConstantCstChildren = {
STRING?: IToken[];
BOOL?: IToken[];
BIN?: IToken[];
- INT?: IToken[];
CMPX?: IToken[];
REAL?: IToken[];
+ INT?: IToken[];
};
export interface TypeCstNode extends CstNode {
diff --git a/apps/parser/generated/syntax-diagrams.html b/apps/parser/generated/syntax-diagrams.html
index 111d915..4cae9a7 100644
--- a/apps/parser/generated/syntax-diagrams.html
+++ b/apps/parser/generated/syntax-diagrams.html
@@ -1,694 +1,675 @@
+
-
-
-
-
-
+
+
+
+
+
+
-
+
diff --git a/apps/parser/mod.ts b/apps/parser/mod.ts
index d6106ad..9763781 100644
--- a/apps/parser/mod.ts
+++ b/apps/parser/mod.ts
@@ -2,4 +2,5 @@ export * from './src/globals.ts';
export * from './src/lexer.ts';
export * from './src/logging.ts';
export * from './src/parser.ts';
+export * as v from './src/validate.ts';
export * from './src/visitors/mod.ts';
diff --git a/apps/parser/src/lexer.ts b/apps/parser/src/lexer.ts
index c6fc219..afad766 100644
--- a/apps/parser/src/lexer.ts
+++ b/apps/parser/src/lexer.ts
@@ -103,7 +103,11 @@ export const binopTokens: TokenType[] = [
IN,
];
/* Compound Assignment Tokens */
-export const CmpAsgn: TokenType = createToken({ name: 'CmpAsgn', pattern: Lexer.NA });
+export const CmpAsgn: TokenType = createToken({
+ name: 'CmpAsgn',
+ pattern: Lexer.NA,
+ categories: BinOp,
+});
export const PL_EQU: TokenType = createToken({
name: 'PL_EQU',
pattern: '+=',
diff --git a/apps/parser/src/parser.ts b/apps/parser/src/parser.ts
index faaeec6..bb7b88b 100644
--- a/apps/parser/src/parser.ts
+++ b/apps/parser/src/parser.ts
@@ -11,7 +11,7 @@ export class EncodeParser extends CstParser {
this.MANY(() => this.SUBRULE(this.statement));
});
- private statement = this.RULE('statement', () => {
+ public statement: ParserMethod<[], CstNode> = this.RULE('statement', () => {
this.OR([
{
ALT: () => {
@@ -68,7 +68,9 @@ export class EncodeParser extends CstParser {
});
this.CONSUME(Tokens.WHILE);
+ this.CONSUME(Tokens.LPAREN);
this.SUBRULE3(this.expression);
+ this.CONSUME(Tokens.RPAREN);
this.OR2([
{
ALT: () => {
@@ -92,7 +94,7 @@ export class EncodeParser extends CstParser {
]);
});
- private ifPredBody = this.RULE('ifPredBody', () => {
+ public ifPredBody: ParserMethod<[], CstNode> = this.RULE('ifPredBody', () => {
this.CONSUME(Tokens.LPAREN);
this.OR([
{
@@ -110,13 +112,13 @@ export class EncodeParser extends CstParser {
this.SUBRULE(this.body);
});
- private body = this.RULE('body', () => {
+ public body: ParserMethod<[], CstNode> = this.RULE('body', () => {
this.CONSUME(Tokens.LCURLY);
this.MANY(() => this.SUBRULE(this.statement));
this.CONSUME(Tokens.RCURLY);
});
- private declaration = this.RULE('declaration', () => {
+ public declaration: ParserMethod<[], CstNode> = this.RULE('declaration', () => {
this.CONSUME(Tokens.ID);
this.OPTION(() => {
this.CONSUME(Tokens.COLON);
@@ -128,32 +130,17 @@ export class EncodeParser extends CstParser {
});
});
- private expression = this.RULE('expression', () => {
+ public expression: ParserMethod<[], CstNode> = this.RULE('expression', () => {
this.SUBRULE(this.value);
- this.OR([
- {
- ALT: () => this.CONSUME(Tokens.PostFix),
- },
- {
- ALT: () => {
- this.OPTION(() => {
- this.OR2([
- {
- ALT: () => this.CONSUME(Tokens.CmpAsgn),
- },
- {
- ALT: () => this.CONSUME(Tokens.BinOp),
- },
- ]);
- this.SUBRULE(this.expression);
- // TODO reorder based on precedence
- });
- },
- },
- ]);
+ this.OPTION(() => this.CONSUME(Tokens.PostFix));
+
+ this.OPTION1(() => {
+ this.CONSUME(Tokens.BinOp); // Compound assignment is categorized as a Binary operation by the lexer now
+ this.SUBRULE(this.expression);
+ });
});
- private value = this.RULE('value', () => {
+ public value: ParserMethod<[], CstNode> = this.RULE('value', () => {
this.OR([
{
ALT: () => {
@@ -177,13 +164,19 @@ export class EncodeParser extends CstParser {
]);
});
- private constant = this.RULE('constant', () =>
+ public constant: ParserMethod<[], CstNode> = this.RULE('constant', () =>
this.OR(Tokens.literals.map((t) => ({ ALT: () => this.CONSUME(t) }))),
);
- private type = this.RULE('type', () => this.CONSUME(Tokens.BASIC_TYPE));
+ public type: ParserMethod<[], CstNode> = this.RULE('type', () => this.CONSUME(Tokens.BASIC_TYPE));
}
+export type EncodeRule = {
+ [Property in keyof EncodeParser]: EncodeParser[Property] extends ParserMethod<[], CstNode>
+ ? Property
+ : never;
+}[keyof EncodeParser];
+
export const parser: EncodeParser = new EncodeParser();
export const BaseCstVisitor: ReturnType<typeof parser.getBaseCstVisitorConstructor> =
parser.getBaseCstVisitorConstructor();
diff --git a/apps/parser/src/validate.ts b/apps/parser/src/validate.ts
new file mode 100644
index 0000000..09853dc
--- /dev/null
+++ b/apps/parser/src/validate.ts
@@ -0,0 +1,473 @@
+import { assert, assertEquals, assertGreater } from '@std/assert';
+import type {
+ BodyCstNode,
+ ConstantCstNode,
+ DeclarationCstNode,
+ ExpressionCstNode,
+ FileCstNode,
+ IfPredBodyCstNode,
+ StatementCstNode,
+ TypeCstNode,
+ ValueCstNode,
+} from '@/generated/cst-types.ts';
+
+export const skip = undefined;
+type Skip = typeof skip;
+export const none = false;
+type None = typeof none;
+
+export type ValidationFunction =
+ | typeof file
+ | typeof statement
+ | typeof ifPredBody
+ | typeof declaration
+ | typeof body
+ | typeof expression
+ | typeof value
+ | typeof constant
+ | typeof type;
+
+export function file(node: FileCstNode, args?: Statement[] | None) {
+ assertEquals(node.name, 'file');
+ const file = node.children;
+ // biome-ignore lint/complexity/useOptionalChain: args could be false without being nullish
+ if (args && args.length) {
+ assert(file.statement, `File: expected 1+ statements but received ${file.statement?.length}`);
+ }
+ if (args === false) {
+ assert(
+ !file.statement?.length,
+ `File: expected 0 statements but received ${file.statement?.length}`,
+ );
+ }
+ if (file.statement && args !== false) {
+ statement_list(file.statement, args);
+ }
+}
+
+function statement_list(statements: StatementCstNode[], args?: Statement[]) {
+ if (args) {
+ assertEquals(
+ statements.length,
+ args.length,
+ `Statement List: expected ${args.length} statements but received ${statements.length}`,
+ );
+ } else {
+ assertGreater(
+ statements.length,
+ 0,
+ `Statement List: expected 1+ statements but received ${statements.length}`,
+ );
+ }
+ for (let i = 0; i < statements.length; i++) {
+ statement(statements[i], args?.[i]);
+ }
+}
+
+export type BodyStatement = ['body', Body | Skip];
+export type IfStatement = [
+ 'if',
+ // 0 = if, 1-n = elif
+ (IfPredBody | None | Skip)[] | Skip,
+ Body | None | Skip, // else
+];
+export type WhileStatement = [
+ 'while',
+ Body | None | Skip, // do
+ Expression | Skip, // while
+ Body | None | Skip, // while-body
+ Body | None | Skip, // finally-body
+];
+
+export type Statement =
+ | ['declaration', Declaration | Skip]
+ | ['break']
+ | ['continue']
+ | ['return', Expression | None | Skip]
+ | IfStatement
+ | WhileStatement
+ | BodyStatement
+ | ['expression', Expression | Skip]
+ | None
+ | Skip;
+export function statement(node: StatementCstNode, args?: Statement) {
+ assertEquals(node.name, 'statement');
+ const stmt = node.children;
+ if (args === none) {
+ assert(
+ Object.keys(stmt).length === 1 && stmt.SEMI,
+ `Statement: expected SEMI but received ${Object.keys(stmt)}`,
+ );
+ } else if (stmt.LET && stmt.declaration) {
+ if (args) {
+ assertEquals(
+ args[0],
+ 'declaration',
+ `Statement: expected ${args[0]} but received declaration`,
+ );
+ }
+ declaration(stmt.declaration[0], (args?.[1] as Declaration) || skip);
+ } else if (stmt.BREAK) {
+ if (args) {
+ assertEquals(args[0], 'break', `Statement: expected ${args[0]} but received break`);
+ }
+ assertEquals(stmt.BREAK[0].image, 'break');
+ } else if (stmt.CONTINUE) {
+ if (args) {
+ assertEquals(args[0], 'continue', `Statement: expected ${args[0]} but received continue`);
+ }
+ assertEquals(stmt.CONTINUE[0].image, 'continue');
+ } else if (stmt.RETURN) {
+ if (args) {
+ assertEquals(args[0], 'return', `Statement: expected ${args[0]} but received return`);
+ }
+ assertEquals(stmt.RETURN[0].image, 'return');
+ assert(
+ args?.[1] === skip || (args[1] ? stmt.expression : !stmt.expression),
+ `Statement > return: expected ${!!args?.[1]} but received ${!!stmt.expression}`,
+ );
+ if (stmt.expression) {
+ assertEquals(stmt.expression.length, 1);
+ expression(stmt.expression[0], (args?.[1] as Expression) || skip);
+ }
+ } else if (stmt.IF && stmt.ifPredBody) {
+ if (args) {
+ assertEquals(args[0], 'if', `Statement: expected ${args[0]} but received if`);
+ }
+ const [_, p, e] = args ?? [];
+ let bodyCount = 0;
+ const predBody = stmt.ifPredBody;
+ if (p) {
+ assertEquals(
+ predBody.length,
+ p.length,
+ `Statement: expected ${p.length} if-preds but received ${predBody.length}`,
+ );
+ }
+ ifPredBody(predBody[bodyCount++], (p && (p[0] as IfPredBody)) || skip);
+ if (stmt.ELIF) {
+ stmt.ELIF.forEach(() => {
+ ifPredBody(predBody[bodyCount], (p && (p[bodyCount] as IfPredBody)) || skip);
+ bodyCount++;
+ });
+ }
+ assert(
+ e === skip || (e ? stmt.body : !stmt.body),
+ `Statement > else: expected ${!!e} but received ${!!stmt.body}`,
+ );
+ if (stmt.ELSE && stmt.body) {
+ body(stmt.body[0], e as Body);
+ }
+ } else if (stmt.WHILE && stmt.expression) {
+ if (args) {
+ assertEquals(args[0], 'while', `Statement: expected ${args[0]} but received while`);
+ }
+ const [_, d, we, wb, f] = args || [];
+ let bodyCount = 0;
+ assert(
+ d === skip || (d ? stmt.DO : !stmt.DO),
+ `Statement > do: expected ${!!d} but received ${!!stmt.DO}`,
+ );
+ if (stmt.DO) {
+ assert(
+ stmt.body?.[bodyCount],
+ `Statement > do: expected body but received ${stmt.body?.[bodyCount]}`,
+ );
+ body(stmt.body[bodyCount++], d as Body);
+ }
+ expression(stmt.expression[0], we as Expression);
+ assert(
+ wb === skip || (wb ? !stmt.SEMI : stmt.SEMI),
+ `Statement > while: expected ${!!wb} but received ${!stmt.SEMI}`,
+ );
+ if (!stmt.SEMI) {
+ assert(
+ stmt.body?.[bodyCount],
+ `Statement > while: expected body but received ${stmt.body?.[bodyCount]}`,
+ );
+ body(stmt.body[bodyCount++], wb as Body);
+ }
+ assert(
+ f === skip || (f ? stmt.FINALLY : !stmt.FINALLY),
+ `Statement > finally: expected ${!!f} but received ${!!stmt.FINALLY}`,
+ );
+ if (stmt.FINALLY) {
+ assert(
+ stmt.body?.[bodyCount],
+ `Statement > finally: expected body but received ${stmt.body?.[bodyCount]}`,
+ );
+ body(stmt.body[bodyCount++], f as Body);
+ }
+ } else if (stmt.body) {
+ if (args) {
+ assertEquals(args[0], 'body', `Statement: expected ${args[0]} but received body`);
+ }
+ body(stmt.body[0], args?.[1] as Body);
+ } else if (stmt.expression) {
+ if (args) {
+ assertEquals(args[0], 'expression', `Statement: expected ${args[0]} but received expression`);
+ }
+ expression(stmt.expression[0], args?.[1] as Expression);
+ } else {
+ throw new Error(`Validation: unhandled statement type!\n${JSON.stringify(stmt, null, 2)}`);
+ }
+}
+
+export type IfPredBody =
+ | ['declaration', Declaration | Skip, Body | Skip]
+ | ['expression', Expression | Skip, Body | Skip]
+ | Skip;
+export function ifPredBody(node: IfPredBodyCstNode, args?: IfPredBody) {
+ assertEquals(node.name, 'ifPredBody');
+ const predBody = node.children;
+ if (predBody.LET && predBody.declaration) {
+ if (args) {
+ assertEquals(
+ args[0],
+ 'declaration',
+ `IfPredBody: expected ${args[0]} but received declaration`,
+ );
+ }
+ declaration(predBody.declaration[0], args?.[1] as Declaration | Skip);
+ } else if (predBody.expression) {
+ if (args) {
+ assertEquals(
+ args[0],
+ 'expression',
+ `IfPredBody: expected ${args[0]} but received expression`,
+ );
+ }
+ expression(predBody.expression[0], args?.[1] as Expression | Skip);
+ } else {
+ throw new Error(`Validation: unhandled ifPredBody type!\n${JSON.stringify(predBody, null, 2)}`);
+ }
+
+ body(predBody.body[0], args?.[2]);
+}
+
+export type Declaration = [string | Skip, Type | None | Skip, Expression | None | Skip] | Skip;
+export function declaration(node: DeclarationCstNode, args?: Declaration) {
+ assertEquals(node.name, 'declaration');
+ const decl = node.children;
+ const [id, t, e] = args ?? [];
+ assertEquals(decl.ID.length, 1);
+ if (id) {
+ assertEquals(
+ decl.ID[0].image,
+ id,
+ `Declaration: expected ${id} but received ${decl.ID[0].image}`,
+ );
+ } else {
+ assertGreater(decl.ID[0].image.length, 0);
+ }
+ assert(
+ t === skip || (t ? decl.type : !decl.type),
+ `Declaration > type: expected ${!!t} but received ${!!decl.type}`,
+ );
+ if (decl.type) {
+ assertEquals(decl.type.length, 1);
+ type(decl.type[0], t || skip);
+ }
+ assert(
+ e === skip || (e ? decl.expression : !decl.expression),
+ `Declaration > expression: expected ${!!e} but received ${!!decl.expression}`,
+ );
+ if (decl.expression) {
+ assertEquals(decl.expression.length, 1);
+ expression(decl.expression[0], e || skip);
+ }
+}
+
+export type Body = Statement[] | None | Skip;
+export function body<T extends Body>(node: BodyCstNode, args?: T) {
+ assertEquals(node.name, 'body');
+ const body = node.children;
+ assertEquals(body.LCURLY?.at(0)?.image, '{', 'Body: missing {');
+ // biome-ignore lint/complexity/useOptionalChain: args could be false without being nullish
+ if (args && args.length) {
+ assert(body.statement, `Body: expected 1+ statements but received ${body.statement?.length}`);
+ }
+ if (args === false) {
+ assert(
+ !body.statement?.length,
+ `Body: expected 0 statements but received ${body.statement?.length}`,
+ );
+ }
+ if (body.statement && args !== false) {
+ statement_list(body.statement, args);
+ }
+ assertEquals(body.RCURLY?.at(0)?.image, '}', 'Body: missing }');
+}
+
+export type Expression = Parameters<typeof expression>[1];
+export function expression<
+ T extends
+ | [Value | Skip]
+ | [
+ Value | Skip,
+ string | Skip, // postfix operator
+ ]
+ | [
+ Value | Skip,
+ string | None | Skip, // postfix operator
+ string | Skip, // operator
+ T | Skip,
+ ]
+ | Skip,
+>(node: ExpressionCstNode, args?: T) {
+ assertEquals(node.name, 'expression');
+ const expr = node.children;
+ const [val, pf, op, rhs] = args ?? [];
+ assert(expr.value?.at(0)?.children);
+ value(expr.value[0], val);
+ assert(
+ pf === skip || (pf ? expr.PostFix : !expr.PostFix),
+ `Expression > PostFix: expected ${!!pf} but received ${!!expr.PostFix}`,
+ );
+ if (expr.PostFix) {
+ assertEquals(expr.PostFix.length, 1);
+ if (pf) {
+ assertEquals(
+ expr.PostFix[0].image,
+ pf,
+ `Expression > PostFix: expected ${pf} but received ${expr.PostFix[0].image}`,
+ );
+ } else {
+ assertGreater(expr.PostFix[0].image.length, 0);
+ }
+ }
+ assert(
+ op === skip || (op ? expr.BinOp : !expr.BinOp),
+ `Expression > BinOp: expected ${!!op} but received ${!!expr.BinOp}`,
+ );
+ if (expr.BinOp) {
+ assertEquals(expr.BinOp.length, 1);
+ if (op) {
+ assertEquals(
+ expr.BinOp[0].image,
+ op,
+ `Expression > BinOp: expected ${op} but received ${expr.BinOp[0].image}`,
+ );
+ } else {
+ assertGreater(expr.BinOp[0].image.length, 0);
+ }
+ }
+ assert(
+ (op === skip && rhs === skip) || (rhs ? expr.expression : !expr.expression),
+ `Expression > rhs: expected ${!!(op || rhs)} but received ${!!expr.expression}`,
+ );
+ if (expr.expression) {
+ assertEquals(expr.expression.length, 1);
+ expression(expr.expression[0], rhs || skip);
+ }
+}
+
+type NestedValue = ['nested', Expression];
+
+export type Value = Parameters<typeof value>[1];
+export function value<
+ T extends
+ | NestedValue
+ | ['constant', Constant]
+ | ['id', string | Skip]
+ | ['prefix', string | Skip, T]
+ | Skip,
+>(node: ValueCstNode, args?: T) {
+ assertEquals(node.name, 'value');
+ const val = node.children;
+ if (val.expression) {
+ if (args) {
+ assertEquals(args[0], 'nested', `Value: expected ${args[0]} but received nested`);
+ }
+ expression(val.expression[0], args?.at(1) as Expression);
+ } else if (val.constant) {
+ if (args) {
+ assertEquals(args[0], 'constant', `Value: expected ${args[0]} but received constant`);
+ }
+ constant(val.constant[0], args?.at(1) as Constant);
+ } else if (val.ID) {
+ assertEquals(val.ID.length, 1);
+ if (args) {
+ assertEquals(args[0], 'id', `Value: expected ${args[0]} but received id`);
+ }
+ if (args?.[1]) {
+ assertEquals(
+ val.ID[0].image,
+ args[1],
+ `Value > id: expected ${args[1]} but received ${val.ID[0].image}`,
+ );
+ } else {
+ assertGreater(val.ID[0].image.length, 0);
+ }
+ } else if (val.value) {
+ assertEquals(
+ val.UnOp?.length,
+ 1,
+ `Value > prefix: expected 1 prefix but received ${val.UnOp?.length}`,
+ );
+ if (args) {
+ assertEquals(args[0], 'prefix', `Value: expected ${args[0]} but received prefix`);
+ }
+ if (args?.[1]) {
+ assertEquals(
+ val.UnOp?.[0].image,
+ args[1],
+ `Value prefix: expected ${args[1]} but received ${val.UnOp?.[0].image}`,
+ );
+ } else {
+ assertGreater(val.UnOp?.[0].image.length, 0);
+ }
+ value(val.value[0], args?.at(2) as T);
+ } else {
+ throw new Error(`Validation: unhandled value type!\n${JSON.stringify(val, null, 2)}`);
+ }
+}
+
+export type Constant = [keyof ConstantCstNode['children'], string] | Skip;
+export function constant(node: ConstantCstNode, args?: Constant) {
+ assertEquals(node.name, 'constant');
+ const c = node.children;
+ assert(
+ c.BIN || c.BOOL || c.CMPX || c.INT || c.REAL || c.STRING,
+ `Constant: unexpected literal type ${Object.keys(c)}`,
+ );
+ assertEquals(
+ Object.keys(c).length,
+ 1,
+ `Constant: expected 1 literal but received ${Object.keys(c).length}`,
+ );
+ if (args?.[0]) {
+ assertEquals(
+ c[args[0]]?.length,
+ 1,
+ `Constant: expected ${args[0]} but received ${Object.keys(c)}`,
+ );
+ if (args[1]) {
+ const literal = c[args[0]]?.[0]?.image;
+ assertEquals(literal, args[1], `Constant: expected ${args[1]} but received ${literal}`);
+ }
+ } else {
+ assertEquals(Object.values(c)[0].length, 1);
+ assertGreater(Object.values(c)[0][0].image.length, 0);
+ }
+}
+
+export type Type = string | Skip;
+export function type(node: TypeCstNode, args?: Type) {
+ assertEquals(node.name, 'type');
+ const t = node.children;
+ assertEquals(
+ t.BASIC_TYPE?.length,
+ 1,
+ `Type: expected 1 type but received ${t.BASIC_TYPE?.length}`,
+ );
+ if (args) {
+ assertEquals(
+ t.BASIC_TYPE[0].image,
+ args,
+ `Type: expected ${args} but received ${t.BASIC_TYPE[0].image}`,
+ );
+ } else {
+ assertGreater(t.BASIC_TYPE[0].image.length, 0);
+ }
+}
diff --git a/apps/parser/src/visitors/precedence.ts b/apps/parser/src/visitors/precedence.ts
index 77701a2..783a139 100644
--- a/apps/parser/src/visitors/precedence.ts
+++ b/apps/parser/src/visitors/precedence.ts
@@ -21,7 +21,7 @@ enum Prec {
Mult = 0, // * / %
Add, // + -
Shift, // << >> >>>
- Order, // < > <= >=
+ Relation, // < > <= >= in
Equal, // == !=
BinXor, // ^
BinAnd, // &
@@ -48,7 +48,8 @@ function tok2Prec(tok: TokenType) {
case Tokens.GT:
case Tokens.LE:
case Tokens.GE:
- return Prec.Order;
+ case Tokens.IN:
+ return Prec.Relation;
case Tokens.EE:
case Tokens.NE:
return Prec.Equal;
diff --git a/apps/parser/src/visitors/printers/json.ts b/apps/parser/src/visitors/printers/json.ts
index 822f323..7e14428 100644
--- a/apps/parser/src/visitors/printers/json.ts
+++ b/apps/parser/src/visitors/printers/json.ts
@@ -158,10 +158,16 @@ export class JSONPrinter extends BasePrinter implements ICstNodeVisitor<number, void> {
-    const op = Object.values(expr).find((e) => 'tokenType' in e[0]) as IToken[] | undefined;
+ const op = expr.BinOp;
+ const pf = expr.PostFix;
this.tree(`"expression":${this.pretty}{`, indent);
- this.tree(`"op":${this.pretty}"${op?.[0].image ?? ''}"`, indent + 1, true);
+ if (op) {
+ this.tree(`"op":${this.pretty}"${op[0].image}"`, indent + 1, true);
+ }
this.value(expr.value[0].children, indent + 1, !!expr.expression);
+ if (pf) {
+ this.tree(`"postfix":${this.pretty}"${pf[0].image}"`, indent + 1, true);
+ }
if (expr.expression) {
this.expression(expr.expression[0].children, indent + 1);
}
diff --git a/apps/parser/src/visitors/printers/paren.ts b/apps/parser/src/visitors/printers/paren.ts
index 74883ea..c40dd05 100644
--- a/apps/parser/src/visitors/printers/paren.ts
+++ b/apps/parser/src/visitors/printers/paren.ts
@@ -136,13 +136,16 @@ export class ParenPrinter extends BasePrinter implements ICstNodeVisitor<number, void> {
-    const op = Object.values(expr).find((e) => 'tokenType' in e[0]) as IToken[] | undefined;
+ const op = expr.BinOp;
if (op) {
this.tree(`(${op[0].image}`, indent);
} else {
this.tree('(', indent);
}
this.value(expr.value[0].children, indent + 2);
+ if (expr.PostFix) {
+ this.tree(expr.PostFix[0].image, indent);
+ }
if (expr.expression) {
this.expression(expr.expression[0].children, indent + 2);
}
diff --git a/apps/parser/src/visitors/printers/xml.ts b/apps/parser/src/visitors/printers/xml.ts
index 53a2ede..ad059ad 100644
--- a/apps/parser/src/visitors/printers/xml.ts
+++ b/apps/parser/src/visitors/printers/xml.ts
@@ -143,8 +143,12 @@ export class XMLPrinter extends BasePrinter implements ICstNodeVisitor<number, void> {
-    const op = Object.values(expr).find((e) => 'tokenType' in e[0]) as IToken[] | undefined;
-    this.tree(`<expression op="${op?.[0].image ?? ''}">`, indent);
+ const op = expr.BinOp;
+ const pf = expr.PostFix;
+ this.tree(
+      `<expression${op ? ` op="${op[0].image}"` : ''}${pf ? ` postfix="${pf[0].image}"` : ''}>`,
+ indent,
+ );
this.value(expr.value[0].children, indent + 2);
if (expr.expression) {
this.expression(expr.expression[0].children, indent + 2);
diff --git a/apps/parser/test/integration/comments.test.ts b/apps/parser/test/integration/comments.test.ts
index af1dad2..0953ae0 100644
--- a/apps/parser/test/integration/comments.test.ts
+++ b/apps/parser/test/integration/comments.test.ts
@@ -1,6 +1,8 @@
import * as TestSubject from '@encode/parser/lib';
-import { assert, assertEquals } from '@std/assert';
+import { v } from '@encode/parser/lib';
+import { assertEquals } from '@std/assert';
import { performParsingTestCase, useGlobalSettings } from '@/test/utils/mod.ts';
+import type { FileCstNode, StatementCstNode } from '../../generated/cst-types.ts';
Deno.test('Comment parsing #integration', async (t) => {
using _globalSettings = useGlobalSettings({ debugTrees: true });
@@ -14,7 +16,7 @@ Deno.test('Comment parsing #integration', async (t) => {
const typeAnalyzer = new TestSubject.TypeAnalyzer();
await t.step('line comment', () => {
- const { parserOutput, afterReorder } = performParsingTestCase({
+ const { parserOutput } = performParsingTestCase({
code: '// line comment',
parser,
@@ -25,13 +27,12 @@ Deno.test('Comment parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assertEquals(afterReorder, JSON.stringify({ file: {} }));
-
- assert(!parserOutput.statement, 'No output should be generated');
+ // TODO use dependent types so cast is unnecessary
+ v.file(parserOutput as FileCstNode, v.none);
});
await t.step('collapsed multiline comment', () => {
- const { parserOutput, afterReorder } = performParsingTestCase({
+ const { parserOutput } = performParsingTestCase({
code: [
'/**/ // collapsed multiline comment',
'/*****************',
@@ -52,16 +53,15 @@ Deno.test('Comment parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assertEquals(afterReorder, JSON.stringify({ file: {} }));
-
- assert(!parserOutput.statement, 'No output should be generated');
+ v.file(parserOutput as FileCstNode, v.none);
});
await t.step('comments embedded in a string', () => {
- const { parserOutput, afterReorder } = performParsingTestCase({
+ const { parserOutput } = performParsingTestCase({
code: "let str = '/*****/ //'; // comments embedded in a string",
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -69,30 +69,9 @@ Deno.test('Comment parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(!!parserOutput.statement);
-
- assertEquals(
- afterReorder,
- JSON.stringify({
- file: {
- statements: [
- {
- type: 'declaration',
- declaration: {
- image: 'str',
- expression: {
- op: '',
- value: {
- constant: "'/*****/ //'",
- },
- },
- },
- },
- ],
- },
- }),
- );
-
- assertEquals(parserOutput.statement.length, 1, 'One statement should be generated');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ ['str', v.none, [['constant', ['STRING', "'/*****/ //'"]]]],
+ ]);
});
});
diff --git a/apps/parser/test/integration/control-flow.test.ts b/apps/parser/test/integration/control-flow.test.ts
index 9247b13..aa3edc1 100644
--- a/apps/parser/test/integration/control-flow.test.ts
+++ b/apps/parser/test/integration/control-flow.test.ts
@@ -1,6 +1,8 @@
import * as TestSubject from '@encode/parser/lib';
-import { assert, assertEquals, assertGreater } from '@std/assert';
+import { v } from '@encode/parser/lib';
+import { assertEquals } from '@std/assert';
import { performParsingTestCase, useGlobalSettings } from '@/test/utils/mod.ts';
+import type { FileCstNode, StatementCstNode } from '../../generated/cst-types.ts';
Deno.test('Control flow parsing #integration', async (t) => {
using _globalSettings = useGlobalSettings({ debugTrees: true });
@@ -9,7 +11,7 @@ Deno.test('Control flow parsing #integration', async (t) => {
const precedenceHandler = new TestSubject.PrecedenceHandler();
- const printer = new TestSubject.ParenPrinter();
+ const printer = new TestSubject.JSONPrinter(false, null, 0);
const typeAnalyzer = new TestSubject.TypeAnalyzer();
@@ -25,8 +27,14 @@ Deno.test('Control flow parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(parserOutput.statement);
- assertGreater(parserOutput.statement.length, 0, 'Statements should be generated');
+ v.file(parserOutput as FileCstNode, [
+ ['declaration', ['a', v.none, [['constant', ['INT', '0']]]]],
+ [
+ 'if',
+ [['expression', [['id', 'a'], v.none, '>', [['constant', ['INT', '1']]]], v.none]],
+ v.none,
+ ],
+ ]);
assertEquals(typeOutput.warnings, 0, 'TypeAnalyzer should not report any warnings');
assertEquals(typeOutput.errors, 0, 'TypeAnalyzer should not report any errors');
@@ -56,8 +64,10 @@ Deno.test('Control flow parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(parserOutput.statement);
- assertGreater(parserOutput.statement.length, 0, 'Statements should be generated');
+ v.file(parserOutput as FileCstNode, [
+ ['declaration', ['a', v.none, [['constant', ['INT', '0']]]]],
+ ['if', v.skip, [['expression', [['id', 'b'], v.none, '=', [['constant', ['INT', '2']]]]]]],
+ ]);
assertEquals(typeOutput.warnings, 1, 'TypeAnalyzer should report a warning');
assertEquals(typeOutput.errors, 1, 'TypeAnalyzer should report an error');
@@ -70,6 +80,7 @@ Deno.test('Control flow parsing #integration', async (t) => {
),
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -77,8 +88,13 @@ Deno.test('Control flow parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(parserOutput.statement);
- assertGreater(parserOutput.statement.length, 0, 'Statements should be generated');
+ v.statement(parserOutput as StatementCstNode, [
+ 'while',
+ [],
+ [['id', 'a'], v.none, '<', [['constant', ['INT', '3']]]],
+ v.none,
+ v.none,
+ ]);
assertEquals(typeOutput.warnings, 0, 'TypeAnalyzer should not report any warnings');
assertEquals(typeOutput.errors, 1, 'TypeAnalyzer should report an error');
@@ -98,6 +114,7 @@ Deno.test('Control flow parsing #integration', async (t) => {
].join('\n'),
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -105,29 +122,28 @@ Deno.test('Control flow parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(parserOutput.statement);
- assertGreater(parserOutput.statement.length, 0, 'Statements should be generated');
+ v.statement(parserOutput as StatementCstNode, [
+ 'while',
+ v.none,
+ [['id', 'b'], v.none, '>', [['constant', ['INT', '4']]]],
+ [
+ ['declaration', ['c', v.none, [['constant', ['INT', '1']]]]],
+ ['if', [['expression', [['id', 'a']], [['continue']]]], v.none],
+ ],
+ [
+ [
+ 'return',
+ [
+ ['nested', [['constant', ['INT', '1']], v.none, '+', [['constant', ['INT', '2']]]]],
+ v.none,
+ '+',
+ [['id', 'c']],
+ ],
+ ],
+ ],
+ ]);
assertEquals(typeOutput.warnings, 0, 'TypeAnalyzer should not report any warnings');
assertEquals(typeOutput.errors, 3, 'TypeAnalyzer should report 3 errors');
});
-
- await t.step('simple do-while loop', () => {
- const { parserOutput, typeOutput } = performParsingTestCase({
- code: 'do {} while(true) {}',
-
- parser,
- precedenceHandler,
- printer,
- typeAnalyzer,
- });
-
- assertEquals(parser.errors.length, 0, 'Parser should not error');
-
- assert(parserOutput.statement);
- assertGreater(parserOutput.statement.length, 0, 'Statements should be generated');
-
- assertEquals(typeOutput.warnings, 0, 'TypeAnalyzer should not report any warnings');
- assertEquals(typeOutput.errors, 0, 'TypeAnalyzer should not report any errors');
- });
});
diff --git a/apps/parser/test/integration/data-types.test.ts b/apps/parser/test/integration/data-types.test.ts
index 5aba759..34293fa 100644
--- a/apps/parser/test/integration/data-types.test.ts
+++ b/apps/parser/test/integration/data-types.test.ts
@@ -1,6 +1,8 @@
import * as TestSubject from '@encode/parser/lib';
-import { assert, assertEquals } from '@std/assert';
+import { v } from '@encode/parser/lib';
+import { assertEquals } from '@std/assert';
import { performParsingTestCase, useGlobalSettings } from '@/test/utils/mod.ts';
+import type { StatementCstNode } from '../../generated/cst-types.ts';
Deno.test('Data type parsing #integration', async (t) => {
using _globalSettings = useGlobalSettings({ debugTrees: true });
@@ -18,6 +20,7 @@ Deno.test('Data type parsing #integration', async (t) => {
code: 'let real = 1.0;',
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -25,7 +28,10 @@ Deno.test('Data type parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(parserOutput.statement, 'Some output should be generated');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ ['real', v.none, [['constant', ['REAL', '1.0']]]],
+ ]);
});
await t.step('integer literal', () => {
@@ -33,6 +39,7 @@ Deno.test('Data type parsing #integration', async (t) => {
code: 'let integer = 21;',
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -40,7 +47,10 @@ Deno.test('Data type parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(parserOutput.statement, 'Some output should be generated');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ ['integer', v.none, [['constant', ['INT', '21']]]],
+ ]);
});
await t.step('string literal', () => {
@@ -48,6 +58,7 @@ Deno.test('Data type parsing #integration', async (t) => {
code: "let str = 'Hello, World!';",
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -55,7 +66,10 @@ Deno.test('Data type parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(parserOutput.statement, 'Some output should be generated');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ ['str', v.none, [['constant', ['STRING', "'Hello, World!'"]]]],
+ ]);
});
await t.step('boolean literal', () => {
@@ -63,6 +77,7 @@ Deno.test('Data type parsing #integration', async (t) => {
code: 'let flag = true;',
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -70,7 +85,10 @@ Deno.test('Data type parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(parserOutput.statement, 'Some output should be generated');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ ['flag', v.none, [['constant', ['BOOL', 'true']]]],
+ ]);
});
await t.step('bit literal', () => {
@@ -78,6 +96,7 @@ Deno.test('Data type parsing #integration', async (t) => {
code: 'let bits = 0xff;',
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -85,7 +104,10 @@ Deno.test('Data type parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(parserOutput.statement, 'Some output should be generated');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ ['bits', v.none, [['constant', ['BIN', '0xff']]]],
+ ]);
});
await t.step('complex number literal', () => {
@@ -93,6 +115,7 @@ Deno.test('Data type parsing #integration', async (t) => {
code: 'let imag = 1.0 + 2.0i;',
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -100,6 +123,13 @@ Deno.test('Data type parsing #integration', async (t) => {
assertEquals(parser.errors.length, 0, 'Parser should not error');
- assert(parserOutput.statement, 'Some output should be generated');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ [
+ 'imag',
+ v.none,
+ [['constant', ['REAL', '1.0']], v.none, '+', [['constant', ['CMPX', '2.0i']]]],
+ ],
+ ]);
});
});
diff --git a/apps/parser/test/integration/expressions.test.ts b/apps/parser/test/integration/expressions.test.ts
index 4384c56..c155c1e 100644
--- a/apps/parser/test/integration/expressions.test.ts
+++ b/apps/parser/test/integration/expressions.test.ts
@@ -1,6 +1,8 @@
import * as TestSubject from '@encode/parser/lib';
-import { assert, assertEquals, assertGreater } from '@std/assert';
+import { v } from '@encode/parser/lib';
+import { assert, assertEquals } from '@std/assert';
import { performParsingTestCase, useGlobalSettings } from '@/test/utils/mod.ts';
+import type { StatementCstNode } from '../../generated/cst-types.ts';
Deno.test('Expression parsing #integration', async (t) => {
using _globalSettings = useGlobalSettings({ debugTrees: true });
@@ -18,6 +20,7 @@ Deno.test('Expression parsing #integration', async (t) => {
code: 'let a = 1 * 2 + 3;',
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -25,8 +28,19 @@ Deno.test('Expression parsing #integration', async (t) => {
assert(parser.errors.length === 0, 'Parser should not error');
- assert(parserOutput.statement);
- assertGreater(parserOutput.statement.length, 0, 'Statements should be generated');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ [
+ 'a',
+ v.none,
+ [
+ ['nested', [['constant', ['INT', '1']], v.none, '*', [['constant', ['INT', '2']]]]],
+ v.none,
+ '+',
+ [['constant', ['INT', '3']]],
+ ],
+ ],
+ ]);
assertEquals(precOutput, 1, 'Expression should be reordered');
});
diff --git a/apps/parser/test/integration/keywords.test.ts b/apps/parser/test/integration/keywords.test.ts
index 911914b..31ca793 100644
--- a/apps/parser/test/integration/keywords.test.ts
+++ b/apps/parser/test/integration/keywords.test.ts
@@ -1,6 +1,8 @@
import * as TestSubject from '@encode/parser/lib';
+import { v } from '@encode/parser/lib';
import { assert } from '@std/assert';
import { performParsingTestCase, useGlobalSettings } from '@/test/utils/mod.ts';
+import type { FileCstNode, StatementCstNode } from '../../generated/cst-types.ts';
Deno.test('Keyword parsing #integration', async (t) => {
using _globalSettings = useGlobalSettings({ debugTrees: true });
@@ -34,7 +36,25 @@ Deno.test('Keyword parsing #integration', async (t) => {
assert(parser.errors.length === 0, 'Parser should not error');
- assert(parserOutput.statement, 'Parser should generate statements');
+ v.file(parserOutput as FileCstNode, [
+ ['declaration', ['lettuce', v.none, [['constant', ['INT', '1']]]]],
+ [
+ 'if',
+ [
+ [
+ 'expression',
+ [['id', 'lettuce']],
+ [['declaration', ['spiffy', v.none, [['constant', ['INT', '2']]]]]],
+ ],
+ [
+ 'expression',
+ [['id', 'lettuce']],
+ [['declaration', ['elifShmelif', v.none, [['constant', ['INT', '3']]]]]],
+ ],
+ ],
+ [['declaration', ['elsevier', v.none, [['constant', ['INT', '4']]]]]],
+ ],
+ ]);
});
await t.step('false positive keyword snippets', async (t) => {
@@ -43,6 +63,7 @@ Deno.test('Keyword parsing #integration', async (t) => {
code: 'let coffeebreak = 8; // break',
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -50,7 +71,10 @@ Deno.test('Keyword parsing #integration', async (t) => {
assert(parser.errors.length === 0, 'Parser should not error');
- assert(parserOutput.statement, 'Parser should generate statements');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ ['coffeebreak', v.none, [['constant', ['INT', '8']]]],
+ ]);
});
await t.step('continue', () => {
@@ -58,6 +82,7 @@ Deno.test('Keyword parsing #integration', async (t) => {
code: 'let dareIcontinue = 9; // continue',
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -65,7 +90,10 @@ Deno.test('Keyword parsing #integration', async (t) => {
assert(parser.errors.length === 0, 'Parser should not error');
- assert(parserOutput.statement, 'Parser should generate statements');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ ['dareIcontinue', v.none, [['constant', ['INT', '9']]]],
+ ]);
});
await t.step('return', () => {
@@ -80,7 +108,10 @@ Deno.test('Keyword parsing #integration', async (t) => {
assert(parser.errors.length === 0, 'Parser should not error');
- assert(parserOutput.statement, 'Parser should generate statements');
+ v.file(parserOutput as FileCstNode, [
+ ['declaration', ['returnOfTheJedi', v.none, [['constant', ['INT', '10']]]]],
+ ['return', [['id', 'OfTheJedi']]],
+ ]);
});
await t.step('and, or, & not', () => {
@@ -95,7 +126,10 @@ Deno.test('Keyword parsing #integration', async (t) => {
assert(parser.errors.length === 0, 'Parser should not error');
- assert(parserOutput.statement, 'Parser should generate statements');
+ v.file(parserOutput as FileCstNode, [
+ ['declaration', ['andor', v.none, [['constant', ['INT', '11']]]]],
+ ['declaration', ['notInNottingham', v.none, [['prefix', 'not', ['id', 'andor']]]]],
+ ]);
});
await t.step('in', () => {
@@ -103,6 +137,7 @@ Deno.test('Keyword parsing #integration', async (t) => {
code: 'let spinach = 13; // in',
parser,
+ startAt: 'statement',
precedenceHandler,
printer,
typeAnalyzer,
@@ -110,7 +145,10 @@ Deno.test('Keyword parsing #integration', async (t) => {
assert(parser.errors.length === 0, 'Parser should not error');
- assert(parserOutput.statement, 'Parser should generate statements');
+ v.statement(parserOutput as StatementCstNode, [
+ 'declaration',
+ ['spinach', v.none, [['constant', ['INT', '13']]]],
+ ]);
});
});
});
diff --git a/apps/parser/test/utils/mod.ts b/apps/parser/test/utils/mod.ts
index 1e6589e..ccbf328 100644
--- a/apps/parser/test/utils/mod.ts
+++ b/apps/parser/test/utils/mod.ts
@@ -3,12 +3,12 @@ import {
debug,
EncodeLexer,
type EncodeParser,
+ type EncodeRule,
Globals,
type PrecedenceHandler,
type TypeAnalyzer,
} from '@encode/parser/lib';
-import type { ILexingResult } from 'chevrotain';
-import type { FileCstChildren } from '@/generated/cst-types.ts';
+import type { CstNode, ILexingResult } from 'chevrotain';
export interface TestCaseParameters {
/**
@@ -17,6 +17,7 @@ export interface TestCaseParameters {
* **Note:** We choose not to instantiate this ourselves in case we want to inject something else, e.g. a shim or an experimental impl
*/
parser: EncodeParser;
+ startAt?: EncodeRule;
/**
* The parser to use for parsing Encode code.
*
@@ -43,7 +44,7 @@ export interface TestCaseParameters {
export interface TestCaseOutputs {
lexingResult: ILexingResult;
- parserOutput: FileCstChildren;
+ parserOutput: CstNode;
beforeReorder: string;
afterReorder: string;
precOutput: number;
@@ -64,11 +65,11 @@ export interface TestCaseOutputs {
* @returns the results of executing the test procedure to be examined by assertions
*/
export function performParsingTestCase(params: TestCaseParameters): TestCaseOutputs {
- const { code, parser, printer, typeAnalyzer, precedenceHandler } = params;
+ const { code, parser, startAt = 'file', printer, typeAnalyzer, precedenceHandler } = params;
const lexingResult = EncodeLexer.tokenize(code);
parser.input = lexingResult.tokens;
- const parserOutput = parser.file();
+ const parserOutput = parser[startAt]();
// cache printer.output
const printerOutput = printer.output;
@@ -101,7 +102,7 @@ export function performParsingTestCase(params: TestCaseParameters): TestCaseOutp
const testCaseOutputs: TestCaseOutputs = {
lexingResult,
- parserOutput: parserOutput.children,
+ parserOutput,
beforeReorder,
afterReorder,
precOutput: precedenceHandler.reordered,