basic parser built

This commit is contained in:
Tan, Kian-ting 2023-10-10 22:15:02 +08:00
parent 8d03cc503c
commit df1419e45c
3 changed files with 248 additions and 38 deletions

View file

@@ -24,4 +24,5 @@ License: MIT
- 20230928basically fix `issue1`。其他ê物件猶著做。 - 20230928basically fix `issue1`。其他ê物件猶著做。
- 20230929add multi args parsing for `callee`. - 20230929add multi args parsing for `callee`.
- 20230930tîng khí parser, using `js-token`. - 20230930tîng khí parser, using `js-token`.
- 20231016: tîng siá parser, using `ts-parsec`. - 20231006: tîng siá parser, using `ts-parsec`.
- 20231010: 初步完成tsit ê階段ê Parser`。

View file

@@ -62,43 +62,65 @@ var TokenKind;
TokenKind[TokenKind["Number"] = 2] = "Number"; TokenKind[TokenKind["Number"] = 2] = "Number";
TokenKind[TokenKind["Op"] = 3] = "Op"; TokenKind[TokenKind["Op"] = 3] = "Op";
TokenKind[TokenKind["ExprMark"] = 4] = "ExprMark"; TokenKind[TokenKind["ExprMark"] = 4] = "ExprMark";
TokenKind[TokenKind["Paren"] = 5] = "Paren"; TokenKind[TokenKind["ExcapeAt"] = 5] = "ExcapeAt";
TokenKind[TokenKind["SpaceNL"] = 6] = "SpaceNL"; TokenKind[TokenKind["Paren"] = 6] = "Paren";
TokenKind[TokenKind["Id"] = 7] = "Id"; TokenKind[TokenKind["SpaceNL"] = 7] = "SpaceNL";
TokenKind[TokenKind["Str"] = 8] = "Str"; TokenKind[TokenKind["Id"] = 8] = "Id";
TokenKind[TokenKind["Str"] = 9] = "Str";
TokenKind[TokenKind["Comment"] = 10] = "Comment";
})(TokenKind || (TokenKind = {})); })(TokenKind || (TokenKind = {}));
/** /**
* Parsing * Parsing
*/ */
const lexer = p.buildLexer([ const lexer = p.buildLexer([
[true, /^\d+(\.\d+)?/g, TokenKind.Number], [true, /^\d+(\.\d+)?/g, TokenKind.Number],
[true, /^\\\@/g, TokenKind.ExcapeAt],
[true, /^\/\*([^/]|\/[^*])*\*\//g, TokenKind.Comment],
[true, /^\;/g, TokenKind.Semicolon], [true, /^\;/g, TokenKind.Semicolon],
[true, /^[-][-][-]/g, TokenKind.Seperator], [true, /^[-][-][-]/g, TokenKind.Seperator],
[true, /^[\+\-\*\/\&\|\!\^\<\>\~\=\?]+/g, TokenKind.Op], [true, /^[\+\-\*\/\&\|\!\^\<\>\~\=\?]+/g, TokenKind.Op],
[true, /^\@+/g, TokenKind.ExprMark], [true, /^\@/g, TokenKind.ExprMark],
[true, /^[()\[\]{}]/g, TokenKind.Paren], [true, /^[()\[\]{}]/g, TokenKind.Paren],
[true, /^["]([\"]|[\\].)*["]/g, TokenKind.Str], [true, /^["]([\"]|[\\].)*["]/g, TokenKind.Str],
[true, /^[']([\']|[\\].)*[']/g, TokenKind.Str], [true, /^[']([\']|[\\].)*[']/g, TokenKind.Str],
[true, /^[()\[\]{}]/g, TokenKind.Paren], [true, /^[()\[\]{}]/g, TokenKind.Paren],
[true, /^[^\s\n\t\r;]+/g, TokenKind.Id], [true, /^[^\/\\\@\s\n\t\r;]+/g, TokenKind.Id],
[false, /^(\s|\n|\r|\t)+/g, TokenKind.SpaceNL] [true, /^(\s|\n|\r|\t)+/g, TokenKind.SpaceNL],
]); ]);
/** /**
* *
* # TEST * # TEST
*/ */
const inputTxt = `import ast; const inputTxt = `import a as b; /*bacourt*/
/* ba choir
ipsum lorem*/
import you as john;
--- ---
122`;
臺中市\\\@
政府
@2+2==4;
console.log("122");@
人民
`;
const PROG = p.rule(); const PROG = p.rule();
const UNIT = p.rule(); const SEGMENT = p.rule();
const IMPORT = p.rule();
const IMPORTS = p.rule(); const IMPORTS = p.rule();
const SEMICOLON = p.rule(); const SEMICOLON = p.rule();
const EXCAPE_AT = p.rule();
const NOT_AT_TEXT = p.rule();
const CONTENT = p.rule();
let doubleMinus = { type: 'Punctuator', value: '--' }; let doubleMinus = { type: 'Punctuator', value: '--' };
let doubleMinus2 = p.str('--'); let doubleMinus2 = p.str('--');
const TERM = p.rule(); const TERM = p.rule();
function applyUnit(value) { function applySegment(input) {
return value.text; let unpackedInnerExprs = input[1].map((x) => { return x.text; });
return ["%exprs", unpackedInnerExprs];
} }
function applySemiColon(value) { function applySemiColon(value) {
return value.text; return value.text;
@@ -106,14 +128,80 @@ function applySemiColon(value) {
function applyParts(first, second) { function applyParts(first, second) {
return ["%clo", first, second[1]]; return ["%clo", first, second[1]];
} }
PROG.setPattern(p.lrec_sc(IMPORTS, p.seq(p.str('---'), UNIT), applyParts)); function applyComment(value) {
function applyImports(input) { return [value.text];
}
function applyImport(input) {
let importTail = input[1].map(x => x.text); let importTail = input[1].map(x => x.text);
return ["import"].concat(importTail); return ["import"].concat(importTail);
} }
; ;
IMPORTS.setPattern(p.apply(p.seq(p.str('import'), p.rep_sc(p.tok(TokenKind.Id)), SEMICOLON), applyImports)); /*
function applyImportComment(input: [Token<TokenKind>,Token<TokenKind>[],
tkTree, Token<TokenKind.Comment>]) : tkTree{
let importTail = input[1].map(x=>x.text);
let comment = [input[3].text];
return ["import"].concat(importTail).concat(comment);
};*/
function applyImports(input) {
let resultBody = [input[0]].concat(input[1]);
let resultWrapper = ["%import", resultBody];
return resultWrapper;
}
;
function applyNotAtText(value) {
if (value.text == "\\\@") {
return '@';
}
else {
return value.text;
}
}
;
function applyText(input) {
return ["%text", input];
}
;
function applyContent(input) {
return ["%content", input];
}
;
function applySpaceNL(value) {
return value.text;
}
/**
* IMPORTEE: Number, Op, Paren, Id, Str, Comment,
*/
let IMPORTEE = p.alt(p.tok(TokenKind.Number), p.tok(TokenKind.Op), p.tok(TokenKind.Paren), p.tok(TokenKind.Id), p.tok(TokenKind.Str), p.tok(TokenKind.SpaceNL), p.tok(TokenKind.Comment));
let NOT_AT = p.alt(p.tok(TokenKind.Seperator), p.tok(TokenKind.Semicolon), p.tok(TokenKind.Number), p.tok(TokenKind.ExcapeAt), p.tok(TokenKind.Op), p.tok(TokenKind.Paren), p.tok(TokenKind.SpaceNL), p.tok(TokenKind.Id), p.tok(TokenKind.Str), p.tok(TokenKind.Comment));
/**
* PROG : IMPORTS '---' CONTENT;
*/
PROG.setPattern(p.lrec_sc(IMPORTS, p.seq(p.str('---'), CONTENT), applyParts));
/**
* NOT_AT_TEXT : NOT_AT
*/
NOT_AT_TEXT.setPattern(p.apply(NOT_AT, applyNotAtText));
IMPORTS.setPattern(p.apply(p.seq(IMPORT, p.rep(IMPORT)), applyImports));
/**
* IMPORT :
* 'import' IMPORTEE* SEMICOLON |
* COMMENT |
*/
IMPORT.setPattern(p.alt(p.apply(p.seq(p.str('import'), p.rep_sc(IMPORTEE), SEMICOLON), applyImport), p.apply(p.tok(TokenKind.Comment), applyComment), p.apply(p.tok(TokenKind.SpaceNL), applySpaceNL)));
/**
* SEMICOLON : ';';
*/
SEMICOLON.setPattern(p.apply(p.tok(TokenKind.Semicolon), applySemiColon)); SEMICOLON.setPattern(p.apply(p.tok(TokenKind.Semicolon), applySemiColon));
UNIT.setPattern(p.apply(p.tok(TokenKind.Number), applyUnit)); /**
* SEGMENT : '@' NOT_AT* '@' |
* (NOT_AT_TEXT | EXCAPE_AT)*
*/
SEGMENT.setPattern(p.alt(p.apply(p.rep_sc(NOT_AT_TEXT), applyText), p.apply(p.seq(p.str('@'), p.rep(NOT_AT), p.str('@')), applySegment)));
/**
* CONTENT : SEGMENT*
*/
CONTENT.setPattern(p.apply(p.rep(SEGMENT), applyContent));
console.log(repr(lexer.parse(inputTxt)));
let tree = p.expectSingleResult(p.expectEOF(PROG.parse(lexer.parse(inputTxt)))); let tree = p.expectSingleResult(p.expectEOF(PROG.parse(lexer.parse(inputTxt))));
console.log("RESULT=" + tkTreeToSExp(tree)); console.log("RESULT=" + tkTreeToSExp(tree));

View file

@@ -3,6 +3,7 @@ import jsTokens from "js-tokens";
import * as util from 'util'; import * as util from 'util';
import * as p from 'typescript-parsec'; import * as p from 'typescript-parsec';
import { Token } from 'typescript-parsec'; import { Token } from 'typescript-parsec';
import { TokenType } from "./tokenize";
/** /**
* *
* # REPRESENTATION * # REPRESENTATION
@@ -40,15 +41,17 @@ let repr = (x : any)=>{return util.inspect(x, {depth: null})};
type tkTree = string | tkTree[]; type tkTree = string | tkTree[];
enum TokenKind { enum TokenKind {
Seperator, Seperator, // ---
Semicolon, Semicolon, // ;
Number, Number,
Op, Op,
ExprMark, ExprMark, // @
ExcapeAt, // \@
Paren, Paren,
SpaceNL, SpaceNL, // \s\t\n\r
Id, Id,
Str, Str,
Comment, // /* ooo */
} }
/** /**
@@ -56,16 +59,19 @@ enum TokenKind {
*/ */
const lexer = p.buildLexer([ const lexer = p.buildLexer([
[true, /^\d+(\.\d+)?/g, TokenKind.Number], [true, /^\d+(\.\d+)?/g, TokenKind.Number],
[true, /^\\\@/g, TokenKind.ExcapeAt],
[true, /^\/\*([^/]|\/[^*])*\*\//g, TokenKind.Comment],
[true, /^\;/g, TokenKind.Semicolon], [true, /^\;/g, TokenKind.Semicolon],
[true, /^[-][-][-]/g, TokenKind.Seperator], [true, /^[-][-][-]/g, TokenKind.Seperator],
[true, /^[\+\-\*\/\&\|\!\^\<\>\~\=\?]+/g, TokenKind.Op], [true, /^[\+\-\*\/\&\|\!\^\<\>\~\=\?]+/g, TokenKind.Op],
[true, /^\@+/g, TokenKind.ExprMark], [true, /^\@/g, TokenKind.ExprMark],
[true, /^[()\[\]{}]/g, TokenKind.Paren], [true, /^[()\[\]{}]/g, TokenKind.Paren],
[true, /^["]([\"]|[\\].)*["]/g, TokenKind.Str], [true, /^["]([\"]|[\\].)*["]/g, TokenKind.Str],
[true, /^[']([\']|[\\].)*[']/g, TokenKind.Str], [true, /^[']([\']|[\\].)*[']/g, TokenKind.Str],
[true, /^[()\[\]{}]/g, TokenKind.Paren], [true, /^[()\[\]{}]/g, TokenKind.Paren],
[true, /^[^\s\n\t\r;]+/g, TokenKind.Id], [true, /^[^\/\\\@\s\n\t\r;]+/g, TokenKind.Id],
[false, /^(\s|\n|\r|\t)+/g, TokenKind.SpaceNL] [true, /^(\s|\n|\r|\t)+/g, TokenKind.SpaceNL],
]); ]);
/** /**
@@ -73,23 +79,41 @@ const lexer = p.buildLexer([
* # TEST * # TEST
*/ */
const inputTxt= const inputTxt=
`import ast; `import a as b; /*bacourt*/
/* ba choir
ipsum lorem*/
import you as john;
--- ---
122`;
\\\@
@2+2==4;
console.log("122");@
`;
const PROG = p.rule<TokenKind, tkTree>(); const PROG = p.rule<TokenKind, tkTree>();
const UNIT = p.rule<TokenKind, tkTree>(); const SEGMENT = p.rule<TokenKind, tkTree>();
const IMPORT = p.rule<TokenKind, tkTree>();
const IMPORTS = p.rule<TokenKind, tkTree>(); const IMPORTS = p.rule<TokenKind, tkTree>();
const SEMICOLON = p.rule<TokenKind, tkTree>(); const SEMICOLON = p.rule<TokenKind, tkTree>();
const EXCAPE_AT = p.rule<TokenKind, tkTree>();
const NOT_AT_TEXT = p.rule<TokenKind, tkTree>();
const CONTENT = p.rule<TokenKind, tkTree>();
let doubleMinus = { type: 'Punctuator', value: '--' }; let doubleMinus = { type: 'Punctuator', value: '--' };
let doubleMinus2 = p.str('--'); let doubleMinus2 = p.str('--');
const TERM = p.rule<TokenKind, tkTree>(); const TERM = p.rule<TokenKind, tkTree>();
function applyUnit(value: Token<TokenKind.Number>): tkTree{ function applySegment(input: [Token<TokenKind>, Token<TokenKind>[],
return value.text; Token<TokenKind>]): tkTree[]{
let unpackedInnerExprs = input[1].map((x)=>{return x.text});
return ["%exprs", unpackedInnerExprs];
} }
function applySemiColon(value: Token<TokenKind.Semicolon>): tkTree{ function applySemiColon(value: Token<TokenKind.Semicolon>): tkTree{
@@ -102,26 +126,108 @@ function applyParts(first: tkTree,
} }
function applyComment(value: Token<TokenKind.Comment>): tkTree[]{
return [value.text];
}
function applyImports(input: [Token<TokenKind>,Token<TokenKind>[], tkTree]):tkTree{ function applyImport(input: [Token<TokenKind>,Token<TokenKind>[], tkTree]) : tkTree{
let importTail = input[1].map(x=>x.text); let importTail = input[1].map(x=>x.text);
return ["import"].concat(importTail); return ["import"].concat(importTail);
}; };
/*
function applyImportComment(input: [Token<TokenKind>,Token<TokenKind>[],
tkTree, Token<TokenKind.Comment>]) : tkTree{
let importTail = input[1].map(x=>x.text);
let comment = [input[3].text];
return ["import"].concat(importTail).concat(comment);
};*/
function applyImports(input : [tkTree, tkTree[]]): tkTree{
let resultBody = [input[0]].concat(input[1]);
let resultWrapper = ["%import", resultBody];
return resultWrapper;
};
function applyNotAtText(value : Token<TokenKind>): tkTree{
if (value.text == "\\\@"){
return '@';
}
else{return value.text;}
};
function applyText (input : tkTree): tkTree[]{
return ["%text", input];
};
function applyContent(input : tkTree[]): tkTree[]{
return ["%content", input];
};
function applySpaceNL(value : Token<TokenKind.SpaceNL>): tkTree{
return value.text;
}
/** /**
* PROG : IMPORTS '---' UNIT; * IMPORTEE: Number, Op, Paren, Id, Str, Comment,
*/
let IMPORTEE = p.alt(p.tok(TokenKind.Number),
p.tok(TokenKind.Op),
p.tok(TokenKind.Paren),
p.tok(TokenKind.Id),
p.tok(TokenKind.Str),
p.tok(TokenKind.SpaceNL),
p.tok(TokenKind.Comment));
let NOT_AT = p.alt(p.tok(TokenKind.Seperator),
p.tok(TokenKind.Semicolon),
p.tok(TokenKind.Number),
p.tok(TokenKind.ExcapeAt),
p.tok(TokenKind.Op),
p.tok(TokenKind.Paren),
p.tok(TokenKind.SpaceNL),
p.tok(TokenKind.Id),
p.tok(TokenKind.Str),
p.tok(TokenKind.Comment),
);
/**
* PROG : IMPORTS '---' CONTENT;
*/ */
PROG.setPattern( PROG.setPattern(
p.lrec_sc(IMPORTS, p.seq(p.str('---'), UNIT), applyParts) p.lrec_sc(IMPORTS, p.seq(p.str('---'), CONTENT), applyParts)
) )
/** /**
* PROG : 'import' Id* SEMICOLON; * NOT_AT_TEXT : NOT_AT
*/ */
NOT_AT_TEXT.setPattern(
p.apply(NOT_AT, applyNotAtText)
);
IMPORTS.setPattern( IMPORTS.setPattern(
p.apply(p.seq(p.str('import'), p.rep_sc(p.tok(TokenKind.Id)), SEMICOLON) , applyImports) p.apply( p.seq(IMPORT, p.rep(IMPORT)), applyImports)
);
/**
* IMPORT :
* 'import' IMPORTEE* SEMICOLON |
* COMMENT |
*/
IMPORT.setPattern(
p.alt(
p.apply(p.seq(p.str('import'), p.rep_sc(IMPORTEE), SEMICOLON),
applyImport),
p.apply(p.tok(TokenKind.Comment), applyComment),
p.apply(p.tok(TokenKind.SpaceNL), applySpaceNL)
)
); );
/** /**
@@ -131,13 +237,28 @@ SEMICOLON.setPattern(
p.apply(p.tok(TokenKind.Semicolon), applySemiColon) p.apply(p.tok(TokenKind.Semicolon), applySemiColon)
); );
/** /**
* UNIT : Number; * SEGMENT : '@' NOT_AT* '@' |
* (NOT_AT_TEXT | EXCAPE_AT)*
*/ */
UNIT.setPattern( SEGMENT.setPattern(
p.apply(p.tok(TokenKind.Number), applyUnit) p.alt(
p.apply(p.rep_sc(NOT_AT_TEXT), applyText),
p.apply(p.seq(p.str('@'), p.rep(NOT_AT), p.str('@')), applySegment),
)
); );
/**
* CONTENT : SEGMENT*
*/
CONTENT.setPattern(
p.apply(p.rep(SEGMENT), applyContent)
);
console.log(repr(lexer.parse(inputTxt)));
let tree = p.expectSingleResult(p.expectEOF(PROG.parse(lexer.parse(inputTxt)))); let tree = p.expectSingleResult(p.expectEOF(PROG.parse(lexer.parse(inputTxt))));