remove something and test something about parsing

parent 7227602546
commit 9784854866

3 changed files with 15 additions and 16 deletions

@@ -13,3 +13,4 @@ License: MIT
 - 20230914-15: Added some tokenizer features.
 - 20230918: Reworked the parser from the bottom up; added rules.
 - 20230921-22: Added rules; reported an issue.
+- 20230925-26: Experimented with how to resolve [issue1](https://kianting.info/pipermail/clo_kianting.info/2023-September/000004.html); not yet successful.

src/index.js (14 changed lines)

@@ -225,7 +225,7 @@ let midfix = (f, signal) => (x) => {
         let ast_tail = slice(a.value.ast, a.value.ast.length - 3);
         let new_ast = [ast_tail];
         a.value.ast = new_ast;
-        console.log("+" + signal + "+" + repr(a));
+        // console.log("+"+signal+"+"+repr(a));
     }
     return a;
 };

@@ -256,14 +256,12 @@ let single = orDo(single1, single2);
 let fac1Appliee = circumfix((x) => thenDo(thenDo(thenDo(tk.toSome(x), tLParen), tInt), tRParen), "fac1");
 let fac1 = (x) => {
     let raw = thenDo(thenDo(toSome(x), single), OnceOrMoreDo(fac1Appliee));
-    console.log("+" + "火鳥" + "+" + repr(raw));
     if (raw._tag == "Some") {
         var result = raw.value.ast[0];
         let applyToken = { text: '%apply', ln: 0, col: 0 };
         for (var i = 1; i < raw.value.ast.length; i++) {
             result = [applyToken, result, raw.value.ast[i]];
         }
-        console.log("+" + "hitori" + "+" + repr(result));
         if (!Array.isArray(result)) {
             raw.value.ast = [result];
         }
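
The fac1 change above keeps the loop that folds repeated applications such as `1(2)(3)` into nested `%apply` nodes and only drops the debug logging. A minimal standalone sketch of that fold (the `AST` type and the `foldApply` name are assumptions for illustration, not code from this repository):

type AST = { text: string; ln: number; col: number } | AST[];

// Fold [f, a, b] into [%apply, [%apply, f, a], b], i.e. a left-nested
// application chain, mirroring the loop in the hunk above.
function foldApply(nodes: AST[]): AST {
    const applyToken = { text: '%apply', ln: 0, col: 0 };
    let result: AST = nodes[0];
    for (let i = 1; i < nodes.length; i++) {
        result = [applyToken, result, nodes[i]];
    }
    return result;
}

// For example, the pieces of `1(2)(3)` fold into [%apply, [%apply, 1, 2], 3].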

@@ -303,17 +301,15 @@ let expr2 = term;
 */
 let expr = orDo(expr1, expr2);
 let tokens = tk.tokenize("1");
+let tokens2 = tk.tokenize("1(2)");
+let tokens3 = tk.tokenize("1(2)(3)");
+let tokens4 = tk.tokenize("(3(2))*2+1");
 //let tokens = tk.tokenize("(4-(3/4))");
 //tk.tokenize(argv[2]);
-let tokensFiltered = tokens.filter((x) => {
+let tokensFiltered = tokens4.filter((x) => {
     return (x.type != tk.TokenType.NL
         && x.type != tk.TokenType.SP);
 });
-let wrappedTokens = tk.toSome({
-    matched: [],
-    remained: tokensFiltered,
-    ast: []
-});
 let beta = expr({
     matched: [],
     remained: tokensFiltered,

src/index.ts (16 changed lines)

@@ -198,6 +198,7 @@ export function matchAny(m: TokenMatcheePair): tk.Maybe<TokenMatcheePair> {
 }
 
 /**
+ * Danger : Maybe it's not enough to work.
  * @description repeating matching function `f`
  * zero or more times, like the asterisk `*` in regex `f*` .
  * @param f : the function to be repeated 0+ times.
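
The doc comment amended above describes a repetition combinator that, like the regex asterisk in `f*`, applies a matcher zero or more times; the new `Danger` note flags that the current version may not be robust. A rough standalone sketch of that idea (the `Maybe` shape and the `zeroOrMore` name are assumptions for illustration, not this repository's implementation):

type Maybe<T> = { _tag: "Some"; value: T } | { _tag: "None" };

// Apply `f` repeatedly until it fails; zero successful applications still
// counts as a match. `f` must make progress on success, otherwise this loops.
function zeroOrMore<T>(f: (x: T) => Maybe<T>): (x: T) => Maybe<T> {
    return (x: T) => {
        let current = x;
        while (true) {
            const next = f(current);
            if (next._tag === "None") {
                return { _tag: "Some", value: current };
            }
            current = next.value;
        }
    };
}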

@@ -277,7 +278,9 @@ let single = orDo(single1, single2);
 */
 
 
-/** fac = single ["(" single ")"]? | single */
+/** fac = single ["(" single ")"]? | single
+ * Issue1 to be fixed.
+ */
 let fac1Appliee = circumfix((x : TokenMatcheePair) => thenDo(thenDo(thenDo(tk.toSome(x), tLParen), tInt), tRParen), "fac1");
 let fac1 = (x : TokenMatcheePair) =>
 {

@@ -351,19 +354,18 @@ let expr = orDo(expr1, expr2);
 
 
 let tokens = tk.tokenize("1");
+let tokens2 = tk.tokenize("1(2)");
+let tokens3 = tk.tokenize("1(2)(3)");
+let tokens4 = tk.tokenize("(3(2))*2+1");
 
 //let tokens = tk.tokenize("(4-(3/4))");
 //tk.tokenize(argv[2]);
 
-let tokensFiltered = tokens.filter(
+let tokensFiltered = tokens4.filter(
     (x)=>{return (x.type != tk.TokenType.NL
                 && x.type != tk.TokenType.SP)});
 
-let wrappedTokens : tk.Maybe<TokenMatcheePair> =
-    tk.toSome({
-        matched : [] ,
-        remained : tokensFiltered,
-        ast : []});
-
 let beta = expr({
     matched : [] ,