add ocaml EoC ast viewer

This commit is contained in:
Tan, Kian-ting 2024-03-25 00:32:19 +08:00
parent c2cab771be
commit 1ea8dfd995
39 changed files with 767 additions and 13673 deletions

View file

@ -1,22 +0,0 @@
Apache license for Zephyr libc implementations (zephyr-string.c),
emmalloc.cpp (from emscripten project) and MIT for rest of the project
Copyright (c) 2019 Ebrahim Byagowi
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

Binary file not shown.

View file

@ -1,26 +0,0 @@
# uann
another personal draft of a language and its compiler, forked from Clo
website: https://kianting.info/wiki/w/Project:Uann
License: MIT
## changing journal
- 20230904 建立 thenDo、matchRange的函數、refactor harfbuzzjs 以及libpdf 等測試界面
- 20230905-06: 建立 : `toSome`, initial of basic tokenizer (`tokenize`),
`matchAny`, `notDo`, `orDo`, `zeroOrMoreDo`, `zeroOrOnceDo`
- 20230905-07:強化`tokenize`, 加強功能,加`Token`界面。
- 20230907-08:強化`tokenize`。
- 20230910 : add basic parser `CONST` rule, and add the grammar rule.
- 20230914-15: 追加一寡 tokenizer ê 功能。
- 20230918: 重新tuì下kàu頂起做parser. add rule
- 20230921-22:add rule, report issue
- 20230925-26: 試驗án-tsuánn解決[issue1](https://kianting.info/pipermail/clo_kianting.info/2023-September/000004.html), iáu-buē成功。
- 凡勢用?
```
FuncApp ::= Single FuncAppAux | Single
FuncAppAux ::= FuncCallee FuncAppAux
FuncCallee ::= "(" ")" | "(" ARGS ")"
ARGS = SINGLE "," ARGS | SINGLE
```
- 20230928basically fix `issue1`。其他ê物件猶著做。
- 20230929add multi args parsing for `callee`.
- 分ue改名做uann

Binary file not shown.

19
ocaml_yacc/Makefile Normal file
View file

@ -0,0 +1,19 @@
# Build the "calc" bytecode executable from the ocamllex/ocamlyacc sources.
# ocamlc links bytecode units in dependency order, so ast comes first and
# calc last (ast -> parser -> lexer -> calc).
calc: ast.cmo lexer.cmo parser.cmo calc.cmo
	ocamlc -o calc ast.cmo parser.cmo lexer.cmo calc.cmo

# lexer.ml opens Parser, so parser.cmi must exist before compiling it.
lexer.cmo: lexer.ml parser.cmo
	ocamlc -c lexer.ml

# parser.ml's semantic actions build Ast.ast values.
parser.cmo: parser.ml ast.cmo
	ocamlc -c parser.ml

# calc.ml uses Ast, Parser and Lexer.
calc.cmo: calc.ml parser.cmo ast.cmo
	ocamlc -c calc.ml

# Compiling the generated interface produces parser.cmi, which parser.ml needs.
parser.ml: parser.mli
	ocamlc -c parser.mli

lexer.ml: lexer.mll
	ocamllex lexer.mll

# ocamlyacc emits both parser.ml and parser.mli; the interface mentions
# Ast.ast, so ast.cmi must already be built.
parser.mli: parser.mly ast.cmo
	ocamlyacc parser.mly

ast.cmo: ast.ml
	ocamlc -c ast.ml

# Remove only generated files; "*.mli" was too broad and would delete any
# hand-written interface added later.
clean:
	rm -rf *.cmo *.cmi parser.ml parser.mli lexer.ml calc

20
ocaml_yacc/Makefile~ Normal file
View file

@ -0,0 +1,20 @@
# Editor backup of an earlier Makefile draft.
# Fixed: compilation must use the batch compiler "ocamlc"; "ocaml -c" invokes
# the toplevel, which does not compile files.
calc: lexer.cmo parser.cmo calc.cmo
	ocamlc -o calc lexer.cmo parser.cmo calc.cmo

# lexer.ml opens Parser, so parser.cmi must be built first.
lexer.cmo: lexer.ml parser.cmo
	ocamlc -c lexer.ml

parser.cmo: parser.ml
	ocamlc -c parser.ml

calc.cmo: calc.ml
	ocamlc -c calc.ml

# Compiling the generated interface produces parser.cmi.
parser.ml: parser.mli
	ocamlc -c parser.mli

lexer.ml: lexer.mll
	ocamllex lexer.mll

parser.mli: parser.mly
	ocamlyacc parser.mly

BIN
ocaml_yacc/ast.cmi Normal file

Binary file not shown.

BIN
ocaml_yacc/ast.cmo Normal file

Binary file not shown.

1
ocaml_yacc/ast.ml Normal file
View file

@ -0,0 +1 @@
(* S-expression-style AST shared by the parser and printer:
   [Leaf] holds an identifier/operator name, [Node] an ordered list of
   children, and [Int] an integer literal. *)
type ast = Leaf of string | Node of ast list | Int of int

BIN
ocaml_yacc/calc Executable file

Binary file not shown.

BIN
ocaml_yacc/calc.cmi Normal file

Binary file not shown.

BIN
ocaml_yacc/calc.cmo Normal file

Binary file not shown.

17
ocaml_yacc/calc.ml Normal file
View file

@ -0,0 +1,17 @@
(* File calc.ml
   Driver: read from stdin, parse each input into an [Ast.ast] and print
   it in bracketed S-expression form, until the lexer signals end-of-file. *)

(* [ast_to_string ast] renders an AST, e.g.
   Node [Leaf "+"; Int 1; Int 2]  ->  "[+ 1 2]". *)
let rec ast_to_string ast = match ast with
  | Ast.Leaf s -> s
  (* Bug fix: the [Int] case was missing, so printing any integer literal
     raised [Match_failure] at runtime (the parser builds [Ast.Int] for
     every INT token). *)
  | Ast.Int i -> string_of_int i
  | Ast.Node ls -> "[" ^ String.concat " " (List.map ast_to_string ls) ^ "]"
;;

(* [let ()] instead of [let _] asserts the body really has type unit. *)
let () =
  try
    let lexbuf = Lexing.from_channel stdin in
    while true do
      let result = Parser.main Lexer.token lexbuf in
      Printf.printf "%s" (ast_to_string result); print_newline (); flush stdout
    done
  with Lexer.Eof ->
    exit 0

BIN
ocaml_yacc/lexer.cmi Normal file

Binary file not shown.

BIN
ocaml_yacc/lexer.cmo Normal file

Binary file not shown.

281
ocaml_yacc/lexer.ml Normal file
View file

@ -0,0 +1,281 @@
# 2 "lexer.mll"
open Parser(* The type token is defined in parser.mli *)
exception Eof
# 7 "lexer.ml"
let __ocaml_lex_tables = {
Lexing.lex_base =
"\000\000\240\255\000\000\244\255\245\255\246\255\247\255\001\000\
\249\255\075\000\085\000\160\000\254\255\255\255\235\000\054\001\
\243\255\242\255";
Lexing.lex_backtrk =
"\255\255\255\255\014\000\255\255\255\255\255\255\255\255\007\000\
\255\255\005\000\003\000\003\000\255\255\255\255\004\000\002\000\
\255\255\255\255";
Lexing.lex_default =
"\255\255\000\000\255\255\000\000\000\000\000\000\000\000\255\255\
\000\000\255\255\255\255\255\255\000\000\000\000\255\255\255\255\
\000\000\000\000";
Lexing.lex_trans =
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\013\000\012\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\013\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\004\000\003\000\006\000\008\000\000\000\007\000\000\000\005\000\
\009\000\009\000\009\000\009\000\009\000\009\000\009\000\009\000\
\009\000\009\000\000\000\000\000\000\000\002\000\017\000\016\000\
\000\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\000\000\000\000\000\000\000\000\010\000\
\000\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\011\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\009\000\009\000\009\000\009\000\009\000\
\009\000\009\000\009\000\009\000\009\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\000\000\000\000\000\000\000\000\014\000\000\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\000\000\000\000\000\000\000\000\014\000\
\001\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\015\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\000\000\000\000\
\000\000\000\000\014\000\000\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\000\000\000\000\000\000\000\000\014\000\000\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000";
Lexing.lex_check =
"\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\000\000\000\000\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\000\000\000\000\000\000\255\255\000\000\255\255\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\255\255\255\255\255\255\000\000\002\000\007\000\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\255\255\255\255\255\255\255\255\000\000\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\009\000\009\000\009\000\009\000\009\000\
\009\000\009\000\009\000\009\000\009\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\255\255\255\255\255\255\255\255\010\000\255\255\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\011\000\011\000\011\000\011\000\011\000\011\000\011\000\011\000\
\011\000\011\000\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\011\000\011\000\011\000\011\000\011\000\011\000\011\000\
\011\000\011\000\011\000\011\000\011\000\011\000\011\000\011\000\
\011\000\011\000\011\000\011\000\011\000\011\000\011\000\011\000\
\011\000\011\000\011\000\255\255\255\255\255\255\255\255\011\000\
\000\000\011\000\011\000\011\000\011\000\011\000\011\000\011\000\
\011\000\011\000\011\000\011\000\011\000\011\000\011\000\011\000\
\011\000\011\000\011\000\011\000\011\000\011\000\011\000\011\000\
\011\000\011\000\011\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\255\255\255\255\
\255\255\255\255\014\000\255\255\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\
\014\000\014\000\014\000\014\000\014\000\014\000\015\000\015\000\
\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\015\000\
\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\
\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\
\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\
\015\000\255\255\255\255\255\255\255\255\015\000\255\255\015\000\
\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\
\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\
\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\
\015\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255";
Lexing.lex_base_code =
"";
Lexing.lex_backtrk_code =
"";
Lexing.lex_default_code =
"";
Lexing.lex_trans_code =
"";
Lexing.lex_check_code =
"";
Lexing.lex_code =
"";
}
let rec token lexbuf =
__ocaml_lex_token_rec lexbuf 0
and __ocaml_lex_token_rec lexbuf __ocaml_lex_state =
match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with
| 0 ->
# 7 "lexer.mll"
( token lexbuf )
# 186 "lexer.ml"
| 1 ->
# 8 "lexer.mll"
( EOL )
# 191 "lexer.ml"
| 2 ->
# 9 "lexer.mll"
( IN )
# 196 "lexer.ml"
| 3 ->
let
# 10 "lexer.mll"
lxm
# 202 "lexer.ml"
= Lexing.sub_lexeme_char lexbuf lexbuf.Lexing.lex_start_pos in
# 10 "lexer.mll"
( ID((String.make 1 lxm)) )
# 206 "lexer.ml"
| 4 ->
let
# 11 "lexer.mll"
lxm
# 212 "lexer.ml"
= Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos lexbuf.Lexing.lex_curr_pos in
# 11 "lexer.mll"
( ID(lxm) )
# 216 "lexer.ml"
| 5 ->
let
# 12 "lexer.mll"
lxm
# 222 "lexer.ml"
= Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos lexbuf.Lexing.lex_curr_pos in
# 12 "lexer.mll"
( INT(lxm) )
# 226 "lexer.ml"
| 6 ->
# 13 "lexer.mll"
( PLUS )
# 231 "lexer.ml"
| 7 ->
# 14 "lexer.mll"
( MINUS )
# 236 "lexer.ml"
| 8 ->
# 15 "lexer.mll"
( TIMES )
# 241 "lexer.ml"
| 9 ->
# 16 "lexer.mll"
( DIV )
# 246 "lexer.ml"
| 10 ->
# 17 "lexer.mll"
( LPAREN )
# 251 "lexer.ml"
| 11 ->
# 18 "lexer.mll"
( RPAREN )
# 256 "lexer.ml"
| 12 ->
# 19 "lexer.mll"
( IMPLY )
# 261 "lexer.ml"
| 13 ->
# 20 "lexer.mll"
( FUNC )
# 266 "lexer.ml"
| 14 ->
# 21 "lexer.mll"
( ASSIGN )
# 271 "lexer.ml"
| 15 ->
# 22 "lexer.mll"
( raise Eof )
# 276 "lexer.ml"
| __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf;
__ocaml_lex_token_rec lexbuf __ocaml_lex_state
;;

22
ocaml_yacc/lexer.mll Normal file
View file

@ -0,0 +1,22 @@
(* File lexer.mll *)
(* ocamllex specification for the calc tokenizer.  ocamllex always takes the
   longest match; ties are broken by rule order, which is why "in" appears
   before the general identifier rules. *)
{
open Parser(* The type token is defined in parser.mli *)
exception Eof
}
rule token = parse
[' ' '\t'] { token lexbuf } (* skip blanks *)
| ['\n' ]{ EOL } (* newline terminates a program (see main rule in parser.mly) *)
| ['i']['n'] { IN } (* keyword "in": separates a let-binding from its body *)
| ('_'|['a'-'z']|['A'-'Z']) as lxm { ID((String.make 1 lxm)) } (* one-character identifier *)
| ('_'|['a'-'z']|['A'-'Z'])(['0'-'9']|'_'|['a'-'z']|['A'-'Z'])+ as lxm { ID(lxm) } (* longer identifier; digits allowed after the first character *)
| ['0'-'9']+ as lxm { INT(lxm) } (* integer literal, kept as a string (parser converts) *)
| '+' { PLUS }
| '-' { MINUS }
| '*' { TIMES }
| '/' { DIV }
| '(' { LPAREN }
| ')' { RPAREN }
| ['-']['>'] { IMPLY } (* "->": arrow type; wins over '-' by longest match *)
| ['=']['>'] { FUNC } (* "=>": lambda introduction; wins over '=' by longest match *)
| '=' { ASSIGN }
| eof { raise Eof } (* caught by the driver in calc.ml to exit cleanly *)

BIN
ocaml_yacc/parser.cmi Normal file

Binary file not shown.

BIN
ocaml_yacc/parser.cmo Normal file

Binary file not shown.

327
ocaml_yacc/parser.ml Normal file
View file

@ -0,0 +1,327 @@
type token =
| INT of (string)
| ID of (string)
| PLUS
| MINUS
| TIMES
| DIV
| LPAREN
| RPAREN
| ASSIGN
| IN
| IMPLY
| FUNC
| EOL
open Parsing;;
let _ = parse_error;;
let yytransl_const = [|
259 (* PLUS *);
260 (* MINUS *);
261 (* TIMES *);
262 (* DIV *);
263 (* LPAREN *);
264 (* RPAREN *);
265 (* ASSIGN *);
266 (* IN *);
267 (* IMPLY *);
268 (* FUNC *);
269 (* EOL *);
0|]
let yytransl_block = [|
257 (* INT *);
258 (* ID *);
0|]
let yylhs = "\255\255\
\001\000\002\000\002\000\003\000\003\000\005\000\006\000\006\000\
\004\000\004\000\004\000\009\000\010\000\007\000\008\000\008\000\
\008\000\008\000\008\000\008\000\008\000\008\000\000\000"
let yylen = "\002\000\
\002\000\001\000\002\000\001\000\001\000\006\000\001\000\003\000\
\001\000\001\000\001\000\003\000\002\000\002\000\001\000\001\000\
\003\000\003\000\003\000\003\000\003\000\002\000\002\000"
let yydefred = "\000\000\
\000\000\000\000\015\000\000\000\000\000\000\000\023\000\000\000\
\002\000\000\000\005\000\000\000\009\000\010\000\011\000\000\000\
\000\000\000\000\000\000\001\000\003\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\013\000\017\000\000\000\
\000\000\000\000\000\000\000\000\007\000\000\000\012\000\000\000\
\000\000\006\000"
let yydgoto = "\002\000\
\007\000\008\000\009\000\026\000\011\000\018\000\013\000\014\000\
\015\000\016\000"
let yysindex = "\007\000\
\120\255\000\000\000\000\000\000\120\255\120\255\000\000\012\255\
\000\000\106\255\000\000\001\255\000\000\000\000\000\000\245\254\
\124\255\049\255\098\255\000\000\000\000\120\255\120\255\120\255\
\120\255\106\255\009\255\024\255\120\255\000\000\000\000\113\255\
\124\255\124\255\124\255\120\255\000\000\026\255\000\000\088\255\
\120\255\000\000"
let yyrindex = "\000\000\
\000\000\000\000\000\000\042\255\000\000\000\000\000\000\000\000\
\000\000\063\255\000\000\000\000\000\000\000\000\000\000\000\000\
\028\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\074\255\050\255\000\000\000\000\000\000\000\000\007\255\
\053\255\064\255\075\255\000\000\000\000\052\255\000\000\000\000\
\000\000\000\000"
let yygindex = "\000\000\
\000\000\000\000\254\255\255\255\000\000\001\000\000\000\000\000\
\000\000\000\000"
let yytablesize = 131
let yytable = "\010\000\
\029\000\012\000\027\000\017\000\019\000\021\000\010\000\001\000\
\012\000\018\000\018\000\028\000\003\000\004\000\018\000\005\000\
\018\000\036\000\006\000\018\000\032\000\033\000\034\000\035\000\
\020\000\037\000\039\000\010\000\038\000\012\000\022\000\022\000\
\022\000\022\000\040\000\022\000\028\000\022\000\042\000\010\000\
\022\000\012\000\016\000\007\000\016\000\016\000\016\000\016\000\
\016\000\016\000\030\000\016\000\007\000\008\000\016\000\019\000\
\019\000\022\000\022\000\028\000\019\000\013\000\019\000\000\000\
\000\000\019\000\020\000\020\000\020\000\020\000\004\000\020\000\
\004\000\020\000\000\000\004\000\020\000\021\000\021\000\021\000\
\021\000\014\000\021\000\014\000\021\000\000\000\014\000\021\000\
\003\000\004\000\022\000\023\000\024\000\025\000\006\000\000\000\
\000\000\041\000\003\000\004\000\022\000\023\000\024\000\025\000\
\006\000\031\000\003\000\004\000\022\000\023\000\024\000\025\000\
\006\000\003\000\004\000\000\000\000\000\024\000\025\000\006\000\
\003\000\004\000\000\000\005\000\003\000\004\000\006\000\000\000\
\000\000\000\000\006\000"
let yycheck = "\001\000\
\012\001\001\000\002\001\005\000\006\000\008\000\008\000\001\000\
\008\000\003\001\004\001\011\001\001\001\002\001\008\001\004\001\
\010\001\009\001\007\001\013\001\022\000\023\000\024\000\025\000\
\013\001\002\001\029\000\029\000\028\000\029\000\003\001\004\001\
\005\001\006\001\036\000\008\001\011\001\010\001\041\000\041\000\
\013\001\041\000\001\001\002\001\003\001\004\001\005\001\006\001\
\007\001\008\001\002\001\010\001\011\001\002\001\013\001\003\001\
\004\001\005\001\006\001\011\001\008\001\012\001\010\001\255\255\
\255\255\013\001\003\001\004\001\005\001\006\001\008\001\008\001\
\010\001\010\001\255\255\013\001\013\001\003\001\004\001\005\001\
\006\001\008\001\008\001\010\001\010\001\255\255\013\001\013\001\
\001\001\002\001\003\001\004\001\005\001\006\001\007\001\255\255\
\255\255\010\001\001\001\002\001\003\001\004\001\005\001\006\001\
\007\001\008\001\001\001\002\001\003\001\004\001\005\001\006\001\
\007\001\001\001\002\001\255\255\255\255\005\001\006\001\007\001\
\001\001\002\001\255\255\004\001\001\001\002\001\007\001\255\255\
\255\255\255\255\007\001"
let yynames_const = "\
PLUS\000\
MINUS\000\
TIMES\000\
DIV\000\
LPAREN\000\
RPAREN\000\
ASSIGN\000\
IN\000\
IMPLY\000\
FUNC\000\
EOL\000\
"
let yynames_block = "\
INT\000\
ID\000\
"
let yyact = [|
(fun _ -> failwith "parser")
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 1 : 'blocks) in
Obj.repr(
# 15 "parser.mly"
( _1 )
# 145 "parser.ml"
: Ast.ast))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'block) in
Obj.repr(
# 18 "parser.mly"
( _1 )
# 152 "parser.ml"
: 'blocks))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 1 : 'blocks) in
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'block) in
Obj.repr(
# 19 "parser.mly"
(match _1 with Ast.Node x -> Ast.Node (x @ [_2])
| Ast.Int x -> Ast.Node[_1; _2] | Ast.Leaf x -> Ast.Node [_1; _2])
# 161 "parser.ml"
: 'blocks))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in
Obj.repr(
# 24 "parser.mly"
(_1)
# 168 "parser.ml"
: 'block))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'let_bind) in
Obj.repr(
# 25 "parser.mly"
(_1)
# 175 "parser.ml"
: 'block))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 5 : 'typ) in
let _2 = (Parsing.peek_val __caml_parser_env 4 : string) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'block) in
Obj.repr(
# 29 "parser.mly"
(Ast.Node [Ast.Leaf "%let"; Ast.Node[_1; Ast.Leaf _2; _4]; _6])
# 185 "parser.ml"
: 'let_bind))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
Obj.repr(
# 33 "parser.mly"
(Ast.Leaf _1)
# 192 "parser.ml"
: 'typ))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typ) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
Obj.repr(
# 34 "parser.mly"
(Ast.Node [Ast.Leaf "->"; _1 ; _3])
# 200 "parser.ml"
: 'typ))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'app_expr) in
Obj.repr(
# 37 "parser.mly"
(_1)
# 207 "parser.ml"
: 'expr))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bin_expr) in
Obj.repr(
# 38 "parser.mly"
(_1)
# 214 "parser.ml"
: 'expr))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lam_expr) in
Obj.repr(
# 39 "parser.mly"
(_1)
# 221 "parser.ml"
: 'expr))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'arg) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'block) in
Obj.repr(
# 43 "parser.mly"
(Ast.Node [Ast.Leaf "%lambda"; _1 ; _3])
# 229 "parser.ml"
: 'lam_expr))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 1 : 'typ) in
let _2 = (Parsing.peek_val __caml_parser_env 0 : string) in
Obj.repr(
# 46 "parser.mly"
( Ast.Node [_1;Ast.Leaf _2] )
# 237 "parser.ml"
: 'arg))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in
Obj.repr(
# 50 "parser.mly"
( Ast.Node [ Ast.Leaf "%apply"; _1; _2] )
# 245 "parser.ml"
: 'app_expr))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
Obj.repr(
# 54 "parser.mly"
( Ast.Int (int_of_string _1))
# 252 "parser.ml"
: 'bin_expr))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
Obj.repr(
# 55 "parser.mly"
( Ast.Leaf _1 )
# 259 "parser.ml"
: 'bin_expr))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in
Obj.repr(
# 56 "parser.mly"
( _2 )
# 266 "parser.ml"
: 'bin_expr))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in
Obj.repr(
# 57 "parser.mly"
( Ast.Node [ Ast.Leaf "+"; _1; _3] )
# 274 "parser.ml"
: 'bin_expr))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in
Obj.repr(
# 58 "parser.mly"
( Ast.Node[ Ast.Leaf "-"; _1 ; _3] )
# 282 "parser.ml"
: 'bin_expr))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in
Obj.repr(
# 59 "parser.mly"
( Ast.Node[ Ast.Leaf "*"; _1 ;_3] )
# 290 "parser.ml"
: 'bin_expr))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in
Obj.repr(
# 60 "parser.mly"
( Ast.Node[ Ast.Leaf "/"; _1; _3] )
# 298 "parser.ml"
: 'bin_expr))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in
Obj.repr(
# 61 "parser.mly"
( Ast.Node[ Ast.Leaf "-" ; _2] )
# 305 "parser.ml"
: 'bin_expr))
(* Entry main *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
|]
let yytables =
{ Parsing.actions=yyact;
Parsing.transl_const=yytransl_const;
Parsing.transl_block=yytransl_block;
Parsing.lhs=yylhs;
Parsing.len=yylen;
Parsing.defred=yydefred;
Parsing.dgoto=yydgoto;
Parsing.sindex=yysindex;
Parsing.rindex=yyrindex;
Parsing.gindex=yygindex;
Parsing.tablesize=yytablesize;
Parsing.table=yytable;
Parsing.check=yycheck;
Parsing.error_function=parse_error;
Parsing.names_const=yynames_const;
Parsing.names_block=yynames_block }
let main (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
(Parsing.yyparse yytables 1 lexfun lexbuf : Ast.ast)

17
ocaml_yacc/parser.mli Normal file
View file

@ -0,0 +1,17 @@
type token =
| INT of (string)
| ID of (string)
| PLUS
| MINUS
| TIMES
| DIV
| LPAREN
| RPAREN
| ASSIGN
| IN
| IMPLY
| FUNC
| EOL
val main :
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> Ast.ast

63
ocaml_yacc/parser.mly Normal file
View file

@ -0,0 +1,63 @@
/* File parser.mly
   ocamlyacc grammar for the toy language: a program is a sequence of
   blocks ended by a newline, where a block is an expression or a typed
   let-binding.  Semantic actions build Ast.ast S-expressions tagged
   "%let", "%lambda", "%apply", "->". */
%token <string> INT
%token <string> ID
%token PLUS MINUS TIMES DIV
%token LPAREN RPAREN ASSIGN IN IMPLY FUNC
%token EOL
%right IMPLY FUNC
%left PLUS MINUS/* lowest precedence */
%left TIMES DIV /* medium precedence */
%nonassoc UMINUS/* highest precedence */
%start main /* the entry point */
%type <Ast.ast> main
%%
main:
blocks EOL{ $1 }
;
/* Left-recursive list: each new block is appended to the running Node
   list.  The Int/Leaf cases wrap a non-Node first block into a Node. */
blocks:
block{ $1 }
|blocks block{match $1 with Ast.Node x -> Ast.Node (x @ [$2])
| Ast.Int x -> Ast.Node[$1; $2] | Ast.Leaf x -> Ast.Node [$1; $2]} /* It Must not be entered */
;
block:
expr {$1}
| let_bind {$1}
;
/* "typ id = expr in block"  =>  (%let (typ id expr) block) */
let_bind:
| typ ID ASSIGN expr IN block {Ast.Node [Ast.Leaf "%let"; Ast.Node[$1; Ast.Leaf $2; $4]; $6]}
;
/* A type is an identifier or an arrow type t1 -> t2 (IMPLY is %right). */
typ:
| ID {Ast.Leaf $1}
| typ IMPLY typ {Ast.Node [Ast.Leaf "->"; $1 ; $3]}
expr:
app_expr {$1}
| bin_expr {$1}
| lam_expr{$1}
;
/* "arg => block"  =>  (%lambda arg block) */
lam_expr:
arg FUNC block {Ast.Node [Ast.Leaf "%lambda"; $1 ; $3]}
;
/* A lambda argument is a type followed by the bound name. */
arg:
typ ID { Ast.Node [$1;Ast.Leaf $2] }
;
/* Juxtaposition is application: "f x"  =>  (%apply f x). */
app_expr:
expr expr { Ast.Node [ Ast.Leaf "%apply"; $1; $2] }
;
; /* NOTE(review): stray ";" — ocamlyacc accepted it (parser.ml was generated), but it can be dropped */
bin_expr:
INT { Ast.Int (int_of_string $1)}
| ID { Ast.Leaf $1 }
| LPAREN expr RPAREN { $2 }
| expr PLUS expr { Ast.Node [ Ast.Leaf "+"; $1; $3] }
| expr MINUS expr { Ast.Node[ Ast.Leaf "-"; $1 ; $3] }
| expr TIMES expr { Ast.Node[ Ast.Leaf "*"; $1 ;$3] }
| expr DIV expr { Ast.Node[ Ast.Leaf "/"; $1; $3] }
| MINUS expr %prec UMINUS { Ast.Node[ Ast.Leaf "-" ; $2] }
;

11481
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -1,45 +0,0 @@
{
"name": "clo",
"version": "0.0.1",
"description": "a little typesetting engine in TypeScript",
"main": "src/index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "git+https://github.com/Yoxem/clo.git"
},
"keywords": [
"typesetting"
],
"author": "Tan Kian-ting",
"license": "MIT",
"bugs": {
"url": "https://github.com/Yoxem/clo/issues"
},
"homepage": "https://github.com/Yoxem/clo#readme",
"devDependencies": {
"@types/chai": "^4.3.5",
"@types/mocha": "^10.0.1",
"@types/node": "^20.5.7",
"@typescript-eslint/eslint-plugin": "^6.5.0",
"chai": "^4.3.8",
"eslint": "^8.48.0",
"eslint-config-standard-with-typescript": "^39.0.0",
"eslint-plugin-import": "^2.28.1",
"eslint-plugin-n": "^16.0.2",
"eslint-plugin-promise": "^6.1.1",
"mocha": "^10.2.0",
"nyc": "^15.1.0",
"ts-node": "^10.9.1",
"tslint": "^6.1.3",
"typescript": "^5.2.2"
},
"dependencies": {
"@pdf-lib/fontkit": "^1.1.1",
"harfbuzzjs": "^0.3.3",
"npx": "^3.0.0",
"pdf-lib": "^1.17.1"
}
}

View file

@ -1,8 +0,0 @@
single = "(" expr ")" | int
args = single , args | single
callees = "(" args ")" | "(" ")"
facAux = callees facAux | callees
fac = single facAux | single
term = fac | fac (MUL | DIV) fac
expr = term (ADD | SUB) term

View file

@ -1,65 +0,0 @@
let sqrtSum = (int x, int y) -> int {
let x2 = x * x;
let y2 = y * y;
return x2+y2;
}
let pi = 3.14159;
let _2p = (intToFLo 2) *. pi;
let c = if (2 == 2) then 2 else 3;
let aStr = "hello";
let rec fac = (int n)-> int {
if n == 0 then 1 else (fac (n - 1));};
type student = Student {int id, string name};
let alice = Student {id=20, name="Alice"};
alice.name = "Siobhan";
let customAnd = (@ 'a has age) ('a x, 'a y) => {'a > 'b};
type list 'a = (Cons 'a (List 'a)) | Nil;
import("alifbata.clo"); # 匯入檔案 alifbata #
t of import :== string -> Option string string
Error("string") | Ok("import aaa")
# 型別構造子統一大寫,型別小寫 #
PROG ::= (STMT | COMMENT | STMT_COMMENT)*
COMMENT ::= # COMMENT_INNER #
COMMENT_INNER ::= [^#]+
STMT ::= (TYPE_DEF | VAR_DEF | SET | EXPR ) ";"
TYPE_DEF ::= type ID "=" UNION
| type ID TYPE_VARS+ "=" UNION
TYPE_VARS = ' ID
UNION ::= (REC "|" UNION) | REC
REC ::= ID ( TYPES )
TYPES ::= TYPE+
TYPE ::= ID
EXPR ::= if SUB_EXPR then IF_BRANCH else IF_BRANCH | SUB_EXPR
IF_BRANCH ::= EXPR | { BLOCK }
SUB_EXPR ::= COMPAREE| COMPAREE (LE|GE|LT|GT|EQ|NE) EXPR
COMPAREE ::= FAC| (FAC(ADD | SUB) FAC)
FAC ::= APPLY | (APPLIER (MUL | DIV) APPLY)
APPLY ::= "(" ID APPLYEE* ")" | APPLYEE
APPLYEE ::= REF | CONST | EXPR | FUNC
* CONST ::= INT | STR | FLO | BOOL
BOOL ::= "true" | "false"
FUNC ::= FUNC_OPTION? ( ARGS? ) -> TYPE {BLOCK}
BLOCK ::= PROG (return ID |noReturn) ;
REF ::= VAR "." ID | VAR
VAR ::= ID
VAR_DEF ::= "let" VAR "=" EXPR
SET ::= VAR "=" EXPR
FUNC_OPTION ::= ( @ TYPE_HAS (, TYPE_HAS)* )
TYPE_HAS ::= TYPE_VAR "has" ID

View file

@ -1,45 +0,0 @@
"use strict";
// Compiled CommonJS output of harfbuzzTest.ts.
Object.defineProperty(exports, "__esModule", { value: true });
exports.harfbuzzTest = void 0;
var hb = require('harfbuzzjs/hbjs');
var fs = require('fs');
// some part of code derived from that by Ebrahim Byagowi,
// under MIT License
// Shapes `inputString` with the HarfBuzz WASM build using FreeSerif and logs
// one tab-separated line per glyph: glyph id, x-advance, x/y displacement.
// Asynchronous: returns immediately; all work happens inside the .then().
function harfbuzzTest(inputString) {
WebAssembly.instantiate(fs.readFileSync(__dirname + "/../3rdparty/harfbuzzjs/hb.wasm"))
.then(function (wsm) {
hb = hb(wsm.instance);
// NOTE(review): hard-coded absolute font path — fails on systems without
// the freefont package installed at this location.
let fontdata = fs.readFileSync("/usr/share/fonts/truetype/freefont/FreeSerif.ttf");
//hbjs(fontdata.instance);
//console.log(a);
var blob = hb.createBlob(fontdata); // Load the font data into something Harfbuzz can use
var face = hb.createFace(blob, 0); // Select the first font in the file (there's normally only one!)
var font = hb.createFont(face); // Create a Harfbuzz font object from the face
var buffer = hb.createBuffer(); // Make a buffer to hold some text
buffer.addText(inputString); // Fill it with some stuff
buffer.guessSegmentProperties(); // Set script, language and direction
hb.shape(font, buffer); // Shape the text, determining glyph IDs and positions
var output = buffer.json();
// Enumerate the glyphs
console.log("id\tax\tdx\tdy");
var xCursor = 0;
var yCursor = 0;
for (var glyph of output) {
var glyphId = glyph.g;
var xAdvance = glyph.ax;
var xDisplacement = glyph.dx;
var yDisplacement = glyph.dy;
var svgPath = font.glyphToPath(glyphId); // currently unused (drawing code is commented out below)
console.log(glyphId + "\t" + xAdvance + "\t" + xDisplacement + "\t" + yDisplacement);
// You need to supply this bit
//drawAGlyph(svgPath, xCursor + xDisplacement, yDisplacement);
// xCursor += xAdvance;
}
// Release memory
buffer.destroy();
font.destroy();
face.destroy();
blob.destroy();
});
}
exports.harfbuzzTest = harfbuzzTest;

View file

@ -1,57 +0,0 @@
var hb = require('harfbuzzjs/hbjs');
var fs = require('fs');
// some part of code derived from that by Ebrahim Byagowi,
// under MIT License
export function harfbuzzTest(inputString: string){
WebAssembly.instantiate(fs.readFileSync(__dirname+"/../3rdparty/harfbuzzjs/hb.wasm"))
.then(function (wsm) {
hb = hb(wsm.instance);
let fontdata = fs.readFileSync("/usr/share/fonts/truetype/freefont/FreeSerif.ttf");
//hbjs(fontdata.instance);
//console.log(a);
var blob = hb.createBlob(fontdata); // Load the font data into something Harfbuzz can use
var face = hb.createFace(blob, 0); // Select the first font in the file (there's normally only one!)
var font = hb.createFont(face); // Create a Harfbuzz font object from the face
var buffer = hb.createBuffer(); // Make a buffer to hold some text
buffer.addText(inputString); // Fill it with some stuff
buffer.guessSegmentProperties(); // Set script, language and direction
hb.shape(font, buffer); // Shape the text, determining glyph IDs and positions
var output : Array<{g : number,
ax : number,
dx : number,
dy : number}> = buffer.json();
// Enumerate the glyphs
console.log("id\tax\tdx\tdy");
var xCursor = 0;
var yCursor = 0;
for (var glyph of output) {
var glyphId = glyph.g;
var xAdvance = glyph.ax;
var xDisplacement = glyph.dx;
var yDisplacement = glyph.dy;
var svgPath = font.glyphToPath(glyphId);
console.log(glyphId + "\t" + xAdvance + "\t" + xDisplacement + "\t" + yDisplacement);
// You need to supply this bit
//drawAGlyph(svgPath, xCursor + xDisplacement, yDisplacement);
// xCursor += xAdvance;
}
// Release memory
buffer.destroy();
font.destroy();
face.destroy();
blob.destroy();
});
}

1
src/index.d.ts vendored
View file

@ -1 +0,0 @@
export {};

View file

@ -1,404 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.OnceOrMoreDo = exports.matchAny = exports.m1TType = exports.tkTreeToSExp = void 0;
var fs = require('fs');
const node_process_1 = require("node:process");
const tk = __importStar(require("./tokenize.js"));
const util = __importStar(require("util"));
/**
* debug reprensenting
*/
let repr = (x) => { return util.inspect(x, { depth: null }); };
/**
* concated 2 `tkTree`s
* @param x the array to be concated
* @param y the item or array to ve concated
* @returns concated tkTree array, or thrown error if can't be concated.
*/
// Append `y` (an item or an array) onto the tkTree array `x`.
// Throws when `x` is not an array, since only array nodes can grow.
function concat(x, y) {
    if (!Array.isArray(x)) {
        throw new Error("the tkTree can't be concated, because it's not an array.");
    }
    return x.concat(y);
}
/**
 * Return a shallow copy of a portion of the tkTree array `x`.
 * @param x the tkTree; must be an array node.
 * @param index optional start index (inclusive), as in Array.prototype.slice.
 * @param end optional end index (exclusive).
 * @returns the sliced array.
 * @throws Error when `x` is not an array.
 */
function slice(x, index, end) {
    if (Array.isArray(x)) {
        return x.slice(index, end);
    }
    else {
        // Fixed: the message previously said "concated" (copy-paste from concat).
        throw new Error("the tkTree can't be sliced, because it's not an array.");
    }
}
/**
* convert a `tkTree` AST to S-expr string
* @param t the `tkTree`
* @returns S-expr String
*/
// Render a tkTree AST as an S-expression string:
// arrays become "(child child ...)", tokens contribute their `text`,
// and a hole (undefined) prints as "%undefined".
function tkTreeToSExp(t) {
    if (Array.isArray(t)) {
        return "(" + t.map((child) => tkTreeToSExp(child)).join(" ") + ")";
    }
    if (t === undefined) {
        return "%undefined";
    }
    return t.text;
}
exports.tkTreeToSExp = tkTreeToSExp;
/**
* @description
* match one token type.
*
* it returns a function which test if the type of first token of the `remained` part of
* the argument of the function is `typ` , if it's true, update the `TokenMatcheePair` wrapped
* in `Some`. Otherwise, it returns `None`.
* * @param typ : the type to be test.
* @returns the updated `TokenMatcheePair` wrapped in `Some(x)` or `None`.
*/
// Build a matcher that consumes exactly one token of type `typ`.
// The returned function takes a TokenMatcheePair and yields it updated,
// wrapped in Some — or None when the stream is empty or the type differs.
function m1TType(typ) {
    return (m) => {
        if (m.remained.length == 0) {
            return { _tag: "None" };
        }
        // first token of the remaining stream
        const head = m.remained[0];
        if (head.type != typ) {
            return { _tag: "None" };
        }
        return {
            _tag: "Some", value: {
                matched: m.matched.concat(head),
                remained: m.remained.slice(1),
                ast: [head],
            }
        };
    };
}
exports.m1TType = m1TType;
;
/**
* type int
*/
let tInt = m1TType(tk.TokenType.INT);
let tId = m1TType(tk.TokenType.ID);
let tAdd = m1TType(tk.TokenType.I_ADD);
let tSub = m1TType(tk.TokenType.I_SUB);
let tMul = m1TType(tk.TokenType.I_MUL);
let tDiv = m1TType(tk.TokenType.I_DIV);
let tLParen = m1TType(tk.TokenType.L_PAREN);
let tRParen = m1TType(tk.TokenType.R_PAREN);
let tComma = m1TType(tk.TokenType.COMMA);
let toSome = tk.toSome;
node_process_1.argv.forEach((val, index) => {
console.log(`${index}=${val}`);
});
/**
* like `m ==> f` in ocaml
* @param m matchee wrapped
* @param f matching function
* @returns wrapped result
*/
// Monadic bind for TokenMatcheePair (like `m ==> f` in OCaml):
// propagate None untouched; otherwise apply `f` and, on success,
// prepend the previous ast onto the new result's ast.
function thenDo(m, f) {
    if (m._tag == "None") {
        return m;
    }
    const stepped = f(m.value);
    if (stepped._tag == "Some") {
        stepped.value.ast = concat(m.value.ast, stepped.value.ast);
    }
    return stepped;
}
/**
* like `f1 | f2` in regex
* @param f1 the first tried function
* @param f2 the second tried function
* @returns wrapped result
*/
// Ordered alternation, like `f1 | f2` in a regex:
// try `f1` first; only fall back to `f2` when `f1` returns None.
function orDo(f1, f2) {
    return (x) => {
        const first = f1(x);
        return first._tag == "Some" ? first : f2(x);
    };
}
/**
*
* @param m : the `MatcheePair` to be consumed.
* @returns if the length of `m.remained` >= 1; consumes the matchee by 1 token
* and wraps it in `Some`,
* otherwise, returns `None`.
*/
// Consume any single token from the stream (regex `.`):
// None when nothing remains, otherwise shift one token into `matched`
// and expose it as the step's ast.
function matchAny(m) {
    if (m.remained.length < 1) {
        return { _tag: "None" };
    }
    const head = m.remained[0];
    return {
        _tag: "Some", value: {
            matched: m.matched.concat(head),
            remained: m.remained.slice(1),
            ast: [head],
        }
    };
}
exports.matchAny = matchAny;
/**
* Danger : Maybe it's not enough to work.
* @description repeating matching function `f`
* zero or more times, like the asterisk `*` in regex `f*` .
* @param f : the function to be repeated 0+ times.
* @returns:the combined function
*/
// Repeat matcher `f` as long as it succeeds (regex `f+`).
// `counter` counts successful applications of `f`: it starts at -1 and is
// incremented once per loop pass, including the final failing pass, so after
// the loop it equals the number of successes.
function OnceOrMoreDo(f) {
    return (x) => {
        var wrappedOldX = { _tag: "Some", value: x };
        var wrappedNewX = wrappedOldX;
        var counter = -1;
        while (wrappedNewX._tag != "None") {
            wrappedOldX = wrappedNewX;
            wrappedNewX = thenDo(wrappedOldX, f);
            counter += 1;
        }
        ;
        // zero successes -> the whole `f+` fails
        if (counter <= 0) {
            return { _tag: "None" };
        }
        // keep only the ast fragments contributed by the repeated matches
        let ast = wrappedOldX.value.ast;
        wrappedOldX.value.ast = ast.slice(ast.length - counter);
        // debug trace left in by the author
        console.log(repr(wrappedOldX.value.ast));
        return wrappedOldX;
    };
}
exports.OnceOrMoreDo = OnceOrMoreDo;
/**
* aux function for midfix operator
* @param f function
* @param signal the rule name
* @returns
*/
let midfix = (f, signal) => (x) => {
var a = f(x);
if (a._tag == "Some") {
let ast_tail = slice(a.value.ast, a.value.ast.length - 3);
let new_ast = [ast_tail];
a.value.ast = new_ast;
// console.log("+"+signal+"+"+repr(a));
}
return a;
};
let circumfix = (f, signal) => (x) => {
var a = f(x);
if (a._tag == "Some") {
console.log("$$$" + repr(a.value.ast));
let inner = a.value.ast[a.value.ast.length - 2];
var ast_middle;
// the list should not be (%list) (%apply) (%lambda) etc.
if (Array.isArray(inner)) {
if ('text' in inner[0] && (inner[0].text.slice(0, 1) != "%")) {
ast_middle = inner;
}
else {
ast_middle = [inner];
}
}
else {
ast_middle = [inner];
}
let new_ast = [ast_middle];
a.value.ast = new_ast;
}
return a;
};
/** single1 = tInt | "(" expr ")"*/
let single1 = circumfix((x) => thenDo(thenDo(thenDo(toSome(x), tLParen), expr), tRParen), "fac1");
let single2 = tInt;
let single = orDo(single1, single2);
/** args = single "," args | single */
let args1 = (x) => {
var ret = thenDo(thenDo(thenDo(toSome(x), single), tComma), args);
if (ret._tag == "Some") {
let retLength = ret.value.ast.length;
ret.value.ast = [[ret.value.ast[retLength - 3]].concat(ret.value.ast[retLength - 1])];
console.log("$$" + repr(ret.value.ast));
}
return ret;
};
let args2 = single;
let args = orDo(args1, args2);
/** callees = "(" args ")" | "(" ")" */
let callees1 = circumfix((x) => thenDo(thenDo(thenDo(toSome(x), tLParen), args), tRParen), "callees1");
let callees2 = (x) => {
let ret = thenDo(thenDo(toSome(x), tLParen), tRParen);
if (ret._tag == "Some") {
let new_ast = [[]];
ret.value.ast = new_ast;
}
return ret;
};
let callees = orDo(callees1, callees2);
/** %apply R combinating token */
let applyToken = {
text: "%apply",
type: tk.TokenType.ID,
col: 0,
ln: 0,
};
/** facAux = callees facAux | callees */
let facAux1 = (x) => {
var ret = thenDo(thenDo(toSome(x), callees), facAux);
if (ret._tag == "Some") {
console.log("1232345" + repr(tkTreeToSExp(ret.value.ast[ret.value.ast.length - 1])));
let last1 = ret.value.ast[ret.value.ast.length - 1];
let last2 = ret.value.ast[ret.value.ast.length - 2];
let b = [applyToken];
ret.value.ast = [b.concat([last2, last1])];
console.log("11111" + repr(tkTreeToSExp(ret.value.ast)));
}
;
return ret;
};
let facAux2 = callees;
let facAux = orDo(facAux1, facAux2);
/** fac = single facAux | single
* Issue1 to be fixed.
*/
let fac1 = (x) => {
var ret = thenDo(thenDo(toSome(x), single), facAux);
if (ret._tag == "Some") {
console.log("777" + repr(tkTreeToSExp(ret.value.ast)));
ret.value.ast = [applyToken, ret.value.ast[ret.value.ast.length - 2],
ret.value.ast[ret.value.ast.length - 1]];
ret.value.ast;
rearrangeTree(ret.value.ast);
console.log("888" + repr(tkTreeToSExp(ret.value.ast)));
}
return ret;
};
let fac2 = single;
let fac = orDo(fac1, fac2);
/**
* rearrangeTree : for applyToken subtree from right-combination to
* left-combination
* @input x a ast
* @return another ast
*/
// Rotate %apply subtrees in place from right-associated to left-associated
// form: [%apply a [%apply b c]] becomes [%apply [%apply a b] c].
// Mutates `x` and returns it; returns undefined (implicitly) when `x` is
// undefined. Note the `==` checks against `applyToken` are reference
// equality with the shared module-level token object, not structural.
function rearrangeTree(x) {
    if (x !== undefined) {
        // recurse into children first (index 0 is the operator slot)
        for (var i = 1; i < x.length; i++) {
            rearrangeTree(x[i]);
        }
        // debug trace left in by the author
        console.log("@@" + repr(x[0]));
        if (x[0] == applyToken) {
            if (Array.isArray(x[2]) && x[2][0] == applyToken) {
                // right child is itself an apply: rotate it leftwards
                let rl = rearrangeTree(x[2][1]);
                let rr = rearrangeTree(x[2][2]);
                let l = rearrangeTree(x[1]);
                x[0] = applyToken;
                x[1] = [applyToken, l, rl];
                x[2] = rr;
                console.log("@@==" + repr(x));
                return x;
            }
            else {
                // plain apply: just normalize both children
                x[0] = applyToken;
                x[1] = rearrangeTree(x[1]);
                x[2] = rearrangeTree(x[2]);
                console.log("@@==" + repr(x));
                return x;
            }
        }
        return x;
    }
}
/**
*
* term1 = fac (MUL | DIV) fac
*/
let term1 = midfix((x) => thenDo(thenDo(thenDo(toSome(x), fac), orDo(tMul, tDiv)), fac), "term1");
/**
*
* term2 = int MUL int
*/
let term2 = fac;
/**
* term = term1 | term2
*/
let term = orDo(term1, term2);
/**
*
* expr1 = term ADD term
*/
let expr1 = midfix((x) => thenDo(thenDo(thenDo(toSome(x), term), orDo(tAdd, tSub)), term), "expr1");
/**
* expr2 = term
*/
let expr2 = term;
/**
* expr = expr1 | expr2
*/
let expr = orDo(expr1, expr2);
let tokens = tk.tokenize("1");
let tokens2 = tk.tokenize("1(2)");
let tokens3 = tk.tokenize("1(2)(3)");
let tokens4 = tk.tokenize("2()(4)(5,6)(7,8,9,10)");
//let tokens = tk.tokenize("(4-(3/4))");
//tk.tokenize(argv[2]);
let tokensFiltered = tokens4.filter((x) => {
return (x.type != tk.TokenType.NL
&& x.type != tk.TokenType.SP);
});
let beta = expr({
matched: [],
remained: tokensFiltered,
ast: []
});
if (beta._tag == "Some") {
beta.value.ast = rearrangeTree(beta.value.ast);
console.log(tkTreeToSExp(beta.value.ast));
}
console.log("RESULT=" + repr(beta));

View file

@ -1,476 +0,0 @@
var fs = require('fs');
import { argv, resourceUsage } from 'node:process';
import * as tk from './tokenize.js';
import * as util from 'util';
import { drawEllipsePath, reduceRotation } from 'pdf-lib';
import { isAnyArrayBuffer, isTypedArray } from 'node:util/types';
import { error } from 'node:console';
import { isUndefined } from 'node:util';
/**
* debug reprensenting
*/
let repr = (x : any)=>{return util.inspect(x, {depth: null})};
/**
* token tree type.
*/
type tkTree = tkTree[] | tk.Token
/**
* concated 2 `tkTree`s
* @param x the array to be concated
* @param y the item or array to ve concated
* @returns concated tkTree array, or thrown error if can't be concated.
*/
/**
 * Append `y` (an item or an array) onto the tkTree array `x`.
 * @param x the array to append to; must be an array node.
 * @param y the item or array to append.
 * @returns the concatenated array; throws when `x` is not an array.
 */
function concat(x: tkTree, y: tkTree): tkTree[] {
    if (!Array.isArray(x)) {
        throw new Error("the tkTree can't be concated, because it's not an array.");
    }
    return x.concat(y);
}
/**
 * Return a shallow copy of a portion of the tkTree array `x`
 * (same index semantics as Array.prototype.slice).
 * @param x the tkTree; must be an array node.
 * @param index optional start index (inclusive).
 * @param end optional end index (exclusive).
 * @returns the sliced array; throws when `x` is not an array.
 */
function slice(x: tkTree, index?: number, end?: number): tkTree[] {
    if (Array.isArray(x)) {
        return x.slice(index, end);
    } else {
        // Fixed: the message previously said "concated" (copy-paste from concat).
        throw new Error("the tkTree can't be sliced, because it's not an array.");
    }
}
/**
* TokenMatcheePair for tokens' parser combinator
*
* matched: the matched (now and before) tokens
*
* remained: tokens to be matched
*
* ast: abstract syntax tree
*/
export interface TokenMatcheePair {
matched: tk.Token[]
remained: tk.Token[]
ast : tkTree[]
}
/**
* convert a `tkTree` AST to S-expr string
* @param t the `tkTree`
* @returns S-expr String
*/
export function tkTreeToSExp(t: tkTree): string{
var str = "";
if (Array.isArray(t)){
let strArray = t.map((x)=>tkTreeToSExp(x));
str = "(" + strArray.join(" ") + ")";
}else{
if (t=== undefined){
str = "%undefined"
}else{
str = t.text;
}
}
return str;
}
/**
* @description
* match one token type.
*
* it returns a function which test if the type of first token of the `remained` part of
* the argument of the function is `typ` , if it's true, update the `TokenMatcheePair` wrapped
* in `Some`. Otherwise, it returns `None`.
* * @param typ : the type to be test.
* @returns the updated `TokenMatcheePair` wrapped in `Some(x)` or `None`.
*/
export function m1TType(typ: tk.TokenType):
(m: TokenMatcheePair) => tk.Maybe<TokenMatcheePair> {
return (m: TokenMatcheePair) => {
if (m.remained.length == 0) {
return { _tag: "None" };
}
/**
* token to be matched
* */
const ttbm = m.remained[0];
if (ttbm.type == typ) {
let new_matched = m.matched.concat(ttbm);
let result : tk.Some<TokenMatcheePair> = {
_tag: "Some", value: {
matched: new_matched,
remained: m.remained.slice(1),
ast: ([ttbm]),
}
};
return result;
}
else {
return { _tag: "None" };
}
}
};
/**
* type int
*/
let tInt = m1TType(tk.TokenType.INT);
let tId = m1TType(tk.TokenType.ID);
let tAdd = m1TType(tk.TokenType.I_ADD);
let tSub = m1TType(tk.TokenType.I_SUB);
let tMul = m1TType(tk.TokenType.I_MUL);
let tDiv = m1TType(tk.TokenType.I_DIV);
let tLParen = m1TType(tk.TokenType.L_PAREN);
let tRParen = m1TType(tk.TokenType.R_PAREN);
let tComma = m1TType(tk.TokenType.COMMA);
let toSome = tk.toSome;
argv.forEach((val, index) => {
console.log(`${index}=${val}`);
});
/**
* like `m ==> f` in ocaml
* @param m matchee wrapped
* @param f matching function
* @returns wrapped result
*/
function thenDo(m : tk.Maybe<TokenMatcheePair>, f : Function){
if (m._tag == "None"){
return m;
}else{
var a : tk.Maybe<TokenMatcheePair> = f(m.value);
if (a._tag == "Some"){
a.value.ast = concat(m.value.ast, a.value.ast);
}
return a;
}
}
/**
* like `f1 | f2` in regex
* @param f1 the first tried function
* @param f2 the second tried function
* @returns wrapped result
*/
function orDo(f1 : Function, f2 : Function){
return (x : TokenMatcheePair) =>{
let res1 : tk.Maybe<TokenMatcheePair> = f1(x);
if (res1._tag == "Some"){
return res1;
}else{
let res2 : tk.Maybe<TokenMatcheePair> = f2(x);
return res2;
}
}
}
/**
*
* @param m : the `MatcheePair` to be consumed.
* @returns if the length of `m.remained` >= 1; consumes the matchee by 1 token
* and wraps it in `Some`,
* otherwise, returns `None`.
*/
export function matchAny(m: TokenMatcheePair): tk.Maybe<TokenMatcheePair> {
if (m.remained.length >= 1) {
return {
_tag: "Some", value: {
matched: m.matched.concat(m.remained[0]),
remained: m.remained.slice(1),
ast : [m.remained[0]],
}
};
} else {
return { _tag: "None" };
}
}
/**
* Danger : Maybe it's not enough to work.
* @description repeating matching function `f`
* zero or more times, like the asterisk `*` in regex `f*` .
* @param f : the function to be repeated 0+ times.
* @returns:the combined function
*/
export function OnceOrMoreDo(f: Function): (x: TokenMatcheePair) =>
tk.Maybe<TokenMatcheePair> {
return (x) => {
var wrappedOldX: tk.Maybe<TokenMatcheePair> = { _tag: "Some", value: x };
var wrappedNewX: tk.Maybe<TokenMatcheePair> = wrappedOldX;
var counter = -1;
while (wrappedNewX._tag != "None") {
wrappedOldX = wrappedNewX;
wrappedNewX = thenDo(wrappedOldX, f);
counter += 1;
};
if (counter <= 0){
return { _tag: "None"};
}
let ast = wrappedOldX.value.ast ;
wrappedOldX.value.ast =ast.slice(ast.length-counter);
console.log(repr(wrappedOldX.value.ast));
return wrappedOldX; };
}
/**
* aux function for midfix operator
* @param f function
* @param signal the rule name
* @returns
*/
let midfix = (f : Function, signal? : string) => (x : TokenMatcheePair)=>{
var a = f(x);
if (a._tag == "Some"){
let ast_tail : tkTree[] = slice(a.value.ast,a.value.ast.length-3);
let new_ast = [ast_tail];
a.value.ast = new_ast;
// console.log("+"+signal+"+"+repr(a));
}
return a;
}
let circumfix = (f : Function, signal? : string) => (x : TokenMatcheePair)=>{
var a : tk.Maybe<TokenMatcheePair> = f(x);
if (a._tag == "Some"){
console.log("$$$"+repr(a.value.ast));
let inner = a.value.ast[a.value.ast.length-2];
var ast_middle : tkTree[];
// the list should not be (%list) (%apply) (%lambda) etc.
if (Array.isArray(inner)){
if ('text' in inner[0] && (inner[0].text.slice(0,1) != "%")){
ast_middle = inner;
}
else{
ast_middle = [inner];
}
}
else{
ast_middle = [inner];
}
let new_ast = [ast_middle];
a.value.ast = new_ast;
}
return a;
}
/** single1 = tInt | "(" expr ")"*/
let single1 = circumfix((x : TokenMatcheePair) =>
thenDo(thenDo(thenDo(toSome(x), tLParen), expr), tRParen), "fac1");
let single2= tInt;
let single = orDo(single1, single2);
/** args = single "," args | single */
let args1 = (x: TokenMatcheePair)=>{
var ret = thenDo(thenDo(thenDo(toSome(x), single), tComma), args);
if (ret._tag == "Some"){
let retLength = ret.value.ast.length;
ret.value.ast = [[ret.value.ast[retLength-3]].concat(ret.value.ast[retLength-1])];
console.log("$$"+repr(ret.value.ast));
}
return ret;
};
let args2 = single;
let args = orDo(args1, args2);
/** callees = "(" args ")" | "(" ")" */
let callees1 = circumfix((x : TokenMatcheePair) =>
thenDo(thenDo(thenDo(toSome(x), tLParen), args), tRParen), "callees1");
let callees2 = (x: TokenMatcheePair)=>{
let ret = thenDo(thenDo(toSome(x), tLParen), tRParen);
if (ret._tag == "Some"){
let new_ast : tkTree[] = [[]];
ret.value.ast = new_ast;
}
return ret};
let callees = orDo(callees1, callees2);
/** %apply R combinating token */
let applyToken = {
text: "%apply",
type: tk.TokenType.ID,
col: 0,
ln: 0,
}
/** facAux = callees facAux | callees */
let facAux1 = (x: TokenMatcheePair)=>{
var ret = thenDo(thenDo(toSome(x), callees), facAux);
if (ret._tag == "Some"){
console.log("1232345"+repr(tkTreeToSExp(ret.value.ast[ret.value.ast.length-1])));
let last1 = ret.value.ast[ret.value.ast.length-1];
let last2 = ret.value.ast[ret.value.ast.length-2];
let b : tkTree[] = [applyToken];
ret.value.ast = [b.concat([last2, last1])];
console.log("11111"+repr(tkTreeToSExp(ret.value.ast)));
};
return ret;}
let facAux2 = callees;
let facAux = orDo(facAux1, facAux2);
/** fac = single facAux | single
* Issue1 to be fixed.
*/
let fac1 = (x: TokenMatcheePair)=>{
var ret = thenDo(thenDo(toSome(x), single),facAux);
if(ret._tag == "Some"){
console.log("777"+repr(tkTreeToSExp(ret.value.ast)));
ret.value.ast = [applyToken, ret.value.ast[ret.value.ast.length-2],
ret.value.ast[ret.value.ast.length-1]];
ret.value.ast;
rearrangeTree(ret.value.ast);
console.log("888"+repr(tkTreeToSExp(ret.value.ast)));
}
return ret;};
let fac2 = single;
let fac = orDo(fac1, fac2);
/**
* rearrangeTree : for applyToken subtree from right-combination to
* left-combination
* @input x a ast
* @return another ast
*/
function rearrangeTree(x: any) : any {
if (x !== undefined){
for (var i=1;i<x.length;i++){
rearrangeTree(x[i]);
}
console.log("@@"+repr(x[0]));
if (x[0] == applyToken){
if (Array.isArray(x[2]) && x[2][0] == applyToken){
let rl = rearrangeTree(x[2][1]);
let rr = rearrangeTree(x[2][2]);
let l = rearrangeTree(x[1]);
x[0] = applyToken;
x[1] = [applyToken, l, rl];
x[2] = rr;
console.log("@@=="+repr(x));
return x;
}
else{
x[0] = applyToken;
x[1] = rearrangeTree(x[1]);
x[2] = rearrangeTree(x[2]);
console.log("@@=="+repr(x));
return x;
}
}
return x;
}
}
/**
*
* term1 = fac (MUL | DIV) fac
*/
let term1 = midfix((x : TokenMatcheePair)=>
thenDo(thenDo(thenDo(toSome(x), fac), orDo(tMul,tDiv)), fac), "term1");
/**
*
* term2 = int MUL int
*/
let term2 = fac;
/**
* term = term1 | term2
*/
let term = orDo(term1, term2);
/**
*
* expr1 = term ADD term
*/
let expr1 = midfix((x : TokenMatcheePair)=>
thenDo(thenDo(thenDo(toSome(x), term), orDo(tAdd,tSub)), term), "expr1");
/**
* expr2 = term
*/
let expr2 = term;
/**
* expr = expr1 | expr2
*/
let expr = orDo(expr1, expr2);
let tokens = tk.tokenize("1");
let tokens2 = tk.tokenize("1(2)");
let tokens3 = tk.tokenize("1(2)(3)");
let tokens4 = tk.tokenize("2()(4)(5,6)(7,8,9,10)");
//let tokens = tk.tokenize("(4-(3/4))");
//tk.tokenize(argv[2]);
let tokensFiltered = tokens4.filter(
(x)=>{return (x.type != tk.TokenType.NL
&& x.type != tk.TokenType.SP)});
let beta = expr({
matched : [] ,
remained : tokensFiltered,
ast : []});
if (beta._tag == "Some"){
beta.value.ast = rearrangeTree(beta.value.ast);
console.log(tkTreeToSExp(beta.value.ast));
}
console.log("RESULT="+repr(beta));

View file

@ -1,32 +0,0 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.pdfGenerate = void 0;
const fs_1 = require("fs");
const pdf_lib_1 = require("pdf-lib");
var fontkit = require('@pdf-lib/fontkit');
function pdfGenerate() {
return __awaiter(this, void 0, void 0, function* () {
const pdfDoc = yield pdf_lib_1.PDFDocument.create();
const page = pdfDoc.addPage();
pdfDoc.registerFontkit(fontkit);
const fontBytes = (0, fs_1.readFileSync)("/usr/share/fonts/uming.ttf");
const font2 = yield pdfDoc.embedFont(fontBytes, { subset: true });
const fontBytes2 = (0, fs_1.readFileSync)("/usr/share/fonts/truetype/noto/NotoSansArabic-Light.ttf");
const font3 = yield pdfDoc.embedFont(fontBytes2, { subset: true });
page.drawText("x=20, y=20", { x: 20, y: 20 });
page.drawText("x:20, y:100 天地人", { x: 20, y: 100, font: font2 });
page.drawText("عربي", { x: 50, y: 150, font: font3 });
const pdfBytes = yield pdfDoc.save();
(0, fs_1.writeFileSync)('/tmp/test.pdf', pdfBytes);
});
}
exports.pdfGenerate = pdfGenerate;

View file

@ -1,25 +0,0 @@
import { readFileSync, writeFileSync } from "fs";
import { PDFDocument } from "pdf-lib";
var fontkit = require('@pdf-lib/fontkit');
export async function pdfGenerate(){
const pdfDoc = await PDFDocument.create()
const page = pdfDoc.addPage()
pdfDoc.registerFontkit(fontkit);
const fontBytes = readFileSync("/usr/share/fonts/uming.ttf");
const font2 = await pdfDoc.embedFont(fontBytes, {subset:true})
const fontBytes2 = readFileSync("/usr/share/fonts/truetype/noto/NotoSansArabic-Light.ttf")
const font3 = await pdfDoc.embedFont(fontBytes2, {subset:true})
page.drawText("x=20, y=20", {x : 20, y : 20})
page.drawText("x:20, y:100 天地人", {x : 20, y : 100, font: font2})
page.drawText("عربي", {x : 50, y : 150, font: font3})
const pdfBytes = await pdfDoc.save();
writeFileSync('/tmp/test.pdf', pdfBytes);
}

View file

@ -1,565 +0,0 @@
import * as util from 'util';
var fs = require('fs');
export type Some<T> = { _tag: "Some"; value: T };
export type None = { _tag: "None" };
/**
* part for tokenize the input string
*/
/**
* wrap a x in a `Some(T)`
* @param x : variable to be wrapped.
* @returns wrapped `x`.
*/
export function toSome<T>(x: T): Some<T> {
return { _tag: "Some", value: x };
}
/**
* @description Like the `Some(a)` and `None` in Rust.
*
* @example
* ```ts
* let exam1 : Maybe<Number> = { _tag: "Some", value: 12 };
* let exam2 : Maybe<Number> = None;
* ```
*/
export type Maybe<T> = Some<T> | None;
/**
* @description
* the pair of the string to be matched later and the string that have been matched
* @var matched : have been matched
* @var remained : will be tested whether it'll be matched.
* @var matched_type (optional): the type of the matched string
*/
export interface MatcheePair {
matched: string
remained: string
matched_type?: TokenType
}
/**
* The types of Token
* NL, // newline
*
* SP, // half-width space and tab
*
* ID, // identifier
*
* STR, // string
*
* OP, // operator or something like it
*
* FLO, // float num
*
* INT, // integer
*
* I_* // integer manipulation
*
* F_* // float manipulation
*
* SEMI_C// semi-colon
*/
export enum TokenType {
NL, // newline
SP, // half-width space and tab
ID, // identifier
STR, // string
FLO, // float num
INT, // integer
F_ADD,
F_SUB,
F_MUL,
F_DIV,
I_ADD,
I_SUB,
I_MUL,
I_DIV,
L_PAREN, // (
R_PAREN, // )
L_BRACK, // [
R_BRACK, // ]
L_BRACE, // {
R_BRACE, // }
COMMA, // ,
DOT, // .
COLON, // :
SEMI_C, // ;
AT, // @
HASH, // #
EQ, // ==
SET, // =
GT, // > greater than
LT, // <less than
GE, // >=
LE, // <=
NE, // <>
APOS, // '
R_ARROW, // ->
TRUE, // true
FALSE, // false
IF, // if
}
/**
* tokenized token.
* @var text : the content text
* @var type (optional): the type of the token
* @var col : the column number
* @var ln : the line number
*/
export interface Token {
text: string,
type?: TokenType,
col: number,
ln: number,
}
/**
* @description
* it returns a function which test if the first char of the `remained` part of
* the argument of the function is `c`, if it's true, update the `MatchedPair` wrapped
* in `Some`. Otherwise, it returns `None`.
* * @param c : the char to be test.
* @returns the updated `MatchedPair` wrapped in `Some(x)` or `None`.
*/
/**
 * Build a matcher that consumes exactly the single character `c` from the
 * front of `remained`, moving it onto `matched`; returns None when the
 * input is exhausted or the first character differs.
 */
export function match1Char(c: string): (m: MatcheePair) => Maybe<MatcheePair> {
    return (m: MatcheePair) => {
        if (m.remained.length == 0) {
            return { _tag: "None" };
        }
        const head = m.remained[0];
        if (head !== c) {
            return { _tag: "None" };
        }
        return {
            _tag: "Some",
            value: {
                matched: m.matched + head,
                remained: m.remained.substring(1),
            },
        };
    };
}
/**
*
* @param m : the `MatcheePair` to be consumed.
* @returns if the length of `m.remained` >= 1; consumes the matchee by 1 char and wraps it in `Some`,
* otherwise, returns `None`.
*/
/**
 * Consume any single character (regex `.`): shift the first char of
 * `remained` onto `matched`, or return None when nothing remains.
 */
export function matchAny(m: MatcheePair): Maybe<MatcheePair> {
    if (m.remained.length < 1) {
        return { _tag: "None" };
    }
    const head = m.remained[0];
    return {
        _tag: "Some",
        value: {
            matched: m.matched + head,
            remained: m.remained.substring(1),
        },
    };
}
/**
* @description
* it returns a function which test if the first char of the `remained` part of
* the argument of the function is between `l` and `u`, if it's true, update the `MatchedPair` wrapped
* in `Some`. Otherwise, it returns `None`.
* * @param l : lower bound char, 1-char string
* * @param u : upper bound char, 1-char string
* @returns the updated `MatchedPair` wrapped in `Some(x)` or `None`.
*/
export function matchRange(l: string, u: string): (m: MatcheePair) => Maybe<MatcheePair> {
    // code units of the inclusive bounds, captured once at matcher-build time
    let lCodepoint = charToCodepoint(l);
    let uCodepoint = charToCodepoint(u);
    // NOTE(review): this compares the strings `l > u`, not the computed code
    // units; for 1-char strings the two orderings agree, so behavior is the
    // same — confirm if multi-unit input ever becomes possible.
    if (l > u) {
        throw new Error("Error: the codepoint of `" + l + "` is not smaller than `" + u + "`)");
    }
    return (m: MatcheePair) => {
        if (m.remained.length < 1) {
            return { _tag: "None" };
        }
        // first char of the stream, tested against the inclusive [l, u] range
        const charToBeMatched = m.remained[0];
        const codePointToBeMatched = charToCodepoint(charToBeMatched);
        if (codePointToBeMatched >= lCodepoint && codePointToBeMatched <= uCodepoint) {
            return {
                _tag: "Some", value: {
                    matched: m.matched + charToBeMatched,
                    remained: m.remained.substring(1)
                }
            };
        }
        else {
            return { _tag: "None" };
        }
    }
};
/**
 * check if a matcheePair `m` matches a string `s`, character by character.
 * @param s the checker string; an empty `s` always yields `None`.
 * @returns `None` or matched pair wrapped in `Some`
 */
export function matchWord(s: string, ): (m: MatcheePair) => Maybe<MatcheePair> {
    return (m)=>{
        if (s.length==0){
            return { _tag: "None" };
        }
        var someM : Maybe<MatcheePair> = toSome(m);
        // chain one match1Char per character; a single failure makes the
        // whole chain None from that point on
        for (var idx : number=0; idx<s.length; idx++){
            someM = thenDo(someM, match1Char(s[idx]))
        }
        return someM;
    }
}
/**
* convert the one-char string to codepoint.
* @param s : the string to code point.
* @returns if `s.length > 1` return error; otherwise, return the codepoint of `s`.
*/
/**
 * Convert a one-character string to its numeric character code.
 * (charCodeAt returns the UTF-16 code unit — for BMP characters this equals
 * the code point; NOTE(review): surrogate pairs would need codePointAt.)
 * @param s the string to convert; must have length 1.
 * @returns the character code of `s[0]`.
 * @throws Error when `s.length > 1`.
 */
export function charToCodepoint(s: string): number {
    if (s.length > 1) {
        // Fixed: the message lacked a space before "is" ("...for Ais 2...").
        throw new Error("Error: the length of input string for " + s
            + " is " + s.length + ", however, it should be 1.");
    } else {
        return s.charCodeAt(0);
    }
}
/**
* @description thendo(input, f, ...) like
* a ==> f
* @param input: the wrapped input.
* @param f: the function to be applied.
*
* @returns:the applied wrapped result `MatcheePair`.
*/
export function thenDo<T>(input: Maybe<T>, f: Function): Maybe<T> {
    // None short-circuits the chain unchanged
    if (input._tag == "None") {
        return input;
    }
    else {
        // unwrap and feed the inner value to `f`, which returns a Maybe<T>
        let inner = input.value;
        return f(inner);
    }
}
/**
* @description "or", like the regex `( f1 | f2 )` .
* It returns a function `f` of which the argument is`x`.
* if `f1(x)` is None, then `f` returns `f2(x)`. Otherwise,
* `F` returns `f1(x)`.
* @param f1 : 1st function to be compared
* @param f2 : 2nd function to be compared
* @returns:the combined function
*/
/**
 * Ordered alternation, like the regex `( f1 | f2 )`: try `f1` on `x`
 * first, and only when it returns None fall back to `f2(x)`.
 * @param f1 : 1st function to be tried
 * @param f2 : 2nd function to be tried
 * @returns the combined function
 */
export function orDo<T>(f1: Function, f2: Function): (x: T) => Maybe<T> {
    return (x) => {
        const first: Maybe<T> = f1(x);
        return first._tag == "None" ? f2(x) : first;
    };
}
/**
* @description repeating matching function `f`
* zero or more times, like the asterisk `*` in regex `f*` .
* @param f : the function to be repeated 0+ times.
* @returns:the combined function
*/
export function zeroOrMoreDo<T>(f: Function): (x: T) => Maybe<T> {
return (x) => {
var wrapped_old_x: Maybe<T> = { _tag: "Some", value: x };
var wrapped_new_x: Maybe<T> = wrapped_old_x;
while (wrapped_new_x._tag != "None") {
wrapped_old_x = wrapped_new_x;
wrapped_new_x = thenDo(wrapped_old_x, f);
};
return wrapped_old_x;
};
}
/**
 * @description Not. like the `^` inside regex of [^f].
 * Builds `F(x)`: when `f(x)` is `None`, `F(x)` consumes a single
 * character via `matchAny`; when `f(x)` succeeds, `F(x)` is `None`.
 * @param f: the function forbidden to be matched.
 * @returns: combined function `F`.
 */
export function notDo<T>(f: Function): (x: T) => Maybe<T> {
    return (x) => {
        const wrapped: Maybe<T> = {
            _tag: "Some",
            value: x
        };
        // forbidden pattern absent -> accept one arbitrary character.
        if (thenDo(wrapped, f)._tag == "None") {
            return thenDo(wrapped, matchAny);
        }
        // forbidden pattern matched -> reject.
        return { _tag: "None" };
    };
}
/**
 * matches `f` at most once, similar to `?` in regex `f?`:
 * returns `f(x)` when it succeeds, otherwise the wrapped `x`
 * unchanged.
 * @param f : the function to be matched
 * @returns return wrapped f(x)
 */
export function zeroOrOnceDo<T>(f: Function): (x: T) => Maybe<T> {
    return (x) => {
        const unchanged: Maybe<T> = { _tag: "Some", value: x };
        const attempt = thenDo(unchanged, f);
        // failure of `f` is not an error: fall back to the original state.
        return attempt._tag != "None" ? attempt : unchanged;
    };
}
/**
 * @description tokenize `input` into an array of `Token`s.
 * Recognizes floats, integers, strings, identifiers, spaces,
 * newlines, and the operator/punctuation tokens built below.
 * @param input : the source text to be lexed.
 * @returns the token list, each token annotated with line/column.
 * @throws Error when some prefix of the remaining input matches no rule.
 */
export function tokenize(input: string): Array<Token> {
    // initial lexer state: nothing matched yet, the whole input remains.
    var input_matchee_pair: Maybe<MatcheePair> = toSome(
        {
            matched: "",
            remained: input
        });
    /**
     * generate a parser of a basic term (b_term)
     * @param pattern : the pattern parser
     * @param token_type : the returning token type
     * @returns a wrapped parser.
     */
    function bTerm(pattern: Function, token_type: TokenType) {
        return (x: MatcheePair) => {
            let wrapped_x = toSome(x);
            let result = pattern(wrapped_x);
            // on success, tag the matched text with its token type.
            if (result._tag == "Some") {
                result.value.matched_type = token_type;
            }
            return result;
        }
    }
    let d = matchRange('0', '9'); // \d
    // [+-]
    let plusMinus = orDo(match1Char('+'), match1Char('-'));
    let s_aux = orDo(match1Char(' '), match1Char('\t')); // (" " | "\t")
    // integer = ([+]|[-])?\d\d*
    let integer = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(thenDo(x,
            zeroOrOnceDo(plusMinus)), d),
            zeroOrMoreDo(d)),
        TokenType.INT);
    // space = [ \t]+
    let space = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x, s_aux), zeroOrMoreDo(s_aux)),
        TokenType.SP);
    // newline = \r?\n
    let newline = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x,
            zeroOrOnceDo(match1Char('\r'))),
            match1Char('\n')),
        TokenType.NL);
    // [_A-Za-z]
    let idHead = orDo(orDo(matchRange('a', 'z'), matchRange('A', 'Z')), match1Char('_'));
    let idRemained = orDo(idHead, matchRange('0', '9')); // [_A-Za-z0-9]
    // id = [_A-Za-z][_A-Za-z0-9]*
    let id = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x,
            idHead),
            zeroOrMoreDo(idRemained)),
        TokenType.ID);
    let doublequote = match1Char("\"");
    // [\\][\"]  -- an escaped double quote inside a string literal
    let escapeReverseSlash = (x: MatcheePair) =>
        thenDo(thenDo(toSome(x), match1Char("\\")), doublequote);
    // ([\\]["]|[^\"])*
    let stringInnerPattern = zeroOrMoreDo(
        orDo(escapeReverseSlash, notDo(match1Char("\""))));
    // str = ["]([\\]["]|[^"])*["]
    let str = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(thenDo(x, doublequote),
            stringInnerPattern), doublequote),
        TokenType.STR);
    // float = [+-]?\d+[.]\d+
    function floatPattern(x: Maybe<MatcheePair>) {
        return thenDo(thenDo(thenDo(thenDo(thenDo(thenDo(x,
            zeroOrOnceDo(plusMinus)), d),
            zeroOrMoreDo(d)),
            match1Char(".")), d),
            zeroOrMoreDo(d))
    };
    let float = bTerm(floatPattern, TokenType.FLO);
    // operators
    // +.
    let floatAdd = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x, match1Char("+")), match1Char(".")),
        TokenType.F_ADD);
    // -.
    let floatSub = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x, match1Char("-")), match1Char(".")),
        TokenType.F_SUB);
    // *.
    let floatMul = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x, match1Char("*")), match1Char(".")),
        TokenType.F_MUL);
    // /.
    let floatDiv = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x, match1Char("/")), match1Char(".")),
        TokenType.F_DIV);
    // ==
    let eq = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x, match1Char("=")), match1Char("=")),
        TokenType.EQ);
    // >=
    let ge = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x, match1Char(">")), match1Char("=")),
        TokenType.GE);
    // <=
    let le = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x, match1Char("<")), match1Char("=")),
        TokenType.LE);
    // !=
    let ne = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x, match1Char("!")), match1Char("=")),
        TokenType.NE);
    // ->
    let rightArrow = bTerm((x: Maybe<MatcheePair>) =>
        thenDo(thenDo(x, match1Char("-")), match1Char(">")),
        TokenType.R_ARROW);
    /**
     * unary operator : generating the pattern of basic unary operator
     * @param char : unary char for the operator
     * @param token_type : the corresponding token_type
     */
    function unaryOp(char: string, token_type: TokenType) {
        return bTerm((x: Maybe<MatcheePair>) => thenDo(x, match1Char(char)),
            token_type);
    };
    let intAdd = unaryOp('+', TokenType.I_ADD);
    let intSub = unaryOp('-', TokenType.I_SUB);
    let intMul = unaryOp('*', TokenType.I_MUL);
    let intDiv = unaryOp('/', TokenType.I_DIV);
    let lParen = unaryOp('(', TokenType.L_PAREN);
    let rParen = unaryOp(')', TokenType.R_PAREN);
    let lBracket = unaryOp('[', TokenType.L_BRACK);
    let rBracket = unaryOp(']', TokenType.R_BRACK);
    let lBrace = unaryOp('{', TokenType.L_BRACE);
    let rBrace = unaryOp('}', TokenType.R_BRACE);
    let comma = unaryOp(',', TokenType.COMMA);
    let dot = unaryOp('.', TokenType.DOT);
    let colon = unaryOp(':', TokenType.COLON);
    let semicolon = unaryOp(';', TokenType.SEMI_C);
    let at = unaryOp('@', TokenType.AT);
    let hash = unaryOp('#', TokenType.HASH);
    let set = unaryOp('=', TokenType.SET);
    let greaterthan = unaryOp('>', TokenType.GT);
    // NOTE(review): '<' is tagged TokenType.LE — the same type as "<=" above.
    // A dedicated TokenType.LT was probably intended; confirm before changing.
    let lessthan = unaryOp('<', TokenType.LE);
    let apos = unaryOp('\'', TokenType.APOS);
    // term : repeatedly run the combined matcher over the remaining input,
    // pushing tokens into `token_list` and tracking line/column.
    let term = (token_list: Array<Token>, x: Some<MatcheePair>) => {
        var ln = 1;
        var col = 0;
        var old_x = x;
        // alternatives are tried in list order ("first match wins" via orDo),
        // so multi-char operators (+., ==, ->, ...) precede their
        // single-char prefixes.
        let term_list = [
            floatAdd, floatSub, floatMul, floatDiv,
            intAdd, intSub, intMul, intDiv,
            eq, ge, le, ne, rightArrow,
            lParen, rParen, lBracket, rBracket, lBrace, rBrace,
            comma, dot, colon, semicolon, at, hash,
            set, greaterthan, lessthan, apos,
            float, newline, space, id, integer, str];
        // fold all alternatives into a single parser.
        let term_aux = term_list.reduce((x, y) => orDo(x, y));
        var new_x: Maybe<MatcheePair> = thenDo(old_x, term_aux);
        while (new_x._tag != "None") {
            if (new_x.value.matched_type != TokenType.NL) {
                // `col` is advanced before the push, so it records the
                // column just past the end of the token.
                col += new_x.value.matched.length;
                token_list.push({
                    text: new_x.value.matched,
                    type: new_x.value.matched_type,
                    ln: ln,
                    col: col
                });
            }
            else {
                // newline: reset the column, advance the line counter.
                // NOTE(review): `ln` is bumped before the push, so the NL
                // token itself carries the *next* line's number — confirm
                // this is intended.
                col = 0;
                ln += 1;
                token_list.push({
                    text: new_x.value.matched,
                    type: new_x.value.matched_type,
                    ln: ln,
                    col: col
                });
            }
            // restart matching on whatever input is left.
            old_x = toSome({
                matched: "",
                remained: new_x.value.remained
            });
            new_x = thenDo(old_x, term_aux);
        }
        // leftover unmatched input means the source cannot be tokenized.
        if (old_x.value.remained.length) {
            console.log(token_list);
            throw new Error("the code can't be tokenized is near Ln. " + ln + ", Col." + col
                + ", starting with " + old_x.value.remained.substring(0, 10));
        }
        return token_list;
    }
    return term([], input_matchee_pair);
    // TODO: id, string, space, basic operator, 3 marks: @, {, }.
}

86
tests/index.d.ts vendored
View file

@ -1,86 +0,0 @@
declare let assert: any;
declare let tokenize: any;
declare let a: any;
declare let example1: any;
declare let example2: any;
declare let thenDo: any;
declare let orDo: any;
declare let zeroOrMoreDo: any;
declare let notDo: any;
declare let matchAny: any;
declare let compPart1: any;
declare let compPart2: any;
declare let doThenTestee1: {
_tag: string;
value: {
matched: string;
remained: string;
};
};
declare let doTestRes1: any;
declare let doThenTestee2: {
_tag: string;
value: {
matched: string;
remained: string;
};
};
declare let doTestRes2: any;
declare let doThenTestee3: {
_tag: string;
value: {
matched: string;
remained: string;
};
};
declare let doTestRes3: any;
declare let doThenTestee4: {
_tag: string;
value: {
matched: string;
remained: string;
};
};
declare let doTestRes4: any;
declare let doThenTestee5: {
_tag: string;
value: {
matched: string;
remained: string;
};
};
declare let doTestRes5: any;
declare let doThenTestee6: {
_tag: string;
value: {
matched: string;
remained: string;
};
};
declare let doTestRes6: any;
declare let doThenTestee7: {
_tag: string;
value: {
matched: string;
remained: string;
};
};
declare let doTestRes7: any;
declare let doThenTestee8: {
_tag: string;
value: {
matched: string;
remained: string;
};
};
declare let doTestRes8: any;
declare let doThenTestee9: {
_tag: string;
value: {
matched: string;
remained: string;
};
};
declare let doTestRes9: any;
declare let harfbuzz: any;
declare let pdfManipulate: any;

View file

@ -1,95 +0,0 @@
"use strict";
let assert = require("assert");
let tokenize = require("../src/tokenize");
let a = tokenize.match1Char("我");
let example1 = a({ matched: "", remained: "我的" });
assert(example1._tag == "Some");
assert(example1.value.matched == "我");
assert(example1.value.remained == "的");
let example2 = a({ matched: "", remained: "妳的" });
assert(example2._tag == "None");
let thenDo = tokenize.thenDo;
let orDo = tokenize.orDo;
let zeroOrMoreDo = tokenize.zeroOrMoreDo;
let notDo = tokenize.notDo;
let matchAny = tokenize.matchAny;
// composed part x
let compPart1 = tokenize.match1Char("我");
let compPart2 = tokenize.match1Char("的");
let doThenTestee1 = { _tag: "Some", value: { matched: "", remained: "我的貓" } };
let doTestRes1 = thenDo(thenDo(doThenTestee1, compPart1), compPart2);
assert(doTestRes1._tag == "Some");
assert(doTestRes1.value.matched == "我的");
assert(doTestRes1.value.remained == "貓");
let doThenTestee2 = { _tag: "Some", value: { matched: "", remained: "我們" } };
let doTestRes2 = thenDo(thenDo(doThenTestee2, compPart1), compPart2);
assert(doTestRes2._tag == "None");
let doThenTestee3 = { _tag: "Some", value: { matched: "", remained: "我的貓" } };
let doTestRes3 = thenDo(thenDo(doThenTestee3, orDo(compPart1, compPart2)), compPart2);
assert(doTestRes3._tag == "Some");
assert(doTestRes3.value.matched == "我的");
assert(doTestRes3.value.remained == "貓");
let doThenTestee4 = { _tag: "Some", value: { matched: "", remained: "的的貓" } };
let doTestRes4 = thenDo(thenDo(doThenTestee4, orDo(compPart1, compPart2)), compPart2);
assert(doTestRes4._tag == "Some");
assert(doTestRes4.value.matched == "的的");
assert(doTestRes4.value.remained == "貓");
let doThenTestee5 = { _tag: "Some", value: { matched: "", remained: "的貓" } };
let doTestRes5 = thenDo(thenDo(doThenTestee5, zeroOrMoreDo(compPart1)), compPart2);
assert(doTestRes5._tag == "Some");
assert(doTestRes5.value.matched == "的");
assert(doTestRes5.value.remained == "貓");
let doThenTestee6 = { _tag: "Some", value: { matched: "", remained: "我我我的貓" } };
let doTestRes6 = thenDo(thenDo(doThenTestee6, zeroOrMoreDo(compPart1)), compPart2);
assert(doTestRes6._tag == "Some");
assert(doTestRes6.value.matched == "我我我的");
assert(doTestRes6.value.remained == "貓");
let doThenTestee7 = { _tag: "Some", value: { matched: "", remained: "我的" } };
let doTestRes7 = thenDo(thenDo(doThenTestee7, notDo(compPart1)), compPart2);
assert(doTestRes7._tag == "None");
let doThenTestee8 = { _tag: "Some", value: { matched: "", remained: "妳的" } };
let doTestRes8 = thenDo(thenDo(doThenTestee8, notDo(compPart1)), compPart2);
assert(doTestRes8._tag == "Some");
assert(doTestRes8.value.matched == "妳的");
let doThenTestee9 = { _tag: "Some", value: { matched: "", remained: "妳的" } };
let doTestRes9 = thenDo(doThenTestee9, matchAny);
assert(doTestRes9._tag == "Some");
assert(doTestRes9.value.matched == "妳");
assert(doTestRes9.value.remained == "的");
tokenize.tokenize("+123");
tokenize.tokenize("123");
tokenize.tokenize("-123");
tokenize.tokenize(" 123");
try {
tokenize.tokenize("c123");
}
catch (error) {
console.log(error);
}
tokenize.tokenize(" ");
tokenize.tokenize(" ");
tokenize.tokenize(" \t");
tokenize.tokenize(" \t123");
try {
tokenize.tokenize(" \t123aaa456");
}
catch (error) {
console.log(error);
}
tokenize.tokenize(" \t123\n456");
tokenize.tokenize("\"\"");
tokenize.tokenize("\"123\"");
tokenize.tokenize("\"1\\\"23\"");
tokenize.tokenize("\"1\\\"23\" abc123");
tokenize.tokenize("+0.012");
tokenize.tokenize("0.0");
tokenize.tokenize("-222.0");
tokenize.tokenize("1+1 ==2; 3+8 foo(12)");
console.log(tokenize.tokenize("2+2"));
// harfbuzz test
let harfbuzz = require("../src/harfbuzz.js");
harfbuzz.harfbuzzTest("123.abc");
// pdf test
let pdfManipulate = require("../src/pdfManipulate.js");
pdfManipulate.pdfGenerate("123.abc");
console.log("/tmp/test.pdf產出ah");

View file

@ -1,122 +0,0 @@
let assert = require("assert");
let tokenize = require("../src/tokenize");
let a = tokenize.match1Char("我");
let example1 = a({matched: "", remained: "我的"});
assert(example1._tag == "Some");
assert(example1.value.matched == "我");
assert(example1.value.remained == "的");
let example2 = a({matched: "", remained: "妳的"});
assert(example2._tag == "None");
let thenDo = tokenize.thenDo;
let orDo = tokenize.orDo;
let zeroOrMoreDo = tokenize.zeroOrMoreDo;
let notDo = tokenize.notDo;
let matchAny = tokenize.matchAny;
// composed part x
let compPart1 = tokenize.match1Char("我");
let compPart2 = tokenize.match1Char("的");
let doThenTestee1 = {_tag : "Some",value : {matched: "", remained: "我的貓"}};
let doTestRes1 = thenDo(thenDo(doThenTestee1, compPart1), compPart2);
assert(doTestRes1._tag == "Some");
assert(doTestRes1.value.matched == "我的");
assert(doTestRes1.value.remained == "貓");
let doThenTestee2 = {_tag : "Some",value : {matched: "", remained: "我們"}};
let doTestRes2 = thenDo(thenDo(doThenTestee2, compPart1), compPart2);
assert(doTestRes2._tag == "None");
let doThenTestee3 = {_tag : "Some",value : {matched: "", remained: "我的貓"}};
let doTestRes3 = thenDo(thenDo(doThenTestee3, orDo(compPart1, compPart2)), compPart2);
assert(doTestRes3._tag == "Some");
assert(doTestRes3.value.matched == "我的");
assert(doTestRes3.value.remained == "貓");
let doThenTestee4 = {_tag : "Some",value : {matched: "", remained: "的的貓"}};
let doTestRes4 = thenDo(thenDo(doThenTestee4, orDo(compPart1, compPart2)), compPart2);
assert(doTestRes4._tag == "Some");
assert(doTestRes4.value.matched == "的的");
assert(doTestRes4.value.remained == "貓");
let doThenTestee5 = {_tag : "Some",value : {matched: "", remained: "的貓"}};
let doTestRes5 = thenDo(thenDo(doThenTestee5, zeroOrMoreDo(compPart1)), compPart2);
assert(doTestRes5._tag == "Some");
assert(doTestRes5.value.matched == "的");
assert(doTestRes5.value.remained == "貓");
let doThenTestee6 = {_tag : "Some",value : {matched: "", remained: "我我我的貓"}};
let doTestRes6 = thenDo(thenDo(doThenTestee6, zeroOrMoreDo(compPart1)), compPart2);
assert(doTestRes6._tag == "Some");
assert(doTestRes6.value.matched == "我我我的");
assert(doTestRes6.value.remained == "貓");
let doThenTestee7 = {_tag : "Some",value : {matched: "", remained: "我的"}};
let doTestRes7 = thenDo(thenDo(doThenTestee7, notDo(compPart1)), compPart2);
assert(doTestRes7._tag == "None");
let doThenTestee8 = {_tag : "Some",value : {matched: "", remained: "妳的"}};
let doTestRes8 = thenDo(thenDo(doThenTestee8, notDo(compPart1)), compPart2);
assert(doTestRes8._tag == "Some");
assert(doTestRes8.value.matched == "妳的");
let doThenTestee9 = {_tag : "Some",value : {matched: "", remained: "妳的"}};
let doTestRes9 = thenDo(doThenTestee9, matchAny);
assert(doTestRes9._tag == "Some");
assert(doTestRes9.value.matched == "妳");
assert(doTestRes9.value.remained == "的");
tokenize.tokenize("+123");
tokenize.tokenize("123");
tokenize.tokenize("-123");
tokenize.tokenize(" 123");
try {
tokenize.tokenize("c123");
} catch (error) {
console.log(error);
}
tokenize.tokenize(" ");
tokenize.tokenize(" ");
tokenize.tokenize(" \t");
tokenize.tokenize(" \t123");
try {
tokenize.tokenize(" \t123aaa456");
} catch (error) {
console.log(error);
}
tokenize.tokenize(" \t123\n456");
tokenize.tokenize("\"\"");
tokenize.tokenize("\"123\"");
tokenize.tokenize("\"1\\\"23\"");
tokenize.tokenize("\"1\\\"23\" abc123");
tokenize.tokenize("+0.012");
tokenize.tokenize("0.0");
tokenize.tokenize("-222.0");
tokenize.tokenize("1+1 ==2; 3+8 foo(12)");
console.log(tokenize.tokenize("2+2"));
// harfbuzz test
let harfbuzz = require("../src/harfbuzz.js");
harfbuzz.harfbuzzTest("123.abc");
// pdf test
let pdfManipulate = require("../src/pdfManipulate.js");
pdfManipulate.pdfGenerate("123.abc");
console.log("/tmp/test.pdf產出ah");

View file

@ -1,109 +0,0 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig to read more about this file */
/* Projects */
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
/* Language and Environment */
"target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
// "jsx": "preserve", /* Specify what JSX code is generated. */
// "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
/* Modules */
"module": "commonjs", /* Specify what module code is generated. */
// "rootDir": "./", /* Specify the root folder within your source files. */
// "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
// "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
// "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
// "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
// "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
// "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
// "resolveJsonModule": true, /* Enable importing .json files. */
// "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
/* JavaScript Support */
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
/* Emit */
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
// "sourceMap": true, /* Create source map files for emitted JavaScript files. */
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
// "outDir": "./", /* Specify an output folder for all emitted files. */
// "removeComments": true, /* Disable emitting comments. */
// "noEmit": true, /* Disable emitting files from a compilation. */
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
// "newLine": "crlf", /* Set the newline character for emitting files. */
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
/* Interop Constraints */
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
// "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
/* Type Checking */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
/* Completeness */
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
"skipLibCheck": true /* Skip type checking all .d.ts files. */
}
}

View file

@ -1,9 +0,0 @@
{
"defaultSeverity": "error",
"extends": [
"tslint:recommended"
],
"jsRules": {},
"rules": {},
"rulesDirectory": []
}