add some language drafts
This commit is contained in:
parent
2b79487388
commit
1b0fcba94e
235 changed files with 39768 additions and 0 deletions
1
README.md
Normal file
1
README.md
Normal file
|
@ -0,0 +1 @@
|
||||||
|
Many personal draughts of my toy langs unfinished.
|
6
ataabu/archive/README.md
Normal file
6
ataabu/archive/README.md
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
# ataabu
|
||||||
|
A toy language written in OCaml.
|
||||||
|
|
||||||
|
## How to use
|
||||||
|
|
||||||
|
make; build/ataabu
|
44
ataabu/archive/closure_conv
Normal file
44
ataabu/archive/closure_conv
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
#lang racket
|
||||||
|
(define segments '(exps (def a 12)
|
||||||
|
(def b 24)
|
||||||
|
(l (b x) (+ a b x))
|
||||||
|
(def c 24)
|
||||||
|
(+ b a 13)))
|
||||||
|
|
||||||
|
(define (prime? op) (memq op '(%+ %- %* %/)))
|
||||||
|
|
||||||
|
(define (find_fv x)
|
||||||
|
(match x
|
||||||
|
[`(def ,x ,y) (set-subtract (find_fv y) `(,x))]
|
||||||
|
[`(l ,args ,y) (set-subtract (find_fv y) args)]
|
||||||
|
[(? integer? _) '()]
|
||||||
|
[(? symbol? _) `(,x)]
|
||||||
|
[(list (? prime? op) args ...) (remove-duplicates (flatten (map find_fv args))) ]
|
||||||
|
[(list args ...) (remove-duplicates (flatten (map find_fv args))) ]
|
||||||
|
))
|
||||||
|
|
||||||
|
(find_fv `(+ a b b c))
|
||||||
|
(find_fv `(def x y))
|
||||||
|
(find_fv `(l (x y) (%+ x y z)))
|
||||||
|
(find_fv `(l (x y) (+ x y z)))
|
||||||
|
|
||||||
|
|
||||||
|
(define (replace-fv env fv_list body)
|
||||||
|
(match body
|
||||||
|
['() body]
|
||||||
|
[(? list? _) (cons (replace-fv env fv_list (car body)) (replace-fv env fv_list (cdr body)))]
|
||||||
|
[(? (lambda (x) (memq x fv_list))) `(arr-ref ,env ,(index-of fv_list body))]
|
||||||
|
[_ body]
|
||||||
|
))
|
||||||
|
|
||||||
|
(define (clos-conv clos)
|
||||||
|
(match clos
|
||||||
|
[`(l ,args ,body) (let ((env (gensym 'env)) (fv_list (find_fv clos))) `((FUN ,(append args `(,env)) ,(replace-fv env fv_list body)) (DEF ,env (ARR ,fv_list))))]
|
||||||
|
[_ clos]
|
||||||
|
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(clos-conv '(l (x y) (+ y z)))
|
||||||
|
(clos-conv '(def foo (l (y) (%+ y z))))
|
||||||
|
(clos-conv '((l (y x) (%+ x y z)) 3 5))
|
198
ataabu/archive/closure_conv.ml
Normal file
198
ataabu/archive/closure_conv.ml
Normal file
|
@ -0,0 +1,198 @@
|
||||||
|
module SS = Set.Make(String);;
|
||||||
|
open Printf;;
|
||||||
|
|
||||||
|
(*let ex_token_list2 = Tokenizer.total_parser "int a = 15; 3; 1 + 1; a; (lambda(int y){lambda(int x){y+x;};}(12));";;
|
||||||
|
let ex_parseoutput2 = Parser.stmts ex_token_list2;;
|
||||||
|
|
||||||
|
Parser.print_parseoutput ex_parseoutput2;;
|
||||||
|
|
||||||
|
Type_inf.type_infer ex_parseoutput2;;*)
|
||||||
|
|
||||||
|
let rec find_free_var l bound_vars =
|
||||||
|
match l with
|
||||||
|
|
||||||
|
| Parser.Item(item) ->
|
||||||
|
(*variable*)
|
||||||
|
(match item with
|
||||||
|
|
||||||
|
|Tokenizer.Token(var, "ID") ->
|
||||||
|
(if List.mem var !bound_vars then
|
||||||
|
[]
|
||||||
|
else
|
||||||
|
[var])
|
||||||
|
(* constant *)
|
||||||
|
| Tokenizer.Token(_, "INT") -> []
|
||||||
|
(* operator *)
|
||||||
|
| Tokenizer.Token(_, "OP") -> []
|
||||||
|
(* operator *)
|
||||||
|
| Tokenizer.Token(_, other_op_type) -> [])
|
||||||
|
| Parser.Ls(lst) -> (
|
||||||
|
match lst with
|
||||||
|
(*apply*)
|
||||||
|
| [Parser.Item(Tokenizer.Token("%apply", "ID")); caller; callee] ->
|
||||||
|
let list_combined = List.append (find_free_var caller bound_vars) (find_free_var callee bound_vars) in
|
||||||
|
let empty_set = SS.empty in
|
||||||
|
let set = List.fold_right SS.add list_combined empty_set in
|
||||||
|
SS.elements set
|
||||||
|
(*operator*)
|
||||||
|
| [Parser.Item(Tokenizer.Token(op_name, "OP")); lhs; rhs] ->
|
||||||
|
let list_combined = List.append (find_free_var lhs bound_vars) (find_free_var rhs bound_vars) in
|
||||||
|
let empty_set = SS.empty in
|
||||||
|
let set = List.fold_right SS.add list_combined empty_set in
|
||||||
|
SS.elements set
|
||||||
|
(*define*)
|
||||||
|
| [Parser.Item(Tokenizer.Token("%def", "ID")); typ; Parser.Item(Tokenizer.Token(id, "ID")); inner] ->
|
||||||
|
let list_inner = find_free_var inner bound_vars in
|
||||||
|
let list_id = find_free_var (Parser.Item(Tokenizer.Token(id, "ID"))) bound_vars in
|
||||||
|
let empty_set = SS.empty in
|
||||||
|
let set_inner = List.fold_right SS.add list_inner empty_set in
|
||||||
|
let set_id = List.fold_right SS.add list_id empty_set in
|
||||||
|
let set_result = SS.diff set_inner set_id in
|
||||||
|
let _ = (bound_vars := id::!bound_vars) in
|
||||||
|
SS.elements set_result
|
||||||
|
(*lambda*)
|
||||||
|
| [Parser.Item(Tokenizer.Token("lambda", "ID")); Parser.Ls([args_header; Parser.Ls([typ; arg])]); body] ->
|
||||||
|
let new_bound_var_frame = ref [] in
|
||||||
|
let list_body = find_free_var body new_bound_var_frame in
|
||||||
|
let list_arg = find_free_var arg new_bound_var_frame in
|
||||||
|
let empty_set = SS.empty in
|
||||||
|
let set_inner = List.fold_right SS.add list_body empty_set in
|
||||||
|
let set_id = List.fold_right SS.add list_arg empty_set in
|
||||||
|
let set_result = SS.diff set_inner set_id in
|
||||||
|
SS.elements set_result
|
||||||
|
|
||||||
|
| cmds ->
|
||||||
|
let cmds_free_var = List.map (fun x -> find_free_var x bound_vars) cmds in
|
||||||
|
let cmds_fv_flatten_ls = List.flatten cmds_free_var in
|
||||||
|
let empty_set = SS.empty in
|
||||||
|
let cmds_fv_flatten_ls_set = List.fold_right SS.add cmds_fv_flatten_ls empty_set in
|
||||||
|
SS.elements cmds_fv_flatten_ls_set
|
||||||
|
)
|
||||||
|
| _ -> [];;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
(*
|
||||||
|
let ex_token_list = Tokenizer.total_parser "int a = 12 ; int d = 16; lambda(int b){a + b + d;};20;";;
|
||||||
|
let ex_parseoutput = Parser.stmts ex_token_list;;
|
||||||
|
|
||||||
|
Type_inf.type_infer ex_parseoutput;; *)
|
||||||
|
|
||||||
|
let closure_sym_no = ref 0;;
|
||||||
|
|
||||||
|
let genclosure =
|
||||||
|
fun () ->
|
||||||
|
let tmp = Printf.sprintf "clos%d" (!closure_sym_no) in
|
||||||
|
let _ = (closure_sym_no := !closure_sym_no + 1) in
|
||||||
|
tmp;;
|
||||||
|
|
||||||
|
|
||||||
|
(* Parser.print_parseoutput ex_parseoutput;; *)
|
||||||
|
|
||||||
|
let rec get_index_aux ls item idx =
|
||||||
|
if idx == (List.length ls) then -1
|
||||||
|
else
|
||||||
|
(if (List.nth ls idx) == item then idx
|
||||||
|
else get_index_aux ls item (idx+1))
|
||||||
|
|
||||||
|
let get_index ls item =
|
||||||
|
if List.mem item ls then
|
||||||
|
get_index_aux ls item 0
|
||||||
|
else -1
|
||||||
|
|
||||||
|
|
||||||
|
let rec replacing_vars ln fv clos_sym =
|
||||||
|
match ln with
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token("lambda", "ID"));args; body]) ->
|
||||||
|
let body_replaced = replacing_vars body fv clos_sym in
|
||||||
|
let args_replaced = replacing_vars args fv clos_sym in
|
||||||
|
Parser.Ls([Parser.Item(Tokenizer.Token("lambda", "ID"));args_replaced; body_replaced])
|
||||||
|
| Parser.Ls([Parser.Ls(list)]) -> replacing_vars (Parser.Ls(list)) fv clos_sym
|
||||||
|
| Parser.Ls(list) -> Parser.Ls(List.map (fun x -> replacing_vars x fv clos_sym) list)
|
||||||
|
| Parser.Item(Tokenizer.Token(id, typ)) ->
|
||||||
|
if (List.mem id fv) then
|
||||||
|
(*let _ = print_string ("上大人" ^ id ^ "孔乙己") in *)
|
||||||
|
|
||||||
|
(let index = get_index fv id in
|
||||||
|
let sym_name = Printf.sprintf "fv[%d]" index in
|
||||||
|
Parser.Item(Tokenizer.Token(sym_name, "ID")))
|
||||||
|
else ln
|
||||||
|
| _ -> ln
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
let rec closure_conv_replacing fv_outer fv_inner line =
|
||||||
|
|
||||||
|
|
||||||
|
let _ = print_string ("===" ^ (Parser.ast2string line) ^ "========") in
|
||||||
|
let _ = print_string "fv_inner: " in
|
||||||
|
let _ = List.map print_string fv_inner in
|
||||||
|
let _ = print_string "\tfv_outer: " in
|
||||||
|
let _ = List.map print_string fv_outer in
|
||||||
|
|
||||||
|
let tmp_list1 = List.map (fun var -> Parser.Item(Tokenizer.Token(var, "ID"))) fv_inner in
|
||||||
|
let fv_list = Parser.Ls(Parser.Item(Tokenizer.Token("%struct", "ID"))::tmp_list1) in
|
||||||
|
match line with
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token("lambda", "ID")); Parser.Ls(args); Parser.Ls(body)]) ->
|
||||||
|
let new_fv = (find_free_var (Parser.Ls(body)) (ref [])) in
|
||||||
|
let _ = print_string "new_fv: " in
|
||||||
|
let _ = List.map print_string new_fv in
|
||||||
|
let _ = print_string "\n\n" in
|
||||||
|
let closure_symbol = (genclosure ()) in
|
||||||
|
let def_closure_list = Parser.Ls([Parser.Item(Tokenizer.Token("%def", "ID"));
|
||||||
|
Parser.Item(Tokenizer.Token("STRUCT", "ID"));
|
||||||
|
Parser.Item(Tokenizer.Token(closure_symbol, "ID"));
|
||||||
|
fv_list]) in
|
||||||
|
let replaced_body = List.map (fun l -> closure_conv_replacing fv_inner new_fv l) body in
|
||||||
|
let temp = Parser.Ls([Parser.Item(Tokenizer.Token("Object*", "ID")); Parser.Item(Tokenizer.Token(closure_symbol,"ID"))]) in
|
||||||
|
let replaced_lambda = Parser.Ls([Parser.Item(Tokenizer.Token("lambda", "ID")); Parser.Ls(args @ [temp]); Parser.Ls(replaced_body)]) in
|
||||||
|
let return_result = Parser.Ls([def_closure_list; replaced_lambda]) in
|
||||||
|
return_result
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token("%apply" , "ID")); caller; callee]) ->
|
||||||
|
let caller_new = closure_conv_replacing fv_outer fv_inner caller in
|
||||||
|
let callee_new = closure_conv_replacing fv_outer fv_inner callee in
|
||||||
|
(match caller_new with
|
||||||
|
| Parser.Ls([closure_struct; closure_main]) ->
|
||||||
|
(match callee_new with
|
||||||
|
| Parser.Ls([callee_struct; callee_main]) -> Parser.Ls([closure_struct; callee_struct;
|
||||||
|
Parser.Ls([Parser.Item(Tokenizer.Token("%apply" , "ID")); closure_main; callee_main])])
|
||||||
|
| _ -> Parser.Ls([closure_struct;Parser.Ls([Parser.Item(Tokenizer.Token("%apply" , "ID")); closure_main; callee_new])]))
|
||||||
|
| _ -> line)
|
||||||
|
| _ -> replacing_vars line fv_outer (!closure_sym_no)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
let closure_conv_aux2 parseoutput =
|
||||||
|
(match parseoutput with
|
||||||
|
| Parser.Success(Ls(lines), remained_tokens) ->
|
||||||
|
(let free_var = ref [] in
|
||||||
|
List.map (fun ln -> let fv = find_free_var ln free_var in
|
||||||
|
closure_conv_replacing [] fv ln) lines)
|
||||||
|
| _ -> []);;
|
||||||
|
|
||||||
|
let rec elim_paren_aux middle1 =
|
||||||
|
match middle1 with
|
||||||
|
| Parser.Ls([Parser.Ls(x)]) -> elim_paren_aux (Parser.Ls(x))
|
||||||
|
| Parser.Ls(x) -> Parser.Ls(List.map elim_paren_aux x)
|
||||||
|
| _ -> middle1
|
||||||
|
;;
|
||||||
|
|
||||||
|
let elim_paren middle1 = List.map elim_paren_aux middle1;;
|
||||||
|
|
||||||
|
let closure_conv_main input =
|
||||||
|
let middle1 = closure_conv_aux2 input in
|
||||||
|
let middle2 = elim_paren middle1 in
|
||||||
|
let rec modifier ls =
|
||||||
|
match ls with
|
||||||
|
| Parser.Ls([Parser.Ls(Parser.Item(Tokenizer.Token("%def", "ID"))::Parser.Item(Tokenizer.Token("STRUCT", "ID"))::rs1 ); rs2 ])::rs3 ->
|
||||||
|
Parser.Ls(Parser.Item(Tokenizer.Token("%def", "ID"))::Parser.Item(Tokenizer.Token("STRUCT", "ID"))::rs1)::rs2::rs3
|
||||||
|
| hd::rs ->hd::(modifier rs)
|
||||||
|
| _ -> ls in
|
||||||
|
modifier middle2;;
|
||||||
|
|
||||||
|
(*
|
||||||
|
List.map (fun x -> print_string (Parser.ast2string x)) (closure_conv_main ex_parseoutput);;*)
|
195
ataabu/archive/codegen.ml
Normal file
195
ataabu/archive/codegen.ml
Normal file
|
@ -0,0 +1,195 @@
|
||||||
|
open Printf;;
|
||||||
|
open Int;;
|
||||||
|
open StringLabels;;
|
||||||
|
|
||||||
|
let counter = ref 0;;
|
||||||
|
|
||||||
|
let gensym =
|
||||||
|
fun () ->
|
||||||
|
(let tmp = Printf.sprintf "sym%d" (!counter) in
|
||||||
|
counter := !counter + 1;
|
||||||
|
tmp)
|
||||||
|
|
||||||
|
(*
|
||||||
|
let ex_token_list = Tokenizer.total_parser "lambda(x){x;}(12);";;
|
||||||
|
Parser.print_parseoutput (Parser.stmts ex_token_list);;*)
|
||||||
|
|
||||||
|
let ex_token_list2 = Tokenizer.total_parser "((lambda(int x){lambda(int y){x + y;};}(7))(8));";;
|
||||||
|
let ex_parseoutput2 = Parser.stmts ex_token_list2;;
|
||||||
|
|
||||||
|
let infering_result = Type_inf.type_infer ex_parseoutput2;; (*type infering*)
|
||||||
|
let ex_parseoutput3 = Parser.Ls(Closure_conv.closure_conv_main ex_parseoutput2);; (*closure_conversion*)
|
||||||
|
|
||||||
|
|
||||||
|
print_string (Parser.ast2string ex_parseoutput3);;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
let list_mut = ref (Parser.Ls([]));;
|
||||||
|
let main_str = ref "";;
|
||||||
|
let closure_counter = ref (-1);;
|
||||||
|
|
||||||
|
let get_args_sym_string x =
|
||||||
|
match x with
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token(arg_typ, "ID")); Parser.Item(Tokenizer.Token(arg_sym, "ID"))]) -> arg_sym
|
||||||
|
| _ -> ""
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
let rec codegen ast_tree main_str =
|
||||||
|
match ast_tree with
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token(op, "OP")); x; y]) -> let a = codegen_aux ast_tree main_str in (!main_str, a)
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token("%apply", "ID")); x; y]) -> let a = codegen_aux ast_tree main_str in (!main_str, a)
|
||||||
|
| Parser.Ls(ls_inner) -> let a = (List.map (fun x -> codegen_aux x main_str) ls_inner) in (!main_str, (List.hd (List.rev a)))
|
||||||
|
| Parser.Item(x) -> let a = codegen_aux ast_tree main_str in (a, a)
|
||||||
|
| Parser.ASTFail -> ("", "")
|
||||||
|
|
||||||
|
and codegen_aux ast_tree main_str=
|
||||||
|
match ast_tree with
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token("%apply", "ID")); caller; callee]) ->
|
||||||
|
let caller_side = codegen_aux caller main_str in
|
||||||
|
let callee_side = codegen_aux callee main_str in
|
||||||
|
let res_sym = gensym () in
|
||||||
|
let fmt = format_of_string "
|
||||||
|
Object %s;
|
||||||
|
%s = %s.value.func(%s, %s.free_var);
|
||||||
|
" in
|
||||||
|
let item_str = Printf.sprintf fmt res_sym res_sym caller_side callee_side caller_side in
|
||||||
|
main_str := !(main_str) ^ item_str;
|
||||||
|
res_sym
|
||||||
|
|
||||||
|
| Parser.Item(Tokenizer.Token(num, "INT")) ->
|
||||||
|
let sym = (gensym ()) in
|
||||||
|
let fmt = format_of_string "
|
||||||
|
Object %s;
|
||||||
|
%s.type =\"int\";
|
||||||
|
%s.value.inte = %d;\n" in
|
||||||
|
let item_str = Printf.sprintf fmt sym sym sym (int_of_string num) in
|
||||||
|
main_str := !(main_str) ^ item_str;
|
||||||
|
sym
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token("lambda", "ID")); Parser.Ls(args_id::args); body ]) ->
|
||||||
|
let closure_con = !closure_counter in
|
||||||
|
let args_str_array = List.map get_args_sym_string args in
|
||||||
|
let arg_str = List.hd args_str_array in
|
||||||
|
let function_str = ref "" in
|
||||||
|
let (body_string, get_return_id) = codegen body function_str in
|
||||||
|
|
||||||
|
let sym_lambda = gensym () in
|
||||||
|
let sym_closure = gensym () in
|
||||||
|
let return_str = "return " ^ get_return_id ^ ";" in
|
||||||
|
let fmt = format_of_string "
|
||||||
|
Object %s (Object %s, Object* fv){
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
}
|
||||||
|
|
||||||
|
" in
|
||||||
|
let item_str_tmp = Printf.sprintf fmt sym_lambda arg_str body_string return_str in
|
||||||
|
|
||||||
|
let closure_str_fmt = format_of_string
|
||||||
|
|
||||||
|
"
|
||||||
|
%s
|
||||||
|
|
||||||
|
Object %s;
|
||||||
|
%s.type= \"func\";
|
||||||
|
%s.value.func = &%s;
|
||||||
|
%s.free_var = clos%d ;
|
||||||
|
" in
|
||||||
|
let item_str = Printf.sprintf closure_str_fmt item_str_tmp sym_closure sym_closure sym_closure sym_lambda sym_closure closure_con in
|
||||||
|
main_str := !(main_str) ^ item_str ;
|
||||||
|
sym_closure
|
||||||
|
|
||||||
|
| Parser.Item(Tokenizer.Token(var, "ID")) -> main_str := !(main_str) ^ "\t" ^ var ^ ";"; var
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token("+", "OP")); x; y]) ->
|
||||||
|
let sym = (gensym ()) in
|
||||||
|
let lhs = codegen_aux x main_str in
|
||||||
|
let rhs = codegen_aux y main_str in
|
||||||
|
let fmt = format_of_string
|
||||||
|
"
|
||||||
|
Object %s;
|
||||||
|
%s.type = %s.type;
|
||||||
|
if (%s.type = \"int\"){
|
||||||
|
%s.value.inte = %s.value.inte + %s.value.inte;}
|
||||||
|
else if (%s.type = \"flo\"){
|
||||||
|
%s.value.doub = %s.value.doub + %s.value.doub;
|
||||||
|
}
|
||||||
|
%s;\n" in
|
||||||
|
let item_str = (Printf.sprintf fmt sym sym lhs lhs sym lhs rhs sym sym lhs rhs sym) in
|
||||||
|
let _ = (main_str := !(main_str) ^ item_str ) in
|
||||||
|
sym
|
||||||
|
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token("%def", "ID")); Parser.Item(Tokenizer.Token("STRUCT", "ID")) ;
|
||||||
|
Parser.Item(Tokenizer.Token(clo_fv, "ID")) ; Parser.Ls(Parser.Item(Tokenizer.Token("%struct", "ID"))::fv_list)]) ->
|
||||||
|
(*let fv_list = List.tl fv_ls in (*fv = free variable*)*)
|
||||||
|
let fv_string_list = List.map (fun x -> match x with
|
||||||
|
|Parser.Item(Tokenizer.Token(x, "ID"))-> x
|
||||||
|
|_ -> "")
|
||||||
|
fv_list in
|
||||||
|
let result_rhs = "{" ^ (List.fold_right (fun x y -> x ^ ", " ^ y) fv_string_list "") ^ "}" in
|
||||||
|
let fmt = format_of_string "
|
||||||
|
Object %s[] = %s;\n\n" in
|
||||||
|
let item_str = (Printf.sprintf fmt clo_fv result_rhs) in
|
||||||
|
let _ = (main_str := !(main_str) ^ item_str ) in
|
||||||
|
let _ = closure_counter := !closure_counter + 1 in
|
||||||
|
""
|
||||||
|
| Parser.Ls([Parser.Item(Tokenizer.Token("%def", "ID")); typ; Parser.Item(Tokenizer.Token(lhs, "ID")); y]) ->
|
||||||
|
let rhs = codegen_aux y main_str in
|
||||||
|
let fmt = format_of_string
|
||||||
|
"
|
||||||
|
Object %s;
|
||||||
|
%s.type = %s.type;
|
||||||
|
if (%s.type = \"int\"){
|
||||||
|
%s.value.inte = %s.value.inte;}
|
||||||
|
else if (%s.type = \"flo\"){
|
||||||
|
%s.value.doub = %s.value.doub;
|
||||||
|
}
|
||||||
|
else{
|
||||||
|
%s.value.func = %s.value.func;
|
||||||
|
}\n" in
|
||||||
|
let item_str = (Printf.sprintf fmt lhs lhs rhs lhs lhs rhs lhs lhs rhs lhs rhs) in
|
||||||
|
let _ = (main_str := !(main_str) ^ item_str ) in
|
||||||
|
""
|
||||||
|
| Parser.Ls([Parser.Ls(inner)]) -> (codegen_aux (Parser.Ls(inner)) main_str)
|
||||||
|
| _ -> "0";;
|
||||||
|
|
||||||
|
let (output_var_string, _) = codegen ex_parseoutput3 main_str;;
|
||||||
|
|
||||||
|
let print_main str =
|
||||||
|
let preamble = format_of_string
|
||||||
|
"
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <stdlib.h>
|
||||||
|
|
||||||
|
typedef struct Object Object;
|
||||||
|
|
||||||
|
typedef union ObjectValue{
|
||||||
|
int inte;
|
||||||
|
double doub;
|
||||||
|
char *str;
|
||||||
|
Object (*func) (Object, Object*);
|
||||||
|
|
||||||
|
} ObjectValue;
|
||||||
|
|
||||||
|
typedef struct Object{
|
||||||
|
char* type;
|
||||||
|
ObjectValue value;
|
||||||
|
Object* free_var;
|
||||||
|
} Object;
|
||||||
|
|
||||||
|
int main() {
|
||||||
|
%s
|
||||||
|
return 0;}
|
||||||
|
|
||||||
|
" in
|
||||||
|
Printf.sprintf preamble str;;
|
||||||
|
|
||||||
|
(*print_string output_var_string;; *)
|
||||||
|
|
||||||
|
print_string (print_main output_var_string);;
|
||||||
|
|
||||||
|
(*
|
||||||
|
Printf.printf "%s" (gensym ());;
|
||||||
|
Printf.printf "%s" (gensym ());;
|
||||||
|
Printf.printf "%s" (gensym ());;*)
|
98
ataabu/archive/foralise/a.c
Normal file
98
ataabu/archive/foralise/a.c
Normal file
|
@ -0,0 +1,98 @@
|
||||||
|
#include "lib.c"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
typedef struct LL {
|
||||||
|
int head;
|
||||||
|
struct LL * next;
|
||||||
|
} LL;
|
||||||
|
|
||||||
|
void main(void){
|
||||||
|
ref_hash_table = (RefHashTableItem*) malloc(sizeof(RefHashTableItem)* ref_hash_table_size);
|
||||||
|
|
||||||
|
for (size_t i=0;i<ref_hash_table_size;i++){
|
||||||
|
ref_hash_table[i].is_used = false;
|
||||||
|
}/*
|
||||||
|
|
||||||
|
RefHashTableItem a;
|
||||||
|
a.is_marked = false;
|
||||||
|
a.is_used = false;
|
||||||
|
a.refer_address = 20000;
|
||||||
|
a.refee_address = 0;
|
||||||
|
|
||||||
|
RefHashTableItem b;
|
||||||
|
b.is_marked = false;
|
||||||
|
b.is_used = false;
|
||||||
|
b.refer_address = 20000;
|
||||||
|
b.refee_address = 8964;
|
||||||
|
|
||||||
|
RefHashTableItem c;
|
||||||
|
c.is_marked = false;
|
||||||
|
c.is_used = false;
|
||||||
|
c.refer_address = 20001;
|
||||||
|
c.refee_address = 20000;
|
||||||
|
|
||||||
|
RefHashTableItem d;
|
||||||
|
d.is_marked = false;
|
||||||
|
d.is_used = false;
|
||||||
|
d.refer_address = 1895;
|
||||||
|
d.refee_address = 0;
|
||||||
|
|
||||||
|
ref_hash_table = (RefHashTableItem*) add_item(ref_hash_table, a);
|
||||||
|
ref_hash_table = (RefHashTableItem*) add_item(ref_hash_table, b);
|
||||||
|
ref_hash_table = (RefHashTableItem*) delete_items(ref_hash_table, b);
|
||||||
|
ref_hash_table = (RefHashTableItem*) add_item(ref_hash_table, c);
|
||||||
|
ref_hash_table = (RefHashTableItem*) add_item(ref_hash_table, d);
|
||||||
|
ref_hash_table = (RefHashTableItem*) add_item(ref_hash_table, b); */
|
||||||
|
|
||||||
|
LL * a = malloc(sizeof(LL));
|
||||||
|
a->next = 0;
|
||||||
|
a->head = 12;
|
||||||
|
RefHashTableItem a_;
|
||||||
|
a_.is_used = true;
|
||||||
|
a_.is_marked = UNMARKED;
|
||||||
|
a_.refer_address = (size_t) a;
|
||||||
|
a_.refee_address = (size_t) a->next;
|
||||||
|
|
||||||
|
ref_hash_table = (RefHashTableItem*) add_item(ref_hash_table, a_);
|
||||||
|
|
||||||
|
LL * b = malloc(sizeof(LL));
|
||||||
|
b->next = 0;
|
||||||
|
b->head = 88;
|
||||||
|
RefHashTableItem b_;
|
||||||
|
b_.is_used = true;
|
||||||
|
b_.is_marked = UNMARKED;
|
||||||
|
b_.refer_address = (size_t) b;
|
||||||
|
b_.refee_address = (size_t) b->next;
|
||||||
|
|
||||||
|
ref_hash_table = (RefHashTableItem*) add_item(ref_hash_table, b_);
|
||||||
|
|
||||||
|
LL * c = malloc(sizeof(LL));
|
||||||
|
c->next = b;
|
||||||
|
c->head = 99;
|
||||||
|
|
||||||
|
RefHashTableItem c1_;
|
||||||
|
c1_.is_used = true;
|
||||||
|
c1_.is_marked = UNMARKED;
|
||||||
|
c1_.refer_address = (size_t) c;
|
||||||
|
c1_.refee_address = (size_t) c->next;
|
||||||
|
|
||||||
|
RefHashTableItem c2_;
|
||||||
|
c2_.is_used = true;
|
||||||
|
c2_.is_marked = UNMARKED;
|
||||||
|
c2_.refer_address = (size_t) c;
|
||||||
|
c2_.refee_address = 0;
|
||||||
|
|
||||||
|
ref_hash_table = (RefHashTableItem*) add_item(ref_hash_table, c1_);
|
||||||
|
ref_hash_table = (RefHashTableItem*) add_item(ref_hash_table, c2_);
|
||||||
|
|
||||||
|
ref_hash_table = (RefHashTableItem*) unreachize_item(ref_hash_table, b_.refer_address);
|
||||||
|
|
||||||
|
ref_hash_table = (RefHashTableItem*) unreachize_item(ref_hash_table, a_.refer_address);
|
||||||
|
|
||||||
|
ref_hash_table = mark(ref_hash_table);
|
||||||
|
ref_hash_table = sweep(ref_hash_table);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
}
|
140
ataabu/archive/foralise/lib.c
Normal file
140
ataabu/archive/foralise/lib.c
Normal file
|
@ -0,0 +1,140 @@
|
||||||
|
#include <stdlib.h>
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <stdbool.h>
|
||||||
|
|
||||||
|
typedef struct LL LL;
|
||||||
|
|
||||||
|
typedef enum {MARKED, UNMARKED, UNREACHABLE} MarkingStatus;
|
||||||
|
|
||||||
|
typedef struct{
|
||||||
|
size_t refer_address;
|
||||||
|
size_t refee_address;
|
||||||
|
MarkingStatus is_marked;
|
||||||
|
bool is_used;
|
||||||
|
|
||||||
|
} RefHashTableItem;
|
||||||
|
|
||||||
|
size_t ref_hash_table_size = 2;
|
||||||
|
size_t number_of_item = 0;
|
||||||
|
|
||||||
|
RefHashTableItem *ref_hash_table;
|
||||||
|
|
||||||
|
RefHashTableItem* add_item(RefHashTableItem* table, RefHashTableItem item);
|
||||||
|
RefHashTableItem* delete_items(RefHashTableItem* table, RefHashTableItem item);
|
||||||
|
|
||||||
|
void mark_specific(RefHashTableItem* hash_table, size_t addr);
|
||||||
|
|
||||||
|
RefHashTableItem* mark(RefHashTableItem* hash_table){
|
||||||
|
for (size_t i =0; i<ref_hash_table_size; i++){
|
||||||
|
if (hash_table[i].is_used == true && hash_table[i].refee_address == 0 && hash_table[i].is_marked != UNREACHABLE){
|
||||||
|
hash_table[i].is_marked = MARKED;
|
||||||
|
size_t address = hash_table[i].refer_address;
|
||||||
|
mark_specific(hash_table, address);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return hash_table;
|
||||||
|
}
|
||||||
|
|
||||||
|
void mark_specific(RefHashTableItem* hash_table, size_t addr){
|
||||||
|
for (size_t i =0; i<ref_hash_table_size; i++){
|
||||||
|
if (hash_table[i].refer_address == addr){
|
||||||
|
if (hash_table[i].refee_address == 0){
|
||||||
|
hash_table[i].is_marked = MARKED;
|
||||||
|
}else{
|
||||||
|
mark_specific(hash_table, hash_table[i].refee_address);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
RefHashTableItem* sweep(RefHashTableItem* hash_table){
|
||||||
|
for (size_t i =0; i<ref_hash_table_size; i++){
|
||||||
|
if (hash_table[i].is_marked == UNREACHABLE || hash_table[i].is_marked == UNMARKED){
|
||||||
|
|
||||||
|
|
||||||
|
if (hash_table[i].refee_address == 0){
|
||||||
|
free((void*)(hash_table[i].refer_address));
|
||||||
|
hash_table = delete_items(hash_table, hash_table[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (size_t i=0; i<ref_hash_table_size; i++){
|
||||||
|
if ((hash_table[i].is_marked) != UNMARKED){
|
||||||
|
hash_table[i].is_marked = UNMARKED;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return hash_table;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
RefHashTableItem* resize_ref_table(RefHashTableItem* hash_table){
|
||||||
|
ref_hash_table_size = ref_hash_table_size * 2;
|
||||||
|
RefHashTableItem* new_hash_table = malloc(sizeof (RefHashTableItem)* ref_hash_table_size);
|
||||||
|
number_of_item = 0;
|
||||||
|
|
||||||
|
for (size_t i=0;i<ref_hash_table_size;i++){
|
||||||
|
new_hash_table[i].is_used = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (size_t i=0 ; i < (ref_hash_table_size / 2); i++){
|
||||||
|
if(hash_table[i].is_used == true){
|
||||||
|
add_item(new_hash_table, hash_table[i]);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
RefHashTableItem* old_hash_table = hash_table;
|
||||||
|
hash_table = new_hash_table;
|
||||||
|
free(old_hash_table);
|
||||||
|
return hash_table;
|
||||||
|
}
|
||||||
|
|
||||||
|
RefHashTableItem* add_item(RefHashTableItem* table, RefHashTableItem item){
|
||||||
|
if (number_of_item >= ref_hash_table_size){
|
||||||
|
table = resize_ref_table(table);
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t main_addr = item.refer_address;
|
||||||
|
size_t key = main_addr % ref_hash_table_size;
|
||||||
|
while (table[key].is_used == true){
|
||||||
|
key = (key + 1) % ref_hash_table_size;
|
||||||
|
}
|
||||||
|
|
||||||
|
table[key].refer_address = item.refer_address;
|
||||||
|
table[key].refee_address = item.refee_address;
|
||||||
|
table[key].is_marked = item.is_marked;
|
||||||
|
table[key].is_used = true;
|
||||||
|
|
||||||
|
number_of_item += 1;
|
||||||
|
|
||||||
|
return table;
|
||||||
|
}
|
||||||
|
|
||||||
|
RefHashTableItem* unreachize_item(RefHashTableItem* table, size_t addr){
|
||||||
|
|
||||||
|
size_t key = addr % ref_hash_table_size;
|
||||||
|
while (table[key].refer_address != addr || table[key].refee_address != 0){
|
||||||
|
key = (key + 1) % ref_hash_table_size;
|
||||||
|
}
|
||||||
|
|
||||||
|
table[key].is_marked = UNREACHABLE ;
|
||||||
|
|
||||||
|
return table;
|
||||||
|
}
|
||||||
|
|
||||||
|
RefHashTableItem* delete_items(RefHashTableItem* table, RefHashTableItem item){
|
||||||
|
|
||||||
|
for (unsigned i=0;i<ref_hash_table_size;i++){
|
||||||
|
if (table[i].refer_address == item.refer_address){
|
||||||
|
table[i].is_used = false;
|
||||||
|
number_of_item -= 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return table;
|
||||||
|
}
|
15
ataabu/archive/makefile
Normal file
15
ataabu/archive/makefile
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
ataabu: parser.cmx tokenizer.cmx type_inf.cmx closure_conv.cmx codegen.cmx
|
||||||
|
mkdir build
|
||||||
|
ocamlopt -o build/ataabu tokenizer.cmx parser.cmx type_inf.cmx closure_conv.cmx codegen.cmx
|
||||||
|
codegen.cmx: tokenizer.cmx parser.cmx type_inf.cmx closure_conv.cmx codegen.ml
|
||||||
|
ocamlopt -c codegen.ml
|
||||||
|
type_inf.cmx: tokenizer.cmx parser.cmx type_inf.ml
|
||||||
|
ocamlopt -c type_inf.ml
|
||||||
|
parser.cmx: tokenizer.cmx parser.ml
|
||||||
|
ocamlopt -c parser.ml
|
||||||
|
tokenizer.cmx: tokenizer.ml
|
||||||
|
ocamlopt -c tokenizer.ml
|
||||||
|
closure_conv.cmx: closure_conv.ml
|
||||||
|
ocamlopt -c closure_conv.ml
|
||||||
|
clean:
|
||||||
|
rm -rf *.cmi *.cmx *.cmo *.o build/
|
356
ataabu/archive/parser.ml
Normal file
356
ataabu/archive/parser.ml
Normal file
|
@ -0,0 +1,356 @@
|
||||||
|
open List;;
|
||||||
|
open StringLabels;;
|
||||||
|
|
||||||
|
type token = Tokenizer.token;;
|
||||||
|
|
||||||
|
|
||||||
|
type ast_tree =
|
||||||
|
| ASTFail
|
||||||
|
| Item of token
|
||||||
|
| Ls of (ast_tree list);;
|
||||||
|
|
||||||
|
let ast_example = Ls ([Item(Tokenizer.Token ("12", "INT"));Item(Tokenizer.Token ("+", "OP")); Item(Tokenizer.Token ("2", "INT")); ]);;
|
||||||
|
|
||||||
|
|
||||||
|
let not_empty_token token = match token with
|
||||||
|
| Tokenizer.Token( _ , token_type) -> match token_type with
|
||||||
|
| "SPACE" -> false
|
||||||
|
| "NL" -> false
|
||||||
|
| _ -> true;;
|
||||||
|
|
||||||
|
type parseoutput =
|
||||||
|
| Fail
|
||||||
|
| Success of ast_tree * token list;;
|
||||||
|
|
||||||
|
let consume1token ls =
|
||||||
|
match ls with
|
||||||
|
| [] -> Fail
|
||||||
|
| token :: token_rest -> Success ( Item(token), token_rest);;
|
||||||
|
|
||||||
|
|
||||||
|
let match_token_type token_type =
|
||||||
|
fun token_ls ->
|
||||||
|
let initial_result = consume1token token_ls in
|
||||||
|
match initial_result with
|
||||||
|
| Success (Item(Token(_ , type_name)) , rest) ->
|
||||||
|
if equal type_name token_type
|
||||||
|
then initial_result
|
||||||
|
else Fail
|
||||||
|
| Fail -> Fail
|
||||||
|
| _ -> Fail;;
|
||||||
|
|
||||||
|
let match_token_name_type token_name token_type =
|
||||||
|
fun token_ls ->
|
||||||
|
let initial_result = consume1token token_ls in
|
||||||
|
match initial_result with
|
||||||
|
| Success (Item(Token( nm , tp)) , rest) ->
|
||||||
|
if ((equal token_name nm) && (equal token_type tp))
|
||||||
|
then initial_result
|
||||||
|
else Fail
|
||||||
|
| Fail -> Fail
|
||||||
|
| _ -> Fail;;
|
||||||
|
|
||||||
|
|
||||||
|
let parseoutput_list2string str token =
|
||||||
|
str ^ (Tokenizer.token_to_string token);;
|
||||||
|
|
||||||
|
let rec ast2string ast_tree =
|
||||||
|
match ast_tree with
|
||||||
|
| ASTFail -> "ASTFail"
|
||||||
|
| Item(token) -> Tokenizer.token_to_string token
|
||||||
|
| Ls(ast) -> "Ls(" ^ (List.fold_left (fun str ast -> str ^ " " ^ (ast2string ast)) "" ast) ^ ")";;
|
||||||
|
|
||||||
|
let rec parseoutput2string input =
|
||||||
|
match input with
|
||||||
|
| Fail -> "Fail"
|
||||||
|
| Success(matched_ast, tkn_remained_ls) -> ast2string matched_ast ^
|
||||||
|
":::" ^ List.fold_left parseoutput_list2string "" tkn_remained_ls
|
||||||
|
|
||||||
|
|
||||||
|
let print_parseoutput input = print_string (parseoutput2string input);;
|
||||||
|
|
||||||
|
let (>>=) parseoutput parser_unit =
|
||||||
|
match parseoutput with
|
||||||
|
| Fail -> Fail
|
||||||
|
| Success(matched1 , remained1) ->
|
||||||
|
let result = parser_unit remained1 in
|
||||||
|
match result with
|
||||||
|
| Fail -> Fail
|
||||||
|
| Success (Ls([]) , remained2) ->
|
||||||
|
parseoutput
|
||||||
|
| Success (matched2 , remained2) ->
|
||||||
|
match matched1 with
|
||||||
|
| Ls(matched_list1) -> Success (Ls(append matched_list1 [matched2]), remained2)
|
||||||
|
| Item(matched_item1) -> Success (Ls(append [matched1] [matched2]), remained2)
|
||||||
|
| ASTFail -> Fail;;
|
||||||
|
|
||||||
|
(** Ordered choice between parser units: try [parser_unit1]; only when it
    returns [Fail], run [parser_unit2] on the same input. *)
let (||) parser_unit1 parser_unit2 = fun parseoutput ->
  match parser_unit1 parseoutput with
  | Success _ as first_hit -> first_hit
  | Fail -> parser_unit2 parseoutput;;
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
(** Kleene-star sequencing: repeatedly apply [parser] while it keeps
    producing a list match, returning the last successful accumulation.
    [Fail] input propagates; if [parser] never matches, [input] is returned
    unchanged.
    Fix: the original tested [input == Fail] with physical equality — it
    only works because [Fail] is a constant constructor; pattern matching is
    the reliable, idiomatic test.  The unused [remained_tokens] binding is
    also dropped. *)
let rec ( >>=* ) input parser =
  match input with
  | Fail -> Fail
  | Success _ ->
      (match input >>= parser with
       | Success (Ls _, _) as middle0 -> middle0 >>=* parser
       | _ -> input)
|
||||||
|
(** Normalize parser output lists:
    - [[lhs; Ls (op :: rhs)]] becomes prefix form [Ls [op; lhs; rest]]
      (the left operand is pulled under the operator, the right side is
      normalized recursively);
    - a singleton [Item] is unwrapped;
    - a singleton nested [Ls] is flattened recursively;
    - anything else is returned untouched. *)
let rec correct_list = function
  | Ls [lhs; Ls (op :: rhs)] -> Ls [op; lhs; correct_list (Ls rhs)]
  | Ls [Item (Token (_, _)) as leaf] -> leaf
  | Ls [Ls inner] -> correct_list (Ls inner)
  | other -> other
|
||||||
|
|
||||||
|
(** Mutually recursive grammar of the toy language, as parser units over a
    token list.  Each production wraps the input in an empty accumulator
    [Success (Ls [], tokens)], runs the combinator chain, then rewrites the
    raw match list into a normalized prefix-form AST.
    Fixes: unused pattern bindings are prefixed with [_] (silencing
    unused-variable warnings) and the identity match in [stmts] is removed;
    behavior is otherwise unchanged. *)

(* item2 = "(" expr ")" | FLO | INT | ID *)
let rec item2 token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 =
    wrapper
    >>= ((fun i ->
            Success (Ls [], i)
            >>= (match_token_name_type "(" "PAREN")
            >>= expr
            >>= (match_token_name_type ")" "PAREN"))
         || (match_token_type "FLO")
         || (match_token_type "INT")
         || (match_token_type "ID"))
  in
  match result1 with
  (* strip the surrounding parentheses from a parenthesized expression *)
  | Success (Ls [Ls [Item (Token ("(", "PAREN")); x; Item (Token (")", "PAREN"))]], remained) ->
      Success (correct_list x, remained)
  | Success (ls, remained) -> Success (correct_list ls, remained)
  | _ -> result1

(* args = "(" [typ1 arg1 ("," typN argN)*] ")" *)
and args token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 =
    wrapper
    >>= (match_token_name_type "(" "PAREN")
    >>=* (fun i ->
            Success (Ls [], i)
            >>= (match_token_type "ID")
            >>= (match_token_type "ID")
            >>=* (fun i ->
                    Success (Ls [], i)
                    >>= (match_token_type "COMMA")
                    >>= (match_token_type "ID")
                    >>= (match_token_type "ID")))
    >>= (match_token_name_type ")" "PAREN")
  in
  match result1 with
  | Success (Ls (_left_paren :: Ls (typ1 :: var1 :: other) :: _right_paren), remained) ->
      (* every (typ, var) pair after the first carries its leading comma; drop it *)
      let remove_comma = fun ls ->
        match ls with Ls [_comma; typ; var] -> Ls [typ; var] | _ -> ls
      in
      let other_removed_comma = List.map remove_comma other in
      Success (Ls (Item (Token ("%args", "ID")) :: Ls [typ1; var1] :: other_removed_comma),
               remained)
  | Success (Ls (_left_paren :: _right_paren), remained) ->
      (* empty argument list: just the %args marker *)
      Success (Ls [Item (Token ("%args", "ID"))], remained)
  | _ -> result1

(* item = "lambda" args "{" stmts "}" | item2 *)
and item token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 =
    wrapper
    >>= ((fun i ->
            Success (Ls [], i)
            >>= (match_token_name_type "lambda" "ID")
            >>= args
            >>= (match_token_name_type "{" "BRACE")
            >>= stmts
            >>= (match_token_name_type "}" "BRACE"))
         || fun i -> Success (Ls [], i) >>= item2)
  in
  match result1 with
  (* lambda with an empty body *)
  | Success (Ls [Ls (Item (Token ("lambda", "ID")) :: args :: _l_brace :: [Item (Token ("}", "BRACE"))])], remained) ->
      Success (Ls [Item (Token ("lambda", "ID")); args; Ls []], remained)
  (* lambda with a body: keep [lambda; args; body], drop the braces *)
  | Success (Ls [Ls (Item (Token ("lambda", "ID")) :: args :: _l_brace :: body :: _r_brace)], remained) ->
      Success (Ls [Item (Token ("lambda", "ID")); args; body], remained)
  (* bare item2 token: unwrap it *)
  | Success (Ls [Ls [Item (Token (x, y))]], remained) ->
      Success (Item (Token (x, y)), remained)
  | _ -> result1

(* "(" item ("," item)* ")" — comma-separated call arguments *)
and factor_more_callees token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 =
    wrapper
    >>= (match_token_name_type "(" "PAREN")
    >>= item
    >>=* (fun i -> Success (Ls [], i) >>= (match_token_type "COMMA") >>= item)
    >>= (match_token_name_type ")" "PAREN")
  in
  match result1 with
  | Success (Ls (Item (Token ("(", "PAREN")) :: first_callee :: rest_lst), y) ->
      (* the only bare Item left in [rest_lst] is the closing paren; drop it *)
      let lst_without_r_paren =
        filter (fun x -> match x with Item _ -> false | _ -> true) rest_lst
      in
      (* [Ls [","; item]] pairs collapse to the item itself *)
      let remove_comma = fun ls ->
        match ls with Ls [Item (Token (",", "COMMA")); x] -> x | _ -> ls
      in
      let lst_removed_comma = List.map remove_comma lst_without_r_paren in
      Success (Ls (Item (Token ("%callee", "ID")) :: first_callee :: lst_removed_comma), y)
  | _ -> result1

(* factor = item "(" ")" | item "(" callees ")" | item *)
and factor token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 =
    wrapper
    >>= ((fun i ->
            Success (Ls [], i)
            >>= item
            >>= (match_token_name_type "(" "PAREN")
            >>= (match_token_name_type ")" "PAREN"))
         || (fun i -> Success (Ls [], i) >>= item >>= factor_more_callees)
         || item)
  in
  match result1 with
  | Success (ASTFail, _) -> result1
  | Fail -> Fail
  | Success (Item _, _) -> result1
  | Success (Ls other, remained) ->
      let result2 = Success (correct_list (Ls other), remained) in
      (match result2 with
       (* nullary application: f() *)
       | Success (Ls [caller; Item (Token ("(", "PAREN")); Item (Token (")", "PAREN"))], remained) ->
           Success (Ls [Item (Token ("%apply", "ID")); caller], remained)
       (* unary application: f(x) *)
       | Success (Ls [caller; Item (Token ("(", "PAREN")); callee; Item (Token (")", "PAREN"))], remained) ->
           Success (Ls [Item (Token ("%apply", "ID")); caller; callee], remained)
       (* n-ary application built from a %callee list: swap in %apply and
          drop empty sublists *)
       | Success (Ls (Item (Token ("%callee", "ID")) :: op :: rest), remained) ->
           let l1 = Item (Token ("%apply", "ID")) :: op :: rest in
           let l2 = List.filter (fun x -> match x with Ls [] -> false | _ -> true) l1 in
           Success (Ls l2, remained)
       | _ -> result2)

(*
(* ( */ factor) *)
let rec factor_rest token_list =
  let wrapper = Success(Ls([]), token_list) in
  let result1 = wrapper >>= ((match_token_name_type "*" "OP") || (match_token_name_type "/" "OP")) >>= (match_token_type "INT") >>= term_rest in
  match result1 with
  | Success(Ls(_), remained_tokens) -> result1
  | _ -> wrapper *)

(* ( ("*" | "/") factor )* — multiplicative tail; matching nothing is fine *)
and term_rest token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 =
    wrapper
    >>= ((match_token_name_type "*" "OP") || (match_token_name_type "/" "OP"))
    >>= factor
    >>= term_rest
  in
  match result1 with
  | Success (Ls _, _remained_tokens) -> result1
  | _ -> wrapper

(* term = factor ( ("*" | "/") factor )* *)
and term token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 = wrapper >>= factor >>= term_rest in
  match result1 with
  | Success (Ls x, remained) -> Success (correct_list (Ls x), remained)
  | _ -> result1

(* ( ("+" | "-") term )* — additive tail; matching nothing is fine *)
and expr_rest token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 =
    wrapper
    >>= ((match_token_name_type "+" "OP") || (match_token_name_type "-" "OP"))
    >>= term
    >>= expr_rest
  in
  match result1 with
  (* splice a nested tail list up one level *)
  | Success (Ls [Item x; Ls lists], remained_tokens) ->
      Success (Ls (Item x :: lists), remained_tokens)
  | Success (Ls _, _remained_tokens) -> result1
  | _ -> wrapper

(* expr = term ( ("+" | "-") term )* *)
and expr token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 = wrapper >>= term >>= expr_rest in
  match result1 with
  | Success (Ls x, remained) -> Success (correct_list (Ls x), remained)
  | _ -> result1

(* type = ID | "(" type "->" type ")" *)
and type_ token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 =
    wrapper
    >>= ((fun i ->
            Success (Ls [], i)
            >>= (match_token_name_type "(" "PAREN")
            >>= type_
            >>= (match_token_name_type "->" "IMPLY")
            >>= type_
            >>= (match_token_name_type ")" "PAREN"))
         || (match_token_type "ID"))
  in
  match result1 with
  | Success (Ls [Item x], remained_tokens) -> Success (Item x, remained_tokens)
  (* rewrite "(" lhs "->" rhs ")" into prefix form [->; lhs; rhs] *)
  | Success (Ls [Ls (_l_paren :: lhs :: imply :: rhs :: _r_paren)], remained_tokens) ->
      Success (Ls (imply :: lhs :: [rhs]), remained_tokens)
  | _ -> result1

(* var_def = type ID "=" expr *)
and var_def token_list =
  let wrapper = Success (Ls [], token_list) in
  let result1 =
    wrapper >>= type_ >>= (match_token_type "ID") >>= (match_token_type "ASSIGN") >>= expr
  in
  match result1 with
  | Success (Ls (typ :: var :: _assign :: expr), remained_tokens) ->
      Success (Ls (Item (Token ("%def", "ID")) :: typ :: var :: expr), remained_tokens)
  | _ -> wrapper

(* one_statement = (expr | var_def) ";" ; SPACE/NL tokens are dropped first *)
and one_statement token_list =
  let token_list2 =
    List.filter
      (fun x ->
        match x with
        | Tokenizer.Token (_, "SPACE") -> false
        | Tokenizer.Token (_, "NL") -> false
        | _ -> true)
      token_list
  in
  let wrapper = Success (Ls [], token_list2) in
  let result1 =
    wrapper
    >>= ((fun i -> Success (Ls [], i) >>= expr >>= (match_token_name_type ";" "SEMICO"))
         || (fun i -> Success (Ls [], i) >>= var_def >>= (match_token_name_type ";" "SEMICO")))
  in
  match result1 with
  | Success (Ls lst, remained_tokens) ->
      let lst2 = correct_list (Ls lst) in
      let lst2_inner = match lst2 with Ls lst2_inner -> lst2_inner | _ -> [lst2] in
      (* the trailing semicolon is syntax only; drop it from the AST *)
      let lst_remove_semicolon =
        List.filter
          (fun x -> match x with Item (Token (_, "SEMICO")) -> false | _ -> true)
          lst2_inner
      in
      Success (correct_list (Ls lst_remove_semicolon), remained_tokens)
  | _ -> result1

(* stmts = one_statement* *)
and stmts token_list =
  (* both arms of the original trailing match returned the result unchanged,
     so the match is dropped *)
  Success (Ls [], token_list) >>=* one_statement;;
|
||||||
|
(*examples
|
||||||
|
|
||||||
|
let ex_token_list = Tokenizer.total_parser "lambda(int y){12;};";;
|
||||||
|
|
||||||
|
List.iter Tokenizer.print_token ex_token_list;;
|
||||||
|
print_string "字串輸出結果";
|
||||||
|
|
||||||
|
print_parseoutput (one_statement ex_token_list);;
|
||||||
|
print_string "\n\n";;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
let ex_token_list = Tokenizer.total_parser "(2);";;
|
||||||
|
|
||||||
|
(* List.iter Tokenizer.print_token ex_token_list;; *)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
print_parseoutput (one_statement ex_token_list);;
|
||||||
|
print_string "\n\n";;
|
||||||
|
|
||||||
|
|
||||||
|
let ex_token_list = Tokenizer.total_parser "7/(5+6)*7;";;
|
||||||
|
print_parseoutput (stmts ex_token_list);;
|
||||||
|
|
||||||
|
print_string "\n\n";;
|
||||||
|
let ex_token_list = Tokenizer.total_parser "(7/(10-6)*a);";;
|
||||||
|
print_parseoutput (stmts ex_token_list);;
|
||||||
|
|
||||||
|
print_string "\n\n";;
|
||||||
|
let ex_token_list = Tokenizer.total_parser "a(b);";;
|
||||||
|
print_parseoutput (stmts ex_token_list);;
|
||||||
|
|
||||||
|
print_string "\n\n";;
|
||||||
|
let ex_token_list = Tokenizer.total_parser "a();";;
|
||||||
|
print_parseoutput (stmts ex_token_list);;
|
||||||
|
|
||||||
|
print_string "\n\n";;
|
||||||
|
let ex_token_list = Tokenizer.total_parser "a(b,c,a);";;
|
||||||
|
print_parseoutput (stmts ex_token_list);;
|
||||||
|
|
||||||
|
|
||||||
|
print_string "\n\n";;*)
|
||||||
|
(*let ex_token_list = Tokenizer.total_parser "(int -> int) a = 2+ 3;a + b;";;
|
||||||
|
print_parseoutput (stmts ex_token_list);;*)
|
||||||
|
(*print_string "\n\n";;
|
||||||
|
|
||||||
|
|
||||||
|
let ex_token_list = Tokenizer.total_parser "(int-> int) foo = lambda(int c){12;};foo(13);";;
|
||||||
|
print_string "ABACABRA";;
|
||||||
|
print_parseoutput (stmts ex_token_list);;
|
||||||
|
|
||||||
|
print_string "\n\n";;
|
||||||
|
let ex_token_list = Tokenizer.total_parser "lambda(a){12;};";;
|
||||||
|
print_parseoutput (stmts ex_token_list);;
|
||||||
|
|
||||||
|
print_string "\n\n";;
|
||||||
|
let ex_token_list = Tokenizer.total_parser "lambda(){};";;
|
||||||
|
print_parseoutput (stmts ex_token_list);;
|
||||||
|
|
||||||
|
print_string "\n\n";;
|
||||||
|
let ex_token_list = Tokenizer.total_parser "lambda(x){x;}(12);";;
|
||||||
|
print_parseoutput (stmts ex_token_list);;
|
||||||
|
|
||||||
|
*)
|
292
ataabu/archive/tokenizer.ml
Normal file
292
ataabu/archive/tokenizer.ml
Normal file
|
@ -0,0 +1,292 @@
|
||||||
|
open Uchar;;
|
||||||
|
open List;;
|
||||||
|
open StringLabels;;
|
||||||
|
|
||||||
|
(* TODO: Add parsing attribute and operator + - * / \n \r # = == < > ; ( ) [ ]*)
|
||||||
|
|
||||||
|
(** Result of one tokenizer step: [Fail], or [Success (matched, rest)] where
    [matched] is the text consumed so far and [rest] is the remaining input. *)
type tokenizeroutput =
| Fail
| Success of string * string;;

(** A lexical token: [Token (text, kind)], e.g. [Token ("+", "OP")]. *)
type token = Token of string * string;;

(** Pairs a tokenizer result with the token-kind label it was produced under
    (used by the [||**] choice operator in [total_parser_aux]). *)
type aux_middle = Aux of tokenizeroutput * string

(** Raised by [match_range] when the range bounds are inverted. *)
exception IndexException of string
||||||
|
(** [token_to_string t] renders [t] as [Token("text", "kind")]. *)
let token_to_string token =
  match token with
  | Token (str, token_type) ->
      "Token(\"" ^ str ^ "\", \"" ^ token_type ^ "\")";;
|
||||||
|
|
||||||
|
(** Print one token to stdout (no trailing newline). *)
let print_token token = token |> token_to_string |> print_string;;
|
||||||
|
(** Debug-print a tokenizer step result, newline-terminated. *)
let print_parse_output output =
  match output with
  | Success (matched, rest) -> Printf.printf "Success(%s, %s)\n" matched rest
  | Fail -> print_string "Fail!\n";;
|
||||||
|
|
||||||
|
(** Split off the first character of [str]: [Fail] on the empty string,
    otherwise [Success (head, tail)] (single-byte split). *)
let consume1char str =
  if str = "" then Fail
  else Success (sub str 0 1, sub str 1 (length str - 1));;
|
||||||
|
(** [match_range min max] consumes one character and succeeds iff its code
    point lies between those of [min] and [max] (first byte of each bound).
    @raise IndexException when [min] > [max] (inverted range).
    Fix: the original exception message claimed [min] "should be not less
    than" [max] — the opposite of the invariant actually checked. *)
let match_range min max =
  fun input ->
    if min > max then
      raise (IndexException (min ^ " should not be greater than " ^ max))
    else
      let initial_result = consume1char input in
      match initial_result with
      | Fail -> Fail
      | Success (fst, rest) ->
          let fst_code = to_int (of_char (get fst 0)) in
          let max_code = to_int (of_char (get max 0)) in
          let min_code = to_int (of_char (get min 0)) in
          if fst_code >= min_code && fst_code <= max_code
          then Success (fst, rest)
          else Fail;;
|
||||||
|
(** [match_char pattern] consumes one character and succeeds iff it equals
    [pattern]. *)
let match_char pattern = fun input ->
  match consume1char input with
  | Success (first, rest) when equal first pattern -> Success (first, rest)
  | _ -> Fail;;
|
||||||
|
(** Sequence two tokenizer steps: run [parser] on the remainder of [input]
    and concatenate the matched strings.  [Fail] propagates. *)
let (>>=) input parser =
  match input with
  | Fail -> input
  | Success (first, rest0) ->
      (match parser rest0 with
       | Fail -> Fail
       | Success (second, rest) -> Success (first ^ second, rest));;
|
||||||
|
(** Kleene star for tokenizer steps: apply [parser] zero or more times,
    keeping the last success.  [Fail] input propagates; if [parser] never
    matches, [input] is returned unchanged.
    Fixes: physical equality ([==]) on the variant replaced by pattern
    matching, and the original's redundant first application of [parser]
    (its result was discarded and then recomputed inside the recursion) is
    removed — the parsers here are pure string functions, so behavior is
    unchanged. *)
let ( >>=* ) (input : tokenizeroutput) parser =
  let rec go acc =
    match acc >>= parser with
    | Fail -> acc
    | Success _ as next -> go next
  in
  match input with
  | Fail -> Fail
  | Success _ -> go input;;
|
||||||
|
|
||||||
|
(** [not_match_char pattern] consumes one character and succeeds iff it
    differs from [pattern]. *)
let not_match_char pattern = fun input ->
  match consume1char input with
  | Success (first, rest) when not (equal first pattern) -> Success (first, rest)
  | _ -> Fail;;
|
||||||
|
(** Optional sequencing: like [>>=], but a failed step leaves [input]
    unchanged instead of failing. *)
let ( >>=? ) (input : tokenizeroutput) parser =
  match input >>= parser with
  | Fail -> input
  | Success _ as progressed -> progressed;;
|
||||||
|
(** Ordered choice of tokenizer steps: try [parser1]; only on [Fail] run
    [parser2] on the same input. *)
let ( || ) parser1 parser2 = fun input ->
  match parser1 input with
  | Success _ as hit -> hit
  | Fail -> parser2 input;;
|
||||||
|
|
||||||
|
(** Ordered choice over labeled results ([aux_middle]): keep [parser1]'s
    result when it succeeded, otherwise return [parser2]'s result as-is
    (including its label). *)
let ( ||** ) parser1 parser2 = fun input ->
  match parser1 input with
  | Aux (Success _, _) as hit -> hit
  | Aux (Fail, _) -> parser2 input;;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
(** int ::= sign* digit digit*
    NOTE(review): the sign uses [>>=*], so repeated signs ("++1") are
    accepted; presumably a single optional sign ([>>=?]) was intended —
    confirm before changing. *)
let parse_int input =
  (input >>=* ((match_char "+") || (match_char "-")))
  >>= match_range "0" "9"
  >>=* match_range "0" "9";;
|
||||||
|
(** float ::= sign* digit+ "." digit+
    NOTE(review): as in [parse_int], [>>=*] on the sign accepts repeated
    signs — confirm intended. *)
let parse_float input =
  (input >>=* ((match_char "+") || (match_char "-")))
  >>= match_range "0" "9"
  >>=* match_range "0" "9"
  >>= match_char "."
  >>= match_range "0" "9"
  >>=* match_range "0" "9";;
|
||||||
|
(*concat 2 parser
|
||||||
|
let ( >> ) parser1 parser2 =
|
||||||
|
fun input ->
|
||||||
|
if input == Fail then
|
||||||
|
Fail
|
||||||
|
else
|
||||||
|
let middle1 = parser1 input in
|
||||||
|
match middle1 with
|
||||||
|
| Fail -> Fail
|
||||||
|
| Success (_ , _) -> parser2 input *)
|
||||||
|
|
||||||
|
(** One character inside a string literal: an escaped quote [\"] or any
    single character other than ["]. *)
let inside_quote_mark =
  (fun i -> Success ("", i) >>= match_char "\\" >>= match_char "\"")
  || (fun i -> Success ("", i) >>= not_match_char "\"");;
|
||||||
|
(** string ::= '"' inside_quote_mark* '"' *)
let parse_string input =
  (input >>= match_char "\"")
  >>=* inside_quote_mark
  >>= match_char "\"";;
|
||||||
|
(** op ::= "+" | "-" | "*" | "/" | "%" *)
let parse_operator input = input >>= ((match_char "+") || (match_char "-") || (match_char "*") || (match_char "/") || (match_char "%"));;

(** The "#" marker (comments per the language draft). *)
let parse_number_mark input = input >>= (match_char "#");;

(** "==" — NOTE(review): not wired into [total_parser_aux], so "==" lexes as
    two ASSIGN tokens there; confirm intended. *)
let parse_equal input = input >>= (match_char "=") >>= (match_char "=");; (* == *)

(** "->" (function-type arrow). *)
let parse_imply input = input >>= (match_char "-") >>= (match_char ">");; (* -> *)

(** "=" (assignment). *)
let parse_assign input = input >>= (match_char "=");;

(** ";" *)
let parse_semicolon input = input >>= (match_char ";");;

(** "," *)
let parse_comma input = input >>= (match_char ",");;

(** "(" or ")" *)
let parse_parenthesis input = input >>= ((match_char "(")|| (match_char ")"));;

(** "[" or "]" *)
let parse_bracket input = input >>= ((match_char "[")|| (match_char "]"));;

(** "{" or "}" *)
let parse_brace input = input >>= ((match_char "{")|| (match_char "}"));;

(** A single newline character. *)
let parse_newline input = input >>= (match_char "\n");;

(** One or more spaces, folded into a single SPACE token. *)
let parse_spaces input = input >>= (match_char " ") >>=* (match_char " ");;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
(** id ::= (letter | "_") (letter | digit | "_")*   (ASCII letters only) *)
let parse_id input =
  input
  >>= ((match_char "_") || (match_range "a" "z") || (match_range "A" "Z"))
  >>=* ((match_char "_") || (match_range "0" "9")
        || (match_range "a" "z") || (match_range "A" "Z"));;
|
||||||
|
(** Tokenize all remaining input.
    [input] is the running step result (its remainder is what is left to
    lex); [list] accumulates tokens in order.
    Stops when the remainder is empty; on an unmatchable prefix it prints
    "Error" and returns the empty list, discarding [list]. *)
let rec total_parser_aux input list =
  match input with
  | Success(_,"") -> list
  | _ ->
    (* Alternatives are ordered: identifiers first; floats before ints so the
       longer match wins; the two-char "->" before single-char operators.
       NOTE(review): [parse_equal] ("==") is never tried here — confirm. *)
    let initial = ((fun i -> Aux ((parse_id i), "ID"))
    ||** (fun i -> Aux ((parse_float i) ,"FLO"))
    ||** (fun i -> Aux ((parse_int i) ,"INT"))
    ||** (fun i -> Aux ((parse_imply i), "IMPLY"))
    ||** (fun i -> Aux ((parse_operator i) ,"OP"))
    ||** (fun i -> Aux ((parse_number_mark i) ,"NUM_MRK"))
    ||** (fun i -> Aux ((parse_brace i) ,"BRACE"))
    ||** (fun i -> Aux ((parse_comma i) ,"COMMA"))
    ||** (fun i -> Aux ((parse_assign i), "ASSIGN"))
    ||** (fun i -> Aux ((parse_bracket i) ,"BRACK"))
    ||** (fun i -> Aux ((parse_parenthesis i) ,"PAREN"))
    ||** (fun i -> Aux ((parse_semicolon i), "SEMICO"))
    ||** (fun i -> Aux ((parse_newline i), "NL"))
    ||** (fun i -> Aux ((parse_spaces i) ,"SPACE")))
    input in
    match initial with
    | Aux (Fail, _) -> let _ = print_string "Error" in []
    (* restart with an empty accumulator string on the unconsumed remainder *)
    | Aux (Success(matched, remained), token_type) -> total_parser_aux (
      Success("", remained))
      (append list [Token(matched, token_type );]);;
|
||||||
|
|
||||||
|
(** Tokenize [input] from scratch, returning the token list.
    Fix: the original [rec] flag was unused — this function is not
    recursive — so it is dropped (silences warning 39). *)
let total_parser input = total_parser_aux (Success("", input)) [];;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
(* tests
|
||||||
|
|
||||||
|
List.iter (print_token) (total_parser "lambda(x){let a = 2;
|
||||||
|
|
||||||
|
return a + x;};");;
|
||||||
|
List.iter (print_token) (total_parser "12+中34;");;
|
||||||
|
|
||||||
|
print_parse_output (parse_id (Success ("", "_")));;
|
||||||
|
print_parse_output (parse_id (Success ("", "_abc12c")));;
|
||||||
|
print_parse_output (parse_id (Success ("", "_9")));;
|
||||||
|
print_parse_output (parse_id (Success ("", "a_9A")));;
|
||||||
|
print_parse_output (parse_id (Success ("", "if")));;
|
||||||
|
|
||||||
|
|
||||||
|
print_parse_output (parse_float (Success ("", "+2.0;")));;
|
||||||
|
print_parse_output (parse_id (Success ("", "Class")));;
|
||||||
|
print_parse_output (parse_id (Success ("", "BIGLETTER123__")));;
|
||||||
|
print_parse_output (parse_id (Success ("", "12a")));;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
print_string ("Test 5\n");;
|
||||||
|
print_parse_output (((Success ("", "+1234a")) >>=? (match_char "+")) >>=* (match_range "0" "9"));;
|
||||||
|
print_parse_output (((Success ("", "1234a")) >>=? (match_char "+")) >>=* (match_range "0" "9"));;
|
||||||
|
print_parse_output (((Success ("", "-1234a")) >>=? (match_char "+")) >>=* (match_range "0" "9"));;
|
||||||
|
print_parse_output ((Success ("", "-1234a")) >>=* (match_range "0" "9"));;
|
||||||
|
|
||||||
|
|
||||||
|
print_string ("Test 6\n");;
|
||||||
|
print_parse_output ((Success ("", "+1234a")) >>= ( (match_char "+") || (match_char "-")));;
|
||||||
|
print_parse_output ((Success ("", "-1234a")) >>= ( (match_char "+") || (match_char "-")));;
|
||||||
|
print_parse_output ((Success ("", "1234a")) >>= ( (match_char "+") || (match_char "-")));;
|
||||||
|
|
||||||
|
print_string ("Test 7\n");;
|
||||||
|
print_parse_output (parse_int (Success ("", "+1234a")));
|
||||||
|
print_parse_output (parse_int (Success ("", "-1234a")));
|
||||||
|
print_parse_output (parse_int (Success ("", "1234a")));
|
||||||
|
print_parse_output (parse_int (Success ("", "+a")));
|
||||||
|
|
||||||
|
print_string ("Test 8\n");;
|
||||||
|
print_parse_output (parse_float (Success ("", "+1234.58a")));
|
||||||
|
print_parse_output (parse_float (Success ("", "-1234.58a")));
|
||||||
|
print_parse_output (parse_float (Success ("", "0.0a")));
|
||||||
|
print_parse_output (parse_float (Success ("", "+0.58a")));
|
||||||
|
print_parse_output (parse_float (Success ("", "0.58a")));
|
||||||
|
print_parse_output (parse_float (Success ("", "-0.58a")));
|
||||||
|
print_parse_output (parse_float (Success ("", "1234.8a")));
|
||||||
|
print_parse_output (parse_float (Success ("", "1234a")));
|
||||||
|
print_parse_output (parse_float (Success ("", "+1234a")));
|
||||||
|
print_parse_output (parse_float (Success ("", "-1234a")));
|
||||||
|
|
||||||
|
print_string ("Test 9\n");;
|
||||||
|
(* print_parse_output (inside_quote_mark (Success ("", "abc"))); *)
|
||||||
|
(* print_parse_output (inside_quote_mark (Success ("", "\"abc"))); *)
|
||||||
|
(* print_parse_output (inside_quote_mark (Success ("", "\\\"abc"))); *)
|
||||||
|
print_parse_output (parse_string (Success ("","\"123\"")));;
|
||||||
|
print_parse_output (parse_string (Success ("","\"12\\\"3\"")));;
|
||||||
|
print_parse_output (parse_string (Success ("","\"\\\"\\\"\"")));;
|
||||||
|
print_parse_output (parse_string (Success ("","\"\"")));;
|
||||||
|
|
||||||
|
*)
|
141
ataabu/archive/type_inf.ml
Normal file
141
ataabu/archive/type_inf.ml
Normal file
|
@ -0,0 +1,141 @@
|
||||||
|
open Hashtbl;;
|
||||||
|
type token = Tokenizer.token;;
|
||||||
|
type ast_tree = Parser.ast_tree;;
|
||||||
|
|
||||||
|
|
||||||
|
(** Types inferred for the toy language. *)
type type_ =
| Void                               (* no value: produced by definitions *)
| Simp of string                     (* atomic type; tag is the upper-cased name, e.g. "INT" *)
| Imply of (type_ * type_)           (* function type: argument -> result *)
| OpType of (type_ * type_ * type_)  (* binary operator: lhs, rhs -> result *)
| TypeError of string;;              (* inference failure carrying a message *)
|
||||||
|
(** Render a type for diagnostics.
    NOTE(review): [Simp] prints with the label "Unit(...)" rather than
    "Simp(...)" — presumably a leftover name; confirm before changing the
    output format. *)
let rec type2string typ =
  match typ with
  | Void -> "Void"
  | Simp a -> "Unit(" ^ a ^ ")"
  | Imply (a, b) -> "(" ^ type2string a ^ "->" ^ type2string b ^ ")"
  | OpType (a, b, c) ->
      "(" ^ type2string a ^ "," ^ type2string b ^ "->" ^ type2string c ^ ")"
  | TypeError msg -> "TypeError: " ^ msg;;
|
||||||
|
|
||||||
|
|
||||||
|
(** Structural equality on {!type_}: same constructor and equal components.
    Mismatched constructors compare unequal. *)
let rec equal_type x y =
  match x, y with
  | Void, Void -> true
  | Simp a, Simp b -> a = b
  | Imply (a, b), Imply (c, d) -> equal_type a c && equal_type b d
  | OpType (a, b, c), OpType (d, e, f) ->
      equal_type a d && equal_type b e && equal_type c f
  | TypeError m1, TypeError m2 -> m1 = m2
  | _, _ -> false;;
|
||||||
|
|
||||||
|
|
||||||
|
(** Global (outermost-scope) table mapping names to types. *)
let type_inference_table = Hashtbl.create 10;;

(* Built-in integer arithmetic operators.
   NOTE(review): "/" has no entry, so dividing makes the Hashtbl.find in
   [line_infer] raise Not_found — confirm whether that is intended. *)
Hashtbl.add type_inference_table "+" (OpType(Simp("INT") , Simp("INT" ) , Simp("INT")));;
Hashtbl.add type_inference_table "-" (OpType(Simp("INT"), Simp("INT") , Simp("INT")));;
Hashtbl.add type_inference_table "*" (OpType(Simp("INT") , Simp("INT"), Simp("INT")));;

(* Stack of scope tables, innermost first (global table at the bottom). *)
let type_inference_table_list = ref [type_inference_table;];;
(* Names already defined, one string list per scope, innermost first. *)
let reference_list = ref [[""]];;

let ref_list = ref[["c"; "d"];[];["a"; "b"]];; (*prepared for testing*)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
(** Infer the type of one AST line [l].
    [ref_list] is a mutable stack of per-scope defined-name lists (innermost
    first); [type_list] is a mutable stack of per-scope name->type tables.
    Both are updated in place as definitions and variable uses are seen.
    Returns the inferred {!type_}, or [TypeError _] on failure. *)
let rec line_infer l ref_list type_list =
  match l with
  (*lambda *)
  (*TODO: lambda as a input argument*)
  | Parser.Ls ([Parser.Item(Tokenizer.Token("lambda", "ID")); Parser.Ls([Parser.Item(Tokenizer.Token("%args", "ID"));
      Parser.Ls([Parser.Item(Tokenizer.Token(typ, "ID"));Parser.Item(Tokenizer.Token(id, "ID"))])]); Parser.Ls(body)]) ->
    (* Open a fresh scope holding the single parameter, infer every body
       line in it, and take the last body line's type as the result type. *)
    (let new_ref_list = ref ([ ]::!ref_list) in
    let new_type_inference_table_block = Hashtbl.create 10 in
    (* type tags are stored upper-cased, matching token kinds like "INT" *)
    let real_typ = Simp(String.uppercase_ascii typ) in
    let _ = Hashtbl.add new_type_inference_table_block id real_typ in
    let new_type_inference_table_list = ref (new_type_inference_table_block::!type_list) in
    let a = (List.map (fun x -> line_infer x new_ref_list new_type_inference_table_list) body) in
    (* NOTE(review): List.nth on the last element fails on an empty body —
       the parser emits Ls [] for "lambda(...){}"; confirm handling. *)
    let last_of_a = (List.nth a ((List.length a)-1)) in
    Imply(real_typ, last_of_a))

  (*definition: %def typ id rhs *)
  | Parser.Ls ([Parser.Item(Tokenizer.Token("%def", "ID")); typ; Parser.Item(Tokenizer.Token(id, "ID")); rhs]) ->
    (let rhs_type = line_infer rhs ref_list type_list in
    (* let _ = print_string ("~~~" ^ id ^ "~~") in
    let _ = (List.map print_string (List.nth !ref_list 0))in
    let _ = print_string "~~~ " in *)
    (* reject re-definition within the current (innermost) scope *)
    if List.mem id (List.nth !ref_list 0) then
      TypeError ("duplicated defined.")
    else(
      match typ with
      | Parser.Item(Tokenizer.Token(simp, "ID")) ->
        if equal_type (Simp(String.uppercase_ascii simp)) rhs_type
        then
          (* record the name in the current scope and in the type table;
             NOTE(review): this always writes the *global*
             [type_inference_table], not the innermost table — confirm. *)
          let _ = (ref_list := (id::List.hd !ref_list)::(List.tl !ref_list)) in
          let _ = Hashtbl.add type_inference_table id (Simp(String.uppercase_ascii simp)) in
          Void
        else TypeError ("lhs and rhs type unmatched.")
      (*lambda : todo*)
      | _ -> Void))

  (* application: %apply caller callee *)
  | Parser.Ls ([Parser.Item(Tokenizer.Token("%apply", "ID")); caller; callee]) ->
    ( let caller_type = line_infer caller ref_list type_list in
    let callee_type = line_infer callee ref_list type_list in
    (* the caller must be a function whose argument type matches the callee *)
    (match caller_type with
    | Imply(arg_type,ret_type) -> if equal_type callee_type arg_type then ret_type else TypeError ("arg type and callee type unmatched.")
    | _ -> TypeError ("arg type and callee type unmatched.")
    ))

  (* operator: op lhs rhs *)
  | Parser.Ls ([Parser.Item(Tokenizer.Token(opr, "OP")); lhs; rhs]) ->
    let lhs_type = line_infer lhs ref_list type_list in
    let rhs_type = line_infer rhs ref_list type_list in
    (* NOTE(review): looks up the operator only in the *last* (outermost)
       table of the global list, ignoring [type_list]; also Hashtbl.find
       raises Not_found for unknown operators such as "/" — confirm. *)
    let op_type = Hashtbl.find (List.nth !type_inference_table_list (List.length !type_inference_table_list -1)) opr in
    (match op_type with
    | OpType(op_lhs_type, op_rhs_type, op_res_type) ->
      if (equal_type lhs_type op_lhs_type) && (equal_type op_rhs_type rhs_type) then
        op_res_type
      else
        TypeError ("op_type unmatched: " ^ opr)
    | _ -> TypeError "operator unfound" )
  | Parser.Ls (Parser.Item(Tokenizer.Token(opr, "OP"))::rest) -> TypeError ("operator unfound: " ^ opr)

  (*variable: look the name up through the scope stack, innermost first *)
  | Parser.Item(Tokenizer.Token(var, "ID")) ->
    (let id_type = ref (Some(TypeError "unfound")) in
    let checked = ref false in
    (* Hashtbl.find wrapped to return an option instead of raising *)
    let safe_find x var = (try Some(Hashtbl.find x var) with Not_found -> None) in
    (* record only the first (innermost) hit *)
    let checker = (fun x ->
      (match safe_find x var with
      | None -> ()
      | tp_found -> if !checked == false then let _ = (id_type := tp_found) in let _ = (checked := true) in () else ())) in
    let _ = List.iter checker !type_list in
    ( match !id_type with
    | None -> TypeError ("inference get signal undound")
    | Some(n) ->
      (* mark the variable as referenced in the current scope *)
      let _ = (ref_list := (var::(List.hd !ref_list))::(List.tl !ref_list)) in
      n )
    )

  (* any other literal token: its kind is its type tag (e.g. "INT", "FLO") *)
  | Parser.Item(Tokenizer.Token(const, typ)) -> Simp(typ)
  | _ -> TypeError "other type error";;
||||||
|
|
||||||
|
|
||||||
|
(** Type-check every top-level line of a parse result, printing a message
    for each [TypeError].  Returns one [unit] per line (a [unit list]),
    preserving the original's return type.  Non-success parse results
    type-check as the single line [Void]. *)
let type_infer parseoutput =
  let inferred =
    match parseoutput with
    | Parser.Success(Ls(lines), _remained_tokens) ->
        List.map (fun line -> line_infer line reference_list type_inference_table_list) lines
    | _ -> [Void]
  in
  let report typ =
    match typ with
    | TypeError(msg) -> print_string ("TypeError:" ^ msg)
    | _ -> ()
  in
  List.map report inferred
|
||||||
|
(* let type_infer parseoutput =
|
||||||
|
match parseoutput with
|
||||||
|
| Parser.Success(Ls(lines), remained_tokens) ->
|
||||||
|
List.map (fun x -> print_string (type2string x ^ ";")) (List.map (fun x -> line_infer x ref_list type_inference_table_list) lines)
|
||||||
|
| _ -> List.map (fun x -> print_string (type2string x)) [Void]*)
|
||||||
|
|
||||||
|
|
BIN
ataabu/docs/planningDraft.pdf
Normal file
BIN
ataabu/docs/planningDraft.pdf
Normal file
Binary file not shown.
235
ataabu/docs/planningDraft.ty
Normal file
235
ataabu/docs/planningDraft.ty
Normal file
|
@ -0,0 +1,235 @@
|
||||||
|
|
||||||
|
|
||||||
|
#set text(
|
||||||
|
font : ("Noto Serif CJK TC"),
|
||||||
|
size : 12pt,
|
||||||
|
)
|
||||||
|
|
||||||
|
#show heading: set block(below: 1.5em)
|
||||||
|
|
||||||
|
#set heading(numbering: "1.")
|
||||||
|
|
||||||
|
#set align(center)
|
||||||
|
#block(
|
||||||
|
|
||||||
|
[#set text(
|
||||||
|
font : ("Noto Serif CJK TC"),
|
||||||
|
size : 20pt,
|
||||||
|
)
|
||||||
|
*Ataabu 語言的實作*
|
||||||
|
])
|
||||||
|
|
||||||
|
|
||||||
|
#block(above : 2em,
|
||||||
|
[#set text(
|
||||||
|
font : ("Noto Serif CJK TC"),
|
||||||
|
size : 14pt,
|
||||||
|
)
|
||||||
|
Tan, Kian-ting
|
||||||
|
|
||||||
|
#set text(
|
||||||
|
font : ("Noto Serif CJK TC"),
|
||||||
|
size : 10pt,
|
||||||
|
)])
|
||||||
|
|
||||||
|
#set align(left)
|
||||||
|
|
||||||
|
|
||||||
|
#outline(title:"目錄", indent: auto)
|
||||||
|
#pagebreak()
|
||||||
|
|
||||||
|
#set par(
|
||||||
|
first-line-indent: 2em,
|
||||||
|
justify: true,
|
||||||
|
leading: 1.1em,
|
||||||
|
)
|
||||||
|
|
||||||
|
= 句法解釋
|
||||||
|
|
||||||
|
以下針對這個語言的句法 (syntax),進行解釋。
|
||||||
|
== 語法草稿
|
||||||
|
|
||||||
|
+ 該語法範例碼如下,AST以後再行生成:
|
||||||
|
+ 一些注意事項:
|
||||||
|
+ 不支援可變變數
|
||||||
|
+ 不支援迴圈
|
||||||
|
|
||||||
|
#block(
|
||||||
|
```
|
||||||
|
int x = 10; # 常數定義法,這是註解
|
||||||
|
flo y = 10.1 + int2flo(x) / 3.0; # 浮點數
|
||||||
|
str name = "John"; # 字串,預設用utf-8
|
||||||
|
bool c = True; # 布林值,或是 False
|
||||||
|
int x = ({int x -> int : return x + 1;})(5); # gets 6
|
||||||
|
|
||||||
|
'''
|
||||||
|
這是多行註解
|
||||||
|
函數上面的多行註解會轉成 docString,先用 markdown 做子語言吧;
|
||||||
|
底下爲列表和陣列
|
||||||
|
'''
|
||||||
|
List(int) a_list = [1, 2, 3, 4, 5];
|
||||||
|
Array(int) a_array = array!([1, 2, 3, 4, 5]);
|
||||||
|
|
||||||
|
# 以下是 doc string 的範例,前面要加 @doc:
|
||||||
|
@doc '''
|
||||||
|
# Desc
|
||||||
|
find the sqrt sum of x and y
|
||||||
|
|
||||||
|
# Args
|
||||||
|
- x : lhs value
|
||||||
|
- y : rhs value
|
||||||
|
|
||||||
|
# Eg
|
||||||
|
sqrtsum(3, 4) == 25; # True
|
||||||
|
'''
|
||||||
|
fn sqrtSum = int x, int y -> int :
|
||||||
|
int z = x ** 2 + y ** 2;
|
||||||
|
return z;
|
||||||
|
|
||||||
|
fn isJohn = str s -> bool :
|
||||||
|
return case {
|
||||||
|
#print! 是巨集。!結尾是巨集名稱。 ++ 是字串相加
|
||||||
|
s == john -> {print!("Hi, " ++ s ++ "!\n");
|
||||||
|
True;}
|
||||||
|
else -> False;
|
||||||
|
};
|
||||||
|
|
||||||
|
#不返回值(void)的時候也要標註 return;
|
||||||
|
fn printCat = void -> void :
|
||||||
|
print!("cat!");
|
||||||
|
return ;
|
||||||
|
|
||||||
|
# 多型:
|
||||||
|
# @{} vars of Type with constraints
|
||||||
|
fn map = @(A, B : Any) # or @(A, B in Any)
|
||||||
|
(List A) origList; ( A -> B ) aFunction -> (List B) :
|
||||||
|
return match origList{
|
||||||
|
[] -> origList;
|
||||||
|
[x:xs] -> cons(aFunction(origList),
|
||||||
|
map(origList[1:], aFunction));
|
||||||
|
};
|
||||||
|
|
||||||
|
# 定義自定型別:
|
||||||
|
type Person = Person(str name, int age);
|
||||||
|
type Person = Person(str , int );
|
||||||
|
type TrafficLight = Red | Yellow | Green;
|
||||||
|
type List a = @(a : Any) Nil | Cons(a, List(a));
|
||||||
|
|
||||||
|
Person peter = Person{"Peter", 17};
|
||||||
|
|
||||||
|
print!(peter.name); # print "Peter"
|
||||||
|
|
||||||
|
debug!(peter); #印出 Person{name : "Peter", age : 17}這樣。
|
||||||
|
|
||||||
|
str myNameIsPeter = match peter
|
||||||
|
{
|
||||||
|
"Peter" _ -> "Is Peter ainm dom",
|
||||||
|
_ _ -> "Ní Peter ainm dom"
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
str t = peter.name[5];
|
||||||
|
|
||||||
|
#分行字串
|
||||||
|
str multilineString = '''
|
||||||
|
jumps over the lazy dog.
|
||||||
|
'''
|
||||||
|
|
||||||
|
# 將peter的內容克隆一份,然後屬性name改成"John",傳予john
|
||||||
|
Person john = peter[name <- "John"];
|
||||||
|
```)
|
||||||
|
|
||||||
|
|
||||||
|
#block(```
|
||||||
|
@lang DSL # 匯入語言
|
||||||
|
import datetime; #匯入模組
|
||||||
|
importPath "/path/to/file.toy" #匯入路徑程式檔
|
||||||
|
|
||||||
|
|
||||||
|
@doc '''
|
||||||
|
這是給模組的 @doc,表示資訊
|
||||||
|
'''
|
||||||
|
|
||||||
|
```)
|
||||||
|
|
||||||
|
== 語法生成文法
|
||||||
|
|
||||||
|
以下用類似 BNF 文法來表示這個玩具語言:
|
||||||
|
|
||||||
|
- `a{n}` 表示a出現n次
|
||||||
|
- `a?` 表示a出現0,1次
|
||||||
|
- `a*` 表示a出現0次以上
|
||||||
|
- `a+` 表示a出現1次以上
|
||||||
|
- `#` 註解
|
||||||
|
- `\x` 脫逸字元 \x
|
||||||
|
- `ID` identifier
|
||||||
|
- `(not a [b…])` 不是 a (, b…) 的字元,方括號[]內表示非必要,可選。
|
||||||
|
- `ASCII_SPACE` : Ascii 的半形空白字元
|
||||||
|
- `CHAR`:任意字元
|
||||||
|
- `$`:指1個以上空白或縮排字元。
|
||||||
|
- `$*`:指0個以上空白或縮排字元。
|
||||||
|
|
||||||
|
`
|
||||||
|
ALL = LANG_HEADER? EMPTY_LINE* IMPORTS? EMPTY_LINE* (DOCSTRING)? BODY # 所有的內容
|
||||||
|
|
||||||
|
#匯入
|
||||||
|
IMPORTS = IMPORT {EMPTY_LINE* IMPORT}+
|
||||||
|
IMPORT = IMPORTLIB $ NL | IMPORT_PATH $ NL
|
||||||
|
IMPORTLIB = import $ ID # 匯入自函式庫
|
||||||
|
IMPORT_PATH = importPath $ ( $ " NON_EMPTY_STRING " $ ) #匯入自檔案路徑
|
||||||
|
|
||||||
|
#定義DSL
|
||||||
|
LANG_HEADER = # $ lang $ ID #使用語言的模式
|
||||||
|
|
||||||
|
# 定義DocumentString
|
||||||
|
DOCSTRING = @ $ doc $ DOCSTR_CONTENT
|
||||||
|
|
||||||
|
#定義DocumentString的細部。本段放在另一個解析器,x = anotherParser(DOCSTRCONTENT)
|
||||||
|
DOCSTR_CONTENT = ''' NL # $ DOCSTR_SECTION $ NL DOCSTR_SECTION_EXPLANATION'''
|
||||||
|
DOCSTR_SECTION = ID
|
||||||
|
|
||||||
|
# docstring的各項解釋,開頭不能用井號
|
||||||
|
DOCSTR_SECTION_EXPLANATION = {NON_HASH_CHAR NON_EMPTY_STRING NL}+
|
||||||
|
|
||||||
|
BODY = BODY_LINE*
|
||||||
|
BODY_LINE = INLINE_COMMENT | VAR_DEF | EXPR | TYPE_DEF | MACRO_DEF | EMPTY_LINE
|
||||||
|
INLINE_COMMENT = $* # NON_NL_CHAR+ NL
|
||||||
|
|
||||||
|
VAR_DEF = $* TYPE_ID $ VAR_ID $* = $* EXPR $* ;
|
||||||
|
|
||||||
|
# Type Definition, eg.
|
||||||
|
# type List a = @(a : Any) Nil | Cons(a, List(a));
|
||||||
|
TYPE_DEF = Type $ TYPEID $* = $* POLYMOR? $ SUM_TYPE ;
|
||||||
|
|
||||||
|
SUM_TYPE = PRODUCT_TYPE $* { | $* PRODUCT_TYPE $*}*
|
||||||
|
PRODUCT_TYPE = UNARY_CONSTRUCT | RECORD | STRUCT;

UNARY_CONSTRUCT = CONSTRUCT_ID;
|
||||||
|
RECORD = CONSTRUCT_ID $* ( $* CONSTRUCT_LIST $* )
|
||||||
|
CONSTRUCT_LIST = CONSTRUCT {$*, $* CONSTRUCT}*
|
||||||
|
CONSTRUCT = TYPE_VAR TYPE_AUG?
|
||||||
|
TYPE_AUG = $* ( $* TYPE_VAR TYPE_AUG_REMAINED* $* )
|
||||||
|
TYPE_AUG_REMAINED = $*, $* TYPE_VAR
|
||||||
|
|
||||||
|
# 定義 structure 和 attribution
|
||||||
|
STRUCT = ATTR_ID $* ( $* ATTR_LIST $* )
|
||||||
|
ATTR_LIST= ATTR {$*, $* ATTR}+
|
||||||
|
ATTR = CONSTRUCT ATTR_ID
|
||||||
|
ATTR_ID = ID
|
||||||
|
TYPE_ID = ID
|
||||||
|
|
||||||
|
# 空行
|
||||||
|
EMPTY_LINE = SPACE_TAB* NL
|
||||||
|
#換行
|
||||||
|
NL = \n | \r
|
||||||
|
|
||||||
|
#非半形井號字元
|
||||||
|
NON_HASH_CHAR = (not #)
|
||||||
|
|
||||||
|
#非換行字元
|
||||||
|
NON_NL_CHAR = (not \n \r)
|
||||||
|
NON_EMPTY_STRING = CHAR+ #非空串
|
||||||
|
EMPTY_STRING = CHAR{0} #空字串
|
||||||
|
STRING = NON_EMPTY_STRING | EMPTY_STRING
|
||||||
|
SPACE_TAB = ASCII_SPACE | \t #空白與縮排字元,簡稱為 $
|
||||||
|
ID = [a-zA-Z_][_a-zA-Z0-9]* # identifier, in regexp
|
||||||
|
```
|
14
tshunhue/AssemblyLanguage/example.s
Normal file
14
tshunhue/AssemblyLanguage/example.s
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
global main
|
||||||
|
|
||||||
|
section .text
|
||||||
|
main:
|
||||||
|
push rbp
|
||||||
|
mov rbp, rsp
|
||||||
|
mov rax,0x1
|
||||||
|
mov [rbp-8],rax
|
||||||
|
mov rax,0x5
|
||||||
|
mov [rbp-16],rax
|
||||||
|
add rax,[rbp-8]
|
||||||
|
mov rsp,rbp
|
||||||
|
pop rbp
|
||||||
|
ret
|
53
tshunhue/AssemblyLanguage/scheme/phase1.scm
Normal file
53
tshunhue/AssemblyLanguage/scheme/phase1.scm
Normal file
|
@ -0,0 +1,53 @@
|
||||||
|
;; 要用 nanopass 的表示型態嗎?
|
||||||
|
;;
|
||||||
|
;;
|
||||||
|
|
||||||
|
;; Register and instruction tables for the phase-1 assembly checker.
;; Registers are listed in the standard x86 encoding order.
(define 64-register-ls '(rax rcx rdx rbx rsp rbp rsi rdi r8 r9 r10 r11 r12 r13 r14 r15))
;; fixed: the 32-bit list had "ecx" twice and was missing "esp"
(define 32-register-ls '(eax ecx edx ebx esp ebp esi edi))
(define move-cmd-ls '(mov))
(define noary-cmd-ls '(cltd ret))
(define uni-arthimetic-cmd-ls '(mul div idiv))
(define bi-arthimetic-cmd-ls '(add sub mul imul div idiv))
|
||||||
|
|
||||||
|
;; Predicates over the tables above; each returns exactly #t or #f.
(define (is-in-32-register-ls r) (if (memq r 32-register-ls) #t #f))
(define (is-in-64-register-ls r) (if (memq r 64-register-ls) #t #f))
;; a "location" is any known 32- or 64-bit register name
(define (is-location loc)
  (if (or (is-in-32-register-ls loc) (is-in-64-register-ls loc)) #t #f))
(define (is-uniary-operator o) (if (memq o uni-arthimetic-cmd-ls) #t #f))
(define (is-binary-operator o) (if (memq o bi-arthimetic-cmd-ls) #t #f))
(define (is-noary-cmd o) (if (memq o noary-cmd-ls) #t #f))
|
||||||
|
|
||||||
|
(load "pmatch.scm")
|
||||||
|
|
||||||
|
;; Verify one line of the phase-1 assembly AST.
;; Accepts (mov dst src), (binop dst src), (unop dst), and no-operand
;; commands such as (cltd) / (ret).
(define (verify-phase1-line input)
  (pmatch input
    ((mov ,des ,src)
     (and (is-location des) (is-location src)))
    ((,bi-op ,des ,src)
     (and (is-binary-operator bi-op) (is-location des) (is-location src)))
    ((,uni-op ,des)
     ;; fixed: this clause previously tested the unbound name bi-op
     (and (is-uniary-operator uni-op) (is-location des)))
    ((,op) (guard (is-noary-cmd op)) #t)))
|
||||||
|
|
||||||
|
(verify-phase1-line '(mov eax ecx))
|
||||||
|
(verify-phase1-line '(cltd))
|
||||||
|
|
||||||
|
;; Verify a whole phase-1 program: every line must pass
;; verify-phase1-line.  The empty program is trivially valid.
(define (verify-phase1 input)
  (cond
    ((eq? input '()) #t)
    ;; fixed: the two cond clauses were wrongly nested inside one extra
    ;; pair of parens, and the recursion called the misspelled names
    ;; verify-phase-line / verify-phase
    (else (and (verify-phase1-line (car input))
               (verify-phase1 (cdr input))))))
|
||||||
|
|
||||||
|
;; Print every line of the assembly program, one output line per
;; element, via print-assembly-codeline.
(define (print-assembly-code input)
  (cond
    ((null? input) '())
    ;; fixed: the original had no base case (car of '() errors), called
    ;; the misspelled name print-assembly-code-line, and passed the
    ;; recursive call as a second argument to a one-argument procedure
    (else
     (print-assembly-codeline (car input))
     (newline)
     (print-assembly-code (cdr input)))))
|
||||||
|
|
||||||
|
;; Print one assembly line as "op", "op arg" or "op arg1,arg2".
(define (print-assembly-codeline line)
  (display (car line))
  (cond
    ;; fixed: the 2-element case used (cdar line), which displays the
    ;; rest-list "(arg)" instead of the operand itself
    ((= (length line) 2) (display " ") (display (cadr line)))
    ((= (length line) 3)
     (display " ") (display (cadr line))
     (display ",") (display (caddr line)))
    (else '())))
|
||||||
|
|
||||||
|
(print-assembly-codeline
|
||||||
|
'(mov eax edx))
|
6
tshunhue/AssemblyLanguage/scheme/phase1.scm~
Normal file
6
tshunhue/AssemblyLanguage/scheme/phase1.scm~
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
(define 64-register-ls '(rax rcx rdx rbx rsp rbp rsi rdi))
|
||||||
|
(define 32-register-ls '(eax ecx ecx edx ebx ebp esi edi))
|
||||||
|
(define move-cmd-ls '(mov))
|
||||||
|
(define noary-cmd-ls '(cltd ret))
|
||||||
|
(define arthimetic-cmd-ls '(add sub mul imul div idiv))
|
||||||
|
|
70
tshunhue/AssemblyLanguage/scheme/pmatch.scm
Normal file
70
tshunhue/AssemblyLanguage/scheme/pmatch.scm
Normal file
|
@ -0,0 +1,70 @@
|
||||||
|
;; This is a new version of pmatch (August 8, 2012).
|
||||||
|
;; It has two important new features:
|
||||||
|
;; 1. It allows for a name to be given to the pmatch if an error ensues.
|
||||||
|
;; 2. A line from the specification has been removed. (see below). Without
|
||||||
|
;; that line removed, it was impossible for a pattern to be (quote ,x),
|
||||||
|
;; which might be worth having especially when we write an interpreter
|
||||||
|
;; for Scheme, which includes quote as a language form.
|
||||||
|
|
||||||
|
;;; Code written by Oleg Kiselyov
|
||||||
|
;; (http://pobox.com/~oleg/ftp/)
|
||||||
|
;;;
|
||||||
|
;;; Taken from leanTAP.scm
|
||||||
|
;;; http://kanren.cvs.sourceforge.net/kanren/kanren/mini/leanTAP.scm?view=log
|
||||||
|
|
||||||
|
; A simple linear pattern matcher
|
||||||
|
; It is efficient (generates code at macro-expansion time) and simple:
|
||||||
|
; it should work on any R5RS (and R6RS) Scheme system.
|
||||||
|
|
||||||
|
; (pmatch exp <clause> ...[<else-clause>])
|
||||||
|
; <clause> ::= (<pattern> <guard> exp ...)
|
||||||
|
; <else-clause> ::= (else exp ...)
|
||||||
|
; <guard> ::= boolean exp | ()
|
||||||
|
; <pattern> :: =
|
||||||
|
; ,var -- matches always and binds the var
|
||||||
|
; pattern must be linear! No check is done
|
||||||
|
; _ -- matches always
|
||||||
|
; 'exp -- comparison with exp (using equal?) REMOVED (August 8, 2012)
|
||||||
|
; exp -- comparison with exp (using equal?)
|
||||||
|
; (<pattern1> <pattern2> ...) -- matches the list of patterns
|
||||||
|
; (<pattern1> . <pattern2>) -- ditto
|
||||||
|
; () -- matches the empty list
|
||||||
|
|
||||||
|
(define-syntax pmatch
|
||||||
|
(syntax-rules (else guard)
|
||||||
|
((_ v (e ...) ...)
|
||||||
|
(pmatch-aux #f v (e ...) ...))
|
||||||
|
((_ v name (e ...) ...)
|
||||||
|
(pmatch-aux name v (e ...) ...))))
|
||||||
|
|
||||||
|
(define-syntax pmatch-aux
|
||||||
|
(syntax-rules (else guard)
|
||||||
|
((_ name (rator rand ...) cs ...)
|
||||||
|
(let ((v (rator rand ...)))
|
||||||
|
(pmatch-aux name v cs ...)))
|
||||||
|
((_ name v)
|
||||||
|
(begin
|
||||||
|
(if 'name
|
||||||
|
(printf "pmatch ~s failed\n~s\n" 'name v)
|
||||||
|
(printf "pmatch failed\n ~s\n" v))
|
||||||
|
(error 'pmatch "match failed")))
|
||||||
|
((_ name v (else e0 e ...)) (begin e0 e ...))
|
||||||
|
((_ name v (pat (guard g ...) e0 e ...) cs ...)
|
||||||
|
(let ((fk (lambda () (pmatch-aux name v cs ...))))
|
||||||
|
(ppat v pat (if (and g ...) (begin e0 e ...) (fk)) (fk))))
|
||||||
|
((_ name v (pat e0 e ...) cs ...)
|
||||||
|
(let ((fk (lambda () (pmatch-aux name v cs ...))))
|
||||||
|
(ppat v pat (begin e0 e ...) (fk))))))
|
||||||
|
|
||||||
|
(define-syntax ppat
|
||||||
|
(syntax-rules (? comma unquote)
|
||||||
|
((_ v ? kt kf) kt)
|
||||||
|
((_ v () kt kf) (if (null? v) kt kf))
|
||||||
|
; ((_ v (quote lit) kt kf) (if (equal? v (quote lit)) kt kf))
|
||||||
|
((_ v (unquote var) kt kf) (let ((var v)) kt))
|
||||||
|
((_ v (x . y) kt kf)
|
||||||
|
(if (pair? v)
|
||||||
|
(let ((vx (car v)) (vy (cdr v)))
|
||||||
|
(ppat vx x (ppat vy y kt kf) kf))
|
||||||
|
kf))
|
||||||
|
((_ v lit kt kf) (if (equal? v (quote lit)) kt kf))))
|
70
tshunhue/AssemblyLanguage/scheme/pmatch.scm~
Normal file
70
tshunhue/AssemblyLanguage/scheme/pmatch.scm~
Normal file
|
@ -0,0 +1,70 @@
|
||||||
|
;; This is a new version of pmatch (August 8, 2012).
|
||||||
|
;; It has two important new features:
|
||||||
|
;; 1. It allows for a name to be given to the pmatch if an error ensues.
|
||||||
|
;; 2. A line from the specification has been removed. (see below). Without
|
||||||
|
;; that line removed, it was impossible for a pattern to be (quote ,x),
|
||||||
|
;; which might be worth having especially when we write an interpreter
|
||||||
|
;; for Scheme, which includes quote as a language form.
|
||||||
|
|
||||||
|
;;; Code written by Oleg Kiselyov
|
||||||
|
;; (http://pobox.com/~oleg/ftp/)
|
||||||
|
;;;
|
||||||
|
;;; Taken from leanTAP.scm
|
||||||
|
;;; http://kanren.cvs.sourceforge.net/kanren/kanren/mini/leanTAP.scm?view=log
|
||||||
|
|
||||||
|
; A simple linear pattern matcher
|
||||||
|
; It is efficient (generates code at macro-expansion time) and simple:
|
||||||
|
; it should work on any R5RS (and R6RS) Scheme system.
|
||||||
|
|
||||||
|
; (pmatch exp <clause> ...[<else-clause>])
|
||||||
|
; <clause> ::= (<pattern> <guard> exp ...)
|
||||||
|
; <else-clause> ::= (else exp ...)
|
||||||
|
; <guard> ::= boolean exp | ()
|
||||||
|
; <pattern> :: =
|
||||||
|
; ,var -- matches always and binds the var
|
||||||
|
; pattern must be linear! No check is done
|
||||||
|
; _ -- matches always
|
||||||
|
; 'exp -- comparison with exp (using equal?) REMOVED (August 8, 2012)
|
||||||
|
; exp -- comparison with exp (using equal?)
|
||||||
|
; (<pattern1> <pattern2> ...) -- matches the list of patterns
|
||||||
|
; (<pattern1> . <pattern2>) -- ditto
|
||||||
|
; () -- matches the empty list
|
||||||
|
|
||||||
|
(define-syntax pmatch
|
||||||
|
(syntax-rules (else guard)
|
||||||
|
((_ v (e ...) ...)
|
||||||
|
(pmatch-aux #f v (e ...) ...))
|
||||||
|
((_ v name (e ...) ...)
|
||||||
|
(pmatch-aux name v (e ...) ...))))
|
||||||
|
|
||||||
|
(define-syntax pmatch-aux
|
||||||
|
(syntax-rules (else guard)
|
||||||
|
((_ name (rator rand ...) cs ...)
|
||||||
|
(let ((v (rator rand ...)))
|
||||||
|
(pmatch-aux name v cs ...)))
|
||||||
|
((_ name v)
|
||||||
|
(begin
|
||||||
|
(if 'name
|
||||||
|
(printf "pmatch ~s failed\n~s\n" 'name v)
|
||||||
|
(printf "pmatch failed\n ~s\n" v))
|
||||||
|
(error 'pmatch "match failed")))
|
||||||
|
((_ name v (else e0 e ...)) (begin e0 e ...))
|
||||||
|
((_ name v (pat (guard g ...) e0 e ...) cs ...)
|
||||||
|
(let ((fk (lambda () (pmatch-aux name v cs ...))))
|
||||||
|
(ppat v pat (if (and g ...) (begin e0 e ...) (fk)) (fk))))
|
||||||
|
((_ name v (pat e0 e ...) cs ...)
|
||||||
|
(let ((fk (lambda () (pmatch-aux name v cs ...))))
|
||||||
|
(ppat v pat (begin e0 e ...) (fk))))))
|
||||||
|
|
||||||
|
(define-syntax ppat
|
||||||
|
(syntax-rules (? comma unquote)
|
||||||
|
((_ v ? kt kf) kt)
|
||||||
|
((_ v () kt kf) (if (null? v) kt kf))
|
||||||
|
; ((_ v (quote lit) kt kf) (if (equal? v (quote lit)) kt kf))
|
||||||
|
((_ v (unquote var) kt kf) (let ((var v)) kt))
|
||||||
|
((_ v (x . y) kt kf)
|
||||||
|
(if (pair? v)
|
||||||
|
(let ((vx (car v)) (vy (cdr v)))
|
||||||
|
(ppat vx x (ppat vy y kt kf) kf))
|
||||||
|
kf))
|
||||||
|
((_ v lit kt kf) (if (equal? v (quote lit)) kt kf))))
|
29
tshunhue/BigNumOperation/.vscode/launch.json
vendored
Normal file
29
tshunhue/BigNumOperation/.vscode/launch.json
vendored
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
{
|
||||||
|
// 使用 IntelliSense 以得知可用的屬性。
|
||||||
|
// 暫留以檢視現有屬性的描述。
|
||||||
|
// 如需詳細資訊,請瀏覽: https://go.microsoft.com/fwlink/?linkid=830387
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"name": "gcc build and debug active file",
|
||||||
|
"type": "cppdbg",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "${fileDirname}/${fileBasenameNoExtension}",
|
||||||
|
"args": [],
|
||||||
|
"stopAtEntry": false,
|
||||||
|
"cwd": "${workspaceFolder}",
|
||||||
|
"environment": [],
|
||||||
|
"externalConsole": false,
|
||||||
|
"MIMode": "gdb",
|
||||||
|
"setupCommands": [
|
||||||
|
{
|
||||||
|
"description": "Enable pretty-printing for gdb",
|
||||||
|
"text": "-enable-pretty-printing",
|
||||||
|
"ignoreFailures": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"preLaunchTask": "gcc build active file",
|
||||||
|
"miDebuggerPath": "/usr/bin/gdb"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
25
tshunhue/BigNumOperation/.vscode/tasks.json
vendored
Normal file
25
tshunhue/BigNumOperation/.vscode/tasks.json
vendored
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
{
|
||||||
|
// 如需 tasks.json 格式的文件,
|
||||||
|
// 請參閱 https://go.microsoft.com/fwlink/?LinkId=733558
|
||||||
|
"version": "2.0.0",
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"label": "gcc build active file",
|
||||||
|
"command": "/usr/bin/gcc",
|
||||||
|
"args": [
|
||||||
|
"-g",
|
||||||
|
"${file}",
|
||||||
|
"-o",
|
||||||
|
"${fileDirname}/${fileBasenameNoExtension}"
|
||||||
|
],
|
||||||
|
"options": {
|
||||||
|
"cwd": "/usr/bin"
|
||||||
|
},
|
||||||
|
"problemMatcher": [
|
||||||
|
"$gcc"
|
||||||
|
],
|
||||||
|
"group": "build"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
24
tshunhue/BigNumOperation/general_type_definition.c
Normal file
24
tshunhue/BigNumOperation/general_type_definition.c
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
#include <stdbool.h>
|
||||||
|
|
||||||
|
typedef enum {INT, DOUBLE, CHAR} simple_type_id;
|
||||||
|
|
||||||
|
char* simple_type_list[] = {"INT", "DOUBLE", "CHAR"};
|
||||||
|
|
||||||
|
union simple_type{
|
||||||
|
int integer;
|
||||||
|
double double_num;
|
||||||
|
char ch;
|
||||||
|
};
|
||||||
|
|
||||||
|
typedef struct variable_type{
|
||||||
|
simple_type_id id;
|
||||||
|
union simple_type simple_type;
|
||||||
|
} VarType;
|
||||||
|
|
||||||
|
int main() {
|
||||||
|
VarType a;
|
||||||
|
a.id = 1;
|
||||||
|
a.simple_type.double_num = 0.789;
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
868
tshunhue/BigNumOperation/huge_num.c
Normal file
868
tshunhue/BigNumOperation/huge_num.c
Normal file
|
@ -0,0 +1,868 @@
|
||||||
|
/*
huge_num.c - a simplified version of libtommath,
which is a public-domain arbitrary-precision
arithmetic library.
*/
||||||
|
|
||||||
|
#include "huge_num.h"
|
||||||
|
|
||||||
|
#define _2_32_ 4294967296
|
||||||
|
#define OVER_BOUND 999
|
||||||
|
|
||||||
|
typedef struct BigNum {
|
||||||
|
bool isNotNeg;
|
||||||
|
size_t used;
|
||||||
|
uint32_t* digit;
|
||||||
|
} BigNum;
|
||||||
|
|
||||||
|
typedef enum BigNumErrorCode {
|
||||||
|
OverBorder,
|
||||||
|
UnableToInitialized,
|
||||||
|
DivZero,
|
||||||
|
IllegalBase} BigNumErrorCode;
|
||||||
|
|
||||||
|
|
||||||
|
// init bigNum with digit_num (length of the array of num->digit). input "BigNum**"
|
||||||
|
/* Allocate and zero-initialise a BigNum able to hold digit_num 32-bit
 * digits.  On success *num points at the new number (used is set to
 * digit_num - 1) and 0 is returned; on allocation failure *num is NULL
 * and UnableToInitialized is returned. */
int bigNumInit (BigNum** num, size_t digit_num){
    *num = calloc(sizeof(BigNum), 1);
    if (!*num){
        /* fixed: the old code kept going and dereferenced the NULL
         * pointer below before reporting the failure */
        return UnableToInitialized;
    }

    (*num)->digit = calloc(sizeof(uint32_t), digit_num);
    if (!((*num)->digit)){
        /* fixed: free the half-built struct instead of leaking it */
        free(*num);
        *num = NULL;
        return UnableToInitialized;
    }

    (*num)->used = digit_num - 1;
    return 0;
}
|
||||||
|
|
||||||
|
/* Release a BigNum created by bigNumInit.  Safe to call with NULL
 * (fixed: the old code dereferenced num unconditionally). */
void bigNumFree (BigNum* num){
    if (num == NULL){
        return;
    }
    free(num->digit);
    free(num);
}
|
||||||
|
|
||||||
|
/* Copy orig into dest (sign, length and all used digits).
 * dest->digit is assumed to have room for orig->used + 1 digits
 * — TODO confirm at call sites. */
void bigNumCopy (BigNum* orig, BigNum * dest){
    dest->used = orig->used;
    dest->isNotNeg = orig->isNotNeg;

    size_t i = 0;
    while (i <= orig->used){
        dest->digit[i] = orig->digit[i];
        i++;
    }
}
|
||||||
|
|
||||||
|
/* Load a signed 32-bit value into an already-initialised BigNum
 * (sign/magnitude, one digit).  Always returns 0. */
int int32ToBigNum(BigNum* bigNum, int32_t i32Num){
    if (i32Num >= 0){
        bigNum->isNotNeg = true;
        bigNum->digit[0] = (uint32_t)i32Num;
    }
    else{
        bigNum->isNotNeg = false;
        /* fixed: -i32Num is undefined behaviour for INT32_MIN;
         * widen to 64 bits before negating */
        bigNum->digit[0] = (uint32_t)(-(int64_t)i32Num);
    }
    bigNum->used = 0;

    return 0;
}
|
||||||
|
|
||||||
|
/* Magnitude comparison: true iff |lhs| > |rhs|.
 * A number with more used digits is always larger; with equal digit
 * counts, compare digit-by-digit from the most significant down. */
bool bigNumAbsLarger(BigNum* lhs, BigNum* rhs){
    if (lhs->used != rhs->used){
        return lhs->used > rhs->used;
    }
    /* countdown loop written without the unsigned "i != -1" idiom */
    for (size_t i = lhs->used + 1; i-- > 0; ){
        if (lhs->digit[i] > rhs->digit[i]){
            return true;
        }
        if (lhs->digit[i] < rhs->digit[i]){
            return false;
        }
    }
    return false;  /* all digits equal: not strictly larger */
}
|
||||||
|
|
||||||
|
/* Magnitude equality: same digit count and identical digits. */
bool bigNumAbsEqual(BigNum* lhs, BigNum* rhs){
    if (lhs->used != rhs->used){
        return false;
    }
    for (size_t i = lhs->used + 1; i-- > 0; ){
        if (lhs->digit[i] != rhs->digit[i]){
            return false;
        }
    }
    return true;
}
|
||||||
|
|
||||||
|
/* Signed equality: equal iff the signs match and magnitudes match.
 * Note this treats +0 and -0 as different, like the original. */
bool bigNumEqual(BigNum* lhs, BigNum* rhs){
    if (lhs->isNotNeg != rhs->isNotNeg){
        return false;
    }
    return bigNumAbsEqual(lhs, rhs);
}
|
||||||
|
|
||||||
|
/* Signed comparison: true iff lhs > rhs. */
bool bigNumLarger(BigNum* lhs, BigNum* rhs){
    /* fixed: the sign tests used bitwise & on booleans; && is the
     * intended (and short-circuiting) operator */
    if (lhs->isNotNeg && !rhs->isNotNeg){
        return true;            /* positive > negative */
    }
    else if (!lhs->isNotNeg && rhs->isNotNeg){
        return false;           /* negative < positive */
    }
    else if (lhs->isNotNeg && rhs->isNotNeg){
        return bigNumAbsLarger(lhs, rhs);
    }
    else{
        /* both negative: the smaller magnitude is the larger number */
        return bigNumAbsLarger(rhs, lhs);
    }
}
|
||||||
|
|
||||||
|
/* Signed comparison: true iff lhs < rhs, i.e. not equal and not
 * greater. */
bool bigNumLess(BigNum* lhs, BigNum* rhs){
    /* fixed: the original used bigNumAbsLarger (magnitude only), which
     * gives the wrong answer when the signs differ, e.g. -5 < 3;
     * the signed comparison bigNumLarger is required here */
    return (!(bigNumEqual(lhs, rhs) || bigNumLarger(lhs, rhs)));
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Magnitude comparison: true iff |lhs| < |rhs| (neither equal nor
 * greater in magnitude). */
bool bigNumAbsLess(BigNum* lhs, BigNum* rhs){
    bool ge = bigNumAbsEqual(lhs, rhs) || bigNumAbsLarger(lhs, rhs);
    return !ge;
}
|
||||||
|
|
||||||
|
/* result = |bigger| + |smaller|, schoolbook addition in base 2^32.
 * "bigger" must be the operand with more (or equal) used digits.
 * result->digit is assumed to have room for bigger->used + 2 digits
 * (one extra for a final carry) — TODO confirm at call sites.
 * result's sign is NOT set here.  Always returns 0. */
int bigNumAbsAdd(BigNum* bigger, BigNum* smaller, BigNum* result){
    size_t n = bigger->used;
    uint64_t carry = 0;  /* 0 or 1 between digit positions */

    for (size_t i=0; i<=n;i++){
        uint64_t b_i = bigger->digit[i];
        uint64_t s_i;

        /* treat smaller as zero-extended past its last digit */
        if (i>smaller->used){
            s_i = 0;
        }else{
            s_i = smaller->digit[i];
        }

        /* 64-bit sum cannot overflow: max is 2*(2^32-1)+1 < 2^64 */
        uint64_t res = (b_i + s_i + carry);

        if (res >= _2_32_){
            result->digit[i] = (uint32_t)(res - _2_32_);
            carry = 1;
        }
        else{
            result->digit[i] = (uint32_t)res;
            carry = 0;
        }

    }

    /* a carry out of the top digit grows the number by one digit */
    if (carry == 1){
        result->digit[n+1] = carry;
        result->used = n+1;
    }
    else{
        result->used = n;
    }

    return 0;
}
|
||||||
|
|
||||||
|
/* result = |bigger| - |smaller|, schoolbook subtraction with borrow in
 * base 2^32.  Caller must ensure |bigger| >= |smaller|, otherwise the
 * final borrow is lost and the result is wrong — TODO confirm callers.
 * result's sign is NOT set here.  Always returns 0. */
int bigNumAbsSubtract(BigNum* bigger, BigNum* smaller, BigNum* result){
    size_t n = bigger->used;
    int64_t carry = 0;  /* borrow: 0 or -1 between digit positions */

    for (size_t i=0; i<=n;i++){
        int64_t b_i = bigger->digit[i];
        int64_t s_i;

        /* treat smaller as zero-extended past its last digit */
        if(i>smaller->used){
            s_i = 0;
        }
        else{
            s_i = smaller->digit[i];
        }

        int64_t res_i;
        if (b_i < s_i){
            /* must borrow from the next digit */
            res_i = (b_i - s_i + _2_32_) + carry;
            result->digit[i] = (uint32_t) res_i;
            carry = -1;
        }
        else{
            res_i = b_i + carry - s_i;
            /* the incoming borrow alone can push the digit negative */
            if (res_i < 0){
                res_i = res_i + _2_32_;
                result->digit[i] = (uint32_t) res_i;
                carry = -1;
            }
            else{
                result->digit[i] = res_i;
                carry = 0;
            }
        }
    }

    /* trim leading zeros and update result->used */
    bigNumClearZero(result, n);

    return 0;

}
|
||||||
|
|
||||||
|
// clear the initial zero
|
||||||
|
/* Trim leading zero digits: set n->used to the index of the most
 * significant non-zero digit, or 0 when the whole number is zero.
 * initValue is the highest digit index that may be populated. */
void bigNumClearZero(BigNum* n, size_t initValue){
    /* fixed: replaced the unsigned "i != -1" countdown and the bitwise
     * & on the break condition with an equivalent, clearer loop */
    for (size_t i = initValue + 1; i-- > 0; ){
        if (n->digit[i] != 0 || i == 0){
            n->used = i;
            break;
        }
    }
}
|
||||||
|
|
||||||
|
/* result = lhs - rhs, full signed subtraction by sign-case dispatch
 * onto the magnitude routines.  result->digit is assumed large enough
 * for the larger operand plus one carry digit — TODO confirm callers.
 * Always returns 0. */
int bigNumSubtract(BigNum* lhs, BigNum* rhs, BigNum* result){
    /* fixed throughout: the sign tests used bitwise & on booleans;
     * && is the intended operator */
    if (lhs->isNotNeg && !rhs->isNotNeg){
        /* (+a) - (-b) = a + b */
        result->isNotNeg = true;
        if (bigNumAbsLarger(lhs, rhs)){
            bigNumAbsAdd(lhs, rhs, result);
        }
        else{
            bigNumAbsAdd(rhs, lhs, result);
        }
    }
    else if (!lhs->isNotNeg && rhs->isNotNeg){
        /* (-a) - (+b) = -(a + b), e.g. -3 - 5 or -5 - 3 */
        result->isNotNeg = false;
        if (bigNumAbsLarger(lhs, rhs)){
            bigNumAbsAdd(lhs, rhs, result);
        }
        else{
            bigNumAbsAdd(rhs, lhs, result);
        }
    }
    else if (lhs->isNotNeg && rhs->isNotNeg){
        if (bigNumAbsLarger(lhs, rhs)){
            /* e.g. 5 - 3 */
            result->isNotNeg = true;
            bigNumAbsSubtract(lhs, rhs, result);
        }
        else{
            /* e.g. 5 - 8 */
            result->isNotNeg = false;
            bigNumAbsSubtract(rhs, lhs, result);
        }
    }
    else{
        if (bigNumAbsLarger(rhs, lhs) || bigNumAbsEqual(rhs, lhs)){
            /* e.g. (-5) - (-5) or (-5) - (-6) */
            result->isNotNeg = true;
            bigNumAbsSubtract(rhs, lhs, result);
        }
        else{
            /* e.g. (-5) - (-3) */
            result->isNotNeg = false;
            bigNumAbsSubtract(lhs, rhs, result);
        }
    }
    return 0;
}
|
||||||
|
|
||||||
|
/* result = lhs * rhs, schoolbook multiplication in base 2^32.
 * result->digit is assumed to have room for
 * (lhs->used + 1) + (rhs->used + 1) digits — TODO confirm callers.
 * NOTE(review): the calloc below is not checked for NULL.
 * Always returns 0. */
int bigNumMultiply(BigNum*lhs, BigNum* rhs, BigNum* result){

    /* sign: like signs give a non-negative product */
    if (lhs->isNotNeg == rhs->isNotNeg){
        result->isNotNeg = true;
    }else{
        result->isNotNeg = false;
    }

    size_t n = (lhs->used + 1) + (rhs->used + 1);
    uint32_t* temp_result = calloc(sizeof(uint32_t), n); // actually length; n_used = n - 1

    /* redundant with calloc, but makes the zeroing explicit */
    for (size_t i=0;i<n;i++){
        temp_result[i] = ((uint32_t)0);
    }

    /* accumulate partial products: one pass per rhs digit */
    for(size_t i=0; i<=rhs->used; i++){
        uint64_t c = 0;  /* carry within this pass */

        for (size_t j=0; j<=lhs->used; j++){
            uint64_t res_i_j = (uint64_t)temp_result[i+j];
            /* 32x32 -> 64-bit product; cannot overflow uint64_t */
            uint64_t l_j_r_i = (uint64_t)(lhs->digit[j]) * (uint64_t)(rhs->digit[i]);
            uint64_t current_res = res_i_j + l_j_r_i + c;

            uint64_t res_base = current_res % _2_32_;
            uint64_t res_carry = current_res / _2_32_;

            temp_result[i+j] = (uint32_t) res_base;
            c = res_carry;
        }

        /* final carry of this pass lands one digit past lhs's top */
        temp_result[i+(lhs->used)+1] = (uint32_t)c;
    }

    for(size_t i=0;i<n;i++){
        result->digit[i] = temp_result[i];
    }

    // correct the result->used
    size_t n_used = n - 1;
    bigNumClearZero(result, n_used);

    free(temp_result);

    return 0;
}
|
||||||
|
|
||||||
|
/* Divide lhs by a single-digit rhs (rhs->used == 0): long division
 * from the most significant digit down, carrying the running remainder
 * in a 64-bit accumulator.  Caller must guarantee rhs != 0.
 * Fills quotient and the one-digit remainder; always returns 0. */
int bigNumDivideOneDigit(BigNum* lhs, BigNum* rhs , BigNum* quotient, BigNum* remainder){
    uint32_t rhs_number = rhs->digit[0];

    for (size_t i=0;i<=lhs->used;i++){
        quotient->digit[i]=0;
    }

    quotient->used = lhs->used;

    uint64_t temp = 0;  /* running remainder, always < rhs_number * 2^32 */

    /* note: "i != -1" relies on size_t wraparound to end after i == 0 */
    for(size_t i=lhs->used; i != -1; i--){
        /* shift the remainder up one digit and bring down lhs digit i */
        temp = temp * _2_32_ + lhs->digit[i];
        quotient->digit[i] = (uint32_t) (temp / (uint64_t)rhs_number);
        temp = temp % (uint64_t) rhs_number;
    }

    remainder->used = 0;
    remainder->digit[0] = (uint32_t)temp;

    /* trim any leading zero digits of the quotient */
    bigNumClearZero(quotient, quotient->used);

    return 0;
}
|
||||||
|
|
||||||
|
/* Shift big_num left by n whole base-2^32 digits (multiply by 2^(32n)).
 * big_num->digit is assumed to have room for used + n + 1 digits —
 * TODO confirm at call sites.  Always returns 0. */
int bigNumShiftLeft(BigNum* big_num, size_t n){
    if(n == 0){
        return 0;
    }

    big_num->used += n;
    /* move digits upward, highest first so nothing is overwritten */
    for(size_t i=big_num->used; i != n-1;i--){
        big_num->digit[i] = big_num->digit[i-n];
    }

    /* zero-fill the vacated low digits */
    for(size_t i=0;i<n;i++){
        big_num->digit[i] = 0;
    }

    return 0;
}
|
||||||
|
|
||||||
|
// if n > big_num->used, set n = big_num->used
|
||||||
|
/* Shift big_num right by n whole base-2^32 digits (divide by 2^(32n),
 * discarding the low digits).  Shifting past the top digit is clamped
 * so the result keeps one digit.  Always returns 0. */
int bigNumShiftRight(BigNum* big_num, size_t n){
    /* clamp: see the comment above the function in the file */
    if (n>big_num->used){
        n = big_num->used;
    }

    big_num->used -= n;

    /* move digits downward, lowest first */
    for(size_t i=0; i<=big_num->used;i++){
        big_num->digit[i] = big_num->digit[i+n];
    }

    return 0;
}
|
||||||
|
|
||||||
|
/* Signed division: quotient = lhs / rhs, remainder = lhs mod rhs.
 * Returns 0 on success, DivZero when rhs == 0.
 * When |rhs| > |lhs| the quotient is 0 and the remainder is lhs.
 * Otherwise the signs are saved, both operands are temporarily forced
 * non-negative so the magnitude routines can run, and the saved signs
 * are restored afterwards (lhs/rhs are mutated during the call but
 * restored before returning). */
int bigNumDivide(BigNum* lhs, BigNum* rhs, BigNum* quotient, BigNum* remainder){

    if (bigNumAbsLarger(rhs, lhs)){
        /* |rhs| > |lhs|: quotient 0, remainder = lhs */
        quotient->used = 0;
        quotient->digit[0] = 0;

        remainder->used = lhs->used;
        for (size_t i=0; i<= lhs->used;i++){
            remainder->digit[i] = lhs->digit[i];
        }

        return 0;
    }
    else{
        bool lhsIsNotNeg = lhs->isNotNeg;
        bool rhsIsNotNeg = rhs->isNotNeg;
        bool quotientIsNotNeg;
        bool remainderIsNotNeg;

        /* build a temporary zero to test for division by zero */
        BigNum* bigNumZero;
        bigNumInit(&bigNumZero,1);
        int32ToBigNum(bigNumZero, 0);

        if(bigNumAbsEqual(rhs, bigNumZero)){
            bigNumFree(bigNumZero);

            return DivZero;
        }else{
            bigNumFree(bigNumZero);
            /* remainder takes the dividend's sign; quotient sign is
             * positive iff the operand signs agree */
            remainderIsNotNeg = lhs->isNotNeg;

            if(lhs->isNotNeg == rhs->isNotNeg){
                quotientIsNotNeg = true;
            }
            else{
                quotientIsNotNeg = false;
            }

            // set the isNotNeg to true temporarily
            lhs->isNotNeg = true;
            rhs->isNotNeg = true;
            quotient->isNotNeg = true;
            remainder->isNotNeg = true;

            /* single-digit divisor gets the fast long-division path */
            if (rhs->used == 0){
                bigNumDivideOneDigit(lhs,rhs,quotient,remainder);
            }else{
                bigNumDivideOther(lhs,rhs,quotient,remainder);
            }

            // recover to their isNotNeg
            lhs->isNotNeg = lhsIsNotNeg;
            rhs->isNotNeg = rhsIsNotNeg;
            quotient->isNotNeg = quotientIsNotNeg;
            remainder->isNotNeg = remainderIsNotNeg;

            return 0;

        }
    }
}
|
||||||
|
|
||||||
|
/* big num division with larger digits(num->used >= 1). The Algorithm is from HoAC.
|
||||||
|
https://github.com/libtom/libtommath/blob/develop/s_mp_div_school.c
|
||||||
|
*/
|
||||||
|
int bigNumDivideOther(BigNum* lhs, BigNum* rhs, BigNum* quotient, BigNum* remainder){
|
||||||
|
// normalization such that y_t * normalcoeff >= 2 ** 32 / 2
|
||||||
|
uint32_t normalcoeff = (uint32_t)((_2_32_ / 2 + 1) / (uint64_t)rhs->digit[rhs->used]);
|
||||||
|
|
||||||
|
BigNum* bigNumNormalCoeff;
|
||||||
|
bigNumInit(&bigNumNormalCoeff, 1);
|
||||||
|
|
||||||
|
uint32ToBigNum(bigNumNormalCoeff, normalcoeff);
|
||||||
|
|
||||||
|
BigNum* temp;
|
||||||
|
bigNumInit(&temp, lhs->used+2);
|
||||||
|
// magnify temporily
|
||||||
|
bigNumMultiply(lhs, bigNumNormalCoeff, temp);
|
||||||
|
bigNumCopy(temp, lhs);
|
||||||
|
bigNumMultiply(rhs, bigNumNormalCoeff, temp);
|
||||||
|
bigNumCopy(temp, rhs);
|
||||||
|
|
||||||
|
|
||||||
|
size_t t = rhs->used; // rhs->used
|
||||||
|
|
||||||
|
size_t diff_l_and_r = lhs->used-t;
|
||||||
|
quotient->used = diff_l_and_r;
|
||||||
|
|
||||||
|
for(size_t j=0;j<=diff_l_and_r;j++){
|
||||||
|
quotient->digit[j] = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// the algorithm is from HoAC Ch14.
|
||||||
|
/* 2. While (x ≥ yb n−t ) do the following: q n−t ←q n−t + 1, x←x − yb n−t . */
|
||||||
|
bigNumCopy(lhs, remainder);
|
||||||
|
|
||||||
|
|
||||||
|
bigNumShiftLeft(rhs,diff_l_and_r);
|
||||||
|
while(!bigNumLess(remainder,rhs)){
|
||||||
|
quotient->digit[diff_l_and_r]++;
|
||||||
|
bigNumAbsSubtract(remainder,rhs,remainder);
|
||||||
|
}
|
||||||
|
|
||||||
|
bigNumShiftRight(rhs,diff_l_and_r); // recover rhs to the right value;
|
||||||
|
|
||||||
|
for(size_t i=remainder->used;i >= t + 1;i--){
|
||||||
|
if (remainder->digit[i]==rhs->digit[t]){
|
||||||
|
quotient->digit[i-t-1] = _2_32_ - 1;
|
||||||
|
}else{
|
||||||
|
uint64_t x_i = (uint64_t)(remainder->digit[i]);
|
||||||
|
uint64_t x_i_1 = (uint64_t)(remainder->digit[i-1]);
|
||||||
|
quotient->digit[i-t-1] = (uint32_t)((x_i * _2_32_ + x_i_1)/((uint64_t)(rhs->digit[t])));
|
||||||
|
}
|
||||||
|
|
||||||
|
/* While (q i−t−1 (y t b + y t−1 ) > x i b 2 + x i−1 b + x i−2 )
|
||||||
|
do: q i−t−1 ←q i−t−1 − 1. */
|
||||||
|
BigNum* temp_lhs;
|
||||||
|
BigNum* temp_rhs;
|
||||||
|
BigNum* temp2;
|
||||||
|
|
||||||
|
bigNumInit(&temp_lhs,3);
|
||||||
|
bigNumInit(&temp_rhs,3);
|
||||||
|
bigNumInit(&temp2,3);
|
||||||
|
|
||||||
|
quotient->digit[i-t-1] += 1;
|
||||||
|
|
||||||
|
do{
|
||||||
|
quotient->digit[i-t-1] -= 1;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
temp_lhs->isNotNeg = true;
|
||||||
|
temp_lhs->digit[1] = ((t - 1) < 0) ? 0u : rhs->digit[t];
|
||||||
|
temp_lhs->digit[0] = rhs->digit[t-1];
|
||||||
|
temp_lhs->used = 1;
|
||||||
|
|
||||||
|
uint32ToBigNum(temp, quotient->digit[i-t-1]);
|
||||||
|
|
||||||
|
|
||||||
|
bigNumMultiply(temp_lhs, temp, temp2);
|
||||||
|
bigNumCopy(temp2, temp_lhs);
|
||||||
|
|
||||||
|
temp_rhs->isNotNeg = true;
|
||||||
|
temp_rhs->used = 2;
|
||||||
|
temp_rhs->digit[2] = remainder->digit[i];
|
||||||
|
temp_rhs->digit[1] = ((i - 1) < 0) ? 0u : remainder->digit[i-1];
|
||||||
|
temp_rhs->digit[0] = ((i - 2) < 0) ? 0u : remainder->digit[i-2];
|
||||||
|
|
||||||
|
}while(bigNumLarger(temp_lhs,temp_rhs));
|
||||||
|
|
||||||
|
bigNumFree(temp_lhs);
|
||||||
|
bigNumFree(temp_rhs);
|
||||||
|
|
||||||
|
/* x←x − q i−t−1 yb i−t−1 . */
|
||||||
|
BigNum* rhs_clone;
|
||||||
|
bigNumInit(&rhs_clone, (rhs->used)+1);
|
||||||
|
bigNumCopy(rhs,rhs_clone);
|
||||||
|
bigNumShiftLeft(rhs_clone,i-t-1);
|
||||||
|
|
||||||
|
uint32ToBigNum(temp, quotient->digit[i-t-1]);
|
||||||
|
bigNumMultiply(rhs_clone,temp,temp2);
|
||||||
|
bigNumCopy(temp2, rhs_clone);
|
||||||
|
|
||||||
|
bigNumSubtract(remainder,rhs_clone,remainder);
|
||||||
|
|
||||||
|
bigNumFree(rhs_clone);
|
||||||
|
bigNumFree(temp2);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
if(remainder->isNotNeg == false){
|
||||||
|
BigNum* rhs_clone2;
|
||||||
|
bigNumInit(&rhs_clone2, (rhs->used)+1);
|
||||||
|
bigNumCopy(rhs,rhs_clone2);
|
||||||
|
bigNumShiftLeft(rhs_clone2,i-t-1);
|
||||||
|
|
||||||
|
bigNumAdd(remainder,rhs_clone2,remainder);
|
||||||
|
quotient->digit[i-t-1] = quotient->digit[i-t-1] - 1;
|
||||||
|
|
||||||
|
bigNumFree(rhs_clone2);
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
BigNum* temp3;
|
||||||
|
bigNumInit(&temp3, lhs->used+1);
|
||||||
|
|
||||||
|
bigNumDivide(remainder, bigNumNormalCoeff, temp3, temp);
|
||||||
|
bigNumCopy(temp3, remainder);
|
||||||
|
bigNumDivide(lhs, bigNumNormalCoeff, temp3, temp);
|
||||||
|
bigNumCopy(temp3, lhs);
|
||||||
|
bigNumDivide(rhs, bigNumNormalCoeff, temp3, temp);
|
||||||
|
bigNumCopy(temp3, rhs);
|
||||||
|
bigNumFree(temp3);
|
||||||
|
|
||||||
|
|
||||||
|
bigNumClearZero(remainder, remainder->used);
|
||||||
|
bigNumClearZero(quotient, quotient->used);
|
||||||
|
|
||||||
|
bigNumFree(temp);
|
||||||
|
|
||||||
|
bigNumFree(bigNumNormalCoeff);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Signed addition: result = lhs + rhs.
 * Dispatches on the operand signs and delegates to the magnitude-only
 * helpers, always passing the larger magnitude first.
 * fix: uses logical && / ! instead of bitwise & on booleans (the old form
 * worked for 0/1 values but did not short-circuit and was fragile). */
int bigNumAdd(BigNum* lhs, BigNum* rhs, BigNum* result){
    if (lhs->isNotNeg && rhs->isNotNeg){            /* (+a) + (+b) */
        result->isNotNeg = true;
        if (bigNumAbsLarger(lhs, rhs)){
            bigNumAbsAdd(lhs, rhs, result);
        }
        else{
            bigNumAbsAdd(rhs, lhs, result);
        }
    }
    else if (!lhs->isNotNeg && !rhs->isNotNeg){     /* (-a) + (-b) */
        result->isNotNeg = false;
        if (bigNumAbsLarger(lhs, rhs)){
            bigNumAbsAdd(lhs, rhs, result);
        }
        else{
            bigNumAbsAdd(rhs, lhs, result);
        }
    }
    else if (lhs->isNotNeg && !rhs->isNotNeg){
        // e.g. 5 + (-3): positive dominates
        if (bigNumAbsLarger(lhs, rhs)){
            result->isNotNeg = true;
            bigNumAbsSubtract(lhs, rhs, result);
        }
        // e.g. 5 + (-8): negative dominates
        else{
            result->isNotNeg = false;
            bigNumAbsSubtract(rhs, lhs, result);
        }
    }
    else{
        // e.g. (-5) + 5 or (-5) + 6: result is zero or positive
        if (bigNumAbsLarger(rhs, lhs) || bigNumAbsEqual(rhs, lhs)){
            result->isNotNeg = true;
            bigNumAbsSubtract(rhs, lhs, result);
        }
        // e.g. (-5) + 3: result stays negative
        else{
            result->isNotNeg = false;
            bigNumAbsSubtract(lhs, rhs, result);
        }
    }

    return 0;
}
|
||||||
|
|
||||||
|
/* Reverse a NUL-terminated string in place.
 * NULL and the empty string are accepted and left untouched. */
void strrev(char* str){
    if (str == NULL || *str == '\0') {
        return;
    }

    char* left = str;
    char* right = str + strlen(str) - 1;

    /* Swap the outermost pair and walk inwards until the cursors meet. */
    while (left < right) {
        char tmp = *left;
        *left++ = *right;
        *right-- = tmp;
    }
}
|
||||||
|
|
||||||
|
// str to num (result num, should be inited first) in base (2 ~ 16).
|
||||||
|
int strToBigNum(BigNum* num, char* str, uint32_t base){
|
||||||
|
if (base != 2 & base != 8 & base != 10 & base != 16 ){
|
||||||
|
return IllegalBase;
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t digitBorder = 0; // the border of the digit (0~9)
|
||||||
|
|
||||||
|
if(str[0] == '-'){
|
||||||
|
digitBorder = 1; // the border of the digit (0~9);
|
||||||
|
}
|
||||||
|
|
||||||
|
// set result num to be 0
|
||||||
|
num->used = 0;
|
||||||
|
num->digit[0] = 0;
|
||||||
|
|
||||||
|
|
||||||
|
for (size_t i= digitBorder; i<strlen(str); i++){
|
||||||
|
int32_t digit = hexCharToStr(str[i]);
|
||||||
|
|
||||||
|
if(isOverBorder(digit, base)){
|
||||||
|
return OverBorder;
|
||||||
|
}
|
||||||
|
else{
|
||||||
|
|
||||||
|
strToBigNumAux(num,digit,base);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bool isNeg = (str[0] == '-') & (num->digit>0 || num->used>0);
|
||||||
|
|
||||||
|
if (isNeg){
|
||||||
|
num->isNotNeg = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* True when digit is not a valid digit value for the given base,
 * i.e. digit is negative or >= base. */
bool isOverBorder(int32_t digit, uint32_t base){
    return digit < 0 || (uint32_t)digit >= base;
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/* True when c is a hexadecimal digit: 0-9, a-f, or A-F. */
bool is16base(char c){
    if ('0' <= c && c <= '9') {
        return true;
    }
    if ('a' <= c && c <= 'f') {
        return true;
    }
    return 'A' <= c && c <= 'F';
}
|
||||||
|
|
||||||
|
/* Fold one more digit into num: num = num * base + digit.
 * All arithmetic goes through the big-number routines so the running value
 * may grow arbitrarily large. */
void strToBigNumAux(BigNum* num, int32_t digit, uint32_t base){
    BigNum* numTimesBase;
    BigNum* bigNumBase;
    BigNum* bigNumDigit;
    BigNum* tempResult;

    /* Scratch numbers; the products need up to two extra limbs. */
    bigNumInit(&numTimesBase, (num->used) + 2);
    bigNumInit(&tempResult, (num->used) + 2);
    bigNumInit(&bigNumBase, 1);
    bigNumInit(&bigNumDigit, 1);

    int32ToBigNum(bigNumBase, (int32_t)base);
    int32ToBigNum(bigNumDigit, digit);

    /* num * base, then + digit, then copy the total back into num. */
    bigNumMultiply(num, bigNumBase, numTimesBase);
    bigNumAdd(numTimesBase, bigNumDigit, tempResult);
    bigNumCopy(tempResult, num);

    bigNumFree(numTimesBase);
    bigNumFree(bigNumBase);
    bigNumFree(bigNumDigit);
    bigNumFree(tempResult);
}
|
||||||
|
|
||||||
|
// convert 0~9, a~f, and A~F to number
|
||||||
|
int32_t hexCharToStr(char c){
|
||||||
|
if (c >= '0' & c <= '9'){
|
||||||
|
return c - '0';
|
||||||
|
}
|
||||||
|
else if (c >= 'a' & c <= 'f'){
|
||||||
|
return c - 'a' + 10;
|
||||||
|
}
|
||||||
|
// A~F
|
||||||
|
else if (c >='A' & c <= 'F'){
|
||||||
|
return c - 'A' + 10;
|
||||||
|
}
|
||||||
|
else{
|
||||||
|
return OVER_BOUND; // OverBound
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// BigNum to str (should be calloc-ed first) in base (2 / 8 / 10 / 16)
|
||||||
|
int bigNumToStr(BigNum* num, char* str, uint32_t base){
|
||||||
|
// clear the string
|
||||||
|
if (strlen(str) !=0){
|
||||||
|
memset(str,0,sizeof(char)*strlen(str));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (base != 2 & base != 8 & base != 10 & base != 16){
|
||||||
|
return IllegalBase;
|
||||||
|
}
|
||||||
|
|
||||||
|
else if (num->used == 0 & num->digit[0] == 0){ // num == 0
|
||||||
|
*str++ = '0';
|
||||||
|
*str++ = '\0';
|
||||||
|
return 0;
|
||||||
|
}else{
|
||||||
|
char* negChar = NULL;
|
||||||
|
if (num->isNotNeg == false){
|
||||||
|
negChar = "-\0";
|
||||||
|
}
|
||||||
|
|
||||||
|
BigNum* x;
|
||||||
|
BigNum* q; //quotient
|
||||||
|
BigNum* b; // base
|
||||||
|
BigNum* r; // remainder
|
||||||
|
bigNumInit(&x, num->used+1);
|
||||||
|
bigNumInit(&q, num->used+1);
|
||||||
|
bigNumInit(&b, 1);
|
||||||
|
bigNumInit(&r, 1);
|
||||||
|
|
||||||
|
bigNumCopy(num,x);
|
||||||
|
int32ToBigNum(b, (int32_t)base);
|
||||||
|
|
||||||
|
char* str_orig = str;
|
||||||
|
|
||||||
|
while(x->used>0||x->digit[0]>0){
|
||||||
|
int divide_error_code = bigNumDivide(x,b,q,r);
|
||||||
|
if (divide_error_code){
|
||||||
|
return divide_error_code;
|
||||||
|
}
|
||||||
|
bigNumCopy(q,x);
|
||||||
|
|
||||||
|
char str_i; // storing current digit char
|
||||||
|
if (base == 16){
|
||||||
|
str_i = intToHexChar(r->digit[0]);
|
||||||
|
}else{
|
||||||
|
str_i = (char)(((int)r->digit[0]) + '0');
|
||||||
|
}
|
||||||
|
*str++ = str_i;
|
||||||
|
}
|
||||||
|
*str++ = '\0';
|
||||||
|
|
||||||
|
str = str_orig;
|
||||||
|
|
||||||
|
if(negChar != NULL){
|
||||||
|
strcat(str, negChar);
|
||||||
|
}
|
||||||
|
|
||||||
|
strrev(str);
|
||||||
|
|
||||||
|
bigNumFree(x);
|
||||||
|
bigNumFree(q); //quotient
|
||||||
|
bigNumFree(b); // base
|
||||||
|
bigNumFree(r);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Load a single uint32 value into num (one limb, non-negative).
 * Always returns 0. */
uint32_t uint32ToBigNum(BigNum* num, uint32_t ui32Num){
    num->isNotNeg = true;
    num->used = 0;            /* index of the top limb: a one-limb number */
    num->digit[0] = ui32Num;

    return 0;
}
|
||||||
|
|
||||||
|
/* Map a digit value to its hex character: 0-9 => '0'-'9', 10-15 => 'a'-'f'. */
char intToHexChar(int i){
    return (i < 10) ? (char)(i + '0') : (char)(i - 10 + 'a');
}
|
||||||
|
|
43
tshunhue/BigNumOperation/huge_num.h
Normal file
43
tshunhue/BigNumOperation/huge_num.h
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
#ifndef HUGE_NUM
#define HUGE_NUM

#include <stdio.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>

/* Arbitrary-precision integer; the struct definition lives in huge_num.c. */
typedef struct BigNum BigNum;

/* Magnitude-only arithmetic and comparisons (operand signs are ignored). */
int bigNumAbsAdd(BigNum* bigger, BigNum* smaller, BigNum* result);
bool bigNumAbsEqual(BigNum* lhs, BigNum* rhs);
bool bigNumAbsLarger(BigNum* lhs, BigNum* rhs);
bool bigNumAbsLess(BigNum* lhs, BigNum* rhs);
int bigNumAbsSubtract(BigNum* bigger, BigNum* smaller, BigNum* result);
/* Signed arithmetic, comparisons, and lifecycle helpers. */
int bigNumAdd(BigNum* lhs, BigNum* rhs, BigNum* result);
void bigNumCopy(BigNum* orig, BigNum * dest);
int bigNumDivide(BigNum* lhs, BigNum* rhs, BigNum* quotient, BigNum* remainder);
int bigNumDivideOneDigit(BigNum* lhs, BigNum* rhs , BigNum* quotient, BigNum* remainder);
int bigNumDivideOther(BigNum* lhs, BigNum* rhs , BigNum* quotient, BigNum* remainder);
bool bigNumEqual(BigNum* lhs, BigNum* rhs);
void bigNumFree(BigNum* num);
int bigNumInit(BigNum** num, size_t digit_num);
bool bigNumLarger(BigNum* lhs, BigNum* rhs);
bool bigNumLess(BigNum* lhs, BigNum* rhs);
int bigNumMultiply(BigNum*lhs, BigNum* rhs, BigNum* result);
int bigNumShiftLeft(BigNum* big_num, size_t n);
int bigNumShiftRight(BigNum* big_num, size_t n);
int bigNumSubtract(BigNum* lhs, BigNum* rhs, BigNum* result);
/* String conversion (supported bases: 2, 8, 10, 16). */
int bigNumToStr(BigNum* num, char* str, uint32_t base);
int int32ToBigNum(BigNum* bigNum, int32_t i32Num);
int strToBigNum(BigNum* num, char* str, uint32_t base);
void strToBigNumAux(BigNum* num, int32_t digit, uint32_t base);
bool isOverBorder(int32_t digit, uint32_t base);
bool is16base(char c);
int32_t hexCharToStr(char c);
void bigNumClearZero(BigNum* n, size_t initValue);
char intToHexChar(int i);
uint32_t uint32ToBigNum(BigNum* num, uint32_t u);
/* NOTE(review): `void main()` is non-standard C but matches test/test.c;
 * switch both to `int main(void)` together. */
void main();
void strrev(char* str);
#endif
|
BIN
tshunhue/BigNumOperation/test/test
Executable file
BIN
tshunhue/BigNumOperation/test/test
Executable file
Binary file not shown.
124
tshunhue/BigNumOperation/test/test.c
Normal file
124
tshunhue/BigNumOperation/test/test.c
Normal file
|
@ -0,0 +1,124 @@
|
||||||
|
#include "../huge_num.h"
|
||||||
|
#include "../huge_num.c"
|
||||||
|
|
||||||
|
void main(){
|
||||||
|
BigNum* a;
|
||||||
|
BigNum* b;
|
||||||
|
bigNumInit(&a,88);
|
||||||
|
bigNumInit(&b,88);
|
||||||
|
strToBigNum(a, "-123456778122345566123345", 10);
|
||||||
|
strToBigNum(b, "-12345677812234556632254534", 10);
|
||||||
|
char* s = calloc(sizeof(char),100);
|
||||||
|
bigNumToStr(a,s,2);// -11010001001001001101011111000011000000011101010110000010000110011010101010001
|
||||||
|
printf("%s\n", s);
|
||||||
|
bigNumToStr(a,s,8); // -32111153703003526020632521
|
||||||
|
printf("%s\n", s);
|
||||||
|
bigNumToStr(a,s,10); // -123456778122345566123345
|
||||||
|
printf("%s\n", s);
|
||||||
|
bigNumToStr(a,s,16); // -1a249af8603ab0433551
|
||||||
|
printf("%s\n", s);
|
||||||
|
printf("%s\n", s);
|
||||||
|
strToBigNum(b, "-11010001001001001101011111000011000000011101010110000010000110011010101010001", 2);
|
||||||
|
bigNumToStr(b,s,10);
|
||||||
|
printf("%s\n", s); // -123456778122345566123345
|
||||||
|
strToBigNum(b, "-32111153703003526020632521", 8);
|
||||||
|
bigNumToStr(b,s,10); // -123456778122345566123345
|
||||||
|
printf("%s\n", s);
|
||||||
|
strToBigNum(b, "-123456778122345566123345", 10);
|
||||||
|
bigNumToStr(b,s,10); // -123456778122345566123345
|
||||||
|
printf("%s\n", s);
|
||||||
|
strToBigNum(b, "-1a249af8603ab0433551", 16);
|
||||||
|
|
||||||
|
BigNum* x1;
|
||||||
|
BigNum* x2;
|
||||||
|
BigNum* x3;
|
||||||
|
BigNum* x4;
|
||||||
|
bigNumInit(&x1,5);
|
||||||
|
bigNumInit(&x2,5);
|
||||||
|
bigNumInit(&x3,10);
|
||||||
|
bigNumInit(&x4,10);
|
||||||
|
strToBigNum(x1, "-340282366920938463463374607431768211455", 10);
|
||||||
|
strToBigNum(x2, "-340282366920938463463374607431768211457", 10);
|
||||||
|
strToBigNum(x4, "7", 10);
|
||||||
|
bigNumMultiply(x1,x2,x3);
|
||||||
|
bigNumToStr(x3,s,10); // 115792089237316195423570985008687907853269984665640564039457584007913129639935
|
||||||
|
printf("%s\n", s);
|
||||||
|
bigNumToStr(x3,s,16); // ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
|
||||||
|
printf("%s\n", s);
|
||||||
|
bigNumAdd(x1,x2,x3);
|
||||||
|
bigNumToStr(x3,s,10); // -680564733841876926926749214863536422912
|
||||||
|
printf("%s\n", s);
|
||||||
|
bigNumSubtract(x1,x2,x3);
|
||||||
|
bigNumToStr(x3,s,10);
|
||||||
|
printf("%s\n", s); // 2
|
||||||
|
|
||||||
|
strToBigNum(x1, "340282366920938463463374607431768211461", 10);
|
||||||
|
bigNumAbsSubtract(x1, x4, x3);
|
||||||
|
bigNumToStr(x3,s,10);
|
||||||
|
printf("%s\n", s); // 340282366920938463463374607431768211454
|
||||||
|
|
||||||
|
BigNum *fac100;
|
||||||
|
BigNum *bigNumI;
|
||||||
|
|
||||||
|
// factorial 100 (i.e. 100!) = 93326215443944152681699238856266700490715968264381621468592963895217599993229915608941463976156518286253697920827223758251185210916864000000000000000000000000
|
||||||
|
bigNumInit(&fac100, 1000);
|
||||||
|
int32ToBigNum(fac100, 1);
|
||||||
|
bigNumInit(&bigNumI, 1);
|
||||||
|
char* stringfac100 = malloc(sizeof(char)* 1000);
|
||||||
|
|
||||||
|
for (int i=1;i<=100;i++){
|
||||||
|
int32ToBigNum(bigNumI,i);
|
||||||
|
BigNum *bigNumTemp;
|
||||||
|
bigNumInit(&bigNumTemp, 1000);
|
||||||
|
bigNumMultiply(fac100,bigNumI,bigNumTemp);
|
||||||
|
bigNumCopy(bigNumTemp, fac100);
|
||||||
|
bigNumFree(bigNumTemp);
|
||||||
|
}
|
||||||
|
|
||||||
|
bigNumToStr(fac100, stringfac100, 10);
|
||||||
|
|
||||||
|
printf(stringfac100, "%s");
|
||||||
|
|
||||||
|
strToBigNum(x1, "432342134213421948921303840923289032829489328", 10);
|
||||||
|
strToBigNum(x2, "-12341234134913489343241234143231", 10);
|
||||||
|
|
||||||
|
printf("=====");
|
||||||
|
bigNumDivide(x1,x2,x3,x4);
|
||||||
|
bigNumToStr(x3,s,10); // 35032325737206
|
||||||
|
printf(s, "%s");
|
||||||
|
printf("------餘數");
|
||||||
|
bigNumToStr(x4,s,10);
|
||||||
|
printf(s, "%s"); // 6891701806548243026832959736742
|
||||||
|
printf("-----乘法");
|
||||||
|
bigNumMultiply(x1,x2,x3);
|
||||||
|
bigNumToStr(x3,s,10); // 5335635504716032129210531763619338947800253085209396735089431993751337938768
|
||||||
|
printf(s, "%s");
|
||||||
|
printf("------減法");
|
||||||
|
bigNumSubtract(x1,x2,x3); // 432342134213409607687168927433945791595346097
|
||||||
|
bigNumToStr(x3,s,10);
|
||||||
|
printf(s, "%s");
|
||||||
|
printf("------加法");
|
||||||
|
bigNumAdd(x1,x2,x3); // 432342134213434290155438754412632274063632559
|
||||||
|
bigNumToStr(x3,s,10);
|
||||||
|
printf(s, "%s");
|
||||||
|
|
||||||
|
printf("=====");
|
||||||
|
bigNumDivide(x2,x1,x3,x4);
|
||||||
|
bigNumToStr(x3,s,10); // 0
|
||||||
|
printf(s, "%s");
|
||||||
|
printf("------餘數");
|
||||||
|
bigNumToStr(x4,s,10);
|
||||||
|
printf(s, "%s"); // 12341234134913489343241234143231
|
||||||
|
printf("-----乘法");
|
||||||
|
bigNumMultiply(x2,x1,x3);
|
||||||
|
bigNumToStr(x3,s,10); // 5335635504716032129210531763619338947800253085209396735089431993751337938768
|
||||||
|
printf(s, "%s");
|
||||||
|
printf("------減法");
|
||||||
|
bigNumSubtract(x2,x1,x3); // -432342134213409607687168927433945791595346097
|
||||||
|
bigNumToStr(x3,s,10);
|
||||||
|
printf(s, "%s");
|
||||||
|
printf("------加法");
|
||||||
|
bigNumAdd(x2,x1,x3); // 432342134213434290155438754412632274063632559
|
||||||
|
bigNumToStr(x3,s,10);
|
||||||
|
printf(s, "%s");
|
||||||
|
}
|
76
tshunhue/CPS.scm
Normal file
76
tshunhue/CPS.scm
Normal file
|
@ -0,0 +1,76 @@
|
||||||
|
#lang racket
|
||||||
|
(define n 0)
|
||||||
|
|
||||||
|
;; Return a fresh symbol n0, n1, n2, ... by stamping and advancing the
;; module-level counter n.
(define (gen-new-symbol)
  (let ((fresh (string-append "n" (number->string n))))
    (set! n (add1 n))
    (string->symbol fresh)))
|
||||||
|
|
||||||
|
;; cps : expr continuation-expr -> expr
;; Transform expr into continuation-passing style; k is an expression that
;; denotes the continuation receiving expr's value.
;; Handles atoms, the arithmetic primitives + - * /, set!, if and begin.
(define (cps expr k)
  (cond
    ;; indentifier / literal -> (k i)
    ((not (list? expr)) `(,k ,expr))
    ;; + - * / : CPS the first operand into k2, which CPSes the second
    ;; operand into k1, which finally applies k to the primitive result.
    ((memq (car expr) '(+ - * /))
     (let* ((r1 (gen-new-symbol))
            (r2 (gen-new-symbol))
            (k1 `(lambda (,r2) (,k (,(car expr) ,r1 ,r2))))
            (k2 `(lambda (,r1) ,(cps (list-ref expr 2) k1))))
       (cps (list-ref expr 1) k2)
       ))

    ;; (set! v e): evaluate e first, then perform the assignment inside k.
    ((eq? (car expr) 'set!)
     (let* ((r1 (gen-new-symbol))
            (k1 `(lambda (,r1) (,k (set! ,(list-ref expr 1) ,r1)))))
       (cps (list-ref expr 2) k1)))

    ;; (if e1 e2 e3): CPS the test; note k is duplicated into both arms.
    ((eq? (car expr) 'if)
     (let*
         (
          (r1 (gen-new-symbol))
          (e1 (list-ref expr 1))
          (e2 (list-ref expr 2))
          (e3 (list-ref expr 3))
          (e2-c (cps e2 k))
          (e3-c (cps e3 k))
          (e1-c `(lambda (,r1) (if ,r1 ,e2-c ,e3-c))))
       (cps e1 e1-c)))

    ((eq? (car expr) 'begin)
     (cond
       ;; (begin) = Error
       ((= (length expr) 1)
        (error "begin must have its argument"))

       ;; (begin E0) = E0
       ((= (length expr) 2)
        (cps (cadr expr) k))

       ;; (begin E0 E1): run E0 for effect (its value r1 is ignored),
       ;; then continue with E1.
       ((= (length expr) 3)
        (let*
            ((r1 (gen-new-symbol))
             (e0 (list-ref expr 1))
             (e1 (list-ref expr 2))
             (e0-c `(lambda (,r1) ,(cps e1 k))))
          (cps e0 e0-c)
          ))

       ;; (begin E0 En ...) = (begin E0 (begin En ...))
       (else
        (let*
            ((begin-tail (cons 'begin (cddr expr))) ;; (begin En ...)
             (e0 (cadr expr))
             ;; (begin E0 En...) = (begin E0 (begin En ...)))
             (new-begin (list 'begin e0 begin-tail)))
          (cps new-begin k)
          ))
       ))))
|
||||||
|
|
||||||
|
;; Identity continuation used as the top-level k in the demos below.
(define id (lambda (v) v))
;; Demo transformations; each prints the CPS form of the quoted expression.
(cps '(set! a (+ 7 8)) 'id)
(cps '(+ 1 2) 'id)
(cps '(if (+ 0 1) (+ 1 2) 2) 'id)
(cps '(begin (+ 79 89) (- (* 96 78) 95) (/ 687 77)) 'id)
|
142
tshunhue/GarbageCollection/hash_table.c
Normal file
142
tshunhue/GarbageCollection/hash_table.c
Normal file
|
@ -0,0 +1,142 @@
|
||||||
|
/*
|
||||||
|
hash_table.c - library used to reference count for variable;
|
||||||
|
*/
|
||||||
|
#include <stdlib.h>
|
||||||
|
#include <string.h>
|
||||||
|
#include <stdio.h>
|
||||||
|
|
||||||
|
/* Home-grown boolean (this file does not use <stdbool.h>). */
typedef enum {false, true} bool;

/* One open-addressing slot of the reference-count table. */
typedef struct ref_count_item{
    bool removed; // whether the slot is removed (tombstone for probing)
    unsigned long location;      // address of the tracked variable (0 = empty slot)
    unsigned int ref_count;      // current reference count
    unsigned long* dep_items;    // addresses of items this one depends on
} HashItem;

/* The reference-count hash table itself (linear probing). */
typedef struct ref_count_hash_table{
    unsigned int size;     // capacity of item_ls (hash modulus)
    unsigned int var_num;  // number of stored entries
    HashItem* item_ls;     // bucket array of size `size`
} RefCountHashTable;
|
||||||
|
|
||||||
|
/* Map a machine address to a bucket index in [0, table_size).  The address
 * is divided by the word size first so word-aligned addresses spread over
 * consecutive buckets. */
unsigned int hash_fun(unsigned long addr, unsigned int table_size){
    unsigned long word_index = addr / sizeof(unsigned int);
    return (unsigned int)(word_index % table_size);
}
|
||||||
|
|
||||||
|
void hashtable_insert_ls(HashItem* item_ls, unsigned long addr, unsigned ref_count, unsigned long* dep_item_addr_ls, unsigned int ls_size);
|
||||||
|
|
||||||
|
void hashtable_resize(RefCountHashTable* hashtable, unsigned int size){
|
||||||
|
HashItem* new_item_ls = malloc(size * sizeof(HashItem));
|
||||||
|
memset(new_item_ls, 0, size * sizeof(HashItem));
|
||||||
|
|
||||||
|
for(int i=0;i<(hashtable->size);i++){
|
||||||
|
|
||||||
|
hashtable_insert_ls(new_item_ls, (hashtable->item_ls)[i].location,
|
||||||
|
(hashtable->item_ls)[i].ref_count,
|
||||||
|
(hashtable->item_ls)[i].dep_items, size);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
free(hashtable->item_ls);
|
||||||
|
hashtable->item_ls = new_item_ls;
|
||||||
|
|
||||||
|
hashtable->size = size;
|
||||||
|
|
||||||
|
};
|
||||||
|
|
||||||
|
/* Overwrite the reference count stored in a table slot. */
void set_item_ref_count(HashItem* item, unsigned int new_ref_count){
    item->ref_count = new_ref_count;
}
|
||||||
|
|
||||||
|
/* Linear-probe lookup: return a pointer to the live slot whose location
 * equals addr, or 0 (null) if no such slot exists.  Probing starts at the
 * hashed bucket and wraps around the whole table at most once. */
HashItem* find_ref_count_item(unsigned long addr, RefCountHashTable* hashtable){
    unsigned int table_size = hashtable->size;
    unsigned int start = hash_fun(addr, table_size);

    for (unsigned int step = 0; step < table_size; step++){
        unsigned int slot = (start + step) % table_size;
        HashItem* candidate = &(hashtable->item_ls[slot]);

        /* A match only counts if the slot has not been tombstoned. */
        if (candidate->location == addr && candidate->removed == false){
            return candidate;
        }
    }

    printf("The item (addr: %lu) is not in the list.\n", addr);
    return 0;
}
|
||||||
|
|
||||||
|
void hashtable_insert_with_dep(RefCountHashTable* hashtable, unsigned long addr, unsigned ref_count, unsigned long* dep_item_addr_ls);

/* Insert an entry that has no dependent items: a zeroed single-element
 * dependency list is allocated and forwarded to the full insert. */
void hash_table_insert(RefCountHashTable* hashtable, unsigned long addr, unsigned ref_count){
    unsigned long* null_dep_item_ls = calloc(1, sizeof(unsigned long));

    hashtable_insert_with_dep(hashtable, addr, ref_count, null_dep_item_ls);
}
|
||||||
|
|
||||||
|
/* Tombstone the entry for addr: flag it removed and clear its fields so
 * later probes skip it.
 * NOTE(review): dep_items is only nulled, never freed -- potential leak if
 * the table owns the list; confirm ownership before freeing here. */
void hash_table_delete(unsigned long addr, RefCountHashTable* hashtable){
    HashItem* to_be_deleted;
    to_be_deleted = find_ref_count_item(addr, hashtable);

    if (to_be_deleted != 0){
        to_be_deleted->removed = true; // the item is removed
        to_be_deleted->dep_items = 0;
        to_be_deleted->ref_count = 0;
        to_be_deleted->location = 0;

        /* fix: a delete shrinks the number of stored variables, not the
         * bucket-array capacity.  Decrementing `size` (as before) changed
         * the modulus used by hash_fun and broke lookups of every
         * remaining entry. */
        hashtable->var_num -= 1;
    }
    else{
        /* fix: closed the unbalanced '(' in the message */
        printf("the item (addr: %lu) is not found, so it can't be deleted.\n", addr);
    }

}
|
||||||
|
|
||||||
|
|
||||||
|
// insert item with dependent item list
|
||||||
|
void hashtable_insert_with_dep(RefCountHashTable* hashtable, unsigned long addr, unsigned ref_count, unsigned long* dep_item_addr_ls){
|
||||||
|
if (hashtable->var_num + 1 == hashtable->size){
|
||||||
|
hashtable_resize(hashtable,(hashtable->size)*2);
|
||||||
|
}
|
||||||
|
|
||||||
|
hashtable_insert_ls(hashtable->item_ls, addr, ref_count, dep_item_addr_ls, hashtable->size);
|
||||||
|
|
||||||
|
hashtable->var_num += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Place an entry into item_ls with open addressing: probe forward from the
 * hashed bucket until a free slot (location == 0) is found.
 * NOTE(review): loops forever if the array is completely full -- the caller
 * (hashtable_insert_with_dep) resizes first; confirm all call paths do. */
void hashtable_insert_ls(HashItem* item_ls, unsigned long addr, unsigned ref_count, unsigned long* dep_item_addr_ls, unsigned int ls_size){
    int slot = hash_fun(addr, ls_size);

    while (item_ls[slot].location != 0){
        slot = (slot + 1) % (ls_size);
    }

    item_ls[slot].location = addr;
    item_ls[slot].removed = false; // a fresh entry is live, not tombstoned
    item_ls[slot].ref_count = ref_count;
    item_ls[slot].dep_items = dep_item_addr_ls;
}
|
||||||
|
|
||||||
|
unsigned int refcount_hashtable_size = 3;
|
||||||
|
RefCountHashTable refcount_hashtable;
|
||||||
|
|
||||||
|
|
||||||
|
// initialize the hash table
|
||||||
|
void initialize_hash_table() {
|
||||||
|
|
||||||
|
|
||||||
|
refcount_hashtable.size = refcount_hashtable_size;
|
||||||
|
refcount_hashtable.var_num = 0;
|
||||||
|
HashItem* hashtable_items = malloc(refcount_hashtable.size * sizeof(HashItem));
|
||||||
|
memset(hashtable_items, 0, refcount_hashtable.size * sizeof(HashItem));
|
||||||
|
refcount_hashtable.item_ls = hashtable_items;
|
||||||
|
}
|
25
tshunhue/LICENSE
Normal file
25
tshunhue/LICENSE
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
BSD 2-Clause License
|
||||||
|
|
||||||
|
Copyright (c) 2018, Chen, Chien-ting
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer.
|
||||||
|
|
||||||
|
* Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer in the documentation
|
||||||
|
and/or other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||||
|
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||||
|
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||||
|
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||||
|
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||||
|
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
5
tshunhue/README.md
Normal file
5
tshunhue/README.md
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
春花 Tshunhue
|
||||||
|
=============
|
||||||
|
這是一個叫做春花的程式語言(玩具語言)的考案。
|
||||||
|
|
||||||
|
詳細請參考 `./docs`。
|
20
tshunhue/docs/Makefile
Normal file
20
tshunhue/docs/Makefile
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
# Minimal makefile for Sphinx documentation
|
||||||
|
#
|
||||||
|
|
||||||
|
# You can set these variables from the command line, and also
|
||||||
|
# from the environment for the first two.
|
||||||
|
SPHINXOPTS ?=
|
||||||
|
SPHINXBUILD ?= sphinx-build
|
||||||
|
SOURCEDIR = source
|
||||||
|
BUILDDIR = build
|
||||||
|
|
||||||
|
# Put it first so that "make" without argument is like "make help".
|
||||||
|
help:
|
||||||
|
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
|
|
||||||
|
.PHONY: help Makefile
|
||||||
|
|
||||||
|
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||||
|
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||||
|
%: Makefile
|
||||||
|
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
BIN
tshunhue/docs/build/doctrees/Contents/a.doctree
vendored
Normal file
BIN
tshunhue/docs/build/doctrees/Contents/a.doctree
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/doctrees/ample/a.doctree
vendored
Normal file
BIN
tshunhue/docs/build/doctrees/ample/a.doctree
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/doctrees/ample/garbagecollection.doctree
vendored
Normal file
BIN
tshunhue/docs/build/doctrees/ample/garbagecollection.doctree
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/doctrees/environment.pickle
vendored
Normal file
BIN
tshunhue/docs/build/doctrees/environment.pickle
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/doctrees/index.doctree
vendored
Normal file
BIN
tshunhue/docs/build/doctrees/index.doctree
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/doctrees/型別.doctree
vendored
Normal file
BIN
tshunhue/docs/build/doctrees/型別.doctree
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/doctrees/基本函數介紹.doctree
vendored
Normal file
BIN
tshunhue/docs/build/doctrees/基本函數介紹.doctree
vendored
Normal file
Binary file not shown.
4
tshunhue/docs/build/html/.buildinfo
vendored
Normal file
4
tshunhue/docs/build/html/.buildinfo
vendored
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
# Sphinx build info version 1
|
||||||
|
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
|
||||||
|
config: f2d32bf1d61b4e47f24f572e50c2b379
|
||||||
|
tags: 645f666f9bcd5a90fca523b33c5a78b7
|
101
tshunhue/docs/build/html/Contents/a.html
vendored
Normal file
101
tshunhue/docs/build/html/Contents/a.html
vendored
Normal file
|
@ -0,0 +1,101 @@
|
||||||
|
|
||||||
|
<!DOCTYPE html>
|
||||||
|
|
||||||
|
<html xmlns="http://www.w3.org/1999/xhtml" lang="zh-tw">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<title>Example — Tshunhue Documents documentation</title>
|
||||||
|
<link rel="stylesheet" href="../_static/alabaster.css" type="text/css" />
|
||||||
|
<link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
|
||||||
|
<script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
|
||||||
|
<script type="text/javascript" src="../_static/jquery.js"></script>
|
||||||
|
<script type="text/javascript" src="../_static/underscore.js"></script>
|
||||||
|
<script type="text/javascript" src="../_static/doctools.js"></script>
|
||||||
|
<script type="text/javascript" src="../_static/language_data.js"></script>
|
||||||
|
<link rel="index" title="Index" href="../genindex.html" />
|
||||||
|
<link rel="search" title="Search" href="../search.html" />
|
||||||
|
|
||||||
|
<link rel="stylesheet" href="../_static/custom.css" type="text/css" />
|
||||||
|
|
||||||
|
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
|
||||||
|
|
||||||
|
</head><body>
|
||||||
|
|
||||||
|
|
||||||
|
<div class="document">
|
||||||
|
<div class="documentwrapper">
|
||||||
|
<div class="bodywrapper">
|
||||||
|
|
||||||
|
|
||||||
|
<div class="body" role="main">
|
||||||
|
|
||||||
|
<div class="section" id="example">
|
||||||
|
<h1>Example<a class="headerlink" href="#example" title="Permalink to this headline">¶</a></h1>
|
||||||
|
<p>ipsum lorem</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
|
||||||
|
<div class="sphinxsidebarwrapper">
|
||||||
|
<h1 class="logo"><a href="../index.html">Tshunhue Documents</a></h1>
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<h3>Navigation</h3>
|
||||||
|
|
||||||
|
<div class="relations">
|
||||||
|
<h3>Related Topics</h3>
|
||||||
|
<ul>
|
||||||
|
<li><a href="../index.html">Documentation overview</a><ul>
|
||||||
|
</ul></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
<div id="searchbox" style="display: none" role="search">
|
||||||
|
<h3 id="searchlabel">Quick search</h3>
|
||||||
|
<div class="searchformwrapper">
|
||||||
|
<form class="search" action="../search.html" method="get">
|
||||||
|
<input type="text" name="q" aria-labelledby="searchlabel" />
|
||||||
|
<input type="submit" value="Go" />
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<script type="text/javascript">$('#searchbox').show(0);</script>
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="clearer"></div>
|
||||||
|
</div>
|
||||||
|
<div class="footer">
|
||||||
|
©2019, Yoxem Chen.
|
||||||
|
|
||||||
|
|
|
||||||
|
Powered by <a href="http://sphinx-doc.org/">Sphinx 2.2.0</a>
|
||||||
|
& <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.12</a>
|
||||||
|
|
||||||
|
|
|
||||||
|
<a href="../_sources/Contents/a.rst.txt"
|
||||||
|
rel="nofollow">Page source</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
</body>
|
||||||
|
</html>
|
3
tshunhue/docs/build/html/_sources/Contents/a.rst.txt
vendored
Normal file
3
tshunhue/docs/build/html/_sources/Contents/a.rst.txt
vendored
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
Example
|
||||||
|
=============
|
||||||
|
ipsum lorem
|
3
tshunhue/docs/build/html/_sources/a.rst.txt
vendored
Normal file
3
tshunhue/docs/build/html/_sources/a.rst.txt
vendored
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
Example
|
||||||
|
=============
|
||||||
|
ipsum lorem
|
2
tshunhue/docs/build/html/_sources/ample/a.rst.txt
vendored
Normal file
2
tshunhue/docs/build/html/_sources/ample/a.rst.txt
vendored
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
Example
|
||||||
|
=============
|
2
tshunhue/docs/build/html/_sources/ample/garbagecollection.rst.txt
vendored
Normal file
2
tshunhue/docs/build/html/_sources/ample/garbagecollection.rst.txt
vendored
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
Welcome to Tshunhue Documents's documentation!
|
||||||
|
==============================================
|
2
tshunhue/docs/build/html/_sources/garbagecollection.rst.txt
vendored
Normal file
2
tshunhue/docs/build/html/_sources/garbagecollection.rst.txt
vendored
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
Welcome to Tshunhue Documents's documentation!
|
||||||
|
==============================================
|
77
tshunhue/docs/build/html/_sources/index.rst.txt
vendored
Normal file
77
tshunhue/docs/build/html/_sources/index.rst.txt
vendored
Normal file
|
@ -0,0 +1,77 @@
|
||||||
|
.. Tshunhue Documents documentation master file, created by
|
||||||
|
sphinx-quickstart on Tue Sep 24 21:40:29 2019.
|
||||||
|
You can adapt this file completely to your liking, but it should at least
|
||||||
|
contain the root `toctree` directive.
|
||||||
|
|
||||||
|
Welcome to Tshunhue Documents's documentation!
|
||||||
|
==============================================
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 3
|
||||||
|
:caption: Contents
|
||||||
|
|
||||||
|
基本函數介紹
|
||||||
|
型別
|
||||||
|
|
||||||
|
|
||||||
|
Indices and tables
|
||||||
|
==================
|
||||||
|
|
||||||
|
* :ref:`genindex`
|
||||||
|
* :ref:`modindex`
|
||||||
|
* :ref:`search`
|
||||||
|
|
||||||
|
|
||||||
|
Tshunhue 是什麼呢?
|
||||||
|
===================
|
||||||
|
Tshunhue(河洛語:春花)是一個 Lisp 系的玩具程式語言。主要的特徵有:
|
||||||
|
|
||||||
|
- 不可變物件。
|
||||||
|
- 閉包作為第一類物件。
|
||||||
|
- 靜態型別。
|
||||||
|
- 和型別 (Sum Type) 和積型別 (Product Type)。
|
||||||
|
|
||||||
|
|
||||||
|
其他
|
||||||
|
============
|
||||||
|
|
||||||
|
垃圾回收
|
||||||
|
------------
|
||||||
|
|
||||||
|
先不實現垃圾回收。若是要實現垃圾回收,可以用引用記數。做一個這樣的表格:
|
||||||
|
|
||||||
|
+------------+------------+
|
||||||
|
| 記憶體位址 | 引用次數 |
|
||||||
|
+------------+------------+
|
||||||
|
| 0x12345 | 1 |
|
||||||
|
| | |
|
||||||
|
| …… | …… |
|
||||||
|
+------------+------------+
|
||||||
|
|
||||||
|
以下有一個範例:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
(Type IntPair (IntPair Int Int))
|
||||||
|
(def IntPair a (IntPair x y))
|
||||||
|
|
||||||
|
; 這個時候我們可以呼叫引用計數增加函數 inc_ref_count()
|
||||||
|
; inc_ref_count(&a);
|
||||||
|
; inc_ref_count(&x);
|
||||||
|
; inc_ref_count(&y);
|
||||||
|
...程式碼
|
||||||
|
; x 活著的程式碼區段結束時,呼叫引用記數減少函數 dec_ref_count()
|
||||||
|
; dec_ref_count(&a);
|
||||||
|
; dec_ref_count(&a[0]); // &x
|
||||||
|
; dec_ref_count(&a[1]); // &y
|
||||||
|
|
||||||
|
|
||||||
|
實做方式
|
||||||
|
------------
|
||||||
|
用 Python3 將原始碼轉成 AST,以 s-expression 表示。再進行型別推測後,進行閉包轉換,不進行 CPS 變換,利用 llvmlite 生成 LLVM IR。
|
||||||
|
|
||||||
|
需要的東西:
|
||||||
|
|
||||||
|
* llvmlite
|
||||||
|
* python3
|
||||||
|
|
71
tshunhue/docs/build/html/_sources/型別.rst.txt
vendored
Normal file
71
tshunhue/docs/build/html/_sources/型別.rst.txt
vendored
Normal file
|
@ -0,0 +1,71 @@
|
||||||
|
=============
|
||||||
|
型別
|
||||||
|
=============
|
||||||
|
|
||||||
|
簡單型別
|
||||||
|
=============
|
||||||
|
|
||||||
|
數字
|
||||||
|
-------------
|
||||||
|
|
||||||
|
* `int`:整數,初期可用 64 位元帶符號整數處理。
|
||||||
|
|
||||||
|
* `dou`:雙精度浮點數
|
||||||
|
|
||||||
|
字串
|
||||||
|
-------------
|
||||||
|
* `str`:字串
|
||||||
|
|
||||||
|
暫不支援 `char`。
|
||||||
|
|
||||||
|
布林
|
||||||
|
------------
|
||||||
|
* `bool`:布林值。#t:真值,#f:假值。
|
||||||
|
|
||||||
|
複雜型別
|
||||||
|
============
|
||||||
|
|
||||||
|
自訂型別
|
||||||
|
------------
|
||||||
|
可以自訂 product type 或 sum type。為求簡化,不支援型別當參數。
|
||||||
|
|
||||||
|
.. function :: (Type TYPE-NAME (Type-constronctor x1 x2 ...))
|
||||||
|
|
||||||
|
.. function :: (Type TYPE-NAME (U (Type-const1 x1 x2 ...)(Type-const2 x1 x2 ...)...))
|
||||||
|
|
||||||
|
.. function :: (Type TYPE-NAME (Type-constronctor))
|
||||||
|
|
||||||
|
.. function :: (Type TYPE-NAME (U (Type-const1)(Type-const2)...))
|
||||||
|
|
||||||
|
U 是 union 的意思,指 Sum Type。
|
||||||
|
|
||||||
|
例如:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
(Type IntPair (IntPair Int Int))
|
||||||
|
|
||||||
|
|
||||||
|
如果可以模式匹配,就變這樣:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
(\ ((IntPair x))
|
||||||
|
(match (x)
|
||||||
|
((IntPair 7 9) 9)
|
||||||
|
((IntPair 8 a) a)
|
||||||
|
((IntPair x y) (+ x y))))
|
||||||
|
|
||||||
|
Sum Type 的用法:
|
||||||
|
|
||||||
|
(Type OrigColor (U (Red)(Green)(Blue))) ; 三原色
|
||||||
|
|
||||||
|
如果可以模式匹配,就變這樣:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
(orig-color-to-str ((OrigColor x))
|
||||||
|
(match (x)
|
||||||
|
((Red) "Red")
|
||||||
|
((Green) "Green")
|
||||||
|
((Blue) "Blue")))
|
164
tshunhue/docs/build/html/_sources/基本函數介紹.rst.txt
vendored
Normal file
164
tshunhue/docs/build/html/_sources/基本函數介紹.rst.txt
vendored
Normal file
|
@ -0,0 +1,164 @@
|
||||||
|
=============
|
||||||
|
基本函數介紹
|
||||||
|
=============
|
||||||
|
|
||||||
|
四則運算
|
||||||
|
=============
|
||||||
|
|
||||||
|
.. function :: + : (-> int int int)
|
||||||
|
|
||||||
|
整數相加。
|
||||||
|
|
||||||
|
|
||||||
|
.. function :: - : (-> int int int)
|
||||||
|
|
||||||
|
整數相減。
|
||||||
|
|
||||||
|
|
||||||
|
.. function :: * : (-> int int int)
|
||||||
|
|
||||||
|
整數相乘。
|
||||||
|
|
||||||
|
.. function :: / : (-> int int int)
|
||||||
|
|
||||||
|
整數相除,得商數。除以 0 的時候跳出特例,中止程式。如果用 C 語言表示的話:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
static jmp_buf buf;
|
||||||
|
...
|
||||||
|
void int_div_closure_thunk(Object* closure, Object* lhs, Object* rhs, Object* result){
|
||||||
|
if (lhs->value==0){
|
||||||
|
printf("Exception: Div 0 Error.");
|
||||||
|
longjmp(buf,1);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
...
|
||||||
|
int main(void){
|
||||||
|
int jmpVal = setjmp(buf);
|
||||||
|
if (jmpVal == 0){
|
||||||
|
// main_body
|
||||||
|
}else{
|
||||||
|
// exit the program with the exception
|
||||||
|
printf("Exit the program with the exception");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.. function :: +. : (-> dou dou dou)
|
||||||
|
|
||||||
|
浮點數相加。
|
||||||
|
|
||||||
|
.. function :: -. : (-> dou dou dou)
|
||||||
|
|
||||||
|
浮點數相減。
|
||||||
|
|
||||||
|
.. function :: *. : (-> dou dou dou)
|
||||||
|
|
||||||
|
浮點數相乘。
|
||||||
|
|
||||||
|
.. function :: /. : (-> dou dou dou)
|
||||||
|
|
||||||
|
浮點數相除。
|
||||||
|
|
||||||
|
|
||||||
|
布爾值
|
||||||
|
============
|
||||||
|
|
||||||
|
.. function :: and : (-> bool bool bool)
|
||||||
|
|
||||||
|
AND。
|
||||||
|
|
||||||
|
|
||||||
|
.. function :: or : (-> bool bool bool)
|
||||||
|
|
||||||
|
OR。
|
||||||
|
|
||||||
|
.. function :: not : (-> bool bool)
|
||||||
|
|
||||||
|
NOT。
|
||||||
|
|
||||||
|
流程控制
|
||||||
|
============
|
||||||
|
.. function :: (if cond then alt)
|
||||||
|
|
||||||
|
若 cond 為真則返回 then,否則返回 alt。
|
||||||
|
|
||||||
|
影印字元
|
||||||
|
=============
|
||||||
|
|
||||||
|
.. function :: print-str : (-> str void)
|
||||||
|
|
||||||
|
列印字串。``void`` 表示不返回任何東西。
|
||||||
|
|
||||||
|
.. function :: int-to-str : (-> int str)
|
||||||
|
|
||||||
|
整數轉成十進位字串(10 為 base)。
|
||||||
|
|
||||||
|
.. function :: dou-to-str : (-> int str)
|
||||||
|
|
||||||
|
浮點數轉成十進位字串(10 為 base)。
|
||||||
|
|
||||||
|
|
||||||
|
定義變數 (=)
|
||||||
|
=============
|
||||||
|
|
||||||
|
非函數
|
||||||
|
---------
|
||||||
|
|
||||||
|
|
||||||
|
語法如下:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
(def Type var expression)
|
||||||
|
|
||||||
|
例如:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
(def int a 10)
|
||||||
|
(def dou b (+ 3.1 7.0))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
..note ::
|
||||||
|
|
||||||
|
因為 tshunhue 裡面,所有變數都是不可變物件, ``define`` 不可以拿來重新設定值。定義閉包也一樣。
|
||||||
|
|
||||||
|
|
||||||
|
函數(閉包)
|
||||||
|
---------------
|
||||||
|
|
||||||
|
.. _defining-lambda:
|
||||||
|
|
||||||
|
閉包的定義比較複雜。但大致如下:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
(def Type var lambda-function)
|
||||||
|
|
||||||
|
例如:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
(def (-> int int) int-add1 (\ ((int x)) (+ x 1)))
|
||||||
|
(def (-> dou dou dou) sqrt-sum (\ ((dou x)(dou y)) (+. (*. x x) (*. y y))))
|
||||||
|
|
||||||
|
匿名函數 (lambda function)
|
||||||
|
==============================
|
||||||
|
|
||||||
|
定義:
|
||||||
|
|
||||||
|
|
||||||
|
.. function :: (\ () body+)
|
||||||
|
|
||||||
|
.. function :: (\ ((Type var)) body+)
|
||||||
|
|
||||||
|
.. function :: (\ ((Type1 var1)(Type2 var2)[...]*) body+)
|
||||||
|
|
||||||
|
|
||||||
|
所有函數都是第一類物件與閉包,用 lexical scope 保存前文變數。
|
||||||
|
|
||||||
|
|
701
tshunhue/docs/build/html/_static/alabaster.css
vendored
Normal file
701
tshunhue/docs/build/html/_static/alabaster.css
vendored
Normal file
|
@ -0,0 +1,701 @@
|
||||||
|
@import url("basic.css");
|
||||||
|
|
||||||
|
/* -- page layout ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: Georgia, serif;
|
||||||
|
font-size: 17px;
|
||||||
|
background-color: #fff;
|
||||||
|
color: #000;
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
div.document {
|
||||||
|
width: 940px;
|
||||||
|
margin: 30px auto 0 auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.documentwrapper {
|
||||||
|
float: left;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.bodywrapper {
|
||||||
|
margin: 0 0 0 220px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar {
|
||||||
|
width: 220px;
|
||||||
|
font-size: 14px;
|
||||||
|
line-height: 1.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
hr {
|
||||||
|
border: 1px solid #B1B4B6;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body {
|
||||||
|
background-color: #fff;
|
||||||
|
color: #3E4349;
|
||||||
|
padding: 0 30px 0 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body > .section {
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.footer {
|
||||||
|
width: 940px;
|
||||||
|
margin: 20px auto 30px auto;
|
||||||
|
font-size: 14px;
|
||||||
|
color: #888;
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.footer a {
|
||||||
|
color: #888;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.caption {
|
||||||
|
font-family: inherit;
|
||||||
|
font-size: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
div.relations {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
div.sphinxsidebar a {
|
||||||
|
color: #444;
|
||||||
|
text-decoration: none;
|
||||||
|
border-bottom: 1px dotted #999;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar a:hover {
|
||||||
|
border-bottom: 1px solid #999;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper {
|
||||||
|
padding: 18px 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper p.logo {
|
||||||
|
padding: 0;
|
||||||
|
margin: -10px 0 0 0px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper h1.logo {
|
||||||
|
margin-top: -10px;
|
||||||
|
text-align: center;
|
||||||
|
margin-bottom: 5px;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper h1.logo-name {
|
||||||
|
margin-top: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper p.blurb {
|
||||||
|
margin-top: 0;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h3,
|
||||||
|
div.sphinxsidebar h4 {
|
||||||
|
font-family: Georgia, serif;
|
||||||
|
color: #444;
|
||||||
|
font-size: 24px;
|
||||||
|
font-weight: normal;
|
||||||
|
margin: 0 0 5px 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h4 {
|
||||||
|
font-size: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h3 a {
|
||||||
|
color: #444;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar p.logo a,
|
||||||
|
div.sphinxsidebar h3 a,
|
||||||
|
div.sphinxsidebar p.logo a:hover,
|
||||||
|
div.sphinxsidebar h3 a:hover {
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar p {
|
||||||
|
color: #555;
|
||||||
|
margin: 10px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul {
|
||||||
|
margin: 10px 0;
|
||||||
|
padding: 0;
|
||||||
|
color: #000;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul li.toctree-l1 > a {
|
||||||
|
font-size: 120%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul li.toctree-l2 > a {
|
||||||
|
font-size: 110%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar input {
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
font-family: Georgia, serif;
|
||||||
|
font-size: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar hr {
|
||||||
|
border: none;
|
||||||
|
height: 1px;
|
||||||
|
color: #AAA;
|
||||||
|
background: #AAA;
|
||||||
|
|
||||||
|
text-align: left;
|
||||||
|
margin-left: 0;
|
||||||
|
width: 50%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar .badge {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar .badge:hover {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* To address an issue with donation coming after search */
|
||||||
|
div.sphinxsidebar h3.donation {
|
||||||
|
margin-top: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- body styles ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
a {
|
||||||
|
color: #004B6B;
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
a:hover {
|
||||||
|
color: #6D4100;
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body h1,
|
||||||
|
div.body h2,
|
||||||
|
div.body h3,
|
||||||
|
div.body h4,
|
||||||
|
div.body h5,
|
||||||
|
div.body h6 {
|
||||||
|
font-family: Georgia, serif;
|
||||||
|
font-weight: normal;
|
||||||
|
margin: 30px 0px 10px 0px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; }
|
||||||
|
div.body h2 { font-size: 180%; }
|
||||||
|
div.body h3 { font-size: 150%; }
|
||||||
|
div.body h4 { font-size: 130%; }
|
||||||
|
div.body h5 { font-size: 100%; }
|
||||||
|
div.body h6 { font-size: 100%; }
|
||||||
|
|
||||||
|
a.headerlink {
|
||||||
|
color: #DDD;
|
||||||
|
padding: 0 4px;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.headerlink:hover {
|
||||||
|
color: #444;
|
||||||
|
background: #EAEAEA;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body p, div.body dd, div.body li {
|
||||||
|
line-height: 1.4em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition {
|
||||||
|
margin: 20px 0px;
|
||||||
|
padding: 10px 30px;
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition tt.xref, div.admonition code.xref, div.admonition a tt {
|
||||||
|
background-color: #FBFBFB;
|
||||||
|
border-bottom: 1px solid #fafafa;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition p.admonition-title {
|
||||||
|
font-family: Georgia, serif;
|
||||||
|
font-weight: normal;
|
||||||
|
font-size: 24px;
|
||||||
|
margin: 0 0 10px 0;
|
||||||
|
padding: 0;
|
||||||
|
line-height: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition p.last {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.highlight {
|
||||||
|
background-color: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
dt:target, .highlight {
|
||||||
|
background: #FAF3E8;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.warning {
|
||||||
|
background-color: #FCC;
|
||||||
|
border: 1px solid #FAA;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.danger {
|
||||||
|
background-color: #FCC;
|
||||||
|
border: 1px solid #FAA;
|
||||||
|
-moz-box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
-webkit-box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.error {
|
||||||
|
background-color: #FCC;
|
||||||
|
border: 1px solid #FAA;
|
||||||
|
-moz-box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
-webkit-box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.caution {
|
||||||
|
background-color: #FCC;
|
||||||
|
border: 1px solid #FAA;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.attention {
|
||||||
|
background-color: #FCC;
|
||||||
|
border: 1px solid #FAA;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.important {
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.note {
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.tip {
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.hint {
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.seealso {
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.topic {
|
||||||
|
background-color: #EEE;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.admonition-title {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.admonition-title:after {
|
||||||
|
content: ":";
|
||||||
|
}
|
||||||
|
|
||||||
|
pre, tt, code {
|
||||||
|
font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hll {
|
||||||
|
background-color: #FFC;
|
||||||
|
margin: 0 -12px;
|
||||||
|
padding: 0 12px;
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.screenshot {
|
||||||
|
}
|
||||||
|
|
||||||
|
tt.descname, tt.descclassname, code.descname, code.descclassname {
|
||||||
|
font-size: 0.95em;
|
||||||
|
}
|
||||||
|
|
||||||
|
tt.descname, code.descname {
|
||||||
|
padding-right: 0.08em;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.screenshot {
|
||||||
|
-moz-box-shadow: 2px 2px 4px #EEE;
|
||||||
|
-webkit-box-shadow: 2px 2px 4px #EEE;
|
||||||
|
box-shadow: 2px 2px 4px #EEE;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.docutils {
|
||||||
|
border: 1px solid #888;
|
||||||
|
-moz-box-shadow: 2px 2px 4px #EEE;
|
||||||
|
-webkit-box-shadow: 2px 2px 4px #EEE;
|
||||||
|
box-shadow: 2px 2px 4px #EEE;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.docutils td, table.docutils th {
|
||||||
|
border: 1px solid #888;
|
||||||
|
padding: 0.25em 0.7em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.field-list, table.footnote {
|
||||||
|
border: none;
|
||||||
|
-moz-box-shadow: none;
|
||||||
|
-webkit-box-shadow: none;
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.footnote {
|
||||||
|
margin: 15px 0;
|
||||||
|
width: 100%;
|
||||||
|
border: 1px solid #EEE;
|
||||||
|
background: #FDFDFD;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.footnote + table.footnote {
|
||||||
|
margin-top: -15px;
|
||||||
|
border-top: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.field-list th {
|
||||||
|
padding: 0 0.8em 0 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.field-list td {
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.field-list p {
|
||||||
|
margin-bottom: 0.8em;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Cloned from
|
||||||
|
* https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68
|
||||||
|
*/
|
||||||
|
.field-name {
|
||||||
|
-moz-hyphens: manual;
|
||||||
|
-ms-hyphens: manual;
|
||||||
|
-webkit-hyphens: manual;
|
||||||
|
hyphens: manual;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.footnote td.label {
|
||||||
|
width: .1px;
|
||||||
|
padding: 0.3em 0 0.3em 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.footnote td {
|
||||||
|
padding: 0.3em 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl dd {
|
||||||
|
margin-left: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
blockquote {
|
||||||
|
margin: 0 0 0 30px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul, ol {
|
||||||
|
/* Matches the 30px from the narrow-screen "li > ul" selector below */
|
||||||
|
margin: 10px 0 10px 30px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
pre {
|
||||||
|
background: #EEE;
|
||||||
|
padding: 7px 30px;
|
||||||
|
margin: 15px 0px;
|
||||||
|
line-height: 1.3em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.viewcode-block:target {
|
||||||
|
background: #ffd;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl pre, blockquote pre, li pre {
|
||||||
|
margin-left: 0;
|
||||||
|
padding-left: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
tt, code {
|
||||||
|
background-color: #ecf0f3;
|
||||||
|
color: #222;
|
||||||
|
/* padding: 1px 2px; */
|
||||||
|
}
|
||||||
|
|
||||||
|
tt.xref, code.xref, a tt {
|
||||||
|
background-color: #FBFBFB;
|
||||||
|
border-bottom: 1px solid #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.reference {
|
||||||
|
text-decoration: none;
|
||||||
|
border-bottom: 1px dotted #004B6B;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Don't put an underline on images */
|
||||||
|
a.image-reference, a.image-reference:hover {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.reference:hover {
|
||||||
|
border-bottom: 1px solid #6D4100;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.footnote-reference {
|
||||||
|
text-decoration: none;
|
||||||
|
font-size: 0.7em;
|
||||||
|
vertical-align: top;
|
||||||
|
border-bottom: 1px dotted #004B6B;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.footnote-reference:hover {
|
||||||
|
border-bottom: 1px solid #6D4100;
|
||||||
|
}
|
||||||
|
|
||||||
|
a:hover tt, a:hover code {
|
||||||
|
background: #EEE;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@media screen and (max-width: 870px) {
|
||||||
|
|
||||||
|
div.sphinxsidebar {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.document {
|
||||||
|
width: 100%;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
div.documentwrapper {
|
||||||
|
margin-left: 0;
|
||||||
|
margin-top: 0;
|
||||||
|
margin-right: 0;
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.bodywrapper {
|
||||||
|
margin-top: 0;
|
||||||
|
margin-right: 0;
|
||||||
|
margin-bottom: 0;
|
||||||
|
margin-left: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul {
|
||||||
|
margin-left: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
li > ul {
|
||||||
|
/* Matches the 30px from the "ul, ol" selector above */
|
||||||
|
margin-left: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.document {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.bodywrapper {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.github {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@media screen and (max-width: 875px) {
|
||||||
|
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
padding: 20px 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.documentwrapper {
|
||||||
|
float: none;
|
||||||
|
background: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar {
|
||||||
|
display: block;
|
||||||
|
float: none;
|
||||||
|
width: 102.5%;
|
||||||
|
margin: 50px -30px -20px -30px;
|
||||||
|
padding: 10px 20px;
|
||||||
|
background: #333;
|
||||||
|
color: #FFF;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p,
|
||||||
|
div.sphinxsidebar h3 a {
|
||||||
|
color: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar a {
|
||||||
|
color: #AAA;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar p.logo {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.document {
|
||||||
|
width: 100%;
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.footer {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.bodywrapper {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body {
|
||||||
|
min-height: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rtd_doc_footer {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.document {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.github {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* misc. */
|
||||||
|
|
||||||
|
.revsys-inline {
|
||||||
|
display: none!important;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Make nested-list/multi-paragraph items look better in Releases changelog
|
||||||
|
* pages. Without this, docutils' magical list fuckery causes inconsistent
|
||||||
|
* formatting between different release sub-lists.
|
||||||
|
*/
|
||||||
|
div#changelog > div.section > ul > li > p:only-child {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Hide fugly table cell borders in ..bibliography:: directive output */
|
||||||
|
table.docutils.citation, table.docutils.citation td, table.docutils.citation th {
|
||||||
|
border: none;
|
||||||
|
/* Below needed in some edge cases; if not applied, bottom shadows appear */
|
||||||
|
-moz-box-shadow: none;
|
||||||
|
-webkit-box-shadow: none;
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* relbar */
|
||||||
|
|
||||||
|
.related {
|
||||||
|
line-height: 30px;
|
||||||
|
width: 100%;
|
||||||
|
font-size: 0.9rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.related.top {
|
||||||
|
border-bottom: 1px solid #EEE;
|
||||||
|
margin-bottom: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.related.bottom {
|
||||||
|
border-top: 1px solid #EEE;
|
||||||
|
}
|
||||||
|
|
||||||
|
.related ul {
|
||||||
|
padding: 0;
|
||||||
|
margin: 0;
|
||||||
|
list-style: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.related li {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav#rellinks {
|
||||||
|
float: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav#rellinks li+li:before {
|
||||||
|
content: "|";
|
||||||
|
}
|
||||||
|
|
||||||
|
nav#breadcrumbs li+li:before {
|
||||||
|
content: "\00BB";
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Hide certain items when printing */
|
||||||
|
@media print {
|
||||||
|
div.related {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
}
|
855
tshunhue/docs/build/html/_static/basic.css
vendored
Normal file
855
tshunhue/docs/build/html/_static/basic.css
vendored
Normal file
|
@ -0,0 +1,855 @@
|
||||||
|
/*
|
||||||
|
* basic.css
|
||||||
|
* ~~~~~~~~~
|
||||||
|
*
|
||||||
|
* Sphinx stylesheet -- basic theme.
|
||||||
|
*
|
||||||
|
* :copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS.
|
||||||
|
* :license: BSD, see LICENSE for details.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
/* -- main layout ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.clearer {
|
||||||
|
clear: both;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.section::after {
|
||||||
|
display: block;
|
||||||
|
content: '';
|
||||||
|
clear: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- relbar ---------------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.related {
|
||||||
|
width: 100%;
|
||||||
|
font-size: 90%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related h3 {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related ul {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0 0 0 10px;
|
||||||
|
list-style: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related li {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related li.right {
|
||||||
|
float: right;
|
||||||
|
margin-right: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- sidebar --------------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper {
|
||||||
|
padding: 10px 5px 0 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar {
|
||||||
|
float: left;
|
||||||
|
width: 230px;
|
||||||
|
margin-left: -100%;
|
||||||
|
font-size: 90%;
|
||||||
|
word-wrap: break-word;
|
||||||
|
overflow-wrap : break-word;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul {
|
||||||
|
list-style: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul ul,
|
||||||
|
div.sphinxsidebar ul.want-points {
|
||||||
|
margin-left: 20px;
|
||||||
|
list-style: square;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul ul {
|
||||||
|
margin-top: 0;
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar form {
|
||||||
|
margin-top: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar input {
|
||||||
|
border: 1px solid #98dbcc;
|
||||||
|
font-family: sans-serif;
|
||||||
|
font-size: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar #searchbox form.search {
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar #searchbox input[type="text"] {
|
||||||
|
float: left;
|
||||||
|
width: 80%;
|
||||||
|
padding: 0.25em;
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar #searchbox input[type="submit"] {
|
||||||
|
float: left;
|
||||||
|
width: 20%;
|
||||||
|
border-left: none;
|
||||||
|
padding: 0.25em;
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
img {
|
||||||
|
border: 0;
|
||||||
|
max-width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- search page ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
ul.search {
|
||||||
|
margin: 10px 0 0 20px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul.search li {
|
||||||
|
padding: 5px 0 5px 20px;
|
||||||
|
background-image: url(file.png);
|
||||||
|
background-repeat: no-repeat;
|
||||||
|
background-position: 0 7px;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul.search li a {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul.search li div.context {
|
||||||
|
color: #888;
|
||||||
|
margin: 2px 0 0 30px;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul.keywordmatches li.goodmatch a {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- index page ------------------------------------------------------------ */
|
||||||
|
|
||||||
|
table.contentstable {
|
||||||
|
width: 90%;
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.contentstable p.biglink {
|
||||||
|
line-height: 150%;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.biglink {
|
||||||
|
font-size: 1.3em;
|
||||||
|
}
|
||||||
|
|
||||||
|
span.linkdescr {
|
||||||
|
font-style: italic;
|
||||||
|
padding-top: 5px;
|
||||||
|
font-size: 90%;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- general index --------------------------------------------------------- */
|
||||||
|
|
||||||
|
table.indextable {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.indextable td {
|
||||||
|
text-align: left;
|
||||||
|
vertical-align: top;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.indextable ul {
|
||||||
|
margin-top: 0;
|
||||||
|
margin-bottom: 0;
|
||||||
|
list-style-type: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.indextable > tbody > tr > td > ul {
|
||||||
|
padding-left: 0em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.indextable tr.pcap {
|
||||||
|
height: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.indextable tr.cap {
|
||||||
|
margin-top: 10px;
|
||||||
|
background-color: #f2f2f2;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.toggler {
|
||||||
|
margin-right: 3px;
|
||||||
|
margin-top: 3px;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.modindex-jumpbox {
|
||||||
|
border-top: 1px solid #ddd;
|
||||||
|
border-bottom: 1px solid #ddd;
|
||||||
|
margin: 1em 0 1em 0;
|
||||||
|
padding: 0.4em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.genindex-jumpbox {
|
||||||
|
border-top: 1px solid #ddd;
|
||||||
|
border-bottom: 1px solid #ddd;
|
||||||
|
margin: 1em 0 1em 0;
|
||||||
|
padding: 0.4em;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- domain module index --------------------------------------------------- */
|
||||||
|
|
||||||
|
table.modindextable td {
|
||||||
|
padding: 2px;
|
||||||
|
border-collapse: collapse;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- general body styles --------------------------------------------------- */
|
||||||
|
|
||||||
|
div.body {
|
||||||
|
min-width: 450px;
|
||||||
|
max-width: 800px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body p, div.body dd, div.body li, div.body blockquote {
|
||||||
|
-moz-hyphens: auto;
|
||||||
|
-ms-hyphens: auto;
|
||||||
|
-webkit-hyphens: auto;
|
||||||
|
hyphens: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.headerlink {
|
||||||
|
visibility: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.brackets:before,
|
||||||
|
span.brackets > a:before{
|
||||||
|
content: "[";
|
||||||
|
}
|
||||||
|
|
||||||
|
a.brackets:after,
|
||||||
|
span.brackets > a:after {
|
||||||
|
content: "]";
|
||||||
|
}
|
||||||
|
|
||||||
|
h1:hover > a.headerlink,
|
||||||
|
h2:hover > a.headerlink,
|
||||||
|
h3:hover > a.headerlink,
|
||||||
|
h4:hover > a.headerlink,
|
||||||
|
h5:hover > a.headerlink,
|
||||||
|
h6:hover > a.headerlink,
|
||||||
|
dt:hover > a.headerlink,
|
||||||
|
caption:hover > a.headerlink,
|
||||||
|
p.caption:hover > a.headerlink,
|
||||||
|
div.code-block-caption:hover > a.headerlink {
|
||||||
|
visibility: visible;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body p.caption {
|
||||||
|
text-align: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body td {
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.first {
|
||||||
|
margin-top: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.rubric {
|
||||||
|
margin-top: 30px;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.align-left, .figure.align-left, object.align-left {
|
||||||
|
clear: left;
|
||||||
|
float: left;
|
||||||
|
margin-right: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.align-right, .figure.align-right, object.align-right {
|
||||||
|
clear: right;
|
||||||
|
float: right;
|
||||||
|
margin-left: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.align-center, .figure.align-center, object.align-center {
|
||||||
|
display: block;
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.align-default, .figure.align-default {
|
||||||
|
display: block;
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.align-left {
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.align-center {
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.align-default {
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.align-right {
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- sidebars -------------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.sidebar {
|
||||||
|
margin: 0 0 0.5em 1em;
|
||||||
|
border: 1px solid #ddb;
|
||||||
|
padding: 7px;
|
||||||
|
background-color: #ffe;
|
||||||
|
width: 40%;
|
||||||
|
float: right;
|
||||||
|
clear: right;
|
||||||
|
overflow-x: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.sidebar-title {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition, div.topic, blockquote {
|
||||||
|
clear: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- topics ---------------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.topic {
|
||||||
|
border: 1px solid #ccc;
|
||||||
|
padding: 7px;
|
||||||
|
margin: 10px 0 10px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.topic-title {
|
||||||
|
font-size: 1.1em;
|
||||||
|
font-weight: bold;
|
||||||
|
margin-top: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- admonitions ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.admonition {
|
||||||
|
margin-top: 10px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
padding: 7px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition dt {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.admonition-title {
|
||||||
|
margin: 0px 10px 5px 0px;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body p.centered {
|
||||||
|
text-align: center;
|
||||||
|
margin-top: 25px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- content of sidebars/topics/admonitions -------------------------------- */
|
||||||
|
|
||||||
|
div.sidebar > :last-child,
|
||||||
|
div.topic > :last-child,
|
||||||
|
div.admonition > :last-child {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sidebar::after,
|
||||||
|
div.topic::after,
|
||||||
|
div.admonition::after,
|
||||||
|
blockquote::after {
|
||||||
|
display: block;
|
||||||
|
content: '';
|
||||||
|
clear: both;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- tables ---------------------------------------------------------------- */
|
||||||
|
|
||||||
|
table.docutils {
|
||||||
|
margin-top: 10px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
border: 0;
|
||||||
|
border-collapse: collapse;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.align-center {
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.align-default {
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
table caption span.caption-number {
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
table caption span.caption-text {
|
||||||
|
}
|
||||||
|
|
||||||
|
table.docutils td, table.docutils th {
|
||||||
|
padding: 1px 8px 1px 5px;
|
||||||
|
border-top: 0;
|
||||||
|
border-left: 0;
|
||||||
|
border-right: 0;
|
||||||
|
border-bottom: 1px solid #aaa;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.footnote td, table.footnote th {
|
||||||
|
border: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
th {
|
||||||
|
text-align: left;
|
||||||
|
padding-right: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.citation {
|
||||||
|
border-left: solid 1px gray;
|
||||||
|
margin-left: 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.citation td {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
th > :first-child,
|
||||||
|
td > :first-child {
|
||||||
|
margin-top: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
th > :last-child,
|
||||||
|
td > :last-child {
|
||||||
|
margin-bottom: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- figures --------------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.figure {
|
||||||
|
margin: 0.5em;
|
||||||
|
padding: 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.figure p.caption {
|
||||||
|
padding: 0.3em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.figure p.caption span.caption-number {
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.figure p.caption span.caption-text {
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- field list styles ----------------------------------------------------- */
|
||||||
|
|
||||||
|
table.field-list td, table.field-list th {
|
||||||
|
border: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.field-list ul {
|
||||||
|
margin: 0;
|
||||||
|
padding-left: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.field-list p {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.field-name {
|
||||||
|
-moz-hyphens: manual;
|
||||||
|
-ms-hyphens: manual;
|
||||||
|
-webkit-hyphens: manual;
|
||||||
|
hyphens: manual;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- hlist styles ---------------------------------------------------------- */
|
||||||
|
|
||||||
|
table.hlist {
|
||||||
|
margin: 1em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.hlist td {
|
||||||
|
vertical-align: top;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* -- other body styles ----------------------------------------------------- */
|
||||||
|
|
||||||
|
ol.arabic {
|
||||||
|
list-style: decimal;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.loweralpha {
|
||||||
|
list-style: lower-alpha;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.upperalpha {
|
||||||
|
list-style: upper-alpha;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.lowerroman {
|
||||||
|
list-style: lower-roman;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.upperroman {
|
||||||
|
list-style: upper-roman;
|
||||||
|
}
|
||||||
|
|
||||||
|
:not(li) > ol > li:first-child > :first-child,
|
||||||
|
:not(li) > ul > li:first-child > :first-child {
|
||||||
|
margin-top: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
:not(li) > ol > li:last-child > :last-child,
|
||||||
|
:not(li) > ul > li:last-child > :last-child {
|
||||||
|
margin-bottom: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.simple ol p,
|
||||||
|
ol.simple ul p,
|
||||||
|
ul.simple ol p,
|
||||||
|
ul.simple ul p {
|
||||||
|
margin-top: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.simple > li:not(:first-child) > p,
|
||||||
|
ul.simple > li:not(:first-child) > p {
|
||||||
|
margin-top: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.simple p,
|
||||||
|
ul.simple p {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.footnote > dt,
|
||||||
|
dl.citation > dt {
|
||||||
|
float: left;
|
||||||
|
margin-right: 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.footnote > dd,
|
||||||
|
dl.citation > dd {
|
||||||
|
margin-bottom: 0em;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.footnote > dd:after,
|
||||||
|
dl.citation > dd:after {
|
||||||
|
content: "";
|
||||||
|
clear: both;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.field-list {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: fit-content(30%) auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.field-list > dt {
|
||||||
|
font-weight: bold;
|
||||||
|
word-break: break-word;
|
||||||
|
padding-left: 0.5em;
|
||||||
|
padding-right: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.field-list > dt:after {
|
||||||
|
content: ":";
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.field-list > dd {
|
||||||
|
padding-left: 0.5em;
|
||||||
|
margin-top: 0em;
|
||||||
|
margin-left: 0em;
|
||||||
|
margin-bottom: 0em;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl {
|
||||||
|
margin-bottom: 15px;
|
||||||
|
}
|
||||||
|
|
||||||
|
dd > :first-child {
|
||||||
|
margin-top: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
dd ul, dd table {
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
dd {
|
||||||
|
margin-top: 3px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
margin-left: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl > dd:last-child,
|
||||||
|
dl > dd:last-child > :last-child {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
dt:target, span.highlighted {
|
||||||
|
background-color: #fbe54e;
|
||||||
|
}
|
||||||
|
|
||||||
|
rect.highlighted {
|
||||||
|
fill: #fbe54e;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.glossary dt {
|
||||||
|
font-weight: bold;
|
||||||
|
font-size: 1.1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.optional {
|
||||||
|
font-size: 1.3em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sig-paren {
|
||||||
|
font-size: larger;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versionmodified {
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
.system-message {
|
||||||
|
background-color: #fda;
|
||||||
|
padding: 5px;
|
||||||
|
border: 3px solid red;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footnote:target {
|
||||||
|
background-color: #ffa;
|
||||||
|
}
|
||||||
|
|
||||||
|
.line-block {
|
||||||
|
display: block;
|
||||||
|
margin-top: 1em;
|
||||||
|
margin-bottom: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.line-block .line-block {
|
||||||
|
margin-top: 0;
|
||||||
|
margin-bottom: 0;
|
||||||
|
margin-left: 1.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.guilabel, .menuselection {
|
||||||
|
font-family: sans-serif;
|
||||||
|
}
|
||||||
|
|
||||||
|
.accelerator {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
.classifier {
|
||||||
|
font-style: oblique;
|
||||||
|
}
|
||||||
|
|
||||||
|
.classifier:before {
|
||||||
|
font-style: normal;
|
||||||
|
margin: 0.5em;
|
||||||
|
content: ":";
|
||||||
|
}
|
||||||
|
|
||||||
|
abbr, acronym {
|
||||||
|
border-bottom: dotted 1px;
|
||||||
|
cursor: help;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- code displays --------------------------------------------------------- */
|
||||||
|
|
||||||
|
pre {
|
||||||
|
overflow: auto;
|
||||||
|
overflow-y: hidden; /* fixes display issues on Chrome browsers */
|
||||||
|
}
|
||||||
|
|
||||||
|
pre, div[class*="highlight-"] {
|
||||||
|
clear: both;
|
||||||
|
}
|
||||||
|
|
||||||
|
span.pre {
|
||||||
|
-moz-hyphens: none;
|
||||||
|
-ms-hyphens: none;
|
||||||
|
-webkit-hyphens: none;
|
||||||
|
hyphens: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div[class*="highlight-"] {
|
||||||
|
margin: 1em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.linenos pre {
|
||||||
|
border: 0;
|
||||||
|
background-color: transparent;
|
||||||
|
color: #aaa;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable tbody {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable tr {
|
||||||
|
display: flex;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable td {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable td.linenos {
|
||||||
|
padding-right: 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable td.code {
|
||||||
|
flex: 1;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.highlight .hll {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.highlight pre,
|
||||||
|
table.highlighttable pre {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.code-block-caption + div {
|
||||||
|
margin-top: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.code-block-caption {
|
||||||
|
margin-top: 1em;
|
||||||
|
padding: 2px 5px;
|
||||||
|
font-size: small;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.code-block-caption code {
|
||||||
|
background-color: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable td.linenos,
|
||||||
|
div.doctest > div.highlight span.gp { /* gp: Generic.Prompt */
|
||||||
|
user-select: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.code-block-caption span.caption-number {
|
||||||
|
padding: 0.1em 0.3em;
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.code-block-caption span.caption-text {
|
||||||
|
}
|
||||||
|
|
||||||
|
div.literal-block-wrapper {
|
||||||
|
margin: 1em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
code.descname {
|
||||||
|
background-color: transparent;
|
||||||
|
font-weight: bold;
|
||||||
|
font-size: 1.2em;
|
||||||
|
}
|
||||||
|
|
||||||
|
code.descclassname {
|
||||||
|
background-color: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
code.xref, a code {
|
||||||
|
background-color: transparent;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
|
||||||
|
background-color: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
.viewcode-link {
|
||||||
|
float: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
.viewcode-back {
|
||||||
|
float: right;
|
||||||
|
font-family: sans-serif;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.viewcode-block:target {
|
||||||
|
margin: -1px -10px;
|
||||||
|
padding: 0 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- math display ---------------------------------------------------------- */
|
||||||
|
|
||||||
|
img.math {
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body div.math p {
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
span.eqno {
|
||||||
|
float: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
span.eqno a.headerlink {
|
||||||
|
position: absolute;
|
||||||
|
z-index: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.math:hover a.headerlink {
|
||||||
|
visibility: visible;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- printout stylesheet --------------------------------------------------- */
|
||||||
|
|
||||||
|
@media print {
|
||||||
|
div.document,
|
||||||
|
div.documentwrapper,
|
||||||
|
div.bodywrapper {
|
||||||
|
margin: 0 !important;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar,
|
||||||
|
div.related,
|
||||||
|
div.footer,
|
||||||
|
#top-link {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
}
|
1
tshunhue/docs/build/html/_static/css/badge_only.css
vendored
Normal file
1
tshunhue/docs/build/html/_static/css/badge_only.css
vendored
Normal file
|
@ -0,0 +1 @@
|
||||||
|
.fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions 
.rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}
|
BIN
tshunhue/docs/build/html/_static/css/fonts/Roboto-Slab-Bold.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/Roboto-Slab-Bold.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/Roboto-Slab-Bold.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/Roboto-Slab-Bold.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/Roboto-Slab-Regular.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/Roboto-Slab-Regular.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/Roboto-Slab-Regular.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/Roboto-Slab-Regular.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/fontawesome-webfont.eot
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/fontawesome-webfont.eot
vendored
Normal file
Binary file not shown.
2671
tshunhue/docs/build/html/_static/css/fonts/fontawesome-webfont.svg
vendored
Normal file
2671
tshunhue/docs/build/html/_static/css/fonts/fontawesome-webfont.svg
vendored
Normal file
File diff suppressed because it is too large
Load diff
After Width: | Height: | Size: 434 KiB |
BIN
tshunhue/docs/build/html/_static/css/fonts/fontawesome-webfont.ttf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/fontawesome-webfont.ttf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/fontawesome-webfont.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/fontawesome-webfont.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/fontawesome-webfont.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/fontawesome-webfont.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-bold-italic.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-bold-italic.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-bold-italic.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-bold-italic.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-bold.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-bold.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-bold.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-bold.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-normal-italic.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-normal-italic.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-normal-italic.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-normal-italic.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-normal.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-normal.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-normal.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/css/fonts/lato-normal.woff2
vendored
Normal file
Binary file not shown.
4
tshunhue/docs/build/html/_static/css/theme.css
vendored
Normal file
4
tshunhue/docs/build/html/_static/css/theme.css
vendored
Normal file
File diff suppressed because one or more lines are too long
1
tshunhue/docs/build/html/_static/custom.css
vendored
Normal file
1
tshunhue/docs/build/html/_static/custom.css
vendored
Normal file
|
@ -0,0 +1 @@
|
||||||
|
/* This file intentionally left blank. */
|
315
tshunhue/docs/build/html/_static/doctools.js
vendored
Normal file
315
tshunhue/docs/build/html/_static/doctools.js
vendored
Normal file
|
@ -0,0 +1,315 @@
|
||||||
|
/*
|
||||||
|
* doctools.js
|
||||||
|
* ~~~~~~~~~~~
|
||||||
|
*
|
||||||
|
* Sphinx JavaScript utilities for all documentation.
|
||||||
|
*
|
||||||
|
* :copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS.
|
||||||
|
* :license: BSD, see LICENSE for details.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* select a different prefix for underscore
|
||||||
|
*/
|
||||||
|
$u = _.noConflict();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* make the code below compatible with browsers without
|
||||||
|
* an installed firebug like debugger
|
||||||
|
if (!window.console || !console.firebug) {
|
||||||
|
var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
|
||||||
|
"dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
|
||||||
|
"profile", "profileEnd"];
|
||||||
|
window.console = {};
|
||||||
|
for (var i = 0; i < names.length; ++i)
|
||||||
|
window.console[names[i]] = function() {};
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* small helper function to urldecode strings
|
||||||
|
*/
|
||||||
|
jQuery.urldecode = function(x) {
|
||||||
|
return decodeURIComponent(x).replace(/\+/g, ' ');
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* small helper function to urlencode strings
|
||||||
|
*/
|
||||||
|
jQuery.urlencode = encodeURIComponent;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This function returns the parsed url parameters of the
|
||||||
|
* current request. Multiple values per key are supported,
|
||||||
|
* it will always return arrays of strings for the value parts.
|
||||||
|
*/
|
||||||
|
jQuery.getQueryParameters = function(s) {
|
||||||
|
if (typeof s === 'undefined')
|
||||||
|
s = document.location.search;
|
||||||
|
var parts = s.substr(s.indexOf('?') + 1).split('&');
|
||||||
|
var result = {};
|
||||||
|
for (var i = 0; i < parts.length; i++) {
|
||||||
|
var tmp = parts[i].split('=', 2);
|
||||||
|
var key = jQuery.urldecode(tmp[0]);
|
||||||
|
var value = jQuery.urldecode(tmp[1]);
|
||||||
|
if (key in result)
|
||||||
|
result[key].push(value);
|
||||||
|
else
|
||||||
|
result[key] = [value];
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
 * highlight a given string on a jquery object by wrapping it in
 * span elements with the given class name.
 */
jQuery.fn.highlightText = function(text, className) {
  // Recursively walk `node`.  For a text node containing `text`, split
  // the node around the first match and wrap the match in a highlight
  // element.  SVG overlay rectangles that must be inserted after the
  // traversal are collected into `addItems`.
  function highlight(node, addItems) {
    if (node.nodeType === 3) {  // text node
      var val = node.nodeValue;
      var pos = val.toLowerCase().indexOf(text);
      // skip nodes that are already highlighted or opted out
      if (pos >= 0 &&
          !jQuery(node.parentNode).hasClass(className) &&
          !jQuery(node.parentNode).hasClass("nohighlight")) {
        var span;
        // inside an <svg> subtree an HTML <span> is invalid; a <tspan>
        // must be used instead (closest() hitting "svg" first means we
        // are inside SVG and not inside a foreignObject/body)
        var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg");
        if (isInSVG) {
          span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
        } else {
          span = document.createElement("span");
          span.className = className;
        }
        span.appendChild(document.createTextNode(val.substr(pos, text.length)));
        // insert the trailing remainder of the text after the current
        // node, then insert the highlight span before that remainder;
        // finally truncate the current node to the text before the match
        node.parentNode.insertBefore(span, node.parentNode.insertBefore(
          document.createTextNode(val.substr(pos + text.length)),
          node.nextSibling));
        node.nodeValue = val.substr(0, pos);
        if (isInSVG) {
          // a tspan cannot carry a background colour, so draw a <rect>
          // sized to the parent's bounding box behind it; insertion is
          // deferred via addItems so getBBox() results stay stable
          var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect");
          var bbox = node.parentElement.getBBox();
          rect.x.baseVal.value = bbox.x;
          rect.y.baseVal.value = bbox.y;
          rect.width.baseVal.value = bbox.width;
          rect.height.baseVal.value = bbox.height;
          rect.setAttribute('class', className);
          addItems.push({
            "parent": node.parentNode,
            "target": rect});
        }
      }
    }
    // do not descend into form controls
    else if (!jQuery(node).is("button, select, textarea")) {
      jQuery.each(node.childNodes, function() {
        highlight(this, addItems);
      });
    }
  }
  var addItems = [];
  var result = this.each(function() {
    highlight(this, addItems);
  });
  // insert the collected SVG rects once the traversal has finished
  for (var i = 0; i < addItems.length; ++i) {
    jQuery(addItems[i].parent).before(addItems[i].target);
  }
  return result;
};
|
||||||
|
|
||||||
|
/*
 * backward compatibility for jQuery.browser
 * This will be supported until firefox bug is fixed.
 */
// jQuery.browser was removed in jQuery 1.9; rebuild a minimal version
// from the user-agent string when it is absent.
if (!jQuery.browser) {
  jQuery.uaMatch = function(ua) {
    ua = ua.toLowerCase();

    // ordered ||-chain: the first pattern that matches wins; the final
    // mozilla pattern is guarded so "compatible" UAs (old IE) never
    // reach it (false && regex short-circuits to false, then || [])
    var match = /(chrome)[ \/]([\w.]+)/.exec(ua) ||
      /(webkit)[ \/]([\w.]+)/.exec(ua) ||
      /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) ||
      /(msie) ([\w.]+)/.exec(ua) ||
      ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) ||
      [];

    // match[1] is the browser token, match[2] its version (both may be
    // missing when nothing matched, hence the defaults)
    return {
      browser: match[ 1 ] || "",
      version: match[ 2 ] || "0"
    };
  };
  jQuery.browser = {};
  jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true;
}
|
||||||
|
|
||||||
|
/**
 * Small JavaScript module for the documentation.
 */
var Documentation = {

  // entry point, called once the DOM is ready (see bottom of file)
  init : function() {
    this.fixFirefoxAnchorBug();
    this.highlightSearchWords();
    this.initIndexTable();
    if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) {
      this.initOnKeyListeners();
    }
  },

  /**
   * i18n support
   */
  TRANSLATIONS : {},
  // maps a count to an index into a plural-forms array; replaced by
  // addTranslations() with the catalog's own plural expression
  PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; },
  LOCALE : 'unknown',

  // gettext and ngettext don't access this so that the functions
  // can safely bound to a different name (_ = Documentation.gettext)
  gettext : function(string) {
    var translated = Documentation.TRANSLATIONS[string];
    if (typeof translated === 'undefined')
      return string;
    return (typeof translated === 'string') ? translated : translated[0];
  },

  ngettext : function(singular, plural, n) {
    var translated = Documentation.TRANSLATIONS[singular];
    if (typeof translated === 'undefined')
      return (n == 1) ? singular : plural;
    // BUG FIX: this previously called Documentation.PLURALEXPR(n), a
    // property that does not exist (only PLURAL_EXPR is defined above),
    // so any singular with a translated plural raised a TypeError.
    return translated[Documentation.PLURAL_EXPR(n)];
  },

  // merge a message catalog into TRANSLATIONS and install its
  // plural expression and locale
  addTranslations : function(catalog) {
    for (var key in catalog.messages)
      this.TRANSLATIONS[key] = catalog.messages[key];
    this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
    this.LOCALE = catalog.locale;
  },

  /**
   * add context elements like header anchor links
   */
  addContextElements : function() {
    $('div[id] > :header:first').each(function() {
      $('<a class="headerlink">\u00B6</a>').
      attr('href', '#' + this.id).
      attr('title', _('Permalink to this headline')).
      appendTo(this);
    });
    $('dt[id]').each(function() {
      $('<a class="headerlink">\u00B6</a>').
      attr('href', '#' + this.id).
      attr('title', _('Permalink to this definition')).
      appendTo(this);
    });
  },

  /**
   * workaround a firefox stupidity
   * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075
   */
  fixFirefoxAnchorBug : function() {
    if (document.location.hash && $.browser.mozilla)
      window.setTimeout(function() {
        // re-assigning the href forces firefox to re-scroll to the anchor
        document.location.href += '';
      }, 10);
  },

  /**
   * highlight the search words provided in the url in the text
   */
  highlightSearchWords : function() {
    var params = $.getQueryParameters();
    var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
    if (terms.length) {
      var body = $('div.body');
      if (!body.length) {
        body = $('body');
      }
      // defer so highlighting does not block initial rendering
      window.setTimeout(function() {
        $.each(terms, function() {
          body.highlightText(this.toLowerCase(), 'highlighted');
        });
      }, 10);
      $('<p class="highlight-link"><a href="javascript:Documentation.' +
        'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
        .appendTo($('#searchbox'));
    }
  },

  /**
   * init the domain index toggle buttons
   */
  initIndexTable : function() {
    var togglers = $('img.toggler').click(function() {
      var src = $(this).attr('src');
      // toggler ids are of the form "toggle-<num>"; strip the prefix
      var idnum = $(this).attr('id').substr(7);
      $('tr.cg-' + idnum).toggle();
      // swap the plus/minus icon to reflect the new state
      if (src.substr(-9) === 'minus.png')
        $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
      else
        $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
    }).css('display', '');
    if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
      togglers.click();
    }
  },

  /**
   * helper function to hide the search marks again
   */
  hideSearchWords : function() {
    $('#searchbox .highlight-link').fadeOut(300);
    $('span.highlighted').removeClass('highlighted');
  },

  /**
   * make the url absolute
   */
  makeURL : function(relativeURL) {
    return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
  },

  /**
   * get the current relative url
   */
  getCurrentURL : function() {
    var path = document.location.pathname;
    var parts = path.split(/\//);
    // drop one trailing path component per '..' in URL_ROOT
    $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
      if (this === '..')
        parts.pop();
    });
    var url = parts.join('/');
    return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
  },

  // bind left/right arrow keys to the prev/next relation links
  initOnKeyListeners: function() {
    $(document).keydown(function(event) {
      var activeElementType = document.activeElement.tagName;
      // don't navigate when in search box or textarea
      if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT'
          && !event.altKey && !event.ctrlKey && !event.metaKey && !event.shiftKey) {
        switch (event.keyCode) {
          case 37: // left
            var prevHref = $('link[rel="prev"]').prop('href');
            if (prevHref) {
              window.location.href = prevHref;
              return false;
            }
            // BUG FIX: this break was missing, so pressing Left on a
            // page without a prev-link fell through and navigated to
            // the NEXT page instead
            break;
          case 39: // right
            var nextHref = $('link[rel="next"]').prop('href');
            if (nextHref) {
              window.location.href = nextHref;
              return false;
            }
            break;
        }
      }
    });
  }
};
|
||||||
|
|
||||||
|
// quick alias for translations
_ = Documentation.gettext;

// initialise the module once the DOM is ready
$(document).ready(function() {
  Documentation.init();
});
|
12
tshunhue/docs/build/html/_static/documentation_options.js
vendored
Normal file
12
tshunhue/docs/build/html/_static/documentation_options.js
vendored
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
// Build-time configuration emitted by Sphinx; read by doctools.js and
// the search scripts.  Values here are literal except URL_ROOT, which
// is taken from the #documentation_options element present on every
// generated page.
var DOCUMENTATION_OPTIONS = {
  // relative path from the current page back to the documentation root
  URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
  VERSION: '',
  LANGUAGE: 'zh-tw',
  // when true, initIndexTable() starts with all index groups collapsed
  COLLAPSE_INDEX: false,
  BUILDER: 'html',
  FILE_SUFFIX: '.html',
  LINK_SUFFIX: '.html',
  HAS_SOURCE: true,
  SOURCELINK_SUFFIX: '.txt',
  // when true, init() installs the left/right arrow-key navigation
  NAVIGATION_WITH_KEYS: false
};
|
BIN
tshunhue/docs/build/html/_static/file.png
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/file.png
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 286 B |
BIN
tshunhue/docs/build/html/_static/fonts/FontAwesome.otf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/FontAwesome.otf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Inconsolata-Bold.ttf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Inconsolata-Bold.ttf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Inconsolata-Regular.ttf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Inconsolata-Regular.ttf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Inconsolata.ttf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Inconsolata.ttf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato-Bold.ttf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato-Bold.ttf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato-Regular.ttf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato-Regular.ttf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bold.eot
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bold.eot
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bold.ttf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bold.ttf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bold.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bold.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bold.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bold.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bolditalic.eot
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bolditalic.eot
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bolditalic.ttf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bolditalic.ttf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bolditalic.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bolditalic.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bolditalic.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-bolditalic.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-italic.eot
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-italic.eot
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-italic.ttf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-italic.ttf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-italic.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-italic.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-italic.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-italic.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-regular.eot
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-regular.eot
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-regular.ttf
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-regular.ttf
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-regular.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-regular.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-regular.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Lato/lato-regular.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Bold.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Bold.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Bold.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Bold.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Light.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Light.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Light.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Light.woff2
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Regular.woff
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Regular.woff
vendored
Normal file
Binary file not shown.
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Regular.woff2
vendored
Normal file
BIN
tshunhue/docs/build/html/_static/fonts/Roboto-Slab-Regular.woff2
vendored
Normal file
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue