add pdf conversion

parent c85b27f57a
commit a29dec4c7a

8 changed files with 543 additions and 238 deletions
17
example/example1.clc
Normal file
|
@ -0,0 +1,17 @@
|
|||
[docu [call font [[family "AR PL UMing TW"]] 123]
|
||||
|
||||
[call font [[family "AR PL UMing TW"]] 123]
|
||||
456
|
||||
[call font [[family "AR PL UMing TW"]] "789漢字"]
|
||||
|
||||
\[7 8 9 \] = \[[+ 5 2] 8 9\]
|
||||
|
||||
[call font [[family "AR PL UMing TW"]] 巨集轉換]
|
||||
|
||||
\[def-syntax foo \[\[_ x \] \[* x x\]\]\]
|
||||
|
||||
[def-syntax foo [[_ x][* x x]]]
|
||||
|
||||
8 * 8 = [foo 8]
|
||||
|
||||
]
|
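example/example1.clc exercises the new def-syntax macro facility: [foo 8] expands to [* 8 8], so the last line renders as 8 * 8 = 64. A minimal sketch of pushing such a snippet through the Parser/Interpreter pair this commit introduces (import paths taken from the editor code; exact module locations may differ):

from Parser import Parser            # src/Editor/Parser.py (new in this commit)
from Interpreter import Interpreter  # import path as used by the editor code

source = '[docu [def-syntax foo [[_ x][* x x]]] 8 * 8 = [foo 8]]'

parser = Parser()
parse_tree = parser.get_clc_sexp(source)   # tokenize, drop comments, build nested lists
interpreter = Interpreter()
print(interpreter.interprete(parse_tree))  # should print the generated <sile>...</sile> XML string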
82
playground/a.py
Normal file
|
@ -0,0 +1,82 @@
|
|||
#-*-coding:utf-8-*-
|
||||
import sys, re
|
||||
from PyQt4.QtGui import *
|
||||
from PyQt4.QtCore import *
|
||||
|
||||
class MyHighlighter( QSyntaxHighlighter ):
|
||||
|
||||
def __init__( self, parent, theme ):
|
||||
|
||||
QSyntaxHighlighter.__init__( self, parent )
|
||||
self.parent = parent
|
||||
|
||||
self.parenthesis_color = [Qt.red, Qt.green, Qt.blue]
|
||||
|
||||
def textFormat(self, color):
|
||||
init_format = QTextCharFormat()
|
||||
brush = QBrush( color, Qt.SolidPattern )
|
||||
init_format.setForeground( brush )
|
||||
return init_format
|
||||
|
||||
|
||||
def highlightBlock( self, text ):
|
||||
|
||||
''' ( ( ) )
|
||||
paren_level ___0___|__1__|__2_|__1_|__0
|
||||
'''
|
||||
|
||||
paren_level = self.previousBlockState()
|
||||
if paren_level == -1: # no previous block state, so start from 0
|
||||
paren_level = 0
|
||||
paren_size = 1
|
||||
|
||||
|
||||
|
||||
iterator = re.finditer("[()]", text)
|
||||
|
||||
paran_and_offset = [{"paren": match.group(0), "offset": match.start()} for match in iterator]
|
||||
|
||||
print(paran_and_offset)
|
||||
for i in paran_and_offset:
|
||||
if i["paren"] == QString('('):
|
||||
print("paren_level %d" % paren_level)
|
||||
self.setFormat( i["offset"], paren_size , self.textFormat(self.parenthesis_color[paren_level]) )
|
||||
paren_level += 1
|
||||
elif i["paren"] == QString(')'):
|
||||
print(paren_level)
|
||||
paren_level -= 1
|
||||
self.setFormat( i["offset"], paren_size , self.textFormat(self.parenthesis_color[paren_level]) )
|
||||
else:
|
||||
pass
|
||||
|
||||
self.setCurrentBlockState(paren_level)
|
||||
|
||||
class HighlightingRule():
|
||||
|
||||
def __init__( self, pattern, format ):
|
||||
|
||||
self.pattern = pattern
|
||||
self.format = format
|
||||
|
||||
class TestApp( QMainWindow ):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
QMainWindow.__init__(self)
|
||||
font = QFont()
|
||||
font.setFamily( "Noto Sans Mono" )
|
||||
font.setFixedPitch( True )
|
||||
font.setPointSize( 11 )
|
||||
editor = QTextEdit()
|
||||
editor.setFont( font )
|
||||
highlighter = MyHighlighter( editor, "Classic" )
|
||||
self.setCentralWidget( editor )
|
||||
self.setWindowTitle( "Syntax Highlighter" )
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
app = QApplication( sys.argv )
|
||||
window = TestApp()
|
||||
window.show()
|
||||
sys.exit( app.exec_() )
|
||||
|
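The playground script above prototypes the technique the ClochurLexer changes below rely on: remember how many brackets are open at the end of each block (the block state) so bracket colours stay consistent across lines. Stripped of the Qt machinery, the bookkeeping amounts to this sketch (plain Python, hypothetical helper):

def bracket_depths(lines, start_depth=0):
    """Yield (line, [(offset, depth)]) pairs; depth selects the colour index."""
    depth = start_depth
    for line in lines:
        marks = []
        for offset, ch in enumerate(line):
            if ch == '(':
                marks.append((offset, depth))   # an opener is coloured with the current depth
                depth += 1
            elif ch == ')':
                depth -= 1
                marks.append((offset, depth))   # a closer reuses its opener's depth
        yield line, marks                       # 'depth' is what setCurrentBlockState() would save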
13
setup.py
|
@ -3,14 +3,17 @@ import pdb # first, import the package being introduced today
|
|||
|
||||
from glob import glob
|
||||
from setuptools import find_packages, setup
|
||||
from src.Editor import __about__
|
||||
|
||||
about = {}
|
||||
with open("./src/Editor/__about__.py") as about_info:
|
||||
exec(about_info.read(), about)
|
||||
|
||||
third_party_files_and_dir = glob('thirdparty/**',recursive=True)
|
||||
third_party_files = [x for x in third_party_files_and_dir if not os.path.isdir(x)]
|
||||
|
||||
setup(
|
||||
name="Clochur",
|
||||
version=__about__.version_no,
|
||||
version=about['version_no'],
|
||||
author="Yoxem Chen",
|
||||
author_email="yoxem.tem98@nctu.edu.tw",
|
||||
description='''An S-expression-like typesetting language powered by the SILE engine
|
||||
|
@ -41,7 +44,7 @@ setup(
|
|||
},
|
||||
|
||||
packages=find_packages(where='src'),
|
||||
package_dir={'Editor': 'src/Editor'},
|
||||
package_dir={'Editor': 'src/Editor', 'Interpreter': 'src/Interpreter'},
|
||||
package_data={'Editor': ['*.pdf', '*.qrc',
|
||||
'../resources/*.svg',
|
||||
'../thirdparty/pdfjs/**',
|
||||
|
@ -53,3 +56,7 @@ setup(
|
|||
|
||||
|
||||
)
|
||||
|
||||
|
||||
|
||||
|
||||
|
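The exec-load of __about__.py above replaces importing src.Editor directly, presumably so setup.py can read the version without importing the package (and its PyQt5 dependencies) at install time. The pattern in isolation, as a sketch:

# Sketch of the exec-load pattern used in setup.py: read metadata from the file
# without importing the Editor package itself.
about = {}
with open("./src/Editor/__about__.py") as about_info:
    exec(about_info.read(), about)
print(about["version_no"])   # the same value setup() passes as version=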
|
|
@ -4,6 +4,8 @@
|
|||
import re
|
||||
from PyQt5.Qsci import QsciLexerCustom, QsciScintilla
|
||||
from PyQt5.QtGui import *
|
||||
from Parser import Parser
|
||||
|
||||
|
||||
|
||||
|
||||
|
@ -15,23 +17,25 @@ class ClochurLexer(QsciLexerCustom):
|
|||
0: 'Default',
|
||||
1: 'Keyword',
|
||||
2: 'Comment',
|
||||
3: 'String',
|
||||
4: 'Rainbow0',
|
||||
5: 'Rainbow1',
|
||||
6: 'Rainbow2',
|
||||
7: 'Rainbow3',
|
||||
8: 'Rainbow4',
|
||||
9: 'Rainbow5',
|
||||
10: 'Rainbow6',
|
||||
3: 'Number',
|
||||
4: 'String',
|
||||
5: 'Rainbow0',
|
||||
6: 'Rainbow1',
|
||||
7: 'Rainbow2',
|
||||
8: 'Rainbow3',
|
||||
9: 'Rainbow4',
|
||||
10: 'Rainbow5',
|
||||
11: 'Rainbow6',
|
||||
}
|
||||
|
||||
for (k,v) in self._styles.items():
|
||||
setattr(self, v, k)
|
||||
|
||||
self.QUOTES = ['"', "'"]
|
||||
self.QUOTES = ['"']
|
||||
self.PARENTHESIS = ["[", "]"]
|
||||
|
||||
self.PRIMARY = ['define', 'let' , '#t', '#f', 'lambda', '@', 'cond', 'if', 'docu']
|
||||
self.PRIMARY = ['define', 'def-syntax' , 'True', 'False', 'lambda', '-', '+',
|
||||
'*', '/', '>' ,'=','<','>=','<=', 'if', 'docu', 'font', 'font-family']
|
||||
|
||||
self.split_pattern = re.compile(r'(\s+|\\%|%|\\\[|\\\]|[[]|[]])')
|
||||
|
||||
|
@ -57,7 +61,9 @@ class ClochurLexer(QsciLexerCustom):
|
|||
elif style == self.Keyword:
|
||||
return QColor("#0000ff")
|
||||
elif style == self.Comment:
|
||||
return QColor("#005500")
|
||||
return QColor("#85cf65")
|
||||
elif style == self.Number:
|
||||
return QColor("#00aaff")
|
||||
elif style == self.String:
|
||||
return QColor("#ce5c00")
|
||||
elif style == self.Rainbow0:
|
||||
|
@ -122,6 +128,7 @@ class ClochurLexer(QsciLexerCustom):
|
|||
#print(line_utf8_splitted_len_pair)
|
||||
|
||||
is_comment = False
|
||||
is_string = False
|
||||
|
||||
i = 0
|
||||
if index > 0:
|
||||
|
@ -129,8 +136,11 @@ class ClochurLexer(QsciLexerCustom):
|
|||
rainbow_state = SCI(QsciScintilla.SCI_GETLINESTATE, index - 1)
|
||||
# print(rainbow_state)
|
||||
|
||||
tmp_parser = Parser()
|
||||
|
||||
for item in line_utf8_splitted_len_pair:
|
||||
|
||||
|
||||
'''comment'''
|
||||
if item["str"] == "%":
|
||||
is_comment = True
|
||||
|
@ -138,9 +148,25 @@ class ClochurLexer(QsciLexerCustom):
|
|||
new_state = self.Comment # end of comment
|
||||
elif item["str"] in self.PRIMARY: # keywords
|
||||
new_state = self.Keyword
|
||||
|
||||
# number
|
||||
elif re.match(tmp_parser.int_pattern,item["str"]):
|
||||
new_state = self.Number
|
||||
elif re.match(tmp_parser.float_pattern, item["str"]):
|
||||
new_state = self.Number
|
||||
|
||||
# string
|
||||
elif re.match(r'^["]([^"]|\\\")*["]$' ,item["str"]) or re.match(r"^[']([^']|\\\')*[']$" ,item["str"]):
|
||||
elif re.match(tmp_parser.string_pattern ,item["str"]):
|
||||
new_state = self.String
|
||||
elif re.match(r"[\"]([^\"\\]|[\\][\"\n\t]|[\\])*?", item["str"]):
|
||||
is_string = True
|
||||
new_state = self.String
|
||||
elif re.match(r"([^\"\\]|[\\][\"\n\t]|[\\])*?[\"]" ,item["str"]):
|
||||
new_state = self.String
|
||||
is_string = False
|
||||
elif is_string == True:
|
||||
new_state = self.String
|
||||
|
||||
#parenthesis: rainbow mode
|
||||
elif item["str"] == "[":
|
||||
new_state = getattr(self, "Rainbow" + str(rainbow_state))
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
from PyQt5.QtGui import *
|
||||
from PyQt5.Qsci import QsciScintilla
|
||||
|
||||
from .ClochurLexer import ClochurLexer
|
||||
from ClochurLexer import ClochurLexer
|
||||
|
||||
class CustomQsciEditor(QsciScintilla):
|
||||
def __init__(self, parent=None):
|
||||
|
|
|
@ -2,147 +2,7 @@
|
|||
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
class Parser():
|
||||
|
||||
def __init__(self):
|
||||
float_pattern =r"(?P<flo>[+-]?\d+[.]\d+)"
|
||||
bool_pattern = r"(?P<bool>True|False)"
|
||||
int_pattern =r"(?P<int>[+-]?\d+)"
|
||||
symbol_pattern = r"(?P<sym>[_a-zA-Z][-!._0-9a-zA-Z]*)"
|
||||
string_pattern = r"(?P<str>[\"]([^\"\\]|[\][\\\"\n\t])*[\"])"
|
||||
parenthesis_pattern = r"(?P<paren>[[]|[]])"
|
||||
percent_pattern = r"(?P<percent>[%])"
|
||||
space_pattern = r"(?P<space>[ \t]+)"
|
||||
newline_pattern = r"(?P<nl>)\n"
|
||||
inside_docu_pattern = r"(?P<other>([^%\[\]\n\s\\]|[\\][%\[\]]?)+)"
|
||||
|
||||
|
||||
self.total_pattern = re.compile("|".join([float_pattern,bool_pattern,int_pattern,symbol_pattern,
|
||||
string_pattern,parenthesis_pattern,
|
||||
percent_pattern,inside_docu_pattern,space_pattern,newline_pattern]))
|
||||
|
||||
self.clc_sexp = None
|
||||
self.tokenized = None
|
||||
self.parse_tree = None
|
||||
self.index = None
|
||||
|
||||
|
||||
def get_clc_sexp(self, clc):
|
||||
self.clc_sexp = clc
|
||||
self.tokenized = self.remove_comment(self.tokenize(self.clc_sexp))
|
||||
self.parse_tree = self.parse_main(self.tokenized)
|
||||
|
||||
def generate_printable_sexp(self, sexp):
|
||||
if isinstance(sexp, list):
|
||||
result = "["
|
||||
for i in sexp:
|
||||
result += (self.generate_printable_sexp(i) + " ")
|
||||
result += "]"
|
||||
|
||||
return result
|
||||
else:
|
||||
if sexp["type"] == "str":
|
||||
result = sexp["token"].replace("\\", "\\\\")
|
||||
result = "\""+ result[1:-1].replace("\"", "\\\"") + "\""
|
||||
return result
|
||||
else:
|
||||
return str(sexp["token"])
|
||||
|
||||
def tokenize(self, clc):
|
||||
line_no = 1
|
||||
column = 0
|
||||
column_offset = 0
|
||||
find_iterator = re.finditer(self.total_pattern, self.clc_sexp)
|
||||
result = []
|
||||
for i in find_iterator:
|
||||
column = i.start() - column_offset
|
||||
|
||||
if i.group(0) == '\n':
|
||||
item = {"token" : i.group(0), "line": line_no, "col" : column, "type": i.lastgroup}
|
||||
line_no += 1
|
||||
column_offset = i.end()
|
||||
else:
|
||||
item = {"token" : i.group(0), "line": line_no, "col" : column, "type": i.lastgroup}
|
||||
|
||||
|
||||
|
||||
result.append(item)
|
||||
|
||||
return result
|
||||
def remove_comment(self, series):
|
||||
result = []
|
||||
is_comment_token = False
|
||||
for i in series:
|
||||
if i["token"] == "%":
|
||||
is_comment_token = True
|
||||
elif i["token"] == "\n":
|
||||
if is_comment_token == True:
|
||||
is_comment_token = False
|
||||
else:
|
||||
result.append(i)
|
||||
elif is_comment_token == True:
|
||||
pass
|
||||
else:
|
||||
result.append(i)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def move_forward(self):
|
||||
self.index += 1
|
||||
|
||||
def parse_main(self, series):
|
||||
self.index = 0
|
||||
|
||||
processed_series = [{"token": "[", "line": None, "col": None, "type": None}] + series + \
|
||||
[{"token": "]", "line": None, "col": None, "type": None}]
|
||||
result = self.parse(processed_series)
|
||||
|
||||
if self.index < len(processed_series):
|
||||
raise Exception("the parenthesis ] is not balanced.")
|
||||
else:
|
||||
return result
|
||||
|
||||
def atom(self, series):
|
||||
result = series[self.index]
|
||||
if result["type"] == "int":
|
||||
result["token"] = int(result["token"])
|
||||
elif result["type"] == "flo":
|
||||
result["token"] = float(result["token"])
|
||||
else:
|
||||
pass
|
||||
self.move_forward()
|
||||
return result
|
||||
|
||||
def parse(self, series):
|
||||
result = None
|
||||
if series[self.index]["token"] == "[":
|
||||
result = []
|
||||
self.move_forward()
|
||||
try:
|
||||
while series[self.index]["token"] != "]":
|
||||
item = self.parse(series)
|
||||
result.append(item)
|
||||
|
||||
self.move_forward()
|
||||
|
||||
return result
|
||||
except IndexError:
|
||||
raise Exception("the parenthesis [ is not balanced.")
|
||||
|
||||
|
||||
else:
|
||||
result = self.atom(series)
|
||||
return result
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
from Parser import Parser
|
||||
|
||||
'''
|
||||
macro expansion for example:
|
||||
|
@ -152,7 +12,7 @@ the ellipsis (...) shouldn't be separated from the variable.
|
|||
[[_ x y] [+ x y]]
|
||||
[[_ x y z...] [+ x [foo y z...]]]]'''
|
||||
|
||||
class Intepreter:
|
||||
class Interpreter:
|
||||
def __init__(self):
|
||||
|
||||
self.macro_env = [dict()] # {"foo": {"before":[_ x y], "after":[+ x y]}, ...}
|
||||
|
@ -161,6 +21,49 @@ class Intepreter:
|
|||
self.macro_list = dict()
|
||||
self.silexml = ET.Element('sile')
|
||||
|
||||
self.preprocessing_commands = '''[def-syntax docu
|
||||
[[_ x] [SILE[docu-aux x]]]
|
||||
[[_ x y...] [SILE[docu-aux x y...]]]]
|
||||
|
||||
[def-syntax docu-aux
|
||||
[[_ x] [SILE-STRING-ADD![str x]]]
|
||||
[[_ [x...]] [SILE-STRING-ADD![str [x...]]]]
|
||||
[[_ x y...] [begin[docu-aux x] [docu-aux y...]]]]
|
||||
|
||||
[def-syntax font
|
||||
[[_ [para...] inner] [call font [para...] inner]]
|
||||
]
|
||||
|
||||
[def-syntax font-family
|
||||
[[_ font-f text] [font [[family font-f]] text]]
|
||||
]
|
||||
|
||||
[def-syntax font-size
|
||||
[[_ sz text] [font [[size sz]] text]]
|
||||
]
|
||||
|
||||
[script "packages/rules"] % for underline
|
||||
|
||||
[def-syntax underline
|
||||
[[_ text] [call underline text]]
|
||||
]
|
||||
|
||||
[def-syntax bold
|
||||
[[_ text] [font [[weight 900]] text]]
|
||||
]
|
||||
|
||||
[def-syntax italic
|
||||
[[_ text] [font [[style "italic"]] text]]
|
||||
]
|
||||
'''
|
||||
self.prepocess()
|
||||
|
||||
def prepocess(self):
|
||||
tmp_parser = Parser()
|
||||
parse_tree = tmp_parser.get_clc_sexp(self.preprocessing_commands)
|
||||
self.interprete(parse_tree)
|
||||
|
||||
|
||||
def remove_spaces_and_newlines(self, sexp):
|
||||
is_inside_defstx = False
|
||||
return self.remove_spaces_and_newlines_aux(sexp, is_inside_defstx)
|
||||
|
@ -173,7 +76,8 @@ class Intepreter:
|
|||
if isinstance(sexp[0], dict) and sexp[0]["token"] == "docu" \
|
||||
and is_inside_defstx == False:
|
||||
result = []
|
||||
for i in sexp[1:]:
|
||||
# the sexp[1] is a space, so skip it.
|
||||
for i in sexp[2:]:
|
||||
if isinstance(i, list):
|
||||
result.append(self.remove_spaces_and_newlines_aux(i, is_inside_defstx))
|
||||
elif i["type"] in ["space", "nl"]:
|
||||
|
@ -190,14 +94,48 @@ class Intepreter:
|
|||
else:
|
||||
return sexp
|
||||
|
||||
|
||||
def destring(self, string):
|
||||
tmp_parser = Parser()
|
||||
string_pattern = tmp_parser.string_pattern
|
||||
if isinstance(string, dict):
|
||||
string = string["token"]
|
||||
if not isinstance(string, str):
|
||||
string = str(string)
|
||||
if re.match(string_pattern, string):
|
||||
# reverse the escape characters
|
||||
print(string)
|
||||
string = re.sub(r'\\"(.+)',r'"\1',string)
|
||||
print(string)
|
||||
return string[1:-1]
|
||||
else:
|
||||
return string
|
||||
|
||||
# \[ => [ ; \] => ] ; \\ => \
|
||||
def remove_escaping_chars(self, sexp):
|
||||
if isinstance(sexp, list):
|
||||
sexp = [self.remove_escaping_chars(x) for x in sexp]
|
||||
elif not sexp["type"] in ["int", "flo"]:
|
||||
sexp_word = sexp["token"]
|
||||
sexp_word = sexp_word.replace("\\[", "[")
|
||||
sexp_word = sexp_word.replace("\\]", "]")
|
||||
sexp_word = sexp_word.replace("\\\\", "\\")
|
||||
sexp["token"] = sexp_word
|
||||
else:
|
||||
pass
|
||||
|
||||
return sexp
|
||||
|
||||
|
||||
def interprete(self, sexps):
|
||||
sexps = self.remove_escaping_chars(sexps)
|
||||
sexps = self.remove_spaces_and_newlines(sexps)
|
||||
result = None
|
||||
|
||||
#environment = [dict()]
|
||||
for sexp in sexps:
|
||||
self.interprete_aux(sexp)
|
||||
result = self.interprete_aux(sexp)
|
||||
|
||||
return result
|
||||
|
||||
def interprete_aux(self, sexp):
|
||||
if isinstance(sexp, dict):
|
||||
|
@ -295,7 +233,10 @@ class Intepreter:
|
|||
raise Exception("Ln %d, Col %d: the argument number of str should be 1" %
|
||||
(sexp[0]["line"], sexp[0]["col"]))
|
||||
else:
|
||||
return str(self.interprete_aux(sexp[1]))
|
||||
if isinstance(sexp[1], dict) and (not (sexp[1]["token"] in self.macro_list.keys())):
|
||||
return str(self.destring(sexp[1]["token"]))
|
||||
else:
|
||||
return str(self.destring(self.interprete_aux(sexp[1])))
|
||||
|
||||
elif sexp[0]["token"] == "str-append":
|
||||
if len(sexp) != 3:
|
||||
|
@ -311,6 +252,7 @@ class Intepreter:
|
|||
else:
|
||||
result = self.interprete_aux(sexp[1])
|
||||
print(result)
|
||||
return ""
|
||||
elif sexp[0]["token"] == "set!":
|
||||
if sexp[1]["type"] != "sym":
|
||||
raise Exception("Ln %d, Col %d: the type of %s should be symbol, not %s" %
|
||||
|
@ -347,6 +289,8 @@ class Intepreter:
|
|||
|
||||
self.macro_list[syntax_name] = result_list
|
||||
|
||||
return ""
|
||||
|
||||
elif sexp[0]["token"] == "begin":
|
||||
if len(sexp) == 1:
|
||||
raise Exception("Ln %d, Col %d: begin should have argument(s)!" %
|
||||
|
@ -362,12 +306,81 @@ class Intepreter:
|
|||
(sexp[1]["line"], sexp[1]["col"], sexp[1]))
|
||||
else:
|
||||
return Lambda(sexp[1], sexp[2], self.env)
|
||||
# [script "packages/font-fallback"]
|
||||
elif sexp[0]["token"] == "script":
|
||||
if not len(sexp) == 2:
|
||||
raise Exception("Ln %d, Col %d: argument length of script should be 1" %
|
||||
(sexp[0]["line"], sexp[0]["col"]))
|
||||
else:
|
||||
script_xml = ET.Element('script')
|
||||
script_xml.attrib["src"] = self.destring(sexp[1]["token"])
|
||||
self.silexml.append(script_xml)
|
||||
|
||||
elif sexp[0]["token"] == "docu-para":
|
||||
if not len(sexp) == 2:
|
||||
raise Exception("Ln %d, Col %d: argument length of docu-para should be 1" %
|
||||
(sexp[0]["line"], sexp[0]["col"]))
|
||||
attrib_dict = dict()
|
||||
for i in sexp[1]:
|
||||
attrib_name = i[0]["token"]
|
||||
attrib_value = self.destring(i[1]["token"])
|
||||
self.silexml.attrib[attrib_name] = attrib_value
|
||||
|
||||
# TODO: SILE argument:
|
||||
#
|
||||
# [docu-para [["pagesize" "a4"]]
|
||||
# [call callee {[[attr1 val1] [attr2 val2] ...]} {inner_val}]
|
||||
elif sexp[0]["token"] == "call":
|
||||
callee = sexp[1]["token"]
|
||||
call_xml = ET.Element(callee)
|
||||
if len(sexp) == 4 or (len(sexp) == 3 and isinstance(sexp[2], list)):
|
||||
for i in sexp[2]:
|
||||
attrib_name = i[0]["token"]
|
||||
attrib_value = self.destring(self.interprete_aux(i[1]))
|
||||
call_xml.attrib[attrib_name] = attrib_value
|
||||
|
||||
if len(sexp) == 4:
|
||||
call_xml.text = self.destring(self.interprete_aux(sexp[3]))
|
||||
|
||||
self.silexml.append(call_xml)
|
||||
return SubXMLElement(call_xml)
|
||||
elif len(sexp) == 3:
|
||||
call_xml.text = self.destring(self.interprete_aux(sexp[2]))
|
||||
self.silexml.append(call_xml)
|
||||
return SubXMLElement(call_xml)
|
||||
elif len(sexp) == 2:
|
||||
self.silexml.append(call_xml)
|
||||
return SubXMLElement(call_xml)
|
||||
else:
|
||||
raise Exception("Line %d, Col. %d, the form of call is mal-formed." % (sexp[0]["line"], sexp[0]["col"]))
|
||||
|
||||
# if it's a sub-xml-element, show the string form of it, or return the input unchanged.
|
||||
# It's recommended to use this only for printing to the terminal with 'print'
|
||||
elif sexp[0]["token"] == "xml-to-string":
|
||||
if len(sexp) != 2:
|
||||
raise Exception("Line %d, Col. %d, the argument of SHOW-XML-TREE is mal-formed" % (sexp[0]["line"], sexp[0]["col"]))
|
||||
else:
|
||||
res = self.interprete_aux(sexp[1])
|
||||
if isinstance(res, SubXMLElement):
|
||||
return ET.tostring(res.element, encoding='unicode')
|
||||
else:
|
||||
return res
|
||||
|
||||
# append string to <sile>
|
||||
elif sexp[0]["token"] == "SILE-STRING-ADD!":
|
||||
subelements_found = [x for x in self.silexml.iter() if x != self.silexml]
|
||||
if subelements_found:
|
||||
if subelements_found[-1].tail == None:
|
||||
subelements_found[-1].tail = self.interprete_aux(sexp[1])
|
||||
else:
|
||||
subelements_found[-1].tail += self.interprete_aux(sexp[1])
|
||||
else:
|
||||
if self.silexml.text == None:
|
||||
self.silexml.text = self.interprete_aux(sexp[1])
|
||||
else:
|
||||
self.silexml.text += self.interprete_aux(sexp[1])
|
||||
|
||||
|
||||
elif sexp[0]["token"] == "SILE":
|
||||
self.silexml.text = self.interprete_aux(sexp[1])
|
||||
inner = self.interprete_aux(sexp[1])
|
||||
|
||||
return ET.tostring(self.silexml, encoding="unicode")
|
||||
|
||||
else:
|
||||
|
@ -411,7 +424,7 @@ class Intepreter:
|
|||
def unify(self, sexp, before_stx, unification):
|
||||
for i in range(len(before_stx)):
|
||||
if isinstance(before_stx[i], list):
|
||||
unification = unify(sexp[i], before_stx[i], unification)
|
||||
unification = self.unify(sexp[i], before_stx[i], unification)
|
||||
elif before_stx[i]["token"] in unification.keys():
|
||||
raise Exception("the variable %s is double defined." % before-stx[i])
|
||||
elif re.match(r".+[.]{3}$", before_stx[i]["token"]):
|
||||
|
@ -449,8 +462,14 @@ class Intepreter:
|
|||
return after_stx
|
||||
|
||||
|
||||
|
||||
|
||||
# a sub XML element that renders as an empty string but carries an XML element inside
|
||||
class SubXMLElement:
|
||||
def __init__(self, element):
|
||||
self.element = element
|
||||
|
||||
def __str__(init):
|
||||
return ""
|
||||
# closure
|
||||
class Lambda:
|
||||
def __init__(self, vars, body, env):
|
||||
for i in vars:
|
||||
|
@ -461,64 +480,3 @@ class Lambda:
|
|||
self.vars = [i["token"] for i in vars]
|
||||
self.body = body
|
||||
self.env = env
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
'''test'''
|
||||
a = Parser()
|
||||
text = '''
|
||||
[def-syntax bar
|
||||
[[_ x y] [+ x y]]
|
||||
[[_ x y z...] [+ x [bar y z...]]]]
|
||||
|
||||
%[print[str[bar 156 6546 146514 10 6]]]
|
||||
|
||||
[define fac [lambda [x] [if [= x 1] 1 [* x [fac [- x 1]]]]]]
|
||||
|
||||
%[print [fac 6]]
|
||||
|
||||
[+[- 2 3][* 5.0 6]]
|
||||
[define var1 [+[- 2 3][* 5.0 6]]]
|
||||
[set! var1 [* 10 2]]
|
||||
[define foo [lambda [x y] [begin [+ x y][set! var1 10] 7]]]
|
||||
[foo 12 5]
|
||||
%[print [+ var1 5]]
|
||||
|
||||
[def-syntax docu
|
||||
[[_ x] [SILE[docu_aux x]]]
|
||||
[[_ x y...] [SILE[docu_aux x y...]]]]
|
||||
|
||||
[def-syntax docu_aux
|
||||
[[_ x] [str x]]
|
||||
[[_ [x...]] [str [x...]]]
|
||||
[[_ x y...] [str-append[docu_aux x] [docu_aux y...]]]]
|
||||
|
||||
[print [docu 貓]]
|
||||
[print[docu 我是貓 [+ 12 3],還沒有名字。]]
|
||||
'''
|
||||
|
||||
"""text = '''[[[ 123 1.23 abc "\\123\\\"貓貓貓"] 我是貓,喵\[喵\]貓\%。喵喵%喵
|
||||
]]'''
|
||||
|
||||
% TODO
|
||||
[def-syntax docu
|
||||
[[_ @ para x] [SILE[docu_aux x]]]
|
||||
[[_ @ para x y...] [SILE[docu_aux x y...]]]]
|
||||
|
||||
[def-syntax docu_aux
|
||||
[[_ x] [str x]]
|
||||
[[_ [x...] [str [x...]]]]
|
||||
[[_ x y...] [str-append[docu_aux x] [docu_aux y...]]]]]
|
||||
|
||||
"""
|
||||
|
||||
interp = Intepreter()
|
||||
|
||||
a.get_clc_sexp(text)
|
||||
|
||||
interp.interprete(a.parse_tree)
|
||||
|
||||
#print(a.parse_tree)
|
||||
print(a.generate_printable_sexp(a.parse_tree))
|
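Before the standalone Parser below, a sketch of the round trip these macros and the call primitive are meant to produce; the expected output is reconstructed from the call and SILE branches above, so treat it as approximate:

from Parser import Parser            # src/Editor/Parser.py (new file below)
from Interpreter import Interpreter  # import path as used by the editor code

interp = Interpreter()
tree = Parser().get_clc_sexp('[docu [font [[family "Gentium"]] "hi"]]')
print(interp.interprete(tree))
# expected, roughly: <sile><font family="Gentium">hi</font></sile>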
137
src/Editor/Parser.py
Normal file
|
@ -0,0 +1,137 @@
|
|||
import re
|
||||
class Parser():
|
||||
|
||||
def __init__(self):
|
||||
float_pattern =r"(?P<flo>[+-]?\d+[.]\d+)"
|
||||
bool_pattern = r"(?P<bool>True|False)"
|
||||
int_pattern =r"(?P<int>[+-]?\d+)"
|
||||
symbol_pattern = r"(?P<sym>[_a-zA-Z][-!:._0-9a-zA-Z]*)"
|
||||
string_pattern = r"(?P<str>[\"]([^\"\\]|[\\][\\\"\n\t]|[\\])*?[\"])"
|
||||
parenthesis_pattern = r"(?P<paren>[[]|[]])"
|
||||
percent_pattern = r"(?P<percent>[%])"
|
||||
space_pattern = r"(?P<space>[ \t]+)"
|
||||
newline_pattern = r"(?P<nl>)\n"
|
||||
inside_docu_pattern = r"(?P<other>([^%\[\]\n\s\\]|[\\][%\[\]\\]?)+)"
|
||||
|
||||
|
||||
self.total_pattern = re.compile("|".join([float_pattern,bool_pattern,int_pattern,symbol_pattern,
|
||||
string_pattern,parenthesis_pattern,
|
||||
percent_pattern,inside_docu_pattern,space_pattern,newline_pattern]))
|
||||
|
||||
self.clc_sexp = None
|
||||
self.tokenized = None
|
||||
#self.parse_tree = None
|
||||
self.index = None
|
||||
self.string_pattern = string_pattern
|
||||
self.int_pattern = int_pattern
|
||||
self.float_pattern = float_pattern
|
||||
|
||||
|
||||
def get_clc_sexp(self, clc):
|
||||
self.clc_sexp = clc
|
||||
self.tokenized = self.remove_comment(self.tokenize(self.clc_sexp))
|
||||
parse_tree = self.parse_main(self.tokenized)
|
||||
return parse_tree
|
||||
|
||||
def generate_printable_sexp(self, sexp):
|
||||
if isinstance(sexp, list):
|
||||
result = "["
|
||||
for i in sexp:
|
||||
result += (self.generate_printable_sexp(i) + " ")
|
||||
result += "]"
|
||||
|
||||
return result
|
||||
else:
|
||||
if sexp["type"] == "str":
|
||||
result = sexp["token"].replace("\\", "\\\\")
|
||||
result = "\""+ result[1:-1].replace("\"", "\\\"") + "\""
|
||||
return result
|
||||
else:
|
||||
return str(sexp["token"])
|
||||
|
||||
def tokenize(self, clc):
|
||||
line_no = 1
|
||||
column = 0
|
||||
column_offset = 0
|
||||
find_iterator = re.finditer(self.total_pattern, self.clc_sexp)
|
||||
result = []
|
||||
for i in find_iterator:
|
||||
column = i.start() - column_offset
|
||||
|
||||
if i.group(0) == '\n':
|
||||
item = {"token" : i.group(0), "line": line_no, "col" : column, "type": i.lastgroup}
|
||||
line_no += 1
|
||||
column_offset = i.end()
|
||||
else:
|
||||
item = {"token" : i.group(0), "line": line_no, "col" : column, "type": i.lastgroup}
|
||||
|
||||
|
||||
|
||||
result.append(item)
|
||||
|
||||
return result
|
||||
def remove_comment(self, series):
|
||||
result = []
|
||||
is_comment_token = False
|
||||
for i in series:
|
||||
if i["token"] == "%":
|
||||
is_comment_token = True
|
||||
elif i["token"] == "\n":
|
||||
if is_comment_token == True:
|
||||
is_comment_token = False
|
||||
else:
|
||||
result.append(i)
|
||||
elif is_comment_token == True:
|
||||
pass
|
||||
else:
|
||||
result.append(i)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def move_forward(self):
|
||||
self.index += 1
|
||||
|
||||
def parse_main(self, series):
|
||||
self.index = 0
|
||||
|
||||
processed_series = [{"token": "[", "line": None, "col": None, "type": None}] + series + \
|
||||
[{"token": "]", "line": None, "col": None, "type": None}]
|
||||
result = self.parse(processed_series)
|
||||
|
||||
if self.index < len(processed_series):
|
||||
raise Exception("the parenthesis ] is not balanced.")
|
||||
else:
|
||||
return result
|
||||
|
||||
def atom(self, series):
|
||||
result = series[self.index]
|
||||
if result["type"] == "int":
|
||||
result["token"] = int(result["token"])
|
||||
elif result["type"] == "flo":
|
||||
result["token"] = float(result["token"])
|
||||
else:
|
||||
pass
|
||||
self.move_forward()
|
||||
return result
|
||||
|
||||
def parse(self, series):
|
||||
result = None
|
||||
if series[self.index]["token"] == "[":
|
||||
result = []
|
||||
self.move_forward()
|
||||
try:
|
||||
while series[self.index]["token"] != "]":
|
||||
item = self.parse(series)
|
||||
result.append(item)
|
||||
|
||||
self.move_forward()
|
||||
|
||||
return result
|
||||
except IndexError:
|
||||
raise Exception("the parenthesis [ is not balanced.")
|
||||
|
||||
|
||||
else:
|
||||
result = self.atom(series)
|
||||
return result
|
|
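For orientation, a rough sketch of what the new Parser hands back: every token is a dict carrying token, line, col and type, and parse_main wraps the whole input in one extra outer list (illustrative; space and newline tokens elided from the comment):

from Parser import Parser   # src/Editor/Parser.py

p = Parser()
tree = p.get_clc_sexp('[define x 12] % comments are stripped\n')
# tree is roughly:
# [[{'token': 'define', 'type': 'sym', 'line': 1, 'col': 1},
#   {'token': 'x', 'type': 'sym', ...},
#   {'token': 12, 'type': 'int', ...}]]
print(p.generate_printable_sexp(tree))   # round-trips the tree back to bracketed text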
@ -4,19 +4,26 @@
|
|||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import subprocess
|
||||
import shutil
|
||||
|
||||
from PyQt5.QtCore import *
|
||||
from PyQt5.QtGui import *
|
||||
from PyQt5 import QtWebEngineWidgets
|
||||
from PyQt5.QtWidgets import *
|
||||
from PyQt5.Qsci import QsciScintilla
|
||||
|
||||
from . import qrc_resources
|
||||
from . import FindReplace, CustomQsciEditor
|
||||
import qrc_resources
|
||||
|
||||
from . import __about__
|
||||
import __about__
|
||||
import FindReplace
|
||||
from Interpreter import Interpreter, Lambda
|
||||
import CustomQsciEditor
|
||||
from Parser import Parser
|
||||
|
||||
filename = None
|
||||
|
||||
sile_command = 'sile'
|
||||
|
||||
dirname = os.path.abspath(os.path.dirname(__file__)) #os.path.dirname('__file__')
|
||||
PDFJS = os.path.join(dirname, '../thirdparty/pdfjs/web/viewer.html')
|
||||
|
@ -30,7 +37,9 @@ class PDFJSWidget(QtWebEngineWidgets.QWebEngineView):
|
|||
def __init__(self):
|
||||
super(PDFJSWidget, self).__init__()
|
||||
self.load(QUrl.fromUserInput("file://%s?file=file://%s" % (PDFJS, PDF)))
|
||||
print((dirname,PDFJS, PDF))
|
||||
#print((dirname,PDFJS, PDF))
|
||||
def load_path(self, path):
|
||||
self.load(QUrl.fromUserInput("file://%s?file=file://%s" % (PDFJS, path)))
|
||||
|
||||
|
||||
|
||||
|
@ -45,6 +54,7 @@ class Window(QMainWindow):
|
|||
|
||||
self.tmp_folder = '/tmp'
|
||||
self.tmp_file = 'clochur_tmp.json'
|
||||
self.tmp_output_file = str(hex(hash(int(time.time()))))[2:] # e1f513545c => e1f513545c.pdf, e1f513545c.s
|
||||
self.untitled_id = None
|
||||
|
||||
self.opened_file_dirname = os.path.expanduser("~")
|
||||
|
@ -72,6 +82,10 @@ class Window(QMainWindow):
|
|||
self.save_as_action = QAction(QIcon(":save-as.svg"), "Save as...", self)
|
||||
self.save_as_action.triggered.connect(self.save_as_call)
|
||||
|
||||
self.save_pdf_action = QAction(QIcon(":pdf.svg"), "Save &PDF", self)
|
||||
self.save_pdf_action.setShortcut('Ctrl+P')
|
||||
self.save_pdf_action.triggered.connect(self.save_pdf_call)
|
||||
|
||||
self.exit_action = QAction("&Exit", self)
|
||||
self.exit_action.setShortcut('Ctrl+Q')
|
||||
self.exit_action.triggered.connect(self.exit_call)
|
||||
|
@ -105,7 +119,9 @@ class Window(QMainWindow):
|
|||
self.select_all_action.triggered.connect(self.select_all_call)
|
||||
|
||||
|
||||
self.convert_action = QAction(QIcon(":convert.svg"), "Con&vert", self)
|
||||
self.convert_action = QAction(QIcon(":convert.svg"), "Conv&ert", self)
|
||||
self.convert_action.setShortcut('Ctrl+E')
|
||||
self.convert_action.triggered.connect(self.convert_call)
|
||||
|
||||
self.about_action = QAction("&About", self)
|
||||
self.about_action.triggered.connect(self.about_call)
|
||||
|
@ -121,8 +137,14 @@ class Window(QMainWindow):
|
|||
file_menu = menuBar.addMenu("&File")
|
||||
file_menu.addAction(self.new_action)
|
||||
file_menu.addAction(self.open_action)
|
||||
file_menu.addSeparator()
|
||||
|
||||
|
||||
file_menu.addAction(self.save_action)
|
||||
file_menu.addAction(self.save_as_action)
|
||||
file_menu.addAction(self.save_pdf_action)
|
||||
file_menu.addSeparator()
|
||||
|
||||
file_menu.addAction(self.exit_action)
|
||||
|
||||
edit_menu = menuBar.addMenu("&Edit")
|
||||
|
@ -207,7 +229,7 @@ class Window(QMainWindow):
|
|||
self.filename = os.path.basename(file_path[0])
|
||||
self.opened_file_dirname = os.path.dirname(file_path[0])
|
||||
self.file = open(file_path[0], 'w', encoding='utf-8')
|
||||
file_content = editor.text()
|
||||
file_content = self.editor.text()
|
||||
self.file.write(file_content)
|
||||
self.file.close()
|
||||
|
||||
|
@ -218,6 +240,45 @@ class Window(QMainWindow):
|
|||
self.setWindowTitle("Clochur - %s" % os.path.basename(file_path[0]))
|
||||
pass
|
||||
|
||||
def save_pdf_call(self):
|
||||
dest_pdf_path = QFileDialog.getSaveFileName(self, 'Save pdf as...', self.opened_file_dirname, "Portable Document Format (*.pdf)")
|
||||
if dest_pdf_path[0] != '':
|
||||
self.convert_call()
|
||||
sile_pdf_path = os.path.join(self.tmp_folder, self.tmp_output_file+".pdf")
|
||||
shutil.copyfile(sile_pdf_path, dest_pdf_path[0])
|
||||
|
||||
|
||||
pass
|
||||
|
||||
def convert_call(self):
|
||||
text = self.editor.text()
|
||||
|
||||
parser = Parser()
|
||||
try:
|
||||
parse_tree = parser.get_clc_sexp(text)
|
||||
intepreter = Interpreter()
|
||||
result = intepreter.interprete(parse_tree)
|
||||
|
||||
sile_xml_path = os.path.join(self.tmp_folder, self.tmp_output_file+".xml")
|
||||
sile_pdf_path = os.path.join(self.tmp_folder, self.tmp_output_file+".pdf")
|
||||
|
||||
with open(sile_xml_path, "w") as xml:
|
||||
xml.write(result)
|
||||
xml.close()
|
||||
|
||||
subprocess.run([sile_command, sile_xml_path])
|
||||
pdf_js_webviewer_list = self.findChildren(QtWebEngineWidgets.QWebEngineView)
|
||||
pdf_js_webviewer = pdf_js_webviewer_list[-1]
|
||||
pdf_js_webviewer.load_path(sile_pdf_path)
|
||||
|
||||
except Exception as e:
|
||||
error_message = QErrorMessage()
|
||||
error_message.showMessage(str(e))
|
||||
error_message.exec_()
|
||||
|
||||
|
||||
|
||||
|
||||
def exit_call(self):
|
||||
|
||||
#reply = QMessageBox.question(self,'','Do You want to save this file? The text has been modified', QMessageBox.Yes | QMessageBox.No | QMessageBox.Cancel, QMessageBox.No)
|
||||
|
@ -235,12 +296,14 @@ class Window(QMainWindow):
|
|||
|
||||
elif reply == QMessageBox.No:
|
||||
self.removing_untitled_id()
|
||||
self.remove_tmp_outputs()
|
||||
app.exit()
|
||||
else:
|
||||
pass
|
||||
|
||||
else:
|
||||
self.removing_untitled_id()
|
||||
self.remove_tmp_outputs()
|
||||
app.exit()
|
||||
|
||||
def undo_call(self):
|
||||
|
@ -259,7 +322,7 @@ class Window(QMainWindow):
|
|||
self.editor.cut()
|
||||
|
||||
def find_and_replace_call(self):
|
||||
print(FindReplace)
|
||||
#print(FindReplace)
|
||||
find_replace_dialog = FindReplace.FindReplace(self)
|
||||
type(find_replace_dialog)
|
||||
find_replace_dialog.exec_()
|
||||
|
@ -280,8 +343,11 @@ class Window(QMainWindow):
|
|||
|
||||
editToolBar.addAction(self.new_action)
|
||||
editToolBar.addAction(self.open_action)
|
||||
tool_bar_separator = editToolBar.addAction('|')
|
||||
editToolBar.addAction(self.save_action)
|
||||
editToolBar.addAction(self.save_as_action)
|
||||
editToolBar.addAction(self.save_pdf_action)
|
||||
|
||||
|
||||
|
||||
tool_bar_separator = editToolBar.addAction('|')
|
||||
|
@ -357,6 +423,16 @@ class Window(QMainWindow):
|
|||
self.untitled_id = i
|
||||
|
||||
return "Untitled %d" % self.untitled_id
|
||||
|
||||
def remove_tmp_outputs(self):
|
||||
|
||||
sile_xml_path = os.path.join(self.tmp_folder, self.tmp_output_file+".xml")
|
||||
sile_pdf_path = os.path.join(self.tmp_folder, self.tmp_output_file+".pdf")
|
||||
if os.path.isfile(sile_xml_path):
|
||||
os.remove(sile_xml_path)
|
||||
|
||||
if os.path.isfile(sile_pdf_path):
|
||||
os.remove(sile_pdf_path)
|
||||
|
||||
|
||||
|
||||
|
@ -407,3 +483,5 @@ def entry_point():
|
|||
window.show()
|
||||
|
||||
sys.exit(app.exec_())
|
||||
|
||||
entry_point()
|
||||
|
|
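Taken together, the PDF conversion wired into convert_call and save_pdf_call boils down to the pipeline below. A standalone sketch, assuming the sile binary is on PATH and, as the editor code does, that SILE writes the .pdf next to the input .xml:

import os, shutil, subprocess, tempfile
from Parser import Parser
from Interpreter import Interpreter

def clc_to_pdf(clc_text, dest_pdf_path):
    # Clochur source -> parse tree -> SILE XML string
    xml_string = Interpreter().interprete(Parser().get_clc_sexp(clc_text))

    xml_path = os.path.join(tempfile.gettempdir(), "clochur_tmp.xml")
    with open(xml_path, "w") as xml_file:
        xml_file.write(xml_string)

    subprocess.run(["sile", xml_path])                       # SILE renders clochur_tmp.pdf alongside the XML
    shutil.copyfile(xml_path[:-4] + ".pdf", dest_pdf_path)   # copy it to the chosen destination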