# parser.sage
import ply.lex as lex
import ply.yacc as yacc
from functools import reduce
from itertools import chain, combinations


def powerset(iterable):
    s = list(iterable)
    return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
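# For instance, list(powerset([1, 2])) yields [(), (1,), (2,), (1, 2)].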
# List of token names. This is always required.
tokens = (
    'SPACE',
    'FF',
    'LPAREN',
    'RPAREN',
    'SHARE',
    'RANDOM',
)

# Base-62 digit alphabet used to index shares: '0'-'9', 'a'-'z', 'A'-'Z'.
ids = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
assert len(ids) == 10 + 26 + 26
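# For instance, ids.index('0') == 0, ids.index('a') == 10, ids.index('A') == 36.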

def MyParser(d, names_r, glitch=False):
    nb_r = len(names_r)

    ## LEXER
    # Regular expression rules for simple tokens
    t_SPACE = r'\s+'
    t_FF = r'\|'
    t_LPAREN = r'\('
    t_RPAREN = r'\)'

    precedence = (
        ('left', 'FF'),
        ('left', 'SPACE'),
    )
    # A regular expression rule with some action code
    def t_SHARE(t):
        r's[0-9a-zA-Z]{2}'
        # A share 'sij' contributes no randomness and a single 1 at entry
        # (i, j) of the (d+1) x (d+1) share matrix.
        probe_r = vector(GF(2), nb_r)
        probe_r.set_immutable()
        probe_sh = matrix(GF(2), d + 1)
        i = ids.index(t.value[1])
        j = ids.index(t.value[2])
        probe_sh[i, j] = 1
        probe_sh.set_immutable()
        s = set()
        s.add((probe_r, probe_sh))
        # Value layout: [current probe set, textual form, recorded probes, XOR count]
        t.value = [s, t.value, set(), 0]
        return t
    def t_RANDOM(t):
        r'r[0-9a-zA-Z]+'
        if t.value not in names_r:
            raise ValueError(t.value + " should be declared first")
        # A random contributes a single 1 in the randomness vector and an
        # all-zero share matrix.
        probe_r = vector(GF(2), nb_r)
        i = names_r.index(t.value)
        probe_r[i] = 1
        probe_r.set_immutable()
        probe_sh = matrix(GF(2), d + 1)
        probe_sh.set_immutable()
        s = set()
        s.add((probe_r, probe_sh))
        t.value = [s, t.value, set(), 0]
        return t
    # Error handling rule
    def t_error(t):
        print("Token not recognised: {}".format(t.value))
        raise ValueError

    # Build the lexer from the rules in this scope
    lexer = lex.lex()
    ## PARSER
    def p_expr_imm(t):
        '''expr : SHARE
                | RANDOM'''
        t[0] = t[1]

    def p_expr_paren(t):
        'expr : LPAREN expr RPAREN'
        t[0] = [t[2][0], '(' + t[2][1] + ')', t[2][2], t[2][3]]
    def p_expr_xor(t):
        'expr : expr SPACE expr'
        probe_expl = t[1][1] + ' ' + t[3][1]
        probes = set()
        if glitch:
            # With glitches, a probe on the XOR may leak every input wire,
            # so keep the union of both operands' probe sets.
            s = t[1][0].union(t[3][0])
            for e in s:
                probe_r = e[0]
                probe_sh = e[1]
                probe_r.set_immutable()
                probe_sh.set_immutable()
                probes.add((probe_r, probe_sh, probe_expl))
        else:
            # Without glitches, each operand is a single stable value and
            # the XOR leaks only the sum of the two.
            assert len(t[1][0]) == 1 and len(t[3][0]) == 1
            probe_r1, probe_sh1 = t[1][0].pop()
            probe_r2, probe_sh2 = t[3][0].pop()
            probe_r = probe_r1 + probe_r2
            probe_r.set_immutable()
            probe_sh = probe_sh1 + probe_sh2
            probe_sh.set_immutable()
            probes.add((probe_r, probe_sh, probe_expl))
            s = set()
            s.add((probe_r, probe_sh))
        new_probes = t[1][2].union(t[3][2]).union(probes)
        nb_xor = t[1][3] + t[3][3] + 1
        t[0] = [s, probe_expl, new_probes, nb_xor]
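    # For instance, with glitch=False the expression 's00 r0' produces a
    # single probe with probe_r = e_{r0} and probe_sh[0, 0] = 1, i.e. the
    # probe observes s00 + r0.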
    def p_expr_ff(t):
        '''expr : expr SPACE FF
                | expr FF'''
        # A register ('|') latches the expression: the accumulated probe set
        # collapses to the single summed value, stopping glitch propagation.
        s = set()
        new_p = reduce(lambda x, y: (x[0] + y[0], x[1] + y[1]), t[1][0])
        new_p[0].set_immutable()
        new_p[1].set_immutable()
        s.add(new_p)
        if t[2] == '|':
            probe_expl = t[1][1] + '|'
        else:
            probe_expl = t[1][1] + ' |'
        t[0] = [s, probe_expl, t[1][2], t[1][3]]
    def p_error(p):
        if p is None:
            raise ValueError("Unexpected EOL, check your last char")
        raise ValueError("Wrong Token " + p.type)

    return yacc.yacc()
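
# Minimal usage sketch (illustration only, not part of the original file):
# build a parser for d = 1 (two shares) with one declared random 'r0' and
# parse an expression. PLY's parse() falls back to the most recently built
# lexer, i.e. the one created inside MyParser.
#
#   parser = MyParser(1, ['r0'])
#   out = parser.parse('s00 r0 | s01')
#   # out == [probe_set, 's00 r0 | s01', recorded_probes, nb_xor], nb_xor == 2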