Details | Last modification | View Log | RSS feed
Rev | Author | Line No. | Line |
---|---|---|---|
1913 | jaeger | 1 | import tokenize |
2 | from tokenize import Token |
||
3 | if '.' in str(1.0): |
||
4 | from boot import * |
||
5 | |||
def check(t, *vs):
    """Return True when token t matches any of the patterns in vs.

    A match is one of:
      - vs[0] is None (wildcard: anything matches),
      - t.type is listed in vs,
      - t is a 'symbol' token whose val is listed in vs.
    Requires at least one entry in vs.
    """
    # 'is None' instead of '== None': identity test, immune to custom __eq__.
    if vs[0] is None:
        return True
    if t.type in vs:
        return True
    if t.type == 'symbol' and t.val in vs:
        return True
    return False
||
11 | |||
def tweak(k, v):
    """Temporarily replace the grammar entry for symbol k.

    The current entry is pushed on P.stack so restore() can undo it.
    When v is truthy the original (omap) handler is reinstated; otherwise
    k is downgraded to an inert terminal.
    """
    P.stack.append((k, dmap[k]))
    if v:
        dmap[k] = omap[k]
    else:
        dmap[k] = {'lbp': 0, 'nud': itself}
||
def restore():
    """Undo the most recent tweak(): pop and reinstate a grammar entry."""
    key, entry = P.stack.pop()
    dmap[key] = entry
||
19 | |||
def cpy(d):
    """Return a shallow copy of mapping d.

    The manual key-by-key loop is equivalent to the dict constructor.
    """
    return dict(d)
||
24 | |||
class PData:
    """Mutable parser state: source text, token stream, and a cursor.

    One module-level instance (P) is active during a parse; tweak() and
    restore() also use its stack to shelve temporary grammar changes.
    """
    def __init__(self,s,tokens):
        self.s = s              # cleaned source text (for error reporting)
        self.tokens = tokens    # token list produced by the tokenizer
        self.pos = 0            # index of the next token to consume
        self.token = None       # current token (decorated via do())
        self.stack = []         # saved grammar entries pushed by tweak()
    def init(self):
        # Fresh copies of the base grammar table per parse, so tweaks from a
        # previous run cannot leak into this one.
        global omap,dmap
        omap = cpy(base_dmap)
        dmap = cpy(base_dmap)
        self.advance()
    def advance(self,val=None):
        """Consume the current token; if val is given it must match first.

        Past the end of the stream an 'eof' token is synthesized. Returns
        the raw token that becomes current (after do() decoration).
        """
        if not check(self.token,val):
            error('expected '+val,self.token)
        if self.pos < len(self.tokens):
            t = self.tokens[self.pos]
            self.pos += 1
        else:
            t = Token((0,0),'eof','eof')
        self.token = do(t)
        return t
||
def error(ctx,t):
    """Report a parse error for token t and abort via the tokenizer.

    NOTE(review): 'tokenize' is the project's own module imported at the
    top of this file, not the stdlib one; u_error presumably raises using
    the position info from P.s — confirm. Python-2 print statement.
    """
    print t
    tokenize.u_error(ctx,P.s,t.pos)
||
50 | |||
def nud(t):
    """Invoke t's null-denotation handler (prefix position)."""
    return t.nud(t)

def led(t, left):
    """Invoke t's left-denotation handler (infix position) on left operand."""
    return t.led(t, left)

def get_lbp(t):
    """Left binding power of token t."""
    return t.lbp

def get_items(t):
    """Child items of token t."""
    return t.items
||
67 | |||
def expression(rbp):
    """Pratt-parser core loop.

    Parse one expression, consuming operators for as long as the upcoming
    token binds more tightly than rbp.
    """
    tok = P.token
    advance()
    left = nud(tok)
    while rbp < get_lbp(P.token):
        tok = P.token
        advance()
        left = led(tok, left)
    return left
||
77 | |||
def infix_led(t, left):
    """Standard binary operator: children become [left, right]."""
    right = expression(t.bp)
    t.items = [left, right]
    return t
||
def infix_is(t, left):
    """'is' operator; a following 'not' folds into a single 'isnot' node."""
    if check(P.token, 'not'):
        t.val = 'isnot'
        advance('not')
    rhs = expression(t.bp)
    t.items = [left, rhs]
    return t
||
def infix_not(t, left):
    """Infix 'not in': rewritten as a single 'notin' operator node."""
    advance('in')
    t.val = 'notin'
    rhs = expression(t.bp)
    t.items = [left, rhs]
    return t
||
def infix_tuple(t, left):
    """Comma operator: extend a tuple in progress or start a new one."""
    rhs = expression(t.bp)
    if left.val == ',':
        # left is already a comma/tuple node; flatten by appending.
        left.items.append(rhs)
        return left
    t.type = 'tuple'
    t.items = [left, rhs]
    return t
||
def lst(t):
    """Flatten a parse node into a plain Python list of nodes.

    None -> []; comma/tuple/statements nodes -> their children; any other
    node -> a one-element list.
    """
    # 'is None' rather than '== None': identity test for the singleton.
    if t is None:
        return []
    if check(t, ',', 'tuple', 'statements'):
        return get_items(t)
    return [t]
||
def ilst(typ, t):
    """Wrap t's flattened items in a fresh token of the given type."""
    return Token(t.pos, typ, typ, lst(t))
||
107 | |||
def call_led(t, left):
    """'(' in infix position: a call. Children are [callee, arg, ...]."""
    call = Token(t.pos, 'call', '$', [left])
    while not check(P.token, ')'):
        tweak(',', 0)   # ',' separates arguments here, it must not build tuples
        call.items.append(expression(0))
        if P.token.val == ',':
            advance(',')
        restore()
    advance(")")
    return call
||
def get_led(t,left):
    """'[' in infix position: subscript or slice access.

    Builds a 'get' node. Omitted slice bounds become symbolic 'None'
    tokens; when more than one index item accumulates, the bounds collapse
    into a single 'slice' child.
    """
    r = Token(t.pos,'get','.',[left])
    items = [left]
    more = False
    while not check(P.token,']'):
        more = False
        if check(P.token,':'):
            # omitted bound before ':' -> implicit None
            items.append(Token(P.token.pos,'symbol','None'))
        else:
            items.append(expression(0))
        if check(P.token,':'):
            advance(':')
            more = True   # ':' consumed; trailing bound may still be omitted
    if more:
        # subscript ended right after ':' -> omitted upper bound
        items.append(Token(P.token.pos,'symbol','None'))
    if len(items) > 2:
        # more than [obj, index]: wrap all bounds in one 'slice' node
        items = [left,Token(t.pos,'slice',':',items[1:])]
    r.items = items
    advance("]")
    return r
||
def dot_led(t, left):
    """'.' attribute access: the name after the dot becomes a string child."""
    name = expression(t.bp)
    name.type = 'string'
    t.items = [left, name]
    return t
||
142 | |||
def itself(t):
    """Identity nud: a terminal token denotes itself."""
    return t
||
def paren_nud(t):
    """'(' in prefix position: a parenthesized expression.

    Inside the parens ',' keeps its tuple-building role. The paren token
    itself is discarded; the inner expression is returned.
    """
    tweak(',', 1)
    inner = expression(0)
    restore()
    advance(')')
    return inner
||
def list_nud(t):
    """'[' in prefix position: list literal or list comprehension.

    Parses comma-separated elements; a 'for' keyword switches the node
    type to 'comp' and appends the loop target and iterable as extra items.
    """
    t.type = 'list'
    t.val = '[]'
    t.items = []
    next = P.token   # NOTE(review): unused (and shadows the builtin)
    tweak(',',0)     # ',' separates elements here, it must not build tuples
    while not check(P.token,'for',']'):
        r = expression(0)
        t.items.append(r)
        if P.token.val == ',': advance(',')
    if check(P.token,'for'):
        # comprehension: [expr for target in iterable]
        t.type = 'comp'
        advance('for')
        tweak('in',0)   # 'in' is comprehension syntax here, not the operator
        t.items.append(expression(0))
        advance('in')
        t.items.append(expression(0))
        restore()       # undo the 'in' tweak
    restore()           # undo the ',' tweak
    advance(']')
    return t
||
def dict_nud(t):
    """'{' in prefix position: dict literal.

    Children are the flat alternating key/value stream; ':' and ','
    separators are simply skipped.
    """
    t.type = 'dict'
    t.val = '{}'
    t.items = []
    tweak(',', 0)
    while not check(P.token, '}'):
        t.items.append(expression(0))
        if check(P.token, ':', ','):
            advance()
    restore()
    advance('}')
    return t
||
183 | |||
def advance(t=None):
    """Module-level shorthand for P.advance: consume the current token,
    optionally asserting it matches t first."""
    return P.advance(t)
||
186 | |||
def block():
    """Parse one suite: an indented block or same-line statement(s).

    Returns a single node, or a 'statements' node when the suite holds
    more than one statement.
    """
    items = []
    tok = P.token

    while check(P.token,'nl'): advance()
    if check(P.token,'indent'):
        # indented suite: statements until the matching dedent
        advance('indent')
        while not check(P.token,'dedent'):
            items.append(expression(0))
            while check(P.token,';','nl'): advance()
        advance('dedent')
    else:
        # inline suite: one or more ';'-separated statements on this line
        items.append(expression(0))
        while check(P.token,';'):
            advance(';')
            items.append(expression(0))
        while check(P.token,'nl'): advance()

    if len(items) > 1:
        return Token(tok.pos,'statements',';',items)
    return items.pop()
208 | |||
def def_nud(t):
    """'def' statement: children are [name, parameter-list, body]."""
    items = t.items = []
    items.append(P.token)
    advance()                      # consume the function name
    advance('(')
    params = Token(t.pos, 'symbol', '():', [])
    items.append(params)
    while not check(P.token, ')'):
        tweak(',', 0)              # ',' separates parameters, not tuples
        params.items.append(expression(0))
        if check(P.token, ','):
            advance(',')
        restore()
    advance(')')
    advance(':')
    items.append(block())
    return t
||
224 | |||
225 | |||
def while_nud(t):
    """'while' statement: children are [condition, body]."""
    t.items = [expression(0)]
    advance(':')
    t.items.append(block())
    return t
||
def class_nud(t):
    """'class' statement: children are [name-expr, methods-block]."""
    t.items = [expression(0)]
    advance(':')
    t.items.append(ilst('methods', block()))
    return t
||
238 | |||
def from_nud(t):
    """'from X import Y': children are [module-expr, names-expr]."""
    t.items = [expression(0)]
    advance('import')
    t.items.append(expression(0))
    return t
||
245 | |||
def for_nud(t):
    """'for' statement: children are [target, iterable, body]."""
    items = t.items = []
    tweak('in', 0)   # 'in' is loop syntax here, not the membership operator
    items.append(expression(0))
    advance('in')
    items.append(expression(0))
    restore()
    advance(':')
    items.append(block())
    return t
||
def if_nud(t):
    """'if' statement: children are 'elif' pairs plus an optional 'else'.

    The leading 'if' clause is normalized into the same [cond, body]
    'elif' shape as the explicit elif clauses.
    """
    items = t.items = []
    a = expression(0)
    advance(':')
    b = block()
    items.append(Token(t.pos,'elif','elif',[a,b]))
    while check(P.token,'elif'):
        tok = P.token
        advance('elif')
        a = expression(0)
        advance(':')
        b = block()
        items.append(Token(tok.pos,'elif','elif',[a,b]))
    if check(P.token,'else'):
        tok = P.token
        advance('else')
        advance(':')
        b = block()
        items.append(Token(tok.pos,'else','else',[b]))
    return t
||
def try_nud(t):
    """'try' statement: children are [body, except-pair..., optional else].

    A bare 'except:' gets a symbolic 'None' in place of an exception
    matcher.
    """
    items = t.items = []
    advance(':')
    b = block()
    items.append(b)
    while check(P.token,'except'):
        tok = P.token
        advance('except')
        if not check(P.token,':'): a = expression(0)
        else: a = Token(tok.pos,'symbol','None')
        advance(':')
        b = block()
        items.append(Token(tok.pos,'except','except',[a,b]))
    if check(P.token,'else'):
        tok = P.token
        advance('else')
        advance(':')
        b = block()
        items.append(Token(tok.pos,'else','else',[b]))
    return t
||
def prefix_nud(t):
    """Generic prefix operator: one child parsed at the token's own bp."""
    t.items = [expression(t.bp)]
    return t
||
def prefix_nud0(t):
    """Prefix keyword whose operand may be omitted (e.g. bare 'return')."""
    if check(P.token, 'nl', ';', 'eof', 'dedent'):
        return t
    return prefix_nud(t)
||
def prefix_nuds(t):
    """Prefix keyword taking a comma-list operand (import/del/global)."""
    return ilst(t.type, expression(0))
||
308 | |||
def prefix_neg(t):
    """Unary minus: fold into the literal when possible, else build 0 - x."""
    operand = expression(50)
    if operand.type == 'number':
        # Constant-fold by negating the literal's value in place.
        operand.val = str(-float(operand.val))
        return operand
    t.items = [Token(t.pos, 'number', '0'), operand]
    return t
||
def vargs_nud(t):
    """'*args' in a parameter/argument list.

    prefix_nud is called for its side effect of parsing the operand into
    t.items; the unused local binding of its return value was dropped.
    """
    prefix_nud(t)
    t.type = 'args'
    t.val = '*'
    return t
||
def nargs_nud(t):
    """'**kwargs' in a parameter/argument list.

    prefix_nud is called for its side effect of parsing the operand into
    t.items; the unused local binding of its return value was dropped.
    """
    prefix_nud(t)
    t.type = 'nargs'
    t.val = '**'
    return t
||
326 | |||
327 | |||
# Base grammar table: maps token symbols/types to their Pratt-parser handlers.
#   lbp - left binding power (0: the token never acts as an infix operator)
#   bp  - binding power used when parsing the token's own operand(s)
#   nud - null denotation (prefix-position handler)
#   led - left denotation (infix-position handler)
base_dmap = {
    ',':{'lbp':20,'bp':20,'led':infix_tuple},
    '+':{'lbp':50,'bp':50,'led':infix_led},
    '-':{'lbp':50,'nud':prefix_neg,'bp':50,'led':infix_led},
    # NOTE(review): the original entry listed 'bp':35 twice; collapsed here
    # (duplicate dict keys keep the last value, so behavior is unchanged).
    'not':{'lbp':35,'nud':prefix_nud,'bp':35,'led':infix_not},
    '%':{'lbp':60,'bp':60,'led':infix_led},
    '*':{'lbp':60,'nud':vargs_nud,'bp':60,'led':infix_led},
    '**':{'lbp':65,'nud':nargs_nud,'bp':65,'led':infix_led},
    '/':{'lbp':60,'bp':60,'led':infix_led},
    '(':{'lbp':70,'nud':paren_nud,'bp':80,'led':call_led},
    '[':{'lbp':70,'nud':list_nud,'bp':80,'led':get_led},
    '{':{'lbp':0,'nud':dict_nud},
    '.':{'lbp':80,'bp':80,'led':dot_led,'type':'get'},
    'break':{'lbp':0,'nud':itself,'type':'break'},
    'pass':{'lbp':0,'nud':itself,'type':'pass'},
    'continue':{'lbp':0,'nud':itself,'type':'continue'},
    'eof':{'lbp':0,'type':'eof','val':'eof'},
    'def':{'lbp':0,'nud':def_nud,'type':'def'},
    'while':{'lbp':0,'nud':while_nud,'type':'while'},
    'for':{'lbp':0,'nud':for_nud,'type':'for'},
    'try':{'lbp':0,'nud':try_nud,'type':'try'},
    'if':{'lbp':0,'nud':if_nud,'type':'if'},
    'class':{'lbp':0,'nud':class_nud,'type':'class'},
    'raise':{'lbp':0,'nud':prefix_nud0,'type':'raise','bp':20},
    'return':{'lbp':0,'nud':prefix_nud0,'type':'return','bp':10},
    'import':{'lbp':0,'nud':prefix_nuds,'type':'import','bp':20},
    'from':{'lbp':0,'nud':from_nud,'type':'from','bp':20},
    'del':{'lbp':0,'nud':prefix_nuds,'type':'del','bp':10},
    'global':{'lbp':0,'nud':prefix_nuds,'type':'globals','bp':20},
    '=':{'lbp':10,'bp':9,'led':infix_led},
}
||
368 | |||
def i_infix(bp, led, *vs):
    """Register every symbol in vs as an infix operator with power bp."""
    for v in vs:
        base_dmap[v] = {'lbp': bp, 'bp': bp, 'led': led}

i_infix(40, infix_led, '<', '>', '<=', '>=', '!=', '==')
i_infix(40, infix_is, 'is', 'in')
i_infix(10, infix_led, '+=', '-=', '*=', '/=')
i_infix(31, infix_led, 'and', '&')
i_infix(30, infix_led, 'or', '|')
i_infix(36, infix_led, '<<', '>>')
||
def i_terms(*vs):
    """Register every symbol in vs as a plain self-denoting terminal."""
    for v in vs:
        base_dmap[v] = {'lbp': 0, 'nud': itself}

i_terms(')', '}', ']', ';', ':', 'nl', 'elif', 'else', 'True', 'False', 'None', 'name', 'string', 'number', 'indent', 'dedent', 'except')
base_dmap['nl']['val'] = 'nl'
||
381 | |||
def gmap(t, v):
    """Fetch the grammar entry for v; flags a parse error when unknown."""
    if v not in dmap:
        error('unknown "%s"'%v, t)
    return dmap[v]
||
386 | |||
def do(t):
    """Decorate token t with its grammar attributes.

    'symbol' tokens are keyed by their value; everything else by type.
    """
    if t.type == 'symbol':
        entry = gmap(t, t.val)
    else:
        entry = gmap(t, t.type)
    merge(t, entry)
    return t
||
def do_module():
    """Parse a whole module: a sequence of blocks until 'eof'.

    Returns the single block, or a 'statements' node wrapping several.
    """
    tok = P.token
    items = []
    while not check(P.token, 'eof'):
        items.append(block())
    if len(items) > 1:
        return Token(tok.pos, 'statements', ';', items)
    return items.pop()
||
400 | |||
def parse(s, tokens, wrap=0):
    """Parse source string s (already split into tokens) into a tree.

    Sets up the module-level parser state P, runs the module parser, then
    clears P so stale state cannot leak into a later parse. The 'wrap'
    parameter is unused here but kept for interface compatibility.
    """
    global P
    s = tokenize.clean(s)
    P = PData(s, tokens)
    P.init()
    r = do_module()
    P = None
    # NOTE(review): the original final line was garbled by extraction
    # ("return r','><','>=','>','>>>"); plain 'return r' is the intended
    # statement.
    return r
||
408 |