# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Convert graminit.[ch] spit out by pgen to Python code.

Pgen is the Python parser generator. It is useful to quickly create a
parser from a grammar file in Python's grammar notation. But I don't
want my parsers to be written in C (yet), so I'm translating the
parsing tables to Python data structures and writing a Python parse
engine.

Note that the token numbers are constants determined by the standard
Python tokenizer. The standard token module defines these numbers and
their names (the names are not used much). The token numbers are
hardcoded into the Python tokenizer and into pgen. A Python
implementation of the Python tokenizer is also available, in the
standard tokenize module.

On the other hand, symbol numbers (representing the grammar's
non-terminals) are assigned by pgen based on the actual grammar
input.

Note: this module is pretty much obsolete; the pgen module generates
equivalent grammar tables directly from the Grammar.txt input file
without having to invoke the Python pgen C program.

"""

# Python imports
import re

# Local imports
from pgen2 import grammar, token


class Converter(grammar.Grammar):
    """Grammar subclass that reads classic pgen output files.

    The run() method reads the tables as produced by the pgen parser
    generator, typically contained in two C files, graminit.h and
    graminit.c. The other methods are for internal use only.

    See the base class for more documentation.

    """

    def run(self, graminit_h, graminit_c):
        """Load the grammar tables from the text files written by pgen."""
        self.parse_graminit_h(graminit_h)
        self.parse_graminit_c(graminit_c)
        self.finish_off()

    def parse_graminit_h(self, filename):
        """Parse the .h file written by pgen. (Internal)

        This file is a sequence of #define statements defining the
        nonterminals of the grammar as numbers. We build two tables
        mapping the numbers to names and back.

        """
        try:
            f = open(filename)
        except OSError as err:
            print("Can't open %s: %s" % (filename, err))
            return False
        self.symbol2number = {}
        self.number2symbol = {}
        lineno = 0
        for line in f:
            lineno += 1
            mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)
            if not mo:
                # Blank lines are ignored; anything else that doesn't match
                # the #define pattern is reported but not fatal.
                if line.strip():
                    print("%s(%s): can't parse %s" % (filename, lineno,
                                                      line.strip()))
            else:
                symbol, number = mo.groups()
                number = int(number)
                assert symbol not in self.symbol2number
                assert number not in self.number2symbol
                self.symbol2number[symbol] = number
                self.number2symbol[number] = symbol
        return True

    def parse_graminit_c(self, filename):
        """Parse the .c file written by pgen. (Internal)

        The file looks as follows. The first two lines are always this:

            #include "pgenheaders.h"
            #include "grammar.h"

        After that come four blocks:

        1) one or more state definitions
        2) a table defining dfas
        3) a table defining labels
        4) a struct defining the grammar

        A state definition has the following form:
        - one or more arc arrays, each of the form:
            static arc arcs_<n>_<m>[<k>] = {
                {<i>, <j>},
                ...
            };
        - followed by a state array, of the form:
            static state states_<s>[<t>] = {
                {<k>, arcs_<n>_<m>},
                ...
            };

        """
        try:
            f = open(filename)
        except OSError as err:
            print("Can't open %s: %s" % (filename, err))
            return False
        # The code below essentially uses f's iterator-ness!
        lineno = 0

        # Expect the two #include lines
        lineno, line = lineno+1, next(f)
        assert line == '#include "pgenheaders.h"\n', (lineno, line)
        lineno, line = lineno+1, next(f)
        assert line == '#include "grammar.h"\n', (lineno, line)

        # Parse the state definitions
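        # Each pass through the outer loop below consumes the arc arrays for
        # one dfa and then its states_<s> array. Afterwards states[s][t] is
        # the list of (label, target) arcs leaving state <t> of dfa <s>.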
        lineno, line = lineno+1, next(f)
        allarcs = {}
        states = []
        while line.startswith("static arc "):
            while line.startswith("static arc "):
                mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$",
                              line)
                assert mo, (lineno, line)
                n, m, k = list(map(int, mo.groups()))
                arcs = []
                for _ in range(k):
                    lineno, line = lineno+1, next(f)
                    mo = re.match(r"\s+{(\d+), (\d+)},$", line)
                    assert mo, (lineno, line)
                    i, j = list(map(int, mo.groups()))
                    arcs.append((i, j))
                lineno, line = lineno+1, next(f)
                assert line == "};\n", (lineno, line)
                allarcs[(n, m)] = arcs
                lineno, line = lineno+1, next(f)
            mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line)
            assert mo, (lineno, line)
            s, t = list(map(int, mo.groups()))
            assert s == len(states), (lineno, line)
            state = []
            for _ in range(t):
                lineno, line = lineno+1, next(f)
                mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line)
                assert mo, (lineno, line)
                k, n, m = list(map(int, mo.groups()))
                arcs = allarcs[n, m]
                assert k == len(arcs), (lineno, line)
                state.append(arcs)
            states.append(state)
            lineno, line = lineno+1, next(f)
            assert line == "};\n", (lineno, line)
            lineno, line = lineno+1, next(f)
        self.states = states

        # Parse the dfas
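        # Each dfa entry spans two source lines; schematically (the field
        # values here are placeholders, not taken from any real graminit.c):
        #     {<number>, "<symbol>", 0, <nstates>, states_<z>,
        #      "<octal-escaped bitset>"},
        # The asserts below cross-check <number>/<symbol> against the .h
        # tables, require the third field to be 0, and require <nstates> to
        # equal len(states[<z>]). The bitset encodes the symbol's first set
        # and is decoded into a dict further down.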
        dfas = {}
        mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line)
        assert mo, (lineno, line)
        ndfas = int(mo.group(1))
        for i in range(ndfas):
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$',
                          line)
            assert mo, (lineno, line)
            symbol = mo.group(2)
            number, x, y, z = list(map(int, mo.group(1, 3, 4, 5)))
            assert self.symbol2number[symbol] == number, (lineno, line)
            assert self.number2symbol[number] == symbol, (lineno, line)
            assert x == 0, (lineno, line)
            state = states[z]
            assert y == len(state), (lineno, line)
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line)
            assert mo, (lineno, line)
            first = {}
            rawbitset = eval(mo.group(1))
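            # The bitset is a C string of octal-escaped bytes; bit j of byte
            # i marks label i*8 + j as a member of the first set. For
            # example, a byte with value 0o005 (0b00000101) at position i
            # would set first[i*8 + 0] and first[i*8 + 2].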
            for i, c in enumerate(rawbitset):
                byte = ord(c)
                for j in range(8):
                    if byte & (1<<j):
                        first[i*8 + j] = 1
            dfas[number] = (state, first)
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        self.dfas = dfas

        # Parse the labels
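        # Each label line has the form {<number>, 0}, or {<number>, "<string>"},
        # where <number> is a token or symbol number and the optional string
        # is e.g. a keyword spelling. The string form is run through eval()
        # below, so the regex only admits a simple double-quoted word.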
        labels = []
        lineno, line = lineno+1, next(f)
        mo = re.match(r"static label labels\[(\d+)\] = {$", line)
        assert mo, (lineno, line)
        nlabels = int(mo.group(1))
        for i in range(nlabels):
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line)
            assert mo, (lineno, line)
            x, y = mo.groups()
            x = int(x)
            if y == "0":
                y = None
            else:
                y = eval(y)
            labels.append((x, y))
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        self.labels = labels

        # Parse the grammar struct
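        # The closing struct is expected to look roughly like this (numbers
        # are placeholders; only the shape matters to the checks below):
        #     grammar _PyParser_Grammar = {
        #         <ndfas>,
        #         dfas,
        #         {<nlabels>, labels},
        #         <start symbol number>
        #     };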
        lineno, line = lineno+1, next(f)
        assert line == "grammar _PyParser_Grammar = {\n", (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+(\d+),$", line)
        assert mo, (lineno, line)
        ndfas = int(mo.group(1))
        assert ndfas == len(self.dfas)
        lineno, line = lineno+1, next(f)
        assert line == "\tdfas,\n", (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+{(\d+), labels},$", line)
        assert mo, (lineno, line)
        nlabels = int(mo.group(1))
        assert nlabels == len(self.labels), (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+(\d+)$", line)
        assert mo, (lineno, line)
        start = int(mo.group(1))
        assert start in self.number2symbol, (lineno, line)
        self.start = start
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        try:
            lineno, line = lineno+1, next(f)
        except StopIteration:
            pass
        else:
            assert 0, (lineno, line)

    def finish_off(self):
        """Create additional useful structures. (Internal)."""
        self.keywords = {} # map from keyword strings to arc labels
        self.tokens = {} # map from numeric token values to arc labels
        for ilabel, (type, value) in enumerate(self.labels):
            if type == token.NAME and value is not None:
                self.keywords[value] = ilabel
            elif value is None:
                self.tokens[type] = ilabel
---|