# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

# Modifications:
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Parser driver.

This provides a high-level interface to parse a file into a syntax tree.

"""

__author__ = "Guido van Rossum <guido@python.org>"

__all__ = ["Driver", "load_grammar"]

# Python imports
import codecs
import io
import os
import logging
import sys

# Pgen imports
from . import grammar, parse, token, tokenize, pgen


class Driver(object):

    def __init__(self, grammar, convert=None, logger=None):
        self.grammar = grammar
        if logger is None:
            logger = logging.getLogger()
        self.logger = logger
        self.convert = convert

    def parse_tokens(self, tokens, debug=False):
        """Parse a series of tokens and return the syntax tree."""
        # XXX Move the prefix computation into a wrapper around tokenize.
        p = parse.Parser(self.grammar, self.convert)
        p.setup()
        lineno = 1
        column = 0
        type = value = start = end = line_text = None
        prefix = ""
        for quintuple in tokens:
            type, value, start, end, line_text = quintuple
            if start != (lineno, column):
                # Text skipped between the previous token and this one
                # (whitespace, line continuations) becomes part of the prefix.
                assert (lineno, column) <= start, ((lineno, column), start)
                s_lineno, s_column = start
                if lineno < s_lineno:
                    prefix += "\n" * (s_lineno - lineno)
                    lineno = s_lineno
                    column = 0
                if column < s_column:
                    prefix += line_text[column:s_column]
                    column = s_column
            if type in (tokenize.COMMENT, tokenize.NL):
                # Comments and non-logical newlines are attached as prefix
                # text to the next significant token.
                prefix += value
                lineno, column = end
                if value.endswith("\n"):
                    lineno += 1
                    column = 0
                continue
            if type == token.OP:
                type = grammar.opmap[value]
            if debug:
                self.logger.debug("%s %r (prefix=%r)",
                                  token.tok_name[type], value, prefix)
            if p.addtoken(type, value, (prefix, start)):
                if debug:
                    self.logger.debug("Stop.")
                break
            prefix = ""
            lineno, column = end
            if value.endswith("\n"):
                lineno += 1
                column = 0
        else:
            # We never broke out -- EOF is too soon (how can this happen???)
            raise parse.ParseError("incomplete input",
                                   type, value, (prefix, start))
        return p.rootnode

    def parse_stream_raw(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        tokens = tokenize.generate_tokens(stream.readline)
        return self.parse_tokens(tokens, debug)

    def parse_stream(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        return self.parse_stream_raw(stream, debug)

    def parse_file(self, filename, encoding=None, debug=False):
        """Parse a file and return the syntax tree."""
        stream = codecs.open(filename, "r", encoding)
        try:
            return self.parse_stream(stream, debug)
        finally:
            stream.close()

    def parse_string(self, text, debug=False):
        """Parse a string and return the syntax tree."""
        tokens = tokenize.generate_tokens(io.StringIO(text).readline)
        return self.parse_tokens(tokens, debug)

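# A minimal usage sketch (illustrative only; it assumes a grammar description
# such as "Grammar.txt" is available next to this module):
#
#     g = load_grammar("Grammar.txt")
#     driver = Driver(g, convert=None)
#     tree = driver.parse_string("x = 1\n")
#
# parse_file() and parse_stream() follow the same pattern, but read their
# input from a named file or a file-like object instead of a string.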

def _generate_pickle_name(gt):
    head, tail = os.path.splitext(gt)
    if tail == ".txt":
        tail = ""
    return head + tail + ".".join(map(str, sys.version_info)) + ".pickle"

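# Illustrative only: on CPython 3.6.5, _generate_pickle_name("Grammar.txt")
# would return "Grammar3.6.5.final.0.pickle"; the version components come
# from sys.version_info, so the exact file name depends on the interpreter.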

def load_grammar(gt="Grammar.txt", gp=None,
                 save=True, force=False, logger=None):
    """Load the grammar (maybe from a pickle)."""
    if logger is None:
        logger = logging.getLogger()
    gp = _generate_pickle_name(gt) if gp is None else gp
    if force or not _newer(gp, gt):
        logger.info("Generating grammar tables from %s", gt)
        g = pgen.generate_grammar(gt)
        if save:
            logger.info("Writing grammar tables to %s", gp)
            try:
                g.dump(gp)
            except OSError as e:
                logger.info("Writing failed: %s", e)
    else:
        g = grammar.Grammar()
        g.load(gp)
    return g

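# Sketch of the caching behaviour above (the file name is illustrative): the
# first call regenerates the tables from the grammar text and writes the
# pickle; while the pickle remains newer than the text file (see _newer()
# below), later calls simply load it instead of re-running pgen.
#
#     g = load_grammar("Grammar.txt", save=True, force=False)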

def _newer(a, b):
    """Return True if file a exists and was modified at least as recently as file b."""
    if not os.path.exists(a):
        return False
    if not os.path.exists(b):
        return True
    return os.path.getmtime(a) >= os.path.getmtime(b)


def main(*args):
    """Main program, when run as a script: produce grammar pickle files.

    Calls load_grammar for each argument, a path to a grammar text file.
    """
    if not args:
        args = sys.argv[1:]
    logging.basicConfig(level=logging.INFO, stream=sys.stdout,
                        format='%(message)s')
    for gt in args:
        load_grammar(gt, save=True, force=True)
    return True

if __name__ == "__main__":
    sys.exit(int(not main()))
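# Example invocation as a script (the module file name "driver.py" is an
# assumption; adjust it to wherever this file lives). Each argument is a
# grammar text file whose pickle is regenerated:
#
#     python driver.py Grammar.txt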