Core Development > Code coverage > Lib/lib2to3/pgen2/driver.py

Python code coverage for Lib/lib2to3/pgen2/driver.py

# count content
1n/a# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
2n/a# Licensed to PSF under a Contributor Agreement.
3n/a
4n/a# Modifications:
5n/a# Copyright 2006 Google, Inc. All Rights Reserved.
6n/a# Licensed to PSF under a Contributor Agreement.
7n/a
8n/a"""Parser driver.
9n/a
10n/aThis provides a high-level interface to parse a file into a syntax tree.
11n/a
12n/a"""
13n/a
14n/a__author__ = "Guido van Rossum <guido@python.org>"
15n/a
16n/a__all__ = ["Driver", "load_grammar"]
17n/a
18n/a# Python imports
19n/aimport codecs
20n/aimport io
21n/aimport os
22n/aimport logging
23n/aimport sys
24n/a
25n/a# Pgen imports
26n/afrom . import grammar, parse, token, tokenize, pgen
27n/a
28n/a
29n/aclass Driver(object):
30n/a
31n/a def __init__(self, grammar, convert=None, logger=None):
32n/a self.grammar = grammar
33n/a if logger is None:
34n/a logger = logging.getLogger()
35n/a self.logger = logger
36n/a self.convert = convert
37n/a
38n/a def parse_tokens(self, tokens, debug=False):
39n/a """Parse a series of tokens and return the syntax tree."""
40n/a # XXX Move the prefix computation into a wrapper around tokenize.
41n/a p = parse.Parser(self.grammar, self.convert)
42n/a p.setup()
43n/a lineno = 1
44n/a column = 0
45n/a type = value = start = end = line_text = None
46n/a prefix = ""
47n/a for quintuple in tokens:
48n/a type, value, start, end, line_text = quintuple
49n/a if start != (lineno, column):
50n/a assert (lineno, column) <= start, ((lineno, column), start)
51n/a s_lineno, s_column = start
52n/a if lineno < s_lineno:
53n/a prefix += "\n" * (s_lineno - lineno)
54n/a lineno = s_lineno
55n/a column = 0
56n/a if column < s_column:
57n/a prefix += line_text[column:s_column]
58n/a column = s_column
59n/a if type in (tokenize.COMMENT, tokenize.NL):
60n/a prefix += value
61n/a lineno, column = end
62n/a if value.endswith("\n"):
63n/a lineno += 1
64n/a column = 0
65n/a continue
66n/a if type == token.OP:
67n/a type = grammar.opmap[value]
68n/a if debug:
69n/a self.logger.debug("%s %r (prefix=%r)",
70n/a token.tok_name[type], value, prefix)
71n/a if p.addtoken(type, value, (prefix, start)):
72n/a if debug:
73n/a self.logger.debug("Stop.")
74n/a break
75n/a prefix = ""
76n/a lineno, column = end
77n/a if value.endswith("\n"):
78n/a lineno += 1
79n/a column = 0
80n/a else:
81n/a # We never broke out -- EOF is too soon (how can this happen???)
82n/a raise parse.ParseError("incomplete input",
83n/a type, value, (prefix, start))
84n/a return p.rootnode
85n/a
86n/a def parse_stream_raw(self, stream, debug=False):
87n/a """Parse a stream and return the syntax tree."""
88n/a tokens = tokenize.generate_tokens(stream.readline)
89n/a return self.parse_tokens(tokens, debug)
90n/a
91n/a def parse_stream(self, stream, debug=False):
92n/a """Parse a stream and return the syntax tree."""
93n/a return self.parse_stream_raw(stream, debug)
94n/a
95n/a def parse_file(self, filename, encoding=None, debug=False):
96n/a """Parse a file and return the syntax tree."""
97n/a stream = codecs.open(filename, "r", encoding)
98n/a try:
99n/a return self.parse_stream(stream, debug)
100n/a finally:
101n/a stream.close()
102n/a
103n/a def parse_string(self, text, debug=False):
104n/a """Parse a string and return the syntax tree."""
105n/a tokens = tokenize.generate_tokens(io.StringIO(text).readline)
106n/a return self.parse_tokens(tokens, debug)
107n/a
108n/a
109n/adef _generate_pickle_name(gt):
110n/a head, tail = os.path.splitext(gt)
111n/a if tail == ".txt":
112n/a tail = ""
113n/a return head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
114n/a
115n/a
116n/adef load_grammar(gt="Grammar.txt", gp=None,
117n/a save=True, force=False, logger=None):
118n/a """Load the grammar (maybe from a pickle)."""
119n/a if logger is None:
120n/a logger = logging.getLogger()
121n/a gp = _generate_pickle_name(gt) if gp is None else gp
122n/a if force or not _newer(gp, gt):
123n/a logger.info("Generating grammar tables from %s", gt)
124n/a g = pgen.generate_grammar(gt)
125n/a if save:
126n/a logger.info("Writing grammar tables to %s", gp)
127n/a try:
128n/a g.dump(gp)
129n/a except OSError as e:
130n/a logger.info("Writing failed: %s", e)
131n/a else:
132n/a g = grammar.Grammar()
133n/a g.load(gp)
134n/a return g
135n/a
136n/a
137n/adef _newer(a, b):
138n/a """Inquire whether file a was written since file b."""
139n/a if not os.path.exists(a):
140n/a return False
141n/a if not os.path.exists(b):
142n/a return True
143n/a return os.path.getmtime(a) >= os.path.getmtime(b)
144n/a
145n/a
146n/adef main(*args):
147n/a """Main program, when run as a script: produce grammar pickle files.
148n/a
149n/a Calls load_grammar for each argument, a path to a grammar text file.
150n/a """
151n/a if not args:
152n/a args = sys.argv[1:]
153n/a logging.basicConfig(level=logging.INFO, stream=sys.stdout,
154n/a format='%(message)s')
155n/a for gt in args:
156n/a load_grammar(gt, save=True, force=True)
157n/a return True
158n/a
159n/aif __name__ == "__main__":
160n/a sys.exit(int(not main()))