edk2: Remove AppPkg, StdLib, StdLibPrivateInternalFiles
diff --git a/AppPkg/Applications/Python/Python-2.7.2/Lib/lib2to3/pgen2/driver.py b/AppPkg/Applications/Python/Python-2.7.2/Lib/lib2to3/pgen2/driver.py
deleted file mode 100644 (file)
index ccf7bca..0000000
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
-# Licensed to PSF under a Contributor Agreement.
-
-# Modifications:
-# Copyright 2006 Google, Inc. All Rights Reserved.
-# Licensed to PSF under a Contributor Agreement.
-
-"""Parser driver.
-
-This provides a high-level interface to parse a file into a syntax tree.
-
-"""
-
-__author__ = "Guido van Rossum <guido@python.org>"
-
-__all__ = ["Driver", "load_grammar"]
-
-# Python imports
-import codecs
-import os
-import logging
-import StringIO
-import sys
-
-# Pgen imports
-from . import grammar, parse, token, tokenize, pgen
-
-
-class Driver(object):
-
-    def __init__(self, grammar, convert=None, logger=None):
-        self.grammar = grammar
-        if logger is None:
-            logger = logging.getLogger()
-        self.logger = logger
-        self.convert = convert
-
-    def parse_tokens(self, tokens, debug=False):
-        """Parse a series of tokens and return the syntax tree."""
-        # XXX Move the prefix computation into a wrapper around tokenize.
-        p = parse.Parser(self.grammar, self.convert)
-        p.setup()
-        lineno = 1
-        column = 0
-        type = value = start = end = line_text = None
-        prefix = u""
-        for quintuple in tokens:
-            type, value, start, end, line_text = quintuple
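-            # The tokenizer does not emit whitespace between tokens; any gap
-            # since the previous token is reconstructed here and accumulated
-            # as this token's prefix.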
-            if start != (lineno, column):
-                assert (lineno, column) <= start, ((lineno, column), start)
-                s_lineno, s_column = start
-                if lineno < s_lineno:
-                    prefix += "\n" * (s_lineno - lineno)
-                    lineno = s_lineno
-                    column = 0
-                if column < s_column:
-                    prefix += line_text[column:s_column]
-                    column = s_column
-            if type in (tokenize.COMMENT, tokenize.NL):
-                prefix += value
-                lineno, column = end
-                if value.endswith("\n"):
-                    lineno += 1
-                    column = 0
-                continue
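-            # The grammar gives each operator its own token type; remap the
-            # tokenizer's generic OP token through opmap (e.g. "(" -> LPAR).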
-            if type == token.OP:
-                type = grammar.opmap[value]
-            if debug:
-                self.logger.debug("%s %r (prefix=%r)",
-                                  token.tok_name[type], value, prefix)
-            if p.addtoken(type, value, (prefix, start)):
-                if debug:
-                    self.logger.debug("Stop.")
-                break
-            prefix = ""
-            lineno, column = end
-            if value.endswith("\n"):
-                lineno += 1
-                column = 0
-        else:
-            # We never broke out -- EOF is too soon (how can this happen???)
-            raise parse.ParseError("incomplete input",
-                                   type, value, (prefix, start))
-        return p.rootnode
-
-    def parse_stream_raw(self, stream, debug=False):
-        """Parse a stream and return the syntax tree."""
-        tokens = tokenize.generate_tokens(stream.readline)
-        return self.parse_tokens(tokens, debug)
-
-    def parse_stream(self, stream, debug=False):
-        """Parse a stream and return the syntax tree."""
-        return self.parse_stream_raw(stream, debug)
-
-    def parse_file(self, filename, encoding=None, debug=False):
-        """Parse a file and return the syntax tree."""
-        stream = codecs.open(filename, "r", encoding)
-        try:
-            return self.parse_stream(stream, debug)
-        finally:
-            stream.close()
-
-    def parse_string(self, text, debug=False):
-        """Parse a string and return the syntax tree."""
-        tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
-        return self.parse_tokens(tokens, debug)
-
-
-def load_grammar(gt="Grammar.txt", gp=None,
-                 save=True, force=False, logger=None):
-    """Load the grammar (maybe from a pickle)."""
-    if logger is None:
-        logger = logging.getLogger()
-    if gp is None:
-        head, tail = os.path.splitext(gt)
-        if tail == ".txt":
-            tail = ""
-        gp = head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
-    if force or not _newer(gp, gt):
-        logger.info("Generating grammar tables from %s", gt)
-        g = pgen.generate_grammar(gt)
-        if save:
-            logger.info("Writing grammar tables to %s", gp)
-            try:
-                g.dump(gp)
-            except IOError, e:
-                logger.info("Writing failed:"+str(e))
-    else:
-        g = grammar.Grammar()
-        g.load(gp)
-    return g
-
-
-def _newer(a, b):
-    """Return True if file a is at least as new as file b."""
-    if not os.path.exists(a):
-        return False
-    if not os.path.exists(b):
-        return True
-    return os.path.getmtime(a) >= os.path.getmtime(b)
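For reference, the removed driver is the same module that lib2to3 itself builds on, and it is typically used along the following lines. A minimal sketch, assuming a Python 2 interpreter whose standard library still ships lib2to3; the sample source string is arbitrary:

import os
import lib2to3
from lib2to3 import pytree
from lib2to3.pgen2 import driver

# lib2to3 ships its Python grammar file alongside the package source.
grammar_file = os.path.join(os.path.dirname(lib2to3.__file__), "Grammar.txt")

# load_grammar() builds the parse tables with pgen, or reuses a cached
# pickle when one newer than Grammar.txt already exists.
grammar = driver.load_grammar(grammar_file)

# convert=pytree.convert makes the parser produce lib2to3 pytree nodes.
d = driver.Driver(grammar, convert=pytree.convert)
tree = d.parse_string("x = 1 + 2\n")
print tree  # a pytree renders back to the source it was parsed from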