⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 antlr3.py

📁 antlr最新版本V3源代码
💻 PY
📖 第 1 页 / 共 5 页
字号:
            self.type = oldToken.type            self.line = oldToken.line            self.charPositionInLine = oldToken.charPositionInLine            self.channel = oldToken.channel                    self.text = text        self.type = type        self.line = None        self.charPositionInLine = None        self.channel = channel        self.index = None    def getText(self):        return self.text    def setText(self, text):        self.text = text    def getType(self):        return self.type     def setType(self, ttype):        self.type = ttype        def getLine(self):        return self.line        def setLine(self, line):        self.line = line    def getCharPositionInLine(self):        return self.charPositionInLine        def setCharPositionInLine(self, pos):        self.charPositionInLine = pos    def getChannel(self):        return self.channel        def setChannel(self, channel):        self.channel = channel        def getTokenIndex(self):        return self.index        def setTokenIndex(self, index):        self.index = index    def toString(self):        channelStr = ""        if self.channel > 0:            channelStr = ",channel="+self.channel                    txt = self.text        if txt is None:            txt = "<no text>"        return "[@%r,%r,<%r>%s,%r:%r]" % (self.index,                                          txt,                                          self.type,                                          channelStr,                                          self.line,                                          self.charPositionInLine                                          )        __str__ = toString    __repr__ = toStringEOF_TOKEN = CommonToken(type=EOF)	INVALID_TOKEN = CommonToken(type=INVALID_TOKEN_TYPE)# In an action, a lexer rule can set token to this SKIP_TOKEN and ANTLR# will avoid creating a token for this symbol and try to fetch another.SKIP_TOKEN = CommonToken(type=INVALID_TOKEN_TYPE)"""ANTLR3 runtime package"""# [The "BSD licence"]# Copyright (c) 2005-2006 Terence Parr# All rights reserved.## Redistribution and use in source and binary forms, with or without# modification, are permitted provided that the following conditions# are met:# 1. Redistributions of source code must retain the above copyright#    notice, this list of conditions and the following disclaimer.# 2. Redistributions in binary form must reproduce the above copyright#    notice, this list of conditions and the following disclaimer in the#    documentation and/or other materials provided with the distribution.# 3. The name of the author may not be used to endorse or promote products#    derived from this software without specific prior written permission.## THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.import codecsfrom StringIO import StringIOfrom antlr3.constants import DEFAULT_CHANNEL, EOFfrom antlr3.tokens import Token, EOF_TOKEN############################################################################## basic interfaces#   IntStream#    +- CharStream#    \- TokenStream7## subclasses must implemented all methods#############################################################################class IntStream(object):    """    A simple stream of integers used when all I care about is the char    or token type sequence (such as interpretation).    """        def consume(self):        raise NotImplementedError        def LA(self, i):        """Get int at current input pointer + i ahead where i=1 is next int.        Negative indexes are allowed.  LA(-1) is previous token (token	just matched).  LA(-i) where i is before first token should	yield -1, invalid char / EOF.	"""                raise NotImplementedError            def mark(self):        """        Tell the stream to start buffering if it hasn't already.  Return        current input position, index(), or some other marker so that        when passed to rewind() you get back to the same spot.        rewind(mark()) should not affect the input cursor.  The Lexer        track line/col info as well as input index so its markers are        not pure input indexes.  Same for tree node streams.        """        raise NotImplementedError    def index(self):        """        Return the current input symbol index 0..n where n indicates the        last symbol has been read.  The index is the symbol about to be        read not the most recently read symbol.        """        raise NotImplementedError    def rewind(self, marker=None):        """        Reset the stream so that next call to index would return marker.        The marker will usually be index() but it doesn't have to be.  It's        just a marker to indicate what state the stream was in.  This is        essentially calling release() and seek().  If there are markers        created after this marker argument, this routine must unroll them        like a stack.  Assume the state the stream was in when this marker        was created.        If marker is None:        Rewind to the input position of the last marker.        Used currently only after a cyclic DFA and just        before starting a sem/syn predicate to get the        input position back to the start of the decision.        Do not "pop" the marker off the state.  mark(i)        and rewind(i) should balance still. It is        like invoking rewind(last marker) but it should not "pop"        the marker off.  It's like seek(last marker's input position).       	"""        raise NotImplementedError    def release(self, marker=None):        """        You may want to commit to a backtrack but don't want to force the        stream to keep bookkeeping objects around for a marker that is        no longer necessary.  This will have the same behavior as        rewind() except it releases resources without the backward seek.        This must throw away resources for all markers back to the marker        argument.  So if you're nested 5 levels of mark(), and then release(2)        you have to release resources for depths 2..5.	"""        raise NotImplementedError    def seek(self, index):        """        Set the input cursor to the position indicated by index.  This is        normally used to seek ahead in the input stream.  No buffering is        required to do this unless you know your stream will use seek to        move backwards such as when backtracking.        This is different from rewind in its multi-directional        requirement and in that its argument is strictly an input cursor        (index).        For char streams, seeking forward must update the stream state such        as line number.  For seeking backwards, you will be presumably        backtracking using the mark/rewind mechanism that restores state and        so this method does not need to update state when seeking backwards.        Currently, this method is only used for efficient backtracking using        memoization, but in the future it may be used for incremental parsing.        The index is 0..n-1.  A seek to position i means that LA(1) will        return the ith symbol.  So, seeking to 0 means LA(1) will return the        first element in the stream.         """        raise NotImplementedError    def size(self):        """        Only makes sense for streams that buffer everything up probably, but        might be useful to display the entire stream or for testing.  This        value includes a single EOF.	"""        raise NotImplementedErrorclass CharStream(IntStream):    """A source of characters for an ANTLR lexer"""    # pylint does not realize that this is an interface, too    #pylint: disable-msg=W0223        EOF = -1    def substring(self, start, stop):        """        For infinite streams, you don't need this; primarily I'm providing        a useful interface for action code.  Just make sure actions don't        use this on streams that don't support it.        """        raise NotImplementedError                def LT(self, i):        """        Get the ith character of lookahead.  This is the same usually as        LA(i).  This will be used for labels in the generated        lexer code.  I'd prefer to return a char here type-wise, but it's        probably better to be 32-bit clean and be consistent with LA.        """        raise NotImplementedError    def getLine(self):        """ANTLR tracks the line information automatically"""        raise NotImplementedError    def setLine(self, line):        """        Because this stream can rewind, we need to be able to reset the line        """        raise NotImplementedError    def getCharPositionInLine(self):        """        The index of the character relative to the beginning of the line 0..n-1        """        raise NotImplementedError    def setCharPositionInLine(self, pos):        raise NotImplementedErrorclass TokenStream(IntStream):    """A stream of tokens accessing tokens from a TokenSource"""        # pylint does not realize that this is an interface, too    #pylint: disable-msg=W0223        def LT(self, k):        """        Get Token at current input pointer + i ahead where i=1 is next Token.        i<0 indicates tokens in the past.  So -1 is previous token and -2 is        two tokens ago. LT(0) is undefined.  For i>=n, return Token.EOFToken.        Return null for LT(0) and any index that results in an absolute address        that is negative.	"""        raise NotImplementedError    def get(self, i):        """        Get a token at an absolute index i; 0..n-1.  This is really only        needed for profiling and debugging and token stream rewriting.        If you don't want to buffer up tokens, then this method makes no        sense for you.  Naturally you can't use the rewrite stream feature.        I believe DebugTokenStream can easily be altered to not use        this method, removing the dependency.        """        raise NotImplementedError    def getTokenSource(self):        """        Where is this stream pulling tokens from?  This is not the name, but        the object that provides Token objects.	"""        raise NotImplementedError    def toString(self, start=None, stop=None):        """        Return the text of all tokens from start to stop, inclusive.        If the stream does not buffer all the tokens then it can just        return "" or null;  Users should not access $ruleLabel.text in        an action of course in that case.        Because the user is not required to use a token with an index stored        in it, we must provide a means for two token objects themselves to        indicate the start/end location.  Most often this will just delegate        to the other toString(int,int).  This is also parallel with        the TreeNodeStream.toString(Object,Object).	"""        raise NotImplementedError        ############################################################################## character streams for use in lexers#   CharStream#   \- ANTLRStringStream#############################################################################class ANTLRStringStream(CharStream):    """    A pretty quick CharStream that pulls all data from an array    directly.  Every method call counts in the lexer.    """        def __init__(self, data):        CharStream.__init__(self)          	# The data being scanned        self.data = data	# How many characters are actually in the buffer        self.n = len(data) 	# 0..n-1 index into string of next char        self.p = 0	# line number 1..n within the input        self.line = 1 	# The index of the character relative to the beginning of the        # line 0..n-1        self.charPositionInLine = 0	# A list of CharStreamState objects that tracks the stream state        # values line, charPositionInLine, and p that can change as you        # move through the input stream.  Indexed from 0..markDepth-1.        self._markers = [ ]    def reset(self):        """        Reset the stream so that it's in the same state it was        when the object was created *except* the data array is not        touched.        """

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -