📄 python.stg
/* [The "BSD licence"] Copyright (c) 2005-2006 Terence Parr All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*//* in sync with Java/Java.stg revision 107 */group Python implements ANTLRCore;/** The overall file structure of a recognizer; stores methods for rules * and cyclic DFAs plus support code. */outputFile(LEXER,PARSER,TREE_PARSER, actionScope, actions, docComment, recognizer, name, tokens, tokenNames, rules, cyclicDFAs, bitsets, buildTemplate, buildAST, rewrite, profile, backtracking, synpreds, memoize, numRules, fileName, ANTLRVersion, generatedTimestamp, trace, scopes, superClass, literals) ::=<<# $ANTLR <ANTLRVersion> <fileName> <generatedTimestamp><@imports>from antlr3 import *<if(TREE_PARSER)>from antlr3.tree import *<\n><endif>from antlr3.compat import set, frozenset<@end><actions.(actionScope).header><! <docComment> !># for convenience in actionsHIDDEN = BaseRecognizer.HIDDEN# token types<tokens:{<it.name>=<it.type>}; separator="\n"><recognizer>>>lexer(grammar, name, tokens, scopes, rules, numRules, labelType="Token", filterMode) ::= <<class <name>(Lexer): <scopes:{<if(it.isDynamicGlobalScope)><globalAttributeScope(scope=it)><endif>}> grammarFileName = "<fileName>" def __init__(self, input=None): Lexer.__init__(self, input)<if(backtracking)> self.ruleMemo = {}<endif> <cyclicDFAs:{dfa | <cyclicDFAInit(dfa)>}; seperator="\n"> <actions.lexer.init> <actions.lexer.members><if(filterMode)> <filteringNextToken()><endif> <rules; separator="\n\n"> <synpreds:{p | <lexerSynpred(p)>}> <cyclicDFAs:cyclicDFA()> <! dump tables for all DFA !>>>/** A override of Lexer.nextToken() that backtracks over mTokens() looking * for matches. No error can be generated upon error; just rewind, consume * a token and then try again. backtracking needs to be set as well. * Make rule memoization happen only at levels above 1 as we start mTokens * at backtracking==1. */filteringNextToken() ::= <<def nextToken(self): while True: if self.input.LA(1) == EOF: return EOF_TOKEN self.token = None self.channel = DEFAULT_CHANNEL self.tokenStartCharIndex = self.input.index() self.tokenStartCharPositionInLine = self.input.charPositionInLine self.tokenStartLine = self.input.line self._text = None try: m = self.input.mark() self.backtracking = 1 <! 
/** How to generate a parser */
genericParser(grammar, name, scopes, tokens, tokenNames, rules, numRules,
              bitsets, inputStreamType, superClass, ASTLabelType="Object",
              labelType, members) ::= <<
# token names
tokenNames = [
    "\<invalid>", "\<EOR>", "\<DOWN>", "\<UP>",
    <tokenNames; wrap, separator=", ">
]

<scopes:{<if(it.isDynamicGlobalScope)><globalAttributeScopeClass(scope=it)><endif>}>
<rules:{<ruleAttributeScopeClass(scope=it.ruleDescriptor.ruleScope)>}>

class <name>(<superClass>):
    grammarFileName = "<fileName>"
    tokenNames = tokenNames

    def __init__(self, input):
        <superClass>.__init__(self, input)
<if(backtracking)>
        self.ruleMemo = {}
<endif>
        <cyclicDFAs:{dfa | <cyclicDFAInit(dfa)>}; separator="\n">
        <scopes:{<if(it.isDynamicGlobalScope)><globalAttributeScopeStack(scope=it)><endif>}>
        <rules:{<ruleAttributeScopeStack(scope=it.ruleDescriptor.ruleScope)>}>

        <actions.parser.init>

    <@members>
    <@end>

    <members>

    <rules; separator="\n\n">

    <synpreds:{p | <synpred(p)>}>

    <cyclicDFAs:cyclicDFA()> <! dump tables for all DFA !>

    <bitsets:{FOLLOW_<it.name>_in_<it.inName><it.tokenIndex> = frozenset([<it.tokenTypes:{<it>}; separator=", ">])<\n>}>
>>

parser(grammar, name, scopes, tokens, tokenNames, rules, numRules, bitsets,
       ASTLabelType, superClass="Parser", labelType="Token",
       members={<actions.parser.members>}) ::= <<
<genericParser(inputStreamType="TokenStream", ...)>
>>

/** How to generate a tree parser; same as parser except the input
 *  stream is a different type.
 */
treeParser(grammar, name, scopes, tokens, tokenNames, globalAction, rules,
           numRules, bitsets, labelType={<ASTLabelType>}, ASTLabelType="Object",
           superClass="TreeParser", members={<actions.treeparser.members>}) ::= <<
<genericParser(inputStreamType="TreeNodeStream", ...)>
>>
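/* Illustration (not part of the template group): wiring a generated lexer and
   parser together.  A minimal sketch; the grammar name Expr and the start rule
   prog are hypothetical, while ANTLRStringStream and CommonTokenStream come
   from the antlr3 runtime imported by outputFile:

       from antlr3 import ANTLRStringStream, CommonTokenStream
       from ExprLexer import ExprLexer          # hypothetical generated modules
       from ExprParser import ExprParser

       lexer = ExprLexer(ANTLRStringStream("1 + 2\n"))
       parser = ExprParser(CommonTokenStream(lexer))
       parser.prog()                            # each grammar rule becomes a method
*/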
/** A simpler version of a rule template that is specific to the imaginary
 *  rules created for syntactic predicates.  As they never have return values
 *  nor parameters etc..., just give simplest possible method.  Don't do
 *  any of the normal memoization stuff in here either; it's a waste.
 *  As predicates cannot be inlined into the invoking rule, they need to
 *  be in a rule by themselves.
 */
synpredRule(ruleName, ruleDescriptor, block, description, nakedBlock) ::=
<<
# $ANTLR start <ruleName>
def <ruleName>_fragment(self, <ruleDescriptor.parameterScope:parameterScope(scope=it)>):
<if(trace)>
    self.traceIn("<ruleName>_fragment", <ruleDescriptor.index>)
    try:
        <block>
    finally:
        self.traceOut("<ruleName>_fragment", <ruleDescriptor.index>)
<else>
    <block>
<endif>
# $ANTLR end <ruleName>
>>

synpred(name) ::= <<
def <name>(self):
    self.backtracking += 1
    <@start()>
    start = self.input.mark()
    self.<name>_fragment()
    success = not self.failed
    self.input.rewind(start)
    <@stop()>
    self.backtracking -= 1
    self.failed = False
    return success
>>

lexerSynpred(name) ::= <<
<synpred(name)>
>>
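/* Illustration (not part of the template group): for a predicate named
   synpred1, the synpred template above expands to roughly the method below
   (the @start/@stop region hooks are empty by default).  mark()/rewind()
   guarantee the input stream is left untouched whether or not the speculative
   match succeeds:

       def synpred1(self):
           self.backtracking += 1
           start = self.input.mark()
           self.synpred1_fragment()    # speculatively match; may set self.failed
           success = not self.failed
           self.input.rewind(start)    # undo any consumption
           self.backtracking -= 1
           self.failed = False
           return success
*/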
ruleMemoization(name) ::= <<
<if(memoize)>
if self.backtracking > 0 and self.alreadyParsedRule(self.input, <ruleDescriptor.index>):
    return <ruleReturnValue()>
<endif>
>>

/** How to test for failure and return from rule */
checkRuleBacktrackFailure() ::= <<
<if(backtracking)>
if self.failed:
    return <ruleReturnValue()>
<endif>
>>

/** This rule has failed, exit indicating failure during backtrack */
ruleBacktrackFailure() ::= <<
<if(backtracking)>
if self.backtracking > 0:
    self.failed = True
    return <ruleReturnValue()><\n>
<endif>
>>

/** How to generate code for a rule.  This includes any return type
 *  data aggregates required for multiple return values.
 */
rule(ruleName, ruleDescriptor, block, emptyRule, description, exceptions,
     finally, memoize) ::= <<
<returnScope(scope=ruleDescriptor.returnScope)>

# $ANTLR start <ruleName>
# <fileName>:<description>
def <ruleName>(self, <ruleDescriptor.parameterScope:parameterScope(scope=it)>):
<if(trace)>
    self.traceIn("<ruleName>", <ruleDescriptor.index>)<\n>
<endif>
    <ruleScopeSetUp()>
    <ruleDeclarations()>
    <ruleLabelDefs()>
    <ruleDescriptor.actions.init>
    <@preamble()>
    try:
        try:
            <ruleMemoization(name=ruleName)>
            <block>
            <ruleCleanUp()>
            <(ruleDescriptor.actions.after):execAction()>
<if(exceptions)>
        <exceptions:{e|<catch(decl=e.decl,action=e.action)><\n>}>
<else>
<if(!emptyRule)>
<if(actions.(actionScope).rulecatch)>
        <actions.(actionScope).rulecatch>
<else>
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
<endif>
<else>
        finally:
            pass
<endif>
<endif>
    finally:
<if(trace)>
        self.traceOut("<ruleName>", <ruleDescriptor.index>)<\n>
<endif>
        <memoize()>
        <ruleScopeCleanUp()>
        <finally>
        pass
    <@postamble()>
    return <ruleReturnValue()>
# $ANTLR end <ruleName>
>>

catch(decl, action) ::= <<
except <e.decl>:
    <e.action>
>>

ruleDeclarations() ::= <<
<if(ruleDescriptor.hasMultipleReturnValues)>
retval = self.<ruleDescriptor.name>_return()
retval.start = self.input.LT(1)<\n>
<else>
<ruleDescriptor.returnScope.attributes:{ a |
<a.name> = <if(a.initValue)><a.initValue><else>None<endif>
}>
<endif>
<if(memoize)>
<ruleDescriptor.name>_StartIndex = self.input.index()
<endif>
>>

ruleScopeSetUp() ::= <<
<ruleDescriptor.useScopes:{self.<it>_stack.append(<it>_scope())}; separator="\n">
<ruleDescriptor.ruleScope:{self.<it.name>_stack.append(<it.name>_scope())}; separator="\n">
>>

ruleScopeCleanUp() ::= <<
<ruleDescriptor.useScopes:{self.<it>_stack.pop()}; separator="\n">
<ruleDescriptor.ruleScope:{self.<it.name>_stack.pop()}; separator="\n">
>>

ruleLabelDefs() ::= <<
<[ruleDescriptor.tokenLabels, ruleDescriptor.tokenListLabels]
    :{<it.label.text> = None}; separator="\n">
<[ruleDescriptor.tokenListLabels, ruleDescriptor.ruleListLabels]
    :{list_<it.label.text> = None}; separator="\n">
<[ruleDescriptor.ruleLabels, ruleDescriptor.ruleListLabels]
    :ruleLabelDef(label=it); separator="\n">
<ruleDescriptor.ruleListLabels:{<it.label.text> = None}; separator="\n">
>>

lexerRuleLabelDefs() ::= <<
<[ruleDescriptor.tokenLabels,
  ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleLabels]
    :{<it.label.text> = None}; separator="\n">
<ruleDescriptor.charLabels:{<it.label.text> = None}; separator="\n">
<[ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleListLabels,
  ruleDescriptor.ruleListLabels]
    :{list_<it.label.text> = None}; separator="\n">
>>

ruleReturnValue() ::= <<
<if(!ruleDescriptor.isSynPred)>
<if(ruleDescriptor.hasReturnValue)>
<if(ruleDescriptor.hasSingleReturnValue)>
<ruleDescriptor.singleValueReturnName>
<else>
retval
<endif>
<endif>
<endif>
>>

ruleCleanUp() ::= <<
<if(ruleDescriptor.hasMultipleReturnValues)>
<if(!TREE_PARSER)>
retval.stop = self.input.LT(-1)<\n>
<endif>
<endif>
>>

memoize() ::= <<
<if(memoize)>
<if(backtracking)>
if self.backtracking > 0:
    self.memoize(self.input, <ruleDescriptor.index>, <ruleDescriptor.name>_StartIndex)
<endif>
<endif>
>>
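/* Illustration (not part of the template group): putting rule,
   ruleDeclarations, ruleMemoization and memoize together, a hypothetical
   parser rule "expr" with a single return value "value", backtracking and
   memoize on, trace off, and rule index 1 (all names and the index are made
   up) comes out roughly as:

       # $ANTLR start expr
       def expr(self, ):
           value = None
           expr_StartIndex = self.input.index()
           try:
               try:
                   if self.backtracking > 0 and self.alreadyParsedRule(self.input, 1):
                       return value
                   # ... code generated from the rule's block ...
               except RecognitionException, re:
                   self.reportError(re)
                   self.recover(self.input, re)
           finally:
               if self.backtracking > 0:
                   self.memoize(self.input, 1, expr_StartIndex)
               pass
           return value
       # $ANTLR end expr
*/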