📄 buildnfa.g
            {
            GrammarAST stNode = (GrammarAST)n.getFirstChild();
            IntSet notSet = grammar.complement(stNode.getSetValue());
            stNode.setSetValue(notSet);
            if ( notSet.isNil() ) {
                ErrorManager.grammarError(ErrorManager.MSG_EMPTY_COMPLEMENT,
                                          grammar,
                                          #n.token);
            }
            g=factory.build_Set(notSet);
            }
           )
           {#n.followingNFAState = g.right;}
         )
    |   #(RANGE a:atom b:atom)
        {g = factory.build_Range(grammar.getTokenType(#a.getText()),
                                 grammar.getTokenType(#b.getText()));}
    |   #(CHAR_RANGE c1:CHAR_LITERAL c2:CHAR_LITERAL)
        {
        if ( grammar.type==Grammar.LEXER ) {
            g = factory.build_CharRange(#c1.getText(), #c2.getText());
        }
        }
    |   #(ASSIGN ID g=atom_or_notatom)
    |   #(PLUS_ASSIGN ID g=atom)
    |   g=ebnf
    |   g=tree
    |   #( SYNPRED block )
    |   ACTION
    |   pred:SEMPRED {g = factory.build_SemanticPredicate(#pred);}
    |   spred:SYN_SEMPRED {g = factory.build_SemanticPredicate(#spred);}
    |   gpred:GATED_SEMPRED {g = factory.build_SemanticPredicate(#gpred);}
    |   EPSILON {g = factory.build_Epsilon();}
    ;

ebnf returns [StateCluster g=null]
{
    StateCluster b = null;
}
    :   #( BLOCK b=block EOB )
        {
        // track decision if > 1 alts
        if ( grammar.getNumberOfAltsForDecisionNFA(b.left)>1 ) {
            b.left.setDescription(grammar.grammarTreeToString(#BLOCK,false));
            b.left.setDecisionASTNode(#BLOCK);
            int d = grammar.assignDecisionNumber( b.left );
            grammar.setDecisionNFA( d, b.left );
            grammar.setDecisionBlockAST(d, #BLOCK);
        }
        g = b;
        }
    |   #( OPTIONAL #( blk:BLOCK b=block EOB ) )
        {
        g = factory.build_Aoptional(b);
        g.left.setDescription(grammar.grammarTreeToString(#ebnf,false));
        // there is always at least one alt even if block has just 1 alt
        int d = grammar.assignDecisionNumber( g.left );
        grammar.setDecisionNFA(d, g.left);
        grammar.setDecisionBlockAST(d, #blk);
        g.left.setDecisionASTNode(#ebnf);
        }
    |   #( CLOSURE #( BLOCK b=block eob:EOB ) )
        {
        g = factory.build_Astar(b);
        // track the loop back / exit decision point
        b.right.setDescription("()* loopback of "+grammar.grammarTreeToString(#ebnf,false));
        int d = grammar.assignDecisionNumber( b.right );
        grammar.setDecisionNFA(d, b.right);
        grammar.setDecisionBlockAST(d, #BLOCK);
        b.right.setDecisionASTNode(#eob);
        // make block entry state also have same decision for interpreting grammar
        NFAState altBlockState = (NFAState)g.left.transition(0).target;
        altBlockState.setDecisionASTNode(#ebnf);
        altBlockState.setDecisionNumber(d);
        g.left.setDecisionNumber(d); // this is the bypass decision (2 alts)
        g.left.setDecisionASTNode(#ebnf);
        }
    |   #( POSITIVE_CLOSURE #( blk2:BLOCK b=block eob3:EOB ) )
        {
        g = factory.build_Aplus(b);
        // don't make a decision on left edge, can reuse loop end decision
        // track the loop back / exit decision point
        b.right.setDescription("()+ loopback of "+grammar.grammarTreeToString(#ebnf,false));
        int d = grammar.assignDecisionNumber( b.right );
        grammar.setDecisionNFA(d, b.right);
        grammar.setDecisionBlockAST(d, #blk2);
        b.right.setDecisionASTNode(#eob3);
        // make block entry state also have same decision for interpreting grammar
        NFAState altBlockState = (NFAState)g.left.transition(0).target;
        altBlockState.setDecisionASTNode(#ebnf);
        altBlockState.setDecisionNumber(d);
        }
    ;

tree returns [StateCluster g=null]
{
    StateCluster e=null;
}
    :   #( TREE_BEGIN {GrammarAST el=(GrammarAST)_t;}
           g=element
           {
           StateCluster down = factory.build_Atom(Label.DOWN);
           // TODO set following states for imaginary nodes?
           //el.followingNFAState = down.right;
           g = factory.build_AB(g,down);
           }
           ( {el=(GrammarAST)_t;} e=element {g = factory.build_AB(g,e);} )*
           {
           StateCluster up = factory.build_Atom(Label.UP);
           //el.followingNFAState = up.right;
           g = factory.build_AB(g,up);
           // tree roots point at right edge of DOWN for LOOK computation later
           #tree.NFATreeDownState = down.left;
           }
         )
    ;

atom_or_notatom returns [StateCluster g=null]
    :   g=atom
    |   #( n:NOT
           (  c:CHAR_LITERAL (ast1:ast_suffix)?
              {
              int ttype=0;
              if ( grammar.type==Grammar.LEXER ) {
                  ttype = Grammar.getCharValueFromGrammarCharLiteral(#c.getText());
              }
              else {
                  ttype = grammar.getTokenType(#c.getText());
              }
              IntSet notAtom = grammar.complement(ttype);
              if ( notAtom.isNil() ) {
                  ErrorManager.grammarError(ErrorManager.MSG_EMPTY_COMPLEMENT,
                                            grammar,
                                            #c.token,
                                            #c.getText());
              }
              g=factory.build_Set(notAtom);
              }
           |  t:TOKEN_REF (ast3:ast_suffix)?
              {
              int ttype = grammar.getTokenType(t.getText());
              IntSet notAtom = grammar.complement(ttype);
              if ( notAtom.isNil() ) {
                  ErrorManager.grammarError(ErrorManager.MSG_EMPTY_COMPLEMENT,
                                            grammar,
                                            #t.token,
                                            #t.getText());
              }
              g=factory.build_Set(notAtom);
              }
           |  g=set
              {
              GrammarAST stNode = (GrammarAST)n.getFirstChild();
              IntSet notSet = grammar.complement(stNode.getSetValue());
              stNode.setSetValue(notSet);
              if ( notSet.isNil() ) {
                  ErrorManager.grammarError(ErrorManager.MSG_EMPTY_COMPLEMENT,
                                            grammar,
                                            #n.token);
              }
              g=factory.build_Set(notSet);
              }
           )
           {#n.followingNFAState = g.right;}
         )
    ;

atom returns [StateCluster g=null]
    :   #( r:RULE_REF (rarg:ARG_ACTION)? (as1:ast_suffix)? )
        {
        NFAState start = grammar.getRuleStartState(r.getText());
        if ( start!=null ) {
            int ruleIndex = grammar.getRuleIndex(r.getText());
            g = factory.build_RuleRef(ruleIndex, start);
            r.followingNFAState = g.right;
            if ( g.left.transition(0) instanceof RuleClosureTransition
                 && grammar.type!=Grammar.LEXER )
            {
                addFollowTransition(r.getText(), g.right);
            }
            // else rule ref got inlined to a set
        }
        }
    |   #( t:TOKEN_REF (targ:ARG_ACTION)? (as2:ast_suffix)? )
        {
        if ( grammar.type==Grammar.LEXER ) {
            NFAState start = grammar.getRuleStartState(t.getText());
            if ( start!=null ) {
                int ruleIndex = grammar.getRuleIndex(t.getText());
                g = factory.build_RuleRef(ruleIndex, start);
                // don't add FOLLOW transitions in the lexer;
                // only exact context should be used.
            }
        }
        else {
            int tokenType = grammar.getTokenType(t.getText());
            g = factory.build_Atom(tokenType);
            t.followingNFAState = g.right;
        }
        }
    |   #( c:CHAR_LITERAL (as3:ast_suffix)? )
        {
        if ( grammar.type==Grammar.LEXER ) {
            g = factory.build_CharLiteralAtom(c.getText());
        }
        else {
            int tokenType = grammar.getTokenType(c.getText());
            g = factory.build_Atom(tokenType);
            c.followingNFAState = g.right;
        }
        }
    |   #( s:STRING_LITERAL (as4:ast_suffix)? )
        {
        if ( grammar.type==Grammar.LEXER ) {
            g = factory.build_StringLiteralAtom(s.getText());
        }
        else {
            int tokenType = grammar.getTokenType(s.getText());
            g = factory.build_Atom(tokenType);
            s.followingNFAState = g.right;
        }
        }
    |   #( w:WILDCARD (as5:ast_suffix)? ) {g = factory.build_Wildcard();}
    |   g=set
    ;

ast_suffix
{
if ( grammar.getOption("output")==null ) {
    ErrorManager.grammarError(ErrorManager.MSG_REWRITE_OR_OP_WITH_NO_OUTPUT_OPTION,
                              grammar, #ast_suffix.token, currentRuleName);
}
}
    :   ROOT
    |   RULEROOT
    |   BANG
    ;

set returns [StateCluster g=null]
{
    IntSet elements=new IntervalSet();
    #set.setSetValue(elements); // track set for use by code gen
}
    :   #( s:SET (setElement[elements])+ ( ast:ast_suffix )? )
        {
        g = factory.build_Set(elements);
        #s.followingNFAState = g.right;
        }
        //{System.out.println("set elements="+elements.toString(grammar));}
    ;

setElement[IntSet elements]
{
    int ttype;
}
    :   c:CHAR_LITERAL
        {
        if ( grammar.type==Grammar.LEXER ) {
            ttype = Grammar.getCharValueFromGrammarCharLiteral(c.getText());
        }
        else {
            ttype = grammar.getTokenType(c.getText());
        }
        if ( elements.member(ttype) ) {
            ErrorManager.grammarError(ErrorManager.MSG_DUPLICATE_SET_ENTRY,
                                      grammar,
                                      #c.token,
                                      #c.getText());
        }
        elements.add(ttype);
        }
    |   t:TOKEN_REF
        {
        ttype = grammar.getTokenType(t.getText());
        if ( elements.member(ttype) ) {
            ErrorManager.grammarError(ErrorManager.MSG_DUPLICATE_SET_ENTRY,
                                      grammar,
                                      #t.token,
                                      #t.getText());
        }
        elements.add(ttype);
        }
    |   s:STRING_LITERAL
        {
        ttype = grammar.getTokenType(s.getText());
        if ( elements.member(ttype) ) {
            ErrorManager.grammarError(ErrorManager.MSG_DUPLICATE_SET_ENTRY,
                                      grammar,
                                      #s.token,
                                      #s.getText());
        }
        elements.add(ttype);
        }
    |   #(CHAR_RANGE c1:CHAR_LITERAL c2:CHAR_LITERAL)
        {
        if ( grammar.type==Grammar.LEXER ) {
            int a = Grammar.getCharValueFromGrammarCharLiteral(c1.getText());
            int b = Grammar.getCharValueFromGrammarCharLiteral(c2.getText());
            elements.addAll(IntervalSet.of(a,b));
        }
        }
    ;
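Editor's note: the set and setElement rules above accumulate token types into an IntervalSet and report MSG_DUPLICATE_SET_ENTRY when the same type is added twice. A minimal standalone sketch of that membership check, assuming ANTLR v3's org.antlr.misc.IntervalSet is on the classpath (the token-type values 4..6 and 5 are made up for illustration; only calls that appear in the grammar's own actions are used):

import org.antlr.misc.IntervalSet;

public class SetElementSketch {
    public static void main(String[] args) {
        IntervalSet elements = new IntervalSet();   // same container the set rule builds
        elements.addAll(IntervalSet.of(4, 6));      // e.g. a CHAR_RANGE contributes an interval
        int ttype = 5;                              // hypothetical token type for a TOKEN_REF
        if ( elements.member(ttype) ) {
            // here buildnfa.g would call ErrorManager.grammarError(MSG_DUPLICATE_SET_ENTRY, ...)
            System.out.println("duplicate set entry: " + ttype);
        }
        elements.add(ttype);
        System.out.println(elements);               // the accumulated set
    }
}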