// nescparser.g — NesC parser/lexer grammar (ANTLR 2)
##.setType( NInterfaceParamArgs );
}
;
// added:attributes
// orig:StdCParser
// Parse a complete C function definition: optional decl-specifiers, a
// declarator (K&R style allowed), optional GNU attributes, old-style
// parameter declarations, then the body.  The declarator's name is
// registered in the symbol table BEFORE the body is parsed (so the
// function can reference itself), and a scope is pushed for the
// K&R-style parameter declarations.  The result is wrapped in an
// NFunctionDef node.
functionDef
{ String declName; }
: ( (functionDeclSpecifiers)=> ds:functionDeclSpecifiers
| //epsilon
)
declName = d:declarator[true]
{
AST d2, ds2;
// Duplicate the declarator/specifier subtrees so the symbol table
// owns independent copies; the originals remain in this rule's AST.
d2 = astFactory.dupList(#d);
ds2 = astFactory.dupList(#ds);
getParserContext().getSymbolTable().add(declName, #(null, ds2, d2));
pushScope(declName);
}
( attributeDecl )*
( declaration )* (VARARGS)? ( SEMI! )*
{ popScope(); } // close the K&R parameter scope before the body
compoundStatement[declName]
{ ## = #( #[NFunctionDef], ## );}
;
// added: attributes
// orig: GnuCParser
// Match only the PREFIX of a function definition, up to (and including)
// the opening LCURLY.  This rule carries no actions: it exists to be
// used as a syntactic predicate elsewhere in the grammar, deciding
// whether upcoming input is a function definition before committing.
functionPrefix
{ String declName; }
: ( (functionDeclSpecifiers)=> ds:functionDeclSpecifiers
| //epsilon
)
declName = d:declarator[true]
( attributeDecl )*
( declaration )* (VARARGS)? ( SEMI )*
LCURLY
;
// added: use symbol table in parser state object
// orig: StdCParser
// Parse one parameter declaration inside a parameter list.  When a
// concrete declarator (with a name) is present, the name/type pair is
// recorded in the parser-state symbol table; an abstract declarator
// (type only, no name) is accepted without a symbol-table entry.
// Result is wrapped in an NParameterDeclaration node.
parameterDeclaration
{ String declName; }
: ds:declSpecifiers
( ( declarator[false] )=> declName = d:declarator[false]
{
AST d2, ds2;
// Copy the subtrees so the symbol table is independent of this AST.
d2 = astFactory.dupList(#d);
ds2 = astFactory.dupList(#ds);
getParserContext().getSymbolTable().add(declName, #(null, ds2, d2));
}
| nonemptyAbstractDeclarator
)?
{
## = #( #[NParameterDeclaration], ## );
}
;
// added: attributes
// orig: GnuCParser
// Parse an enum specifier in its three forms: a named definition
// ("enum Tag { ... }"), an anonymous definition ("enum { ... }",
// labelled "anonymous", optionally followed by GNU attributes), or a
// bare reference to a previously declared enum ("enum Tag").
enumSpecifier
: "enum"^
( ( ID LCURLY )=> i:ID LCURLY enumList[i.getText()] RCURLY
| LCURLY enumList["anonymous"] RCURLY
( attributeDecl )*
| ID
)
;
// added: use symbol table in parser state object
// orig: StdCParser
// Parse a single enumerator (with optional "= constExpr" initializer).
// The enumerator's name is entered into the symbol table with a small
// synthetic type tree of the form ( enum <enumName> ) so later
// references resolve as enum constants.
enumerator[String enumName]
: i:ID { getParserContext().getSymbolTable().add( i.getText(),
#( null,
#[LITERAL_enum, "enum"],
#[ ID, enumName]
)
);
}
(ASSIGN constExpr)?
;
// added: use symbol table in parser state object
// orig: GnuCParser
// Parse one init-declarator of a declaration: declarator, optional GNU
// attributes, and an optional initializer ("= ...") or bit-field width
// (": expr").  The declared name is registered in the symbol table
// together with the declaration specifiers passed in by the caller.
// Result is wrapped in an NInitDecl node.
initDecl[AST declarationSpecifiers]
{ String declName = ""; }
: declName = d:declarator[false]
{ AST ds1, d1;
// Duplicate both subtrees so the symbol-table entry is self-contained.
ds1 = astFactory.dupList(declarationSpecifiers);
d1 = astFactory.dupList(#d);
getParserContext().getSymbolTable().add(declName, #(null, ds1, d1) );
}
( attributeDecl )*
( ASSIGN initializer
| COLON expr
)?
{ ## = #( #[NInitDecl], ## ); }
;
// added: use symbol table in parser state object
// orig: GnuCParser
// Parse a GNU-style nested function definition (a function defined
// inside another function's body, optionally introduced by "auto").
// Like functionDef: the name is added to the symbol table before the
// body, a scope is pushed/popped around the local declarations, and
// the result is wrapped in an NFunctionDef node.
nestedFunctionDef
{ String declName; }
: ( "auto" )? //only for nested functions
( (functionDeclSpecifiers)=> ds:functionDeclSpecifiers
)?
declName = d:declarator[false]
{
AST d2, ds2;
// Copy the subtrees so the symbol table owns independent versions.
d2 = astFactory.dupList(#d);
ds2 = astFactory.dupList(#ds);
getParserContext().getSymbolTable().add(declName, #(null, ds2, d2));
pushScope(declName);
}
( declaration )*
{ popScope(); }
compoundStatement[declName]
{ ## = #( #[NFunctionDef], ## );}
;
// node types
// Dummy rule that references every NesC-specific imaginary (AST-only)
// node type.  It is never invoked at parse time; it exists so ANTLR
// sees these token names and assigns them token-type numbers for use
// in tree construction.
nescDummy:
NInterfaceParamArgs
| NModule
| NModuleFile
| NInterface
| NInterfaceFile
| NConfiguration
| NConfigurationFile
| NUses
| NProvides
| NImplementation
| NInterfaceDescription
| NIncludes
| NIncludeFile
| NIncludeFileName
| NText
| NInterfaceParameterList
| NComponentList
| NConnectionList
;
// Header action: this Java import block is copied verbatim into the
// generated NesCLexer source file.
{
import isis.anp.common.CToken;
import isis.anp.common.LineObject;
import isis.anp.common.PreprocessorInfoChannel;
import java.io.InputStream;
import java.io.Reader;
import java.util.Hashtable;
import antlr.ANTLRHashString;
import antlr.ByteBuffer;
import antlr.CharBuffer;
import antlr.CharStreamException;
import antlr.CharStreamIOException;
import antlr.InputBuffer;
import antlr.LexerSharedInputState;
import antlr.NoViableAltForCharException;
import antlr.RecognitionException;
import antlr.SemanticException;
import antlr.Token;
import antlr.TokenStream;
import antlr.TokenStreamException;
import antlr.TokenStreamIOException;
import antlr.TokenStreamRecognitionException;
import antlr.collections.impl.BitSet;
}
// NesC lexer, extending the GNU C lexer.  Uses the NesC token
// vocabulary; literal testing is off by default (identifiers are
// matched against the literals table explicitly where needed).
class NesCLexer extends GnuCLexer;
options
{
k = 3;
importVocab = NesC;
testLiterals = false;
}
tokens {
LITERAL___extension__ = "__extension__";
}
// Member action block: Java fields and methods copied into the
// generated NesCLexer class.  Handles GCC "__"-prefixed keyword
// aliases, source-line tracking for #line-style directives, token
// counting, and the preprocessor-info side channel.
{
// Convenience initializer: record the original source name, then run
// the normal literal-table setup.
public void initialize(String src)
{
setOriginalSource(src);
initialize();
}
// Map the GCC double-underscore keyword spellings onto the ordinary
// keyword token types so e.g. "__const__" lexes as "const".
public void initialize()
{
literals.put(new ANTLRHashString("__alignof__", this), new Integer(LITERAL___alignof));
literals.put(new ANTLRHashString("__asm", this), new Integer(LITERAL_asm));
literals.put(new ANTLRHashString("__asm__", this), new Integer(LITERAL_asm));
literals.put(new ANTLRHashString("__attribute__", this), new Integer(LITERAL___attribute));
literals.put(new ANTLRHashString("__complex__", this), new Integer(LITERAL___complex));
literals.put(new ANTLRHashString("__const", this), new Integer(LITERAL_const));
literals.put(new ANTLRHashString("__const__", this), new Integer(LITERAL_const));
literals.put(new ANTLRHashString("__imag__", this), new Integer(LITERAL___imag));
literals.put(new ANTLRHashString("__inline", this), new Integer(LITERAL_inline));
literals.put(new ANTLRHashString("__inline__", this), new Integer(LITERAL_inline));
literals.put(new ANTLRHashString("__real__", this), new Integer(LITERAL___real));
literals.put(new ANTLRHashString("__signed", this), new Integer(LITERAL_signed));
literals.put(new ANTLRHashString("__signed__", this), new Integer(LITERAL_signed));
literals.put(new ANTLRHashString("__typeof", this), new Integer(LITERAL_typeof));
literals.put(new ANTLRHashString("__typeof__", this), new Integer(LITERAL_typeof));
literals.put(new ANTLRHashString("__volatile", this), new Integer(LITERAL_volatile));
literals.put(new ANTLRHashString("__volatile__", this), new Integer(LITERAL_volatile));
}
LineObject lineObject = new LineObject();         // current source/line state
String originalSource = "";                       // name of the top-level source
PreprocessorInfoChannel preprocessorInfoChannel = new PreprocessorInfoChannel();
int tokenNumber = 0;                              // running count of emitted tokens
boolean countingTokens = true;                    // suspended while lexing line directives
int deferredLineCount = 0;                        // newlines seen but not yet applied
// Enable/disable token counting.  Note the asymmetric reset values
// (0 when counting, 1 when not) — preserved as-is from the original.
public void setCountingTokens(boolean ct)
{
countingTokens = ct;
if ( countingTokens ) {
tokenNumber = 0;
}
else {
tokenNumber = 1;
}
}
public void setOriginalSource(String src)
{
originalSource = src;
lineObject.setSource(src);
}
public void setSource(String src)
{
lineObject.setSource(src);
}
public PreprocessorInfoChannel getPreprocessorInfoChannel()
{
return preprocessorInfoChannel;
}
// Attach a preprocessing directive's text to the current token number
// so the tree builder can re-associate it later.
public void setPreprocessingDirective(String pre)
{
preprocessorInfoChannel.addLineForTokenNumber( pre, new Integer(tokenNumber) );
}
// Build a CToken stamped with the current line, source name, and token
// number; then flush any newlines deferred while scanning comments.
protected Token makeToken(int t)
{
if ( t != Token.SKIP && countingTokens) {
tokenNumber++;
}
CToken tok = (CToken) super.makeToken(t);
tok.setLine(lineObject.line);
tok.setSource(lineObject.source);
tok.setTokenNumber(tokenNumber);
// System.out.println("Token #"+tok.getTokenNumber()+" at " + tok.getSource() + ":" + tok.getLine() + ":" + tok.getColumn());
for(int i=0; i<deferredLineCount; i++) {
newline();
}
// lineObject.line += deferredLineCount;
deferredLineCount = 0;
return tok;
}
// Record a newline without applying it yet (used inside comments so
// the comment token keeps its starting line number).
public void deferredNewline() {
deferredLineCount++;
}
public void newline() {
super.newline();
lineObject.newline();
}
/*
// pop lexer on EOF
public void uponEOF() throws TokenStreamException, CharStreamException {
System.out.println("!!!!!!Found EOF "+lineObject.getSource());
try {
// don't allow EOF until main lexer. Force the
// selector to retry for another token.
System.out.println("!!!!!!Popping");
NesCParser.lexerSelector.pop(); // return to old lexer/stream
System.out.println("!!!!!!Popped. Retrying");
NesCParser.lexerSelector.retry();
System.out.println("!!!!!!Retry succeeded");
} catch (NoSuchElementException e) {
System.out.println("!!!!!!NoSuchElementException");
// return a real EOF if nothing in stack
}
}
*/
}
// Lex a preprocessor line marker ("# <num> "<file>" [flags]", with the
// "line" keyword optional as in GNU cpp output).  Updates lineObject
// with the new line/source and GCC flags (1=entering file, 2=returning,
// 3=system header, 4=treat as C), records the marker on the
// preprocessor-info channel, and suspends token counting while the
// directive itself is scanned.
protected LineDirective
{
boolean oldCountingTokens = countingTokens;
countingTokens = false;
}
:
{
lineObject = new LineObject();
deferredLineCount = 0;
}
("line")? //this would be for if the directive started "#line", but not there for GNU directives
(Space)+
n:Number {
lineObject.setLine(Integer.parseInt(n.getText())-1);
}
(Space)+
( fn:StringLiteral { try {
lineObject.setSource(fn.getText().substring(1,fn.getText().length()-1));
}
catch (StringIndexOutOfBoundsException e) { /*not possible*/ }
}
| fi:ID { lineObject.setSource(fi.getText());
}
)?
(Space)*
("1" { lineObject.setEnteringFile(true); } )?
(Space)*
("2" { lineObject.setReturningToFile(true); } )?
(Space)*
("3" { lineObject.setSystemHeader(true); } )?
(Space)*
("4" { lineObject.setTreatAsC(true); } )?
(~('\r' | '\n'))*
("\r\n" | "\r" | "\n")
{
preprocessorInfoChannel.addLineForTokenNumber(new LineObject(lineObject), new Integer(tokenNumber));
countingTokens = oldCountingTokens;
newline();
// System.out.println("ColAtEnd="+getColumn());
}
;
// "<-" token — presumably the NesC wiring operator used in
// configuration connection lists (verify against the parser rules).
BACKPTR: "<-";
// Ordinary C block comment.  The LA(3) != '*' guard rejects "/**" so
// that documentation comments fall through to the TinyDoc rule below.
// Newlines inside the comment are deferred (see deferredNewline) and
// the whole token is SKIPped.
Comment
: { LA(3) != '*' }?
( "/*"
( { LA(2) != '/' }? '*'
| "\r\n" { deferredNewline(); }
| ( '\r' | '\n' ) { deferredNewline(); }
| ~( '*'| '\r' | '\n' )
)*
"*/" { _ttype = Token.SKIP;
}
)
;
// TinyOS documentation comment ("/** ... */").  Unlike Comment this
// rule does NOT set Token.SKIP, so the comment's token is passed on to
// the parser.  Interior newlines are deferred as in Comment.
TinyDoc
: "/**" { // System.out.println("LEXER: found start of TinyDoc comment");
}
( { LA(2) != '/' }? '*'
| "\r\n" { deferredNewline(); }
| ( '\r' | '\n' ) { deferredNewline(); }
| ~( '*'| '\r' | '\n' )
)*
"*/"
;
// NOTE(cleanup): trailing web-viewer artifacts (shortcut-key help text,
// not part of the grammar) removed.