// gnuclexer.java
// NOTE(review): the lines that stood above the package declaration were
// page chrome from the web code viewer this file was copied out of
// ("plugin for eclipse" snippet, Java, 2,396 lines total, page 1 of 5) —
// they were not Java source.
// $ANTLR : "expandedGnuCParser.g" -> "GnuCLexer.java"$
package isis.anp.gnuc;
import java.io.InputStream;
import antlr.TokenStreamException;
import antlr.TokenStreamIOException;
import antlr.TokenStreamRecognitionException;
import antlr.CharStreamException;
import antlr.CharStreamIOException;
import antlr.ANTLRException;
import java.io.Reader;
import java.util.Hashtable;
import antlr.CharScanner;
import antlr.InputBuffer;
import antlr.ByteBuffer;
import antlr.CharBuffer;
import antlr.Token;
import antlr.CommonToken;
import antlr.RecognitionException;
import antlr.NoViableAltForCharException;
import antlr.MismatchedCharException;
import antlr.TokenStream;
import antlr.ANTLRHashString;
import antlr.LexerSharedInputState;
import antlr.collections.impl.BitSet;
import antlr.SemanticException;
import isis.anp.common.CToken;
import isis.anp.common.LineObject;
import isis.anp.common.PreprocessorInfoChannel;
import java.io.InputStream;
import java.io.Reader;
import java.util.Hashtable;
import antlr.ANTLRHashString;
import antlr.ByteBuffer;
import antlr.CharBuffer;
import antlr.CharStreamException;
import antlr.CharStreamIOException;
import antlr.InputBuffer;
import antlr.LexerSharedInputState;
import antlr.NoViableAltForCharException;
import antlr.RecognitionException;
import antlr.Token;
import antlr.TokenStream;
import antlr.TokenStreamException;
import antlr.TokenStreamIOException;
import antlr.TokenStreamRecognitionException;
import antlr.collections.impl.BitSet;
public class GnuCLexer extends antlr.CharScanner implements GnuCLexerTokenTypes, TokenStream
{
public void initialize(String src)
{
    // Convenience overload: records the name of the translation unit being
    // lexed, then installs the GNU-specific literal synonyms exactly as the
    // no-argument initialize() would.
    setOriginalSource(src);
    initialize();
}
public void initialize()
{
    // Registers the GNU C alternate keyword spellings (the "__keyword" and
    // "__keyword__" forms) so they lex as the same token types as their
    // standard counterparts.  Must be called once after construction,
    // either directly or via initialize(String).
    addLiteral("__alignof__", LITERAL___alignof);
    addLiteral("__asm", LITERAL_asm);
    addLiteral("__asm__", LITERAL_asm);
    addLiteral("__attribute__", LITERAL___attribute);
    addLiteral("__complex__", LITERAL___complex);
    addLiteral("__const", LITERAL_const);
    addLiteral("__const__", LITERAL_const);
    addLiteral("__imag__", LITERAL___imag);
    addLiteral("__inline", LITERAL_inline);
    addLiteral("__inline__", LITERAL_inline);
    addLiteral("__real__", LITERAL___real);
    addLiteral("__signed", LITERAL_signed);
    addLiteral("__signed__", LITERAL_signed);
    addLiteral("__typeof", LITERAL_typeof);
    addLiteral("__typeof__", LITERAL_typeof);
    addLiteral("__volatile", LITERAL_volatile);
    addLiteral("__volatile__", LITERAL_volatile);
}

// Maps one literal spelling to a token type in the inherited literals table.
// Integer.valueOf replaces the deprecated Integer(int) constructor used by
// the original hand-expanded puts and reuses cached boxes for small values.
private void addLiteral(String text, int tokenType)
{
    literals.put(new ANTLRHashString(text, this), Integer.valueOf(tokenType));
}
// Tracks the current source-file name and line number, updated by cpp
// #line handling and newline()/deferredNewline().
LineObject lineObject = new LineObject();
// Name of the top-level file handed to setOriginalSource(); "" until set.
String originalSource = "";
// Side channel that carries preprocessor directives keyed by token number,
// fed by setPreprocessingDirective() and read back by the parser.
PreprocessorInfoChannel preprocessorInfoChannel = new PreprocessorInfoChannel();
// Sequence number stamped onto tokens in makeToken(); incremented per
// non-SKIP token while countingTokens is true.
int tokenNumber = 0;
// When false, tokenNumber is pinned to 1 instead of incrementing
// (see setCountingTokens).
boolean countingTokens = true;
// Newlines noticed mid-token via deferredNewline(); applied to
// lineObject.line only after the token is built in makeToken(), so each
// token keeps its starting line number.
int deferredLineCount = 0;
public void setCountingTokens(boolean ct)
{
    // Enables or disables per-token numbering.  While counting, the
    // sequence restarts at zero; when disabled, every token is stamped
    // with the fixed sentinel value 1.
    countingTokens = ct;
    tokenNumber = countingTokens ? 0 : 1;
}
public void setOriginalSource(String src)
{
    // Remembers the top-level translation-unit name and propagates it to
    // the line tracker as the current source file.
    this.originalSource = src;
    this.lineObject.setSource(src);
}
// Switches only the line tracker's notion of the current source file
// (e.g. when a cpp #line directive names a different file);
// originalSource is deliberately left untouched.
public void setSource(String src)
{
lineObject.setSource(src);
}
// Exposes the channel of preprocessor directives (keyed by token number)
// collected while lexing, for the parser to re-emit in place.
public PreprocessorInfoChannel getPreprocessorInfoChannel()
{
return preprocessorInfoChannel;
}
public void setPreprocessingDirective(String pre)
{
    // Attaches the preprocessor line text to the number of the next real
    // token, so downstream consumers can reconstruct directive placement.
    // Integer.valueOf replaces the deprecated Integer(int) constructor;
    // the boxed value is compared by equals/hashCode, so the cached box is
    // behaviorally equivalent.
    preprocessorInfoChannel.addLineForTokenNumber(pre, Integer.valueOf(tokenNumber));
}
// Builds a token of type t and stamps it with the current line, source
// file and (when counting) a sequential token number used to key the
// preprocessor info channel.  Statement order matters throughout.
protected Token makeToken(int t)
{
// SKIP tokens (whitespace, comments) must not consume a token number.
if ( t != Token.SKIP && countingTokens) {
tokenNumber++;
}
// Cast is safe only because the token class is configured to CToken
// elsewhere — NOTE(review): assumed from the cast; confirm setTokenObjectClass
// is called during lexer setup.
CToken tok = (CToken) super.makeToken(t);
tok.setLine(lineObject.line);
tok.setSource(lineObject.source);
tok.setTokenNumber(tokenNumber);
// Newlines seen inside this token are applied only now, after stamping,
// so the token carries its starting line number.
lineObject.line += deferredLineCount;
deferredLineCount = 0;
return tok;
}
// Records a newline encountered inside a token (e.g. a line continuation);
// the line counter is advanced later, in makeToken(), so the enclosing
// token keeps the line number it started on.
public void deferredNewline() {
deferredLineCount++;
}
// Advances both ANTLR's internal line counter and this lexer's
// LineObject tracker in lockstep.
public void newline() {
super.newline();
lineObject.newline();
}
// Lexes raw bytes from an InputStream (wrapped in an ANTLR ByteBuffer).
public GnuCLexer(InputStream in) {
this(new ByteBuffer(in));
}
// Lexes characters from a Reader (wrapped in an ANTLR CharBuffer).
public GnuCLexer(Reader in) {
this(new CharBuffer(in));
}
// Wraps an ANTLR InputBuffer in a fresh shared-input state.
public GnuCLexer(InputBuffer ib) {
this(new LexerSharedInputState(ib));
}
// Primary constructor (target of the chaining constructors above):
// configures case sensitivity and builds the C keyword table.
// ANTLR-GENERATED from expandedGnuCParser.g — the numeric token-type
// values must match GnuCLexerTokenTypes; do not edit them by hand.
// GNU "__keyword" synonyms are added separately by initialize().
public GnuCLexer(LexerSharedInputState state) {
super(state);
caseSensitiveLiterals = true;
setCaseSensitive(true);
// Keyword text -> boxed token type (values mirror GnuCLexerTokenTypes).
literals = new Hashtable();
literals.put(new ANTLRHashString("extern", this), new Integer(15));
literals.put(new ANTLRHashString("__real", this), new Integer(147));
literals.put(new ANTLRHashString("case", this), new Integer(44));
literals.put(new ANTLRHashString("short", this), new Integer(20));
literals.put(new ANTLRHashString("break", this), new Integer(42));
literals.put(new ANTLRHashString("while", this), new Integer(37));
literals.put(new ANTLRHashString("typeof", this), new Integer(143));
literals.put(new ANTLRHashString("inline", this), new Integer(142));
literals.put(new ANTLRHashString("unsigned", this), new Integer(26));
literals.put(new ANTLRHashString("const", this), new Integer(17));
literals.put(new ANTLRHashString("float", this), new Integer(23));
literals.put(new ANTLRHashString("return", this), new Integer(43));
literals.put(new ANTLRHashString("sizeof", this), new Integer(79));
literals.put(new ANTLRHashString("do", this), new Integer(38));
literals.put(new ANTLRHashString("__label__", this), new Integer(141));
literals.put(new ANTLRHashString("typedef", this), new Integer(4));
literals.put(new ANTLRHashString("if", this), new Integer(46));
literals.put(new ANTLRHashString("double", this), new Integer(24));
literals.put(new ANTLRHashString("volatile", this), new Integer(6));
literals.put(new ANTLRHashString("__attribute", this), new Integer(145));
literals.put(new ANTLRHashString("union", this), new Integer(11));
literals.put(new ANTLRHashString("register", this), new Integer(14));
literals.put(new ANTLRHashString("auto", this), new Integer(13));
literals.put(new ANTLRHashString("goto", this), new Integer(40));
literals.put(new ANTLRHashString("enum", this), new Integer(12));
literals.put(new ANTLRHashString("int", this), new Integer(21));
literals.put(new ANTLRHashString("for", this), new Integer(39));
literals.put(new ANTLRHashString("char", this), new Integer(19));
literals.put(new ANTLRHashString("default", this), new Integer(45));
literals.put(new ANTLRHashString("__imag", this), new Integer(148));
literals.put(new ANTLRHashString("__alignof", this), new Integer(146));
literals.put(new ANTLRHashString("static", this), new Integer(16));
literals.put(new ANTLRHashString("continue", this), new Integer(41));
literals.put(new ANTLRHashString("struct", this), new Integer(10));
literals.put(new ANTLRHashString("signed", this), new Integer(25));
literals.put(new ANTLRHashString("else", this), new Integer(47));
literals.put(new ANTLRHashString("void", this), new Integer(18));
literals.put(new ANTLRHashString("switch", this), new Integer(48));
literals.put(new ANTLRHashString("long", this), new Integer(22));
literals.put(new ANTLRHashString("__extension__", this), new Integer(149));
literals.put(new ANTLRHashString("asm", this), new Integer(5));
literals.put(new ANTLRHashString("__complex", this), new Integer(144));
}
public Token nextToken() throws TokenStreamException {
Token theRetToken=null;
tryAgain:
for (;;) {
Token _token = null;
int _ttype = Token.INVALID_TYPE;
resetText();
try { // for char stream error handling
try { // for lexical error handling
switch ( LA(1)) {
case '\t': case '\n': case '\u000c': case '\r':
case ' ':
{
mWhitespace(true);
theRetToken=_returnToken;
break;
}
case '.': case '0': case '1': case '2':
case '3': case '4': case '5': case '6':
case '7': case '8': case '9':
{
mNumber(true);
theRetToken=_returnToken;
break;
}
case '\'':
{
mCharLiteral(true);
theRetToken=_returnToken;
break;
}
case '"':
{
mStringLiteral(true);
theRetToken=_returnToken;
break;
}
case ':':
{
mCOLON(true);
theRetToken=_returnToken;
break;
}
case ',':
{
mCOMMA(true);
theRetToken=_returnToken;
break;
}
case '?':
{
mQUESTION(true);
theRetToken=_returnToken;
break;
}
case ';':
{
mSEMI(true);
theRetToken=_returnToken;
break;
}
case '(':
{
mLPAREN(true);
theRetToken=_returnToken;
break;
}
case ')':
{
mRPAREN(true);
theRetToken=_returnToken;
break;
}
case '[':
{
mLBRACKET(true);
theRetToken=_returnToken;
break;
}
case ']':
{
mRBRACKET(true);
theRetToken=_returnToken;
break;
}
case '{':
{
mLCURLY(true);
theRetToken=_returnToken;
break;
}
case '}':
{
mRCURLY(true);
theRetToken=_returnToken;
break;
}
case '~':
{
mBNOT(true);
theRetToken=_returnToken;
break;
}
case '#':
{
mPREPROC_DIRECTIVE(true);
theRetToken=_returnToken;
break;
}
default:
if ((LA(1)=='>') && (LA(2)=='>') && (LA(3)=='=')) {
mRSHIFT_ASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='<') && (LA(2)=='<') && (LA(3)=='=')) {
mLSHIFT_ASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='L') && (LA(2)=='\'')) {
mWideCharLiteral(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='L') && (LA(2)=='"')) {
mWideStringLiteral(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='-') && (LA(2)=='>')) {
mPTR(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='=') && (LA(2)=='=')) {
mEQUAL(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='!') && (LA(2)=='=')) {
mNOT_EQUAL(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='<') && (LA(2)=='=')) {
mLTE(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='>') && (LA(2)=='=')) {
mGTE(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='/') && (LA(2)=='=')) {
mDIV_ASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='+') && (LA(2)=='=')) {
mPLUS_ASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='+') && (LA(2)=='+')) {
mINC(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='-') && (LA(2)=='=')) {
mMINUS_ASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='-') && (LA(2)=='-')) {
mDEC(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='*') && (LA(2)=='=')) {
mSTAR_ASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='%') && (LA(2)=='=')) {
mMOD_ASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='>') && (LA(2)=='>') && (true)) {
mRSHIFT(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='<') && (LA(2)=='<') && (true)) {
mLSHIFT(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='&') && (LA(2)=='&')) {
mLAND(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='|') && (LA(2)=='|')) {
mLOR(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='&') && (LA(2)=='=')) {
mBAND_ASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='|') && (LA(2)=='=')) {
mBOR_ASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='^') && (LA(2)=='=')) {
mBXOR_ASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='/') && (LA(2)=='*')) {
mComment(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='/') && (LA(2)=='/')) {
mCPPComment(true);
theRetToken=_returnToken;
}
else if ((_tokenSet_0.member(LA(1))) && (true)) {
mIDMEAT(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='=') && (true)) {
mASSIGN(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='<') && (true)) {
mLT(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='>') && (true)) {
mGT(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='/') && (true)) {
mDIV(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='+') && (true)) {
mPLUS(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='-') && (true)) {
mMINUS(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='*') && (true)) {
mSTAR(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='%') && (true)) {
mMOD(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='!') && (true)) {
mLNOT(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='&') && (true)) {
mBAND(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='|') && (true)) {
mBOR(true);
theRetToken=_returnToken;
}
else if ((LA(1)=='^') && (true)) {
mBXOR(true);
theRetToken=_returnToken;
}
else {
if (LA(1)==EOF_CHAR) {uponEOF(); _returnToken = makeToken(Token.EOF_TYPE);}
else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());}
}
}
if ( _returnToken==null ) continue tryAgain; // found SKIP token
_ttype = _returnToken.getType();
_returnToken.setType(_ttype);
return _returnToken;
}
// NOTE(review): [extraction artifact] The lines that stood here were
// code-viewer keyboard-shortcut help (copy, search, full-screen, font
// size), not Java source.  The remainder of nextToken() — its catch
// clauses and loop/method close — and the rest of this 2,396-line file
// (pages 2-5 of the original listing) are not present in this chunk.