📄 c.stg

📁 ANTLR (ANother Tool for Language Recognition) is a tool that…
💻 STG (StringTemplate group)
📖 Page 1 of 5
#undef      followPush
#undef      followPop
#undef      precover
#undef      preportError
#undef      LA
#undef      LT
#undef      exConstruct
#undef      inputConsume
#undef      markMyWords
#undef      rewindInput
#undef      rewindLast
#undef      perrorRecovery
#undef      hasFailed
#undef      failedFlag
#undef      precoverFromMismatchedSet
#undef      precoverFromMismatchedElement

#define     prsr                    ctx->pParser
#define     rec                     prsr->rec
#define     haveAlreadyParsedRule(r)    rec->alreadyParsedRule(rec, r)
#define     theInput()              prsr->tstream
#define     strStream               theInput()
#define     hasException()          (rec->error == ANTLR3_TRUE)
#define     theException()          rec->exception
#define     tmatch(t, fs)           rec->match(rec, t, fs)
#define     tmatchAny()             rec->matchAny(rec)
#define     followStk()             rec->following
#define     followPush(x)           followStk()->push(followStk(), ((void *)(x)), NULL)
#define     followPop()             followStk()->pop(followStk())
#define     precover()              rec->recover(rec)
#define     preportError()          rec->reportError(rec)
#define     LA(n)                   theInput()->istream->LA(theInput()->istream, n)
#define     LT(n)                   theInput()->LT(theInput(), n)
#define     exConstruct()           rec->exConstruct(rec)
#define     inputConsume()          theInput()->istream->consume(theInput()->istream)
#define     markMyWords()           theInput()->istream->mark(theInput()->istream)
#define     rewindInput(m)          theInput()->istream->rewind(theInput()->istream, m)
#define     rewindLast()            theInput()->istream->rewindLast(theInput()->istream)
#define     perrorRecovery()        rec->errorRecovery
#define     _fsp                    rec->_fsp
#define     failedFlag              rec->failed
#define     hasFailed()             (failedFlag == ANTLR3_TRUE)
#define     backtracking            rec->backtracking
#define     precoverFromMismatchedSet(s)        rec->recoverFromMismatchedSet(rec, s)
#define     precoverFromMismatchedElement(e)    rec->recoverFromMismatchedElement(rec, e)
<endif>
<if(TREE_PARSER)>
/* Macros for accessing things in the tree parser */
#undef      prsr
#undef      rec
#undef      haveAlreadyParsedRule
#undef      theInput
#undef      strStream
#undef      hasException
#undef      theException
#undef      tmatch
#undef      tmatchAny
#undef      followStk
#undef      followPush
#undef      followPop
#undef      precover
#undef      preportError
#undef      LA
#undef      LT
#undef      exConstruct
#undef      inputConsume
#undef      markMyWords
#undef      rewindInput
#undef      rewindLast
#undef      perrorRecovery
#undef      hasFailed
#undef      failedFlag
#undef      precoverFromMismatchedSet
#undef      precoverFromMismatchedElement
#undef      backtracking

#define     prsr                    ctx->pTreeParser
#define     rec                     prsr->rec
#define     haveAlreadyParsedRule(r)    rec->alreadyParsedRule(rec, r)
#define     theInput()              prsr->ctnstream
#define     strStream               theInput()->tnstream
#define     hasException()          (rec->error == ANTLR3_TRUE)
#define     theException()          rec->exception
#define     tmatch(t, fs)           rec->match(rec, t, fs)
#define     tmatchAny()             rec->matchAny(rec)
#define     followStk()             rec->following
#define     followPush(x)           followStk()->push(followStk(), ((void *)(x)), NULL)
#define     followPop()             followStk()->pop(followStk())
#define     precover()              rec->recover(rec)
#define     preportError()          rec->reportError(rec)
#define     LA(n)                   theInput()->tnstream->istream->LA(theInput()->tnstream->istream, n)
#define     LT(n)                   theInput()->tnstream->LT(theInput()->tnstream, n)
#define     exConstruct()           rec->exConstruct(rec)
#define     inputConsume()          theInput()->tnstream->istream->consume(theInput()->tnstream->istream)
#define     markMyWords()           theInput()->tnstream->istream->mark(theInput()->tnstream->istream)
#define     rewindInput(m)          theInput()->tnstream->istream->rewind(theInput()->tnstream->istream, m)
#define     rewindLast()            theInput()->tnstream->istream->rewindLast(theInput()->tnstream->istream)
#define     perrorRecovery()        rec->errorRecovery
#define     _fsp                    rec->_fsp
#define     failedFlag              rec->failed
#define     hasFailed()             (failedFlag == ANTLR3_TRUE)
#define     backtracking            rec->backtracking
#define     precoverFromMismatchedSet(s)        rec->recoverFromMismatchedSet(rec, s)
#define     precoverFromMismatchedElement(e)    rec->recoverFromMismatchedElement(rec, e)
<endif>
#endif

#ifndef ANTLR3_TOKEN_STRUCT
#define ANTLR3_TOKEN_STRUCT
/** Work around because Token.UP and Token.DOWN are defined
 *  literally to dfaEdge() - consult with Ter on this one.
 */
typedef struct ANTLR3_TOKEN_STRUCT_struct
{
    ANTLR3_INT32   INVALID;
    ANTLR3_INT32   EOR;
    ANTLR3_INT32   DOWN;
    ANTLR3_INT32   UP;
}
    TOKEN_STRUCT;
#endif

/* END - Note: Keep extra linefeed to satisfy UNIX systems */

>>

grammarType() ::= <<
<if(PARSER)>parser<endif>
<if(LEXER)>lexer<endif>
<if(TREE_PARSER)>tree parser<endif>
>>

mainName() ::= <<
<if(PARSER)><name><endif>
<if(LEXER)><name><endif>
<if(TREE_PARSER)><name><endif>
>>

headerReturnScope(ruleDescriptor) ::= "<returnScope(...)>"

headerReturnType(ruleDescriptor) ::= "<returnType()>"

// Produce the lexer output
//
lexer(  grammar,
        name,
        tokens,
        scopes,
        rules,
        numRules,
        labelType="pANTLR3_COMMON_TOKEN",
        filterMode) ::= <<

<if(filterMode)>
/* Forward declare implementation function for ANTLR3_TOKEN_SOURCE interface.
 */
static pANTLR3_COMMON_TOKEN <name>NextToken   (pANTLR3_LEXER          lexerContext);
<endif>

/* Forward declare the locally static matching functions we have generated.
 */
<rules:{r | static <headerReturnType(ruleDescriptor=r.ruleDescriptor)>	m<r.ruleDescriptor.name>    (p<name> ctx<if(r.ruleDescriptor.parameterScope)>, <endif><r.ruleDescriptor.parameterScope:parameterScope(scope=it)>);}; separator="\n";>
static void	<name>Free(p<name> ctx);

<scopes:{<if(it.isDynamicGlobalScope)><globalAttributeScope(scope=it)><endif>}>
<actions.lexer.members>

static void
<name>Free  (p<name> ctx)
{
<if(memoize)>
    ruleMemo->free(ruleMemo);
<endif>
    lexr->free(lexr);

    ANTLR3_FREE(ctx);
}

/** \brief Name of the grammar file that generated this code
 */
static unsigned char fileName[] = "<fileName>";

/** \brief Return the name of the grammar file that generated this code.
 */
static unsigned char * getGrammarFileName()
{
    return fileName;
}

<if(filterMode)>
    <filteringNextToken()>
<endif>

/** \brief Create a new lexer called <name>
 *
 * \param[in] instream Pointer to an initialized input stream
 *
 * \return
 *     - Success p<name> initialized for the lex start
 *     - Fail (p<name>)(ANTLR3_ERR_NOMEM)
 */
ANTLR3_API p<name> <name>New         (pANTLR3_INPUT_STREAM     instream)
{
    p<name> lexCtx; /* Context structure we will build and return   */

    lexCtx = (p<name>) ANTLR3_MALLOC(sizeof(<name>));

    if  (lexCtx == NULL)
    {
        /* Failed to allocate memory for lexer context */
        return  (p<name>)ANTLR3_ERR_NOMEM;
    }

    /* -------------------------------------------------------------------
     * Memory for the basic structure is allocated, now to fill in
     * the base ANTLR3 structures. We initialize the function pointers
     * for the standard ANTLR3 lexer function set, but upon return
     * from here, the programmer may set the pointers to provide custom
     * implementations of each function.
     *
     * We don't use the macros defined in <name>.h here so you can get a sense
     * of what goes where.
     */

    /* Create a base lexer, using the supplied input stream
     */
    lexCtx->pLexer  = antlr3LexerNewStream(ANTLR3_SIZE_HINT, instream);

    /* Check that we allocated the memory correctly
     */
    if  (lexCtx->pLexer == (pANTLR3_LEXER)ANTLR3_ERR_NOMEM)
    {
        ANTLR3_FREE(lexCtx);
        return  (p<name>)ANTLR3_ERR_NOMEM;
    }
<if(memoize)>
    /* Create a LIST for recording rule memos.
     */
    lexCtx->pLexer->rec->ruleMemo    = antlr3ListNew(ANTLR3_SIZE_HINT);
<endif>

    /* Install the implementation of our <name> interface
     */
    <rules:{r | lexCtx->m<r.ruleDescriptor.name>	= m<r.ruleDescriptor.name>;}; separator="\n";>

    /** When the nextToken() call is made to this lexer's pANTLR3_TOKEN_SOURCE
     *  it will call mTokens() in this generated code, and will pass it the ctx
     *  pointer of this lexer, not the context of the base lexer, so store that now.
     */
    lexCtx->pLexer->ctx     = lexCtx;

    /** Install the token matching function
     */
    lexCtx->pLexer->mTokens = (void (*) (void *))(mTokens);

    lexCtx->getGrammarFileName  = getGrammarFileName;
    lexCtx->free                = <name>Free;

<if(filterMode)>
    /* We have filter mode turned on, so install the filtering nextToken function
     */
    lexCtx->pLexer->tokSource->nextToken = <name>NextToken;
<endif>

    /* Return the newly built lexer to the caller
     */
    return  lexCtx;
}

<if(cyclicDFAs)>

/* =========================================================================
 * DFA tables for the lexer
 */
<cyclicDFAs:cyclicDFA()> <! dump tables for all DFA !>

/* =========================================================================
 * End of DFA tables for the lexer
 */
<endif>

/* =========================================================================
 * Functions to match the lexer grammar defined tokens from the input stream
 */

<rules; separator="\n\n">

/* =========================================================================
 * Lexer matching rules end.
 * =========================================================================
 */
<if(synpreds)>

/* =========================================================================
 * Lexer syntactic predicates
 */
<synpreds:{p | <lexerSynpred(predname=p)>}>

/* =========================================================================
 * Lexer syntactic predicates end.
 * =========================================================================
 */
<endif>

/* End of Lexer code
 * ================================================
 * ================================================
 */

>>

/** An override of the lexer's nextToken() method that backtracks over mTokens() looking
 *  for matches.  No error can be generated upon error; just rewind, consume
 *  a token and then try again.  backtracking needs to be set as well.
 *  Make rule memoization happen only at levels above 1 as we start mTokens
 *  at backtracking==1.
 */
filteringNextToken() ::= <<
static ANTLR3_TOKEN <name>NextToken(p<name> lexCtx) {
    while (true) {
        if ( LA(1)==CharStream.EOF ) {
            return ANTLR_EOF_TOKEN;
        }
        ltoken  = NULL;
        tokenStartCharIndex = getCharIndex();
        text = NULL;
        try {
            int m = input.mark();
            backtracking=1; <! means we won't throw slow exception !>
            failed=false;
            mTokens();
            backtracking=0;
            <! mTokens backtracks with synpred at backtracking==2
               and we set the synpredgate to allow actions at level 1. !>
            if ( failed ) {
                input.rewind(m);
                input.consume(); <! advance one char and try again !>
            }
            else {
                return token;
            }
        }
        catch (RecognitionException re) {
            // shouldn't happen in backtracking mode, but...
            reportError(re);
            recover(re);
        }
    }
}

public void memoize(IntStream input,
        int ruleIndex,
        int ruleStartIndex)
{
    if ( backtracking>1 ) super.memoize(input, ruleIndex, ruleStartIndex);
}

public boolean alreadyParsedRule(IntStream input, int ruleIndex) {
    if ( backtracking>1 ) return super.alreadyParsedRule(input, ruleIndex);
    return false;
}
>>

filteringActionGate() ::= "backtracking==1"

/** How to generate a parser */
genericParser(  grammar,
                name,
                scopes,
                tokens,
                tokenNames,
                rules,
                numRules,
                bitsets,
                inputStreamType,
                superClass,
                ASTLabelType="pANTLR3_BASE_TREE",
                labelType,
                members
              ) ::= <<

/** \brief Table of all token names in symbolic order, mainly used for
 *         error reporting.
 */
static pANTLR3_UINT8   <name>TokenNames[]     = {
        (pANTLR3_UINT8) "\<invalid>",       /* String to print to indicate an invalid token */
        (pANTLR3_UINT8) "\<EOR>",
        (pANTLR3_UINT8) "\<DOWN>",
        (pANTLR3_UINT8) "\<UP>",
        <tokenNames:{(pANTLR3_UINT8) <it>}; separator=",\n">
       };

/* Work around invalid dfaEdge() attribute passing
 * in tree parser
 */
static TOKEN_STRUCT Token = {ANTLR3_TOKEN_INVALID, ANTLR3_EOR_TOKEN_TYPE, ANTLR3_TOKEN_DOWN, ANTLR3_TOKEN_UP };

    <@members>

    <@end>

/* Forward declare the locally static matching functions we have generated.
 */
<rules:{r | static <headerReturnType(ruleDescriptor=r.ruleDescriptor)>	<r.ruleDescriptor.name>    (p<name> ctx<if(r.ruleDescriptor.parameterScope)>, <endif><r.ruleDescriptor.parameterScope:parameterScope(scope=it)>);}; separator="\n";>
static void	<name>Free(p<name> ctx);

/* Function to initialize bitsets
 */
static	void <name>LoadFollowSets();

/* Function to destroy bitsets
 */
static	void <name>FreeFollowSets();

/* For use in tree output where we are accumulating rule labels via label += ruleRef
 * we need a function that knows how to free a return scope when the list is destroyed.
 * We cannot just use ANTLR3_FREE because in debug tracking mode, this is a macro.
 */
static	void freeScope(void * scope)
{
    ANTLR3_FREE(scope);
}

/** \brief Name of the grammar file that generated this code
 */
static unsigned char fileName[] = "<fileName>";

/** \brief Return the name of the grammar file that generated this code.
 */
static unsigned char * getGrammarFileName()
{
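
The lexer() template above emits, for every generated lexer, a <name>New constructor, a <name>Free destructor and a getGrammarFileName accessor. As a rough illustration only, here is a minimal driver sketch for a lexer generated from these templates. It assumes a grammar named T (so the generated type would be pTLexer, the header TLexer.h and the constructor TLexerNew) and assumes the contemporary ANTLR3 C runtime entry point antlr3AsciiFileStreamNew for building the input stream; none of these names appear on this page of the template and they may differ in your runtime version.

#include <stdio.h>
#include <antlr3.h>        /* ANTLR3 C runtime (assumed installed)            */
#include "TLexer.h"        /* hypothetical header generated from grammar T    */

int main(int argc, char *argv[])
{
    pANTLR3_INPUT_STREAM input;
    pTLexer              lxr;

    if (argc < 2)
    {
        fprintf(stderr, "usage: %s inputfile\n", argv[0]);
        return 1;
    }

    /* Build an 8-bit input stream over the named file; this runtime call is
     * an assumption for this era of the C runtime.
     */
    input = antlr3AsciiFileStreamNew((pANTLR3_UINT8)argv[1]);

    /* TLexerNew corresponds to the <name>New constructor produced by the
     * lexer() template: it allocates the context, wraps a base lexer around
     * the input stream and installs the generated rule-matching functions.
     */
    lxr = TLexerNew(input);
    if (lxr == (pTLexer)ANTLR3_ERR_NOMEM)
    {
        fprintf(stderr, "out of memory creating lexer\n");
        return 1;
    }

    /* getGrammarFileName and free are installed by the template. */
    printf("lexer generated from %s\n", (char *)lxr->getGrammarFileName());

    lxr->free(lxr);        /* <name>Free from the template                    */
    input->close(input);   /* close member on the input stream (assumed)      */

    return 0;
}

The template's own error convention (returning (p<name>)ANTLR3_ERR_NOMEM rather than NULL) is mirrored in the allocation check above; later runtime versions changed both the stream constructors and the error reporting, so treat this as a sketch rather than a recipe.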
