📄 ruby.stg
字号:
/* [The "BSD license"]
 Copyright (c) 2006 Martin Traverso
 All rights reserved.

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions
 are met:
 1. Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.
 2. Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.
 3. The name of the author may not be used to endorse or promote products
    derived from this software without specific prior written permission.

 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

group Ruby implements ANTLRCore;

/** The overall file structure of a recognizer; stores methods for rules
 *  and cyclic DFAs plus support code.
 */
outputFile(LEXER,PARSER,TREE_PARSER, actionScope, actions,
           docComment, recognizer,
           name, tokens, tokenNames, rules, cyclicDFAs,
           bitsets, buildTemplate, buildAST, rewrite, profile,
           backtracking, synpreds, memoize, numRules,
           fileName, ANTLRVersion, generatedTimestamp, trace,
           scopes, superClass, literals) ::=
<<
# <name> (<fileName>)
# Generated by ANTLR <ANTLRVersion> on <generatedTimestamp>

<docComment>
<recognizer>
>>

/**
 * Inherits parameters from outputFile(...)
 *
 * labelType is not used for Ruby (no explicit type declarations)
 */
lexer(grammar, name, tokens, scopes, rules, numRules, labelType, filterMode) ::=
<<
class <name>
    require 'stringio'

    <tokens:{<it.name>=<it.type>}; separator="\n">

    def initialize(input)
        input = StringIO.new(input) if input.respond_to?(:to_str)
        @input = CharStream.new(input)
        @backtracking = 0
        @failed = false

        <actions.lexer.init>
    end

    def next_token
        @token = nil

        if <LA(1)> == :EOF
            <if(trace)>
            puts "Token => EOF"
            <endif>
            return :EOF
        end

        match_Tokens()
        <if(trace)>
        puts "Token => #{@token.token_type} ('#{@token.text}')"
        <endif>
        return @token
    end

    class Token
        attr_reader :token_type
        attr_reader :int_type
        attr_reader :line
        attr_reader :pos
        attr_reader :text
        attr_reader :channel

        def initialize(token_type, int_type, line, pos, text, channel = nil)
            @token_type = token_type
            @int_type = int_type
            @line = line
            @pos = pos
            @text = text
            @channel = channel
        end

        alias :to_i :int_type
    end

    <actions.lexer.members>

    private

    class CharStream
        attr_reader :line
        attr_reader :column
        attr_reader :index

        def initialize(input)
            @buffer = ""
            @input = input
            @line = 1
            @column = 0

            @index = 0;
        end

        # returns a Fixnum between 0 and 0xFFFF or :EOF
        def look_ahead(pos)
            offset = @index + pos - 1
            if @buffer.length \< offset + 1
                char = @input.read(offset + 1 - @buffer.length)
                @buffer \<\< char if not char.nil?
            end

            if offset \< @buffer.length
                @buffer[offset]
            else
                :EOF
            end
        end

        def mark
            @state = { :index => @index, :line => @line, :column => @column }
            return 0
        end

        def rewind(marker)
            @index = @state[:index]
            @line = @state[:line]
            @column = @state[:column]
        end

        def consume
            look_ahead(1) # force a read from the input if necessary

            @column = @column + 1
            if @buffer[@index] == ?\n
                @line = @line + 1
                @column = 0
            end

            @index = @index + 1
        end

        def substring(start, stop)
            @buffer.slice(start, stop - start + 1)
        end
    end

    def match(value = nil)
        @failed = false
        case
            when value.nil?
                @input.consume()
            when value.respond_to?(:to_str)
                catch(:done) do
                    value.each_byte do |c|
                        @failed ||= !(<isolatedLookaheadTest(atom="c", k=1)>)
                        @input.consume() if !@failed
                        throw :done if @failed
                    end
                end
            else
                @failed = !(<isolatedLookaheadTest(atom="value", k=1)>)
                @input.consume() if !@failed
        end

        if @failed && @backtracking \<= 0
            raise "Expected #{value.respond_to?(:chr) ? value.chr : value}"
        end
    end

    def match_range(from, to)
        char = <LA(1)>

        # char must lie inside [from, to]; a || here would accept any char
        if char \>= from && char \<= to
            @failed = false
            match()
        elsif @backtracking > 0
            @failed = true
        else
            raise "Expected [#{from.chr}..#{to.chr}]"
        end
    end

    <rules; separator="\n\n">

    <synpreds: synpred(); separator="\n\n">

    <dfaClass()>
    <cyclicDFAs: cyclicDFA()>
end
>>

/**
 * Inherits parameters from outputFile(...)
 */
parser(grammar, name, scopes, tokens, tokenNames, rules, numRules, bitsets, ASTLabelType, superClass, labelType, members) ::=
<<
require '<grammar.name>Lexer'

class <name>
    attr_reader :lexer

    TOKENS = [
        <tokenNames: {[<it>, <i>]}; separator=",\n">
    ].inject({}) { |hash, pair|
        name = pair[0]
        index = pair[1] + 3 # hardcoded for now... no way to get this value from ANTLR

        if name[0] == ?'
            hash[:"T#{index}"] = index
        else
            hash[:"#{name}"] = index
        end

        hash
    }

    TOKENS[:EOF] = -1

    def initialize(input)
        if input.respond_to?(:to_str) || input.respond_to?(:read)
            input = <grammar.name>Lexer.new(input)
        end

        @lexer = input
        @input = TokenStream.new(input)
        @backtracking = 0
        @failed = false

        <actions.parser.init>
        <if(trace)>
        @indent = 0
        <endif>
    end

    <rules; separator="\n\n">

    <actions.parser.members>

    private

    class TokenStream
        attr_reader :index

        def initialize(input)
            @buffer = []
            @input = input
            @channel = nil

            @index = 0;
        end

        # returns a Token
        def look_ahead(pos)
            offset = @index + pos - 1

            while @buffer[-1] != :EOF && @buffer.length \< offset + 1
                token = @input.next_token
                if token == :EOF || token.channel == @channel
                    @buffer \<\< token
                end
            end

            offset = -1 if offset >= @buffer.length

            if offset \< @buffer.length
                @buffer[offset]
            end
        end

        def mark
            @state = { :index => @index }
            return 0
        end

        def rewind(marker)
            @index = @state[:index]
        end

        def consume
            look_ahead(1) # force a read from the input if necessary
            @index = @index + 1
        end
    end

    def match(token = nil)
        if token.nil? || <LA(1)> == token
            @input.consume
            @failed = false
            return
        elsif @backtracking > 0
            @failed = true
        else
            raise "Expected #{token}"
        end
    end

    def look_ahead(k)
        token = @input.look_ahead(k)
        if token != :EOF
            token = token.token_type
        end

        token
    end

    <synpreds: synpred(); separator="\n\n">

    <dfaClass()>
    <cyclicDFAs: cyclicDFA()>
end
>>

/** How to generate a tree parser; same as parser except the input
 *  stream is a different type.
 */
treeParser(grammar, name, scopes, tokens, tokenNames, globalAction, rules, numRules, bitsets, labelType, ASTLabelType, superClass, members) ::=
<<
raise "treeParser not implemented"
>>

/** A simpler version of a rule template that is specific to the imaginary
 *  rules created for syntactic predicates.  As they never have return values
 *  nor parameters etc..., just give simplest possible method.  Don't do
 *  any of the normal memoization stuff in here either; it's a waste.
 *  As predicates cannot be inlined into the invoking rule, they need to
 *  be in a rule by themselves.
 */
synpredRule(ruleName, ruleDescriptor, block, description, nakedBlock) ::=
<<
# <description>
def <ruleName>_fragment
    <block>
end
>>

/** How to generate code for a rule.  This includes any return type
 *  data aggregates required for multiple return values.
 */
rule(ruleName,ruleDescriptor,block,emptyRule,description,exceptions,finally,memoize) ::=
<<
# <description>
def <ruleName>(<ruleDescriptor.parameterScope:parameterScope(scope=it)>)
    <if(trace)>
    puts " " * @indent + "<ruleName>"
    @indent += 1
    <endif>
    <ruleDescriptor.actions.init>
    <if(!ruleDescriptor.isSynPred)>
    <if(ruleDescriptor.hasReturnValue)>
    <if(ruleDescriptor.hasSingleReturnValue)>
    <ruleDescriptor.singleValueReturnName> = nil
    <endif>
    <endif>
    <endif>

    <block>

    <if(trace)>
    @indent -= 1
    <endif>
    <if(!ruleDescriptor.isSynPred)>
    <if(ruleDescriptor.hasReturnValue)>
    <if(ruleDescriptor.hasSingleReturnValue)>
    <ruleDescriptor.singleValueReturnName>
    <endif>
    <endif>
    <endif>
end
>>

/** How to generate a rule in the lexer; naked blocks are used for
 *  fragment rules.
 */
lexerRule(ruleName,nakedBlock,ruleDescriptor,block,memoize) ::=
<<
def match_<ruleName>(<ruleDescriptor.parameterScope:parameterScope(scope=it)>)
    <ruleDescriptor.actions.init>
    <if(nakedBlock)>
    <block><\n>
    <else>
    start = @input.index
    line = @input.line
    column = @input.column
    channel = nil

    <block>

    if @token.nil?
        text = @input.substring(start, @input.index - 1)
        @token = Token.new(:<ruleName>, <ruleName>, line, column, text, channel)
    end<\n>
    <endif>
end
>>

/** How to generate code for the implicitly-defined lexer grammar rule
 *  that chooses between lexer rules.
 */
tokensRule(ruleName,nakedBlock,args,block,ruleDescriptor) ::=
<<
<lexerRule(...)>
>>

filteringNextToken() ::=
<<
raise "filteringNextToken not implemented"
>>

filteringActionGate() ::=
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -