tc_ruby.rb
来自「用ruby on rails写的一个博客程序,还不错..ruby on rails」· RB 代码 · 共 872 行 · 第 1/2 页
RB
872 行
require File.dirname(__FILE__) + "/tokenizer_testcase"class TC_Syntax_Ruby < TokenizerTestCase syntax "ruby" def test_empty tokenize "" assert_no_next_token end def test_constant tokenize "Foo" assert_next_token :constant, "Foo" end def test_ident tokenize "foo" assert_next_token :ident, "foo" end def test_comment_eol tokenize "# a comment\nfoo" assert_next_token :comment, "# a comment" assert_next_token :normal, "\n" assert_next_token :ident, "foo" end def test_comment_block tokenize "=begin\nthis is a comment\n=end\nnoncomment" assert_next_token :comment, "=begin\nthis is a comment\n=end" assert_next_token :normal, "\n" assert_next_token :ident, "noncomment" end def test_comment_block_with_CRNL tokenize "=begin\r\nthis is a comment\r\n=end\r\nnoncomment" assert_next_token :comment, "=begin\r\nthis is a comment\r\n=end" assert_next_token :normal, "\r\n" assert_next_token :ident, "noncomment" end def test_keyword Syntax::Ruby::KEYWORDS.each do |word| tokenize word assert_next_token :keyword, word end Syntax::Ruby::KEYWORDS.each do |word| tokenize "foo.#{word}" skip_token 2 assert_next_token :ident, word end end def test__END__ tokenize "__END__\n\nblah blah blah" assert_next_token :comment, "__END__\n\nblah blah blah" end def test__END__with_CRNL tokenize "__END__\r\nblah blah blah" assert_next_token :comment, "__END__\r\nblah blah blah" end def test_def_paren tokenize "def foo(bar)" assert_next_token :keyword, "def " assert_next_token :method, "foo" assert_next_token :punct, "(" assert_next_token :ident, "bar" assert_next_token :punct, ")" end def test_def_space tokenize "def foo bar" assert_next_token :keyword, "def " assert_next_token :method, "foo" assert_next_token :normal, " " assert_next_token :ident, "bar" end def test_def_semicolon tokenize "def foo;" assert_next_token :keyword, "def " assert_next_token :method, "foo" assert_next_token :punct, ";" end def test_def_eol tokenize "def foo" assert_next_token :keyword, "def " assert_next_token :method, "foo" 
end def test_class_space tokenize "class Foo\n" assert_next_token :keyword, "class " assert_next_token :class, "Foo" assert_next_token :normal, "\n" end def test_class_semicolon tokenize "class Foo;" assert_next_token :keyword, "class " assert_next_token :class, "Foo" assert_next_token :punct, ";" end def test_class_extend tokenize "class Foo< Bang" assert_next_token :keyword, "class " assert_next_token :class, "Foo" assert_next_token :punct, "<" assert_next_token :normal, " " assert_next_token :constant, "Bang" end def test_module_space tokenize "module Foo\n" assert_next_token :keyword, "module " assert_next_token :module, "Foo" assert_next_token :normal, "\n" end def test_module_semicolon tokenize "module Foo;" assert_next_token :keyword, "module " assert_next_token :module, "Foo" assert_next_token :punct, ";" end def test_module_other tokenize "module Foo!\n" assert_next_token :keyword, "module " assert_next_token :module, "Foo!" end def test_scope_operator tokenize "Foo::Bar" assert_next_token :constant, "Foo" assert_next_token :punct, "::" assert_next_token :constant, "Bar" end def test_symbol_dquote tokenize ':"foo"' assert_next_token :symbol, ':"' assert_next_token :symbol, '', :region_open assert_next_token :symbol, 'foo' assert_next_token :symbol, '', :region_close assert_next_token :symbol, '"' assert_no_next_token end def test_symbol_squote tokenize ":'foo'" assert_next_token :symbol, ":'" assert_next_token :symbol, "", :region_open assert_next_token :symbol, "foo" assert_next_token :symbol, "", :region_close assert_next_token :symbol, "'" assert_no_next_token end def test_symbol tokenize ":foo_123" assert_next_token :symbol, ":foo_123" tokenize ":123" assert_next_token :punct, ":" assert_next_token :number, "123" tokenize ":foo=" assert_next_token :symbol, ":foo=" tokenize ":foo!" assert_next_token :symbol, ":foo!" tokenize ":foo?" assert_next_token :symbol, ":foo?" end def test_char tokenize "?." assert_next_token :char, "?." 
tokenize '?\n' assert_next_token :char, '?\n' end def test_specials %w{__FILE__ __LINE__ true false nil self}.each do |word| tokenize word assert_next_token :constant, word end %w{__FILE__ __LINE__ true false nil self}.each do |word| tokenize "#{word}?" assert_next_token :ident, "#{word}?" end %w{__FILE__ __LINE__ true false nil self}.each do |word| tokenize "#{word}!" assert_next_token :ident, "#{word}!" end %w{__FILE__ __LINE__ true false nil self}.each do |word| tokenize "x.#{word}" skip_token 2 assert_next_token :ident, word end end def test_pct_r tokenize '%r{foo#{x}bar}' assert_next_token :punct, "%r{" assert_next_token :regex, "", :region_open assert_next_token :regex, "foo" assert_next_token :expr, '#{x}' assert_next_token :regex, "bar" assert_next_token :regex, "", :region_close assert_next_token :punct, "}" tokenize '%r-foo#{x}bar-' assert_next_token :punct, "%r-" assert_next_token :regex, "", :region_open assert_next_token :regex, "foo" assert_next_token :expr, '#{x}' assert_next_token :regex, "bar" assert_next_token :regex, "", :region_close assert_next_token :punct, "-" end def test_pct_r_with_wakas tokenize '%r<foo#{x}bar> foo' assert_next_token :punct, "%r<" assert_next_token :regex, "", :region_open assert_next_token :regex, "foo" assert_next_token :expr, '#{x}' assert_next_token :regex, "bar" assert_next_token :regex, "", :region_close assert_next_token :punct, ">" assert_next_token :normal, " " assert_next_token :ident, "foo" end def test_pct_w_brace tokenize '%w{foo bar baz}' assert_next_token :punct, "%w{" assert_next_token :string, '', :region_open assert_next_token :string, 'foo bar baz' assert_next_token :string, '', :region_close assert_next_token :punct, "}" end def test_pct_w tokenize '%w-foo#{x} bar baz-' assert_next_token :punct, "%w-" assert_next_token :string, '', :region_open assert_next_token :string, 'foo#{x} bar baz' assert_next_token :string, '', :region_close assert_next_token :punct, "-" end def test_pct_q tokenize '%q-hello 
#{world}-' assert_next_token :punct, "%q-" assert_next_token :string, '', :region_open assert_next_token :string, 'hello #{world}' assert_next_token :string, '', :region_close assert_next_token :punct, "-" end def test_pct_s tokenize '%s-hello #{world}-' assert_next_token :punct, "%s-" assert_next_token :symbol, '', :region_open assert_next_token :symbol, 'hello #{world}' assert_next_token :symbol, '', :region_close assert_next_token :punct, "-" end def test_pct_W tokenize '%W-foo#{x} bar baz-' assert_next_token :punct, "%W-" assert_next_token :string, '', :region_open assert_next_token :string, 'foo' assert_next_token :expr, '#{x}' assert_next_token :string, ' bar baz' assert_next_token :string, '', :region_close assert_next_token :punct, "-" end def test_pct_Q tokenize '%Q-hello #{world}-' assert_next_token :punct, "%Q-" assert_next_token :string, '', :region_open assert_next_token :string, 'hello ' assert_next_token :expr, '#{world}' assert_next_token :string, '', :region_close assert_next_token :punct, "-" end def test_pct_x tokenize '%x-ls /blah/#{foo}-' assert_next_token :punct, "%x-" assert_next_token :string, '', :region_open assert_next_token :string, 'ls /blah/' assert_next_token :expr, '#{foo}' assert_next_token :string, '', :region_close assert_next_token :punct, "-" end def test_pct_string tokenize '%-hello #{world}-' assert_next_token :punct, "%-" assert_next_token :string, '', :region_open assert_next_token :string, 'hello ' assert_next_token :expr, '#{world}' assert_next_token :string, '', :region_close assert_next_token :punct, "-" end def test_bad_pct_string tokenize '%0hello #{world}0' assert_next_token :punct, "%" assert_next_token :number, '0' assert_next_token :ident, 'hello' assert_next_token :normal, ' ' assert_next_token :comment, '#{world}0' end def test_shift_left tokenize 'foo << 5' assert_next_token :ident, "foo" assert_next_token :normal, " " assert_next_token :punct, "<<" assert_next_token :normal, " " assert_next_token :number, "5" 
end def test_shift_left_no_white tokenize 'foo<<5' assert_next_token :ident, "foo" assert_next_token :punct, "<<" assert_next_token :number, "5" end def test_here_doc_no_opts tokenize "foo <<EOF\n foo\n bar\n baz\nEOF" assert_next_token :ident, "foo" assert_next_token :normal, " " assert_next_token :punct, "<<" assert_next_token :constant, "EOF" assert_next_token :string, "", :region_open assert_next_token :string, "\n foo\n bar\n baz\n" assert_next_token :string, "", :region_close assert_next_token :constant, "EOF" end def test_here_doc_no_opts_missing_end tokenize "foo <<EOF\n foo\n bar\n baz\n EOF" assert_next_token :ident, "foo" assert_next_token :normal, " " assert_next_token :punct, "<<" assert_next_token :constant, "EOF" assert_next_token :string, "", :region_open assert_next_token :string, "\n foo\n bar\n baz\n EOF" assert_no_next_token end def test_here_doc_float_right tokenize "foo <<-EOF\n foo\n bar\n baz\n EOF" assert_next_token :ident, "foo" assert_next_token :normal, " " assert_next_token :punct, "<<-" assert_next_token :constant, "EOF" assert_next_token :string, "", :region_open assert_next_token :string, "\n foo\n bar\n baz\n" assert_next_token :string, "", :region_close assert_next_token :constant, " EOF" end def test_here_doc_single_quotes tokenize "foo <<'EOF'\n foo\#{x}\n bar\n baz\nEOF" assert_next_token :ident, "foo" assert_next_token :normal, " " assert_next_token :punct, "<<'" assert_next_token :constant, "EOF" assert_next_token :punct, "'" assert_next_token :string, "", :region_open assert_next_token :string, "\n foo\#{x}\n bar\n baz\n" assert_next_token :string, "", :region_close assert_next_token :constant, "EOF" end def test_here_doc_double_quotes tokenize "foo <<\"EOF\"\n foo\#{x}\n bar\n baz\nEOF" assert_next_token :ident, "foo" assert_next_token :normal, " " assert_next_token :punct, "<<\"" assert_next_token :constant, "EOF" assert_next_token :punct, "\"" assert_next_token :string, "", :region_open assert_next_token :string, "\n foo" 
assert_next_token :expr, '#{x}' assert_next_token :string, "\n bar\n baz\n" assert_next_token :string, "", :region_close assert_next_token :constant, "EOF" end def test_space tokenize "\n \t\t\n\n\r\n" assert_next_token :normal, "\n \t\t\n\n\r\n" end def test_number tokenize "1 1.0 1e5 1.0e5 1_2.5 1_2.5_2 1_2.5_2e3_2" assert_next_token :number, "1" skip_token assert_next_token :number, "1.0" skip_token assert_next_token :number, "1e5" skip_token assert_next_token :number, "1.0e5" skip_token assert_next_token :number, "1_2.5" skip_token assert_next_token :number, "1_2.5_2" skip_token assert_next_token :number, "1_2.5_2e3_2" end def test_dquoted_string tokenize '"foo #{x} bar\"\n\tbaz\xA5b\5\1234"' assert_next_token :punct, '"'
⌨️ 快捷键说明
复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?