tc_ruby.rb

From "A blog application written with Ruby on Rails — not bad… ruby on rails" · Ruby code · 872 lines total · page 1 of 2

RB
872
Font size
    assert_next_token :string, '', :region_open    assert_next_token :string, 'foo '    assert_next_token :expr, '#{x}'    assert_next_token :string, ' bar'    assert_next_token :escape, '\"\n\t'    assert_next_token :string, 'baz'    assert_next_token :escape, '\xA5'    assert_next_token :string, 'b'    assert_next_token :escape, '\5\123'    assert_next_token :string, '4'    assert_next_token :string, '', :region_close    assert_next_token :punct, '"'  end  def test_squoted_string    tokenize '\'foo #{x} bar\\\'\n\tbaz\\\\\xA5b\5\1234\''    assert_next_token :punct, "'"    assert_next_token :string, "", :region_open    assert_next_token :string, 'foo #{x} bar'    assert_next_token :escape, '\\\''    assert_next_token :string, '\n\tbaz'    assert_next_token :escape, '\\\\'    assert_next_token :string, '\xA5b\5\1234'    assert_next_token :string, "", :region_close    assert_next_token :punct, "'"  end  def test_dot_selector    tokenize 'foo.nil'    skip_token    assert_next_token :punct, "."    assert_next_token :ident, "nil"  end  def test_dot_range_inclusive    tokenize 'foo..nil'    skip_token    assert_next_token :punct, ".."    assert_next_token :constant, "nil"  end  def test_dot_range_exclusive    tokenize 'foo...nil'    skip_token    assert_next_token :punct, "..."    assert_next_token :constant, "nil"  end  def test_dot_range_many    tokenize 'foo.....nil'    skip_token    assert_next_token :punct, "....."    
assert_next_token :constant, "nil"  end  def test_attribute    tokenize '@var_foo'    assert_next_token :attribute, "@var_foo"  end  def test_global    tokenize '$var_foo'    assert_next_token :global, "$var_foo"    tokenize '$12'    assert_next_token :global, "$12"    tokenize '$/f'    assert_next_token :global, "$/"    tokenize "$\n"    assert_next_token :global, "$"  end  def test_paren_delimiter    tokenize '%w(a)'    assert_next_token :punct, "%w("    assert_next_token :string, "", :region_open    assert_next_token :string, "a"    assert_next_token :string, "", :region_close    assert_next_token :punct, ")"  end  def test_division    tokenize 'm / 3'    assert_next_token :ident, "m"    assert_next_token :normal, " "    assert_next_token :punct, "/"    assert_next_token :normal, " "    assert_next_token :number, "3"  end  def test_regex    tokenize 'm =~ /3/'    assert_next_token :ident, "m"    assert_next_token :normal, " "    assert_next_token :punct, "=~"    assert_next_token :normal, " "    assert_next_token :punct, "/"    assert_next_token :regex, "", :region_open    assert_next_token :regex, "3"    assert_next_token :regex, "", :region_close    assert_next_token :punct, "/"  end  def test_heredoc_with_trailing_text    tokenize "foo('here', <<EOF)\n  A heredoc.\nEOF\nfoo"    assert_next_token :ident,  "foo"    assert_next_token :punct,  "('"    assert_next_token :string, '', :region_open    assert_next_token :string, 'here'    assert_next_token :string, '', :region_close    assert_next_token :punct,  "',"    assert_next_token :normal, ' '    assert_next_token :punct,  '<<'    assert_next_token :constant, "EOF"    assert_next_token :punct,  ')'    assert_next_token :string, "", :region_open    assert_next_token :string, "\n  A heredoc.\n"    assert_next_token :string, "", :region_close    assert_next_token :constant, "EOF"    assert_next_token :normal, "\n"    assert_next_token :ident,  "foo"  end  def test_multiple_heredocs    tokenize <<'TEST'foo('here', 
<<EOF, 'there', <<-'FOO', 'blah')First heredoc, right here.Expressions are #{allowed}EOF    Another heredoc, immediately after the first.    Expressions are not #{allowed}  FOOTEST    assert_next_token :ident,  "foo"    assert_next_token :punct,  "('"    assert_next_token :string, '', :region_open    assert_next_token :string, 'here'    assert_next_token :string, '', :region_close    assert_next_token :punct,  "',"    assert_next_token :normal, ' '    assert_next_token :punct,  '<<'    assert_next_token :constant, "EOF"    assert_next_token :punct,  ','    assert_next_token :normal, ' '    assert_next_token :punct,  "'"    assert_next_token :string, '', :region_open    assert_next_token :string, 'there'    assert_next_token :string, '', :region_close    assert_next_token :punct,  "',"    assert_next_token :normal, ' '    assert_next_token :punct,  "<<-'"    assert_next_token :constant, "FOO"    assert_next_token :punct,  "',"    assert_next_token :normal, ' '    assert_next_token :punct,  "'"    assert_next_token :string, '', :region_open    assert_next_token :string, 'blah'    assert_next_token :string, '', :region_close    assert_next_token :punct,  "')"    assert_next_token :string, "", :region_open    assert_next_token :string, "\nFirst heredoc, right here.\nExpressions are "    assert_next_token :expr, '#{allowed}'    assert_next_token :string, "\n"    assert_next_token :string, "", :region_close    assert_next_token :constant, "EOF"    assert_next_token :string, "", :region_open    assert_next_token :string, "\n    Another heredoc, immediately after the first.\n    Expressions are not \#{allowed}\n"    assert_next_token :string, "", :region_close    assert_next_token :constant, "  FOO"  end  def test_carldr_bad_heredoc_001    tokenize <<'TEST'str = <<ENDhere document #{1 + 1}ENDif strTEST    assert_next_token :ident, "str"    assert_next_token :normal, " "    assert_next_token :punct, "="    assert_next_token :normal, " "    assert_next_token :punct, "<<"    
assert_next_token :constant, "END"    assert_next_token :string, "", :region_open    assert_next_token :string, "\nhere document "    assert_next_token :expr, '#{1 + 1}'    assert_next_token :string, "\n"    assert_next_token :string, "", :region_close    assert_next_token :constant, "END"    assert_next_token :normal, "\n\n"    assert_next_token :keyword, "if"    assert_next_token :normal, " "    assert_next_token :ident, "str"  end  def test_regex_after_keyword    tokenize "when /[0-7]/\nfoo"    assert_next_token :keyword, "when"    assert_next_token :normal, " "    assert_next_token :punct, "/"    assert_next_token :regex, "", :region_open    assert_next_token :regex, "[0-7]"    assert_next_token :regex, "", :region_close    assert_next_token :punct, "/"    assert_next_token :normal, "\n"    assert_next_token :ident, "foo"  end  def test_heredoc_after_lparen    tokenize "foo(<<SRC, obj)\nblah blah\nSRC\nfoo"    assert_next_token :ident, "foo"    assert_next_token :punct, "(<<"    assert_next_token :constant, "SRC"    assert_next_token :punct, ","    assert_next_token :normal, " "    assert_next_token :ident, "obj"    assert_next_token :punct, ")"    assert_next_token :string, "", :region_open    assert_next_token :string, "\nblah blah\n"    assert_next_token :string, "", :region_close    assert_next_token :constant, "SRC"    assert_next_token :normal, "\n"    assert_next_token :ident, "foo"  end  def test_division_after_parens    tokenize "(a+b)/2"    assert_next_token :punct, "("    assert_next_token :ident, "a"    assert_next_token :punct, "+"    assert_next_token :ident, "b"    assert_next_token :punct, ")/"    assert_next_token :number, "2"  end  def test_heredoc_with_CRNL    tokenize "foo <<SRC\r\nSome text\r\nSRC\r\nfoo"    assert_next_token :ident, "foo"    assert_next_token :normal, " "    assert_next_token :punct, "<<"    assert_next_token :constant, "SRC"    assert_next_token :string, "", :region_open    assert_next_token :string, "\r\nSome text\r\n"   
 assert_next_token :string, "", :region_close    assert_next_token :constant, "SRC"    assert_next_token :normal, "\r\n"    assert_next_token :ident, "foo"  end  def test_question_mark_at_newline    tokenize "foo ?\n 'bar': 'baz'"    assert_next_token :ident, "foo"    assert_next_token :normal, " "    assert_next_token :punct, "?"    assert_next_token :normal, "\n "    assert_next_token :punct, "'"    assert_next_token :string, "", :region_open    assert_next_token :string, "bar"    assert_next_token :string, "", :region_close    assert_next_token :punct, "':"    assert_next_token :normal, " "    assert_next_token :punct, "'"    assert_next_token :string, "", :region_open    assert_next_token :string, "baz"    assert_next_token :string, "", :region_close    assert_next_token :punct, "'"  end  def test_question_mark_and_escaped_newline    tokenize "foo ?\\\n 'bar': 'baz'"    assert_next_token :ident, "foo"    assert_next_token :normal, " "    assert_next_token :punct, "?\\"    assert_next_token :normal, "\n "    assert_next_token :punct, "'"    assert_next_token :string, "", :region_open    assert_next_token :string, "bar"    assert_next_token :string, "", :region_close    assert_next_token :punct, "':"    assert_next_token :normal, " "    assert_next_token :punct, "'"    assert_next_token :string, "", :region_open    assert_next_token :string, "baz"    assert_next_token :string, "", :region_close    assert_next_token :punct, "'"  end  def test_highlighted_subexpression    tokenizer.set :expressions => :highlight    tokenize '"la la #{["hello", "world"].each { |f| puts "string #{f}" }}"'    assert_next_token :punct, '"'    assert_next_token :string, "", :region_open    assert_next_token :string, "la la "    assert_next_token :expr, "", :region_open    assert_next_token :expr, '#{'    assert_next_token :punct, '["'    assert_next_token :string, "", :region_open    assert_next_token :string, 'hello'    assert_next_token :string, "", :region_close    assert_next_token 
:punct, '",'    assert_next_token :normal, ' '    assert_next_token :punct, '"'    assert_next_token :string, "", :region_open    assert_next_token :string, "world"    assert_next_token :string, "", :region_close    assert_next_token :punct, '"].'    assert_next_token :ident, 'each'    assert_next_token :normal, ' '    assert_next_token :punct, '{'    assert_next_token :normal, ' '    assert_next_token :punct, '|'    assert_next_token :ident, 'f'    assert_next_token :punct, '|'    assert_next_token :normal, ' '    assert_next_token :ident, 'puts'    assert_next_token :normal, ' '    assert_next_token :punct, '"'    assert_next_token :string, "", :region_open    assert_next_token :string, "string "    assert_next_token :expr, "", :region_open    assert_next_token :expr, '#{'    assert_next_token :ident, 'f'    assert_next_token :expr, '}'    assert_next_token :expr, "", :region_close    assert_next_token :string, "", :region_close    assert_next_token :punct, '"'    assert_next_token :normal, ' '    assert_next_token :punct, '}'    assert_next_token :expr, '}'    assert_next_token :expr, "", :region_close    assert_next_token :string, "", :region_close    assert_next_token :punct, '"'  end  def test_expr_in_braces    tokenize '"#{f}"'    assert_next_token :punct, '"'    assert_next_token :string, "", :region_open    assert_next_token :expr, '#{f}'    assert_next_token :string, "", :region_close    assert_next_token :punct, '"'  end  def test_expr_in_braces_with_nested_braces    tokenize '"#{loop{break}}"'    assert_next_token :punct, '"'    assert_next_token :string, "", :region_open    assert_next_token :expr, '#{loop{break}}'    assert_next_token :string, "", :region_close    assert_next_token :punct, '"'  end  def test_expr_with_global_var    tokenize '"#$f"'    assert_next_token :punct, '"'    assert_next_token :string, "", :region_open    assert_next_token :expr, '#$f'    assert_next_token :string, "", :region_close    assert_next_token :punct, '"'  end  def 
test_expr_with_instance_var    tokenize '"#@f"'    assert_next_token :punct, '"'    assert_next_token :string, "", :region_open    assert_next_token :expr, '#@f'    assert_next_token :string, "", :region_close    assert_next_token :punct, '"'  end  def test_expr_with_class_var    tokenize '"#@@f"'    assert_next_token :punct, '"'    assert_next_token :string, "", :region_open    assert_next_token :expr, '#@@f'    assert_next_token :string, "", :region_close    assert_next_token :punct, '"'  end  def test_qmark_space    tokenize "? "    assert_next_token :punct, "?"    assert_next_token :normal, " "  end  def test_capitalized_method    tokenize "obj.Foo"    skip_token 2    assert_next_token :ident, "Foo"  end  def test_hexadecimal_literal    tokenize "0xDEADbeef 0X1234567890ABCDEFG"    assert_next_token :number, "0xDEADbeef"    skip_token    assert_next_token :number, "0X1234567890ABCDEF"    assert_next_token :constant, "G"  end  def test_binary_literal    tokenize "0b2 0b0 0b101 0B123"    assert_next_token :number, "0"    assert_next_token :ident, "b2"    skip_token    assert_next_token :number, "0b0"    skip_token    assert_next_token :number, "0b101"    skip_token    assert_next_token :number, "0B123"  end  def test_octal_literal    tokenize "0o9 0o12345670abc 0O12345678"    assert_next_token :number, "0"    assert_next_token :ident, "o9"    skip_token    assert_next_token :number, "0o12345670"    assert_next_token :ident, "abc"    skip_token    assert_next_token :number, "0O12345678"  end  def test_decimal_literal    tokenize "0dA 0d1234567890abc 0D1234567890"    assert_next_token :number, "0"    assert_next_token :ident, "dA"    skip_token    assert_next_token :number, "0d1234567890"    assert_next_token :ident, "abc"    skip_token    assert_next_token :number, "0D1234567890"  endend

⌨️ Keyboard shortcuts

Copy code: Ctrl + C
Search code: Ctrl + F
Full-screen mode: F11
Increase font size: Ctrl + =
Decrease font size: Ctrl + -
Show shortcuts: ?