
📄 goclparser.java

📁 UML design & testing tool
💻 JAVA
📖 Page 1 of 3
			// ... continuation of the preceding rule's lookahead switch
			// (the start of this method is not shown on this page)
			case LESS: case GREATER: case LESS_EQUAL: case GREATER_EQUAL:
			case PLUS: case MINUS: case STAR: case SLASH: case LITERAL_div:
			case ARROW: case DOT: case BAR: case LITERAL_then: case LITERAL_else:
			case LITERAL_endif: case RBRACE: case DOTDOT:
			{
				break;
			}
			default:
			{
				throw new NoViableAltException(LT(1), getFilename());
			}
			}
			}
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_16);
		}
		return n;
	}

	// typeExpression: ( oclAsType | oclIsKindOf | oclIsTypeOf ) '(' type ')'
	public final ASTTypeArgExpression typeExpression(
		ASTExpression source, boolean followsArrow
	) throws RecognitionException, TokenStreamException {
		ASTTypeArgExpression n;
		ASTType t = null; n = null;
		try {      // for error handling
			MyToken opToken = (MyToken) LT(1);
			switch (LA(1)) {
			case LITERAL_oclAsType:   match(LITERAL_oclAsType);   break;
			case LITERAL_oclIsKindOf: match(LITERAL_oclIsKindOf); break;
			case LITERAL_oclIsTypeOf: match(LITERAL_oclIsTypeOf); break;
			default:
				throw new NoViableAltException(LT(1), getFilename());
			}
			match(LPAREN);
			t = type();
			match(RPAREN);
			n = new ASTTypeArgExpression(opToken, source, t, followsArrow);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_16);
		}
		return n;
	}

	// elemVarsDeclaration: idList ( ':' type )?
	public final ASTElemVarsDeclaration elemVarsDeclaration() throws RecognitionException, TokenStreamException {
		ASTElemVarsDeclaration n;
		List idList; ASTType t = null; n = null;
		try {      // for error handling
			idList = idList();
			switch (LA(1)) {
			case COLON:
				match(COLON);
				t = type();
				break;
			case BAR:
			case SEMI:
				break;
			default:
				throw new NoViableAltException(LT(1), getFilename());
			}
			n = new ASTElemVarsDeclaration(idList, t);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_23);
		}
		return n;
	}

	// variableInitialization: IDENT ':' type '=' expression
	public final ASTVariableInitialization variableInitialization() throws RecognitionException, TokenStreamException {
		ASTVariableInitialization n;
		Token name = null;
		ASTType t; ASTExpression e; n = null;
		try {      // for error handling
			name = LT(1);
			match(IDENT);
			match(COLON);
			t = type();
			match(EQUAL);
			e = expression();
			n = new ASTVariableInitialization((MyToken) name, t, e);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_24);
		}
		return n;
	}

	// collectionLiteral: ( 'Set' | 'Sequence' | 'Bag' ) '{' collectionItem ( ',' collectionItem )* '}'
	public final ASTCollectionLiteral collectionLiteral() throws RecognitionException, TokenStreamException {
		ASTCollectionLiteral n;
		ASTCollectionItem ci; n = null;
		try {      // for error handling
			MyToken op = (MyToken) LT(1);
			switch (LA(1)) {
			case LITERAL_Set:      match(LITERAL_Set);      break;
			case LITERAL_Sequence: match(LITERAL_Sequence); break;
			case LITERAL_Bag:      match(LITERAL_Bag);      break;
			default:
				throw new NoViableAltException(LT(1), getFilename());
			}
			n = new ASTCollectionLiteral(op);
			match(LBRACE);
			ci = collectionItem();
			n.addItem(ci);
			_loop71:
			do {
				if ((LA(1) == COMMA)) {
					match(COMMA);
					ci = collectionItem();
					n.addItem(ci);
				}
				else {
					break _loop71;
				}
			} while (true);
			match(RBRACE);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_16);
		}
		return n;
	}

	// emptyCollectionLiteral: 'oclEmpty' '(' collectionType ')'
	public final ASTEmptyCollectionLiteral emptyCollectionLiteral() throws RecognitionException, TokenStreamException {
		ASTEmptyCollectionLiteral n;
		ASTType t = null; n = null;
		try {      // for error handling
			match(LITERAL_oclEmpty);
			match(LPAREN);
			t = collectionType();
			match(RPAREN);
			n = new ASTEmptyCollectionLiteral(t);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_16);
		}
		return n;
	}

	// undefinedLiteral: 'oclUndefined' '(' type ')'
	public final ASTUndefinedLiteral undefinedLiteral() throws RecognitionException, TokenStreamException {
		ASTUndefinedLiteral n;
		ASTType t = null; n = null;
		try {      // for error handling
			match(LITERAL_oclUndefined);
			match(LPAREN);
			t = type();
			match(RPAREN);
			n = new ASTUndefinedLiteral(t);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_16);
		}
		return n;
	}

	// tupleLiteral: 'Tuple' '{' tupleItem ( ',' tupleItem )* '}'
	public final ASTTupleLiteral tupleLiteral() throws RecognitionException, TokenStreamException {
		ASTTupleLiteral n;
		ASTTupleItem ti; n = null; List tiList = new ArrayList();
		try {      // for error handling
			match(LITERAL_Tuple);
			match(LBRACE);
			ti = tupleItem();
			tiList.add(ti);
			_loop78:
			do {
				if ((LA(1) == COMMA)) {
					match(COMMA);
					ti = tupleItem();
					tiList.add(ti);
				}
				else {
					break _loop78;
				}
			} while (true);
			match(RBRACE);
			n = new ASTTupleLiteral(tiList);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_16);
		}
		return n;
	}

	// collectionItem: expression ( '..' expression )?
	public final ASTCollectionItem collectionItem() throws RecognitionException, TokenStreamException {
		ASTCollectionItem n;
		ASTExpression e; n = new ASTCollectionItem();
		try {      // for error handling
			e = expression();
			n.setFirst(e);
			switch (LA(1)) {
			case DOTDOT:
				match(DOTDOT);
				e = expression();
				n.setSecond(e);
				break;
			case COMMA:
			case RBRACE:
				break;
			default:
				throw new NoViableAltException(LT(1), getFilename());
			}
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_25);
		}
		return n;
	}

	// collectionType: ( 'Collection' | 'Set' | 'Sequence' | 'Bag' ) '(' type ')'
	public final ASTCollectionType collectionType() throws RecognitionException, TokenStreamException {
		ASTCollectionType n;
		ASTType elemType = null; n = null;
		try {      // for error handling
			MyToken op = (MyToken) LT(1);
			switch (LA(1)) {
			case LITERAL_Collection: match(LITERAL_Collection); break;
			case LITERAL_Set:        match(LITERAL_Set);        break;
			case LITERAL_Sequence:   match(LITERAL_Sequence);   break;
			case LITERAL_Bag:        match(LITERAL_Bag);        break;
			default:
				throw new NoViableAltException(LT(1), getFilename());
			}
			match(LPAREN);
			elemType = type();
			match(RPAREN);
			n = new ASTCollectionType(op, elemType);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_3);
		}
		return n;
	}

	// tupleItem: IDENT ':' expression
	public final ASTTupleItem tupleItem() throws RecognitionException, TokenStreamException {
		ASTTupleItem n;
		Token name = null;
		ASTExpression e; n = null;
		try {      // for error handling
			name = LT(1);
			match(IDENT);
			match(COLON);
			e = expression();
			n = new ASTTupleItem((MyToken) name, e);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_25);
		}
		return n;
	}

	// simpleType: IDENT
	public final ASTSimpleType simpleType() throws RecognitionException, TokenStreamException {
		ASTSimpleType n;
		Token name = null;
		n = null;
		try {      // for error handling
			name = LT(1);
			match(IDENT);
			n = new ASTSimpleType((MyToken) name);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_3);
		}
		return n;
	}

	// tupleType: 'Tuple' '(' tuplePart ( ',' tuplePart )* ')'
	public final ASTTupleType tupleType() throws RecognitionException, TokenStreamException {
		ASTTupleType n;
		ASTTuplePart tp; n = null; List tpList = new ArrayList();
		try {      // for error handling
			match(LITERAL_Tuple);
			match(LPAREN);
			tp = tuplePart();
			tpList.add(tp);
			_loop87:
			do {
				if ((LA(1) == COMMA)) {
					match(COMMA);
					tp = tuplePart();
					tpList.add(tp);
				}
				else {
					break _loop87;
				}
			} while (true);
			match(RPAREN);
			n = new ASTTupleType(tpList);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_3);
		}
		return n;
	}

	// tuplePart: IDENT ':' type
	public final ASTTuplePart tuplePart() throws RecognitionException, TokenStreamException {
		ASTTuplePart n;
		Token name = null;
		ASTType t; n = null;
		try {      // for error handling
			name = LT(1);
			match(IDENT);
			match(COLON);
			t = type();
			n = new ASTTuplePart((MyToken) name, t);
		}
		catch (RecognitionException ex) {
			reportError(ex);
			consume();
			consumeUntil(_tokenSet_1);
		}
		return n;
	}

	// Printable token names, indexed by token type.
	public static final String[] _tokenNames = {
		"<0>", "EOF", "<2>", "NULL_TREE_LOOKAHEAD",
		"'('", "','", "')'", "an identifier", "':'",
		"\"let\"", "'='", "\"in\"", "\"implies\"", "\"or\"", "\"xor\"", "\"and\"",
		"'<>'", "'<'", "'>'", "'<='", "'>='",
		"'+'", "'-'", "'*'", "'/'", "\"div\"", "\"not\"",
		"'->'", "'.'", "\"allInstances\"", "'|'", "\"iterate\"",
		"';'", "'['", "']'", "'@'", "\"pre\"",
		"\"oclAsType\"", "\"oclIsKindOf\"", "\"oclIsTypeOf\"",
		"\"if\"", "\"then\"", "\"else\"", "\"endif\"",
		"\"true\"", "\"false\"", "INT", "REAL", "STRING", "'#'",
		"\"Set\"", "\"Sequence\"", "\"Bag\"", "'{'", "'}'", "'..'",
		"\"oclEmpty\"", "\"oclUndefined\"", "\"Tuple\"", "\"Collection\""
	};

	// Follow sets used by consumeUntil() for error recovery; each long encodes
	// a bit mask over the token types listed in _tokenNames.
	private static final long[] mk_tokenSet_0() { long[] data = { 2L, 0L }; return data; }
	public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());
	private static final long[] mk_tokenSet_1() { long[] data = { 96L, 0L }; return data; }
	public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());
	private static final long[] mk_tokenSet_2() { long[] data = { 5368709376L, 0L }; return data; }
	public static final BitSet _tokenSet_2 = new BitSet(mk_tokenSet_2());
	private static final long[] mk_tokenSet_3() { long[] data = { 5368710240L, 0L }; return data; }
	public static final BitSet _tokenSet_3 = new BitSet(mk_tokenSet_3());
	private static final long[] mk_tokenSet_4() { long[] data = { 54058589764978786L, 0L }; return data; }
	public static final BitSet _tokenSet_4 = new BitSet(mk_tokenSet_4());
	private static final long[] mk_tokenSet_5() { long[] data = { 54058589764982882L, 0L }; return data; }
	public static final BitSet _tokenSet_5 = new BitSet(mk_tokenSet_5());
	private static final long[] mk_tokenSet_6() { long[] data = { 54058589764991074L, 0L }; return data; }
	public static final BitSet _tokenSet_6 = new BitSet(mk_tokenSet_6());
	private static final long[] mk_tokenSet_7() { long[] data = { 54058589765007458L, 0L }; return data; }
	public static final BitSet _tokenSet_7 = new BitSet(mk_tokenSet_7());
	private static final long[] mk_tokenSet_8() { long[] data = { 54058589765040226L, 0L }; return data; }
	public static final BitSet _tokenSet_8 = new BitSet(mk_tokenSet_8());
	private static final long[] mk_tokenSet_9() { long[] data = { 54058589765106786L, 0L }; return data; }
	public static final BitSet _tokenSet_9 = new BitSet(mk_tokenSet_9());
	private static final long[] mk_tokenSet_10() { long[] data = { 54058589767072866L, 0L }; return data; }
	public static final BitSet _tokenSet_10 = new BitSet(mk_tokenSet_10());
	private static final long[] mk_tokenSet_11() { long[] data = { 54058589773364322L, 0L }; return data; }
	public static final BitSet _tokenSet_11 = new BitSet(mk_tokenSet_11());
	private static final long[] mk_tokenSet_12() { long[] data = { 54058589832084578L, 0L }; return data; }
	public static final BitSet _tokenSet_12 = new BitSet(mk_tokenSet_12());
	private static final long[] mk_tokenSet_13() { long[] data = { 964220158080L, 0L }; return data; }
	public static final BitSet _tokenSet_13 = new BitSet(mk_tokenSet_13());
	private static final long[] mk_tokenSet_14() { long[] data = { 54058633184410738L, 0L }; return data; }
	public static final BitSet _tokenSet_14 = new BitSet(mk_tokenSet_14());
	private static final long[] mk_tokenSet_15() { long[] data = { 1143914240390725362L, 0L }; return data; }
	public static final BitSet _tokenSet_15 = new BitSet(mk_tokenSet_15());
	private static final long[] mk_tokenSet_16() { long[] data = { 54058590234737762L, 0L }; return data; }
	public static final BitSet _tokenSet_16 = new BitSet(mk_tokenSet_16());
	private static final long[] mk_tokenSet_17() { long[] data = { 513394829139378832L, 0L }; return data; }
	public static final BitSet _tokenSet_17 = new BitSet(mk_tokenSet_17());
	private static final long[] mk_tokenSet_18() { long[] data = { 522402072881002480L, 0L }; return data; }
	public static final BitSet _tokenSet_18 = new BitSet(mk_tokenSet_18());
	private static final long[] mk_tokenSet_19() { long[] data = { 1152921483132010482L, 0L }; return data; }
	public static final BitSet _tokenSet_19 = new BitSet(mk_tokenSet_19());
	private static final long[] mk_tokenSet_20() { long[] data = { 567453488087301874L, 0L }; return data; }
	public static final BitSet _tokenSet_20 = new BitSet(mk_tokenSet_20());
	private static final long[] mk_tokenSet_21() { long[] data = { 576460678752108530L, 0L }; return data; }
	public static final BitSet _tokenSet_21 = new BitSet(mk_tokenSet_21());
	private static final long[] mk_tokenSet_22() { long[] data = { 522402071807260368L, 0L }; return data; }
	public static final BitSet _tokenSet_22 = new BitSet(mk_tokenSet_22());
	private static final long[] mk_tokenSet_23() { long[] data = { 5368709120L, 0L }; return data; }
	public static final BitSet _tokenSet_23 = new BitSet(mk_tokenSet_23());
	private static final long[] mk_tokenSet_24() { long[] data = { 1073741824L, 0L }; return data; }
	public static final BitSet _tokenSet_24 = new BitSet(mk_tokenSet_24());
	private static final long[] mk_tokenSet_25() { long[] data = { 18014398509482016L, 0L }; return data; }
	public static final BitSet _tokenSet_25 = new BitSet(mk_tokenSet_25());

}
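The code above is the tail end of an ANTLR 2.x-generated recursive-descent parser for OCL expressions: each public method parses one grammar rule (type expressions, collection and tuple literals, collection types, and so on) and builds the corresponding AST node, while the BitSet tables at the bottom drive error recovery via consumeUntil(). The following is only a minimal usage sketch; the lexer class name GOclLexer, the parser class name GOclParser, and the choice of expression() as the entry rule are assumptions made for illustration, since this page does not show them.

import java.io.StringReader;
import antlr.RecognitionException;
import antlr.TokenStreamException;

public class OclParseDemo {
	public static void main(String[] args) {
		try {
			// GOclLexer is an assumed name for the generated lexer that pairs with
			// this parser; ANTLR 2.x lexers accept a java.io.Reader and implement
			// antlr.TokenStream, which the generated parser constructor expects.
			GOclLexer lexer = new GOclLexer(new StringReader("Set{1..5, 7}"));
			GOclParser parser = new GOclParser(lexer);

			// expression() is referenced by the rules shown above (e.g. in
			// collectionItem()), so it exists; treating it as the top-level
			// entry rule here is an assumption.
			ASTExpression ast = parser.expression();
			System.out.println("parsed: " + (ast != null ? ast.getClass().getName() : "null"));
		} catch (RecognitionException | TokenStreamException ex) {
			// Generated rules also report and recover internally via reportError()
			// and consumeUntil(); exceptions that escape end up here.
			ex.printStackTrace();
		}
	}
}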
