
📄 ColumnDefinitionNode.java

📁 Apache Derby database source code; a useful reference.
💻 JAVA
📖 Page 1 of 2
        }

        // Now validate the default
        validateDefault(dd, td);
    }

    /**
     * Check the validity of the autoincrement values for this node.
     * The following errors are thrown by this routine.
     * 1. 42z21 Invalid Increment; i.e. 0.
     * 2. 42z22 Invalid Type; autoincrement created on a non-exact-numeric type
     * 3. 42995 The requested function does not apply to global temporary tables
     *
     * @param dd        DataDictionary.
     * @param td        table descriptor.
     * @param tableType base table or declared global temporary table.
     *
     * @exception StandardException if autoincrement default is incorrect; i.e.
     *            if increment is 0 or if initial or increment values are out
     *            of range for the datatype.
     */
    public void validateAutoincrement(DataDictionary dd, TableDescriptor td, int tableType)
         throws StandardException
    {
        if (isAutoincrement == false)
            return;

        if (tableType == TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE)
            throw StandardException.newException(SQLState.LANG_NOT_ALLOWED_FOR_DECLARED_GLOBAL_TEMP_TABLE);

        if (autoincrementIncrement == 0)
            throw StandardException.newException(SQLState.LANG_AI_INVALID_INCREMENT, getColumnName());

        int jdbctype = dataTypeServices.getTypeId().getJDBCTypeId();
        switch (jdbctype)
        {
        case Types.TINYINT:
            autoincrementCheckRange((long)Byte.MIN_VALUE,
                                    (long)Byte.MAX_VALUE,
                                    TypeId.TINYINT_NAME);
            break;
        case Types.SMALLINT:
            autoincrementCheckRange((long)Short.MIN_VALUE,
                                    (long)Short.MAX_VALUE,
                                    TypeId.SMALLINT_NAME);
            break;
        case Types.INTEGER:
            autoincrementCheckRange((long)Integer.MIN_VALUE,
                                    (long)Integer.MAX_VALUE,
                                    TypeId.INTEGER_NAME);
            break;
        case Types.BIGINT:
            autoincrementCheckRange(Long.MIN_VALUE, Long.MAX_VALUE,
                                    TypeId.LONGINT_NAME);
            break;
        default:
            throw StandardException.newException(SQLState.LANG_AI_INVALID_TYPE,
                                                 getColumnName());
        }
    }

    /**
     * checks to see if autoincrementIncrement and autoincrementInitial
     * are within the bounds of the type whose min and max values are
     * passed into this routine.
     */
    private void autoincrementCheckRange(long minValue, long maxValue,
                                         String typeName)
                throws StandardException
    {
        if ((minValue > autoincrementIncrement) ||
            (maxValue < autoincrementIncrement))
        {
            throw StandardException.newException(
                                 SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, typeName);
        }
        if ((minValue > autoincrementStart) ||
            (maxValue < autoincrementStart))
        {
            throw StandardException.newException(
                                 SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, typeName);
        }
    }

    /**
     * Check the validity of the default for this node.
     *
     * @param td        The TableDescriptor.
     *
     * @exception StandardException        Thrown on error
     */
    void validateDefault(DataDictionary dd, TableDescriptor td)
        throws StandardException
    {
        if (defaultNode == null)
            return;

        // Examine whether the default value is autoincrement.
        if (isAutoincrement){
            defaultInfo = createDefaultInfoOfAutoInc();
            return;
        }

        // Otherwise the default value must be a constant value.
        CompilerContext cc = getCompilerContext();

        ValueNode defaultTree = defaultNode.getDefaultTree();

        /* bind the default.
         * Verify that it does not contain any ColumnReferences or subqueries
         * and that it is type compatible with the column.
         */
        final int previousReliability = cc.getReliability();
        try
        {
            /*
                Defaults cannot have dependencies as they
                should just be constants. Code used to exist
                to handle dependencies in defaults, now this
                is under sanity to ensure no dependencies exist.
             */
            ProviderList apl = null;
            ProviderList prevAPL = null;

            if (SanityManager.DEBUG) {
                apl = new ProviderList();
                prevAPL = cc.getCurrentAuxiliaryProviderList();
                cc.setCurrentAuxiliaryProviderList(apl);
            }

            // Tell the compiler context to only allow deterministic nodes
            cc.setReliability( CompilerContext.DEFAULT_RESTRICTION );
            defaultTree = defaultTree.bindExpression(
                            (FromList) getNodeFactory().getNode(
                                C_NodeTypes.FROM_LIST,
                                getNodeFactory().doJoinOrderOptimization(),
                                getContextManager()),
                            (SubqueryList) null,
                            (Vector) null);

            TypeId columnTypeId = (TypeId) dataTypeServices.getTypeId();
            TypeId defaultTypeId = defaultTree.getTypeId();

            // Check for 'invalid default' errors (42894)
            // before checking for 'not storable' errors (42821).
            if (!defaultTypeIsValid(columnTypeId, dataTypeServices,
                    defaultTypeId, defaultTree, defaultNode.getDefaultText()))
            {
                throw StandardException.newException(
                    SQLState.LANG_DB2_INVALID_DEFAULT_VALUE,
                    this.name);
            }

            // Now check 'not storable' errors.
            if (! getTypeCompiler(columnTypeId).
                                storable(defaultTypeId, getClassFactory()))
            {
                throw StandardException.newException(SQLState.LANG_NOT_STORABLE,
                    columnTypeId.getSQLTypeName(),
                    defaultTypeId.getSQLTypeName() );
            }

            // Save off the default text
            // RESOLVEDEFAULT - Convert to constant if possible
            defaultInfo = new DefaultInfoImpl(false,
                              defaultNode.getDefaultText(),
                              defaultValue);

            if (SanityManager.DEBUG)
            {
                /* Save the APL off in the constraint node */
                if (apl.size() > 0)
                {
                    SanityManager.THROWASSERT("DEFAULT clause has unexpected dependencies");
                }
                // Restore the previous AuxiliaryProviderList
                cc.setCurrentAuxiliaryProviderList(prevAPL);
            }
        }
        finally
        {
            cc.setReliability(previousReliability);
        }
    }

    private static DefaultInfoImpl createDefaultInfoOfAutoInc(){
        return new DefaultInfoImpl(true,
                       null,
                       null);
    }

    /**
     * Check the validity of the default for this node
     *
     * @param columnType TypeId of the target column.
     * @param columnDesc Description of the type of the
     *        target column.
     * @param defaultType TypeId of the default node.
     * @param defaultNode Parsed ValueNode for the default value.
     * @param defaultText Unparsed default value (as entered
     *        by user).
     * @return True if the defaultNode abides by the restrictions
     *    imposed by DB2 on default constants; false otherwise.
     *
     */
    public boolean defaultTypeIsValid(TypeId columnType,
        DataTypeDescriptor columnDesc, TypeId defaultType,
        ValueNode defaultNode, String defaultText)
    {
        if (defaultText.length() > Limits.DB2_CHAR_MAXWIDTH)
        // DB2 spec says this isn't allowed.
            return false;

        /* We can use info about the way the parser works
         * to guide this process a little (see the getNumericNode()
         * method in sqlgrammar.jj):
         *
         * 1) Tinyint and Smallints are both parsed as "INT" types,
         *    while integers larger than a basic "INT" are parsed into
         *    "LONGINT" or, if needed, "DECIMAL".
         * 2) Floats, doubles, and decimals with fractional parts
         *    are all parsed as "DECIMAL".
         * 3) All strings are parsed as "CHAR" constants (no varchar
         *    or any others; see stringLiteral() method in
         *    sqlgrammar.jj).
         */

        int colType = columnType.getTypeFormatId();
        int defType = (defaultType == null ? -1 : defaultType.getTypeFormatId());

        if (!defaultNode.isConstantExpression()) {
        // then we have a built-in function, such as "user"
        // or "current schema".  If the function is a datetime
        // value function, then we don't need any special
        // action; however, if it's a "user" or "current schema"
        // function, then the column must be a char type with
        // minimum lengths matching those of DB2 (note that
        // such limits are ONLY enforced on defaults, not at
        // normal insertion time).

            boolean charCol = ((colType == StoredFormatIds.CHAR_TYPE_ID) ||
                (colType == StoredFormatIds.VARCHAR_TYPE_ID) ||
                (colType == StoredFormatIds.LONGVARCHAR_TYPE_ID));

            if (defaultNode instanceof SpecialFunctionNode) {

                switch (defaultNode.getNodeType())
                {
                case C_NodeTypes.USER_NODE:
                case C_NodeTypes.CURRENT_USER_NODE:
                case C_NodeTypes.SESSION_USER_NODE:
                case C_NodeTypes.SYSTEM_USER_NODE:
                // DB2 enforces min length of 8.
                // Note also: any size under 30 gives a warning in DB2.
                    return (charCol && (columnDesc.getMaximumWidth() >=
                        Limits.DB2_MIN_COL_LENGTH_FOR_CURRENT_USER));

                case C_NodeTypes.CURRENT_SCHEMA_NODE:
                // DB2 enforces min length of 128.
                    return (charCol && (columnDesc.getMaximumWidth() >=
                        Limits.DB2_MIN_COL_LENGTH_FOR_CURRENT_SCHEMA));
                default:
                    // else, function not allowed.
                    return false;
                }
            }
        }

        switch (colType) {

            case StoredFormatIds.INT_TYPE_ID:
            // DB2 doesn't allow floating point values to be used
            // as defaults for integer columns (they ARE allowed
            // as part of normal insertions, but not as defaults).
            // If the default is an integer that's too big, then
            // it won't have type INT_TYPE_ID (it'll be either
            // LONGINT or DECIMAL)--so we only allow the default
            // value if it's integer.
                return (defType == StoredFormatIds.INT_TYPE_ID);

            case StoredFormatIds.LONGINT_TYPE_ID:
            // This is a BIGINT column: we allow smallints, ints,
            // and big int constants.  Smallint and int literals
            // are both covered by INT_TYPE; big int literals are
            // covered by LONG_INT type.
                return ((defType == StoredFormatIds.INT_TYPE_ID)
                    || (defType == StoredFormatIds.LONGINT_TYPE_ID));

            case StoredFormatIds.DECIMAL_TYPE_ID:
                if (defType == StoredFormatIds.DECIMAL_TYPE_ID) {
                // only valid if scale and precision are within
                // those of the column.  Note that scale here should
                // exclude any trailing 0's after the decimal
                    DataTypeDescriptor defDesc = defaultNode.getTypeServices();
                    int len = defaultText.length();
                    int precision = defDesc.getPrecision();
                    int scale = defDesc.getScale();
                    for (int i = 1; i <= scale; scale--, precision--) {
                        if (defaultText.charAt(len - i) != '0')
                            break;
                    }
                    return ((scale <= columnDesc.getScale()) &&
                        ((precision - scale) <=
                        (columnDesc.getPrecision() - columnDesc.getScale())));
                }
                else if ((defType == StoredFormatIds.LONGINT_TYPE_ID) ||
                    (defType == StoredFormatIds.INT_TYPE_ID)) {
                // only valid if number of digits is within limits of
                // the decimal column.  We'll check this at insertion time;
                // see Beetle 5585 regarding the need to move that check to
                // here instead of waiting until insert time.  Until that's
                // done, just allow this and wait for insertion...
                    return true;
                }
                else
                // no other types allowed.
                    return false;

            case StoredFormatIds.CHAR_TYPE_ID:
            case StoredFormatIds.VARCHAR_TYPE_ID:
            case StoredFormatIds.LONGVARCHAR_TYPE_ID:
            // only valid if the default type is a character string.
            // That's not to say that all character defaults are
            // valid, but we only check for character string here;
            // further checking will be done at insertion time.  See
            // beetle 5585 regarding the need to move that check
            // to here instead of waiting until insert time.
                return (defType == StoredFormatIds.CHAR_TYPE_ID);

            case StoredFormatIds.BIT_TYPE_ID:
            case StoredFormatIds.VARBIT_TYPE_ID:
            case StoredFormatIds.LONGVARBIT_TYPE_ID:
            // only valid if the default type is a BIT string.
                return (defType == StoredFormatIds.BIT_TYPE_ID);

            case StoredFormatIds.USERDEFINED_TYPE_ID_V3:
            // default is only valid if it's the same type as the column.
                return (defType == colType);

            case StoredFormatIds.BLOB_TYPE_ID:
            case StoredFormatIds.CLOB_TYPE_ID:
            case StoredFormatIds.SMALLINT_TYPE_ID:
            case StoredFormatIds.REAL_TYPE_ID:
            case StoredFormatIds.DOUBLE_TYPE_ID:
            case StoredFormatIds.DATE_TYPE_ID:
            case StoredFormatIds.TIME_TYPE_ID:
            case StoredFormatIds.TIMESTAMP_TYPE_ID:
            // For these types, validity checks will be performed
            // by Cloudscape at insertion time--see beetle 5585 regarding
            // the need to do such checks here instead of later.  For now,
            // just assume we're okay.
                return true;

            default:
            // All other default type checks either 1) involve types that
            // don't apply in DB2 mode (TINYINT, NATIONAL_CHAR, etc), or
            // 2) require a DB2 cast-function (ex. blob(...), which
            // Cloudscape doesn't support yet--see Beetle 5281), and so
            // they are not valid for Cloudscape running in DB2
            // compatibility mode.
                return false;
        }
    }
}
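To see the checks above from the SQL side, here is a minimal, hedged JDBC sketch (not part of ColumnDefinitionNode.java) that provokes two of them: a zero increment, which validateAutoincrement rejects with SQLState 42Z21 per its javadoc, and a start value outside the SMALLINT range, which autoincrementCheckRange rejects. The database name demoDB, the table names, and the assumption that this Derby build accepts the GENERATED ALWAYS AS IDENTITY (START WITH …, INCREMENT BY …) syntax are illustrative, not taken from the listing.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

// Hedged sketch: exercises the autoincrement validation described above.
// Connection URL and table names are assumptions for illustration only.
public class AutoincrementDefaultDemo {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.derby.jdbc.EmbeddedDriver");
        try (Connection conn = DriverManager.getConnection("jdbc:derby:demoDB;create=true");
             Statement s = conn.createStatement()) {

            // Zero increment: expected to fail in validateAutoincrement (42Z21 per the javadoc).
            tryDdl(s, "CREATE TABLE t1 (id INT GENERATED ALWAYS AS IDENTITY "
                      + "(START WITH 1, INCREMENT BY 0))");

            // Start value above Short.MAX_VALUE: expected to fail in autoincrementCheckRange.
            tryDdl(s, "CREATE TABLE t2 (id SMALLINT GENERATED ALWAYS AS IDENTITY "
                      + "(START WITH 40000))");
        }
    }

    // Runs one DDL statement and reports the SQLState if it is rejected.
    private static void tryDdl(Statement s, String ddl) {
        try {
            s.executeUpdate(ddl);
            System.out.println("OK: " + ddl);
        } catch (SQLException e) {
            System.out.println("Rejected (" + e.getSQLState() + "): " + ddl);
        }
    }
}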
