Configuration.java
			add( doc );
			return this;
		}
		catch ( MappingException me ) {
			throw me;
		}
		catch ( Exception e ) {
			log.error( "Could not configure datastore from input stream", e );
			throw new MappingException( e );
		}
		finally {
			try {
				xmlInputStream.close();
			}
			catch ( IOException ioe ) {
				log.error( "could not close input stream", ioe );
			}
		}
	}

	/**
	 * Read mappings from an application resource
	 *
	 * @param path a resource
	 * @param classLoader a <tt>ClassLoader</tt> to use
	 */
	public Configuration addResource(String path, ClassLoader classLoader) throws MappingException {
		log.info( "Mapping resource: " + path );
		InputStream rsrc = classLoader.getResourceAsStream( path );
		if ( rsrc == null ) throw new MappingException( "Resource: " + path + " not found" );
		try {
			return addInputStream( rsrc );
		}
		catch ( MappingException me ) {
			throw new MappingException( "Error reading resource: " + path, me );
		}
	}

	/**
	 * Read mappings from an application resource trying different classloaders.
	 * This method will try to load the resource first from the thread context
	 * classloader and then from the classloader that loaded Hibernate.
	 */
	public Configuration addResource(String path) throws MappingException {
		log.info( "Mapping resource: " + path );
		InputStream rsrc = Thread.currentThread().getContextClassLoader().getResourceAsStream( path );
		if ( rsrc == null ) rsrc = Environment.class.getClassLoader().getResourceAsStream( path );
		if ( rsrc == null ) throw new MappingException( "Resource: " + path + " not found" );
		try {
			return addInputStream( rsrc );
		}
		catch ( MappingException me ) {
			throw new MappingException( "Error reading resource: " + path, me );
		}
	}

	/**
	 * Read a mapping from an application resource, using a convention.
	 * The class <tt>foo.bar.Foo</tt> is mapped by the file <tt>foo/bar/Foo.hbm.xml</tt>.
	 *
	 * @param persistentClass the mapped class
	 */
	public Configuration addClass(Class persistentClass) throws MappingException {
		String fileName = persistentClass.getName().replace( '.', '/' ) + ".hbm.xml";
		log.info( "Mapping resource: " + fileName );
		InputStream rsrc = persistentClass.getClassLoader().getResourceAsStream( fileName );
		if ( rsrc == null ) throw new MappingException( "Resource: " + fileName + " not found" );
		try {
			return addInputStream( rsrc );
		}
		catch ( MappingException me ) {
			throw new MappingException( "Error reading resource: " + fileName, me );
		}
	}

	/**
	 * Read all mappings from a jar file
	 *
	 * @param jar a jar file
	 */
	public Configuration addJar(File jar) throws MappingException {
		log.info( "Searching for mapping documents in jar: " + jar.getName() );
		final JarFile jarFile;
		try {
			jarFile = new JarFile( jar );
		}
		catch ( IOException ioe ) {
			log.error( "Could not configure datastore from jar: " + jar.getName(), ioe );
			throw new MappingException( "Could not configure datastore from jar: " + jar.getName(), ioe );
		}
		Enumeration jarEntries = jarFile.entries();
		while ( jarEntries.hasMoreElements() ) {
			ZipEntry ze = ( ZipEntry ) jarEntries.nextElement();
			if ( ze.getName().endsWith( ".hbm.xml" ) ) {
				log.info( "Found mapping documents in jar: " + ze.getName() );
				try {
					addInputStream( jarFile.getInputStream( ze ) );
				}
				catch ( MappingException me ) {
					throw me;
				}
				catch ( Exception e ) {
					log.error( "Could not configure datastore from jar: " + jar.getName(), e );
					throw new MappingException( "Could not configure datastore from jar: " + jar.getName(), e );
				}
			}
		}
		return this;
	}
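	/*
	 * Usage sketch (illustrative, not part of this class): the add* methods above return
	 * this Configuration, so mapping documents can be registered fluently before building
	 * a SessionFactory. The class and resource names below are hypothetical.
	 *
	 *     Configuration cfg = new Configuration()
	 *             .addClass( org.example.Order.class )            // looks up org/example/Order.hbm.xml
	 *             .addResource( "org/example/Customer.hbm.xml" )  // context classloader, then Hibernate's
	 *             .addJar( new java.io.File( "mappings.jar" ) );  // scans the jar for *.hbm.xml entries
	 */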
	/**
	 * Read all mapping documents from a directory tree. Assume that any
	 * file named <tt>*.hbm.xml</tt> is a mapping document.
	 *
	 * @param dir a directory
	 */
	public Configuration addDirectory(File dir) throws MappingException {
		File[] files = dir.listFiles();
		for ( int i = 0; i < files.length; i++ ) {
			if ( files[i].isDirectory() ) {
				addDirectory( files[i] );
			}
			else if ( files[i].getName().endsWith( ".hbm.xml" ) ) {
				addFile( files[i] );
			}
		}
		return this;
	}

	private Iterator iterateGenerators(Dialect dialect) throws MappingException {
		TreeMap generators = new TreeMap();
		Iterator iter = classes.values().iterator();
		while ( iter.hasNext() ) {
			PersistentClass pc = ( PersistentClass ) iter.next();
			if ( !pc.isInherited() ) {
				IdentifierGenerator ig = pc.getIdentifier().createIdentifierGenerator(
						dialect,
						properties.getProperty( Environment.DEFAULT_CATALOG ),
						properties.getProperty( Environment.DEFAULT_SCHEMA ),
						pc.getEntityName()
				);
				if ( ig instanceof PersistentIdentifierGenerator ) {
					generators.put( ( ( PersistentIdentifierGenerator ) ig ).generatorKey(), ig );
				}
			}
		}
		iter = collections.values().iterator();
		while ( iter.hasNext() ) {
			Collection collection = ( Collection ) iter.next();
			if ( collection.isIdentified() ) {
				IdentifierGenerator ig = ( ( IdentifierCollection ) collection ).getIdentifier().createIdentifierGenerator(
						dialect,
						properties.getProperty( Environment.DEFAULT_CATALOG ),
						properties.getProperty( Environment.DEFAULT_SCHEMA ),
						null
				);
				if ( ig instanceof PersistentIdentifierGenerator ) {
					generators.put( ( ( PersistentIdentifierGenerator ) ig ).generatorKey(), ig );
				}
			}
		}
		return generators.values().iterator();
	}

	/**
	 * Generate DDL for dropping tables
	 *
	 * @see org.hibernate.tool.hbm2ddl.SchemaExport
	 */
	public String[] generateDropSchemaScript(Dialect dialect) throws HibernateException {
		secondPassCompile();
		ArrayList script = new ArrayList( 50 );
		if ( dialect.dropConstraints() ) {
			Iterator iter = getTableMappings();
			while ( iter.hasNext() ) {
				Table table = ( Table ) iter.next();
				if ( table.isPhysicalTable() ) {
					Iterator subIter = table.getForeignKeyIterator();
					while ( subIter.hasNext() ) {
						ForeignKey fk = ( ForeignKey ) subIter.next();
						if ( fk.isPhysicalConstraint() ) {
							script.add( fk.sqlDropString( dialect,
									properties.getProperty( Environment.DEFAULT_CATALOG ),
									properties.getProperty( Environment.DEFAULT_SCHEMA ) ) );
						}
					}
				}
			}
		}
		Iterator iter = getTableMappings();
		while ( iter.hasNext() ) {
			Table table = ( Table ) iter.next();
			if ( table.isPhysicalTable() ) {
				/*Iterator subIter = table.getIndexIterator();
				while ( subIter.hasNext() ) {
					Index index = (Index) subIter.next();
					if ( !index.isForeignKey() || !dialect.hasImplicitIndexForForeignKey() ) {
						script.add( index.sqlDropString(dialect) );
					}
				}*/
				script.add( table.sqlDropString( dialect,
						properties.getProperty( Environment.DEFAULT_CATALOG ),
						properties.getProperty( Environment.DEFAULT_SCHEMA ) ) );
			}
		}
		iter = iterateGenerators( dialect );
		while ( iter.hasNext() ) {
			String[] lines = ( ( PersistentIdentifierGenerator ) iter.next() ).sqlDropStrings( dialect );
			for ( int i = 0; i < lines.length; i++ ) script.add( lines[i] );
		}
		return ArrayHelper.toStringArray( script );
	}
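	/*
	 * Usage sketch (illustrative, not part of this class): generateDropSchemaScript() returns
	 * the DROP statements for the configured mappings on a given dialect, the same DDL the
	 * SchemaExport tool runs. The dialect chosen below is only an example.
	 *
	 *     Configuration cfg = new Configuration().addClass( org.example.Order.class );
	 *     String[] dropDdl = cfg.generateDropSchemaScript( new org.hibernate.dialect.HSQLDialect() );
	 *     for ( int i = 0; i < dropDdl.length; i++ ) System.out.println( dropDdl[i] );
	 */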
	/**
	 * Generate DDL for creating tables
	 *
	 * @see org.hibernate.tool.hbm2ddl.SchemaExport
	 */
	public String[] generateSchemaCreationScript(Dialect dialect) throws HibernateException {
		secondPassCompile();
		ArrayList script = new ArrayList( 50 );
		Iterator iter = getTableMappings();
		while ( iter.hasNext() ) {
			Table table = ( Table ) iter.next();
			if ( table.isPhysicalTable() ) {
				script.add( table.sqlCreateString( dialect, mapping,
						properties.getProperty( Environment.DEFAULT_CATALOG ),
						properties.getProperty( Environment.DEFAULT_SCHEMA ) ) );
			}
		}
		iter = getTableMappings();
		while ( iter.hasNext() ) {
			Table table = ( Table ) iter.next();
			if ( table.isPhysicalTable() ) {
				if ( !dialect.supportsUniqueConstraintInCreateAlterTable() ) {
					Iterator subIter = table.getUniqueKeyIterator();
					while ( subIter.hasNext() ) {
						UniqueKey uk = ( UniqueKey ) subIter.next();
						script.add( uk.sqlCreateString( dialect, mapping,
								properties.getProperty( Environment.DEFAULT_CATALOG ),
								properties.getProperty( Environment.DEFAULT_SCHEMA ) ) );
					}
				}
				Iterator subIter = table.getIndexIterator();
				while ( subIter.hasNext() ) {
					Index index = ( Index ) subIter.next();
					script.add( index.sqlCreateString( dialect, mapping,
							properties.getProperty( Environment.DEFAULT_CATALOG ),
							properties.getProperty( Environment.DEFAULT_SCHEMA ) ) );
				}
				if ( dialect.hasAlterTable() ) {
					subIter = table.getForeignKeyIterator();
					while ( subIter.hasNext() ) {
						ForeignKey fk = ( ForeignKey ) subIter.next();
						if ( fk.isPhysicalConstraint() ) script.add( fk.sqlCreateString( dialect, mapping,
								properties.getProperty( Environment.DEFAULT_CATALOG ),
								properties.getProperty( Environment.DEFAULT_SCHEMA ) ) );
					}
				}
			}
		}
		iter = iterateGenerators( dialect );
		while ( iter.hasNext() ) {
			String[] lines = ( ( PersistentIdentifierGenerator ) iter.next() ).sqlCreateStrings( dialect );
			for ( int i = 0; i < lines.length; i++ ) script.add( lines[i] );
		}
		return ArrayHelper.toStringArray( script );
	}
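	/*
	 * Usage sketch (illustrative, not part of this class): generateSchemaCreationScript() collects
	 * CREATE statements for tables, unique keys, indexes, foreign keys and identifier generators.
	 * The dialect and the cfg variable continue the hypothetical example above.
	 *
	 *     String[] createDdl = cfg.generateSchemaCreationScript( new org.hibernate.dialect.HSQLDialect() );
	 *     for ( int i = 0; i < createDdl.length; i++ ) System.out.println( createDdl[i] );
	 */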
	/**
	 * Generate DDL for altering tables
	 *
	 * @see org.hibernate.tool.hbm2ddl.SchemaUpdate
	 */
	public String[] generateSchemaUpdateScript(Dialect dialect, DatabaseMetadata databaseMetadata) throws HibernateException {
		secondPassCompile();
		ArrayList script = new ArrayList( 50 );
		Iterator iter = getTableMappings();
		while ( iter.hasNext() ) {
			Table table = ( Table ) iter.next();
			if ( table.isPhysicalTable() ) {
				TableMetadata tableInfo = databaseMetadata.getTableMetadata(
						table.getName(), table.getSchema(), table.getCatalog() );
				if ( tableInfo == null ) {
					script.add( table.sqlCreateString( dialect, mapping,
							properties.getProperty( Environment.DEFAULT_CATALOG ),
							properties.getProperty( Environment.DEFAULT_SCHEMA ) ) );
				}
				else {
					Iterator subiter = table.sqlAlterStrings( dialect, mapping, tableInfo,
							properties.getProperty( Environment.DEFAULT_CATALOG ),
							properties.getProperty( Environment.DEFAULT_SCHEMA ) );
					while ( subiter.hasNext() ) script.add( subiter.next() );
				}
			}
		}
		iter = getTableMappings();
		while ( iter.hasNext() ) {
			Table table = ( Table ) iter.next();
			if ( table.isPhysicalTable() ) {
				TableMetadata tableInfo = databaseMetadata.getTableMetadata(
						table.getName(), table.getSchema(), table.getCatalog() );
				if ( dialect.hasAlterTable() ) {
					Iterator subIter = table.getForeignKeyIterator();
					while ( subIter.hasNext() ) {
						ForeignKey fk = ( ForeignKey ) subIter.next();
						if ( fk.isPhysicalConstraint() ) {
							boolean create = tableInfo == null || (
									tableInfo.getForeignKeyMetadata( fk.getName() ) == null && (
											//Icky workaround for MySQL bug:
											!( dialect instanceof MySQLDialect ) ||
											tableInfo.getIndexMetadata( fk.getName() ) == null
									)
							);
							if ( create ) script.add( fk.sqlCreateString( dialect, mapping,
									properties.getProperty( Environment.DEFAULT_CATALOG ),
									properties.getProperty( Environment.DEFAULT_SCHEMA ) ) );
						}
					}
				}

				/*//broken, 'cos we don't generate these with names in SchemaExport
				subIter = table.getIndexIterator();
				while ( subIter.hasNext() ) {
					Index index = (Index) subIter.next();
					if ( !index.isForeignKey() || !dialect.hasImplicitIndexForForeignKey() ) {
						if ( tableInfo==null || tableInfo.getIndexMetadata( index.getFilterName() ) == null ) {
							script.add( index.sqlCreateString(dialect, mapping) );
						}
					}
				}

				//broken, 'cos we don't generate these with names in SchemaExport
				subIter = table.getUniqueKeyIterator();
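	/*
	 * Usage sketch (illustrative, not part of this class): generateSchemaUpdateScript() diffs
	 * the mappings against live JDBC metadata. In practice the SchemaUpdate tool wraps this
	 * call; execute( true, false ) below prints the generated statements without applying them.
	 *
	 *     new org.hibernate.tool.hbm2ddl.SchemaUpdate( cfg ).execute( true, false );
	 */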