📄 annotationconfiguration.java
字号:
// ---- tail of the validator-bootstrap method; its signature is above this chunk ----
// Wires up Hibernate Validator via reflection so it stays an optional dependency.
try {
    Class classValidator =
            ReflectHelper.classForName( "org.hibernate.validator.ClassValidator", this.getClass() );
    Class messageInterpolator =
            ReflectHelper.classForName( "org.hibernate.validator.MessageInterpolator", this.getClass() );
    validatorCtr = classValidator.getDeclaredConstructor(
            Class.class, ResourceBundle.class, messageInterpolator, Map.class, ReflectionManager.class
    );
    applyMethod = classValidator.getMethod( "apply", PersistentClass.class );
}
catch (ClassNotFoundException e) {
    // Validator is optional: log its absence once, then ignore silently.
    if ( !isValidatorNotPresentLogged ) {
        log.info( "Hibernate Validator not found: ignoring" );
    }
    isValidatorNotPresentLogged = true;
}
catch (NoSuchMethodException e) {
    // Validator is present but incompatible: that is a configuration error.
    throw new AnnotationException( e );
}
if ( applyMethod != null && applyOnDdl ) {
    for (PersistentClass persistentClazz : (Collection<PersistentClass>) classes.values()) {
        //integrate the validate framework
        String className = persistentClazz.getClassName();
        if ( StringHelper.isNotEmpty( className ) ) {
            try {
                Object validator = validatorCtr.newInstance(
                        ReflectHelper.classForName( className ), null, null, null, reflectionManager
                );
                applyMethod.invoke( validator, persistentClazz );
            }
            catch (Exception e) {
                // Best-effort: a failure to apply constraints must not abort configuration.
                log.warn( "Unable to apply constraints on DDL for " + className, e );
            }
        }
    }
}
}

/**
 * Processes FKSecondPass instances trying to resolve any
 * graph circularity (ie PK made of a many to one linking to
 * an entity having a PK made of a ManyToOne ...).
 */
private void processFkSecondPassInOrder() {
    log.debug( "processing fk mappings (*ToOne and JoinedSubclass)" );
    List<FkSecondPass> fkSecondPasses = getFKSecondPassesOnly();

    if ( fkSecondPasses.size() == 0 ) {
        return; // nothing to do here
    }

    // split FkSecondPass instances into primary key and non primary key FKs.
    // While doing so build a map of class names to FkSecondPass instances depending on this class.
    Map<String, Set<FkSecondPass>> isADependencyOf = new HashMap<String, Set<FkSecondPass>>();
    List endOfQueueFkSecondPasses = new ArrayList( fkSecondPasses.size() );
    for (FkSecondPass sp : fkSecondPasses) {
        if ( sp.isInPrimaryKey() ) {
            String referenceEntityName = sp.getReferencedEntityName();
            PersistentClass classMapping = getClassMapping( referenceEntityName );
            String dependentTable = classMapping.getTable().getQuotedName();
            if ( !isADependencyOf.containsKey( dependentTable ) ) {
                isADependencyOf.put( dependentTable, new HashSet<FkSecondPass>() );
            }
            isADependencyOf.get( dependentTable ).add( sp );
        }
        else {
            // non-PK FKs carry no ordering constraint; process them after the ordered ones
            endOfQueueFkSecondPasses.add( sp );
        }
    }

    // using the isADependencyOf map we order the FkSecondPass recursively instances
    // into the right order for processing
    List<FkSecondPass> orderedFkSecondPasses = new ArrayList( fkSecondPasses.size() );
    for (String tableName : isADependencyOf.keySet()) {
        buildRecursiveOrderedFkSecondPasses( orderedFkSecondPasses, isADependencyOf, tableName, tableName );
    }

    // process the ordered FkSecondPasses
    for (FkSecondPass sp : orderedFkSecondPasses) {
        sp.doSecondPass( classes );
    }

    processEndOfQueue( endOfQueueFkSecondPasses );
}

/**
 * Processes the remaining (non primary key) FK second passes, retrying the ones that fail
 * with a {@code RecoverableException} until either all succeed or no further progress is made.
 *
 * @param endOfQueueFkSecondPasses second passes left over by {@link #processFkSecondPassInOrder()}
 */
private void processEndOfQueue(List endOfQueueFkSecondPasses) {
    /*
     * If a second pass raises a recoverableException, queue it for next round
     * stop of no pass has to be processed or if the number of pass to processes
     * does not diminish between two rounds.
     * If some failing pass remain, raise the original exception
     */
    boolean stopProcess = false;
    RuntimeException originalException = null;
    while ( !stopProcess ) {
        List failingSecondPasses = new ArrayList();
        Iterator it = endOfQueueFkSecondPasses.listIterator();
        while ( it.hasNext() ) {
            final SecondPass pass = (SecondPass) it.next();
            try {
                pass.doSecondPass( classes );
            }
            catch (RecoverableException e) {
                failingSecondPasses.add( pass );
                // keep only the FIRST failure's cause; later failures are likely symptoms of it
                // NOTE(review): assumes RecoverableException always wraps a RuntimeException — confirm
                if ( originalException == null ) originalException = (RuntimeException) e.getCause();
            }
        }
        // stop when everything succeeded or when a full round made no progress
        stopProcess = failingSecondPasses.size() == 0
                || failingSecondPasses.size() == endOfQueueFkSecondPasses.size();
        endOfQueueFkSecondPasses = failingSecondPasses;
    }
    if ( endOfQueueFkSecondPasses.size() > 0 ) {
        throw originalException;
    }
}

/**
 * @return Returns a list of all <code>secondPasses</code> instances which are a instance of
 *         <code>FkSecondPass</code>.
 */
private List<FkSecondPass> getFKSecondPassesOnly() {
    Iterator iter = secondPasses.iterator();
    List<FkSecondPass> fkSecondPasses = new ArrayList<FkSecondPass>( secondPasses.size() );
    while ( iter.hasNext() ) {
        SecondPass sp = (SecondPass) iter.next();
        //do the second pass of fk before the others and remove them
        if ( sp instanceof FkSecondPass ) {
            fkSecondPasses.add( (FkSecondPass) sp );
            iter.remove();
        }
    }
    return fkSecondPasses;
}

/**
 * Recursively builds a list of FkSecondPass instances ready to be processed in this order.
 * Checking all dependencies recursively seems quite expensive, but the original code just relied
 * on some sort of table name sorting which failed in certain circumstances.
 *
 * @param orderedFkSecondPasses The list containing the <code>FkSecondPass<code> instances ready
 *        for processing.
 * @param isADependencyOf Our lookup data structure to determine dependencies between tables
 * @param startTable Table name to start recursive algorithm.
 * @param currentTable The current table name used to check for 'new' dependencies.
 *
 * @see ANN-722 ANN-730
 */
private void buildRecursiveOrderedFkSecondPasses(
        List orderedFkSecondPasses,
        Map<String, Set<FkSecondPass>> isADependencyOf,
        String startTable,
        String currentTable) {
    Set<FkSecondPass> dependencies = isADependencyOf.get( currentTable );

    // bottom out
    if ( dependencies == null || dependencies.size() == 0 ) {
        return;
    }

    for (FkSecondPass sp : dependencies) {
        String dependentTable = sp.getValue().getTable().getQuotedName();
        if ( dependentTable.compareTo( startTable ) == 0 ) {
            // FIX: the original message announced a table list but never appended any name,
            // producing a truncated, useless error. Include the tables involved.
            StringBuilder sb = new StringBuilder(
                    "Foreign key circularity dependency involving the following tables: " );
            sb.append( startTable );
            if ( !currentTable.equals( startTable ) ) {
                sb.append( ", " ).append( currentTable );
            }
            throw new AnnotationException( sb.toString() );
        }
        buildRecursiveOrderedFkSecondPasses( orderedFkSecondPasses, isADependencyOf, startTable, dependentTable );
        // prepend so that dependencies end up before their dependents
        if ( !orderedFkSecondPasses.contains( sp ) ) {
            orderedFkSecondPasses.add( 0, sp );
        }
    }
}

/**
 * Binds the queued artifacts of one kind ("hbm" documents or annotated classes)
 * and clears the corresponding queues.
 *
 * @param artifact either {@code "hbm"} or {@code "class"}; anything else is logged and ignored
 */
private void processArtifactsOfType(String artifact) {
    if ( "hbm".equalsIgnoreCase( artifact ) ) {
        log.debug( "Process hbm files" );
        for (Document document : hbmDocuments) {
            super.add( document );
        }
        hbmDocuments.clear();
        hbmEntities.clear();
    }
    else if ( "class".equalsIgnoreCase( artifact ) ) {
        log.debug( "Process annotated classes" );
        //bind classes in the correct order calculating some inheritance state
        List<XClass> orderedClasses = orderAndFillHierarchy( annotatedClasses );
        Map<XClass, InheritanceState> inheritanceStatePerClass =
                AnnotationBinder.buildInheritanceStates( orderedClasses, reflectionManager );
        ExtendedMappings mappings = createExtendedMappings();
        for (XClass clazz : orderedClasses) {
            //todo use the same extended mapping
            AnnotationBinder.bindClass( clazz, inheritanceStatePerClass, mappings );
        }
        annotatedClasses.clear();
        annotatedClassEntities.clear();
    }
    else {
        log.warn( "Unknown artifact: {}", artifact );
    }
}

/**
 * When an entity is mapped both in hbm.xml and via annotations, drops the entry of the
 * LOSING kind: passing {@code "hbm"} removes the annotation duplicates, passing
 * {@code "class"} removes the hbm duplicates.
 *
 * @param artifact the kind that wins the conflict ({@code "hbm"} or {@code "class"})
 */
private void removeConflictedArtifact(String artifact) {
    if ( "hbm".equalsIgnoreCase( artifact ) ) {
        for (String entity : hbmEntities.keySet()) {
            if ( annotatedClassEntities.containsKey( entity ) ) {
                annotatedClasses.remove( annotatedClassEntities.get( entity ) );
                annotatedClassEntities.remove( entity );
            }
        }
    }
    else if ( "class".equalsIgnoreCase( artifact ) ) {
        for (String entity : annotatedClassEntities.keySet()) {
            if ( hbmEntities.containsKey( entity ) ) {
                hbmDocuments.remove( hbmEntities.get( entity ) );
                hbmEntities.remove( entity );
            }
        }
    }
}

/**
 * Creates (or extends) a unique key on {@code table} from logical column names, raising an
 * {@code AnnotationException} listing every column that could not be resolved or bound.
 *
 * @param columnNames logical column names as given in the annotation
 * @param table the table the unique key belongs to
 * @param keyName the name of the unique key constraint
 */
private void buildUniqueKeyFromColumnNames(String[] columnNames, Table table, String keyName) {
    UniqueKey uc;
    int size = columnNames.length;
    Column[] columns = new Column[size];
    Set<Column> unbound = new HashSet<Column>();
    Set<Column> unboundNoLogical = new HashSet<Column>();
    ExtendedMappings mappings = createExtendedMappings();
    for (int index = 0; index < size; index++) {
        String columnName;
        try {
            columnName = mappings.getPhysicalColumnName( columnNames[index], table );
            columns[index] = new Column( columnName );
            unbound.add( columns[index] );
            //column equals and hashcode is based on column name
        }
        catch (MappingException e) {
            // no logical name registered for this column; columns[index] stays null
            unboundNoLogical.add( new Column( columnNames[index] ) );
        }
    }
    for (Column column : columns) {
        // FIX: entries whose logical-name lookup failed are null; they are already tracked
        // in unboundNoLogical, so skip them instead of risking an NPE in containsColumn().
        if ( column != null && table.containsColumn( column ) ) {
            uc = table.getOrCreateUniqueKey( keyName );
            uc.addColumn( table.getColumn( column ) );
            unbound.remove( column );
        }
    }
    if ( unbound.size() > 0 || unboundNoLogical.size() > 0 ) {
        StringBuilder sb = new StringBuilder( "Unable to create unique key constraint (" );
        for (String columnName : columnNames) {
            sb.append( columnName ).append( ", " );
        }
        sb.setLength( sb.length() - 2 );
        sb.append( ") on table " ).append( table.getName() ).append( ": " );
        for (Column column : unbound) {
            sb.append( column.getName() ).append( ", " );
        }
        for (Column column : unboundNoLogical) {
            sb.append( column.getName() ).append( ", " );
        }
        sb.setLength( sb.length() - 2 );
        sb.append( " not found" );
        throw new AnnotationException( sb.toString() );
    }
}

/**
 * Handles one {@code <mapping .../>} element of the configuration, dispatching on whichever
 * of the mutually exclusive attributes (resource, jar, file, package, class) is present.
 *
 * @param subelement the {@code <mapping>} element
 * @param name the element name, used for logging only
 * @throws MappingException if no attribute is set or a declared class cannot be loaded
 */
@Override
protected void parseMappingElement(Element subelement, String name) {
    Attribute rsrc = subelement.attribute( "resource" );
    Attribute file = subelement.attribute( "file" );
    Attribute jar = subelement.attribute( "jar" );
    Attribute pckg = subelement.attribute( "package" );
    Attribute clazz = subelement.attribute( "class" );
    if ( rsrc != null ) {
        log.debug( "{} <- {}", name, rsrc );
        addResource( rsrc.getValue() );
    }
    else if ( jar != null ) {
        log.debug( "{} <- {}", name, jar );
        addJar( new File( jar.getValue() ) );
    }
    else if ( file != null ) {
        log.debug( "{} <- {}", name, file );
        addFile( file.getValue() );
    }
    else if ( pckg != null ) {
        log.debug( "{} <- {}", name, pckg );
        addPackage( pckg.getValue() );
    }
    else if ( clazz != null ) {
        log.debug( "{} <- {}", name, clazz );
        Class loadedClass;
        try {
            loadedClass = ReflectHelper.classForName( clazz.getValue() );
        }
        catch (ClassNotFoundException cnf) {
            throw new MappingException(
                    "Unable to load class declared as <mapping class=\"" + clazz.getValue()
                            + "\"/> in the configuration:", cnf
            );
        }
        catch (NoClassDefFoundError ncdf) {
            // a dependency of the class is missing, not the class itself
            throw new MappingException(
                    "Unable to load class declared as <mapping class=\"" + clazz.getValue()
                            + "\"/> in the configuration:", ncdf
            );
        }
        addAnnotatedClass( loadedClass );
    }
    else {
        throw new MappingException( "<mapping> element in configuration specifies no attributes" );
    }
}

/**
 * Queues a mapping document. Plain hbm.xml documents are indexed by entity name and queued;
 * EJB3 orm.xml ("entity-mappings") documents are handed to the XML context and their classes
 * registered as annotated classes. During the second pass, hbm documents bypass the queue.
 *
 * @param doc the dom4j document to add
 * @throws MappingException on invalid mapping content
 */
@Override
protected void add(org.dom4j.Document doc) throws MappingException {
    boolean ejb3Xml = "entity-mappings".equals( doc.getRootElement().getName() );
    if ( inSecondPass ) {
        //if in second pass bypass the queueing, getExtendedQueue reuse this method
        if ( !ejb3Xml ) super.add( doc );
    }
    else {
        if ( !ejb3Xml ) {
            final Element hmNode = doc.getRootElement();
            Attribute packNode = hmNode.attribute( "package" );
            String defaultPackage = packNode != null ? packNode.getValue() : "";
            Set<String> entityNames = new HashSet<String>();
            findClassNames( defaultPackage, hmNode, entityNames );
            for (String entity : entityNames) {
                hbmEntities.put( entity, doc );
            }
            hbmDocuments.add( doc );
        }
        else {
            List<String> classnames =
                    ( (EJB3ReflectionManager) reflectionManager ).getXMLContext().addDocument( doc );
            for (String classname : classnames) {
                try {
                    annotatedClasses.add( reflectionManager.classForName( classname, this.getClass() ) );
                }
                catch (ClassNotFoundException e) {
                    throw new AnnotationException( "Unable to load class defined in XML: " + classname, e );
                }
            }
        }
    }
}

private static void findClassNames(
        String defaultPackage, final Element startNode, final java.util.Set names
) {
    // if we have some extends we need to check if those classes possibly could be inside the
    // same hbm.xml file...
    Iterator[] classes = new Iterator[4];
    classes[0] = startNode.elementIterator( "class" );
    classes[1] = startNode.elementIterator( "subclass" );
    classes[2] = startNode.elementIterator( "joined-subclass" );
    classes[3] = startNode.elementIterator( "union-subclass" );

    Iterator classIterator = new JoinedIterator( classes );
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -