autoeconnpartitioning.java

来自「Semantic Web Ontology Editor」· Java 代码 · 共 582 行 · 第 1/2 页

JAVA
582
字号
            // --- Tail of a method whose start is outside this view (builds a
            // --- `module` axiom set from `totalOnts`); tokens preserved verbatim.
            // Transitively pull in every partition reachable via foreign-ontology
            // URIs: iterate over a snapshot (new HashSet) so totalOnts can grow.
            for (Iterator iter = new HashSet(totalOnts).iterator(); iter.hasNext(); ) {
                OWLOntology ont = (OWLOntology) iter.next();
                Set foreign = ont.getForeignOntologies();
                for (Iterator iter2 = foreign.iterator(); iter2.hasNext();) {
                    URI uri = (URI) iter2.next();
                    for (Iterator iter3 = partitions.iterator(); iter3.hasNext();) {
                        OWLOntology par = (OWLOntology) iter3.next();
                        // match foreign-ontology URI against known partitions
                        if (par.getURI().equals(uri)) {
                            totalOnts.add(par);
                        }
                    }
                }
            }
        }
        // Collect the axioms of every ontology gathered above into `module`.
        for (Iterator iter = totalOnts.iterator(); iter.hasNext();) {
            OWLOntology ont = (OWLOntology) iter.next();
            AxiomCollector ac = new AxiomCollector(ont);
            module.addAll(ac.axiomize(ont));
        }
    }
    catch (OWLException ex) {
        // NOTE(review): error is only printed; caller still receives a
        // possibly-partial module.
        ex.printStackTrace();
    }
    return module;
}

/**
 * Partitions the currently selected ontology into E-connection components
 * using the iterative partitioning algorithm, and either reloads the
 * resulting partitions into the SwoopModel or saves them to disk.
 *
 * @param applyWithoutConfirm if true, the iterative refactorer applies its
 *        ontology changes itself; if false, changes are routed through
 *        swoopModel as uncommitted changes and applied here
 * @param debugMessages enables debug output in the refactorer
 * @param saveToDisk if true, serialized partitions are written under
 *        "partitions/" instead of being reparsed into Swoop
 * @param doNotAddSwoop if true (and not saving to disk), the reparsed
 *        partitions are returned without replacing the source ontology
 *        in the SwoopModel
 * @return the list of reparsed partition ontologies (empty when
 *         saveToDisk is true)
 * @throws OWLException, Exception propagated from the OWL API / model calls
 */
public List findPartitions(boolean applyWithoutConfirm, boolean debugMessages, boolean saveToDisk, boolean doNotAddSwoop) throws OWLException, Exception{
    List newPartition = new ArrayList();
    OWLOntology source = swoopModel.getSelectedOntology();
    OWLClass owlThing = source.getOWLDataFactory().getOWLThing();
    Map updatedInverses = new HashMap();          // link property -> updated target URI
    OWLOntology target = null;                    // partition currently being filled
    String trace = " ";                           // HTML trace shown in the summary dialog
    partitions = new ArrayList();                 // field: all partitions produced in this run
    movedClasses.add(owlThing);                   // owl:Thing never seeds a partition
    int count =1;                                 // 1-based partition counter (used in URIs)

    //*** start partitioning
    System.out.println("start partitioning:"+swoopModel.getTimeStamp());

    // turn OFF checkpointing!! (restored near the end, but only on the !test path)
    boolean saveCheckPointSetting = swoopModel.getEnableAutoSaveChkPts();
    swoopModel.setEnableAutoSaveChkPts(false, false);

    // initialize EconnIterativePartitioning
    //Start timers
    timers.start();    // overall partitioning time
    timers2.start();   // index-computation time

    StringWriter sw =  new StringWriter();        // captures the refactorer's trace output
    EconnIterativePartitioning iterativeRefactor = new EconnIterativePartitioning(swoopModel);
    iterativeRefactor.setOutput(sw);
    iterativeRefactor.setDebug(debugMessages);
    iterativeRefactor.setApply(applyWithoutConfirm);
    iterativeRefactor.init(swoopModel.getSelectedOntology(),target,linksToSource,boundedInverses);
    //Stop timer for computing indices
    timers2.stop();
    // after computing indices (only once), we get expressivity
    sourceExpressivity=iterativeRefactor.getExpressivity();

    // iteratively partition!! Each not-yet-moved class seeds a new partition.
    Set auxSet = new HashSet();
    //Set auxSet = SetUtils.union(swoopModel.getReasoner().subClassesOf(owlThing));
    auxSet.addAll(source.getClasses());
    Iterator k = auxSet.iterator();
    while(k.hasNext()){
        OWLClass cla = (OWLClass)k.next();
        // skip classes already pulled into an earlier partition
        if(!(movedClasses.contains(cla))){
//          OWLBuilder builder = new OWLBuilder();
            URI uri;
            try {
                // create uri from current ontology being partitioned:
                // strip scheme prefix and file extension from the short form,
                // then mint http://www.mindswap.org/<name>_partition<count>.owl
                String uriStr = swoopModel.shortForm(source.getURI());
                if (uriStr.indexOf(":")>=0) uriStr = uriStr.substring(uriStr.indexOf(":")+1, uriStr.length());
                if (uriStr.indexOf(".")>=0) uriStr = uriStr.substring(0, uriStr.lastIndexOf("."));
                uri = new URI("http://www.mindswap.org/"+uriStr+"_partition" +count+".owl");
//
                target = source.getOWLDataFactory().getOWLOntology(uri, uri);
                // NOTE(review): `df` is never used, and `ontology` is presumably
                // a field of this class (not declared in this method) — confirm.
                OWLDataFactory df = ontology.getOWLDataFactory();

                partitions.add(target);
                System.out.println("Using the iterative partitioning algorithm..");

                // re-point the refactorer at the fresh target and run the
                // state machine seeded with this class
                iterativeRefactor.reset(swoopModel.getSelectedOntology(),target,linksToSource,boundedInverses);
                timers4.start();   // state-machine time
                iterativeRefactor.RunStateMachine(cla);
                timers4.stop();
                // record every named class the state machine moved so it is
                // not used as a seed again
                Iterator h = iterativeRefactor.getMovedClasses().iterator();
                while(h.hasNext()){
                    OWLDescription c = (OWLDescription)h.next();
                    if(c instanceof OWLClass){
                        movedClasses.add(c);
                    }
                }
                iterativeRefactor.computeForeignEntities();
                timers5.start();   // change-application time (inside refactorer)
                iterativeRefactor.makeChanges();
                timers5.stop();
                timers3.start();   // link-verification time
                iterativeRefactor.verifyLinks(partitions);
                timers3.stop();
                // carry bookkeeping state forward into the next iteration
                linksToSource = iterativeRefactor.getLinksToSource();
                boundedInverses = iterativeRefactor.getBoundedInverses();
                updatedInverses.putAll(iterativeRefactor.getUpdatedInverses());
                // when the refactorer did NOT auto-apply, route its changes
                // through the SwoopModel and commit them here
                if(iterativeRefactor.getApply()==false){
                    timers6.start();   // model change-application time
                    changes = iterativeRefactor.getChanges();
                    swoopModel.addUncommittedChanges(changes, false);

                    System.out.println("Applying the changes");
                    swoopModel.applyOntologyChanges(false, false);
                    System.out.println("Applied all the changes");
                    timers6.stop();
                }

                System.out.println("finished partitioning iteration "+count);
                count++;
            }
            catch (Exception e) {
                // NOTE(review): failures in one iteration are swallowed and
                // the loop continues with the next seed class.
                e.printStackTrace();
            }
        }
    }
    timers.stop();
    System.out.println("end partitioning:"+swoopModel.getTimeStamp());

    //Final check on correctness of link references
    //The idea is to turn into object properties the link properties
    //linking to source that haven't been used at all
    for(Iterator iter = partitions.iterator(); iter.hasNext(); ){
        OWLOntology partition = (OWLOntology)iter.next();
        for(Iterator it = partition.getLinkProperties().iterator(); it.hasNext();){
            OWLObjectProperty prop = (OWLObjectProperty)it.next();
            if(updatedInverses.containsKey(prop)){
                // retarget the link property to its updated inverse's URI
                OntologyChange oc = new SetLinkTarget(partition,prop,(URI)updatedInverses.get(prop),null);
                //changes.add(oc);
                oc.accept((ChangeVisitor)partition);
            }
            else{
                // still pointing at the (now removed) source: clear the target,
                // demoting the link property to a plain object property
                if(prop.getLinkTarget().equals(source.getURI())){
                    OntologyChange oc = new SetLinkTarget(partition,prop,null,null);
                    oc.accept((ChangeVisitor)partition);
                }
            }
        }
    }

    System.out.println("-------------------------");
    // NOTE(review): "Miliseconds" typo kept — it is a runtime string.
    System.out.println("Timers in Miliseconds");
    System.out.println("-------------------------");
    System.out.println(timers.toString());
    System.out.println(timers3.toString());
    System.out.println(timers2.toString());
    System.out.println(timers4.toString());
    System.out.println(timers5.toString());
    System.out.println(timers6.toString());
    System.out.println("-------------------------");

    if(!test) {
        // if not test, then its called from Swoop
        trace = trace.concat("<HR>");
//      trace = trace.concat("<b> STEP: </b> " + count + "<br><hr>");
        String traceStep = sw.getBuffer().toString();
        trace = trace.concat(traceStep);

        // post proc to insert partitions into swoopmodel
        // serialize and reparse each partition! still works fast
        int index = 1;   // NOTE(review): unused
        for (Iterator iter = partitions.iterator(); iter.hasNext();) {
            OWLOntology partition = (OWLOntology) iter.next();
            StringWriter st = new StringWriter();
            CorrectedRDFRenderer rdfRenderer = new CorrectedRDFRenderer();
            rdfRenderer.renderOntology(partition, st);
            String partitionSource = st.toString();
            if (saveToDisk) {
                // save partition to disk if partition size is large
                // NOTE(review): writer is not closed if write() throws.
                String fName = "partitions/"+this.getPartitionQName(partition.getURI());
                FileWriter writer = new FileWriter(new File(fName));
                writer.write(partitionSource);
                writer.close();
            }
            else {
                // reparse partition back into swoop
                StringReader reader = new StringReader(partitionSource);
                newPartition.add(swoopModel.loadOntologyInRDF(reader, partition.getURI()));
            }
        }
        // add reparsed partitions to swoopModel
        // replace current ontology with copy
        if (!saveToDisk) {
            if (!doNotAddSwoop) {
                swoopModel.removeOntology(source.getURI());
                swoopModel.addOntologies(newPartition);
            }
            // NOTE(review): early return skips restoring the checkpoint
            // setting and all statistics below — confirm intended.
            else return newPartition;
        }
        else {
            // display message pointing to saved partitions
            JOptionPane.showMessageDialog(null, "Ontology Partitions saved in the SWOOP/Partitions/ directory.", "Partitions Generated", JOptionPane.INFORMATION_MESSAGE);
        }

        // restore setting on enableCheckPoints
        swoopModel.setEnableAutoSaveChkPts(saveCheckPointSetting, false);
    }

    //Display statistics (built as an HTML fragment)
    int counting = 0;     // number of partitions (components)
    int nLinkProps = 0;   // total link properties across all partitions
    String statistics = " ";
    for(Iterator i = partitions.iterator(); i.hasNext(); )
    {
        OWLOntology ont = (OWLOntology) i.next();
        statistics = statistics.concat("<b> Ontology: </b> " + ont.getURI()+ "<br>\n");
        counting++;
        Map foreignEntities = ont.getForeignEntities();
        Set foreignEntitySet = foreignEntities.keySet();
        int numForeignClasses = 0;

        // foreign classes are excluded from the per-partition class count
        for (Iterator it = foreignEntitySet.iterator(); it.hasNext(); )
        {
            OWLEntity ent = (OWLEntity)it.next();
            if ( ent instanceof OWLClass)
                numForeignClasses++;
        }

        statistics = statistics.concat("Number of classes " + (ont.getClasses().size() - numForeignClasses) + "<br>\n");
        statistics = statistics.concat("Number of Link Properties " + ont.getLinkProperties().size()+ "<br>\n");
        int nobjectprops = ont.getObjectProperties().size() -  ont.getLinkProperties().size();
        // NOTE(review): "=+" is almost certainly a "+=" typo; it parses as
        // nLinkProps = (+nLinkProps) + size, which happens to compute the
        // same running total, so behavior is unaffected.
        nLinkProps =+ nLinkProps + ont.getLinkProperties().size();
        statistics = statistics.concat("Number of Object Properties " + nobjectprops+ "<br> \n");
        statistics = statistics.concat("Number of Datatype Properties " + ont.getDataProperties().size()+ " <br>\n");
        statistics = statistics.concat("Number of Individuals " + ont.getIndividuals().size()+ " <br> \n");
        statistics = statistics.concat(" <hr> <br>");
    }
    statistics = statistics.concat("<b> General Statistics </b> <br> ");
    statistics = statistics.concat("Total Number of Components:  " + counting + "<br>"  );
    statistics = statistics.concat("Total Number of LinkProperties: " + nLinkProps + "<br>");
    statistics = statistics.concat("Expressivity of the Original Ontology: " + sourceExpressivity+ "<br>");

    /* This snippet of code here makes sure that every OWLOntology in partitions is present
     *   in the SwoopModel.  This is necessary because cloning of OWLOntologies during
     *   partitioning causes the pointers to get mangled up.
     */
    Collection ontologySet = swoopModel.getOntologies();
    for (int counter = 0 ; counter < partitions.size(); counter++ )
    {
        OWLOntology partition = (OWLOntology)partitions.get( counter );
        if (!ontologySet.contains( partition ))
            partitions.set( counter, swoopModel.getOntology( partition.getURI() ));
    }
    swoopModel.setSelectedOntology( (OWLOntology)partitions.get(0) );

    if (!test) {
        // constructing the dialog displays it; the local is otherwise unused
        RefactoringSummary summary = new RefactoringSummary(new JFrame(), swoopModel, partitions, statistics, trace);
    }
    else {
        System.out.println(statistics);
    }

    //System.out.println("Started with class: " + initial.getURI());
    return newPartition;
}

/**
 * Returns the per-partition expressivity map computed during partitioning.
 */
public Map getTargetExpressivities(){
    return targetExpressivity;
}

/**
 * Returns the DL expressivity string of the original source ontology.
 */
public String getSourceExpressivity(){
    return sourceExpressivity;
}

/**
 * Returns the expressivity map for the link properties between partitions.
 */
public Map getLinkExpressivity(){
    return linkExpressivity;
}

/**
 * Extracts the last path segment of a partition URI (the text after the
 * final '/'), used as its file name when saving partitions to disk.
 */
private String getPartitionQName(URI uri) {
    String uriStr = uri.toString();
    return uriStr.substring(uriStr.lastIndexOf("/")+1, uriStr.length());
}
}

⌨️ 快捷键说明

复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?