
bencitationtuinoseg.java

This is a Java implementation of MATLAB. It contains a great deal of material; please take your time to work through it.
Language: JAVA
Page 1 of 3
        if (useFeatureInduction.value()) {
            System.out.println("\n\nINDUCING FEATURES FOR TEST INSTANCES");
            fi.induceFeaturesFor(itestlist, false, false);
        }

        CorefClusterAdv cl = null;
        //CorefClusterAdv cl_old = null;
        CorefClusterAdv cl_old = null;

        // training
        //CitationClustering cl = new CitationClustering();
        if (oldCluster) {
            cl_old = new CorefClusterAdv(instancePipe);
            cl_old.setTrueNumStop(useTrueNumClusters.value());
            cl_old.train(ilist);
        }
        if (newCluster) {
            cl = new CorefClusterAdv(instancePipe, tmodel);
            cl.setTrueNumStop(useTrueNumClusters.value());
            cl.setConfWeightedScores(useWeightedAvg.value());
            cl.setOptimality(useOptimal.value());
            cl.setRBeamSize(rBeamSize.value());
            cl.setNBestInference(useNBest.value()); // actually use n-best list in coref
            cl.setFullPartition(fullPartition.value());
            int si = searchIters.value();
            int sd = searchReductions.value();
            cl.setSearchParams(si, sd);
            if (loadMEFile.value() != null)
                cl.loadME(loadMEFile.value());
            else
                cl.train(ilist);
            cl.testClassifier(itestlist);
        }

        Collection key = makeCollections(allnodes); // make key collections
        //System.out.println("KEY: " + key);
        //System.out.println("NODES: " + nodes);
        Collection testKey = makeCollections(test_nodes);

        Collection s = null;
        if (newCluster) {
            //cl.setKeyPartitioning(key);
            //s = cl.clusterMentions(ilist, allnodes, optimalNBest.value(), useCorrelational.value());
        }
        System.out.println("Resulting clustering: " + s);

        Collection c1 = null;
        if (oldCluster) {
            cl_old.setKeyPartitioning(testKey);
            c1 = cl_old.clusterMentions(ilist, allnodes, optimalNBest.value(), false);
            if (newCluster) {
                System.out.println("Objective fn of KEY: " +
                        cl.evaluatePartitioningExternal(ilist, allnodes, key, optimalNBest.value()));
                System.out.println("Objective fn of GREEDY CLUSTERING: " +
                        cl.evaluatePartitioningExternal(ilist, allnodes, c1, optimalNBest.value()));
            }
        }
        //System.out.println("Objective fn of KEY w/optimal edges: " +
        //        cl.evaluatePartitioningExternal(ilist, nodes, key, true));
        if (oldCluster) {
            //System.out.println("Objective fn of OLD CLUSTERING w/optimal edges: " +
            //        cl.evaluatePartitioningExternal(ilist, nodes, c1, true));
            ClusterEvaluate eval1 = new ClusterEvaluate(key, c1);
            eval1.evaluate();
            System.out.println("Threshold Training Cluster F1: " + eval1.getF1());
            System.out.println("Threshold Training Cluster Recall: " + eval1.getRecall());
            System.out.println("Threshold Training Cluster Precision: " + eval1.getPrecision());
            System.out.println("Number of clusters " + c1.size());
            PairEvaluate p1 = new PairEvaluate(key, c1);
            p1.evaluate();
            System.out.println("Threshold Pair F1: " + p1.getF1());
            System.out.println("Threshold Pair Recall: " + p1.getRecall());
            System.out.println("Threshold Pair Precision: " + p1.getPrecision());
        }

        if (newCluster) {
            if (s != null) {
                ClusterEvaluate eval = new ClusterEvaluate(key, s);
                eval.evaluate();
                PairEvaluate pairEval = new PairEvaluate(key, s);
                pairEval.evaluate();
                /*
                System.out.println("Objective fn of CORRELATIONAL CLUSTERING Training: " +
                        cl.evaluatePartitioningExternal(ilist, allnodes, s, optimalNBest.value()));
                */
                //eval.printVerbose();
                System.out.println("ObjFn Training Cluster F1: " + eval.getF1());
                System.out.println("ObjFn Training Cluster Recall: " + eval.getRecall());
                System.out.println("ObjFn Training Cluster Precision: " + eval.getPrecision());
                System.out.println("Number of clusters " + s.size());
                System.out.println("ObjFn Pair F1: " + pairEval.getF1());
                System.out.println("ObjFn Pair Recall: " + pairEval.getRecall());
                System.out.println("ObjFn Pair Precision: " + pairEval.getPrecision());
            }
        }

        cl.setKeyPartitioning(testKey);
        if (oldCluster) {
            // evaluate on testing set
            Collection testS_old = cl_old.clusterMentions(itestlist, test_nodes, -1, useCorrelational.value());
            //Collection testS_old = cl_old.clusterMentions(itestlist, test_nodes, -1, false);
            //Collection testS_old = cl_old.clusterMentions(itestlist, test_nodes);
            ClusterEvaluate eval_t_old = new ClusterEvaluate(testKey, testS_old);
            eval_t_old.evaluate();
            if (newCluster) {
                System.out.println("Objective fn of OLD CLUSTERING: " +
                        cl.evaluatePartitioningExternal(itestlist, test_nodes, testS_old, optimalNBest.value()));
            }
            System.out.println("Threshold Testing Cluster F1: " + eval_t_old.getF1());
            System.out.println("Threshold Testing Cluster Recall: " + eval_t_old.getRecall());
            System.out.println("Threshold Testing Cluster Precision: " + eval_t_old.getPrecision());
            System.out.println("Number of clusters " + testS_old.size());
            PairEvaluate p_t_old = new PairEvaluate(testKey, testS_old);
            p_t_old.evaluate();
            System.out.println("Threshold Pair F1: " + p_t_old.getF1());
            System.out.println("Threshold Pair Recall: " + p_t_old.getRecall());
            System.out.println("Threshold Pair Precision: " + p_t_old.getPrecision());
        }
        if (newCluster) {
            Collection testS = cl.clusterMentions(itestlist, test_nodes, -1, useCorrelational.value());
            ClusterEvaluate evalTest = new ClusterEvaluate(testKey, testS);
            evalTest.evaluate();
            evalTest.printVerbose();
            PairEvaluate pairEvalTest = new PairEvaluate(testKey, testS);
            pairEvalTest.evaluate();
            System.out.println("TESTING Objective fn of KEY: " +
                    cl.evaluatePartitioningExternal(itestlist, test_nodes, testKey, optimalNBest.value()));
            System.out.println("TESTING Objective fn of CORRELATIONAL CLUSTERING Testing: " +
                    cl.evaluatePartitioningExternal(itestlist, test_nodes, testS, optimalNBest.value()));
            //cl.exportGraph("/tmp/testGraphEdges");
            //eval.printVerbose();
            System.out.println("TESTING ObjFn Cluster F1: " + evalTest.getF1());
            System.out.println("TESTING ObjFn Cluster Recall: " + evalTest.getRecall());
            System.out.println("TESTING ObjFn Cluster Precision: " + evalTest.getPrecision());
            System.out.println("Number of clusters " + testS.size());
            System.out.println("TESTING ObjFn Pair F1: " + pairEvalTest.getF1());
            System.out.println("TESTING ObjFn Pair Recall: " + pairEvalTest.getRecall());
            System.out.println("TESTING ObjFn Pair Precision: " + pairEvalTest.getPrecision());
            if (outputFile.value() != null)
                printClustersToFile(testS, outputFile.value());
        }
        /*
        System.out.println("Final parameters used: ");
        double[] ps = cl.getClassifier().getParameters();
        for (int k = 0; k < Array.getLength(ps); k++) {
            System.out.print(" " + ps[k]);
        }
        */
    }

    protected static void printClustersToFile(Collection citations, String file) {
        try {
            BufferedWriter out = new BufferedWriter(new FileWriter(file));
            printClustersAsReceived(citations, out);
            out.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    protected static void printClustersAsReceived(Collection citations, BufferedWriter out) {
        int refNum = 1;
        int clNum = 1;
        for (Iterator it = citations.iterator(); it.hasNext();) {
            Collection cl = (Collection) it.next();
            for (Iterator i2 = cl.iterator(); i2.hasNext();) {
                Citation c = (Citation) i2.next();
                try {
                    out.write("<NEWREFERENCE>\n");
                    out.write("<meta reference_no=\"" + refNum +
                            "\" cluster_no=\"" + clNum + "\"></meta>");
                    out.write(c.getOrigString());
                } catch (Exception e) {}
                refNum++;
            }
            clNum++;
        }
    }

    protected static void printCollectionReferences(Collection collection) {
        Iterator i1 = collection.iterator();
        while (i1.hasNext()) {
            Iterator i2 = ((Collection) i1.next()).iterator();
            while (i2.hasNext()) {
                Object o = i2.next();
                if (o instanceof Node) {
                    Node n = (Node) o;
                    System.out.println("Node: " + n);
                    System.out.println("Node label: " + n.getLabel());
                    System.out.println("Node index: " + n.getIndex());
                } else {
                    System.out.println("Node: " + o);
                }
            }
        }
    }

    public static double scoreCitations(List citations) {
        double score = 0.0;
        for (Iterator i = citations.iterator(); i.hasNext();) {
            score += (double) ((Citation) i.next()).getScore();
        }
        return score / (double) citations.size();
    }

    /*
        This method will create a collection of collections from the citation nodes
    */
    /*
    protected static Collection makeCollections (ArrayList nodes) {
        HashMap map = new HashMap(); // keep an index of node label values to collections
        Collection collection = new LinkedHashSet();
        for (int i=0; i<nodes.size(); i++) {
            Node n = (Node)nodes.get(i);
            Object o1 = n.getLabel();
            Collection c = (Collection)map.get(o1);
            if (c != null) {
                c.add(n);
                //System.out.println("adding new node " + n + " to existing collection");
            } else {
                Collection newC = new LinkedHashSet();
                System.out.println("Creating new collection");
                newC.add(n);
                map.put(o1, newC);
            }
        }
        Iterator i1 = map.values().iterator();
        while (i1.hasNext()) {
            collection.add((Collection)i1.next());
        }
        return collection;
    }
    */

    // this version assumes nodes are actually citations
    protected static Collection makeCollections(ArrayList nodes) {
        HashMap map = new HashMap(); // keep an index of node label values to collections
        Collection collection = new LinkedHashSet();
        for (int i = 0; i < nodes.size(); i++) {
            Citation n = (Citation) nodes.get(i);
            Object o1 = n.getLabel();
            Collection c = (Collection) map.get(o1);
            if (c != null) {
                c.add(n);
                //System.out.println("adding new node " + n + " to existing collection with " + o1);
            } else {
                Collection newC = new LinkedHashSet();
                //System.out.println("Creating new collection -> id: " + o1);
                newC.add(n);
                map.put(o1, newC);
            }
        }
        Iterator i1 = map.values().iterator();
        while (i1.hasNext()) {
            collection.add((Collection) i1.next());
        }
        return collection;
    }

    protected static InstanceList makePairs(Pipe instancePipe, ArrayList nodes) {
        System.out.println("PairIterator...");
        long timeStart = System.currentTimeMillis();
        InstanceList ilist = new InstanceList(instancePipe);
        ilist.add(new NodePairIterator(nodes));
        System.out.println("====");
        long timeEnd = System.currentTimeMillis();
        double timeElapse = (timeEnd - timeStart) / (1000.000);
        System.out.println("Time elapses " + timeElapse + " seconds for computing pair iterator.");
        return ilist;
    }

    protected static InstanceList makePairs(Pipe instancePipe, ArrayList nodes, List pairs) {
        System.out.println("PairIterator...");
        long timeStart = System.currentTimeMillis();
        InstanceList ilist = new InstanceList(instancePipe);
        ilist.add(new NodePairIterator(nodes, pairs));
        System.out.println("====");
        long timeEnd = System.currentTimeMillis();
        double timeElapse = (timeEnd - timeStart) / (1000.000);
        System.out.println("Time elapses " + timeElapse + " seconds for computing pair iterator.");
        return ilist;
    }

    /*
    protected static List runCanopies(List files) throws Exception {
        double loose = 0.3;
        double tight = 0.7;
        String indexName = "/tmp/index";
        Analyzer analyzer = new SimpleAnalyzer();
        //Analyzer analyzer = new NGramAnalyzer();
        //Analyzer analyzer = new TriGramAnalyzer();
        //QueryConstructor queryConstructor = new QueryConstructorSimple(analyzer);
        QueryConstructor queryConstructor = new QueryConstructorAuthDateTitle(analyzer);
        IndexFiles.indexFiles(files, indexName, analyzer);
        CanopyMaker cm = new CanopyMaker(indexName, queryConstructor);
        cm.setLooseThreshold(loose);
        cm.setTightThreshold(tight);
        cm.makeCanopies();
        Util.allScores(cm);
        return Util.getUniquePairsFromSets(Util.convertIds(cm.getCanopies(), cm.getDocIdToDocno()));
    }
    */
}
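The driver above scores each clustering twice against the key partition: ClusterEvaluate reports cluster-level precision, recall, and F1, while PairEvaluate reports pairwise scores. As a rough illustration of what a pairwise score measures, here is a minimal, self-contained sketch that counts unordered same-cluster pairs; the PairwiseF1 class and its toy data are hypothetical and are not the PairEvaluate implementation used above.

import java.util.*;

// Minimal sketch of pairwise clustering evaluation: every unordered pair of
// items placed in the same predicted cluster is a "predicted link", every
// pair sharing a key (gold) cluster is a "true link", and precision/recall/F1
// are computed over these link sets. Illustrative stand-in only.
public class PairwiseF1 {

    // Build the set of unordered same-cluster pairs for a partition,
    // encoding each pair as "a|b" with the two ids in sorted order.
    static Set<String> linkPairs(Collection<? extends Collection<String>> partition) {
        Set<String> pairs = new HashSet<>();
        for (Collection<String> cluster : partition) {
            List<String> items = new ArrayList<>(cluster);
            for (int i = 0; i < items.size(); i++)
                for (int j = i + 1; j < items.size(); j++) {
                    String a = items.get(i), b = items.get(j);
                    pairs.add(a.compareTo(b) < 0 ? a + "|" + b : b + "|" + a);
                }
        }
        return pairs;
    }

    public static void main(String[] args) {
        // Toy key (gold) partition and predicted partition over five citations.
        List<List<String>> key = Arrays.asList(
                Arrays.asList("c1", "c2", "c3"),
                Arrays.asList("c4", "c5"));
        List<List<String>> predicted = Arrays.asList(
                Arrays.asList("c1", "c2"),
                Arrays.asList("c3", "c4", "c5"));

        Set<String> truePairs = linkPairs(key);
        Set<String> predPairs = linkPairs(predicted);

        // Predicted links that are also true links.
        Set<String> correct = new HashSet<>(predPairs);
        correct.retainAll(truePairs);

        double precision = predPairs.isEmpty() ? 0.0 : (double) correct.size() / predPairs.size();
        double recall = truePairs.isEmpty() ? 0.0 : (double) correct.size() / truePairs.size();
        double f1 = (precision + recall == 0.0) ? 0.0 : 2 * precision * recall / (precision + recall);

        System.out.println("Pair Precision: " + precision);
        System.out.println("Pair Recall: " + recall);
        System.out.println("Pair F1: " + f1);
    }
}

On this toy data the prediction recovers two of the four true links, (c1, c2) and (c4, c5), while proposing two spurious ones, so precision, recall, and F1 all come out to 0.5.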
