📄 TestIndexReader.java
    try {
      reader.deleteDocument(11);
    } catch (ArrayIndexOutOfBoundsException e) {
      gotException = true;
    }
    reader.close();

    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);

    // We must add more docs to get a new segment written
    for(int i=0;i<11;i++) {
      addDoc(writer, "aaa");
    }

    // Without the fix for LUCENE-140 this call will
    // [incorrectly] hit a "docs out of order"
    // IllegalStateException because above out-of-bounds
    // deleteDocument corrupted the index:
    writer.optimize();

    if (!gotException) {
      fail("delete of out-of-bounds doc number failed to hit exception");
    }
    dir.close();
  }

  public void testExceptionReleaseWriteLockJIRA768() throws IOException {

    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
    addDoc(writer, "aaa");
    writer.close();

    IndexReader reader = IndexReader.open(dir);
    try {
      reader.deleteDocument(1);
      fail("did not hit exception when deleting an invalid doc number");
    } catch (ArrayIndexOutOfBoundsException e) {
      // expected
    }
    reader.close();
    if (IndexReader.isLocked(dir)) {
      fail("write lock is still held after close");
    }

    reader = IndexReader.open(dir);
    try {
      reader.setNorm(1, "content", (float) 2.0);
      fail("did not hit exception when calling setNorm on an invalid doc number");
    } catch (ArrayIndexOutOfBoundsException e) {
      // expected
    }
    reader.close();
    if (IndexReader.isLocked(dir)) {
      fail("write lock is still held after close");
    }

    dir.close();
  }

  private String arrayToString(String[] l) {
    String s = "";
    for(int i=0;i<l.length;i++) {
      if (i > 0) {
        s += "\n ";
      }
      s += l[i];
    }
    return s;
  }

  public void testOpenReaderAfterDelete() throws IOException {
    File dirFile = new File(System.getProperty("tempDir"), "deletetest");
    Directory dir = FSDirectory.getDirectory(dirFile);
    try {
      IndexReader reader = IndexReader.open(dir);
      fail("expected FileNotFoundException");
    } catch (FileNotFoundException e) {
      // expected
    }

    dirFile.delete();

    // Make sure we still get a CorruptIndexException (not NPE):
    try {
      IndexReader reader = IndexReader.open(dir);
      fail("expected FileNotFoundException");
    } catch (FileNotFoundException e) {
      // expected
    }
  }

  private void deleteReaderReaderConflict(boolean optimize) throws IOException {
    Directory dir = getDirectory();

    Term searchTerm1 = new Term("content", "aaa");
    Term searchTerm2 = new Term("content", "bbb");
    Term searchTerm3 = new Term("content", "ccc");

    // add 100 documents with term : aaa
    // add 100 documents with term : bbb
    // add 100 documents with term : ccc
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
    for (int i = 0; i < 100; i++) {
      addDoc(writer, searchTerm1.text());
      addDoc(writer, searchTerm2.text());
      addDoc(writer, searchTerm3.text());
    }
    if (optimize)
      writer.optimize();
    writer.close();

    // OPEN TWO READERS
    // Both readers get segment info as exists at this time
    IndexReader reader1 = IndexReader.open(dir);
    assertEquals("first opened", 100, reader1.docFreq(searchTerm1));
    assertEquals("first opened", 100, reader1.docFreq(searchTerm2));
    assertEquals("first opened", 100, reader1.docFreq(searchTerm3));
    assertTermDocsCount("first opened", reader1, searchTerm1, 100);
    assertTermDocsCount("first opened", reader1, searchTerm2, 100);
    assertTermDocsCount("first opened", reader1, searchTerm3, 100);

    IndexReader reader2 = IndexReader.open(dir);
    assertEquals("first opened", 100, reader2.docFreq(searchTerm1));
    assertEquals("first opened", 100, reader2.docFreq(searchTerm2));
    assertEquals("first opened", 100, reader2.docFreq(searchTerm3));
    assertTermDocsCount("first opened", reader2, searchTerm1, 100);
    assertTermDocsCount("first opened", reader2, searchTerm2, 100);
    assertTermDocsCount("first opened", reader2, searchTerm3, 100);

    // DELETE DOCS FROM READER 2 and CLOSE IT
    // delete documents containing term: aaa
    // when the reader is closed, the segment info is updated and
    // the first reader is now stale
    reader2.deleteDocuments(searchTerm1);
    assertEquals("after delete 1", 100, reader2.docFreq(searchTerm1));
    assertEquals("after delete 1", 100, reader2.docFreq(searchTerm2));
    assertEquals("after delete 1", 100, reader2.docFreq(searchTerm3));
    assertTermDocsCount("after delete 1", reader2, searchTerm1, 0);
    assertTermDocsCount("after delete 1", reader2, searchTerm2, 100);
    assertTermDocsCount("after delete 1", reader2, searchTerm3, 100);
    reader2.close();

    // Make sure reader 1 is unchanged since it was open earlier
    assertEquals("after delete 1", 100, reader1.docFreq(searchTerm1));
    assertEquals("after delete 1", 100, reader1.docFreq(searchTerm2));
    assertEquals("after delete 1", 100, reader1.docFreq(searchTerm3));
    assertTermDocsCount("after delete 1", reader1, searchTerm1, 100);
    assertTermDocsCount("after delete 1", reader1, searchTerm2, 100);
    assertTermDocsCount("after delete 1", reader1, searchTerm3, 100);

    // ATTEMPT TO DELETE FROM STALE READER
    // delete documents containing term: bbb
    try {
      reader1.deleteDocuments(searchTerm2);
      fail("Delete allowed from a stale index reader");
    } catch (IOException e) {
      /* success */
    }

    // RECREATE READER AND TRY AGAIN
    reader1.close();
    reader1 = IndexReader.open(dir);
    assertEquals("reopened", 100, reader1.docFreq(searchTerm1));
    assertEquals("reopened", 100, reader1.docFreq(searchTerm2));
    assertEquals("reopened", 100, reader1.docFreq(searchTerm3));
    assertTermDocsCount("reopened", reader1, searchTerm1, 0);
    assertTermDocsCount("reopened", reader1, searchTerm2, 100);
    assertTermDocsCount("reopened", reader1, searchTerm3, 100);

    reader1.deleteDocuments(searchTerm2);
    assertEquals("deleted 2", 100, reader1.docFreq(searchTerm1));
    assertEquals("deleted 2", 100, reader1.docFreq(searchTerm2));
    assertEquals("deleted 2", 100, reader1.docFreq(searchTerm3));
    assertTermDocsCount("deleted 2", reader1, searchTerm1, 0);
    assertTermDocsCount("deleted 2", reader1, searchTerm2, 0);
    assertTermDocsCount("deleted 2", reader1, searchTerm3, 100);
    reader1.close();

    // Open another reader to confirm that everything is deleted
    reader2 = IndexReader.open(dir);
    assertEquals("reopened 2", 100, reader2.docFreq(searchTerm1));
    assertEquals("reopened 2", 100, reader2.docFreq(searchTerm2));
    assertEquals("reopened 2", 100, reader2.docFreq(searchTerm3));
    assertTermDocsCount("reopened 2", reader2, searchTerm1, 0);
    assertTermDocsCount("reopened 2", reader2, searchTerm2, 0);
    assertTermDocsCount("reopened 2", reader2, searchTerm3, 100);
    reader2.close();

    dir.close();
  }

  private void addDocumentWithFields(IndexWriter writer) throws IOException {
    Document doc = new Document();
    doc.add(new Field("keyword", "test1", Field.Store.YES, Field.Index.UN_TOKENIZED));
    doc.add(new Field("text", "test1", Field.Store.YES, Field.Index.TOKENIZED));
    doc.add(new Field("unindexed", "test1", Field.Store.YES, Field.Index.NO));
    doc.add(new Field("unstored", "test1", Field.Store.NO, Field.Index.TOKENIZED));
    writer.addDocument(doc);
  }

  private void addDocumentWithDifferentFields(IndexWriter writer) throws IOException {
    Document doc = new Document();
    doc.add(new Field("keyword2", "test1", Field.Store.YES, Field.Index.UN_TOKENIZED));
    doc.add(new Field("text2", "test1", Field.Store.YES, Field.Index.TOKENIZED));
    doc.add(new Field("unindexed2", "test1", Field.Store.YES, Field.Index.NO));
    doc.add(new Field("unstored2", "test1", Field.Store.NO, Field.Index.TOKENIZED));
    writer.addDocument(doc);
  }

  private void addDocumentWithTermVectorFields(IndexWriter writer) throws IOException {
    Document doc = new Document();
    doc.add(new Field("tvnot", "tvnot", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
    doc.add(new Field("termvector", "termvector", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.YES));
    doc.add(new Field("tvoffset", "tvoffset", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_OFFSETS));
    doc.add(new Field("tvposition", "tvposition", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS));
    doc.add(new Field("tvpositionoffset", "tvpositionoffset", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
    writer.addDocument(doc);
  }

  private void addDoc(IndexWriter writer, String value) throws IOException {
    Document doc = new Document();
    doc.add(new Field("content", value, Field.Store.NO, Field.Index.TOKENIZED));
    writer.addDocument(doc);
  }

  private void rmDir(File dir) {
    File[] files = dir.listFiles();
    for (int i = 0; i < files.length; i++) {
      files[i].delete();
    }
    dir.delete();
  }

  public static void assertIndexEquals(IndexReader index1, IndexReader index2) throws IOException {
    assertEquals("IndexReaders have different values for numDocs.", index1.numDocs(), index2.numDocs());
    assertEquals("IndexReaders have different values for maxDoc.", index1.maxDoc(), index2.maxDoc());
    assertEquals("Only one IndexReader has deletions.", index1.hasDeletions(), index2.hasDeletions());
    assertEquals("Only one index is optimized.", index1.isOptimized(), index2.isOptimized());

    // check field names
    Collection fields1 = index1.getFieldNames(FieldOption.ALL);
    Collection fields2 = index2.getFieldNames(FieldOption.ALL);
    assertEquals("IndexReaders have different numbers of fields.", fields1.size(), fields2.size());
    Iterator it1 = fields1.iterator();
    Iterator it2 = fields2.iterator();
    while (it1.hasNext()) {
      assertEquals("Different field names.", (String) it1.next(), (String) it2.next());
    }

    // check norms
    it1 = fields1.iterator();
    while (it1.hasNext()) {
      String curField = (String) it1.next();
      byte[] norms1 = index1.norms(curField);
      byte[] norms2 = index2.norms(curField);
      assertEquals(norms1.length, norms2.length);
      for (int i = 0; i < norms1.length; i++) {
        assertEquals("Norm different for doc " + i + " and field '" + curField + "'.", norms1[i], norms2[i]);
      }
    }

    // check deletions
    for (int i = 0; i < index1.maxDoc(); i++) {
      assertEquals("Doc " + i + " only deleted in one index.", index1.isDeleted(i), index2.isDeleted(i));
    }

    // check stored fields
    for (int i = 0; i < index1.maxDoc(); i++) {
      if (!index1.isDeleted(i)) {
        Document doc1 = index1.document(i);
        Document doc2 = index2.document(i);
        fields1 = doc1.getFields();
        fields2 = doc2.getFields();
        assertEquals("Different numbers of fields for doc " + i + ".", fields1.size(), fields2.size());
        it1 = fields1.iterator();
        it2 = fields2.iterator();
        while (it1.hasNext()) {
          Field curField1 = (Field) it1.next();
          Field curField2 = (Field) it2.next();
          assertEquals("Different field names for doc " + i + ".", curField1.name(), curField2.name());
          assertEquals("Different field values for doc " + i + ".", curField1.stringValue(), curField2.stringValue());
        }
      }
    }

    // check dictionary and posting lists
    TermEnum enum1 = index1.terms();
    TermEnum enum2 = index2.terms();
    TermPositions tp1 = index1.termPositions();
    TermPositions tp2 = index2.termPositions();
    while (enum1.next()) {
      assertTrue(enum2.next());
      assertEquals("Different term in dictionary.", enum1.term(), enum2.term());
      tp1.seek(enum1.term());
      tp2.seek(enum1.term());
      while (tp1.next()) {
        assertTrue(tp2.next());
        assertEquals("Different doc id in postinglist of term " + enum1.term() + ".", tp1.doc(), tp2.doc());
        assertEquals("Different term frequency in postinglist of term " + enum1.term() + ".", tp1.freq(), tp2.freq());
        for (int i = 0; i < tp1.freq(); i++) {
          assertEquals("Different positions in postinglist of term " + enum1.term() + ".", tp1.nextPosition(), tp2.nextPosition());
        }
      }
    }
  }
}
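Note: deleteReaderReaderConflict calls two helpers that are defined elsewhere in TestIndexReader and are not part of this excerpt: getDirectory(), which presumably returns the Directory the test runs against (for example a RAMDirectory), and assertTermDocsCount(msg, reader, term, expected). The following is only a minimal sketch of what assertTermDocsCount likely does, assuming it walks the TermDocs enumeration for the given term and compares the number of matching documents against the expected count; it is not the actual definition from the file.

  // Hypothetical sketch of the assertTermDocsCount helper used above.
  private void assertTermDocsCount(String msg, IndexReader reader, Term term, int expected)
      throws IOException {
    TermDocs tdocs = null;
    try {
      // Enumerate the documents that contain the given term.
      tdocs = reader.termDocs(term);
      assertNotNull(msg + ", null TermDocs", tdocs);
      int count = 0;
      while (tdocs.next()) {
        count++;
      }
      // Fail with the caller-supplied message if the count differs.
      assertEquals(msg + ", count mismatch", expected, count);
    } finally {
      if (tdocs != null) {
        tdocs.close();
      }
    }
  }

Because deleted documents are skipped by TermDocs but still counted by docFreq until the index is merged, the tests above can assert docFreq == 100 while the TermDocs count for the same term is 0.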