
📄 testindexreader.java

📁 lucene 2.2.0
💻 JAVA
          dir.setMaxSizeInBytes(thisDiskFree);
          dir.setRandomIOExceptionRate(rate, diskFree);

          try {
            if (0 == x) {
              int docId = 12;
              for(int i=0;i<13;i++) {
                reader.deleteDocument(docId);
                reader.setNorm(docId, "contents", (float) 2.0);
                docId += 12;
              }
            }
            reader.close();
            success = true;
            if (0 == x) {
              done = true;
            }
          } catch (IOException e) {
            if (debug) {
              System.out.println("  hit IOException: " + e);
            }
            err = e;
            if (1 == x) {
              e.printStackTrace();
              fail(testName + " hit IOException after disk space was freed up");
            }
          }

          // Whether we succeeded or failed, check that all
          // un-referenced files were in fact deleted (ie,
          // we did not create garbage).  Just create a
          // new IndexFileDeleter, have it delete
          // unreferenced files, then verify that in fact
          // no files were deleted:
          String[] startFiles = dir.list();
          SegmentInfos infos = new SegmentInfos();
          infos.read(dir);
          IndexFileDeleter d = new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null);
          String[] endFiles = dir.list();

          Arrays.sort(startFiles);
          Arrays.sort(endFiles);

          //for(int i=0;i<startFiles.length;i++) {
          //  System.out.println("  startFiles: " + i + ": " + startFiles[i]);
          //}

          if (!Arrays.equals(startFiles, endFiles)) {
            String successStr;
            if (success) {
              successStr = "success";
            } else {
              successStr = "IOException";
              err.printStackTrace();
            }
            fail("reader.close() failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes): before delete:\n    " + arrayToString(startFiles) + "\n  after delete:\n    " + arrayToString(endFiles));
          }

          // Finally, verify index is not corrupt, and, if
          // we succeeded, we see all docs changed, and if
          // we failed, we see either all docs or no docs
          // changed (transactional semantics):
          IndexReader newReader = null;
          try {
            newReader = IndexReader.open(dir);
          } catch (IOException e) {
            e.printStackTrace();
            fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
          }
          /*
          int result = newReader.docFreq(searchTerm);
          if (success) {
            if (result != END_COUNT) {
              fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + END_COUNT);
            }
          } else {
            // On hitting exception we still may have added
            // all docs:
            if (result != START_COUNT && result != END_COUNT) {
              err.printStackTrace();
              fail(testName + ": method did throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " or " + END_COUNT);
            }
          }
          */

          IndexSearcher searcher = new IndexSearcher(newReader);
          Hits hits = null;
          try {
            hits = searcher.search(new TermQuery(searchTerm));
          } catch (IOException e) {
            e.printStackTrace();
            fail(testName + ": exception when searching: " + e);
          }
          int result2 = hits.length();
          if (success) {
            if (result2 != END_COUNT) {
              fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
            }
          } else {
            // On hitting exception we still may have added
            // all docs:
            if (result2 != START_COUNT && result2 != END_COUNT) {
              err.printStackTrace();
              fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT);
            }
          }

          searcher.close();
          newReader.close();

          if (result2 == END_COUNT) {
            break;
          }
        }

        dir.close();

        // Try again with 10 more bytes of free space:
        diskFree += 10;
      }

      startDir.close();
    }

    public void testDocsOutOfOrderJIRA140() throws IOException {

      Directory dir = new MockRAMDirectory();

      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);

      for(int i=0;i<11;i++) {
        addDoc(writer, "aaa");
      }
      writer.close();

      IndexReader reader = IndexReader.open(dir);

      // Try to delete an invalid docId, yet, within range
      // of the final bits of the BitVector:

      boolean gotException = false;
      try {
        reader.deleteDocument(11);
      } catch (ArrayIndexOutOfBoundsException e) {
        gotException = true;
      }
      reader.close();

      writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);

      // We must add more docs to get a new segment written
      for(int i=0;i<11;i++) {
        addDoc(writer, "aaa");
      }

      // Without the fix for LUCENE-140 this call will
      // [incorrectly] hit a "docs out of order"
      // IllegalStateException because above out-of-bounds
      // deleteDocument corrupted the index:
      writer.optimize();

      if (!gotException) {
        fail("delete of out-of-bounds doc number failed to hit exception");
      }

      dir.close();
    }

    public void testExceptionReleaseWriteLockJIRA768() throws IOException {

      Directory dir = new MockRAMDirectory();

      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
      addDoc(writer, "aaa");
      writer.close();

      IndexReader reader = IndexReader.open(dir);
      try {
        reader.deleteDocument(1);
        fail("did not hit exception when deleting an invalid doc number");
      } catch (ArrayIndexOutOfBoundsException e) {
        // expected
      }
      reader.close();
      if (IndexReader.isLocked(dir)) {
        fail("write lock is still held after close");
      }

      reader = IndexReader.open(dir);
      try {
        reader.setNorm(1, "content", (float) 2.0);
        fail("did not hit exception when calling setNorm on an invalid doc number");
      } catch (ArrayIndexOutOfBoundsException e) {
        // expected
      }
      reader.close();
      if (IndexReader.isLocked(dir)) {
        fail("write lock is still held after close");
      }

      dir.close();
    }

    private String arrayToString(String[] l) {
      String s = "";
      for(int i=0;i<l.length;i++) {
        if (i > 0) {
          s += "\n    ";
        }
        s += l[i];
      }
      return s;
    }

    public void testOpenReaderAfterDelete() throws IOException {
      File dirFile = new File(System.getProperty("tempDir"),
                              "deletetest");
      Directory dir = FSDirectory.getDirectory(dirFile);
      try {
        IndexReader reader = IndexReader.open(dir);
        fail("expected FileNotFoundException");
      } catch (FileNotFoundException e) {
        // expected
      }

      dirFile.delete();

      // Make sure we still get a CorruptIndexException (not NPE):
      try {
        IndexReader reader = IndexReader.open(dir);
        fail("expected FileNotFoundException");
      } catch (FileNotFoundException e) {
        // expected
      }
    }

    private void deleteReaderReaderConflict(boolean optimize) throws IOException
    {
        Directory dir = getDirectory();

        Term searchTerm1 = new Term("content", "aaa");
        Term searchTerm2 = new Term("content", "bbb");
        Term searchTerm3 = new Term("content", "ccc");

        //  add 100 documents with term : aaa
        //  add 100 documents with term : bbb
        //  add 100 documents with term : ccc
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        for (int i = 0; i < 100; i++)
        {
            addDoc(writer, searchTerm1.text());
            addDoc(writer, searchTerm2.text());
            addDoc(writer, searchTerm3.text());
        }
        if(optimize)
          writer.optimize();
        writer.close();

        // OPEN TWO READERS
        // Both readers get segment info as exists at this time
        IndexReader reader1 = IndexReader.open(dir);
        assertEquals("first opened", 100, reader1.docFreq(searchTerm1));
        assertEquals("first opened", 100, reader1.docFreq(searchTerm2));
        assertEquals("first opened", 100, reader1.docFreq(searchTerm3));
        assertTermDocsCount("first opened", reader1, searchTerm1, 100);
        assertTermDocsCount("first opened", reader1, searchTerm2, 100);
        assertTermDocsCount("first opened", reader1, searchTerm3, 100);

        IndexReader reader2 = IndexReader.open(dir);
        assertEquals("first opened", 100, reader2.docFreq(searchTerm1));
        assertEquals("first opened", 100, reader2.docFreq(searchTerm2));
        assertEquals("first opened", 100, reader2.docFreq(searchTerm3));
        assertTermDocsCount("first opened", reader2, searchTerm1, 100);
        assertTermDocsCount("first opened", reader2, searchTerm2, 100);
        assertTermDocsCount("first opened", reader2, searchTerm3, 100);

        // DELETE DOCS FROM READER 2 and CLOSE IT
        // delete documents containing term: aaa
        // when the reader is closed, the segment info is updated and
        // the first reader is now stale
        reader2.deleteDocuments(searchTerm1);
        assertEquals("after delete 1", 100, reader2.docFreq(searchTerm1));
        assertEquals("after delete 1", 100, reader2.docFreq(searchTerm2));
        assertEquals("after delete 1", 100, reader2.docFreq(searchTerm3));
        assertTermDocsCount("after delete 1", reader2, searchTerm1, 0);
        assertTermDocsCount("after delete 1", reader2, searchTerm2, 100);
        assertTermDocsCount("after delete 1", reader2, searchTerm3, 100);
        reader2.close();

        // Make sure reader 1 is unchanged since it was open earlier
        assertEquals("after delete 1", 100, reader1.docFreq(searchTerm1));
        assertEquals("after delete 1", 100, reader1.docFreq(searchTerm2));
        assertEquals("after delete 1", 100, reader1.docFreq(searchTerm3));
        assertTermDocsCount("after delete 1", reader1, searchTerm1, 100);
        assertTermDocsCount("after delete 1", reader1, searchTerm2, 100);
        assertTermDocsCount("after delete 1", reader1, searchTerm3, 100);

        // ATTEMPT TO DELETE FROM STALE READER
        // delete documents containing term: bbb
        try {
            reader1.deleteDocuments(searchTerm2);
            fail("Delete allowed from a stale index reader");
        } catch (IOException e) {
            /* success */
        }

        // RECREATE READER AND TRY AGAIN
        reader1.close();
        reader1 = IndexReader.open(dir);
        assertEquals("reopened", 100, reader1.docFreq(searchTerm1));
        assertEquals("reopened", 100, reader1.docFreq(searchTerm2));
        assertEquals("reopened", 100, reader1.docFreq(searchTerm3));
        assertTermDocsCount("reopened", reader1, searchTerm1, 0);
        assertTermDocsCount("reopened", reader1, searchTerm2, 100);
        assertTermDocsCount("reopened", reader1, searchTerm3, 100);

        reader1.deleteDocuments(searchTerm2);
        assertEquals("deleted 2", 100, reader1.docFreq(searchTerm1));
        assertEquals("deleted 2", 100, reader1.docFreq(searchTerm2));
        assertEquals("deleted 2", 100, reader1.docFreq(searchTerm3));
        assertTermDocsCount("deleted 2", reader1, searchTerm1, 0);
        assertTermDocsCount("deleted 2", reader1, searchTerm2, 0);
        assertTermDocsCount("deleted 2", reader1, searchTerm3, 100);
        reader1.close();

        // Open another reader to confirm that everything is deleted
        reader2 = IndexReader.open(dir);
        assertEquals("reopened 2", 100, reader2.docFreq(searchTerm1));
        assertEquals("reopened 2", 100, reader2.docFreq(searchTerm2));
        assertEquals("reopened 2", 100, reader2.docFreq(searchTerm3));
        assertTermDocsCount("reopened 2", reader2, searchTerm1, 0);
        assertTermDocsCount("reopened 2", reader2, searchTerm2, 0);
        assertTermDocsCount("reopened 2", reader2, searchTerm3, 100);
        reader2.close();

        dir.close();
    }

    private void addDocumentWithFields(IndexWriter writer) throws IOException
    {
        Document doc = new Document();
        doc.add(new Field("keyword", "test1", Field.Store.YES, Field.Index.UN_TOKENIZED));
        doc.add(new Field("text", "test1", Field.Store.YES, Field.Index.TOKENIZED));
        doc.add(new Field("unindexed", "test1", Field.Store.YES, Field.Index.NO));
        doc.add(new Field("unstored", "test1", Field.Store.NO, Field.Index.TOKENIZED));
        writer.addDocument(doc);
    }

    private void addDocumentWithDifferentFields(IndexWriter writer) throws IOException
    {
        Document doc = new Document();
        doc.add(new Field("keyword2", "test1", Field.Store.YES, Field.Index.UN_TOKENIZED));
        doc.add(new Field("text2", "test1", Field.Store.YES, Field.Index.TOKENIZED));
        doc.add(new Field("unindexed2", "test1", Field.Store.YES, Field.Index.NO));
        doc.add(new Field("unstored2", "test1", Field.Store.NO, Field.Index.TOKENIZED));
        writer.addDocument(doc);
    }

    private void addDocumentWithTermVectorFields(IndexWriter writer) throws IOException
    {
        Document doc = new Document();
        doc.add(new Field("tvnot", "tvnot", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
        doc.add(new Field("termvector", "termvector", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.YES));
        doc.add(new Field("tvoffset", "tvoffset", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_OFFSETS));
        doc.add(new Field("tvposition", "tvposition", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS));
        doc.add(new Field("tvpositionoffset", "tvpositionoffset", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));

        writer.addDocument(doc);
    }

    private void addDoc(IndexWriter writer, String value) throws IOException
    {
        Document doc = new Document();
        doc.add(new Field("content", value, Field.Store.NO, Field.Index.TOKENIZED));
        writer.addDocument(doc);
    }

    private void rmDir(File dir) {
        File[] files = dir.listFiles();
        for (int i = 0; i < files.length; i++) {
            files[i].delete();
        }
        dir.delete();
    }
}
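
The following is a minimal, standalone sketch (not part of the original test class) of the behaviour that testExceptionReleaseWriteLockJIRA768 above exercises: in Lucene 2.2, deleting a document number that is out of range throws ArrayIndexOutOfBoundsException, and closing the reader must still release the index write lock. The class name WriteLockReleaseSketch is illustrative only; the Lucene calls used (IndexWriter, IndexReader.open, IndexReader.isLocked, RAMDirectory) are the 2.2-era APIs that appear in the listing above.

// Minimal sketch, assuming Lucene 2.2.x on the classpath. The class name
// WriteLockReleaseSketch is illustrative, not part of the test suite.
import java.io.IOException;

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;

public class WriteLockReleaseSketch {
    public static void main(String[] args) throws IOException {
        RAMDirectory dir = new RAMDirectory();

        // Build an index containing a single document (docId 0).
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        Document doc = new Document();
        doc.add(new Field("content", "aaa", Field.Store.NO, Field.Index.TOKENIZED));
        writer.addDocument(doc);
        writer.close();

        IndexReader reader = IndexReader.open(dir);
        try {
            // docId 1 does not exist; Lucene 2.2 throws
            // ArrayIndexOutOfBoundsException for this out-of-range delete.
            reader.deleteDocument(1);
        } catch (ArrayIndexOutOfBoundsException e) {
            System.out.println("expected out-of-range delete failure: " + e);
        } finally {
            // close() must release the write lock the delete attempt acquired.
            reader.close();
        }

        // The test above asserts this is false after close().
        System.out.println("write lock still held? " + IndexReader.isLocked(dir));
        dir.close();
    }
}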
