📄 TestIndexReader.java

📁 Lucene 2.2.0
💻 JAVA
📖 Page 1 of 3
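The listing on this page begins mid-class, so the package declaration, imports, and class header are not shown. As a rough, assumed reconstruction (the package name, import list, and TestCase base class are inferred from the calls in the excerpt, not copied from the original file), the surrounding skeleton would look something like the sketch below, with the test methods in the listing sitting inside the class body:

    package org.apache.lucene.index;                       // assumed package

    import java.io.File;
    import java.io.FileNotFoundException;
    import java.io.IOException;

    import junit.framework.TestCase;                       // assumed JUnit 3-style base class

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.lucene.store.MockRAMDirectory;       // test-only directory, assumed location
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util._TestUtil;               // test utility, assumed location

    public class TestIndexReader extends TestCase {
        // Helper methods such as addDoc(), addDocumentWithFields(),
        // assertTermDocsCount() and rmDir() are defined on other pages of
        // this listing; the methods shown below belong inside this class.
    }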
        // we should not be holding the write lock now:
        assertTrue("not locked", !IndexReader.isLocked(dir));
        reader2.close();
        dir.close();
        rmDir(indexDir);
    }

    // Make sure you can set norms & commit, and there are
    // no extra norms files left:
    public void testWritingNormsNoReader() throws IOException
    {
        Directory dir = new MockRAMDirectory();
        IndexWriter writer = null;
        IndexReader reader = null;
        Term searchTerm = new Term("content", "aaa");

        //  add 1 documents with term : aaa
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        writer.setUseCompoundFile(false);
        addDoc(writer, searchTerm.text());
        writer.close();

        //  now open reader & set norm for doc 0 (writes to
        //  _0_1.s0)
        reader = IndexReader.open(dir);
        reader.setNorm(0, "content", (float) 2.0);
        reader.close();

        //  now open reader again & set norm for doc 0 (writes to _0_2.s0)
        reader = IndexReader.open(dir);
        reader.setNorm(0, "content", (float) 2.0);
        reader.close();
        assertFalse("failed to remove first generation norms file on writing second generation",
                    dir.fileExists("_0_1.s0"));

        dir.close();
    }

    public void testDeleteReaderWriterConflictUnoptimized() throws IOException {
        deleteReaderWriterConflict(false);
    }

    public void testOpenEmptyDirectory() throws IOException {
        String dirName = "test.empty";
        File fileDirName = new File(dirName);
        if (!fileDirName.exists()) {
            fileDirName.mkdir();
        }
        try {
            IndexReader reader = IndexReader.open(fileDirName);
            fail("opening IndexReader on empty directory failed to produce FileNotFoundException");
        } catch (FileNotFoundException e) {
            // GOOD
        }
        rmDir(fileDirName);
    }

    public void testDeleteReaderWriterConflictOptimized() throws IOException {
        deleteReaderWriterConflict(true);
    }

    private void deleteReaderWriterConflict(boolean optimize) throws IOException
    {
        //Directory dir = new RAMDirectory();
        Directory dir = getDirectory();

        Term searchTerm = new Term("content", "aaa");
        Term searchTerm2 = new Term("content", "bbb");

        //  add 100 documents with term : aaa
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        for (int i = 0; i < 100; i++)
        {
            addDoc(writer, searchTerm.text());
        }
        writer.close();

        // OPEN READER AT THIS POINT - this should fix the view of the
        // index at the point of having 100 "aaa" documents and 0 "bbb"
        IndexReader reader = IndexReader.open(dir);
        assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
        assertEquals("first docFreq", 0, reader.docFreq(searchTerm2));
        assertTermDocsCount("first reader", reader, searchTerm, 100);
        assertTermDocsCount("first reader", reader, searchTerm2, 0);

        // add 100 documents with term : bbb
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
        for (int i = 0; i < 100; i++)
        {
            addDoc(writer, searchTerm2.text());
        }

        // REQUEST OPTIMIZATION
        // This causes a new segment to become current for all subsequent
        // searchers. Because of this, deletions made via a previously open
        // reader, which would be applied to that reader's segment, are lost
        // for subsequent searchers/readers
        if (optimize)
            writer.optimize();
        writer.close();

        // The reader should not see the new data
        assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
        assertEquals("first docFreq", 0, reader.docFreq(searchTerm2));
        assertTermDocsCount("first reader", reader, searchTerm, 100);
        assertTermDocsCount("first reader", reader, searchTerm2, 0);

        // DELETE DOCUMENTS CONTAINING TERM: aaa
        // NOTE: the reader was created when only "aaa" documents were in
        int deleted = 0;
        try {
            deleted = reader.deleteDocuments(searchTerm);
            fail("Delete allowed on an index reader with stale segment information");
        } catch (StaleReaderException e) {
            /* success */
        }

        // Re-open index reader and try again. This time it should see
        // the new data.
        reader.close();
        reader = IndexReader.open(dir);
        assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
        assertEquals("first docFreq", 100, reader.docFreq(searchTerm2));
        assertTermDocsCount("first reader", reader, searchTerm, 100);
        assertTermDocsCount("first reader", reader, searchTerm2, 100);

        deleted = reader.deleteDocuments(searchTerm);
        assertEquals("deleted count", 100, deleted);
        assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
        assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm2));
        assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
        assertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
        reader.close();

        // CREATE A NEW READER and re-test
        reader = IndexReader.open(dir);
        assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
        assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm2));
        assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
        assertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
        reader.close();
    }

    private Directory getDirectory() throws IOException {
        return FSDirectory.getDirectory(new File(System.getProperty("tempDir"), "testIndex"));
    }

    public void testFilesOpenClose() throws IOException
    {
        // Create initial data set
        File dirFile = new File(System.getProperty("tempDir"), "testIndex");
        Directory dir = getDirectory();
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDoc(writer, "test");
        writer.close();
        dir.close();

        // Try to erase the data - this ensures that the writer closed all files
        _TestUtil.rmDir(dirFile);
        dir = getDirectory();

        // Now create the data set again, just as before
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDoc(writer, "test");
        writer.close();
        dir.close();

        // Now open existing directory and test that reader closes all files
        dir = getDirectory();
        IndexReader reader1 = IndexReader.open(dir);
        reader1.close();
        dir.close();

        // The following will fail if reader did not close
        // all files
        _TestUtil.rmDir(dirFile);
    }

    public void testLastModified() throws IOException {
        assertFalse(IndexReader.indexExists("there_is_no_such_index"));
        Directory dir = new MockRAMDirectory();
        assertFalse(IndexReader.indexExists(dir));
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDocumentWithFields(writer);
        assertTrue(IndexReader.isLocked(dir));          // writer open, so dir is locked
        writer.close();
        assertTrue(IndexReader.indexExists(dir));
        IndexReader reader = IndexReader.open(dir);
        assertFalse(IndexReader.isLocked(dir));         // reader only, no lock
        long version = IndexReader.lastModified(dir);
        reader.close();
        // modify index and check version has been
        // incremented:
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDocumentWithFields(writer);
        writer.close();
        reader = IndexReader.open(dir);
        assertTrue("old lastModified is " + version + "; new lastModified is " + IndexReader.lastModified(dir),
                   version <= IndexReader.lastModified(dir));
        reader.close();
        dir.close();
    }

    public void testVersion() throws IOException {
        assertFalse(IndexReader.indexExists("there_is_no_such_index"));
        Directory dir = new MockRAMDirectory();
        assertFalse(IndexReader.indexExists(dir));
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDocumentWithFields(writer);
        assertTrue(IndexReader.isLocked(dir));          // writer open, so dir is locked
        writer.close();
        assertTrue(IndexReader.indexExists(dir));
        IndexReader reader = IndexReader.open(dir);
        assertFalse(IndexReader.isLocked(dir));         // reader only, no lock
        long version = IndexReader.getCurrentVersion(dir);
        reader.close();
        // modify index and check version has been
        // incremented:
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDocumentWithFields(writer);
        writer.close();
        reader = IndexReader.open(dir);
        assertTrue("old version is " + version + "; new version is " + IndexReader.getCurrentVersion(dir),
                   version < IndexReader.getCurrentVersion(dir));
        reader.close();
        dir.close();
    }

    public void testLock() throws IOException {
        Directory dir = new MockRAMDirectory();
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDocumentWithFields(writer);
        writer.close();
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
        IndexReader reader = IndexReader.open(dir);
        try {
            reader.deleteDocument(0);
            fail("expected lock");
        } catch (IOException e) {
            // expected exception
        }
        IndexReader.unlock(dir);                        // this should not be done in the real world!
        reader.deleteDocument(0);
        reader.close();
        writer.close();
        dir.close();
    }

    public void testUndeleteAll() throws IOException {
        Directory dir = new MockRAMDirectory();
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDocumentWithFields(writer);
        addDocumentWithFields(writer);
        writer.close();
        IndexReader reader = IndexReader.open(dir);
        reader.deleteDocument(0);
        reader.deleteDocument(1);
        reader.undeleteAll();
        reader.close();
        reader = IndexReader.open(dir);
        assertEquals(2, reader.numDocs());              // nothing has really been deleted thanks to undeleteAll()
        reader.close();
        dir.close();
    }

    public void testUndeleteAllAfterClose() throws IOException {
        Directory dir = new MockRAMDirectory();
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDocumentWithFields(writer);
        addDocumentWithFields(writer);
        writer.close();
        IndexReader reader = IndexReader.open(dir);
        reader.deleteDocument(0);
        reader.deleteDocument(1);
        reader.close();
        reader = IndexReader.open(dir);
        reader.undeleteAll();
        assertEquals(2, reader.numDocs());              // nothing has really been deleted thanks to undeleteAll()
        reader.close();
        dir.close();
    }

    public void testUndeleteAllAfterCloseThenReopen() throws IOException {
        Directory dir = new MockRAMDirectory();
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDocumentWithFields(writer);
        addDocumentWithFields(writer);
        writer.close();
        IndexReader reader = IndexReader.open(dir);
        reader.deleteDocument(0);
        reader.deleteDocument(1);
        reader.close();
        reader = IndexReader.open(dir);
        reader.undeleteAll();
        reader.close();
        reader = IndexReader.open(dir);
        assertEquals(2, reader.numDocs());              // nothing has really been deleted thanks to undeleteAll()
        reader.close();
        dir.close();
    }

    public void testDeleteReaderReaderConflictUnoptimized() throws IOException {
        deleteReaderReaderConflict(false);
    }

    public void testDeleteReaderReaderConflictOptimized() throws IOException {
        deleteReaderReaderConflict(true);
    }

    /**
     * Make sure if reader tries to commit but hits disk
     * full that reader remains consistent and usable.
     */
    public void testDiskFull() throws IOException {

        boolean debug = false;
        Term searchTerm = new Term("content", "aaa");
        int START_COUNT = 157;
        int END_COUNT = 144;

        // First build up a starting index:
        RAMDirectory startDir = new MockRAMDirectory();
        IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true);
        for (int i = 0; i < 157; i++) {
            Document d = new Document();
            d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
            d.add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.TOKENIZED));
            writer.addDocument(d);
        }
        writer.close();

        long diskUsage = startDir.sizeInBytes();
        long diskFree = diskUsage + 100;

        IOException err = null;

        boolean done = false;

        // Iterate w/ ever increasing free disk space:
        while (!done) {
            MockRAMDirectory dir = new MockRAMDirectory(startDir);
            IndexReader reader = IndexReader.open(dir);

            // For each disk size, first try to commit against
            // dir that will hit random IOExceptions & disk
            // full; after, give it infinite disk space & turn
            // off random IOExceptions & retry w/ same reader:
            boolean success = false;

            for (int x = 0; x < 2; x++) {

                double rate = 0.05;
                double diskRatio = ((double) diskFree) / diskUsage;
                long thisDiskFree;
                String testName;

                if (0 == x) {
                    thisDiskFree = diskFree;
                    if (diskRatio >= 2.0) {
                        rate /= 2;
                    }
                    if (diskRatio >= 4.0) {
                        rate /= 2;
                    }
                    if (diskRatio >= 6.0) {
                        rate = 0.0;
                    }
                    if (debug) {
                        System.out.println("\ncycle: " + diskFree + " bytes");
                    }
                    testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
                } else {
                    thisDiskFree = 0;
                    rate = 0.0;
                    if (debug) {
                        System.out.println("\ncycle: same writer: unlimited disk space");
                    }
                    testName = "reader re-use after disk full";
                }
