⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 TestIndexReader.java

📁 Lucene, a Java open-source search-engine framework
💻 JAVA
📖 第 1 页 / 共 4 页
字号:
        // NOTE(review): this span is mid-file. The seven lines below close a test
        // method whose start is above this view; judging from the fail() message,
        // it asserts that undeleteAll() on a closed reader throws
        // AlreadyClosedException. Do not treat this fragment as a whole method.
        try {
          reader.undeleteAll();
          fail("undeleteAll after close failed to throw IOException");
        } catch (AlreadyClosedException e) {
          // expected
        }
    }

    // Make sure we get lock obtain failed exception with 2 writers:
    // An IndexWriter is left open while a reader attempts write operations
    // (deleteDocument / setNorm / undeleteAll); each must fail with
    // LockObtainFailedException because the writer holds the write lock.
    public void testLockObtainFailed() throws IOException
    {
        Directory dir = new RAMDirectory();
        IndexWriter writer = null;
        IndexReader reader = null;
        Term searchTerm = new Term("content", "aaa");
        //  add 11 documents with term : aaa
        writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        for (int i = 0; i < 11; i++)
        {
            addDoc(writer, searchTerm.text());
        }
        // Create reader:
        reader = IndexReader.open(dir);
        // Try to make changes
        try {
          reader.deleteDocument(4);
          fail("deleteDocument should have hit LockObtainFailedException");
        } catch (LockObtainFailedException e) {
          // expected
        }
        try {
          reader.setNorm(5, "aaa", 2.0f);
          fail("setNorm should have hit LockObtainFailedException");
        } catch (LockObtainFailedException e) {
          // expected
        }
        try {
          reader.undeleteAll();
          fail("undeleteAll should have hit LockObtainFailedException");
        } catch (LockObtainFailedException e) {
          // expected
        }
        writer.close();
        reader.close();
    }

    // Make sure you can set norms & commit even if a reader
    // is open against the index:
    // Relies on the "tempDir" system property for a scratch directory;
    // fails fast with IOException when it is not set.
    public void testWritingNorms() throws IOException
    {
        String tempDir = System.getProperty("tempDir");
        if (tempDir == null)
            throw new IOException("tempDir undefined, cannot run test");
        File indexDir = new File(tempDir, "lucenetestnormwriter");
        Directory dir = FSDirectory.getDirectory(indexDir);
        IndexWriter writer;
        IndexReader reader;
        Term searchTerm = new Term("content", "aaa");
        //  add 1 documents with term : aaa
        writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDoc(writer, searchTerm.text());
        writer.close();
        //  now open reader & set norm for doc 0
        reader = IndexReader.open(dir);
        reader.setNorm(0, "content", (float) 2.0);
        // we should be holding the write lock now:
        assertTrue("locked", IndexReader.isLocked(dir));
        reader.commit();
        // we should not be holding the write lock now:
        assertTrue("not locked", !IndexReader.isLocked(dir));
        // open a 2nd reader:
        IndexReader reader2 = IndexReader.open(dir);
        // set norm again for doc 0
        // NOTE(review): the setNorm below reacquires the write lock on the
        // FIRST reader even though a second reader is now open.
        reader.setNorm(0, "content", (float) 3.0);
        assertTrue("locked", IndexReader.isLocked(dir));
        reader.close();
        // we should not be holding the write lock now:
        assertTrue("not locked", !IndexReader.isLocked(dir));
        reader2.close();
        dir.close();
        rmDir(indexDir);
    }

    // Make sure you can set norms & commit, and there are
    // no extra norms files left:
    // Compound files are disabled so per-field norms generations
    // (_0_1.s0, _0_2.s0) are visible as individual files.
    public void testWritingNormsNoReader() throws IOException
    {
        Directory dir = new MockRAMDirectory();
        IndexWriter writer = null;
        IndexReader reader = null;
        Term searchTerm = new Term("content", "aaa");
        //  add 1 documents with term : aaa
        writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        writer.setUseCompoundFile(false);
        addDoc(writer, searchTerm.text());
        writer.close();
        //  now open reader & set norm for doc 0 (writes to
        //  _0_1.s0)
        reader = IndexReader.open(dir);
        reader.setNorm(0, "content", (float) 2.0);
        reader.close();

        //  now open reader again & set norm for doc 0 (writes to _0_2.s0)
        reader = IndexReader.open(dir);
        reader.setNorm(0, "content", (float) 2.0);
        reader.close();
        assertFalse("failed to remove first generation norms file on writing second generation",
                    dir.fileExists("_0_1.s0"));

        dir.close();
    }

    // Reader/writer conflict scenario without optimize(); see
    // deleteReaderWriterConflict for the shared assertions.
    public void testDeleteReaderWriterConflictUnoptimized() throws IOException{
      deleteReaderWriterConflict(false);
    }

    // Opening an IndexReader on an empty (just-created) directory must
    // throw FileNotFoundException; the local reader variable is never
    // used because the open() call itself is expected to fail.
    public void testOpenEmptyDirectory() throws IOException{
      String dirName = "test.empty";
      File fileDirName = new File(dirName);
      if (!fileDirName.exists()) {
        fileDirName.mkdir();
      }
      try {
        IndexReader reader = IndexReader.open(fileDirName);
        fail("opening IndexReader on empty directory failed to produce FileNotFoundException");
      } catch (FileNotFoundException e) {
        // GOOD
      }
      rmDir(fileDirName);
    }

    // Same conflict scenario as above, but with optimize() forcing a new
    // current segment before the stale reader tries to delete.
    public void testDeleteReaderWriterConflictOptimized() throws IOException{
        deleteReaderWriterConflict(true);
    }

    // Shared scenario for the two tests above: a reader opened before new
    // documents are added must keep its point-in-time view, and deleting
    // through it must throw StaleReaderException until it is re-opened.
    private void deleteReaderWriterConflict(boolean optimize) throws IOException
    {
        //Directory dir = new RAMDirectory();
        Directory dir = getDirectory();
        Term searchTerm = new Term("content", "aaa");
        Term searchTerm2 = new Term("content", "bbb");
        //  add 100 documents with term : aaa
        IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        for (int i = 0; i < 100; i++)
        {
            addDoc(writer, searchTerm.text());
        }
        writer.close();
        // OPEN READER AT THIS POINT - this should fix the view of the
        // index at the point of having 100 "aaa" documents and 0 "bbb"
        IndexReader reader = IndexReader.open(dir);
        assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
        assertEquals("first docFreq", 0, reader.docFreq(searchTerm2));
        assertTermDocsCount("first reader", reader, searchTerm, 100);
        assertTermDocsCount("first reader", reader, searchTerm2, 0);
        // add 100 documents with term : bbb
        writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
        for (int i = 0; i < 100; i++)
        {
            addDoc(writer, searchTerm2.text());
        }
        // REQUEST OPTIMIZATION
        // This causes a new segment to become current for all subsequent
        // searchers. Because of this, deletions made via a previously open
        // reader, which would be applied to that reader's segment, are lost
        // for subsequent searchers/readers
        if(optimize)
          writer.optimize();
        writer.close();
        // The reader should not see the new data
        assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
        assertEquals("first docFreq", 0, reader.docFreq(searchTerm2));
        assertTermDocsCount("first reader", reader, searchTerm, 100);
        assertTermDocsCount("first reader", reader, searchTerm2, 0);
        // DELETE DOCUMENTS CONTAINING TERM: aaa
        // NOTE: the reader was created when only "aaa" documents were in
        int deleted = 0;
        try {
            deleted = reader.deleteDocuments(searchTerm);
            fail("Delete allowed on an index reader with stale segment information");
        } catch (StaleReaderException e) {
            /* success */
        }
        // Re-open index reader and try again. This time it should see
        // the new data.
        reader.close();
        reader = IndexReader.open(dir);
        assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
        assertEquals("first docFreq", 100, reader.docFreq(searchTerm2));
        assertTermDocsCount("first reader", reader, searchTerm, 100);
        assertTermDocsCount("first reader", reader, searchTerm2, 100);
        deleted = reader.deleteDocuments(searchTerm);
        assertEquals("deleted count", 100, deleted);
        // docFreq still reports 100 for "aaa": deletions mask documents but
        // do not change term statistics until segments are merged.
        assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
        assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm2));
        assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
        assertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
        reader.close();
        // CREATE A NEW READER and re-test
        reader = IndexReader.open(dir);
        assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
        assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm2));
        assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
        assertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
        reader.close();
    }

  // Fresh FSDirectory under ${tempDir}/testIndex; like testWritingNorms,
  // this depends on the "tempDir" system property being set.
  private Directory getDirectory() throws IOException {
    return FSDirectory.getDirectory(new File(System.getProperty("tempDir"), "testIndex"));
  }

  // Verifies that both IndexWriter and IndexReader release every file
  // handle on close(): _TestUtil.rmDir would fail if any file were still
  // open (notably on Windows).
  public void testFilesOpenClose() throws IOException
    {
        // Create initial data set
        File dirFile = new File(System.getProperty("tempDir"), "testIndex");
        Directory dir = getDirectory();
        IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDoc(writer, "test");
        writer.close();
        dir.close();
        // Try to erase the data - this ensures that the writer closed all files
        _TestUtil.rmDir(dirFile);
        dir = getDirectory();
        // Now create the data set again, just as before
        writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
        addDoc(writer, "test");
        writer.close();
        dir.close();
        // Now open existing directory and test that reader closes all files
        dir = getDirectory();
        IndexReader reader1 = IndexReader.open(dir);
        reader1.close();
        dir.close();
        // The following will fail if reader did not close
        // all files
        _TestUtil.rmDir(dirFile);
    }

    // Checks IndexReader.lastModified: it must not decrease after the index
    // is re-created; the 1-second sleep forces a distinct file timestamp.
    // Iteration 0 uses a MockRAMDirectory, iteration 1 a real FSDirectory
    // (where lastModified(File) and lastModified(Directory) must agree).
    // NOTE(review): this method is cut off at the "} finally {" below —
    // the remainder lies outside this view.
    public void testLastModified() throws IOException {
      assertFalse(IndexReader.indexExists("there_is_no_such_index"));
      final File fileDir = new File(System.getProperty("tempDir"), "testIndex");
      for(int i=0;i<2;i++) {
        try {
          final Directory dir;
          if (0 == i)
            dir = new MockRAMDirectory();
          else
            dir = getDirectory();
          assertFalse(IndexReader.indexExists(dir));
          IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
          addDocumentWithFields(writer);
          assertTrue(IndexReader.isLocked(dir));		// writer open, so dir is locked
          writer.close();
          assertTrue(IndexReader.indexExists(dir));
          IndexReader reader = IndexReader.open(dir);
          assertFalse(IndexReader.isLocked(dir));		// reader only, no lock
          long version = IndexReader.lastModified(dir);
          if (i == 1) {
            long version2 = IndexReader.lastModified(fileDir);
            assertEquals(version, version2);
          }
          reader.close();
          // modify index and check version has been
          // incremented:
          // NOTE(review): if this thread is interrupted, re-setting the
          // interrupt flag and retrying sleep() makes sleep() throw again
          // immediately, so this loop can spin forever — consider breaking
          // out after re-interrupting instead.
          while(true) {
            try {
              Thread.sleep(1000);
              break;
            } catch (InterruptedException ie) {
              Thread.currentThread().interrupt();
            }
          }
          writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
          addDocumentWithFields(writer);
          writer.close();
          reader = IndexReader.open(dir);
          assertTrue("old lastModified is " + version + "; new lastModified is " + IndexReader.lastModified(dir), version <= IndexReader.lastModified(dir));
          reader.close();
          dir.close();
        } finally {

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -