⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 segmentreader.java

📁 Lucene, a Java open-source search-engine framework
💻 JAVA
📖 第 1 页 / 共 3 页
字号:
package org.apache.lucene.index;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Vector;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BitVector;

/**
 * An IndexReader over a single on-disk segment.
 *
 * @version $Id: SegmentReader.java 603061 2007-12-10 21:49:41Z gsingers $
 */
class SegmentReader extends DirectoryIndexReader {
  // Name of the segment this reader covers (SegmentInfo.name).
  private String segment;
  private SegmentInfo si;
  // Buffer size handed to every IndexInput opened for this segment.
  private int readBufferSize;

  FieldInfos fieldInfos;
  private FieldsReader fieldsReader;

  TermInfosReader tis;
  TermVectorsReader termVectorsReaderOrig = null;
  // Per-thread TermVectorsReader clone; presumably cloned lazily from
  // termVectorsReaderOrig — the cloning code is not in this chunk, so confirm.
  ThreadLocal termVectorsLocal = new ThreadLocal();

  // Bit i set => document i is deleted; null when the segment has no deletions.
  BitVector deletedDocs = null;
  // Dirty flags: set when in-memory state diverges from the files on disk.
  private boolean deletedDocsDirty = false;
  private boolean normsDirty = false;
  private boolean undeleteAll = false;

  // Snapshot of the dirty flags taken for rollback (restore on abort).
  private boolean rollbackDeletedDocsDirty = false;
  private boolean rollbackNormsDirty = false;
  private boolean rollbackUndeleteAll = false;

  IndexInput freqStream;
  IndexInput proxStream;

  // optionally used for the .nrm file shared by multiple norms
  private IndexInput singleNormStream;

  // Compound File Reader when based on a compound file segment
  CompoundFileReader cfsReader = null;
  CompoundFileReader storeCFSReader = null;

  // indicates the SegmentReader with which the resources are being shared,
  // in case this is a re-opened reader
  private SegmentReader referencedSegmentReader = null;

  /**
   * Per-field norms: the stream the norm bytes are read from, the cached
   * bytes, and a reference count so a re-opened reader can share a Norm
   * with the reader it was cloned from.
   */
  private class Norm {
    // Number of readers sharing this Norm; starts at 1, close() happens
    // inside decRef() when the last reference is released.
    volatile int refCount;
    // True when the norm bytes live in the shared .nrm file
    // (singleNormStream); then close() must not close the shared stream.
    boolean useSingleNormStream;

    public synchronized void incRef() {
      assert refCount > 0;
      refCount++;
    }

    public synchronized void decRef() throws IOException {
      assert refCount > 0;
      if (refCount == 1) {
        // Last reference: release the underlying stream before dropping to 0.
        close();
      }
      refCount--;
    }

    public Norm(IndexInput in, boolean useSingleNormStream, int number, long normSeek)
    {
      refCount = 1;
      this.in = in;
      this.number = number;
      this.normSeek = normSeek;
      this.useSingleNormStream = useSingleNormStream;
    }

    private IndexInput in;
    // Cached norm bytes, one byte per document; written back by reWrite().
    private byte[] bytes;
    // True when bytes have been modified and not yet written to disk.
    private boolean dirty;
    // Field number this Norm belongs to (used to name the norm file).
    private int number;
    // Offset within 'in' where this field's norms start.
    private long normSeek;
    // Snapshot of 'dirty' for rollback.
    private boolean rollbackDirty;

    private void reWrite(SegmentInfo si) throws IOException {
      // NOTE: norms are re-written in regular directory, not cfs
      si.advanceNormGen(this.number);
      IndexOutput out = directory().createOutput(si.getNormFileName(this.number));
      try {
        out.writeBytes(bytes, maxDoc());
      } finally {
        out.close();
      }
      this.dirty = false;
    }

    /** Closes the underlying IndexInput for this norm.
     * It is still valid to access all other norm properties after close is called.
     * @throws IOException
     */
    private synchronized void close() throws IOException {
      // The shared single-norm stream is closed elsewhere, not per-field.
      if (in != null && !useSingleNormStream) {
        in.close();
      }
      in = null;
    }
  }

  /**
   * Increments the RC of this reader, as well as
   * of all norms this reader is using
   */
  protected synchronized void incRef() {
    super.incRef();
    Iterator it = norms.values().iterator();
    while (it.hasNext()) {
      Norm norm = (Norm) it.next();
      norm.incRef();
    }
  }

  /**
   * Only increments the RC of this reader, not of
   * the norms. This is important whenever a reopen()
   * creates a new SegmentReader that doesn't share
   * the norms with this one
   */
  private synchronized void incRefReaderNotNorms() {
    super.incRef();
  }

  /** Decrements the RC of this reader and of every norm it holds. */
  protected synchronized void decRef() throws IOException {
    super.decRef();
    Iterator it = norms.values().iterator();
    while (it.hasNext()) {
      Norm norm = (Norm) it.next();
      norm.decRef();
    }
  }

  /** Counterpart of incRefReaderNotNorms(): reader-only decrement. */
  private synchronized void decRefReaderNotNorms() throws IOException {
    super.decRef();
  }

  // Maps field name -> Norm. NOTE: referenced by incRef()/decRef() above;
  // Java permits a field to be declared after the methods that use it.
  Map norms = new HashMap();

  /** The class which implements SegmentReader.
The concrete class is chosen once at class-load time (see static
   * initializer below) so that the implementation can be swapped via a
   * system property. */
  private static Class IMPL;
  static {
    try {
      // Allow overriding the SegmentReader implementation via
      // -Dorg.apache.lucene.SegmentReader.class=...; default is this class.
      String name =
        System.getProperty("org.apache.lucene.SegmentReader.class",
                           SegmentReader.class.getName());
      IMPL = Class.forName(name);
    } catch (ClassNotFoundException e) {
      throw new RuntimeException("cannot load SegmentReader class: " + e, e);
    } catch (SecurityException se) {
      // Reading system properties may be forbidden (applet/sandbox);
      // fall back to the default implementation.
      try {
        IMPL = Class.forName(SegmentReader.class.getName());
      } catch (ClassNotFoundException e) {
        throw new RuntimeException("cannot load default SegmentReader class: " + e, e);
      }
    }
  }

  /**
   * Opens a reader over {@code si} with default buffer size, opening stores.
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
  public static SegmentReader get(SegmentInfo si) throws CorruptIndexException, IOException {
    return get(si.dir, si, null, false, false, BufferedIndexInput.BUFFER_SIZE, true);
  }

  /**
   * As above, but lets the caller defer opening the doc stores.
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
  static SegmentReader get(SegmentInfo si, boolean doOpenStores) throws CorruptIndexException, IOException {
    return get(si.dir, si, null, false, false, BufferedIndexInput.BUFFER_SIZE, doOpenStores);
  }

  /**
   * As above, with an explicit read buffer size.
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
  public static SegmentReader get(SegmentInfo si, int readBufferSize) throws CorruptIndexException, IOException {
    return get(si.dir, si, null, false, false, readBufferSize, true);
  }

  /**
   * As above, with explicit buffer size and store-opening control.
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
  static SegmentReader get(SegmentInfo si, int readBufferSize, boolean doOpenStores) throws CorruptIndexException, IOException {
    return get(si.dir, si, null, false, false, readBufferSize, doOpenStores);
  }

  /**
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
public static SegmentReader get(SegmentInfos sis, SegmentInfo si,
                                  boolean closeDir) throws CorruptIndexException, IOException {
    return get(si.dir, si, sis, closeDir, true, BufferedIndexInput.BUFFER_SIZE, true);
  }

  /**
   * As above, with explicit directory-ownership flags.
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
  public static SegmentReader get(Directory dir, SegmentInfo si,
                                  SegmentInfos sis,
                                  boolean closeDir, boolean ownDir,
                                  int readBufferSize)
    throws CorruptIndexException, IOException {
    return get(dir, si, sis, closeDir, ownDir, readBufferSize, true);
  }

  /**
   * Master factory: all other get() overloads delegate here. Instantiates
   * the configured IMPL class reflectively, then initializes it.
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
  public static SegmentReader get(Directory dir, SegmentInfo si,
                                  SegmentInfos sis,
                                  boolean closeDir, boolean ownDir,
                                  int readBufferSize,
                                  boolean doOpenStores)
    throws CorruptIndexException, IOException {
    SegmentReader instance;
    try {
      instance = (SegmentReader)IMPL.newInstance();
    } catch (Exception e) {
      throw new RuntimeException("cannot load SegmentReader class: " + e, e);
    }
    instance.init(dir, sis, closeDir);
    instance.initialize(si, readBufferSize, doOpenStores);
    return instance;
  }

  /**
   * Opens all the per-segment files: field infos, stored fields, term
   * dictionary, deleted docs, postings streams, norms and (optionally)
   * term vectors. On any failure the partially-opened resources are
   * released via doClose() so no file handles leak.
   */
  private void initialize(SegmentInfo si, int readBufferSize, boolean doOpenStores) throws CorruptIndexException, IOException {
    segment = si.name;
    this.si = si;
    this.readBufferSize = readBufferSize;
    boolean success = false;

    try {
      // Use compound file directory for some files, if it exists
      Directory cfsDir = directory();
      if (si.getUseCompoundFile()) {
        cfsReader = new CompoundFileReader(directory(), segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
        cfsDir = cfsReader;
      }

      // Decide where the doc stores (stored fields / vectors) live: a
      // shared doc-store segment (possibly its own compound file), or the
      // same place as the rest of this segment's files.
      final Directory storeDir;
      if (doOpenStores) {
        if (si.getDocStoreOffset() != -1) {
          if (si.getDocStoreIsCompoundFile()) {
            storeCFSReader = new CompoundFileReader(directory(), si.getDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION, readBufferSize);
            storeDir = storeCFSReader;
          } else {
            storeDir = directory();
          }
        } else {
          storeDir = cfsDir;
        }
      } else
        storeDir = null;

      // Field infos always come from the segment's own directory
      // (compound or not).
      fieldInfos = new FieldInfos(cfsDir, segment + ".fnm");

      // Stored fields may live in a shared doc-store segment.
      final String fieldsSegment;
      if (si.getDocStoreOffset() != -1)
        fieldsSegment = si.getDocStoreSegment();
      else
        fieldsSegment = segment;

      if (doOpenStores) {
        fieldsReader = new FieldsReader(storeDir, fieldsSegment, fieldInfos, readBufferSize,
                                        si.getDocStoreOffset(), si.docCount);

        // Verify two sources of "maxDoc" agree:
        if (si.getDocStoreOffset() == -1 && fieldsReader.size() != si.docCount) {
          throw new CorruptIndexException("doc counts differ for segment " + si.name + ": fieldsReader shows " + fieldsReader.size() + " but segmentInfo shows " + si.docCount);
        }
      }

      tis = new TermInfosReader(cfsDir, segment, fieldInfos, readBufferSize);

      loadDeletedDocs();

      // make sure that all index files have been read or are kept open
      // so that if an index update removes them we'll still have them
      freqStream = cfsDir.openInput(segment + ".frq", readBufferSize);
      proxStream = cfsDir.openInput(segment + ".prx", readBufferSize);
      openNorms(cfsDir, readBufferSize);

      if (doOpenStores && fieldInfos.hasVectors()) { // open term vector files only as needed
        final String vectorsSegment;
        if (si.getDocStoreOffset() != -1)
          vectorsSegment = si.getDocStoreSegment();
        else
          vectorsSegment = segment;
        termVectorsReaderOrig = new TermVectorsReader(storeDir, vectorsSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
      }
      success = true;
    } finally {

      // With lock-less commits, it's entirely possible (and
      // fine) to hit a FileNotFound exception above.  In
      // this case, we want to explicitly close any subset
      // of things that were opened so that we don't have to
      // wait for a GC to do so.
      if (!success) {
        doClose();

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -