cachingprovider.java

JSPWiki, a complete wiki application developed 100% in Java
Language: JAVA
                all.add( ((CacheItem)i.next()).m_page );
            }
        }

        return all;
    }

    //  Null text for no page.
    //  Returns null if no page could be found.
    private synchronized CacheItem addPage( String pageName, String text )
        throws ProviderException
    {
        CacheItem item = null;

        WikiPage newpage = m_provider.getPageInfo( pageName, WikiPageProvider.LATEST_VERSION );

        if( newpage != null )
        {
            item = new CacheItem();
            item.m_page = newpage;

            if( text != null )
            {
                m_textCache.putInCache( pageName, text );
            }

            m_cache.put( pageName, item );
        }

        return item;
    }

    public Collection getAllChangedSince( Date date )
    {
        return m_provider.getAllChangedSince( date );
    }

    public int getPageCount()
        throws ProviderException
    {
        return m_provider.getPageCount();
    }

    public Collection findPages( QueryItem[] query )
    {
        //
        //  If the provider is a fast searcher, then
        //  just pass this request through.
        //
        if( m_provider instanceof FastSearch )
        {
            return m_provider.findPages( query );
        }

        TreeSet res = new TreeSet( new SearchResultComparator() );
        SearchMatcher matcher = new SearchMatcher( query );

        Collection allPages = null;

        try
        {
            if( m_useLucene )
            {
                //  To keep the scoring mechanism the same, we only use
                //  Lucene to determine which pages to score.
                allPages = searchLucene( query );
            }
            else
            {
                allPages = getAllPages();
            }
        }
        catch( ProviderException pe )
        {
            log.error( "Unable to retrieve page list", pe );
            return( null );
        }

        Iterator it = allPages.iterator();

        while( it.hasNext() )
        {
            try
            {
                WikiPage page = (WikiPage) it.next();
                String pageName = page.getName();
                String pageContent = getTextFromCache( pageName );

                SearchResult comparison = matcher.matchPageContent( pageName, pageContent );

                if( comparison != null )
                {
                    res.add( comparison );
                }
            }
            catch( RepositoryModifiedException rme )
            {
                // FIXME: What should we do in this case?
            }
            catch( ProviderException pe )
            {
                log.error( "Unable to retrieve page from cache", pe );
            }
            catch( IOException ioe )
            {
                log.error( "Failed to search page", ioe );
            }
        }

        return( res );
    }

    /**
     *  @param queryTerms The terms to search for.
     *  @return Collection of WikiPage items for the pages that Lucene claims will match the search.
     */
    private Collection searchLucene( QueryItem[] queryTerms )
    {
        try
        {
            Searcher searcher = new IndexSearcher( m_luceneDirectory );

            BooleanQuery query = new BooleanQuery();

            for( int curr = 0; curr < queryTerms.length; curr++ )
            {
                QueryItem queryTerm = queryTerms[curr];

                if( queryTerm.word.indexOf( ' ' ) >= 0 )
                {
                    //  This is a phrase search.  Just find pages containing
                    //  the words, so that included stop words don't mess up
                    //  the search.
                    StringTokenizer tok = new StringTokenizer( queryTerm.word );

                    while( tok.hasMoreTokens() )
                    {
                        String word = tok.nextToken();

                        query.add( new TermQuery( new Term( LUCENE_PAGE_CONTENTS, word ) ),
                                   queryTerm.type == QueryItem.REQUIRED,
                                   queryTerm.type == QueryItem.FORBIDDEN );
                    }

                    /*  Since we're not using Lucene to score, there is no reason
                        to use PhraseQuery, which removes stop words.

                    PhraseQuery phraseQ = new PhraseQuery();
                    StringTokenizer tok = new StringTokenizer( queryTerm.word );

                    while( tok.hasMoreTokens() )
                    {
                        String word = tok.nextToken();
                        phraseQ.add( new Term( LUCENE_PAGE_CONTENTS, word ) );
                    }

                    query.add( phraseQ,
                               queryTerm.type == QueryItem.REQUIRED,
                               queryTerm.type == QueryItem.FORBIDDEN );
                    */
                }
                else
                {
                    //  Single-word query.
                    query.add( new TermQuery( new Term( LUCENE_PAGE_CONTENTS, queryTerm.word ) ),
                               queryTerm.type == QueryItem.REQUIRED,
                               queryTerm.type == QueryItem.FORBIDDEN );
                }
            }

            Hits hits = searcher.search( query );

            ArrayList list = new ArrayList( hits.length() );

            for( int curr = 0; curr < hits.length(); curr++ )
            {
                Document doc = hits.doc( curr );
                String pageName = doc.get( LUCENE_ID );
                list.add( getPageInfo( pageName, WikiPageProvider.LATEST_VERSION ) );
            }

            searcher.close();

            return list;
        }
        catch( Exception e )
        {
            log.error( "Failed during Lucene search", e );
            return Collections.EMPTY_LIST;
        }
    }

    public WikiPage getPageInfo( String page, int version )
        throws ProviderException
    {
        CacheItem item = (CacheItem) m_cache.get( page );
        int latestcached = (item != null) ? item.m_page.getVersion() : Integer.MIN_VALUE;

        if( version == WikiPageProvider.LATEST_VERSION ||
            version == latestcached )
        {
            if( checkIfPageChanged( item ) )
            {
                revalidatePage( item.m_page );
                throw new RepositoryModifiedException( page );
            }

            if( item == null )
            {
                item = addPage( page, null );

                if( item == null )
                {
                    return null;
                }
            }

            return item.m_page;
        }
        else
        {
            //  We do not cache old versions.
            return m_provider.getPageInfo( page, version );
        }
    }

    public List getVersionHistory( String page )
        throws ProviderException
    {
        List history = null;

        try
        {
            history = (List) m_historyCache.getFromCache( page, m_refreshPeriod );

            log.debug( "History cache hit for page " + page );
            m_historyCacheHits++;
        }
        catch( NeedsRefreshException e )
        {
            history = m_provider.getVersionHistory( page );
            m_historyCache.putInCache( page, history );

            log.debug( "History cache miss for page " + page );
            m_historyCacheMisses++;
        }

        return history;
    }

    public synchronized String getProviderInfo()
    {
        int cachedPages = 0;
        long totalSize  = 0;

        /*
        for( Iterator i = m_cache.values().iterator(); i.hasNext(); )
        {
            CacheItem item = (CacheItem) i.next();

            String text = (String) item.m_text.get();

            if( text != null )
            {
                cachedPages++;
                totalSize += text.length() * 2;
            }
        }

        totalSize = (totalSize + 512) / 1024L;
        */

        return( "Real provider: " + m_provider.getClass().getName() +
                "<br />Cache misses: " + m_cacheMisses +
                "<br />Cache hits: " + m_cacheHits +
                "<br />History cache hits: " + m_historyCacheHits +
                "<br />History cache misses: " + m_historyCacheMisses +
                "<br />Cache consistency checks: " + m_milliSecondsBetweenChecks + "ms" +
                "<br />Lucene enabled: " + (m_useLucene ? "yes" : "no") );
    }

    public void deleteVersion( String pageName, int version )
        throws ProviderException
    {
        //
        //  Luckily, this is such a rare operation that it is okay
        //  to synchronize against the whole thing.
        //
        synchronized( this )
        {
            CacheItem item = (CacheItem) m_cache.get( pageName );
            int latestcached = (item != null) ? item.m_page.getVersion() : Integer.MIN_VALUE;

            //
            //  If we have this version cached, remove it from the cache.
            //
            if( version == WikiPageProvider.LATEST_VERSION ||
                version == latestcached )
            {
                m_cache.remove( pageName );
            }

            m_provider.deleteVersion( pageName, version );
        }
    }

    public void deletePage( String pageName )
        throws ProviderException
    {
        //
        //  See the note in deleteVersion().
        //
        synchronized( this )
        {
            if( m_useLucene )
            {
                deleteFromLucene( getPageInfo( pageName, WikiPageProvider.LATEST_VERSION ) );
            }

            m_cache.remove( pageName );
            m_provider.deletePage( pageName );
        }
    }

    /**
     *  Returns the provider actually in use.
     *
     *  @since 2.0
     */
    public WikiPageProvider getRealProvider()
    {
        return m_provider;
    }

    private class CacheItem
    {
        WikiPage      m_page;
        long          m_lastChecked = 0L;
    }
}
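
The class above is a decorator: it exposes the same WikiPageProvider interface as the provider it wraps, serves reads from its cache, and forwards misses and all writes to the real provider. Below is a minimal, self-contained sketch of that read-through idea. It does not use JSPWiki's actual interfaces; PageProvider and SimpleCachingProvider are hypothetical names introduced only for illustration.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

//  Hypothetical stand-in for WikiPageProvider: just enough surface
//  to demonstrate the caching decorator.
interface PageProvider
{
    String getPageText( String pageName );
    void   deletePage( String pageName );
}

class SimpleCachingProvider implements PageProvider
{
    private final PageProvider         m_realProvider;
    private final Map<String, String>  m_cache = new ConcurrentHashMap<>();

    SimpleCachingProvider( PageProvider realProvider )
    {
        m_realProvider = realProvider;
    }

    //  Read-through: serve from the cache when possible; on a miss,
    //  fetch from the real provider and remember the result
    //  (compare getPageInfo()/addPage() above).
    public String getPageText( String pageName )
    {
        return m_cache.computeIfAbsent( pageName, m_realProvider::getPageText );
    }

    //  Writes invalidate the cache entry and pass through,
    //  mirroring deletePage()/deleteVersion() above.
    public void deletePage( String pageName )
    {
        m_cache.remove( pageName );
        m_realProvider.deletePage( pageName );
    }
}

What the real CachingProvider adds on top of this skeleton is expiry (m_refreshPeriod), consistency checks against the backing repository (checkIfPageChanged() and RepositoryModifiedException), a separate history cache, and an optional Lucene index that is kept in sync on writes.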
