⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 wikiengine.java

📁 jspwiki source code
💻 JAVA
📖 第 1 页 / 共 5 页
字号:
     *  @param context  The WikiContext in which the page is to be rendered     *  @return Rendered page text     */    public String textToHTML( WikiContext context, String pagedata )    {        String result = "";        boolean runFilters = "true".equals(m_variableManager.getValue(context,PROP_RUNFILTERS,"true"));        StopWatch sw = new StopWatch();        sw.start();        try        {            if( runFilters )                pagedata = m_filterManager.doPreTranslateFiltering( context, pagedata );            result = m_renderingManager.getHTML( context, pagedata );            if( runFilters )                result = m_filterManager.doPostTranslateFiltering( context, result );        }        catch( FilterException e )        {            // FIXME: Don't yet know what to do        }        sw.stop();        if( log.isDebugEnabled() )            log.debug("Page "+context.getRealPage().getName()+" rendered, took "+sw );        return result;    }    /**     * Protected method that signals that the WikiEngine will be     * shut down by the servlet container. It is called by     * {@link WikiServlet#destroy()}. When this method is called,     * it fires a "shutdown" WikiEngineEvent to all registered     * listeners.     */    protected void shutdown()    {        fireEvent( WikiEngineEvent.SHUTDOWN );        m_filterManager.destroy();    }    /**     *  Reads a WikiPageful of data from a String and returns all links     *  internal to this Wiki in a Collection.     
*     *  @param page The WikiPage to scan     *  @param pagedata The page contents     *  @return a Collection of Strings     */    public Collection scanWikiLinks( WikiPage page, String pagedata )    {        LinkCollector localCollector = new LinkCollector();        textToHTML( new WikiContext(this,page),                    pagedata,                    localCollector,                    null,                    localCollector,                    false,                    true );        return localCollector.getLinks();    }    /**     *  Just convert WikiText to HTML.     *     *  @param context The WikiContext in which to do the conversion     *  @param pagedata The data to render     *  @param localLinkHook Is called whenever a wiki link is found     *  @param extLinkHook   Is called whenever an external link is found     *     *  @return HTML-rendered page text.     */    public String textToHTML( WikiContext context,                              String pagedata,                              StringTransmutator localLinkHook,                              StringTransmutator extLinkHook )    {        return textToHTML( context, pagedata, localLinkHook, extLinkHook, null, true, false );    }    /**     *  Just convert WikiText to HTML.     *     *  @param context The WikiContext in which to do the conversion     *  @param pagedata The data to render     *  @param localLinkHook Is called whenever a wiki link is found     *  @param extLinkHook   Is called whenever an external link is found     *  @param attLinkHook   Is called whenever an attachment link is found     *  @return HTML-rendered page text.     
*/    public String textToHTML( WikiContext context,                              String pagedata,                              StringTransmutator localLinkHook,                              StringTransmutator extLinkHook,                              StringTransmutator attLinkHook )    {        return textToHTML( context, pagedata, localLinkHook, extLinkHook, attLinkHook, true, false );    }    /**     *  Helper method for doing the HTML translation.     *     *  @param context The WikiContext in which to do the conversion     *  @param pagedata The data to render     *  @param localLinkHook Is called whenever a wiki link is found     *  @param extLinkHook   Is called whenever an external link is found     *  @param parseAccessRules Parse the access rules if we encounter them     *  @param justParse Just parses the pagedata, does not actually render.  In this case,     *                   this methods an empty string.     *  @return HTML-rendered page text.     */    private String textToHTML( WikiContext context,                               String pagedata,                               StringTransmutator localLinkHook,                               StringTransmutator extLinkHook,                               StringTransmutator attLinkHook,                               boolean            parseAccessRules,                               boolean            justParse )    {        String result = "";        if( pagedata == null )        {            log.error("NULL pagedata to textToHTML()");            return null;        }        boolean runFilters = "true".equals(m_variableManager.getValue(context,PROP_RUNFILTERS,"true"));        try        {            StopWatch sw = new StopWatch();            sw.start();            if( runFilters )                pagedata = m_filterManager.doPreTranslateFiltering( context, pagedata );            MarkupParser mp = m_renderingManager.getParser( context, pagedata );            mp.addLocalLinkHook( localLinkHook );            
mp.addExternalLinkHook( extLinkHook );            mp.addAttachmentLinkHook( attLinkHook );            if( !parseAccessRules ) mp.disableAccessRules();            WikiDocument doc = mp.parse();            //            //  In some cases it's better just to parse, not to render            //            if( !justParse )            {                result = m_renderingManager.getHTML( context, doc );                if( runFilters )                    result = m_filterManager.doPostTranslateFiltering( context, result );            }            sw.stop();            if( log.isDebugEnabled() )                log.debug("Page "+context.getRealPage().getName()+" rendered, took "+sw );        }        catch( IOException e )        {            log.error("Failed to scan page data: ", e);        }        catch( FilterException e )        {            // FIXME: Don't yet know what to do        }        return result;    }    /**     *  Updates all references for the given page.     *     *  @param page wiki page for which references should be updated     */    public void updateReferences( WikiPage page )    {        String pageData = getPureText( page.getName(), WikiProvider.LATEST_VERSION );        m_referenceManager.updateReferences( page.getName(),                                             scanWikiLinks( page, pageData ) );    }    /**     *  Writes the WikiText of a page into the     *  page repository. If the <code>jspwiki.properties</code> file contains     *  the property <code>jspwiki.approver.workflow.saveWikiPage</code> and     *  its value resolves to a valid user, {@link com.ecyrd.jspwiki.auth.authorize.Group}     *  or {@link com.ecyrd.jspwiki.auth.authorize.Role}, this method will     *  place a {@link com.ecyrd.jspwiki.workflow.Decision} in the approver's     *  workflow inbox and throw a {@link com.ecyrd.jspwiki.workflow.DecisionRequiredException}.     
     *  If the submitting user is authenticated and the page save is rejected,
     *  a notification will be placed in the user's decision queue.
     *
     *  @since 2.1.28
     *  @param context The current WikiContext
     *  @param text    The Wiki markup for the page.
     *  @throws WikiException if the save operation encounters an error during the
     *  save operation. If the page-save operation requires approval, the exception will
     *  be of type {@link com.ecyrd.jspwiki.workflow.DecisionRequiredException}. Individual
     *  PageFilters, such as the {@link com.ecyrd.jspwiki.filters.SpamFilter} may also
     *  throw a {@link com.ecyrd.jspwiki.filters.RedirectException}.
     */
    public void saveText( WikiContext context, String text )
        throws WikiException
    {
        // Check if page data actually changed; bail if not
        WikiPage page = context.getPage();
        String oldText = getPureText( page );
        // NOTE(review): normalizePostData presumably canonicalizes the POSTed
        // text (e.g. line endings) before comparison — confirm against TextUtil.
        String proposedText = TextUtil.normalizePostData( text );

        if ( oldText != null && oldText.equals( proposedText ) )
        {
            return;
        }

        // Check if creation of empty pages is allowed; bail if not
        boolean allowEmpty = TextUtil.getBooleanProperty( m_properties,
                                                          PROP_ALLOW_CREATION_OF_EMPTY_PAGES,
                                                          false );
        if ( !allowEmpty && !pageExists( page ) && text.trim().equals( "" ) )
        {
            return;
        }

        // Create approval workflow for page save; add the diffed, proposed
        // and old text versions as Facts for the approver (if approval is required)
        // If submitter is authenticated, any reject messages will appear in his/her workflow inbox.
        WorkflowBuilder builder = WorkflowBuilder.getBuilder( this );
        Principal submitter = context.getCurrentUser();
        Task prepTask = new PageManager.PreSaveWikiPageTask( context, proposedText );
        Task completionTask = new PageManager.SaveWikiPageTask();
        String diffText = m_differenceManager.makeDiff( context, oldText, proposedText );
        boolean isAuthenticated = context.getWikiSession().isAuthenticated();

        // Facts presented to the approver alongside the save Decision.
        Fact[] facts = new Fact[5];
        facts[0] = new Fact( PageManager.FACT_PAGE_NAME, page.getName() );
        facts[1] = new Fact( PageManager.FACT_DIFF_TEXT, diffText );
        facts[2] = new Fact( PageManager.FACT_PROPOSED_TEXT, proposedText );
        facts[3] = new Fact( PageManager.FACT_CURRENT_TEXT, oldText);
        facts[4] = new Fact( PageManager.FACT_IS_AUTHENTICATED, Boolean.valueOf( isAuthenticated ) );

        // Only authenticated submitters get a rejection notification key.
        String rejectKey = isAuthenticated ? PageManager.SAVE_REJECT_MESSAGE_KEY : null;
        Workflow workflow = builder.buildApprovalWorkflow( submitter,
                                                           PageManager.SAVE_APPROVER,
                                                           prepTask,
                                                           PageManager.SAVE_DECISION_MESSAGE_KEY,
                                                           facts,
                                                           completionTask,
                                                           rejectKey );
        m_workflowMgr.start(workflow);

        // Let callers know if the page-save requires approval: if the workflow
        // is paused at a Decision step, the save has not completed yet.
        if ( workflow.getCurrentStep() instanceof Decision )
        {
            throw new DecisionRequiredException( "The page contents must be approved before they become active." );
        }
    }

    /**
     *  Returns the number of pages in this Wiki
     *  @return The total number of pages.
*/    public int getPageCount()    {        return m_pageManager.getTotalPageCount();    }    /**     *  Returns the provider name.     *  @return The full class name of the current page provider.     */    public String getCurrentProvider()    {        return m_pageManager.getProvider().getClass().getName();    }    /**     *  Return information about current provider.  This method just calls     *  the corresponding PageManager method, which in turn calls the     *  provider method.     *     *  @return A textual description of the current provider.     *  @since 1.6.4     */    public String getCurrentProviderInfo()    {        return m_pageManager.getProviderDescription();    }    /**     *  Returns a Collection of WikiPages, sorted in time     *  order of last change (i.e. first object is the most     *  recently changed).  This method also includes attachments.     *     *  @return Collection of WikiPage objects.  In reality, the returned     *          collection is a Set, but due to API compatibility reasons,     *          we're not changing the signature soon...     */    // FIXME: Should really get a Date object and do proper comparisons.    //        This is terribly wasteful.    @SuppressWarnings("unchecked")    public Collection getRecentChanges()    {        try        {            Collection<WikiPage>   pages = m_pageManager.getAllPages();            Collection<Attachment>  atts = m_attachmentManager.getAllAttachments();            TreeSet<WikiPage> sortedPages = new TreeSet<WikiPage>( new PageTimeComparator() );            sortedPages.addAll( pages );            sortedPages.addAll( atts );            return sortedPages;        }        catch( ProviderException e )        {            log.error( "Unable to fetch all pages: ",e);            return null;        }    }    /**     *  Parses an incoming search request, then     *  does a search.     
*  <P>     *  The query is dependent on the actual chosen search provider - each one of them has     *  a language of its own.     *     *  @param query The query string     *  @return A Collection of SearchResult objects.     *  @throws ProviderException If the searching failed     *  @throws IOException       If the searching failed     */    //    // FIXME: Should also have attributes attached.    //    public Collection findPages( String query )        throws ProviderException, IOException    {        Collection results = m_searchManager.findPages( query );        return results;    }    /**     *  Finds the corresponding WikiPage object based on the page name.  It always finds     *  the latest version of a page.     *     *  @param pagereq The name of the page to look for.     *  @return A WikiPage object, or null, if the page by the name could not be found.     *

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -