📄 WeblogPlugin.java
字号:
stopTime.set( Calendar.MINUTE, 59 );
        stopTime.set( Calendar.SECOND, 59 );

        StringBuffer sb = new StringBuffer();

        try {
            //  findBlogEntries() now returns a typed list, so no unchecked
            //  assignment or per-element casts are needed below.
            List<WikiPage> blogEntries = findBlogEntries( engine.getPageManager(),
                                                          weblogName,
                                                          startTime.getTime(),
                                                          stopTime.getTime() );

            //  Newest entries first (PageDateComparator sorts in reverse).
            Collections.sort( blogEntries, new PageDateComparator() );

            sb.append("<div class=\"weblog\">\n");

            for( Iterator<WikiPage> i = blogEntries.iterator(); i.hasNext() && maxEntries-- > 0 ; ) {
                WikiPage p = i.next();

                //  Render an entry only if the current user may view it.
                if( mgr.checkPermission( context.getWikiSession(),
                                         new PagePermission(p, PagePermission.VIEW_ACTION) ) ) {
                    addEntryHTML(context, entryFormat, hasComments, sb, p);
                }
            }

            sb.append("</div>\n");
        } catch( ProviderException e ) {
            log.error( "Could not locate blog entries", e );
            //  FIXME: the cause "e" is dropped here; if this PluginException
            //  version accepts a Throwable cause, it should be passed along.
            throw new PluginException( "Could not locate blog entries: "+e.getMessage() );
        }

        return sb.toString();
    }

    /**
     *  Generates the HTML for a single weblog entry and appends it to
     *  the given buffer.
     *
     *  @param context The context of the weblog page being rendered.
     *  @param entryFormat The date format used for the entry heading.
     *  @param hasComments True, if comments are enabled.
     *  @param buffer The buffer to which we add.
     *  @param entry The page holding the weblog entry text.
     *  @throws ProviderException If the page repository fails.
     */
    private void addEntryHTML( WikiContext context, DateFormat entryFormat,
                               boolean hasComments, StringBuffer buffer, WikiPage entry )
        throws ProviderException
    {
        WikiEngine engine = context.getEngine();

        buffer.append("<div class=\"weblogentry\">\n");

        //
        //  Heading: the last-modified date of the entry page.
        //
        buffer.append("<div class=\"weblogentryheading\">\n");

        Date entryDate = entry.getLastModified();
        buffer.append( entryFormat.format(entryDate) );

        buffer.append("</div>\n");

        //
        //  Append the text of the latest version.  Reset the context to
        //  that page so rendering happens relative to the entry.
        //
        WikiContext entryCtx = (WikiContext) context.clone();
        entryCtx.setPage( entry );

        String html = engine.getHTML( entryCtx, engine.getPage(entry.getName()) );

        //  Extract the first h1/h2/h3 as the title, and remove it from the body.
        buffer.append("<div class=\"weblogentrytitle\">\n");
        Matcher matcher = HEADINGPATTERN.matcher( html );
        if ( matcher.find() )
        {
            String title = matcher.group(2);
            html = matcher.replaceFirst("");
            buffer.append( title );
        }
        else
        {
            //  No heading found; fall back to the page name.
            buffer.append( entry.getName() );
        }
        buffer.append("</div>\n");

        buffer.append("<div class=\"weblogentrybody\">\n");
        buffer.append( html );
        buffer.append("</div>\n");

        //
        //  Footer: author, permalink, and optional comment link.
        //
        buffer.append("<div class=\"weblogentryfooter\">\n");

        String author = entry.getAuthor();

        if( author != null )
        {
            //  Link the author's name if a wiki page for it exists.
            if( engine.pageExists(author) )
            {
                author = "<a href=\""+entryCtx.getURL( WikiContext.VIEW, author )+"\">"+engine.beautifyTitle(author)+"</a>";
            }
        }
        else
        {
            author = "AnonymousCoward";
        }

        buffer.append("By "+author+" ");
        buffer.append( "<a href=\""+entryCtx.getURL(WikiContext.VIEW, entry.getName())+"\">Permalink</a>" );

        String commentPageName = TextUtil.replaceString( entry.getName(),
                                                         "blogentry",
                                                         "comments" );

        if( hasComments )
        {
            int numComments = guessNumberOfComments( engine, commentPageName );

            //
            //  We add the number of comments to the URL so that
            //  the user's browsers would realize that the page
            //  has changed.
            //
            buffer.append( " " );
            buffer.append( "<a target=\"_blank\" href=\""+
                           entryCtx.getURL(WikiContext.COMMENT, commentPageName, "nc="+numComments)+
                           "\">Comments? ("+
                           numComments+
                           ")</a>" );
        }

        buffer.append("</div>\n");

        //
        //  Done, close the entry div.
        //
        buffer.append("</div>\n");
    }

    /**
     *  Estimates the number of comments on a comment page by counting the
     *  sections in its raw text.
     *
     *  @param engine The engine used to fetch the page text.
     *  @param commentpage Name of the comment page.
     *  @return The number of sections, or zero if the page is missing or blank.
     *  @throws ProviderException If the page repository fails.
     */
    private int guessNumberOfComments( WikiEngine engine, String commentpage )
        throws ProviderException
    {
        String pagedata = engine.getPureText( commentpage, WikiProvider.LATEST_VERSION );

        if( pagedata == null || pagedata.trim().length() == 0 )
        {
            return 0;
        }

        return TextUtil.countSections( pagedata );
    }

    /**
     *  Attempts to locate all pages that correspond to the
     *  blog entry pattern.  Will only consider the days on the dates; not the
     *  hours and minutes.
     *
     *  @param mgr A PageManager which is used to get the pages
     *  @param baseName The basename (e.g. "Main" if you want "Main_blogentry_xxxx")
     *  @param start The date which is the first to be considered
     *  @param end The end date which is the last to be considered
     *  @return a list of pages with their FIRST revisions.
     *  @throws ProviderException If something goes wrong
     */
    public List<WikiPage> findBlogEntries( PageManager mgr, String baseName, Date start, Date end )
        throws ProviderException
    {
        Collection everyone = mgr.getAllPages();
        ArrayList<WikiPage> result = new ArrayList<WikiPage>();

        baseName = makeEntryPage( baseName );
        SimpleDateFormat fmt = new SimpleDateFormat(DEFAULT_DATEFORMAT);

        for( Iterator i = everyone.iterator(); i.hasNext(); )
        {
            WikiPage p = (WikiPage)i.next();

            String pageName = p.getName();

            if( pageName.startsWith( baseName ) )
            {
                //
                //  Check the creation date from the page name.
                //  We do this because RCSFileProvider is very slow at getting a
                //  specific page version.
                //
                try
                {
                    //  Page names look like "<base>_blogentry_<date>_<seq>";
                    //  the date field sits between the second and third '_'.
                    int firstScore = pageName.indexOf('_',baseName.length()-1 );

                    if( firstScore != -1 && firstScore+1 < pageName.length() )
                    {
                        int secondScore = pageName.indexOf('_', firstScore+1);

                        if( secondScore != -1 )
                        {
                            String creationDate = pageName.substring( firstScore+1, secondScore );

                            Date pageDay = fmt.parse( creationDate );

                            //
                            //  Add the first version of the page into the list.
                            //  This way the page modified date becomes the page
                            //  creation date.
                            //
                            if( pageDay != null && pageDay.after(start) && pageDay.before(end) )
                            {
                                WikiPage firstVersion = mgr.getPageInfo( pageName, 1 );
                                result.add( firstVersion );
                            }
                        }
                    }
                }
                catch( Exception e )
                {
                    //  Deliberately best-effort: a page whose name merely looks
                    //  like a blog entry is skipped, not treated as an error.
                    log.debug("Page name :"+pageName+" was suspected as a blog entry but it isn't because of parsing errors",e);
                }
            }
        }

        return result;
    }

    /**
     *  Reverse comparison: orders pages newest-first by last-modified date.
     *  A null page compares as equal to anything.
     */
    private static class PageDateComparator implements Comparator<WikiPage>
    {
        public int compare( WikiPage page1, WikiPage page2 )
        {
            if( page1 == null || page2 == null )
            {
                return 0;
            }

            return page2.getLastModified().compareTo( page1.getLastModified() );
        }
    }

    /**
     *  Mark us as being a real weblog.
     *  {@inheritDoc}
     */
    public void executeParser(PluginContent element, WikiContext context, Map params)
    {
        context.getPage().setAttribute( ATTR_ISWEBLOG, "true" );
    }
}
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -