📄 referencemanager.java
字号:
throws NoSuchAlgorithmException { MessageDigest digest = MessageDigest.getInstance("MD5"); byte[] dig; try { dig = digest.digest( pageName.getBytes("UTF-8") ); } catch (UnsupportedEncodingException e) { throw new InternalWikiException("AAAAGH! UTF-8 is gone! My eyes! It burns...!"); } return TextUtil.toHexString(dig)+".cache"; } /** * Reads the serialized data from the disk back to memory. * Returns the date when the data was last written on disk */ private synchronized long unserializeAttrsFromDisk(WikiPage p) throws IOException, ClassNotFoundException { ObjectInputStream in = null; long saved = 0L; try { StopWatch sw = new StopWatch(); sw.start(); // // Find attribute cache, and check if it exists // File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR ); f = new File( f, getHashFileName(p.getName()) ); if( !f.exists() ) { return 0L; } log.debug("Deserializing attributes for "+p.getName()); in = new ObjectInputStream( new BufferedInputStream(new FileInputStream(f)) ); long ver = in.readLong(); if( ver != serialVersionUID ) { log.debug("File format has changed; cannot deserialize."); return 0L; } saved = in.readLong(); String name = in.readUTF(); if( !name.equals(p.getName()) ) { log.debug("File name does not match ("+name+"), skipping..."); return 0L; // Not here } long entries = in.readLong(); for( int i = 0; i < entries; i++ ) { String key = in.readUTF(); Object value = in.readObject(); p.setAttribute( key, value ); log.debug(" attr: "+key+"="+value); } in.close(); sw.stop(); log.debug("Read serialized data for "+name+" successfully in "+sw); p.setHasMetadata(); } catch( NoSuchAlgorithmException e ) { log.fatal("No MD5!?!"); } finally { if( in != null ) in.close(); } return saved; } /** * Serializes hashmaps to disk. The format is private, don't touch it. 
*/ private synchronized void serializeAttrsToDisk( WikiPage p ) { ObjectOutputStream out = null; StopWatch sw = new StopWatch(); sw.start(); try { File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR ); if( !f.exists() ) f.mkdirs(); // // Create a digest for the name // f = new File( f, getHashFileName(p.getName()) ); // FIXME: There is a concurrency issue here... Set entries = p.getAttributes().entrySet(); if( entries.size() == 0 ) { // Nothing to serialize, therefore we will just simply remove the // serialization file so that the next time we boot, we don't // deserialize old data. f.delete(); return; } out = new ObjectOutputStream( new BufferedOutputStream(new FileOutputStream(f)) ); out.writeLong( serialVersionUID ); out.writeLong( System.currentTimeMillis() ); // Timestamp out.writeUTF( p.getName() ); out.writeLong( entries.size() ); for( Iterator i = entries.iterator(); i.hasNext(); ) { Map.Entry e = (Map.Entry) i.next(); if( e.getValue() instanceof Serializable ) { out.writeUTF( (String)e.getKey() ); out.writeObject( e.getValue() ); } } out.close(); } catch( IOException e ) { log.error("Unable to serialize!"); try { if( out != null ) out.close(); } catch( IOException ex ) {} } catch( NoSuchAlgorithmException e ) { log.fatal("No MD5 algorithm!?!"); } finally { sw.stop(); log.debug("serialization for "+p.getName()+" done - took "+sw); } } /** * After the page has been saved, updates the reference lists. * * @param context {@inheritDoc} * @param content {@inheritDoc} */ public void postSave( WikiContext context, String content ) { WikiPage page = context.getPage(); updateReferences( page.getName(), context.getEngine().scanWikiLinks( page, content ) ); serializeAttrsToDisk( page ); } /** * Updates the m_referedTo and m_referredBy hashmaps when a page has been * deleted. * <P> * Within the m_refersTo map the pagename is a key. The whole key-value-set * has to be removed to keep the map clean. * Within the m_referredBy map the name is stored as a value. 
 Since a key
 * can have more than one value we have to delete just the key-value-pair
 * referring page:deleted page.
 *
 * @param page Name of the page to remove from the maps.
 */
public synchronized void pageRemoved( WikiPage page )
{
    String pageName = page.getName();

    pageRemoved(pageName);
}

//  Worker for pageRemoved(WikiPage): scrubs pageName from both reference
//  maps and removes its attribute cache file from disk.
private void pageRemoved(String pageName)
{
    //  Every page this page referred to must forget being referred to by it.
    Collection<String> refTo = m_refersTo.get( pageName );

    if( refTo != null )
    {
        Iterator itRefTo = refTo.iterator();
        while( itRefTo.hasNext() )
        {
            String referredPageName = (String)itRefTo.next();
            Set<String> refBy = m_referredBy.get( referredPageName );

            //  A referred-to page with no referrer entry means the two maps
            //  have diverged -- fail loudly rather than corrupt further.
            if( refBy == null ) throw new InternalWikiException("Refmgr out of sync: page "+pageName+" refers to "+referredPageName+", which has null referrers.");

            refBy.remove(pageName);

            m_referredBy.remove( referredPageName );

            // We won't put it back again if it becomes empty and does not exist.  It will be added
            // later on anyway, if it becomes referenced again.
            if( !(refBy.isEmpty() && !m_engine.pageExists(referredPageName)) )
            {
                m_referredBy.put( referredPageName, refBy );
            }
        }

        log.debug("Removing from m_refersTo HashMap key:value "+pageName+":"+m_refersTo.get( pageName ));
        m_refersTo.remove( pageName );
    }

    //  Drop the page's own (now stale) empty referred-by entry, if any.
    Set<String> refBy = m_referredBy.get( pageName );
    if( refBy == null || refBy.isEmpty() )
    {
        m_referredBy.remove( pageName );
    }

    //
    //  Remove any traces from the disk, too
    //
    serializeToDisk();

    try
    {
        File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR );

        f = new File( f, getHashFileName(pageName) );

        //  Delete the per-page attribute cache so a reboot cannot
        //  resurrect attributes of the removed page.
        if( f.exists() ) f.delete();
    }
    catch( NoSuchAlgorithmException e )
    {
        log.error("What do you mean - no such algorithm?", e);
    }
}

/**
 *  Updates the referred pages of a new or edited WikiPage. If a refersTo
 *  entry for this page already exists, it is removed and a new one is built
 *  from scratch. Also calls updateReferredBy() for each referenced page.
 * <P>
 * This is the method to call when a new page has been created and we
 * want to a) set up its references and b) notify the referred pages
 * of the references. Use this method during run-time.
 *
 * @param page Name of the page to update.
 * @param references A Collection of Strings, each one pointing to a page this page references.
 */
public synchronized void updateReferences( String page, Collection references )
{
    internalUpdateReferences(page, references);

    //  Unlike the internal worker, the public entry point also flushes
    //  the reference maps to disk.
    serializeToDisk();
}

/**
 *  Updates the referred pages of a new or edited WikiPage. If a refersTo
 *  entry for this page already exists, it is removed and a new one is built
 *  from scratch. Also calls updateReferredBy() for each referenced page.
 *  <p>
 *  This method does not synchronize the database to disk.
 *
 *  @param page Name of the page to update.
 *  @param references A Collection of Strings, each one pointing to a page this page references.
 */
private void internalUpdateReferences(String page, Collection references)
{
    page = getFinalPageName( page );

    //
    //  Create a new entry in m_refersTo.
    //
    Collection oldRefTo = m_refersTo.get( page );
    m_refersTo.remove( page );

    //  Normalize every reference through getFinalPageName; TreeSet also
    //  deduplicates and keeps them sorted.
    TreeSet<String> cleanedRefs = new TreeSet<String>();

    for( Iterator i = references.iterator(); i.hasNext(); )
    {
        String ref = (String)i.next();

        ref = getFinalPageName( ref );

        cleanedRefs.add( ref );
    }

    m_refersTo.put( page, cleanedRefs );

    //
    //  We know the page exists, since it's making references somewhere.
    //  If an entry for it didn't exist previously in m_referredBy, make
    //  sure one is added now.
    //
    if( !m_referredBy.containsKey( page ) )
    {
        m_referredBy.put( page, new TreeSet<String>() );
    }

    //
    //  Get all pages that used to be referred to by 'page' and
    //  remove that reference. (We don't want to try to figure out
    //  which particular references were removed...)
    //
    cleanReferredBy( page, oldRefTo, cleanedRefs );

    //
    //  Notify all referred pages of their referinesshoodicity.
    //
    Iterator<String> it = cleanedRefs.iterator();
    while( it.hasNext() )
    {
        String referredPageName = it.next();
        updateReferredBy( getFinalPageName(referredPageName), page );
    }
}

/**
 *  Returns the refers-to list. For debugging.
 *
 *  @return The refers-to list.
 */
protected Map getRefersTo()
{
    return m_refersTo;
}

/**
 *  Returns the referred-by list. For debugging.
 *
 *  @return Referred-by lists.
 */
protected Map getReferredBy()
{
    return m_referredBy;
}

/**
 *  Cleans the 'referred by' list, removing references by 'referrer' to
 *  any other page. Called after 'referrer' is removed.
 *
 *  NOTE(review): this method continues past the visible end of this
 *  chunk; the body below is its visible beginning only.
 */
private void cleanReferredBy( String referrer, Collection oldReferred,
                              Collection<String> newReferred )
{
    // Two ways to go about this.  One is to look up all pages previously
    // referred by referrer and remove referrer from their lists, and let
    // the update put them back in (except possibly removed ones).
    // The other is to get the old referred to list, compare to the new,
    // and tell the ones missing in the latter to remove referrer from
    // their list.  Hm.  We'll just try the first for now.  Need to come
    // back and optimize this a bit.
    if( oldReferred == null )
        return;

    Iterator it = oldReferred.iterator();

    while( it.hasNext() )
    {
        String referredPage = (String)it.next();

        Set oldRefBy = m_referredBy.get( referredPage );

        if( oldRefBy != null )
        {
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -