📄 dxdisksubtable.java
字号:
// You can redistribute this software and/or modify it under the terms of
// the Ozone Library License version 1 published by ozone-db.org.
//
// The original code and portions created by SMB are
// Copyright (C) 1997-@year@ by SMB GmbH. All rights reserved.
//
// $Id: DxDiskSubTable.java,v 1.14 2004/03/03 19:06:47 wieslawf Exp $

package org.ozoneDB.DxLib;

import org.ozoneDB.io.stream.ResolvingObjectInputStream;

import java.io.*;
import java.util.zip.*;


/**
 * One level of the multi-level on-disk hash table managed by
 * {@link DxDiskHashMap}. Each sub-table owns a slot array ({@code table}) that
 * is lazily paged in from its backing {@code file}; a slot holds either a leaf
 * node (key/value entries) or a branch node pointing to a deeper
 * {@code DxDiskSubTable}. The {@code Externalizable} representation written by
 * {@link #writeExternal} covers only the sub-table's metadata; the slot array
 * itself is serialized separately via {@link #writeTable}/{@link #readTable}.
 *
 * @author <a href="http://www.softwarebuero.de/">SMB</a>
 * @author <a href="http://www.medium.net/">Medium.net</a>
 */
public final class DxDiskSubTable implements Externalizable {

    final static long serialVersionUID = 2;

    /**
     * Monotonic pseudo-clock shared by all sub-tables; each access stamps
     * {@link #accessTime} so the grand parent can evict least-recently-used
     * tables from its buffer.
     */
    public static int timeCount = (int)System.currentTimeMillis();

    /** Number of slots in {@code table} ({@code 1 << bitSize}). */
    private int size;

    /** Number of hash bits consumed by this level. */
    private int bitSize;

    /** Deepest level the enclosing map allows. */
    private int maxDepth;

    //protected String fileName;

    /** Backing file that persists this sub-table's slot array. */
    private File file;

    /** Depth of this sub-table within the table hierarchy (root = lowest). */
    private int depth;

    /** Bit mask selecting this level's slice of a key's hash code. */
    private int hashMask;

    /** Right-shift that moves the masked hash bits down to a slot index. */
    private int hashMaskShift;

    // todo: check if changing to private with an accessor
    // provides acceptable performance
    protected DxDiskHashMap grandParent;

    /** Slot array; {@code null} while the table is paged out. */
    private DxDiskHashNode[] table;

    // todo: check if changing to private with an accessor
    // provides acceptable performance
    protected long accessTime = timeCount++;

    /** True while the in-memory slot array differs from the backing file. */
    private boolean dirty;

    /** Number of slots currently occupied by branch (sub-table) nodes. */
    private int subTableCount = 0;

    /**
     * All the items (including subTables) in the local list.
     */
    protected int itemCount = 0;


    /**
     * No-argument-style constructor used when the sub-table is re-created
     * during deserialization; the remaining state is filled in by
     * {@link #readExternal}.
     */
    public DxDiskSubTable(DxDiskHashMap grandParent) {
        this.grandParent = grandParent;
    }


    /**
     * Creates a fresh sub-table at the given depth, deriving its slot count,
     * hash mask and backing file from the enclosing map's configuration.
     */
    public DxDiskSubTable( DxDiskHashMap _grandParent, int _depth ) {
        grandParent = _grandParent;
        depth = _depth;
        bitSize = grandParent.levelTableBitSize( depth );
        maxDepth = grandParent.maxDepth();

        // This level's slice of the hash code sits above all deeper levels'
        // bits, so the shift is the sum of the deeper levels' bit sizes.
        hashMaskShift = 0;
        int i = depth;
        while (i < maxDepth) {
            hashMaskShift += grandParent.levelTableBitSize( ++i );
        }

        // Build a mask of bitSize one-bits starting at hashMaskShift.
        int bitMask = 1 << hashMaskShift;
        hashMask = 0;
        for (i = 0; i < bitSize; i++) {
            hashMask = hashMask | bitMask;
            bitMask <<= 1;
        }

        size = 1 << bitSize;
        table = new DxDiskHashNode[size];
        file = grandParent.newSubTableFile();
    }


    /*
    public String filename() {
        return fileName;
    }
    */


    /** Returns the file that persists this sub-table's slot array. */
    public File getFile() {
        return file;
    }


    /** Returns the slot array as-is; may be {@code null} if paged out. */
    public DxDiskHashNode[] table() {
        return table;
    }


    /** Returns the slot array, paging it in from disk first if necessary. */
    public DxDiskHashNode[] fetchedTable() throws Exception {
        fetchTable();
        // touch();
        return table;
    }


    /** Drops the in-memory slot array (called when the buffer evicts us). */
    public void empty() {
        table = null;
    }


    /** Returns the number of occupied slots in this sub-table. */
    public int count() {
        return itemCount;
    }


    public int maxDepth() {
        return maxDepth;
    }


    public int depth() {
        return depth;
    }


    public int bitSize() {
        return bitSize;
    }


    public int hashMask() {
        return hashMask;
    }


    /** Deletes the backing file, if it exists. */
    public void deleteFile() {
        //System.out.println ("deleteFile()...");
        //File file = new File( fileName );
        if (file.exists()) {
            file.delete();
        }
    }


    /** Maps a full hash code to this level's slot index. */
    public int hashKey( int key ) {
        return (key & hashMask) >>> hashMaskShift;
    }


    /**
     * Ensures the slot array is in memory, reading it from disk on a miss and
     * maintaining the grand parent's buffer statistics.
     *
     * NOTE(review): the null check happens outside the synchronized block
     * (check-then-act); presumably callers serialize access at a higher
     * level — confirm before relying on this from multiple threads.
     */
    protected void fetchTable() throws Exception {
        grandParent.bufferAccesses++;
        if (table == null) {
            synchronized (this) {
                grandParent.readRequest( this );
                readTable();
            }
        } else {
            grandParent.bufferHits++;
        }
    }


    /** Stamps this sub-table as most recently used. */
    protected synchronized void touch() {
        accessTime = timeCount++;
    }


    /** True if this sub-table has no deeper sub-tables hanging off it. */
    public boolean isLeaf() {
        return subTableCount == 0;
    }


    /** True if the in-memory table has unsaved changes. */
    public boolean isDirty() {
        return dirty;
    }


    /**
     * Inserts {@code obj} under {@code key}, splitting a colliding leaf into a
     * deeper sub-table when the maximum depth has not yet been reached.
     *
     * @return true if the element was added, false if the add failed (as
     *         reported by the leaf/branch the key hashed to)
     */
    public synchronized boolean addForKey( Object obj, Object key ) throws Exception {
        fetchTable();
        boolean answer = true;
        int localKey = hashKey( key.hashCode() );
        DxDiskHashNode node = table[localKey];
        if (node != null) {
            // node is a leaf
            if (node instanceof DxDiskHashNodeLeaf) {
                DxDiskHashNodeLeaf oldNode = (DxDiskHashNodeLeaf)node;
                if (depth < maxDepth) {
                    // Split: push the colliding entries one level down.
                    DxDiskHashNodeBranch newNode = grandParent.newNodeBranch();
                    newNode.subTable = new DxDiskSubTable( grandParent, depth + 1 );
                    // the old node can only hold a single element
                    newNode.subTable.addForKey( oldNode.element.data, oldNode.element.key );
                    answer = newNode.subTable.addForKey( obj, key );
                    if (answer) {
                        grandParent.readRequest( newNode.subTable );
                        // HACK: the readRequest() call did in one rare case set
                        // the table member variable to null. Maybe there is a
                        // cleaner solution than simply re-reading the table,
                        // like for instance preventing the table becoming
                        // null in the first place
                        fetchTable();
                        table[localKey] = newNode;
                        subTableCount++;
                    } else {
                        table[localKey] = oldNode;
                    }
                } else {
                    // maximum depth reached; the leaf chains entries itself
                    answer = oldNode.addForKey( obj, key );
                }
            } else {
                // node is a branch: delegate to the deeper sub-table.
                // BUGFIX: propagate the nested result instead of discarding
                // it, so a failed add deeper down is reported to the caller
                // (the max-depth leaf path above demonstrably returns false).
                answer = ((DxDiskHashNodeBranch)node).subTable.addForKey( obj, key );
            }
        } else {
            // slot is still empty: create a new leaf
            DxDiskHashNodeLeaf newNode = grandParent.newNodeLeaf();
            newNode.addForKey( obj, key );
            table[localKey] = newNode;
            itemCount++;
        }
        // must stay at the very end so that buffer eviction works correctly
        touch();
        dirty = true;
        return answer;
    }


    /**
     * Looks up the element stored under {@code key}, descending into deeper
     * sub-tables as needed.
     *
     * @return the stored element, or null if the key is absent
     */
    public final Object elementForKey( Object key, int hashCode ) throws Exception {
        fetchTable();
        int localKey = hashKey( hashCode );
        Object answer = null;
        DxDiskHashNode node = table[localKey];
        if (node != null) {
            if (node instanceof DxDiskHashNodeLeaf) {
                answer = ((DxDiskHashNodeLeaf)node).elementForKey( key, hashCode );
            } else {
                answer = ((DxDiskHashNodeBranch)node).subTable.elementForKey( key, hashCode );
            }
        }
        // must stay at the very end so that buffer eviction works correctly
        touch();
        return answer;
    }


    /** Hook invoked when an element is done with; intentionally empty here. */
    protected synchronized void elementDone( DxDiskHashCompatible obj ) {
    }


    /**
     * Removes the element stored under {@code key}. A sub-table that becomes
     * empty is unregistered from the buffer and its backing file deleted.
     *
     * @return the removed element, or null if the key was absent
     */
    public synchronized Object removeForKey( Object key ) throws Exception {
        fetchTable();
        Object answer = null;
        int localKey = hashKey( key.hashCode() );
        DxDiskHashNode node = table[localKey];
        if (node != null) {
            if (node instanceof DxDiskHashNodeLeaf) {
                answer = ((DxDiskHashNodeLeaf)node).removeForKey( key );
                if (((DxDiskHashNodeLeaf)node).element == null) {
                    table[localKey] = null;
                    itemCount--;
                }
            } else {
                DxDiskHashNodeBranch oldNode = (DxDiskHashNodeBranch)node;
                answer = oldNode.subTable.removeForKey( key );
                if (oldNode.subTable.itemCount == 0) {
                    // The deeper sub-table ran empty: drop it entirely.
                    grandParent.deleteRequest( oldNode.subTable );
                    oldNode.subTable.deleteFile();
                    table[localKey] = null;
                    itemCount--;
                    subTableCount--;
                }
            }
        }
        // System.out.println ("remove: key: " + key + " - depth: " + depth + " - count: " + itemCount);
        // must stay at the very end so that buffer eviction works correctly
        touch();
        dirty = true;
        return answer;
    }


    /**
     * Writes only the representation stored inside a hash node, but not the
     * slot array itself (see {@link #writeTable} for that).
     */
    public void writeExternal( ObjectOutput out ) throws IOException {
        out.writeUTF( file.getName() );
        out.writeInt( bitSize );
        out.writeInt( size );
        out.writeInt( maxDepth );
        out.writeInt( depth );
        // the tables are written in old file format
        // it handles all old table configurations
        // if we throw it away the tables will be converted automatically
        int shift = grandParent.oldTablesHashMaskShift();
        if (shift > 0 && depth != maxDepth) {
            out.writeInt( hashMask << shift );
        } else {
            out.writeInt( hashMask );
        }
        out.writeInt( subTableCount );
        out.writeInt( itemCount );
    }


    /**
     * Restores the metadata written by {@link #writeExternal}, correcting the
     * hash mask of old-format tables and deriving {@code hashMaskShift} from
     * the mask. The slot array itself is loaded lazily on first access.
     */
    public void readExternal( ObjectInput in ) throws IOException, ClassNotFoundException {
        String fileName = in.readUTF();
        bitSize = in.readInt();
        size = in.readInt();
        maxDepth = in.readInt();
        depth = in.readInt();
        hashMask = in.readInt();
        subTableCount = in.readInt();
        itemCount = in.readInt();

        // we have to modify the incorrect hashMask
        // to backward compatibility with old ozone idtable files
        // it handles all old table configurations
        int shift = grandParent.oldTablesHashMaskShift();
        if (shift > 0 && depth != maxDepth) {
            hashMask >>>= shift;
        }

        file = grandParent.getFileForFilename(fileName);

        // force table to be read on next access
        if (itemCount > 0) {
            table = null;
        }

        // for speed up and simplify a local hashKey() function:
        // recover the shift as the position of the mask's lowest one-bit
        hashMaskShift = 0;
        if (depth != maxDepth) {
            int mask = hashMask;
            while ((mask & 1) == 0) {
                mask >>>= 1;
                hashMaskShift++;
            }
        }

        accessTime = 0;
        dirty = false;
    }


    /**
     * Writes the contents of the whole slot array (gzip-compressed), but not
     * the deeper sub-tables; only occupied slots are stored, each as
     * index + node-type tag + node payload.
     */
    public void writeTable() throws IOException {
        // System.out.println ("schreiben: " + fileName);
        // ObjectOutputStream out = new ObjectOutputStream (new BufferedOutputStream (new FileOutputStream (fileName), 4*1024));
        OutputStream out = new FileOutputStream( file );
        out = new GZIPOutputStream( out );
        out = new BufferedOutputStream( out, 4096 );
        ObjectOutputStream oout = new ObjectOutputStream( out );
        try {
            synchronized (table) {
                oout.writeInt( itemCount );
                for (int i = 0; i < size; i++) {
                    if (table[i] != null) {
                        oout.writeShort( i );
                        // tag: 1 = leaf, 2 = branch
                        oout.writeByte( table[i] instanceof DxDiskHashNodeLeaf ? 1 : 2 );
                        table[i].writeExternal( oout );
                    }
                }
            }
            dirty = false;
        } finally {
            oout.close();
        }
    }


    /**
     * Reads the slot array back from the backing file, recreating each stored
     * node from its index + type tag + payload triple (the inverse of
     * {@link #writeTable}).
     */
    public synchronized void readTable() throws IOException, ClassNotFoundException {
        // System.out.println ("lesen: " + fileName);
        table = new DxDiskHashNode[size];

        // ObjectInputStream in = new ObjectInputStream (new BufferedInputStream (new FileInputStream (fileName), 4*1024));
        InputStream in = new FileInputStream( file );
        in = new GZIPInputStream( in );
        in = new BufferedInputStream( in, 4096 );
        ObjectInputStream oin = new ResolvingObjectInputStream( in );
        try {
            int count = oin.readInt();
            for (int i = 0; i < count; i++) {
                int index = oin.readShort();
                if (index < 0) {
                    // correct for 2s complement negative: the index was
                    // written as a (possibly >32767) slot number and read
                    // back as a signed short, so map it to its unsigned value
                    index += (int) Short.MAX_VALUE - (int) Short.MIN_VALUE + 1;
                }
                byte nodeType = oin.readByte();
                if (nodeType == 1) {
                    table[index] = grandParent.newNodeLeaf();
                } else {
                    table[index] = grandParent.newNodeBranch();
                }
                table[index].readExternal( oin );
                if (nodeType == 2) {
                    ((DxDiskHashNodeBranch)table[index]).subTable.grandParent = grandParent;
                }
            }
            touch();
            dirty = false;
        } finally {
            oin.close();
        }
    }
}
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -