replaced double-checked locking (http://www.javaworld.com/javaworld/jw-02-2001/jw-0209-double.html) with the actual logic
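For context, a minimal sketch of the idiom being removed and its replacement, assuming a simplified stand-alone Hash with raw pre-generics collections as in the diff below; cachedXorLazy is a hypothetical stand-in for the old cachedXor, not the actual source:

    import java.util.HashMap;
    import java.util.Map;

    class Hash {
        static final int MAX_CACHED_XOR = 1024;
        private Map _xorCache;

        // BROKEN double-checked locking: under the pre-Java-5 memory model,
        // a racing thread can observe a non-null _xorCache reference before
        // the HashMap's internals have been fully published.
        byte[] cachedXorLazy(Hash key) {
            if (_xorCache == null) {                // unsynchronized read
                synchronized (this) {
                    if (_xorCache == null)
                        _xorCache = new HashMap(MAX_CACHED_XOR);
                }
            }
            return (byte[]) _xorCache.get(key);     // may see a half-built map
        }

        // The fix: hoist initialization into an explicit, fully synchronized
        // call that callers invoke once before any cachedXor() use.
        void prepareCache() {
            synchronized (this) {
                if (_xorCache == null)
                    _xorCache = new HashMap(MAX_CACHED_XOR);
            }
        }
    }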

- prepare the cache prior to use if you want the hash cache.
- fix the ejection policy so it does not clear the whole cache, but merely removes sufficient values.
  (though maybe clearing the cache is the right thing to do after all, so as to avoid ejection churn... hmm)

both of these fixes brought to you by the keen eyes of the one called mihi
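The ejection half of the change, as a self-contained sketch (assuming a plain pre-generics Map as the cache; class and method names here are illustrative, not the actual source): instead of clearing everything on overflow, copy toRemove keys — effectively arbitrary ones, per HashMap iteration order — into a scratch set, then evict only those. The two-pass copy avoids removing from the map while iterating its key set.

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.Map;
    import java.util.Set;

    class XorCacheEjection {
        // Evict up to toRemove entries instead of clearing the whole cache.
        static void ejectSome(Map cache, int toRemove) {
            Set keys = new HashSet(toRemove);
            int removed = 0;
            for (Iterator iter = cache.keySet().iterator();
                 iter.hasNext() && removed < toRemove; removed++)
                keys.add(iter.next());          // grab arbitrary victims
            for (Iterator iter = keys.iterator(); iter.hasNext(); )
                cache.remove(iter.next());      // safe: not iterating cache here
        }

        public static void main(String[] args) {
            Map cache = new HashMap();
            for (int i = 0; i < 10; i++)
                cache.put(Integer.toString(i), new byte[32]);
            ejectSome(cache, 4);
            System.out.println(cache.size());   // prints 6
        }
    }

Iterator.remove() on the key-set iterator would be a one-pass alternative; the copy-then-remove shape above mirrors the diff below.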
commit 5166eab5ee
parent 232f6f158d
Author:       jrandom
Date:         2004-06-20 04:27:58 +00:00
Committed by: zzz

2 changed files with 37 additions and 16 deletions

File: Hash.java

@@ -58,22 +58,30 @@ public class Hash extends DataStructureImpl {
     }
     /**
-     * Calculate the xor with the current object and the specified hash,
-     * caching values where possible.  Currently this keeps up to MAX_CACHED_XOR
-     * (1024) entries, and uses an essentially random ejection policy.  Later
-     * perhaps go for an LRU or FIFO?
+     * Prepare this hash's cache for xor values - very few hashes will need it,
+     * so we don't want to waste the memory, and lazy initialization would incur
+     * online overhead to verify the initialization.
      *
      */
-    public byte[] cachedXor(Hash key) {
-        if (_xorCache == null) {
-            // we dont want to create two of these
-            synchronized (this) {
-                if (_xorCache == null)
-                    _xorCache = new HashMap(MAX_CACHED_XOR);
-            }
-        }
-        // i think we can get away with this being outside the synchronized block
+    public void prepareCache() {
+        synchronized (this) {
+            if (_xorCache == null)
+                _xorCache = new HashMap(MAX_CACHED_XOR);
+        }
+    }
+
+    /**
+     * Calculate the xor with the current object and the specified hash,
+     * caching values where possible.  Currently this keeps up to MAX_CACHED_XOR
+     * (1024) entries, and uses an essentially random ejection policy.  Later
+     * perhaps go for an LRU or FIFO?
+     *
+     * @throws IllegalStateException if you try to use the cache without first
+     *         preparing this object's cache via .prepareCache()
+     */
+    public byte[] cachedXor(Hash key) throws IllegalStateException {
+        if (_xorCache == null)
+            throw new IllegalStateException("To use the cache, you must first prepare it");
         byte[] distance = (byte[])_xorCache.get(key);
         if (distance == null) {
@@ -85,7 +93,8 @@ public class Hash extends DataStructureImpl {
                 Set keys = new HashSet(toRemove);
                 // this removes essentially random keys - we dont maintain any sort
                 // of LRU or age.  perhaps we should?
-                for (Iterator iter = _xorCache.keySet().iterator(); iter.hasNext(); )
+                int removed = 0;
+                for (Iterator iter = _xorCache.keySet().iterator(); iter.hasNext() && removed < toRemove; removed++)
                     keys.add(iter.next());
                 for (Iterator iter = keys.iterator(); iter.hasNext(); )
                     _xorCache.remove(iter.next());
@@ -94,7 +103,7 @@ public class Hash extends DataStructureImpl {
                 _xorCache.put(key, (Object)distance);
                 cached = _xorCache.size();
             }
-            if (false && (_log.shouldLog(Log.DEBUG))) {
+            if (_log.shouldLog(Log.DEBUG)) {
                 // explicit buffer, since the compiler can't guess how long it'll be
                 StringBuffer buf = new StringBuffer(128);
                 buf.append("miss [").append(cached).append("] from ");
@@ -103,7 +112,7 @@ public class Hash extends DataStructureImpl {
                 _log.debug(buf.toString(), new Exception());
             }
         } else {
-            if (false && (_log.shouldLog(Log.DEBUG))) {
+            if (_log.shouldLog(Log.DEBUG)) {
                 // explicit buffer, since the compiler can't guess how long it'll be
                 StringBuffer buf = new StringBuffer(128);
                 buf.append("hit from ");
@@ -172,6 +181,7 @@ public class Hash extends DataStructureImpl {
     private static void testFill() {
         Hash local = new Hash(new byte[HASH_LENGTH]); // all zeroes
+        local.prepareCache();
         for (int i = 0; i < MAX_CACHED_XOR; i++) {
             byte t[] = new byte[HASH_LENGTH];
             for (int j = 0; j < HASH_LENGTH; j++)
@@ -184,9 +194,11 @@ public class Hash extends DataStructureImpl {
                 return;
             }
         }
+        _log.debug("Fill test passed");
     }
     private static void testOverflow() {
         Hash local = new Hash(new byte[HASH_LENGTH]); // all zeroes
+        local.prepareCache();
         for (int i = 0; i < MAX_CACHED_XOR*2; i++) {
             byte t[] = new byte[HASH_LENGTH];
             for (int j = 0; j < HASH_LENGTH; j++)
@@ -207,10 +219,12 @@ public class Hash extends DataStructureImpl {
                 }
             }
         }
+        _log.debug("overflow test passed");
     }
     private static void testFillCheck() {
         Set hashes = new HashSet();
         Hash local = new Hash(new byte[HASH_LENGTH]); // all zeroes
+        local.prepareCache();
         // fill 'er up
         for (int i = 0; i < MAX_CACHED_XOR; i++) {
             byte t[] = new byte[HASH_LENGTH];
@@ -248,5 +262,6 @@ public class Hash extends DataStructureImpl {
                 return;
             }
         }
+        _log.debug("Fill check test passed");
     }
 }
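Taken together, the caller's side of the new contract looks like the sketch below, mirroring the updated tests above (assumes net.i2p.data.Hash on the classpath; otherHash stands for any second key):

    import net.i2p.data.Hash;

    public class CachedXorUsage {
        public static void main(String[] args) {
            Hash local = new Hash(new byte[Hash.HASH_LENGTH]); // all zeroes
            local.prepareCache();           // must come before any cachedXor()
            Hash otherHash = new Hash(new byte[Hash.HASH_LENGTH]);
            byte[] distance = local.cachedXor(otherHash);
            // skipping prepareCache() now fails fast with IllegalStateException
            System.out.println(distance.length);
        }
    }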

File: KBucketImpl.java

@@ -55,6 +55,8 @@ class KBucketImpl implements KBucket {
     public Hash getLocal() { return _local; }
     private void setLocal(Hash local) {
         _local = local;
+        // we want to make sure we've got the cache in place before calling cachedXor
+        _local.prepareCache();
         if (_log.shouldLog(Log.DEBUG))
             _log.debug("Local hash reset to " + (local == null ? "null" : DataHelper.toHexString(local.getData())));
     }
@@ -343,7 +345,9 @@ class KBucketImpl implements KBucket {
         int low = 1;
         int high = 3;
         Log log = I2PAppContext.getGlobalContext().logManager().getLog(KBucketImpl.class);
-        KBucketImpl bucket = new KBucketImpl(I2PAppContext.getGlobalContext(), Hash.FAKE_HASH);
+        Hash local = Hash.FAKE_HASH;
+        local.prepareCache();
+        KBucketImpl bucket = new KBucketImpl(I2PAppContext.getGlobalContext(), local);
         bucket.setRange(low, high);
         Hash lowerBoundKey = bucket.getRangeBeginKey();
         Hash upperBoundKey = bucket.getRangeEndKey();
@@ -378,7 +382,9 @@ class KBucketImpl implements KBucket {
         int high = 200;
         byte hash[] = new byte[Hash.HASH_LENGTH];
         RandomSource.getInstance().nextBytes(hash);
-        KBucketImpl bucket = new KBucketImpl(I2PAppContext.getGlobalContext(), new Hash(hash));
+        Hash local = new Hash(hash);
+        local.prepareCache();
+        KBucketImpl bucket = new KBucketImpl(I2PAppContext.getGlobalContext(), local);
         bucket.setRange(low, high);
         Hash lowerBoundKey = bucket.getRangeBeginKey();
         Hash upperBoundKey = bucket.getRangeEndKey();