replaced double-checked locking (http://www.javaworld.com/javaworld/jw-02-2001/jw-0209-double.html) with the actual logic
- prepare the cache prior to use if you want the hash cache. also fixed the ejection policy so it does not clear the whole cache, but merely removes sufficient values (though maybe clearing the cache is the right thing to do, so as to avoid ejection churn... hmm). both of these fixes brought to you by the keen eyes of the one called mihi
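For context, the idiom being removed is the classic broken double-checked locking pattern described in the linked article: under the pre-JSR-133 Java memory model, a second thread can observe a non-null _xorCache reference before the HashMap behind it is fully constructed. A minimal sketch of the before/after (the holder class is hypothetical; the field and constant names follow the patch):

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical holder class, just to isolate the pattern.
    class XorCacheHolder {
        private static final int MAX_CACHED_XOR = 1024;
        private Map _xorCache;

        // BROKEN: the double-checked locking being removed.  Under the old
        // memory model, another thread may see _xorCache != null while the
        // HashMap behind it is still half-constructed.
        byte[] brokenGet(Object key) {
            if (_xorCache == null) {
                synchronized (this) {
                    if (_xorCache == null)
                        _xorCache = new HashMap(MAX_CACHED_XOR);
                }
            }
            return (byte[])_xorCache.get(key); // unsynchronized read of a racy field
        }

        // What the patch does instead: initialization becomes an explicit,
        // fully synchronized step, and use without preparation fails fast.
        public void prepareCache() {
            synchronized (this) {
                if (_xorCache == null)
                    _xorCache = new HashMap(MAX_CACHED_XOR);
            }
        }

        byte[] checkedGet(Object key) {
            if (_xorCache == null)
                throw new IllegalStateException("To use the cache, you must first prepare it");
            return (byte[])_xorCache.get(key);
        }
    }

Pushing initialization into an explicit prepareCache() step trades lazy convenience for one safe, fully synchronized write up front plus a cheap null check at use time.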
@@ -57,23 +57,31 @@ public class Hash extends DataStructureImpl {
         _base64ed = null;
     }
     
+    /**
+     * Prepare this hash's cache for xor values - very few hashes will need it,
+     * so we don't want to waste the memory, and lazy initialization would incur
+     * online overhead to verify the initialization.
+     *
+     */
+    public void prepareCache() {
+        synchronized (this) {
+            if (_xorCache == null)
+                _xorCache = new HashMap(MAX_CACHED_XOR);
+        }
+    }
+    
     /**
      * Calculate the xor with the current object and the specified hash,
      * caching values where possible.  Currently this keeps up to MAX_CACHED_XOR
      * (1024) entries, and uses an essentially random ejection policy.  Later
      * perhaps go for an LRU or FIFO?
      *
+     * @throws IllegalStateException if you try to use the cache without first
+     *                               preparing this object's cache via .prepareCache()
      */
-    public byte[] cachedXor(Hash key) {
-        if (_xorCache == null) {
-            // we dont want to create two of these
-            synchronized (this) {
-                if (_xorCache == null)
-                    _xorCache = new HashMap(MAX_CACHED_XOR);
-            }
-        }
-        
-        // i think we can get away with this being outside the synchronized block
+    public byte[] cachedXor(Hash key) throws IllegalStateException {
+        if (_xorCache == null)
+            throw new IllegalStateException("To use the cache, you must first prepare it");
         byte[] distance = (byte[])_xorCache.get(key);
         
         if (distance == null) {
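The new contract in practice - a hypothetical caller, assuming the class lives at net.i2p.data.Hash as in the I2P core:

    import net.i2p.data.Hash;

    public class CacheContractDemo {
        public static void main(String[] args) {
            Hash prepared = new Hash(new byte[Hash.HASH_LENGTH]); // all zeroes
            prepared.prepareCache();                    // opt in before any cachedXor call
            Hash key = new Hash(new byte[Hash.HASH_LENGTH]);
            byte[] distance = prepared.cachedXor(key);  // fine: the cache exists

            Hash unprepared = new Hash(new byte[Hash.HASH_LENGTH]);
            unprepared.cachedXor(key);                  // throws IllegalStateException
        }
    }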
@@ -85,7 +93,8 @@ public class Hash extends DataStructureImpl {
                 Set keys = new HashSet(toRemove);
                 // this removes essentially random keys - we dont maintain any sort
                 // of LRU or age.  perhaps we should?
-                for (Iterator iter = _xorCache.keySet().iterator(); iter.hasNext(); )
+                int removed = 0;
+                for (Iterator iter = _xorCache.keySet().iterator(); iter.hasNext() && removed < toRemove; removed++)
                     keys.add(iter.next());
                 for (Iterator iter = keys.iterator(); iter.hasNext(); )
                     _xorCache.remove(iter.next());
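Worth spelling out why the old loop cleared the whole cache: new HashSet(toRemove) only sets the initial capacity, not a size limit, and the loop had no bound besides iter.hasNext(), so every key was copied into keys and then removed. The fix stops the copy after toRemove keys. A standalone sketch of the corrected bounded eviction (cache and toRemove stand in for the patch's field and local):

    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.Map;
    import java.util.Set;

    class BoundedEviction {
        // Remove roughly toRemove entries; HashMap iteration order is
        // essentially random, matching the patch's stated policy.
        static void evict(Map cache, int toRemove) {
            Set keys = new HashSet(toRemove);
            int removed = 0;
            for (Iterator iter = cache.keySet().iterator(); iter.hasNext() && removed < toRemove; removed++)
                keys.add(iter.next());
            for (Iterator iter = keys.iterator(); iter.hasNext(); )
                cache.remove(iter.next());
        }
    }

Collecting the keys first and removing afterwards avoids the ConcurrentModificationException that removing directly from the map during keySet() iteration would trigger.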
@@ -94,7 +103,7 @@ public class Hash extends DataStructureImpl {
                 _xorCache.put(key, (Object)distance);
                 cached = _xorCache.size();
             }
-            if (false && (_log.shouldLog(Log.DEBUG))) {
+            if (_log.shouldLog(Log.DEBUG)) {
                 // explicit buffer, since the compiler can't guess how long it'll be
                 StringBuffer buf = new StringBuffer(128);
                 buf.append("miss [").append(cached).append("] from ");
@@ -103,7 +112,7 @@ public class Hash extends DataStructureImpl {
                 _log.debug(buf.toString(), new Exception());
             }
         } else {
-            if (false && (_log.shouldLog(Log.DEBUG))) {
+            if (_log.shouldLog(Log.DEBUG)) {
                 // explicit buffer, since the compiler can't guess how long it'll be
                 StringBuffer buf = new StringBuffer(128);
                 buf.append("hit from ");
@@ -172,6 +181,7 @@ public class Hash extends DataStructureImpl {
     
     private static void testFill() {
         Hash local = new Hash(new byte[HASH_LENGTH]); // all zeroes
+        local.prepareCache();
         for (int i = 0; i < MAX_CACHED_XOR; i++) {
             byte t[] = new byte[HASH_LENGTH];
             for (int j = 0; j < HASH_LENGTH; j++)
@@ -184,9 +194,11 @@ public class Hash extends DataStructureImpl {
                 return;
             }
         }
+        _log.debug("Fill test passed");
     }
     private static void testOverflow() {
         Hash local = new Hash(new byte[HASH_LENGTH]); // all zeroes
+        local.prepareCache();
         for (int i = 0; i < MAX_CACHED_XOR*2; i++) {
             byte t[] = new byte[HASH_LENGTH];
             for (int j = 0; j < HASH_LENGTH; j++)
@@ -207,10 +219,12 @@ public class Hash extends DataStructureImpl {
                 }
             }
         }
+        _log.debug("overflow test passed");
     }
     private static void testFillCheck() {
         Set hashes = new HashSet();
         Hash local = new Hash(new byte[HASH_LENGTH]); // all zeroes
+        local.prepareCache();
         // fill 'er up
         for (int i = 0; i < MAX_CACHED_XOR; i++) {
             byte t[] = new byte[HASH_LENGTH];
@@ -248,5 +262,6 @@ public class Hash extends DataStructureImpl {
                 return;
             }
         }
+        _log.debug("Fill check test passed");
     }
 }
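The javadoc above leaves "perhaps go for an LRU or FIFO?" open. If that route were ever taken, java.util.LinkedHashMap already provides the hook; a hypothetical sketch, not part of this commit:

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Hypothetical LRU replacement for the HashMap + manual ejection:
    // an access-ordered LinkedHashMap evicts the least recently used
    // entry automatically once MAX_CACHED_XOR entries are exceeded.
    class LruXorCache extends LinkedHashMap {
        private static final int MAX_CACHED_XOR = 1024;

        LruXorCache() {
            super(MAX_CACHED_XOR, 0.75f, true); // true = access order
        }

        protected boolean removeEldestEntry(Map.Entry eldest) {
            return size() > MAX_CACHED_XOR;
        }
    }

It would still need the same external synchronization as the plain HashMap, but the ejection loop disappears entirely.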
@@ -55,6 +55,8 @@ class KBucketImpl implements KBucket {
     public Hash getLocal() { return _local; }
     private void setLocal(Hash local) {
         _local = local;
+        // we want to make sure we've got the cache in place before calling cachedXor
+        _local.prepareCache();
         if (_log.shouldLog(Log.DEBUG))
             _log.debug("Local hash reset to " + (local == null ? "null" : DataHelper.toHexString(local.getData())));
     }
@@ -343,7 +345,9 @@ class KBucketImpl implements KBucket {
         int low = 1;
         int high = 3;
         Log log = I2PAppContext.getGlobalContext().logManager().getLog(KBucketImpl.class);
-        KBucketImpl bucket = new KBucketImpl(I2PAppContext.getGlobalContext(), Hash.FAKE_HASH);
+        Hash local = Hash.FAKE_HASH;
+        local.prepareCache();
+        KBucketImpl bucket = new KBucketImpl(I2PAppContext.getGlobalContext(), local);
         bucket.setRange(low, high);
         Hash lowerBoundKey = bucket.getRangeBeginKey();
         Hash upperBoundKey = bucket.getRangeEndKey();
@@ -378,7 +382,9 @@ class KBucketImpl implements KBucket {
         int high = 200;
         byte hash[] = new byte[Hash.HASH_LENGTH];
         RandomSource.getInstance().nextBytes(hash);
-        KBucketImpl bucket = new KBucketImpl(I2PAppContext.getGlobalContext(), new Hash(hash));
+        Hash local = new Hash(hash);
+        local.prepareCache();
+        KBucketImpl bucket = new KBucketImpl(I2PAppContext.getGlobalContext(), local);
         bucket.setRange(low, high);
         Hash lowerBoundKey = bucket.getRangeBeginKey();
         Hash upperBoundKey = bucket.getRangeEndKey();