2005-02-24 jrandom

    * Cache temporary memory allocation in the DSA's SHA1 impl, and the packet
      data in the streaming lib.
    * Fixed a streaming lib bug where the connection initiator would fail the
      stream if the ACK to their SYN was lost.
Author:    jrandom
Date:      2005-02-24 18:05:25 +00:00
Committer: zzz
Parent:    f61618e4a4
Commit:    00f27d4400

18 changed files with 428 additions and 237 deletions
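
The first changelog item replaces per-call allocations in DSAEngine with pooled buffers. A minimal sketch of that acquire/release pattern, condensed from the DSAEngine hunks below into a hypothetical standalone class (the class and method names are illustrative, not part of the commit):

import java.math.BigInteger;

import net.i2p.data.ByteArray;
import net.i2p.util.ByteCache;
import net.i2p.util.NativeBigInteger;

// Sketch only, condensed from the DSAEngine hunks below.
public class ByteCacheSketch {
    // a ByteCache pools fixed-size ByteArray buffers (20 bytes here, the width of DSA's r and s)
    private static final ByteCache CACHE = ByteCache.getInstance(16, 20);

    static BigInteger readR(byte[] sigbytes) {
        ByteArray rbyteBA = CACHE.acquire();        // pooled buffer instead of new byte[20]
        try {
            byte[] rbytes = rbyteBA.getData();
            System.arraycopy(sigbytes, 0, rbytes, 0, 20);
            return new NativeBigInteger(1, rbytes); // BigInteger copies the bytes, so the buffer...
        } finally {
            CACHE.release(rbyteBA);                 // ...can go straight back to the pool
        }
    }
}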

View File

@@ -33,20 +33,28 @@ import java.math.BigInteger;
import java.util.Arrays;
import net.i2p.I2PAppContext;
import net.i2p.data.ByteArray;
import net.i2p.data.Hash;
import net.i2p.data.Signature;
import net.i2p.data.SigningPrivateKey;
import net.i2p.data.SigningPublicKey;
import net.i2p.util.ByteCache;
import net.i2p.util.Log;
import net.i2p.util.NativeBigInteger;
public class DSAEngine {
private Log _log;
private I2PAppContext _context;
private SHA1EntryCache _cache;
private ByteCache _rbyteCache;
private ByteCache _sbyteCache;
public DSAEngine(I2PAppContext context) {
_log = context.logManager().getLog(DSAEngine.class);
_context = context;
_cache = new SHA1EntryCache();
_rbyteCache = ByteCache.getInstance(16, 20);
_sbyteCache = ByteCache.getInstance(16, 20);
}
public static DSAEngine getInstance() {
return I2PAppContext.getGlobalContext().dsa();
@@ -59,8 +67,10 @@ public class DSAEngine {
try {
byte[] sigbytes = signature.getData();
byte rbytes[] = new byte[20];
byte sbytes[] = new byte[20];
ByteArray rbyteBA = _rbyteCache.acquire();
ByteArray sbyteBA = _sbyteCache.acquire();
byte rbytes[] = rbyteBA.getData(); //new byte[20];
byte sbytes[] = sbyteBA.getData(); //new byte[20];
for (int x = 0; x < 40; x++) {
if (x < 20) {
rbytes[x] = sigbytes[x];
@@ -70,10 +80,18 @@ public class DSAEngine {
}
BigInteger s = new NativeBigInteger(1, sbytes);
BigInteger r = new NativeBigInteger(1, rbytes);
_rbyteCache.release(rbyteBA);
_sbyteCache.release(sbyteBA);
BigInteger y = new NativeBigInteger(1, verifyingKey.getData());
BigInteger w = s.modInverse(CryptoConstants.dsaq);
byte data[] = calculateHash(signedData, offset, size).getData();
SHAEntryCache.CacheEntry entry = _cache.acquire(size);
byte data[] = calculateHash(signedData, offset, size, entry).getData();
NativeBigInteger bi = new NativeBigInteger(1, data);
_cache.release(entry);
BigInteger u1 = bi.multiply(w).mod(CryptoConstants.dsaq);
BigInteger u2 = r.multiply(w).mod(CryptoConstants.dsaq);
BigInteger modval = CryptoConstants.dsag.modPow(u1, CryptoConstants.dsap);
@@ -110,11 +128,18 @@ public class DSAEngine {
BigInteger r = CryptoConstants.dsag.modPow(k, CryptoConstants.dsap).mod(CryptoConstants.dsaq);
BigInteger kinv = k.modInverse(CryptoConstants.dsaq);
Hash h = calculateHash(data, offset, length);
SHAEntryCache.CacheEntry entry = _cache.acquire(length);
Hash h = calculateHash(data, offset, length, entry);
if (h == null) return null;
if (h == null) {
_cache.release(entry);
return null;
}
BigInteger M = new NativeBigInteger(1, h.getData());
_cache.release(entry);
BigInteger x = new NativeBigInteger(1, signingKey.getData());
BigInteger s = (kinv.multiply(M.add(x.multiply(r)))).mod(CryptoConstants.dsaq);
@@ -160,7 +185,17 @@ public class DSAEngine {
private int[] H0 = { 0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0};
private Hash calculateHash(byte[] source, int offset, int len) {
static final int getWordlength(int sourceLength) {
long length = sourceLength * 8;
int k = 448 - (int) ((length + 1) % 512);
if (k < 0) {
k += 512;
}
int padbytes = k / 8;
return sourceLength / 4 + padbytes / 4 + 3;
}
private Hash calculateHash(byte[] source, int offset, int len, SHA256EntryCache.CacheEntry entry) {
long length = len * 8;
int k = 448 - (int) ((length + 1) % 512);
if (k < 0) {
@@ -168,7 +203,7 @@ public class DSAEngine {
}
int padbytes = k / 8;
int wordlength = len / 4 + padbytes / 4 + 3;
int[] M0 = new int[wordlength];
int[] M0 = (entry != null ? entry.M0 : new int[wordlength]);
int wordcount = 0;
int x = 0;
for (x = 0; x < (len / 4) * 4; x += 4) {
@@ -201,13 +236,13 @@ public class DSAEngine {
}
M0[wordlength - 2] = (int) (length >>> 32);
M0[wordlength - 1] = (int) (length);
int[] H = new int[5];
int[] H = (entry != null ? entry.H : new int[5]);
for (x = 0; x < 5; x++) {
H[x] = H0[x];
}
int blocks = M0.length / 16;
int[] W = new int[80];
int[] W = (entry != null ? entry.W : new int[80]);
for (int bl = 0; bl < blocks; bl++) {
int a = H[0];
int b = H[1];
@@ -241,13 +276,15 @@ public class DSAEngine {
H[4] = add(e, H[4]);
}
byte[] hashbytes = new byte[20];
byte[] hashbytes = (entry != null ? entry.hashbytes : new byte[20]);
for (x = 0; x < 5; x++) {
hashbytes[x * 4] = (byte) (H[x] << 0 >>> 24);
hashbytes[x * 4 + 1] = (byte) (H[x] << 8 >>> 24);
hashbytes[x * 4 + 2] = (byte) (H[x] << 16 >>> 24);
hashbytes[x * 4 + 3] = (byte) (H[x] << 24 >>> 24);
}
if (entry != null)
return entry.hash;
Hash hash = new Hash();
hash.setData(hashbytes);
return hash;
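
For reference, the new getWordlength() above computes the padded SHA-1 message size in 32-bit words. A worked example (not part of the commit) for a 20-byte input, which fills exactly one 512-bit block:

// getWordlength(20), step by step:
//   length   = 20 * 8            = 160 bits
//   k        = 448 - (161 % 512) = 287 zero pad bits
//   padbytes = 287 / 8           = 35
//   words    = 20/4 + 35/4 + 3   = 5 + 8 + 3 = 16
// 16 words * 32 bits = 512 bits: the message, the trailing 1 bit, the zero
// padding, and the 64-bit length together fill one SHA-1 block.
int words = DSAEngine.getWordlength(20);   // == 16 (package-private, callable from net.i2p.crypto)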

View File

@@ -0,0 +1,37 @@
package net.i2p.crypto;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import net.i2p.I2PAppContext;
import net.i2p.data.Hash;
/**
* Cache the objects used in DSA's SHA1 calculateHash method to reduce
* memory churn. The CacheEntry should be held onto as long as the
* data referenced in it is needed (which often is only one or two lines
* of code)
*
*/
public class SHA1EntryCache extends SHA256EntryCache {
protected CacheEntry createNew(int payload) {
return new SHA1CacheEntry(payload);
}
/**
* all the data alloc'ed in a calculateHash call
*/
public static class SHA1CacheEntry extends SHAEntryCache.CacheEntry {
public SHA1CacheEntry(int payload) {
wordlength = DSAEngine.getWordlength(payload);
bucket = payload;
hashbytes = new byte[20];
M0 = new int[wordlength];
W = new int[80];
H = new int[5];
hash = new Hash();
hash.setData(hashbytes);
}
}
}
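
The javadoc above describes the intended lifecycle: hold the entry only while its arrays are in use. A hedged sketch of that lifecycle as DSAEngine uses it, condensed into a hypothetical helper (this method does not exist in the commit; inside DSAEngine, _cache and the private calculateHash(..., entry) are accessible):

// Hypothetical helper, condensed from the DSAEngine.sign() hunk above.
private BigInteger hashAsBigInteger(byte[] data, int offset, int length) {
    SHAEntryCache.CacheEntry entry = _cache.acquire(length);   // pooled M0/W/H/hashbytes arrays
    try {
        Hash h = calculateHash(data, offset, length, entry);
        if (h == null) return null;
        // copy out of entry.hash before release() zeroes its backing byte[]
        return new NativeBigInteger(1, h.getData());
    } finally {
        _cache.release(entry);
    }
}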

View File

@@ -14,163 +14,20 @@ import net.i2p.data.Hash;
* of code)
*
*/
public final class SHA256EntryCache {
private static final int ONE_KB = 0;
private static final int FOUR_KB = 1;
private static final int EIGHT_KB = 2;
private static final int SIXTEEN_KB = 3;
private static final int THIRTYTWO_KB = 4;
private static final int FOURTYEIGHT_KB = 5;
private static final int LARGER = 6;
/**
* Array of Lists of free CacheEntry objects, indexed
* by the payload size they are capable of handling
*/
private List _available[] = new List[6];
/** count up how often we use the cache for each size */
private long _used[] = new long[7];
private int _sizes[] = new int[] { 1024,4*1024,8*1024,16*1024,32*1024,48*1024 };
/** no more than 32 at each size level */
private static final int MAX_CACHED = 64;
public class SHA256EntryCache extends SHAEntryCache {
public SHA256EntryCache() {
for (int i = 0; i < _available.length; i++) {
_available[i] = new ArrayList(MAX_CACHED);
//for (int j = 0; j < MAX_CACHED; j++)
// _available[i].add(new CacheEntry(_sizes[i]));
}
super();
}
/**
* Get the next available structure, either from the cache or a brand new one
*
*/
public final CacheEntry acquire(int payload) {
int entrySize = getBucket(payload);
switch (entrySize) {
case 1024:
_used[ONE_KB]++;
synchronized (_available[ONE_KB]) {
if (_available[ONE_KB].size() > 0) {
return (CacheEntry)_available[ONE_KB].remove(0);
}
}
break;
case 4*1024:
_used[FOUR_KB]++;
synchronized (_available[FOUR_KB]) {
if (_available[FOUR_KB].size() > 0) {
return (CacheEntry)_available[FOUR_KB].remove(0);
}
}
break;
case 8*1024:
_used[EIGHT_KB]++;
synchronized (_available[EIGHT_KB]) {
if (_available[EIGHT_KB].size() > 0) {
return (CacheEntry)_available[EIGHT_KB].remove(0);
}
}
break;
case 16*1024:
_used[SIXTEEN_KB]++;
synchronized (_available[SIXTEEN_KB]) {
if (_available[SIXTEEN_KB].size() > 0) {
return (CacheEntry)_available[SIXTEEN_KB].remove(0);
}
}
break;
case 32*1024:
_used[THIRTYTWO_KB]++;
synchronized (_available[THIRTYTWO_KB]) {
if (_available[THIRTYTWO_KB].size() > 0) {
return (CacheEntry)_available[THIRTYTWO_KB].remove(0);
}
}
break;
case 48*1024:
_used[FOURTYEIGHT_KB]++;
synchronized (_available[FOURTYEIGHT_KB]) {
if (_available[FOURTYEIGHT_KB].size() > 0) {
return (CacheEntry)_available[FOURTYEIGHT_KB].remove(0);
}
}
break;
default:
_used[LARGER]++;
// not for the bucket, so make it exact
return new CacheEntry(payload);
}
return new CacheEntry(entrySize);
}
/**
* Put this structure back onto the available cache for reuse
*
*/
public final void release(CacheEntry entry) {
entry.reset();
if (false) return;
switch (entry.bucket) {
case 1024:
synchronized (_available[ONE_KB]) {
if (_available[ONE_KB].size() < MAX_CACHED) {
_available[ONE_KB].add(entry);
}
}
return;
case 4*1024:
synchronized (_available[FOUR_KB]) {
if (_available[FOUR_KB].size() < MAX_CACHED) {
_available[FOUR_KB].add(entry);
}
}
return;
case 8*1024:
synchronized (_available[EIGHT_KB]) {
if (_available[EIGHT_KB].size() < MAX_CACHED) {
_available[EIGHT_KB].add(entry);
}
}
return;
case 16*1024:
synchronized (_available[SIXTEEN_KB]) {
if (_available[SIXTEEN_KB].size() < MAX_CACHED) {
_available[SIXTEEN_KB].add(entry);
}
}
return;
case 32*1024:
synchronized (_available[THIRTYTWO_KB]) {
if (_available[THIRTYTWO_KB].size() < MAX_CACHED) {
_available[THIRTYTWO_KB].add(entry);
}
}
return;
case 48*1024:
synchronized (_available[FOURTYEIGHT_KB]) {
if (_available[FOURTYEIGHT_KB].size() < MAX_CACHED) {
_available[FOURTYEIGHT_KB].add(entry);
}
}
return;
}
protected CacheEntry createNew(int payload) {
return new SHA256CacheEntry(payload);
}
/**
* all the data alloc'ed in a calculateHash call
*/
public static final class CacheEntry {
byte hashbytes[];
int W[];
int M0[];
int H[];
Hash hash;
int wordlength;
int bucket;
public CacheEntry(int payload) {
public static class SHA256CacheEntry extends SHAEntryCache.CacheEntry {
public SHA256CacheEntry(int payload) {
wordlength = SHA256Generator.getWordlength(payload);
bucket = payload;
hashbytes = new byte[32];
@@ -180,30 +37,6 @@ public final class SHA256EntryCache {
hash = new Hash();
hash.setData(hashbytes);
}
public final void reset() {
Arrays.fill(hashbytes, (byte)0x0);
Arrays.fill(M0, (byte)0x0);
Arrays.fill(W, (byte)0x0);
Arrays.fill(H, (byte)0x0);
}
}
private static final int getBucket(int payload) {
if (payload <= 1024)
return 1024;
else if (payload <= 4*1024)
return 4*1024;
else if (payload <= 8*1024)
return 8*1024;
else if (payload <= 16*1024)
return 16*1024;
else if (payload <= 32*1024)
return 32*1024;
else if (payload <= 48*1024)
return 48*1024;
else
return payload;
}
public static void main(String args[]) {

View File

@@ -72,8 +72,8 @@ public final class SHA256Generator {
return rv;
}
private final SHA256EntryCache.CacheEntry getNewEntry(int payloadSize) {
return new SHA256EntryCache.CacheEntry(payloadSize);
private final SHA256EntryCache.SHA256CacheEntry getNewEntry(int payloadSize) {
return new SHA256EntryCache.SHA256CacheEntry(payloadSize);
}
/** Calculate the SHA-256 hash of the source

View File

@@ -0,0 +1,206 @@
package net.i2p.crypto;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import net.i2p.I2PAppContext;
import net.i2p.data.Hash;
/**
* Cache the objects used in SHA256Generator's calculate method to reduce
* memory churn. The CacheEntry should be held onto as long as the
* data referenced in it is needed (which often is only one or two lines
* of code)
*
*/
public abstract class SHAEntryCache {
private static final int ONE_KB = 0;
private static final int FOUR_KB = 1;
private static final int EIGHT_KB = 2;
private static final int SIXTEEN_KB = 3;
private static final int THIRTYTWO_KB = 4;
private static final int FOURTYEIGHT_KB = 5;
private static final int LARGER = 6;
/**
* Array of Lists of free CacheEntry objects, indexed
* by the payload size they are capable of handling
*/
private List _available[] = new List[6];
/** count up how often we use the cache for each size */
private long _used[] = new long[7];
private int _sizes[] = new int[] { 1024,4*1024,8*1024,16*1024,32*1024,48*1024 };
/** no more than 64 at each size level */
private static final int MAX_CACHED = 64;
public SHAEntryCache() {
for (int i = 0; i < _available.length; i++) {
_available[i] = new ArrayList(MAX_CACHED);
//for (int j = 0; j < MAX_CACHED; j++)
// _available[i].add(new CacheEntry(_sizes[i]));
}
}
/**
* Overridden by the impl to provide a brand new cache entry, capable
* of sustaining the data necessary to digest the specified payload
*
*/
protected abstract CacheEntry createNew(int payload);
/**
* Get the next available structure, either from the cache or a brand new one
*
*/
public final CacheEntry acquire(int payload) {
int entrySize = getBucket(payload);
switch (entrySize) {
case 1024:
_used[ONE_KB]++;
synchronized (_available[ONE_KB]) {
if (_available[ONE_KB].size() > 0) {
return (CacheEntry)_available[ONE_KB].remove(0);
}
}
break;
case 4*1024:
_used[FOUR_KB]++;
synchronized (_available[FOUR_KB]) {
if (_available[FOUR_KB].size() > 0) {
return (CacheEntry)_available[FOUR_KB].remove(0);
}
}
break;
case 8*1024:
_used[EIGHT_KB]++;
synchronized (_available[EIGHT_KB]) {
if (_available[EIGHT_KB].size() > 0) {
return (CacheEntry)_available[EIGHT_KB].remove(0);
}
}
break;
case 16*1024:
_used[SIXTEEN_KB]++;
synchronized (_available[SIXTEEN_KB]) {
if (_available[SIXTEEN_KB].size() > 0) {
return (CacheEntry)_available[SIXTEEN_KB].remove(0);
}
}
break;
case 32*1024:
_used[THIRTYTWO_KB]++;
synchronized (_available[THIRTYTWO_KB]) {
if (_available[THIRTYTWO_KB].size() > 0) {
return (CacheEntry)_available[THIRTYTWO_KB].remove(0);
}
}
break;
case 48*1024:
_used[FOURTYEIGHT_KB]++;
synchronized (_available[FOURTYEIGHT_KB]) {
if (_available[FOURTYEIGHT_KB].size() > 0) {
return (CacheEntry)_available[FOURTYEIGHT_KB].remove(0);
}
}
break;
default:
_used[LARGER]++;
// not for the bucket, so make it exact
return createNew(payload);
}
return createNew(payload);
}
/**
* Put this structure back onto the available cache for reuse
*
*/
public final void release(CacheEntry entry) {
entry.reset();
if (false) return;
switch (entry.bucket) {
case 1024:
synchronized (_available[ONE_KB]) {
if (_available[ONE_KB].size() < MAX_CACHED) {
_available[ONE_KB].add(entry);
}
}
return;
case 4*1024:
synchronized (_available[FOUR_KB]) {
if (_available[FOUR_KB].size() < MAX_CACHED) {
_available[FOUR_KB].add(entry);
}
}
return;
case 8*1024:
synchronized (_available[EIGHT_KB]) {
if (_available[EIGHT_KB].size() < MAX_CACHED) {
_available[EIGHT_KB].add(entry);
}
}
return;
case 16*1024:
synchronized (_available[SIXTEEN_KB]) {
if (_available[SIXTEEN_KB].size() < MAX_CACHED) {
_available[SIXTEEN_KB].add(entry);
}
}
return;
case 32*1024:
synchronized (_available[THIRTYTWO_KB]) {
if (_available[THIRTYTWO_KB].size() < MAX_CACHED) {
_available[THIRTYTWO_KB].add(entry);
}
}
return;
case 48*1024:
synchronized (_available[FOURTYEIGHT_KB]) {
if (_available[FOURTYEIGHT_KB].size() < MAX_CACHED) {
_available[FOURTYEIGHT_KB].add(entry);
}
}
return;
}
}
/**
* all the data alloc'ed in a calculateHash call
*/
public static abstract class CacheEntry {
byte hashbytes[];
int W[];
int M0[];
int H[];
Hash hash;
int wordlength;
int bucket;
protected CacheEntry() {}
public final void reset() {
Arrays.fill(hashbytes, (byte)0x0);
Arrays.fill(M0, (byte)0x0);
Arrays.fill(W, (byte)0x0);
Arrays.fill(H, (byte)0x0);
}
}
private static final int getBucket(int payload) {
if (payload <= 1024)
return 1024;
else if (payload <= 4*1024)
return 4*1024;
else if (payload <= 8*1024)
return 8*1024;
else if (payload <= 16*1024)
return 16*1024;
else if (payload <= 32*1024)
return 32*1024;
else if (payload <= 48*1024)
return 48*1024;
else
return payload;
}
}
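
getBucket() above rounds a payload up to the smallest size class that can hold it, so entries for similar payload sizes share a pool. An equivalent standalone sketch (getBucket itself is private; the method name here is illustrative):

// Equivalent to the private getBucket() above, shown standalone.
static int bucketFor(int payload) {
    int[] sizes = { 1024, 4*1024, 8*1024, 16*1024, 32*1024, 48*1024 };
    for (int i = 0; i < sizes.length; i++) {
        if (payload <= sizes[i])
            return sizes[i];      // smallest size class that fits
    }
    return payload;               // over 48KB: sized exactly and never pooled on release
}
// bucketFor(800)    -> 1024
// bucketFor(5000)   -> 8192
// bucketFor(100000) -> 100000 (acquire() counts it under LARGER and allocates fresh)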

View File

@@ -66,8 +66,8 @@ class TransientSessionKeyManager extends SessionKeyManager {
super(context);
_log = context.logManager().getLog(TransientSessionKeyManager.class);
_context = context;
_outboundSessions = new HashMap(64);
_inboundTagSets = new HashMap(1024);
_outboundSessions = new HashMap(1024);
_inboundTagSets = new HashMap(64*1024);
}
private TransientSessionKeyManager() { this(null); }

View File

@@ -25,11 +25,14 @@ public final class ByteCache {
*/
public static ByteCache getInstance(int cacheSize, int size) {
Integer sz = new Integer(size);
ByteCache cache = null;
synchronized (_caches) {
if (!_caches.containsKey(sz))
_caches.put(sz, new ByteCache(cacheSize, size));
return (ByteCache)_caches.get(sz);
cache = (ByteCache)_caches.get(sz);
}
cache.resize(cacheSize);
return cache;
}
private Log _log;
/** list of available entries */
@@ -56,6 +59,11 @@ public final class ByteCache {
_log = I2PAppContext.getGlobalContext().logManager().getLog(ByteCache.class);
}
private void resize(int maxCachedEntries) {
if (_maxCached >= maxCachedEntries) return;
_maxCached = maxCachedEntries;
}
/**
* Get the next available structure, either from the cache or a brand new one
*
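
ByteCache keeps one shared cache per buffer size, and getInstance() now grows the per-size cap via resize() when a caller asks for more. A small illustration (not in the commit; it assumes the constructor seeds the cap from its cacheSize argument, which the visible hunks imply but do not show):

import net.i2p.util.ByteCache;

public class ByteCacheResizeSketch {
    public static void main(String[] args) {
        ByteCache a = ByteCache.getInstance(16, 20); // created with room for 16 spare 20-byte buffers
        ByteCache b = ByteCache.getInstance(32, 20); // same instance; resize() raises the cap to 32
        ByteCache c = ByteCache.getInstance(8, 20);  // same instance; resize() ignores the smaller value
        System.out.println(a == b && b == c);        // true: one shared cache per buffer size
    }
}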

View File

@@ -30,7 +30,7 @@ public class SimpleTimer {
_context = I2PAppContext.getGlobalContext();
_log = _context.logManager().getLog(SimpleTimer.class);
_events = new TreeMap();
_eventTimes = new HashMap();
_eventTimes = new HashMap(1024);
_readyEvents = new ArrayList(4);
I2PThread runner = new I2PThread(new SimpleTimerRunner());
runner.setName("SimpleTimer");