forked from I2P_Developers/i2p.i2p
* JobQueue: Change queue from a List to a Set for efficiency
history.txt
@@ -1,3 +1,15 @@
+2011-09-06 zzz
+ * Crypto: Rework use of SHA256 for efficiency and
+   to avoid clogging the Hash cache with one-time hashes,
+   and to avoid the global cache lock.
+   This also greatly increases Hash cache hit rates.
+   Also use SimpleByteCache for temporary byte buffers.
+ * I2PTunnel: Save keys to privkey file when enabling
+   persistent key after tunnel creation (ticket #480)
+ * JobQueue: Change queue from a List to a Set for efficiency
+ * PrivateKeyFile: Add more constructors
+ * SDSCache: Use weak refs; increase size for pub keys
+
 2011-09-04 zzz
  * NetDB:
    - Try again to fix ISJ deadlock, thx devzero
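
The Crypto entry above is about cutting per-hash overhead: one-time hashes should not be interned into the shared Hash cache (or take its global lock), and temporary digest buffers should be reused instead of reallocated. As a rough standalone illustration of that idea only (invented names; this is not I2P's SimpleByteCache or SHA-256 code), a per-thread MessageDigest plus a caller-reused output buffer avoids both the shared lock and the throwaway allocations:

import java.security.DigestException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

/**
 * Generic illustration only (invented names, not I2P code): per-thread digest
 * state and a reusable output buffer, so one-shot hashes touch no shared
 * cache and take no global lock.
 */
public class OneShotSha256 {
    private static final ThreadLocal<MessageDigest> DIGEST =
        new ThreadLocal<MessageDigest>() {
            @Override protected MessageDigest initialValue() {
                try {
                    return MessageDigest.getInstance("SHA-256");
                } catch (NoSuchAlgorithmException e) {
                    throw new RuntimeException(e);
                }
            }
        };

    /** Hashes data into the caller-supplied 32-byte buffer, which can be reused across calls. */
    public static void hash(byte[] data, byte[] out32) {
        MessageDigest md = DIGEST.get();
        md.update(data);
        try {
            md.digest(out32, 0, 32);   // writes the digest into the reusable buffer and resets md
        } catch (DigestException e) {
            throw new RuntimeException(e);
        }
    }

    public static void main(String[] args) {
        byte[] scratch = new byte[32];          // temporary buffer, reused for every call
        hash("hello".getBytes(), scratch);
        hash("world".getBytes(), scratch);      // overwrites the same buffer, no new allocation
        StringBuilder hex = new StringBuilder();
        for (byte b : scratch)
            hex.append(String.format("%02x", b));
        System.out.println(hex);
    }
}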

JobQueue.java
@@ -13,11 +13,13 @@ import java.io.Writer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.SortedMap;
 import java.util.TreeMap;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
@@ -44,7 +46,7 @@ public class JobQueue {
     /** list of jobs that are ready to run ASAP */
     private final BlockingQueue<Job> _readyJobs;
     /** list of jobs that are scheduled for running in the future */
-    private final List<Job> _timedJobs;
+    private final Set<Job> _timedJobs;
     /** job name to JobStat for that job */
     private final Map<String, JobStats> _jobStats;
     /** how many job queue runners can go concurrently */
@@ -135,7 +137,7 @@ public class JobQueue {
 
         _alive = true;
         _readyJobs = new LinkedBlockingQueue();
-        _timedJobs = new ArrayList(64);
+        _timedJobs = new HashSet(64);
         _jobLock = new Object();
         _queueRunners = new ConcurrentHashMap(RUNNERS);
         _jobStats = new ConcurrentHashMap();
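
The previous two hunks are the core of the change: _timedJobs becomes a HashSet instead of an ArrayList, so removing a job that has come due no longer needs a linear scan, element shifting, or index bookkeeping. A minimal standalone sketch of the cost difference (Integer elements and the sizes are arbitrary; this is not I2P code):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/** Illustration only: removing an arbitrary element is O(n) for ArrayList, O(1) on average for HashSet. */
public class RemovalCostSketch {
    public static void main(String[] args) {
        List<Integer> list = new ArrayList<Integer>();
        Set<Integer> set = new HashSet<Integer>();
        for (int i = 0; i < 200000; i++) {
            list.add(i);
            set.add(i);
        }

        long t = System.nanoTime();
        list.remove(Integer.valueOf(100000));   // linear scan, then shifts ~100k trailing elements
        long listNanos = System.nanoTime() - t;

        t = System.nanoTime();
        set.remove(Integer.valueOf(100000));    // hash lookup, nothing to shift
        long setNanos = System.nanoTime() - t;

        System.out.println("ArrayList remove: " + listNanos + " ns");
        System.out.println("HashSet remove:   " + setNanos + " ns");
    }
}

The trade-off is that a Set keeps no ordering, which the loops in the following hunks do not rely on: every timed job is examined on each pass anyway.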

@@ -454,8 +456,8 @@ public class JobQueue {
             List<Job> toAdd = null;
             try {
                 synchronized (_jobLock) {
-                    for (int i = 0; i < _timedJobs.size(); i++) {
-                        Job j = _timedJobs.get(i);
+                    for (Iterator<Job> iter = _timedJobs.iterator(); iter.hasNext(); ) {
+                        Job j = iter.next();
                         // find jobs due to start before now
                         long timeLeft = j.getTiming().getStartAfter() - now;
                         if (timeLeft <= 0) {
@@ -464,8 +466,7 @@ public class JobQueue {
 
                             if (toAdd == null) toAdd = new ArrayList(4);
                             toAdd.add(j);
-                            _timedJobs.remove(i);
-                            i--; // so the index stays consistent
+                            iter.remove();
                         } else {
                             if ( (timeToWait <= 0) || (timeLeft < timeToWait) )
                                 timeToWait = timeLeft;
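
With _timedJobs now a Set, the indexed loop and its remove(i) / i-- bookkeeping are replaced by an explicit Iterator; Iterator.remove() is the safe way to drop elements from a collection while iterating it (a for-each loop calling Set.remove() here would throw ConcurrentModificationException). A standalone sketch of the same pattern (hypothetical data, not I2P code):

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

/** Illustration only: drain the "due" entries from a Set while iterating it. */
public class DrainDueSketch {
    public static void main(String[] args) {
        Set<Long> startTimes = new HashSet<Long>();
        startTimes.add(200L);      // due
        startTimes.add(900L);      // due
        startTimes.add(5000L);     // not due yet

        long now = 1000L;
        for (Iterator<Long> iter = startTimes.iterator(); iter.hasNext(); ) {
            long startAfter = iter.next();
            if (startAfter - now <= 0) {
                iter.remove();     // structural removal mid-iteration, no index bookkeeping
            }
        }
        System.out.println(startTimes);   // prints [5000]
    }
}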

@@ -519,8 +520,7 @@ public class JobQueue {
      */
     private void updateJobTimings(long delta) {
         synchronized (_jobLock) {
-            for (int i = 0; i < _timedJobs.size(); i++) {
-                Job j = _timedJobs.get(i);
+            for (Job j : _timedJobs) {
                 j.getTiming().offsetChanged(delta);
             }
             for (Job j : _readyJobs) {

RouterVersion.java
@@ -18,7 +18,7 @@ public class RouterVersion {
     /** deprecated */
     public final static String ID = "Monotone";
     public final static String VERSION = CoreVersion.VERSION;
-    public final static long BUILD = 11;
+    public final static long BUILD = 12;
 
     /** for example "-test" */
     public final static String EXTRA = "";