propagate from branch 'i2p.i2p' (head ea4e5cdef1ba51f6eb24828f05dd7ef66d5188ae)

to branch 'i2p.i2p.zzz.android' (head 03199a42adae7400f4627e7391fe3768493fd65f)
zzz
2011-06-14 18:48:26 +00:00
197 changed files with 32864 additions and 3027 deletions

View File

@@ -53,7 +53,7 @@
         </javac>
     </target>
-    <target name="jar" depends="compile, jarUpToDate" unless="jar.uptodate" >
+    <target name="listChangedFiles" if="mtn.available" >
         <exec executable="mtn" outputproperty="workspace.changes" errorproperty="mtn.error2" failifexecutionfails="false" >
             <arg value="list" />
             <arg value="changed" />
@@ -65,6 +65,11 @@
             <arg value="[:space:]" />
             <arg value="," />
         </exec>
+    </target>
+    <target name="jar" depends="compile, jarUpToDate, listChangedFiles" unless="jar.uptodate" >
+        <!-- set if unset -->
+        <property name="workspace.changes.tr" value="" />
         <jar destfile="./build/router.jar" basedir="./build/obj" includes="**/*.class" >
             <manifest>
                 <attribute name="Build-Date" value="${build.timestamp}" />

View File

@@ -18,7 +18,7 @@ public class RouterVersion {
     /** deprecated */
     public final static String ID = "Monotone";
     public final static String VERSION = CoreVersion.VERSION;
-    public final static long BUILD = 18;
+    public final static long BUILD = 27;
     /** for example "-test" */
     public final static String EXTRA = "";

View File

@@ -26,8 +26,8 @@ class FloodfillMonitorJob extends JobImpl {
     private static final int REQUEUE_DELAY = 60*60*1000;
     private static final long MIN_UPTIME = 2*60*60*1000;
     private static final long MIN_CHANGE_DELAY = 6*60*60*1000;
-    private static final int MIN_FF = 90;
-    private static final int MAX_FF = 180;
+    private static final int MIN_FF = 110;
+    private static final int MAX_FF = 220;
     private static final String PROP_FLOODFILL_PARTICIPANT = "router.floodfillParticipant";
     public FloodfillMonitorJob(RouterContext context, FloodfillNetworkDatabaseFacade facade) {
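
MIN_FF and MAX_FF bound how many floodfill routers the monitor wants to see in the network; this commit raises the band from 90-180 to 110-220. The following is a minimal sketch of the hysteresis such bounds make possible, not the actual FloodfillMonitorJob logic: volunteer when the known floodfill count falls below the floor, stand down when it rises above the ceiling, and otherwise keep the current role.

// Sketch only (assumed decision logic, not FloodfillMonitorJob itself).
public class FloodfillDecisionSketch {
    private static final int MIN_FF = 110;   // volunteer below this many known floodfills
    private static final int MAX_FF = 220;   // stand down above this many

    /** @param knownFloodfills floodfill routers currently known
        @param currentlyFloodfill whether this router is a floodfill now */
    static boolean shouldBeFloodfill(int knownFloodfills, boolean currentlyFloodfill) {
        if (knownFloodfills < MIN_FF)
            return true;                // too few: help out
        if (knownFloodfills > MAX_FF)
            return false;               // plenty already: back off
        return currentlyFloodfill;      // in between: keep current role (hysteresis)
    }

    public static void main(String[] args) {
        System.out.println(shouldBeFloodfill(100, false));  // true
        System.out.println(shouldBeFloodfill(150, true));   // true (stays)
        System.out.println(shouldBeFloodfill(250, true));   // false
    }
}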

View File

@@ -367,12 +367,8 @@ public class FloodfillNetworkDatabaseFacade extends KademliaNetworkDatabaseFacade {
         long maxMemory = Runtime.getRuntime().maxMemory();
         if (maxMemory == Long.MAX_VALUE)
             maxMemory = 128*1024*1024l;
-        if (maxMemory < 64*1024*1024)
-            MAX_DB_BEFORE_SKIPPING_SEARCH = 600;
-        else if (maxMemory < 128*1024*1024)
-            MAX_DB_BEFORE_SKIPPING_SEARCH = 900;
-        else
-            MAX_DB_BEFORE_SKIPPING_SEARCH = 1250;
+        // 250 for every 32 MB, min of 250, max of 1250
+        MAX_DB_BEFORE_SKIPPING_SEARCH = (int) Math.max(250l, Math.min(1250l, maxMemory / ((32 * 1024 * 1024l) / 250)));
     }
     /**
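
The replacement collapses the old three-step lookup (600/900/1250) into one clamped linear rule: roughly 250 entries of search headroom per 32 MB of max heap, never below 250 and never above 1250. A small worked example using the same expression:

// Worked example of the clamped formula from the hunk above.
public class SkipSearchLimit {
    static int limitFor(long maxMemory) {
        if (maxMemory == Long.MAX_VALUE)
            maxMemory = 128*1024*1024l;   // same fallback as above
        return (int) Math.max(250l, Math.min(1250l, maxMemory / ((32 * 1024 * 1024l) / 250)));
    }

    public static void main(String[] args) {
        System.out.println(limitFor(16*1024*1024l));    // 250  (clamped up)
        System.out.println(limitFor(64*1024*1024l));    // 500
        System.out.println(limitFor(128*1024*1024l));   // 1000
        System.out.println(limitFor(256*1024*1024l));   // 1250 (clamped down)
    }
}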

View File

@@ -269,8 +269,9 @@ class ProfilePersistenceHelper {
                 _log.debug("Loaded the profile for " + peer.toBase64() + " from " + file.getName());
             return profile;
-        } catch (IllegalArgumentException iae) {
-            _log.error("Error loading profile from " +file.getName(), iae);
+        } catch (Exception e) {
+            if (_log.shouldLog(Log.WARN))
+                _log.warn("Error loading properties from " + file.getAbsolutePath(), e);
             file.delete();
             return null;
         }
@@ -300,7 +301,7 @@
         return 0.0;
     }
-    private void loadProps(Properties props, File file) {
+    private void loadProps(Properties props, File file) throws IOException {
         InputStream fin = null;
         try {
             fin = new BufferedInputStream(new FileInputStream(file), 1);
@@ -318,9 +319,6 @@
                 _log.info("Loading compressed profile data from " + file.getName());
                 DataHelper.loadProps(props, new GZIPInputStream(fin));
             }
-        } catch (IOException ioe) {
-            if (_log.shouldLog(Log.WARN))
-                _log.warn("Error loading properties from " + file.getName(), ioe);
         } finally {
             try {
                 if (fin != null) fin.close();
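
Taken together, the ProfilePersistenceHelper hunks move I/O error handling up one level: loadProps now declares throws IOException instead of logging and swallowing it, and the caller catches any failure, warns, deletes the unreadable profile file, and returns null. Below is a minimal self-contained sketch of that pattern with hypothetical names, not ProfilePersistenceHelper's actual code.

// Sketch of the propagate-then-delete-corrupt-file pattern (hypothetical names).
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class ProfileLoadSketch {
    /** Loads key=value pairs; any I/O problem is reported to the caller. */
    static Properties loadProps(File file) throws IOException {
        Properties props = new Properties();
        InputStream in = null;
        try {
            in = new BufferedInputStream(new FileInputStream(file));
            props.load(in);
        } finally {
            if (in != null) in.close();
        }
        return props;
    }

    /** Returns the profile data, or null (after deleting the file) if it cannot be read. */
    static Properties readProfile(File file) {
        try {
            return loadProps(file);
        } catch (Exception e) {
            System.err.println("Error loading properties from " + file.getAbsolutePath() + ": " + e);
            file.delete();   // treat as corrupt so it is not retried forever
            return null;
        }
    }

    public static void main(String[] args) {
        File f = new File(args.length > 0 ? args[0] : "profile.txt");
        Properties p = readProfile(f);
        System.out.println(p == null ? "unreadable" : p.size() + " entries loaded");
    }
}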