* NetDB: Add support for reseeding with su3 files (ticket #934)

zzz authored 2014-06-28 19:37:59 +00:00
parent 44073732e2
commit ce2694e8fb
3 changed files with 157 additions and 3 deletions
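The consumption path this commit adds boils down to: fetch i2pseeds.su3 from a reseed host, verify its signature, check that the content type is a reseed bundle, unzip it, and move the contained routerInfo files into the local netDb directory. Below is a minimal standalone sketch of that flow (not part of the commit), using only classes the diff itself touches (SU3File, FileUtil, SecureDirectory); the sketch class, the input file name, and the target directory are placeholders, and the real Reseeder's per-file age/size checks, self-hash exclusion, and status callbacks are omitted.

import java.io.File;
import java.io.IOException;

import net.i2p.I2PAppContext;
import net.i2p.crypto.SU3File;
import net.i2p.util.FileUtil;
import net.i2p.util.SecureDirectory;

/**
 * Sketch: verify a locally downloaded i2pseeds.su3 and unpack its
 * routerInfo files into a netDb directory, mirroring the new reseedSU3().
 */
public class SU3ReseedSketch {
    public static void main(String[] args) throws Exception {
        I2PAppContext ctx = I2PAppContext.getGlobalContext();
        File su3In = new File("i2pseeds.su3");                        // placeholder input
        File zip = new File(ctx.getTempDir(), "reseed.zip");
        File tmpDir = new File(ctx.getTempDir(), "reseeds");
        File netDbDir = new SecureDirectory(new File("."), "netDb");  // placeholder target
        if (!netDbDir.exists())
            netDbDir.mkdirs();

        // Signature check; on success the su3 payload (a zip) is written to 'zip'
        SU3File su3 = new SU3File(ctx, su3In);
        su3.verifyAndMigrate(zip);
        if (su3.getContentType() != SU3File.CONTENT_RESEED)
            throw new IOException("Not a reseed bundle, type " + su3.getContentType());

        // Unzip and move the routerInfo files into the netDb directory
        if (!FileUtil.extractZip(zip, tmpDir))
            throw new IOException("Bad zip file");
        File[] files = tmpDir.listFiles();
        if (files == null || files.length == 0)
            throw new IOException("No files in zip");
        for (File f : files) {
            if (!f.isFile() || !FileUtil.rename(f, new File(netDbDir, f.getName())))
                f.delete();
        }
        // Clean up temporaries
        zip.delete();
        FileUtil.rmdir(tmpDir, false);
    }
}

In the router itself these steps run inside ReseedRunner (see the Reseeder.java hunks below), wrapped in the error handling and per-host limits shown there.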


@@ -1,4 +1,5 @@
 2014-06-28 zzz
+ * NetDB: Add support for reseeding with su3 files (ticket #934)
  * SAM:
    - Support SIGNATURE_TYPE, bump to 3.1 (ticket #1318)
    - Private key checking fixes (ticket #1318)


@@ -18,7 +18,7 @@ public class RouterVersion {
     /** deprecated */
     public final static String ID = "Monotone";
     public final static String VERSION = CoreVersion.VERSION;
-    public final static long BUILD = 10;
+    public final static long BUILD = 11;
     /** for example "-test" */
     public final static String EXTRA = "";


@@ -7,6 +7,7 @@ import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
@@ -17,6 +18,7 @@ import java.util.Set;
 import java.util.StringTokenizer;
 import net.i2p.I2PAppContext;
+import net.i2p.crypto.SU3File;
 import net.i2p.data.Base64;
 import net.i2p.data.DataHelper;
 import net.i2p.data.Hash;
@@ -25,6 +27,7 @@ import net.i2p.router.RouterContext;
 import net.i2p.router.util.EventLog;
 import net.i2p.router.util.RFC822Date;
 import net.i2p.util.EepGet;
+import net.i2p.util.FileUtil;
 import net.i2p.util.I2PAppThread;
 import net.i2p.util.Log;
 import net.i2p.util.SecureDirectory;
@@ -49,8 +52,12 @@ public class Reseeder {
     // Reject unreasonably big files, because we download into a ByteArrayOutputStream.
     private static final long MAX_RESEED_RESPONSE_SIZE = 2 * 1024 * 1024;
+    private static final long MAX_SU3_RESPONSE_SIZE = 1024 * 1024;
     /** limit to spend on a single host, to avoid getting stuck on one that is seriously overloaded */
     private static final int MAX_TIME_PER_HOST = 7 * 60 * 1000;
+    private static final long MAX_FILE_AGE = 30*24*60*60*1000L;
+    /** change to false if hosts not ready at release */
+    private static final boolean ENABLE_SU3 = true;
     /**
      * NOTE - URLs that are in both the standard and SSL groups must use the same hostname and path,
@@ -95,6 +102,8 @@
             // Temp disabled since h2ik have been AWOL since 06-03-2013
             //"https://i2p.feared.eu/";
+    private static final String SU3_FILENAME = "i2pseeds.su3";
     public static final String PROP_PROXY_HOST = "router.reseedProxyHost";
     public static final String PROP_PROXY_PORT = "router.reseedProxyPort";
     /** @since 0.8.2 */
@@ -284,7 +293,11 @@
             int total = 0;
             for (int i = 0; i < URLList.size() && _isRunning; i++) {
                 String url = URLList.get(i);
-                int dl = reseedOne(url, echoStatus);
+                int dl = 0;
+                if (ENABLE_SU3)
+                    dl = reseedSU3(url + SU3_FILENAME, echoStatus);
+                if (dl <= 0)
+                    dl = reseedOne(url, echoStatus);
                 if (dl > 0) {
                     total += dl;
                     // Don't go on to the next URL if we have enough
@@ -321,6 +334,7 @@
          *
          * We update the status here.
          *
+         * @param seedURL the URL of the directory, must end in '/'
          * @param echoStatus apparently always false
          * @return count of routerinfos successfully fetched
          **/
@@ -417,6 +431,103 @@
             }
         }
+        /**
+         * Fetch an su3 file containing routerInfo files
+         *
+         * We update the status here.
+         *
+         * @param seedURL the URL of the SU3 file
+         * @param echoStatus apparently always false
+         * @return count of routerinfos successfully fetched
+         * @since 0.9.14
+         **/
+        private int reseedSU3(String seedURL, boolean echoStatus) {
+            File contentRaw = null;
+            File zip = null;
+            File tmpDir = null;
+            try {
+                _checker.setStatus(_("Reseeding: fetching seed URL."));
+                System.err.println("Reseeding from " + seedURL);
+                URL dir = new URL(seedURL);
+                contentRaw = fetchURL(dir);
+                if (contentRaw == null) {
+                    // Logging deprecated here since attemptFailed() provides better info
+                    _log.warn("Failed reading seed URL: " + seedURL);
+                    System.err.println("Reseed got no router infos from " + seedURL);
+                    return 0;
+                }
+                SU3File su3 = new SU3File(_context, contentRaw);
+                zip = new File(_context.getTempDir(), "reseed-" + _context.random().nextInt() + ".zip");
+                su3.verifyAndMigrate(zip);
+                int type = su3.getContentType();
+                if (type != SU3File.CONTENT_RESEED)
+                    throw new IOException("Bad content type " + type);
+                tmpDir = new File(_context.getTempDir(), "reseeds-" + _context.random().nextInt());
+                if (!FileUtil.extractZip(zip, tmpDir))
+                    throw new IOException("Bad zip file");
+                Hash ourHash = _context.routerHash();
+                String ourB64 = ourHash != null ? ROUTERINFO_PREFIX + ourHash.toBase64() + ROUTERINFO_SUFFIX : "";
+                File[] files = tmpDir.listFiles();
+                if (files == null || files.length == 0)
+                    throw new IOException("No files in zip");
+                List<File> fList = Arrays.asList(files);
+                Collections.shuffle(fList, _context.random());
+                long minTime = _context.clock().now() - MAX_FILE_AGE;
+                int fetched = 0;
+                int errors = 0;
+                File netDbDir = new SecureDirectory(_context.getRouterDir(), "netDb");
+                if (!netDbDir.exists())
+                    netDbDir.mkdirs();
+                // 400 max from one URL
+                for (Iterator<File> iter = fList.iterator(); iter.hasNext() && fetched < 400; ) {
+                    File f = iter.next();
+                    String name = f.getName();
+                    if (name.length() != ROUTERINFO_PREFIX.length() + 44 + ROUTERINFO_SUFFIX.length() ||
+                        name.equals(ourB64) ||
+                        f.length() > 10*1024 ||
+                        f.lastModified() < minTime ||
+                        !f.isFile()) {
+                        if (_log.shouldLog(Log.WARN))
+                            _log.warn("Skipping " + f);
+                        f.delete();
+                        errors++;
+                        continue;
+                    }
+                    File to = new File(netDbDir, name);
+                    if (FileUtil.rename(f, to)) {
+                        fetched++;
+                    } else {
+                        f.delete();
+                        errors++;
+                    }
+                    // Give up on this host after lots of errors
+                    if (errors >= 5)
+                        break;
+                }
+                _checker.setStatus(
+                    _("Reseeding: fetching router info from seed URL ({0} successful, {1} errors).", fetched, errors));
+                System.err.println("Reseed got " + fetched + " router infos from " + seedURL + " with " + errors + " errors");
+                if (fetched > 0)
+                    _context.netDb().rescan();
+                return fetched;
+            } catch (Throwable t) {
+                _log.warn("Error reseeding", t);
+                System.err.println("Reseed got no router infos from " + seedURL);
+                return 0;
+            } finally {
+                if (contentRaw != null)
+                    contentRaw.delete();
+                if (zip != null)
+                    zip.delete();
+                if (tmpDir != null)
+                    FileUtil.rmdir(tmpDir, false);
+            }
+        }
         /**
          * Always throws an exception if something fails.
          * We do NOT validate the received data here - that is done in PersistentDataStore
@@ -479,11 +590,53 @@
                 }
             }
             get.addStatusListener(ReseedRunner.this);
-            if (get.fetch())
+            if (get.fetch() && get.getStatusCode() == 200)
                 return baos.toByteArray();
             return null;
         }
+        /**
+         * Fetch a URL to a file.
+         *
+         * @return null on error
+         * @since 0.9.14
+         */
+        private File fetchURL(URL url) throws IOException {
+            File out = new File(_context.getTempDir(), "reseed-" + _context.random().nextInt() + ".tmp");
+            EepGet get;
+            boolean ssl = url.toString().startsWith("https");
+            if (ssl) {
+                SSLEepGet sslget;
+                // TODO SSL PROXY
+                if (_sslState == null) {
+                    sslget = new SSLEepGet(I2PAppContext.getGlobalContext(), out.getPath(), url.toString());
+                    // save state for next time
+                    _sslState = sslget.getSSLState();
+                } else {
+                    sslget = new SSLEepGet(I2PAppContext.getGlobalContext(), out.getPath(), url.toString(), _sslState);
+                }
+                get = sslget;
+                // TODO SSL PROXY AUTH
+            } else {
+                // Do a (probably) non-proxied eepget into file with 0 retries
+                boolean shouldProxy = _proxyHost != null && _proxyHost.length() > 0 && _proxyPort > 0;
+                get = new EepGet(I2PAppContext.getGlobalContext(), shouldProxy, _proxyHost, _proxyPort, 0, 0, MAX_SU3_RESPONSE_SIZE,
+                                 out.getPath(), null, url.toString(), false, null, null);
+                if (shouldProxy && _context.getBooleanProperty(PROP_PROXY_AUTH_ENABLE)) {
+                    String user = _context.getProperty(PROP_PROXY_USERNAME);
+                    String pass = _context.getProperty(PROP_PROXY_PASSWORD);
+                    if (user != null && user.length() > 0 &&
+                        pass != null && pass.length() > 0)
+                        get.addAuthorization(user, pass);
+                }
+            }
+            get.addStatusListener(ReseedRunner.this);
+            if (get.fetch() && get.getStatusCode() == 200)
+                return out;
+            out.delete();
+            return null;
+        }
         /**
          * @param name valid Base64 hash
          * @return true on success, false if skipped
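The other behavioral change in the hunk above is that a download is now accepted only when fetch() succeeds and the HTTP status is 200. The following is a minimal standalone sketch of that guard with EepGet (not part of the commit): no proxy, no SSLEepGet branch, no status listener; the class name, URL, and output path are placeholders, and the size cap simply reuses the 1 MB value of MAX_SU3_RESPONSE_SIZE from the diff.

import java.io.File;

import net.i2p.I2PAppContext;
import net.i2p.util.EepGet;

/**
 * Sketch: download an su3 to a file and accept it only on HTTP 200,
 * mirroring the getStatusCode() check added to the reseed fetches.
 */
public class Su3FetchSketch {
    private static final long MAX_SU3_RESPONSE_SIZE = 1024 * 1024;

    public static void main(String[] args) {
        I2PAppContext ctx = I2PAppContext.getGlobalContext();
        File out = new File(ctx.getTempDir(), "reseed.tmp");    // placeholder output
        String url = "http://reseed.example.com/i2pseeds.su3";  // placeholder URL
        // No proxy, 0 retries, size-capped fetch into 'out'
        EepGet get = new EepGet(ctx, false, null, 0, 0, 0, MAX_SU3_RESPONSE_SIZE,
                                out.getPath(), null, url, false, null, null);
        if (get.fetch() && get.getStatusCode() == 200) {
            System.out.println("Fetched " + out.length() + " bytes to " + out);
        } else {
            out.delete();
            System.out.println("Fetch failed or non-200 status");
        }
    }
}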