2005-08-27 jrandom
* Minor logging and optimization tweaks in the router and SDK
* Use ISO-8859-1 in the XML files (thanks redzara!)
* The consolePassword config property can now be used to bypass the router console's nonce checking, allowing CLI restarts
@@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml version="1.0" encoding="ISO-8859-1"?>
<!DOCTYPE web-app
PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.2//EN"
"http://java.sun.com/j2ee/dtds/web-app_2.2.dtd">
@@ -14,4 +14,4 @@
<welcome-file>index.html</welcome-file>
<welcome-file>index.jsp</welcome-file>
</welcome-file-list>
</web-app>
</web-app>
@@ -20,6 +20,7 @@ public class FormHandler {
protected Log _log;
private String _nonce;
protected String _action;
protected String _passphrase;
private List _errors;
private List _notices;
private boolean _processed;
@@ -32,6 +33,7 @@ public class FormHandler {
_processed = false;
_valid = true;
_nonce = null;
_passphrase = null;
}

/**
@@ -51,6 +53,7 @@ public class FormHandler {

public void setNonce(String val) { _nonce = val; }
public void setAction(String val) { _action = val; }
public void setPassphrase(String val) { _passphrase = val; }

/**
* Override this to perform the final processing (in turn, adding formNotice
@@ -119,8 +122,14 @@ public class FormHandler {
String noncePrev = System.getProperty(getClass().getName() + ".noncePrev");
if ( ( (nonce == null) || (!_nonce.equals(nonce)) ) &&
( (noncePrev == null) || (!_nonce.equals(noncePrev)) ) ) {
addFormError("Invalid nonce, are you being spoofed?");
_valid = false;

String expected = _context.getProperty("consolePassword");
if ( (expected != null) && (expected.trim().length() > 0) && (expected.equals(_passphrase)) ) {
// ok
} else {
addFormError("Invalid nonce, are you being spoofed?");
_valid = false;
}
}
}
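The FormHandler hunks above implement the consolePassword change noted in the release entry: when the submitted nonce matches neither the current nor the previous nonce, the handler falls back to comparing the submitted passphrase against the consolePassword context property. A minimal standalone sketch of that fallback check, assuming only what the diff shows (the property name "consolePassword" is from the diff; the class and method names here are illustrative, not part of the router API):

// Illustrative sketch of the nonce-or-passphrase check added to FormHandler.
import java.util.Properties;

public class NonceCheckSketch {
    private final Properties _config;

    public NonceCheckSketch(Properties config) {
        _config = config;
    }

    /**
     * Accept the form if the submitted nonce matches either of the two most
     * recently issued nonces, or, failing that, if a non-empty consolePassword
     * is configured and the submitted passphrase equals it.
     */
    public boolean isValid(String submittedNonce, String submittedPassphrase,
                           String currentNonce, String previousNonce) {
        if (submittedNonce != null &&
            (submittedNonce.equals(currentNonce) || submittedNonce.equals(previousNonce)))
            return true;
        String expected = _config.getProperty("consolePassword");
        return expected != null && expected.trim().length() > 0
               && expected.equals(submittedPassphrase);
    }

    public static void main(String args[]) {
        Properties cfg = new Properties();
        cfg.setProperty("consolePassword", "secret");
        NonceCheckSketch check = new NonceCheckSketch(cfg);
        // No valid nonce, but the passphrase matches consolePassword: accepted.
        System.out.println(check.isValid(null, "secret", "abc", "def")); // true
        // Neither a valid nonce nor the right passphrase: rejected.
        System.out.println(check.isValid("zzz", "wrong", "abc", "def")); // false
    }
}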
@@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml version="1.0" encoding="ISO-8859-1"?>
<!DOCTYPE web-app
PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.2//EN"
"http://java.sun.com/j2ee/dtds/web-app_2.2.dtd">
@@ -14,4 +14,4 @@
<welcome-file>index.html</welcome-file>
<welcome-file>index.jsp</welcome-file>
</welcome-file-list>
</web-app>
</web-app>
@@ -20,32 +20,39 @@ public class BlogManager {
private File _userDir;
private File _cacheDir;
private File _tempDir;
private File _rootDir;
private Archive _archive;

static {
TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
String rootDir = I2PAppContext.getGlobalContext().getProperty("syndie.rootDir");
if (rootDir == null)
rootDir = System.getProperty("user.home");
rootDir = rootDir + File.separatorChar + ".syndie";
if (false) {
if (rootDir == null)
rootDir = System.getProperty("user.home");
rootDir = rootDir + File.separatorChar + ".syndie";
} else {
if (rootDir == null)
rootDir = "./syndie";
}
_instance = new BlogManager(I2PAppContext.getGlobalContext(), rootDir);
}
public static BlogManager instance() { return _instance; }

public BlogManager(I2PAppContext ctx, String rootDir) {
_context = ctx;
File root = new File(rootDir);
root.mkdirs();
_blogKeyDir = new File(root, "blogkeys");
_privKeyDir = new File(root, "privkeys");
_rootDir = new File(rootDir);
_rootDir.mkdirs();
readConfig();
_blogKeyDir = new File(_rootDir, "blogkeys");
_privKeyDir = new File(_rootDir, "privkeys");
String archiveDir = _context.getProperty("syndie.archiveDir");
if (archiveDir != null)
_archiveDir = new File(archiveDir);
else
_archiveDir = new File(root, "archive");
_userDir = new File(root, "users");
_cacheDir = new File(root, "cache");
_tempDir = new File(root, "temp");
_archiveDir = new File(_rootDir, "archive");
_userDir = new File(_rootDir, "users");
_cacheDir = new File(_rootDir, "cache");
_tempDir = new File(_rootDir, "temp");
_blogKeyDir.mkdirs();
_privKeyDir.mkdirs();
_archiveDir.mkdirs();
@@ -56,6 +63,39 @@ public class BlogManager {
_archive.regenerateIndex();
}

private void readConfig() {
File config = new File(_rootDir, "syndie.config");
if (config.exists()) {
try {
Properties p = new Properties();
DataHelper.loadProps(p, config);
for (Iterator iter = p.keySet().iterator(); iter.hasNext(); ) {
String key = (String)iter.next();
System.setProperty(key, p.getProperty(key));
}
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
}

public void writeConfig() {
File config = new File(_rootDir, "syndie.config");
FileOutputStream out = null;
try {
out = new FileOutputStream(config);
for (Iterator iter = _context.getPropertyNames().iterator(); iter.hasNext(); ) {
String name = (String)iter.next();
if (name.startsWith("syndie."))
out.write((name + '=' + _context.getProperty(name) + '\n').getBytes());
}
} catch (IOException ioe) {
ioe.printStackTrace();
} finally {
if (out != null) try { out.close(); } catch (IOException ioe) {}
}
}

public BlogInfo createBlog(String name, String description, String contactURL, String archives[]) {
return createBlog(name, null, description, contactURL, archives);
}
@@ -261,6 +261,14 @@ public class HTMLRenderer extends EventReceiverImpl {
*
*/
public void receiveBlog(String name, String hash, String tag, long entryId, List locations, String description) {
if (!continueBody()) { return; }
if (hash == null) return;

System.out.println("Receiving the blog: " + name + "/" + hash + "/" + tag + "/" + entryId +"/" + locations + ": "+ description);
byte blogData[] = Base64.decode(hash);
if ( (blogData == null) || (blogData.length != Hash.HASH_LENGTH) )
return;

Blog b = new Blog();
b.name = name;
b.hash = hash;
@@ -270,13 +278,6 @@ public class HTMLRenderer extends EventReceiverImpl {
if (!_blogs.contains(b))
_blogs.add(b);

if (!continueBody()) { return; }
if (hash == null) return;

System.out.println("Receiving the blog: " + name + "/" + hash + "/" + tag + "/" + entryId +"/" + locations + ": "+ description);
byte blogData[] = Base64.decode(hash);
if ( (blogData == null) || (blogData.length != Hash.HASH_LENGTH) )
return;
Hash blog = new Hash(blogData);
if (entryId > 0) {
String pageURL = getPageURL(blog, tag, entryId, -1, -1, true, (_user != null ? _user.getShowImages() : false));
@@ -306,13 +307,14 @@ public class HTMLRenderer extends EventReceiverImpl {
_bodyBuffer.append("\">Tag: ").append(sanitizeString(tag)).append("</a>");
}
if ( (locations != null) && (locations.size() > 0) ) {
_bodyBuffer.append(" <select name=\"archiveLocation\">");
_bodyBuffer.append(" Archives: ");
for (int i = 0; i < locations.size(); i++) {
SafeURL surl = (SafeURL)locations.get(i);
_bodyBuffer.append("<option value=\"").append(Base64.encode(surl.toString())).append("\">");
_bodyBuffer.append(sanitizeString(surl.toString())).append("</option>\n");
if (_user.getAuthenticated() && _user.getAllowAccessRemote())
_bodyBuffer.append("<a href=\"").append(getArchiveURL(blog, surl)).append("\">").append(sanitizeString(surl.toString())).append("</a> ");
else
_bodyBuffer.append(sanitizeString(surl.toString())).append(' ');
}
_bodyBuffer.append("</select>");
}
_bodyBuffer.append("] ");
}
@@ -656,8 +658,8 @@ public class HTMLRenderer extends EventReceiverImpl {
}
if (!unsafe) return str;

str = str.replace('<', '_');
str = str.replace('>', '-');
str = str.replace('<', '_'); // this should be &lt;
str = str.replace('>', '-'); // this should be &gt;
if (!allowNL) {
str = str.replace('\n', ' ');
str = str.replace('\r', ' ');
@@ -668,6 +670,7 @@ public class HTMLRenderer extends EventReceiverImpl {

public static final String sanitizeURL(String str) { return Base64.encode(str); }
public static final String sanitizeTagParam(String str) {
str = str.replace('&', '_'); // this should be &amp;
if (str.indexOf('\"') < 0)
return sanitizeString(str);
str = str.replace('\"', '\'');
@@ -753,4 +756,10 @@ public class HTMLRenderer extends EventReceiverImpl {
buf.append(ArchiveViewerBean.PARAM_SHOW_IMAGES).append('=').append(showImages).append('&');
return buf.toString();
}
public static String getArchiveURL(Hash blog, SafeURL archiveLocation) {
return "remote.jsp?"
//+ "action=Continue..." // should this be the case?
+ "&schema=" + sanitizeTagParam(archiveLocation.getSchema())
+ "&location=" + sanitizeTagParam(archiveLocation.getLocation());
}
}
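The two comments added in the sanitizeString and sanitizeTagParam hunks above note that the placeholder characters stand in for the HTML entities &lt;, &gt; and &amp;. For comparison, a small entity-escaping variant; this is a hypothetical helper, not part of this commit or of HTMLRenderer:

// Hypothetical entity-escaping variant of the character handling in sanitizeString.
public class EscapeSketch {
    public static String escapeHtml(String str) {
        if (str == null) return null;
        StringBuffer buf = new StringBuffer(str.length() + 16);
        for (int i = 0; i < str.length(); i++) {
            char c = str.charAt(i);
            if (c == '&') buf.append("&amp;");       // escape first so inserted entities survive
            else if (c == '<') buf.append("&lt;");
            else if (c == '>') buf.append("&gt;");
            else buf.append(c);
        }
        return buf.toString();
    }

    public static void main(String args[]) {
        System.out.println(escapeHtml("<a href=\"x\">1 & 2</a>"));
        // prints: &lt;a href="x"&gt;1 &amp; 2&lt;/a&gt;
    }
}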
@@ -24,6 +24,8 @@ public class ArchiveServlet extends HttpServlet {
return;
} else if (path.endsWith(Archive.INDEX_FILE)) {
renderSummary(resp);
} else if (path.endsWith("export.zip")) {
ExportServlet.export(req, resp);
} else {
String blog = getBlog(path);
if (path.endsWith(Archive.METADATA_FILE)) {
apps/syndie/java/src/net/i2p/syndie/web/ExportServlet.java (new file, 100 lines)
@@ -0,0 +1,100 @@
package net.i2p.syndie.web;

import java.io.*;
import java.io.*;
import java.util.*;
import java.util.zip.*;

import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletException;

import net.i2p.data.*;
import net.i2p.syndie.*;
import net.i2p.syndie.data.*;

/**
* Dump out a whole series of blog metadata and entries as a zip stream. All metadata
* is written before any entries, so it can be processed in order safely.
*
* HTTP parameters:
* = meta (multiple values): base64 hash of the blog for which metadata is requested
* = entry (multiple values): blog URI of an entry being requested
*/
public class ExportServlet extends HttpServlet {

public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
export(req, resp);
}

public static void export(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String meta[] = req.getParameterValues("meta");
String entries[] = req.getParameterValues("entry");
resp.setContentType("application/x-syndie-zip");
resp.setStatus(200);
OutputStream out = resp.getOutputStream();
ZipOutputStream zo = new ZipOutputStream(out);

List metaFiles = getMetaFiles(meta);

ZipEntry ze = null;
byte buf[] = new byte[1024];
int read = -1;
for (int i = 0; metaFiles != null && i < metaFiles.size(); i++) {
ze = new ZipEntry("meta" + i);
ze.setTime(0);
zo.putNextEntry(ze);
FileInputStream in = new FileInputStream((File)metaFiles.get(i));
while ( (read = in.read(buf)) != -1)
zo.write(buf, 0, read);
zo.closeEntry();
}

List entryFiles = getEntryFiles(entries);
for (int i = 0; entryFiles != null && i < entryFiles.size(); i++) {
ze = new ZipEntry("entry" + i);
ze.setTime(0);
zo.putNextEntry(ze);
FileInputStream in = new FileInputStream((File)entryFiles.get(i));
while ( (read = in.read(buf)) != -1)
zo.write(buf, 0, read);
zo.closeEntry();
}

zo.finish();
zo.close();
}

private static List getMetaFiles(String blogHashes[]) {
if ( (blogHashes == null) || (blogHashes.length <= 0) ) return null;
File dir = BlogManager.instance().getArchive().getArchiveDir();
List rv = new ArrayList(blogHashes.length);
for (int i = 0; i < blogHashes.length; i++) {
byte hv[] = Base64.decode(blogHashes[i]);
if ( (hv == null) || (hv.length != Hash.HASH_LENGTH) )
continue;
File blogDir = new File(dir, blogHashes[i]);
File metaFile = new File(blogDir, Archive.METADATA_FILE);
if (metaFile.exists())
rv.add(metaFile);
}
return rv;
}

private static List getEntryFiles(String blogURIs[]) {
if ( (blogURIs == null) || (blogURIs.length <= 0) ) return null;
File dir = BlogManager.instance().getArchive().getArchiveDir();
List rv = new ArrayList(blogURIs.length);
for (int i = 0; i < blogURIs.length; i++) {
BlogURI uri = new BlogURI(blogURIs[i]);
if (uri.getEntryId() < 0)
continue;
File blogDir = new File(dir, uri.getKeyHash().toBase64());
File entryFile = new File(blogDir, uri.getEntryId() + ".snd");
if (entryFile.exists())
rv.add(entryFile);
}
return rv;
}
}
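Per the javadoc above, the servlet writes every requested metadata file (zip entries named meta0, meta1, ...) before any post (entry0, entry1, ...), so a client can import the stream in one pass; the BulkFetchListener added to RemoteArchiveBean further down does exactly that. A minimal sketch of reading such a stream, where the local file name is a placeholder and the entry naming follows the servlet code above:

// Illustrative reader for the export.zip stream produced by ExportServlet.
// Metadata entries ("metaN") always precede post entries ("entryN").
import java.io.FileInputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

public class ExportReaderSketch {
    public static void main(String args[]) throws IOException {
        // e.g. previously fetched from <archive>/export.zip?meta=<base64 hash>&entry=<blog URI>&
        ZipInputStream zi = new ZipInputStream(new FileInputStream("export.zip"));
        ZipEntry entry;
        byte buf[] = new byte[1024];
        while ((entry = zi.getNextEntry()) != null) {
            int total = 0;
            int read;
            while ((read = zi.read(buf)) != -1)
                total += read;
            if (entry.getName().startsWith("meta"))
                System.out.println("blog metadata " + entry.getName() + ": " + total + " bytes");
            else if (entry.getName().startsWith("entry"))
                System.out.println("blog post " + entry.getName() + ": " + total + " bytes");
            zi.closeEntry();
        }
        zi.close();
    }
}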
@@ -3,6 +3,7 @@ package net.i2p.syndie.web;
import java.io.*;
import java.text.*;
import java.util.*;
import java.util.zip.*;
import net.i2p.I2PAppContext;
import net.i2p.data.*;
import net.i2p.util.EepGet;
@@ -108,6 +109,62 @@ public class RemoteArchiveBean {
fetch(urls, tmpFiles, user, new BlogStatusListener());
}

public void fetchSelectedBulk(User user, Map parameters) {
String entries[] = ArchiveViewerBean.getStrings(parameters, "entry");
String action = ArchiveViewerBean.getString(parameters, "action");
if ("Fetch all new entries".equals(action)) {
ArchiveIndex localIndex = BlogManager.instance().getArchive().getIndex();
List uris = new ArrayList();
List matches = new ArrayList();
for (Iterator iter = _remoteIndex.getUniqueBlogs().iterator(); iter.hasNext(); ) {
Hash blog = (Hash)iter.next();
_remoteIndex.selectMatchesOrderByEntryId(matches, blog, null);
for (int i = 0; i < matches.size(); i++) {
BlogURI uri = (BlogURI)matches.get(i);
if (!localIndex.getEntryIsKnown(uri))
uris.add(uri);
}
matches.clear();
}
entries = new String[uris.size()];
for (int i = 0; i < uris.size(); i++)
entries[i] = ((BlogURI)uris.get(i)).toString();
}
if ( (entries == null) || (entries.length <= 0) ) return;
StringBuffer url = new StringBuffer(512);
url.append(buildExportURL());
Set meta = new HashSet();
for (int i = 0; i < entries.length; i++) {
BlogURI uri = new BlogURI(entries[i]);
if (uri.getEntryId() >= 0) {
url.append("entry=").append(uri.toString()).append('&');
meta.add(uri.getKeyHash());
_statusMessages.add("Scheduling blog post fetching for " + HTMLRenderer.sanitizeString(entries[i]));
}
}
for (Iterator iter = meta.iterator(); iter.hasNext(); ) {
Hash blog = (Hash)iter.next();
url.append("meta=").append(blog.toBase64()).append('&');
_statusMessages.add("Scheduling blog metadata fetching for " + blog.toBase64());
}
List urls = new ArrayList(1);
urls.add(url.toString());
List tmpFiles = new ArrayList(1);
try {
File tmp = File.createTempFile("fetchBulk", ".zip", BlogManager.instance().getTempDir());
tmpFiles.add(tmp);
fetch(urls, tmpFiles, user, new BulkFetchListener(tmp));
} catch (IOException ioe) {
_statusMessages.add("Internal error creating temporary file to fetch " + HTMLRenderer.sanitizeString(url.toString()) + ": " + ioe.getMessage());
}
}

private String buildExportURL() {
String loc = _remoteLocation.trim();
int root = loc.lastIndexOf('/');
return loc.substring(0, root + 1) + "export.zip?";
}

private String buildEntryURL(BlogURI uri) {
String loc = _remoteLocation.trim();
int root = loc.lastIndexOf('/');
@@ -300,6 +357,85 @@ public class RemoteArchiveBean {
}
}

/**
* Receive the status of a fetch for the zip containing blogs and metadata (as generated by
* the ExportServlet)
*/
private class BulkFetchListener implements EepGet.StatusListener {
private File _tmp;
public BulkFetchListener(File tmp) {
_tmp = tmp;
}
public void attemptFailed(String url, long bytesTransferred, long bytesRemaining, int currentAttempt, int numRetries, Exception cause) {
_statusMessages.add("Attempt " + currentAttempt + " failed after " + bytesTransferred + (cause != null ? cause.getMessage() : ""));
}

public void bytesTransferred(long alreadyTransferred, int currentWrite, long bytesTransferred, long bytesRemaining, String url) {}
public void transferComplete(long alreadyTransferred, long bytesTransferred, long bytesRemaining, String url, String outputFile) {
_statusMessages.add("Fetch of " + HTMLRenderer.sanitizeString(url.substring(0, url.indexOf('?'))) + " successful, importing the data");
File file = new File(outputFile);
ZipInputStream zi = null;
try {
zi = new ZipInputStream(new FileInputStream(file));

while (true) {
ZipEntry entry = zi.getNextEntry();
if (entry == null)
break;

ByteArrayOutputStream out = new ByteArrayOutputStream(1024);
byte buf[] = new byte[1024];
int read = -1;
while ( (read = zi.read(buf)) != -1)
out.write(buf, 0, read);

if (entry.getName().startsWith("meta")) {
BlogInfo i = new BlogInfo();
i.load(new ByteArrayInputStream(out.toByteArray()));
boolean ok = BlogManager.instance().getArchive().storeBlogInfo(i);
if (ok) {
_statusMessages.add("Blog info for " + HTMLRenderer.sanitizeString(i.getProperty(BlogInfo.NAME)) + " imported");
} else {
_statusMessages.add("Blog info at " + HTMLRenderer.sanitizeString(url) + " was corrupt / invalid / forged");
}
} else if (entry.getName().startsWith("entry")) {
EntryContainer c = new EntryContainer();
c.load(new ByteArrayInputStream(out.toByteArray()));
BlogURI uri = c.getURI();
if ( (uri == null) || (uri.getKeyHash() == null) ) {
_statusMessages.add("Blog post " + HTMLRenderer.sanitizeString(entry.getName()) + " was corrupt - no URI");
continue;
}
Archive a = BlogManager.instance().getArchive();
BlogInfo info = a.getBlogInfo(uri);
if (info == null) {
_statusMessages.add("Blog post " + HTMLRenderer.sanitizeString(entry.getName()) + " cannot be imported, as we don't have their blog metadata");
continue;
}
boolean ok = a.storeEntry(c);
if (!ok) {
_statusMessages.add("Blog post " + uri.toString() + " has an invalid signature");
continue;
} else {
_statusMessages.add("Blog post " + uri.toString() + " imported");
}
}
}

BlogManager.instance().getArchive().regenerateIndex();
} catch (IOException ioe) {
ioe.printStackTrace();
_statusMessages.add("Error importing from " + HTMLRenderer.sanitizeString(url) + ": " + ioe.getMessage());
} finally {
if (zi != null) try { zi.close(); } catch (IOException ioe) {}
file.delete();
}
}
public void transferFailed(String url, long bytesTransferred, long bytesRemaining, int currentAttempt) {
_statusMessages.add("Fetch of " + HTMLRenderer.sanitizeString(url) + " failed after " + bytesTransferred);
_tmp.delete();
}
}

public void postSelectedEntries(User user, Map parameters) {
String entries[] = ArchiveViewerBean.getStrings(parameters, "localentry");
@@ -366,7 +502,7 @@ public class RemoteArchiveBean {
List entries = new ArrayList();
for (Iterator iter = remoteBlogs.iterator(); iter.hasNext(); ) {
Hash blog = (Hash)iter.next();
buf = new StringBuffer(1024);
buf.setLength(0);
int shownEntries = 0;
buf.append("<tr><td colspan=\"5\" align=\"left\" valign=\"top\">\n");
BlogInfo info = archive.getBlogInfo(blog);
@@ -403,6 +539,11 @@ public class RemoteArchiveBean {
buf.append("</td>\n");
buf.append("</tr>\n");
}
if (shownEntries > 0) {
out.write(buf.toString());
buf.setLength(0);
}
int remote = shownEntries;

// now for posts in known blogs that we have and they don't
entries.clear();
@@ -429,7 +570,7 @@ public class RemoteArchiveBean {
}
}

if (shownEntries > 0) // skip blogs we have already syndicated
if (shownEntries > remote) // skip blogs we have already syndicated
out.write(buf.toString());
}

@@ -477,6 +618,7 @@ public class RemoteArchiveBean {
if (localNew > 0) {
out.write("<input type=\"submit\" name=\"action\" value=\"Post selected entries\" /> \n");
}
out.write("<hr />\n");
}
private final SimpleDateFormat _dateFormat = new SimpleDateFormat("yyyy/MM/dd", Locale.UK);
private String getDate(long when) {
@@ -19,13 +19,14 @@ if (!user.getAuthenticated() || !user.getAllowAccessRemote()) {
%>Sorry, you are not allowed to access remote archives from here. Perhaps you should install Syndie yourself?<%
} else { %>Import from:
<select name="schema">
<option value="web">I2P/TOR/Freenet</option>
<option value="mnet">MNet</option>
<option value="feedspace">Feedspace</option>
<option value="usenet">Usenet</option>
<option value="web" <%=("web".equals(request.getParameter("schema")) ? "selected=\"true\"" : "")%>>I2P/TOR/Freenet</option>
<option value="mnet" <%=("mnet".equals(request.getParameter("schema")) ? "selected=\"true\"" : "")%>>MNet</option>
<option value="feedspace" <%=("feedspace".equals(request.getParameter("schema")) ? "selected=\"true\"" : "")%>>Feedspace</option>
<option value="usenet" <%=("usenet".equals(request.getParameter("schema")) ? "selected=\"true\"" : "")%>>Usenet</option>
</select>
Proxy <input type="text" size="10" name="proxyhost" value="localhost" />:<input type="text" size="4" name="proxyport" value="4444" />
<input name="location" size="40" /> <input type="submit" name="action" value="Continue..." /><br />
<input name="location" size="40" value="<%=(request.getParameter("location") != null ? request.getParameter("location") : "")%>" />
<input type="submit" name="action" value="Continue..." /><br />
<%
String action = request.getParameter("action");
if ("Continue...".equals(action)) {
@@ -33,9 +34,11 @@ Proxy <input type="text" size="10" name="proxyhost" value="localhost" />:<input
} else if ("Fetch metadata".equals(action)) {
remote.fetchMetadata(user, request.getParameterMap());
} else if ("Fetch selected entries".equals(action)) {
remote.fetchSelectedEntries(user, request.getParameterMap());
//remote.fetchSelectedEntries(user, request.getParameterMap());
remote.fetchSelectedBulk(user, request.getParameterMap());
} else if ("Fetch all new entries".equals(action)) {
remote.fetchAllEntries(user, request.getParameterMap());
//remote.fetchAllEntries(user, request.getParameterMap());
remote.fetchSelectedBulk(user, request.getParameterMap());
} else if ("Post selected entries".equals(action)) {
remote.postSelectedEntries(user, request.getParameterMap());
}
@@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml version="1.0" encoding="ISO-8859-1"?>
<!DOCTYPE web-app
PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.2//EN"
"http://java.sun.com/j2ee/dtds/web-app_2.2.dtd">
@@ -29,4 +29,4 @@
<welcome-file>index.html</welcome-file>
<welcome-file>index.jsp</welcome-file>
</welcome-file-list>
</web-app>
</web-app>
@@ -141,14 +141,17 @@ public class LogManager {
public Log getLog(Class cls, String name) {
Log rv = null;
String scope = Log.getScope(name, cls);
boolean isNew = false;
synchronized (_logs) {
rv = (Log)_logs.get(scope);
if (rv == null) {
rv = new Log(this, cls, name);
_logs.put(scope, rv);
isNew = true;
}
}
updateLimit(rv);
if (isNew)
updateLimit(rv);
return rv;
}
public List getLogs() {
@@ -1,4 +1,10 @@
$Id: history.txt,v 1.229 2005/08/23 16:25:49 jrandom Exp $
$Id: history.txt,v 1.230 2005/08/24 17:55:27 jrandom Exp $

2005-08-27 jrandom
* Minor logging and optimization tweaks in the router and SDK
* Use ISO-8859-1 in the XML files (thanks redzara!)
* The consolePassword config property can now be used to bypass the router
console's nonce checking, allowing CLI restarts

2005-08-24 jrandom
* Catch errors with corrupt tunnel messages more gracefully (no need to
@@ -15,9 +15,9 @@ import net.i2p.CoreVersion;
*
*/
public class RouterVersion {
public final static String ID = "$Revision: 1.218 $ $Date: 2005/08/23 16:25:49 $";
public final static String ID = "$Revision: 1.219 $ $Date: 2005/08/24 17:55:26 $";
public final static String VERSION = "0.6.0.3";
public final static long BUILD = 2;
public final static long BUILD = 3;
public static void main(String args[]) {
System.out.println("I2P Router version: " + VERSION);
System.out.println("Router ID: " + RouterVersion.ID);
@@ -146,7 +146,7 @@ public class ClientConnectionRunner {

/** current client's sessionId */
SessionId getSessionId() { return _sessionId; }
void setSessionId(SessionId id) { _sessionId = id; }
void setSessionId(SessionId id) { if (id != null) _sessionId = id; }
/** data for the current leaseRequest, or null if there is no active leaseSet request */
LeaseRequestState getLeaseRequest() { return _leaseRequest; }
void setLeaseRequest(LeaseRequestState req) { _leaseRequest = req; }
@@ -71,9 +71,6 @@ public class GarlicMessageReceiver {
handleClove(clove);
}
} else {
if (_log.shouldLog(Log.ERROR))
_log.error("CloveMessageParser failed to decrypt the message [" + message.getUniqueId()
+ "]");
if (_log.shouldLog(Log.WARN))
_log.warn("CloveMessageParser failed to decrypt the message [" + message.getUniqueId()
+ "]", new Exception("Decrypt garlic failed"));
@@ -12,6 +12,8 @@ import java.math.BigInteger;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.ArrayList;
import java.util.List;

import net.i2p.I2PAppContext;
import net.i2p.data.DataHelper;
@@ -22,7 +24,7 @@ import net.i2p.util.RandomSource;
class KBucketImpl implements KBucket {
private Log _log;
/** set of Hash objects for the peers in the kbucket */
private Set _entries;
private List _entries;
/** we center the kbucket set on the given hash, and derive distances from this */
private Hash _local;
/** include if any bits equal or higher to this bit (in big endian order) */
@@ -34,7 +36,7 @@ class KBucketImpl implements KBucket {
public KBucketImpl(I2PAppContext context, Hash local) {
_context = context;
_log = context.logManager().getLog(KBucketImpl.class);
_entries = new HashSet();
_entries = new ArrayList(64); //new HashSet();
setLocal(local);
}

@@ -193,14 +195,16 @@ class KBucketImpl implements KBucket {
public Set getEntries() {
Set entries = new HashSet(64);
synchronized (_entries) {
entries.addAll(_entries);
for (int i = 0; i < _entries.size(); i++)
entries.add((Hash)_entries.get(i));
}
return entries;
}
public Set getEntries(Set toIgnoreHashes) {
Set entries = new HashSet(64);
synchronized (_entries) {
entries.addAll(_entries);
for (int i = 0; i < _entries.size(); i++)
entries.add((Hash)_entries.get(i));
entries.removeAll(toIgnoreHashes);
}
return entries;
@@ -208,22 +212,26 @@ class KBucketImpl implements KBucket {

public void getEntries(SelectionCollector collector) {
synchronized (_entries) {
for (Iterator iter = _entries.iterator(); iter.hasNext(); ) {
collector.add((Hash)iter.next());
}
for (int i = 0; i < _entries.size(); i++)
collector.add((Hash)_entries.get(i));
}
}

public void setEntries(Set entries) {
synchronized (_entries) {
_entries.clear();
_entries.addAll(entries);
for (Iterator iter = entries.iterator(); iter.hasNext(); ) {
Hash entry = (Hash)iter.next();
if (!_entries.contains(entry))
_entries.add(entry);
}
}
}

public int add(Hash peer) {
synchronized (_entries) {
_entries.add(peer);
if (!_entries.contains(peer))
_entries.add(peer);
return _entries.size();
}
}
@@ -807,7 +807,7 @@ public class KademliaNetworkDatabaseFacade extends NetworkDatabaseFacade {
}

/** smallest allowed period */
private static final int MIN_PER_PEER_TIMEOUT = 1*1000;
private static final int MIN_PER_PEER_TIMEOUT = 2*1000;
private static final int MAX_PER_PEER_TIMEOUT = 5*1000;

public int getPeerTimeout(Hash peer) {
@@ -84,7 +84,7 @@ public class RepublishLeaseSetJob extends JobImpl {
public void runJob() {
if (_log.shouldLog(Log.WARN))
_log.warn("FAILED publishing of the leaseSet for " + _dest.toBase64());
RepublishLeaseSetJob.this.requeue(30*1000);
RepublishLeaseSetJob.this.requeue(getContext().random().nextInt(60*1000));
}
}
}
@@ -39,8 +39,8 @@ class StoreJob extends JobImpl {
private long _expiration;
private PeerSelector _peerSelector;

private final static int PARALLELIZATION = 3; // how many sent at a time
private final static int REDUNDANCY = 6; // we want the data sent to 6 peers
private final static int PARALLELIZATION = 4; // how many sent at a time
private final static int REDUNDANCY = 4; // we want the data sent to 6 peers
/**
* additionally send to 1 outlier(s), in case all of the routers chosen in our
* REDUNDANCY set are attacking us by accepting DbStore messages but dropping
@@ -75,6 +75,7 @@ class StoreJob extends JobImpl {
getContext().statManager().createRateStat("netDb.storePeers", "How many peers each netDb must be sent to before success?", "NetworkDatabase", new long[] { 5*60*1000l, 60*60*1000l, 24*60*60*1000l });
getContext().statManager().createRateStat("netDb.storeFailedPeers", "How many peers each netDb must be sent to before failing completely?", "NetworkDatabase", new long[] { 5*60*1000l, 60*60*1000l, 24*60*60*1000l });
getContext().statManager().createRateStat("netDb.ackTime", "How long does it take for a peer to ack a netDb store?", "NetworkDatabase", new long[] { 5*60*1000l, 60*60*1000l, 24*60*60*1000l });
getContext().statManager().createRateStat("netDb.replyTimeout", "How long after a netDb send does the timeout expire (when the peer doesn't reply in time)?", "NetworkDatabase", new long[] { 60*1000, 5*60*1000l, 60*60*1000l, 24*60*60*1000l });
_facade = facade;
_state = new StoreState(getContext(), key, data, toSkip);
_onSuccess = onSuccess;
@@ -154,8 +155,15 @@ class StoreJob extends JobImpl {
_state.addSkipped(peer);
} else {
int peerTimeout = _facade.getPeerTimeout(peer);
//RateStat failing = prof.getDBHistory().getFailedLookupRate();
//Rate failed = failing.getRate(60*60*1000);
PeerProfile prof = getContext().profileOrganizer().getProfile(peer);
RateStat failing = prof.getDBHistory().getFailedLookupRate();
Rate failed = failing.getRate(60*60*1000);
long failedCount = failed.getCurrentEventCount()+failed.getLastEventCount();
if (failedCount > 10) {
_state.addSkipped(peer);
continue;
}
//
//if (failed.getCurrentEventCount() + failed.getLastEventCount() > avg) {
// _state.addSkipped(peer);
//}
@@ -250,7 +258,7 @@ class StoreJob extends JobImpl {
_state.addPending(peer.getIdentity().getHash());

SendSuccessJob onReply = new SendSuccessJob(getContext(), peer);
FailedJob onFail = new FailedJob(getContext(), peer);
FailedJob onFail = new FailedJob(getContext(), peer, getContext().clock().now());
StoreMessageSelector selector = new StoreMessageSelector(getContext(), getJobId(), peer, token, expiration);

TunnelInfo outTunnel = selectOutboundTunnel();
@@ -321,10 +329,12 @@ class StoreJob extends JobImpl {
*/
private class FailedJob extends JobImpl {
private RouterInfo _peer;
private long _sendOn;

public FailedJob(RouterContext enclosingContext, RouterInfo peer) {
public FailedJob(RouterContext enclosingContext, RouterInfo peer, long sendOn) {
super(enclosingContext);
_peer = peer;
_sendOn = sendOn;
}
public void runJob() {
if (_log.shouldLog(Log.WARN))
@@ -332,6 +342,7 @@ class StoreJob extends JobImpl {
+ " timed out sending " + _state.getTarget());
_state.replyTimeout(_peer.getIdentity().getHash());
getContext().profileManager().dbStoreFailed(_peer.getIdentity().getHash());
getContext().statManager().addRateData("netDb.replyTimeout", getContext().clock().now() - _sendOn, 0);

sendNext();
}
@@ -113,8 +113,8 @@ public class MessageReceiver implements Runnable {
m.setUniqueId(state.getMessageId());
return m;
} catch (I2NPMessageException ime) {
if (_log.shouldLog(Log.ERROR))
_log.error("Message invalid: " + state, ime);
if (_log.shouldLog(Log.WARN))
_log.warn("Message invalid: " + state, ime);
return null;
} catch (Exception e) {
_log.log(Log.CRIT, "Error dealing with a message: " + state, e);
@@ -148,8 +148,8 @@ public class FragmentHandler {
//Hash v = _context.sha().calculateHash(preV, 0, validLength);
boolean eq = DataHelper.eq(v.getData(), 0, preprocessed, offset + HopProcessor.IV_LENGTH, 4);
if (!eq) {
if (_log.shouldLog(Log.ERROR))
_log.error("Corrupt tunnel message - verification fails: \n" + Base64.encode(preprocessed, offset+HopProcessor.IV_LENGTH, 4)
if (_log.shouldLog(Log.WARN))
_log.warn("Corrupt tunnel message - verification fails: \n" + Base64.encode(preprocessed, offset+HopProcessor.IV_LENGTH, 4)
+ "\n" + Base64.encode(v.getData(), 0, 4));
if (_log.shouldLog(Log.WARN))
_log.warn("nomatching endpoint: # pad bytes: " + (paddingEnd-(HopProcessor.IV_LENGTH+4)-1) + "\n"