2005-11-10 jrandom
* First pass to a new threaded Syndie interface, which isn't enabled by default, as its not done yet.
@ -244,6 +244,7 @@ public class Archive {
|
||||
}
|
||||
|
||||
public List listEntries(BlogURI uri, String tag, SessionKey blogKey) {
|
||||
if (uri == null) return new ArrayList();
|
||||
return listEntries(uri.getKeyHash(), uri.getEntryId(), tag, blogKey);
|
||||
}
|
||||
public List listEntries(Hash blog, long entryId, String tag, SessionKey blogKey) {
|
||||
|
@ -19,6 +19,7 @@ class ArchiveIndexer {
|
||||
public static ArchiveIndex index(I2PAppContext ctx, Archive source) {
|
||||
Log log = ctx.logManager().getLog(ArchiveIndexer.class);
|
||||
LocalArchiveIndex rv = new LocalArchiveIndex(ctx);
|
||||
WritableThreadIndex threads = new WritableThreadIndex();
|
||||
rv.setGeneratedOn(ctx.clock().now());
|
||||
|
||||
File rootDir = source.getArchiveDir();
|
||||
@ -79,6 +80,7 @@ class ArchiveIndexer {
|
||||
allEntries++;
|
||||
totalSize += entry.getCompleteSize();
|
||||
String entryTags[] = entry.getTags();
|
||||
threads.addEntry(entry.getURI(), entryTags);
|
||||
for (int t = 0; t < entryTags.length; t++) {
|
||||
if (!tags.containsKey(entryTags[t])) {
|
||||
tags.put(entryTags[t], new TreeMap());
|
||||
@ -98,13 +100,20 @@ class ArchiveIndexer {
|
||||
parser.parse(entry.getEntry().getText(), rec);
|
||||
String reply = rec.getHeader(HTMLRenderer.HEADER_IN_REPLY_TO);
|
||||
if (reply != null) {
|
||||
String forceNewThread = rec.getHeader(HTMLRenderer.HEADER_FORCE_NEW_THREAD);
|
||||
if ( (forceNewThread != null) && (Boolean.valueOf(forceNewThread).booleanValue()) ) {
|
||||
// ignore the parent
|
||||
} else {
|
||||
BlogURI parent = new BlogURI(reply.trim());
|
||||
if ( (parent.getKeyHash() != null) && (parent.getEntryId() >= 0) )
|
||||
if ( (parent.getKeyHash() != null) && (parent.getEntryId() >= 0) ) {
|
||||
rv.addReply(parent, entry.getURI());
|
||||
else if (log.shouldLog(Log.WARN))
|
||||
threads.addParent(parent, entry.getURI());
|
||||
} else if (log.shouldLog(Log.WARN)) {
|
||||
log.warn("Parent of " + entry.getURI() + " is not valid: [" + reply.trim() + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
long lowestEntryId = -1;
|
||||
for (Iterator iter = tags.keySet().iterator(); iter.hasNext(); ) {
|
||||
@ -150,6 +159,11 @@ class ArchiveIndexer {
|
||||
rv.addNewestEntry(uri);
|
||||
}
|
||||
|
||||
threads.organizeTree();
|
||||
if (_log.shouldLog(Log.DEBUG))
|
||||
_log.debug("Tree: \n" + threads.toString());
|
||||
rv.setThreadedIndex(threads);
|
||||
|
||||
return rv;
|
||||
}
|
||||
|
||||
|
@ -44,7 +44,8 @@ public class BlogManager {
|
||||
if (rootDir == null)
|
||||
rootDir = "./syndie";
|
||||
}
|
||||
_instance = new BlogManager(I2PAppContext.getGlobalContext(), rootDir);
|
||||
_instance = new BlogManager(I2PAppContext.getGlobalContext(), rootDir, false);
|
||||
_instance.getArchive().regenerateIndex();
|
||||
}
|
||||
return _instance;
|
||||
}
|
||||
|
43
apps/syndie/java/src/net/i2p/syndie/HeaderReceiver.java
Normal file
@ -0,0 +1,43 @@
|
||||
package net.i2p.syndie;

import java.util.*;

import net.i2p.syndie.sml.SMLParser;

/**
 * SML event receiver that records only the entry headers and discards every
 * content event.  Useful when a caller needs header values (threading info,
 * flags, etc) without rendering the post body.
 *
 * NOTE(review): headers are never cleared - receiveBegin() is a no-op - so a
 * receiver reused across multiple parse() calls accumulates headers from all
 * of them.  Use a fresh instance per parse (TODO: confirm this is intended).
 */
public class HeaderReceiver implements SMLParser.EventReceiver {
    // lazily created on the first header event
    private Properties _headers;

    public HeaderReceiver() { _headers = null; }

    /** @return value of the named header, or null if it was never seen */
    public String getHeader(String name) {
        if (_headers == null)
            return null;
        return _headers.getProperty(name);
    }

    public void receiveHeader(String header, String value) {
        if (_headers == null)
            _headers = new Properties();
        _headers.setProperty(header, value);
    }

    // all remaining events concern the entry body and are intentionally ignored
    public void receiveAddress(String name, String schema, String protocol, String location, String anchorText) {}
    public void receiveArchive(String name, String description, String locationSchema, String location, String postingKey, String anchorText) {}
    public void receiveAttachment(int id, String anchorText) {}
    public void receiveBegin() {}
    public void receiveBlog(String name, String blogKeyHash, String blogPath, long blogEntryId, List blogArchiveLocations, String anchorText) {}
    public void receiveBold(String text) {}
    public void receiveCode(String text, String codeLocationSchema, String codeLocation) {}
    public void receiveCut(String summaryText) {}
    public void receiveEnd() {}
    public void receiveGT() {}
    public void receiveH1(String text) {}
    public void receiveH2(String text) {}
    public void receiveH3(String text) {}
    public void receiveH4(String text) {}
    public void receiveH5(String text) {}
    public void receiveHR() {}
    public void receiveHeaderEnd() {}
    public void receiveImage(String alternateText, int attachmentId) {}
    public void receiveItalic(String text) {}
    public void receiveLT() {}
    public void receiveLeftBracket() {}
    public void receiveLink(String schema, String location, String text) {}
    public void receiveNewline() {}
    public void receivePlain(String text) {}
    public void receivePre(String text) {}
    public void receiveQuote(String text, String whoQuoted, String quoteLocationSchema, String quoteLocation) {}
    public void receiveRightBracket() {}
    public void receiveUnderline(String text) {}
}
|
104
apps/syndie/java/src/net/i2p/syndie/ThreadNodeImpl.java
Normal file
@ -0,0 +1,104 @@
|
||||
package net.i2p.syndie;

import java.util.*;

import net.i2p.data.Hash;
import net.i2p.syndie.data.BlogURI;
import net.i2p.syndie.data.ThreadNode;

/**
 * Simple memory intensive (but fast) node impl.  Each node caches recursive
 * summaries (authors, entries, tags, newest post) of its whole subtree so
 * the read-side queries are O(1) lookups.
 */
class ThreadNodeImpl implements ThreadNode {
    /** write once, never updated once the tree is created */
    private Collection _recursiveAuthors;
    /** contains the BlogURI instances */
    private Collection _recursiveEntries;
    /** write once, never updated once the tree is created */
    private List _children;
    private BlogURI _entry;
    private ThreadNode _parent;
    private BlogURI _parentEntry;
    private Collection _tags;
    private Collection _recursiveTags;
    private long _mostRecentPostDate;
    private Hash _mostRecentPostAuthor;

    public ThreadNodeImpl() {
        _recursiveAuthors = new HashSet(1);
        _recursiveEntries = new HashSet(1);
        _children = new ArrayList(1);
        _entry = null;
        _parent = null;
        _parentEntry = null;
        _tags = new HashSet();
        _recursiveTags = new HashSet();
        _mostRecentPostDate = -1;
        _mostRecentPostAuthor = null;
    }

    void setEntry(BlogURI entry) { _entry = entry; }
    void addAuthor(Hash author) { _recursiveAuthors.add(author); }
    void addChild(ThreadNodeImpl child) {
        // don't let the same reply get linked in twice
        if (!_children.contains(child))
            _children.add(child);
    }
    void setParent(ThreadNodeImpl parent) { _parent = parent; }
    void setParentEntry(BlogURI parent) { _parentEntry = parent; }
    void addTag(String tag) {
        _tags.add(tag);
        _recursiveTags.add(tag);
    }

    /**
     * Recompute the cached recursive summaries for this node and everything
     * beneath it.  Recurses into every child in case the tree was assembled
     * out of order (which it shouldn't be, if its built carefully...).
     * Assumes setEntry() was called first.
     */
    void summarizeThread() {
        _recursiveAuthors.add(_entry.getKeyHash());
        _recursiveEntries.add(_entry);
        _mostRecentPostDate = _entry.getEntryId();
        _mostRecentPostAuthor = _entry.getKeyHash();

        for (Iterator iter = _children.iterator(); iter.hasNext(); ) {
            ThreadNodeImpl child = (ThreadNodeImpl)iter.next();
            child.summarizeThread();
            if (child.getMostRecentPostDate() > _mostRecentPostDate) {
                _mostRecentPostDate = child.getMostRecentPostDate();
                _mostRecentPostAuthor = child.getMostRecentPostAuthor();
            }
            _recursiveTags.addAll(child.getRecursiveTags());
            _recursiveAuthors.addAll(child.getRecursiveAuthors());
            _recursiveEntries.addAll(child.getRecursiveEntries());
        }
    }

    /** debugging view: XML-ish dump of this node and its subtree */
    public String toString() {
        StringBuffer buf = new StringBuffer();
        buf.append("<node><entry>").append(getEntry().toString()).append("</entry>\n");
        buf.append("<tags>").append(getTags()).append("</tags>\n");
        buf.append("<recursiveTags>").append(getRecursiveTags()).append("</recursiveTags>\n");
        buf.append("<children>\n");
        for (Iterator iter = _children.iterator(); iter.hasNext(); )
            buf.append(iter.next().toString());
        buf.append("</children>\n");
        buf.append("</node>\n");
        return buf.toString();
    }

    private Collection getRecursiveAuthors() { return _recursiveAuthors; }
    private Collection getRecursiveEntries() { return _recursiveEntries; }

    // interface-specified methods doing what one would expect...
    public boolean containsAuthor(Hash author) { return _recursiveAuthors.contains(author); }
    public boolean containsEntry(BlogURI uri) { return _recursiveEntries.contains(uri); }
    public ThreadNode getChild(int index) { return (ThreadNode)_children.get(index); }
    public int getChildCount() { return _children.size(); }
    public BlogURI getEntry() { return _entry; }
    public ThreadNode getParent() { return _parent; }
    public BlogURI getParentEntry() { return _parentEntry; }
    public boolean containsTag(String tag) { return _tags.contains(tag); }
    public Collection getTags() { return _tags; }
    public Collection getRecursiveTags() { return _recursiveTags; }
    public long getMostRecentPostDate() { return _mostRecentPostDate; }
    public Hash getMostRecentPostAuthor() { return _mostRecentPostAuthor; }
    public Iterator getRecursiveAuthorIterator() { return _recursiveAuthors.iterator(); }
}
|
148
apps/syndie/java/src/net/i2p/syndie/WritableThreadIndex.java
Normal file
@ -0,0 +1,148 @@
|
||||
package net.i2p.syndie;

import java.util.*;

import net.i2p.I2PAppContext;
import net.i2p.data.DataHelper;
import net.i2p.data.Hash;
import net.i2p.syndie.data.*;
import net.i2p.syndie.sml.SMLParser;
import net.i2p.syndie.sml.HTMLRenderer;

/**
 * Build-side thread index: entries and parent links are registered
 * incrementally, then organizeTree() stitches them into threads and
 * publishes them through the ThreadIndex superclass.
 */
class WritableThreadIndex extends ThreadIndex {
    /** map of child (BlogURI) to parent (BlogURI) */
    private Map _parents;
    /** map of entry (BlogURI) to tags (String[]), newest entry first */
    private Map _tags;
    private static final String[] NO_TAGS = new String[0];
    /** b0rk if the thread seems to go too deep */
    private static final int MAX_THREAD_DEPTH = 64;

    WritableThreadIndex() {
        super();
        _parents = new HashMap();
        _tags = new TreeMap(new NewestEntryFirstComparator());
    }

    void addParent(BlogURI parent, BlogURI child) { _parents.put(child, parent); }

    /** register an entry and its tags; replaces any tags previously recorded for it */
    void addEntry(BlogURI entry, String tags[]) {
        if (tags == null) tags = NO_TAGS;
        _tags.put(entry, tags);
    }

    /**
     * pull the data added together into threads, and stash them in the
     * roots, organized chronologically
     *
     */
    void organizeTree() {
        // first pass: one node per entry, carrying its own tags and direct parent
        Map nodes = new HashMap(_tags.size());
        for (Iterator iter = _tags.keySet().iterator(); iter.hasNext(); ) {
            BlogURI entry = (BlogURI)iter.next();
            String tags[] = (String[])_tags.get(entry);
            BlogURI parent = (BlogURI)_parents.get(entry);
            ThreadNodeImpl node = new ThreadNodeImpl();
            node.setEntry(entry);
            if (tags != null)
                for (int i = 0; i < tags.length; i++)
                    node.addTag(tags[i]);
            if (parent != null)
                node.setParentEntry(parent);
            addEntry(entry, node);
            nodes.put(entry, node);
        }

        SMLParser parser = new SMLParser(I2PAppContext.getGlobalContext());
        Archive archive = BlogManager.instance().getArchive();

        // second pass: link each node up to its ancestors, collecting the roots
        TreeSet roots = new TreeSet(new NewestNodeFirstComparator());
        for (Iterator iter = nodes.keySet().iterator(); iter.hasNext(); ) {
            BlogURI entry = (BlogURI)iter.next();
            ThreadNodeImpl node = (ThreadNodeImpl)nodes.get(entry);
            int depth = 0;
            // climb the tree
            while (node.getParentEntry() != null) {
                ThreadNodeImpl parent = (ThreadNodeImpl)nodes.get(node.getParentEntry());
                if (parent == null) break;

                // if the parent doesn't want replies, only include replies under the tree
                // if they're written by the same author
                BlogURI parentURI = parent.getEntry();
                EntryContainer parentEntry = archive.getEntry(parentURI);
                if (parentEntry != null) {
                    // use a fresh receiver per parse: HeaderReceiver never clears its
                    // headers, so reusing one instance would leak headers (e.g.
                    // RefuseReplies) from a previously parsed entry into this check
                    HeaderReceiver rec = new HeaderReceiver();
                    parser.parse(parentEntry.getEntry().getText(), rec);
                    String refuse = rec.getHeader(HTMLRenderer.HEADER_REFUSE_REPLIES);
                    if ( (refuse != null) && (Boolean.valueOf(refuse).booleanValue()) ) {
                        if (parent.getEntry().getKeyHash().equals(entry.getKeyHash())) {
                            // same author, allow the reply
                        } else {
                            // different author, refuse
                            parent = null;
                            break;
                        }
                    }
                }

                node.setParent(parent);
                parent.addChild(node);
                node = parent;
                depth++;
                // b0rk if the thread seems to go too deep
                if (depth > MAX_THREAD_DEPTH)
                    break;
            }

            node.summarizeThread();
            roots.add(node);
        }

        // store them, sorted by most recently updated thread first
        for (Iterator iter = roots.iterator(); iter.hasNext(); )
            addRoot((ThreadNode)iter.next());

        _parents.clear();
        _tags.clear();
    }

    /** debugging view: XML-ish dump of every root's subtree */
    public String toString() {
        StringBuffer buf = new StringBuffer();
        buf.append("<threadIndex>");
        for (int i = 0; i < getRootCount(); i++) {
            ThreadNode root = getRoot(i);
            buf.append(root.toString());
        }
        buf.append("</threadIndex>\n");
        return buf.toString();
    }

    /** sort BlogURI instances with the highest entryId first */
    private class NewestEntryFirstComparator implements Comparator {
        public int compare(Object lhs, Object rhs) {
            BlogURI left = (BlogURI)lhs;
            BlogURI right = (BlogURI)rhs;
            if (left.getEntryId() > right.getEntryId()) {
                return -1;
            } else if (left.getEntryId() == right.getEntryId()) {
                // tiebreak on the blog key so distinct URIs never collide in the TreeMap
                return DataHelper.compareTo(left.getKeyHash().getData(), right.getKeyHash().getData());
            } else {
                return 1;
            }
        }
    }
    /** sort ThreadNodeImpl instances with the highest entryId first */
    private class NewestNodeFirstComparator implements Comparator {
        public int compare(Object lhs, Object rhs) {
            ThreadNodeImpl left = (ThreadNodeImpl)lhs;
            ThreadNodeImpl right = (ThreadNodeImpl)rhs;
            if (left.getEntry().getEntryId() > right.getEntry().getEntryId()) {
                return -1;
            } else if (left.getEntry().getEntryId() == right.getEntry().getEntryId()) {
                // tiebreak on the blog key so distinct threads never collide in the TreeSet
                return DataHelper.compareTo(left.getEntry().getKeyHash().getData(), right.getEntry().getKeyHash().getData());
            } else {
                return 1;
            }
        }
    }
}
|
@ -32,6 +32,7 @@ public class ArchiveIndex {
|
||||
/** parent message to a set of replies, ordered with the oldest first */
|
||||
protected Map _replies;
|
||||
protected Properties _headers;
|
||||
private ThreadIndex _threadedIndex;
|
||||
|
||||
public ArchiveIndex() {
|
||||
this(I2PAppContext.getGlobalContext(), false);
|
||||
@ -48,6 +49,7 @@ public class ArchiveIndex {
|
||||
_headers = new Properties();
|
||||
_replies = Collections.synchronizedMap(new HashMap());
|
||||
_generatedOn = -1;
|
||||
_threadedIndex = null;
|
||||
if (shouldLoad)
|
||||
setIsLocal("true");
|
||||
}
|
||||
@ -61,6 +63,8 @@ public class ArchiveIndex {
|
||||
public long getTotalSize() { return _totalSize; }
|
||||
public long getNewSize() { return _newSize; }
|
||||
public long getGeneratedOn() { return _generatedOn; }
|
||||
public ThreadIndex getThreadedIndex() { return _threadedIndex; }
|
||||
public void setThreadedIndex(ThreadIndex index) { _threadedIndex = index; }
|
||||
|
||||
public String getNewSizeStr() {
|
||||
if (_newSize < 1024) return _newSize + "";
|
||||
|
@ -74,7 +74,9 @@ public class BlogURI {
|
||||
DataHelper.eq(_blogHash, ((BlogURI)obj)._blogHash);
|
||||
}
|
||||
public int hashCode() {
|
||||
int rv = (int)_entryId;
|
||||
int rv = (int)((_entryId >>> 32) & 0x7FFFFFFF);
|
||||
rv += (_entryId & 0x7FFFFFFF);
|
||||
|
||||
if (_blogHash != null)
|
||||
rv += _blogHash.hashCode();
|
||||
return rv;
|
||||
|
@ -0,0 +1,88 @@
|
||||
package net.i2p.syndie.data;

import java.util.*;
import net.i2p.syndie.*;
import net.i2p.data.*;
import net.i2p.client.naming.*;

/**
 * View over the archive's threaded index, restricted by the user's
 * ignore list and an optional set of requested tags.
 */
public class FilteredThreadIndex extends ThreadIndex {
    private User _user;
    private Archive _archive;
    private ThreadIndex _baseIndex;
    private Collection _filteredTags;
    private List _roots;
    private List _ignoredAuthors;

    public static final String GROUP_FAVORITE = "Favorite";
    public static final String GROUP_IGNORE = "Ignore";

    public FilteredThreadIndex(User user, Archive archive, Collection tags) {
        super();
        _user = user;
        _archive = archive;
        _baseIndex = _archive.getIndex().getThreadedIndex();
        _filteredTags = tags;
        if (_filteredTags == null)
            _filteredTags = Collections.EMPTY_SET;

        // collect the author hashes of every petname the user has marked "Ignore"
        _ignoredAuthors = new ArrayList();
        for (Iterator iter = user.getPetNameDB().iterator(); iter.hasNext(); ) {
            PetName pn = (PetName)iter.next();
            if (!pn.isMember(GROUP_IGNORE))
                continue;
            try {
                Hash h = new Hash();
                h.fromBase64(pn.getLocation());
                _ignoredAuthors.add(h);
            } catch (DataFormatException dfe) {
                // location isn't a valid hash - skip this petname
            }
        }

        filter();
    }

    /** build _roots from the base index, dropping threads that are fully ignored */
    private void filter() {
        int rootCount = _baseIndex.getRootCount();
        _roots = new ArrayList(rootCount);
        for (int i = 0; i < rootCount; i++) {
            ThreadNode node = _baseIndex.getRoot(i);
            if (!isIgnored(node, _ignoredAuthors, _filteredTags))
                _roots.add(node);
        }
    }

    /**
     * A thread is hidden when every author in it is on the ignore list, or
     * when tag filtering is active and no requested tag appears anywhere
     * in the thread.
     */
    private boolean isIgnored(ThreadNode node, List ignoredAuthors, Collection requestedTags) {
        boolean allAuthorsIgnored = true;
        for (Iterator iter = node.getRecursiveAuthorIterator(); iter.hasNext(); ) {
            Hash author = (Hash)iter.next();
            if (!ignoredAuthors.contains(author)) {
                allAuthorsIgnored = false;
                break;
            }
        }
        if ( (allAuthorsIgnored) && (ignoredAuthors.size() > 0) )
            return true;

        if (requestedTags.size() <= 0) {
            // we aren't filtering by tags, and we haven't been refused by the author
            // filtering
            return false;
        }
        for (Iterator iter = requestedTags.iterator(); iter.hasNext(); )
            if (node.getRecursiveTags().contains(iter.next()))
                return false;
        // authors we aren't ignoring have posted in the thread, but the user is filtering
        // posts by tags, and this thread doesn't include any of those tags
        return true;
    }

    public int getRootCount() { return _roots.size(); }
    public ThreadNode getRoot(int index) { return (ThreadNode)_roots.get(index); }
    public ThreadNode getNode(BlogURI uri) { return _baseIndex.getNode(uri); }
    public Collection getFilteredTags() { return _filteredTags; }
}
|
49
apps/syndie/java/src/net/i2p/syndie/data/ThreadIndex.java
Normal file
@ -0,0 +1,49 @@
|
||||
package net.i2p.syndie.data;

import java.util.*;

/**
 * List of threads, ordered with the most recently updated thread first.
 * Each node in the tree summarizes everything underneath it as well.
 *
 */
public class ThreadIndex {
    /** ordered list of threads, with most recent first */
    private List _roots;
    /** map of BlogURI to ThreadNode */
    private Map _nodes;

    protected ThreadIndex() {
        // no need to synchronize, since the thread index doesn't change after
        // its first built
        _roots = new ArrayList();
        _nodes = new HashMap(64);
    }

    public int getRootCount() { return _roots.size(); }
    public ThreadNode getRoot(int index) { return (ThreadNode)_roots.get(index); }
    public ThreadNode getNode(BlogURI uri) { return (ThreadNode)_nodes.get(uri); }

    /**
     * get the root of the thread that the given uri is located in, or -1.
     * The implementation depends only on getRoot/getNode/getRootCount and not on the
     * data structures, so should be safe for subclasses who adjust those methods
     *
     */
    public int getRoot(BlogURI uri) {
        ThreadNode cur = getNode(uri);
        if (cur == null)
            return -1;
        // walk up to the top of this thread
        while (cur.getParent() != null)
            cur = cur.getParent();
        // then find which root it is
        int numRoots = getRootCount();
        for (int i = 0; i < numRoots; i++) {
            if (getRoot(i).equals(cur))
                return i;
        }
        return -1;
    }

    /** call this in the right order - most recently updated thread first */
    protected void addRoot(ThreadNode node) { _roots.add(node); }
    /** invocation order here doesn't matter */
    protected void addEntry(BlogURI uri, ThreadNode node) { _nodes.put(uri, node); }
}
|
34
apps/syndie/java/src/net/i2p/syndie/data/ThreadNode.java
Normal file
@ -0,0 +1,34 @@
|
||||
package net.i2p.syndie.data;

import java.util.*;
import net.i2p.data.Hash;

/**
 * One post within a thread tree.  Query methods marked "recursive" cover
 * this node and its entire subtree of replies.
 */
public interface ThreadNode {
    /** current post */
    public BlogURI getEntry();
    /** how many direct replies there are to the current entry */
    public int getChildCount();
    /** the given direct reply */
    public ThreadNode getChild(int index);
    /** parent this is actually a reply to */
    public BlogURI getParentEntry();
    /** parent in the tree, maybe not a direct parent, but the closest one */
    public ThreadNode getParent();
    /** true if this entry, or any child, is written by the given author */
    public boolean containsAuthor(Hash author);
    /** true if this node, or any child, includes the given URI */
    public boolean containsEntry(BlogURI uri);
    /** list of tags (String) of this node only */
    public Collection getTags();
    /** list of tags (String) of this node or any children in the tree */
    public Collection getRecursiveTags();
    /** date of the most recent post, recursive */
    public long getMostRecentPostDate();
    /** author of the most recent post, recursive */
    public Hash getMostRecentPostAuthor();
    /** walk across the authors of the entire thread */
    public Iterator getRecursiveAuthorIterator();
}
|
@ -25,6 +25,7 @@ public class TransparentArchiveIndex extends ArchiveIndex {
|
||||
public long getTotalSize() { return index().getTotalSize(); }
|
||||
public long getNewSize() { return index().getNewSize(); }
|
||||
public long getGeneratedOn() { return index().getGeneratedOn(); }
|
||||
public ThreadIndex getThreadedIndex() { return index().getThreadedIndex(); }
|
||||
|
||||
public String getNewSizeStr() { return index().getNewSizeStr(); }
|
||||
public String getTotalSizeStr() { return index().getTotalSizeStr(); }
|
||||
|
@ -755,6 +755,10 @@ public class HTMLRenderer extends EventReceiverImpl {
|
||||
public static final String HEADER_STYLE = "Style";
|
||||
public static final String HEADER_PETNAME = "PetName";
|
||||
public static final String HEADER_TAGS = "Tags";
|
||||
/** if set to true, don't display the message in the same thread, though keep a parent reference */
|
||||
public static final String HEADER_FORCE_NEW_THREAD = "ForceNewThread";
|
||||
/** if set to true, don't let anyone else reply in the same thread (but let the original author reply) */
|
||||
public static final String HEADER_REFUSE_REPLIES = "RefuseReplies";
|
||||
|
||||
private void renderSubjectCell() {
|
||||
_preBodyBuffer.append("<form action=\"index.jsp\">");
|
||||
@ -880,7 +884,7 @@ public class HTMLRenderer extends EventReceiverImpl {
|
||||
}
|
||||
|
||||
private final SimpleDateFormat _dateFormat = new SimpleDateFormat("yyyy/MM/dd", Locale.UK);
|
||||
private final String getEntryDate(long when) {
|
||||
public final String getEntryDate(long when) {
|
||||
synchronized (_dateFormat) {
|
||||
try {
|
||||
String str = _dateFormat.format(new Date(when));
|
||||
|
1053
apps/syndie/java/src/net/i2p/syndie/web/ViewThreadedServlet.java
Normal file
BIN
apps/syndie/jsp/images/addToFavorites.png
Normal file
After Width: | Height: | Size: 275 B |
BIN
apps/syndie/jsp/images/addToIgnored.png
Normal file
After Width: | Height: | Size: 266 B |
BIN
apps/syndie/jsp/images/collapse.png
Normal file
After Width: | Height: | Size: 917 B |
BIN
apps/syndie/jsp/images/expand.png
Normal file
After Width: | Height: | Size: 922 B |
BIN
apps/syndie/jsp/images/favorites.png
Normal file
After Width: | Height: | Size: 463 B |
BIN
apps/syndie/jsp/images/noSubthread.png
Normal file
After Width: | Height: | Size: 129 B |
BIN
apps/syndie/jsp/images/threadIndent.png
Normal file
After Width: | Height: | Size: 129 B |
3
apps/syndie/jsp/index.html
Normal file
@ -0,0 +1,3 @@
|
||||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<title>Syndie</title>
<!-- meta refresh belongs in <head>, not <body>: immediately redirect to the JSP front page -->
<meta http-equiv="refresh" content="0;url=index.jsp">
</head>
<body><a href="index.jsp">Enter</a></body>
</html>
|
16
apps/syndie/jsp/switchuser.jsp
Normal file
@ -0,0 +1,16 @@
|
||||
<%@page contentType="text/html; charset=UTF-8" pageEncoding="UTF-8" import="net.i2p.syndie.web.*" %><%
// make sure form input is decoded as UTF-8 before any parameter is read
request.setCharacterEncoding("UTF-8");
%><!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 TRANSITIONAL//EN" "http://www.w3c.org/TR/1999/REC-html401-19991224/loose.dtd">
<%-- Login form for switching the active Syndie user; the credentials and the
     chosen action (Login / Cancel) are handled by threads.jsp.
     NOTE(review): method="GET" puts the password into the URL, browser history,
     and server logs - consider POST once threads.jsp accepts it. --%>
<html>
<head>
<title>Syndie</title>
<link href="style.jsp" rel="stylesheet" type="text/css" >
</head>
<body>
<form action="threads.jsp" method="GET">
Syndie login: <input type="text" name="login" /><br />
Password: <input type="password" name="password" /><br />
<input type="submit" name="action" value="Login" />
<input type="submit" name="action" value="Cancel" />
</form>
</body>
@ -14,6 +14,11 @@
|
||||
<servlet-class>net.i2p.syndie.web.RSSServlet</servlet-class>
|
||||
</servlet>
|
||||
|
||||
<servlet>
|
||||
<servlet-name>net.i2p.syndie.web.ViewThreadedServlet</servlet-name>
|
||||
<servlet-class>net.i2p.syndie.web.ViewThreadedServlet</servlet-class>
|
||||
</servlet>
|
||||
|
||||
<servlet>
|
||||
<servlet-name>net.i2p.syndie.UpdaterServlet</servlet-name>
|
||||
<servlet-class>net.i2p.syndie.UpdaterServlet</servlet-class>
|
||||
@ -22,10 +27,12 @@
|
||||
|
||||
<!-- precompiled servlets -->
|
||||
|
||||
<!--
|
||||
<servlet-mapping>
|
||||
<servlet-name>net.i2p.syndie.jsp.index_jsp</servlet-name>
|
||||
<url-pattern>/</url-pattern>
|
||||
</servlet-mapping>
|
||||
-->
|
||||
<servlet-mapping>
|
||||
<servlet-name>net.i2p.syndie.web.ArchiveServlet</servlet-name>
|
||||
<url-pattern>/archive/*</url-pattern>
|
||||
@ -34,6 +41,10 @@
|
||||
<servlet-name>net.i2p.syndie.web.RSSServlet</servlet-name>
|
||||
<url-pattern>/rss.jsp</url-pattern>
|
||||
</servlet-mapping>
|
||||
<servlet-mapping>
|
||||
<servlet-name>net.i2p.syndie.web.ViewThreadedServlet</servlet-name>
|
||||
<url-pattern>/threads.jsp</url-pattern>
|
||||
</servlet-mapping>
|
||||
|
||||
<session-config>
|
||||
<session-timeout>
|
||||
|