added stat - tunnel.unknownTunnelTimeLeft, which records the time remaining until expiration on tunnel messages we drop because we don't know the tunnel ID.
formatting / logging cleanup
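
For context, a minimal sketch of the stat wiring this change introduces, built from the createRateStat/addRateData calls visible in the diff below; the wrapper class, method name, and import paths are illustrative assumptions, not the actual HandleTunnelMessageJob code:

    import net.i2p.stat.StatManager; // assumed package path
    import net.i2p.util.Clock;       // assumed package path

    class UnknownTunnelStatSketch {
        static {
            // Register the rate stat with the same name, description, and periods used in the diff.
            StatManager.getInstance().createRateStat("tunnel.unknownTunnelTimeLeft",
                "How much time is left on tunnel messages we receive that are for unknown tunnels?",
                "Tunnels", new long[] { 5*60*1000l, 60*60*1000l, 24*60*60*1000l });
        }

        // Called when a tunnel message arrives for a tunnel ID we have no state for (hypothetical helper).
        static void recordUnknownTunnelDrop(long messageExpirationMs) {
            // Positive values: the dropped message still had time to live.
            // Negative values: it had already expired before we dropped it.
            long timeRemaining = messageExpirationMs - Clock.getInstance().now();
            StatManager.getInstance().addRateData("tunnel.unknownTunnelTimeLeft", timeRemaining, 0);
        }
    }
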
@@ -57,7 +57,7 @@ public class HandleTunnelMessageJob extends JobImpl {
     private final static int FORWARD_PRIORITY = 400;
 
     static {
-        StatManager.getInstance().createFrequencyStat("tunnel.unknownTunnelFrequency", "How often do we receive tunnel messages for unknown tunnels?", "Tunnels", new long[] { 60*1000l, 60*60*1000l, 24*60*60*1000l });
+        StatManager.getInstance().createRateStat("tunnel.unknownTunnelTimeLeft", "How much time is left on tunnel messages we receive that are for unknown tunnels?", "Tunnels", new long[] { 5*60*1000l, 60*60*1000l, 24*60*60*1000l });
         StatManager.getInstance().createRateStat("tunnel.gatewayMessageSize", "How large are the messages we are forwarding on as an inbound gateway?", "Tunnels", new long[] { 60*1000l, 60*60*1000l, 24*60*60*1000l });
         StatManager.getInstance().createRateStat("tunnel.relayMessageSize", "How large are the messages we are forwarding on as a participant in a tunnel?", "Tunnels", new long[] { 60*1000l, 60*60*1000l, 24*60*60*1000l });
         StatManager.getInstance().createRateStat("tunnel.endpointMessageSize", "How large are the messages we are forwarding in as an outbound endpoint?", "Tunnels", new long[] { 60*1000l, 60*60*1000l, 24*60*60*1000l });
@@ -80,17 +80,23 @@ public class HandleTunnelMessageJob extends JobImpl {
             if (_from != null)
                 from = _from.getHash();
             MessageHistory.getInstance().droppedTunnelMessage(id, from);
-            _log.error("Received a message for an unknown tunnel [" + id.getTunnelId() + "], dropping it: " + _message, getAddedBy());
-            StatManager.getInstance().updateFrequency("tunnel.unknownTunnelFrequency");
+            if (_log.shouldLog(Log.ERROR))
+                _log.error("Received a message for an unknown tunnel [" + id.getTunnelId()
+                           + "], dropping it: " + _message, getAddedBy());
+            long timeRemaining = _message.getMessageExpiration().getTime() - Clock.getInstance().now();
+            StatManager.getInstance().addRateData("tunnel.unknownTunnelTimeLeft", timeRemaining, 0);
             return;
         }
 
         info = getUs(info);
         if (info == null) {
+            if (_log.shouldLog(Log.ERROR))
             _log.error("We are not part of a known tunnel?? wtf! drop.", getAddedBy());
-            StatManager.getInstance().updateFrequency("tunnel.unknownTunnelFrequency");
+            long timeRemaining = _message.getMessageExpiration().getTime() - Clock.getInstance().now();
+            StatManager.getInstance().addRateData("tunnel.unknownTunnelTimeLeft", timeRemaining, 0);
             return;
         } else {
+            if (_log.shouldLog(Log.DEBUG))
             _log.debug("Tunnel message received for tunnel: \n" + info);
         }
 
@@ -105,9 +111,9 @@ public class HandleTunnelMessageJob extends JobImpl {
                 JobQueue.getInstance().addJob(new HandleGatewayMessageJob(msg, info, data.length));
                 return;
             } else {
-                if (_log.shouldLog(Log.WARN))
+                if (_log.shouldLog(Log.DEBUG))
                     _log.debug("We are the gateway and the endpoint for tunnel " + id.getTunnelId());
-                if (_log.shouldLog(Log.WARN))
+                if (_log.shouldLog(Log.DEBUG))
                     _log.debug("Process locally");
                 if (info.getDestination() != null) {
                     if (!ClientManagerFacade.getInstance().isLocal(info.getDestination())) {
@@ -117,7 +123,9 @@ public class HandleTunnelMessageJob extends JobImpl {
                         _log.debug("Dropping message for disconnected client: " + _message);
 
                         MessageHistory.getInstance().droppedOtherMessage(_message);
-                        MessageHistory.getInstance().messageProcessingError(_message.getUniqueId(), _message.getClass().getName(), "Disconnected client");
+                        MessageHistory.getInstance().messageProcessingError(_message.getUniqueId(),
+                                                                            _message.getClass().getName(),
+                                                                            "Disconnected client");
                         return;
                     }
                 }
@@ -128,7 +136,8 @@ public class HandleTunnelMessageJob extends JobImpl {
                 return;
             } else {
                 if (_log.shouldLog(Log.ERROR))
-                    _log.error("Body is null! content of message.getData() = [" + DataHelper.toString(_message.getData()) + "]", getAddedBy());
+                    _log.error("Body is null! content of message.getData() = [" +
+                               DataHelper.toString(_message.getData()) + "]", getAddedBy());
                 if (_log.shouldLog(Log.DEBUG))
                     _log.debug("Message that failed: " + _message, getAddedBy());
                 return;
@@ -151,12 +160,15 @@ public class HandleTunnelMessageJob extends JobImpl {
         } else {
             if (info.getNextHop() != null) {
                 if (_log.shouldLog(Log.INFO))
-                    _log.info("Message for tunnel " + id.getTunnelId() + " received where we're not the gateway and there are remaining hops, so forward it on to "
+                    _log.info("Message for tunnel " + id.getTunnelId()
+                              + " received where we're not the gateway and there are remaining hops, so forward it on to "
                               + info.getNextHop().toBase64() + " via SendTunnelMessageJob");
 
-                StatManager.getInstance().addRateData("tunnel.relayMessageSize", _message.getData().length, 0);
+                StatManager.getInstance().addRateData("tunnel.relayMessageSize",
+                                                      _message.getData().length, 0);
 
-                JobQueue.getInstance().addJob(new SendMessageDirectJob(_message, info.getNextHop(), Clock.getInstance().now() + FORWARD_TIMEOUT, FORWARD_PRIORITY));
+                JobQueue.getInstance().addJob(new SendMessageDirectJob(_message, info.getNextHop(),
+                                                                       Clock.getInstance().now() + FORWARD_TIMEOUT, FORWARD_PRIORITY));
                 return;
             } else {
                 if (_log.shouldLog(Log.DEBUG))
@@ -174,7 +186,8 @@ public class HandleTunnelMessageJob extends JobImpl {
                 _log.error("Argh, somehow we don't have the decryption key and we have no more steps", getAddedBy());
             return;
         }
-        DeliveryInstructions instructions = getInstructions(_message.getEncryptedDeliveryInstructions(), ourPlace.getEncryptionKey().getKey());
+        DeliveryInstructions instructions = getInstructions(_message.getEncryptedDeliveryInstructions(),
+                                                            ourPlace.getEncryptionKey().getKey());
         if (instructions == null) {
             if (_log.shouldLog(Log.ERROR))
                 _log.error("We are the endpoint of a non-zero length tunnel and we don't have instructions. DROP.", getAddedBy());
@@ -187,7 +200,8 @@ public class HandleTunnelMessageJob extends JobImpl {
             body = decryptBody(_message.getData(), instructions.getEncryptionKey());
         } else {
             if (_log.shouldLog(Log.DEBUG))
-                _log.debug("Body in the tunnel is NOT encrypted: " + instructions + "\n" + _message, new Exception("Hmmm..."));
+                _log.debug("Body in the tunnel is NOT encrypted: " + instructions
+                           + "\n" + _message, new Exception("Hmmm..."));
             body = getBody(_message.getData());
         }
 
@@ -249,20 +263,24 @@ public class HandleTunnelMessageJob extends JobImpl {
     private void sendToTunnel(Hash router, TunnelId id, I2NPMessage body) {
         // TODO: we may want to send it via a tunnel later on, but for now, direct will do.
         if (_log.shouldLog(Log.DEBUG))
-            _log.debug("Sending on to requested tunnel " + id.getTunnelId() + " on router " + router.toBase64());
+            _log.debug("Sending on to requested tunnel " + id.getTunnelId() + " on router "
+                       + router.toBase64());
         TunnelMessage msg = new TunnelMessage();
         msg.setTunnelId(id);
         try {
             ByteArrayOutputStream baos = new ByteArrayOutputStream(1024);
             body.writeBytes(baos);
             msg.setData(baos.toByteArray());
-            JobQueue.getInstance().addJob(new SendMessageDirectJob(msg, router, Clock.getInstance().now() + FORWARD_TIMEOUT, FORWARD_PRIORITY));
+            long exp = Clock.getInstance().now() + FORWARD_TIMEOUT;
+            JobQueue.getInstance().addJob(new SendMessageDirectJob(msg, router, exp, FORWARD_PRIORITY));
 
             String bodyType = body.getClass().getName();
             MessageHistory.getInstance().wrap(bodyType, body.getUniqueId(), TunnelMessage.class.getName(), msg.getUniqueId());
         } catch (DataFormatException dfe) {
+            if (_log.shouldLog(Log.ERROR))
             _log.error("Error writing out the message to forward to the tunnel", dfe);
         } catch (IOException ioe) {
+            if (_log.shouldLog(Log.ERROR))
             _log.error("Error writing out the message to forward to the tunnel", ioe);
         }
     }
@@ -271,7 +289,8 @@ public class HandleTunnelMessageJob extends JobImpl {
         // TODO: we may want to send it via a tunnel later on, but for now, direct will do.
         if (_log.shouldLog(Log.DEBUG))
             _log.debug("Sending on to requested router " + router.toBase64());
-        JobQueue.getInstance().addJob(new SendMessageDirectJob(body, router, Clock.getInstance().now() + FORWARD_TIMEOUT, FORWARD_PRIORITY));
+        long exp = Clock.getInstance().now() + FORWARD_TIMEOUT;
+        JobQueue.getInstance().addJob(new SendMessageDirectJob(body, router, exp, FORWARD_PRIORITY));
     }
 
     private void sendToLocal(I2NPMessage body) {
@@ -286,9 +305,11 @@ public class HandleTunnelMessageJob extends JobImpl {
         boolean valid = MessageValidator.getInstance().validateMessage(msg.getUniqueId(), msg.getMessageExpiration().getTime());
         if (!valid) {
             if (_log.shouldLog(Log.WARN))
-                _log.warn("Duplicate data message received [" + msg.getUniqueId() + " expiring on " + msg.getMessageExpiration() + "]");
+                _log.warn("Duplicate data message received [" + msg.getUniqueId()
+                          + " expiring on " + msg.getMessageExpiration() + "]");
             MessageHistory.getInstance().droppedOtherMessage(msg);
-            MessageHistory.getInstance().messageProcessingError(msg.getUniqueId(), msg.getClass().getName(), "Duplicate payload");
+            MessageHistory.getInstance().messageProcessingError(msg.getUniqueId(), msg.getClass().getName(),
+                                                                "Duplicate payload");
             return;
         }
 
@@ -406,7 +427,8 @@ public class HandleTunnelMessageJob extends JobImpl {
         }
 
         public void dropped() {
-            MessageHistory.getInstance().messageProcessingError(_message.getUniqueId(), _message.getClass().getName(), "Dropped due to overload");
+            MessageHistory.getInstance().messageProcessingError(_message.getUniqueId(), _message.getClass().getName(),
+                                                                "Dropped due to overload");
         }
 
     ////
@@ -463,7 +485,8 @@ public class HandleTunnelMessageJob extends JobImpl {
             } else {
                 if (_log.shouldLog(Log.INFO))
                     _log.info("Message for tunnel " + _info.getTunnelId() +
-                              " received at the gateway (us), but its a 0 length tunnel though it is a " + _body.getClass().getName() + ", so process it locally");
+                              " received at the gateway (us), but its a 0 length tunnel though it is a "
+                              + _body.getClass().getName() + ", so process it locally");
                 InNetMessage msg = new InNetMessage();
                 msg.setFromRouter(_from);
                 msg.setFromRouterHash(_fromHash);
@@ -513,10 +536,11 @@ public class HandleTunnelMessageJob extends JobImpl {
                 return;
             } else {
                 // Honor the delivery instructions
-                //TunnelMonitor.endpointReceive(ourPlace.getTunnelId(), body.getClass().getName(), instructions, ourPlace.getDestination());
                 if (_log.shouldLog(Log.INFO))
-                    _log.info("Message for tunnel " + _ourPlace.getTunnelId().getTunnelId() + " received where we're the endpoint containing a "
-                              + _body.getClass().getName() + " message, so honor the delivery instructions: " + _instructions.toString());
+                    _log.info("Message for tunnel " + _ourPlace.getTunnelId().getTunnelId()
+                              + " received where we're the endpoint containing a "
+                              + _body.getClass().getName() + " message, so honor the delivery instructions: "
+                              + _instructions.toString());
                 honorInstructions(_instructions, _body);
                 return;
             }