Ignore, and don't save to memory, messages larger than 20MB (temporary) #637

Merged
Atheros1 merged 1 commit from master into master 2014-02-05 08:47:59 +01:00
Showing only changes of commit 0de659a04a


@@ -116,13 +116,18 @@ class receiveDataThread(threading.Thread):
         if len(self.data) < 24: # if so little of the data has arrived that we can't even read the checksum then wait for more data.
             return
         if self.data[0:4] != '\xe9\xbe\xb4\xd9':
-            if shared.verbose >= 1:
-                with shared.printLock:
-                    print 'The magic bytes were not correct. First 40 bytes of data: ' + repr(self.data[0:40])
+            #if shared.verbose >= 1:
+            #    with shared.printLock:
+            #        print 'The magic bytes were not correct. First 40 bytes of data: ' + repr(self.data[0:40])
             self.data = ""
             return
         self.payloadLength, = unpack('>L', self.data[16:20])
+        if self.payloadLength > 20000000:
+            logger.info('The incoming message, which we have not yet download, is too large. Ignoring it. (unfortunately there is no way to tell the other node to stop sending it except to disconnect.) Message size: %s' % self.payloadLength)
+            self.data = self.data[self.payloadLength + 24:]
+            self.processData()
+            return
         if len(self.data) < self.payloadLength + 24: # check if the whole message has arrived yet.
             return
         if self.data[20:24] != hashlib.sha512(self.data[24:self.payloadLength + 24]).digest()[0:4]: # test the checksum in the message. If it is correct...
@@ -137,7 +142,7 @@ class receiveDataThread(threading.Thread):
            shared.knownNodesLock.acquire()
            shared.knownNodes[self.streamNumber][self.peer] = int(time.time())
            shared.knownNodesLock.release()
        if self.payloadLength <= 180000000: # If the size of the message is greater than 180MB, ignore it. (I get memory errors when processing messages much larger than this though it is concievable that this value will have to be lowered if some systems are less tolarant of large messages.)
            remoteCommand = self.data[4:16]
            with shared.printLock:
                print 'remoteCommand', repr(remoteCommand.replace('\x00', '')), ' from', self.peer
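
For context, the change works entirely at the framing layer visible in the diff: each Bitmessage message begins with a 24-byte header (4-byte magic \xe9\xbe\xb4\xd9, 12-byte command, 4-byte big-endian payload length, and a 4-byte checksum taken from the first bytes of the payload's SHA-512 digest). The new check reads the length field as soon as the header is available and, if the payload exceeds 20,000,000 bytes, slices the receive buffer past the whole message instead of buffering it. The sketch below is a minimal, standalone illustration of that logic written in Python 3 for clarity; it is not the PyBitmessage code itself, and the names MAX_PAYLOAD_LENGTH and extract_messages are invented for the example.

    # Minimal sketch of the framing logic, assuming only the header layout
    # shown in the diff above. Names are illustrative, not from PyBitmessage.
    import hashlib
    from struct import unpack

    MAGIC = b'\xe9\xbe\xb4\xd9'
    HEADER_LENGTH = 24             # magic(4) + command(12) + length(4) + checksum(4)
    MAX_PAYLOAD_LENGTH = 20000000  # the temporary 20MB cap added by this commit

    def extract_messages(buf):
        """Yield (command, payload) pairs from buf, skipping oversized messages."""
        while len(buf) >= HEADER_LENGTH:
            if buf[0:4] != MAGIC:
                return  # out of sync; the real code clears its buffer and waits
            payload_length, = unpack('>L', buf[16:20])
            if payload_length > MAX_PAYLOAD_LENGTH:
                # Skip past the message without ever holding its payload in
                # memory, using the same buffer-slicing trick as the patch.
                buf = buf[HEADER_LENGTH + payload_length:]
                continue
            if len(buf) < HEADER_LENGTH + payload_length:
                return  # whole message not here yet; wait for more data
            payload = buf[HEADER_LENGTH:HEADER_LENGTH + payload_length]
            if buf[20:24] == hashlib.sha512(payload).digest()[0:4]:
                yield buf[4:16].rstrip(b'\x00'), payload
            buf = buf[HEADER_LENGTH + payload_length:]

Note that when the oversized payload has not fully arrived, self.data[self.payloadLength + 24:] simply leaves an empty buffer; the remaining chunks of the skipped message then fail the magic-bytes check as they trickle in and are discarded, which is presumably why the same commit comments out the "magic bytes were not correct" warning rather than letting it print repeatedly.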