Smarter advertisement of object hashes and peers #494

Merged
Atheros1 merged 5 commits from master into master 2013-09-10 02:29:20 +02:00
4 changed files with 10 additions and 8 deletions
Showing only changes of commit 831edf0d24

@@ -20,17 +20,14 @@ class objectHashHolder(threading.Thread):
             self.collectionOfLists[i] = []
 
     def run(self):
-        print 'objectHashHolder running.'
         iterator = 0
         while not self.shutdown:
             if len(self.collectionOfLists[iterator]) > 0:
-                print 'objectHashHolder is submitting', len(self.collectionOfLists[iterator]), 'items to the queue.'
                 self.sendDataThreadMailbox.put((0, 'sendinv', self.collectionOfLists[iterator]))
                 self.collectionOfLists[iterator] = []
             iterator += 1
             iterator %= 10
             time.sleep(1)
-        print 'objectHashHolder shutting down.'
 
     def holdHash(self,hash):
         self.collectionOfLists[random.randrange(0, 10)].append(hash)
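
This hunk is the heart of the "smarter advertisement" idea: instead of announcing each object hash the moment it arrives, holdHash drops it into one of ten buckets and run() flushes one bucket per second, so every hash waits a random 0-9 seconds and hashes arriving close together leave as a single inv. A minimal standalone sketch of that batching pattern, standard library only (the class, queue, and variable names here are illustrative, not PyBitmessage's own):

import Queue  # Python 2 standard library
import random
import threading
import time

class HashBatcher(threading.Thread):
    """Spread incoming hashes across ten one-second buckets so that inv
    announcements go out in small batches rather than one at a time."""
    def __init__(self, mailbox):
        threading.Thread.__init__(self)
        self.mailbox = mailbox                 # read by the peer's send thread
        self.buckets = [[] for _ in range(10)]
        self.shutdown = False

    def run(self):
        i = 0
        while not self.shutdown:
            if self.buckets[i]:
                # Hand the whole bucket to the send thread as one 'sendinv'.
                self.mailbox.put((0, 'sendinv', self.buckets[i]))
                self.buckets[i] = []
            i = (i + 1) % 10
            time.sleep(1)

    def holdHash(self, hash):
        # A random bucket means each hash waits 0-9 seconds before being
        # advertised, which groups hashes that arrive close together.
        self.buckets[random.randrange(0, 10)].append(hash)

# Usage: one batcher per connection, feeding that connection's mailbox.
mailbox = Queue.Queue()
batcher = HashBatcher(mailbox)
batcher.start()
batcher.holdHash('\x00' * 32)   # placeholder 32-byte hash
time.sleep(11)                  # give every bucket a chance to flush
batcher.shutdown = True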

@@ -108,7 +108,6 @@ class sendDataThread(threading.Thread):
                 # to our peer after waiting a random amount of time
                 # unless we have a long list of messages in our queue
                 # to send.
-                random.seed()
                 time.sleep(random.randrange(0, 10))
                 self.sock.sendall(data)
                 self.lastTimeISentData = int(time.time())
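
Dropping random.seed() here is safe: Python's random module seeds itself when it is first imported, so re-seeding before every send was redundant. A small sketch of the delayed-send pattern the comment above describes (the function and parameter names are illustrative):

import random
import time

def send_with_jitter(sock, data, max_delay=10):
    # Send data to the peer only after waiting a random 0..max_delay-1
    # seconds, as the comment in the hunk above describes.
    time.sleep(random.randrange(0, max_delay))
    sock.sendall(data)
    return int(time.time())   # caller records this as lastTimeISentData
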
@@ -130,7 +129,6 @@ class sendDataThread(threading.Thread):
                     if hash not in self.someObjectsOfWhichThisRemoteNodeIsAlreadyAware:
                         payload += hash
                 if payload != '':
-                    print 'within sendinv, payload contains', len(payload)/32, 'hashes.'
                     payload = encodeVarint(len(payload)/32) + payload
                     headerData = '\xe9\xbe\xb4\xd9' # magic bits, slighly different from Bitcoin's magic bits.
                     headerData += 'inv\x00\x00\x00\x00\x00\x00\x00\x00\x00'
@@ -149,8 +147,6 @@ class sendDataThread(threading.Thread):
                         shared.sendDataQueues.remove(self.mailbox)
                         print 'sendDataThread thread (ID:', str(id(self)) + ') ending now. Was connected to', self.peer
                         break
-                else:
-                    print '(within sendinv) payload was empty. Not sending anything' #testing.
             elif command == 'pong':
                 self.someObjectsOfWhichThisRemoteNodeIsAlreadyAware.clear() # To save memory, let us clear this data structure from time to time. As its function is to help us keep from sending inv messages to peers which sent us the same inv message mere seconds earlier, it will be fine to clear this data structure from time to time.
                 if self.lastTimeISentData < (int(time.time()) - 298):
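
For reference, the 'sendinv' branch filters out hashes this peer has already advertised to us, prefixes what remains with a varint count, and wraps it in a message header. A simplified sketch of that assembly (encode_varint and the length/checksum fields are filled in from the protocol's usual convention and are assumptions, since they are not part of this hunk):

import hashlib
import struct

def encode_varint(n):
    # Bitmessage-style variable-length integer, big-endian.
    if n < 0xfd:
        return struct.pack('>B', n)
    elif n <= 0xffff:
        return '\xfd' + struct.pack('>H', n)
    elif n <= 0xffffffff:
        return '\xfe' + struct.pack('>I', n)
    return '\xff' + struct.pack('>Q', n)

def assemble_inv(hashes, alreadyKnownToPeer):
    payload = ''.join(h for h in hashes if h not in alreadyKnownToPeer)
    if payload == '':
        return None                        # nothing new to advertise
    payload = encode_varint(len(payload) / 32) + payload
    header = '\xe9\xbe\xb4\xd9'            # magic bits
    header += 'inv' + '\x00' * 9           # command name padded to 12 bytes
    header += struct.pack('>L', len(payload))         # payload length (assumed)
    header += hashlib.sha512(payload).digest()[:4]    # checksum (assumed)
    return header + payload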

@@ -7,6 +7,7 @@ from helper_sql import *
 '''The singleCleaner class is a timer-driven thread that cleans data structures to free memory, resends messages when a remote node doesn't respond, and sends pong messages to keep connections alive if the network isn't busy.
 
 It cleans these data structures in memory:
 inventory (moves data to the on-disk sql database)
+inventorySets (clears then reloads data out of sql database)
 It cleans these tables on the disk:
 inventory (clears data more than 2 days and 12 hours old)
@@ -109,4 +110,12 @@ class singleCleaner(threading.Thread):
                 shared.workerQueue.put(('sendmessage', ''))
                 shared.UISignalQueue.put((
                     'updateStatusBar', 'Doing work necessary to again attempt to deliver a message...'))
+            # Let's also clear and reload shared.inventorySets to keep it from
+            # taking up an unnecessary amount of memory.
+            for streamNumber in shared.inventorySets:
+                shared.inventorySets[streamNumber] = set()
+                queryData = sqlQuery('''SELECT hash FROM inventory WHERE streamnumber=?''', streamNumber)
+                for row in queryData:
+                    shared.inventorySets[streamNumber].add(row[0])
             time.sleep(300)
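
The added block rebuilds shared.inventorySets (the per-stream sets of object hashes held in memory) from scratch on every pass, so hashes whose rows have since been deleted from the inventory table stop taking up memory. An equivalent sketch using sqlite3 directly instead of the helper_sql wrappers (the database path is illustrative):

import sqlite3

def reload_inventory_sets(inventorySets, dbPath='messages.dat'):
    # Start each stream's set from empty, then repopulate it from the
    # on-disk inventory table.
    conn = sqlite3.connect(dbPath)
    cur = conn.cursor()
    for streamNumber in inventorySets:
        inventorySets[streamNumber] = set()
        cur.execute('SELECT hash FROM inventory WHERE streamnumber=?',
                    (streamNumber,))
        for (hashValue,) in cur.fetchall():
            inventorySets[streamNumber].add(hashValue)
    conn.close()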

@@ -304,7 +304,7 @@ def doCleanShutdown():
 def broadcastToSendDataQueues(data):
     # logger.debug('running broadcastToSendDataQueues')
     for q in sendDataQueues:
-        q.put((data))
+        q.put(data)
 
 def flushInventory():
     #Note that the singleCleanerThread clears out the inventory dictionary from time to time, although it only clears things that have been in the dictionary for a long time. This clears the inventory dictionary Now.
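
A note on the q.put change: (data) is just data in redundant parentheses (a one-element tuple would be (data,)), so the old and new calls enqueue the same object and the edit is purely cosmetic. A minimal sketch of the fan-out pattern broadcastToSendDataQueues implements (queue count and payload are illustrative):

import Queue  # Python 2 standard library

sendDataQueues = [Queue.Queue() for _ in range(3)]   # one mailbox per connected peer

def broadcastToAll(data):
    # Each sendDataThread watches its own queue, so putting the same tuple
    # on every queue delivers the command to every connected peer.
    for q in sendDataQueues:
        q.put(data)

broadcastToAll((0, 'sendinv', ['\x00' * 32]))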