Objects to be downloaded fixes
- avoid calling senddata if it would block receiveDataThread, allowing for more asynchronous operation
- request objects in chunks of 100 (CPU performance optimisation)
parent f079ff5b99
commit dbe15d0b99
@@ -217,9 +217,9 @@ class receiveDataThread(threading.Thread):
             self.data = self.data[payloadLength + protocol.Header.size:] # take this message out and then process the next message
 
             if self.data == '': # if there are no more messages
-                while Missing().len() > 0 and not self.sendDataThreadQueue.full():
-                    objectHash = Missing().pull()
-                    if objectHash is None:
+                if Missing().len() > 0 and self.sendDataThreadQueue.qsize() < 10:
+                    for objectHash in Missing().pull(100):
+                        if self.sendDataThreadQueue.full():
                         break
                     if objectHash in Inventory():
                         logger.debug('Inventory already has object listed in inv message.')
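In plainer terms, the hunk above stops requesting objects one at a time and instead checks that the outgoing queue is nearly empty, pulls up to 100 hashes in one call, and bails out as soon as the queue fills rather than blocking. Below is a minimal, self-contained sketch of that pattern; Missing and sendDataThreadQueue mirror the names in the diff, but the toy class, sizes and queued values are invented for illustration, and it is written in Python 3 syntax while the project code itself is Python 2.

import queue


class Missing(object):
    """Toy stand-in: tracks object hashes that still need to be downloaded."""

    def __init__(self, hashes):
        self.hashes = list(hashes)

    def len(self):
        return len(self.hashes)

    def pull(self, count=1):
        # Hand back up to `count` hashes in one call, mirroring the batching
        # added by this commit.
        batch, self.hashes = self.hashes[:count], self.hashes[count:]
        return batch


sendDataThreadQueue = queue.Queue(maxsize=50)  # outgoing work for the send thread
missing = Missing('hash%d' % i for i in range(250))

# Only start requesting while the outgoing queue is nearly empty, and pull in
# chunks of 100 so the per-pull bookkeeping runs once per batch rather than
# once per object.
if missing.len() > 0 and sendDataThreadQueue.qsize() < 10:
    for objectHash in missing.pull(100):
        if sendDataThreadQueue.full():
            break  # stop instead of blocking receiveDataThread on put()
        sendDataThreadQueue.put(objectHash)  # stand-in for queueing a getdata request

print(sendDataThreadQueue.qsize())  # 50: the queue filled, so the loop stopped early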
@@ -101,14 +101,20 @@ class Missing(object):
         with self.lock:
             return len(self.hashes)
 
-    def pull(self):
+    def pull(self, count=1):
+        if count < 1:
+            raise ValueError("Must be at least one")
         with self.lock:
             now = time.time()
             since = now - 300 # once every 5 minutes
             try:
-                objectHash = random.choice({k:v for k, v in self.hashes.iteritems() if current_thread().peer in self.hashes[k]['peers'] and self.hashes[k]['requested'] < since}.keys())
+                matchingHashes = {k:v for k, v in self.hashes.iteritems() if current_thread().peer in self.hashes[k]['peers'] and self.hashes[k]['requested'] < since}
+                if count > len(matchingHashes):
+                    count = len(matchingHashes)
+                objectHashes = random.sample(matchingHashes, count)
             except (IndexError, KeyError): # list is empty
                 return None
+            for objectHash in objectHashes:
             try:
                 self.hashes[objectHash]['peers'].remove(current_thread().peer)
             except ValueError:
@ -117,7 +123,7 @@ class Missing(object):
|
||||||
self.delete(objectHash)
|
self.delete(objectHash)
|
||||||
else:
|
else:
|
||||||
self.hashes[objectHash]['requested'] = now
|
self.hashes[objectHash]['requested'] = now
|
||||||
return objectHash
|
return objectHashes
|
||||||
|
|
||||||
def delete(self, objectHash):
|
def delete(self, objectHash):
|
||||||
with self.lock:
|
with self.lock:
|
||||||
|
|
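Taken together, the two hunks above turn Missing.pull() into a batch operation: build the dictionary of eligible hashes once, clamp the requested count to what is available, and draw a random sample instead of a single random.choice(). A rough, runnable rendering of that idea follows; it uses Python 3 and drops the per-peer filtering, peer removal and delete() handling shown in the real method, so it sketches the sampling logic only.

import random
import time
from threading import RLock


class MissingSketch(object):
    """Simplified model of the Missing registry: objectHash -> last request time."""

    def __init__(self):
        self.lock = RLock()
        self.hashes = {}  # objectHash -> {'requested': timestamp}

    def add(self, objectHash):
        with self.lock:
            self.hashes[objectHash] = {'requested': 0}

    def pull(self, count=1):
        if count < 1:
            raise ValueError("Must be at least one")
        with self.lock:
            now = time.time()
            since = now - 300  # only re-request an object every 5 minutes
            matching = [k for k, v in self.hashes.items() if v['requested'] < since]
            # Clamp to what is available, then take one random batch instead
            # of one random.choice() call per object.
            objectHashes = random.sample(matching, min(count, len(matching)))
            for objectHash in objectHashes:
                self.hashes[objectHash]['requested'] = now
            return objectHashes


m = MissingSketch()
for i in range(250):
    m.add('hash%d' % i)
print(len(m.pull(100)))  # 100
print(len(m.pull(100)))  # 100
print(len(m.pull(100)))  # 50: only 50 hashes remain eligible in this window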