import time

import addresses
import helper_random
import protocol
from dandelion import Dandelion
from debug import logger
from helper_threading import StoppableThread
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from objectracker import missingObjects


class DownloadThread(StoppableThread):
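    """Thread that requests missing objects from connected peers"""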
    minPending = 200
    maxRequestChunk = 1000  # upper bound on objects requested per cycle
    requestTimeout = 60
    cleanInterval = 60  # how often (seconds) to expire stale pending requests
    requestExpires = 3600  # drop pending requests older than this (seconds)

    def __init__(self):
        super(DownloadThread, self).__init__(name="Downloader")
        logger.info("init download thread")
        self.lastCleaned = time.time()

    def cleanPending(self):
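        """Expire pending object requests that have been outstanding too long"""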
        deadline = time.time() - DownloadThread.requestExpires
        try:
            toDelete = [
                k for k, v in missingObjects.iteritems() if v < deadline]
        except RuntimeError:
            # missingObjects may be modified by other threads mid-iteration;
            # skip this round and retry on the next cleaning cycle
            pass
        else:
            for i in toDelete:
                del missingObjects[i]
            self.lastCleaned = time.time()

    def run(self):
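        """Main loop: pick peers at random and request objects we are missing"""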
        while not self._stopped:
            requested = 0
            # Choose downloading peers randomly
            connections = [
                x for x in
                BMConnectionPool().inboundConnections.values() +
                BMConnectionPool().outboundConnections.values()
                if x.fullyEstablished]
            helper_random.randomshuffle(connections)
            try:
                # spread the pending objects across peers, capped at
                # maxRequestChunk in total per cycle
                requestChunk = max(int(
                    min(DownloadThread.maxRequestChunk, len(missingObjects))
                    / len(connections)), 1)
            except ZeroDivisionError:
                # no fully established connections yet
                requestChunk = 1
            for i in connections:
                now = time.time()
                # avoid unnecessary delay
                if i.skipUntil >= now:
                    continue
                try:
                    request = i.objectsNewToMe.randomKeys(requestChunk)
                except KeyError:
                    # nothing new to request from this peer
                    continue
                payload = bytearray()
                chunkCount = 0
                for chunk in request:
                    if chunk in Inventory() and not Dandelion().hasHash(chunk):
                        # already in inventory (and not a Dandelion stem
                        # object), so there is no need to request it
                        del i.objectsNewToMe[chunk]
                        continue
                    payload.extend(chunk)
                    chunkCount += 1
                    missingObjects[chunk] = now
                if not chunkCount:
                    continue
                # prepend the object count and send a single getdata packet
                payload[0:0] = addresses.encodeVarint(chunkCount)
                i.append_write_buf(protocol.CreatePacket('getdata', payload))
                logger.debug(
                    "%s:%i Requesting %i objects",
                    i.destination.host, i.destination.port, chunkCount)
                requested += chunkCount
            if time.time() >= self.lastCleaned + DownloadThread.cleanInterval:
                self.cleanPending()
            if not requested:
                self.stop.wait(1)