Download optimisation

- more accurate tracking
- randomise download order
- longer cycle
This commit is contained in:
Peter Šurda 2017-10-22 11:32:37 +02:00
parent 8b06fdf648
commit 75a6f605c1
Signed by untrusted user: PeterSurda
GPG Key ID: 0C5F50C0B5F37D87
2 changed files with 18 additions and 10 deletions

View File

@ -9,9 +9,10 @@ from helper_threading import StoppableThread
#from inventory import Inventory #from inventory import Inventory
from network.connectionpool import BMConnectionPool from network.connectionpool import BMConnectionPool
import protocol import protocol
from state import missingObjects
class DownloadThread(threading.Thread, StoppableThread): class DownloadThread(threading.Thread, StoppableThread):
maxPending = 200 minPending = 200
requestChunk = 1000 requestChunk = 1000
requestTimeout = 60 requestTimeout = 60
cleanInterval = 60 cleanInterval = 60
@ -22,12 +23,17 @@ class DownloadThread(threading.Thread, StoppableThread):
self.initStop() self.initStop()
self.name = "Downloader" self.name = "Downloader"
logger.info("init download thread") logger.info("init download thread")
self.pending = {}
self.lastCleaned = time.time() self.lastCleaned = time.time()
def cleanPending(self): def cleanPending(self):
deadline = time.time() - DownloadThread.requestExpires deadline = time.time() - DownloadThread.requestExpires
self.pending = {k: v for k, v in self.pending.iteritems() if v >= deadline} try:
toDelete = [k for k, v in missingObjects.iteritems() if v < deadline]
except RuntimeError:
pass
else:
for i in toDelete:
del missingObjects[i]
self.lastCleaned = time.time() self.lastCleaned = time.time()
def run(self): def run(self):
@ -41,11 +47,12 @@ class DownloadThread(threading.Thread, StoppableThread):
timedOut = now - DownloadThread.requestTimeout timedOut = now - DownloadThread.requestTimeout
# this may take a while, but it needs consistency, so I think it's better to lock a bigger chunk # this may take a while, but it needs consistency, so I think it's better to lock a bigger chunk
with i.objectsNewToMeLock: with i.objectsNewToMeLock:
downloadPending = len(list((k for k, v in i.objectsNewToMe.iteritems() if k in self.pending and self.pending[k] > timedOut))) downloadPending = len(list((k for k, v in i.objectsNewToMe.iteritems() if k in missingObjects and missingObjects[k] > timedOut)))
if downloadPending >= DownloadThread.maxPending: if downloadPending >= DownloadThread.minPending:
continue continue
# keys with True values in the dict # keys with True values in the dict
request = list((k for k, v in i.objectsNewToMe.iteritems() if k not in self.pending or self.pending[k] < timedOut)) request = list((k for k, v in i.objectsNewToMe.iteritems() if k not in missingObjects or missingObjects[k] < timedOut))
random.shuffle(request)
if not request: if not request:
continue continue
if len(request) > DownloadThread.requestChunk - downloadPending: if len(request) > DownloadThread.requestChunk - downloadPending:
@ -53,7 +60,7 @@ class DownloadThread(threading.Thread, StoppableThread):
# mark them as pending # mark them as pending
for k in request: for k in request:
i.objectsNewToMe[k] = False i.objectsNewToMe[k] = False
self.pending[k] = now missingObjects[k] = now
payload = bytearray() payload = bytearray()
payload.extend(addresses.encodeVarint(len(request))) payload.extend(addresses.encodeVarint(len(request)))
@ -65,4 +72,4 @@ class DownloadThread(threading.Thread, StoppableThread):
if time.time() >= self.lastCleaned + DownloadThread.cleanInterval: if time.time() >= self.lastCleaned + DownloadThread.cleanInterval:
self.cleanPending() self.cleanPending()
if not requested: if not requested:
self.stop.wait(1) self.stop.wait(5)

View File

@ -83,7 +83,8 @@ class ObjectTracker(object):
except KeyError: except KeyError:
pass pass
if hashId not in Inventory(): if hashId not in Inventory():
missingObjects[hashId] = None if hashId not in missingObjects:
missingObjects[hashId] = time.time()
with self.objectsNewToMeLock: with self.objectsNewToMeLock:
self.objectsNewToMe[hashId] = True self.objectsNewToMe[hashId] = True
elif hashId in Dandelion().hashMap: elif hashId in Dandelion().hashMap: