from collections import namedtuple
from random import choice, sample
from threading import RLock
from time import time

from bmconfigparser import BMConfigParser
import network.connectionpool
from debug import logging
from queues import invQueue
from singleton import Singleton
import state
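
# State for dandelion-style routing of objects: a new object is first relayed
# along a single, randomly picked "stem" connection and is only announced to
# all peers ("fluff" mode) once its stem timeout expires; stem routes are
# re-randomised every REASSIGN_INTERVAL seconds.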

# randomise routes after 600 seconds
REASSIGN_INTERVAL = 600
# trigger fluff due to expiration in 2 minutes
FLUFF_TRIGGER_TIMEOUT = 120
MAX_STEMS = 2

Stem = namedtuple('Stem', ['child', 'stream', 'timeout'])
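# Each Stem entry tracks one object that is still in stem mode:
# child   - the downstream connection the object is relayed to
#           (None while no stem connection is assigned)
# stream  - the stream number the object belongs to
# timeout - once this deadline passes, the object falls back to fluff mode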


@Singleton
class Dandelion():
    def __init__(self):
        # currently assignable child stems
        self.stem = []
        # currently assigned parent <-> child mappings
        self.nodeMap = {}
        # currently existing objects in stem mode
        self.hashMap = {}
        # when to rerandomise routes
        self.refresh = time() + REASSIGN_INTERVAL
        self.lock = RLock()

    def addHash(self, hashId, source=None, stream=1):
        if not state.dandelion:
            return
        with self.lock:
            self.hashMap[hashId] = Stem(
                self.getNodeStem(source),
                stream,
                time() + FLUFF_TRIGGER_TIMEOUT)

    def setHashStream(self, hashId, stream=1):
        with self.lock:
            if hashId in self.hashMap:
                self.hashMap[hashId] = Stem(
                    self.hashMap[hashId].child,
                    stream,
                    time() + FLUFF_TRIGGER_TIMEOUT)

    def removeHash(self, hashId, reason="no reason specified"):
        logging.debug(
            "%s entering fluff mode due to %s.",
            ''.join('%02x' % ord(i) for i in hashId), reason)
        with self.lock:
            try:
                del self.hashMap[hashId]
            except KeyError:
                pass

    def hasHash(self, hashId):
        return hashId in self.hashMap

    def objectChildStem(self, hashId):
        return self.hashMap[hashId].child

    def maybeAddStem(self, connection):
        # fewer than MAX_STEMS outbound connections at last reshuffle?
        with self.lock:
            if len(self.stem) < MAX_STEMS:
                self.stem.append(connection)
                for k in (k for k, v in self.nodeMap.iteritems() if v is None):
                    self.nodeMap[k] = connection
                for k, v in {
                        k: v for k, v in self.hashMap.iteritems()
                        if v.child is None
                }.iteritems():
                    self.hashMap[k] = Stem(
                        connection, v.stream, time() + FLUFF_TRIGGER_TIMEOUT)
                    invQueue.put((v.stream, k, v.child))

    def maybeRemoveStem(self, connection):
        # is the stem active?
        with self.lock:
            if connection in self.stem:
                self.stem.remove(connection)
                # active mappings pointing to the removed node
                for k in (k for k, v in self.nodeMap.iteritems() if v == connection):
                    self.nodeMap[k] = None
                for k, v in {
                        k: v for k, v in self.hashMap.iteritems()
                        if v.child == connection
                }.iteritems():
                    self.hashMap[k] = Stem(
                        None, v.stream, time() + FLUFF_TRIGGER_TIMEOUT)

    def pickStem(self, parent=None):
        try:
            # pick a random one from the available stems
            stem = choice(range(len(self.stem)))
            if self.stem[stem] == parent:
                # one stem available and it's the parent
                if len(self.stem) == 1:
                    return None
                # else, pick the other one
                return self.stem[1 - stem]
            # all ok
            return self.stem[stem]
        except IndexError:
            # no stems available
            return None
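    # illustrative behaviour of pickStem (follows from the code above): with
    # two stems [a, b], pickStem(a) always returns b; with a single stem [a],
    # pickStem(a) returns None; with no stems at all it also returns None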

    def getNodeStem(self, node=None):
        with self.lock:
            try:
                return self.nodeMap[node]
            except KeyError:
                self.nodeMap[node] = self.pickStem(node)
                return self.nodeMap[node]

    def expire(self):
        with self.lock:
            deadline = time()
            # only expire those that have a child node, i.e. those without
            # a child node will stick around
            toDelete = [
                [v.stream, k, v.child]
                for k, v in self.hashMap.iteritems()
                if v.timeout < deadline and v.child]
            for row in toDelete:
                self.removeHash(row[1], 'expiration')
                invQueue.put((row[0], row[1], row[2]))

    def reRandomiseStems(self):
        with self.lock:
            try:
                # pick two random outbound connections
                self.stem = sample(
                    network.connectionpool.BMConnectionPool(
                    ).outboundConnections.values(), MAX_STEMS)
            # not enough stems available
            except ValueError:
                self.stem = network.connectionpool.BMConnectionPool(
                ).outboundConnections.values()
            self.nodeMap = {}
            # hashMap stays to cater for pending stems
            self.refresh = time() + REASSIGN_INTERVAL
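

if __name__ == '__main__':
    # Minimal usage sketch, illustrative only: it assumes this module is run
    # from inside the PyBitmessage source tree (so the imports above resolve)
    # and that setting ``state.dandelion`` to a truthy value is enough to
    # enable stem mode for this demonstration.
    state.dandelion = 2  # assumption: any truthy value enables stem routing
    dandelion = Dandelion()  # @Singleton: every call returns the same object
    dummy = '\x00' * 32  # hypothetical 32-byte inventory hash
    dandelion.addHash(dummy)  # no stems are known yet, so the child is None
    print dandelion.hasHash(dummy)          # True
    print dandelion.objectChildStem(dummy)  # None
    dandelion.removeHash(dummy, "end of demo")  # drops it into fluff mode
    print dandelion.hasHash(dummy)          # False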