"""
Dandelion class definition, tracks stem/fluff stages
"""
import logging
from collections import namedtuple
from random import choice, expovariate, sample
from threading import RLock
from time import time
import connectionpool
import state
from queues import invQueue
from singleton import Singleton


# randomise routes after 600 seconds
REASSIGN_INTERVAL = 600
# trigger fluff due to expiration
FLUFF_TRIGGER_FIXED_DELAY = 10
FLUFF_TRIGGER_MEAN_DELAY = 30
MAX_STEMS = 2
Stem = namedtuple('Stem', ['child', 'stream', 'timeout'])
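# e.g. Stem(child=<connection or None>, stream=1, timeout=<unix time>)
# records where to relay an object next, on which stream, and when to
# give up and switch it to fluff mode.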
logger = logging.getLogger('default')


@Singleton
class Dandelion:  # pylint: disable=old-style-class
    """Dandelion class for tracking stem/fluff stages."""
    def __init__(self):
        # currently assignable child stems
        self.stem = []
        # currently assigned parent <-> child mappings
        self.nodeMap = {}
        # currently existing objects in stem mode
        self.hashMap = {}
        # when to rerandomise routes
        self.refresh = time() + REASSIGN_INTERVAL
        self.lock = RLock()

    @staticmethod
    def poissonTimeout(start=None, average=0):
        """
        Generate a fluff trigger deadline: a fixed delay plus an
        exponentially distributed random delay (the inter-event time
        of a Poisson process).
        """
        if start is None:
            start = time()
        if average == 0:
            average = FLUFF_TRIGGER_MEAN_DELAY
        return start + expovariate(1.0 / average) + FLUFF_TRIGGER_FIXED_DELAY
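
    # Illustrative note (not from the original module): with the defaults
    # above, a deadline drawn at start=0 is 10 seconds of fixed delay plus
    # an exponential sample with a 30 second mean, e.g.
    #
    #     Dandelion().poissonTimeout(start=0)  # >= 10.0, ~40.0 on average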

    def addHash(self, hashId, source=None, stream=1):
        """Add inventory vector to dandelion stem"""
        if not state.dandelion:
            return
        with self.lock:
            self.hashMap[hashId] = Stem(
                self.getNodeStem(source),
                stream,
                self.poissonTimeout())
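
    # A hedged usage sketch: callers elsewhere in the codebase are expected
    # to invoke this as something like
    #
    #     Dandelion().addHash(hashId, source=connection, stream=stream)
    #
    # when an object arrives via a dinv command, so it keeps travelling
    # along the stem instead of being advertised to all peers.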

    def setHashStream(self, hashId, stream=1):
        """
        Update stream for inventory vector (as inv/dinv commands don't
        include streams, we only learn this after receiving the object)
        """
        with self.lock:
            if hashId in self.hashMap:
                self.hashMap[hashId] = Stem(
                    self.hashMap[hashId].child,
                    stream,
                    self.poissonTimeout())

    def removeHash(self, hashId, reason="no reason specified"):
        """Switch inventory vector from stem to fluff mode"""
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug(
                '%s entering fluff mode due to %s.',
                ''.join('%02x' % ord(i) for i in hashId), reason)
        with self.lock:
            try:
                del self.hashMap[hashId]
            except KeyError:
                pass

    def hasHash(self, hashId):
        """Is inventory vector in stem mode?"""
        return hashId in self.hashMap

    def objectChildStem(self, hashId):
        """Child (i.e. next) node for an inventory vector during stem mode"""
        return self.hashMap[hashId].child

    def maybeAddStem(self, connection):
        """
        If we have too few outbound connections, add the current one to
        the stem list. Dandelion as designed by the authors should always
        have two active stem child connections.
        """
        # fewer than MAX_STEMS outbound connections at last reshuffle?
        with self.lock:
            if len(self.stem) < MAX_STEMS:
                self.stem.append(connection)
                for k in (k for k, v in self.nodeMap.iteritems() if v is None):
                    self.nodeMap[k] = connection
                for k, v in {
                        k: v for k, v in self.hashMap.iteritems()
                        if v.child is None
                }.iteritems():
                    self.hashMap[k] = Stem(
                        connection, v.stream, self.poissonTimeout())
                    invQueue.put((v.stream, k, v.child))
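
    # Note (an assumption based on how invQueue consumers treat the third
    # element): each queued (stream, hash, child) triple re-announces an
    # object that had been waiting without a stem child; its None third
    # element marks it as locally generated.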

    def maybeRemoveStem(self, connection):
        """
        Remove current connection from the stem list (called e.g. when
        a connection is closed).
        """
        # is the stem active?
        with self.lock:
            if connection in self.stem:
                self.stem.remove(connection)
                # active mappings pointing to the removed node
                for k in (
                        k for k, v in self.nodeMap.iteritems()
                        if v == connection
                ):
                    self.nodeMap[k] = None
                for k, v in {
                        k: v for k, v in self.hashMap.iteritems()
                        if v.child == connection
                }.iteritems():
                    self.hashMap[k] = Stem(
                        None, v.stream, self.poissonTimeout())

    def pickStem(self, parent=None):
        """
        Pick a random active stem, but not the parent one
        (the one where an object came from)
        """
        try:
            # pick a random index from the available stems
            stem = choice(range(len(self.stem)))
            if self.stem[stem] == parent:
                # one stem available and it's the parent
                if len(self.stem) == 1:
                    return None
                # else, pick the other one
                return self.stem[1 - stem]
            # all ok
            return self.stem[stem]
        except IndexError:
            # no stems available
            return None
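
    # Note: the `1 - stem` trick above relies on MAX_STEMS == 2; with
    # exactly two stems, if the randomly chosen index is the parent's,
    # the other stem must be at index 1 - stem.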

    def getNodeStem(self, node=None):
        """
        Return child stem node for a given parent stem node
        (the mapping is static for about 10 minutes, then it reshuffles)
        """
        with self.lock:
            try:
                return self.nodeMap[node]
            except KeyError:
                self.nodeMap[node] = self.pickStem(node)
                return self.nodeMap[node]
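
    # Illustrative: getNodeStem(None) picks and caches a child stem for
    # locally generated objects; repeated calls return the same child until
    # maybeRemoveStem() clears it or reRandomiseStems() resets nodeMap.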

    def expire(self):
        """Switch expired objects from stem to fluff mode"""
        with self.lock:
            deadline = time()
            toDelete = [
                [v.stream, k, v.child] for k, v in self.hashMap.iteritems()
                if v.timeout < deadline
            ]

            for row in toDelete:
                self.removeHash(row[1], 'expiration')
                invQueue.put(row)
            return toDelete

    def reRandomiseStems(self):
        """Re-shuffle stem mapping (parent <-> child pairs)"""
        with self.lock:
            try:
                # pick two random outbound connections
                self.stem = sample(
                    connectionpool.BMConnectionPool(
                    ).outboundConnections.values(), MAX_STEMS)
            # not enough stems available
            except ValueError:
                self.stem = connectionpool.BMConnectionPool(
                ).outboundConnections.values()
            self.nodeMap = {}
            # hashMap stays to cater for pending stems
            self.refresh = time() + REASSIGN_INTERVAL
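

# A minimal driver sketch (an assumption about the surrounding code, not
# part of this module): a maintenance thread is expected to run roughly
#
#     dandelion = Dandelion()
#     while running:
#         dandelion.expire()
#         if dandelion.refresh < time():
#             dandelion.reRandomiseStems()
#
# so that expired stem objects get fluffed and the stem routes reshuffle
# every REASSIGN_INTERVAL (600) seconds.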
|