"""
src/network/dandelion.py
========================
"""
import logging
from collections import namedtuple
from random import choice, sample, expovariate
from threading import RLock
from time import time
import connectionpool
import state
from queues import invQueue
from singleton import Singleton
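
# How the pieces below fit together (informal summary of this module, not
# normative protocol documentation): objects in "stem" mode are recorded in
# Dandelion.hashMap and are meant to be relayed only to the mapped child peer
# (see objectChildStem()) instead of being advertised to everyone. Each entry
# carries a randomised timeout after which expire() pushes it onto invQueue
# for normal "fluff" (broadcast) propagation, and stem routes are reshuffled
# roughly every REASSIGN_INTERVAL seconds via reRandomiseStems().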

# randomise routes after 600 seconds
REASSIGN_INTERVAL = 600

# trigger fluff due to expiration
FLUFF_TRIGGER_FIXED_DELAY = 10
FLUFF_TRIGGER_MEAN_DELAY = 30

MAX_STEMS = 2

Stem = namedtuple('Stem', ['child', 'stream', 'timeout'])

logger = logging.getLogger('default')


@Singleton
class Dandelion():  # pylint: disable=old-style-class
    """Dandelion class for tracking stem/fluff stages."""
    def __init__(self):
        # currently assignable child stems
        self.stem = []
        # currently assigned parent <-> child mappings
        self.nodeMap = {}
        # currently existing objects in stem mode
        self.hashMap = {}
        # when to rerandomise routes
        self.refresh = time() + REASSIGN_INTERVAL
        self.lock = RLock()

    @staticmethod
    def poissonTimeout(start=None, average=0):
        """Generate deadline using an exponential (Poisson process) delay"""
        if start is None:
            start = time()
        if average == 0:
            average = FLUFF_TRIGGER_MEAN_DELAY
        return start + expovariate(1.0 / average) + FLUFF_TRIGGER_FIXED_DELAY
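
    # Illustrative note: with the defaults above, poissonTimeout() returns a
    # deadline averaging FLUFF_TRIGGER_FIXED_DELAY + FLUFF_TRIGGER_MEAN_DELAY
    # = 40 seconds after `start`; individual delays vary because the random
    # part is drawn from expovariate(). addHash() below stamps every
    # stem-mode object with such a deadline.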

    def addHash(self, hashId, source=None, stream=1):
        """Add inventory vector to dandelion stem"""
        if not state.dandelion:
            return
        with self.lock:
            self.hashMap[hashId] = Stem(
                self.getNodeStem(source),
                stream,
                self.poissonTimeout())

    def setHashStream(self, hashId, stream=1):
        """
        Update stream for inventory vector (as inv/dinv commands don't
        include streams, we only learn this after receiving the object)
        """
        with self.lock:
            if hashId in self.hashMap:
                self.hashMap[hashId] = Stem(
                    self.hashMap[hashId].child,
                    stream,
                    self.poissonTimeout())

    def removeHash(self, hashId, reason="no reason specified"):
        """Switch inventory vector from stem to fluff mode"""
        logger.debug(
            "%s entering fluff mode due to %s.",
            ''.join('%02x' % ord(i) for i in hashId), reason)
        with self.lock:
            try:
                del self.hashMap[hashId]
            except KeyError:
                pass

    def hasHash(self, hashId):
        """Is inventory vector in stem mode?"""
        return hashId in self.hashMap

    def objectChildStem(self, hashId):
        """Child (i.e. next) node for an inventory vector during stem mode"""
        return self.hashMap[hashId].child

    def maybeAddStem(self, connection):
        """
        If we had too few outbound connections, add the current one to the
        current stem list. Dandelion as designed by the authors should
        always have two active stem child connections.
        """
        # fewer than MAX_STEMS outbound connections at last reshuffle?
        with self.lock:
            if len(self.stem) < MAX_STEMS:
                self.stem.append(connection)
                for k in (k for k, v in self.nodeMap.iteritems() if v is None):
                    self.nodeMap[k] = connection
                for k, v in {
                        k: v for k, v in self.hashMap.iteritems()
                        if v.child is None
                }.iteritems():
                    self.hashMap[k] = Stem(
                        connection, v.stream, self.poissonTimeout())
                    invQueue.put((v.stream, k, v.child))

    def maybeRemoveStem(self, connection):
        """
        Remove current connection from the stem list (called e.g. when
        a connection is closed).
        """
        # is the stem active?
        with self.lock:
            if connection in self.stem:
                self.stem.remove(connection)
                # active mappings pointing to the removed node
                for k in (
                        k for k, v in self.nodeMap.iteritems()
                        if v == connection
                ):
                    self.nodeMap[k] = None
                for k, v in {
                        k: v for k, v in self.hashMap.iteritems()
                        if v.child == connection
                }.iteritems():
                    self.hashMap[k] = Stem(
                        None, v.stream, self.poissonTimeout())

    def pickStem(self, parent=None):
        """
        Pick a random active stem, but not the parent one
        (the one where an object came from)
        """
        try:
            # pick a random one from the available stems
            stem = choice(range(len(self.stem)))
            if self.stem[stem] == parent:
                # one stem available and it's the parent
                if len(self.stem) == 1:
                    return None
                # else, pick the other one
                return self.stem[1 - stem]
            # all ok
            return self.stem[stem]
        except IndexError:
            # no stems available
            return None

    def getNodeStem(self, node=None):
        """
        Return child stem node for a given parent stem node
        (the mapping is static for about 10 minutes, then it reshuffles)
        """
        with self.lock:
            try:
                return self.nodeMap[node]
            except KeyError:
                self.nodeMap[node] = self.pickStem(node)
                return self.nodeMap[node]
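
    # Worked example (illustrative) for getNodeStem() above: the first object
    # arriving from parent connection A caches A -> pickStem(A) in nodeMap;
    # later objects from A reuse that same child until reRandomiseStems()
    # clears nodeMap at the next reshuffle.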

    def expire(self):
        """Switch expired objects from stem to fluff mode"""
        with self.lock:
            deadline = time()
            toDelete = [
                [v.stream, k, v.child] for k, v in self.hashMap.iteritems()
                if v.timeout < deadline
            ]
            for row in toDelete:
                self.removeHash(row[1], 'expiration')
                invQueue.put(row)
        return toDelete

    def reRandomiseStems(self):
        """Re-shuffle stem mapping (parent <-> child pairs)"""
        with self.lock:
            try:
                # pick two random outbound connections
                self.stem = sample(
                    connectionpool.BMConnectionPool(
                    ).outboundConnections.values(), MAX_STEMS)
            # not enough stems available
            except ValueError:
                self.stem = connectionpool.BMConnectionPool(
                ).outboundConnections.values()
            self.nodeMap = {}
            # hashMap stays to cater for pending stems
            self.refresh = time() + REASSIGN_INTERVAL
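

# Minimal usage sketch (illustrative only; real callers live elsewhere in
# PyBitmessage, and `conn` / `hashId` here are hypothetical placeholders for
# an outbound connection object and an inventory hash):
#
#     d = Dandelion()                   # @Singleton: same instance everywhere
#     d.addHash(hashId, source=conn)    # start tracking the object in stem mode
#     if d.hasHash(hashId):
#         child = d.objectChildStem(hashId)   # relay target while in stem mode
#     d.expire()    # call periodically: expired objects are removed from the
#                   # stem map and re-queued on invQueue for fluff broadcast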