# -*- coding: utf-8 -*-
"""The main thread, managing connections, nodes and objects"""
import base64
import csv
import logging
import os
import pickle
import queue
import random
import threading
import time

from . import proofofwork, shared, structure
from .connection import Bootstrapper, Connection
from .i2p import I2PDialer


class Manager(threading.Thread):
    """The manager thread"""

    def __init__(self):
        super().__init__(name='Manager')
        self.q = queue.Queue()
        self.bootstrap_pool = []
        self.last_cleaned_objects = time.time()
        self.last_cleaned_connections = time.time()
        self.last_pickled_nodes = time.time()
        # Publish destination 5-15 minutes after start
        self.last_published_i2p_destination = \
            time.time() - 50 * 60 + random.uniform(-1, 1) * 300  # nosec B311

    def fill_bootstrap_pool(self):
        """Populate the bootstrap pool with core nodes and checked ones"""
        self.bootstrap_pool = list(shared.core_nodes.union(shared.node_pool))
        random.shuffle(self.bootstrap_pool)

    def run(self):
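        """Load initial data, then run the maintenance loop until shutdown"""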
        self.load_data()
        shared.objects.cleanup()
        self.fill_bootstrap_pool()
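        # Maintenance loop: wake up roughly once a second and check whether
        # it is time to clean up stored objects (every 90 s), prune and open
        # connections (every 2 s), save known nodes (every 60 s) or
        # re-publish the I2P destination (hourly).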
        while True:
            time.sleep(0.8)
            now = time.time()
            if shared.shutting_down:
                logging.debug('Shutting down Manager')
                break
            if now - self.last_cleaned_objects > 90:
                shared.objects.cleanup()
                self.last_cleaned_objects = now
            if now - self.last_cleaned_connections > 2:
                self.manage_connections()
                self.last_cleaned_connections = now
            if now - self.last_pickled_nodes > 60:
                self.pickle_nodes()
                self.last_pickled_nodes = now
            if now - self.last_published_i2p_destination > 3600:
                self.publish_i2p_destination()
                self.last_published_i2p_destination = now

    def manage_connections(self):
        """Open new connections if needed, remove closed ones"""
        hosts = set()

        def connect(target, connection_class=Connection):
            """
            Open a connection of *connection_class*
            to the *target* (host, port)
            """
            c = connection_class(*target)
            c.start()
            with shared.connections_lock:
                shared.connections.add(c)

        def bootstrap():
            """Bootstrap from DNS seed-nodes and known nodes"""
            try:
                target = self.bootstrap_pool.pop()
            except IndexError:
                logging.warning(
                    'Ran out of bootstrap nodes, refilling')
                self.fill_bootstrap_pool()
                return
            logging.info('Starting a bootstrapper for %s:%s', *target)
            connect(target, Bootstrapper)

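        # Drop dead or disconnected connections, re-check the vectors they
        # were still fetching via shared.objects.check(), and count the
        # outgoing connections that remain alive.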
        outgoing_connections = 0
        for c in shared.connections.copy():
            if not c.is_alive() or c.status == 'disconnected':
                shared.objects.check(
                    *(c.vectors_to_get | c.vectors_requested.keys()))
                with shared.connections_lock:
                    shared.connections.remove(c)
            else:
                hosts.add(structure.NetAddrNoPrefix.network_group(c.host))
                if not c.server:
                    outgoing_connections += 1

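        # Track the destinations of I2P dialers as connected hosts
        # and forget the dialers that have finished.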
        for d in shared.i2p_dialers.copy():
            hosts.add(d.destination)
            if not d.is_alive():
                shared.i2p_dialers.remove(d)

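        # Build the set of peers to dial: only the trusted peer when one is
        # configured, otherwise a sample of unchecked and known nodes below.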
        to_connect = set()
        if shared.trusted_peer:
            to_connect.add(shared.trusted_peer)

        if (
            outgoing_connections < shared.outgoing_connections
            and shared.send_outgoing_connections and not shared.trusted_peer
        ):

            if shared.ip_enabled:
                if len(shared.unchecked_node_pool) > 16:
                    to_connect.update(random.sample(
                        tuple(shared.unchecked_node_pool), 16))
                else:
                    to_connect.update(shared.unchecked_node_pool)
                if outgoing_connections < shared.outgoing_connections / 2:
                    bootstrap()
                shared.unchecked_node_pool.difference_update(to_connect)
                if len(shared.node_pool) > 8:
                    to_connect.update(random.sample(
                        tuple(shared.node_pool), 8))
                else:
                    to_connect.update(shared.node_pool)

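            # The I2P selection mirrors the IP one: up to 16 unchecked
            # destinations plus up to 8 already known ones.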
            if shared.i2p_enabled:
                if len(shared.i2p_unchecked_node_pool) > 16:
                    to_connect.update(random.sample(
                        tuple(shared.i2p_unchecked_node_pool), 16))
                else:
                    to_connect.update(shared.i2p_unchecked_node_pool)
                shared.i2p_unchecked_node_pool.difference_update(to_connect)
                if len(shared.i2p_node_pool) > 8:
                    to_connect.update(random.sample(
                        tuple(shared.i2p_node_pool), 8))
                else:
                    to_connect.update(shared.i2p_node_pool)

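        # Dial the candidates, keeping at most one connection per network
        # group; peers with the pseudo-port 'i2p' go through I2PDialer,
        # the rest through a regular Connection.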
        for host, port in to_connect:
            group = structure.NetAddrNoPrefix.network_group(host)
            if group in hosts:
                continue
            if port == 'i2p' and shared.i2p_enabled:
                if shared.i2p_session_nick and host != shared.i2p_dest_pub:
                    try:
                        d = I2PDialer(
                            shared,
                            host, shared.i2p_session_nick,
                            shared.i2p_sam_host, shared.i2p_sam_port)
                        d.start()
                        hosts.add(d.destination)
                        shared.i2p_dialers.add(d)
                    except Exception:
                        logging.warning(
                            'Exception while trying to establish'
                            ' an I2P connection', exc_info=True)
                else:
                    continue
            else:
                connect((host, port))
                hosts.add(group)
        shared.hosts = hosts

    @staticmethod
    def load_data():
        """Load initial nodes and data stored in files between sessions"""
        try:
            with open(
                os.path.join(shared.data_directory, 'nodes.pickle'), 'br'
            ) as src:
                shared.node_pool = pickle.load(src)
        except FileNotFoundError:
            pass
        except Exception:
            logging.warning(
                'Error while loading nodes from disk.', exc_info=True)

        try:
            with open(
                os.path.join(shared.data_directory, 'i2p_nodes.pickle'), 'br'
            ) as src:
                shared.i2p_node_pool = pickle.load(src)
        except FileNotFoundError:
            pass
        except Exception:
            logging.warning(
                'Error while loading nodes from disk.', exc_info=True)

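        # The CSV files shipped with the sources list the hardcoded core
        # nodes: "host,port" rows for IP peers, and a destination stored
        # with the pseudo-port 'i2p' for I2P peers.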
        with open(
            os.path.join(shared.source_directory, 'core_nodes.csv'),
            'r', newline='', encoding='ascii'
        ) as src:
            reader = csv.reader(src)
            shared.core_nodes = {(row[0], int(row[1])) for row in reader}
        shared.node_pool.update(shared.core_nodes)

        with open(
            os.path.join(shared.source_directory, 'i2p_core_nodes.csv'),
            'r', newline='', encoding='ascii'
        ) as f:
            reader = csv.reader(f)
            shared.i2p_core_nodes = {
                (row[0].encode(), 'i2p') for row in reader}
        shared.i2p_node_pool.update(shared.i2p_core_nodes)

    @staticmethod
    def pickle_nodes():
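        """Trim the node pools and save them to disk as pickle files"""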
        if len(shared.node_pool) > 10000:
            shared.node_pool = set(random.sample(
                tuple(shared.node_pool), 10000))
        if len(shared.unchecked_node_pool) > 1000:
            shared.unchecked_node_pool = set(random.sample(
                tuple(shared.unchecked_node_pool), 1000))

        if len(shared.i2p_node_pool) > 1000:
            shared.i2p_node_pool = set(random.sample(
                tuple(shared.i2p_node_pool), 1000))
        if len(shared.i2p_unchecked_node_pool) > 100:
            shared.i2p_unchecked_node_pool = set(random.sample(
                tuple(shared.i2p_unchecked_node_pool), 100))

        try:
            with open(
                os.path.join(shared.data_directory, 'nodes.pickle'), 'bw'
            ) as dst:
                pickle.dump(shared.node_pool, dst, protocol=3)
            with open(
                os.path.join(shared.data_directory, 'i2p_nodes.pickle'), 'bw'
            ) as dst:
                pickle.dump(shared.i2p_node_pool, dst, protocol=3)
            logging.debug('Saved nodes')
        except Exception:
            logging.warning('Error while saving nodes', exc_info=True)

    @staticmethod
    def publish_i2p_destination():
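        """Make and publish an object carrying our public I2P destination"""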
        if shared.i2p_session_nick and not shared.i2p_transient:
            logging.info('Publishing our I2P destination')
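            # The destination is I2P-flavoured base64 ('-' and '~' instead
            # of '+' and '/'); the resulting object expires in two hours.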
            dest_pub_raw = base64.b64decode(
                shared.i2p_dest_pub, altchars=b'-~')
            obj = structure.Object(
                int(time.time() + 2 * 3600),
                shared.i2p_dest_obj_type, shared.i2p_dest_obj_version,
                shared.stream, object_payload=dest_pub_raw)
            proofofwork.do_pow_and_publish(obj)