Compare commits

...

3 Commits
v0.3 ... lint

Author SHA1 Message Date
Lee Miller 0bf80bbf5b
Specifically skip B311 in manager by bandit 2023-09-09 03:23:01 +03:00
Lee Miller e2df2159e7
Make load_data a static method in manager,
use ascii while loading nodes csv.
2023-09-05 21:05:01 +03:00
Lee Miller 9bf90fca45
Suppress some too-many-* pylint design warnings in parse_arguments() 2023-09-05 21:05:01 +03:00
2 changed files with 57 additions and 56 deletions

View File

@ -2,11 +2,9 @@
"""Functions for starting the program""" """Functions for starting the program"""
import argparse import argparse
import base64 import base64
import csv
import logging import logging
import multiprocessing import multiprocessing
import os import os
import pickle
import signal import signal
import socket import socket
@ -22,7 +20,7 @@ def handler(s, f): # pylint: disable=unused-argument
shared.shutting_down = True shared.shutting_down = True
def parse_arguments(): def parse_arguments(): # pylint: disable=too-many-branches,too-many-statements
"""Parsing arguments""" """Parsing arguments"""
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port', help='Port to listen on', type=int) parser.add_argument('-p', '--port', help='Port to listen on', type=int)
@ -102,56 +100,6 @@ def parse_arguments():
shared.i2p_transient = True shared.i2p_transient = True
def load_data():
    """Load initial nodes and objects, stored in files between sessions.

    Pickled state (objects, node pools) lives in ``shared.data_directory``;
    bundled bootstrap node lists (CSV) live in ``shared.source_directory``.
    A missing pickle is normal on first start; any other load error is
    logged and the corresponding in-memory default is kept.
    """
    try:
        with open(
            os.path.join(shared.data_directory, 'objects.pickle'), 'rb'
        ) as src:
            shared.objects = pickle.load(src)
    except FileNotFoundError:
        pass  # first start
    except Exception:
        logging.warning(
            'Error while loading objects from disk.', exc_info=True)

    try:
        with open(
            os.path.join(shared.data_directory, 'nodes.pickle'), 'rb'
        ) as src:
            shared.node_pool = pickle.load(src)
    except FileNotFoundError:
        pass
    except Exception:
        logging.warning('Error while loading nodes from disk.', exc_info=True)

    try:
        with open(
            os.path.join(shared.data_directory, 'i2p_nodes.pickle'), 'rb'
        ) as src:
            shared.i2p_node_pool = pickle.load(src)
    except FileNotFoundError:
        pass
    except Exception:
        logging.warning(
            'Error while loading i2p nodes from disk.', exc_info=True)

    with open(
        os.path.join(shared.source_directory, 'core_nodes.csv'),
        'r', newline='', encoding='ascii'
    ) as src:
        reader = csv.reader(src)
        # skip blank rows so they don't become empty node tuples
        shared.core_nodes = {tuple(row) for row in reader if row}
    shared.node_pool.update(shared.core_nodes)

    with open(
        os.path.join(shared.source_directory, 'i2p_core_nodes.csv'),
        'r', newline='', encoding='ascii'
    ) as f:
        reader = csv.reader(f)
        # skip blank rows to avoid IndexError on row[0]
        shared.i2p_core_nodes = {
            (row[0].encode(), 'i2p') for row in reader if row}
    shared.i2p_node_pool.update(shared.i2p_core_nodes)
def bootstrap_from_dns(): def bootstrap_from_dns():
"""Adds addresses of bootstrap servers to known nodes""" """Adds addresses of bootstrap servers to known nodes"""
try: try:
@ -290,8 +238,6 @@ def main():
'Error while creating data directory in: %s', 'Error while creating data directory in: %s',
shared.data_directory, exc_info=True) shared.data_directory, exc_info=True)
load_data()
if shared.ip_enabled and not shared.trusted_peer: if shared.ip_enabled and not shared.trusted_peer:
bootstrap_from_dns() bootstrap_from_dns()

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
"""The main thread, managing connections, nodes and objects""" """The main thread, managing connections, nodes and objects"""
import base64 import base64
import csv
import logging import logging
import os import os
import pickle import pickle
@ -25,9 +26,10 @@ class Manager(threading.Thread):
self.last_pickled_nodes = time.time() self.last_pickled_nodes = time.time()
# Publish destination 5-15 minutes after start # Publish destination 5-15 minutes after start
self.last_published_i2p_destination = \ self.last_published_i2p_destination = \
time.time() - 50 * 60 + random.uniform(-1, 1) * 300 # nosec time.time() - 50 * 60 + random.uniform(-1, 1) * 300 # nosec B311
def run(self): def run(self):
self.load_data()
self.clean_objects() self.clean_objects()
while True: while True:
time.sleep(0.8) time.sleep(0.8)
@ -144,6 +146,59 @@ class Manager(threading.Thread):
shared.connections.add(c) shared.connections.add(c)
shared.hosts = hosts shared.hosts = hosts
@staticmethod
def load_data():
    """Load initial nodes and objects, stored in files between sessions.

    Pickled state (objects, node pools) lives in ``shared.data_directory``;
    bundled bootstrap node lists (CSV, ASCII-encoded) live in
    ``shared.source_directory``. A missing pickle is normal on first
    start; any other load error is logged and the corresponding
    in-memory default is kept.
    """
    try:
        with open(
            os.path.join(shared.data_directory, 'objects.pickle'), 'rb'
        ) as src:
            shared.objects = pickle.load(src)
    except FileNotFoundError:
        pass  # first start
    except Exception:
        logging.warning(
            'Error while loading objects from disk.', exc_info=True)

    try:
        with open(
            os.path.join(shared.data_directory, 'nodes.pickle'), 'rb'
        ) as src:
            shared.node_pool = pickle.load(src)
    except FileNotFoundError:
        pass
    except Exception:
        logging.warning(
            'Error while loading nodes from disk.', exc_info=True)

    try:
        with open(
            os.path.join(shared.data_directory, 'i2p_nodes.pickle'), 'rb'
        ) as src:
            shared.i2p_node_pool = pickle.load(src)
    except FileNotFoundError:
        pass
    except Exception:
        logging.warning(
            'Error while loading i2p nodes from disk.', exc_info=True)

    with open(
        os.path.join(shared.source_directory, 'core_nodes.csv'),
        'r', newline='', encoding='ascii'
    ) as src:
        reader = csv.reader(src)
        # skip blank rows so they don't become empty node tuples
        shared.core_nodes = {tuple(row) for row in reader if row}
    shared.node_pool.update(shared.core_nodes)

    with open(
        os.path.join(shared.source_directory, 'i2p_core_nodes.csv'),
        'r', newline='', encoding='ascii'
    ) as f:
        reader = csv.reader(f)
        # skip blank rows to avoid IndexError on row[0]
        shared.i2p_core_nodes = {
            (row[0].encode(), 'i2p') for row in reader if row}
    shared.i2p_node_pool.update(shared.i2p_core_nodes)
@staticmethod @staticmethod
def pickle_objects(): def pickle_objects():
try: try: