Compare commits


10 Commits

cdece735ce
Make an object from sample data in test_object() 2023-10-12 03:30:49 +03:00
d4fbc35d7d
Set object tag for object types supporting it 2023-10-07 17:55:50 +03:00
e64ca24266
Cover the main proofofwork call and worker procedure 2023-10-07 17:55:50 +03:00
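
The worker procedure covered here is the standard Bitmessage proof of work: brute-force a nonce until the first 8 bytes of sha512(sha512(nonce + initial_hash)), read as a big-endian integer, drop to the object's target or below. A minimal sketch of that loop (the function name and signature are illustrative, not necessarily the proofofwork module's exact API):

    import hashlib
    import struct

    def pow_worker(target, initial_hash):
        # Nonce search: keep incrementing until the double-SHA512
        # trial value falls to the target or below.
        nonce = 0
        trial = target + 1
        while trial > target:
            nonce += 1
            trial = struct.unpack('>Q', hashlib.sha512(hashlib.sha512(
                struct.pack('>Q', nonce) + initial_hash
            ).digest()).digest()[:8])[0]
        return struct.pack('>Q', nonce)
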
e41391450b
Add Error message class, handle fatal 2023-10-07 17:55:50 +03:00
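
In the protocol an error message carries a fatal status (0 is a warning, 1 an error, 2 fatal) together with a ban time, an inventory vector and an error text; handling fatal presumably means dropping the connection on status 2. A hedged sketch of the receiving side (from_message() and the attribute names are assumptions by analogy with the other message classes):

    error = message.Error.from_message(m)  # assumed decoder method
    logging.warning('Error message received: %s', error.error_text)
    if error.fatal == 2:
        self.status = 'disconnecting'  # hypothetical connection shutdown
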
855f83330a
Define a helper function to read a varint and trim payload 2023-10-07 17:55:50 +03:00
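
Bitmessage varints are big-endian: a first byte below 0xfd is the value itself, while 0xfd, 0xfe and 0xff prefix a 2-, 4- or 8-byte unsigned integer. A sketch of what such a helper could look like, assuming it returns the decoded value together with the trimmed payload (the real name and signature may differ):

    import struct

    def read_varint(data):
        # Decode one varint from the head of the payload
        # and return (value, remaining payload).
        first = data[0]
        if first < 0xfd:
            return first, data[1:]
        fmt, size = {0xfd: ('>H', 2), 0xfe: ('>I', 4), 0xff: ('>Q', 8)}[first]
        return struct.unpack(fmt, data[1:1 + size])[0], data[1 + size:]
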
493bc5a411
Use shared.stream when assembling i2p_dest object instead of hardcoded 1 2023-10-07 17:55:49 +03:00
f0752c9e54
Unify and improve message.Version:
  - from_message() decoding method as in other messages;
  - support multiple streams and move stream check to connection;
  - use shared.stream instead of hardcoded 1;
  - replace values from shared with the instance attributes in to_bytes(),
    put conventional 1 as services of a remote host.
2023-10-07 17:55:49 +03:00
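
On the receiving side the unified flow could look like this (the streams attribute and the error handling are assumptions; only from_message() and the moved stream check are stated above):

    version = message.Version.from_message(m)  # decode as in other messages
    if shared.stream not in version.streams:   # check now done by the connection
        raise ValueError('message not for stream %i' % shared.stream)
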
a363590c18
Add a test for version message 2023-10-07 17:55:49 +03:00
7cd0268212
Improve structure.Object:
  - use shared.stream instead of hardcoded 1;
  - reuse pow_initial_hash() in is_valid().
2023-10-07 17:55:49 +03:00
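
The reuse described here would have is_valid() recompute the proof-of-work trial value from the same pow_initial_hash() the worker starts from instead of duplicating the hashing; a sketch under that assumption, with hashlib and struct as in the worker sketch above (pow_target() is a hypothetical helper for the object's target):

    def is_valid(self):
        # Recompute the trial value from the shared initial hash
        # and compare it against the object's proof-of-work target.
        trial = struct.unpack('>Q', hashlib.sha512(hashlib.sha512(
            self.nonce + self.pow_initial_hash()
        ).digest()).digest()[:8])[0]
        return trial <= self.pow_target()
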
f35a3504a2
Add a test for object covering also proofofwork 2023-10-07 17:55:49 +03:00
5 changed files with 61 additions and 62 deletions

View File

@@ -37,7 +37,7 @@ class I2PDialer(I2PThread):
         self._send(b'HELLO VERSION MIN=3.0 MAX=3.3\n')
         self.version_reply = self._receive_line().split()
         if b'RESULT=OK' not in self.version_reply:
-            logging.debug('Error while connecting to %s', self.destination)
+            logging.warning('Error while connecting to %s', self.destination)
             self.success = False

         self._send(
@@ -45,5 +45,6 @@ class I2PDialer(I2PThread):
             + self.destination + b'\n')
         reply = self._receive_line().split(b' ')
         if b'RESULT=OK' not in reply:
-            logging.debug('Error while connecting to %s', self.destination)
+            logging.warning(
+                'Error while connecting to %s', self.destination)
             self.success = False

View File

@@ -2,9 +2,11 @@
 """Functions for starting the program"""
 import argparse
 import base64
+import csv
 import logging
 import multiprocessing
 import os
+import pickle
 import signal
 import socket
@@ -20,7 +22,7 @@ def handler(s, f): # pylint: disable=unused-argument
     shared.shutting_down = True


-def parse_arguments(): # pylint: disable=too-many-branches,too-many-statements
+def parse_arguments():
     """Parsing arguments"""
     parser = argparse.ArgumentParser()
     parser.add_argument('-p', '--port', help='Port to listen on', type=int)
@@ -100,6 +102,56 @@ def parse_arguments():
     shared.i2p_transient = True


+def load_data():
+    """Loads initial nodes and data, stored in files between sessions"""
+    try:
+        with open(
+            os.path.join(shared.data_directory, 'objects.pickle'), 'br'
+        ) as src:
+            shared.objects = pickle.load(src)
+    except FileNotFoundError:
+        pass  # first start
+    except Exception:
+        logging.warning(
+            'Error while loading objects from disk.', exc_info=True)
+
+    try:
+        with open(
+            os.path.join(shared.data_directory, 'nodes.pickle'), 'br'
+        ) as src:
+            shared.node_pool = pickle.load(src)
+    except FileNotFoundError:
+        pass
+    except Exception:
+        logging.warning('Error while loading nodes from disk.', exc_info=True)
+
+    try:
+        with open(
+            os.path.join(shared.data_directory, 'i2p_nodes.pickle'), 'br'
+        ) as src:
+            shared.i2p_node_pool = pickle.load(src)
+    except FileNotFoundError:
+        pass
+    except Exception:
+        logging.warning('Error while loading nodes from disk.', exc_info=True)
+
+    with open(
+        os.path.join(shared.source_directory, 'core_nodes.csv'),
+        'r', newline=''
+    ) as src:
+        reader = csv.reader(src)
+        shared.core_nodes = {tuple(row) for row in reader}
+    shared.node_pool.update(shared.core_nodes)
+
+    with open(
+        os.path.join(shared.source_directory, 'i2p_core_nodes.csv'),
+        'r', newline=''
+    ) as f:
+        reader = csv.reader(f)
+        shared.i2p_core_nodes = {(row[0].encode(), 'i2p') for row in reader}
+    shared.i2p_node_pool.update(shared.i2p_core_nodes)
+
+
 def bootstrap_from_dns():
     """Adds addresses of bootstrap servers to known nodes"""
     try:
@@ -238,6 +290,8 @@ def main():
             'Error while creating data directory in: %s',
             shared.data_directory, exc_info=True)

+    load_data()
+
     if shared.ip_enabled and not shared.trusted_peer:
         bootstrap_from_dns()

View File

@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
 """The main thread, managing connections, nodes and objects"""
 import base64
-import csv
 import logging
 import os
 import pickle
@@ -26,10 +25,9 @@ class Manager(threading.Thread):
         self.last_pickled_nodes = time.time()
         # Publish destination 5-15 minutes after start
         self.last_published_i2p_destination = \
-            time.time() - 50 * 60 + random.uniform(-1, 1) * 300  # nosec B311
+            time.time() - 50 * 60 + random.uniform(-1, 1) * 300  # nosec

     def run(self):
-        self.load_data()
         self.clean_objects()
         while True:
             time.sleep(0.8)
@@ -146,59 +144,6 @@ class Manager(threading.Thread):
                 shared.connections.add(c)
         shared.hosts = hosts

-    @staticmethod
-    def load_data():
-        """Loads initial nodes and data, stored in files between sessions"""
-        try:
-            with open(
-                os.path.join(shared.data_directory, 'objects.pickle'), 'br'
-            ) as src:
-                shared.objects = pickle.load(src)
-        except FileNotFoundError:
-            pass  # first start
-        except Exception:
-            logging.warning(
-                'Error while loading objects from disk.', exc_info=True)
-
-        try:
-            with open(
-                os.path.join(shared.data_directory, 'nodes.pickle'), 'br'
-            ) as src:
-                shared.node_pool = pickle.load(src)
-        except FileNotFoundError:
-            pass
-        except Exception:
-            logging.warning(
-                'Error while loading nodes from disk.', exc_info=True)
-
-        try:
-            with open(
-                os.path.join(shared.data_directory, 'i2p_nodes.pickle'), 'br'
-            ) as src:
-                shared.i2p_node_pool = pickle.load(src)
-        except FileNotFoundError:
-            pass
-        except Exception:
-            logging.warning(
-                'Error while loading nodes from disk.', exc_info=True)
-
-        with open(
-            os.path.join(shared.source_directory, 'core_nodes.csv'),
-            'r', newline='', encoding='ascii'
-        ) as src:
-            reader = csv.reader(src)
-            shared.core_nodes = {tuple(row) for row in reader}
-        shared.node_pool.update(shared.core_nodes)
-
-        with open(
-            os.path.join(shared.source_directory, 'i2p_core_nodes.csv'),
-            'r', newline='', encoding='ascii'
-        ) as f:
-            reader = csv.reader(f)
-            shared.i2p_core_nodes = {
-                (row[0].encode(), 'i2p') for row in reader}
-        shared.i2p_node_pool.update(shared.i2p_core_nodes)
-
     @staticmethod
     def pickle_objects():
         try:

View File

@@ -17,7 +17,7 @@ sample_addr_data = unhexlify(
     '0000000060f420b3000000010000000000000001'
     '260753000201300000000000000057ae1f90')

-# data for an object with expires_time 1697063939
+# data for object with expires_time 1697063939
 # structure.Object(
 #     b'\x00' * 8, expires_time, 42, 1, 2, b'HELLO').to_bytes()
 sample_object_data = unhexlify(

View File

@@ -41,5 +41,4 @@ ignore_errors = true

 [pylint.main]
 disable = invalid-name,consider-using-f-string,fixme
-max-args = 8
-max-attributes = 8
+max-args = 7