Lower the level of most log messages, but add exc_info for some

- log info on disconnect, debug when disconnected locally
- don't log connections that failed for obvious reasons (see the sketch below)
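
Two patterns recur in the diffs below: choosing the log level from the errno of a failed connection, and passing exc_info=True so the logging module formats the traceback itself instead of a second logging.warning(e) call. A minimal sketch of both, not taken from the commit; the host/port arguments are placeholders, and the symbolic errno names stand in for the numeric 101, 111 and 113 hard-coded in the first hunk (their Linux values):

import errno
import logging
import socket

# Errnos treated as "obvious" connection failures (on Linux: 101, 111, 113).
QUIET_ERRNOS = (errno.ENETUNREACH, errno.ECONNREFUSED, errno.EHOSTUNREACH)


def connect(host, port):
    """Sketch of the logging policy: quiet failures at debug, the rest
    at info, unexpected exceptions at info with a full traceback."""
    try:
        return socket.create_connection((host, port), 10)
    except socket.timeout:
        pass  # timeouts are routine, no log line at all
    except OSError as e:
        level = logging.DEBUG if e.errno in QUIET_ERRNOS else logging.INFO
        logging.log(level, 'Connection to %s:%s failed. Reason: %s',
                    host, port, e)
    except Exception:
        # exc_info=True appends the formatted traceback to the record,
        # replacing the old separate logging.warning(e) call.
        logging.info('Connection to %s:%s failed.', host, port, exc_info=True)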
Dmitri Bogomolov 2021-08-02 19:08:08 +03:00 committed by Lee Miller
parent 60dc1b9d08
commit 30a8a32c92
Signed by untrusted user: lee.miller
GPG Key ID: 4F97A5EA88F4AB63
3 changed files with 47 additions and 30 deletions

View File

@@ -141,24 +141,31 @@ class Connection(threading.Thread):
             if not data:
                 self.status = 'disconnected'
                 self.s.close()
-                logging.info(
+                logging.debug(
                     'Disconnected from %s:%s', self.host_print, self.port)
                 break
             time.sleep(0.2)

     def _connect(self):
-        logging.debug('Connecting to %s:%s', self.host_print, self.port)
+        peer_str = '{0.host_print}:{0.port}'.format(self)
+        logging.debug('Connecting to %s', peer_str)
         try:
             self.s = socket.create_connection((self.host, self.port), 10)
             self.status = 'connected'
+            logging.debug('Established TCP connection to %s', peer_str)
+        except socket.timeout:
+            pass
+        except OSError as e:
+            # unreachable, refused, no route
+            (logging.info if e.errno not in (101, 111, 113)
+             else logging.debug)(
+                'Connection to %s failed. Reason: %s', peer_str, e)
+        except Exception:
             logging.info(
-                'Established TCP connection to %s:%s',
-                self.host_print, self.port)
-        except Exception as e:
-            logging.warning(
-                'Connection to %s:%s failed. Reason: %s',
-                self.host_print, self.port, e)
+                'Connection to %s failed.', peer_str, exc_info=True)
         if self.status != 'connected':
             self.status = 'failed'

     def _send_data(self):
@@ -332,6 +339,9 @@ class Connection(threading.Thread):
                 self.status = 'disconnecting'
                 self.send_queue.put(None)
             else:
+                logging.info(
+                    '%s:%s claims to be %s',
+                    self.host_print, self.port, version.user_agent)
                 self.send_queue.put(message.Message(b'verack', b''))
                 self.verack_sent = True
                 self.remote_version = version
@@ -405,7 +415,7 @@ class Connection(threading.Thread):
             self.send_queue.put(message.Message(b'pong', b''))
         elif m.command == b'error':
-            logging.error(
+            logging.warning(
                 '%s:%s -> error: %s', self.host_print, self.port, m.payload)
         else:
@@ -415,6 +425,8 @@ class Connection(threading.Thread):
         if self.vectors_to_get and len(self.vectors_requested) < 100:
             self.vectors_to_get.difference_update(shared.objects.keys())
             if self.vectors_to_get:
+                logging.info(
+                    'Queued %s vectors to get', len(self.vectors_to_get))
                 if len(self.vectors_to_get) > 64:
                     pack = random.sample(self.vectors_to_get, 64)
                     self.send_queue.put(message.GetData(pack))
@@ -437,12 +449,14 @@ class Connection(threading.Thread):
                 if t < time.time() - 10 * 60}
             if to_re_request:
                 self.vectors_to_get.update(to_re_request)
-                logging.debug(
+                logging.info(
                     'Re-requesting %i objects from %s:%s',
                     len(to_re_request), self.host_print, self.port)

     def _send_objects(self):
         if self.vectors_to_send:
+            logging.info(
+                'Preparing to send %s objects', len(self.vectors_to_send))
             if len(self.vectors_to_send) > 16:
                 to_send = random.sample(self.vectors_to_send, 16)
                 self.vectors_to_send.difference_update(to_send)

View File

@@ -105,27 +105,31 @@ def load_data():
             os.path.join(shared.data_directory, 'objects.pickle'), 'br'
         ) as src:
             shared.objects = pickle.load(src)
-    except Exception as e:
-        logging.warning('Error while loading objects from disk.')
-        logging.warning(e)
+    except FileNotFoundError:
+        pass  # first start
+    except Exception:
+        logging.warning(
+            'Error while loading objects from disk.', exc_info=True)

     try:
         with open(
             os.path.join(shared.data_directory, 'nodes.pickle'), 'br'
         ) as src:
             shared.node_pool = pickle.load(src)
-    except Exception as e:
-        logging.warning('Error while loading nodes from disk.')
-        logging.warning(e)
+    except FileNotFoundError:
+        pass
+    except Exception:
+        logging.warning('Error while loading nodes from disk.', exc_info=True)

     try:
         with open(
             os.path.join(shared.data_directory, 'i2p_nodes.pickle'), 'br'
         ) as src:
             shared.i2p_node_pool = pickle.load(src)
-    except Exception as e:
-        logging.warning('Error while loading nodes from disk.')
-        logging.warning(e)
+    except FileNotFoundError:
+        pass
+    except Exception:
+        logging.warning('Error while loading nodes from disk.', exc_info=True)

     with open(
         os.path.join(shared.source_directory, 'core_nodes.csv'),
@@ -157,7 +161,7 @@ def bootstrap_from_dns():
                     'Adding %s to unchecked_node_pool'
                     ' based on DNS bootstrap method', item[4][0])
     except Exception:
-        logging.error('Error during DNS bootstrap', exc_info=True)
+        logging.info('Error during DNS bootstrap', exc_info=True)


 def start_ip_listener():
@@ -171,22 +175,22 @@ def start_ip_listener():
                 shared.listening_port, family=socket.AF_INET6)
             listener_ipv6.start()
         except Exception:
-            logging.warning(
+            logging.info(
                 'Error while starting IPv6 listener on port %s',
                 shared.listening_port, exc_info=True)

     try:
         listener_ipv4 = Listener(shared.listening_host, shared.listening_port)
         listener_ipv4.start()
-    except Exception:
+    except OSError as e:
         if listener_ipv6:
-            logging.warning(
+            logging.info(
                 'Error while starting IPv4 listener on port %s.'
                 ' However the IPv6 one seems to be working'
-                ' and will probably accept IPv4 connections.',
-                shared.listening_port)
+                ' and will probably accept IPv4 connections.',  # 48 on macos
+                shared.listening_port, exc_info=(e.errno not in (48, 98)))
         else:
-            logging.error(
+            logging.warning(
                 'Error while starting IPv4 listener on port %s.'
                 'You will not receive incoming connections.'
                 ' Please check your port configuration',
@@ -210,7 +214,7 @@ def start_i2p_listener():
                 dest_priv = src.read()
             logging.debug('Loaded I2P destination private key.')
         except Exception:
-            logging.warning(
+            logging.info(
                 'Error while loading I2P destination private key.',
                 exc_info=True)


View File

@@ -145,9 +145,8 @@ class Manager(threading.Thread):
                 with shared.objects_lock:
                     pickle.dump(shared.objects, dst, protocol=3)
                 logging.debug('Saved objects')
-        except Exception as e:
-            logging.warning('Error while saving objects')
-            logging.warning(e)
+        except Exception:
+            logging.warning('Error while saving objects', exc_info=True)

     @staticmethod
     def pickle_nodes():