"""
Manipulations with knownNodes dictionary.
"""
|
|
|
|
|
2018-05-03 23:46:23 +02:00
|
|
|
import json
|
2017-10-19 08:52:44 +02:00
|
|
|
import os
|
2018-05-03 12:05:49 +02:00
|
|
|
import pickle
|
2017-02-08 13:41:56 +01:00
|
|
|
import threading
|
2018-05-03 12:49:43 +02:00
|
|
|
import time
|
2017-02-08 13:41:56 +01:00
|
|
|
|
2017-02-09 11:53:33 +01:00
|
|
|
import state
|
2018-05-03 12:05:49 +02:00
|
|
|
from bmconfigparser import BMConfigParser
|
2018-05-03 12:49:43 +02:00
|
|
|
from debug import logger
|
2018-10-03 15:07:06 +02:00
|
|
|
from helper_bootstrap import dns
|
2017-02-09 11:53:33 +01:00
|
|
|
|
2017-02-08 13:41:56 +01:00
|
|
|
# Guards every read/write of the knownNodes dict below.
knownNodesLock = threading.Lock()

# stream number -> {Peer: {"lastseen": float, "rating": float, "self": bool}}
knownNodes = {stream: {} for stream in range(1, 4)}

# how many of the oldest nodes trimKnownNodes() removes per call
knownNodesTrimAmount = 2000

# forget a node after rating is this low
knownNodesForgetRating = -0.5

# becomes True once a peer that is neither a hardcoded default nor our
# own address has been loaded from disk (see json_deserialize_knownnodes)
knownNodesActual = False

# hardcoded clearnet bootstrap peers, used when knownnodes.dat is
# missing or unreadable
DEFAULT_NODES = (
    state.Peer('5.45.99.75', 8444),
    state.Peer('75.167.159.54', 8444),
    state.Peer('95.165.168.168', 8444),
    state.Peer('85.180.139.241', 8444),
    state.Peer('158.222.217.190', 8080),
    state.Peer('178.62.12.187', 8448),
    state.Peer('24.188.198.204', 8111),
    state.Peer('109.147.204.113', 1195),
    state.Peer('178.11.46.221', 8444)
)

# hardcoded bootstrap peer reachable through Tor
DEFAULT_NODES_ONION = (
    state.Peer('quzwelsuziwqgpt2.onion', 8444),
)
|
|
|
|
|
2018-05-03 12:05:49 +02:00
|
|
|
|
2018-05-03 23:46:23 +02:00
|
|
|
def json_serialize_knownnodes(output):
    """
    Flatten the knownNodes dict into a list of records and write it
    to *output* (a writable file object) as indented JSON.
    """
    records = []
    for stream, nodes in knownNodes.iteritems():
        for peer, info in nodes.iteritems():
            # round the stored rating to two decimals (updates the
            # in-memory info dict as well)
            info.update(rating=round(info.get('rating', 0), 2))
            record = {
                'stream': stream, 'peer': peer._asdict(), 'info': info
            }
            records.append(record)
    json.dump(records, output, indent=4)
|
|
|
|
|
|
|
|
|
|
|
|
def json_deserialize_knownnodes(source):
    """
    Load the JSON produced by json_serialize_knownnodes() from *source*
    and fill the global knownNodes dict with its records.
    """
    global knownNodesActual  # pylint: disable=global-statement
    for record in json.load(source):
        raw_peer = record['peer']
        info = record['info']
        peer = state.Peer(str(raw_peer['host']), raw_peer.get('port', 8444))
        knownNodes[record['stream']][peer] = info
        if knownNodesActual or info.get('self'):
            continue
        # The first peer that is neither a hardcoded default nor our
        # own address shows the file holds real data, not just seeds.
        if peer not in DEFAULT_NODES and peer not in DEFAULT_NODES_ONION:
            knownNodesActual = True
|
2018-05-03 23:46:23 +02:00
|
|
|
|
|
|
|
|
|
|
|
def pickle_deserialize_old_knownnodes(source):
    """
    Unpickle source and reorganize knownnodes dict if it's in old format
    the old format was {Peer:lastseen, ...}
    the new format is {Peer:{"lastseen":i, "rating":f}}
    """
    global knownNodes  # pylint: disable=global-statement
    # SECURITY NOTE(review): pickle.load() can execute arbitrary code if
    # knownnodes.dat has been tampered with; it is kept only as a
    # migration path for legacy local files.
    knownNodes = pickle.load(source)
    for stream in knownNodes.keys():
        for node, params in knownNodes[stream].iteritems():
            if isinstance(params, (float, int)):
                # old format stored only the lastseen timestamp;
                # rewrap the entry into the new info-dict format
                # (replacing a value for an existing key is safe
                # during iteritems on Python 2)
                addKnownNode(stream, node, params)
|
|
|
|
|
|
|
|
|
2018-05-03 12:05:49 +02:00
|
|
|
def saveKnownNodes(dirName=None):
    """
    Serialize knownNodes as JSON into knownnodes.dat.

    dirName -- target directory; state.appdata is used when None.
    """
    if dirName is None:
        dirName = state.appdata
    path = os.path.join(dirName, 'knownnodes.dat')
    with knownNodesLock:
        with open(path, 'wb') as output:
            json_serialize_knownnodes(output)
|
2017-02-27 23:31:12 +01:00
|
|
|
|
2018-05-03 12:05:49 +02:00
|
|
|
|
2018-05-03 12:49:43 +02:00
|
|
|
def addKnownNode(stream, peer, lastseen=None, is_self=False):
    """
    Add (or overwrite) *peer* in *stream* with a zero rating.

    lastseen -- seen timestamp; the current time is used when falsy.
    is_self -- mark the entry as one of our own addresses.
    """
    if not lastseen:
        lastseen = time.time()
    entry = {
        "lastseen": lastseen,
        "rating": 0,
        "self": is_self,
    }
    knownNodes[stream][peer] = entry
|
|
|
|
|
|
|
|
|
2018-10-10 17:42:58 +02:00
|
|
|
def createDefaultKnownNodes(onion=False):
    """
    Seed stream 1 with the hardcoded bootstrap peers and save them.

    onion -- seed with the Tor defaults instead of the clearnet ones.
    """
    # Backdate the entries by 28 days minus 10 minutes — presumably so
    # cleanup soon expires any default that never answers (confirm
    # against cleanupKnownNodes' 2419200s threshold).
    stale = time.time() - 2418600
    defaults = DEFAULT_NODES_ONION if onion else DEFAULT_NODES
    for peer in defaults:
        addKnownNode(1, peer, stale)
    saveKnownNodes()
|
|
|
|
|
|
|
|
|
2018-05-03 12:49:43 +02:00
|
|
|
def readKnownNodes():
    """
    Load knownNodes from knownnodes.dat — JSON first, falling back to
    the legacy pickle format — or recreate the defaults when reading
    fails; finally register our own onion address, if configured.
    """
    try:
        with open(state.appdata + 'knownnodes.dat', 'rb') as source:
            with knownNodesLock:
                try:
                    # current on-disk format is JSON ...
                    json_deserialize_knownnodes(source)
                except ValueError:
                    # ... older installs wrote a pickle; retry from the
                    # start of the file with the legacy loader
                    source.seek(0)
                    pickle_deserialize_old_knownnodes(source)
    except (IOError, OSError, KeyError, EOFError):
        logger.debug(
            'Failed to read nodes from knownnodes.dat', exc_info=True)
        createDefaultKnownNodes()

    config = BMConfigParser()

    # your own onion address, if setup
    onionhostname = config.safeGet('bitmessagesettings', 'onionhostname')
    if onionhostname and ".onion" in onionhostname:
        onionport = config.safeGetInt('bitmessagesettings', 'onionport')
        if onionport:
            self_peer = state.Peer(onionhostname, onionport)
            # mark the entry as ourselves so it is never treated as a
            # regular remote peer
            addKnownNode(1, self_peer, is_self=True)
            state.ownAddresses[self_peer] = True
|
2018-05-03 12:49:43 +02:00
|
|
|
|
|
|
|
|
2017-07-05 09:17:01 +02:00
|
|
|
def increaseRating(peer):
    """
    Raise *peer*'s rating by 0.1, capped at 1, in every stream where
    the peer is known; streams without the peer are skipped silently.
    """
    step = 0.1
    ceiling = 1
    with knownNodesLock:
        for stream in knownNodes.keys():
            try:
                entry = knownNodes[stream][peer]
                entry["rating"] = min(entry["rating"] + step, ceiling)
            except KeyError:
                # peer unknown in this stream (or entry has no rating)
                pass
|
|
|
|
|
2018-05-03 12:05:49 +02:00
|
|
|
|
2017-07-05 09:17:01 +02:00
|
|
|
def decreaseRating(peer):
    """
    Lower *peer*'s rating by 0.1, floored at -1, in every stream where
    the peer is known; streams without the peer are skipped silently.
    """
    step = 0.1
    floor = -1
    with knownNodesLock:
        for stream in knownNodes.keys():
            try:
                entry = knownNodes[stream][peer]
                entry["rating"] = max(entry["rating"] - step, floor)
            except KeyError:
                # peer unknown in this stream (or entry has no rating)
                pass
|
|
|
|
|
2018-05-03 12:05:49 +02:00
|
|
|
|
|
|
|
def trimKnownNodes(recAddrStream=1):
    """
    Delete the knownNodesTrimAmount oldest entries from stream
    *recAddrStream* once it holds at least the configured
    "knownnodes"/"maxnodes" count; no-op below that threshold.
    """
    if len(knownNodes[recAddrStream]) < \
            BMConfigParser().safeGetInt("knownnodes", "maxnodes"):
        return
    with knownNodesLock:
        nodes = knownNodes[recAddrStream]
        # Iterating the dict yields Peer (namedtuple) keys, so sort
        # them by their info dict's "lastseen". The previous
        # key=lambda x: x['lastseen'] subscripted the Peer tuple with
        # a string and always raised TypeError.
        oldestList = sorted(
            nodes,
            key=lambda peer: nodes[peer]['lastseen']
        )[:knownNodesTrimAmount]
        for oldest in oldestList:
            del nodes[oldest]
|
2018-10-03 15:07:06 +02:00
|
|
|
|
|
|
|
|
|
|
|
def cleanupKnownNodes():
    """
    Cleanup knownnodes: remove old nodes and nodes with low rating
    """
    now = int(time.time())
    needToWriteKnownNodesToDisk = False
    dns_done = False
    # only bootstrap via DNS when outgoing connections are allowed
    spawnConnections = not BMConfigParser().safeGetBoolean(
        'bitmessagesettings', 'dontconnect'
    ) and BMConfigParser().safeGetBoolean(
        'bitmessagesettings', 'sendoutgoingconnections')

    with knownNodesLock:
        for stream in knownNodes:
            if stream not in state.streamsInWhichIAmParticipating:
                continue
            # snapshot of the keys so entries can be deleted while
            # looping (keys() returns a list on Python 2)
            keys = knownNodes[stream].keys()
            if len(keys) <= 1:  # leave at least one node
                if not dns_done and spawnConnections:
                    # stream nearly empty: fall back to DNS bootstrap,
                    # at most once per cleanup run
                    dns()
                    dns_done = True
                continue
            for node in keys:
                try:
                    # scrap old nodes
                    if (now - knownNodes[stream][node]["lastseen"] >
                            2419200):  # 28 days
                        needToWriteKnownNodesToDisk = True
                        del knownNodes[stream][node]
                        continue
                    # scrap old nodes with low rating
                    if (now - knownNodes[stream][node]["lastseen"] > 10800 and
                            knownNodes[stream][node]["rating"] <=
                            knownNodesForgetRating):
                        needToWriteKnownNodesToDisk = True
                        del knownNodes[stream][node]
                        continue
                except TypeError:
                    # entry not in the expected dict format (e.g. a
                    # legacy bare timestamp) — subscripting raised
                    logger.warning('Error in %s', node)
            # release the snapshot list
            keys = []

    # Let us write out the knowNodes to disk
    # if there is anything new to write out.
    if needToWriteKnownNodesToDisk:
        saveKnownNodes()
|