2018-05-03 23:46:23 +02:00
|
|
|
import json
|
2017-10-19 08:52:44 +02:00
|
|
|
import os
|
2018-05-03 12:05:49 +02:00
|
|
|
import pickle
|
2018-05-03 12:49:43 +02:00
|
|
|
# import sys
|
2017-02-08 13:41:56 +01:00
|
|
|
import threading
|
2018-05-03 12:49:43 +02:00
|
|
|
import time
|
2017-02-08 13:41:56 +01:00
|
|
|
|
2017-02-09 11:53:33 +01:00
|
|
|
import state
|
2018-05-03 12:05:49 +02:00
|
|
|
from bmconfigparser import BMConfigParser
|
2018-05-03 12:49:43 +02:00
|
|
|
from debug import logger
|
2017-02-09 11:53:33 +01:00
|
|
|
|
2017-02-08 13:41:56 +01:00
|
|
|
# Guards every read/write of the knownNodes dict below.
knownNodesLock = threading.Lock()
# {stream: {Peer: info}} for streams 1..3; info is the dict built by
# addKnownNode(): {"lastseen": float, "rating": number, "self": bool}.
knownNodes = {stream: {} for stream in range(1, 4)}

# How many of the oldest peers trimKnownNodes() drops in one pass.
knownNodesTrimAmount = 2000

# forget a node after rating is this low
knownNodesForgetRating = -0.5
|
|
|
|
|
2018-05-21 17:41:00 +02:00
|
|
|
# Hardcoded bootstrap peers used to seed stream 1 when no
# knownnodes.dat can be read (see createDefaultKnownNodes()).
DEFAULT_NODES = (
    state.Peer('5.45.99.75', 8444),
    state.Peer('75.167.159.54', 8444),
    state.Peer('95.165.168.168', 8444),
    state.Peer('85.180.139.241', 8444),
    state.Peer('158.222.217.190', 8080),
    state.Peer('178.62.12.187', 8448),
    state.Peer('24.188.198.204', 8111),
    state.Peer('109.147.204.113', 1195),
    state.Peer('178.11.46.221', 8444)
)
|
|
|
|
|
2018-05-03 12:05:49 +02:00
|
|
|
|
2018-05-03 23:46:23 +02:00
|
|
|
def json_serialize_knownnodes(output):
    """
    Reorganize knownnodes dict and write it as JSON to output.

    Each entry becomes {"stream": n, "peer": {...}, "info": {...}};
    the Peer namedtuple is flattened with _asdict() so it survives
    the JSON round-trip (see json_deserialize_knownnodes).
    """
    _serialized = []
    # .items() instead of the Python-2-only .iteritems(): identical
    # behavior on Python 2 (one extra small list copy) and keeps the
    # module importable on Python 3.
    for stream, peers in knownNodes.items():
        for peer, info in peers.items():
            _serialized.append({
                'stream': stream, 'peer': peer._asdict(), 'info': info
            })
    json.dump(_serialized, output, indent=4)
|
|
|
|
|
|
|
|
|
|
|
|
def json_deserialize_knownnodes(source):
    """
    Read JSON from source and make knownnodes dict.

    Inverse of json_serialize_knownnodes: each record is
    {"stream": n, "peer": {...}, "info": {...}}.
    """
    for record in json.load(source):
        fields = record['peer']
        # normalize the JSON-decoded host to a native str before
        # rebuilding the Peer key
        fields['host'] = str(fields['host'])
        node = state.Peer(**fields)
        knownNodes[record['stream']][node] = record['info']
|
|
|
|
|
|
|
|
|
|
|
|
def pickle_deserialize_old_knownnodes(source):
    """
    Unpickle source and reorganize knownnodes dict if it's in old format

    the old format was {Peer:lastseen, ...}
    the new format is {Peer:{"lastseen":i, "rating":f}}
    """
    # Without this the load binds a LOCAL knownNodes: new-format
    # entries would be silently dropped and only old-format ones
    # (routed through addKnownNode) would survive.
    global knownNodes
    # NOTE(review): pickle.load on a file is unsafe for untrusted
    # input; acceptable here only because knownnodes.dat is our own
    # appdata file.
    knownNodes = pickle.load(source)
    for stream in knownNodes.keys():
        for node, params in knownNodes[stream].items():
            if isinstance(params, (float, int)):
                # old format: value was just the lastseen timestamp
                addKnownNode(stream, node, params)
|
|
|
|
|
|
|
|
|
2018-05-03 12:05:49 +02:00
|
|
|
def saveKnownNodes(dirName=None):
    """Write knownNodes to <dirName>/knownnodes.dat as JSON.

    Defaults to state.appdata when no directory is given.
    """
    target = state.appdata if dirName is None else dirName
    with knownNodesLock:
        with open(os.path.join(target, 'knownnodes.dat'), 'wb') as output:
            json_serialize_knownnodes(output)
|
2017-02-27 23:31:12 +01:00
|
|
|
|
2018-05-03 12:05:49 +02:00
|
|
|
|
2018-05-03 12:49:43 +02:00
|
|
|
def addKnownNode(stream, peer, lastseen=None, is_self=False):
    """
    Add a peer to knownNodes in the given stream, replacing any
    existing entry for that peer.

    lastseen defaults to the current time; is_self marks our own
    node (e.g. the onion address) so it can be treated specially.
    """
    # Explicit None check: `lastseen or time.time()` would also
    # clobber a legitimate falsy timestamp of 0 / 0.0.
    if lastseen is None:
        lastseen = time.time()
    knownNodes[stream][peer] = {
        "lastseen": lastseen,
        "rating": 0,
        "self": is_self,
    }
|
|
|
|
|
|
|
|
|
2018-05-21 17:41:00 +02:00
|
|
|
def createDefaultKnownNodes():
    """Seed stream 1 with the hardcoded bootstrap peers and persist them."""
    for node in DEFAULT_NODES:
        addKnownNode(1, node)
    saveKnownNodes()
|
|
|
|
|
|
|
|
|
2018-05-03 12:49:43 +02:00
|
|
|
def readKnownNodes():
    """Load knownnodes.dat into knownNodes, falling back through formats.

    Tries JSON first; on a parse error rewinds and tries the legacy
    pickle format. If the file is missing/unreadable, seeds the dict
    with the hardcoded defaults. Finally registers our own onion
    address (if configured) as a self node in stream 1.
    """
    try:
        with open(state.appdata + 'knownnodes.dat', 'rb') as source:
            with knownNodesLock:
                try:
                    # current on-disk format
                    json_deserialize_knownnodes(source)
                except ValueError:
                    # not JSON: rewind and try the old pickle format
                    source.seek(0)
                    pickle_deserialize_old_knownnodes(source)
    except (IOError, OSError, KeyError):
        # best-effort: no readable state, start from the defaults
        logger.debug(
            'Failed to read nodes from knownnodes.dat', exc_info=True)
        createDefaultKnownNodes()

    config = BMConfigParser()
    # NOTE(review): disabled future-version guard kept for reference.
    # if config.safeGetInt('bitmessagesettings', 'settingsversion') > 10:
    #     sys.exit(
    #         'Bitmessage cannot read future versions of the keys file'
    #         ' (keys.dat). Run the newer version of Bitmessage.')

    # your own onion address, if setup
    onionhostname = config.safeGet('bitmessagesettings', 'onionhostname')
    if onionhostname and ".onion" in onionhostname:
        onionport = config.safeGetInt('bitmessagesettings', 'onionport')
        if onionport:
            addKnownNode(1, state.Peer(onionhostname, onionport), is_self=True)
|
|
|
|
|
|
|
|
|
2017-07-05 09:17:01 +02:00
|
|
|
def increaseRating(peer):
    """Raise peer's rating by 0.1 in every stream, capped at 1."""
    bump = 0.1
    ceiling = 1
    with knownNodesLock:
        for nodes in knownNodes.values():
            try:
                nodes[peer]["rating"] = min(
                    nodes[peer]["rating"] + bump, ceiling)
            except KeyError:
                # peer not known in this stream - nothing to update
                pass
|
|
|
|
|
2018-05-03 12:05:49 +02:00
|
|
|
|
2017-07-05 09:17:01 +02:00
|
|
|
def decreaseRating(peer):
    """Lower peer's rating by 0.1 in every stream, floored at -1."""
    step = 0.1
    floor = -1
    with knownNodesLock:
        for nodes in knownNodes.values():
            try:
                nodes[peer]["rating"] = max(
                    nodes[peer]["rating"] - step, floor)
            except KeyError:
                # peer not known in this stream - nothing to update
                pass
|
|
|
|
|
2018-05-03 12:05:49 +02:00
|
|
|
|
|
|
|
def trimKnownNodes(recAddrStream=1):
    """Forget the oldest peers in a stream once it exceeds the
    configured "knownnodes"/"maxnodes" cap; drops up to
    knownNodesTrimAmount entries per call.
    """
    if len(knownNodes[recAddrStream]) < \
            BMConfigParser().safeGetInt("knownnodes", "maxnodes"):
        return
    with knownNodesLock:
        # Iterating the dict yields Peer keys; `lastseen` lives in the
        # value dict, so the key function must look it up there —
        # indexing the Peer namedtuple with a string would raise
        # TypeError.
        oldestList = sorted(
            knownNodes[recAddrStream],
            key=lambda x: knownNodes[recAddrStream][x]['lastseen']
        )[:knownNodesTrimAmount]
        for oldest in oldestList:
            del knownNodes[recAddrStream][oldest]
|