2018-05-22 12:34:01 +02:00
|
|
|
# pylint: disable=too-many-boolean-expressions,too-many-return-statements,too-many-locals,too-many-statements
|
|
|
|
"""
|
|
|
|
protocol.py
|
|
|
|
===========
|
|
|
|
|
|
|
|
Low-level protocol-related functions.
|
|
|
|
"""
|
|
|
|
|
|
|
|
from __future__ import absolute_import
|
|
|
|
|
2017-01-11 14:27:19 +01:00
|
|
|
import base64
|
2017-01-11 17:00:00 +01:00
|
|
|
from binascii import hexlify
|
2017-01-11 14:27:19 +01:00
|
|
|
import hashlib
|
2018-05-24 17:59:40 +02:00
|
|
|
import os
|
2017-01-11 14:27:19 +01:00
|
|
|
import random
|
|
|
|
import socket
|
|
|
|
import ssl
|
|
|
|
from struct import pack, unpack, Struct
|
|
|
|
import sys
|
|
|
|
import time
|
2017-01-11 17:46:33 +01:00
|
|
|
import traceback
|
2017-01-11 14:27:19 +01:00
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
import defaults
|
|
|
|
import highlevelcrypto
|
|
|
|
import state
|
2018-10-12 17:42:31 +02:00
|
|
|
from addresses import encodeVarint, decodeVarint, decodeAddress, varintDecodeError
|
2017-02-22 09:34:54 +01:00
|
|
|
from bmconfigparser import BMConfigParser
|
2017-01-11 17:00:00 +01:00
|
|
|
from debug import logger
|
|
|
|
from helper_sql import sqlExecute
|
2017-01-11 14:27:19 +01:00
|
|
|
from version import softwareVersion
|
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
|
|
|
# Service flags advertised in the version message's services bitfield
NODE_NETWORK = 1
NODE_SSL = 2
NODE_DANDELION = 8

# Bitfield flags (behavior bitfield carried in pubkeys)
BITFIELD_DOESACK = 1

# Error types used in "error" messages (fatal severity field)
STATUS_WARNING = 0
STATUS_ERROR = 1
STATUS_FATAL = 2

# Object types
OBJECT_GETPUBKEY = 0
OBJECT_PUBKEY = 1
OBJECT_MSG = 2
OBJECT_BROADCAST = 3
OBJECT_I2P = 0x493250
OBJECT_ADDR = 0x61646472

# Random 8-byte nonce sent in our version message; if a peer echoes it
# back, we know we have connected to ourselves.
eightBytesOfRandomDataUsedToDetectConnectionsToSelf = pack(
    '>Q', random.randrange(1, 18446744073709551615))

# Compiled struct for packing/unpacking headers
# New code should use CreatePacket instead of Header.pack
Header = Struct('!L12sL4s')

# Compiled struct for the fixed-size leading fields of a version message
VersionPacket = Struct('>LqQ20s4s36sH')
|
2017-03-11 11:12:08 +01:00
|
|
|
|
2017-01-11 14:27:19 +01:00
|
|
|
# Bitfield
|
2015-12-15 20:30:32 +01:00
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
2015-12-15 20:30:32 +01:00
|
|
|
def getBitfield(address):
    """Assemble the packed 4-byte bitfield of features enabled for *address*."""
    # Start with no features; see the wiki for the full flag list.
    flags = 0
    # Enable acknowledgements unless this address opted out.
    if not BMConfigParser().safeGetBoolean(address, 'dontsendack'):
        flags |= BITFIELD_DOESACK
    return pack('>I', flags)
|
2015-12-15 20:30:32 +01:00
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
2015-12-15 20:30:32 +01:00
|
|
|
def checkBitfield(bitfieldBinary, flags):
    """Return True when every bit of *flags* is set in the packed bitfield."""
    value = unpack('>I', bitfieldBinary)[0]
    return value & flags == flags
|
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
2017-01-11 14:27:19 +01:00
|
|
|
def isBitSetWithinBitfield(fourByteString, n):
    """
    Check if bit number *n* is set within the 4-byte bitfield
    *fourByteString*. Uses MSB 0 bit numbering: bit 0 is the most
    significant bit of the first byte.
    """
    # Convert MSB 0 numbering into a conventional LSB 0 shift amount.
    n = 31 - n
    x, = unpack('>L', fourByteString)
    # Shift instead of 2 ** n: same mask, avoids a big-int exponentiation.
    return x & (1 << n) != 0
|
|
|
|
|
|
|
|
|
2017-05-25 23:04:33 +02:00
|
|
|
# ip addresses
|
2017-01-11 14:27:19 +01:00
|
|
|
|
2018-05-24 17:59:40 +02:00
|
|
|
|
2017-01-11 14:27:19 +01:00
|
|
|
def encodeHost(host):
    """Encode a given host to be used in low-level socket operations"""
    if '.onion' in host:
        # Onion addresses share the fd87:d87e:eb43::/48 prefix with BMD
        return '\xfd\x87\xd8\x7e\xeb\x43' + base64.b32decode(
            host.split(".")[0], True)
    if ':' not in host:
        # Plain IPv4: embed it as an IPv4-mapped IPv6 address
        return '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + \
            socket.inet_aton(host)
    return socket.inet_pton(socket.AF_INET6, host)
|
|
|
|
|
2017-01-11 14:27:19 +01:00
|
|
|
|
2017-01-12 19:18:56 +01:00
|
|
|
def networkType(host):
    """Determine if a host is IPv4, IPv6 or an onion address"""
    if '.onion' in host:
        return 'onion'
    # No colon means dotted-quad IPv4; anything else is IPv6 notation.
    return 'IPv4' if ':' not in host else 'IPv6'
|
|
|
|
|
2017-01-12 19:18:56 +01:00
|
|
|
|
2017-05-27 19:09:21 +02:00
|
|
|
def checkIPAddress(host, private=False):
    """Returns hostStandardFormat if it is a valid IP address, otherwise returns False"""
    # IPv4-mapped IPv6 prefix: delegate to the IPv4 validator.
    if host[0:12] == '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF':
        hostStandardFormat = socket.inet_ntop(socket.AF_INET, host[12:])
        return checkIPv4Address(host[12:], hostStandardFormat, private)
    # Onion, based on BMD/bitcoind
    if host[0:6] == '\xfd\x87\xd8\x7e\xeb\x43':
        hostStandardFormat = base64.b32encode(host[6:]).lower() + ".onion"
        return False if private else hostStandardFormat
    # Otherwise treat it as a native IPv6 address.
    try:
        hostStandardFormat = socket.inet_ntop(socket.AF_INET6, host)
    except ValueError:
        return False
    if hostStandardFormat == "":
        # This can happen on Windows systems which are not 64-bit compatible
        # so let us drop the IPv6 address.
        return False
    return checkIPv6Address(host, hostStandardFormat, private)
|
2017-05-25 23:04:33 +02:00
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
2017-05-27 19:09:21 +02:00
|
|
|
def checkIPv4Address(host, hostStandardFormat, private=False):
    """Returns hostStandardFormat if it is an IPv4 address, otherwise returns False"""
    # Each reserved range is acceptable only when a private address was
    # explicitly requested; otherwise it is logged and rejected.
    if host[0] == '\x7F':  # 127/8
        if not private:
            logger.debug(
                'Ignoring IP address in loopback range: %s', hostStandardFormat)
        return hostStandardFormat if private else False
    if host[0] == '\x0A':  # 10/8
        if not private:
            logger.debug(
                'Ignoring IP address in private range: %s', hostStandardFormat)
        return hostStandardFormat if private else False
    if host[0:2] == '\xC0\xA8':  # 192.168/16
        if not private:
            logger.debug(
                'Ignoring IP address in private range: %s', hostStandardFormat)
        return hostStandardFormat if private else False
    if '\xAC\x10' <= host[0:2] < '\xAC\x20':  # 172.16/12
        if not private:
            logger.debug(
                'Ignoring IP address in private range: %s', hostStandardFormat)
        return hostStandardFormat if private else False
    # Public address: valid unless the caller demanded a private one.
    return False if private else hostStandardFormat
|
2017-05-25 23:04:33 +02:00
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
2017-05-27 19:09:21 +02:00
|
|
|
def checkIPv6Address(host, hostStandardFormat, private=False):
    """Returns hostStandardFormat if it is an IPv6 address, otherwise returns False"""
    # ::1 is rejected unconditionally, even when private addresses are wanted.
    if host == ('\x00' * 15) + '\x01':
        if not private:
            logger.debug('Ignoring loopback address: %s', hostStandardFormat)
        return False
    # fe80::/10 link-local addresses
    if host[0] == '\xFE' and ord(host[1]) & 0xc0 == 0x80:
        if not private:
            logger.debug('Ignoring local address: %s', hostStandardFormat)
        return hostStandardFormat if private else False
    # fc00::/7 unique local addresses
    if ord(host[0]) & 0xfe == 0xfc:
        if not private:
            logger.debug('Ignoring unique local address: %s', hostStandardFormat)
        return hostStandardFormat if private else False
    # Globally routable: valid unless a private address was demanded.
    return False if private else hostStandardFormat
|
2017-05-25 23:04:33 +02:00
|
|
|
|
2017-01-11 14:27:19 +01:00
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
def haveSSL(server=False):
    """
    Predicate to check if ECDSA server support is required and available

    python < 2.7.9's ssl library does not support ECDSA server due to
    missing initialisation of available curves, but client works ok
    """
    # Client mode always works; server mode needs python >= 2.7.9.
    return not server or sys.version_info >= (2, 7, 9)
|
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
2017-01-11 14:27:19 +01:00
|
|
|
def checkSocksIP(host):
    """
    Predicate to check if we're using a SOCKS proxy.

    Resolves and caches the configured SOCKS hostname in state.socksIP on
    first use, then compares *host* against the cached value.
    """
    try:
        # `state.socksIP is None or not state.socksIP` was redundant:
        # None is already falsy, so a single truthiness test suffices.
        if not state.socksIP:
            state.socksIP = socket.gethostbyname(
                BMConfigParser().get("bitmessagesettings", "sockshostname"))
    # uninitialised
    # NOTE(review): an unset attribute on the state module would raise
    # AttributeError, not NameError; this handler looks vestigial --
    # confirm state always defines socksIP.
    except NameError:
        state.socksIP = socket.gethostbyname(
            BMConfigParser().get("bitmessagesettings", "sockshostname"))
    # resolving failure: fall back to comparing the raw configured hostname
    except socket.gaierror:
        state.socksIP = BMConfigParser().get(
            "bitmessagesettings", "sockshostname")
    return state.socksIP == host
|
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
2017-01-11 17:00:00 +01:00
|
|
|
def isProofOfWorkSufficient(data,
                            nonceTrialsPerByte=0,
                            payloadLengthExtraBytes=0,
                            recvTime=0):
    """
    Validate an object's Proof of Work using method described in:
    https://bitmessage.org/wiki/Proof_of_work

    Arguments:
        int nonceTrialsPerByte (default: from default.py)
        int payloadLengthExtraBytes (default: from default.py)
        float recvTime (optional) UNIX epoch time when object was
          received from the network (default: current system time)

    Returns:
        True if PoW valid and sufficient, False in all other cases
    """
    # Never accept a difficulty easier than the network minimum.
    if nonceTrialsPerByte < defaults.networkDefaultProofOfWorkNonceTrialsPerByte:
        nonceTrialsPerByte = defaults.networkDefaultProofOfWorkNonceTrialsPerByte
    if payloadLengthExtraBytes < defaults.networkDefaultPayloadLengthExtraBytes:
        payloadLengthExtraBytes = defaults.networkDefaultPayloadLengthExtraBytes
    endOfLifeTime, = unpack('>Q', data[8:16])
    # Remaining time-to-live relative to receipt time, clamped to 5 minutes.
    TTL = endOfLifeTime - (int(recvTime) if recvTime else int(time.time()))
    if TTL < 300:
        TTL = 300
    # Double SHA-512 of nonce + hash(payload); first 8 bytes are the trial value.
    POW, = unpack('>Q', hashlib.sha512(hashlib.sha512(
        data[:8] + hashlib.sha512(data[8:]).digest()).digest()).digest()[0:8])
    # Use explicit floor division: `/` floors here on Python 2 ints, but
    # would produce floats (and a different threshold) on Python 3.
    return POW <= 2 ** 64 // (
        nonceTrialsPerByte * (
            len(data) + payloadLengthExtraBytes +
            ((TTL * (len(data) + payloadLengthExtraBytes)) // (2 ** 16))))
|
2017-01-11 14:27:19 +01:00
|
|
|
|
|
|
|
|
|
|
|
# Packet creation
|
|
|
|
|
2018-05-24 17:59:40 +02:00
|
|
|
|
2017-01-11 14:27:19 +01:00
|
|
|
def CreatePacket(command, payload=''):
    """Construct and return a number of bytes from a payload"""
    # Header layout (see Header struct): magic, command, payload length,
    # first 4 bytes of the payload's SHA-512 as checksum.
    checksum = hashlib.sha512(payload).digest()[0:4]
    packet = bytearray(Header.size + len(payload))
    Header.pack_into(packet, 0, 0xE9BEB4D9, command, len(payload), checksum)
    packet[Header.size:] = payload
    return bytes(packet)
|
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
|
|
|
def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server=False, nodeid=None):
    """
    Construct the payload of a version message, return the resulting bytes of
    running CreatePacket() on it.

    remoteHost/remotePort identify the peer the message is addressed to;
    participatingStreams is an iterable of stream numbers we serve; server
    indicates this is an incoming (listening-side) connection; nodeid, if
    given, supplies the first 8 bytes of the self-connection-detection nonce.
    """
    payload = ''
    payload += pack('>L', 3)  # protocol version.
    # bitflags of the services I offer.
    payload += pack(
        '>q',
        NODE_NETWORK |
        (NODE_SSL if haveSSL(server) else 0) |
        (NODE_DANDELION if state.dandelion else 0)
    )
    payload += pack('>q', int(time.time()))

    payload += pack(
        '>q', 1)  # boolservices of remote connection; ignored by the remote host.
    if checkSocksIP(remoteHost) and server:  # prevent leaking of tor outbound IP
        payload += encodeHost('127.0.0.1')
        payload += pack('>H', 8444)
    else:
        payload += encodeHost(remoteHost)
        payload += pack('>H', remotePort)  # remote IPv6 and port

    # bitflags of the services I offer.
    payload += pack(
        '>q',
        NODE_NETWORK |
        (NODE_SSL if haveSSL(server) else 0) |
        (NODE_DANDELION if state.dandelion else 0)
    )
    # = 127.0.0.1. This will be ignored by the remote host. The actual remote connected IP will be used.
    payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + pack('>L', 2130706433)
    # we have a separate extPort and incoming over clearnet or outgoing through clearnet
    if BMConfigParser().safeGetBoolean('bitmessagesettings', 'upnp') and state.extPort \
            and ((server and not checkSocksIP(remoteHost)) or
                 (BMConfigParser().get("bitmessagesettings", "socksproxytype") == "none" and not server)):
        payload += pack('>H', state.extPort)
    elif checkSocksIP(remoteHost) and server:  # incoming connection over Tor
        payload += pack('>H', BMConfigParser().getint('bitmessagesettings', 'onionport'))
    else:  # no extPort and not incoming over Tor
        payload += pack('>H', BMConfigParser().getint('bitmessagesettings', 'port'))

    # Re-seed before any use of the random module elsewhere.
    random.seed()
    # Nonce used to detect connections to self (peer echoes it back).
    if nodeid is not None:
        payload += nodeid[0:8]
    else:
        payload += eightBytesOfRandomDataUsedToDetectConnectionsToSelf
    userAgent = '/PyBitmessage:' + softwareVersion + '/'
    payload += encodeVarint(len(userAgent))
    payload += userAgent

    # Streams
    payload += encodeVarint(len(participatingStreams))
    count = 0
    for stream in sorted(participatingStreams):
        payload += encodeVarint(stream)
        count += 1
        # protocol limit, see specification
        if count >= 160000:
            break

    return CreatePacket('version', payload)
|
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
2017-01-11 14:27:19 +01:00
|
|
|
def assembleErrorMessage(fatal=0, banTime=0, inventoryVector='', errorText=''):
    """Construct the payload of an error message, return the resultng bytes of running CreatePacket() on it"""
    # Error payload: severity, ban time, then length-prefixed inventory
    # vector and error text.
    parts = [
        encodeVarint(fatal),
        encodeVarint(banTime),
        encodeVarint(len(inventoryVector)),
        inventoryVector,
        encodeVarint(len(errorText)),
        errorText,
    ]
    return CreatePacket('error', ''.join(parts))
|
|
|
|
|
|
|
|
|
|
|
|
# Packet decoding
|
|
|
|
|
2018-05-24 17:59:40 +02:00
|
|
|
|
2017-01-11 14:27:19 +01:00
|
|
|
def decryptAndCheckPubkeyPayload(data, address):
    """
    Version 4 pubkeys are encrypted. This function is run when we already have the
    address to which we want to try to send a message. The 'data' may come either
    off of the wire or we might have had it already in our inventory when we tried
    to send a msg to this particular address.

    Returns 'successful' when the pubkey decrypts, verifies and matches the
    address (it is then inserted into the pubkeys table), 'failed' otherwise.
    """
    # pylint: disable=unused-variable
    try:
        status, addressVersion, streamNumber, ripe = decodeAddress(address)

        readPosition = 20  # bypass the nonce, time, and object type
        embeddedAddressVersion, varintLength = decodeVarint(data[readPosition:readPosition + 10])
        readPosition += varintLength
        embeddedStreamNumber, varintLength = decodeVarint(data[readPosition:readPosition + 10])
        readPosition += varintLength
        # We'll store the address version and stream number (and some more) in the pubkeys table.
        storedData = data[20:readPosition]

        if addressVersion != embeddedAddressVersion:
            logger.info('Pubkey decryption was UNsuccessful due to address version mismatch.')
            return 'failed'
        if streamNumber != embeddedStreamNumber:
            logger.info('Pubkey decryption was UNsuccessful due to stream number mismatch.')
            return 'failed'

        tag = data[readPosition:readPosition + 32]
        readPosition += 32
        # the time through the tag. More data is appended onto signedData below after the decryption.
        signedData = data[8:readPosition]
        encryptedData = data[readPosition:]

        # Let us try to decrypt the pubkey
        toAddress, cryptorObject = state.neededPubkeys[tag]
        if toAddress != address:
            logger.critical(
                'decryptAndCheckPubkeyPayload failed due to toAddress mismatch.'
                ' This is very peculiar. toAddress: %s, address %s',
                toAddress,
                address)
            # the only way I can think that this could happen is if someone encodes their address data two different
            # ways. That sort of address-malleability should have been caught by the UI or API and an error given to
            # the user.
            return 'failed'
        try:
            decryptedData = cryptorObject.decrypt(encryptedData)
        except:  # noqa: E722 -- deliberate best-effort: any decryption error means "not for us"
            # Someone must have encrypted some data with a different key
            # but tagged it with a tag for which we are watching.
            logger.info('Pubkey decryption was unsuccessful.')
            return 'failed'

        readPosition = 0
        bitfieldBehaviors = decryptedData[readPosition:readPosition + 4]
        readPosition += 4
        # Keys are stored without the 0x04 uncompressed-point prefix; restore it.
        publicSigningKey = '\x04' + decryptedData[readPosition:readPosition + 64]
        readPosition += 64
        publicEncryptionKey = '\x04' + decryptedData[readPosition:readPosition + 64]
        readPosition += 64
        specifiedNonceTrialsPerByte, specifiedNonceTrialsPerByteLength = decodeVarint(
            decryptedData[readPosition:readPosition + 10])
        readPosition += specifiedNonceTrialsPerByteLength
        specifiedPayloadLengthExtraBytes, specifiedPayloadLengthExtraBytesLength = decodeVarint(
            decryptedData[readPosition:readPosition + 10])
        readPosition += specifiedPayloadLengthExtraBytesLength
        storedData += decryptedData[:readPosition]
        signedData += decryptedData[:readPosition]
        signatureLength, signatureLengthLength = decodeVarint(
            decryptedData[readPosition:readPosition + 10])
        readPosition += signatureLengthLength
        signature = decryptedData[readPosition:readPosition + signatureLength]

        if highlevelcrypto.verify(signedData, signature, hexlify(publicSigningKey)):
            logger.info('ECDSA verify passed (within decryptAndCheckPubkeyPayload)')
        else:
            logger.info('ECDSA verify failed (within decryptAndCheckPubkeyPayload)')
            return 'failed'

        sha = hashlib.new('sha512')
        sha.update(publicSigningKey + publicEncryptionKey)
        ripeHasher = hashlib.new('ripemd160')
        ripeHasher.update(sha.digest())
        embeddedRipe = ripeHasher.digest()

        if embeddedRipe != ripe:
            # Although this pubkey object had the tag we were looking for and was
            # encrypted with the correct encryption key, it doesn't contain the
            # correct pubkeys. Someone is either being malicious or using buggy software.
            logger.info('Pubkey decryption was UNsuccessful due to RIPE mismatch.')
            return 'failed'

        # Everything checked out. Insert it into the pubkeys table.
        logger.info(
            os.linesep.join([
                # BUGFIX: the format arguments must be a tuple. Previously
                # `% addressVersion, streamNumber,` formatted a two-%s string
                # with a single argument, raising TypeError; the generic
                # `except Exception` below swallowed it, so every successful
                # decryption returned 'failed' without saving the pubkey.
                'within decryptAndCheckPubkeyPayload,'
                ' addressVersion: %s, streamNumber: %s' % (addressVersion, streamNumber),
                'ripe %s' % hexlify(ripe),
                'publicSigningKey in hex: %s' % hexlify(publicSigningKey),
                'publicEncryptionKey in hex: %s' % hexlify(publicEncryptionKey),
            ])
        )

        t = (address, addressVersion, storedData, int(time.time()), 'yes')
        sqlExecute('''INSERT INTO pubkeys VALUES (?,?,?,?,?)''', *t)
        return 'successful'
    except varintDecodeError:
        logger.info('Pubkey decryption was UNsuccessful due to a malformed varint.')
        return 'failed'
    except Exception:
        logger.critical(
            'Pubkey decryption was UNsuccessful because of an unhandled exception! This is definitely a bug! \n%s',
            traceback.format_exc())
        return 'failed'
|
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
2017-01-11 20:47:27 +01:00
|
|
|
# sslProtocolVersion: pick the most permissive "TLSv1 or better" protocol
# constant available in this interpreter's ssl module.
if sys.version_info >= (2, 7, 13):
    # this means TLSv1 or higher
    # in the future change to
    # ssl.PROTOCOL_TLS1.2
    sslProtocolVersion = ssl.PROTOCOL_TLS  # pylint: disable=no-member
elif sys.version_info >= (2, 7, 9):
    # this means any SSL/TLS. SSLv2 and 3 are excluded with an option after context is created
    sslProtocolVersion = ssl.PROTOCOL_SSLv23
else:
    # this means TLSv1, there is no way to set "TLSv1 or higher" or
    # "TLSv1.2" in < 2.7.9
    sslProtocolVersion = ssl.PROTOCOL_TLSv1
|
2017-01-14 17:47:57 +01:00
|
|
|
|
2018-05-22 12:34:01 +02:00
|
|
|
|
2017-01-14 17:47:57 +01:00
|
|
|
# ciphers: the anonymous ECDH suite used for peer connections.
if ssl.OPENSSL_VERSION_NUMBER >= 0x10100000 and not ssl.OPENSSL_VERSION.startswith("LibreSSL"):
    # OpenSSL >= 1.1 (but not LibreSSL) gets an explicit security level;
    # presumably SECLEVEL=0 is needed to keep the anonymous cipher enabled
    # under the stricter defaults -- confirm against the OpenSSL docs.
    sslProtocolCiphers = "AECDH-AES256-SHA@SECLEVEL=0"
else:
    sslProtocolCiphers = "AECDH-AES256-SHA"
|