Fixed: Code style and lint fixes #1261

Merged
coffeedogs merged 2 commits from codeQ-2105 into v0.6 2018-06-12 00:10:53 +02:00
3 changed files with 328 additions and 210 deletions

View File

@ -1,10 +1,17 @@
# Since there is overlap in the violations that the different tools check for, it makes sense to quiesce some warnings
# in some tools if those warnings in other tools are preferred. This avoids the need to add duplicate lint warnings.
[pycodestyle] [pycodestyle]
max-line-length = 119 max-line-length = 119
[flake8] [flake8]
max-line-length = 119 max-line-length = 119
ignore = E722 ignore = E722,F841
# E722: pylint is preferred for bare-except
# F841: pylint is preferred for unused-variable
# pylint # pylint honours the [MESSAGES CONTROL] section
[MESSAGES CONTROL] [MESSAGES CONTROL]
disable=invalid-name,bare-except disable=invalid-name,bare-except,broad-except
# invalid-name: needs fixing during a large, project-wide refactor
# bare-except,broad-except: Need fixing once thorough testing is easier
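
For illustration, a hypothetical snippet (not part of the PR) showing the overlap this config manages: flake8 would flag the bare except as E722 and the unused variable as F841, while pylint reports bare-except and unused-variable for the same lines, so each warning is silenced in the tool whose report is not preferred.

```python
# hypothetical_example.py -- not in the repository, only to show the overlapping checks
def read_version(path):
    try:
        with open(path) as handle:
            return handle.read().strip()
    except:                    # flake8: E722, pylint: bare-except
        fallback = 'unknown'   # flake8: F841, pylint: unused-variable
        return None
```
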

View File

@ -1,6 +1,17 @@
# pylint: disable=too-many-boolean-expressions,too-many-return-statements,too-many-locals,too-many-statements
"""
protocol.py
===========
Low-level protocol-related functions.
"""
from __future__ import absolute_import
import base64 import base64
from binascii import hexlify from binascii import hexlify
import hashlib import hashlib
import os
import random import random
import socket import socket
import ssl import ssl
@ -9,31 +20,32 @@ import sys
import time import time
import traceback import traceback
import defaults
import highlevelcrypto
import state
from addresses import calculateInventoryHash, encodeVarint, decodeVarint, decodeAddress, varintDecodeError from addresses import calculateInventoryHash, encodeVarint, decodeVarint, decodeAddress, varintDecodeError
from bmconfigparser import BMConfigParser from bmconfigparser import BMConfigParser
from debug import logger from debug import logger
import defaults
from helper_sql import sqlExecute from helper_sql import sqlExecute
import highlevelcrypto
from inventory import Inventory from inventory import Inventory
from queues import objectProcessorQueue from queues import objectProcessorQueue
import state
from version import softwareVersion from version import softwareVersion
#Service flags
# Service flags
NODE_NETWORK = 1 NODE_NETWORK = 1
NODE_SSL = 2 NODE_SSL = 2
NODE_DANDELION = 8 NODE_DANDELION = 8
g1itch commented 2018-05-24 14:12:28 +02:00 (Migrated from github.com)

It seems to be safe to sort these standard library imports. Not sure about the local imports below. Did you test?
coffeedogs commented 2018-05-24 17:41:06 +02:00 (Migrated from github.com)

I tested that the gui ran but my test setup is lacking. As I said in the PR description, `Of interest for testing would be the change to imports. The app runs for me but my testing procedure is not thorough yet.`
g1itch commented 2018-05-24 18:37:25 +02:00 (Migrated from github.com)

OK, at least my tests don't fail: https://travis-ci.org/g1itch/PyBitmessage/builds/383283944
#Bitfield flags # Bitfield flags
BITFIELD_DOESACK = 1 BITFIELD_DOESACK = 1
#Error types # Error types
STATUS_WARNING = 0 STATUS_WARNING = 0
STATUS_ERROR = 1 STATUS_ERROR = 1
STATUS_FATAL = 2 STATUS_FATAL = 2
#Object types # Object types
OBJECT_GETPUBKEY = 0 OBJECT_GETPUBKEY = 0
OBJECT_PUBKEY = 1 OBJECT_PUBKEY = 1
OBJECT_MSG = 2 OBJECT_MSG = 2
@ -44,15 +56,17 @@ OBJECT_ADDR = 0x61646472
eightBytesOfRandomDataUsedToDetectConnectionsToSelf = pack( eightBytesOfRandomDataUsedToDetectConnectionsToSelf = pack(
'>Q', random.randrange(1, 18446744073709551615)) '>Q', random.randrange(1, 18446744073709551615))
#Compiled struct for packing/unpacking headers # Compiled struct for packing/unpacking headers
#New code should use CreatePacket instead of Header.pack # New code should use CreatePacket instead of Header.pack
Header = Struct('!L12sL4s') Header = Struct('!L12sL4s')
VersionPacket = Struct('>LqQ20s4s36sH') VersionPacket = Struct('>LqQ20s4s36sH')
# Bitfield # Bitfield
def getBitfield(address): def getBitfield(address):
"""Get a bitfield from an address"""
# bitfield of features supported by me (see the wiki). # bitfield of features supported by me (see the wiki).
bitfield = 0 bitfield = 0
# send ack # send ack
@ -60,36 +74,45 @@ def getBitfield(address):
bitfield |= BITFIELD_DOESACK bitfield |= BITFIELD_DOESACK
return pack('>I', bitfield) return pack('>I', bitfield)
def checkBitfield(bitfieldBinary, flags): def checkBitfield(bitfieldBinary, flags):
"""Check if a bitfield matches the given flags"""
bitfield, = unpack('>I', bitfieldBinary) bitfield, = unpack('>I', bitfieldBinary)
return (bitfield & flags) == flags return (bitfield & flags) == flags
def isBitSetWithinBitfield(fourByteString, n): def isBitSetWithinBitfield(fourByteString, n):
"""Check if a particular bit is set in a bitfeld"""
# Uses MSB 0 bit numbering across 4 bytes of data # Uses MSB 0 bit numbering across 4 bytes of data
n = 31 - n n = 31 - n
x, = unpack('>L', fourByteString) x, = unpack('>L', fourByteString)
return x & 2**n != 0 return x & 2**n != 0
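
A small standalone sketch of how the bitfield helpers above behave (the flag value mirrors BITFIELD_DOESACK from this file; the functions are re-declared only so the example runs on its own):

```python
from struct import pack, unpack

BITFIELD_DOESACK = 1

def check_bitfield(bitfield_binary, flags):
    # every requested flag bit must be set, as in checkBitfield()
    bitfield, = unpack('>I', bitfield_binary)
    return (bitfield & flags) == flags

def is_bit_set(four_byte_string, n):
    # MSB 0 numbering across the 4 bytes, as in isBitSetWithinBitfield()
    x, = unpack('>L', four_byte_string)
    return x & 2 ** (31 - n) != 0

assert check_bitfield(pack('>I', BITFIELD_DOESACK), BITFIELD_DOESACK)
assert not check_bitfield(pack('>I', 0), BITFIELD_DOESACK)
assert is_bit_set(pack('>L', 0x80000000), 0)      # bit 0 is the most significant bit
assert not is_bit_set(pack('>L', 0x80000000), 31)
```
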
# ip addresses # ip addresses
def encodeHost(host): def encodeHost(host):
"""Encode a given host to be used in low-level socket operations"""
if host.find('.onion') > -1: if host.find('.onion') > -1:
return '\xfd\x87\xd8\x7e\xeb\x43' + base64.b32decode(host.split(".")[0], True) return '\xfd\x87\xd8\x7e\xeb\x43' + base64.b32decode(host.split(".")[0], True)
elif host.find(':') == -1: elif host.find(':') == -1:
return '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + \ return '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + \
socket.inet_aton(host) socket.inet_aton(host)
else: return socket.inet_pton(socket.AF_INET6, host)
return socket.inet_pton(socket.AF_INET6, host)
def networkType(host): def networkType(host):
"""Determine if a host is IPv4, IPv6 or an onion address"""
if host.find('.onion') > -1: if host.find('.onion') > -1:
return 'onion' return 'onion'
elif host.find(':') == -1: elif host.find(':') == -1:
return 'IPv4' return 'IPv4'
else: return 'IPv6'
return 'IPv6'
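
encodeHost() always yields a 16-byte, IPv6-sized value: onion names are mapped into the OnionCat prefix fd87:d87e:eb43::/48, plain IPv4 gets the ::ffff:0:0/96 mapped prefix, and IPv6 passes through. A rough standalone sketch (Python 2 byte strings assumed, as in the module itself):

```python
import base64
import socket

ONION_PREFIX = '\xfd\x87\xd8\x7e\xeb\x43'   # OnionCat range fd87:d87e:eb43::/48
IPV4_MAPPED = '\x00' * 10 + '\xFF\xFF'      # ::ffff:0:0/96

def encode_host(host):
    if '.onion' in host:
        return ONION_PREFIX + base64.b32decode(host.split('.')[0], True)
    elif ':' not in host:
        return IPV4_MAPPED + socket.inet_aton(host)
    return socket.inet_pton(socket.AF_INET6, host)

assert len(encode_host('127.0.0.1')) == 16
assert len(encode_host('2001:db8::1')) == 16
```
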
def checkIPAddress(host, private=False): def checkIPAddress(host, private=False):
"""Returns hostStandardFormat if it is a valid IP address, otherwise returns False"""
if host[0:12] == '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF': if host[0:12] == '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF':
hostStandardFormat = socket.inet_ntop(socket.AF_INET, host[12:]) hostStandardFormat = socket.inet_ntop(socket.AF_INET, host[12:])
return checkIPv4Address(host[12:], hostStandardFormat, private) return checkIPv4Address(host[12:], hostStandardFormat, private)
@ -105,56 +128,66 @@ def checkIPAddress(host, private=False):
except ValueError: except ValueError:
return False return False
if hostStandardFormat == "": if hostStandardFormat == "":
# This can happen on Windows systems which are not 64-bit compatible # This can happen on Windows systems which are not 64-bit compatible
# so let us drop the IPv6 address. # so let us drop the IPv6 address.
return False return False
return checkIPv6Address(host, hostStandardFormat, private) return checkIPv6Address(host, hostStandardFormat, private)
def checkIPv4Address(host, hostStandardFormat, private=False): def checkIPv4Address(host, hostStandardFormat, private=False):
if host[0] == '\x7F': # 127/8 """Returns hostStandardFormat if it is an IPv4 address, otherwise returns False"""
if host[0] == '\x7F': # 127/8
if not private: if not private:
logger.debug('Ignoring IP address in loopback range: ' + hostStandardFormat) logger.debug('Ignoring IP address in loopback range: %s', hostStandardFormat)
return hostStandardFormat if private else False return hostStandardFormat if private else False
if host[0] == '\x0A': # 10/8 if host[0] == '\x0A': # 10/8
if not private: if not private:
logger.debug('Ignoring IP address in private range: ' + hostStandardFormat) logger.debug('Ignoring IP address in private range: %s', hostStandardFormat)
return hostStandardFormat if private else False return hostStandardFormat if private else False
if host[0:2] == '\xC0\xA8': # 192.168/16 if host[0:2] == '\xC0\xA8': # 192.168/16
if not private: if not private:
logger.debug('Ignoring IP address in private range: ' + hostStandardFormat) logger.debug('Ignoring IP address in private range: %s', hostStandardFormat)
return hostStandardFormat if private else False return hostStandardFormat if private else False
if host[0:2] >= '\xAC\x10' and host[0:2] < '\xAC\x20': # 172.16/12 if host[0:2] >= '\xAC\x10' and host[0:2] < '\xAC\x20': # 172.16/12
if not private: if not private:
logger.debug('Ignoring IP address in private range:' + hostStandardFormat) logger.debug('Ignoring IP address in private range: %s', hostStandardFormat)
return hostStandardFormat if private else False return hostStandardFormat if private else False
return False if private else hostStandardFormat return False if private else hostStandardFormat
def checkIPv6Address(host, hostStandardFormat, private=False): def checkIPv6Address(host, hostStandardFormat, private=False):
"""Returns hostStandardFormat if it is an IPv6 address, otherwise returns False"""
if host == ('\x00' * 15) + '\x01': if host == ('\x00' * 15) + '\x01':
if not private: if not private:
logger.debug('Ignoring loopback address: ' + hostStandardFormat) logger.debug('Ignoring loopback address: %s', hostStandardFormat)
return False return False
if host[0] == '\xFE' and (ord(host[1]) & 0xc0) == 0x80: if host[0] == '\xFE' and (ord(host[1]) & 0xc0) == 0x80:
if not private: if not private:
logger.debug ('Ignoring local address: ' + hostStandardFormat) logger.debug('Ignoring local address: %s', hostStandardFormat)
return hostStandardFormat if private else False return hostStandardFormat if private else False
if (ord(host[0]) & 0xfe) == 0xfc: if (ord(host[0]) & 0xfe) == 0xfc:
if not private: if not private:
logger.debug ('Ignoring unique local address: ' + hostStandardFormat) logger.debug('Ignoring unique local address: %s', hostStandardFormat)
return hostStandardFormat if private else False return hostStandardFormat if private else False
return False if private else hostStandardFormat return False if private else hostStandardFormat
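
The raw-byte comparisons above encode the usual reserved ranges: \x7F is 127/8, \x0A is 10/8, \xC0\xA8 is 192.168/16, and the pair \xAC\x10 .. \xAC\x20 covers 172.16.0.0 through 172.31.255.255, i.e. 172.16/12. A quick standalone check of that last range test (Python 2 byte strings assumed):

```python
import socket

def in_172_16_slash_12(dotted):
    # same byte-range test as in checkIPv4Address()
    host = socket.inet_aton(dotted)
    return '\xAC\x10' <= host[0:2] < '\xAC\x20'

assert in_172_16_slash_12('172.16.0.1')
assert in_172_16_slash_12('172.31.255.254')
assert not in_172_16_slash_12('172.32.0.1')
assert not in_172_16_slash_12('10.0.0.1')
```
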
# checks
def haveSSL(server = False): def haveSSL(server=False):
# python < 2.7.9's ssl library does not support ECDSA server due to missing initialisation of available curves, but client works ok """
if server == False: Predicate to check if ECDSA server support is required and available
python < 2.7.9's ssl library does not support ECDSA server due to
missing initialisation of available curves, but client works ok
"""
if not server:
return True return True
elif sys.version_info >= (2,7,9): elif sys.version_info >= (2, 7, 9):
return True return True
return False return False
def checkSocksIP(host): def checkSocksIP(host):
"""Predicate to check if we're using a SOCKS proxy"""
try: try:
if state.socksIP is None or not state.socksIP: if state.socksIP is None or not state.socksIP:
state.socksIP = socket.gethostbyname(BMConfigParser().get("bitmessagesettings", "sockshostname")) state.socksIP = socket.gethostbyname(BMConfigParser().get("bitmessagesettings", "sockshostname"))
@ -166,6 +199,7 @@ def checkSocksIP(host):
state.socksIP = BMConfigParser().get("bitmessagesettings", "sockshostname") state.socksIP = BMConfigParser().get("bitmessagesettings", "sockshostname")
return state.socksIP == host return state.socksIP == host
def isProofOfWorkSufficient(data, def isProofOfWorkSufficient(data,
nonceTrialsPerByte=0, nonceTrialsPerByte=0,
payloadLengthExtraBytes=0): payloadLengthExtraBytes=0):
@ -178,34 +212,42 @@ def isProofOfWorkSufficient(data,
if TTL < 300: if TTL < 300:
TTL = 300 TTL = 300
POW, = unpack('>Q', hashlib.sha512(hashlib.sha512(data[ POW, = unpack('>Q', hashlib.sha512(hashlib.sha512(data[
:8] + hashlib.sha512(data[8:]).digest()).digest()).digest()[0:8]) :8] + hashlib.sha512(data[8:]).digest()).digest()).digest()[0:8])
return POW <= 2 ** 64 / (nonceTrialsPerByte*(len(data) + payloadLengthExtraBytes + ((TTL*(len(data)+payloadLengthExtraBytes))/(2 ** 16)))) return POW <= 2 ** 64 / (nonceTrialsPerByte *
(len(data) + payloadLengthExtraBytes +
((TTL * (len(data) + payloadLengthExtraBytes)) / (2 ** 16))))
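
The expression above is the standard Bitmessage difficulty test: the first 8 bytes of the double SHA-512 of (nonce + initial hash) must not exceed a target derived from the payload length and TTL. A standalone sketch of both sides of the comparison; the 1000/1000 values are the usual network minimums and are an assumption here, since the function normally pulls them from defaults when 0 is passed:

```python
import hashlib
from struct import unpack

def pow_target(payload_length, ttl,
               nonce_trials_per_byte=1000,        # assumed network default
               payload_length_extra_bytes=1000):  # assumed network default
    ttl = max(ttl, 300)
    return 2 ** 64 // (nonce_trials_per_byte *
                       (payload_length + payload_length_extra_bytes +
                        (ttl * (payload_length + payload_length_extra_bytes)) // (2 ** 16)))

def pow_value(data):
    # first 8 bytes of sha512(sha512(nonce + sha512(rest_of_object)))
    return unpack('>Q', hashlib.sha512(hashlib.sha512(
        data[:8] + hashlib.sha512(data[8:]).digest()).digest()).digest()[:8])[0]

# a 1000-byte object with a 4-day TTL is acceptable when pow_value(data) <= this:
print(pow_target(1000, 4 * 24 * 60 * 60))
```
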
# Packet creation # Packet creation
def CreatePacket(command, payload=''): def CreatePacket(command, payload=''):
"""Construct and return a number of bytes from a payload"""
payload_length = len(payload) payload_length = len(payload)
checksum = hashlib.sha512(payload).digest()[0:4] checksum = hashlib.sha512(payload).digest()[0:4]
b = bytearray(Header.size + payload_length) b = bytearray(Header.size + payload_length)
Header.pack_into(b, 0, 0xE9BEB4D9, command, payload_length, checksum) Header.pack_into(b, 0, 0xE9BEB4D9, command, payload_length, checksum)
b[Header.size:] = payload b[Header.size:] = payload
return bytes(b) return bytes(b)
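
For reference, the header packed here is the standard Bitmessage wire header: 4-byte magic 0xE9BEB4D9, a 12-byte NUL-padded command, the payload length, and the first 4 bytes of the payload's SHA-512. A self-contained sketch mirroring CreatePacket() and Header = Struct('!L12sL4s'):

```python
import hashlib
from struct import Struct

Header = Struct('!L12sL4s')   # magic, command (12 bytes, NUL padded), payload length, checksum

def create_packet(command, payload=b''):
    checksum = hashlib.sha512(payload).digest()[:4]
    b = bytearray(Header.size + len(payload))
    Header.pack_into(b, 0, 0xE9BEB4D9, command, len(payload), checksum)
    b[Header.size:] = payload
    return bytes(b)

pkt = create_packet(b'verack')
assert len(pkt) == 24                          # header only, empty payload
assert pkt[:4] == b'\xe9\xbe\xb4\xd9'          # network magic
assert pkt[4:16].rstrip(b'\x00') == b'verack'  # command field
```
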
def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server = False, nodeid = None):
def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server=False, nodeid=None):
"""Construct the payload of a version message, return the resultng bytes of running CreatePacket() on it"""
payload = '' payload = ''
payload += pack('>L', 3) # protocol version. payload += pack('>L', 3) # protocol version.
# bitflags of the services I offer. # bitflags of the services I offer.
payload += pack('>q', payload += pack(
NODE_NETWORK | '>q',
(NODE_SSL if haveSSL(server) else 0) | NODE_NETWORK |
(NODE_DANDELION if state.dandelion else 0) (NODE_SSL if haveSSL(server) else 0) |
) (NODE_DANDELION if state.dandelion else 0)
)
payload += pack('>q', int(time.time())) payload += pack('>q', int(time.time()))
payload += pack( payload += pack(
'>q', 1) # boolservices of remote connection; ignored by the remote host. '>q', 1) # boolservices of remote connection; ignored by the remote host.
if checkSocksIP(remoteHost) and server: # prevent leaking of tor outbound IP if checkSocksIP(remoteHost) and server: # prevent leaking of tor outbound IP
payload += encodeHost('127.0.0.1') payload += encodeHost('127.0.0.1')
payload += pack('>H', 8444) payload += pack('>H', 8444)
else: else:
@ -213,23 +255,22 @@ def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server
payload += pack('>H', remotePort) # remote IPv6 and port payload += pack('>H', remotePort) # remote IPv6 and port
# bitflags of the services I offer. # bitflags of the services I offer.
payload += pack('>q', payload += pack(
NODE_NETWORK | '>q',
(NODE_SSL if haveSSL(server) else 0) | NODE_NETWORK |
(NODE_DANDELION if state.dandelion else 0) (NODE_SSL if haveSSL(server) else 0) |
) (NODE_DANDELION if state.dandelion else 0)
payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + pack( )
'>L', 2130706433) # = 127.0.0.1. This will be ignored by the remote host. The actual remote connected IP will be used. # = 127.0.0.1. This will be ignored by the remote host. The actual remote connected IP will be used.
# we have a separate extPort and payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + pack('>L', 2130706433)
# incoming over clearnet or # we have a separate extPort and incoming over clearnet or outgoing through clearnet
# outgoing through clearnet
if BMConfigParser().safeGetBoolean('bitmessagesettings', 'upnp') and state.extPort \ if BMConfigParser().safeGetBoolean('bitmessagesettings', 'upnp') and state.extPort \
and ((server and not checkSocksIP(remoteHost)) or \ and ((server and not checkSocksIP(remoteHost)) or
(BMConfigParser().get("bitmessagesettings", "socksproxytype") == "none" and not server)): (BMConfigParser().get("bitmessagesettings", "socksproxytype") == "none" and not server)):
payload += pack('>H', state.extPort) payload += pack('>H', state.extPort)
elif checkSocksIP(remoteHost) and server: # incoming connection over Tor elif checkSocksIP(remoteHost) and server: # incoming connection over Tor
payload += pack('>H', BMConfigParser().getint('bitmessagesettings', 'onionport')) payload += pack('>H', BMConfigParser().getint('bitmessagesettings', 'onionport'))
else: # no extPort and not incoming over Tor else: # no extPort and not incoming over Tor
payload += pack('>H', BMConfigParser().getint('bitmessagesettings', 'port')) payload += pack('>H', BMConfigParser().getint('bitmessagesettings', 'port'))
random.seed() random.seed()
@ -253,7 +294,9 @@ def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server
return CreatePacket('version', payload) return CreatePacket('version', payload)
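
The services bitmask assembled above is simply an OR of the NODE_* flags defined at the top of the file, packed as a signed 64-bit big-endian integer; a tiny illustration:

```python
from struct import pack

NODE_NETWORK = 1
NODE_SSL = 2
NODE_DANDELION = 8

def services_field(have_ssl, dandelion):
    return pack('>q', NODE_NETWORK |
                (NODE_SSL if have_ssl else 0) |
                (NODE_DANDELION if dandelion else 0))

assert services_field(True, True) == pack('>q', 11)    # 1 | 2 | 8
assert services_field(False, False) == pack('>q', 1)   # plain NODE_NETWORK
```
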
g1itch commented 2018-05-24 13:00:37 +02:00 (Migrated from github.com)

Again, brackets are redundant here. Also I'd prefer formatting like:

```python
'decryptAndCheckPubkeyPayload failed due to toAddress mismatch.'
' This is very peculiar. toAddress: %s, address %s'
```

because in this case the continuation line is better seen.
g1itch commented 2018-05-24 14:01:00 +02:00 (Migrated from github.com)

Next section...
coffeedogs commented 2018-05-24 17:22:56 +02:00 (Migrated from github.com)

OK, change incoming.
coffeedogs commented 2018-05-24 17:34:53 +02:00 (Migrated from github.com)

incoming
def assembleErrorMessage(fatal=0, banTime=0, inventoryVector='', errorText=''): def assembleErrorMessage(fatal=0, banTime=0, inventoryVector='', errorText=''):
"""Construct the payload of an error message, return the resultng bytes of running CreatePacket() on it"""
payload = encodeVarint(fatal) payload = encodeVarint(fatal)
payload += encodeVarint(banTime) payload += encodeVarint(banTime)
payload += encodeVarint(len(inventoryVector)) payload += encodeVarint(len(inventoryVector))
@ -262,43 +305,53 @@ def assembleErrorMessage(fatal=0, banTime=0, inventoryVector='', errorText=''):
payload += errorText payload += errorText
return CreatePacket('error', payload) return CreatePacket('error', payload)
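
The error payload is a sequence of varint-prefixed fields. encodeVarint() and decodeVarint() come from addresses.py and are not part of this diff; the sketch below re-implements the encoder from the protocol specification (big-endian, with 0xfd/0xfe/0xff length prefixes) purely for illustration.

```python
from struct import pack

def encode_varint(n):
    # Bitmessage variable-length integer, big-endian
    if n < 0xfd:
        return pack('>B', n)
    if n <= 0xffff:
        return pack('>BH', 0xfd, n)
    if n <= 0xffffffff:
        return pack('>BI', 0xfe, n)
    return pack('>BQ', 0xff, n)

# assembleErrorMessage() concatenates varint-prefixed fields in this order
# (per the protocol spec): fatal, banTime, inventoryVector, errorText
fatal, ban_time, inventory_vector, error_text = 2, 0, b'', b'too many connections'
payload = (encode_varint(fatal) + encode_varint(ban_time) +
           encode_varint(len(inventory_vector)) + inventory_vector +
           encode_varint(len(error_text)) + error_text)
assert payload[:3] == b'\x02\x00\x00'
```
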
# Packet decoding # Packet decoding
def decryptAndCheckPubkeyPayload(data, address): def decryptAndCheckPubkeyPayload(data, address):
""" """
Version 4 pubkeys are encrypted. This function is run when we already have the Version 4 pubkeys are encrypted. This function is run when we already have the
address to which we want to try to send a message. The 'data' may come either address to which we want to try to send a message. The 'data' may come either
off of the wire or we might have had it already in our inventory when we tried off of the wire or we might have had it already in our inventory when we tried
to send a msg to this particular address. to send a msg to this particular address.
""" """
# pylint: disable=unused-variable
try: try:
status, addressVersion, streamNumber, ripe = decodeAddress(address) status, addressVersion, streamNumber, ripe = decodeAddress(address)
readPosition = 20 # bypass the nonce, time, and object type readPosition = 20 # bypass the nonce, time, and object type
embeddedAddressVersion, varintLength = decodeVarint(data[readPosition:readPosition + 10]) embeddedAddressVersion, varintLength = decodeVarint(data[readPosition:readPosition + 10])
readPosition += varintLength readPosition += varintLength
embeddedStreamNumber, varintLength = decodeVarint(data[readPosition:readPosition + 10]) embeddedStreamNumber, varintLength = decodeVarint(data[readPosition:readPosition + 10])
readPosition += varintLength readPosition += varintLength
storedData = data[20:readPosition] # We'll store the address version and stream number (and some more) in the pubkeys table. # We'll store the address version and stream number (and some more) in the pubkeys table.
storedData = data[20:readPosition]
if addressVersion != embeddedAddressVersion: if addressVersion != embeddedAddressVersion:
logger.info('Pubkey decryption was UNsuccessful due to address version mismatch.') logger.info('Pubkey decryption was UNsuccessful due to address version mismatch.')
return 'failed' return 'failed'
if streamNumber != embeddedStreamNumber: if streamNumber != embeddedStreamNumber:
logger.info('Pubkey decryption was UNsuccessful due to stream number mismatch.') logger.info('Pubkey decryption was UNsuccessful due to stream number mismatch.')
return 'failed' return 'failed'
tag = data[readPosition:readPosition + 32] tag = data[readPosition:readPosition + 32]
readPosition += 32 readPosition += 32
signedData = data[8:readPosition] # the time through the tag. More data is appended onto signedData below after the decryption. # the time through the tag. More data is appended onto signedData below after the decryption.
signedData = data[8:readPosition]
encryptedData = data[readPosition:] encryptedData = data[readPosition:]
# Let us try to decrypt the pubkey # Let us try to decrypt the pubkey
toAddress, cryptorObject = state.neededPubkeys[tag] toAddress, cryptorObject = state.neededPubkeys[tag]
if toAddress != address: if toAddress != address:
logger.critical('decryptAndCheckPubkeyPayload failed due to toAddress mismatch. This is very peculiar. toAddress: %s, address %s', toAddress, address) logger.critical(
# the only way I can think that this could happen is if someone encodes their address data two different ways. 'decryptAndCheckPubkeyPayload failed due to toAddress mismatch.'
# That sort of address-malleability should have been caught by the UI or API and an error given to the user. ' This is very peculiar. toAddress: %s, address %s',
toAddress,
address)
# the only way I can think that this could happen is if someone encodes their address data two different
# ways. That sort of address-malleability should have been caught by the UI or API and an error given to
# the user.
return 'failed' return 'failed'
try: try:
decryptedData = cryptorObject.decrypt(encryptedData) decryptedData = cryptorObject.decrypt(encryptedData)
@ -307,7 +360,7 @@ def decryptAndCheckPubkeyPayload(data, address):
# but tagged it with a tag for which we are watching. # but tagged it with a tag for which we are watching.
logger.info('Pubkey decryption was unsuccessful.') logger.info('Pubkey decryption was unsuccessful.')
return 'failed' return 'failed'
readPosition = 0 readPosition = 0
bitfieldBehaviors = decryptedData[readPosition:readPosition + 4] bitfieldBehaviors = decryptedData[readPosition:readPosition + 4]
readPosition += 4 readPosition += 4
@ -327,52 +380,55 @@ def decryptAndCheckPubkeyPayload(data, address):
decryptedData[readPosition:readPosition + 10]) decryptedData[readPosition:readPosition + 10])
readPosition += signatureLengthLength readPosition += signatureLengthLength
signature = decryptedData[readPosition:readPosition + signatureLength] signature = decryptedData[readPosition:readPosition + signatureLength]
if highlevelcrypto.verify(signedData, signature, hexlify(publicSigningKey)): if highlevelcrypto.verify(signedData, signature, hexlify(publicSigningKey)):
logger.info('ECDSA verify passed (within decryptAndCheckPubkeyPayload)') logger.info('ECDSA verify passed (within decryptAndCheckPubkeyPayload)')
else: else:
logger.info('ECDSA verify failed (within decryptAndCheckPubkeyPayload)') logger.info('ECDSA verify failed (within decryptAndCheckPubkeyPayload)')
return 'failed' return 'failed'
sha = hashlib.new('sha512') sha = hashlib.new('sha512')
sha.update(publicSigningKey + publicEncryptionKey) sha.update(publicSigningKey + publicEncryptionKey)
ripeHasher = hashlib.new('ripemd160') ripeHasher = hashlib.new('ripemd160')
ripeHasher.update(sha.digest()) ripeHasher.update(sha.digest())
embeddedRipe = ripeHasher.digest() embeddedRipe = ripeHasher.digest()
if embeddedRipe != ripe: if embeddedRipe != ripe:
# Although this pubkey object had the tag were were looking for and was # Although this pubkey object had the tag were were looking for and was
# encrypted with the correct encryption key, it doesn't contain the # encrypted with the correct encryption key, it doesn't contain the
# correct pubkeys. Someone is either being malicious or using buggy software. # correct pubkeys. Someone is either being malicious or using buggy software.
logger.info('Pubkey decryption was UNsuccessful due to RIPE mismatch.') logger.info('Pubkey decryption was UNsuccessful due to RIPE mismatch.')
return 'failed' return 'failed'
# Everything checked out. Insert it into the pubkeys table. # Everything checked out. Insert it into the pubkeys table.
logger.info('within decryptAndCheckPubkeyPayload, addressVersion: %s, streamNumber: %s \n\ logger.info(
ripe %s\n\ os.linesep.join([
publicSigningKey in hex: %s\n\ 'within decryptAndCheckPubkeyPayload,'
publicEncryptionKey in hex: %s', addressVersion, ' addressVersion: %s, streamNumber: %s' % addressVersion, streamNumber,
streamNumber, 'ripe %s' % hexlify(ripe),
hexlify(ripe), 'publicSigningKey in hex: %s' % hexlify(publicSigningKey),
hexlify(publicSigningKey), 'publicEncryptionKey in hex: %s' % hexlify(publicEncryptionKey),
hexlify(publicEncryptionKey) ])
) )
t = (address, addressVersion, storedData, int(time.time()), 'yes') t = (address, addressVersion, storedData, int(time.time()), 'yes')
sqlExecute('''INSERT INTO pubkeys VALUES (?,?,?,?,?)''', *t) sqlExecute('''INSERT INTO pubkeys VALUES (?,?,?,?,?)''', *t)
return 'successful' return 'successful'
except varintDecodeError as e: except varintDecodeError:
logger.info('Pubkey decryption was UNsuccessful due to a malformed varint.') logger.info('Pubkey decryption was UNsuccessful due to a malformed varint.')
return 'failed' return 'failed'
except Exception as e: except Exception:
logger.critical('Pubkey decryption was UNsuccessful because of an unhandled exception! This is definitely a bug! \n%s', traceback.format_exc()) logger.critical(
'Pubkey decryption was UNsuccessful because of an unhandled exception! This is definitely a bug! \n%s',
traceback.format_exc())
return 'failed' return 'failed'
def checkAndShareObjectWithPeers(data): def checkAndShareObjectWithPeers(data):
""" """
This function is called after either receiving an object off of the wire This function is called after either receiving an object off of the wire
or after receiving one as ackdata. or after receiving one as ackdata.
Returns the length of time that we should reserve to process this message Returns the length of time that we should reserve to process this message
if we are receiving it off of the wire. if we are receiving it off of the wire.
""" """
@ -383,13 +439,16 @@ def checkAndShareObjectWithPeers(data):
if not isProofOfWorkSufficient(data): if not isProofOfWorkSufficient(data):
logger.info('Proof of work is insufficient.') logger.info('Proof of work is insufficient.')
return 0 return 0
endOfLifeTime, = unpack('>Q', data[8:16]) endOfLifeTime, = unpack('>Q', data[8:16])
if endOfLifeTime - int(time.time()) > 28 * 24 * 60 * 60 + 10800: # The TTL may not be larger than 28 days + 3 hours of wiggle room # The TTL may not be larger than 28 days + 3 hours of wiggle room
if endOfLifeTime - int(time.time()) > 28 * 24 * 60 * 60 + 10800:
logger.info('This object\'s End of Life time is too far in the future. Ignoring it. Time is %s', endOfLifeTime) logger.info('This object\'s End of Life time is too far in the future. Ignoring it. Time is %s', endOfLifeTime)
return 0 return 0
if endOfLifeTime - int(time.time()) < - 3600: # The EOL time was more than an hour ago. That's too much. if endOfLifeTime - int(time.time()) < - 3600: # The EOL time was more than an hour ago. That's too much.
logger.info('This object\'s End of Life time was more than an hour ago. Ignoring the object. Time is %s', endOfLifeTime) logger.info(
'This object\'s End of Life time was more than an hour ago. Ignoring the object. Time is %s',
endOfLifeTime)
return 0 return 0
intObjectType, = unpack('>I', data[16:20]) intObjectType, = unpack('>I', data[16:20])
try: try:
g1itch commented 2018-05-24 13:05:48 +02:00 (Migrated from github.com)

Same considerations about brackets and exc_info here and above...
coffeedogs commented 2018-05-24 17:25:55 +02:00 (Migrated from github.com)

Changed to the string formatting (incoming); same issue with changing functionality here.
@ -405,48 +464,54 @@ def checkAndShareObjectWithPeers(data):
elif intObjectType == 3: elif intObjectType == 3:
_checkAndShareBroadcastWithPeers(data) _checkAndShareBroadcastWithPeers(data)
return 0.6 return 0.6
else: _checkAndShareUndefinedObjectWithPeers(data)
_checkAndShareUndefinedObjectWithPeers(data) return 0.6
return 0.6 except varintDecodeError as err:
except varintDecodeError as e: logger.debug(
logger.debug("There was a problem with a varint while checking to see whether it was appropriate to share an object with peers. Some details: %s", e) "There was a problem with a varint while checking to see whether it was appropriate to share an object"
except Exception as e: " with peers. Some details: %s", err
logger.critical('There was a problem while checking to see whether it was appropriate to share an object with peers. This is definitely a bug! \n%s', traceback.format_exc()) )
except Exception:
logger.critical(
'There was a problem while checking to see whether it was appropriate to share an object with peers.'
' This is definitely a bug! %s%s' % os.linesep, traceback.format_exc()
)
return 0 return 0
def _checkAndShareUndefinedObjectWithPeers(data): def _checkAndShareUndefinedObjectWithPeers(data):
# pylint: disable=unused-variable
embeddedTime, = unpack('>Q', data[8:16]) embeddedTime, = unpack('>Q', data[8:16])
readPosition = 20 # bypass nonce, time, and object type readPosition = 20 # bypass nonce, time, and object type
objectVersion, objectVersionLength = decodeVarint( objectVersion, objectVersionLength = decodeVarint(
data[readPosition:readPosition + 9]) data[readPosition:readPosition + 9])
readPosition += objectVersionLength readPosition += objectVersionLength
streamNumber, streamNumberLength = decodeVarint( streamNumber, streamNumberLength = decodeVarint(
data[readPosition:readPosition + 9]) data[readPosition:readPosition + 9])
if not streamNumber in state.streamsInWhichIAmParticipating: if streamNumber not in state.streamsInWhichIAmParticipating:
logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber) logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
return return
inventoryHash = calculateInventoryHash(data) inventoryHash = calculateInventoryHash(data)
if inventoryHash in Inventory(): if inventoryHash in Inventory():
logger.debug('We have already received this undefined object. Ignoring.') logger.debug('We have already received this undefined object. Ignoring.')
return return
objectType, = unpack('>I', data[16:20]) objectType, = unpack('>I', data[16:20])
Inventory()[inventoryHash] = ( Inventory()[inventoryHash] = (
objectType, streamNumber, data, embeddedTime,'') objectType, streamNumber, data, embeddedTime, '')
logger.debug('advertising inv with hash: %s', hexlify(inventoryHash)) logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash)) broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
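
Every _checkAndShare*WithPeers() helper starts with the same parse: 8-byte nonce, 8-byte expiry time, 4-byte object type, then version and stream varints. A standalone sketch of that shared prefix; the varint decoder follows the protocol spec and stands in for decodeVarint() from addresses.py:

```python
from struct import unpack

def decode_varint(data):
    # returns (value, number of bytes consumed); no error handling in this sketch
    first = ord(data[0:1])
    if first < 0xfd:
        return first, 1
    if first == 0xfd:
        return unpack('>H', data[1:3])[0], 3
    if first == 0xfe:
        return unpack('>I', data[1:5])[0], 5
    return unpack('>Q', data[1:9])[0], 9

def parse_object_header(data):
    embedded_time, = unpack('>Q', data[8:16])     # expiry time
    object_type, = unpack('>I', data[16:20])
    read_position = 20                            # bypass nonce, time and object type
    version, length = decode_varint(data[read_position:read_position + 9])
    read_position += length
    stream, length = decode_varint(data[read_position:read_position + 9])
    read_position += length
    return embedded_time, object_type, version, stream, read_position
```
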
def _checkAndShareMsgWithPeers(data): def _checkAndShareMsgWithPeers(data):
embeddedTime, = unpack('>Q', data[8:16]) embeddedTime, = unpack('>Q', data[8:16])
readPosition = 20 # bypass nonce, time, and object type readPosition = 20 # bypass nonce, time, and object type
objectVersion, objectVersionLength = decodeVarint( objectVersion, objectVersionLength = decodeVarint( # pylint: disable=unused-variable
data[readPosition:readPosition + 9]) data[readPosition:readPosition + 9])
readPosition += objectVersionLength readPosition += objectVersionLength
streamNumber, streamNumberLength = decodeVarint( streamNumber, streamNumberLength = decodeVarint(
data[readPosition:readPosition + 9]) data[readPosition:readPosition + 9])
if not streamNumber in state.streamsInWhichIAmParticipating: if streamNumber not in state.streamsInWhichIAmParticipating:
logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber) logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
return return
readPosition += streamNumberLength readPosition += streamNumberLength
@ -457,14 +522,16 @@ def _checkAndShareMsgWithPeers(data):
# This msg message is valid. Let's let our peers know about it. # This msg message is valid. Let's let our peers know about it.
objectType = 2 objectType = 2
Inventory()[inventoryHash] = ( Inventory()[inventoryHash] = (
objectType, streamNumber, data, embeddedTime,'') objectType, streamNumber, data, embeddedTime, '')
logger.debug('advertising inv with hash: %s', hexlify(inventoryHash)) logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash)) broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
# Now let's enqueue it to be processed ourselves. # Now let's enqueue it to be processed ourselves.
objectProcessorQueue.put((objectType,data)) objectProcessorQueue.put((objectType, data))
def _checkAndShareGetpubkeyWithPeers(data): def _checkAndShareGetpubkeyWithPeers(data):
# pylint: disable=unused-variable
if len(data) < 42: if len(data) < 42:
logger.info('getpubkey message doesn\'t contain enough data. Ignoring.') logger.info('getpubkey message doesn\'t contain enough data. Ignoring.')
return return
@ -477,7 +544,7 @@ def _checkAndShareGetpubkeyWithPeers(data):
readPosition += addressVersionLength readPosition += addressVersionLength
streamNumber, streamNumberLength = decodeVarint( streamNumber, streamNumberLength = decodeVarint(
data[readPosition:readPosition + 10]) data[readPosition:readPosition + 10])
if not streamNumber in state.streamsInWhichIAmParticipating: if streamNumber not in state.streamsInWhichIAmParticipating:
logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber) logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
return return
readPosition += streamNumberLength readPosition += streamNumberLength
@ -489,13 +556,14 @@ def _checkAndShareGetpubkeyWithPeers(data):
objectType = 0 objectType = 0
Inventory()[inventoryHash] = ( Inventory()[inventoryHash] = (
objectType, streamNumber, data, embeddedTime,'') objectType, streamNumber, data, embeddedTime, '')
# This getpubkey request is valid. Forward to peers. # This getpubkey request is valid. Forward to peers.
logger.debug('advertising inv with hash: %s', hexlify(inventoryHash)) logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash)) broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
# Now let's queue it to be processed ourselves. # Now let's queue it to be processed ourselves.
objectProcessorQueue.put((objectType,data)) objectProcessorQueue.put((objectType, data))
def _checkAndSharePubkeyWithPeers(data): def _checkAndSharePubkeyWithPeers(data):
if len(data) < 146 or len(data) > 440: # sanity check if len(data) < 146 or len(data) > 440: # sanity check
@ -508,7 +576,7 @@ def _checkAndSharePubkeyWithPeers(data):
streamNumber, varintLength = decodeVarint( streamNumber, varintLength = decodeVarint(
data[readPosition:readPosition + 10]) data[readPosition:readPosition + 10])
readPosition += varintLength readPosition += varintLength
if not streamNumber in state.streamsInWhichIAmParticipating: if streamNumber not in state.streamsInWhichIAmParticipating:
logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber) logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
return return
if addressVersion >= 4: if addressVersion >= 4:
@ -528,14 +596,15 @@ def _checkAndSharePubkeyWithPeers(data):
logger.debug('advertising inv with hash: %s', hexlify(inventoryHash)) logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash)) broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
# Now let's queue it to be processed ourselves. # Now let's queue it to be processed ourselves.
objectProcessorQueue.put((objectType,data)) objectProcessorQueue.put((objectType, data))
def _checkAndShareBroadcastWithPeers(data): def _checkAndShareBroadcastWithPeers(data):
if len(data) < 180: if len(data) < 180:
logger.debug('The payload length of this broadcast packet is unreasonably low. Someone is probably trying funny business. Ignoring message.') logger.debug(
'The payload length of this broadcast packet is unreasonably low. '
'Someone is probably trying funny business. Ignoring message.')
return return
embeddedTime, = unpack('>Q', data[8:16]) embeddedTime, = unpack('>Q', data[8:16])
readPosition = 20 # bypass the nonce, time, and object type readPosition = 20 # bypass the nonce, time, and object type
@ -545,11 +614,11 @@ def _checkAndShareBroadcastWithPeers(data):
if broadcastVersion >= 2: if broadcastVersion >= 2:
streamNumber, streamNumberLength = decodeVarint(data[readPosition:readPosition + 10]) streamNumber, streamNumberLength = decodeVarint(data[readPosition:readPosition + 10])
readPosition += streamNumberLength readPosition += streamNumberLength
if not streamNumber in state.streamsInWhichIAmParticipating: if streamNumber not in state.streamsInWhichIAmParticipating:
logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber) logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
return return
if broadcastVersion >= 3: if broadcastVersion >= 3:
tag = data[readPosition:readPosition+32] tag = data[readPosition:readPosition + 32]
else: else:
tag = '' tag = ''
inventoryHash = calculateInventoryHash(data) inventoryHash = calculateInventoryHash(data)
@ -565,23 +634,26 @@ def _checkAndShareBroadcastWithPeers(data):
broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash)) broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
# Now let's queue it to be processed ourselves. # Now let's queue it to be processed ourselves.
objectProcessorQueue.put((objectType,data)) objectProcessorQueue.put((objectType, data))
# If you want to command all of the sendDataThreads to do something, like shutdown or send some data, this
# function puts your data into the queues for each of the sendDataThreads. The sendDataThreads are
# responsible for putting their queue into (and out of) the sendDataQueues list.
def broadcastToSendDataQueues(data): def broadcastToSendDataQueues(data):
# logger.debug('running broadcastToSendDataQueues') """
If you want to command all of the sendDataThreads to do something, like shutdown or send some data, this
function puts your data into the queues for each of the sendDataThreads. The sendDataThreads are
responsible for putting their queue into (and out of) the sendDataQueues list.
"""
for q in state.sendDataQueues: for q in state.sendDataQueues:
q.put(data) q.put(data)
# sslProtocolVersion # sslProtocolVersion
if sys.version_info >= (2,7,13): if sys.version_info >= (2, 7, 13):
# this means TLSv1 or higher # this means TLSv1 or higher
# in the future change to # in the future change to
# ssl.PROTOCOL_TLS1.2 # ssl.PROTOCOL_TLS1.2
sslProtocolVersion = ssl.PROTOCOL_TLS sslProtocolVersion = ssl.PROTOCOL_TLS # pylint: disable=no-member
elif sys.version_info >= (2,7,9): elif sys.version_info >= (2, 7, 9):
# this means any SSL/TLS. SSLv2 and 3 are excluded with an option after context is created # this means any SSL/TLS. SSLv2 and 3 are excluded with an option after context is created
sslProtocolVersion = ssl.PROTOCOL_SSLv23 sslProtocolVersion = ssl.PROTOCOL_SSLv23
else: else:
@ -589,6 +661,7 @@ else:
# "TLSv1.2" in < 2.7.9 # "TLSv1.2" in < 2.7.9
sslProtocolVersion = ssl.PROTOCOL_TLSv1 sslProtocolVersion = ssl.PROTOCOL_TLSv1
# ciphers # ciphers
if ssl.OPENSSL_VERSION_NUMBER >= 0x10100000 and not ssl.OPENSSL_VERSION.startswith("LibreSSL"): if ssl.OPENSSL_VERSION_NUMBER >= 0x10100000 and not ssl.OPENSSL_VERSION.startswith("LibreSSL"):
sslProtocolCiphers = "AECDH-AES256-SHA@SECLEVEL=0" sslProtocolCiphers = "AECDH-AES256-SHA@SECLEVEL=0"

View File

@ -1,106 +1,144 @@
import hashlib, re # pylint: disable=missing-docstring,too-many-function-args
P = 2**256-2**32-2**9-2**8-2**7-2**6-2**4-1 import hashlib
import re
P = 2**256 - 2**32 - 2**9 - 2**8 - 2**7 - 2**6 - 2**4 - 1
A = 0 A = 0
Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240 Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240
Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424 Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424
G = (Gx,Gy) G = (Gx, Gy)
def inv(a, n):
lm, hm = 1, 0
low, high = a % n, n
while low > 1:
r = high / low
nm, new = hm - lm * r, high - low * r
lm, low, hm, high = nm, new, lm, low
return lm % n
def inv(a,n):
lm, hm = 1,0
low, high = a%n,n
while low > 1:
r = high/low
nm, new = hm-lm*r, high-low*r
lm, low, hm, high = nm, new, lm, low
return lm % n
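
inv() is the extended-Euclidean modular inverse used throughout the curve arithmetic; the only property its callers rely on is a * inv(a, P) ≡ 1 (mod P). A quick standalone check (written with // so it also holds under Python 3 division):

```python
P = 2**256 - 2**32 - 2**9 - 2**8 - 2**7 - 2**6 - 2**4 - 1  # the secp256k1 field prime

def inv(a, n):
    # extended Euclid, same algorithm as above (integer division intended)
    lm, hm = 1, 0
    low, high = a % n, n
    while low > 1:
        r = high // low
        nm, new = hm - lm * r, high - low * r
        lm, low, hm, high = nm, new, lm, low
    return lm % n

a = 123456789
assert (a * inv(a, P)) % P == 1
```
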
def get_code_string(base): def get_code_string(base):
if base == 2: return '01' if base == 2:
elif base == 10: return '0123456789' return '01'
elif base == 16: return "0123456789abcdef" elif base == 10:
elif base == 58: return "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" return '0123456789'
elif base == 256: return ''.join([chr(x) for x in range(256)]) elif base == 16:
else: raise ValueError("Invalid base!") return "0123456789abcdef"
elif base == 58:
return "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
elif base == 256:
return ''.join([chr(x) for x in range(256)])
else:
raise ValueError("Invalid base!")
def encode(val,base,minlen=0):
code_string = get_code_string(base)
result = ""
while val > 0:
result = code_string[val % base] + result
val /= base
if len(result) < minlen:
result = code_string[0]*(minlen-len(result))+result
return result
def decode(string,base): def encode(val, base, minlen=0):
code_string = get_code_string(base) code_string = get_code_string(base)
result = 0 result = ""
if base == 16: string = string.lower() while val > 0:
while len(string) > 0: result = code_string[val % base] + result
result *= base val /= base
result += code_string.find(string[0]) if len(result) < minlen:
string = string[1:] result = code_string[0] * (minlen - len(result)) + result
return result return result
def decode(string, base):
code_string = get_code_string(base)
result = 0
if base == 16:
string = string.lower()
while string:
result *= base
result += code_string.find(string[0])
string = string[1:]
return result
def changebase(string, frm, to, minlen=0):
return encode(decode(string, frm), to, minlen)
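
encode()/decode()/changebase() are plain positional base conversion over the alphabets returned by get_code_string(); for example, the base-58 alphabet has 'z' as its last symbol, so 57 encodes to 'z' and 58 to '21'. A simplified, self-contained version for illustration:

```python
def encode(val, base, code_string, minlen=0):
    # simplified sketch of encode()/get_code_string() above
    result = ""
    while val > 0:
        val, r = divmod(val, base)
        result = code_string[r] + result
    return code_string[0] * max(0, minlen - len(result)) + result

B58 = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
assert encode(255, 16, "0123456789abcdef") == "ff"
assert encode(57, 58, B58) == "z"
assert encode(58, 58, B58) == "21"
```
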
def base10_add(a, b):
if a is None:
return b[0], b[1]
if b is None:
return a[0], a[1]
if a[0] == b[0]:
if a[1] == b[1]:
return base10_double(a[0], a[1])
return None
m = ((b[1] - a[1]) * inv(b[0] - a[0], P)) % P
x = (m * m - a[0] - b[0]) % P
y = (m * (a[0] - x) - a[1]) % P
return (x, y)
def changebase(string,frm,to,minlen=0):
return encode(decode(string,frm),to,minlen)
def base10_add(a,b):
if a == None: return b[0],b[1]
if b == None: return a[0],a[1]
if a[0] == b[0]:
if a[1] == b[1]: return base10_double(a[0],a[1])
else: return None
m = ((b[1]-a[1]) * inv(b[0]-a[0],P)) % P
x = (m*m-a[0]-b[0]) % P
y = (m*(a[0]-x)-a[1]) % P
return (x,y)
def base10_double(a): def base10_double(a):
if a == None: return None if a is None:
m = ((3*a[0]*a[0]+A)*inv(2*a[1],P)) % P return None
x = (m*m-2*a[0]) % P m = ((3 * a[0] * a[0] + A) * inv(2 * a[1], P)) % P
y = (m*(a[0]-x)-a[1]) % P x = (m * m - 2 * a[0]) % P
return (x,y) y = (m * (a[0] - x) - a[1]) % P
return (x, y)
def base10_multiply(a,n):
if n == 0: return G
if n == 1: return a
if (n%2) == 0: return base10_double(base10_multiply(a,n/2))
if (n%2) == 1: return base10_add(base10_double(base10_multiply(a,n/2)),a)
def hex_to_point(h): return (decode(h[2:66],16),decode(h[66:],16)) def base10_multiply(a, n):
if n == 0:
return G
if n == 1:
return a
if (n % 2) == 0:
return base10_double(base10_multiply(a, n / 2))
if (n % 2) == 1:
return base10_add(base10_double(base10_multiply(a, n / 2)), a)
return None
def point_to_hex(p): return '04'+encode(p[0],16,64)+encode(p[1],16,64)
def multiply(privkey,pubkey): def hex_to_point(h):
return point_to_hex(base10_multiply(hex_to_point(pubkey),decode(privkey,16))) return (decode(h[2:66], 16), decode(h[66:], 16))
def point_to_hex(p):
return '04' + encode(p[0], 16, 64) + encode(p[1], 16, 64)
def multiply(privkey, pubkey):
return point_to_hex(base10_multiply(hex_to_point(pubkey), decode(privkey, 16)))
def privtopub(privkey): def privtopub(privkey):
return point_to_hex(base10_multiply(G,decode(privkey,16))) return point_to_hex(base10_multiply(G, decode(privkey, 16)))
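
base10_add()/base10_double()/base10_multiply() implement the secp256k1 group law in affine coordinates, and privtopub() is just scalar multiplication of the base point G. A compact standalone sketch of the same idea, using None explicitly for the point at infinity; the curve constants are those at the top of arithmetic.py, plus the well-known secp256k1 group order, which does not appear in this diff:

```python
P = 2**256 - 2**32 - 977   # secp256k1 field prime
A = 0
G = (55066263022277343669578718895168534326250603453777594175500187360389116729240,
     32670510020758816978083085130507043184471273380659243275938904335757337482424)
N = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141  # group order

def ec_add(p, q):
    # affine group law; None is the point at infinity
    if p is None:
        return q
    if q is None:
        return p
    if p[0] == q[0] and (p[1] + q[1]) % P == 0:
        return None
    if p == q:
        m = (3 * p[0] * p[0] + A) * pow(2 * p[1], P - 2, P) % P
    else:
        m = (q[1] - p[1]) * pow(q[0] - p[0], P - 2, P) % P
    x = (m * m - p[0] - q[0]) % P
    return (x, (m * (p[0] - x) - p[1]) % P)

def ec_mul(p, n):
    # double-and-add, the same idea as base10_multiply()
    result, addend = None, p
    while n:
        if n & 1:
            result = ec_add(result, addend)
        addend = ec_add(addend, addend)
        n >>= 1
    return result

assert ec_mul(G, 2) == ec_add(G, G)
assert ec_mul(G, N) is None        # order * G is the point at infinity
```
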
def add(p1, p2):
if len(p1) == 32:
return encode(decode(p1, 16) + decode(p2, 16) % P, 16, 32)
return point_to_hex(base10_add(hex_to_point(p1), hex_to_point(p2)))
def add(p1,p2):
if (len(p1)==32):
return encode(decode(p1,16) + decode(p2,16) % P,16,32)
else:
return point_to_hex(base10_add(hex_to_point(p1),hex_to_point(p2)))
def hash_160(string): def hash_160(string):
intermed = hashlib.sha256(string).digest() intermed = hashlib.sha256(string).digest()
g1itch commented 2018-05-24 14:24:41 +02:00 (Migrated from github.com)

Strange call. `base10_double()` is defined for only one arg below.
g1itch commented 2018-05-24 14:30:19 +02:00 (Migrated from github.com)

@PeterSurda this looks like unused code

```
>>> import arithmetic
>>> arithmetic.base10_double(0, 1)
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
TypeError: base10_double() takes exactly 1 argument (2 given)
>>> arithmetic.base10_add([0, 1], [0, 1])
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "arithmetic.py", line 53, in base10_add
    if a[1] == b[1]: return base10_double(a[0],a[1])
TypeError: base10_double() takes exactly 1 argument (2 given)
```
coffeedogs commented 2018-05-24 17:58:59 +02:00 (Migrated from github.com)

Try a list of two items:

```
>>> arithmetic.base10_double([0, 1])
(0L, 115792089237316195423570985008687907853269984665640564039457584007908834671662L)
```

Although the function is unused (except by arithmetic.base10_add()), there are other functions in there that are not used either. While removing cruft does fall under linting, I'd argue that applies to our own cruft and that we should be using the pyelliptic library rather than copy/pasting it into our code and hacking it.

Perhaps we could open an issue to deal with this separately? Updates from upstream would be easier to manage if we added a dependency or left the code as-is to allow copy/paste from upstream. Having to copy/paste from upstream when we've hacked the files makes updating harder.

If you read this and still want the unused code removed, I will do it.
coffeedogs commented 2018-05-24 18:08:08 +02:00 (Migrated from github.com)

It looks like this bug has been in upstream since day 1.
PeterSurda commented 2018-06-12 00:08:49 +02:00 (Migrated from github.com)

Upstream is abandoned, sadly. We have to carry the flag.
ripemd160 = hashlib.new('ripemd160') ripemd160 = hashlib.new('ripemd160')
ripemd160.update(intermed) ripemd160.update(intermed)
return ripemd160.digest() return ripemd160.digest()
def dbl_sha256(string): def dbl_sha256(string):
return hashlib.sha256(hashlib.sha256(string).digest()).digest() return hashlib.sha256(hashlib.sha256(string).digest()).digest()
def bin_to_b58check(inp):
inp_fmtd = '\x00' + inp def bin_to_b58check(inp):
leadingzbytes = len(re.match('^\x00*',inp_fmtd).group(0)) inp_fmtd = '\x00' + inp
checksum = dbl_sha256(inp_fmtd)[:4] leadingzbytes = len(re.match('^\x00*', inp_fmtd).group(0))
return '1' * leadingzbytes + changebase(inp_fmtd+checksum,256,58) checksum = dbl_sha256(inp_fmtd)[:4]
return '1' * leadingzbytes + changebase(inp_fmtd + checksum, 256, 58)
# Convert a public key (in hex) to a Bitcoin address
#Convert a public key (in hex) to a Bitcoin address
def pubkey_to_address(pubkey): def pubkey_to_address(pubkey):
return bin_to_b58check(hash_160(changebase(pubkey,16,256))) return bin_to_b58check(hash_160(changebase(pubkey, 16, 256)))