From ab1dd319e3b74a539aad7d29cfff37bfe128e19b Mon Sep 17 00:00:00 2001 From: coffeedogs Date: Tue, 22 May 2018 11:34:01 +0100 Subject: [PATCH 1/2] Fixed: Code style and lint fixes --- setup.cfg | 13 +- src/protocol.py | 322 +++++++++++++++++++++-------------- src/pyelliptic/arithmetic.py | 200 +++++++++++++--------- 3 files changed, 322 insertions(+), 213 deletions(-) diff --git a/setup.cfg b/setup.cfg index 32abcdc7..93efd2d5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,10 +1,17 @@ +# Since there is overlap in the violations that the different tools check for, it makes sense to quiesce some warnings +# in some tools if those warnings in other tools are preferred. This avoids the need to add duplicate lint warnings. + [pycodestyle] max-line-length = 119 [flake8] max-line-length = 119 -ignore = E722 +ignore = E722,F841 +# E722: pylint is preferred for bare-except +# F841: pylint is preferred for unused-variable -# pylint +# pylint honours the [MESSAGES CONTROL] section [MESSAGES CONTROL] -disable=invalid-name,bare-except +disable=invalid-name,bare-except,broad-except +# invalid-name: needs fixing during a large, project-wide refactor +# bare-except,broad-except: Need fixing once thorough testing is easier diff --git a/src/protocol.py b/src/protocol.py index dca4c942..eb19a262 100644 --- a/src/protocol.py +++ b/src/protocol.py @@ -1,3 +1,13 @@ +# pylint: disable=too-many-boolean-expressions,too-many-return-statements,too-many-locals,too-many-statements +""" +protocol.py +=========== + +Low-level protocol-related functions. +""" + +from __future__ import absolute_import + import base64 from binascii import hexlify import hashlib @@ -9,31 +19,32 @@ import sys import time import traceback +import defaults +import highlevelcrypto +import state from addresses import calculateInventoryHash, encodeVarint, decodeVarint, decodeAddress, varintDecodeError from bmconfigparser import BMConfigParser from debug import logger -import defaults from helper_sql import sqlExecute -import highlevelcrypto from inventory import Inventory from queues import objectProcessorQueue -import state from version import softwareVersion -#Service flags + +# Service flags NODE_NETWORK = 1 NODE_SSL = 2 NODE_DANDELION = 8 -#Bitfield flags +# Bitfield flags BITFIELD_DOESACK = 1 -#Error types +# Error types STATUS_WARNING = 0 STATUS_ERROR = 1 STATUS_FATAL = 2 -#Object types +# Object types OBJECT_GETPUBKEY = 0 OBJECT_PUBKEY = 1 OBJECT_MSG = 2 @@ -44,15 +55,17 @@ OBJECT_ADDR = 0x61646472 eightBytesOfRandomDataUsedToDetectConnectionsToSelf = pack( '>Q', random.randrange(1, 18446744073709551615)) -#Compiled struct for packing/unpacking headers -#New code should use CreatePacket instead of Header.pack +# Compiled struct for packing/unpacking headers +# New code should use CreatePacket instead of Header.pack Header = Struct('!L12sL4s') VersionPacket = Struct('>LqQ20s4s36sH') # Bitfield + def getBitfield(address): + """Get a bitfield from an address""" # bitfield of features supported by me (see the wiki). 
     bitfield = 0
     # send ack
@@ -60,36 +73,42 @@ def getBitfield(address):
         bitfield |= BITFIELD_DOESACK
     return pack('>I', bitfield)
 
+
 def checkBitfield(bitfieldBinary, flags):
+    """Check if a bitfield matches the given flags"""
     bitfield, = unpack('>I', bitfieldBinary)
     return (bitfield & flags) == flags
 
+
 def isBitSetWithinBitfield(fourByteString, n):
+    """Check if a particular bit is set in a bitfield"""
     # Uses MSB 0 bit numbering across 4 bytes of data
     n = 31 - n
     x, = unpack('>L', fourByteString)
     return x & 2**n != 0
 
-# ip addresses
 
 def encodeHost(host):
+    """Encode a given host to be used in low-level socket operations"""
     if host.find('.onion') > -1:
         return '\xfd\x87\xd8\x7e\xeb\x43' + base64.b32decode(host.split(".")[0], True)
     elif host.find(':') == -1:
         return '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + \
             socket.inet_aton(host)
-    else:
-        return socket.inet_pton(socket.AF_INET6, host)
+    return socket.inet_pton(socket.AF_INET6, host)
+
 
 def networkType(host):
+    """Determine if a host is IPv4, IPv6 or an onion address"""
     if host.find('.onion') > -1:
         return 'onion'
     elif host.find(':') == -1:
         return 'IPv4'
-    else:
-        return 'IPv6'
+    return 'IPv6'
+
 
 def checkIPAddress(host, private=False):
+    """Returns hostStandardFormat if it is a valid IP address, otherwise returns False"""
     if host[0:12] == '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF':
         hostStandardFormat = socket.inet_ntop(socket.AF_INET, host[12:])
         return checkIPv4Address(host[12:], hostStandardFormat, private)
@@ -105,56 +124,66 @@ def checkIPAddress(host, private=False):
         except ValueError:
             return False
         if hostStandardFormat == "":
-            # This can happen on Windows systems which are not 64-bit compatible
-            # so let us drop the IPv6 address.
+            # This can happen on Windows systems which are not 64-bit compatible
+            # so let us drop the IPv6 address.
return False return checkIPv6Address(host, hostStandardFormat, private) + def checkIPv4Address(host, hostStandardFormat, private=False): - if host[0] == '\x7F': # 127/8 + """Returns hostStandardFormat if it is an IPv4 address, otherwise returns False""" + if host[0] == '\x7F': # 127/8 if not private: - logger.debug('Ignoring IP address in loopback range: ' + hostStandardFormat) + logger.debug('Ignoring IP address in loopback range: %s', hostStandardFormat) return hostStandardFormat if private else False - if host[0] == '\x0A': # 10/8 + if host[0] == '\x0A': # 10/8 if not private: - logger.debug('Ignoring IP address in private range: ' + hostStandardFormat) + logger.debug('Ignoring IP address in private range: %s', hostStandardFormat) return hostStandardFormat if private else False - if host[0:2] == '\xC0\xA8': # 192.168/16 + if host[0:2] == '\xC0\xA8': # 192.168/16 if not private: - logger.debug('Ignoring IP address in private range: ' + hostStandardFormat) + logger.debug('Ignoring IP address in private range: %s', hostStandardFormat) return hostStandardFormat if private else False - if host[0:2] >= '\xAC\x10' and host[0:2] < '\xAC\x20': # 172.16/12 + if host[0:2] >= '\xAC\x10' and host[0:2] < '\xAC\x20': # 172.16/12 if not private: - logger.debug('Ignoring IP address in private range:' + hostStandardFormat) + logger.debug('Ignoring IP address in private range: %s', hostStandardFormat) return hostStandardFormat if private else False return False if private else hostStandardFormat + def checkIPv6Address(host, hostStandardFormat, private=False): + """Returns hostStandardFormat if it is an IPv6 address, otherwise returns False""" if host == ('\x00' * 15) + '\x01': if not private: - logger.debug('Ignoring loopback address: ' + hostStandardFormat) + logger.debug('Ignoring loopback address: %s', hostStandardFormat) return False if host[0] == '\xFE' and (ord(host[1]) & 0xc0) == 0x80: if not private: - logger.debug ('Ignoring local address: ' + hostStandardFormat) + logger.debug('Ignoring local address: %s', hostStandardFormat) return hostStandardFormat if private else False if (ord(host[0]) & 0xfe) == 0xfc: if not private: - logger.debug ('Ignoring unique local address: ' + hostStandardFormat) + logger.debug('Ignoring unique local address: %s', hostStandardFormat) return hostStandardFormat if private else False return False if private else hostStandardFormat -# checks -def haveSSL(server = False): - # python < 2.7.9's ssl library does not support ECDSA server due to missing initialisation of available curves, but client works ok - if server == False: +def haveSSL(server=False): + """ + Predicate to check if ECDSA server support is required and available + + python < 2.7.9's ssl library does not support ECDSA server due to + missing initialisation of available curves, but client works ok + """ + if not server: return True - elif sys.version_info >= (2,7,9): + elif sys.version_info >= (2, 7, 9): return True return False + def checkSocksIP(host): + """Predicate to check if we're using a SOCKS proxy""" try: if state.socksIP is None or not state.socksIP: state.socksIP = socket.gethostbyname(BMConfigParser().get("bitmessagesettings", "sockshostname")) @@ -166,6 +195,7 @@ def checkSocksIP(host): state.socksIP = BMConfigParser().get("bitmessagesettings", "sockshostname") return state.socksIP == host + def isProofOfWorkSufficient(data, nonceTrialsPerByte=0, payloadLengthExtraBytes=0): @@ -178,34 +208,39 @@ def isProofOfWorkSufficient(data, if TTL < 300: TTL = 300 POW, = unpack('>Q', 
hashlib.sha512(hashlib.sha512(data[
-        :8] + hashlib.sha512(data[8:]).digest()).digest()).digest()[0:8])
-    return POW <= 2 ** 64 / (nonceTrialsPerByte*(len(data) + payloadLengthExtraBytes + ((TTL*(len(data)+payloadLengthExtraBytes))/(2 ** 16))))
+        :8] + hashlib.sha512(data[8:]).digest()).digest()).digest()[0:8])
+    return POW <= 2 ** 64 / (nonceTrialsPerByte *
+                             (len(data) + payloadLengthExtraBytes +
+                              ((TTL * (len(data) + payloadLengthExtraBytes)) / (2 ** 16))))
 
-# Packet creation
 
 def CreatePacket(command, payload=''):
+    """Construct and return a packet from a command name and a payload"""
     payload_length = len(payload)
     checksum = hashlib.sha512(payload).digest()[0:4]
-    
+
     b = bytearray(Header.size + payload_length)
     Header.pack_into(b, 0, 0xE9BEB4D9, command, payload_length, checksum)
     b[Header.size:] = payload
     return bytes(b)
 
-def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server = False, nodeid = None):
+
+def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server=False, nodeid=None):
+    """Construct the payload of a version message, return the resulting bytes of running CreatePacket() on it"""
     payload = ''
     payload += pack('>L', 3)  # protocol version.
     # bitflags of the services I offer.
-    payload += pack('>q',
-                    NODE_NETWORK |
-                    (NODE_SSL if haveSSL(server) else 0) |
-                    (NODE_DANDELION if state.dandelion else 0)
-                    )
+    payload += pack(
+        '>q',
+        NODE_NETWORK |
+        (NODE_SSL if haveSSL(server) else 0) |
+        (NODE_DANDELION if state.dandelion else 0)
+    )
     payload += pack('>q', int(time.time()))
     payload += pack(
         '>q', 1)  # boolservices of remote connection; ignored by the remote host.
-    if checkSocksIP(remoteHost) and server: # prevent leaking of tor outbound IP
+    if checkSocksIP(remoteHost) and server:  # prevent leaking of tor outbound IP
         payload += encodeHost('127.0.0.1')
         payload += pack('>H', 8444)
     else:
@@ -213,23 +248,22 @@ def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server
         payload += pack('>H', remotePort)  # remote IPv6 and port
 
     # bitflags of the services I offer.
-    payload += pack('>q',
-                    NODE_NETWORK |
-                    (NODE_SSL if haveSSL(server) else 0) |
-                    (NODE_DANDELION if state.dandelion else 0)
-                    )
-    payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + pack(
-        '>L', 2130706433) # = 127.0.0.1. This will be ignored by the remote host. The actual remote connected IP will be used.
-    # we have a separate extPort and
-    # incoming over clearnet or
-    # outgoing through clearnet
+    payload += pack(
+        '>q',
+        NODE_NETWORK |
+        (NODE_SSL if haveSSL(server) else 0) |
+        (NODE_DANDELION if state.dandelion else 0)
+    )
+    # = 127.0.0.1. This will be ignored by the remote host. The actual remote connected IP will be used.
+    payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF' + pack('>L', 2130706433)
+    # we have a separate extPort and incoming over clearnet or outgoing through clearnet
     if BMConfigParser().safeGetBoolean('bitmessagesettings', 'upnp') and state.extPort \
-        and ((server and not checkSocksIP(remoteHost)) or \
-        (BMConfigParser().get("bitmessagesettings", "socksproxytype") == "none" and not server)):
+            and ((server and not checkSocksIP(remoteHost)) or
+                 (BMConfigParser().get("bitmessagesettings", "socksproxytype") == "none" and not server)):
         payload += pack('>H', state.extPort)
-    elif checkSocksIP(remoteHost) and server: # incoming connection over Tor
+    elif checkSocksIP(remoteHost) and server:  # incoming connection over Tor
         payload += pack('>H', BMConfigParser().getint('bitmessagesettings', 'onionport'))
-    else: # no extPort and not incoming over Tor
+    else:  # no extPort and not incoming over Tor
         payload += pack('>H', BMConfigParser().getint('bitmessagesettings', 'port'))
 
     random.seed()
@@ -253,7 +287,9 @@ def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server
 
     return CreatePacket('version', payload)
 
+
 def assembleErrorMessage(fatal=0, banTime=0, inventoryVector='', errorText=''):
+    """Construct the payload of an error message, return the resulting bytes of running CreatePacket() on it"""
     payload = encodeVarint(fatal)
     payload += encodeVarint(banTime)
     payload += encodeVarint(len(inventoryVector))
@@ -262,43 +298,50 @@ def assembleErrorMessage(fatal=0, banTime=0, inventoryVector='', errorText=''):
     payload += errorText
     return CreatePacket('error', payload)
 
-# Packet decoding
 
 def decryptAndCheckPubkeyPayload(data, address):
     """
-    Version 4 pubkeys are encrypted. This function is run when we already have the 
+    Version 4 pubkeys are encrypted. This function is run when we already have the
    address to which we want to try to send a message. The 'data' may come either
    off of the wire or we might have had it already in our inventory when we tried
-    to send a msg to this particular address. 
+    to send a msg to this particular address.
     """
+    # pylint: disable=unused-variable
     try:
         status, addressVersion, streamNumber, ripe = decodeAddress(address)
-        
+
         readPosition = 20  # bypass the nonce, time, and object type
         embeddedAddressVersion, varintLength = decodeVarint(data[readPosition:readPosition + 10])
         readPosition += varintLength
         embeddedStreamNumber, varintLength = decodeVarint(data[readPosition:readPosition + 10])
         readPosition += varintLength
-        storedData = data[20:readPosition] # We'll store the address version and stream number (and some more) in the pubkeys table.
-        
+        # We'll store the address version and stream number (and some more) in the pubkeys table.
+        storedData = data[20:readPosition]
+
         if addressVersion != embeddedAddressVersion:
             logger.info('Pubkey decryption was UNsuccessful due to address version mismatch.')
             return 'failed'
         if streamNumber != embeddedStreamNumber:
             logger.info('Pubkey decryption was UNsuccessful due to stream number mismatch.')
             return 'failed'
-        
+
         tag = data[readPosition:readPosition + 32]
         readPosition += 32
-        signedData = data[8:readPosition] # the time through the tag. More data is appended onto signedData below after the decryption.
+        # the time through the tag. More data is appended onto signedData below after the decryption.
+ signedData = data[8:readPosition] encryptedData = data[readPosition:] - + # Let us try to decrypt the pubkey toAddress, cryptorObject = state.neededPubkeys[tag] if toAddress != address: - logger.critical('decryptAndCheckPubkeyPayload failed due to toAddress mismatch. This is very peculiar. toAddress: %s, address %s', toAddress, address) - # the only way I can think that this could happen is if someone encodes their address data two different ways. - # That sort of address-malleability should have been caught by the UI or API and an error given to the user. + logger.critical( + ('decryptAndCheckPubkeyPayload failed due to toAddress mismatch. ' + 'This is very peculiar. toAddress: %s, address %s'), + toAddress, + address) + # the only way I can think that this could happen is if someone encodes their address data two different + # ways. That sort of address-malleability should have been caught by the UI or API and an error given to + # the user. return 'failed' try: decryptedData = cryptorObject.decrypt(encryptedData) @@ -307,7 +350,7 @@ def decryptAndCheckPubkeyPayload(data, address): # but tagged it with a tag for which we are watching. logger.info('Pubkey decryption was unsuccessful.') return 'failed' - + readPosition = 0 bitfieldBehaviors = decryptedData[readPosition:readPosition + 4] readPosition += 4 @@ -327,52 +370,56 @@ def decryptAndCheckPubkeyPayload(data, address): decryptedData[readPosition:readPosition + 10]) readPosition += signatureLengthLength signature = decryptedData[readPosition:readPosition + signatureLength] - + if highlevelcrypto.verify(signedData, signature, hexlify(publicSigningKey)): logger.info('ECDSA verify passed (within decryptAndCheckPubkeyPayload)') else: logger.info('ECDSA verify failed (within decryptAndCheckPubkeyPayload)') return 'failed' - + sha = hashlib.new('sha512') sha.update(publicSigningKey + publicEncryptionKey) ripeHasher = hashlib.new('ripemd160') ripeHasher.update(sha.digest()) embeddedRipe = ripeHasher.digest() - + if embeddedRipe != ripe: # Although this pubkey object had the tag were were looking for and was # encrypted with the correct encryption key, it doesn't contain the # correct pubkeys. Someone is either being malicious or using buggy software. logger.info('Pubkey decryption was UNsuccessful due to RIPE mismatch.') return 'failed' - + # Everything checked out. Insert it into the pubkeys table. - - logger.info('within decryptAndCheckPubkeyPayload, addressVersion: %s, streamNumber: %s \n\ - ripe %s\n\ - publicSigningKey in hex: %s\n\ - publicEncryptionKey in hex: %s', addressVersion, - streamNumber, - hexlify(ripe), - hexlify(publicSigningKey), - hexlify(publicEncryptionKey) - ) - + + logger.info( + 'within decryptAndCheckPubkeyPayload, addressVersion: %s, streamNumber: %s \n\ + ripe %s\n\ + publicSigningKey in hex: %s\n\ + publicEncryptionKey in hex: %s', addressVersion, + streamNumber, + hexlify(ripe), + hexlify(publicSigningKey), + hexlify(publicEncryptionKey) + ) + t = (address, addressVersion, storedData, int(time.time()), 'yes') sqlExecute('''INSERT INTO pubkeys VALUES (?,?,?,?,?)''', *t) return 'successful' - except varintDecodeError as e: + except varintDecodeError: logger.info('Pubkey decryption was UNsuccessful due to a malformed varint.') return 'failed' - except Exception as e: - logger.critical('Pubkey decryption was UNsuccessful because of an unhandled exception! This is definitely a bug! 
\n%s', traceback.format_exc()) + except Exception: + logger.critical( + 'Pubkey decryption was UNsuccessful because of an unhandled exception! This is definitely a bug! \n%s', + traceback.format_exc()) return 'failed' + def checkAndShareObjectWithPeers(data): """ This function is called after either receiving an object off of the wire - or after receiving one as ackdata. + or after receiving one as ackdata. Returns the length of time that we should reserve to process this message if we are receiving it off of the wire. """ @@ -383,13 +430,16 @@ def checkAndShareObjectWithPeers(data): if not isProofOfWorkSufficient(data): logger.info('Proof of work is insufficient.') return 0 - + endOfLifeTime, = unpack('>Q', data[8:16]) - if endOfLifeTime - int(time.time()) > 28 * 24 * 60 * 60 + 10800: # The TTL may not be larger than 28 days + 3 hours of wiggle room + # The TTL may not be larger than 28 days + 3 hours of wiggle room + if endOfLifeTime - int(time.time()) > 28 * 24 * 60 * 60 + 10800: logger.info('This object\'s End of Life time is too far in the future. Ignoring it. Time is %s', endOfLifeTime) return 0 - if endOfLifeTime - int(time.time()) < - 3600: # The EOL time was more than an hour ago. That's too much. - logger.info('This object\'s End of Life time was more than an hour ago. Ignoring the object. Time is %s', endOfLifeTime) + if endOfLifeTime - int(time.time()) < - 3600: # The EOL time was more than an hour ago. That's too much. + logger.info( + 'This object\'s End of Life time was more than an hour ago. Ignoring the object. Time is %s', + endOfLifeTime) return 0 intObjectType, = unpack('>I', data[16:20]) try: @@ -405,48 +455,54 @@ def checkAndShareObjectWithPeers(data): elif intObjectType == 3: _checkAndShareBroadcastWithPeers(data) return 0.6 - else: - _checkAndShareUndefinedObjectWithPeers(data) - return 0.6 - except varintDecodeError as e: - logger.debug("There was a problem with a varint while checking to see whether it was appropriate to share an object with peers. Some details: %s", e) - except Exception as e: - logger.critical('There was a problem while checking to see whether it was appropriate to share an object with peers. This is definitely a bug! \n%s', traceback.format_exc()) + _checkAndShareUndefinedObjectWithPeers(data) + return 0.6 + except varintDecodeError as err: + logger.debug( + ("There was a problem with a varint while checking to see whether it was appropriate to share an object " + "with peers. Some details: %s"), + err) + except Exception: + logger.critical( + ('There was a problem while checking to see whether it was appropriate to share an object with peers. ' + 'This is definitely a bug! \n%s'), + traceback.format_exc()) return 0 - + def _checkAndShareUndefinedObjectWithPeers(data): + # pylint: disable=unused-variable embeddedTime, = unpack('>Q', data[8:16]) - readPosition = 20 # bypass nonce, time, and object type + readPosition = 20 # bypass nonce, time, and object type objectVersion, objectVersionLength = decodeVarint( data[readPosition:readPosition + 9]) readPosition += objectVersionLength streamNumber, streamNumberLength = decodeVarint( data[readPosition:readPosition + 9]) - if not streamNumber in state.streamsInWhichIAmParticipating: + if streamNumber not in state.streamsInWhichIAmParticipating: logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber) return - + inventoryHash = calculateInventoryHash(data) if inventoryHash in Inventory(): logger.debug('We have already received this undefined object. 
Ignoring.') return objectType, = unpack('>I', data[16:20]) Inventory()[inventoryHash] = ( - objectType, streamNumber, data, embeddedTime,'') + objectType, streamNumber, data, embeddedTime, '') logger.debug('advertising inv with hash: %s', hexlify(inventoryHash)) broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash)) - - + + def _checkAndShareMsgWithPeers(data): embeddedTime, = unpack('>Q', data[8:16]) - readPosition = 20 # bypass nonce, time, and object type - objectVersion, objectVersionLength = decodeVarint( + readPosition = 20 # bypass nonce, time, and object type + objectVersion, objectVersionLength = decodeVarint( # pylint: disable=unused-variable data[readPosition:readPosition + 9]) readPosition += objectVersionLength streamNumber, streamNumberLength = decodeVarint( data[readPosition:readPosition + 9]) - if not streamNumber in state.streamsInWhichIAmParticipating: + if streamNumber not in state.streamsInWhichIAmParticipating: logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber) return readPosition += streamNumberLength @@ -457,14 +513,16 @@ def _checkAndShareMsgWithPeers(data): # This msg message is valid. Let's let our peers know about it. objectType = 2 Inventory()[inventoryHash] = ( - objectType, streamNumber, data, embeddedTime,'') + objectType, streamNumber, data, embeddedTime, '') logger.debug('advertising inv with hash: %s', hexlify(inventoryHash)) broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash)) # Now let's enqueue it to be processed ourselves. - objectProcessorQueue.put((objectType,data)) + objectProcessorQueue.put((objectType, data)) + def _checkAndShareGetpubkeyWithPeers(data): + # pylint: disable=unused-variable if len(data) < 42: logger.info('getpubkey message doesn\'t contain enough data. Ignoring.') return @@ -477,7 +535,7 @@ def _checkAndShareGetpubkeyWithPeers(data): readPosition += addressVersionLength streamNumber, streamNumberLength = decodeVarint( data[readPosition:readPosition + 10]) - if not streamNumber in state.streamsInWhichIAmParticipating: + if streamNumber not in state.streamsInWhichIAmParticipating: logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber) return readPosition += streamNumberLength @@ -489,13 +547,14 @@ def _checkAndShareGetpubkeyWithPeers(data): objectType = 0 Inventory()[inventoryHash] = ( - objectType, streamNumber, data, embeddedTime,'') + objectType, streamNumber, data, embeddedTime, '') # This getpubkey request is valid. Forward to peers. logger.debug('advertising inv with hash: %s', hexlify(inventoryHash)) broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash)) # Now let's queue it to be processed ourselves. 
- objectProcessorQueue.put((objectType,data)) + objectProcessorQueue.put((objectType, data)) + def _checkAndSharePubkeyWithPeers(data): if len(data) < 146 or len(data) > 440: # sanity check @@ -508,7 +567,7 @@ def _checkAndSharePubkeyWithPeers(data): streamNumber, varintLength = decodeVarint( data[readPosition:readPosition + 10]) readPosition += varintLength - if not streamNumber in state.streamsInWhichIAmParticipating: + if streamNumber not in state.streamsInWhichIAmParticipating: logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber) return if addressVersion >= 4: @@ -528,14 +587,15 @@ def _checkAndSharePubkeyWithPeers(data): logger.debug('advertising inv with hash: %s', hexlify(inventoryHash)) broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash)) - # Now let's queue it to be processed ourselves. - objectProcessorQueue.put((objectType,data)) + objectProcessorQueue.put((objectType, data)) def _checkAndShareBroadcastWithPeers(data): if len(data) < 180: - logger.debug('The payload length of this broadcast packet is unreasonably low. Someone is probably trying funny business. Ignoring message.') + logger.debug( + 'The payload length of this broadcast packet is unreasonably low. ' + 'Someone is probably trying funny business. Ignoring message.') return embeddedTime, = unpack('>Q', data[8:16]) readPosition = 20 # bypass the nonce, time, and object type @@ -545,11 +605,11 @@ def _checkAndShareBroadcastWithPeers(data): if broadcastVersion >= 2: streamNumber, streamNumberLength = decodeVarint(data[readPosition:readPosition + 10]) readPosition += streamNumberLength - if not streamNumber in state.streamsInWhichIAmParticipating: + if streamNumber not in state.streamsInWhichIAmParticipating: logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber) return if broadcastVersion >= 3: - tag = data[readPosition:readPosition+32] + tag = data[readPosition:readPosition + 32] else: tag = '' inventoryHash = calculateInventoryHash(data) @@ -565,23 +625,26 @@ def _checkAndShareBroadcastWithPeers(data): broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash)) # Now let's queue it to be processed ourselves. - objectProcessorQueue.put((objectType,data)) + objectProcessorQueue.put((objectType, data)) + -# If you want to command all of the sendDataThreads to do something, like shutdown or send some data, this -# function puts your data into the queues for each of the sendDataThreads. The sendDataThreads are -# responsible for putting their queue into (and out of) the sendDataQueues list. def broadcastToSendDataQueues(data): - # logger.debug('running broadcastToSendDataQueues') + """ + If you want to command all of the sendDataThreads to do something, like shutdown or send some data, this + function puts your data into the queues for each of the sendDataThreads. The sendDataThreads are + responsible for putting their queue into (and out of) the sendDataQueues list. + """ for q in state.sendDataQueues: q.put(data) + # sslProtocolVersion -if sys.version_info >= (2,7,13): +if sys.version_info >= (2, 7, 13): # this means TLSv1 or higher # in the future change to # ssl.PROTOCOL_TLS1.2 - sslProtocolVersion = ssl.PROTOCOL_TLS -elif sys.version_info >= (2,7,9): + sslProtocolVersion = ssl.PROTOCOL_TLS # pylint: disable=no-member +elif sys.version_info >= (2, 7, 9): # this means any SSL/TLS. 
SSLv2 and 3 are excluded with an option after context is created sslProtocolVersion = ssl.PROTOCOL_SSLv23 else: @@ -589,6 +652,7 @@ else: # "TLSv1.2" in < 2.7.9 sslProtocolVersion = ssl.PROTOCOL_TLSv1 + # ciphers if ssl.OPENSSL_VERSION_NUMBER >= 0x10100000 and not ssl.OPENSSL_VERSION.startswith("LibreSSL"): sslProtocolCiphers = "AECDH-AES256-SHA@SECLEVEL=0" diff --git a/src/pyelliptic/arithmetic.py b/src/pyelliptic/arithmetic.py index 1eec381a..95c85b93 100644 --- a/src/pyelliptic/arithmetic.py +++ b/src/pyelliptic/arithmetic.py @@ -1,106 +1,144 @@ -import hashlib, re +# pylint: disable=missing-docstring,too-many-function-args -P = 2**256-2**32-2**9-2**8-2**7-2**6-2**4-1 +import hashlib +import re + +P = 2**256 - 2**32 - 2**9 - 2**8 - 2**7 - 2**6 - 2**4 - 1 A = 0 Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240 Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424 -G = (Gx,Gy) +G = (Gx, Gy) + + +def inv(a, n): + lm, hm = 1, 0 + low, high = a % n, n + while low > 1: + r = high / low + nm, new = hm - lm * r, high - low * r + lm, low, hm, high = nm, new, lm, low + return lm % n -def inv(a,n): - lm, hm = 1,0 - low, high = a%n,n - while low > 1: - r = high/low - nm, new = hm-lm*r, high-low*r - lm, low, hm, high = nm, new, lm, low - return lm % n def get_code_string(base): - if base == 2: return '01' - elif base == 10: return '0123456789' - elif base == 16: return "0123456789abcdef" - elif base == 58: return "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" - elif base == 256: return ''.join([chr(x) for x in range(256)]) - else: raise ValueError("Invalid base!") + if base == 2: + return '01' + elif base == 10: + return '0123456789' + elif base == 16: + return "0123456789abcdef" + elif base == 58: + return "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + elif base == 256: + return ''.join([chr(x) for x in range(256)]) + else: + raise ValueError("Invalid base!") -def encode(val,base,minlen=0): - code_string = get_code_string(base) - result = "" - while val > 0: - result = code_string[val % base] + result - val /= base - if len(result) < minlen: - result = code_string[0]*(minlen-len(result))+result - return result -def decode(string,base): - code_string = get_code_string(base) - result = 0 - if base == 16: string = string.lower() - while len(string) > 0: - result *= base - result += code_string.find(string[0]) - string = string[1:] - return result +def encode(val, base, minlen=0): + code_string = get_code_string(base) + result = "" + while val > 0: + result = code_string[val % base] + result + val /= base + if len(result) < minlen: + result = code_string[0] * (minlen - len(result)) + result + return result + + +def decode(string, base): + code_string = get_code_string(base) + result = 0 + if base == 16: + string = string.lower() + while string: + result *= base + result += code_string.find(string[0]) + string = string[1:] + return result + + +def changebase(string, frm, to, minlen=0): + return encode(decode(string, frm), to, minlen) + + +def base10_add(a, b): + if a is None: + return b[0], b[1] + if b is None: + return a[0], a[1] + if a[0] == b[0]: + if a[1] == b[1]: + return base10_double(a[0], a[1]) + return None + m = ((b[1] - a[1]) * inv(b[0] - a[0], P)) % P + x = (m * m - a[0] - b[0]) % P + y = (m * (a[0] - x) - a[1]) % P + return (x, y) -def changebase(string,frm,to,minlen=0): - return encode(decode(string,frm),to,minlen) -def base10_add(a,b): - if a == None: return b[0],b[1] - if b == None: return 
a[0],a[1] - if a[0] == b[0]: - if a[1] == b[1]: return base10_double(a[0],a[1]) - else: return None - m = ((b[1]-a[1]) * inv(b[0]-a[0],P)) % P - x = (m*m-a[0]-b[0]) % P - y = (m*(a[0]-x)-a[1]) % P - return (x,y) - def base10_double(a): - if a == None: return None - m = ((3*a[0]*a[0]+A)*inv(2*a[1],P)) % P - x = (m*m-2*a[0]) % P - y = (m*(a[0]-x)-a[1]) % P - return (x,y) + if a is None: + return None + m = ((3 * a[0] * a[0] + A) * inv(2 * a[1], P)) % P + x = (m * m - 2 * a[0]) % P + y = (m * (a[0] - x) - a[1]) % P + return (x, y) -def base10_multiply(a,n): - if n == 0: return G - if n == 1: return a - if (n%2) == 0: return base10_double(base10_multiply(a,n/2)) - if (n%2) == 1: return base10_add(base10_double(base10_multiply(a,n/2)),a) -def hex_to_point(h): return (decode(h[2:66],16),decode(h[66:],16)) +def base10_multiply(a, n): + if n == 0: + return G + if n == 1: + return a + if (n % 2) == 0: + return base10_double(base10_multiply(a, n / 2)) + if (n % 2) == 1: + return base10_add(base10_double(base10_multiply(a, n / 2)), a) + return None -def point_to_hex(p): return '04'+encode(p[0],16,64)+encode(p[1],16,64) -def multiply(privkey,pubkey): - return point_to_hex(base10_multiply(hex_to_point(pubkey),decode(privkey,16))) +def hex_to_point(h): + return (decode(h[2:66], 16), decode(h[66:], 16)) + + +def point_to_hex(p): + return '04' + encode(p[0], 16, 64) + encode(p[1], 16, 64) + + +def multiply(privkey, pubkey): + return point_to_hex(base10_multiply(hex_to_point(pubkey), decode(privkey, 16))) + def privtopub(privkey): - return point_to_hex(base10_multiply(G,decode(privkey,16))) + return point_to_hex(base10_multiply(G, decode(privkey, 16))) + + +def add(p1, p2): + if len(p1) == 32: + return encode(decode(p1, 16) + decode(p2, 16) % P, 16, 32) + return point_to_hex(base10_add(hex_to_point(p1), hex_to_point(p2))) -def add(p1,p2): - if (len(p1)==32): - return encode(decode(p1,16) + decode(p2,16) % P,16,32) - else: - return point_to_hex(base10_add(hex_to_point(p1),hex_to_point(p2))) def hash_160(string): - intermed = hashlib.sha256(string).digest() - ripemd160 = hashlib.new('ripemd160') - ripemd160.update(intermed) - return ripemd160.digest() + intermed = hashlib.sha256(string).digest() + ripemd160 = hashlib.new('ripemd160') + ripemd160.update(intermed) + return ripemd160.digest() + def dbl_sha256(string): - return hashlib.sha256(hashlib.sha256(string).digest()).digest() - -def bin_to_b58check(inp): - inp_fmtd = '\x00' + inp - leadingzbytes = len(re.match('^\x00*',inp_fmtd).group(0)) - checksum = dbl_sha256(inp_fmtd)[:4] - return '1' * leadingzbytes + changebase(inp_fmtd+checksum,256,58) + return hashlib.sha256(hashlib.sha256(string).digest()).digest() + + +def bin_to_b58check(inp): + inp_fmtd = '\x00' + inp + leadingzbytes = len(re.match('^\x00*', inp_fmtd).group(0)) + checksum = dbl_sha256(inp_fmtd)[:4] + return '1' * leadingzbytes + changebase(inp_fmtd + checksum, 256, 58) + +# Convert a public key (in hex) to a Bitcoin address + -#Convert a public key (in hex) to a Bitcoin address def pubkey_to_address(pubkey): - return bin_to_b58check(hash_160(changebase(pubkey,16,256))) + return bin_to_b58check(hash_160(changebase(pubkey, 16, 256))) -- 2.45.1 From e1c2e8ec46912c106e60a14b3d8330da8ef60df8 Mon Sep 17 00:00:00 2001 From: coffeedogs Date: Thu, 24 May 2018 16:59:40 +0100 Subject: [PATCH 2/2] Fixed: Responded to PR comments --- src/protocol.py | 41 +++++++++++++++++++++++++---------------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/src/protocol.py b/src/protocol.py index 
eb19a262..37debbc3 100644
--- a/src/protocol.py
+++ b/src/protocol.py
@@ -11,6 +11,7 @@ from __future__ import absolute_import
 import base64
 from binascii import hexlify
 import hashlib
+import os
 import random
 import socket
 import ssl
@@ -88,6 +89,9 @@ def isBitSetWithinBitfield(fourByteString, n):
     return x & 2**n != 0
 
 
+# ip addresses
+
+
 def encodeHost(host):
     """Encode a given host to be used in low-level socket operations"""
     if host.find('.onion') > -1:
@@ -214,6 +218,9 @@ def isProofOfWorkSufficient(data,
                              ((TTL * (len(data) + payloadLengthExtraBytes)) / (2 ** 16))))
 
 
+# Packet creation
+
+
 def CreatePacket(command, payload=''):
     """Construct and return a packet from a command name and a payload"""
     payload_length = len(payload)
@@ -299,6 +306,9 @@ def assembleErrorMessage(fatal=0, banTime=0, inventoryVector='', errorText=''):
     return CreatePacket('error', payload)
 
 
+# Packet decoding
+
+
 def decryptAndCheckPubkeyPayload(data, address):
     """
     Version 4 pubkeys are encrypted. This function is run when we already have the
@@ -335,8 +345,8 @@ def decryptAndCheckPubkeyPayload(data, address):
         toAddress, cryptorObject = state.neededPubkeys[tag]
         if toAddress != address:
             logger.critical(
-                ('decryptAndCheckPubkeyPayload failed due to toAddress mismatch. '
-                 'This is very peculiar. toAddress: %s, address %s'),
+                'decryptAndCheckPubkeyPayload failed due to toAddress mismatch.'
+                ' This is very peculiar. toAddress: %s, address %s',
                 toAddress,
                 address)
             # the only way I can think that this could happen is if someone encodes their address data two different
@@ -393,14 +403,13 @@ def decryptAndCheckPubkeyPayload(data, address):
         # Everything checked out. Insert it into the pubkeys table.
 
         logger.info(
-            'within decryptAndCheckPubkeyPayload, addressVersion: %s, streamNumber: %s \n\
-            ripe %s\n\
-            publicSigningKey in hex: %s\n\
-            publicEncryptionKey in hex: %s', addressVersion,
-            streamNumber,
-            hexlify(ripe),
-            hexlify(publicSigningKey),
-            hexlify(publicEncryptionKey)
+            os.linesep.join([
+                'within decryptAndCheckPubkeyPayload,'
+                ' addressVersion: %s, streamNumber: %s' % (addressVersion, streamNumber),
+                'ripe %s' % hexlify(ripe),
+                'publicSigningKey in hex: %s' % hexlify(publicSigningKey),
+                'publicEncryptionKey in hex: %s' % hexlify(publicEncryptionKey),
+            ])
         )
 
         t = (address, addressVersion, storedData, int(time.time()), 'yes')
@@ -459,14 +468,14 @@ def checkAndShareObjectWithPeers(data):
         return 0.6
     except varintDecodeError as err:
         logger.debug(
-            ("There was a problem with a varint while checking to see whether it was appropriate to share an object "
-             "with peers. Some details: %s"),
-            err)
+            "There was a problem with a varint while checking to see whether it was appropriate to share an object"
+            " with peers. Some details: %s", err
+        )
     except Exception:
         logger.critical(
-            ('There was a problem while checking to see whether it was appropriate to share an object with peers. '
-             'This is definitely a bug! \n%s'),
-            traceback.format_exc())
+            'There was a problem while checking to see whether it was appropriate to share an object with peers.'
+            ' This is definitely a bug! %s%s', os.linesep, traceback.format_exc()
+        )
     return 0
 
 
-- 
2.45.1
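
Note on the logging style adopted throughout the series: messages are now passed to the logger as a format string plus arguments rather than being built with string concatenation, so the %s placeholders are only interpolated when the record is actually handled. A minimal sketch of the two styles, using a made-up host value purely for illustration (not taken from the patch):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger('protocol.example')

    hostStandardFormat = '203.0.113.5'  # hypothetical value for the sketch

    # Old style: the full message string is built even when DEBUG is disabled.
    logger.debug('Ignoring IP address in private range: ' + hostStandardFormat)

    # New style used in the patches: the logging module interpolates %s lazily,
    # so a disabled level skips the formatting work entirely.
    logger.debug('Ignoring IP address in private range: %s', hostStandardFormat)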