flake8: class_objectProcessor

Dmitri Bogomolov 2018-05-17 12:38:46 +03:00
parent de27c9082d
commit 40dc7d330d
Signed by untrusted user: g1itch
GPG Key ID: 720A756F18DEED13


@@ -1,21 +1,23 @@
import hashlib
import random
import shared
import string
import threading
import time
from binascii import hexlify
from subprocess import call  # nosec

import highlevelcrypto
from addresses import (
    calculateInventoryHash, decodeAddress, decodeVarint, encodeAddress,
    encodeVarint, varintDecodeError
)
from bmconfigparser import BMConfigParser
import helper_bitcoin
import helper_inbox
import helper_msgcoding
import helper_sent
from helper_sql import SqlBulkExecute, sqlExecute, sqlQuery
from helper_ackPayload import genAckPayload
import protocol
import queues
@@ -32,21 +34,22 @@ class objectProcessor(threading.Thread):
    """
    def __init__(self):
        """
        It may be the case that the last time Bitmessage was running,
        the user closed it before it finished processing everything in the
        objectProcessorQueue. Assuming that Bitmessage wasn't closed
        forcefully, it should have saved the data in the queue into the
        objectprocessorqueue table. Let's pull it out.
        """
        threading.Thread.__init__(self, name="objectProcessor")
        queryreturn = sqlQuery(
            '''SELECT objecttype, data FROM objectprocessorqueue''')
        for row in queryreturn:
            objectType, data = row
            queues.objectProcessorQueue.put((objectType, data))
        sqlExecute('''DELETE FROM objectprocessorqueue''')
        logger.debug(
            'Loaded %s objects from disk into the objectProcessorQueue.',
            len(queryreturn))

    def run(self):
        while True:
@@ -57,36 +60,56 @@ class objectProcessor(threading.Thread):
            try:
                if objectType == 0:  # getpubkey
                    self.processgetpubkey(data)
                elif objectType == 1:  # pubkey
                    self.processpubkey(data)
                elif objectType == 2:  # msg
                    self.processmsg(data)
                elif objectType == 3:  # broadcast
                    self.processbroadcast(data)
                # is more of a command, not an object type. Is used to get
                # this thread past the queue.get() so that it will check
                # the shutdown variable.
                elif objectType == 'checkShutdownVariable':
                    pass
                else:
                    if isinstance(objectType, int):
                        logger.info(
                            'Don\'t know how to handle object type 0x%08X',
                            objectType)
                    else:
                        logger.info(
                            'Don\'t know how to handle object type %s',
                            objectType)
            except helper_msgcoding.DecompressionSizeException as e:
                logger.error(
                    'The object is too big after decompression (stopped'
                    ' decompressing at %ib, your configured limit %ib).'
                    ' Ignoring',
                    e.size, BMConfigParser().safeGetInt("zlib", "maxsize"))
            except varintDecodeError as e:
                logger.debug(
                    'There was a problem with a varint while processing an'
                    ' object. Some details: %s', e)
            except Exception:
                logger.critical(
                    'Critical error within objectProcessorThread: \n',
                    exc_info=True)

            if state.shutdown:
                # Wait just a moment for most of the connections to close
                time.sleep(.5)
                numberOfObjectsThatWereInTheObjectProcessorQueue = 0
                with SqlBulkExecute() as sql:
                    while queues.objectProcessorQueue.curSize > 0:
                        objectType, data = queues.objectProcessorQueue.get()
                        sql.execute(
                            'INSERT INTO objectprocessorqueue VALUES (?,?)',
                            objectType, data)
                        numberOfObjectsThatWereInTheObjectProcessorQueue += 1
                logger.debug(
                    'Saved %s objects from the objectProcessorQueue to'
                    ' disk. objectProcessorThread exiting.',
                    numberOfObjectsThatWereInTheObjectProcessorQueue)
                state.shutdown = 2
                break
@@ -101,17 +124,26 @@ class objectProcessor(threading.Thread):
        if data[readPosition:] in shared.ackdataForWhichImWatching:
            logger.info('This object is an acknowledgement bound for me.')
            del shared.ackdataForWhichImWatching[data[readPosition:]]
            sqlExecute(
                'UPDATE sent SET status=?, lastactiontime=?'
                ' WHERE ackdata=?',
                'ackreceived', int(time.time()), data[readPosition:])
            queues.UISignalQueue.put((
                'updateSentItemStatusByAckdata',
                (data[readPosition:],
                 tr._translate(
                     "MainWindow",
                     "Acknowledgement of the message received %1"
                 ).arg(l10n.formatTimestamp()))
            ))
        else:
            logger.debug('This object is not an acknowledgement bound for me.')

    def processgetpubkey(self, data):
        if len(data) > 200:
            logger.info(
                'getpubkey is abnormally long. Sanity check failed.'
                ' Ignoring object.')
            return
        readPosition = 20  # bypass the nonce, time, and object type
        requestedAddressVersionNumber, addressVersionLength = decodeVarint(
@@ -122,30 +154,45 @@ class objectProcessor(threading.Thread):
        readPosition += streamNumberLength
        if requestedAddressVersionNumber == 0:
            logger.debug(
                'The requestedAddressVersionNumber of the pubkey request'
                ' is zero. That doesn\'t make any sense. Ignoring it.')
            return
        elif requestedAddressVersionNumber == 1:
            logger.debug(
                'The requestedAddressVersionNumber of the pubkey request'
                ' is 1 which isn\'t supported anymore. Ignoring it.')
            return
        elif requestedAddressVersionNumber > 4:
            logger.debug(
                'The requestedAddressVersionNumber of the pubkey request'
                ' is too high. Can\'t understand. Ignoring it.')
            return

        myAddress = ''
        if requestedAddressVersionNumber <= 3:
            requestedHash = data[readPosition:readPosition + 20]
            if len(requestedHash) != 20:
                logger.debug(
                    'The length of the requested hash is not 20 bytes.'
                    ' Something is wrong. Ignoring.')
                return
            logger.info(
                'the hash requested in this getpubkey request is: %s',
                hexlify(requestedHash))
            # if this address hash is one of mine
            if requestedHash in shared.myAddressesByHash:
                myAddress = shared.myAddressesByHash[requestedHash]
        elif requestedAddressVersionNumber >= 4:
            requestedTag = data[readPosition:readPosition + 32]
            if len(requestedTag) != 32:
                logger.debug(
                    'The length of the requested tag is not 32 bytes.'
                    ' Something is wrong. Ignoring.')
                return
            logger.debug(
                'the tag requested in this getpubkey request is: %s',
                hexlify(requestedTag))
            if requestedTag in shared.myAddressesByTag:
                myAddress = shared.myAddressesByTag[requestedTag]
@@ -154,32 +201,48 @@ class objectProcessor(threading.Thread):
            return
        if decodeAddress(myAddress)[1] != requestedAddressVersionNumber:
            logger.warning(
                '(Within the processgetpubkey function) Someone requested'
                ' one of my pubkeys but the requestedAddressVersionNumber'
                ' doesn\'t match my actual address version number.'
                ' Ignoring.')
            return
        if decodeAddress(myAddress)[2] != streamNumber:
            logger.warning(
                '(Within the processgetpubkey function) Someone requested'
                ' one of my pubkeys but the stream number on which we'
                ' heard this getpubkey object doesn\'t match this'
                ' address\' stream number. Ignoring.')
            return
        if BMConfigParser().safeGetBoolean(myAddress, 'chan'):
            logger.info(
                'Ignoring getpubkey request because it is for one of my'
                ' chan addresses. The other party should already have'
                ' the pubkey.')
            return
        try:
            lastPubkeySendTime = int(BMConfigParser().get(
                myAddress, 'lastpubkeysendtime'))
        except:
            lastPubkeySendTime = 0
        # If the last time we sent our pubkey was more recent than
        # 28 days ago...
        if lastPubkeySendTime > time.time() - 2419200:
            logger.info(
                'Found getpubkey-requested-item in my list of EC hashes'
                ' BUT we already sent it recently. Ignoring request.'
                ' The lastPubkeySendTime is: %s', lastPubkeySendTime)
            return
        logger.info(
            'Found getpubkey-requested-hash in my list of EC hashes.'
            ' Telling Worker thread to do the POW for a pubkey message'
            ' and send it out.')
        if requestedAddressVersionNumber == 2:
            queues.workerQueue.put(('doPOWForMyV2Pubkey', requestedHash))
        elif requestedAddressVersionNumber == 3:
            queues.workerQueue.put(('sendOutOrStoreMyV3Pubkey', requestedHash))
        elif requestedAddressVersionNumber == 4:
            queues.workerQueue.put(('sendOutOrStoreMyV4Pubkey', myAddress))

    def processpubkey(self, data):
        pubkeyProcessingStartTime = time.time()
@@ -194,14 +257,21 @@ class objectProcessor(threading.Thread):
            data[readPosition:readPosition + 10])
        readPosition += varintLength
        if addressVersion == 0:
            logger.debug(
                '(Within processpubkey) addressVersion of 0 doesn\'t'
                ' make sense.')
            return
        if addressVersion > 4 or addressVersion == 1:
            logger.info(
                'This version of Bitmessage cannot handle version %s'
                ' addresses.', addressVersion)
            return
        if addressVersion == 2:
            # sanity check. This is the minimum possible length.
            if len(data) < 146:
                logger.debug(
                    '(within processpubkey) payloadLength less than 146.'
                    ' Sanity check failed.')
                return
            readPosition += 4
            publicSigningKey = data[readPosition:readPosition + 64]
@@ -211,10 +281,13 @@ class objectProcessor(threading.Thread):
            readPosition += 64
            publicEncryptionKey = data[readPosition:readPosition + 64]
            if len(publicEncryptionKey) < 64:
                logger.debug(
                    'publicEncryptionKey length less than 64. Sanity check'
                    ' failed.')
                return
            readPosition += 64
            # The data we'll store in the pubkeys table.
            dataToStore = data[20:readPosition]
            sha = hashlib.new('sha512')
            sha.update(
                '\x04' + publicSigningKey + '\x04' + publicEncryptionKey)
@@ -222,32 +295,39 @@ class objectProcessor(threading.Thread):
            ripeHasher.update(sha.digest())
            ripe = ripeHasher.digest()
            logger.debug(
                'within recpubkey, addressVersion: %s, streamNumber: %s'
                '\nripe %s\npublicSigningKey in hex: %s'
                '\npublicEncryptionKey in hex: %s',
                addressVersion, streamNumber, hexlify(ripe),
                hexlify(publicSigningKey), hexlify(publicEncryptionKey)
            )
            address = encodeAddress(addressVersion, streamNumber, ripe)

            queryreturn = sqlQuery(
                "SELECT usedpersonally FROM pubkeys WHERE address=?"
                " AND usedpersonally='yes'", address)
            # if this pubkey is already in our database and if we have
            # used it personally:
            if queryreturn != []:
                logger.info(
                    'We HAVE used this pubkey personally. Updating time.')
                t = (address, addressVersion, dataToStore,
                     int(time.time()), 'yes')
            else:
                logger.info(
                    'We have NOT used this pubkey personally. Inserting'
                    ' in database.')
                t = (address, addressVersion, dataToStore,
                     int(time.time()), 'no')
            sqlExecute('''INSERT INTO pubkeys VALUES (?,?,?,?,?)''', *t)
            self.possibleNewPubkey(address)
        if addressVersion == 3:
            if len(data) < 170:  # sanity check.
                logger.warning(
                    '(within processpubkey) payloadLength less than 170.'
                    ' Sanity check failed.')
                return
            readPosition += 4
            publicSigningKey = '\x04' + data[readPosition:readPosition + 64]
@@ -261,12 +341,15 @@ class objectProcessor(threading.Thread):
                data[readPosition:readPosition + 10])
            readPosition += specifiedPayloadLengthExtraBytesLength
            endOfSignedDataPosition = readPosition
            # The data we'll store in the pubkeys table.
            dataToStore = data[20:readPosition]
            signatureLength, signatureLengthLength = decodeVarint(
                data[readPosition:readPosition + 10])
            readPosition += signatureLengthLength
            signature = data[readPosition:readPosition + signatureLength]
            if highlevelcrypto.verify(
                    data[8:endOfSignedDataPosition],
                    signature, hexlify(publicSigningKey)):
                logger.debug('ECDSA verify passed (within processpubkey)')
            else:
                logger.warning('ECDSA verify failed (within processpubkey)')
@@ -278,51 +361,62 @@ class objectProcessor(threading.Thread):
            ripeHasher.update(sha.digest())
            ripe = ripeHasher.digest()
            logger.debug(
                'within recpubkey, addressVersion: %s, streamNumber: %s'
                '\nripe %s\npublicSigningKey in hex: %s'
                '\npublicEncryptionKey in hex: %s',
                addressVersion, streamNumber, hexlify(ripe),
                hexlify(publicSigningKey), hexlify(publicEncryptionKey)
            )
            address = encodeAddress(addressVersion, streamNumber, ripe)

            queryreturn = sqlQuery(
                "SELECT usedpersonally FROM pubkeys WHERE address=?"
                " AND usedpersonally='yes'", address)
            # if this pubkey is already in our database and if we have
            # used it personally:
            if queryreturn != []:
                logger.info(
                    'We HAVE used this pubkey personally. Updating time.')
                t = (address, addressVersion, dataToStore,
                     int(time.time()), 'yes')
            else:
                logger.info(
                    'We have NOT used this pubkey personally. Inserting'
                    ' in database.')
                t = (address, addressVersion, dataToStore,
                     int(time.time()), 'no')
            sqlExecute('''INSERT INTO pubkeys VALUES (?,?,?,?,?)''', *t)
            self.possibleNewPubkey(address)
        if addressVersion == 4:
            if len(data) < 350:  # sanity check.
                logger.debug(
                    '(within processpubkey) payloadLength less than 350.'
                    ' Sanity check failed.')
                return

            tag = data[readPosition:readPosition + 32]
            if tag not in state.neededPubkeys:
                logger.info(
                    'We don\'t need this v4 pubkey. We didn\'t ask for it.')
                return

            # Let us try to decrypt the pubkey
            toAddress, _ = state.neededPubkeys[tag]
            if shared.decryptAndCheckPubkeyPayload(data, toAddress) == \
                    'successful':
                # At this point we know that we have been waiting on this
                # pubkey. This function will command the workerThread
                # to start work on the messages that require it.
                self.possibleNewPubkey(toAddress)

        # Display timing data
        timeRequiredToProcessPubkey = time.time(
        ) - pubkeyProcessingStartTime
        logger.debug(
            'Time required to process this pubkey: %s',
            timeRequiredToProcessPubkey)

    def processmsg(self, data):
        messageProcessingStartTime = time.time()
@@ -330,14 +424,17 @@ class objectProcessor(threading.Thread):
        queues.UISignalQueue.put((
            'updateNumberOfMessagesProcessed', 'no data'))
        readPosition = 20  # bypass the nonce, time, and object type
        msgVersion, msgVersionLength = decodeVarint(
            data[readPosition:readPosition + 9])
        if msgVersion != 1:
            logger.info(
                'Cannot understand message versions other than one.'
                ' Ignoring message.')
            return
        readPosition += msgVersionLength
        streamNumberAsClaimedByMsg, streamNumberAsClaimedByMsgLength = \
            decodeVarint(data[readPosition:readPosition + 9])
        readPosition += streamNumberAsClaimedByMsgLength
        inventoryHash = calculateInventoryHash(data)
        initialDecryptionSuccessful = False
@@ -345,37 +442,54 @@ class objectProcessor(threading.Thread):
        # This is not an acknowledgement bound for me. See if it is a message
        # bound for me by trying to decrypt it with my private keys.
        for key, cryptorObject in sorted(
                shared.myECCryptorObjects.items(),
                key=lambda x: random.random()):
            try:
                # continue decryption attempts to avoid timing attacks
                if initialDecryptionSuccessful:
                    cryptorObject.decrypt(data[readPosition:])
                else:
                    decryptedData = cryptorObject.decrypt(data[readPosition:])
                    # This is the RIPE hash of my pubkeys. We need this
                    # below to compare to the destination_ripe included
                    # in the encrypted data.
                    toRipe = key
                    initialDecryptionSuccessful = True
                    logger.info(
                        'EC decryption successful using key associated'
                        ' with ripe hash: %s.', hexlify(key))
            except Exception:
                pass

        if not initialDecryptionSuccessful:
            # This is not a message bound for me.
            logger.info(
                'Length of time program spent failing to decrypt this'
                ' message: %s seconds.',
                time.time() - messageProcessingStartTime)
            return

        # This is a message bound for me.
        toAddress = shared.myAddressesByHash[
            toRipe]  # Look up my address based on the RIPE hash.

        readPosition = 0
        sendersAddressVersionNumber, sendersAddressVersionNumberLength = \
            decodeVarint(decryptedData[readPosition:readPosition + 10])
        readPosition += sendersAddressVersionNumberLength
        if sendersAddressVersionNumber == 0:
            logger.info(
                'Cannot understand sendersAddressVersionNumber = 0.'
                ' Ignoring message.')
            return
        if sendersAddressVersionNumber > 4:
            logger.info(
                'Sender\'s address version number %s not yet supported.'
                ' Ignoring message.', sendersAddressVersionNumber)
            return
        if len(decryptedData) < 170:
            logger.info(
                'Length of the unencrypted data is unreasonably short.'
                ' Sanity check failed. Ignoring message.')
            return
        sendersStreamNumber, sendersStreamNumberLength = decodeVarint(
            decryptedData[readPosition:readPosition + 10])
@@ -391,20 +505,30 @@ class objectProcessor(threading.Thread):
            readPosition:readPosition + 64]
        readPosition += 64
        if sendersAddressVersionNumber >= 3:
            requiredAverageProofOfWorkNonceTrialsPerByte, varintLength = \
                decodeVarint(decryptedData[readPosition:readPosition + 10])
            readPosition += varintLength
            logger.info(
                'sender\'s requiredAverageProofOfWorkNonceTrialsPerByte is %s',
                requiredAverageProofOfWorkNonceTrialsPerByte)
            requiredPayloadLengthExtraBytes, varintLength = decodeVarint(
                decryptedData[readPosition:readPosition + 10])
            readPosition += varintLength
            logger.info(
                'sender\'s requiredPayloadLengthExtraBytes is %s',
                requiredPayloadLengthExtraBytes)
        # needed for when we store the pubkey in our database of pubkeys
        # for later use.
        endOfThePublicKeyPosition = readPosition
        if toRipe != decryptedData[readPosition:readPosition + 20]:
            logger.info(
                'The original sender of this message did not send it to'
                ' you. Someone is attempting a Surreptitious Forwarding'
                ' Attack.\nSee: '
                'http://world.std.com/~dtd/sign_encrypt/sign_encrypt7.html'
                '\nyour toRipe: %s\nembedded destination toRipe: %s',
                hexlify(toRipe),
                hexlify(decryptedData[readPosition:readPosition + 20])
            )
            return
        readPosition += 20
@@ -422,22 +546,36 @@ class objectProcessor(threading.Thread):
        readPosition += ackLengthLength
        ackData = decryptedData[readPosition:readPosition + ackLength]
        readPosition += ackLength
        # needed to mark the end of what is covered by the signature
        positionOfBottomOfAckData = readPosition
        signatureLength, signatureLengthLength = decodeVarint(
            decryptedData[readPosition:readPosition + 10])
        readPosition += signatureLengthLength
        signature = decryptedData[
            readPosition:readPosition + signatureLength]
        signedData = data[8:20] + encodeVarint(1) + encodeVarint(
            streamNumberAsClaimedByMsg
        ) + decryptedData[:positionOfBottomOfAckData]

        if not highlevelcrypto.verify(
                signedData, signature, hexlify(pubSigningKey)):
            logger.debug('ECDSA verify failed')
            return
        logger.debug('ECDSA verify passed')

        logger.debug(
            'As a matter of intellectual curiosity, here is the Bitcoin'
            ' address associated with the keys owned by the other person:'
            ' %s ..and here is the testnet address: %s. The other person'
            ' must take their private signing key from Bitmessage and'
            ' import it into Bitcoin (or a service like Blockchain.info)'
            ' for it to be of any use. Do not use this unless you know'
            ' what you are doing.',
            helper_bitcoin.calculateBitcoinAddressFromPubkey(pubSigningKey),
            helper_bitcoin.calculateTestnetAddressFromPubkey(pubSigningKey)
        )
        # Used to detect and ignore duplicate messages in our inbox
        sigHash = hashlib.sha512(
            hashlib.sha512(signature).digest()).digest()[32:]

        # calculate the fromRipe.
        sha = hashlib.new('sha512')
@@ -467,19 +605,31 @@ class objectProcessor(threading.Thread):
        # proof of work requirement. If this is bound for one of my chan
        # addresses then we skip this check; the minimum network POW is
        # fine.
        # If the toAddress version number is 3 or higher and not one of
        # my chan addresses:
        if decodeAddress(toAddress)[1] >= 3 \
                and not BMConfigParser().safeGetBoolean(toAddress, 'chan'):
            # If I'm not friendly with this person:
            if not shared.isAddressInMyAddressBookSubscriptionsListOrWhitelist(fromAddress):
                requiredNonceTrialsPerByte = BMConfigParser().getint(
                    toAddress, 'noncetrialsperbyte')
                requiredPayloadLengthExtraBytes = BMConfigParser().getint(
                    toAddress, 'payloadlengthextrabytes')
                if not protocol.isProofOfWorkSufficient(
                        data, requiredNonceTrialsPerByte,
                        requiredPayloadLengthExtraBytes):
                    logger.info(
                        'Proof of work in msg is insufficient only because'
                        ' it does not meet our higher requirement.')
                    return
        # Gets set to True if the user shouldn't see the message according
        # to black or white lists.
        blockMessage = False
        # If we are using a blacklist
        if BMConfigParser().get(
                'bitmessagesettings', 'blackwhitelist') == 'black':
            queryreturn = sqlQuery(
                "SELECT label FROM blacklist where address=? and enabled='1'",
                fromAddress)
            if queryreturn != []:
                logger.info('Message ignored because address is in blacklist.')
@@ -487,10 +637,11 @@ class objectProcessor(threading.Thread):
                blockMessage = True
        else:  # We're using a whitelist
            queryreturn = sqlQuery(
                "SELECT label FROM whitelist where address=? and enabled='1'",
                fromAddress)
            if queryreturn == []:
                logger.info(
                    'Message ignored because address not in whitelist.')
                blockMessage = True

        toLabel = BMConfigParser().get(toAddress, 'label')
@@ -498,7 +649,8 @@ class objectProcessor(threading.Thread):
            toLabel = toAddress

        try:
            decodedMessage = helper_msgcoding.MsgDecode(
                messageEncodingType, message)
        except helper_msgcoding.MsgDecodeException:
            return
        subject = decodedMessage.subject
@@ -510,8 +662,9 @@ class objectProcessor(threading.Thread):
            blockMessage = True

        if not blockMessage:
            if messageEncodingType != 0:
                t = (inventoryHash, toAddress, fromAddress, subject,
                     int(time.time()), body, 'inbox', messageEncodingType,
                     0, sigHash)
                helper_inbox.insert(t)
                queues.UISignalQueue.put(('displayNewInboxMessage', (
@@ -520,7 +673,8 @@ class objectProcessor(threading.Thread):
                # If we are behaving as an API then we might need to run an
                # outside command to let some program know that a new message
                # has arrived.
                if BMConfigParser().safeGetBoolean(
                        'bitmessagesettings', 'apienabled'):
                    try:
                        apiNotifyPath = BMConfigParser().get(
                            'bitmessagesettings', 'apinotifypath')
@@ -531,7 +685,8 @@ class objectProcessor(threading.Thread):
        # Let us now check and see whether our receiving address is
        # behaving as a mailing list
        if BMConfigParser().safeGetBoolean(toAddress, 'mailinglist') \
                and messageEncodingType != 0:
            try:
                mailingListName = BMConfigParser().get(
                    toAddress, 'mailinglistname')
@@ -541,10 +696,17 @@ class objectProcessor(threading.Thread):
            subject = self.addMailingListNameToSubject(
                subject, mailingListName)
            # Let us now send this message out as a broadcast
            message = time.strftime(
                "%a, %Y-%m-%d %H:%M:%S UTC", time.gmtime()
            ) + ' Message ostensibly from ' + fromAddress \
                + ':\n\n' + body
            # The fromAddress for the broadcast that we are about to
            # send is the toAddress (my address) for the msg message
            # we are currently processing.
            fromAddress = toAddress
            # We don't actually need the ackdata for acknowledgement
            # since this is a broadcast message but we can use it to
            # update the user interface when the POW is done generating.
            streamNumber = decodeAddress(fromAddress)[2]
            ackdata = genAckPayload(streamNumber, 0)
@@ -572,16 +734,20 @@ class objectProcessor(threading.Thread):
                 TTL)
            helper_sent.insert(t)

            queues.UISignalQueue.put((
                'displayNewSentMessage', (
                    toAddress, '[Broadcast subscribers]', fromAddress,
                    subject, message, ackdata)
            ))
            queues.workerQueue.put(('sendbroadcast', ''))

        # Don't send ACK if invalid, blacklisted senders, invisible
        # messages, disabled or chan
        if (self.ackDataHasAValidHeader(ackData) and not blockMessage
                and messageEncodingType != 0 and
                not BMConfigParser().safeGetBoolean(toAddress, 'dontsendack')
                and not BMConfigParser().safeGetBoolean(toAddress, 'chan')
        ):
            shared.checkAndShareObjectWithPeers(ackData[24:])
# Display timing data # Display timing data
@ -592,9 +758,12 @@ class objectProcessor(threading.Thread):
timing_sum = 0 timing_sum = 0
for item in shared.successfullyDecryptMessageTimings: for item in shared.successfullyDecryptMessageTimings:
timing_sum += item timing_sum += item
logger.debug('Time to decrypt this message successfully: %s\n\ logger.debug(
Average time for all message decryption successes since startup: %s.' % 'Time to decrypt this message successfully: %s'
(timeRequiredToAttemptToDecryptMessage, timing_sum / len(shared.successfullyDecryptMessageTimings)) '\nAverage time for all message decryption successes since'
' startup: %s.',
timeRequiredToAttemptToDecryptMessage,
timing_sum / len(shared.successfullyDecryptMessageTimings)
) )
def processbroadcast(self, data): def processbroadcast(self, data):
@@ -608,32 +777,53 @@ class objectProcessor(threading.Thread):
            data[readPosition:readPosition + 9])
        readPosition += broadcastVersionLength
        if broadcastVersion < 4 or broadcastVersion > 5:
            logger.info(
                'Cannot decode incoming broadcast versions less than 4'
                ' or higher than 5. Assuming the sender isn\'t being silly,'
                ' you should upgrade Bitmessage because this message shall'
                ' be ignored.'
            )
            return
        cleartextStreamNumber, cleartextStreamNumberLength = decodeVarint(
            data[readPosition:readPosition + 10])
        readPosition += cleartextStreamNumberLength
        if broadcastVersion == 4:
            # v4 broadcasts are encrypted the same way the msgs are
            # encrypted. To see if we are interested in a v4 broadcast,
            # we try to decrypt it. This was replaced with v5 broadcasts
            # which include a tag which we check instead, just like we do
            # with v4 pubkeys.
            signedData = data[8:readPosition]
            initialDecryptionSuccessful = False
            for key, cryptorObject in sorted(
                    shared.MyECSubscriptionCryptorObjects.items(),
                    key=lambda x: random.random()):
                try:
                    # continue decryption attempts to avoid timing attacks
                    if initialDecryptionSuccessful:
                        cryptorObject.decrypt(data[readPosition:])
                    else:
                        decryptedData = cryptorObject.decrypt(
                            data[readPosition:])
                        # This is the RIPE hash of the sender's pubkey.
                        # We need this below to compare to the RIPE hash
                        # of the sender's address to verify that it was
                        # encrypted by with their key rather than some
                        # other key.
                        toRipe = key
                        initialDecryptionSuccessful = True
                        logger.info(
                            'EC decryption successful using key associated'
                            ' with ripe hash: %s', hexlify(key))
                except Exception:
                    logger.debug(
                        'cryptorObject.decrypt Exception:', exc_info=True)
            if not initialDecryptionSuccessful:
                # This is not a broadcast I am interested in.
                logger.debug(
                    'Length of time program spent failing to decrypt this'
                    ' v4 broadcast: %s seconds.',
                    time.time() - messageProcessingStartTime)
                return
        elif broadcastVersion == 5:
            embeddedTag = data[readPosition:readPosition+32]
@@ -642,13 +832,16 @@ class objectProcessor(threading.Thread):
                logger.debug('We\'re not interested in this broadcast.')
                return
            # We are interested in this broadcast because of its tag.
            # We're going to add some more data which is signed further down.
            signedData = data[8:readPosition]
            cryptorObject = shared.MyECSubscriptionCryptorObjects[embeddedTag]
            try:
                decryptedData = cryptorObject.decrypt(data[readPosition:])
                logger.debug('EC decryption successful')
            except Exception:
                logger.debug(
                    'Broadcast version %s decryption Unsuccessful.',
                    broadcastVersion)
                return
        # At this point this is a broadcast I have decrypted and am
        # interested in.
@@ -657,17 +850,31 @@ class objectProcessor(threading.Thread):
            decryptedData[readPosition:readPosition + 9])
        if broadcastVersion == 4:
            if sendersAddressVersion < 2 or sendersAddressVersion > 3:
                logger.warning(
                    'Cannot decode senderAddressVersion other than 2 or 3.'
                    ' Assuming the sender isn\'t being silly, you should'
                    ' upgrade Bitmessage because this message shall be'
                    ' ignored.'
                )
                return
        elif broadcastVersion == 5:
            if sendersAddressVersion < 4:
                logger.info(
                    'Cannot decode senderAddressVersion less than 4 for'
                    ' broadcast version number 5. Assuming the sender'
                    ' isn\'t being silly, you should upgrade Bitmessage'
                    ' because this message shall be ignored.'
                )
                return
        readPosition += sendersAddressVersionLength
        sendersStream, sendersStreamLength = decodeVarint(
            decryptedData[readPosition:readPosition + 9])
        if sendersStream != cleartextStreamNumber:
            logger.info(
                'The stream number outside of the encryption on which the'
                ' POW was completed doesn\'t match the stream number'
                ' inside the encryption. Ignoring broadcast.'
            )
            return
        readPosition += sendersStreamLength
        readPosition += 4
@@ -678,14 +885,18 @@ class objectProcessor(threading.Thread):
            decryptedData[readPosition:readPosition + 64]
        readPosition += 64
        if sendersAddressVersion >= 3:
            requiredAverageProofOfWorkNonceTrialsPerByte, varintLength = \
                decodeVarint(decryptedData[readPosition:readPosition + 10])
            readPosition += varintLength
            logger.debug(
                'sender\'s requiredAverageProofOfWorkNonceTrialsPerByte'
                ' is %s', requiredAverageProofOfWorkNonceTrialsPerByte)
            requiredPayloadLengthExtraBytes, varintLength = decodeVarint(
                decryptedData[readPosition:readPosition + 10])
            readPosition += varintLength
            logger.debug(
                'sender\'s requiredPayloadLengthExtraBytes is %s',
                requiredPayloadLengthExtraBytes)
        endOfPubkeyPosition = readPosition

        sha = hashlib.new('sha512')
@@ -696,13 +907,23 @@ class objectProcessor(threading.Thread):
        if broadcastVersion == 4:
            if toRipe != calculatedRipe:
                logger.info(
                    'The encryption key used to encrypt this message'
                    ' doesn\'t match the keys inbedded in the message'
                    ' itself. Ignoring message.'
                )
                return
        elif broadcastVersion == 5:
            calculatedTag = hashlib.sha512(hashlib.sha512(encodeVarint(
                sendersAddressVersion) + encodeVarint(sendersStream)
                + calculatedRipe).digest()
            ).digest()[32:]
            if calculatedTag != embeddedTag:
                logger.debug(
                    'The tag and encryption key used to encrypt this'
                    ' message doesn\'t match the keys inbedded in the'
                    ' message itself. Ignoring message.'
                )
                return

        messageEncodingType, messageEncodingTypeLength = decodeVarint(
            decryptedData[readPosition:readPosition + 9])
@@ -721,11 +942,14 @@ class objectProcessor(threading.Thread):
        signature = decryptedData[
            readPosition:readPosition + signatureLength]
        signedData += decryptedData[:readPositionAtBottomOfMessage]
        if not highlevelcrypto.verify(
                signedData, signature, hexlify(sendersPubSigningKey)):
            logger.debug('ECDSA verify failed')
            return
        logger.debug('ECDSA verify passed')

        # Used to detect and ignore duplicate messages in our inbox
        sigHash = hashlib.sha512(
            hashlib.sha512(signature).digest()).digest()[32:]
        fromAddress = encodeAddress(
            sendersAddressVersion, sendersStream, calculatedRipe)
@@ -749,7 +973,8 @@ class objectProcessor(threading.Thread):
        logger.debug('fromAddress: ' + fromAddress)

        try:
            decodedMessage = helper_msgcoding.MsgDecode(
                messageEncodingType, message)
        except helper_msgcoding.MsgDecodeException:
            return
        subject = decodedMessage.subject
@@ -779,31 +1004,36 @@ class objectProcessor(threading.Thread):
                call([apiNotifyPath, "newBroadcast"])

        # Display timing data
        logger.info(
            'Time spent processing this interesting broadcast: %s',
            time.time() - messageProcessingStartTime)

    def possibleNewPubkey(self, address):
        """
        We have inserted a pubkey into our pubkey table which we received
        from a pubkey, msg, or broadcast message. It might be one that we
        have been waiting for. Let's check.
        """

        # For address versions <= 3, we wait on a key with the correct
        # address version, stream number and RIPE hash.
        _, addressVersion, streamNumber, ripe = decodeAddress(address)
        if addressVersion <= 3:
            if address in state.neededPubkeys:
                del state.neededPubkeys[address]
                self.sendMessages(address)
            else:
                logger.debug(
                    'We don\'t need this pub key. We didn\'t ask for it.'
                    ' For address: %s', address)
        # For address versions >= 4, we wait on a pubkey with the correct tag.
        # Let us create the tag from the address and see if we were waiting
        # for it.
        elif addressVersion >= 4:
            tag = hashlib.sha512(hashlib.sha512(
                encodeVarint(addressVersion) + encodeVarint(streamNumber)
                + ripe).digest()
            ).digest()[32:]
            if tag in state.neededPubkeys:
                del state.neededPubkeys[tag]
                self.sendMessages(address)
@@ -816,30 +1046,40 @@ class objectProcessor(threading.Thread):
        """
        logger.info('We have been awaiting the arrival of this pubkey.')
        sqlExecute(
            "UPDATE sent SET status='doingmsgpow', retrynumber=0"
            " WHERE toaddress=?"
            " AND (status='awaitingpubkey' OR status='doingpubkeypow')"
            " AND folder='sent'", address)
        queues.workerQueue.put(('sendmessage', ''))

    def ackDataHasAValidHeader(self, ackData):
        if len(ackData) < protocol.Header.size:
            logger.info(
                'The length of ackData is unreasonably short. Not sending'
                ' ackData.')
            return False

        magic, command, payloadLength, checksum = protocol.Header.unpack(
            ackData[:protocol.Header.size])

        if magic != 0xE9BEB4D9:
            logger.info('Ackdata magic bytes were wrong. Not sending ackData.')
            return False
        payload = ackData[protocol.Header.size:]
        if len(payload) != payloadLength:
            logger.info(
                'ackData payload length doesn\'t match the payload length'
                ' specified in the header. Not sending ackdata.')
            return False
        # ~1.6 MB which is the maximum possible size of an inv message.
        if payloadLength > 1600100:
            # The largest message should be either an inv or a getdata
            # message at 1.6 MB in size.
            # That doesn't mean that the object may be that big. The
            # shared.checkAndShareObjectWithPeers function will verify
            # that it is no larger than 2^18 bytes.
            return False
        # test the checksum in the message.
        if checksum != hashlib.sha512(payload).digest()[0:4]:
            logger.info('ackdata checksum wrong. Not sending ackdata.')
            return False
        command = command.rstrip('\x00')