Merge branch '1233' into v0.6

Peter Šurda 2018-05-08 13:58:45 +02:00
commit 5d96a77a87
Signed by untrusted user: PeterSurda
GPG Key ID: 0C5F50C0B5F37D87
10 changed files with 136 additions and 185 deletions

View File

@@ -3,12 +3,13 @@
- try to explain what the code is about
- try to follow [PEP0008](https://www.python.org/dev/peps/pep-0008/)
- make the pull request against the ["v0.6" branch](https://github.com/Bitmessage/PyBitmessage/tree/v0.6)
- it should be possible to do a fast-forward merge of the pull requests
- PGP-sign the commits included in the pull request
- try to use a good editor that removes trailing whitespace, highlights potential python issues and uses unix line endings
- You can get paid for merged commits if you register at [Tip4Commit](https://tip4commit.com/github/Bitmessage/PyBitmessage)
If for some reason you don't want to use github, you can submit the patch using Bitmessage to the "bitmessage" chan, or to one of the developers.
## Translations
For helping with translations, please use [Transifex](https://www.transifex.com/bitmessage-project/pybitmessage/). There is no need to submit pull requests for translations.
For translating technical terms it is recommended to consult the [Microsoft Language Portal](https://www.microsoft.com/Language/en-US/Default.aspx).

View File

@@ -29,3 +29,4 @@ References
* [Installation](https://bitmessage.org/wiki/Compiling_instructions)
* [Discuss on Reddit](https://www.reddit.com/r/bitmessage)
* [Chat on Gitter](https://gitter.im/Bitmessage/PyBitmessage)

View File

@@ -230,7 +230,7 @@ class addressGenerator(threading.Thread, StoppableThread):
# need it if we end up passing the info to the API.
listOfNewAddressesToSendOutThroughTheAPI = []
-for i in range(numberOfAddressesToMake):
+for _ in range(numberOfAddressesToMake):
# This next section is a little bit strange. We're
# going to generate keys over and over until we find
# one that has a RIPEMD hash that starts with either

View File

@@ -3,19 +3,14 @@ import threading
import shared
import hashlib
import random
-from struct import unpack, pack
-import sys
import string
-from subprocess import call # used when the API must execute an outside program
+from subprocess import call # nosec
import traceback
from binascii import hexlify
-from pyelliptic.openssl import OpenSSL
import highlevelcrypto
from addresses import *
from bmconfigparser import BMConfigParser
-import helper_generic
-from helper_generic import addDataPadding
import helper_bitcoin
import helper_inbox
import helper_msgcoding
@@ -29,20 +24,21 @@ import tr
from debug import logger
import l10n
class objectProcessor(threading.Thread):
"""
The objectProcessor thread, of which there is only one, receives network
objects (msg, broadcast, pubkey, getpubkey) from the receiveDataThreads.
"""
def __init__(self):
-threading.Thread.__init__(self, name="objectProcessor")
"""
It may be the case that the last time Bitmessage was running, the user
closed it before it finished processing everything in the
objectProcessorQueue. Assuming that Bitmessage wasn't closed forcefully,
it should have saved the data in the queue into the objectprocessorqueue
table. Let's pull it out.
"""
+threading.Thread.__init__(self, name="objectProcessor")
queryreturn = sqlQuery(
'''SELECT objecttype, data FROM objectprocessorqueue''')
for row in queryreturn:
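For reference, the persistence the docstring describes is a simple round-trip: shutdown drains the in-memory queue into the objectprocessorqueue table, and the loop above reloads it at startup. A rough sketch of both halves, assuming the (objecttype, data) row layout visible in the SELECT above and the queues/helper_sql modules this file already uses; the helper names are illustrative:

```python
# Sketch of the save/restore round-trip described in the docstring. The row
# layout (objecttype, data) matches the SELECT above; the save half is assumed
# to run at shutdown and simply mirrors this restore loop.
import queues
from helper_sql import sqlExecute, sqlQuery

def restore_object_processor_queue():
    for objecttype, data in sqlQuery(
            '''SELECT objecttype, data FROM objectprocessorqueue'''):
        queues.objectProcessorQueue.put((objecttype, data))
    sqlExecute('''DELETE FROM objectprocessorqueue''')

def save_object_processor_queue():
    while queues.objectProcessorQueue.curSize > 0:
        objecttype, data = queues.objectProcessorQueue.get()
        sqlExecute('''INSERT INTO objectprocessorqueue VALUES (?,?)''',
                   objecttype, data)
```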
@@ -107,13 +103,12 @@ class objectProcessor(threading.Thread):
del shared.ackdataForWhichImWatching[data[readPosition:]]
sqlExecute('UPDATE sent SET status=?, lastactiontime=? WHERE ackdata=?',
'ackreceived',
int(time.time()),
data[readPosition:])
queues.UISignalQueue.put(('updateSentItemStatusByAckdata', (data[readPosition:], tr._translate("MainWindow",'Acknowledgement of the message received %1').arg(l10n.formatTimestamp()))))
else:
logger.debug('This object is not an acknowledgement bound for me.')
def processgetpubkey(self, data):
if len(data) > 200:
logger.info('getpubkey is abnormally long. Sanity check failed. Ignoring object.')
@@ -173,9 +168,9 @@ class objectProcessor(threading.Thread):
except:
lastPubkeySendTime = 0
if lastPubkeySendTime > time.time() - 2419200: # If the last time we sent our pubkey was more recent than 28 days ago...
logger.info('Found getpubkey-requested-item in my list of EC hashes BUT we already sent it recently. Ignoring request. The lastPubkeySendTime is: %s' % lastPubkeySendTime)
return
logger.info('Found getpubkey-requested-hash in my list of EC hashes. Telling Worker thread to do the POW for a pubkey message and send it out.')
if requestedAddressVersionNumber == 2:
queues.workerQueue.put((
'doPOWForMyV2Pubkey', requestedHash))
@@ -191,7 +186,6 @@ class objectProcessor(threading.Thread):
shared.numberOfPubkeysProcessed += 1
queues.UISignalQueue.put((
'updateNumberOfPubkeysProcessed', 'no data'))
-embeddedTime, = unpack('>Q', data[8:16])
readPosition = 20 # bypass the nonce, time, and object type
addressVersion, varintLength = decodeVarint(
data[readPosition:readPosition + 10])
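Nearly every field in these objects is length-prefixed with the protocol's var_int, which is why the code repeatedly calls decodeVarint() and then advances readPosition by the returned length. A minimal sketch of that big-endian encoding; the helper name is illustrative, the real implementation lives in addresses.py:

```python
# Minimal sketch of the big-endian var_int format that decodeVarint() decodes.
# Values below 0xfd fit in a single byte; otherwise a prefix byte selects a
# 2-, 4- or 8-byte unsigned integer.
from struct import unpack

def decode_varint_sketch(data):
    """Return (value, bytes_consumed) for a Bitmessage var_int."""
    if not data:
        return 0, 0
    first = ord(data[0:1])
    if first < 0xfd:
        return first, 1
    if first == 0xfd:
        return unpack('>H', data[1:3])[0], 3
    if first == 0xfe:
        return unpack('>I', data[1:5])[0], 5
    return unpack('>Q', data[1:9])[0], 9

# Callers advance their cursor by the returned length, exactly as above:
#   addressVersion, varintLength = decodeVarint(data[readPosition:readPosition + 10])
#   readPosition += varintLength
```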
@@ -209,7 +203,6 @@ class objectProcessor(threading.Thread):
if len(data) < 146: # sanity check. This is the minimum possible length.
logger.debug('(within processpubkey) payloadLength less than 146. Sanity check failed.')
return
-bitfieldBehaviors = data[readPosition:readPosition + 4]
readPosition += 4
publicSigningKey = data[readPosition:readPosition + 64]
# Is it possible for a public key to be invalid such that trying to
@@ -229,21 +222,19 @@ class objectProcessor(threading.Thread):
ripeHasher.update(sha.digest())
ripe = ripeHasher.digest()
logger.debug('within recpubkey, addressVersion: %s, streamNumber: %s \n\
ripe %s\n\
publicSigningKey in hex: %s\n\
publicEncryptionKey in hex: %s' % (addressVersion,
streamNumber,
hexlify(ripe),
hexlify(publicSigningKey),
hexlify(publicEncryptionKey)
)
)
address = encodeAddress(addressVersion, streamNumber, ripe)
queryreturn = sqlQuery(
'''SELECT usedpersonally FROM pubkeys WHERE address=? AND usedpersonally='yes' ''', address)
if queryreturn != []: # if this pubkey is already in our database and if we have used it personally:
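The ripe computed above is what a Bitmessage address encodes: the RIPEMD-160 of the SHA-512 of the sender's two public keys, which encodeAddress() then wraps together with the address version and stream number. A small sketch of that step; the helper name is illustrative, the surrounding code does this inline:

```python
# Sketch of the hashing performed just above: the address is built from the
# RIPEMD-160 of the SHA-512 of the concatenated public keys.
import hashlib

def ripe_from_pubkeys(public_signing_key, public_encryption_key):
    """public_*_key: 65-byte uncompressed EC points (leading 0x04 byte)."""
    sha = hashlib.sha512()
    sha.update(public_signing_key + public_encryption_key)
    ripe_hasher = hashlib.new('ripemd160')
    ripe_hasher.update(sha.digest())
    return ripe_hasher.digest()  # 20 bytes, passed to encodeAddress()
```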
@@ -258,16 +249,15 @@ class objectProcessor(threading.Thread):
if len(data) < 170: # sanity check.
logger.warning('(within processpubkey) payloadLength less than 170. Sanity check failed.')
return
-bitfieldBehaviors = data[readPosition:readPosition + 4]
readPosition += 4
publicSigningKey = '\x04' + data[readPosition:readPosition + 64]
readPosition += 64
publicEncryptionKey = '\x04' + data[readPosition:readPosition + 64]
readPosition += 64
-specifiedNonceTrialsPerByte, specifiedNonceTrialsPerByteLength = decodeVarint(
+_, specifiedNonceTrialsPerByteLength = decodeVarint(
data[readPosition:readPosition + 10])
readPosition += specifiedNonceTrialsPerByteLength
-specifiedPayloadLengthExtraBytes, specifiedPayloadLengthExtraBytesLength = decodeVarint(
+_, specifiedPayloadLengthExtraBytesLength = decodeVarint(
data[readPosition:readPosition + 10])
readPosition += specifiedPayloadLengthExtraBytesLength
endOfSignedDataPosition = readPosition
@@ -287,13 +277,12 @@ class objectProcessor(threading.Thread):
ripeHasher = hashlib.new('ripemd160')
ripeHasher.update(sha.digest())
ripe = ripeHasher.digest()
logger.debug('within recpubkey, addressVersion: %s, streamNumber: %s \n\
ripe %s\n\
publicSigningKey in hex: %s\n\
publicEncryptionKey in hex: %s' % (addressVersion,
streamNumber,
hexlify(ripe),
hexlify(publicSigningKey),
hexlify(publicEncryptionKey)
@@ -320,9 +309,9 @@ class objectProcessor(threading.Thread):
if tag not in state.neededPubkeys:
logger.info('We don\'t need this v4 pubkey. We didn\'t ask for it.')
return
# Let us try to decrypt the pubkey
-toAddress, cryptorObject = state.neededPubkeys[tag]
+toAddress, _ = state.neededPubkeys[tag]
if shared.decryptAndCheckPubkeyPayload(data, toAddress) == 'successful':
# At this point we know that we have been waiting on this pubkey.
# This function will command the workerThread to start work on
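The tag that indexes state.neededPubkeys is derived purely from the address we asked about: the double SHA-512 of the encoded address version, stream number and ripe, of which the last 32 bytes are the public tag (the same construction produces the v5 broadcast tag checked further down). A sketch with illustrative helper names; the codebase uses addresses.encodeVarint and hashlib directly:

```python
# Sketch of how the tag for a v4 pubkey (and a v5 broadcast) is derived from
# the address data.
import hashlib
from struct import pack

def encode_varint_sketch(n):
    if n < 0xfd:
        return pack('>B', n)
    if n <= 0xffff:
        return '\xfd' + pack('>H', n)
    if n <= 0xffffffff:
        return '\xfe' + pack('>I', n)
    return '\xff' + pack('>Q', n)

def tag_for_address(address_version, stream_number, ripe):
    double_hash = hashlib.sha512(hashlib.sha512(
        encode_varint_sketch(address_version) +
        encode_varint_sketch(stream_number) + ripe).digest()).digest()
    # First 32 bytes seed the key used to decrypt the object; the last
    # 32 bytes are the public tag compared against state.neededPubkeys.
    return double_hash[32:]
```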
@@ -343,10 +332,10 @@ class objectProcessor(threading.Thread):
readPosition = 20 # bypass the nonce, time, and object type
msgVersion, msgVersionLength = decodeVarint(data[readPosition:readPosition + 9])
if msgVersion != 1:
logger.info('Cannot understand message versions other than one. Ignoring message.')
return
readPosition += msgVersionLength
streamNumberAsClaimedByMsg, streamNumberAsClaimedByMsgLength = decodeVarint(
data[readPosition:readPosition + 9])
readPosition += streamNumberAsClaimedByMsgLength
@@ -355,7 +344,7 @@ class objectProcessor(threading.Thread):
# This is not an acknowledgement bound for me. See if it is a message
# bound for me by trying to decrypt it with my private keys.
for key, cryptorObject in sorted(shared.myECCryptorObjects.items(), key=lambda x: random.random()):
try:
if initialDecryptionSuccessful: # continue decryption attempts to avoid timing attacks
@@ -365,11 +354,11 @@ class objectProcessor(threading.Thread):
toRipe = key # This is the RIPE hash of my pubkeys. We need this below to compare to the destination_ripe included in the encrypted data.
initialDecryptionSuccessful = True
logger.info('EC decryption successful using key associated with ripe hash: %s.' % hexlify(key))
-except Exception as err:
+except Exception:
pass
if not initialDecryptionSuccessful:
# This is not a message bound for me.
logger.info('Length of time program spent failing to decrypt this message: %s seconds.' % (time.time() - messageProcessingStartTime,))
return
# This is a message bound for me.
@@ -380,10 +369,10 @@ class objectProcessor(threading.Thread):
decryptedData[readPosition:readPosition + 10])
readPosition += sendersAddressVersionNumberLength
if sendersAddressVersionNumber == 0:
logger.info('Cannot understand sendersAddressVersionNumber = 0. Ignoring message.')
return
if sendersAddressVersionNumber > 4:
logger.info('Sender\'s address version number %s not yet supported. Ignoring message.' % sendersAddressVersionNumber)
return
if len(decryptedData) < 170:
logger.info('Length of the unencrypted data is unreasonably short. Sanity check failed. Ignoring message.')
@@ -394,7 +383,6 @@ class objectProcessor(threading.Thread):
logger.info('sender\'s stream number is 0. Ignoring message.')
return
readPosition += sendersStreamNumberLength
-behaviorBitfield = decryptedData[readPosition:readPosition + 4]
readPosition += 4
pubSigningKey = '\x04' + decryptedData[
readPosition:readPosition + 64]
@@ -441,7 +429,7 @@ class objectProcessor(threading.Thread):
signature = decryptedData[
readPosition:readPosition + signatureLength]
signedData = data[8:20] + encodeVarint(1) + encodeVarint(streamNumberAsClaimedByMsg) + decryptedData[:positionOfBottomOfAckData]
if not highlevelcrypto.verify(signedData, signature, hexlify(pubSigningKey)):
logger.debug('ECDSA verify failed')
return
@@ -458,7 +446,7 @@ class objectProcessor(threading.Thread):
ripe.update(sha.digest())
fromAddress = encodeAddress(
sendersAddressVersionNumber, sendersStreamNumber, ripe.digest())
# Let's store the public key in case we want to reply to this
# person.
sqlExecute(
@@ -468,12 +456,12 @@ class objectProcessor(threading.Thread):
decryptedData[:endOfThePublicKeyPosition],
int(time.time()),
'yes')
# Check to see whether we happen to be awaiting this
# pubkey in order to send a message. If we are, it will do the POW
# and send it.
self.possibleNewPubkey(fromAddress)
# If this message is bound for one of my version 3 addresses (or
# higher), then we must check to make sure it meets our demanded
# proof of work requirement. If this is bound for one of my chan
@@ -565,22 +553,22 @@ class objectProcessor(threading.Thread):
# We really should have a discussion about how to
# set the TTL for mailing list broadcasts. This is obviously
# hard-coded.
TTL = 2*7*24*60*60 # 2 weeks
t = ('',
toAddress,
ripe,
fromAddress,
subject,
message,
ackdata,
int(time.time()), # sentTime (this doesn't change)
int(time.time()), # lastActionTime
0,
'broadcastqueued',
0,
'sent',
messageEncodingType,
TTL)
helper_sent.insert(t)
@@ -601,15 +589,14 @@ class objectProcessor(threading.Thread):
) - messageProcessingStartTime
shared.successfullyDecryptMessageTimings.append(
timeRequiredToAttemptToDecryptMessage)
-sum = 0
+timing_sum = 0
for item in shared.successfullyDecryptMessageTimings:
-sum += item
+timing_sum += item
logger.debug('Time to decrypt this message successfully: %s\n\
Average time for all message decryption successes since startup: %s.' %
-(timeRequiredToAttemptToDecryptMessage, sum / len(shared.successfullyDecryptMessageTimings))
+(timeRequiredToAttemptToDecryptMessage, timing_sum / len(shared.successfullyDecryptMessageTimings))
)
def processbroadcast(self, data):
messageProcessingStartTime = time.time()
shared.numberOfBroadcastsProcessed += 1
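The rename from sum to timing_sum avoids shadowing Python's built-in sum(); with the builtin no longer shadowed, the same average could be computed more directly, for example:

```python
# Equivalent average using the sum() builtin that the old local name shadowed.
timings = shared.successfullyDecryptMessageTimings
average_decryption_time = sum(timings) / len(timings)
```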
@@ -621,17 +608,15 @@ class objectProcessor(threading.Thread):
data[readPosition:readPosition + 9])
readPosition += broadcastVersionLength
if broadcastVersion < 4 or broadcastVersion > 5:
logger.info('Cannot decode incoming broadcast versions less than 4 or higher than 5. Assuming the sender isn\'t being silly, you should upgrade Bitmessage because this message shall be ignored.')
return
cleartextStreamNumber, cleartextStreamNumberLength = decodeVarint(
data[readPosition:readPosition + 10])
readPosition += cleartextStreamNumberLength
if broadcastVersion == 4:
-"""
-v4 broadcasts are encrypted the same way the msgs are encrypted. To see if we are interested in a
-v4 broadcast, we try to decrypt it. This was replaced with v5 broadcasts which include a tag which
-we check instead, just like we do with v4 pubkeys.
-"""
+# v4 broadcasts are encrypted the same way the msgs are encrypted. To see if we are interested in a
+# v4 broadcast, we try to decrypt it. This was replaced with v5 broadcasts which include a tag which
+# we check instead, just like we do with v4 pubkeys.
signedData = data[8:readPosition]
initialDecryptionSuccessful = False
for key, cryptorObject in sorted(shared.MyECSubscriptionCryptorObjects.items(), key=lambda x: random.random()):
@@ -643,7 +628,7 @@ class objectProcessor(threading.Thread):
toRipe = key # This is the RIPE hash of the sender's pubkey. We need this below to compare to the RIPE hash of the sender's address to verify that it was encrypted by with their key rather than some other key.
initialDecryptionSuccessful = True
logger.info('EC decryption successful using key associated with ripe hash: %s' % hexlify(key))
-except Exception as err:
+except Exception:
pass
# print 'cryptorObject.decrypt Exception:', err
if not initialDecryptionSuccessful:
@@ -654,7 +639,7 @@ class objectProcessor(threading.Thread):
embeddedTag = data[readPosition:readPosition+32]
readPosition += 32
if embeddedTag not in shared.MyECSubscriptionCryptorObjects:
logger.debug('We\'re not interested in this broadcast.')
return
# We are interested in this broadcast because of its tag.
signedData = data[8:readPosition] # We're going to add some more data which is signed further down.
@@ -662,8 +647,8 @@ class objectProcessor(threading.Thread):
try:
decryptedData = cryptorObject.decrypt(data[readPosition:])
logger.debug('EC decryption successful')
-except Exception as err:
+except Exception:
logger.debug('Broadcast version %s decryption Unsuccessful.' % broadcastVersion)
return
# At this point this is a broadcast I have decrypted and am
# interested in.
@@ -676,16 +661,15 @@ class objectProcessor(threading.Thread):
return
elif broadcastVersion == 5:
if sendersAddressVersion < 4:
logger.info('Cannot decode senderAddressVersion less than 4 for broadcast version number 5. Assuming the sender isn\'t being silly, you should upgrade Bitmessage because this message shall be ignored.')
return
readPosition += sendersAddressVersionLength
sendersStream, sendersStreamLength = decodeVarint(
decryptedData[readPosition:readPosition + 9])
if sendersStream != cleartextStreamNumber:
logger.info('The stream number outside of the encryption on which the POW was completed doesn\'t match the stream number inside the encryption. Ignoring broadcast.')
return
readPosition += sendersStreamLength
-behaviorBitfield = decryptedData[readPosition:readPosition + 4]
readPosition += 4
sendersPubSigningKey = '\x04' + \
decryptedData[readPosition:readPosition + 64]
@@ -712,13 +696,13 @@ class objectProcessor(threading.Thread):
if broadcastVersion == 4:
if toRipe != calculatedRipe:
logger.info('The encryption key used to encrypt this message doesn\'t match the keys inbedded in the message itself. Ignoring message.')
return
elif broadcastVersion == 5:
calculatedTag = hashlib.sha512(hashlib.sha512(encodeVarint(
sendersAddressVersion) + encodeVarint(sendersStream) + calculatedRipe).digest()).digest()[32:]
if calculatedTag != embeddedTag:
logger.debug('The tag and encryption key used to encrypt this message doesn\'t match the keys inbedded in the message itself. Ignoring message.')
return
messageEncodingType, messageEncodingTypeLength = decodeVarint(
decryptedData[readPosition:readPosition + 9])
@@ -804,10 +788,10 @@ class objectProcessor(threading.Thread):
pubkey, msg, or broadcast message. It might be one that we have been
waiting for. Let's check.
"""
# For address versions <= 3, we wait on a key with the correct address version,
# stream number, and RIPE hash.
-status, addressVersion, streamNumber, ripe = decodeAddress(address)
+_, addressVersion, streamNumber, ripe = decodeAddress(address)
if addressVersion <=3:
if address in state.neededPubkeys:
del state.neededPubkeys[address]
@@ -840,7 +824,7 @@ class objectProcessor(threading.Thread):
if len(ackData) < protocol.Header.size:
logger.info('The length of ackData is unreasonably short. Not sending ackData.')
return False
magic,command,payloadLength,checksum = protocol.Header.unpack(ackData[:protocol.Header.size])
if magic != 0xE9BEB4D9:
logger.info('Ackdata magic bytes were wrong. Not sending ackData.')
@@ -849,13 +833,11 @@ class objectProcessor(threading.Thread):
if len(payload) != payloadLength:
logger.info('ackData payload length doesn\'t match the payload length specified in the header. Not sending ackdata.')
return False
if payloadLength > 1600100: # ~1.6 MB which is the maximum possible size of an inv message.
-"""
-The largest message should be either an inv or a getdata message at 1.6 MB in size.
-That doesn't mean that the object may be that big. The
-shared.checkAndShareObjectWithPeers function will verify that it is no larger than
-2^18 bytes.
-"""
+# The largest message should be either an inv or a getdata message at 1.6 MB in size.
+# That doesn't mean that the object may be that big. The
+# shared.checkAndShareObjectWithPeers function will verify that it is no larger than
+# 2^18 bytes.
return False
if checksum != hashlib.sha512(payload).digest()[0:4]: # test the checksum in the message.
logger.info('ackdata checksum wrong. Not sending ackdata.')
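For reference, the checks above follow the standard Bitmessage message header: 4-byte magic 0xE9BEB4D9, a 12-byte command, a 4-byte payload length, and the first 4 bytes of the payload's SHA-512 as a checksum. A standalone sketch using the same layout (protocol.Header in the codebase is the packed equivalent of this format string):

```python
# Standalone sketch of the ack-header checks above.
import hashlib
from struct import calcsize, unpack

HEADER_FMT = '>L12sL4s'  # magic, command, payload length, checksum

def looks_like_valid_ack(ack_data):
    header_size = calcsize(HEADER_FMT)
    if len(ack_data) < header_size:
        return False
    magic, _command, payload_length, checksum = unpack(
        HEADER_FMT, ack_data[:header_size])
    payload = ack_data[header_size:]
    return (magic == 0xE9BEB4D9
            and len(payload) == payload_length
            and payload_length <= 1600100
            and checksum == hashlib.sha512(payload).digest()[:4])
```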

View File

@@ -18,10 +18,7 @@ class ObjectProcessorQueue(Queue.Queue):
Queue.Queue.put(self, item, block, timeout)
def get(self, block = True, timeout = None):
-try:
-item = Queue.Queue.get(self, block, timeout)
-except Queue.Empty as e:
-raise Queue.Empty()
+item = Queue.Queue.get(self, block, timeout)
with self.sizeLock:
self.curSize -= len(item[1])
return item
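The removed try/except added nothing: Queue.Empty already propagates on its own. What the class really adds on top of Queue.Queue is the byte accounting visible in the curSize bookkeeping above. A self-contained sketch of that pattern (not the project's class, just the idea):

```python
# Sketch of the byte-accounting idea above: a queue that tracks the total size
# of queued payloads (item[1]) so callers can throttle on bytes, not item count.
import Queue
import threading

class SizeTrackingQueue(Queue.Queue):
    def __init__(self):
        Queue.Queue.__init__(self)
        self.sizeLock = threading.Lock()
        self.curSize = 0  # total bytes of queued payloads

    def put(self, item, block=True, timeout=None):
        with self.sizeLock:
            self.curSize += len(item[1])
        Queue.Queue.put(self, item, block, timeout)

    def get(self, block=True, timeout=None):
        item = Queue.Queue.get(self, block, timeout)
        with self.sizeLock:
            self.curSize -= len(item[1])
        return item
```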

View File

@@ -1,3 +1,22 @@
+"""
+The singleCleaner class is a timer-driven thread that cleans data structures
+to free memory, resends messages when a remote node doesn't respond, and
+sends pong messages to keep connections alive if the network isn't busy.
+It cleans these data structures in memory:
+inventory (moves data to the on-disk sql database)
+inventorySets (clears then reloads data out of sql database)
+It cleans these tables on the disk:
+inventory (clears expired objects)
+pubkeys (clears pubkeys older than 4 weeks old which we have not used personally)
+knownNodes (clears addresses which have not been online for over 3 days)
+It resends messages when there has been no response:
+resends getpubkey messages in 5 days (then 10 days, then 20 days, etc...)
+resends msg messages in 5 days (then 10 days, then 20 days, etc...)
+"""
import gc
import threading
import shared
@@ -15,25 +34,6 @@ import knownnodes
import queues
import state
-"""
-The singleCleaner class is a timer-driven thread that cleans data structures
-to free memory, resends messages when a remote node doesn't respond, and
-sends pong messages to keep connections alive if the network isn't busy.
-It cleans these data structures in memory:
-inventory (moves data to the on-disk sql database)
-inventorySets (clears then reloads data out of sql database)
-It cleans these tables on the disk:
-inventory (clears expired objects)
-pubkeys (clears pubkeys older than 4 weeks old which we have not used personally)
-knownNodes (clears addresses which have not been online for over 3 days)
-It resends messages when there has been no response:
-resends getpubkey messages in 5 days (then 10 days, then 20 days, etc...)
-resends msg messages in 5 days (then 10 days, then 20 days, etc...)
-"""
class singleCleaner(threading.Thread, StoppableThread):
cycleLength = 300
@@ -61,7 +61,7 @@ class singleCleaner(threading.Thread, StoppableThread):
'updateStatusBar', 'Doing housekeeping (Flushing inventory in memory to disk...)'))
Inventory().flush()
queues.UISignalQueue.put(('updateStatusBar', ''))
# If we are running as a daemon then we are going to fill up the UI
# queue which will never be handled by a UI. We should clear it to
# save memory.

View File

@@ -5,7 +5,7 @@ import threading
import hashlib
from struct import pack
# used when the API must execute an outside program
-from subprocess import call
+from subprocess import call # nosec
from binascii import hexlify, unhexlify
import tr
@@ -228,7 +228,7 @@ class singleWorker(threading.Thread, StoppableThread):
# This function also broadcasts out the pubkey message
# once it is done with the POW
-def doPOWForMyV2Pubkey(self, hash):
+def doPOWForMyV2Pubkey(self, adressHash):
# Look up my stream number based on my address hash
"""configSections = shared.config.addresses()
for addressInKeysFile in configSections:
@@ -239,9 +239,9 @@ class singleWorker(threading.Thread, StoppableThread):
if hash == hashFromThisParticularAddress:
myAddress = addressInKeysFile
break"""
-myAddress = shared.myAddressesByHash[hash]
+myAddress = shared.myAddressesByHash[adressHash]
# status
-_, addressVersionNumber, streamNumber, hash = decodeAddress(myAddress)
+_, addressVersionNumber, streamNumber, adressHash = decodeAddress(myAddress)
# 28 days from now plus or minus five minutes
TTL = int(28 * 24 * 60 * 60 + helper_random.randomrandrange(-300, 300))
@@ -293,31 +293,31 @@ class singleWorker(threading.Thread, StoppableThread):
# does the necessary POW and sends it out. If it *is* a chan then it
# assembles the pubkey and stores is in the pubkey table so that we can
# send messages to "ourselves".
-def sendOutOrStoreMyV3Pubkey(self, hash):
+def sendOutOrStoreMyV3Pubkey(self, adressHash):
try:
-myAddress = shared.myAddressesByHash[hash]
+myAddress = shared.myAddressesByHash[adressHash]
except:
# The address has been deleted.
return
if BMConfigParser().safeGetBoolean(myAddress, 'chan'):
logger.info('This is a chan address. Not sending pubkey.')
return
-status, addressVersionNumber, streamNumber, hash = decodeAddress(
+_, addressVersionNumber, streamNumber, adressHash = decodeAddress(
myAddress)
# 28 days from now plus or minus five minutes
TTL = int(28 * 24 * 60 * 60 + helper_random.randomrandrange(-300, 300))
embeddedTime = int(time.time() + TTL)
# signedTimeForProtocolV2 = embeddedTime - TTL
-"""
-According to the protocol specification, the expiresTime
-along with the pubkey information is signed. But to be
-backwards compatible during the upgrade period, we shall sign
-not the expiresTime but rather the current time. There must be
-precisely a 28 day difference between the two. After the upgrade
-period we'll switch to signing the whole payload with the
-expiresTime time.
-"""
+# According to the protocol specification, the expiresTime
+# along with the pubkey information is signed. But to be
+# backwards compatible during the upgrade period, we shall sign
+# not the expiresTime but rather the current time. There must be
+# precisely a 28 day difference between the two. After the upgrade
+# period we'll switch to signing the whole payload with the
+# expiresTime time.
payload = pack('>Q', (embeddedTime))
payload += '\x00\x00\x00\x01' # object type: pubkey
payload += encodeVarint(addressVersionNumber) # Address version number
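The time fields above are related by simple arithmetic: the embedded expiry time is now + TTL, with TTL being 28 days plus up to five minutes of jitter, so the "current time" the comment says gets signed is just embeddedTime - TTL. A sketch of that calculation, with the stdlib random module standing in for helper_random:

```python
# Sketch of the TTL / embeddedTime arithmetic used while assembling the pubkey object.
import random
import time

TTL = int(28 * 24 * 60 * 60 + random.randrange(-300, 300))  # ~28 days +/- 5 min
embeddedTime = int(time.time() + TTL)          # expiry written into the payload
signedTimeForProtocolV2 = embeddedTime - TTL   # i.e. "now", the value actually signed
```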
@@ -379,7 +379,7 @@ class singleWorker(threading.Thread, StoppableThread):
if shared.BMConfigParser().safeGetBoolean(myAddress, 'chan'):
logger.info('This is a chan address. Not sending pubkey.')
return
-status, addressVersionNumber, streamNumber, hash = decodeAddress(
+_, addressVersionNumber, streamNumber, addressHash = decodeAddress(
myAddress)
# 28 days from now plus or minus five minutes
@@ -419,7 +419,7 @@ class singleWorker(threading.Thread, StoppableThread):
# when they want to send a message.
doubleHashOfAddressData = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber) +
-encodeVarint(streamNumber) + hash
+encodeVarint(streamNumber) + addressHash
).digest()).digest()
payload += doubleHashOfAddressData[32:] # the tag
signature = highlevelcrypto.sign(
@@ -462,6 +462,7 @@ class singleWorker(threading.Thread, StoppableThread):
# Reset just in case
sqlExecute(
'''UPDATE sent SET status='broadcastqueued' '''
'''WHERE status = 'doingbroadcastpow' ''')
queryreturn = sqlQuery(
'''SELECT fromaddress, subject, message, '''
@@ -848,7 +849,7 @@ class singleWorker(threading.Thread, StoppableThread):
# to bypass the address version whose length is definitely 1
readPosition = 1
-streamNumber, streamNumberLength = decodeVarint(
+_, streamNumberLength = decodeVarint(
pubkeyPayload[readPosition:readPosition + 10])
readPosition += streamNumberLength
behaviorBitfield = pubkeyPayload[readPosition:readPosition + 4]
@@ -925,7 +926,7 @@ class singleWorker(threading.Thread, StoppableThread):
# regardless of what they say is allowed in order
# to get our message to propagate through the network.
if requiredAverageProofOfWorkNonceTrialsPerByte < \
defaults.networkDefaultProofOfWorkNonceTrialsPerByte:
requiredAverageProofOfWorkNonceTrialsPerByte = \
defaults.networkDefaultProofOfWorkNonceTrialsPerByte
if requiredPayloadLengthExtraBytes < \
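Clamping requiredAverageProofOfWorkNonceTrialsPerByte and requiredPayloadLengthExtraBytes to the network defaults matters because both values feed the protocol-v3 proof-of-work target. A sketch of that target formula as given in the protocol specification; function and variable names are illustrative:

```python
# Sketch of the protocol-v3 proof-of-work target these two values feed into.
# The worker searches for a nonce whose double-SHA512 trial value, read as a
# 64-bit integer, is <= this target.
def pow_target(payload_length, ttl, nonce_trials_per_byte, payload_length_extra_bytes):
    effective_length = (
        payload_length + 8 + payload_length_extra_bytes +
        (ttl * (payload_length + 8 + payload_length_extra_bytes)) // (2 ** 16))
    return 2 ** 64 // (nonce_trials_per_byte * effective_length)
```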

View File

@@ -19,7 +19,7 @@ class smtpDeliver(threading.Thread, StoppableThread):
def __init__(self, parent=None):
threading.Thread.__init__(self, name="smtpDeliver")
self.initStop()
def stopThread(self):
try:
queues.UISignallerQueue.put(("stopThread", "data"))

View File

@@ -6,7 +6,6 @@ from email.header import decode_header
import re
import signal
import smtpd
-import socket
import threading
import time
@@ -16,7 +15,6 @@ from debug import logger
from helper_sql import sqlExecute
from helper_ackPayload import genAckPayload
from helper_threading import StoppableThread
-from pyelliptic.openssl import OpenSSL
import queues
from version import softwareVersion
@@ -68,7 +66,6 @@ class smtpServerPyBitmessage(smtpd.SMTPServer):
status, addressVersionNumber, streamNumber, ripe = decodeAddress(toAddress)
stealthLevel = BMConfigParser().safeGetInt('bitmessagesettings', 'ackstealthlevel')
ackdata = genAckPayload(streamNumber, stealthLevel)
-t = ()
sqlExecute(
'''INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)''',
'',
@@ -97,7 +94,7 @@ class smtpServerPyBitmessage(smtpd.SMTPServer):
ret.append(unicode(h[0], h[1]))
else:
ret.append(h[0].decode("utf-8", errors='replace'))
return ret
@@ -151,7 +148,7 @@ class smtpServerPyBitmessage(smtpd.SMTPServer):
raise Exception("Bad domain %s", domain)
logger.debug("Sending %s to %s about %s", sender, rcpt, msg_subject)
self.send(sender, rcpt, msg_subject, body)
logger.info("Relayed %s to %s", sender, rcpt)
except Exception as err:
logger.error( "Bad to %s: %s", to, repr(err))
continue
@@ -162,21 +159,11 @@ class smtpServer(threading.Thread, StoppableThread):
threading.Thread.__init__(self, name="smtpServerThread")
self.initStop()
self.server = smtpServerPyBitmessage(('127.0.0.1', LISTENPORT), None)
def stopThread(self):
super(smtpServer, self).stopThread()
self.server.close()
return
-s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-# for ip in ('127.0.0.1', BMConfigParser().get('bitmessagesettings', 'onionbindip')):
-for ip in ('127.0.0.1'):
-try:
-s.connect((ip, LISTENPORT))
-s.shutdown(socket.SHUT_RDWR)
-s.close()
-break
-except:
-pass
def run(self):
asyncore.loop(1)

View File

@@ -11,9 +11,7 @@ import helper_sql
from namecoin import ensureNamecoinOptions
import paths
import queues
-import random
import state
-import string
import tr#anslate
import helper_random
# This thread exists because SQLITE3 is so un-threadsafe that we must
@@ -26,11 +24,11 @@ class sqlThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self, name="SQL")
def run(self):
self.conn = sqlite3.connect(state.appdata + 'messages.dat')
self.conn.text_factory = str
self.cur = self.conn.cursor()
self.cur.execute('PRAGMA secure_delete = true')
try:
@@ -178,24 +176,11 @@ class sqlThread(threading.Thread):
self.cur.execute(
'''update sent set status='broadcastqueued' where status='broadcastpending' ''')
self.conn.commit()
if not BMConfigParser().has_option('bitmessagesettings', 'sockslisten'):
BMConfigParser().set('bitmessagesettings', 'sockslisten', 'false')
ensureNamecoinOptions()
-"""# Add a new column to the inventory table to store the first 20 bytes of encrypted messages to support Android app
-item = '''SELECT value FROM settings WHERE key='version';'''
-parameters = ''
-self.cur.execute(item, parameters)
-if int(self.cur.fetchall()[0][0]) == 1:
-print 'upgrading database'
-item = '''ALTER TABLE inventory ADD first20bytesofencryptedmessage blob DEFAULT '' '''
-parameters = ''
-self.cur.execute(item, parameters)
-item = '''update settings set value=? WHERE key='version';'''
-parameters = (2,)
-self.cur.execute(item, parameters)"""
# Let's get rid of the first20bytesofencryptedmessage field in the inventory table.
item = '''SELECT value FROM settings WHERE key='version';'''
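Apart from the commented-out block being dropped, every messages.dat upgrade in this thread follows the same pattern visible here: read the integer schema version from the settings table, apply one change if it matches the step being upgraded from, and bump the stored version. A generic sketch of one such step (the helper and the example DDL are hypothetical, not one of the real migrations):

```python
# Generic sketch of the messages.dat upgrade pattern used throughout run().
def migrate_step(cur, from_version, ddl_statement):
    cur.execute('''SELECT value FROM settings WHERE key='version';''')
    if int(cur.fetchall()[0][0]) == from_version:
        cur.execute(ddl_statement)
        cur.execute('''update settings set value=? WHERE key='version';''',
                    (from_version + 1,))

# Hypothetical usage:
#   migrate_step(self.cur, 99, '''ALTER TABLE inbox ADD examplecolumn text DEFAULT '' ''')
```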
@@ -239,9 +224,6 @@ class sqlThread(threading.Thread):
# Raise the default required difficulty from 1 to 2
# With the change to protocol v3, this is obsolete.
if BMConfigParser().getint('bitmessagesettings', 'settingsversion') == 6:
-"""if int(shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')) == defaults.networkDefaultProofOfWorkNonceTrialsPerByte:
-shared.config.set('bitmessagesettings','defaultnoncetrialsperbyte', str(defaults.networkDefaultProofOfWorkNonceTrialsPerByte * 2))
-"""
BMConfigParser().set('bitmessagesettings', 'settingsversion', '7')
# Add a new column to the pubkeys table to store the address version.
@@ -259,7 +241,7 @@ class sqlThread(threading.Thread):
item = '''update settings set value=? WHERE key='version';'''
parameters = (5,)
self.cur.execute(item, parameters)
if not BMConfigParser().has_option('bitmessagesettings', 'useidenticons'):
BMConfigParser().set('bitmessagesettings', 'useidenticons', 'True')
if not BMConfigParser().has_option('bitmessagesettings', 'identiconsuffix'): # acts as a salt
@@ -271,7 +253,7 @@ class sqlThread(threading.Thread):
'bitmessagesettings', 'stopresendingafterxdays', '')
BMConfigParser().set(
'bitmessagesettings', 'stopresendingafterxmonths', '')
BMConfigParser().set('bitmessagesettings', 'settingsversion', '8')
# Add a new table: objectprocessorqueue with which to hold objects
# that have yet to be processed if the user shuts down Bitmessage.
@@ -286,7 +268,7 @@ class sqlThread(threading.Thread):
item = '''update settings set value=? WHERE key='version';'''
parameters = (6,)
self.cur.execute(item, parameters)
# changes related to protocol v3
# In table inventory and objectprocessorqueue, objecttype is now an integer (it was a human-friendly string previously)
item = '''SELECT value FROM settings WHERE key='version';'''
@@ -303,8 +285,8 @@ class sqlThread(threading.Thread):
parameters = (7,)
self.cur.execute(item, parameters)
logger.debug('Finished dropping and recreating the inventory table.')
# With the change to protocol version 3, reset the user-settable difficulties to 1
if BMConfigParser().getint('bitmessagesettings', 'settingsversion') == 8:
BMConfigParser().set('bitmessagesettings','defaultnoncetrialsperbyte', str(defaults.networkDefaultProofOfWorkNonceTrialsPerByte))
BMConfigParser().set('bitmessagesettings','defaultpayloadlengthextrabytes', str(defaults.networkDefaultPayloadLengthExtraBytes))
@@ -313,7 +295,7 @@ class sqlThread(threading.Thread):
BMConfigParser().set('bitmessagesettings','maxacceptablenoncetrialsperbyte', str(previousTotalDifficulty * 1000))
BMConfigParser().set('bitmessagesettings','maxacceptablepayloadlengthextrabytes', str(previousSmallMessageDifficulty * 1000))
BMConfigParser().set('bitmessagesettings', 'settingsversion', '9')
# Adjust the required POW values for each of this user's addresses to conform to protocol v3 norms.
if BMConfigParser().getint('bitmessagesettings', 'settingsversion') == 9:
for addressInKeysFile in BMConfigParser().addressses():
@@ -332,7 +314,7 @@ class sqlThread(threading.Thread):
BMConfigParser().set('bitmessagesettings', 'maxuploadrate', '0')
BMConfigParser().set('bitmessagesettings', 'settingsversion', '10')
BMConfigParser().save()
# sanity check
if BMConfigParser().getint('bitmessagesettings', 'maxacceptablenoncetrialsperbyte') == 0:
BMConfigParser().set('bitmessagesettings','maxacceptablenoncetrialsperbyte', str(defaults.ridiculousDifficulty * defaults.networkDefaultProofOfWorkNonceTrialsPerByte))
@@ -361,7 +343,7 @@ class sqlThread(threading.Thread):
logger.debug('Finished clearing currently held pubkeys.')
# Add a new column to the inbox table to store the hash of the message signature.
# We'll use this as temporary message UUID in order to detect duplicates.
item = '''SELECT value FROM settings WHERE key='version';'''
parameters = ''
self.cur.execute(item, parameters)
@@ -374,11 +356,11 @@ class sqlThread(threading.Thread):
item = '''update settings set value=? WHERE key='version';'''
parameters = (9,)
self.cur.execute(item, parameters)
# TTL is now user-specifiable. Let's add an option to save whatever the user selects.
if not BMConfigParser().has_option('bitmessagesettings', 'ttl'):
BMConfigParser().set('bitmessagesettings', 'ttl', '367200')
# We'll also need a `sleeptill` field and a `ttl` field. Also we can combine
# the pubkeyretrynumber and msgretrynumber into one.
item = '''SELECT value FROM settings WHERE key='version';'''
parameters = ''
@@ -399,16 +381,16 @@ class sqlThread(threading.Thread):
logger.info('In messages.dat database, finished making TTL-related changes.')
logger.debug('In messages.dat database, adding address field to the pubkeys table.')
# We're going to have to calculate the address for each row in the pubkeys
# table. Then we can take out the hash field.
self.cur.execute('''ALTER TABLE pubkeys ADD address text DEFAULT '' ''')
self.cur.execute('''SELECT hash, addressversion FROM pubkeys''')
queryResult = self.cur.fetchall()
from addresses import encodeAddress
for row in queryResult:
-hash, addressVersion = row
+addressHash, addressVersion = row
address = encodeAddress(addressVersion, 1, hash)
item = '''UPDATE pubkeys SET address=? WHERE hash=?;'''
-parameters = (address, hash)
+parameters = (address, addressHash)
self.cur.execute(item, parameters)
# Now we can remove the hash field from the pubkeys table.
self.cur.execute(
@@ -423,7 +405,7 @@ class sqlThread(threading.Thread):
self.cur.execute( '''DROP TABLE pubkeys_backup''')
logger.debug('In messages.dat database, done adding address field to the pubkeys table and removing the hash field.')
self.cur.execute('''update settings set value=10 WHERE key='version';''')
if not BMConfigParser().has_option('bitmessagesettings', 'onionhostname'):
BMConfigParser().set('bitmessagesettings', 'onionhostname', '')
if not BMConfigParser().has_option('bitmessagesettings', 'onionport'):
@@ -445,10 +427,10 @@ class sqlThread(threading.Thread):
BMConfigParser().set('bitmessagesettings', 'maxoutboundconnections', '8')
BMConfigParser().save()
# Are you hoping to add a new option to the keys.dat file of existing
# Bitmessage users or modify the SQLite database? Add it right above this line!
try:
testpayload = '\x00\x00'
t = ('1234', 1, testpayload, '12345678', 'no')