Fixed: Codacy issues in src/class_*.py

This commit is contained in:
coffeedogs 2018-05-02 16:29:55 +01:00
parent c9a2240b44
commit c99c3fc782
No known key found for this signature in database
GPG Key ID: 9D818C503D0B7E70
9 changed files with 137 additions and 184 deletions

View File

@ -5,6 +5,7 @@
- make the pull request against the ["v0.6" branch](https://github.com/Bitmessage/PyBitmessage/tree/v0.6)
- it should be possible to do a fast-forward merge of the pull requests
- PGP-sign the commits included in the pull request
- try to use a good editor that removes trailing whitespace, highlights potential Python issues and uses Unix line endings
- You can get paid for merged commits if you register at [Tip4Commit](https://tip4commit.com/github/Bitmessage/PyBitmessage)
If for some reason you don't want to use github, you can submit the patch using Bitmessage to the "bitmessage" chan, or to one of the developers.

View File

@ -230,7 +230,7 @@ class addressGenerator(threading.Thread, StoppableThread):
# need it if we end up passing the info to the API.
listOfNewAddressesToSendOutThroughTheAPI = []
for i in range(numberOfAddressesToMake):
for _ in range(numberOfAddressesToMake):
# This next section is a little bit strange. We're
# going to generate keys over and over until we find
# one that has a RIPEMD hash that starts with either

View File

@ -3,19 +3,14 @@ import threading
import shared
import hashlib
import random
from struct import unpack, pack
import sys
import string
from subprocess import call # used when the API must execute an outside program
from subprocess import call # nosec
import traceback
from binascii import hexlify
from pyelliptic.openssl import OpenSSL
import highlevelcrypto
from addresses import *
from bmconfigparser import BMConfigParser
import helper_generic
from helper_generic import addDataPadding
import helper_bitcoin
import helper_inbox
import helper_msgcoding
@ -29,13 +24,13 @@ import tr
from debug import logger
import l10n
class objectProcessor(threading.Thread):
"""
The objectProcessor thread, of which there is only one, receives network
objects (msg, broadcast, pubkey, getpubkey) from the receiveDataThreads.
"""
def __init__(self):
threading.Thread.__init__(self, name="objectProcessor")
"""
It may be the case that the last time Bitmessage was running, the user
closed it before it finished processing everything in the
@ -43,6 +38,7 @@ class objectProcessor(threading.Thread):
it should have saved the data in the queue into the objectprocessorqueue
table. Let's pull it out.
"""
threading.Thread.__init__(self, name="objectProcessor")
queryreturn = sqlQuery(
'''SELECT objecttype, data FROM objectprocessorqueue''')
for row in queryreturn:
@ -113,7 +109,6 @@ class objectProcessor(threading.Thread):
else:
logger.debug('This object is not an acknowledgement bound for me.')
def processgetpubkey(self, data):
if len(data) > 200:
logger.info('getpubkey is abnormally long. Sanity check failed. Ignoring object.')
@ -191,7 +186,6 @@ class objectProcessor(threading.Thread):
shared.numberOfPubkeysProcessed += 1
queues.UISignalQueue.put((
'updateNumberOfPubkeysProcessed', 'no data'))
embeddedTime, = unpack('>Q', data[8:16])
readPosition = 20 # bypass the nonce, time, and object type
addressVersion, varintLength = decodeVarint(
data[readPosition:readPosition + 10])
@ -209,7 +203,6 @@ class objectProcessor(threading.Thread):
if len(data) < 146: # sanity check. This is the minimum possible length.
logger.debug('(within processpubkey) payloadLength less than 146. Sanity check failed.')
return
bitfieldBehaviors = data[readPosition:readPosition + 4]
readPosition += 4
publicSigningKey = data[readPosition:readPosition + 64]
# Is it possible for a public key to be invalid such that trying to
@ -229,7 +222,6 @@ class objectProcessor(threading.Thread):
ripeHasher.update(sha.digest())
ripe = ripeHasher.digest()
logger.debug('within recpubkey, addressVersion: %s, streamNumber: %s \n\
ripe %s\n\
publicSigningKey in hex: %s\n\
@ -241,7 +233,6 @@ class objectProcessor(threading.Thread):
)
)
address = encodeAddress(addressVersion, streamNumber, ripe)
queryreturn = sqlQuery(
@ -258,16 +249,15 @@ class objectProcessor(threading.Thread):
if len(data) < 170: # sanity check.
logger.warning('(within processpubkey) payloadLength less than 170. Sanity check failed.')
return
bitfieldBehaviors = data[readPosition:readPosition + 4]
readPosition += 4
publicSigningKey = '\x04' + data[readPosition:readPosition + 64]
readPosition += 64
publicEncryptionKey = '\x04' + data[readPosition:readPosition + 64]
readPosition += 64
specifiedNonceTrialsPerByte, specifiedNonceTrialsPerByteLength = decodeVarint(
_, specifiedNonceTrialsPerByteLength = decodeVarint(
data[readPosition:readPosition + 10])
readPosition += specifiedNonceTrialsPerByteLength
specifiedPayloadLengthExtraBytes, specifiedPayloadLengthExtraBytesLength = decodeVarint(
_, specifiedPayloadLengthExtraBytesLength = decodeVarint(
data[readPosition:readPosition + 10])
readPosition += specifiedPayloadLengthExtraBytesLength
endOfSignedDataPosition = readPosition
@ -288,7 +278,6 @@ class objectProcessor(threading.Thread):
ripeHasher.update(sha.digest())
ripe = ripeHasher.digest()
logger.debug('within recpubkey, addressVersion: %s, streamNumber: %s \n\
ripe %s\n\
publicSigningKey in hex: %s\n\
@ -322,7 +311,7 @@ class objectProcessor(threading.Thread):
return
# Let us try to decrypt the pubkey
toAddress, cryptorObject = state.neededPubkeys[tag]
toAddress, _ = state.neededPubkeys[tag]
if shared.decryptAndCheckPubkeyPayload(data, toAddress) == 'successful':
# At this point we know that we have been waiting on this pubkey.
# This function will command the workerThread to start work on
@ -365,7 +354,7 @@ class objectProcessor(threading.Thread):
toRipe = key # This is the RIPE hash of my pubkeys. We need this below to compare to the destination_ripe included in the encrypted data.
initialDecryptionSuccessful = True
logger.info('EC decryption successful using key associated with ripe hash: %s.' % hexlify(key))
except Exception as err:
except Exception:
pass
if not initialDecryptionSuccessful:
# This is not a message bound for me.
@ -394,7 +383,6 @@ class objectProcessor(threading.Thread):
logger.info('sender\'s stream number is 0. Ignoring message.')
return
readPosition += sendersStreamNumberLength
behaviorBitfield = decryptedData[readPosition:readPosition + 4]
readPosition += 4
pubSigningKey = '\x04' + decryptedData[
readPosition:readPosition + 64]
@ -539,7 +527,7 @@ class objectProcessor(threading.Thread):
except:
apiNotifyPath = ''
if apiNotifyPath != '':
call([apiNotifyPath, "newMessage"])
call([apiNotifyPath, "newMessage"]) # nosec
# Let us now check and see whether our receiving address is
# behaving as a mailing list
@ -566,7 +554,7 @@ class objectProcessor(threading.Thread):
# We really should have a discussion about how to
# set the TTL for mailing list broadcasts. This is obviously
# hard-coded.
TTL = 2*7*24*60*60 # 2 weeks
TTL = 2*7*24*60*60 # 2 weeks
t = ('',
toAddress,
ripe,
@ -601,15 +589,14 @@ class objectProcessor(threading.Thread):
) - messageProcessingStartTime
shared.successfullyDecryptMessageTimings.append(
timeRequiredToAttemptToDecryptMessage)
sum = 0
timing_sum = 0
for item in shared.successfullyDecryptMessageTimings:
sum += item
timing_sum += item
logger.debug('Time to decrypt this message successfully: %s\n\
Average time for all message decryption successes since startup: %s.' %
(timeRequiredToAttemptToDecryptMessage, sum / len(shared.successfullyDecryptMessageTimings))
(timeRequiredToAttemptToDecryptMessage, timing_sum / len(shared.successfullyDecryptMessageTimings))
)
def processbroadcast(self, data):
messageProcessingStartTime = time.time()
shared.numberOfBroadcastsProcessed += 1
@ -627,11 +614,9 @@ class objectProcessor(threading.Thread):
data[readPosition:readPosition + 10])
readPosition += cleartextStreamNumberLength
if broadcastVersion == 4:
"""
v4 broadcasts are encrypted the same way the msgs are encrypted. To see if we are interested in a
v4 broadcast, we try to decrypt it. This was replaced with v5 broadcasts which include a tag which
we check instead, just like we do with v4 pubkeys.
"""
# v4 broadcasts are encrypted the same way the msgs are encrypted. To see if we are interested in a
# v4 broadcast, we try to decrypt it. This was replaced with v5 broadcasts which include a tag which
# we check instead, just like we do with v4 pubkeys.
signedData = data[8:readPosition]
initialDecryptionSuccessful = False
for key, cryptorObject in sorted(shared.MyECSubscriptionCryptorObjects.items(), key=lambda x: random.random()):
@ -643,7 +628,7 @@ class objectProcessor(threading.Thread):
toRipe = key # This is the RIPE hash of the sender's pubkey. We need this below to compare to the RIPE hash of the sender's address to verify that it was encrypted by with their key rather than some other key.
initialDecryptionSuccessful = True
logger.info('EC decryption successful using key associated with ripe hash: %s' % hexlify(key))
except Exception as err:
except Exception:
pass
# print 'cryptorObject.decrypt Exception:', err
if not initialDecryptionSuccessful:
@ -662,7 +647,7 @@ class objectProcessor(threading.Thread):
try:
decryptedData = cryptorObject.decrypt(data[readPosition:])
logger.debug('EC decryption successful')
except Exception as err:
except Exception:
logger.debug('Broadcast version %s decryption Unsuccessful.' % broadcastVersion)
return
# At this point this is a broadcast I have decrypted and am
@ -685,7 +670,6 @@ class objectProcessor(threading.Thread):
logger.info('The stream number outside of the encryption on which the POW was completed doesn\'t match the stream number inside the encryption. Ignoring broadcast.')
return
readPosition += sendersStreamLength
behaviorBitfield = decryptedData[readPosition:readPosition + 4]
readPosition += 4
sendersPubSigningKey = '\x04' + \
decryptedData[readPosition:readPosition + 64]
@ -792,7 +776,7 @@ class objectProcessor(threading.Thread):
except:
apiNotifyPath = ''
if apiNotifyPath != '':
call([apiNotifyPath, "newBroadcast"])
call([apiNotifyPath, "newBroadcast"]) # nosec
# Display timing data
logger.info('Time spent processing this interesting broadcast: %s' % (time.time() - messageProcessingStartTime,))
@ -807,7 +791,7 @@ class objectProcessor(threading.Thread):
# For address versions <= 3, we wait on a key with the correct address version,
# stream number, and RIPE hash.
status, addressVersion, streamNumber, ripe = decodeAddress(address)
_, addressVersion, streamNumber, ripe = decodeAddress(address)
if addressVersion <=3:
if address in state.neededPubkeys:
del state.neededPubkeys[address]
@ -849,13 +833,11 @@ class objectProcessor(threading.Thread):
if len(payload) != payloadLength:
logger.info('ackData payload length doesn\'t match the payload length specified in the header. Not sending ackdata.')
return False
if payloadLength > 1600100: # ~1.6 MB which is the maximum possible size of an inv message.
"""
The largest message should be either an inv or a getdata message at 1.6 MB in size.
That doesn't mean that the object may be that big. The
shared.checkAndShareObjectWithPeers function will verify that it is no larger than
2^18 bytes.
"""
if payloadLength > 1600100: # ~1.6 MB which is the maximum possible size of an inv message.
# The largest message should be either an inv or a getdata message at 1.6 MB in size.
# That doesn't mean that the object may be that big. The
# shared.checkAndShareObjectWithPeers function will verify that it is no larger than
# 2^18 bytes.
return False
if checksum != hashlib.sha512(payload).digest()[0:4]: # test the checksum in the message.
logger.info('ackdata checksum wrong. Not sending ackdata.')

View File

@ -20,7 +20,7 @@ class ObjectProcessorQueue(Queue.Queue):
def get(self, block = True, timeout = None):
try:
item = Queue.Queue.get(self, block, timeout)
except Queue.Empty as e:
except Queue.Empty:
raise Queue.Empty()
with self.sizeLock:
self.curSize -= len(item[1])

View File

@ -1,20 +1,3 @@
import gc
import threading
import shared
import time
import os
import tr#anslate
from bmconfigparser import BMConfigParser
from helper_sql import *
from helper_threading import *
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from debug import logger
import knownnodes
import queues
import state
"""
The singleCleaner class is a timer-driven thread that cleans data structures
to free memory, resends messages when a remote node doesn't respond, and
@ -34,6 +17,23 @@ resends msg messages in 5 days (then 10 days, then 20 days, etc...)
"""
import gc
import threading
import shared
import time
import os
import tr#anslate
from bmconfigparser import BMConfigParser
from helper_sql import *
from helper_threading import *
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from debug import logger
import knownnodes
import queues
import state
class singleCleaner(threading.Thread, StoppableThread):
cycleLength = 300

View File

@ -5,7 +5,7 @@ import threading
import hashlib
from struct import pack
# used when the API must execute an outside program
from subprocess import call
from subprocess import call # nosec
from binascii import hexlify, unhexlify
import tr
@ -228,7 +228,7 @@ class singleWorker(threading.Thread, StoppableThread):
# This function also broadcasts out the pubkey message
# once it is done with the POW
def doPOWForMyV2Pubkey(self, hash):
def doPOWForMyV2Pubkey(self, adressHash):
# Look up my stream number based on my address hash
"""configSections = shared.config.addresses()
for addressInKeysFile in configSections:
@ -239,9 +239,9 @@ class singleWorker(threading.Thread, StoppableThread):
if hash == hashFromThisParticularAddress:
myAddress = addressInKeysFile
break"""
myAddress = shared.myAddressesByHash[hash]
myAddress = shared.myAddressesByHash[adressHash]
# status
_, addressVersionNumber, streamNumber, hash = decodeAddress(myAddress)
_, addressVersionNumber, streamNumber, adressHash = decodeAddress(myAddress)
# 28 days from now plus or minus five minutes
TTL = int(28 * 24 * 60 * 60 + helper_random.randomrandrange(-300, 300))
@ -293,31 +293,31 @@ class singleWorker(threading.Thread, StoppableThread):
# does the necessary POW and sends it out. If it *is* a chan then it
# assembles the pubkey and stores is in the pubkey table so that we can
# send messages to "ourselves".
def sendOutOrStoreMyV3Pubkey(self, hash):
def sendOutOrStoreMyV3Pubkey(self, adressHash):
try:
myAddress = shared.myAddressesByHash[hash]
myAddress = shared.myAddressesByHash[adressHash]
except:
# The address has been deleted.
return
if BMConfigParser().safeGetBoolean(myAddress, 'chan'):
logger.info('This is a chan address. Not sending pubkey.')
return
status, addressVersionNumber, streamNumber, hash = decodeAddress(
_, addressVersionNumber, streamNumber, adressHash = decodeAddress(
myAddress)
# 28 days from now plus or minus five minutes
TTL = int(28 * 24 * 60 * 60 + helper_random.randomrandrange(-300, 300))
embeddedTime = int(time.time() + TTL)
# signedTimeForProtocolV2 = embeddedTime - TTL
"""
According to the protocol specification, the expiresTime
along with the pubkey information is signed. But to be
backwards compatible during the upgrade period, we shall sign
not the expiresTime but rather the current time. There must be
precisely a 28 day difference between the two. After the upgrade
period we'll switch to signing the whole payload with the
expiresTime time.
"""
# According to the protocol specification, the expiresTime
# along with the pubkey information is signed. But to be
# backwards compatible during the upgrade period, we shall sign
# not the expiresTime but rather the current time. There must be
# precisely a 28 day difference between the two. After the upgrade
# period we'll switch to signing the whole payload with the
# expiresTime time.
payload = pack('>Q', (embeddedTime))
payload += '\x00\x00\x00\x01' # object type: pubkey
payload += encodeVarint(addressVersionNumber) # Address version number
@ -379,7 +379,7 @@ class singleWorker(threading.Thread, StoppableThread):
if shared.BMConfigParser().safeGetBoolean(myAddress, 'chan'):
logger.info('This is a chan address. Not sending pubkey.')
return
status, addressVersionNumber, streamNumber, hash = decodeAddress(
_, addressVersionNumber, streamNumber, addressHash = decodeAddress(
myAddress)
# 28 days from now plus or minus five minutes
@ -419,7 +419,7 @@ class singleWorker(threading.Thread, StoppableThread):
# when they want to send a message.
doubleHashOfAddressData = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber) +
encodeVarint(streamNumber) + hash
encodeVarint(streamNumber) + addressHash
).digest()).digest()
payload += doubleHashOfAddressData[32:] # the tag
signature = highlevelcrypto.sign(
@ -462,6 +462,7 @@ class singleWorker(threading.Thread, StoppableThread):
# Reset just in case
sqlExecute(
'''UPDATE sent SET status='broadcastqueued' '''
'''WHERE status = 'doingbroadcastpow' ''')
queryreturn = sqlQuery(
'''SELECT fromaddress, subject, message, '''
@ -848,7 +849,7 @@ class singleWorker(threading.Thread, StoppableThread):
# to bypass the address version whose length is definitely 1
readPosition = 1
streamNumber, streamNumberLength = decodeVarint(
_, streamNumberLength = decodeVarint(
pubkeyPayload[readPosition:readPosition + 10])
readPosition += streamNumberLength
behaviorBitfield = pubkeyPayload[readPosition:readPosition + 4]
@ -1266,7 +1267,7 @@ class singleWorker(threading.Thread, StoppableThread):
except:
apiNotifyPath = ''
if apiNotifyPath != '':
call([apiNotifyPath, "newMessage"])
call([apiNotifyPath, "newMessage"]) # nosec
def requestPubKey(self, toAddress):
toStatus, addressVersionNumber, streamNumber, ripe = decodeAddress(

View File

@ -6,7 +6,6 @@ from email.header import decode_header
import re
import signal
import smtpd
import socket
import threading
import time
@ -16,7 +15,6 @@ from debug import logger
from helper_sql import sqlExecute
from helper_ackPayload import genAckPayload
from helper_threading import StoppableThread
from pyelliptic.openssl import OpenSSL
import queues
from version import softwareVersion
@ -68,7 +66,6 @@ class smtpServerPyBitmessage(smtpd.SMTPServer):
status, addressVersionNumber, streamNumber, ripe = decodeAddress(toAddress)
stealthLevel = BMConfigParser().safeGetInt('bitmessagesettings', 'ackstealthlevel')
ackdata = genAckPayload(streamNumber, stealthLevel)
t = ()
sqlExecute(
'''INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)''',
'',
@ -167,16 +164,6 @@ class smtpServer(threading.Thread, StoppableThread):
super(smtpServer, self).stopThread()
self.server.close()
return
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# for ip in ('127.0.0.1', BMConfigParser().get('bitmessagesettings', 'onionbindip')):
for ip in ('127.0.0.1'):
try:
s.connect((ip, LISTENPORT))
s.shutdown(socket.SHUT_RDWR)
s.close()
break
except:
pass
def run(self):
asyncore.loop(1)

View File

@ -11,9 +11,7 @@ import helper_sql
from namecoin import ensureNamecoinOptions
import paths
import queues
import random
import state
import string
import tr#anslate
import helper_random
# This thread exists because SQLITE3 is so un-threadsafe that we must
@ -184,19 +182,6 @@ class sqlThread(threading.Thread):
ensureNamecoinOptions()
"""# Add a new column to the inventory table to store the first 20 bytes of encrypted messages to support Android app
item = '''SELECT value FROM settings WHERE key='version';'''
parameters = ''
self.cur.execute(item, parameters)
if int(self.cur.fetchall()[0][0]) == 1:
print 'upgrading database'
item = '''ALTER TABLE inventory ADD first20bytesofencryptedmessage blob DEFAULT '' '''
parameters = ''
self.cur.execute(item, parameters)
item = '''update settings set value=? WHERE key='version';'''
parameters = (2,)
self.cur.execute(item, parameters)"""
# Let's get rid of the first20bytesofencryptedmessage field in the inventory table.
item = '''SELECT value FROM settings WHERE key='version';'''
parameters = ''
@ -239,9 +224,6 @@ class sqlThread(threading.Thread):
# Raise the default required difficulty from 1 to 2
# With the change to protocol v3, this is obsolete.
if BMConfigParser().getint('bitmessagesettings', 'settingsversion') == 6:
"""if int(shared.config.get('bitmessagesettings','defaultnoncetrialsperbyte')) == defaults.networkDefaultProofOfWorkNonceTrialsPerByte:
shared.config.set('bitmessagesettings','defaultnoncetrialsperbyte', str(defaults.networkDefaultProofOfWorkNonceTrialsPerByte * 2))
"""
BMConfigParser().set('bitmessagesettings', 'settingsversion', '7')
# Add a new column to the pubkeys table to store the address version.
@ -405,10 +387,10 @@ class sqlThread(threading.Thread):
queryResult = self.cur.fetchall()
from addresses import encodeAddress
for row in queryResult:
hash, addressVersion = row
addressHash, addressVersion = row
address = encodeAddress(addressVersion, 1, hash)
item = '''UPDATE pubkeys SET address=? WHERE hash=?;'''
parameters = (address, hash)
parameters = (address, addressHash)
self.cur.execute(item, parameters)
# Now we can remove the hash field from the pubkeys table.
self.cur.execute(