Fixes pointed out by landscape.io

- missing/wrong/obsolete imports
- logger formatting
parent ac348e4e6b
commit fa0a3135e7
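
The logger changes below all follow one pattern: the message is no longer pre-formatted with the % operator inside the logging call; the values are passed as extra arguments so the logging module interpolates them only if the record is actually emitted, which is what landscape.io flags. A minimal sketch of the before/after (the logger name and the payload variable are illustrative, not taken from this commit):

import logging

logger = logging.getLogger(__name__)
payload = b'\x00' * 4

# Before: the string is built eagerly, even when DEBUG is filtered out.
logger.debug('payload length: %s bytes' % len(payload))

# After: formatting is deferred to the logging framework (lazy interpolation).
logger.debug('payload length: %s bytes', len(payload))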

@@ -24,6 +24,7 @@ import helper_inbox
 import helper_sent
 import hashlib
 
+import protocol
 import state
 from pyelliptic.openssl import OpenSSL
 from struct import pack

@@ -29,6 +29,7 @@ from helper_startup import isOurOperatingSystemLimitedToHavingVeryFewHalfOpenCon
 import shared
 from helper_sql import sqlQuery
 import state
+import protocol
 import threading
 
 # Classes

@@ -13,6 +13,7 @@ from l10n import getTranslationLanguage
 from openclpow import openclAvailable, openclEnabled
 import paths
 from proofofwork import bmpow
+import protocol
 from pyelliptic.openssl import OpenSSL
 import shared
 import state

@@ -1,7 +1,6 @@
 from PyQt4 import QtGui
 import hashlib
 import os
-import shared
 from addresses import addBMIfNotPresent
 from configparser import BMConfigParser
 import state

@@ -10,6 +10,7 @@ from addresses import *
 from configparser import BMConfigParser
 from debug import logger
 from helper_threading import *
+import protocol
 from pyelliptic import arithmetic
 import tr
 from binascii import hexlify

@@ -9,6 +9,7 @@ import os
 from debug import logger
 from namecoin import ensureNamecoinOptions
 import paths
+import protocol
 import random
 import state
 import string

@@ -19,7 +19,6 @@ Use: `from debug import logger` to import this facility into whatever module you
 import logging
 import logging.config
 import os
-import shared
 import sys
 import traceback
 import helper_startup

@@ -11,6 +11,7 @@ from distutils.version import StrictVersion
 
 from namecoin import ensureNamecoinOptions
 import paths
+import protocol
 import state
 
 storeConfigFilesInSameDirectoryAsProgramByDefault = False # The user may de-select Portable Mode in the settings if they want the config files to stay in the application data folder.

@@ -6,8 +6,8 @@ import sqlite3
 from time import strftime, localtime
 import sys
 import shared
+import paths
 import state
-import string
 from binascii import hexlify
 
 appdata = paths.lookupAppdataFolder()

@@ -23,11 +23,11 @@ def lookupAppdataFolder():
     APPNAME = "PyBitmessage"
     if "BITMESSAGE_HOME" in environ:
         dataFolder = environ["BITMESSAGE_HOME"]
-        if dataFolder[-1] not in [os.path.sep, os.path.altsep]:
-            dataFolder += os.path.sep
+        if dataFolder[-1] not in [path.sep, path.altsep]:
+            dataFolder += path.sep
     elif sys.platform == 'darwin':
         if "HOME" in environ:
-            dataFolder = path.join(os.environ["HOME"], "Library/Application Support/", APPNAME) + '/'
+            dataFolder = path.join(environ["HOME"], "Library/Application Support/", APPNAME) + '/'
         else:
             stringToLog = 'Could not find home folder, please report this message and your OS X version to the BitMessage Github.'
             if 'logger' in globals():
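
The lookupAppdataFolder() hunk above brings the code in line with the module-level `from os import path, environ` import instead of going through os.path and os.environ. A rough, self-contained sketch of the same idiom, assuming a hypothetical helper name (this is not the project's function):

from os import path, environ

def dataFolderFromEnv(default='.'):
    # Honour an explicit override, mirroring the BITMESSAGE_HOME handling above.
    dataFolder = environ.get('BITMESSAGE_HOME', default)
    # Ensure a trailing separator; path.altsep is None on POSIX,
    # which is harmless in the membership test.
    if dataFolder[-1] not in (path.sep, path.altsep):
        dataFolder += path.sep
    return dataFolder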

@@ -204,7 +204,7 @@ def decryptAndCheckPubkeyPayload(data, address):
         # Let us try to decrypt the pubkey
         toAddress, cryptorObject = state.neededPubkeys[tag]
         if toAddress != address:
-            logger.critical('decryptAndCheckPubkeyPayload failed due to toAddress mismatch. This is very peculiar. toAddress: %s, address %s' % (toAddress, address))
+            logger.critical('decryptAndCheckPubkeyPayload failed due to toAddress mismatch. This is very peculiar. toAddress: %s, address %s', toAddress, address)
             # the only way I can think that this could happen is if someone encodes their address data two different ways.
             # That sort of address-malleability should have been caught by the UI or API and an error given to the user.
             return 'failed'

@@ -260,13 +260,12 @@ def decryptAndCheckPubkeyPayload(data, address):
         logger.info('within decryptAndCheckPubkeyPayload, addressVersion: %s, streamNumber: %s \n\
             ripe %s\n\
             publicSigningKey in hex: %s\n\
-            publicEncryptionKey in hex: %s' % (addressVersion,
+            publicEncryptionKey in hex: %s', addressVersion,
             streamNumber,
             hexlify(ripe),
             hexlify(publicSigningKey),
             hexlify(publicEncryptionKey)
             )
-            )
 
         t = (address, addressVersion, storedData, int(time.time()), 'yes')
         sqlExecute('''INSERT INTO pubkeys VALUES (?,?,?,?,?)''', *t)

@@ -275,7 +274,7 @@ def decryptAndCheckPubkeyPayload(data, address):
         logger.info('Pubkey decryption was UNsuccessful due to a malformed varint.')
         return 'failed'
     except Exception as e:
-        logger.critical('Pubkey decryption was UNsuccessful because of an unhandled exception! This is definitely a bug! \n%s' % traceback.format_exc())
+        logger.critical('Pubkey decryption was UNsuccessful because of an unhandled exception! This is definitely a bug! \n%s', traceback.format_exc())
         return 'failed'
 
 def checkAndShareObjectWithPeers(data):

@@ -286,7 +285,7 @@ def checkAndShareObjectWithPeers(data):
     if we are receiving it off of the wire.
     """
     if len(data) > 2 ** 18:
-        logger.info('The payload length of this object is too large (%s bytes). Ignoring it.' % len(data))
+        logger.info('The payload length of this object is too large (%s bytes). Ignoring it.', len(data))
         return 0
     # Let us check to make sure that the proof of work is sufficient.
     if not isProofOfWorkSufficient(data):

@@ -295,10 +294,10 @@
 
     endOfLifeTime, = unpack('>Q', data[8:16])
     if endOfLifeTime - int(time.time()) > 28 * 24 * 60 * 60 + 10800: # The TTL may not be larger than 28 days + 3 hours of wiggle room
-        logger.info('This object\'s End of Life time is too far in the future. Ignoring it. Time is %s' % endOfLifeTime)
+        logger.info('This object\'s End of Life time is too far in the future. Ignoring it. Time is %s', endOfLifeTime)
         return 0
     if endOfLifeTime - int(time.time()) < - 3600: # The EOL time was more than an hour ago. That's too much.
-        logger.info('This object\'s End of Life time was more than an hour ago. Ignoring the object. Time is %s' % endOfLifeTime)
+        logger.info('This object\'s End of Life time was more than an hour ago. Ignoring the object. Time is %s', endOfLifeTime)
         return 0
     intObjectType, = unpack('>I', data[16:20])
     try:

@@ -318,9 +317,9 @@ def checkAndShareObjectWithPeers(data):
         _checkAndShareUndefinedObjectWithPeers(data)
         return 0.6
     except varintDecodeError as e:
-        logger.debug("There was a problem with a varint while checking to see whether it was appropriate to share an object with peers. Some details: %s" % e)
+        logger.debug("There was a problem with a varint while checking to see whether it was appropriate to share an object with peers. Some details: %s", e)
     except Exception as e:
-        logger.critical('There was a problem while checking to see whether it was appropriate to share an object with peers. This is definitely a bug! \n%s' % traceback.format_exc())
+        logger.critical('There was a problem while checking to see whether it was appropriate to share an object with peers. This is definitely a bug! \n%s', traceback.format_exc())
     return 0
 
 

@@ -333,7 +332,7 @@ def _checkAndShareUndefinedObjectWithPeers(data):
     streamNumber, streamNumberLength = decodeVarint(
         data[readPosition:readPosition + 9])
     if not streamNumber in state.streamsInWhichIAmParticipating:
-        logger.debug('The streamNumber %s isn\'t one we are interested in.' % streamNumber)
+        logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
         return
 
     inventoryHash = calculateInventoryHash(data)

@@ -343,7 +342,7 @@
     objectType, = unpack('>I', data[16:20])
     Inventory()[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime,'')
-    logger.debug('advertising inv with hash: %s' % hexlify(inventoryHash))
+    logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
 
 

@@ -356,7 +355,7 @@ def _checkAndShareMsgWithPeers(data):
     streamNumber, streamNumberLength = decodeVarint(
         data[readPosition:readPosition + 9])
     if not streamNumber in state.streamsInWhichIAmParticipating:
-        logger.debug('The streamNumber %s isn\'t one we are interested in.' % streamNumber)
+        logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
         return
     readPosition += streamNumberLength
     inventoryHash = calculateInventoryHash(data)

@@ -367,7 +366,7 @@
     objectType = 2
     Inventory()[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime,'')
-    logger.debug('advertising inv with hash: %s' % hexlify(inventoryHash))
+    logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
 
     # Now let's enqueue it to be processed ourselves.

@@ -387,7 +386,7 @@ def _checkAndShareGetpubkeyWithPeers(data):
     streamNumber, streamNumberLength = decodeVarint(
         data[readPosition:readPosition + 10])
     if not streamNumber in state.streamsInWhichIAmParticipating:
-        logger.debug('The streamNumber %s isn\'t one we are interested in.' % streamNumber)
+        logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
         return
     readPosition += streamNumberLength
 

@@ -400,7 +399,7 @@
     Inventory()[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime,'')
     # This getpubkey request is valid. Forward to peers.
-    logger.debug('advertising inv with hash: %s' % hexlify(inventoryHash))
+    logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
 
     # Now let's queue it to be processed ourselves.

@@ -418,11 +417,11 @@ def _checkAndSharePubkeyWithPeers(data):
         data[readPosition:readPosition + 10])
     readPosition += varintLength
     if not streamNumber in state.streamsInWhichIAmParticipating:
-        logger.debug('The streamNumber %s isn\'t one we are interested in.' % streamNumber)
+        logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
         return
     if addressVersion >= 4:
         tag = data[readPosition:readPosition + 32]
-        logger.debug('tag in received pubkey is: %s' % hexlify(tag))
+        logger.debug('tag in received pubkey is: %s', hexlify(tag))
     else:
         tag = ''
 

@@ -434,7 +433,7 @@
     Inventory()[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime, tag)
     # This object is valid. Forward it to peers.
-    logger.debug('advertising inv with hash: %s' % hexlify(inventoryHash))
+    logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
 
 

@@ -455,7 +454,7 @@ def _checkAndShareBroadcastWithPeers(data):
     streamNumber, streamNumberLength = decodeVarint(data[readPosition:readPosition + 10])
     readPosition += streamNumberLength
     if not streamNumber in state.streamsInWhichIAmParticipating:
-        logger.debug('The streamNumber %s isn\'t one we are interested in.' % streamNumber)
+        logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
         return
     if broadcastVersion >= 3:
         tag = data[readPosition:readPosition+32]

@@ -470,7 +469,7 @@
     Inventory()[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime, tag)
     # This object is valid. Forward it to peers.
-    logger.debug('advertising inv with hash: %s' % hexlify(inventoryHash))
+    logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
 
     # Now let's queue it to be processed ourselves.

@@ -19,7 +19,6 @@ import threading
 import time
 import shutil # used for moving the data folder and copying keys.dat
 import datetime
-from os import path, environ
 import traceback
 from binascii import hexlify
 

@@ -645,6 +644,7 @@ def _checkAndShareBroadcastWithPeers(data):
 
 def openKeysFile():
     if 'linux' in sys.platform:
+        import subprocess
         subprocess.call(["xdg-open", state.appdata + 'keys.dat'])
     else:
         os.startfile(state.appdata + 'keys.dat')
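
The final hunk adds the `import subprocess` that the Linux branch of openKeysFile() relies on. A self-contained sketch of the same platform split, assuming a hypothetical helper name and file path (os.startfile exists only on Windows):

import os
import subprocess
import sys

def openFileWithDesktop(filePath):
    if 'linux' in sys.platform:
        # Let the desktop environment pick the viewer via xdg-open.
        subprocess.call(["xdg-open", filePath])
    elif sys.platform.startswith('win'):
        # Windows-only API; not available on other platforms.
        os.startfile(filePath)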