diff --git a/src/api.py b/src/api.py
index a3c80283..18b2dd20 100644
--- a/src/api.py
+++ b/src/api.py
@@ -24,6 +24,7 @@ import helper_inbox
 import helper_sent
 import hashlib
 
+import protocol
 import state
 from pyelliptic.openssl import OpenSSL
 from struct import pack
diff --git a/src/bitmessagemain.py b/src/bitmessagemain.py
index 5c41e4e5..cd7205e8 100755
--- a/src/bitmessagemain.py
+++ b/src/bitmessagemain.py
@@ -29,6 +29,7 @@ from helper_startup import isOurOperatingSystemLimitedToHavingVeryFewHalfOpenCon
 import shared
 from helper_sql import sqlQuery
 import state
+import protocol
 import threading
 
 # Classes
diff --git a/src/bitmessageqt/support.py b/src/bitmessageqt/support.py
index aae5134f..07d0ab3a 100644
--- a/src/bitmessageqt/support.py
+++ b/src/bitmessageqt/support.py
@@ -13,6 +13,7 @@ from l10n import getTranslationLanguage
 from openclpow import openclAvailable, openclEnabled
 import paths
 from proofofwork import bmpow
+import protocol
 from pyelliptic.openssl import OpenSSL
 import shared
 import state
diff --git a/src/bitmessageqt/utils.py b/src/bitmessageqt/utils.py
index 1a5b67d7..fd18183d 100644
--- a/src/bitmessageqt/utils.py
+++ b/src/bitmessageqt/utils.py
@@ -1,7 +1,6 @@
 from PyQt4 import QtGui
 import hashlib
 import os
-import shared
 from addresses import addBMIfNotPresent
 from configparser import BMConfigParser
 import state
diff --git a/src/class_addressGenerator.py b/src/class_addressGenerator.py
index ff5eb25e..ebedf2f9 100644
--- a/src/class_addressGenerator.py
+++ b/src/class_addressGenerator.py
@@ -10,6 +10,7 @@ from addresses import *
 from configparser import BMConfigParser
 from debug import logger
 from helper_threading import *
+import protocol
 from pyelliptic import arithmetic
 import tr
 from binascii import hexlify
diff --git a/src/class_sqlThread.py b/src/class_sqlThread.py
index 2beb50dc..df747aea 100644
--- a/src/class_sqlThread.py
+++ b/src/class_sqlThread.py
@@ -9,6 +9,7 @@ import os
 from debug import logger
 from namecoin import ensureNamecoinOptions
 import paths
+import protocol
 import random
 import state
 import string
diff --git a/src/debug.py b/src/debug.py
index 24e43332..663bbeeb 100644
--- a/src/debug.py
+++ b/src/debug.py
@@ -19,7 +19,6 @@ Use: `from debug import logger` to import this facility into whatever module you
 import logging
 import logging.config
 import os
-import shared
 import sys
 import traceback
 import helper_startup
diff --git a/src/helper_startup.py b/src/helper_startup.py
index c0f84635..86d3de42 100644
--- a/src/helper_startup.py
+++ b/src/helper_startup.py
@@ -11,6 +11,7 @@ from distutils.version import StrictVersion
 
 from namecoin import ensureNamecoinOptions
 import paths
+import protocol
 import state
 
 storeConfigFilesInSameDirectoryAsProgramByDefault = False # The user may de-select Portable Mode in the settings if they want the config files to stay in the application data folder.
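
Note: the hunks above only swap module-level imports (dropping `import shared`, adding `import protocol` or `import paths`), so the main regression risk is a circular import that only shows up when a module is loaded on its own. Below is a minimal, hypothetical smoke test, not part of the patch; the module list simply mirrors the files touched here, it assumes it is run from the src/ directory with the project's dependencies (e.g. PyQt4) installed, and some of these modules run real code at import time, so treat it as a rough check only.

    # check_imports.py -- hypothetical helper, not part of the patch.
    # It only verifies that every touched module still imports cleanly
    # (e.g. no circular import introduced by the shared -> protocol move).
    import importlib
    import traceback

    MODULES = [
        'api', 'bitmessagemain', 'bitmessageqt.support', 'bitmessageqt.utils',
        'class_addressGenerator', 'class_sqlThread', 'debug', 'helper_startup',
        'message_data_reader', 'paths', 'protocol', 'shared',
    ]

    failures = []
    for name in MODULES:
        try:
            importlib.import_module(name)
        except Exception:
            failures.append(name)
            traceback.print_exc()

    print('import failures: %s' % (failures or 'none'))
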
diff --git a/src/message_data_reader.py b/src/message_data_reader.py
index 0c38a291..79a3a607 100644
--- a/src/message_data_reader.py
+++ b/src/message_data_reader.py
@@ -6,8 +6,8 @@ import sqlite3
 from time import strftime, localtime
 import sys
 import shared
+import paths
 import state
-import string
 from binascii import hexlify
 
 appdata = paths.lookupAppdataFolder()
diff --git a/src/paths.py b/src/paths.py
index 6b0baf15..e92116c0 100644
--- a/src/paths.py
+++ b/src/paths.py
@@ -23,11 +23,11 @@ def lookupAppdataFolder():
     APPNAME = "PyBitmessage"
     if "BITMESSAGE_HOME" in environ:
         dataFolder = environ["BITMESSAGE_HOME"]
-        if dataFolder[-1] not in [os.path.sep, os.path.altsep]:
-            dataFolder += os.path.sep
+        if dataFolder[-1] not in [path.sep, path.altsep]:
+            dataFolder += path.sep
     elif sys.platform == 'darwin':
         if "HOME" in environ:
-            dataFolder = path.join(os.environ["HOME"], "Library/Application Support/", APPNAME) + '/'
+            dataFolder = path.join(environ["HOME"], "Library/Application Support/", APPNAME) + '/'
         else:
             stringToLog = 'Could not find home folder, please report this message and your OS X version to the BitMessage Github.'
             if 'logger' in globals():
diff --git a/src/protocol.py b/src/protocol.py
index cb6726e3..cf0d65d4 100644
--- a/src/protocol.py
+++ b/src/protocol.py
@@ -204,7 +204,7 @@ def decryptAndCheckPubkeyPayload(data, address):
         # Let us try to decrypt the pubkey
         toAddress, cryptorObject = state.neededPubkeys[tag]
         if toAddress != address:
-            logger.critical('decryptAndCheckPubkeyPayload failed due to toAddress mismatch. This is very peculiar. toAddress: %s, address %s' % (toAddress, address))
+            logger.critical('decryptAndCheckPubkeyPayload failed due to toAddress mismatch. This is very peculiar. toAddress: %s, address %s', toAddress, address)
             # the only way I can think that this could happen is if someone encodes their address data two different ways.
             # That sort of address-malleability should have been caught by the UI or API and an error given to the user.
             return 'failed'
@@ -260,12 +260,11 @@ def decryptAndCheckPubkeyPayload(data, address):
         logger.info('within decryptAndCheckPubkeyPayload, addressVersion: %s, streamNumber: %s \n\
 ripe %s\n\
 publicSigningKey in hex: %s\n\
-publicEncryptionKey in hex: %s' % (addressVersion,
+publicEncryptionKey in hex: %s', addressVersion,
                                    streamNumber,
                                    hexlify(ripe),
                                    hexlify(publicSigningKey),
                                    hexlify(publicEncryptionKey)
-                                   )
                                    )
 
         t = (address, addressVersion, storedData, int(time.time()), 'yes')
@@ -275,7 +274,7 @@ def decryptAndCheckPubkeyPayload(data, address):
         logger.info('Pubkey decryption was UNsuccessful due to a malformed varint.')
         return 'failed'
     except Exception as e:
-        logger.critical('Pubkey decryption was UNsuccessful because of an unhandled exception! This is definitely a bug! \n%s' % traceback.format_exc())
+        logger.critical('Pubkey decryption was UNsuccessful because of an unhandled exception! This is definitely a bug! \n%s', traceback.format_exc())
         return 'failed'
 
 def checkAndShareObjectWithPeers(data):
@@ -286,7 +285,7 @@ def checkAndShareObjectWithPeers(data):
     if we are receiving it off of the wire.
     """
     if len(data) > 2 ** 18:
-        logger.info('The payload length of this object is too large (%s bytes). Ignoring it.' % len(data))
+        logger.info('The payload length of this object is too large (%s bytes). Ignoring it.', len(data))
         return 0
     # Let us check to make sure that the proof of work is sufficient.
     if not isProofOfWorkSufficient(data):
@@ -295,10 +294,10 @@ def checkAndShareObjectWithPeers(data):
 
     endOfLifeTime, = unpack('>Q', data[8:16])
     if endOfLifeTime - int(time.time()) > 28 * 24 * 60 * 60 + 10800: # The TTL may not be larger than 28 days + 3 hours of wiggle room
-        logger.info('This object\'s End of Life time is too far in the future. Ignoring it. Time is %s' % endOfLifeTime)
+        logger.info('This object\'s End of Life time is too far in the future. Ignoring it. Time is %s', endOfLifeTime)
         return 0
     if endOfLifeTime - int(time.time()) < - 3600: # The EOL time was more than an hour ago. That's too much.
-        logger.info('This object\'s End of Life time was more than an hour ago. Ignoring the object. Time is %s' % endOfLifeTime)
+        logger.info('This object\'s End of Life time was more than an hour ago. Ignoring the object. Time is %s', endOfLifeTime)
         return 0
     intObjectType, = unpack('>I', data[16:20])
     try:
@@ -318,9 +317,9 @@ def checkAndShareObjectWithPeers(data):
             _checkAndShareUndefinedObjectWithPeers(data)
             return 0.6
     except varintDecodeError as e:
-        logger.debug("There was a problem with a varint while checking to see whether it was appropriate to share an object with peers. Some details: %s" % e)
+        logger.debug("There was a problem with a varint while checking to see whether it was appropriate to share an object with peers. Some details: %s", e)
     except Exception as e:
-        logger.critical('There was a problem while checking to see whether it was appropriate to share an object with peers. This is definitely a bug! \n%s' % traceback.format_exc())
+        logger.critical('There was a problem while checking to see whether it was appropriate to share an object with peers. This is definitely a bug! \n%s', traceback.format_exc())
 
     return 0
 
@@ -333,7 +332,7 @@ def _checkAndShareUndefinedObjectWithPeers(data):
     streamNumber, streamNumberLength = decodeVarint(
         data[readPosition:readPosition + 9])
     if not streamNumber in state.streamsInWhichIAmParticipating:
-        logger.debug('The streamNumber %s isn\'t one we are interested in.' % streamNumber)
+        logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
         return
 
     inventoryHash = calculateInventoryHash(data)
@@ -343,7 +342,7 @@ def _checkAndShareUndefinedObjectWithPeers(data):
 
     objectType, = unpack('>I', data[16:20])
     Inventory()[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime,'')
-    logger.debug('advertising inv with hash: %s' % hexlify(inventoryHash))
+    logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
 
@@ -356,7 +355,7 @@ def _checkAndShareMsgWithPeers(data):
     streamNumber, streamNumberLength = decodeVarint(
         data[readPosition:readPosition + 9])
     if not streamNumber in state.streamsInWhichIAmParticipating:
-        logger.debug('The streamNumber %s isn\'t one we are interested in.' % streamNumber)
+        logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
         return
     readPosition += streamNumberLength
     inventoryHash = calculateInventoryHash(data)
@@ -367,7 +366,7 @@ def _checkAndShareMsgWithPeers(data):
     objectType = 2
     Inventory()[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime,'')
-    logger.debug('advertising inv with hash: %s' % hexlify(inventoryHash))
+    logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
 
     # Now let's enqueue it to be processed ourselves.
@@ -387,7 +386,7 @@ def _checkAndShareGetpubkeyWithPeers(data):
     streamNumber, streamNumberLength = decodeVarint(
         data[readPosition:readPosition + 10])
     if not streamNumber in state.streamsInWhichIAmParticipating:
-        logger.debug('The streamNumber %s isn\'t one we are interested in.' % streamNumber)
+        logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
         return
 
     readPosition += streamNumberLength
@@ -400,7 +399,7 @@ def _checkAndShareGetpubkeyWithPeers(data):
     Inventory()[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime,'')
     # This getpubkey request is valid. Forward to peers.
-    logger.debug('advertising inv with hash: %s' % hexlify(inventoryHash))
+    logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
 
     # Now let's queue it to be processed ourselves.
@@ -418,11 +417,11 @@ def _checkAndSharePubkeyWithPeers(data):
         data[readPosition:readPosition + 10])
     readPosition += varintLength
     if not streamNumber in state.streamsInWhichIAmParticipating:
-        logger.debug('The streamNumber %s isn\'t one we are interested in.' % streamNumber)
+        logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
         return
 
     if addressVersion >= 4:
         tag = data[readPosition:readPosition + 32]
-        logger.debug('tag in received pubkey is: %s' % hexlify(tag))
+        logger.debug('tag in received pubkey is: %s', hexlify(tag))
     else:
         tag = ''
@@ -434,7 +433,7 @@ def _checkAndSharePubkeyWithPeers(data):
 
     Inventory()[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime, tag)
     # This object is valid. Forward it to peers.
-    logger.debug('advertising inv with hash: %s' % hexlify(inventoryHash))
+    logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
 
@@ -455,7 +454,7 @@ def _checkAndShareBroadcastWithPeers(data):
     streamNumber, streamNumberLength = decodeVarint(data[readPosition:readPosition + 10])
     readPosition += streamNumberLength
     if not streamNumber in state.streamsInWhichIAmParticipating:
-        logger.debug('The streamNumber %s isn\'t one we are interested in.' % streamNumber)
+        logger.debug('The streamNumber %s isn\'t one we are interested in.', streamNumber)
         return
     if broadcastVersion >= 3:
         tag = data[readPosition:readPosition+32]
@@ -470,7 +469,7 @@ def _checkAndShareBroadcastWithPeers(data):
     Inventory()[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime, tag)
     # This object is valid. Forward it to peers.
-    logger.debug('advertising inv with hash: %s' % hexlify(inventoryHash))
+    logger.debug('advertising inv with hash: %s', hexlify(inventoryHash))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
 
     # Now let's queue it to be processed ourselves.
diff --git a/src/shared.py b/src/shared.py
index d55b3b78..11058fd7 100644
--- a/src/shared.py
+++ b/src/shared.py
@@ -19,7 +19,6 @@ import threading
 import time
 import shutil # used for moving the data folder and copying keys.dat
 import datetime
-from os import path, environ
 import traceback
 from binascii import hexlify
 
@@ -645,6 +644,7 @@ def _checkAndShareBroadcastWithPeers(data):
 
 def openKeysFile():
     if 'linux' in sys.platform:
+        import subprocess
         subprocess.call(["xdg-open", state.appdata + 'keys.dat'])
     else:
         os.startfile(state.appdata + 'keys.dat')
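
Note on the logger changes throughout protocol.py: passing the values as arguments (logger.debug('... %s', value)) instead of pre-formatting with % lets the logging module skip the string interpolation whenever the record's level is disabled, and a mismatched format/argument pair is reported by the logging machinery instead of raising at the call site. A minimal, self-contained sketch of the difference follows; the Expensive class is invented purely to make the deferred formatting visible:

    import logging

    logging.basicConfig(level=logging.INFO)  # DEBUG records will be discarded
    logger = logging.getLogger(__name__)

    class Expensive(object):
        """Stand-in for a costly value, e.g. hexlify() of a large payload."""
        def __str__(self):
            print('__str__ was called')
            return 'rendered value'

    # Eager: the message is built by % before logger.debug() even runs,
    # so __str__ is called although the DEBUG record is thrown away.
    logger.debug('value: %s' % Expensive())

    # Lazy: the format string and argument are handed to logging, which
    # never performs the interpolation because DEBUG is not enabled.
    logger.debug('value: %s', Expensive())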