Fix broken tests

- setting BITMESSAGE_HOME directly doesn't always work, so we added a
  helper function (paths.set_appdata_folder) for switching the data
  directory (see the sketch after this list)
- logger and BMConfigParser now clear old config when loading new
- add method for flushing log files
- rework logic in test_logger
- detect thread names on UNIX-like OSes
- start and stop the test process more reliably, with better waiting
  mechanisms
- add extra cleanup during TestProcess start
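
The first item refers to the new paths.set_appdata_folder() helper introduced in the diff below. A minimal sketch of how a test switches its data directory after this change (illustration only; the temporary directory is just an example):

    import tempfile

    from pybitmessage.paths import set_appdata_folder

    # Previously tests set os.environ['BITMESSAGE_HOME'] directly and hoped
    # every module noticed; the helper also refreshes state.appdata.
    home = set_appdata_folder(tempfile.gettempdir())
    print(home)  # where keys.dat, debug.log, knownnodes.dat will be written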
Peter Šurda 2020-10-16 14:51:02 +02:00
parent b650e97edc
commit 3061bec889
Signed by: PeterSurda
GPG Key ID: 0C5F50C0B5F37D87
9 changed files with 190 additions and 43 deletions

View File

@@ -7,7 +7,7 @@ import os
 import shutil
 from datetime import datetime
 
-import state
+from paths import get_active_config_folder
 from singleton import Singleton
 
 BMConfigDefaults = {
@@ -124,7 +124,16 @@ class BMConfigParser(ConfigParser.SafeConfigParser):
         return [
             x for x in BMConfigParser().sections() if x.startswith('BM-')]
 
+    def _reset(self):
+        """Reset current config. There doesn't appear to be a built in
+        method for this"""
+        sections = self.sections()
+        for x in sections:
+            self.remove_section(x)
+
     def read(self, filenames):
+        """Read config and populate defaults"""
+        self._reset()
         ConfigParser.ConfigParser.read(self, filenames)
         for section in self.sections():
             for option in self.options(section):
@@ -144,7 +153,7 @@ class BMConfigParser(ConfigParser.SafeConfigParser):
     def save(self):
         """Save the runtime config onto the filesystem"""
-        fileName = os.path.join(state.appdata, 'keys.dat')
+        fileName = os.path.join(get_active_config_folder(), 'keys.dat')
         fileNameBak = '.'.join([
             fileName, datetime.now().strftime("%Y%j%H%M%S%f"), 'bak'])
         # create a backup copy to prevent the accidental loss due to
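
Why _reset() is needed: ConfigParser.read() merges into whatever is already loaded, so sections from a previously loaded keys.dat would otherwise survive a re-read. A standalone sketch of that behaviour (illustration only, not part of the commit; the file name is hypothetical):

    import ConfigParser  # Python 2, as in the module above

    cp = ConfigParser.SafeConfigParser()
    cp.add_section('BM-oldaddress')   # pretend this came from an old config
    cp.read('new_keys.dat')           # hypothetical file without that section
    print(cp.sections())              # 'BM-oldaddress' is still there - stale

    # BMConfigParser.read() now calls _reset() first, so only the sections of
    # the file just read remain.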

View File

@@ -28,7 +28,9 @@ class sqlThread(threading.Thread):
     def run(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
         """Process SQL queries from `.helper_sql.sqlSubmitQueue`"""
-        self.conn = sqlite3.connect(state.appdata + 'messages.dat')
+        self.conn = sqlite3.connect(
+            os.path.join(paths.get_active_config_folder(),
+                         'messages.dat'))
         self.conn.text_factory = str
         self.cur = self.conn.cursor()

View File

@@ -42,7 +42,8 @@ import os
 import sys
 
 import helper_startup
-import state
+from paths import get_active_config_folder
 
 helper_startup.loadConfig()
@@ -67,7 +68,7 @@ def configureLogging():
     sys.excepthook = log_uncaught_exceptions
     fail_msg = ''
     try:
-        logging_config = os.path.join(state.appdata, 'logging.dat')
+        logging_config = os.path.join(get_active_config_folder(), 'logging.dat')
         logging.config.fileConfig(
             logging_config, disable_existing_loggers=False)
         return (
@@ -103,7 +104,7 @@ def configureLogging():
            'class': 'logging.handlers.RotatingFileHandler',
            'formatter': 'default',
            'level': log_level,
-           'filename': os.path.join(state.appdata, 'debug.log'),
+           'filename': os.path.join(get_active_config_folder(), 'debug.log'),
            'maxBytes': 2097152,  # 2 MiB
            'backupCount': 1,
            'encoding': 'UTF-8',
@@ -140,14 +141,23 @@ def resetLogging():
     """Reconfigure logging in runtime when state.appdata dir changed"""
     # pylint: disable=global-statement, used-before-assignment
     global logger
-    for i in logger.handlers:
-        logger.removeHandler(i)
+    for i in list(logger.handlers):
+        try:
+            logger.removeHandler(i)
+        except ValueError:
+            pass
         i.flush()
         i.close()
     configureLogging()
     logger = logging.getLogger('default')
 
 
+def flush_logs():
+    """Tell handlers to flush"""
+    for i in list(logger.handlers):
+        i.flush()
+
+
 # !
 preconfigured, msg = configureLogging()
 logger = logging.getLogger('default')
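
The new flush_logs() helper simply asks every handler on the 'default' logger to flush. A minimal usage sketch (illustration only, not part of the commit) for a test that reads debug.log right after emitting a record:

    from pybitmessage.debug import logger, flush_logs

    logger.info('Testing the logger...')
    flush_logs()  # make sure the record has been written out to debug.log
    # debug.log under get_active_config_folder() can now be inspected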

View File

@@ -16,6 +16,7 @@ except ImportError:
 import state
 from bmconfigparser import BMConfigParser
 from network.node import Peer
+from paths import get_active_config_folder
 
 knownNodesLock = threading.RLock()
 """Thread lock for knownnodes modification"""
@@ -91,7 +92,7 @@ def pickle_deserialize_old_knownnodes(source):
 def saveKnownNodes(dirName=None):
     """Save knownnodes to filesystem"""
     if dirName is None:
-        dirName = state.appdata
+        dirName = get_active_config_folder()
     with knownNodesLock:
         with open(os.path.join(dirName, 'knownnodes.dat'), 'wb') as output:
             json_serialize_knownnodes(output)
@@ -149,7 +150,9 @@ def createDefaultKnownNodes():
 def readKnownNodes():
     """Load knownnodes from filesystem"""
     try:
-        with open(state.appdata + 'knownnodes.dat', 'rb') as source:
+        with open(os.path.join(get_active_config_folder(),
+                               'knownnodes.dat'),
+                  'rb') as source:
             with knownNodesLock:
                 try:
                     json_deserialize_knownnodes(source)

View File

@@ -8,6 +8,8 @@ import sys
 from datetime import datetime
 from shutil import move
 
+import state
+
 logger = logging.getLogger('default')
 
 # When using py2exe or py2app, the variable frozen is added to the sys
@@ -31,6 +33,28 @@ def lookupExeFolder():
     return exeFolder
 
 
+def set_appdata_folder(path=None):
+    """Changes appdata location. Does not move any files, just where
+    it's looking when loading/saving later"""
+    if not path:
+        try:
+            # default
+            del os.environ['BITMESSAGE_HOME']
+        except KeyError:
+            pass
+    else:
+        os.environ['BITMESSAGE_HOME'] = path
+    state.appdata = lookupAppdataFolder()
+    return state.appdata
+
+
+def get_active_config_folder():
+    """Returns current config directory, workaround for inconsistent
+    imports"""
+    return state.appdata
+
+
 def lookupAppdataFolder():
     """Returns path of the folder where application data is stored"""
     APPNAME = "PyBitmessage"

View File

@@ -17,11 +17,11 @@ from binascii import hexlify
 # Project imports.
 import highlevelcrypto
-import state
 from addresses import decodeAddress, encodeVarint
 from bmconfigparser import BMConfigParser
 from debug import logger
 from helper_sql import sqlQuery
+from paths import get_active_config_folder
 from pyelliptic import arithmetic
@@ -114,7 +114,7 @@ def reloadMyAddressHashes():
     # myPrivateKeys.clear()
     keyfileSecure = checkSensitiveFilePermissions(os.path.join(
-        state.appdata, 'keys.dat'))
+        get_active_config_folder(), 'keys.dat'))
     hasEnabledKeys = False
     for addressInKeysFile in BMConfigParser().addresses():
         isEnabled = BMConfigParser().getboolean(addressInKeysFile, 'enabled')
@@ -144,7 +144,7 @@ def reloadMyAddressHashes():
     if not keyfileSecure:
         fixSensitiveFilePermissions(os.path.join(
-            state.appdata, 'keys.dat'), hasEnabledKeys)
+            get_active_config_folder(), 'keys.dat'), hasEnabledKeys)
 
 
 def reloadBroadcastSendersForWhichImWatching():

View File

@@ -45,6 +45,7 @@ class TestProcessConfig(TestProcessProto):
     def test_config_defaults(self):
         """Test settings in the generated config"""
         self._stop_process()
+        self._kill_process()
         config = BMConfigParser()
         config.read(os.path.join(self.home, 'keys.dat'))

View File

@@ -7,6 +7,8 @@ import os
 import tempfile
 import unittest
 
+from pybitmessage.paths import set_appdata_folder
+
 
 class TestLogger(unittest.TestCase):
     """A test case for bmconfigparser"""
@@ -35,11 +37,14 @@ level=DEBUG
 handlers=default
 '''
 
-    def test_fileConfig(self):
+    def test_fileConfig(self):  # pylint: disable=invalid-name
         """Put logging.dat with special pattern and check it was used"""
-        tmp = os.environ['BITMESSAGE_HOME'] = tempfile.gettempdir()
+        # FIXME: this doesn't always work, probably due to module
+        # load order. Workaround through .travis.yml
+        tmp = set_appdata_folder(tempfile.gettempdir())
         log_config = os.path.join(tmp, 'logging.dat')
         log_file = os.path.join(tmp, 'debug.log')
+        pass_ = 1
 
         def gen_log_config(pattern):
             """A small closure to generate logging.dat with custom pattern"""
@@ -48,11 +53,14 @@ handlers=default
 
         pattern = r' o_0 '
         gen_log_config(pattern)
+        pass_ = 1
         try:
-            from pybitmessage.debug import logger, resetLogging
+            # pylint: disable=import-outside-toplevel
+            from pybitmessage.debug import logger, resetLogging, flush_logs
             if not os.path.isfile(log_file):  # second pass
                 pattern = r' <===> '
+                pass_ = 2
                 gen_log_config(pattern)
                 resetLogging()
         except ImportError:
@@ -60,10 +68,17 @@ handlers=default
         finally:
             os.remove(log_config)
 
+        # pylint: disable=invalid-name
+        self.longMessage = True
         logger_ = logging.getLogger('default')
-        self.assertEqual(logger, logger_)
+        self.assertEqual(logger, logger_, ", pass {}".
+                         format(pass_))
         logger_.info('Testing the logger...')
-        self.assertRegexpMatches(open(log_file).read(), pattern)
+        flush_logs()
+        # pylint: disable=deprecated-method
+        self.assertRegexpMatches(open(log_file).read(), pattern,
+                                 ", pass {}".format(pass_))

View File

@@ -5,12 +5,15 @@ Common reusable code for tests and tests for pybitmessage process.
 import os
 import signal
 import subprocess  # nosec
+import sys
 import tempfile
 import time
 import unittest
 
 import psutil
 
+from pybitmessage.paths import set_appdata_folder
+
 
 def put_signal_file(path, filename):
     """Creates file, presence of which is a signal about some event."""
@@ -23,7 +26,26 @@ class TestProcessProto(unittest.TestCase):
     it starts pybitmessage in setUpClass() and stops it in tearDownClass()
     """
     _process_cmd = ['pybitmessage', '-d']
-    _threads_count = 15
+    _threads_count_min = 15
+    _threads_count_max = 16
+    _threads_names = [
+        'PyBitmessage',
+        'addressGenerato',
+        'singleWorker',
+        'SQL',
+        'objectProcessor',
+        'singleCleaner',
+        'singleAPI',
+        'Asyncore',
+        'ReceiveQueue_0',
+        'ReceiveQueue_1',
+        'ReceiveQueue_2',
+        'Announcer',
+        'InvBroadcaster',
+        'AddrBroadcaster',
+        'Downloader',
+        'Uploader'
+    ]
     _files = (
         'keys.dat', 'debug.log', 'messages.dat', 'knownnodes.dat',
         '.api_started', 'unittest.lock'
@@ -32,10 +54,28 @@ class TestProcessProto(unittest.TestCase):
     @classmethod
     def setUpClass(cls):
         """Setup environment and start pybitmessage"""
-        cls.home = os.environ['BITMESSAGE_HOME'] = tempfile.gettempdir()
+        cls.home = set_appdata_folder(tempfile.gettempdir())
+        cls._cleanup_files()
         put_signal_file(cls.home, 'unittest.lock')
+        starttime = int(time.time())
         subprocess.call(cls._process_cmd)  # nosec
-        time.sleep(5)
+        timeout = starttime + 30
+        while time.time() <= timeout:
+            try:
+                if os.path.exists(os.path.join(cls.home,
+                                               'singleton.lock')):
+                    pstat = os.stat(os.path.join(cls.home, 'singleton.lock'))
+                    if starttime <= pstat.st_mtime and pstat.st_size > 0:
+                        break
+            except OSError:
+                break
+            time.sleep(1)
+        if time.time() >= timeout:
+            raise psutil.TimeoutExpired(
+                "Timeout starting {}".format(cls._process_cmd))
+        # wait a bit for the program to fully start
+        # 10 sec should be enough
+        time.sleep(10)
         cls.pid = int(cls._get_readline('singleton.lock'))
         cls.process = psutil.Process(cls.pid)
@@ -56,6 +96,18 @@ class TestProcessProto(unittest.TestCase):
                 return False
         return True
 
+    @classmethod
+    def _kill_process(cls, timeout=5):
+        try:
+            cls.process.send_signal(signal.SIGKILL)
+            cls.process.wait(timeout)
+        # Windows or already dead
+        except (AttributeError, psutil.NoSuchProcess):
+            return True
+        # except psutil.TimeoutExpired propagates, it means something is very
+        # wrong
+        return True
+
     @classmethod
     def _cleanup_files(cls):
         for pfile in cls._files:
@@ -69,33 +121,57 @@ class TestProcessProto(unittest.TestCase):
         """Ensures that pybitmessage stopped and removes files"""
         try:
             if not cls._stop_process():
+                print(open(os.path.join(cls.home, 'debug.log'), 'rb').read())
                 cls.process.kill()
-        except psutil.NoSuchProcess:
+                cls.process.wait(5)
+        except (psutil.TimeoutExpired, psutil.NoSuchProcess):
             pass
         finally:
             cls._cleanup_files()
 
     def _test_threads(self):
-        # only count for now
-        # because of https://github.com/giampaolo/psutil/issues/613
-        # PyBitmessage
-        # - addressGenerator
-        # - singleWorker
-        # - SQL
-        # - objectProcessor
-        # - singleCleaner
-        # - singleAPI
-        # - Asyncore
-        # - ReceiveQueue_0
-        # - ReceiveQueue_1
-        # - ReceiveQueue_2
-        # - Announcer
-        # - InvBroadcaster
-        # - AddrBroadcaster
-        # - Downloader
-        self.assertEqual(
-            len(self.process.threads()), self._threads_count)
+        """Test number and names of threads"""
+        # pylint: disable=invalid-name
+        self.longMessage = True
+
+        try:
+            # using ps for posix platforms
+            # because of https://github.com/giampaolo/psutil/issues/613
+            thread_names = subprocess.check_output([
+                "ps", "-L", "-o", "comm=", "--pid",
+                str(self.process.pid)
+            ]).split()
+        except:  # pylint: disable=bare-except
+            thread_names = []
+
+        running_threads = len(thread_names)
+        if 0 < running_threads < 30:  # adequacy check
+            extra_threads = []
+            missing_threads = []
+            for thread_name in thread_names:
+                if thread_name not in self._threads_names:
+                    extra_threads.append(thread_name)
+            for thread_name in self._threads_names:
+                if thread_name not in thread_names:
+                    missing_threads.append(thread_name)
+            msg = "Missing threads: {}, Extra threads: {}".format(
+                ",".join(missing_threads), ",".join(extra_threads))
+        else:
+            running_threads = self.process.num_threads()
+            if sys.platform.startswith('win'):
+                running_threads -= 1  # one extra thread on Windows!
+            msg = "Unexpected running thread count"
+
+        self.assertGreaterEqual(
+            running_threads,
+            self._threads_count_min,
+            msg)
+        self.assertLessEqual(
+            running_threads,
+            self._threads_count_max,
+            msg)
 
 
 class TestProcessShutdown(TestProcessProto):
@@ -110,7 +186,14 @@ class TestProcessShutdown(TestProcessProto):
     @classmethod
     def tearDownClass(cls):
         """Special teardown because pybitmessage is already stopped"""
-        cls._cleanup_files()
+        try:
+            if cls.process.is_running():
+                cls.process.kill()
+                cls.process.wait(5)
+        except (psutil.TimeoutExpired, psutil.NoSuchProcess):
+            pass
+        finally:
+            cls._cleanup_files()
 
 
 class TestProcess(TestProcessProto):
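
A note on the thread-name check in _test_threads(): on Linux a thread's comm value is limited to 15 characters, which is why the expected list contains 'addressGenerato' rather than 'addressGenerator'. A standalone sketch of the same ps invocation, run against the current process (illustration only, not part of the commit):

    import os
    import subprocess

    # Per-thread command names (comm), as _test_threads() collects them for
    # the pybitmessage daemon; --pid and -L are GNU ps (procps) options, so
    # this branch only runs on Linux-like systems.
    thread_names = subprocess.check_output(
        ["ps", "-L", "-o", "comm=", "--pid", str(os.getpid())]).split()
    print(thread_names)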