updated origin with upstream changes

jai.s 2019-12-23 16:48:37 +05:30
commit 9b1088b2e2
No known key found for this signature in database
GPG Key ID: 360CFA25EFC67D12
100 changed files with 2167 additions and 1496 deletions

.gitignore vendored

@ -17,6 +17,7 @@ dist
*.egg-info
docs/_*/*
docs/autodoc/
build/sphinx/
pyan/
.buildozer/
bin/

.readthedocs.yml Normal file

@ -0,0 +1,9 @@
version: 2
python:
version: 2.7
install:
- requirements: docs/requirements.txt
- method: setuptools
path: .
system_packages: true


@ -14,12 +14,10 @@ Development
----------
Bitmessage is a collaborative project. You are welcome to submit pull requests
although if you plan to put a non-trivial amount of work into coding new
features, it is recommended that you first solicit feedback on the DevTalk
pseudo-mailing list:
BM-2D9QKN4teYRvoq2fyzpiftPh9WP9qggtzh
features, it is recommended that you first describe your ideas in a
separate issue.
Feel welcome to join chan "bitmessage", BM-2cWy7cvHoq3f1rYMerRJp8PT653jjSuEdY
which is on preview here: https://beamstat.com/chan/bitmessage
Feel welcome to join chan "bitmessage", BM-2cWy7cvHoq3f1rYMerRJp8PT653jjSuEdY
References
----------

docs/_static/custom.css vendored Normal file

@ -0,0 +1,4 @@
/* Hide "On GitHub" section from versions menu */
li.wy-breadcrumbs-aside > a.fa {
display: none;
}


@ -2,35 +2,24 @@
"""
Configuration file for the Sphinx documentation builder.
This file does only contain a selection of the most common options. For a
full list see the documentation:
For a full list of options see the documentation:
http://www.sphinx-doc.org/en/master/config
-- Path setup --------------------------------------------------------------
If extensions (or modules to document with autodoc) are in another directory,
add these directories to sys.path here. If the directory is relative to the
documentation root, use os.path.abspath to make it absolute, like shown here.
"""
import os
import sys
from sphinx.apidoc import main
from mock import Mock as MagicMock
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('../src'))
sys.path.insert(0, os.path.abspath('../src/pyelliptic'))
import version
from importlib import import_module
import version # noqa:E402
# -- Project information -----------------------------------------------------
project = u'PyBitmessage'
copyright = u'2018, The Bitmessage Team' # pylint: disable=redefined-builtin
copyright = u'2019, The Bitmessage Team' # pylint: disable=redefined-builtin
author = u'The Bitmessage Team'
# The short X.Y version
@ -50,15 +39,18 @@ release = version
# ones.
extensions = [
'sphinx.ext.autodoc',
# 'sphinx.ext.doctest', # Currently disabled due to bad doctests
'sphinx.ext.coverage', # FIXME: unused
'sphinx.ext.imgmath', # legacy unused
'sphinx.ext.intersphinx',
'sphinx.ext.linkcode',
'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.imgmath',
'sphinx.ext.viewcode',
'sphinxcontrib.apidoc',
'm2r',
]
default_role = 'obj'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@ -75,23 +67,29 @@ master_doc = 'index'
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store']
exclude_patterns = ['_build']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Don't prepend every class or function name with full module path
add_module_names = False
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['pybitmessage.']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@ -104,6 +102,10 @@ html_theme = 'alabaster'
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_css_files = [
'custom.css',
]
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
@ -114,10 +116,7 @@ html_static_path = ['_static']
#
# html_sidebars = {}
# Deal with long lines in source view
html_theme_options = {
'page_width': '1366px',
}
html_show_sourcelink = False
# -- Options for HTMLHelp output ---------------------------------------------
@ -199,10 +198,74 @@ epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
autodoc_mock_imports = [
'debug',
'pybitmessage.bitmessagekivy',
'pybitmessage.bitmessageqt.addressvalidator',
'pybitmessage.helper_startup',
'pybitmessage.network.httpd',
'pybitmessage.network.https',
'ctypes',
'dialog',
'gi',
'kivy',
'logging',
'msgpack',
'numpy',
'pkg_resources',
'pycanberra',
'pyopencl',
'PyQt4',
'pyxdg',
'qrcode',
'stem',
]
autodoc_member_order = 'bysource'
# Apidoc settings
apidoc_module_dir = '../pybitmessage'
apidoc_output_dir = 'autodoc'
apidoc_excluded_paths = [
'bitmessagekivy', 'build_osx.py',
'bitmessageqt/addressvalidator.py', 'bitmessageqt/migrationwizard.py',
'bitmessageqt/newaddresswizard.py', 'helper_startup.py',
'kivymd', 'main.py', 'navigationdrawer', 'network/http*',
'pybitmessage', 'tests', 'version.py'
]
apidoc_module_first = True
apidoc_separate_modules = True
apidoc_toc_file = False
apidoc_extra_args = ['-a']
# Napoleon settings
napoleon_google_docstring = True
# linkcode function
def linkcode_resolve(domain, info):
"""This generates source URL's for sphinx.ext.linkcode"""
if domain != 'py' or not info['module']:
return
try:
home = os.path.abspath(import_module('pybitmessage').__path__[0])
mod = import_module(info['module']).__file__
except ImportError:
return
repo = 'https://github.com/Bitmessage/PyBitmessage/blob/v0.6/src%s'
path = mod.replace(home, '')
if path != mod:
# put the link only for top level definitions
if len(info['fullname'].split('.')) > 1:
return
if path.endswith('.pyc'):
path = path[:-1]
return repo % path
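For a rough illustration of what this resolver produces (the names below are examples; SomeClass.method is hypothetical), assuming conf.py can import the pybitmessage package:

# Illustration only, not part of the diff:
print(linkcode_resolve('py', {'module': 'pybitmessage.addresses',
                              'fullname': 'encodeBase58'}))
# -> https://github.com/Bitmessage/PyBitmessage/blob/v0.6/src/addresses.py
print(linkcode_resolve('py', {'module': 'pybitmessage.addresses',
                              'fullname': 'SomeClass.method'}))
# -> None: links are generated only for top-level definitions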
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
intersphinx_mapping = {'https://docs.python.org/2.7/': None}
# -- Options for todo extension ----------------------------------------------


@ -1,2 +1,2 @@
.. mdinclude:: fabfile/README.md
.. mdinclude:: ../../../fabfile/README.md


@ -62,7 +62,7 @@ To re-build them, run `fab build_docs:dep_graphs=true`. Note that the dot graph
.. figure:: ../../../../_static/deps-sfdp.png
:alt: SFDP graph of dependencies
:width: 100 pc
:index:`SFDP` graph of dependencies
.. figure:: ../../../../_static/deps-dot.png


@ -1,8 +1,8 @@
Processes
=========
In other to keep the Bitmessage project running the team run a number of systems and accounts that form the
development pipeline and continuous delivery process. We are always striving to improve the process. Towards
In order to keep the Bitmessage project running, the team runs a number of systems and accounts that form the
development pipeline and continuous delivery process. We are always striving to improve this process. Towards
that end it is documented here.
@ -20,7 +20,7 @@ Our official Github_ account is Bitmessage. Our issue tracker is here as well.
BitMessage
----------
We eat our own dog food! You can send us bug reports via the Bitmessage chan at xxx
We eat our own dog food! You can send us bug reports via the [chan] bitmessage BM-2cWy7cvHoq3f1rYMerRJp8PT653jjSuEdY
.. _website: https://bitmessage.org


@ -1,12 +1,20 @@
.. mdinclude:: ../README.md
Documentation
-------------
.. toctree::
:maxdepth: 3
autodoc/pybitmessage
Legacy pages
------------
.. toctree::
:maxdepth: 2
overview
usage
contribute
Indices and tables
------------------

docs/requirements.txt Normal file

@ -0,0 +1,2 @@
m2r
sphinxcontrib-apidoc

pybitmessage Symbolic link

@ -0,0 +1 @@
src


@ -1,14 +1,17 @@
# Since the different tools check for overlapping violations, it makes sense to silence some warnings
# in one tool when another tool's version of the warning is preferred. This avoids duplicate lint warnings.
# max-line-length should be removed ASAP!
[pycodestyle]
max-line-length = 119
[flake8]
max-line-length = 119
ignore = E722,F841
ignore = E722,F841,W503
# E722: pylint is preferred for bare-except
# F841: pylint is preferred for unused-variable
# W503: deprecated: https://bugs.python.org/issue26763 - https://www.python.org/dev/peps/pep-0008/#should-a-line-break-before-or-after-a-binary-operator
# pylint honours the [MESSAGES CONTROL] section
# as well as [MASTER] section


@ -17,13 +17,7 @@ EXTRAS_REQUIRE = {
'qrcode': ['qrcode'],
'sound;platform_system=="Windows"': ['winsound'],
'tor': ['stem'],
'docs': [
'sphinx', # fab build_docs
'graphviz', # fab build_docs
'curses', # src/depends.py
'python2-pythondialog', # src/depends.py
'm2r', # fab build_docs
]
'docs': ['sphinx', 'sphinxcontrib-apidoc', 'm2r']
}
@ -155,5 +149,9 @@ if __name__ == "__main__":
# ]
},
scripts=['src/pybitmessage'],
cmdclass={'install': InstallCmd}
cmdclass={'install': InstallCmd},
command_options={
'build_sphinx': {
'source_dir': ('setup.py', 'docs')}
}
)
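With source_dir wired to docs/, the Sphinx build can presumably also be run through setuptools (shown as an assumption, using the standard build_sphinx command; its output directory matches the build/sphinx/ entry added to .gitignore above):

# Assumed invocation, not part of this diff:
#   python setup.py build_sphinx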


@ -1,7 +1,5 @@
"""
src/addresses.py
================
Operations with addresses
"""
# pylint: disable=redefined-outer-name,inconsistent-return-statements
@ -18,8 +16,9 @@ ALPHABET = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
def encodeBase58(num, alphabet=ALPHABET):
"""Encode a number in Base X
`num`: The number to encode
`alphabet`: The alphabet to use for encoding
Args:
num: The number to encode
alphabet: The alphabet to use for encoding
"""
if num == 0:
return alphabet[0]
@ -27,7 +26,6 @@ def encodeBase58(num, alphabet=ALPHABET):
base = len(alphabet)
while num:
rem = num % base
# print 'num is:', num
num = num // base
arr.append(alphabet[rem])
arr.reverse()
@ -37,9 +35,9 @@ def encodeBase58(num, alphabet=ALPHABET):
def decodeBase58(string, alphabet=ALPHABET):
"""Decode a Base X encoded string into the number
Arguments:
- `string`: The encoded string
- `alphabet`: The alphabet to use for encoding
Args:
string: The encoded string
alphabet: The alphabet to use for encoding
"""
base = len(alphabet)
num = 0
@ -54,11 +52,20 @@ def decodeBase58(string, alphabet=ALPHABET):
return num
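As an illustration of the intended round-trip behaviour (not part of the diff; the decode loop body is elided in this hunk):

# Illustration only: encode/decode are inverses for non-negative integers
assert encodeBase58(0) == '1'                      # alphabet[0]
assert decodeBase58(encodeBase58(12345)) == 12345  # round-trip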
class varintEncodeError(Exception):
"""Exception class for encoding varint"""
pass
class varintDecodeError(Exception):
"""Exception class for decoding varint data"""
pass
def encodeVarint(integer):
"""Convert integer into varint bytes"""
if integer < 0:
logger.error('varint cannot be < 0')
raise SystemExit
raise varintEncodeError('varint cannot be < 0')
if integer < 253:
return pack('>B', integer)
if integer >= 253 and integer < 65536:
@ -68,13 +75,7 @@ def encodeVarint(integer):
if integer >= 4294967296 and integer < 18446744073709551616:
return pack('>B', 255) + pack('>Q', integer)
if integer >= 18446744073709551616:
logger.error('varint cannot be >= 18446744073709551616')
raise SystemExit
class varintDecodeError(Exception):
"""Exception class for decoding varint data"""
pass
raise varintEncodeError('varint cannot be >= 18446744073709551616')
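For reference, a sketch of the varint layout encodeVarint implements; the two- and four-byte branch bodies are elided in this hunk, so the 253/254 markers below are inferred from the Bitmessage protocol rather than shown in the diff:

# Layout sketch (illustration only):
#   0 <= n < 253        -> pack('>B', n)                     1 byte
#   253 <= n < 2**16    -> pack('>B', 253) + pack('>H', n)   3 bytes
#   2**16 <= n < 2**32  -> pack('>B', 254) + pack('>I', n)   5 bytes
#   2**32 <= n < 2**64  -> pack('>B', 255) + pack('>Q', n)   9 bytes
# anything outside [0, 2**64) raises varintEncodeError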
def decodeVarint(data):


@ -1,19 +1,15 @@
# pylint: disable=too-many-locals,too-many-lines,no-self-use,too-many-public-methods,too-many-branches
# pylint: disable=too-many-statements
"""
src/api.py
==========
# Copyright (c) 2012-2016 Jonathan Warren
# Copyright (c) 2012-2019 The Bitmessage developers
"""
This is not what you run to run the Bitmessage API. Instead, enable the API
( https://bitmessage.org/wiki/API ) and optionally enable daemon mode
( https://bitmessage.org/wiki/Daemon ) then run bitmessagemain.py.
"""
from __future__ import absolute_import
import base64
import errno
import hashlib
@ -42,8 +38,8 @@ from bmconfigparser import BMConfigParser
from debug import logger
from helper_ackPayload import genAckPayload
from helper_sql import SqlBulkExecute, sqlExecute, sqlQuery, sqlStoredProcedure
from helper_threading import StoppableThread
from inventory import Inventory
from network.threads import StoppableThread
str_chan = '[chan]'
@ -99,6 +95,8 @@ class singleAPI(StoppableThread):
for attempt in range(50):
try:
if attempt > 0:
logger.warning(
'Failed to start API listener on port %s', port)
port = random.randint(32767, 65535)
se = StoppableXMLRPCServer(
(BMConfigParser().get(
@ -110,8 +108,9 @@ class singleAPI(StoppableThread):
continue
else:
if attempt > 0:
logger.warning('Setting apiport to %s', port)
BMConfigParser().set(
"bitmessagesettings", "apiport", str(port))
'bitmessagesettings', 'apiport', str(port))
BMConfigParser().save()
break
se.register_introspection_functions()


@ -1,16 +1,15 @@
"""
src/identiconGeneration.py
=================================
Core classes for loading images and converting them to a Texture.
The raw image data can be kept in memory for further access
"""
# pylint: disable=import-error
import hashlib
from io import BytesIO
from PIL import Image
from kivy.core.image import Image as CoreImage
from kivy.uix.image import Image as kiImage
""" Core classes for loading images and converting them to a Texture.
The raw image data can be keep in memory for further access """
# pylint: disable=import-error
# constants


@ -25,8 +25,7 @@
#:set color_font (0.957, 0.890, 0.843, 1) # off white
<MyNavigationDrawerIconButton@NavigationDrawerIconButton>:
font_style: 'Body1'
theme_text_color: 'Secondary'
icon: 'checkbox-blank-circle'
<MySpinnerOption@SpinnerOption>:
font_size: '12.5sp'
@ -40,7 +39,7 @@
height: dp(7)
NavigationDrawerSubheader:
text: "Accounts"
AddressDropdown:
NavigationDrawerIconButton:
CustomSpinner:
id: btn
pos_hint:{"x":0,"y":.25}
@ -58,48 +57,47 @@
y: self.parent.y + self.parent.height/4
size: self.parent.height/2, self.parent.height/2
ArrowImg:
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
id: inbox_cnt
icon: 'email-open'
text: "Inbox"
on_release: app.root.ids.scr_mngr.current = 'inbox'
badge_text: "0"
on_press: app.load_screen(self)
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
id: send_cnt
icon: 'send'
text: "Sent"
#use_active: False
on_release: app.root.ids.scr_mngr.current = 'sent'
badge_text: "0"
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
id: draft_cnt
icon: 'message-draw'
text: "Draft"
on_release: app.root.ids.scr_mngr.current = 'draft'
badge_text: "0"
#MyNavigationDrawerIconButton:
#NavigationDrawerIconButton:
#text: "Starred"
#icon:'star'
#on_release: app.root.ids.scr_mngr.current = 'starred'
#badge_text: "0"
#MyNavigationDrawerIconButton:
#NavigationDrawerIconButton:
#icon: 'archive'
#text: "Archieve"
#on_release: app.root.ids.scr_mngr.current = 'archieve'
#badge_text: "0"
#MyNavigationDrawerIconButton:
#NavigationDrawerIconButton:
#icon: 'email-open-outline'
#text: "Spam"
#on_release: app.root.ids.scr_mngr.current = 'spam'
#badge_text: "0"
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
id: trash_cnt
icon: 'delete'
text: "Trash"
on_release: app.root.ids.scr_mngr.current = 'trash'
badge_text: "0"
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
id: allmail_cnt
text: "All Mails"
icon:'contact-mail'
@ -109,31 +107,31 @@
NavigationDrawerDivider:
NavigationDrawerSubheader:
text: "All labels"
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
text: "Address Book"
icon:'book-multiple'
on_release: app.root.ids.scr_mngr.current = 'addressbook'
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
text: "Settings"
icon:'settings'
on_release: app.root.ids.scr_mngr.current = 'set'
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
text: "Subscriptions/Payment"
icon:'bell'
on_release: app.root.ids.scr_mngr.current = 'payment'
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
text: "Credits"
icon:'wallet'
on_release: app.root.ids.scr_mngr.current = 'credits'
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
text: "new address"
icon:'account-plus'
on_release: app.root.ids.scr_mngr.current = 'login'
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
text: "Network Status"
icon:'server-network'
on_release: app.root.ids.scr_mngr.current = 'networkstat'
MyNavigationDrawerIconButton:
NavigationDrawerIconButton:
text: "My Addresses"
icon:'account-multiple'
on_release: app.root.ids.scr_mngr.current = 'myaddress'
@ -547,6 +545,12 @@ NavigationLayout:
color: (1,1,1,1)
halign: 'center'
<AddressSuccessful>:
name: 'add_sucess'
Label:
text: 'Successfully created a new bit address'
color: 0,0,0,1
<Setting>:
name: 'set'
ScrollView:


@ -1,8 +1,8 @@
"""
src/bitmessagekivy/mpybit.py
=================================
Bitmessage Android (mobile) interface
"""
# pylint: disable=relative-import, unused-variable, import-error, no-name-in-module, too-many-lines
# pylint: disable=relative-import, import-error, no-name-in-module
# pylint: disable=too-few-public-methods, too-many-lines, unused-argument
import os
import time
from bmconfigparser import BMConfigParser
@ -36,9 +36,11 @@ from kivy.uix.screenmanager import Screen
from kivy.uix.spinner import Spinner
from kivy.uix.textinput import TextInput
from kivy.utils import platform
from bitmessagekivy import kivy_helper_search
from kivymd.uix.dialog import MDDialog
from bitmessagekivy import kivy_helper_search
from kivymd.uix.button import MDIconButton
from kivymd.uix.dialog import MDDialog
from kivymd.uix.label import MDLabel
from kivymd.uix.list import (
ILeftBody,
@ -58,23 +60,22 @@ import queues
from semaphores import kivyuisignaler
import state
from bitmessagekivy.uikivysignaler import UIkivySignaler
from bitmessagekivy import identiconGeneration
from addresses import addBMIfNotPresent, decodeAddress, encodeVarint
# pylint: disable=unused-argument, too-few-public-methods
from addresses import addBMIfNotPresent, decodeAddress
def toast(text):
"""Method will display the toast message."""
from kivymd.toast.kivytoast import toast # pylint: disable=redefined-outer-name
"""Function displays toast message"""
# pylint: disable=redefined-outer-name
from kivymd.toast.kivytoast import toast
toast(text)
return
class Navigatorss(MDNavigationDrawer):
"""Navigators class contains image, title and logo."""
"""Navigator class (image, title and logo)"""
image_source = StringProperty('images/qidenticon_two.png')
title = StringProperty('Navigation')
drawer_logo = StringProperty()
@ -521,36 +522,35 @@ class AddressBook(Screen):
"DELETE FROM addressbook WHERE address = '{}';".format(address))
class SelectableRecycleBoxLayout(FocusBehavior, LayoutSelectionBehavior,
RecycleBoxLayout):
"""Adds selection and focus behaviour to the view."""
class SelectableRecycleBoxLayout(
FocusBehavior, LayoutSelectionBehavior, RecycleBoxLayout):
"""Adds selection and focus behaviour to the view"""
# pylint: disable = too-many-ancestors, duplicate-bases
pass
class SelectableLabel(RecycleDataViewBehavior, Label):
"""Add selection support to the Label."""
"""Add selection support to the Label"""
index = None
selected = BooleanProperty(False)
selectable = BooleanProperty(True)
def refresh_view_attrs(self, rv, index, data):
"""Catch and handle the view changes."""
"""Catch and handle the view changes"""
self.index = index
return super(SelectableLabel, self).refresh_view_attrs(
rv, index, data)
# pylint: disable=inconsistent-return-statements
def on_touch_down(self, touch):
"""Add selection on touch down."""
"""Add selection on touch down"""
if super(SelectableLabel, self).on_touch_down(touch):
return True
if self.collide_point(*touch.pos) and self.selectable:
return self.parent.select_with_touch(self.index, touch)
def apply_selection(self, rv, index, is_selected):
"""Respond to the selection of items in the view."""
"""Respond to the selection of items in the view"""
self.selected = is_selected
if is_selected:
print("selection changed to {0}".format(rv.data[index]))
@ -559,10 +559,9 @@ class SelectableLabel(RecycleDataViewBehavior, Label):
class RV(RecycleView):
"""Recycling View."""
def __init__(self, **kwargs): # pylint: disable=useless-super-delegation
"""Recycling Method."""
"""Recycling View"""
def __init__(self, **kwargs): # pylint: disable=useless-super-delegation
"""Recycling Method"""
super(RV, self).__init__(**kwargs)
@ -584,7 +583,6 @@ class DropDownWidget(BoxLayout):
sendMessageToPeople = True
if sendMessageToPeople:
if toAddress != '' and subject and message:
from addresses import decodeAddress
status, addressVersionNumber, streamNumber, ripe = (
decodeAddress(toAddress))
if status == 'success':
@ -603,10 +601,7 @@ class DropDownWidget(BoxLayout):
state.send_draft_mail)
self.parent.parent.screens[15].clear_widgets()
self.parent.parent.screens[15].add_widget(Draft())
# state.detailPageType = ''
# state.send_draft_mail = None
else:
from addresses import addBMIfNotPresent
toAddress = addBMIfNotPresent(toAddress)
statusIconColor = 'red'
if (addressVersionNumber > 4) or (
@ -706,8 +701,7 @@ class DropDownWidget(BoxLayout):
class MyTextInput(TextInput):
"""Takes the text input in the field."""
"""Takes the text input in the field"""
txt_input = ObjectProperty()
flt_list = ObjectProperty()
word_list = ListProperty()
@ -719,7 +713,7 @@ class MyTextInput(TextInput):
super(MyTextInput, self).__init__(**kwargs)
def on_text(self, instance, value):
"""Find all the occurrence of the word."""
"""Find all the occurrence of the word"""
self.parent.parent.parent.parent.ids.rv.data = []
matches = [self.word_list[i] for i in range(
len(self.word_list)) if self.word_list[
@ -734,7 +728,7 @@ class MyTextInput(TextInput):
self.parent.height = 400
def keyboard_on_key_down(self, window, keycode, text, modifiers):
"""Key Down."""
"""Key Down"""
if self.suggestion_text and keycode[1] == 'tab':
self.insert_text(self.suggestion_text + ' ')
return True
@ -763,14 +757,12 @@ class Payment(Screen):
class Credits(Screen):
"""Credits Method"""
available_credits = StringProperty(
'{0}'.format('0'))
"""Credits Module"""
available_credits = StringProperty('{0}'.format('0'))
class Login(Screen):
"""Login Screeen."""
"""Login Screeen"""
pass
@ -809,8 +801,7 @@ class NetworkStat(Screen):
class ContentNavigationDrawer(Navigatorss):
"""Navigate Content Drawer."""
"""Navigate Content Drawer"""
pass
@ -1098,7 +1089,7 @@ class Trash(Screen):
"""Trash Screen uses screen to show widgets of screens"""
trash_messages = ListProperty()
has_refreshed = True
# delete_index = StringProperty()
delete_index = StringProperty()
table_name = StringProperty()
def __init__(self, *args, **kwargs):
@ -1213,7 +1204,7 @@ class Trash(Screen):
events_callback=self.callback_for_delete_msg)
delete_msg_dialog.open()
def callback_for_delete_msg(self, text_item, *arg):
def callback_for_delete_msg(self, text_item):
"""Getting the callback of alert box"""
if text_item == 'Yes':
self.delete_message_from_trash()
@ -1224,9 +1215,11 @@ class Trash(Screen):
"""Deleting message from trash"""
self.children[1].active = True
if self.table_name == 'inbox':
sqlExecute("DELETE FROM inbox WHERE msgid = ?;", self.delete_index)
sqlExecute("DELETE FROM inbox WHERE msgid = ?;", str(
self.delete_index))
elif self.table_name == 'sent':
sqlExecute("DELETE FROM sent WHERE ackdata = ?;", self.delete_index)
sqlExecute("DELETE FROM sent WHERE ackdata = ?;", str(
self.delete_index))
msg_count_objs = state.kivyapp.root.children[2].children[0].ids
if int(state.trash_count) > 0:
msg_count_objs.trash_cnt.badge_text = str(
@ -1236,16 +1229,15 @@ class Trash(Screen):
class Page(Screen):
"""Page Screen show widgets of page."""
"""Page Screen show widgets of page"""
pass
class Create(Screen):
"""Creates the screen widgets."""
"""Creates the screen widgets"""
def __init__(self, **kwargs):
"""Getting Labels and address from addressbook."""
"""Getting Labels and address from addressbook"""
super(Create, self).__init__(**kwargs)
widget_1 = DropDownWidget()
widget_1.ids.txt_input.word_list = [
@ -1256,8 +1248,7 @@ class Create(Screen):
class Setting(Screen):
"""Setting the Screen components."""
"""Setting the Screen components"""
pass
@ -1292,6 +1283,8 @@ class NavigateApp(App): # pylint: disable=too-many-public-methods
def build(self):
"""Method builds the widget"""
print("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSss")
print(os.path.join(os.path.dirname(__file__), 'main.kv'))
main_widget = Builder.load_file(
os.path.join(os.path.dirname(__file__), 'main.kv'))
self.nav_drawer = Navigatorss()
@ -1841,7 +1834,8 @@ class GrashofPopup(Popup):
elif status == 'checksumfailed':
text = "The address is not typed or copied correctly(the checksum failed)."
elif status == 'versiontoohigh':
text = "The version number of this address is higher than this software can support. Please upgrade Bitmessage."
text = "The version number of this address is higher"\
" than this software can support. Please upgrade Bitmessage."
elif status == 'invalidcharacters':
text = "The address contains invalid characters."
elif status == 'ripetooshort':
@ -1854,40 +1848,36 @@ class GrashofPopup(Popup):
class AvatarSampleWidget(ILeftBody, Image):
"""Avatar Sample Widget."""
"""Avatar Sample Widget"""
pass
class IconLeftSampleWidget(ILeftBodyTouch, MDIconButton):
"""Left icon sample widget."""
"""Left icon sample widget"""
pass
class IconRightSampleWidget(IRightBodyTouch, MDCheckbox):
"""Right icon sample widget."""
"""Right icon sample widget"""
pass
class NavigationDrawerTwoLineListItem(
TwoLineListItem, NavigationDrawerHeaderBase):
"""Navigation Drawer in Listitems."""
"""Navigation Drawer in Listitems"""
address_property = StringProperty()
def __init__(self, **kwargs):
"""Method for Navigation Drawer."""
"""Method for Navigation Drawer"""
super(NavigationDrawerTwoLineListItem, self).__init__(**kwargs)
Clock.schedule_once(lambda dt: self.setup())
def setup(self):
"""Bind Controller.current_account property."""
"""Bind Controller.current_account property"""
pass
def on_current_account(self, account):
"""Account detail."""
"""Account detail"""
pass
def _update_specific_text_color(self, instance, value):
@ -1965,7 +1955,7 @@ class MailDetail(Screen):
1].ids.search_field.text = ''
sqlExecute(
"UPDATE inbox SET folder = 'trash' WHERE"
" msgid = ?;", state.mail_id)
" msgid = ?;", str(state.mail_id))
msg_count_objs.inbox_cnt.badge_text = str(
int(state.inbox_count) - 1)
state.inbox_count = str(int(state.inbox_count) - 1)
@ -1973,7 +1963,8 @@ class MailDetail(Screen):
self.parent.screens[0].loadMessagelist(state.association)
elif state.detailPageType == 'draft':
sqlExecute("DELETE FROM sent WHERE ackdata = ?;", state.mail_id)
sqlExecute("DELETE FROM sent WHERE ackdata = ?;", str(
state.mail_id))
msg_count_objs.draft_cnt.badge_text = str(
int(state.draft_count) - 1)
state.draft_count = str(int(state.draft_count) - 1)
@ -2008,7 +1999,7 @@ class MailDetail(Screen):
"""Reply inbox messages"""
data = sqlQuery(
"select toaddress, fromaddress, subject, message from inbox where"
" msgid = ?;", state.mail_id)
" msgid = ?;", str(state.mail_id))
composer_obj = self.parent.screens[2].children[1].ids
composer_obj.ti.text = data[0][0]
composer_obj.btn.text = data[0][0]
@ -2144,10 +2135,10 @@ class AddbookDetailPopup(Popup):
class ShowQRCode(Screen):
"""ShowQRCode Screen uses to show the detail of mails."""
"""ShowQRCode Screen uses to show the detail of mails"""
def qrdisplay(self):
"""Method used for showing QR Code."""
"""Showing QR Code"""
# self.manager.parent.parent.parent.ids.search_bar.clear_widgets()
self.ids.qr.clear_widgets()
from kivy.garden.qrcode import QRCodeWidget
@ -2289,7 +2280,8 @@ class Draft(Screen):
data_index))
try:
msg_count_objs = (
self.parent.parent.parent.parent.parent.children[2].children[0].ids)
self.parent.parent.parent.parent.parent.parent.children[
2].children[0].ids)
except Exception:
msg_count_objs = self.parent.parent.parent.parent.parent.parent.children[
2].children[0].ids
@ -2313,9 +2305,7 @@ class Draft(Screen):
encoding = 3
sendMessageToPeople = True
if sendMessageToPeople:
from addresses import decodeAddress
streamNumber, ripe = decodeAddress(toAddress)[2:]
from addresses import addBMIfNotPresent
toAddress = addBMIfNotPresent(toAddress)
stealthLevel = BMConfigParser().safeGetInt(
'bitmessagesettings', 'ackstealthlevel')
@ -2350,10 +2340,10 @@ class Draft(Screen):
class CustomSpinner(Spinner):
"""This class is used for setting spinner size."""
"""This class is used for setting spinner size"""
def __init__(self, *args, **kwargs):
"""Method used for setting size of spinner."""
"""Setting size of spinner"""
super(CustomSpinner, self).__init__(*args, **kwargs)
self.dropdown_cls.max_height = Window.size[1] / 3
@ -2556,20 +2546,17 @@ def avatarImageFirstLetter(letter_string):
class Starred(Screen):
"""Starred Screen show widgets of page."""
"""Starred Screen show widgets of page"""
pass
class Archieve(Screen):
"""Archieve Screen show widgets of page."""
"""Archieve Screen show widgets of page"""
pass
class Spam(Screen):
"""Spam Screen show widgets of page."""
"""Spam Screen show widgets of page"""
pass
@ -2584,8 +2571,3 @@ class LoadingPopup(Popup):
def dismiss_popup(self, dt):
"""Dismiss popups"""
self.dismiss()
class AddressDropdown(OneLineIconListItem):
"""AddressDropdown showns all the addresses"""
pass


@ -1,8 +1,7 @@
#!/usr/bin/python2.7
"""
src/bitmessagemain.py
=================================
The PyBitmessage startup script
"""
# !/usr/bin/python2.7
# Copyright (c) 2012-2016 Jonathan Warren
# Copyright (c) 2012-2019 The Bitmessage developers
# Distributed under the MIT/X11 software license. See the accompanying
@ -11,8 +10,6 @@ src/bitmessagemain.py
# Right now, PyBitmessage only support connecting to stream 1. It doesn't
# yet contain logic to expand into further streams.
# The software version variable is now held in shared.py
import os
import sys
import ctypes
@ -26,41 +23,29 @@ import time
import traceback
from struct import pack
from helper_startup import (
isOurOperatingSystemLimitedToHavingVeryFewHalfOpenConnections
)
from singleinstance import singleinstance
import defaults
import depends
import shared
import knownnodes
import state
import shutdown
from debug import logger
# Classes
from class_sqlThread import sqlThread
from class_singleCleaner import singleCleaner
from class_objectProcessor import objectProcessor
from class_singleWorker import singleWorker
from class_addressGenerator import addressGenerator
from bmconfigparser import BMConfigParser
from debug import logger # this should go before any threads
from helper_startup import (
isOurOperatingSystemLimitedToHavingVeryFewHalfOpenConnections
)
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from network.dandelion import Dandelion
from network.networkthread import BMNetworkThread
from network.receivequeuethread import ReceiveQueueThread
from network.announcethread import AnnounceThread
from network.invthread import InvThread
from network.addrthread import AddrThread
from network.downloadthread import DownloadThread
from network.uploadthread import UploadThread
# Helper Functions
import helper_threading
from knownnodes import readKnownNodes
# Network objects and threads
from network import (
BMConnectionPool, Dandelion,
AddrThread, AnnounceThread, BMNetworkThread, InvThread, ReceiveQueueThread,
DownloadThread, UploadThread)
from singleinstance import singleinstance
# Synchronous threads
from threads import (
set_thread_name,
addressGenerator, objectProcessor, singleCleaner, singleWorker, sqlThread)
app_dir = os.path.dirname(os.path.abspath(__file__))
os.chdir(app_dir)
@ -70,7 +55,7 @@ depends.check_dependencies()
def connectToStream(streamNumber):
"""Method helps us to connect with the stream"""
"""Connect to a stream"""
state.streamsInWhichIAmParticipating.append(streamNumber)
if isOurOperatingSystemLimitedToHavingVeryFewHalfOpenConnections():
@ -87,14 +72,6 @@ def connectToStream(streamNumber):
except:
pass
with knownnodes.knownNodesLock:
if streamNumber not in knownnodes.knownNodes:
knownnodes.knownNodes[streamNumber] = {}
if streamNumber * 2 not in knownnodes.knownNodes:
knownnodes.knownNodes[streamNumber * 2] = {}
if streamNumber * 2 + 1 not in knownnodes.knownNodes:
knownnodes.knownNodes[streamNumber * 2 + 1] = {}
BMConnectionPool().connectToStream(streamNumber)
@ -111,7 +88,7 @@ def _fixSocket():
addressToString = ctypes.windll.ws2_32.WSAAddressToStringA
def inet_ntop(family, host):
"""Method converts an IP address in packed binary format to string format"""
"""Converting an IP address in packed binary format to string format"""
if family == socket.AF_INET:
if len(host) != 4:
raise ValueError("invalid IPv4 host")
@ -133,7 +110,7 @@ def _fixSocket():
stringToAddress = ctypes.windll.ws2_32.WSAStringToAddressA
def inet_pton(family, host):
"""Method converts an IP address in string format to a packed binary format"""
"""Converting an IP address in string format to a packed binary format"""
buf = "\0" * 28
lengthBuf = pack("I", len(buf))
if stringToAddress(str(host),
@ -188,9 +165,8 @@ def signal_handler(signum, frame):
because the UI captures the signal.')
class Main: # pylint: disable=no-init, old-style-class
"""Method starts the proxy config plugin"""
class Main(object):
"""Main PyBitmessage class"""
@staticmethod
def start_proxyconfig(config):
"""Check socksproxytype and start any proxy configuration plugin"""
@ -213,15 +189,15 @@ class Main: # pylint: disable=no-init, old-style-class
'Started proxy config plugin %s in %s sec',
proxy_type, time.time() - proxyconfig_start)
def start(self): # pylint: disable=too-many-statements, too-many-branches, too-many-locals
"""This method helps to start the daemon"""
def start(self): # pylint: disable=too-many-statements, too-many-branches, too-many-locals
"""Start main application"""
_fixSocket()
config = BMConfigParser()
daemon = config.safeGetBoolean('bitmessagesettings', 'daemon')
try:
opts, args = getopt.getopt(
opts, _ = getopt.getopt(
sys.argv[1:], "hcdt",
["help", "curses", "daemon", "test"])
@ -229,7 +205,7 @@ class Main: # pylint: disable=no-init, old-style-class
self.usage()
sys.exit(2)
for opt, arg in opts:
for opt, _ in opts:
if opt in ("-h", "--help"):
self.usage()
sys.exit()
@ -287,7 +263,7 @@ class Main: # pylint: disable=no-init, old-style-class
self.setSignalHandler()
helper_threading.set_thread_name("PyBitmessage")
set_thread_name("PyBitmessage")
state.dandelion = config.safeGetInt('network', 'dandelion')
# dandelion requires outbound connections, without them,
@ -302,7 +278,10 @@ class Main: # pylint: disable=no-init, old-style-class
defaults.networkDefaultProofOfWorkNonceTrialsPerByte / 100)
defaults.networkDefaultPayloadLengthExtraBytes = int(
defaults.networkDefaultPayloadLengthExtraBytes / 100)
knownnodes.readKnownNodes()
readKnownNodes()
# Not needed if objproc is disabled
if state.enableObjProc:
@ -449,7 +428,8 @@ class Main: # pylint: disable=no-init, old-style-class
# wait until grandchild ready
while True:
time.sleep(1)
os._exit(0) # pylint: disable=protected-access
os._exit(0) # pylint: disable=protected-access
except AttributeError:
# fork not implemented
pass
@ -470,7 +450,8 @@ class Main: # pylint: disable=no-init, old-style-class
# wait until child ready
while True:
time.sleep(1)
os._exit(0) # pylint: disable=protected-access
os._exit(0) # pylint: disable=protected-access
except AttributeError:
# fork not implemented
pass
@ -491,34 +472,39 @@ class Main: # pylint: disable=no-init, old-style-class
os.kill(parentPid, signal.SIGTERM)
os.kill(grandfatherPid, signal.SIGTERM)
def setSignalHandler(self): # pylint: disable=no-self-use
@staticmethod
def setSignalHandler():
"""Setting the Signal Handler"""
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
# signal.signal(signal.SIGINT, signal.SIG_DFL)
@staticmethod
def usage(self):
def usage():
"""Displaying the usages"""
print('Usage: ' + sys.argv[0] + ' [OPTIONS]')
print ('''
Options:
-h, --help show this help message and exit
-c, --curses use curses (text mode) interface
-d, --daemon run in daemon (background) mode
-t, --test dryrun, make testing
print('''
Options:
-h, --help show this help message and exit
-c, --curses use curses (text mode) interface
-d, --daemon run in daemon (background) mode
-t, --test dryrun, make testing
All parameters are optional.
''')
All parameters are optional.
''')
def stop(self): # pylint: disable=no-self-use
"""Method helps to stop the Bitmessage Deamon"""
@staticmethod
def stop():
"""Stop main application"""
with shared.printLock:
print('Stopping Bitmessage Daemon.')
shutdown.doCleanShutdown()
# ..todo: nice function but no one is using this
def getApiAddress(self): # pylint: disable=no-self-use
"""This method returns the Api Addresses"""
# .. todo:: nice function but no one is using this
@staticmethod
def getApiAddress():
"""This function returns API address and port"""
if not BMConfigParser().safeGetBoolean(
'bitmessagesettings', 'apienabled'):
return None
@ -528,7 +514,7 @@ class Main: # pylint: disable=no-init, old-style-class
def main():
"""Start of the main thread"""
"""Triggers main module"""
mainprogram = Main()
mainprogram.start()


@ -99,6 +99,8 @@ class SettingsDialog(QtGui.QDialog):
config.getboolean('bitmessagesettings', 'socksauthentication'))
self.checkBoxSocksListen.setChecked(
config.getboolean('bitmessagesettings', 'sockslisten'))
self.checkBoxOnionOnly.setChecked(
config.safeGetBoolean('bitmessagesettings', 'onionservicesonly'))
proxy_type = config.safeGet(
'bitmessagesettings', 'socksproxytype', 'none')
@ -110,6 +112,7 @@ class SettingsDialog(QtGui.QDialog):
self.lineEditSocksPassword.setEnabled(False)
self.checkBoxAuthentication.setEnabled(False)
self.checkBoxSocksListen.setEnabled(False)
self.checkBoxOnionOnly.setEnabled(False)
elif proxy_type == 'SOCKS4a':
self.comboBoxProxyType.setCurrentIndex(1)
elif proxy_type == 'SOCKS5':
@ -200,11 +203,13 @@ class SettingsDialog(QtGui.QDialog):
self.lineEditSocksPassword.setEnabled(False)
self.checkBoxAuthentication.setEnabled(False)
self.checkBoxSocksListen.setEnabled(False)
self.checkBoxOnionOnly.setEnabled(False)
elif comboBoxIndex in (1, 2):
self.lineEditSocksHostname.setEnabled(True)
self.lineEditSocksPort.setEnabled(True)
self.checkBoxAuthentication.setEnabled(True)
self.checkBoxSocksListen.setEnabled(True)
self.checkBoxOnionOnly.setEnabled(True)
if self.checkBoxAuthentication.isChecked():
self.lineEditSocksUsername.setEnabled(True)
self.lineEditSocksPassword.setEnabled(True)
@ -334,6 +339,11 @@ class SettingsDialog(QtGui.QDialog):
self.lineEditSocksPassword.text()))
self.config.set('bitmessagesettings', 'sockslisten', str(
self.checkBoxSocksListen.isChecked()))
if self.checkBoxOnionOnly.isChecked() \
and not self.config.safeGetBoolean('bitmessagesettings', 'onionservicesonly'):
self.net_restart_needed = True
self.config.set('bitmessagesettings', 'onionservicesonly', str(
self.checkBoxOnionOnly.isChecked()))
try:
# Rounding to integers just for aesthetics
self.config.set('bitmessagesettings', 'maxdownloadrate', str(


@ -403,6 +403,13 @@
</property>
</widget>
</item>
<item row="4" column="1" colspan="4">
<widget class="QCheckBox" name="checkBoxOnionOnly">
<property name="text">
<string>Only connect to onion services (*.onion)</string>
</property>
</widget>
</item>
<item row="0" column="1">
<widget class="QComboBox" name="comboBoxProxyType">
<item>


@ -56,7 +56,7 @@ class BMConfigParser(configparser.ConfigParser):
raise ValueError("Invalid value %s" % value)
return configparser.ConfigParser.set(self, section, option, value)
def get(self, section, option, raw=False, variables=None):
def get(self, section, option, raw=False, variables=None): # pylint: disable=arguments-differ
try:
if section == "bitmessagesettings" and option == "timeformat":
return configparser.ConfigParser.get(
@ -85,6 +85,7 @@ class BMConfigParser(configparser.ConfigParser):
def safeGetBoolean(self, section, field):
config = configparser.ConfigParser()
try:
# Used in Python 2.7:
# return self.getboolean(section, field)
@ -96,6 +97,7 @@ class BMConfigParser(configparser.ConfigParser):
def safeGetInt(self, section, field, default=0):
config = configparser.ConfigParser()
try:
# Used in Python 2.7:
# return self.getint(section, field)
@ -106,6 +108,7 @@ class BMConfigParser(configparser.ConfigParser):
return default
def safeGet(self, section, option, default=None):
"""Return value as is, default on exceptions, None if default missing"""
try:
return self.get(section, option)
except (configparser.NoSectionError, configparser.NoOptionError,
@ -118,6 +121,7 @@ class BMConfigParser(configparser.ConfigParser):
def addresses(self):
return [x for x in BMConfigParser().sections() if x.startswith('BM-')]
def read(self, filenames):
configparser.ConfigParser.read(self, filenames)
for section in self.sections():
@ -139,6 +143,7 @@ class BMConfigParser(configparser.ConfigParser):
continue
def save(self):
"""Save the runtime config onto the filesystem"""
fileName = os.path.join(state.appdata, 'keys.dat')
fileNameBak = '.'.join([
fileName, datetime.now().strftime("%Y%j%H%M%S%f"), 'bak'])
@ -160,12 +165,15 @@ class BMConfigParser(configparser.ConfigParser):
os.remove(fileNameBak)
def validate(self, section, option, value):
"""Input validator interface (using factory pattern)"""
try:
return getattr(self, 'validate_{}_{}'.format(section, option))(value)
except AttributeError:
return True
def validate_bitmessagesettings_maxoutboundconnections(self, value):
@staticmethod
def validate_bitmessagesettings_maxoutboundconnections(value):
"""Reject maxoutboundconnections that are too high or too low"""
try:
value = int(value)
except ValueError:


@ -13,7 +13,7 @@ package.domain = org.test
source.dir = .
# (list) Source files to include (let empty to include all the files)
source.include_exts = py,png,jpg,kv,atlas
source.include_exts = py,png,jpg,kv,atlas,gif,zip
# (list) List of inclusions using pattern matching
#source.include_patterns = assets/*,images/*.png


@ -1,4 +1,6 @@
"""
A thread for creating addresses
"""
import time
import hashlib
from binascii import hexlify
@ -11,13 +13,13 @@ import shared
import defaults
import highlevelcrypto
from bmconfigparser import BMConfigParser
from debug import logger
from addresses import decodeAddress, encodeAddress, encodeVarint
from fallback import RIPEMD160Hash
from helper_threading import StoppableThread
from network.threads import StoppableThread
class addressGenerator(StoppableThread):
"""A thread for creating addresses"""
name = "addressGenerator"
@ -29,6 +31,11 @@ class addressGenerator(StoppableThread):
super(addressGenerator, self).stopThread()
def run(self):
"""
Process the requests for addresses generation
from `.queues.addressGeneratorQueue`
"""
# pylint: disable=too-many-locals, too-many-branches, protected-access, too-many-statements
while state.shutdown == 0:
queueValue = queues.addressGeneratorQueue.get()
nonceTrialsPerByte = 0
@ -84,12 +91,12 @@ class addressGenerator(StoppableThread):
elif queueValue[0] == 'stopThread':
break
else:
logger.error(
self.logger.error(
'Programming error: A structure with the wrong number'
' of values was passed into the addressGeneratorQueue.'
' Here is the queueValue: %r\n', queueValue)
if addressVersionNumber < 3 or addressVersionNumber > 4:
logger.error(
self.logger.error(
'Program error: For some reason the address generator'
' queue has been given a request to create at least'
' one version %s address which it cannot do.\n',
@ -136,10 +143,10 @@ class addressGenerator(StoppableThread):
'\x00'.encode('utf-8') * numberOfNullBytesDemandedOnFrontOfRipeHash
):
break
logger.info(
self.logger.info(
'Generated address with ripe digest: %s', hexlify(ripe))
try:
logger.info(
self.logger.info(
'Address generator calculated %s addresses at %s'
' addresses per second before finding one with'
' the correct ripe-prefix.',
@ -202,8 +209,8 @@ class addressGenerator(StoppableThread):
elif command == 'createDeterministicAddresses' \
or command == 'getDeterministicAddress' \
or command == 'createChan' or command == 'joinChan':
if len(deterministicPassphrase) == 0:
logger.warning(
if not deterministicPassphrase:
self.logger.warning(
'You are creating deterministic'
' address(es) using a blank passphrase.'
' Bitmessage will do it but it is rather stupid.')
@ -256,10 +263,10 @@ class addressGenerator(StoppableThread):
):
break
logger.info(
self.logger.info(
'Generated address with ripe digest: %s', hexlify(ripe))
try:
logger.info(
self.logger.info(
'Address generator calculated %s addresses'
' at %s addresses per second before finding'
' one with the correct ripe-prefix.',
@ -309,7 +316,7 @@ class addressGenerator(StoppableThread):
addressAlreadyExists = True
if addressAlreadyExists:
logger.info(
self.logger.info(
'%s already exists. Not adding it again.',
address
)
@ -322,7 +329,7 @@ class addressGenerator(StoppableThread):
).arg(address)
))
else:
logger.debug('label: %s', label)
self.logger.debug('label: %s', label)
BMConfigParser().set(address, 'label', label)
BMConfigParser().set(address, 'enabled', 'true')
BMConfigParser().set(address, 'decoy', 'false')
@ -351,7 +358,7 @@ class addressGenerator(StoppableThread):
address)
shared.myECCryptorObjects[ripe] = \
highlevelcrypto.makeCryptor(
hexlify(potentialPrivEncryptionKey))
hexlify(potentialPrivEncryptionKey))
shared.myAddressesByHash[ripe] = address
tag = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber) +


@ -1,6 +1,9 @@
"""
The objectProcessor thread, of which there is only one, processes the network objects
"""
import hashlib
import logging
import random
import shared
import threading
import time
from binascii import hexlify
@ -8,11 +11,13 @@ from subprocess import call # nosec
import highlevelcrypto
import knownnodes
import shared
from addresses import (
calculateInventoryHash, decodeAddress, decodeVarint, encodeAddress,
encodeVarint, varintDecodeError
)
from bmconfigparser import BMConfigParser
import helper_bitcoin
import helper_inbox
import helper_msgcoding
@ -20,13 +25,18 @@ import helper_sent
from helper_sql import SqlBulkExecute, sqlExecute, sqlQuery
from helper_ackPayload import genAckPayload
from network import bmproto
from network.node import Peer
import protocol
import queues
import state
import tr
from debug import logger
from fallback import RIPEMD160Hash
import l10n
# pylint: disable=too-many-locals, too-many-return-statements, too-many-branches, too-many-statements
logger = logging.getLogger('default')
class objectProcessor(threading.Thread):
@ -55,6 +65,7 @@ class objectProcessor(threading.Thread):
self.successfullyDecryptMessageTimings = []
def run(self):
"""Process the objects from `.queues.objectProcessorQueue`"""
while True:
objectType, data = queues.objectProcessorQueue.get()
@ -118,7 +129,10 @@ class objectProcessor(threading.Thread):
state.shutdown = 2
break
def checkackdata(self, data):
@staticmethod
def checkackdata(data):
"""Checking Acknowledgement of message received or not?"""
# pylint: disable=protected-access
# Let's check whether this is a message acknowledgement bound for us.
if len(data) < 32:
return
@ -158,7 +172,7 @@ class objectProcessor(threading.Thread):
if not host:
return
peer = state.Peer(host, port)
peer = Peer(host, port)
with knownnodes.knownNodesLock:
knownnodes.addKnownNode(
stream, peer, is_self=state.ownAddresses.get(peer))
@ -268,6 +282,7 @@ class objectProcessor(threading.Thread):
queues.workerQueue.put(('sendOutOrStoreMyV4Pubkey', myAddress))
def processpubkey(self, data):
"""Process a pubkey object"""
pubkeyProcessingStartTime = time.time()
shared.numberOfPubkeysProcessed += 1
queues.UISignalQueue.put((
@ -316,13 +331,14 @@ class objectProcessor(threading.Thread):
'\x04' + publicSigningKey + '\x04' + publicEncryptionKey)
ripe = RIPEMD160Hash(sha.digest()).digest()
logger.debug(
'within recpubkey, addressVersion: %s, streamNumber: %s'
'\nripe %s\npublicSigningKey in hex: %s'
'\npublicEncryptionKey in hex: %s',
addressVersion, streamNumber, hexlify(ripe),
hexlify(publicSigningKey), hexlify(publicEncryptionKey)
)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(
'within recpubkey, addressVersion: %s, streamNumber: %s'
'\nripe %s\npublicSigningKey in hex: %s'
'\npublicEncryptionKey in hex: %s',
addressVersion, streamNumber, hexlify(ripe),
hexlify(publicSigningKey), hexlify(publicEncryptionKey)
)
address = encodeAddress(addressVersion, streamNumber, ripe)
@ -380,13 +396,14 @@ class objectProcessor(threading.Thread):
sha.update(publicSigningKey + publicEncryptionKey)
ripe = RIPEMD160Hash(sha.digest()).digest()
logger.debug(
'within recpubkey, addressVersion: %s, streamNumber: %s'
'\nripe %s\npublicSigningKey in hex: %s'
'\npublicEncryptionKey in hex: %s',
addressVersion, streamNumber, hexlify(ripe),
hexlify(publicSigningKey), hexlify(publicEncryptionKey)
)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(
'within recpubkey, addressVersion: %s, streamNumber: %s'
'\nripe %s\npublicSigningKey in hex: %s'
'\npublicEncryptionKey in hex: %s',
addressVersion, streamNumber, hexlify(ripe),
hexlify(publicSigningKey), hexlify(publicEncryptionKey)
)
address = encodeAddress(addressVersion, streamNumber, ripe)
queryreturn = sqlQuery(
@ -438,6 +455,7 @@ class objectProcessor(threading.Thread):
timeRequiredToProcessPubkey)
def processmsg(self, data):
"""Process a message object"""
messageProcessingStartTime = time.time()
shared.numberOfMessagesProcessed += 1
queues.UISignalQueue.put((
@ -579,17 +597,18 @@ class objectProcessor(threading.Thread):
logger.debug('ECDSA verify failed')
return
logger.debug('ECDSA verify passed')
logger.debug(
'As a matter of intellectual curiosity, here is the Bitcoin'
' address associated with the keys owned by the other person:'
' %s ..and here is the testnet address: %s. The other person'
' must take their private signing key from Bitmessage and'
' import it into Bitcoin (or a service like Blockchain.info)'
' for it to be of any use. Do not use this unless you know'
' what you are doing.',
helper_bitcoin.calculateBitcoinAddressFromPubkey(pubSigningKey),
helper_bitcoin.calculateTestnetAddressFromPubkey(pubSigningKey)
)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(
'As a matter of intellectual curiosity, here is the Bitcoin'
' address associated with the keys owned by the other person:'
' %s ..and here is the testnet address: %s. The other person'
' must take their private signing key from Bitmessage and'
' import it into Bitcoin (or a service like Blockchain.info)'
' for it to be of any use. Do not use this unless you know'
' what you are doing.',
helper_bitcoin.calculateBitcoinAddressFromPubkey(pubSigningKey),
helper_bitcoin.calculateTestnetAddressFromPubkey(pubSigningKey)
)
# Used to detect and ignore duplicate messages in our inbox
sigHash = hashlib.sha512(
hashlib.sha512(signature).digest()).digest()[32:]
@ -732,7 +751,7 @@ class objectProcessor(threading.Thread):
# We really should have a discussion about how to
# set the TTL for mailing list broadcasts. This is obviously
# hard-coded.
TTL = 2*7*24*60*60 # 2 weeks
TTL = 2 * 7 * 24 * 60 * 60 # 2 weeks
t = ('',
toAddress,
ripe,
@ -784,6 +803,7 @@ class objectProcessor(threading.Thread):
)
def processbroadcast(self, data):
"""Process a broadcast object"""
messageProcessingStartTime = time.time()
shared.numberOfBroadcastsProcessed += 1
queues.UISignalQueue.put((
@ -968,7 +988,7 @@ class objectProcessor(threading.Thread):
fromAddress = encodeAddress(
sendersAddressVersion, sendersStream, calculatedRipe)
logger.info('fromAddress: %s' % fromAddress)
logger.info('fromAddress: %s', fromAddress)
# Let's store the public key in case we want to reply to this person.
sqlExecute('''INSERT INTO pubkeys VALUES (?,?,?,?,?)''',
@ -985,7 +1005,7 @@ class objectProcessor(threading.Thread):
fromAddress = encodeAddress(
sendersAddressVersion, sendersStream, calculatedRipe)
logger.debug('fromAddress: ' + fromAddress)
logger.debug('fromAddress: %s', fromAddress)
try:
decodedMessage = helper_msgcoding.MsgDecode(
@ -1046,17 +1066,18 @@ class objectProcessor(threading.Thread):
# for it.
elif addressVersion >= 4:
tag = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersion) + encodeVarint(streamNumber) + ripe
encodeVarint(addressVersion) + encodeVarint(streamNumber)
+ ripe
).digest()).digest()[32:]
if tag in state.neededPubkeys:
del state.neededPubkeys[tag]
self.sendMessages(address)
def sendMessages(self, address):
@staticmethod
def sendMessages(address):
"""
This function is called by the possibleNewPubkey function when
that function sees that we now have the necessary pubkey
to send one or more messages.
This method is called by the `possibleNewPubkey` when it sees
that we now have the necessary pubkey to send one or more messages.
"""
logger.info('We have been awaiting the arrival of this pubkey.')
sqlExecute(
@ -1066,7 +1087,9 @@ class objectProcessor(threading.Thread):
" AND folder='sent'", address)
queues.workerQueue.put(('sendmessage', ''))
def ackDataHasAValidHeader(self, ackData):
@staticmethod
def ackDataHasAValidHeader(ackData):
"""Checking ackData with valid Header, not sending ackData when false"""
if len(ackData) < protocol.Header.size:
logger.info(
'The length of ackData is unreasonably short. Not sending'
@ -1101,11 +1124,12 @@ class objectProcessor(threading.Thread):
return False
return True
def addMailingListNameToSubject(self, subject, mailingListName):
@staticmethod
def addMailingListNameToSubject(subject, mailingListName):
"""Adding mailingListName to subject"""
subject = subject.strip()
if subject[:3] == 'Re:' or subject[:3] == 'RE:':
subject = subject[3:].strip()
if '[' + mailingListName + ']' in subject:
return subject
else:
return '[' + mailingListName + '] ' + subject
return '[' + mailingListName + '] ' + subject
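A quick illustration of the tagging behaviour (example values only, not part of the diff):

# Illustration only:
#   addMailingListNameToSubject('Re: hello', 'list')    -> '[list] hello'
#   addMailingListNameToSubject('[list] hello', 'list') -> '[list] hello'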


@ -1,58 +1,58 @@
"""
The singleCleaner class is a timer-driven thread that cleans data structures
The `singleCleaner` class is a timer-driven thread that cleans data structures
to free memory, resends messages when a remote node doesn't respond, and
sends pong messages to keep connections alive if the network isn't busy.
It cleans these data structures in memory:
inventory (moves data to the on-disk sql database)
inventorySets (clears then reloads data out of sql database)
- inventory (moves data to the on-disk sql database)
- inventorySets (clears then reloads data out of sql database)
It cleans these tables on the disk:
inventory (clears expired objects)
pubkeys (clears pubkeys older than 4 weeks old which we have not used
personally)
knownNodes (clears addresses which have not been online for over 3 days)
- inventory (clears expired objects)
- pubkeys (clears pubkeys older than 4 weeks old which we have not used
personally)
- knownNodes (clears addresses which have not been online for over 3 days)
It resends messages when there has been no response:
resends getpubkey messages in 5 days (then 10 days, then 20 days, etc...)
resends msg messages in 5 days (then 10 days, then 20 days, etc...)
- resends getpubkey messages in 5 days (then 10 days, then 20 days, etc...)
- resends msg messages in 5 days (then 10 days, then 20 days, etc...)
"""
# pylint: disable=relative-import, protected-access
import gc
import os
from datetime import datetime, timedelta
import time
import shared
import tr
from bmconfigparser import BMConfigParser
from helper_sql import sqlQuery, sqlExecute
from helper_threading import StoppableThread
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from debug import logger
import knownnodes
import queues
import state
import tr
from bmconfigparser import BMConfigParser
from helper_sql import sqlQuery, sqlExecute
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from network.threads import StoppableThread
class singleCleaner(StoppableThread):
"""Base method that Cleanup knownnodes and handle possible severe exception"""
"""The singleCleaner thread class"""
name = "singleCleaner"
cycleLength = 300
expireDiscoveredPeers = 300
def run(self): # pylint: disable=too-many-branches
def run(self): # pylint: disable=too-many-branches
gc.disable()
timeWeLastClearedInventoryAndPubkeysTables = 0
try:
shared.maximumLengthOfTimeToBotherResendingMessages = (
float(BMConfigParser().get(
'bitmessagesettings', 'stopresendingafterxdays')) *
24 * 60 * 60
'bitmessagesettings', 'stopresendingafterxdays'))
* 24 * 60 * 60
) + (
float(BMConfigParser().get(
'bitmessagesettings', 'stopresendingafterxmonths')) *
(60 * 60 * 24 * 365) / 12)
'bitmessagesettings', 'stopresendingafterxmonths'))
* (60 * 60 * 24 * 365) / 12)
except:
# Either the user hasn't set stopresendingafterxdays and
# stopresendingafterxmonths yet or the options are missing
@ -93,13 +93,13 @@ class singleCleaner(StoppableThread):
queryreturn = sqlQuery(
"SELECT toaddress, ackdata, status FROM sent"
" WHERE ((status='awaitingpubkey' OR status='msgsent')"
" AND folder LIKE '%sent%' AND sleeptill<? AND senttime>?)",
int(time.time()), int(time.time()) -
shared.maximumLengthOfTimeToBotherResendingMessages
" AND folder='sent' AND sleeptill<? AND senttime>?)",
int(time.time()), int(time.time())
- shared.maximumLengthOfTimeToBotherResendingMessages
)
for row in queryreturn:
if len(row) < 2:
logger.error(
self.logger.error(
'Something went wrong in the singleCleaner thread:'
' a query did not return the requested fields. %r',
row
@ -108,17 +108,18 @@ class singleCleaner(StoppableThread):
break
toAddress, ackData, status = row
if status == 'awaitingpubkey':
resendPubkeyRequest(toAddress)
self.resendPubkeyRequest(toAddress)
elif status == 'msgsent':
resendMsg(ackData)
self.resendMsg(ackData)
deleteTrashMsgPermonantly()
try:
# Cleanup knownnodes and handle possible severe exception
# while writing it to disk
knownnodes.cleanupKnownNodes()
except Exception as err:
# pylint: disable=protected-access
if "Errno 28" in str(err):
logger.fatal(
self.logger.fatal(
'(while writing knownnodes to disk)'
' Alert: Your disk or data storage volume is full.'
)
@ -131,20 +132,12 @@ class singleCleaner(StoppableThread):
' is full. Bitmessage will now exit.'),
True)
))
# ..FIXME redundant?
# pylint: disable=no-member
if shared.daemon or not state.enableGUI:
if shared.thisapp.daemon or not state.enableGUI:
os._exit(1)
# # clear download queues
# for thread in threading.enumerate():
# if thread.isAlive() and hasattr(thread, 'downloadQueue'):
# thread.downloadQueue.clear()
# inv/object tracking
for connection in \
list(BMConnectionPool().inboundConnections.values()) + \
list(BMConnectionPool().outboundConnections.values()):
for connection in BMConnectionPool().connections():
connection.clean()
# discovery tracking
@ -155,50 +148,58 @@ class singleCleaner(StoppableThread):
del state.discoveredPeers[k]
except KeyError:
pass
# ..TODO: cleanup pending upload / download
# ..todo:: cleanup pending upload / download
gc.collect()
if state.shutdown == 0:
self.stop.wait(singleCleaner.cycleLength)
def resendPubkeyRequest(self, address):
"""Resend pubkey request for address"""
self.logger.debug(
'It has been a long time and we haven\'t heard a response to our'
' getpubkey request. Sending again.'
)
try:
# We need to take this entry out of the neededPubkeys structure
# because the queues.workerQueue checks to see whether the entry
# is already present and will not do the POW and send the message
# because it assumes that it has already done it recently.
del state.neededPubkeys[address]
except:
pass
def resendPubkeyRequest(address):
"""After a long time, method send getpubkey request"""
logger.debug(
'It has been a long time and we haven\'t heard a response to our'
' getpubkey request. Sending again.'
)
try:
# We need to take this entry out of the neededPubkeys structure
# because the queues.workerQueue checks to see whether the entry
# is already present and will not do the POW and send the message
# because it assumes that it has already done it recently.
del state.neededPubkeys[address]
except:
pass
queues.UISignalQueue.put((
'updateStatusBar',
'Doing work necessary to again attempt to request a public key...'
))
sqlExecute(
'''UPDATE sent SET status='msgqueued' WHERE toaddress=?''',
address)
queues.workerQueue.put(('sendmessage', ''))
queues.UISignalQueue.put((
'updateStatusBar',
'Doing work necessary to again attempt to request a public key...'
))
sqlExecute(
'''UPDATE sent SET status='msgqueued' WHERE toaddress=?''',
address)
queues.workerQueue.put(('sendmessage', ''))
def resendMsg(self, ackdata):
"""Resend message by ackdata"""
self.logger.debug(
'It has been a long time and we haven\'t heard an acknowledgement'
' to our msg. Sending again.'
)
sqlExecute(
'''UPDATE sent SET status='msgqueued' WHERE ackdata=?''',
ackdata)
queues.workerQueue.put(('sendmessage', ''))
queues.UISignalQueue.put((
'updateStatusBar',
'Doing work necessary to again attempt to deliver a message...'
))
def resendMsg(ackdata):
"""After a long time, method send acknowledgement msg"""
logger.debug(
'It has been a long time and we haven\'t heard an acknowledgement'
' to our msg. Sending again.'
)
sqlExecute(
'''UPDATE sent SET status='msgqueued' WHERE ackdata=?''',
ackdata)
queues.workerQueue.put(('sendmessage', ''))
queues.UISignalQueue.put((
'updateStatusBar',
'Doing work necessary to again attempt to deliver a message...'
))
def deleteTrashMsgPermonantly():
"""This method is used to delete old messages"""
ndays_before_time = datetime.now() - timedelta(days=30)
old_messages = time.mktime(ndays_before_time.timetuple())
sqlExecute("delete from sent where folder = 'trash' and lastactiontime <= ?;", int(old_messages))
sqlExecute("delete from inbox where folder = 'trash' and received <= ?;", int(old_messages))
return
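# The same 30-day cutoff (not part of this change) expressed directly in
# epoch seconds; the datetime/mktime dance above yields the same value up
# to local-time rounding.
import time
cutoff = int(time.time()) - 30 * 24 * 60 * 60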

View File

@ -1,6 +1,5 @@
"""
src/class_singleWorker.py
=========================
Thread for performing PoW
"""
# pylint: disable=protected-access,too-many-branches,too-many-statements
# pylint: disable=no-self-use,too-many-lines,too-many-locals,relative-import
@ -26,10 +25,9 @@ import state
import tr
from addresses import calculateInventoryHash, decodeAddress, decodeVarint, encodeVarint
from bmconfigparser import BMConfigParser
from debug import logger
from helper_sql import sqlExecute, sqlQuery
from helper_threading import StoppableThread
from inventory import Inventory
from network.threads import StoppableThread
# This thread, of which there is only one, does the heavy lifting:
# calculating POWs.
@ -101,7 +99,7 @@ class singleWorker(StoppableThread):
'''SELECT ackdata FROM sent WHERE status = 'msgsent' ''')
for row in queryreturn:
ackdata, = row
logger.info('Watching for ackdata %s', hexlify(ackdata))
self.logger.info('Watching for ackdata %s', hexlify(ackdata))
shared.ackdataForWhichImWatching[ackdata] = 0
# Fix legacy (headerless) watched ackdata to include header
@ -176,14 +174,14 @@ class singleWorker(StoppableThread):
self.busy = 0
return
else:
logger.error(
self.logger.error(
'Probable programming error: The command sent'
' to the workerThread is weird. It is: %s\n',
command
)
queues.workerQueue.task_done()
logger.info("Quitting...")
self.logger.info("Quitting...")
def _getKeysForAddress(self, address):
privSigningKeyBase58 = BMConfigParser().get(
@ -220,25 +218,24 @@ class singleWorker(StoppableThread):
)) / (2 ** 16))
))
initialHash = hashlib.sha512(payload).digest()
logger.info(
self.logger.info(
'%s Doing proof of work... TTL set to %s', log_prefix, TTL)
if log_time:
start_time = time.time()
trialValue, nonce = proofofwork.run(target, initialHash)
logger.info(
self.logger.info(
'%s Found proof of work %s Nonce: %s',
log_prefix, trialValue, nonce
)
try:
delta = time.time() - start_time
logger.info(
self.logger.info(
'PoW took %.1f seconds, speed %s.',
delta, sizeof_fmt(nonce / delta)
)
except: # NameError
pass
payload = pack('>Q', nonce) + payload
# inventoryHash = calculateInventoryHash(payload)
return payload
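# A minimal pure-Python sketch (not part of this change) of what
# proofofwork.run computes, per the Bitmessage protocol: brute-force a
# nonce until the first 8 bytes of a double SHA-512 of nonce + initialHash
# fall at or below the target.
import hashlib
from struct import pack, unpack

def pow_run_sketch(target, initial_hash):
    nonce = 0
    trial_value = float('inf')
    while trial_value > target:
        nonce += 1
        trial_value, = unpack('>Q', hashlib.sha512(hashlib.sha512(
            pack('>Q', nonce) + initial_hash).digest()).digest()[:8])
    return trial_value, nonce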
def doPOWForMyV2Pubkey(self, adressHash):
@ -263,7 +260,7 @@ class singleWorker(StoppableThread):
_, _, pubSigningKey, pubEncryptionKey = \
self._getKeysForAddress(myAddress)
except Exception as err:
logger.error(
self.logger.error(
'Error within doPOWForMyV2Pubkey. Could not read'
' the keys from the keys.dat file for a requested'
' address. %s\n', err
@ -281,7 +278,8 @@ class singleWorker(StoppableThread):
Inventory()[inventoryHash] = (
objectType, streamNumber, payload, embeddedTime, '')
logger.info('broadcasting inv with hash: %s', hexlify(inventoryHash))
self.logger.info(
'broadcasting inv with hash: %s', hexlify(inventoryHash))
queues.invQueue.put((streamNumber, inventoryHash))
queues.UISignalQueue.put(('updateStatusBar', ''))
@ -306,7 +304,7 @@ class singleWorker(StoppableThread):
# The address has been deleted.
return
if BMConfigParser().safeGetBoolean(myAddress, 'chan'):
logger.info('This is a chan address. Not sending pubkey.')
self.logger.info('This is a chan address. Not sending pubkey.')
return
_, addressVersionNumber, streamNumber, adressHash = decodeAddress(
myAddress)
@ -336,7 +334,7 @@ class singleWorker(StoppableThread):
privSigningKeyHex, _, pubSigningKey, pubEncryptionKey = \
self._getKeysForAddress(myAddress)
except Exception as err:
logger.error(
self.logger.error(
'Error within sendOutOrStoreMyV3Pubkey. Could not read'
' the keys from the keys.dat file for a requested'
' address. %s\n', err
@ -363,7 +361,8 @@ class singleWorker(StoppableThread):
Inventory()[inventoryHash] = (
objectType, streamNumber, payload, embeddedTime, '')
logger.info('broadcasting inv with hash: %s', hexlify(inventoryHash))
self.logger.info(
'broadcasting inv with hash: %s', hexlify(inventoryHash))
queues.invQueue.put((streamNumber, inventoryHash))
queues.UISignalQueue.put(('updateStatusBar', ''))
@ -386,7 +385,7 @@ class singleWorker(StoppableThread):
# The address has been deleted.
return
if shared.BMConfigParser().safeGetBoolean(myAddress, 'chan'):
logger.info('This is a chan address. Not sending pubkey.')
self.logger.info('This is a chan address. Not sending pubkey.')
return
_, addressVersionNumber, streamNumber, addressHash = decodeAddress(
myAddress)
@ -405,7 +404,7 @@ class singleWorker(StoppableThread):
privSigningKeyHex, _, pubSigningKey, pubEncryptionKey = \
self._getKeysForAddress(myAddress)
except Exception as err:
logger.error(
self.logger.error(
'Error within sendOutOrStoreMyV4Pubkey. Could not read'
' the keys from the keys.dat file for a requested'
' address. %s\n', err
@ -453,7 +452,8 @@ class singleWorker(StoppableThread):
doubleHashOfAddressData[32:]
)
logger.info('broadcasting inv with hash: %s', hexlify(inventoryHash))
self.logger.info(
'broadcasting inv with hash: %s', hexlify(inventoryHash))
queues.invQueue.put((streamNumber, inventoryHash))
queues.UISignalQueue.put(('updateStatusBar', ''))
@ -462,7 +462,7 @@ class singleWorker(StoppableThread):
myAddress, 'lastpubkeysendtime', str(int(time.time())))
BMConfigParser().save()
except Exception as err:
logger.error(
self.logger.error(
'Error: Couldn\'t add the lastpubkeysendtime'
' to the keys.dat file. Error message: %s', err
)
@ -470,8 +470,8 @@ class singleWorker(StoppableThread):
def sendOnionPeerObj(self, peer=None):
"""Send onionpeer object representing peer"""
if not peer: # find own onionhostname
for peer in state.ownAddresses: # pylint: disable=redefined-argument-from-local
if peer.host.endswith('.onion'):
for peer_ in state.ownAddresses:
if peer_.host.endswith('.onion'):
break
else:
return
@ -500,7 +500,7 @@ class singleWorker(StoppableThread):
objectType, streamNumber, buffer(payload),
embeddedTime, buffer(tag)
)
logger.info(
self.logger.info(
'sending inv (within sendOnionPeerObj function) for object: %s',
hexlify(inventoryHash))
queues.invQueue.put((streamNumber, inventoryHash))
@ -523,7 +523,7 @@ class singleWorker(StoppableThread):
_, addressVersionNumber, streamNumber, ripe = \
decodeAddress(fromaddress)
if addressVersionNumber <= 1:
logger.error(
self.logger.error(
'Error: In the singleWorker thread, the '
' sendBroadcast function doesn\'t understand'
' the address version.\n')
@ -639,7 +639,7 @@ class singleWorker(StoppableThread):
# to not let the user try to send a message this large
# until we implement message continuation.
if len(payload) > 2 ** 18: # 256 KiB
logger.critical(
self.logger.critical(
'This broadcast object is too large to send.'
' This should never happen. Object size: %s',
len(payload)
@ -650,7 +650,7 @@ class singleWorker(StoppableThread):
objectType = 3
Inventory()[inventoryHash] = (
objectType, streamNumber, payload, embeddedTime, tag)
logger.info(
self.logger.info(
'sending inv (within sendBroadcast function)'
' for object: %s',
hexlify(inventoryHash)
@ -870,8 +870,8 @@ class singleWorker(StoppableThread):
"MainWindow",
"Looking up the receiver\'s public key"))
))
logger.info('Sending a message.')
logger.debug(
self.logger.info('Sending a message.')
self.logger.debug(
'First 150 characters of message: %s',
repr(message[:150])
)
@ -915,7 +915,7 @@ class singleWorker(StoppableThread):
if not shared.BMConfigParser().safeGetBoolean(
'bitmessagesettings', 'willinglysendtomobile'
):
logger.info(
self.logger.info(
'The receiver is a mobile user but the'
' sender (you) has not selected that you'
' are willing to send to mobiles. Aborting'
@ -981,7 +981,7 @@ class singleWorker(StoppableThread):
defaults.networkDefaultPayloadLengthExtraBytes:
requiredPayloadLengthExtraBytes = \
defaults.networkDefaultPayloadLengthExtraBytes
logger.debug(
self.logger.debug(
'Using averageProofOfWorkNonceTrialsPerByte: %s'
' and payloadLengthExtraBytes: %s.',
requiredAverageProofOfWorkNonceTrialsPerByte,
@ -1046,8 +1046,9 @@ class singleWorker(StoppableThread):
l10n.formatTimestamp()))))
continue
else: # if we are sending a message to ourselves or a chan..
logger.info('Sending a message.')
logger.debug('First 150 characters of message: %r', message[:150])
self.logger.info('Sending a message.')
self.logger.debug(
'First 150 characters of message: %r', message[:150])
behaviorBitfield = protocol.getBitfield(fromaddress)
try:
@ -1066,7 +1067,7 @@ class singleWorker(StoppableThread):
" message. %1"
).arg(l10n.formatTimestamp()))
))
logger.error(
self.logger.error(
'Error within sendMsg. Could not read the keys'
' from the keys.dat file for our own address. %s\n',
err)
@ -1142,14 +1143,14 @@ class singleWorker(StoppableThread):
payload += encodeVarint(encodedMessage.length)
payload += encodedMessage.data
if BMConfigParser().has_section(toaddress):
logger.info(
self.logger.info(
'Not bothering to include ackdata because we are'
' sending to ourselves or a chan.'
)
fullAckPayload = ''
elif not protocol.checkBitfield(
behaviorBitfield, protocol.BITFIELD_DOESACK):
logger.info(
self.logger.info(
'Not bothering to include ackdata because'
' the receiver said that they won\'t relay it anyway.'
)
@ -1202,7 +1203,7 @@ class singleWorker(StoppableThread):
requiredPayloadLengthExtraBytes
)) / (2 ** 16))
))
logger.info(
self.logger.info(
'(For msg message) Doing proof of work. Total required'
' difficulty: %f. Required small message difficulty: %f.',
float(requiredAverageProofOfWorkNonceTrialsPerByte) /
@ -1215,12 +1216,12 @@ class singleWorker(StoppableThread):
initialHash = hashlib.sha512(encryptedPayload).digest()
trialValue, nonce = proofofwork.run(target, initialHash)
print("nonce calculated value#############################", nonce)
logger.info(
self.logger.info(
'(For msg message) Found proof of work %s Nonce: %s',
trialValue, nonce
)
try:
logger.info(
self.logger.info(
'PoW took %.1f seconds, speed %s.',
time.time() - powStartTime,
sizeof_fmt(nonce / (time.time() - powStartTime))
@ -1235,7 +1236,7 @@ class singleWorker(StoppableThread):
# in the code to not let the user try to send a message
# this large until we implement message continuation.
if len(encryptedPayload) > 2 ** 18: # 256 KiB
logger.critical(
self.logger.critical(
'This msg object is too large to send. This should'
' never happen. Object size: %i',
len(encryptedPayload)
@ -1266,7 +1267,7 @@ class singleWorker(StoppableThread):
" Sent on %1"
).arg(l10n.formatTimestamp()))
))
logger.info(
self.logger.info(
'Broadcasting inv for my msg(within sendmsg function): %s',
hexlify(inventoryHash)
)
@ -1319,7 +1320,7 @@ class singleWorker(StoppableThread):
toStatus, addressVersionNumber, streamNumber, ripe = decodeAddress(
toAddress)
if toStatus != 'success':
logger.error(
self.logger.error(
'Very abnormal error occurred in requestPubKey.'
' toAddress is: %r. Please report this error to Atheros.',
toAddress
@ -1333,7 +1334,7 @@ class singleWorker(StoppableThread):
toAddress
)
if not queryReturn:
logger.critical(
self.logger.critical(
'BUG: Why are we requesting the pubkey for %s'
' if there are no messages in the sent folder'
' to that address?', toAddress
@ -1381,11 +1382,11 @@ class singleWorker(StoppableThread):
payload += encodeVarint(streamNumber)
if addressVersionNumber <= 3:
payload += ripe
logger.info(
self.logger.info(
'making request for pubkey with ripe: %s', hexlify(ripe))
else:
payload += tag
logger.info(
self.logger.info(
'making request for v4 pubkey with tag: %s', hexlify(tag))
# print 'trial value', trialValue
@ -1406,7 +1407,7 @@ class singleWorker(StoppableThread):
objectType = 1
Inventory()[inventoryHash] = (
objectType, streamNumber, payload, embeddedTime, '')
logger.info('sending inv (for the getpubkey message)')
self.logger.info('sending inv (for the getpubkey message)')
queues.invQueue.put((streamNumber, inventoryHash))
# wait 10% past expiration

View File

@ -5,7 +5,6 @@ src/class_smtpDeliver.py
# pylint: disable=unused-variable
import smtplib
import sys
import urlparse
from email.header import Header
from email.mime.text import MIMEText
@ -13,8 +12,7 @@ from email.mime.text import MIMEText
import queues
import state
from bmconfigparser import BMConfigParser
from debug import logger
from helper_threading import StoppableThread
from network.threads import StoppableThread
SMTPDOMAIN = "bmaddr.lan"
@ -75,10 +73,12 @@ class smtpDeliver(StoppableThread):
client.starttls()
client.ehlo()
client.sendmail(msg['From'], [to], msg.as_string())
logger.info("Delivered via SMTP to %s through %s:%i ...", to, u.hostname, u.port)
self.logger.info(
'Delivered via SMTP to %s through %s:%i ...',
to, u.hostname, u.port)
client.quit()
except:
logger.error("smtp delivery error", exc_info=True)
self.logger.error('smtp delivery error', exc_info=True)
elif command == 'displayNewSentMessage':
toAddress, fromLabel, fromAddress, subject, message, ackdata = data
elif command == 'updateNetworkStatusTab':
@ -112,5 +112,5 @@ class smtpDeliver(StoppableThread):
elif command == 'stopThread':
break
else:
sys.stderr.write(
'Command sent to smtpDeliver not recognized: %s\n' % command)
self.logger.warning(
'Command sent to smtpDeliver not recognized: %s', command)

View File

@ -1,28 +1,37 @@
"""
SMTP server thread
"""
import asyncore
import base64
import email
from email.parser import Parser
from email.header import decode_header
import logging
import re
import signal
import smtpd
import threading
import time
from email.header import decode_header
from email.parser import Parser
import queues
from addresses import decodeAddress
from bmconfigparser import BMConfigParser
from debug import logger
from helper_sql import sqlExecute
from helper_ackPayload import genAckPayload
from helper_threading import StoppableThread
import queues
from helper_sql import sqlExecute
from network.threads import StoppableThread
from version import softwareVersion
SMTPDOMAIN = "bmaddr.lan"
LISTENPORT = 8425
logger = logging.getLogger('default')
# pylint: disable=attribute-defined-outside-init
class smtpServerChannel(smtpd.SMTPChannel):
"""Asyncore channel for SMTP protocol (server)"""
def smtp_EHLO(self, arg):
"""Process an EHLO"""
if not arg:
self.push('501 Syntax: HELO hostname')
return
@ -30,15 +39,17 @@ class smtpServerChannel(smtpd.SMTPChannel):
self.push('250 AUTH PLAIN')
def smtp_AUTH(self, arg):
"""Process AUTH"""
if not arg or arg[0:5] not in ["PLAIN"]:
self.push('501 Syntax: AUTH PLAIN')
return
authstring = arg[6:]
try:
decoded = base64.b64decode(authstring)
correctauth = "\x00" + BMConfigParser().safeGet("bitmessagesettings", "smtpdusername", "") + \
"\x00" + BMConfigParser().safeGet("bitmessagesettings", "smtpdpassword", "")
logger.debug("authstring: %s / %s", correctauth, decoded)
correctauth = "\x00" + BMConfigParser().safeGet(
"bitmessagesettings", "smtpdusername", "") + "\x00" + BMConfigParser().safeGet(
"bitmessagesettings", "smtpdpassword", "")
logger.debug('authstring: %s / %s', correctauth, decoded)
if correctauth == decoded:
self.auth = True
self.push('235 2.7.0 Authentication successful')
@ -48,22 +59,26 @@ class smtpServerChannel(smtpd.SMTPChannel):
self.push('501 Authentication fail')
def smtp_DATA(self, arg):
"""Process DATA"""
if not hasattr(self, "auth") or not self.auth:
self.push ("530 Authentication required")
self.push('530 Authentication required')
return
smtpd.SMTPChannel.smtp_DATA(self, arg)
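# Illustrative client-side usage (not part of this change): the server
# above listens on 127.0.0.1:8425 and advertises only AUTH PLAIN, so a
# local client could relay a message through it roughly like this
# (addresses and credentials below are placeholders):
import smtplib
from email.mime.text import MIMEText

msg = MIMEText('hello')
msg['From'] = 'sender@bmaddr.lan'
msg['To'] = 'recipient@bmaddr.lan'
msg['Subject'] = 'test'

client = smtplib.SMTP('127.0.0.1', 8425)
client.login('smtpdusername', 'smtpdpassword')  # sent as base64 AUTH PLAIN
client.sendmail(msg['From'], [msg['To']], msg.as_string())
client.quit()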
class smtpServerPyBitmessage(smtpd.SMTPServer):
"""Asyncore SMTP server class"""
def handle_accept(self):
"""Accept a connection"""
pair = self.accept()
if pair is not None:
conn, addr = pair
# print >> DEBUGSTREAM, 'Incoming connection from %s' % repr(addr)
self.channel = smtpServerChannel(self, conn, addr)
def send(self, fromAddress, toAddress, subject, message):
status, addressVersionNumber, streamNumber, ripe = decodeAddress(toAddress)
def send(self, fromAddress, toAddress, subject, message): # pylint: disable=arguments-differ
"""Send a bitmessage"""
streamNumber, ripe = decodeAddress(toAddress)[2:]
stealthLevel = BMConfigParser().safeGetInt('bitmessagesettings', 'ackstealthlevel')
ackdata = genAckPayload(streamNumber, stealthLevel)
sqlExecute(
@ -75,19 +90,21 @@ class smtpServerPyBitmessage(smtpd.SMTPServer):
subject,
message,
ackdata,
int(time.time()), # sentTime (this will never change)
int(time.time()), # lastActionTime
0, # sleepTill time. This will get set when the POW gets done.
int(time.time()), # sentTime (this will never change)
int(time.time()), # lastActionTime
0, # sleepTill time. This will get set when the POW gets done.
'msgqueued',
0, # retryNumber
'sent', # folder
2, # encodingtype
min(BMConfigParser().getint('bitmessagesettings', 'ttl'), 86400 * 2) # not necessary to have a TTL higher than 2 days
0, # retryNumber
'sent', # folder
2, # encodingtype
# not necessary to have a TTL higher than 2 days
min(BMConfigParser().getint('bitmessagesettings', 'ttl'), 86400 * 2)
)
queues.workerQueue.put(('sendmessage', toAddress))
def decode_header(self, hdr):
"""Email header decoding"""
ret = []
for h in decode_header(self.msg_headers[hdr]):
if h[1]:
@ -97,37 +114,37 @@ class smtpServerPyBitmessage(smtpd.SMTPServer):
return ret
def process_message(self, peer, mailfrom, rcpttos, data):
# print 'Receiving message from:', peer
def process_message(self, peer, mailfrom, rcpttos, data): # pylint: disable=too-many-locals, too-many-branches
"""Process an email"""
# print 'Receiving message from:', peer
p = re.compile(".*<([^>]+)>")
if not hasattr(self.channel, "auth") or not self.channel.auth:
logger.error("Missing or invalid auth")
logger.error('Missing or invalid auth')
return
try:
self.msg_headers = Parser().parsestr(data)
except:
logger.error("Invalid headers")
logger.error('Invalid headers')
return
try:
sender, domain = p.sub(r'\1', mailfrom).split("@")
if domain != SMTPDOMAIN:
raise Exception("Bad domain %s", domain)
raise Exception("Bad domain %s" % domain)
if sender not in BMConfigParser().addresses():
raise Exception("Nonexisting user %s", sender)
raise Exception("Nonexisting user %s" % sender)
except Exception as err:
logger.debug("Bad envelope from %s: %s", mailfrom, repr(err))
logger.debug('Bad envelope from %s: %r', mailfrom, err)
msg_from = self.decode_header("from")
try:
msg_from = p.sub(r'\1', self.decode_header("from")[0])
sender, domain = msg_from.split("@")
if domain != SMTPDOMAIN:
raise Exception("Bad domain %s", domain)
raise Exception("Bad domain %s" % domain)
if sender not in BMConfigParser().addresses():
raise Exception("Nonexisting user %s", sender)
raise Exception("Nonexisting user %s" % sender)
except Exception as err:
logger.error("Bad headers from %s: %s", msg_from, repr(err))
logger.error('Bad headers from %s: %r', msg_from, err)
return
try:
@ -145,18 +162,20 @@ class smtpServerPyBitmessage(smtpd.SMTPServer):
try:
rcpt, domain = p.sub(r'\1', to).split("@")
if domain != SMTPDOMAIN:
raise Exception("Bad domain %s", domain)
logger.debug("Sending %s to %s about %s", sender, rcpt, msg_subject)
raise Exception("Bad domain %s" % domain)
logger.debug(
'Sending %s to %s about %s', sender, rcpt, msg_subject)
self.send(sender, rcpt, msg_subject, body)
logger.info("Relayed %s to %s", sender, rcpt)
logger.info('Relayed %s to %s', sender, rcpt)
except Exception as err:
logger.error( "Bad to %s: %s", to, repr(err))
logger.error('Bad to %s: %r', to, err)
continue
return
class smtpServer(StoppableThread):
def __init__(self, parent=None):
"""SMTP server thread"""
def __init__(self, _=None):
super(smtpServer, self).__init__(name="smtpServerThread")
self.server = smtpServerPyBitmessage(('127.0.0.1', LISTENPORT), None)
@ -168,21 +187,26 @@ class smtpServer(StoppableThread):
def run(self):
asyncore.loop(1)
def signals(signal, frame):
print "Got signal, terminating"
def signals(_, __):
"""Signal handler"""
logger.warning('Got signal, terminating')
for thread in threading.enumerate():
if thread.isAlive() and isinstance(thread, StoppableThread):
thread.stopThread()
def runServer():
print "Running SMTPd thread"
"""Run SMTP server as a standalone python process"""
logger.warning('Running SMTPd thread')
smtpThread = smtpServer()
smtpThread.start()
signal.signal(signal.SIGINT, signals)
signal.signal(signal.SIGTERM, signals)
print "Processing"
logger.warning('Processing')
smtpThread.join()
print "The end"
logger.warning('The end')
if __name__ == "__main__":
runServer()

View File

@ -1,29 +1,34 @@
"""
sqlThread is defined here: the single thread that serialises all SQLite access
"""
import threading
from bmconfigparser import BMConfigParser
import sqlite3
import time
import shutil # used for moving the messages.dat file
import sys
import os
from debug import logger
import helper_sql
import helper_startup
import paths
import queues
import state
import tr
# This thread exists because SQLITE3 is so un-threadsafe that we must
# submit queries to it and it puts results back in a different queue. They
# won't let us just use locks.
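# A minimal self-contained sketch (not part of this change) of the pattern
# described above: one thread owns the SQLite connection, every other
# thread submits (statement, parameters) through a queue and blocks on a
# reply queue for the result.
import Queue

submit_queue = Queue.Queue()
return_queue = Queue.Queue()

def sql_owner():
    conn = sqlite3.connect(':memory:')  # the owning thread creates the conn
    cur = conn.cursor()
    while True:
        statement, parameters = submit_queue.get()
        cur.execute(statement, parameters)
        return_queue.put(cur.fetchall())

owner = threading.Thread(target=sql_owner)
owner.daemon = True
owner.start()

submit_queue.put(('SELECT ?', (1,)))
assert return_queue.get() == [(1,)]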
# pylint: disable=attribute-defined-outside-init,protected-access
class sqlThread(threading.Thread):
"""A thread for all SQL operations"""
def __init__(self):
threading.Thread.__init__(self, name="SQL")
def run(self):
def run(self): # pylint: disable=too-many-locals, too-many-branches, too-many-statements
"""Process SQL queries from `.helper_sql.sqlSubmitQueue`"""
self.conn = sqlite3.connect(state.appdata + 'messages.dat')
self.conn.text_factory = str
self.cur = self.conn.cursor()
@ -32,30 +37,38 @@ class sqlThread(threading.Thread):
try:
self.cur.execute(
'''CREATE TABLE inbox (msgid blob, toaddress text, fromaddress text, subject text, received text, message text, folder text, encodingtype int, read bool, sighash blob, UNIQUE(msgid) ON CONFLICT REPLACE)''' )
'''CREATE TABLE inbox (msgid blob, toaddress text, fromaddress text, subject text,'''
''' received text, message text, folder text, encodingtype int, read bool, sighash blob,'''
''' UNIQUE(msgid) ON CONFLICT REPLACE)''')
self.cur.execute(
'''CREATE TABLE sent (msgid blob, toaddress text, toripe blob, fromaddress text, subject text, message text, ackdata blob, senttime integer, lastactiontime integer, sleeptill integer, status text, retrynumber integer, folder text, encodingtype int, ttl int)''' )
'''CREATE TABLE sent (msgid blob, toaddress text, toripe blob, fromaddress text, subject text,'''
''' message text, ackdata blob, senttime integer, lastactiontime integer,'''
''' sleeptill integer, status text, retrynumber integer, folder text, encodingtype int, ttl int)''')
self.cur.execute(
'''CREATE TABLE subscriptions (label text, address text, enabled bool)''' )
'''CREATE TABLE subscriptions (label text, address text, enabled bool)''')
self.cur.execute(
'''CREATE TABLE addressbook (label text, address text)''' )
'''CREATE TABLE addressbook (label text, address text)''')
self.cur.execute(
'''CREATE TABLE blacklist (label text, address text, enabled bool)''' )
'''CREATE TABLE blacklist (label text, address text, enabled bool)''')
self.cur.execute(
'''CREATE TABLE whitelist (label text, address text, enabled bool)''' )
'''CREATE TABLE whitelist (label text, address text, enabled bool)''')
self.cur.execute(
'''CREATE TABLE pubkeys (address text, addressversion int, transmitdata blob, time int, usedpersonally text, UNIQUE(address) ON CONFLICT REPLACE)''' )
'''CREATE TABLE pubkeys (address text, addressversion int, transmitdata blob, time int,'''
''' usedpersonally text, UNIQUE(address) ON CONFLICT REPLACE)''')
self.cur.execute(
'''CREATE TABLE inventory (hash blob, objecttype int, streamnumber int, payload blob, expirestime integer, tag blob, UNIQUE(hash) ON CONFLICT REPLACE)''' )
'''CREATE TABLE inventory (hash blob, objecttype int, streamnumber int, payload blob,'''
''' expirestime integer, tag blob, UNIQUE(hash) ON CONFLICT REPLACE)''')
self.cur.execute(
'''INSERT INTO subscriptions VALUES('Bitmessage new releases/announcements','BM-GtovgYdgs7qXPkoYaRgrLFuFKz1SFpsw',1)''')
'''INSERT INTO subscriptions VALUES'''
'''('Bitmessage new releases/announcements','BM-GtovgYdgs7qXPkoYaRgrLFuFKz1SFpsw',1)''')
self.cur.execute(
'''CREATE TABLE settings (key blob, value blob, UNIQUE(key) ON CONFLICT REPLACE)''' )
'''CREATE TABLE settings (key blob, value blob, UNIQUE(key) ON CONFLICT REPLACE)''')
self.cur.execute('''INSERT INTO settings VALUES('version','10')''')
self.cur.execute('''INSERT INTO settings VALUES('lastvacuumtime',?)''', (
int(time.time()),))
self.cur.execute(
'''CREATE TABLE objectprocessorqueue (objecttype int, data blob, UNIQUE(objecttype, data) ON CONFLICT REPLACE)''' )
'''CREATE TABLE objectprocessorqueue'''
''' (objecttype int, data blob, UNIQUE(objecttype, data) ON CONFLICT REPLACE)''')
self.conn.commit()
logger.info('Created messages database file')
except Exception as err:
@ -122,33 +135,38 @@ class sqlThread(threading.Thread):
logger.debug(
"In messages.dat database, creating new 'settings' table.")
self.cur.execute(
'''CREATE TABLE settings (key text, value blob, UNIQUE(key) ON CONFLICT REPLACE)''' )
self.cur.execute( '''INSERT INTO settings VALUES('version','1')''')
self.cur.execute( '''INSERT INTO settings VALUES('lastvacuumtime',?)''', (
'''CREATE TABLE settings (key text, value blob, UNIQUE(key) ON CONFLICT REPLACE)''')
self.cur.execute('''INSERT INTO settings VALUES('version','1')''')
self.cur.execute('''INSERT INTO settings VALUES('lastvacuumtime',?)''', (
int(time.time()),))
logger.debug('In messages.dat database, removing an obsolete field from the pubkeys table.')
self.cur.execute(
'''CREATE TEMPORARY TABLE pubkeys_backup(hash blob, transmitdata blob, time int, usedpersonally text, UNIQUE(hash) ON CONFLICT REPLACE);''')
'''CREATE TEMPORARY TABLE pubkeys_backup(hash blob, transmitdata blob, time int,'''
''' usedpersonally text, UNIQUE(hash) ON CONFLICT REPLACE);''')
self.cur.execute(
'''INSERT INTO pubkeys_backup SELECT hash, transmitdata, time, usedpersonally FROM pubkeys;''')
self.cur.execute( '''DROP TABLE pubkeys''')
self.cur.execute('''DROP TABLE pubkeys''')
self.cur.execute(
'''CREATE TABLE pubkeys (hash blob, transmitdata blob, time int, usedpersonally text, UNIQUE(hash) ON CONFLICT REPLACE)''' )
'''CREATE TABLE pubkeys'''
''' (hash blob, transmitdata blob, time int, usedpersonally text, UNIQUE(hash) ON CONFLICT REPLACE)''')
self.cur.execute(
'''INSERT INTO pubkeys SELECT hash, transmitdata, time, usedpersonally FROM pubkeys_backup;''')
self.cur.execute( '''DROP TABLE pubkeys_backup;''')
logger.debug('Deleting all pubkeys from inventory. They will be redownloaded and then saved with the correct times.')
self.cur.execute('''DROP TABLE pubkeys_backup;''')
logger.debug(
'Deleting all pubkeys from inventory.'
' They will be redownloaded and then saved with the correct times.')
self.cur.execute(
'''delete from inventory where objecttype = 'pubkey';''')
logger.debug('replacing Bitmessage announcements mailing list with a new one.')
self.cur.execute(
'''delete from subscriptions where address='BM-BbkPSZbzPwpVcYZpU4yHwf9ZPEapN5Zx' ''')
self.cur.execute(
'''INSERT INTO subscriptions VALUES('Bitmessage new releases/announcements','BM-GtovgYdgs7qXPkoYaRgrLFuFKz1SFpsw',1)''')
'''INSERT INTO subscriptions VALUES'''
'''('Bitmessage new releases/announcements','BM-GtovgYdgs7qXPkoYaRgrLFuFKz1SFpsw',1)''')
logger.debug('Committing.')
self.conn.commit()
logger.debug('Vacuuming messages.dat. You might notice that the file size gets much smaller.')
self.cur.execute( ''' VACUUM ''')
self.cur.execute(''' VACUUM ''')
# After code refactoring, the possible status values for sent messages
# have changed.
@ -172,15 +190,21 @@ class sqlThread(threading.Thread):
'In messages.dat database, removing an obsolete field from'
' the inventory table.')
self.cur.execute(
'''CREATE TEMPORARY TABLE inventory_backup(hash blob, objecttype text, streamnumber int, payload blob, receivedtime integer, UNIQUE(hash) ON CONFLICT REPLACE);''')
'''CREATE TEMPORARY TABLE inventory_backup'''
'''(hash blob, objecttype text, streamnumber int, payload blob,'''
''' receivedtime integer, UNIQUE(hash) ON CONFLICT REPLACE);''')
self.cur.execute(
'''INSERT INTO inventory_backup SELECT hash, objecttype, streamnumber, payload, receivedtime FROM inventory;''')
self.cur.execute( '''DROP TABLE inventory''')
'''INSERT INTO inventory_backup SELECT hash, objecttype, streamnumber, payload, receivedtime'''
''' FROM inventory;''')
self.cur.execute('''DROP TABLE inventory''')
self.cur.execute(
'''CREATE TABLE inventory (hash blob, objecttype text, streamnumber int, payload blob, receivedtime integer, UNIQUE(hash) ON CONFLICT REPLACE)''' )
'''CREATE TABLE inventory'''
''' (hash blob, objecttype text, streamnumber int, payload blob, receivedtime integer,'''
''' UNIQUE(hash) ON CONFLICT REPLACE)''')
self.cur.execute(
'''INSERT INTO inventory SELECT hash, objecttype, streamnumber, payload, receivedtime FROM inventory_backup;''')
self.cur.execute( '''DROP TABLE inventory_backup;''')
'''INSERT INTO inventory SELECT hash, objecttype, streamnumber, payload, receivedtime'''
''' FROM inventory_backup;''')
self.cur.execute('''DROP TABLE inventory_backup;''')
item = '''update settings set value=? WHERE key='version';'''
parameters = (3,)
self.cur.execute(item, parameters)
@ -210,7 +234,8 @@ class sqlThread(threading.Thread):
if currentVersion == 4:
self.cur.execute('''DROP TABLE pubkeys''')
self.cur.execute(
'''CREATE TABLE pubkeys (hash blob, addressversion int, transmitdata blob, time int, usedpersonally text, UNIQUE(hash, addressversion) ON CONFLICT REPLACE)''')
'''CREATE TABLE pubkeys (hash blob, addressversion int, transmitdata blob, time int,'''
'''usedpersonally text, UNIQUE(hash, addressversion) ON CONFLICT REPLACE)''')
self.cur.execute(
'''delete from inventory where objecttype = 'pubkey';''')
item = '''update settings set value=? WHERE key='version';'''
@ -226,7 +251,8 @@ class sqlThread(threading.Thread):
if currentVersion == 5:
self.cur.execute('''DROP TABLE knownnodes''')
self.cur.execute(
'''CREATE TABLE objectprocessorqueue (objecttype text, data blob, UNIQUE(objecttype, data) ON CONFLICT REPLACE)''')
'''CREATE TABLE objectprocessorqueue'''
''' (objecttype text, data blob, UNIQUE(objecttype, data) ON CONFLICT REPLACE)''')
item = '''update settings set value=? WHERE key='version';'''
parameters = (6,)
self.cur.execute(item, parameters)
@ -242,10 +268,15 @@ class sqlThread(threading.Thread):
logger.debug(
'In messages.dat database, dropping and recreating'
' the inventory table.')
self.cur.execute( '''DROP TABLE inventory''')
self.cur.execute( '''CREATE TABLE inventory (hash blob, objecttype int, streamnumber int, payload blob, expirestime integer, tag blob, UNIQUE(hash) ON CONFLICT REPLACE)''' )
self.cur.execute( '''DROP TABLE objectprocessorqueue''')
self.cur.execute( '''CREATE TABLE objectprocessorqueue (objecttype int, data blob, UNIQUE(objecttype, data) ON CONFLICT REPLACE)''' )
self.cur.execute('''DROP TABLE inventory''')
self.cur.execute(
'''CREATE TABLE inventory'''
''' (hash blob, objecttype int, streamnumber int, payload blob, expirestime integer,'''
''' tag blob, UNIQUE(hash) ON CONFLICT REPLACE)''')
self.cur.execute('''DROP TABLE objectprocessorqueue''')
self.cur.execute(
'''CREATE TABLE objectprocessorqueue'''
''' (objecttype int, data blob, UNIQUE(objecttype, data) ON CONFLICT REPLACE)''')
item = '''update settings set value=? WHERE key='version';'''
parameters = (7,)
self.cur.execute(item, parameters)
@ -307,15 +338,24 @@ class sqlThread(threading.Thread):
' fields into the retrynumber field and adding the'
' sleeptill and ttl fields...')
self.cur.execute(
'''CREATE TEMPORARY TABLE sent_backup (msgid blob, toaddress text, toripe blob, fromaddress text, subject text, message text, ackdata blob, lastactiontime integer, status text, retrynumber integer, folder text, encodingtype int)''' )
'''CREATE TEMPORARY TABLE sent_backup'''
''' (msgid blob, toaddress text, toripe blob, fromaddress text, subject text, message text,'''
''' ackdata blob, lastactiontime integer, status text, retrynumber integer,'''
''' folder text, encodingtype int)''')
self.cur.execute(
'''INSERT INTO sent_backup SELECT msgid, toaddress, toripe, fromaddress, subject, message, ackdata, lastactiontime, status, 0, folder, encodingtype FROM sent;''')
self.cur.execute( '''DROP TABLE sent''')
'''INSERT INTO sent_backup SELECT msgid, toaddress, toripe, fromaddress,'''
''' subject, message, ackdata, lastactiontime,'''
''' status, 0, folder, encodingtype FROM sent;''')
self.cur.execute('''DROP TABLE sent''')
self.cur.execute(
'''CREATE TABLE sent (msgid blob, toaddress text, toripe blob, fromaddress text, subject text, message text, ackdata blob, senttime integer, lastactiontime integer, sleeptill int, status text, retrynumber integer, folder text, encodingtype int, ttl int)''' )
'''CREATE TABLE sent'''
''' (msgid blob, toaddress text, toripe blob, fromaddress text, subject text, message text,'''
''' ackdata blob, senttime integer, lastactiontime integer, sleeptill int, status text,'''
''' retrynumber integer, folder text, encodingtype int, ttl int)''')
self.cur.execute(
'''INSERT INTO sent SELECT msgid, toaddress, toripe, fromaddress, subject, message, ackdata, lastactiontime, lastactiontime, 0, status, 0, folder, encodingtype, 216000 FROM sent_backup;''')
self.cur.execute( '''DROP TABLE sent_backup''')
'''INSERT INTO sent SELECT msgid, toaddress, toripe, fromaddress, subject, message, ackdata,'''
''' lastactiontime, lastactiontime, 0, status, 0, folder, encodingtype, 216000 FROM sent_backup;''')
self.cur.execute('''DROP TABLE sent_backup''')
logger.info('In messages.dat database, finished making TTL-related changes.')
logger.debug('In messages.dat database, adding address field to the pubkeys table.')
# We're going to have to calculate the address for each row in the pubkeys
@ -332,16 +372,24 @@ class sqlThread(threading.Thread):
self.cur.execute(item, parameters)
# Now we can remove the hash field from the pubkeys table.
self.cur.execute(
'''CREATE TEMPORARY TABLE pubkeys_backup (address text, addressversion int, transmitdata blob, time int, usedpersonally text, UNIQUE(address) ON CONFLICT REPLACE)''' )
'''CREATE TEMPORARY TABLE pubkeys_backup'''
''' (address text, addressversion int, transmitdata blob, time int,'''
''' usedpersonally text, UNIQUE(address) ON CONFLICT REPLACE)''')
self.cur.execute(
'''INSERT INTO pubkeys_backup SELECT address, addressversion, transmitdata, time, usedpersonally FROM pubkeys;''')
self.cur.execute( '''DROP TABLE pubkeys''')
'''INSERT INTO pubkeys_backup'''
''' SELECT address, addressversion, transmitdata, time, usedpersonally FROM pubkeys;''')
self.cur.execute('''DROP TABLE pubkeys''')
self.cur.execute(
'''CREATE TABLE pubkeys (address text, addressversion int, transmitdata blob, time int, usedpersonally text, UNIQUE(address) ON CONFLICT REPLACE)''' )
'''CREATE TABLE pubkeys'''
''' (address text, addressversion int, transmitdata blob, time int, usedpersonally text,'''
''' UNIQUE(address) ON CONFLICT REPLACE)''')
self.cur.execute(
'''INSERT INTO pubkeys SELECT address, addressversion, transmitdata, time, usedpersonally FROM pubkeys_backup;''')
self.cur.execute( '''DROP TABLE pubkeys_backup''')
logger.debug('In messages.dat database, done adding address field to the pubkeys table and removing the hash field.')
'''INSERT INTO pubkeys SELECT'''
''' address, addressversion, transmitdata, time, usedpersonally FROM pubkeys_backup;''')
self.cur.execute('''DROP TABLE pubkeys_backup''')
logger.debug(
'In messages.dat database, done adding address field to the pubkeys table'
' and removing the hash field.')
self.cur.execute('''update settings set value=10 WHERE key='version';''')
# Are you hoping to add a new option to the keys.dat file of existing
@ -351,7 +399,7 @@ class sqlThread(threading.Thread):
try:
testpayload = '\x00\x00'
t = ('1234', 1, testpayload, '12345678', 'no')
self.cur.execute( '''INSERT INTO pubkeys VALUES(?,?,?,?,?)''', t)
self.cur.execute('''INSERT INTO pubkeys VALUES(?,?,?,?,?)''', t)
self.conn.commit()
self.cur.execute(
'''SELECT transmitdata FROM pubkeys WHERE address='1234' ''')
@ -361,13 +409,29 @@ class sqlThread(threading.Thread):
self.cur.execute('''DELETE FROM pubkeys WHERE address='1234' ''')
self.conn.commit()
if transmitdata == '':
logger.fatal('Problem: The version of SQLite you have cannot store Null values. Please download and install the latest revision of your version of Python (for example, the latest Python 2.7 revision) and try again.\n')
logger.fatal('PyBitmessage will now exit very abruptly. You may now see threading errors related to this abrupt exit but the problem you need to solve is related to SQLite.\n\n')
logger.fatal(
'Problem: The version of SQLite you have cannot store Null values.'
' Please download and install the latest revision of your version of Python'
' (for example, the latest Python 2.7 revision) and try again.\n')
logger.fatal(
'PyBitmessage will now exit very abruptly.'
' You may now see threading errors related to this abrupt exit'
' but the problem you need to solve is related to SQLite.\n\n')
os._exit(0)
except Exception as err:
if str(err) == 'database or disk is full':
logger.fatal('(While null value test) Alert: Your disk or data storage volume is full. sqlThread will now exit.')
queues.UISignalQueue.put(('alert', (tr._translate("MainWindow", "Disk full"), tr._translate("MainWindow", 'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'), True)))
logger.fatal(
'(While null value test) Alert: Your disk or data storage volume is full.'
' sqlThread will now exit.')
queues.UISignalQueue.put((
'alert', (
tr._translate(
"MainWindow",
"Disk full"),
tr._translate(
"MainWindow",
'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'),
True)))
os._exit(0)
else:
logger.error(err)
@ -383,11 +447,21 @@ class sqlThread(threading.Thread):
if int(value) < int(time.time()) - 86400:
logger.info('It has been a long time since the messages.dat file has been vacuumed. Vacuuming now...')
try:
self.cur.execute( ''' VACUUM ''')
self.cur.execute(''' VACUUM ''')
except Exception as err:
if str(err) == 'database or disk is full':
logger.fatal('(While VACUUM) Alert: Your disk or data storage volume is full. sqlThread will now exit.')
queues.UISignalQueue.put(('alert', (tr._translate("MainWindow", "Disk full"), tr._translate("MainWindow", 'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'), True)))
logger.fatal(
'(While VACUUM) Alert: Your disk or data storage volume is full.'
' sqlThread will now exit.')
queues.UISignalQueue.put((
'alert', (
tr._translate(
"MainWindow",
"Disk full"),
tr._translate(
"MainWindow",
'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'),
True)))
os._exit(0)
item = '''update settings set value=? WHERE key='lastvacuumtime';'''
parameters = (int(time.time()),)
@ -402,8 +476,18 @@ class sqlThread(threading.Thread):
self.conn.commit()
except Exception as err:
if str(err) == 'database or disk is full':
logger.fatal('(While committing) Alert: Your disk or data storage volume is full. sqlThread will now exit.')
queues.UISignalQueue.put(('alert', (tr._translate("MainWindow", "Disk full"), tr._translate("MainWindow", 'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'), True)))
logger.fatal(
'(While committing) Alert: Your disk or data storage volume is full.'
' sqlThread will now exit.')
queues.UISignalQueue.put((
'alert', (
tr._translate(
"MainWindow",
"Disk full"),
tr._translate(
"MainWindow",
'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'),
True)))
os._exit(0)
elif item == 'exit':
self.conn.close()
@ -417,8 +501,18 @@ class sqlThread(threading.Thread):
self.conn.commit()
except Exception as err:
if str(err) == 'database or disk is full':
logger.fatal('(while movemessagstoprog) Alert: Your disk or data storage volume is full. sqlThread will now exit.')
queues.UISignalQueue.put(('alert', (tr._translate("MainWindow", "Disk full"), tr._translate("MainWindow", 'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'), True)))
logger.fatal(
'(while movemessagstoprog) Alert: Your disk or data storage volume is full.'
' sqlThread will now exit.')
queues.UISignalQueue.put((
'alert', (
tr._translate(
"MainWindow",
"Disk full"),
tr._translate(
"MainWindow",
'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'),
True)))
os._exit(0)
self.conn.close()
shutil.move(
@ -433,8 +527,18 @@ class sqlThread(threading.Thread):
self.conn.commit()
except Exception as err:
if str(err) == 'database or disk is full':
logger.fatal('(while movemessagstoappdata) Alert: Your disk or data storage volume is full. sqlThread will now exit.')
queues.UISignalQueue.put(('alert', (tr._translate("MainWindow", "Disk full"), tr._translate("MainWindow", 'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'), True)))
logger.fatal(
'(while movemessagstoappdata) Alert: Your disk or data storage volume is full.'
' sqlThread will now exit.')
queues.UISignalQueue.put((
'alert', (
tr._translate(
"MainWindow",
"Disk full"),
tr._translate(
"MainWindow",
'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'),
True)))
os._exit(0)
self.conn.close()
shutil.move(
@ -447,11 +551,21 @@ class sqlThread(threading.Thread):
self.cur.execute('''delete from sent where folder='trash' ''')
self.conn.commit()
try:
self.cur.execute( ''' VACUUM ''')
self.cur.execute(''' VACUUM ''')
except Exception as err:
if str(err) == 'database or disk is full':
logger.fatal('(while deleteandvacuume) Alert: Your disk or data storage volume is full. sqlThread will now exit.')
queues.UISignalQueue.put(('alert', (tr._translate("MainWindow", "Disk full"), tr._translate("MainWindow", 'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'), True)))
logger.fatal(
'(while deleteandvacuume) Alert: Your disk or data storage volume is full.'
' sqlThread will now exit.')
queues.UISignalQueue.put((
'alert', (
tr._translate(
"MainWindow",
"Disk full"),
tr._translate(
"MainWindow",
'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'),
True)))
os._exit(0)
else:
parameters = helper_sql.sqlSubmitQueue.get()
@ -463,11 +577,30 @@ class sqlThread(threading.Thread):
rowcount = self.cur.rowcount
except Exception as err:
if str(err) == 'database or disk is full':
logger.fatal('(while cur.execute) Alert: Your disk or data storage volume is full. sqlThread will now exit.')
queues.UISignalQueue.put(('alert', (tr._translate("MainWindow", "Disk full"), tr._translate("MainWindow", 'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'), True)))
logger.fatal(
'(while cur.execute) Alert: Your disk or data storage volume is full.'
' sqlThread will now exit.')
queues.UISignalQueue.put((
'alert', (
tr._translate(
"MainWindow",
"Disk full"),
tr._translate(
"MainWindow",
'Alert: Your disk or data storage volume is full. Bitmessage will now exit.'),
True)))
os._exit(0)
else:
logger.fatal('Major error occurred when trying to execute a SQL statement within the sqlThread. Please tell Atheros about this error message or post it in the forum! Error occurred while trying to execute statement: "%s" Here are the parameters; you might want to censor this data with asterisks (***) as it can contain private information: %s. Here is the actual error message thrown by the sqlThread: %s', str(item), str(repr(parameters)), str(err))
logger.fatal(
'Major error occurred when trying to execute a SQL statement within the sqlThread.'
' Please tell Atheros about this error message or post it in the forum!'
' Error occurred while trying to execute statement: "%s" Here are the parameters;'
' you might want to censor this data with asterisks (***)'
' as it can contain private information: %s.'
' Here is the actual error message thrown by the sqlThread: %s',
str(item),
str(repr(parameters)),
str(err))
logger.fatal('This program shall now abruptly exit!')
os._exit(0)

View File

@ -1,26 +1,38 @@
"""
Logging and debugging facility
==============================
------------------------------
Levels:
DEBUG
Detailed information, typically of interest only when diagnosing problems.
INFO
Confirmation that things are working as expected.
WARNING
An indication that something unexpected happened, or indicative of some problem in the
near future (e.g. 'disk space low'). The software is still working as expected.
ERROR
Due to a more serious problem, the software has not been able to perform some function.
CRITICAL
A serious error, indicating that the program itself may be unable to continue running.
DEBUG
Detailed information, typically of interest only when diagnosing problems.
INFO
Confirmation that things are working as expected.
WARNING
An indication that something unexpected happened, or indicative of
some problem in the near future (e.g. 'disk space low'). The software
is still working as expected.
ERROR
Due to a more serious problem, the software has not been able to
perform some function.
CRITICAL
A serious error, indicating that the program itself may be unable to
continue running.
There are three loggers: `console_only`, `file_only` and `both`.
There are three loggers by default: `console_only`, `file_only` and `both`.
You can configure logging in logging.dat in the appdata dir.
Its format is described in the :func:`logging.config.fileConfig` doc.
Use: `from debug import logger` to import this facility into whatever module you wish to log messages from.
Logging is thread-safe so you don't have to worry about locks, just import and log.
Use:
>>> import logging
>>> logger = logging.getLogger('default')
The old form ``from debug import logger`` may also be used,
but only in the top-level modules.
Logging is thread-safe so you don't have to worry about locks,
just import and log.
"""
import configparser
@ -28,6 +40,7 @@ import logging
import logging.config
import os
import sys
import helper_startup
import state
@ -40,15 +53,22 @@ log_level = 'WARNING'
def log_uncaught_exceptions(ex_cls, ex, tb):
"""The last resort logging function used for sys.excepthook"""
logging.critical('Unhandled exception', exc_info=(ex_cls, ex, tb))
def configureLogging():
"""
Configure logging,
using either the logging.dat file in the state.appdata dir
or a dictionary of hardcoded settings.
"""
sys.excepthook = log_uncaught_exceptions
fail_msg = ''
try:
logging_config = os.path.join(state.appdata, 'logging.dat')
# right now I am commenting the logger
# logging.config.fileConfig(logging_config, disable_existing_loggers=False)
logging.config.fileConfig(
logging_config, disable_existing_loggers=False)
return (
False,
'Loaded logger configuration from %s' % logging_config
@ -60,12 +80,11 @@ def configureLogging():
' logging config\n%s' % \
(logging_config, sys.exc_info())
else:
# no need to confuse the user if the logger config is missing entirely
# no need to confuse the user if the logger config
# is missing entirely
fail_msg = 'Using default logger configuration'
sys.excepthook = log_uncaught_exceptions
logging.config.dictConfig({
logging_config = {
'version': 1,
'formatters': {
'default': {
@ -107,34 +126,33 @@ def configureLogging():
'level': log_level,
'handlers': ['console'],
},
})
}
logging_config['loggers']['default'] = logging_config['loggers'][
'file_only' if '-c' in sys.argv else 'both']
logging.config.dictConfig(logging_config)
return True, fail_msg
def initLogging():
preconfigured, msg = configureLogging()
if preconfigured:
if '-c' in sys.argv:
logger = logging.getLogger('file_only')
else:
logger = logging.getLogger('both')
else:
logger = logging.getLogger('default')
if msg:
logger.log(logging.WARNING if preconfigured else logging.INFO, msg)
return logger
def resetLogging():
"""Reconfigure logging in runtime when state.appdata dir changed"""
# pylint: disable=global-statement, used-before-assignment
global logger
for i in logger.handlers.iterkeys():
for i in logger.handlers:
logger.removeHandler(i)
i.flush()
i.close()
logger = initLogging()
configureLogging()
logger = logging.getLogger('default')
# !
logger = initLogging()
try:
preconfigured, msg = configureLogging()
if msg:
logger.log(logging.WARNING if preconfigured else logging.INFO, msg)
except:
pass
logger = logging.getLogger('default')

View File

@ -1,24 +1,24 @@
"""
src/defaults.py
===============
Common default values
"""
# sanity check, prevent doing ridiculous PoW
# 20 million PoWs equals approximately 2 days on dev's dual R9 290
#: sanity check, prevent doing ridiculous PoW
#: 20 million PoWs equals approximately 2 days on dev's dual R9 290
ridiculousDifficulty = 20000000
# Remember here the RPC port read from namecoin.conf so we can restore to
# it as default whenever the user changes the "method" selection for
# namecoin integration to "namecoind".
#: Remember here the RPC port read from namecoin.conf so we can restore to
#: it as default whenever the user changes the "method" selection for
#: namecoin integration to "namecoind".
namecoinDefaultRpcPort = "8336"
# If changed, these values will cause particularly unexpected behavior:
# You won't be able to either send or receive messages because the proof
# of work you do (or demand) won't match that done or demanded by others.
# Don't change them!
# The amount of work that should be performed (and demanded) per byte of the payload.
#: The amount of work that should be performed (and demanded) per byte
#: of the payload.
networkDefaultProofOfWorkNonceTrialsPerByte = 1000
# To make sending short messages a little more difficult, this value is
# added to the payload length for use in calculating the proof of work
# target.
#: To make sending short messages a little more difficult, this value is
#: added to the payload length for use in calculating the proof of work
#: target.
networkDefaultPayloadLengthExtraBytes = 1000
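# How the two values above enter the proof-of-work target (a sketch, not
# part of this change, of the formula used in class_singleWorker; the
# helper name pow_target is hypothetical, integer division as in Python 2):
def pow_target(payload_length, ttl,
               nonce_trials_per_byte=networkDefaultProofOfWorkNonceTrialsPerByte,
               extra_bytes=networkDefaultPayloadLengthExtraBytes):
    # + 8 accounts for the nonce that will be prepended to the payload
    effective_length = payload_length + 8 + extra_bytes
    return 2 ** 64 / (nonce_trials_per_byte * (
        effective_length + ttl * effective_length / (2 ** 16)))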

View File

@ -113,6 +113,7 @@ PACKAGES = {
def detectOS():
"""Finding out what Operating System is running"""
if detectOS.result is not None:
return detectOS.result
if sys.platform.startswith('openbsd'):
@ -132,6 +133,7 @@ detectOS.result = None
def detectOSRelease():
"""Detecting the release of OS"""
with open("/etc/os-release", 'r') as osRelease:
version = None
for line in osRelease:
@ -148,6 +150,7 @@ def detectOSRelease():
def try_import(module, log_extra=False):
"""Try to import the non imported packages"""
try:
return import_module(module)
except ImportError:
@ -208,10 +211,8 @@ def check_sqlite():
).fetchone()[0]
logger.info('SQLite Library Source ID: %s', sqlite_source_id)
if sqlite_version_number >= 3006023:
compile_options = ', '.join(map(
lambda row: row[0],
conn.execute('PRAGMA compile_options;')
))
compile_options = ', '.join(
[row[0] for row in conn.execute('PRAGMA compile_options;')])
logger.info(
'SQLite Library Compile Options: %s', compile_options)
# There is no specific version requirement as yet, so we just
@ -230,13 +231,13 @@ def check_sqlite():
conn.close()
def check_openssl():
def check_openssl(): # pylint: disable=too-many-branches, too-many-return-statements
"""Do openssl dependency check.
Here we check for OpenSSL, the libraries it depends on,
and its version.
"""
# pylint: disable=protected-access, redefined-outer-name
ctypes = try_import('ctypes')
if not ctypes:
logger.error('Unable to check OpenSSL.')
@ -303,7 +304,7 @@ def check_openssl():
' ECDH, and ECDSA enabled.')
return False
matches = cflags_regex.findall(openssl_cflags)
if len(matches) > 0:
if matches:
logger.error(
'This OpenSSL library is missing the following required'
' features: %s. PyBitmessage requires OpenSSL 0.9.8b'
@ -314,13 +315,13 @@ def check_openssl():
return False
# TODO: The minimum versions of pythondialog and dialog need to be determined
# ..todo:: The minimum versions of pythondialog and dialog need to be determined
def check_curses():
"""Do curses dependency check.
Here we are checking for curses if available or not with check
as interface requires the pythondialog\ package and the dialog
utility.
Here we check whether curses is available, since the curses interface
requires the `pythondialog <https://pypi.org/project/pythondialog>`_ package
and the dialog utility.
"""
if sys.hexversion < 0x20600F0:
logger.error(

View File

@ -1,13 +1,19 @@
"""
.. todo:: hello world
Fallback expressions help PyBitmessage modules to run without some external
dependencies.
RIPEMD160Hash
-------------
We need to check :mod:`hashlib` for RIPEMD-160, as it won't be available
if Python is not linked against OpenSSL or the linked OpenSSL has RIPEMD disabled.
Try to use `pycryptodome <https://pypi.org/project/pycryptodome/>`_
in that case.
"""
import hashlib
# We need to check hashlib for RIPEMD-160, as it won't be available
# if OpenSSL is not linked against or the linked OpenSSL has RIPEMD
# disabled.
try:
hashlib.new('ripemd160')
except ValueError:
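A hedged sketch of the fallback this module provides; the wrapper name RIPEMD160Hash follows the docstring above, and Crypto.Hash.RIPEMD160 is the pycryptodome module:

import hashlib
try:
    hashlib.new('ripemd160')
    def RIPEMD160Hash(data=b''):
        return hashlib.new('ripemd160', data)
except ValueError:
    from Crypto.Hash import RIPEMD160  # pycryptodome
    def RIPEMD160Hash(data=b''):
        return RIPEMD160.new(data)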

View File

@ -1,22 +1,28 @@
"""This module is for generating ack payload."""
"""
This module is for generating ack payload
"""
from binascii import hexlify
from struct import pack
import highlevelcrypto
import helper_random
from binascii import hexlify
from struct import pack
from addresses import encodeVarint
# This function generates payload objects for message acknowledgements
# Several stealth levels are available depending on the privacy needs;
# a higher level means better stealth, but also higher cost (size+POW)
# - level 0: a random 32-byte sequence with a message header appended
# - level 1: a getpubkey request for a (random) dummy key hash
# - level 2: a standard message, encrypted to a random pubkey
def genAckPayload(streamNumber=1, stealthLevel=0):
"""Generate and return payload obj."""
if (stealthLevel == 2): # Generate privacy-enhanced payload
"""
Generate and return payload obj.
This function generates payload objects for message acknowledgements.
Several stealth levels are available depending on the privacy needs;
a higher level means better stealth, but also higher cost (size+POW)
- level 0: a random 32-byte sequence with a message header appended
- level 1: a getpubkey request for a (random) dummy key hash
- level 2: a standard message, encrypted to a random pubkey
"""
if stealthLevel == 2: # Generate privacy-enhanced payload
# Generate a dummy privkey and derive the pubkey
dummyPubKeyHex = highlevelcrypto.privToPub(
hexlify(helper_random.randomBytes(32)))
@ -29,7 +35,7 @@ def genAckPayload(streamNumber=1, stealthLevel=0):
acktype = 2 # message
version = 1
elif (stealthLevel == 1): # Basic privacy payload (random getpubkey)
elif stealthLevel == 1: # Basic privacy payload (random getpubkey)
ackdata = helper_random.randomBytes(32)
acktype = 0 # getpubkey
version = 4
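Hypothetical usage of genAckPayload: a level-1 (random getpubkey) ack for stream 1; the returned bytes get embedded into the outgoing message:

ackdata = genAckPayload(streamNumber=1, stealthLevel=1)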

View File

@ -1,11 +1,19 @@
"""
Calculates bitcoin and testnet address from pubkey
"""
import hashlib
from debug import logger
from pyelliptic import arithmetic
# This function expects that pubkey begin with \x04
def calculateBitcoinAddressFromPubkey(pubkey):
"""Calculate bitcoin address from given pubkey (65 bytes long hex string)"""
if len(pubkey) != 65:
print ('Could not calculate Bitcoin address from pubkey because function was passed a pubkey that was'\
'{}, bytes long rather than 65.'.format(len(pubkey)))
logger.error('Could not calculate Bitcoin address from pubkey because'
' function was passed a pubkey that was'
' %i bytes long rather than 65.', len(pubkey))
return "error"
ripe = hashlib.new('ripemd160')
sha = hashlib.new('sha256')
@ -25,9 +33,11 @@ def calculateBitcoinAddressFromPubkey(pubkey):
def calculateTestnetAddressFromPubkey(pubkey):
"""This function expects that pubkey begin with the testnet prefix"""
if len(pubkey) != 65:
print ('Could not calculate Bitcoin address from pubkey because function was passed a pubkey that was',\
'{}, bytes long rather than 65.'.format(pubkey))
logger.error('Could not calculate Bitcoin address from pubkey because'
' function was passed a pubkey that was'
' %i bytes long rather than 65.', len(pubkey))
return "error"
ripe = hashlib.new('ripemd160')
sha = hashlib.new('sha256')
@ -43,4 +53,3 @@ def calculateTestnetAddressFromPubkey(pubkey):
numberOfZeroBytesOnBinaryBitcoinAddress += 1
binaryBitcoinAddress = binaryBitcoinAddress[1:]
base58encoded = arithmetic.changebase(binaryBitcoinAddress, 256, 58)
return "1" * numberOfZeroBytesOnBinaryBitcoinAddress + base58encoded

View File

@ -1,10 +1,11 @@
"""Helper Inbox performs inbox messagese related operations."""
"""Helper Inbox performs inbox messages related operations"""
from helper_sql import sqlExecute, sqlQuery
import queues
from helper_sql import sqlExecute, sqlQuery
def insert(t):
"""Perform an insert into the "inbox" table"""
sqlExecute('''INSERT INTO inbox VALUES (?,?,?,?,?,?,?,?,?,?)''', *t)
# shouldn't emit changedInboxUnread and displayNewInboxMessage
# at the same time
@ -12,11 +13,13 @@ def insert(t):
def trash(msgid):
"""Mark a message in the `inbox` as `trash`"""
sqlExecute('''UPDATE inbox SET folder='trash' WHERE msgid=?''', msgid)
queues.UISignalQueue.put(('removeInboxRowByMsgid', msgid))
def isMessageAlreadyInInbox(sigHash):
"""Check for previous instances of this message"""
queryReturn = sqlQuery(
'''SELECT COUNT(*) FROM inbox WHERE sighash=?''', sigHash)
return queryReturn[0][0] != 0

View File

@ -25,19 +25,24 @@ BITMESSAGE_ENCODING_EXTENDED = 3
class MsgEncodeException(Exception):
"""Exception during message encoding"""
pass
class MsgDecodeException(Exception):
"""Exception during message decoding"""
pass
class DecompressionSizeException(MsgDecodeException):
# pylint: disable=super-init-not-called
"""Decompression resulted in too much data (attack protection)"""
def __init__(self, size):
self.size = size
class MsgEncode(object):
"""Message encoder class"""
def __init__(self, message, encoding=BITMESSAGE_ENCODING_SIMPLE):
self.data = None
self.encoding = encoding
@ -52,6 +57,7 @@ class MsgEncode(object):
raise MsgEncodeException("Unknown encoding %i" % (encoding))
def encodeExtended(self, message):
"""Handle extended encoding"""
try:
msgObj = messagetypes.message.Message()
self.data = zlib.compress(msgpack.dumps(msgObj.encode(message)), 9)
@ -64,15 +70,18 @@ class MsgEncode(object):
self.length = len(self.data)
def encodeSimple(self, message):
"""Handle simple encoding"""
self.data = 'Subject:%(subject)s\nBody:%(body)s' % message
self.length = len(self.data)
def encodeTrivial(self, message):
"""Handle trivial encoding"""
self.data = message['body']
self.length = len(self.data)
class MsgDecode(object):
"""Message decoder class"""
def __init__(self, encoding, data):
self.encoding = encoding
if self.encoding == BITMESSAGE_ENCODING_EXTENDED:
@ -88,6 +97,7 @@ class MsgDecode(object):
self.subject = _translate("MsgDecode", "Unknown encoding")
def decodeExtended(self, data):
"""Handle extended encoding"""
dc = zlib.decompressobj()
tmp = ""
while len(tmp) <= BMConfigParser().safeGetInt("zlib", "maxsize"):
@ -131,6 +141,7 @@ class MsgDecode(object):
self.body = msgObj.body
def decodeSimple(self, data):
"""Handle simple encoding"""
bodyPositionIndex = string.find(data, '\nBody:')
if bodyPositionIndex > 1:
subject = data[8:bodyPositionIndex]
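A hypothetical round trip through the two classes above using the simple encoding:

enc = MsgEncode({'subject': 'Hi', 'body': 'Hello'}, BITMESSAGE_ENCODING_SIMPLE)
dec = MsgDecode(BITMESSAGE_ENCODING_SIMPLE, enc.data)
assert dec.subject == 'Hi' and dec.body == 'Hello'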

View File

@ -56,8 +56,7 @@ def randomrandrange(x, y=None):
"""
if isinstance(y, NoneType):
return random.randrange(x) # nosec
else:
return random.randrange(x, y) # nosec
return random.randrange(x, y) # nosec
def randomchoice(population):

View File

@ -1,6 +1,6 @@
#!/usr/bin/python2.7
"""Additional SQL helper for searching messages"""
from helper_sql import *
from helper_sql import sqlQuery
try:
from PyQt4 import QtGui
@ -8,13 +8,17 @@ try:
except ImportError:
haveQt = False
def search_translate (context, text):
def search_translate(context, text):
"""Translation wrapper"""
if haveQt:
return QtGui.QApplication.translate(context, text)
else:
return text.lower()
return text.lower()
def search_sql(xAddress = "toaddress", account = None, folder = "inbox", where = None, what = None, unreadOnly = False):
def search_sql(xAddress="toaddress", account=None, folder="inbox", where=None, what=None, unreadOnly=False):
"""Perform a search in mailbox tables"""
# pylint: disable=too-many-arguments, too-many-branches
if what is not None and what != "":
what = "%" + what + "%"
if where == search_translate("MainWindow", "To"):
@ -32,7 +36,7 @@ def search_sql(xAddress = "toaddress", account = None, folder = "inbox", where =
if folder == "sent":
sqlStatementBase = '''
SELECT toaddress, fromaddress, subject, status, ackdata, lastactiontime
FROM sent '''
else:
sqlStatementBase = '''SELECT folder, msgid, toaddress, fromaddress, subject, received, read
@ -62,13 +66,16 @@ def search_sql(xAddress = "toaddress", account = None, folder = "inbox", where =
sqlArguments.append(what)
if unreadOnly:
sqlStatementParts.append("read = 0")
if len(sqlStatementParts) > 0:
if sqlStatementParts:
sqlStatementBase += "WHERE " + " AND ".join(sqlStatementParts)
if folder == "sent":
sqlStatementBase += " ORDER BY lastactiontime"
return sqlQuery(sqlStatementBase, sqlArguments)
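A hypothetical call: unread inbox rows whose subject contains "invoice" (the where label must match search_translate's output, which lower-cases when Qt is absent):

rows = search_sql(folder="inbox",
                  where=search_translate("MainWindow", "Subject"),
                  what="invoice", unreadOnly=True)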
def check_match(toAddress, fromAddress, subject, message, where = None, what = None):
def check_match(toAddress, fromAddress, subject, message, where=None, what=None):
"""Check if a single message matches a filter (used when new messages are added to messagelists)"""
# pylint: disable=too-many-arguments
if what is not None and what != "":
if where in (search_translate("MainWindow", "To"), search_translate("MainWindow", "All")):
if what.lower() not in toAddress.lower():

View File

@ -1,4 +1,9 @@
"""
Insert values into sent table
"""
from helper_sql import *
def insert(t):
"""Perform an insert into the `sent` table"""
sqlExecute('''INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)''', *t)

View File

@ -1,17 +1,39 @@
"""Helper Sql performs sql operations."""
"""
SQL-related functions defined here are really pass the queries (or other SQL
commands) to :class:`.threads.sqlThread` through `sqlSubmitQueue` queue and check
or return the result got from `sqlReturnQueue`.
This is done that way because :mod:`sqlite3` is so thread-unsafe that they
won't even let you call it from different threads using your own locks.
SQLite objects can only be used from one thread.
.. note:: This actually only applies for certain deployments, and/or
really old version of sqlite. I haven't actually seen it anywhere.
Current versions do have support for threading and multiprocessing.
I don't see an urgent reason to refactor this, but it should be noted
in the comment that the problem is mostly not valid. Sadly, last time
I checked, there is no reliable way to check whether the library is
or isn't thread-safe.
"""
import threading
import queue as Queue
sqlSubmitQueue = Queue.Queue()
# SQLITE3 is so thread-unsafe that they won't even let you call it from different threads using your own locks.
# SQL objects #can only be called from one thread.
"""the queue for SQL"""
sqlReturnQueue = Queue.Queue()
"""the queue for results"""
sqlLock = threading.Lock()
def sqlQuery(sqlStatement, *args):
"""SQLLITE execute statement and return query."""
"""
Query sqlite and return results
:param str sqlStatement: SQL statement string
:param list args: SQL query parameters
:rtype: list
"""
sqlLock.acquire()
sqlSubmitQueue.put(sqlStatement)
@ -28,6 +50,7 @@ def sqlQuery(sqlStatement, *args):
def sqlExecuteChunked(sqlStatement, idCount, *args):
"""Execute chunked SQL statement to avoid argument limit"""
# SQLITE_MAX_VARIABLE_NUMBER,
# unfortunately getting/setting isn't exposed to python
sqlExecuteChunked.chunkSize = 999
@ -58,6 +81,7 @@ def sqlExecuteChunked(sqlStatement, idCount, *args):
def sqlExecute(sqlStatement, *args):
"""Execute SQL statement (optionally with arguments)"""
sqlLock.acquire()
sqlSubmitQueue.put(sqlStatement)
@ -70,13 +94,15 @@ def sqlExecute(sqlStatement, *args):
sqlLock.release()
return rowcount
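A hedged sketch of the round trip the module docstring describes, assuming the return queue yields a (rows, rowcount) pair as sqlQuery and sqlExecute above suggest:

sqlLock.acquire()
sqlSubmitQueue.put('SELECT COUNT(*) FROM inbox')  # hypothetical statement
sqlSubmitQueue.put(())                            # its parameter tuple
rows, rowcount = sqlReturnQueue.get()
sqlLock.release()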
def sqlStoredProcedure(procName):
"""Schedule procName to be run"""
sqlLock.acquire()
sqlSubmitQueue.put(procName)
sqlLock.release()
class SqlBulkExecute:
class SqlBulkExecute(object):
"""This is used when you have to execute the same statement in a cycle."""
def __enter__(self):

View File

@ -1,8 +1,5 @@
"""
src/helper_startup.py
=====================
Helper Start performs all the startup operations.
Startup operations.
"""
# pylint: disable=too-many-branches,too-many-statements
from __future__ import print_function
@ -19,31 +16,11 @@ import paths
import state
from bmconfigparser import BMConfigParser
# The user may de-select Portable Mode in the settings if they want
# the config files to stay in the application data folder.
StoreConfigFilesInSameDirectoryAsProgramByDefault = False
def _loadTrustedPeer():
trustedPeer = ''
try:
trustedPeer = BMConfigParser().get('bitmessagesettings', 'trustedpeer')
except configparser.Error:
# This probably means the trusted peer wasn't specified so we
# can just leave it as None
return
try:
if trustedPeer != None:
host, port = trustedPeer.split(':')
state.trustedPeer = state.Peer(host, int(port))
except ValueError:
sys.exit(
'Bad trustedpeer config setting! It should be set as'
' trustedpeer=<hostname>:<portnumber>'
)
def loadConfig():
"""Load the config"""
config = BMConfigParser()
@ -138,7 +115,7 @@ def loadConfig():
else:
updateConfig()
_loadTrustedPeer()
def updateConfig():
"""Save the config"""

View File

@ -1,58 +0,0 @@
"""Helper threading perform all the threading operations."""
import threading
from contextlib import contextmanager
import helper_random
try:
import prctl
except ImportError:
def set_thread_name(name):
"""Set the thread name for external use (visible from the OS)."""
threading.current_thread().name = name
else:
def set_thread_name(name):
"""Set a name for the thread for python internal use."""
prctl.set_name(name)
def _thread_name_hack(self):
set_thread_name(self.name)
threading.Thread.__bootstrap_original__(self)
threading.Thread.__bootstrap_original__ = threading.Thread._Thread__bootstrap
threading.Thread._Thread__bootstrap = _thread_name_hack
class StoppableThread(threading.Thread):
name = None
def __init__(self, name=None):
if name:
self.name = name
super(StoppableThread, self).__init__(name=self.name)
self.initStop()
helper_random.seed()
def initStop(self):
self.stop = threading.Event()
self._stopped = False
def stopThread(self):
self._stopped = True
self.stop.set()
class BusyError(threading.ThreadError):
pass
@contextmanager
def nonBlocking(lock):
locked = lock.acquire(False)
if not locked:
raise BusyError
try:
yield
finally:
lock.release()
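The same pattern now lives in network.threads (re-exported from the new network/__init__.py below); a hedged usage sketch built on the attributes shown above:

class Worker(StoppableThread):
    def run(self):
        while not self._stopped:
            self.stop.wait(1)   # idle until stopThread() sets the event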

View File

@ -1,6 +1,10 @@
"""
src/highlevelcrypto.py
======================
High level cryptographic functions based on `.pyelliptic` OpenSSL bindings.
.. note::
Upstream pyelliptic was upgraded from SHA1 to SHA256 for signing.
We must upgrade PyBitmessage gracefully.
`More discussion. <https://github.com/yann2192/pyelliptic/issues/32>`_
"""
from binascii import hexlify
@ -12,7 +16,7 @@ from pyelliptic import arithmetic as a
def makeCryptor(privkey):
"""Return a private pyelliptic.ECC() instance"""
"""Return a private `.pyelliptic.ECC` instance"""
private_key = a.changebase(privkey, 16, 256, minlen=32)
public_key = pointMult(private_key)
privkey_bin = '\x02\xca\x00\x20'.encode('raw_unicode_escape') + private_key
@ -31,7 +35,7 @@ def hexToPubkey(pubkey):
def makePubCryptor(pubkey):
"""Return a public pyelliptic.ECC() instance"""
"""Return a public `.pyelliptic.ECC` instance"""
pubkey_bin = hexToPubkey(pubkey)
return pyelliptic.ECC(curve='secp256k1', pubkey=pubkey_bin)
@ -45,7 +49,8 @@ def privToPub(privkey):
def encrypt(msg, hexPubkey):
"""Encrypts message with hex public key"""
return pyelliptic.ECC(curve='secp256k1').encrypt(msg, hexToPubkey(hexPubkey))
return pyelliptic.ECC(curve='secp256k1').encrypt(
msg, hexToPubkey(hexPubkey))
def decrypt(msg, hexPrivkey):
@ -54,36 +59,38 @@ def decrypt(msg, hexPrivkey):
def decryptFast(msg, cryptor):
"""Decrypts message with an existing pyelliptic.ECC.ECC object"""
"""Decrypts message with an existing `.pyelliptic.ECC` object"""
return cryptor.decrypt(msg)
def sign(msg, hexPrivkey):
"""Signs with hex private key"""
# pyelliptic is upgrading from SHA1 to SHA256 for signing. We must
# upgrade PyBitmessage gracefully.
# https://github.com/yann2192/pyelliptic/pull/33
# More discussion: https://github.com/yann2192/pyelliptic/issues/32
digestAlg = BMConfigParser().safeGet('bitmessagesettings', 'digestalg', 'sha1')
"""
Signs with hex private key using SHA1 or SHA256 depending on
"digestalg" setting
"""
digestAlg = BMConfigParser().safeGet(
'bitmessagesettings', 'digestalg', 'sha1')
if digestAlg == "sha1":
# SHA1, this will eventually be deprecated
return makeCryptor(hexPrivkey).sign(msg, digest_alg=OpenSSL.digest_ecdsa_sha1)
return makeCryptor(hexPrivkey).sign(
msg, digest_alg=OpenSSL.digest_ecdsa_sha1)
elif digestAlg == "sha256":
# SHA256. Eventually this will become the default
return makeCryptor(hexPrivkey).sign(msg, digest_alg=OpenSSL.EVP_sha256)
else:
raise ValueError("Unknown digest algorithm %s" % (digestAlg))
raise ValueError("Unknown digest algorithm %s" % digestAlg)
def verify(msg, sig, hexPubkey):
"""Verifies with hex public key"""
"""Verifies with hex public key using SHA1 or SHA256"""
# As mentioned above, we must upgrade gracefully to use SHA256. So
# let us check the signature using both SHA1 and SHA256 and if one
# of them passes then we will be satisfied. Eventually this can
# be simplified and we'll only check with SHA256.
try:
# old SHA1 algorithm.
sigVerifyPassed = makePubCryptor(hexPubkey).verify(sig, msg, digest_alg=OpenSSL.digest_ecdsa_sha1)
sigVerifyPassed = makePubCryptor(hexPubkey).verify(
sig, msg, digest_alg=OpenSSL.digest_ecdsa_sha1)
except:
sigVerifyPassed = False
if sigVerifyPassed:
@ -91,7 +98,8 @@ def verify(msg, sig, hexPubkey):
return True
# The signature check using SHA1 failed. Let us try it with SHA256.
try:
return makePubCryptor(hexPubkey).verify(sig, msg, digest_alg=OpenSSL.EVP_sha256)
return makePubCryptor(hexPubkey).verify(
sig, msg, digest_alg=OpenSSL.EVP_sha256)
except:
return False
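A hedged round-trip example for the two functions above (msg, hexPrivkey and hexPubkey are hypothetical values):

sig = sign(msg, hexPrivkey)         # digest chosen by the "digestalg" setting
assert verify(msg, sig, hexPubkey)  # tries SHA1 first, then falls back to SHA256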
@ -102,13 +110,14 @@ def pointMult(secret):
Evidently, this type of error can occur very rarely:
File "highlevelcrypto.py", line 54, in pointMult
group = OpenSSL.EC_KEY_get0_group(k)
WindowsError: exception: access violation reading 0x0000000000000008
>>> File "highlevelcrypto.py", line 54, in pointMult
>>> group = OpenSSL.EC_KEY_get0_group(k)
>>> WindowsError: exception: access violation reading 0x0000000000000008
"""
while True:
try:
k = OpenSSL.EC_KEY_new_by_curve_name(OpenSSL.get_curve('secp256k1'))
k = OpenSSL.EC_KEY_new_by_curve_name(
OpenSSL.get_curve('secp256k1'))
priv_key = OpenSSL.BN_bin2bn(secret, 32, None)
group = OpenSSL.EC_KEY_get0_group(k)
pub_key = OpenSSL.EC_POINT_new(group)

BIN
src/images/3.zip Normal file

Binary file not shown.

BIN
src/images/loader.gif Normal file

Binary file not shown.


View File

@ -3,6 +3,7 @@ Manipulations with knownNodes dictionary.
"""
import json
import logging
import os
import pickle
import threading
@ -10,28 +11,33 @@ import time
import state
from bmconfigparser import BMConfigParser
from debug import logger
from network.node import Peer
knownNodesLock = threading.Lock()
"""Thread lock for knownnodes modification"""
knownNodes = {stream: {} for stream in range(1, 4)}
"""The dict of known nodes for each stream"""
knownNodesTrimAmount = 2000
"""trim stream knownnodes dict to this length"""
# forget a node after rating is this low
knownNodesForgetRating = -0.5
"""forget a node after rating is this low"""
knownNodesActual = False
logger = logging.getLogger('default')
DEFAULT_NODES = (
state.Peer('5.45.99.75', 8444),
state.Peer('75.167.159.54', 8444),
state.Peer('95.165.168.168', 8444),
state.Peer('85.180.139.241', 8444),
state.Peer('158.222.217.190', 8080),
state.Peer('178.62.12.187', 8448),
state.Peer('24.188.198.204', 8111),
state.Peer('109.147.204.113', 1195),
state.Peer('178.11.46.221', 8444)
Peer('5.45.99.75', 8444),
Peer('75.167.159.54', 8444),
Peer('95.165.168.168', 8444),
Peer('85.180.139.241', 8444),
Peer('158.222.217.190', 8080),
Peer('178.62.12.187', 8448),
Peer('24.188.198.204', 8111),
Peer('109.147.204.113', 1195),
Peer('178.11.46.221', 8444)
)
@ -57,19 +63,17 @@ def json_deserialize_knownnodes(source):
for node in json.load(source):
peer = node['peer']
info = node['info']
peer = state.Peer(str(peer['host']), peer.get('port', 8444))
peer = Peer(str(peer['host']), peer.get('port', 8444))
knownNodes[node['stream']][peer] = info
if (
not (knownNodesActual or info.get('self')) and
peer not in DEFAULT_NODES
):
if not (knownNodesActual
or info.get('self')) and peer not in DEFAULT_NODES:
knownNodesActual = True
def pickle_deserialize_old_knownnodes(source):
"""
Unpickle source and reorganize knownnodes dict if it's in old format
Unpickle source and reorganize knownnodes dict if it has the old format
the old format was {Peer:lastseen, ...}
the new format is {Peer:{"lastseen":i, "rating":f}}
"""
@ -82,6 +86,7 @@ def pickle_deserialize_old_knownnodes(source):
def saveKnownNodes(dirName=None):
"""Save knownnodes to filesystem"""
if dirName is None:
dirName = state.appdata
with knownNodesLock:
@ -90,6 +95,7 @@ def saveKnownNodes(dirName=None):
def addKnownNode(stream, peer, lastseen=None, is_self=False):
"""Add a new node to the dict"""
knownNodes[stream][peer] = {
"lastseen": lastseen or time.time(),
"rating": 1 if is_self else 0,
@ -98,6 +104,7 @@ def addKnownNode(stream, peer, lastseen=None, is_self=False):
def createDefaultKnownNodes():
"""Creating default Knownnodes"""
past = time.time() - 2418600 # 28 days - 10 min
for peer in DEFAULT_NODES:
addKnownNode(1, peer, past)
@ -105,6 +112,7 @@ def createDefaultKnownNodes():
def readKnownNodes():
"""Load knownnodes from filesystem"""
try:
with open(state.appdata + 'knownnodes.dat', 'r') as source:
with knownNodesLock:
@ -125,12 +133,13 @@ def readKnownNodes():
if onionhostname and ".onion" in onionhostname:
onionport = config.safeGetInt('bitmessagesettings', 'onionport')
if onionport:
self_peer = state.Peer(onionhostname, onionport)
self_peer = Peer(onionhostname, onionport)
addKnownNode(1, self_peer, is_self=True)
state.ownAddresses[self_peer] = True
def increaseRating(peer):
"""Increase rating of a peer node"""
increaseAmount = 0.1
maxRating = 1
with knownNodesLock:
@ -145,6 +154,7 @@ def increaseRating(peer):
def decreaseRating(peer):
"""Decrease rating of a peer node"""
decreaseAmount = 0.1
minRating = -1
with knownNodesLock:
@ -159,6 +169,7 @@ def decreaseRating(peer):
def trimKnownNodes(recAddrStream=1):
"""Triming Knownnodes"""
if len(knownNodes[recAddrStream]) < \
int(BMConfigParser().safeGet("knownnodes", "maxnodes")):
return
@ -175,7 +186,7 @@ def dns():
"""Add DNS names to knownnodes"""
for port in [8080, 8444]:
addKnownNode(
1, state.Peer('bootstrap%s.bitmessage.org' % port, port))
1, Peer('bootstrap%s.bitmessage.org' % port, port))
def cleanupKnownNodes():
@ -201,8 +212,8 @@ def cleanupKnownNodes():
del knownNodes[stream][node]
continue
# scrap old nodes (age > 3 hours) with low rating
if (age > 10800 and knownNodes[stream][node]["rating"] <=
knownNodesForgetRating):
if (age > 10800 and knownNodes[stream][node]["rating"]
<= knownNodesForgetRating):
needToWriteKnownNodesToDisk = True
del knownNodes[stream][node]
continue

View File

@ -1,4 +1,6 @@
"""
Localization
"""
import logging
import os
import time
@ -6,8 +8,7 @@ import time
from bmconfigparser import BMConfigParser
#logger = logging.getLogger(__name__)
logger = logging.getLogger('file_only')
logger = logging.getLogger('default')
DEFAULT_ENCODING = 'ISO8859-1'
@ -50,7 +51,7 @@ except:
if BMConfigParser().has_option('bitmessagesettings', 'timeformat'):
time_format = BMConfigParser().get('bitmessagesettings', 'timeformat')
#Test the format string
# Test the format string
try:
time.strftime(time_format)
except:
@ -59,12 +60,12 @@ if BMConfigParser().has_option('bitmessagesettings', 'timeformat'):
else:
time_format = DEFAULT_TIME_FORMAT
#It seems some systems lie about the encoding they use so we perform
#comprehensive decoding tests
# It seems some systems lie about the encoding they use so we perform
# comprehensive decoding tests
if time_format != DEFAULT_TIME_FORMAT:
try:
#Check day names
new_time_format = time_format
import sys
if sys.version_info >= (3, 0, 0) and time_format == '%%c':
time_format = '%c'
@ -83,34 +84,39 @@ if time_format != DEFAULT_TIME_FORMAT:
(time.strftime(time_format, (0, 0, 0, 13, 0, 0, 0, 0, 0))).encode()
#Check DST
(time.strftime(time_format, (0, 0, 0, 0, 0, 0, 0, 0, 1))).encode()
except:
logger.exception('Could not decode locale formatted timestamp')
time_format = DEFAULT_TIME_FORMAT
encoding = DEFAULT_ENCODING
time_format = new_time_format
def setlocale(category, newlocale):
"""Set the locale"""
locale.setlocale(category, newlocale)
# it looks like some stuff isn't initialised yet when this is called the
# first time and its init gets the locale settings from the environment
os.environ["LC_ALL"] = newlocale
def formatTimestamp(timestamp = None, as_unicode = True):
#For some reason some timestamps are strings so we need to sanitize.
def formatTimestamp(timestamp=None, as_unicode=True):
"""Return a formatted timestamp"""
# For some reason some timestamps are strings so we need to sanitize.
if timestamp is not None and not isinstance(timestamp, int):
try:
timestamp = int(timestamp)
except:
timestamp = None
#timestamp can't be less than 0.
# timestamp can't be less than 0.
if timestamp is not None and timestamp < 0:
timestamp = None
if timestamp is None:
timestring = time.strftime(time_format)
else:
#In case timestamp is too far in the future
# In case timestamp is too far in the future
try:
timestring = time.strftime(time_format, time.localtime(timestamp))
except ValueError:
@ -120,17 +126,21 @@ def formatTimestamp(timestamp = None, as_unicode = True):
return unicode(timestring, encoding)
return timestring
def getTranslationLanguage():
userlocale = None
if BMConfigParser().has_option('bitmessagesettings', 'userlocale'):
userlocale = BMConfigParser().get('bitmessagesettings', 'userlocale')
"""Return the user's language choice"""
userlocale = BMConfigParser().safeGet(
'bitmessagesettings', 'userlocale', 'system')
return userlocale if userlocale and userlocale != 'system' else language
if userlocale in [None, '', 'system']:
return language
return userlocale
def getWindowsLocale(posixLocale):
"""
Get the Windows locale
Technically this converts the locale string from UNIX to Windows format,
because the two platforms use different locale names in their
libraries, e.g. "en_EN.UTF-8" to "english".
"""
if posixLocale in windowsLanguageMap:
return windowsLanguageMap[posixLocale]
if "." in posixLocale:

View File

@ -1,136 +0,0 @@
# pylint: disable=too-many-locals
"""
This program can be used to print out everything in your Inbox or Sent folders and also take things out of the trash.
Scroll down to the bottom to see the functions that you can uncomment. Save then run this file.
The functions which only read the database file seem to function just
fine even if you have Bitmessage running but you should definitely close
it before running the functions that make changes (like taking items out
of the trash).
"""
from __future__ import absolute_import
import sqlite3
from binascii import hexlify
from time import strftime, localtime
import paths
import queues
appdata = paths.lookupAppdataFolder()
conn = sqlite3.connect(appdata + 'messages.dat')
conn.text_factory = str
cur = conn.cursor()
def readInbox():
"""Print each row from inbox table"""
print 'Printing everything in inbox table:'
item = '''select * from inbox'''
parameters = ''
cur.execute(item, parameters)
output = cur.fetchall()
for row in output:
print row
def readSent():
"""Print each row from sent table"""
print 'Printing everything in Sent table:'
item = '''select * from sent where folder !='trash' '''
parameters = ''
cur.execute(item, parameters)
output = cur.fetchall()
for row in output:
(msgid, toaddress, toripe, fromaddress, subject, message, ackdata, lastactiontime,
sleeptill, status, retrynumber, folder, encodingtype, ttl) = row # pylint: disable=unused-variable
print(hexlify(msgid), toaddress, 'toripe:', hexlify(toripe), 'fromaddress:', fromaddress, 'ENCODING TYPE:',
encodingtype, 'SUBJECT:', repr(subject), 'MESSAGE:', repr(message), 'ACKDATA:', hexlify(ackdata),
lastactiontime, status, retrynumber, folder)
def readSubscriptions():
"""Print each row from subscriptions table"""
print 'Printing everything in subscriptions table:'
item = '''select * from subscriptions'''
parameters = ''
cur.execute(item, parameters)
output = cur.fetchall()
for row in output:
print row
def readPubkeys():
"""Print each row from pubkeys table"""
print 'Printing everything in pubkeys table:'
item = '''select address, transmitdata, time, usedpersonally from pubkeys'''
parameters = ''
cur.execute(item, parameters)
output = cur.fetchall()
for row in output:
address, transmitdata, time, usedpersonally = row
print(
'Address:', address, '\tTime first broadcast:', unicode(
strftime('%a, %d %b %Y %I:%M %p', localtime(time)), 'utf-8'),
'\tUsed by me personally:', usedpersonally, '\tFull pubkey message:', hexlify(transmitdata),
)
def readInventory():
"""Print each row from inventory table"""
print 'Printing everything in inventory table:'
item = '''select hash, objecttype, streamnumber, payload, expirestime from inventory'''
parameters = ''
cur.execute(item, parameters)
output = cur.fetchall()
for row in output:
obj_hash, objecttype, streamnumber, payload, expirestime = row
print 'Hash:', hexlify(obj_hash), objecttype, streamnumber, '\t', hexlify(payload), '\t', unicode(
strftime('%a, %d %b %Y %I:%M %p', localtime(expirestime)), 'utf-8')
def takeInboxMessagesOutOfTrash():
"""Update all inbox messages with folder=trash to have folder=inbox"""
item = '''update inbox set folder='inbox' where folder='trash' '''
parameters = ''
cur.execute(item, parameters)
_ = cur.fetchall()
conn.commit()
print 'done'
def takeSentMessagesOutOfTrash():
"""Update all sent messages with folder=trash to have folder=sent"""
item = '''update sent set folder='sent' where folder='trash' '''
parameters = ''
cur.execute(item, parameters)
_ = cur.fetchall()
conn.commit()
print 'done'
def markAllInboxMessagesAsUnread():
"""Update all messages in inbox to have read=0"""
item = '''update inbox set read='0' '''
parameters = ''
cur.execute(item, parameters)
_ = cur.fetchall()
conn.commit()
queues.UISignalQueue.put(('changedInboxUnread', None))
print 'done'
def vacuum():
"""Perform a vacuum on the database"""
item = '''VACUUM'''
parameters = ''
cur.execute(item, parameters)
_ = cur.fetchall()
conn.commit()
print 'done'
if __name__ == '__main__':
readInbox()

View File

@ -1,7 +1,4 @@
"""
src/messagetypes/__init__.py
============================
"""
import logging
from importlib import import_module
from os import path, listdir
@ -9,12 +6,14 @@ try:
from kivy.utils import platform
except:
platform = ''
from debug import logger
import messagetypes
import paths
logger = logging.getLogger('default')
class MsgBase(object): # pylint: disable=too-few-public-methods
class MsgBase(object): # pylint: disable=too-few-public-methods
"""Base class for message types"""
def __init__(self):
self.data = {"": lower(type(self).__name__)}

View File

@ -1,11 +1,10 @@
"""
src/messagetypes/message.py
===========================
"""
from debug import logger
import logging
from messagetypes import MsgBase
# pylint: disable=attribute-defined-outside-init
logger = logging.getLogger('default')
class Message(MsgBase):
"""Encapsulate a message"""

View File

@ -1,11 +1,10 @@
"""
src/messagetypes/vote.py
========================
"""
from debug import logger
import logging
from messagetypes import MsgBase
# pylint: disable=attribute-defined-outside-init
logger = logging.getLogger('default')
class Vote(MsgBase):
"""Module used to vote"""

View File

@ -1,31 +1,28 @@
# pylint: disable=too-many-branches,protected-access
"""
Copyright (C) 2013 by Daniel Kraft <d@domob.eu>
This file is part of the Bitmessage project.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
.. todo:: from debug import logger crashes PyBitmessage due to a circular dependency. The debug module will also
   override/disable logging.getLogger() loggers, so module-level logging functions are used instead
Namecoin queries
"""
# This file is part of the Bitmessage project.
from __future__ import absolute_import
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import base64
import httplib
@ -258,7 +255,7 @@ class namecoinConnection(object):
resp = self.con.getresponse()
result = resp.read()
if resp.status != 200:
raise Exception("Namecoin returned status %i: %s" % resp.status, resp.reason)
raise Exception("Namecoin returned status %i: %s" % (resp.status, resp.reason))
except:
logger.info("HTTP receive error")
except:
@ -288,7 +285,7 @@ class namecoinConnection(object):
return result
except socket.error as exc:
raise Exception("Socket error in RPC connection: %s" % str(exc))
raise Exception("Socket error in RPC connection: %s" % exc)
def lookupNamecoinFolder():

View File

@ -0,0 +1,17 @@
from .addrthread import AddrThread
from .announcethread import AnnounceThread
from .connectionpool import BMConnectionPool
from .dandelion import Dandelion
from .downloadthread import DownloadThread
from .invthread import InvThread
from .networkthread import BMNetworkThread
from .receivequeuethread import ReceiveQueueThread
from .threads import StoppableThread
from .uploadthread import UploadThread
__all__ = [
"BMConnectionPool", "Dandelion",
"AddrThread", "AnnounceThread", "BMNetworkThread", "DownloadThread",
"InvThread", "ReceiveQueueThread", "UploadThread", "StoppableThread"
]

View File

@ -1,9 +1,15 @@
# import queue as Queue
"""
Announce addresses as they are received from other hosts
"""
import queue as Queue
from helper_threading import StoppableThread
import state
from helper_random import randomshuffle
from network.assemble import assemble_addr
from network.connectionpool import BMConnectionPool
from queues import addrQueue
import state
from network.threads import StoppableThread
class AddrThread(StoppableThread):
@ -15,15 +21,26 @@ class AddrThread(StoppableThread):
while True:
try:
data = addrQueue.get(False)
chunk.append((data[0], data[1]))
if len(data) > 2:
source = BMConnectionPool().getConnectionByAddr(data[2])
chunk.append(data)
except Queue.Empty:
break
except KeyError:
continue
# finish
if chunk:
# Choose peers randomly
connections = BMConnectionPool().establishedConnections()
randomshuffle(connections)
for i in connections:
randomshuffle(chunk)
filtered = []
for stream, peer, seen, destination in chunk:
# peer's own address or address received from peer
if i.destination in (peer, destination):
continue
if stream not in i.streams:
continue
filtered.append((stream, peer, seen))
if filtered:
i.append_write_buf(assemble_addr(filtered))
addrQueue.iterate()
for i in range(len(chunk)):

View File

@ -3,15 +3,14 @@ src/network/advanceddispatcher.py
=================================
"""
# pylint: disable=attribute-defined-outside-init
# import pdb;pdb.set_trace()
import socket
import threading
import time
import network.asyncore_pollchoose as asyncore
import state
from debug import logger
from helper_threading import BusyError, nonBlocking
from network.threads import BusyError, nonBlocking
class ProcessingError(Exception):
@ -87,7 +86,8 @@ class AdvancedDispatcher(asyncore.dispatcher):
try:
cmd = getattr(self, "state_" + str(self.state))
except AttributeError:
logger.error("Unknown state %s", self.state, exc_info=True)
self.logger.error(
'Unknown state %s', self.state, exc_info=True)
raise UnknownStateError(self.state)
if not cmd():
break

View File

@ -2,22 +2,22 @@
src/network/announcethread.py
=================================
"""
import time
import state
from bmconfigparser import BMConfigParser
from debug import logger
from helper_threading import StoppableThread
from network.bmproto import BMProto
from network.assemble import assemble_addr
from network.connectionpool import BMConnectionPool
from network.udp import UDPSocket
import state
from .node import Peer
from network.threads import StoppableThread
class AnnounceThread(StoppableThread):
"""A thread to manage regular announcing of this node"""
def __init__(self):
super(AnnounceThread, self).__init__(name="Announcer")
logger.info("init announce thread")
name = "Announcer"
def run(self):
lastSelfAnnounced = 0
@ -38,6 +38,13 @@ class AnnounceThread(StoppableThread):
for stream in state.streamsInWhichIAmParticipating:
addr = (
stream,
state.Peer('127.0.0.1',int( BMConfigParser().safeGet("bitmessagesettings", "port"))),
int(time.time()))
connection.append_write_buf(BMProto.assembleAddr([addr]))
Peer(
'127.0.0.1',
BMConfigParser().safeGetInt('bitmessagesettings', 'port')),
time.time())
connection.append_write_buf(assemble_addr([addr]))

32
src/network/assemble.py Normal file
View File

@ -0,0 +1,32 @@
"""
Create bitmessage protocol command packets
"""
import struct
import addresses
from network.constants import MAX_ADDR_COUNT
from network.node import Peer
from protocol import CreatePacket, encodeHost
def assemble_addr(peerList):
"""Create address command"""
if isinstance(peerList, Peer):
peerList = [peerList]
if not peerList:
return b''
retval = b''
for i in range(0, len(peerList), MAX_ADDR_COUNT):
payload = addresses.encodeVarint(
len(peerList[i:i + MAX_ADDR_COUNT]))
for stream, peer, timestamp in peerList[i:i + MAX_ADDR_COUNT]:
payload += struct.pack(
'>Q', timestamp) # 64-bit time
payload += struct.pack('>I', stream)
payload += struct.pack(
'>q', 1) # service bit flags offered by this node
payload += encodeHost(peer.host)
payload += struct.pack('>H', peer.port) # remote port
retval += CreatePacket('addr', payload)
return retval
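Hypothetical usage: advertise a single peer on stream 1 with the current timestamp:

import time
packet = assemble_addr([(1, Peer('192.0.2.1', 8444), int(time.time()))])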

View File

@ -1,18 +1,18 @@
"""
src/network/bmobject.py
======================
BMObject and its exceptions.
"""
import logging
import time
import protocol
import state
from addresses import calculateInventoryHash
from debug import logger
from inventory import Inventory
from network.dandelion import Dandelion
logger = logging.getLogger('default')
class BMObjectInsufficientPOWError(Exception):
"""Exception indicating the object doesn't have sufficient proof of work."""

View File

@ -5,6 +5,7 @@ src/network/bmproto.py
# pylint: disable=attribute-defined-outside-init
import base64
import hashlib
import logging
import socket
import struct
import time
@ -16,17 +17,22 @@ import knownnodes
import protocol
import state
from bmconfigparser import BMConfigParser
from debug import logger
from inventory import Inventory
from network.advanceddispatcher import AdvancedDispatcher
from network.constants import (
ADDRESS_ALIVE,
MAX_MESSAGE_SIZE,
MAX_OBJECT_COUNT,
MAX_OBJECT_PAYLOAD_SIZE,
MAX_TIME_OFFSET)
from network.dandelion import Dandelion
from network.bmobject import (
BMObject, BMObjectInsufficientPOWError, BMObjectInvalidDataError,
BMObjectExpiredError, BMObjectUnwantedStreamError,
BMObjectInvalidError, BMObjectAlreadyHaveError)
from network.node import Node
from network.proxy import ProxyError
from network.objectracker import missingObjects, ObjectTracker
from .node import Node, Peer
from queues import objectProcessorQueue, portCheckerQueue, invQueue, addrQueue
from network.randomtrackingdict import RandomTrackingDict
@ -44,6 +50,8 @@ addr_version = 0
count = 0
logger = logging.getLogger('default')
class BMProtoError(ProxyError):
"""A Bitmessage Protocol Base Error"""
errorCodes = ("Protocol error")
@ -62,18 +70,6 @@ class BMProtoExcessiveDataError(BMProtoError):
class BMProto(AdvancedDispatcher, ObjectTracker):
"""A parser for the Bitmessage Protocol"""
# pylint: disable=too-many-instance-attributes, too-many-public-methods
# ~1.6 MB which is the maximum possible size of an inv message.
maxMessageSize = 1600100
# 2**18 = 256kB is the maximum size of an object payload
maxObjectPayloadSize = 2**18
# protocol specification says max 1000 addresses in one addr command
maxAddrCount = 1000
# protocol specification says max 50000 objects in one inv command
maxObjectCount = 50000
# address is online if online less than this many seconds ago
addressAlive = 10800
# maximum time offset
maxTimeOffset = 3600
timeOffsetWrongCount = 0
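The removed class attributes now live in network/constants.py; a sketch of that module as implied by the imports above and by assemble.py:

ADDRESS_ALIVE = 10800            # address is online if seen less than this many seconds ago
MAX_ADDR_COUNT = 1000            # protocol spec: max 1000 addresses in one addr command
MAX_MESSAGE_SIZE = 1600100       # ~1.6 MB, the maximum possible size of an inv message
MAX_OBJECT_COUNT = 50000         # protocol spec: max 50000 objects in one inv command
MAX_OBJECT_PAYLOAD_SIZE = 2**18  # 256 kB maximum object payload size
MAX_TIME_OFFSET = 3600           # maximum allowed time offset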
def __init__(self, address=None, sock=None): # pylint: disable=unused-argument, super-init-not-called
@ -82,6 +78,8 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
# packet/connection from a local IP
self.local = False
self.pendingUpload = RandomTrackingDict()
# canonical identifier of network group
self.network_group = None
def bm_proto_reset(self):
"""Reset the bitmessage object parser"""
@ -127,7 +125,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
self.close_reason = "Bad magic"
self.set_state("close")
return False
if self.payloadLength > BMProto.maxMessageSize:
if self.payloadLength > MAX_MESSAGE_SIZE:
self.invalid = True
self.set_state(
"bm_command",
@ -374,7 +372,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
def _command_inv(self, dandelion=False):
items = self.decode_payload_content("l32s")
if len(items) > BMProto.maxObjectCount:
if len(items) > MAX_OBJECT_COUNT:
logger.error(
'Too many items in %sinv message!', 'd' if dandelion else '')
raise BMProtoExcessiveDataError()
@ -409,7 +407,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
nonce, expiresTime, objectType, version, streamNumber,
self.payload, self.payloadOffset)
if len(self.payload) - self.payloadOffset > BMProto.maxObjectPayloadSize:
if len(self.payload) - self.payloadOffset > MAX_OBJECT_PAYLOAD_SIZE:
logger.info(
'The payload length of this object is too large (%d bytes).'
' Ignoring it.', len(self.payload) - self.payloadOffset)
@ -475,10 +473,10 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
continue
if (
decodedIP and time.time() - seenTime > 0 and
seenTime > time.time() - BMProto.addressAlive and
seenTime > time.time() - ADDRESS_ALIVE and
port > 0
):
peer = state.Peer(decodedIP, port)
peer = Peer(decodedIP, port)
try:
if knownnodes.knownNodes[stream][peer]["lastseen"] > seenTime:
continue
@ -494,12 +492,15 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
"rating": 0,
"self": False,
}
addrQueue.put((stream, peer, self.destination))
# since we don't track peers outside of knownnodes,
# only spread if in knownnodes to prevent flood
addrQueue.put((stream, peer, seenTime,
self.destination))
return True
def bm_command_portcheck(self):
"""Incoming port check request, queue it."""
portCheckerQueue.put(state.Peer(self.destination, self.peerNode.port))
portCheckerQueue.put(Peer(self.destination, self.peerNode.port))
return True
def bm_command_ping(self):
@ -545,7 +546,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
self.timeOffset = self.timestamp - int(time.time())
logger.debug('remoteProtocolVersion: %i', self.remoteProtocolVersion)
logger.debug('services: 0x%08X', self.services)
logger.debug('time offset: %i', self.timestamp - int(time.time()))
logger.debug('time offset: %i', self.timeOffset)
logger.debug('my external IP: %s', self.sockNode.host)
logger.debug(
'remote node incoming address: %s:%i',
@ -588,7 +589,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
'Closing connection to old protocol version %s, node: %s',
self.remoteProtocolVersion, self.destination)
return False
if self.timeOffset > BMProto.maxTimeOffset:
if self.timeOffset > MAX_TIME_OFFSET:
self.append_write_buf(protocol.assembleErrorMessage(
errorText="Your time is too far in the future compared to mine."
" Closing connection.", fatal=2))
@ -597,7 +598,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
" Closing connection to it.", self.destination, self.timeOffset)
BMProto.timeOffsetWrongCount += 1
return False
elif self.timeOffset < -BMProto.maxTimeOffset:
elif self.timeOffset < -MAX_TIME_OFFSET:
self.append_write_buf(protocol.assembleErrorMessage(
errorText="Your time is too far in the past compared to mine."
" Closing connection.", fatal=2))
@ -635,12 +636,14 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
# incoming from a peer we're connected to as outbound,
# or server full report the same error to counter deanonymisation
if (
state.Peer(self.destination.host, self.peerNode.port) in
connectionpool.BMConnectionPool().inboundConnections or
len(connectionpool.BMConnectionPool().inboundConnections) +
len(connectionpool.BMConnectionPool().outboundConnections) >
BMConfigParser().safeGetInt("bitmessagesettings", "maxtotalconnections") +
BMConfigParser().safeGetInt("bitmessagesettings", "maxbootstrapconnections")
Peer(self.destination.host, self.peerNode.port)
in connectionpool.BMConnectionPool().inboundConnections
or len(connectionpool.BMConnectionPool().inboundConnections)
+ len(connectionpool.BMConnectionPool().outboundConnections)
> BMConfigParser().safeGetInt(
'bitmessagesettings', 'maxtotalconnections')
+ BMConfigParser().safeGetInt(
'bitmessagesettings', 'maxbootstrapconnections')
):
self.append_write_buf(protocol.assembleErrorMessage(
errorText="Server full, please try again later.", fatal=2))
@ -660,36 +663,11 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
return True
@staticmethod
def assembleAddr(peerList):
"""Build up a packed address"""
if isinstance(peerList, state.Peer):
peerList = (peerList)
if not peerList:
return b''
retval = b''
for i in range(0, len(peerList), BMProto.maxAddrCount):
payload = addresses.encodeVarint(
len(peerList[i:i + BMProto.maxAddrCount]))
for address in peerList[i:i + BMProto.maxAddrCount]:
stream, peer, timestamp = address
payload += struct.pack(
'>Q', int(timestamp)) # 64-bit time
payload += struct.pack('>I', stream)
payload += struct.pack(
'>q', 1) # service bit flags offered by this node
payload += protocol.encodeHost(peer.host)
payload += struct.pack('>H', peer.port) # remote port
retval += protocol.CreatePacket('addr', payload)
return retval
@staticmethod
def stopDownloadingObject(hashId, forwardAnyway=False):
"""Stop downloading an object"""
for connection in (
connectionpool.BMConnectionPool().inboundConnections.values() +
connectionpool.BMConnectionPool().outboundConnections.values()
):
for connection in connectionpool.BMConnectionPool().connections():
try:
del connection.objectsNewToMe[hashId]
except KeyError:
@ -730,7 +708,7 @@ class BMStringParser(BMProto):
"""
def __init__(self):
super(BMStringParser, self).__init__()
self.destination = state.Peer('127.0.0.1', 8444)
self.destination = Peer('127.0.0.1', 8444)
self.payload = None
ObjectTracker.__init__(self)

View File

@ -1,13 +1,15 @@
# pylint: disable=too-many-branches
import logging
import random # nosec
import knownnodes
import protocol
import state
from bmconfigparser import BMConfigParser
from debug import logger
from queues import Queue, portCheckerQueue
logger = logging.getLogger('default')
def getDiscoveredPeer():
try:
@ -24,9 +26,8 @@ def getDiscoveredPeer():
def chooseConnection(stream):
haveOnion = BMConfigParser().safeGet(
"bitmessagesettings", "socksproxytype")[0:5] == 'SOCKS'
if state.trustedPeer:
return state.trustedPeer
onionOnly = BMConfigParser().safeGetBoolean(
"bitmessagesettings", "onionservicesonly")
try:
retval = portCheckerQueue.get(False)
portCheckerQueue.task_done()
@ -48,6 +49,9 @@ def chooseConnection(stream):
logger.warning('Error in {}'.format(peer))
rating = 0
if haveOnion:
# do not connect to raw IP addresses--keep all traffic within Tor overlay
if onionOnly and not peer.host.endswith('.onion'):
continue
# onion addresses have a higher priority when SOCKS
if peer.host.endswith('.onion') and rating > 0:
rating = 1

View File

@ -1,10 +1,11 @@
"""
src/network/connectionpool.py
==================================
`BMConnectionPool` class definition
"""
import errno
import logging
import re
import socket
import sys
import time
import network.asyncore_pollchoose as asyncore
@ -14,18 +15,35 @@ import protocol
import state
from bmconfigparser import BMConfigParser
from network.connectionchooser import chooseConnection
from debug import logger
from network.proxy import Proxy
from .node import Peer
from singleton import Singleton
from network.tcp import (
TCPServer, Socks5BMConnection, Socks4aBMConnection, TCPConnection,bootstrap)
from network.udp import UDPSocket
logger = logging.getLogger('default')
@Singleton
# pylint: disable=too-many-instance-attributes
class BMConnectionPool(object):
"""Pool of all existing connections"""
# pylint: disable=too-many-instance-attributes
trustedPeer = None
"""
If the trustedpeer option is specified in keys.dat then this will
contain a Peer which will be connected to instead of using the
addresses advertised by other peers.
The expected use case is where the user has a trusted server where
they run a Bitmessage daemon permanently. If they then run a second
instance of the client on a local machine periodically when they want
to check for messages, it will sync with the network a lot faster
without compromising security.
"""
def __init__(self):
asyncore.set_rates(
BMConfigParser().safeGetInt(
@ -38,9 +56,35 @@ class BMConnectionPool(object):
self.listeningSockets = {}
self.udpSockets = {}
self.streams = []
self.lastSpawned = 0
self.spawnWait = 2
self.bootstrapped = False
self._lastSpawned = 0
self._spawnWait = 2
self._bootstrapped = False
trustedPeer = BMConfigParser().safeGet(
'bitmessagesettings', 'trustedpeer')
try:
if trustedPeer:
host, port = trustedPeer.split(':')
self.trustedPeer = Peer(host, int(port))
except ValueError:
sys.exit(
'Bad trustedpeer config setting! It should be set as'
' trustedpeer=<hostname>:<portnumber>'
)
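A hypothetical keys.dat entry that enables this mode (host and port are examples):

[bitmessagesettings]
trustedpeer = 192.0.2.10:8444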
def connections(self):
"""
Shortcut for combined list of connections from
`inboundConnections` and `outboundConnections` dicts
"""
return (list(self.inboundConnections.values())
        + list(self.outboundConnections.values()))
def establishedConnections(self):
"""Shortcut for list of connections having fullyEstablished == True"""
return [
x for x in self.connections() if x.fullyEstablished]
def connectToStream(self, streamNumber):
"""Connect to a bitmessage stream"""
@ -71,14 +115,17 @@ class BMConnectionPool(object):
def isAlreadyConnected(self, nodeid):
"""Check if we're already connected to this peer"""
# for i in (
# self.inboundConnections.values() +
# self.outboundConnections.values()
# ):
for i in (
[inboundConnections for inboundConnections in self.inboundConnections.values()] +
[outboundConnections for outboundConnections in self.outboundConnections.values()]
):
for i in self.connections():
try:
if nodeid == i.nodeid:
return True
@ -104,7 +151,7 @@ class BMConnectionPool(object):
if isinstance(connection, UDPSocket):
del self.udpSockets[connection.listening.host]
elif isinstance(connection, TCPServer):
del self.listeningSockets[state.Peer(
del self.listeningSockets[Peer(
connection.destination.host, connection.destination.port)]
elif connection.isOutbound:
try:
@ -130,10 +177,11 @@ class BMConnectionPool(object):
"bitmessagesettings", "onionbindip")
else:
host = '127.0.0.1'
if (BMConfigParser().safeGetBoolean(
"bitmessagesettings", "sockslisten") or
BMConfigParser().safeGet(
"bitmessagesettings", "socksproxytype") == "none"):
if (
BMConfigParser().safeGetBoolean("bitmessagesettings", "sockslisten")
or BMConfigParser().safeGet("bitmessagesettings", "socksproxytype")
== "none"
):
# python doesn't like bind + INADDR_ANY?
# host = socket.INADDR_ANY
host = BMConfigParser().get("network", "bind")
@ -194,6 +242,7 @@ class BMConnectionPool(object):
def loop(self): # pylint: disable=too-many-branches,too-many-statements
"""Main Connectionpool's loop"""
# pylint: disable=too-many-locals
# defaults to empty loop if outbound connections are maxed
spawnConnections = False
acceptConnections = True
@ -207,11 +256,13 @@ class BMConnectionPool(object):
'bitmessagesettings', 'socksproxytype', '')
onionsocksproxytype = BMConfigParser().safeGet(
'bitmessagesettings', 'onionsocksproxytype', '')
if (socksproxytype[:5] == 'SOCKS' and
not BMConfigParser().safeGetBoolean(
'bitmessagesettings', 'sockslisten') and
'.onion' not in BMConfigParser().safeGet(
'bitmessagesettings', 'onionhostname', '')):
if (
socksproxytype[:5] == 'SOCKS'
and not BMConfigParser().safeGetBoolean(
'bitmessagesettings', 'sockslisten')
and '.onion' not in BMConfigParser().safeGet(
'bitmessagesettings', 'onionhostname', '')
):
acceptConnections = False
# pylint: disable=too-many-nested-blocks
@ -219,9 +270,8 @@ class BMConnectionPool(object):
if not knownnodes.knownNodesActual:
self.startBootstrappers()
knownnodes.knownNodesActual = True
if not self.bootstrapped:
self.bootstrapped = True
if not self._bootstrapped:
self._bootstrapped = True
Proxy.proxy = (
BMConfigParser().safeGet(
'bitmessagesettings', 'sockshostname'),
@ -250,7 +300,7 @@ class BMConnectionPool(object):
for i in range(
state.maximumNumberOfHalfOpenConnections - pending):
try:
chosen = chooseConnection(
chosen = self.trustedPeer or chooseConnection(
helper_random.randomchoice(self.streams))
except ValueError:
continue
@ -261,10 +311,22 @@ class BMConnectionPool(object):
# don't connect to self
if chosen in state.ownAddresses:
continue
# don't connect to hosts from the same
# network group, a defense against Sybil attacks
host_network_group = protocol.network_group(
chosen.host)
same_group = False
for j in self.outboundConnections.values():
if host_network_group == j.network_group:
same_group = True
if chosen.host == j.destination.host:
knownnodes.decreaseRating(chosen)
break
if same_group:
continue
try:
if (chosen.host.endswith(".onion") and
Proxy.onion_proxy is not None):
if chosen.host.endswith(".onion") and Proxy.onion_proxy:
if onionsocksproxytype == "SOCKS5":
self.addConnection(Socks5BMConnection(chosen))
elif onionsocksproxytype == "SOCKS4a":
@ -278,23 +340,24 @@ class BMConnectionPool(object):
except socket.error as e:
if e.errno == errno.ENETUNREACH:
continue
self.lastSpawned = time.time()
# # for i in (
# # list(self.inboundConnections.values()) +
# # list(self.outboundConnections.values())
# # ):
# for i in (
# [inboundConnections for inboundConnections in self.inboundConnections.values()] +
# [inboundConnections for inboundConnections in self.outboundConnections.values()]
# ):
# print('++++++++++++++++++++++++++++++++++++++++++')
# print('self.inboundConnections.values()-{}'.format(self.inboundConnections.values()))
# print('self.outboundConnections.values() -{}'.format(self.outboundConnections.values()))
# print('+++++++++++++++++++++++++++++++++++++++++++')
self._lastSpawned = time.time()
else:
# for i in (
# list(self.inboundConnections.values()) +
# list(self.outboundConnections.values())
# ):
for i in (
[inboundConnections for inboundConnections in self.inboundConnections.values()] +
[inboundConnections for inboundConnections in self.outboundConnections.values()]
):
for i in self.connections():
# FIXME: rating will be increased after next connection
i.handle_close()
@ -304,8 +367,8 @@ class BMConnectionPool(object):
self.startListening()
else:
for bind in re.sub(
'[^\w.]+', ' ', # pylint: disable=anomalous-backslash-in-string
BMConfigParser().safeGet('network', 'bind')
r'[^\w.]+', ' ',
BMConfigParser().safeGet('network', 'bind')
).split():
self.startListening(bind)
logger.info('Listening for incoming connections.')
@ -315,8 +378,8 @@ class BMConnectionPool(object):
self.startUDPSocket()
else:
for bind in re.sub(
'[^\w.]+', ' ', # pylint: disable=anomalous-backslash-in-string
BMConfigParser().safeGet('network', 'bind')
r'[^\w.]+', ' ',
BMConfigParser().safeGet('network', 'bind')
).split():
self.startUDPSocket(bind)
self.startUDPSocket(False)
@ -333,20 +396,23 @@ class BMConnectionPool(object):
i.accepting = i.connecting = i.connected = False
logger.info('Stopped udp sockets.')
loopTime = float(self.spawnWait)
if self.lastSpawned < time.time() - self.spawnWait:
loopTime = float(self._spawnWait)
if self._lastSpawned < time.time() - self._spawnWait:
loopTime = 2.0
asyncore.loop(timeout=loopTime, count=1000)
reaper = []
# # for i in (
# # list(self.inboundConnections.values()) +
# # list(self.outboundConnections.values())
# # ):
# for i in (
# list(self.inboundConnections.values()) +
# list(self.outboundConnections.values())
# [inboundConnections for inboundConnections in self.inboundConnections.values()] +
# [outboundConnections for outboundConnections in self.outboundConnections.values()]
# ):
for i in (
[inboundConnections for inboundConnections in self.inboundConnections.values()] +
[outboundConnections for outboundConnections in self.outboundConnections.values()]
):
for i in self.connections():
minTx = time.time() - 20
if i.fullyEstablished:
minTx -= 300 - 20
@ -364,10 +430,14 @@ class BMConnectionPool(object):
# list(self.udpSockets.values())
# ):
for i in (
[inboundConnections for inboundConnections in self.inboundConnections.values()] +
[outboundConnections for outboundConnections in self.outboundConnections.values()] +
[listeningSockets for listeningSockets in self.listeningSockets.values()] +
[udpSockets for udpSockets in self.udpSockets.values()]
# [inboundConnections for inboundConnections in self.inboundConnections.values()] +
# [outboundConnections for outboundConnections in self.outboundConnections.values()] +
# [listeningSockets for listeningSockets in self.listeningSockets.values()] +
# [udpSockets for udpSockets in self.udpSockets.values()]
self.connections()
+ [listeningSockets for listeningSockets in self.listeningSockets.values()] + [udpSockets for udpSockets in self.udpSockets.values()]
):
if not (i.accepting or i.connecting or i.connected):
reaper.append(i)

11
src/network/constants.py Normal file
View File

@ -0,0 +1,11 @@
"""
Network protocol constants
"""
ADDRESS_ALIVE = 10800 #: address is online if online less than this many seconds ago
MAX_ADDR_COUNT = 1000 #: protocol specification says max 1000 addresses in one addr command
MAX_MESSAGE_SIZE = 1600100 #: ~1.6 MB which is the maximum possible size of an inv message.
MAX_OBJECT_PAYLOAD_SIZE = 2**18 #: 2**18 = 256kB is the maximum size of an object payload
MAX_OBJECT_COUNT = 50000 #: protocol specification says max 50000 objects in one inv command
MAX_TIME_OFFSET = 3600 #: maximum allowed time offset (in seconds) between a peer's clock and ours
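For orientation, a minimal sketch (not part of this commit) of how such a limit is typically enforced when parsing incoming data; check_object_payload is a hypothetical helper:

MAX_OBJECT_PAYLOAD_SIZE = 2**18  # mirrored from src/network/constants.py

def check_object_payload(payload):
    """Reject object payloads over the 256 kB protocol limit."""
    if len(payload) > MAX_OBJECT_PAYLOAD_SIZE:
        raise ValueError('object payload too large: %i bytes' % len(payload))

check_object_payload(b'\x00' * 1024)  # passes; larger payloads raise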

View File

@ -2,6 +2,7 @@
src/network/dandelion.py
========================
"""
import logging
from collections import namedtuple
from random import choice, sample, expovariate
from threading import RLock
@ -9,7 +10,6 @@ from time import time
from network import connectionpool
import state
from debug import logging
from queues import invQueue
from singleton import Singleton
@ -24,6 +24,8 @@ MAX_STEMS = 2
Stem = namedtuple('Stem', ['child', 'stream', 'timeout'])
logger = logging.getLogger('default')
@Singleton
class Dandelion(): # pylint: disable=old-style-class
@ -72,9 +74,10 @@ class Dandelion(): # pylint: disable=old-style-class
def removeHash(self, hashId, reason="no reason specified"):
"""Switch inventory vector from stem to fluff mode"""
logging.debug(
"%s entering fluff mode due to %s.",
''.join('%02x' % ord(i) for i in hashId), reason)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(
'%s entering fluff mode due to %s.',
''.join('%02x' % ord(i) for i in hashId), reason)
with self.lock:
try:
del self.hashMap[hashId]
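The isEnabledFor guard above skips the hex-encoding of hashId when DEBUG logging is off; a standalone sketch of the same pattern (the hash value and reason are hypothetical, string iteration as in this Python 2 codebase):

import logging

logger = logging.getLogger('default')
hashId = '\x01\xab\xff'  # hypothetical inventory hash (Python 2 str)

if logger.isEnabledFor(logging.DEBUG):
    # the generator below only runs when DEBUG output is actually wanted
    logger.debug(
        '%s entering fluff mode due to %s.',
        ''.join('%02x' % ord(i) for i in hashId), 'timeout')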

View File

@ -1,18 +1,17 @@
"""
src/network/downloadthread.py
=============================
`DownloadThread` class definition
"""
import time
import addresses
import helper_random
import protocol
from network.dandelion import Dandelion
from debug import logger
from helper_threading import StoppableThread
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from network.objectracker import missingObjects
from network.threads import StoppableThread
class DownloadThread(StoppableThread):
@ -25,12 +24,11 @@ class DownloadThread(StoppableThread):
def __init__(self):
super(DownloadThread, self).__init__(name="Downloader")
logger.info("init download thread")
self.lastCleaned = time.time()
def cleanPending(self):
"""Expire pending downloads eventually"""
deadline = time.time() - DownloadThread.requestExpires
deadline = time.time() - self.requestExpires
try:
toDelete = [k for k, v in iter(missingObjects.items()) if v < deadline]
except RuntimeError:
@ -44,15 +42,17 @@ class DownloadThread(StoppableThread):
while not self._stopped:
requested = 0
# Choose downloading peers randomly
connections = [
x for x in
list(BMConnectionPool().inboundConnections.values()) + list(BMConnectionPool().outboundConnections.values())
if x.fullyEstablished]
# connections = [
# x for x in
# list(BMConnectionPool().inboundConnections.values()) + list(BMConnectionPool().outboundConnections.values())
# if x.fullyEstablished]
connections = BMConnectionPool().establishedConnections()
helper_random.randomshuffle(connections)
try:
requestChunk = max(int(min(DownloadThread.maxRequestChunk, len(missingObjects)) / len(connections)), 1)
except ZeroDivisionError:
requestChunk = 1
requestChunk = max(int(
min(self.maxRequestChunk, len(missingObjects))
/ len(connections)), 1) if connections else 1
for i in connections:
now = time.time()
# avoid unnecessary delay
@ -78,9 +78,11 @@ class DownloadThread(StoppableThread):
continue
payload[0:0] = addresses.encodeVarint(chunkCount)
i.append_write_buf(protocol.CreatePacket('getdata', payload))
logger.debug("%s:%i Requesting %i objects", i.destination.host, i.destination.port, chunkCount)
self.logger.debug(
'%s:%i Requesting %i objects',
i.destination.host, i.destination.port, chunkCount)
requested += chunkCount
if time.time() >= self.lastCleaned + DownloadThread.cleanInterval:
if time.time() >= self.lastCleaned + self.cleanInterval:
self.cleanPending()
if not requested:
self.stop.wait(1)

View File

@ -1,7 +1,3 @@
"""
src/network/http-old.py
=======================
"""
import asyncore
import socket
import time

View File

@ -9,10 +9,10 @@ from time import time
import addresses
import protocol
import state
from helper_threading import StoppableThread
from network.connectionpool import BMConnectionPool
from network.dandelion import Dandelion
from queues import invQueue
from network.threads import StoppableThread
def handleExpiredDandelion(expired):
@ -20,9 +20,7 @@ def handleExpiredDandelion(expired):
the object"""
if not expired:
return
for i in \
BMConnectionPool().inboundConnections.values() + \
BMConnectionPool().outboundConnections.values():
for i in BMConnectionPool().connections():
if not i.fullyEstablished:
continue
for x in expired:
@ -44,9 +42,7 @@ class InvThread(StoppableThread):
def handleLocallyGenerated(stream, hashId):
"""Locally generated inventory items require special handling"""
Dandelion().addHash(hashId, stream=stream)
for connection in \
BMConnectionPool().inboundConnections.values() + \
BMConnectionPool().outboundConnections.values():
for connection in BMConnectionPool().connections():
if state.dandelion and connection != Dandelion().objectChildStem(hashId):
continue
connection.objectsNewToThem[hashId] = time()
@ -67,8 +63,7 @@ class InvThread(StoppableThread):
break
if chunk:
for connection in BMConnectionPool().inboundConnections.values() + \
BMConnectionPool().outboundConnections.values():
for connection in BMConnectionPool().connections():
fluffs = []
stems = []
for inv in chunk:
@ -96,13 +91,13 @@ class InvThread(StoppableThread):
if fluffs:
random.shuffle(fluffs)
connection.append_write_buf(protocol.CreatePacket(
'inv', addresses.encodeVarint(len(fluffs)) +
"".join(fluffs)))
'inv',
addresses.encodeVarint(len(fluffs)) + ''.join(fluffs)))
if stems:
random.shuffle(stems)
connection.append_write_buf(protocol.CreatePacket(
'dinv', addresses.encodeVarint(len(stems)) +
"".join(stems)))
'dinv',
addresses.encodeVarint(len(stems)) + ''.join(stems)))
invQueue.iterate()
for i in range(len(chunk)):

View File

@ -1,20 +1,13 @@
"""
src/network/networkthread.py
============================
"""
import network.asyncore_pollchoose as asyncore
import state
from debug import logger
from helper_threading import StoppableThread
from network.connectionpool import BMConnectionPool
from queues import excQueue
from network.threads import StoppableThread
class BMNetworkThread(StoppableThread):
"""A thread to handle network concerns"""
def __init__(self):
super(BMNetworkThread, self).__init__(name="Asyncore")
logger.info("init asyncore thread")
name = "Asyncore"
def run(self):
try:

View File

@ -1,7 +1,7 @@
"""
src/network/node.py
===================
Named tuples representing the network peers
"""
import collections
Peer = collections.namedtuple('Peer', ['host', 'port'])
Node = collections.namedtuple('Node', ['services', 'host', 'port'])
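A quick illustration (not from the commit) of how these tuples behave; the pool dict stands in for BMConnectionPool's connection maps:

import collections

Peer = collections.namedtuple('Peer', ['host', 'port'])

peer = Peer('127.0.0.1', 8444)
assert peer.host == '127.0.0.1' and peer[1] == 8444
# namedtuples hash and compare by value, so Peer works as a dict key
pool = {peer: 'connection object'}
assert Peer('127.0.0.1', 8444) in pool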

View File

@ -95,8 +95,7 @@ class ObjectTracker(object):
def handleReceivedObject(self, streamNumber, hashid):
"""Handling received object"""
for i in network.connectionpool.BMConnectionPool().inboundConnections.values(
) + network.connectionpool.BMConnectionPool().outboundConnections.values():
for i in network.connectionpool.BMConnectionPool().connections():
if not i.fullyEstablished:
continue
try:

View File

@ -3,14 +3,17 @@ src/network/proxy.py
====================
"""
# pylint: disable=protected-access
import logging
import socket
import time
import network.asyncore_pollchoose as asyncore
import state
from network.advanceddispatcher import AdvancedDispatcher
from bmconfigparser import BMConfigParser
from debug import logger
from .node import Peer
logger = logging.getLogger('default')
class ProxyError(Exception):
@ -88,9 +91,10 @@ class Proxy(AdvancedDispatcher):
def onion_proxy(self, address):
"""Set onion proxy address"""
if address is not None and (
not isinstance(address, tuple) or len(address) < 2 or
not isinstance(address[0], str) or
not isinstance(address[1], int)):
not isinstance(address, tuple) or len(address) < 2
or not isinstance(address[0], str)
or not isinstance(address[1], int)
):
raise ValueError
self.__class__._onion_proxy = address
@ -105,7 +109,7 @@ class Proxy(AdvancedDispatcher):
self.__class__._onion_auth = authTuple
def __init__(self, address):
if not isinstance(address, state.Peer):
if not isinstance(address, Peer):
raise ValueError
AdvancedDispatcher.__init__(self)
self.destination = address
@ -144,5 +148,6 @@ class Proxy(AdvancedDispatcher):
def state_proxy_handshake_done(self):
"""Handshake is complete at this point"""
self.connectedAt = time.time() # pylint: disable=attribute-defined-outside-init
# pylint: disable=attribute-defined-outside-init
self.connectedAt = time.time()
return False

View File

@ -2,18 +2,16 @@ import errno
import queue as Queue
import socket
from debug import logger
from helper_threading import StoppableThread
import state
from network.connectionpool import BMConnectionPool
from network.advanceddispatcher import UnknownStateError
from queues import receiveDataQueue
import state
from network.threads import StoppableThread
class ReceiveQueueThread(StoppableThread):
def __init__(self, num=0):
super(ReceiveQueueThread, self).__init__(name="ReceiveQueue_%i" % num)
logger.info("init receive queue thread %i", num)
def run(self):
while not self._stopped and state.shutdown == 0:
@ -26,27 +24,26 @@ class ReceiveQueueThread(StoppableThread):
break
# cycle as long as there is data
# methods should return False if there isn't enough data, or the connection is to be aborted
# state_* methods should return False if there isn't enough data,
# methods should return False if there isn't enough data,
# or the connection is to be aborted
# state_* methods should return False if there isn't
# enough data, or the connection is to be aborted
try:
connection = BMConnectionPool().getConnectionByAddr(dest)
# KeyError = connection object not found
except KeyError:
except KeyError: # connection object not found
receiveDataQueue.task_done()
continue
try:
connection.process()
# UnknownStateError = state isn't implemented
except (UnknownStateError):
except UnknownStateError: # state isn't implemented
pass
except socket.error as err:
if err.errno == errno.EBADF:
connection.set_state("close", 0)
else:
logger.error("Socket error: %s", str(err))
self.logger.error('Socket error: %s', err)
except:
logger.error("Error processing", exc_info=True)
self.logger.error('Error processing', exc_info=True)
receiveDataQueue.task_done()

View File

@ -8,9 +8,11 @@ src/network/socks5.py
import socket
import struct
import state
from network.proxy import GeneralProxyError, Proxy, ProxyError
from .node import Peer
class Socks5AuthError(ProxyError):
"""Rised when the socks5 protocol encounters an authentication error"""
@ -200,7 +202,7 @@ class Socks5Resolver(Socks5):
def __init__(self, host):
self.host = host
self.port = 8444
Socks5.__init__(self, address=state.Peer(self.host, self.port))
Socks5.__init__(self, address=Peer(self.host, self.port))
def state_auth_done(self):
"""Perform resolving"""

View File

@ -19,21 +19,22 @@ currentSentSpeed = 0
def connectedHostsList():
"""List of all the connected hosts"""
retval = []
# for i in list(BMConnectionPool().inboundConnections.values()) + \
# list(BMConnectionPool().outboundConnections.values()):
# retval = []
# # for i in list(BMConnectionPool().inboundConnections.values()) + \
# # list(BMConnectionPool().outboundConnections.values()):
outBoundConnections = [outConnection for outConnection in BMConnectionPool().outboundConnections.values()]
inBoundConnections = [inConnection for inConnection in BMConnectionPool().inboundConnections.values()]
for i in outBoundConnections+inBoundConnections:
if not i.fullyEstablished:
continue
try:
retval.append(i)
except AttributeError:
pass
# outBoundConnections = [outConnection for outConnection in BMConnectionPool().outboundConnections.values()]
# inBoundConnections = [inConnection for inConnection in BMConnectionPool().inboundConnections.values()]
# for i in outBoundConnections+inBoundConnections:
# if not i.fullyEstablished:
# continue
# try:
# retval.append(i)
# except AttributeError:
# pass
return retval
# return retval
return BMConnectionPool().establishedConnections()
def sentBytes():
@ -76,12 +77,6 @@ def downloadSpeed():
def pendingDownload():
"""Getting pending downloads"""
return len(missingObjects)
# tmp = {}
# for connection in BMConnectionPool().inboundConnections.values() + \
# BMConnectionPool().outboundConnections.values():
# for k in connection.objectsNewToMe.keys():
# tmp[k] = True
# return len(tmp)
def pendingUpload():

View File

@ -4,6 +4,7 @@ src/network/tcp.py
==================
"""
import logging
import math
import random
import socket
@ -18,18 +19,22 @@ import protocol
import shared
import state
from bmconfigparser import BMConfigParser
from debug import logger
from helper_random import randomBytes
from inventory import Inventory
from network.advanceddispatcher import AdvancedDispatcher
from network.assemble import assemble_addr
from network.bmproto import BMProto
from network.constants import MAX_OBJECT_COUNT
from network.dandelion import Dandelion
from network.objectracker import ObjectTracker
from network.socks4a import Socks4aConnection
from network.socks5 import Socks5Connection
from network.tls import TLSDispatcher
from .node import Peer
from queues import UISignalQueue, invQueue, receiveDataQueue
logger = logging.getLogger('default')
class TCPConnection(BMProto, TLSDispatcher):
# pylint: disable=too-many-instance-attributes
@ -47,7 +52,7 @@ class TCPConnection(BMProto, TLSDispatcher):
self.connectedAt = 0
self.skipUntil = 0
if address is None and sock is not None:
self.destination = state.Peer(*sock.getpeername())
self.destination = Peer(*sock.getpeername())
self.isOutbound = False
TLSDispatcher.__init__(self, sock, server_side=True)
self.connectedAt = time.time()
@ -81,8 +86,11 @@ class TCPConnection(BMProto, TLSDispatcher):
)
except socket.error:
pass # it's probably a hostname
self.network_group = protocol.network_group(self.destination.host)
ObjectTracker.__init__(self) # pylint: disable=non-parent-init-called
self.bm_proto_reset()
# print('--------------tcp------------------')
from network import stats
self.set_state("bm_header", expectBytes=protocol.Header.size)
def antiIntersectionDelay(self, initial=False):
@ -139,7 +147,7 @@ class TCPConnection(BMProto, TLSDispatcher):
))
self.antiIntersectionDelay(True)
self.fullyEstablished = True
print('inside the set_connection_fully_established in tcp file')
# print('inside the set_connection_fully_established in tcp file')
if self.isOutbound:
knownnodes.increaseRating(self.destination)
Dandelion().maybeAddStem(self)
@ -158,8 +166,8 @@ class TCPConnection(BMProto, TLSDispatcher):
addrs = {}
for stream in self.streams:
with knownnodes.knownNodesLock:
for n, s in enumerate((stream, stream * 2, stream * 2 + 1)):
nodes = knownnodes.knownNodes.get(s)
for nitro, sitro in enumerate((stream, stream * 2, stream * 2 + 1)):
nodes = knownnodes.knownNodes.get(sitro)
if not nodes:
continue
# only if more recent than 3 hours
@ -173,13 +181,13 @@ class TCPConnection(BMProto, TLSDispatcher):
# sent 250 only if the remote isn't interested in it
elemCount = min(
len(filtered),
maxAddrCount / 2 if n else maxAddrCount)
addrs[s] = helper_random.randomsample(filtered, elemCount)
maxAddrCount / 2 if nitro else maxAddrCount)
addrs[sitro] = helper_random.randomsample(filtered, elemCount)
for substream in addrs:
for peer, params in addrs[substream]:
templist.append((substream, peer, params["lastseen"]))
if templist:
self.append_write_buf(BMProto.assembleAddr(templist))
self.append_write_buf(assemble_addr(templist))
def sendBigInv(self):
"""
@ -218,7 +226,7 @@ class TCPConnection(BMProto, TLSDispatcher):
# Remove -1 below when sufficient time has passed for users to
# upgrade to versions of PyBitmessage that accept inv with 50,000
# items
if objectCount >= BMProto.maxObjectCount - 1:
if objectCount >= MAX_OBJECT_COUNT - 1:
sendChunk()
payload = b''
objectCount = 0
@ -332,7 +340,7 @@ def bootstrap(connection_class):
_connection_base = connection_class
def __init__(self, host, port):
self._connection_base.__init__(self, state.Peer(host, port))
self._connection_base.__init__(self, Peer(host, port))
self.close_reason = self._succeed = False
def bm_command_addr(self):
@ -370,7 +378,7 @@ class TCPServer(AdvancedDispatcher):
print('inside the attempt of line 371')
try:
if attempt > 0:
print('inside the if condition attempt in 373')
logger.warning('Failed to bind on port %s', port)
port = random.randint(32767, 65535)
self.bind((host, port))
except socket.error as e:
@ -378,12 +386,12 @@ class TCPServer(AdvancedDispatcher):
continue
else:
if attempt > 0:
print('inside the if condition attempt in 381')
logger.warning('Setting port to %s', port)
BMConfigParser().set(
'bitmessagesettings', 'port', str(port))
BMConfigParser().save()
break
self.destination = state.Peer(host, port)
self.destination = Peer(host, port)
self.bound = True
self.listen(5)
@ -401,7 +409,7 @@ class TCPServer(AdvancedDispatcher):
except (TypeError, IndexError):
return
state.ownAddresses[state.Peer(*sock.getsockname())] = True
state.ownAddresses[Peer(*sock.getsockname())] = True
if (
len(connectionpool.BMConnectionPool().inboundConnections) +
len(connectionpool.BMConnectionPool().outboundConnections) >

49
src/network/threads.py Normal file
View File

@ -0,0 +1,49 @@
"""Threading primitives for the network package"""
import logging
import random
import threading
from contextlib import contextmanager
class StoppableThread(threading.Thread):
"""Base class for application threads with stopThread method"""
name = None
logger = logging.getLogger('default')
def __init__(self, name=None):
if name:
self.name = name
super(StoppableThread, self).__init__(name=self.name)
self.stop = threading.Event()
self._stopped = False
random.seed()
self.logger.info('Init thread %s', self.name)
def stopThread(self):
"""Stop the thread"""
self._stopped = True
self.stop.set()
class BusyError(threading.ThreadError):
"""
Thread error raised when another connection holds the lock
we are trying to acquire.
"""
pass
@contextmanager
def nonBlocking(lock):
"""
A context manager which acquires given lock non-blocking
and raises BusyError if failed to acquire.
"""
locked = lock.acquire(False)
if not locked:
raise BusyError
try:
yield
finally:
lock.release()
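A short usage sketch of nonBlocking; the copies below restate the primitives from this file so the snippet runs standalone:

import threading
from contextlib import contextmanager

class BusyError(threading.ThreadError):
    """Raised when the lock is already held elsewhere"""

@contextmanager
def nonBlocking(lock):
    if not lock.acquire(False):
        raise BusyError
    try:
        yield
    finally:
        lock.release()

lock = threading.RLock()
try:
    with nonBlocking(lock):
        pass  # critical section; the lock is released on exit
except BusyError:
    pass  # another thread held the lock; skip instead of blocking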

View File

@ -2,17 +2,18 @@
SSL/TLS negotiation.
"""
import logging
import os
import socket
import ssl
import sys
from debug import logger
from network.advanceddispatcher import AdvancedDispatcher
import network.asyncore_pollchoose as asyncore
from queues import receiveDataQueue
import paths
logger = logging.getLogger('default')
_DISCONNECTED_SSL = frozenset((ssl.SSL_ERROR_EOF,))
@ -43,12 +44,13 @@ else:
sslProtocolCiphers = "AECDH-AES256-SHA"
class TLSDispatcher(AdvancedDispatcher): # pylint: disable=too-many-instance-attributes
class TLSDispatcher(AdvancedDispatcher):
"""TLS functionality for classes derived from AdvancedDispatcher"""
# pylint: disable=too-many-arguments, super-init-not-called, unused-argument
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-arguments,super-init-not-called,unused-argument
def __init__(
self, address=None, sock=None, certfile=None, keyfile=None,
server_side=False, ciphers=sslProtocolCiphers
self, address=None, sock=None, certfile=None, keyfile=None,
server_side=False, ciphers=sslProtocolCiphers
):
self.want_read = self.want_write = True
if certfile is None:
@ -103,8 +105,10 @@ class TLSDispatcher(AdvancedDispatcher): # pylint: disable=too-many-instanc
@staticmethod
def state_tls_handshake():
# print("tls's state_tls_handshake method in line 107")
"""Do nothing while TLS handshake is pending, as during this phase we need to react to callbacks instead"""
"""
Do nothing while TLS handshake is pending, as during this phase
we need to react to callbacks instead
"""
return False
def writable(self):
@ -130,10 +134,11 @@ class TLSDispatcher(AdvancedDispatcher): # pylint: disable=too-many-instanc
except AttributeError:
return AdvancedDispatcher.readable(self)
def handle_read(self): # pylint: disable=inconsistent-return-statements
def handle_read(self): # pylint: disable=inconsistent-return-statements
"""
Handle reads for sockets during TLS handshake. Requires special treatment as during the handshake, buffers must
remain empty and normal reads must be ignored
Handle reads for sockets during TLS handshake. Requires special
treatment as during the handshake, buffers must remain empty
and normal reads must be ignored.
"""
try:
# wait for write buffer flush
@ -155,10 +160,11 @@ class TLSDispatcher(AdvancedDispatcher): # pylint: disable=too-many-instanc
self.handle_close()
return
def handle_write(self): # pylint: disable=inconsistent-return-statements
def handle_write(self): # pylint: disable=inconsistent-return-statements
"""
Handle writes for sockets during TLS handshake. Requires special treatment as during the handshake, buffers
must remain empty and normal writes must be ignored
Handle writes for sockets during TLS handshake. Requires special
treatment as during the handshake, buffers must remain empty
and normal writes must be ignored.
"""
try:
# wait for write buffer flush
@ -202,18 +208,23 @@ class TLSDispatcher(AdvancedDispatcher): # pylint: disable=too-many-instanc
if not (self.want_write or self.want_read):
raise
except socket.error as err:
if err.errno in asyncore._DISCONNECTED: # pylint: disable=protected-access
# pylint: disable=protected-access
if err.errno in asyncore._DISCONNECTED:
self.handle_close()
else:
raise
else:
if sys.version_info >= (2, 7, 9):
self.tlsVersion = self.sslSocket.version()
logger.debug("%s:%i: TLS handshake success, TLS protocol version: %s",
self.destination.host, self.destination.port, self.sslSocket.version())
logger.debug(
'%s:%i: TLS handshake success, TLS protocol version: %s',
self.destination.host, self.destination.port,
self.tlsVersion)
else:
self.tlsVersion = "TLSv1"
logger.debug("%s:%i: TLS handshake success", self.destination.host, self.destination.port)
logger.debug(
'%s:%i: TLS handshake success',
self.destination.host, self.destination.port)
# The handshake has completed, so remove this channel and...
self.del_channel()
self.set_socket(self.sslSocket)

View File

@ -2,24 +2,28 @@
src/network/udp.py
==================
"""
import logging
import time
import socket
import state
import protocol
from network.bmproto import BMProto
from debug import logger
from network.objectracker import ObjectTracker
from .node import Peer
from queues import receiveDataQueue
logger = logging.getLogger('default')
class UDPSocket(BMProto): # pylint: disable=too-many-instance-attributes
class UDPSocket(BMProto): # pylint: disable=too-many-instance-attributes
"""Bitmessage protocol over UDP (class)"""
port = 8444
announceInterval = 60
def __init__(self, host=None, sock=None, announcing=False):
super(BMProto, self).__init__(sock=sock) # pylint: disable=bad-super-call
# pylint: disable=bad-super-call
super(BMProto, self).__init__(sock=sock)
self.verackReceived = True
self.verackSent = True
# .. todo:: sort out streams
@ -40,8 +44,8 @@ class UDPSocket(BMProto): # pylint: disable=too-many-instance-attribut
else:
self.socket = sock
self.set_socket_reuse()
self.listening = state.Peer(*self.socket.getsockname())
self.destination = state.Peer(*self.socket.getsockname())
self.listening = Peer(*self.socket.getsockname())
self.destination = Peer(*self.socket.getsockname())
ObjectTracker.__init__(self)
self.connecting = False
self.connected = True
@ -82,7 +86,8 @@ class UDPSocket(BMProto): # pylint: disable=too-many-instance-attribut
if stream not in state.streamsInWhichIAmParticipating:
continue
if (seenTime < time.time() - self.maxTimeOffset or seenTime > time.time() + self.maxTimeOffset):
if (seenTime < time.time() - self.maxTimeOffset
or seenTime > time.time() + self.maxTimeOffset):
continue
if decodedIP is False:
# if the address isn't local, interpret it as
@ -95,7 +100,7 @@ class UDPSocket(BMProto): # pylint: disable=too-many-instance-attribut
self.destination.host, self.destination.port, remoteport)
if self.local:
state.discoveredPeers[
state.Peer(self.destination.host, remoteport)
Peer(self.destination.host, remoteport)
] = time.time()
return True
@ -130,7 +135,7 @@ class UDPSocket(BMProto): # pylint: disable=too-many-instance-attribut
logger.error("socket error: %s", e)
return
self.destination = state.Peer(*addr)
self.destination = Peer(*addr)
encodedAddr = protocol.encodeHost(addr[0])
self.local = bool(protocol.checkIPAddress(encodedAddr, True))
# overwrite the old buffer to avoid mixing data and so that

View File

@ -1,43 +1,41 @@
"""
src/network/uploadthread.py
`UploadThread` class definition
"""
# pylint: disable=unsubscriptable-object
import time
import helper_random
import protocol
from debug import logger
from helper_threading import StoppableThread
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from network.dandelion import Dandelion
from network.randomtrackingdict import RandomTrackingDict
from network.threads import StoppableThread
class UploadThread(StoppableThread):
"""This is a thread that uploads the objects that the peers requested from me """
"""
This is a thread that uploads the objects that the peers requested from me
"""
maxBufSize = 2097152 # 2MB
def __init__(self):
super(UploadThread, self).__init__(name="Uploader")
logger.info("init upload thread")
name = "Uploader"
def run(self):
while not self._stopped:
uploaded = 0
# Choose downloading peers randomly
connections = [x for x in list(BMConnectionPool().inboundConnections.values()) +
list(BMConnectionPool().outboundConnections.values()) if x.fullyEstablished]
# Choose uploading peers randomly
connections = BMConnectionPool().establishedConnections()
helper_random.randomshuffle(connections)
for i in connections:
now = time.time()
# avoid unnecessary delay
if i.skipUntil >= now:
continue
if len(i.write_buf) > UploadThread.maxBufSize:
if len(i.write_buf) > self.maxBufSize:
continue
try:
request = i.pendingUpload.randomKeys(RandomTrackingDict.maxPending)
request = i.pendingUpload.randomKeys(
RandomTrackingDict.maxPending)
except KeyError:
continue
payload = bytearray()
@ -47,22 +45,26 @@ class UploadThread(StoppableThread):
if Dandelion().hasHash(chunk) and \
i != Dandelion().objectChildStem(chunk):
i.antiIntersectionDelay()
logger.info('%s asked for a stem object we didn\'t offer to it.',
i.destination)
self.logger.info(
'%s asked for a stem object we didn\'t offer to it.',
i.destination)
break
try:
payload.extend(protocol.CreatePacket('object',
Inventory()[chunk].payload))
payload.extend(protocol.CreatePacket(
'object', Inventory()[chunk].payload))
chunk_count += 1
except KeyError:
i.antiIntersectionDelay()
logger.info('%s asked for an object we don\'t have.', i.destination)
self.logger.info(
'%s asked for an object we don\'t have.',
i.destination)
break
if not chunk_count:
continue
i.append_write_buf(payload)
logger.debug("%s:%i Uploading %i objects",
i.destination.host, i.destination.port, chunk_count)
self.logger.debug(
'%s:%i Uploading %i objects',
i.destination.host, i.destination.port, chunk_count)
uploaded += chunk_count
if not uploaded:
self.stop.wait(1)

View File

@ -1,8 +1,9 @@
#!/usr/bin/env python2.7
"""
Module for Proof of Work using OpenCL
"""
from struct import pack, unpack
import time
import hashlib
import random
import os
from bmconfigparser import BMConfigParser
@ -27,6 +28,8 @@ except ImportError:
def initCL():
"""Initlialise OpenCL engine"""
# pylint: disable=global-statement
global ctx, queue, program, hash_dt, libAvailable
if libAvailable is False:
return
@ -40,12 +43,13 @@ def initCL():
for platform in cl.get_platforms():
gpus.extend(platform.get_devices(device_type=cl.device_type.GPU))
if BMConfigParser().safeGet("bitmessagesettings", "opencl") == platform.vendor:
enabledGpus.extend(platform.get_devices(device_type=cl.device_type.GPU))
enabledGpus.extend(platform.get_devices(
device_type=cl.device_type.GPU))
if platform.vendor not in vendors:
vendors.append(platform.vendor)
except:
pass
if (len(enabledGpus) > 0):
if enabledGpus:
ctx = cl.Context(devices=enabledGpus)
queue = cl.CommandQueue(ctx)
f = open(os.path.join(paths.codePath(), "bitmsghash", 'bitmsghash.cl'), 'r')
@ -55,23 +59,29 @@ def initCL():
else:
logger.info("No OpenCL GPUs found")
del enabledGpus[:]
except Exception as e:
except Exception:
logger.error("OpenCL fail: ", exc_info=True)
del enabledGpus[:]
def openclAvailable():
return (len(gpus) > 0)
"""Are there any OpenCL GPUs available?"""
return bool(gpus)
def openclEnabled():
return (len(enabledGpus) > 0)
"""Is OpenCL enabled (and available)?"""
return bool(enabledGpus)
def do_opencl_pow(hash, target):
def do_opencl_pow(hash_, target):
"""Perform PoW using OpenCL"""
output = numpy.zeros(1, dtype=[('v', numpy.uint64, 1)])
if (len(enabledGpus) == 0):
if not enabledGpus:
return output[0][0]
data = numpy.zeros(1, dtype=hash_dt, order='C')
data[0]['v'] = ("0000000000000000" + hash).decode("hex")
data[0]['v'] = ("0000000000000000" + hash_).decode("hex")
data[0]['target'] = target
hash_buf = cl.Buffer(ctx, cl.mem_flags.READ_ONLY | cl.mem_flags.COPY_HOST_PTR, hostbuf=data)
@ -83,9 +93,8 @@ def do_opencl_pow(hash, target):
kernel.set_arg(0, hash_buf)
kernel.set_arg(1, dest_buf)
start = time.time()
progress = 0
globamt = worksize*2000
globamt = worksize * 2000
while output[0][0] == 0 and shutdown == 0:
kernel.set_arg(2, pack("<Q", progress))
@ -96,21 +105,18 @@ def do_opencl_pow(hash, target):
cl.enqueue_copy(queue, output, dest_buf)
queue.finish()
progress += globamt
sofar = time.time() - start
# logger.debug("Working for %.3fs, %.2f Mh/s", sofar, (progress / sofar) / 1000000)
if shutdown != 0:
raise Exception ("Interrupted")
taken = time.time() - start
raise Exception("Interrupted")
# logger.debug("Took %d tries.", progress)
return output[0][0]
#initCL()
if __name__ == "__main__":
#in python3 I have change this 54227212183L to 54227212183
target = 54227212183
initialHash = "3758f55b5a8d902fd3597e4ce6a2d3f23daff735f65d9698c270987f4e67ad590b93f3ffeba0ef2fd08a8dc2f87b68ae5a0dc819ab57f22ad2c4c9c8618a43b3".decode("hex")
nonce = do_opencl_pow(initialHash.encode("hex"), target)
trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
print ("{} - value {} < {}".format(nonce, trialValue, target))
initCL()
target_ = 54227212183
initialHash = ("3758f55b5a8d902fd3597e4ce6a2d3f23daff735f65d9698c270987f4e67ad590"
"b93f3ffeba0ef2fd08a8dc2f87b68ae5a0dc819ab57f22ad2c4c9c8618a43b3").decode("hex")
nonce = do_opencl_pow(initialHash.encode("hex"), target_)
trialValue, = unpack(
'>Q', hashlib.sha512(hashlib.sha512(pack('>Q', nonce) + initialHash).digest()).digest()[0:8])
print ("{} - value {} < {}".format(nonce, trialValue, target_))

View File

@ -1,6 +1,5 @@
"""
src/paths.py
============
Path related functions
"""
# pylint: disable=import-error
import logging
@ -11,6 +10,7 @@ from datetime import datetime
from shutil import move
from kivy.utils import platform
logger = logging.getLogger('default')
# When using py2exe or py2app, the variable frozen is added to the sys
@ -46,7 +46,8 @@ def lookupAppdataFolder():
dataFolder = os.path.join(
os.environ['HOME'],
'Library/Application Support/', APPNAME
) + '/' # ..fixme:: should also be os.path.sep
) + '/'
except KeyError:
sys.exit(
'Could not find home folder, please report this message'
@ -83,6 +84,7 @@ def codePath():
return os.path.dirname(__file__)
return (
os.environ.get('RESOURCEPATH')
# pylint: disable=protected-access
if frozen == "macosx_app" else sys._MEIPASS)

View File

@ -18,7 +18,7 @@ class DebugLogger(object):
"""Safe logger wrapper for tor and plugin's logs"""
# pylint: disable=too-few-public-methods
def __init__(self):
self._logger = logging.getLogger(__name__.split('.', 1)[0])
self._logger = logging.getLogger('default')
self._levels = {
'err': 40,
'warn': 30,

View File

@ -1,12 +1,8 @@
# pylint: disable=too-many-boolean-expressions,too-many-return-statements,too-many-locals,too-many-statements
"""
protocol.py
===========
Low-level protocol-related functions.
"""
from __future__ import absolute_import
# pylint: disable=too-many-boolean-expressions,too-many-return-statements
# pylint: disable=too-many-locals,too-many-statements
import base64
import hashlib
@ -14,7 +10,6 @@ import random
import socket
import sys
import time
import traceback
from binascii import hexlify
from struct import pack, unpack, Struct
@ -29,10 +24,18 @@ from fallback import RIPEMD160Hash
from helper_sql import sqlExecute
from version import softwareVersion
# Service flags
#: This is a normal network node
NODE_NETWORK = 1
#: This node supports SSL/TLS in the current connection (python < 2.7.9
#: only supports an SSL client, so in that case it would only have this
#: on when the connection is a client).
NODE_SSL = 2
# (Proposal) This node may do PoW on behalf of some its peers
# (PoW offloading/delegating), but it doesn't have to. Clients may have
# to meet additional requirements (e.g. TLS authentication)
# NODE_POW = 4
#: Node supports dandelion
NODE_DANDELION = 8
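As a side note, a tiny sketch of how such service bitflags are composed and tested (values mirror the constants above):

NODE_NETWORK = 1
NODE_SSL = 2
NODE_DANDELION = 8

services = NODE_NETWORK | NODE_SSL    # advertise network + TLS support
assert services & NODE_SSL            # this peer supports TLS
assert not services & NODE_DANDELION  # but not dandelion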
# Bitfield flags
@ -110,8 +113,39 @@ def networkType(host):
return 'IPv6'
def network_group(host):
"""Canonical identifier of network group
simplified, borrowed from
GetGroup() in src/netaddresses.cpp in bitcoin core"""
if not isinstance(host, str):
return None
network_type = networkType(host)
try:
raw_host = encodeHost(host)
except socket.error:
return host
if network_type == 'IPv4':
decoded_host = checkIPv4Address(raw_host[12:], True)
if decoded_host:
# /16 subnet
return raw_host[12:14]
elif network_type == 'IPv6':
decoded_host = checkIPv6Address(raw_host, True)
if decoded_host:
# /32 subnet
return raw_host[0:12]
else:
# just host, e.g. for tor
return host
# global network type group for local, private, unroutable
return network_type
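A self-contained sketch of the IPv4 rule above (two hosts in the same /16 yield the same group key); it uses inet_aton directly instead of the encodeHost/checkIPv4Address helpers:

import socket

def ipv4_group(host):
    # the first two octets identify the /16 subnet
    return socket.inet_aton(host)[:2]

assert ipv4_group('93.184.216.34') == ipv4_group('93.184.10.1')
assert ipv4_group('93.184.216.34') != ipv4_group('8.8.8.8')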
def checkIPAddress(host, private=False):
"""Returns hostStandardFormat if it is a valid IP address, otherwise returns False"""
"""
Returns hostStandardFormat if it is a valid IP address,
otherwise returns False
"""
if host[0:12] == '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF':
hostStandardFormat = socket.inet_ntop(socket.AF_INET, host[12:])
return checkIPv4Address(host[12:], hostStandardFormat, private)
@ -127,29 +161,37 @@ def checkIPAddress(host, private=False):
except ValueError:
return False
if hostStandardFormat == "":
# This can happen on Windows systems which are not 64-bit compatible
# so let us drop the IPv6 address.
# This can happen on Windows systems which are
# not 64-bit compatible so let us drop the IPv6 address.
return False
return checkIPv6Address(host, hostStandardFormat, private)
def checkIPv4Address(host, hostStandardFormat, private=False):
"""Returns hostStandardFormat if it is an IPv4 address, otherwise returns False"""
"""
Returns hostStandardFormat if it is an IPv4 address,
otherwise returns False
"""
if host[0] == '\x7F': # 127/8
if not private:
logger.debug('Ignoring IP address in loopback range: %s', hostStandardFormat)
logger.debug(
'Ignoring IP address in loopback range: %s',
hostStandardFormat)
return hostStandardFormat if private else False
if host[0] == '\x0A': # 10/8
if not private:
logger.debug('Ignoring IP address in private range: %s', hostStandardFormat)
logger.debug(
'Ignoring IP address in private range: %s', hostStandardFormat)
return hostStandardFormat if private else False
if host[0:2] == '\xC0\xA8': # 192.168/16
if not private:
logger.debug('Ignoring IP address in private range: %s', hostStandardFormat)
logger.debug(
'Ignoring IP address in private range: %s', hostStandardFormat)
return hostStandardFormat if private else False
if host[0:2] >= '\xAC\x10' and host[0:2] < '\xAC\x20': # 172.16/12
if host[0:2] >= '\xAC\x10'.encode('raw_unicode_escape') and host[0:2] < '\xAC\x20'.encode('raw_unicode_escape'): # 172.16/12
if not private:
logger.debug('Ignoring IP address in private range: %s', hostStandardFormat)
logger.debug(
'Ignoring IP address in private range: %s', hostStandardFormat)
return hostStandardFormat if private else False
return False if private else hostStandardFormat
@ -157,6 +199,7 @@ def checkIPv4Address(host, hostStandardFormat, private=False):
def checkIPv6Address(host, hostStandardFormat, private=False):
"""Returns hostStandardFormat if it is an IPv6 address, otherwise returns False"""
if host == ('\x00'.encode() * 15) + '\x01'.encode():
if not private:
logger.debug('Ignoring loopback address: {}'.format(hostStandardFormat))
return False
@ -167,6 +210,7 @@ def checkIPv6Address(host, hostStandardFormat, private=False):
if (ord(host[0:1]) & 0xfe) == 0xfc:
if not private:
logger.debug('Ignoring unique local address: {}'.format(hostStandardFormat))
return hostStandardFormat if private else False
return False if private else hostStandardFormat
@ -188,28 +232,27 @@ def haveSSL(server=False):
def checkSocksIP(host):
"""Predicate to check if we're using a SOCKS proxy"""
sockshostname = BMConfigParser().safeGet(
'bitmessagesettings', 'sockshostname')
try:
if state.socksIP is None or not state.socksIP:
state.socksIP = socket.gethostbyname(BMConfigParser().get("bitmessagesettings", "sockshostname"))
# uninitialised
except NameError:
state.socksIP = socket.gethostbyname(BMConfigParser().get("bitmessagesettings", "sockshostname"))
# resolving failure
except socket.gaierror:
state.socksIP = BMConfigParser().get("bitmessagesettings", "sockshostname")
if not state.socksIP:
state.socksIP = socket.gethostbyname(sockshostname)
except NameError: # uninitialised
state.socksIP = socket.gethostbyname(sockshostname)
except (TypeError, socket.gaierror): # None, resolving failure
state.socksIP = sockshostname
return state.socksIP == host
def isProofOfWorkSufficient(data,
nonceTrialsPerByte=0,
payloadLengthExtraBytes=0,
recvTime=0):
def isProofOfWorkSufficient(
data, nonceTrialsPerByte=0, payloadLengthExtraBytes=0, recvTime=0):
"""
Validate an object's Proof of Work using method described in:
https://bitmessage.org/wiki/Proof_of_work
Validate an object's Proof of Work using method described
`here <https://bitmessage.org/wiki/Proof_of_work>`_
Arguments:
int nonceTrialsPerByte (default: from default.py)
int payloadLengthExtraBytes (default: from default.py)
int nonceTrialsPerByte (default: from `.defaults`)
int payloadLengthExtraBytes (default: from `.defaults`)
float recvTime (optional) UNIX epoch time when object was
received from the network (default: current system time)
Returns:
@ -223,11 +266,13 @@ def isProofOfWorkSufficient(data,
TTL = endOfLifeTime - (int(recvTime) if recvTime else int(time.time()))
if TTL < 300:
TTL = 300
POW, = unpack('>Q', hashlib.sha512(hashlib.sha512(data[
:8] + hashlib.sha512(data[8:]).digest()).digest()).digest()[0:8])
return POW <= 2 ** 64 / (nonceTrialsPerByte *
(len(data) + payloadLengthExtraBytes +
((TTL * (len(data) + payloadLengthExtraBytes)) / (2 ** 16))))
POW, = unpack('>Q', hashlib.sha512(hashlib.sha512(
data[:8] + hashlib.sha512(data[8:]).digest()
).digest()).digest()[0:8])
return POW <= 2 ** 64 / (
nonceTrialsPerByte * (
len(data) + payloadLengthExtraBytes +
((TTL * (len(data) + payloadLengthExtraBytes)) / (2 ** 16))))
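To make the threshold concrete, a worked computation; 1000/1000 are assumed here as the usual network minimums for nonceTrialsPerByte and payloadLengthExtraBytes (the real defaults live in defaults.py):

nonceTrialsPerByte = 1000
payloadLengthExtraBytes = 1000
dataLen = 500    # hypothetical object size in bytes
TTL = 345600     # four days, in seconds

target = 2 ** 64 // (
    nonceTrialsPerByte * (
        dataLen + payloadLengthExtraBytes +
        (TTL * (dataLen + payloadLengthExtraBytes)) // (2 ** 16)))
# POW, the 64-bit value unpacked from the double SHA-512 above,
# must be <= target; here target comes out to roughly 1.96e+12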
# Packet creation
@ -236,6 +281,7 @@ def isProofOfWorkSufficient(data,
def CreatePacket(command, payload=''):
"""Construct and return a number of bytes from a payload"""
payload = payload if type(payload) == bytes else payload.encode()
payload_length = len(payload)
checksum = hashlib.sha512(payload).digest()[0:4]
byte = bytearray(Header.size + payload_length)
@ -247,6 +293,7 @@ def CreatePacket(command, payload=''):
def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server=False, nodeid=None):
"""Construct the payload of a version message, return the resultng bytes of running CreatePacket() on it"""
payload = bytes()
payload += pack('>L', 3) # protocol version.
# bitflags of the services I offer.
payload += pack(
@ -257,9 +304,10 @@ def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server=
)
payload += pack('>q', int(time.time()))
payload += pack(
'>q', 1) # boolservices of remote connection; ignored by the remote host.
if checkSocksIP(remoteHost) and server: # prevent leaking of tor outbound IP
# boolservices of remote connection; ignored by the remote host.
payload += pack('>q', 1)
if checkSocksIP(remoteHost) and server:
# prevent leaking of tor outbound IP
payload += encodeHost('127.0.0.1')
payload += pack('>H', 8444)
else:
@ -282,13 +330,14 @@ def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server=
# python3: needs checking
payload += '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF'.encode() + pack('>L', 2130706433)
# we have a separate extPort and incoming over clearnet
# or outgoing through clearnet
extport = BMConfigParser().safeGetInt('bitmessagesettings', 'extport')
if (
extport and ((server and not checkSocksIP(remoteHost)) or (
BMConfigParser().get('bitmessagesettings', 'socksproxytype') ==
'none' and not server))
BMConfigParser().get('bitmessagesettings', 'socksproxytype')
== 'none' and not server))
):
payload += pack('>H', extport)
elif checkSocksIP(remoteHost) and server: # incoming connection over Tor
@ -296,6 +345,7 @@ def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server=
else: # no extport and not incoming over Tor
payload += pack('>H', int(BMConfigParser().safeGet('bitmessagesettings', 'port')))
if nodeid is not None:
payload += nodeid[0:8]
else:
@ -318,7 +368,10 @@ def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server=
def assembleErrorMessage(fatal=0, banTime=0, inventoryVector='', errorText=''):
"""Construct the payload of an error message, return the resultng bytes of running CreatePacket() on it"""
"""
Construct the payload of an error message,
return the resulting bytes of running `CreatePacket` on it
"""
payload = encodeVarint(fatal)
payload += encodeVarint(banTime)
payload += encodeVarint(len(inventoryVector))
@ -455,7 +508,7 @@ def decryptAndCheckPubkeyPayload(data, address):
except Exception:
logger.critical(
'Pubkey decryption was UNsuccessful because of'
' an unhandled exception! This is definitely a bug! \n%s',
traceback.format_exc()
' an unhandled exception! This is definitely a bug!',
exc_info=True
)
return 'failed'

View File

@ -1,10 +1,13 @@
"""
src/pyelliptic/__init__.py
=====================================
Copyright (C) 2010
Author: Yann GUIBET
Contact: <yannguibet@gmail.com>
Python OpenSSL wrapper.
For modern cryptography with ECC, AES, HMAC, Blowfish, ...
This is an abandoned package maintained inside of PyBitmessage.
"""
# Copyright (C) 2010
# Author: Yann GUIBET
# Contact: <yannguibet@gmail.com>
from .openssl import OpenSSL
from .ecc import ECC

View File

@ -1,8 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
src/pyelliptic/hash.py
=====================
Wrappers for hash functions from OpenSSL.
"""
# Copyright (C) 2011 Yann GUIBET <yannguibet@gmail.com>
# See LICENSE for details.

View File

@ -1,26 +1,19 @@
# Copyright (C) 2011 Yann GUIBET <yannguibet@gmail.com>
# See LICENSE for details.
#
# Software slightly changed by Jonathan Warren <bitmessage at-symbol jonwarren.org>
# pylint: disable=protected-access, import-error
"""
src/pyelliptic/openssl.py
=================================
This module loads openssl libs with ctypes and encapsulates
needed openssl functionality in class _OpenSSL.
"""
# pylint: disable=import-error
import sys
import ctypes
from kivy.utils import platform
OpenSSL = None
# !/usr/bin/env python
# -*- coding: utf-8 -*-
"""
src/pyelliptic/openssl.py
=====================
"""
# Copyright (C) 2011 Yann GUIBET <yannguibet@gmail.com>
# See LICENSE for details.
#
# Software slightly changed by Jonathan Warren <bitmessage at-symbol jonwarren.org>
# pylint: disable=protected-access
class CipherName:
"""Class returns cipher name, pointer and blocksize"""

View File

@ -1,23 +1,19 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# pylint: disable=too-many-locals,too-many-arguments,too-many-function-args
"""
= usage =
Usage
-----
== python ==
>>> import qtidenticon
>>> qtidenticon.render_identicon(code, size)
Return a PIL Image class instance which has the generated identicon image.
```size``` specifies `patch size`. Generated image size is 3 * ```size```.
``size`` specifies `patch size`. Generated image size is 3 * ``size``.
"""
from PyQt4 import QtGui
from PyQt4.QtCore import QSize, QPointF, Qt
from PyQt4.QtGui import QPixmap, QPainter, QPolygonF
__all__ = ['render_identicon', 'IdenticonRendererBase']
class IdenticonRendererBase(object):
"""Encapsulate methods around rendering identicons"""
@ -26,7 +22,7 @@ class IdenticonRendererBase(object):
def __init__(self, code):
"""
@param code code for icon
:param code: code for icon
"""
if not isinstance(code, int):
code = int(code)
@ -36,8 +32,8 @@ class IdenticonRendererBase(object):
"""
render identicon to QPicture
@param size identicon patchsize. (image size is 3 * [size])
@return QPicture
:param size: identicon patchsize. (image size is 3 * [size])
:returns: :class:`QPicture`
"""
# decode the code
@ -79,7 +75,7 @@ class IdenticonRendererBase(object):
def drawPatchQt(self, pos, turn, invert, patch_type, image, size, foreColor,
backColor, penwidth): # pylint: disable=unused-argument
"""
@param size patch size
:param size: patch size
"""
path = self.PATH_SET[patch_type]
if not path:
@ -134,7 +130,7 @@ class IdenticonRendererBase(object):
class DonRenderer(IdenticonRendererBase):
"""
Don Park's implementation of identicon
see : http://www.docuverse.com/blog/donpark/2007/01/19/identicon-updated-and-source-released
see: http://www.docuverse.com/blog/donpark/2007/01/19/identicon-updated-and-source-released
"""
PATH_SET = [

View File

@ -1,20 +1,52 @@
import queue as Queue
from class_objectProcessorQueue import ObjectProcessorQueue
import queue as Queue
"""Most of the queues used by bitmessage threads are defined here."""
import threading
import time
from multiqueue import MultiQueue
class ObjectProcessorQueue(Queue.Queue):
"""Special queue class using lock for `.threads.objectProcessor`"""
maxSize = 32000000
def __init__(self):
Queue.Queue.__init__(self)
self.sizeLock = threading.Lock()
#: in Bytes. We maintain this to prevent nodes from flooding us
#: with objects which take up too much memory. If this gets
#: too big we'll sleep before asking for further objects.
self.curSize = 0
def put(self, item, block=True, timeout=None):
while self.curSize >= self.maxSize:
time.sleep(1)
with self.sizeLock:
self.curSize += len(item[1])
Queue.Queue.put(self, item, block, timeout)
def get(self, block=True, timeout=None):
item = Queue.Queue.get(self, block, timeout)
with self.sizeLock:
self.curSize -= len(item[1])
return item
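Illustrative use of the size accounting above (items are (objectType, payload) pairs, so len(item[1]) is the payload size; assumes the ObjectProcessorQueue defined above has been imported):

q = ObjectProcessorQueue()
q.put((1, b'\x00' * 100))  # curSize grows by the payload length
assert q.curSize == 100
q.get()
assert q.curSize == 0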
workerQueue = Queue.Queue()
UISignalQueue = Queue.Queue()
addressGeneratorQueue = Queue.Queue()
# receiveDataThreads dump objects they hear on the network into this
# queue to be processed.
#: `.network.ReceiveQueueThread` instances dump objects they hear
#: on the network into this queue to be processed.
objectProcessorQueue = ObjectProcessorQueue()
invQueue = MultiQueue()
addrQueue = MultiQueue()
portCheckerQueue = Queue.Queue()
receiveDataQueue = Queue.Queue()
# The address generator thread uses this queue to get information back
# to the API thread.
#: The address generator thread uses this queue to get information back
#: to the API thread.
apiAddressGeneratorReturnQueue = Queue.Queue()
# Exceptions
#: for exceptions
excQueue = Queue.Queue()

View File

@ -1,21 +1,28 @@
from __future__ import division
"""
Some shared functions
.. deprecated:: 0.6.3
Should be moved to different places and this file removed,
but it needs refactoring.
"""
from __future__ import division
# Libraries.
import hashlib
import os
import sys
import stat
import threading
import hashlib
import subprocess
from binascii import hexlify
from pyelliptic import arithmetic
from kivy.utils import platform
# Project imports.
import state
import highlevelcrypto
import state
from addresses import decodeAddress, encodeVarint
from bmconfigparser import BMConfigParser
from debug import logger
from addresses import decodeAddress, encodeVarint
from helper_sql import sqlQuery
@ -56,6 +63,7 @@ maximumLengthOfTimeToBotherResendingMessages = 0
def isAddressInMyAddressBook(address):
"""Is address in my addressbook?"""
queryreturn = sqlQuery(
'''select address from addressbook where address=?''',
address)
@ -64,6 +72,7 @@ def isAddressInMyAddressBook(address):
# At this point we should really just have a isAddressInMy(book, address)...
def isAddressInMySubscriptionsList(address):
"""Am I subscribed to this address?"""
queryreturn = sqlQuery(
'''select * from subscriptions where address=?''',
str(address))
@ -71,6 +80,9 @@ def isAddressInMySubscriptionsList(address):
def isAddressInMyAddressBookSubscriptionsListOrWhitelist(address):
"""
Am I subscribed to this address, is it in my addressbook or whitelist?
"""
if isAddressInMyAddressBook(address):
return True
@ -90,6 +102,11 @@ def isAddressInMyAddressBookSubscriptionsListOrWhitelist(address):
return False
def decodeWalletImportFormat(WIFstring):
# pylint: disable=inconsistent-return-statements
"""
Convert private key from base58 that's used in the config file to
8-bit binary string
"""
fullString = arithmetic.changebase(WIFstring, 58, 256)
privkey = fullString[:-4]
if fullString[-4:] != hashlib.sha256(hashlib.sha256(privkey).digest()).digest()[:4]:
@ -98,6 +115,7 @@ def decodeWalletImportFormat(WIFstring):
' private keys, the checksum failed. Here are the first'
' 6 characters of the PRIVATE key: {}'.format(str(WIFstring)[:6])
)
os._exit(0)
if privkey[0:1] == '\x80'.encode()[1:]: # checksum passed
return privkey[1:]
@ -107,10 +125,11 @@ def decodeWalletImportFormat(WIFstring):
' the checksum passed but the key doesn\'t begin with hex 80.'
' Here is the PRIVATE key: {}'.format(WIFstring)
)
os._exit(0)
os._exit(0) # pylint: disable=protected-access
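The checksum rule above in isolation, as a sketch; fullString stands for the already base58-decoded key material:

import hashlib

def wif_checksum_ok(fullString):
    # the last 4 bytes are a double-SHA256 checksum of the rest
    privkey, check = fullString[:-4], fullString[-4:]
    return hashlib.sha256(
        hashlib.sha256(privkey).digest()).digest()[:4] == check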
def reloadMyAddressHashes():
"""Reload keys for user's addresses from the config file"""
logger.debug('reloading keys from keys.dat file')
myECCryptorObjects.clear()
@ -118,33 +137,29 @@ def reloadMyAddressHashes():
myAddressesByTag.clear()
# myPrivateKeys.clear()
keyfileSecure = checkSensitiveFilePermissions(state.appdata + 'keys.dat')
keyfileSecure = checkSensitiveFilePermissions(os.path.join(
state.appdata, 'keys.dat'))
hasEnabledKeys = False
for addressInKeysFile in BMConfigParser().addresses():
isEnabled = BMConfigParser().safeGet(addressInKeysFile, 'enabled')
if isEnabled:
hasEnabledKeys = True
# status
_, addressVersionNumber, streamNumber, hash = \
decodeAddress(addressInKeysFile)
addressVersionNumber, streamNumber, hashobj = decodeAddress(addressInKeysFile)[1:]
if addressVersionNumber in (2, 3, 4):
# Returns a simple 32 bytes of information encoded
# in 64 Hex characters, or null if there was an error.
privEncryptionKey = hexlify(decodeWalletImportFormat(
BMConfigParser().get(addressInKeysFile, 'privencryptionkey'))
)
BMConfigParser().get(addressInKeysFile, 'privencryptionkey')))
# It is 32 bytes encoded as 64 hex characters
if len(privEncryptionKey) == 64:
myECCryptorObjects[hash] = \
myECCryptorObjects[hashobj] = \
highlevelcrypto.makeCryptor(privEncryptionKey)
myAddressesByHash[hash] = addressInKeysFile
myAddressesByHash[hashobj] = addressInKeysFile
tag = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber) +
encodeVarint(streamNumber) + hash).digest()
).digest()[32:]
encodeVarint(streamNumber) + hashobj).digest()).digest()[32:]
myAddressesByTag[tag] = addressInKeysFile
else:
logger.error(
'Error in reloadMyAddressHashes: Can\'t handle'
@ -157,6 +172,10 @@ def reloadMyAddressHashes():
def reloadBroadcastSendersForWhichImWatching():
"""
Reinitialize runtime data for the broadcasts I'm subscribed to
from the config file
"""
broadcastSendersForWhichImWatching.clear()
MyECSubscriptionCryptorObjects.clear()
queryreturn = sqlQuery('SELECT address FROM subscriptions where enabled=1')
@ -164,22 +183,23 @@ def reloadBroadcastSendersForWhichImWatching():
for row in queryreturn:
address, = row
# status
_, addressVersionNumber, streamNumber, hash = decodeAddress(address)
addressVersionNumber, streamNumber, hashobj = decodeAddress(address)[1:]
if addressVersionNumber == 2:
broadcastSendersForWhichImWatching[hash] = 0
broadcastSendersForWhichImWatching[hashobj] = 0
# Now, for all addresses, even version 2 addresses,
# we should create Cryptor objects in a dictionary which we will
# use to attempt to decrypt encrypted broadcast messages.
if addressVersionNumber <= 3:
privEncryptionKey = hashlib.sha512((
privEncryptionKey = hashlib.sha512(
encodeVarint(addressVersionNumber) +
encodeVarint(streamNumber) + hash)).digest()[:32]
MyECSubscriptionCryptorObjects[hash] = \
encodeVarint(streamNumber) + hashobj
).digest()[:32]
MyECSubscriptionCryptorObjects[hashobj] = \
highlevelcrypto.makeCryptor(hexlify(privEncryptionKey))
else:
doubleHashOfAddressData = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber) +
encodeVarint(streamNumber) + hash
encodeVarint(streamNumber) + hashobj
).digest()).digest()
tag = doubleHashOfAddressData[32:]
privEncryptionKey = doubleHashOfAddressData[:32]
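To summarise the two branches above: v2/v3 broadcasts derive the decryption key with a single SHA-512 of the address data, while v4 takes a double SHA-512 and splits it, first half as the private encryption key, second half as the tag. A sketch under the same simplified single-byte varint assumption as before:

```python
import hashlib

def broadcast_decryption_material(version, stream, ripe):
    """Return (privEncryptionKey, tag) for a subscribed broadcast address;
    tag is None for v2/v3, which have no tagged broadcasts."""
    data = (bytes(bytearray([version])) +
            bytes(bytearray([stream])) + ripe)  # assumes values < 253
    if version <= 3:
        return hashlib.sha512(data).digest()[:32], None
    double = hashlib.sha512(hashlib.sha512(data).digest()).digest()
    return double[:32], double[32:]
```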
@ -188,21 +208,23 @@ def reloadBroadcastSendersForWhichImWatching():
def fixPotentiallyInvalidUTF8Data(text):
"""Sanitise invalid UTF-8 strings"""
try:
unicode(text, 'utf-8')
return text
except:
return 'Part of the message is corrupt. The message cannot be'\
return 'Part of the message is corrupt. The message cannot be' \
' displayed the normal way.\n\n' + repr(text)
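The function above leans on Python 2's `unicode`; a Python-3-flavoured equivalent of the same guard, for illustration only, would be:

```python
def fix_potentially_invalid_utf8(data):
    """Return bytes unchanged if they decode as UTF-8, else a repr() fallback."""
    try:
        data.decode('utf-8')
        return data
    except UnicodeDecodeError:
        return ('Part of the message is corrupt. The message cannot be'
                ' displayed the normal way.\n\n' + repr(data)).encode('utf-8')
```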
# Checks sensitive file permissions for inappropriate umask
# during keys.dat creation. (Or unwise subsequent chmod.)
#
# Returns true iff file appears to have appropriate permissions.
def checkSensitiveFilePermissions(filename):
"""
:param str filename: path to the file
:return: True if file appears to have appropriate permissions.
"""
if sys.platform == 'win32':
# TODO: This might deserve extra checks by someone familiar with
# .. todo:: This might deserve extra checks by someone familiar with
# Windows systems.
return True
elif sys.platform[:7] == 'freebsd':
@ -210,30 +232,30 @@ def checkSensitiveFilePermissions(filename):
present_permissions = os.stat(filename)[0]
disallowed_permissions = stat.S_IRWXG | stat.S_IRWXO
return present_permissions & disallowed_permissions == 0
else:
try:
# Skip known problems for non-Win32 filesystems
# without POSIX permissions.
fstype = subprocess.check_output(
'stat -f -c "%%T" %s' % (filename),
shell=True,
stderr=subprocess.STDOUT
)
if 'fuseblk' in fstype:
logger.info(
'Skipping file permissions check for %s.'
' Filesystem fuseblk detected.', filename)
return True
except:
# Swallow exception here, but we might run into trouble later!
logger.error('Could not determine filesystem type. %s', filename)
present_permissions = os.stat(filename)[0]
disallowed_permissions = stat.S_IRWXG | stat.S_IRWXO
return present_permissions & disallowed_permissions == 0
try:
# Skip known problems for non-Win32 filesystems
# without POSIX permissions.
fstype = subprocess.check_output(
'stat -f -c "%%T" %s' % (filename),
shell=True,
stderr=subprocess.STDOUT
)
if 'fuseblk' in fstype:
logger.info(
'Skipping file permissions check for %s.'
' Filesystem fuseblk detected.', filename)
return True
except:
# Swallow exception here, but we might run into trouble later!
logger.error('Could not determine filesystem type. %s', filename)
present_permissions = os.stat(filename)[0]
disallowed_permissions = stat.S_IRWXG | stat.S_IRWXO
return present_permissions & disallowed_permissions == 0
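Both the old and new versions of this check, and the fix function that follows, revolve around one mask: every group and other bit (`stat.S_IRWXG | stat.S_IRWXO`) must be clear. A minimal sketch of checking and then tightening a keyfile:

```python
import os
import stat

INSECURE_BITS = stat.S_IRWXG | stat.S_IRWXO

def is_private(path):
    """True iff neither group nor other has any access to the file."""
    return os.stat(path).st_mode & INSECURE_BITS == 0

def make_private(path):
    """Drop group/other bits, keeping the owner's permissions intact."""
    mode = stat.S_IMODE(os.stat(path).st_mode)
    os.chmod(path, mode & ~INSECURE_BITS)
```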
# Fixes permissions on a sensitive file.
def fixSensitiveFilePermissions(filename, hasEnabledKeys):
"""Try to change file permissions to be more restrictive"""
if hasEnabledKeys:
logger.warning(
'Keyfile had insecure permissions, and there were enabled'
@ -258,6 +280,7 @@ def fixSensitiveFilePermissions(filename, hasEnabledKeys):
def openKeysFile():
"""Open keys file with an external editor"""
if 'linux' in sys.platform:
subprocess.call(["xdg-open", state.appdata + 'keys.dat'])
else:

View File

@ -1,22 +1,24 @@
"""shutdown function"""
import os
import queue as Queue
import threading
import time
from debug import logger
from helper_sql import sqlQuery, sqlStoredProcedure
from helper_threading import StoppableThread
from knownnodes import saveKnownNodes
from inventory import Inventory
from queues import (
addressGeneratorQueue, objectProcessorQueue, UISignalQueue, workerQueue)
import shared
import state
from debug import logger
from helper_sql import sqlQuery, sqlStoredProcedure
from inventory import Inventory
from knownnodes import saveKnownNodes
from network import StoppableThread
from queues import (
addressGeneratorQueue, objectProcessorQueue, UISignalQueue, workerQueue)
def doCleanShutdown():
# Used to tell proof of work worker threads
# and the objectProcessorThread to exit.
"""
Used to tell all the threads to finish work and exit.
"""
state.shutdown = 1
objectProcessorQueue.put(('checkShutdownVariable', 'no data'))
@ -52,9 +54,11 @@ def doCleanShutdown():
time.sleep(.25)
for thread in threading.enumerate():
if (thread is not threading.currentThread() and
isinstance(thread, StoppableThread) and
thread.name != 'SQL'):
if (
thread is not threading.currentThread()
and isinstance(thread, StoppableThread)
and thread.name != 'SQL'
):
logger.debug("Waiting for thread %s", thread.name)
thread.join()
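The shutdown pattern shown here is: set the global flag, then join every remaining stoppable thread except the current one and the SQL thread (which is stopped separately via sqlStoredProcedure). A condensed illustration of the same loop, with a stand-in for the project's StoppableThread:

```python
import threading

class StoppableWorker(threading.Thread):
    """Stand-in for the project's StoppableThread: polls a stop event."""
    def __init__(self, name):
        threading.Thread.__init__(self, name=name)
        self.stop = threading.Event()

    def run(self):
        while not self.stop.wait(0.1):
            pass  # real threads do their work here

def join_workers():
    """Signal and join every stoppable thread except ourselves and 'SQL'."""
    for thread in threading.enumerate():
        if thread is threading.current_thread() or thread.name == 'SQL':
            continue
        if isinstance(thread, StoppableWorker):
            thread.stop.set()
            thread.join()

# StoppableWorker('worker-1').start(); join_workers() returns once it exits
```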
@ -76,10 +80,10 @@ def doCleanShutdown():
except Queue.Empty:
break
if shared.thisapp.daemon or not state.enableGUI: # FIXME redundant?
if shared.thisapp.daemon or not state.enableGUI: # ..fixme:: redundant?
logger.info('Clean shutdown complete.')
shared.thisapp.cleanup()
os._exit(0)
os._exit(0) # pylint: disable=protected-access
else:
logger.info('Core shutdown complete.')
for thread in threading.enumerate():

View File

@ -1,8 +1,13 @@
#! /usr/bin/env python
"""
This is based upon the singleton class from
`tendo <https://github.com/pycontribs/tendo>`_
which is under the Python Software Foundation License version 2
"""
import atexit
import os
import sys
import state
try:
@ -11,15 +16,10 @@ except ImportError:
pass
class singleinstance:
class singleinstance(object):
"""
Implements a single instance application by creating a lock file
at appdata.
This is based upon the singleton class from tendo
https://github.com/pycontribs/tendo
which is under the Python Software Foundation License version 2
"""
def __init__(self, flavor_id="", daemon=False):
self.initialized = False
@ -40,6 +40,7 @@ class singleinstance:
atexit.register(self.cleanup)
def lock(self):
"""Obtain single instance lock"""
if self.lockPid is None:
self.lockPid = os.getpid()
if sys.platform == 'win32':
@ -52,8 +53,7 @@ class singleinstance:
self.lockfile,
os.O_CREAT | os.O_EXCL | os.O_RDWR | os.O_TRUNC
)
except OSError:
type, e, tb = sys.exc_info()
except OSError as e:
if e.errno == 13:
print(
'Another instance of this application'
@ -84,6 +84,7 @@ class singleinstance:
self.fp.flush()
def cleanup(self):
"""Release single instance lock"""
if not self.initialized:
return
if self.daemon and self.lockPid == os.getpid():
@ -96,7 +97,6 @@ class singleinstance:
fcntl.lockf(self.fp, fcntl.LOCK_UN)
except Exception as e:
pass
return
print ("Cleaning up lockfile")
try:
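The mechanics behind this class: Windows takes the lock by exclusive-create (`O_CREAT | O_EXCL`) on the lockfile, POSIX uses `fcntl.lockf`, and the PID is written into the file either way. A compressed POSIX-only sketch:

```python
import fcntl
import os
import sys

def acquire_instance_lock(lockfile):
    """Take an exclusive non-blocking lock, or exit if one is already held."""
    fp = open(lockfile, 'a+')
    try:
        fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        print('Another instance of this application is already running')
        sys.exit(-1)
    fp.seek(0)
    fp.truncate()
    fp.write(str(os.getpid()))
    fp.flush()
    return fp  # keep the handle alive for the lifetime of the process
```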

View File

@ -1,6 +1,21 @@
"""
Singleton decorator definition
"""
from functools import wraps
def Singleton(cls):
"""
Decorator implementing the singleton pattern:
it restricts the instantiation of a class to one "single" instance.
"""
instances = {}
# https://github.com/sphinx-doc/sphinx/issues/3783
@wraps(cls)
def getinstance():
"""Find an instance or save newly created one"""
if cls not in instances:
instances[cls] = cls()
return instances[cls]
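Usage is then plain decoration; every "instantiation" returns the one cached object. Note that, as written, the wrapper accepts no constructor arguments:

```python
@Singleton
class Settings(object):
    """Toy example assuming the Singleton decorator above is in scope."""
    def __init__(self):
        self.options = {}

assert Settings() is Settings()  # both calls return the same instance
```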

View File

@ -1,30 +1,43 @@
"""
src/state.py
=================================
Global runtime variables.
"""
import collections
neededPubkeys = {}
streamsInWhichIAmParticipating = []
# For UPnP
extPort = None
# for Tor hidden service
"""For UPnP"""
socksIP = None
# Network protocols availability, initialised below
networkProtocolAvailability = None
appdata = '' # holds the location of the application data storage directory
# Set to 1 by the doCleanShutdown function.
# Used to tell the proof of work worker threads to exit.
"""for Tor hidden service"""
appdata = ''
"""holds the location of the application data storage directory"""
shutdown = 0
"""
Set to 1 by the `.shutdown.doCleanShutdown` function.
Used to tell the threads to exit.
"""
# Component control flags - set on startup, do not change during runtime
# The defaults are for standalone GUI (default operating mode)
enableNetwork = True # enable network threads
enableObjProc = True # enable object processing threads
enableAPI = True # enable API (if configured)
enableGUI = True # enable GUI (QT or ncurses)
enableSTDIO = False # enable STDIO threads
enableNetwork = True
"""enable network threads"""
enableObjProc = True
"""enable object processing thread"""
enableAPI = True
"""enable API (if configured)"""
enableGUI = True
"""enable GUI (QT or ncurses)"""
enableSTDIO = False
"""enable STDIO threads"""
curses = False
sqlReady = False # set to true by sqlTread when ready for processing
sqlReady = False
"""set to true by `.threads.sqlThread` when ready for processing"""
maximumNumberOfHalfOpenConnections = 0
invThread = None
addrThread = None
@ -55,6 +68,8 @@ def resetNetworkProtocolAvailability():
resetNetworkProtocolAvailability()
discoveredPeers = {}
dandelion = 0
testmode = False
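As an illustration of how these flags combine, a hypothetical headless run (API plus networking, no GUI) would flip the GUI-related flags before the threads start:

```python
import state

# hypothetical daemon profile; the defaults target the standalone GUI
state.enableGUI = False     # no QT/ncurses frontend
state.enableSTDIO = False   # no STDIO API threads
# enableNetwork, enableObjProc and enableAPI keep their True defaults
```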

View File

@ -17,6 +17,7 @@ from bmconfigparser import BMConfigParser
from helper_msgcoding import MsgEncode, MsgDecode
from network import asyncore_pollchoose as asyncore
from network.connectionpool import BMConnectionPool
from network.node import Peer
from network.tcp import Socks4aBMConnection, Socks5BMConnection, TCPConnection
from queues import excQueue
@ -30,7 +31,7 @@ def pickle_knownnodes():
with open(knownnodes_file, 'wb') as dst:
pickle.dump({
stream: {
state.Peer(
Peer(
'%i.%i.%i.%i' % tuple([
random.randint(1, 255) for i in range(4)]),
8444): {'lastseen': now, 'rating': 0.1}
@ -90,7 +91,7 @@ class TestCore(unittest.TestCase):
"""initial fill script from network.tcp"""
BMConfigParser().set('bitmessagesettings', 'dontconnect', 'true')
try:
for peer in (state.Peer("127.0.0.1", 8448),):
for peer in (Peer("127.0.0.1", 8448),):
direct = TCPConnection(peer)
while asyncore.socket_map:
print("loop, state = %s" % direct.state)
@ -147,7 +148,7 @@ class TestCore(unittest.TestCase):
def _initiate_bootstrap(self):
BMConfigParser().set('bitmessagesettings', 'dontconnect', 'true')
self._outdate_knownnodes()
knownnodes.addKnownNode(1, state.Peer('127.0.0.1', 8444), is_self=True)
knownnodes.addKnownNode(1, Peer('127.0.0.1', 8444), is_self=True)
knownnodes.cleanupKnownNodes()
time.sleep(2)
@ -173,6 +174,22 @@ class TestCore(unittest.TestCase):
self.fail(
'Failed to connect during %s sec' % (time.time() - _started))
def test_onionservicesonly(self):
"""test onionservicesonly networking mode"""
BMConfigParser().set('bitmessagesettings', 'onionservicesonly', 'true')
self._initiate_bootstrap()
BMConfigParser().remove_option('bitmessagesettings', 'dontconnect')
for _ in range(360):
time.sleep(1)
for n, peer in enumerate(BMConnectionPool().outboundConnections):
if n > 2:
return
if not peer.host.endswith('.onion'):
self.fail(
'Found non onion hostname %s in outbound connections!'
% peer.host)
self.fail('Failed to connect to at least 3 nodes within 360 sec')
def test_bootstrap(self):
"""test bootstrapping"""
self._initiate_bootstrap()

69
src/tests/test_logger.py Normal file
View File

@ -0,0 +1,69 @@
"""
Testing the logger configuration
"""
import logging
import os
import tempfile
import unittest
class TestLogger(unittest.TestCase):
"""A test case for bmconfigparser"""
conf_template = '''
[loggers]
keys=root
[handlers]
keys=default
[formatters]
keys=default
[formatter_default]
format=%(asctime)s {1} %(message)s
[handler_default]
class=FileHandler
level=NOTSET
formatter=default
args=('{0}', 'w')
[logger_root]
level=DEBUG
handlers=default
'''
def test_fileConfig(self):
"""Put logging.dat with special pattern and check it was used"""
tmp = os.environ['BITMESSAGE_HOME'] = tempfile.gettempdir()
log_config = os.path.join(tmp, 'logging.dat')
log_file = os.path.join(tmp, 'debug.log')
def gen_log_config(pattern):
"""A small closure to generate logging.dat with custom pattern"""
with open(log_config, 'wb') as dst:
dst.write(self.conf_template.format(log_file, pattern))
pattern = r' o_0 '
gen_log_config(pattern)
try:
from pybitmessage.debug import logger, resetLogging
if not os.path.isfile(log_file): # second pass
pattern = r' <===> '
gen_log_config(pattern)
resetLogging()
except ImportError:
self.fail('There is no package pybitmessage. Things have gone wrong.')
finally:
os.remove(log_config)
logger_ = logging.getLogger('default')
self.assertEqual(logger, logger_)
logger_.info('Testing the logger...')
self.assertRegexpMatches(open(log_file).read(), pattern)

View File

@ -0,0 +1,39 @@
"""
Test for network group
"""
import unittest
class TestNetworkGroup(unittest.TestCase):
"""
Test case for network group
"""
def test_network_group(self):
"""Test various types of network groups"""
from pybitmessage.protocol import network_group
test_ip = '1.2.3.4'
self.assertEqual('\x01\x02', network_group(test_ip))
test_ip = '127.0.0.1'
self.assertEqual('IPv4', network_group(test_ip))
test_ip = '0102:0304:0506:0708:090A:0B0C:0D0E:0F10'
self.assertEqual(
'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C',
network_group(test_ip))
test_ip = 'bootstrap8444.bitmessage.org'
self.assertEqual(
'bootstrap8444.bitmessage.org',
network_group(test_ip))
test_ip = 'quzwelsuziwqgpt2.onion'
self.assertEqual(
test_ip,
network_group(test_ip))
test_ip = None
self.assertEqual(
None,
network_group(test_ip))
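These assertions imply the grouping rule: public IPv4 collapses to its /16 prefix, IPv6 to the first 12 bytes of the packed address, loopback/private ranges to a family label, and onion or DNS names to themselves. A sketch consistent with the test, not a copy of protocol.py (private-range handling is reduced to loopback and 10/8):

```python
import socket

def network_group_sketch(host):
    """Collapse a peer host into its anti-Sybil 'network group'."""
    if host is None:
        return None
    try:
        packed = socket.inet_aton(host)
        if packed[0:1] in (b'\x7f', b'\x0a'):  # loopback / RFC1918 10/8
            return 'IPv4'
        return packed[:2]  # /16 prefix
    except socket.error:
        pass
    try:
        return socket.inet_pton(socket.AF_INET6, host)[:12]
    except (socket.error, ValueError):
        return host  # hostnames and .onion addresses group as themselves
```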

46
src/threads.py Normal file
View File

@ -0,0 +1,46 @@
"""
PyBitmessage does various tasks in separate threads. Most of them inherit
from `.network.StoppableThread`. There are `addressGenerator` for
address generation, `objectProcessor` for processing network objects
that have passed minimal validation, `singleCleaner` to periodically clean
various internal storages (like inventory and knownnodes) and do forced
garbage collection, `singleWorker` for doing PoW, and `sqlThread` for
querying the sqlite database.
There are also other threads in the `.network` package.
:func:`set_thread_name` is defined here for the threads that don't inherit from
:class:`.network.StoppableThread`
"""
import threading
try:
import prctl
except ImportError:
def set_thread_name(name):
"""Set a name for the thread for python internal use."""
threading.current_thread().name = name
else:
def set_thread_name(name):
"""Set the thread name for external use (visible from the OS)."""
prctl.set_name(name)
def _thread_name_hack(self):
set_thread_name(self.name)
threading.Thread.__bootstrap_original__(self)
# pylint: disable=protected-access
threading.Thread.__bootstrap_original__ = threading.Thread._Thread__bootstrap
threading.Thread._Thread__bootstrap = _thread_name_hack
from class_addressGenerator import addressGenerator
from class_objectProcessor import objectProcessor
from class_singleCleaner import singleCleaner
from class_singleWorker import singleWorker
from class_sqlThread import sqlThread
__all__ = [
"addressGenerator", "objectProcessor", "singleCleaner", "singleWorker",
"sqlThread"
]
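Once imported for its side effect, the bootstrap monkeypatch pushes every `threading.Thread`'s Python-level name to the OS when `prctl` is installed; `set_thread_name` covers threads created some other way. Illustrative usage (the import path assumes the installed `pybitmessage` package, as the tests do):

```python
import threading

from pybitmessage.threads import set_thread_name  # path is an assumption

def serve():
    set_thread_name('bm-api-server')  # shows up in ps/htop if prctl exists
    # ... serve requests ...

threading.Thread(target=serve, name='bm-api-server').start()
```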

View File

@ -1,21 +1,17 @@
"""
src/tr.py
=================================
Translating text
"""
# pylint: disable=relative-import
import os
import state
"""This is used so that the translateText function can be used """
"""when we are in daemon mode and not using any QT functions."""
class translateClass: # pylint: disable=old-style-class, too-few-public-methods
class translateClass:
"""
This is used so that the translateText function can be used when we are
in daemon mode and not using any QT functions.
This is used so that the translateText function can be used
when we are in daemon mode and not using any QT functions.
"""
# pylint: disable=old-style-class,too-few-public-methods
def __init__(self, context, text):
self.context = context
self.text = text
@ -29,12 +25,9 @@ class translateClass: # pylint: disable=old-style-class, too-few-public-m
return self.text
def _translate(context, text, disambiguation=None, encoding=None, n=None): # pylint: disable=unused-argument
def _translate(context, text, disambiguation=None, encoding=None, n=None): # pylint: disable=unused-argument
return translateText(context, text, n)
# def _translate(context, text, disambiguation = None, encoding = None, n = None):
# return translateClass(context, text.replace('%','',1))
def translateText(context, text, n=None):
"""Translate text in context"""
@ -52,6 +45,7 @@ def translateText(context, text, n=None):
If you want to run in daemon mode, see https://bitmessage.org/wiki/Daemon')
print ('Error message:', err)
os._exit(0) # pylint: disable=protected-access
if n is None:
return QtGui.QApplication.translate(context, text)
return QtGui.QApplication.translate(context, text, None, QtCore.QCoreApplication.CodecForTr, n)
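The net effect in daemon mode: `_translate` hands back the source text, with Qt-style argument substitution reduced to a no-op. A toy equivalent of that fallback path, not the module's actual classes:

```python
class TranslateFallback(object):
    """Daemon-mode stand-in: no Qt available, so return the source text."""
    def __init__(self, context, text):
        self.context = context
        self.text = text

    def arg(self, _argument):
        """Qt's QString.arg() would substitute here; we keep the text as-is."""
        return self

def translate_fallback(context, text, n=None):
    return TranslateFallback(context, text).text

# translate_fallback('MainWindow', 'Send') == 'Send'
```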

View File

@ -1,9 +1,6 @@
# pylint: disable=too-many-statements,too-many-branches,protected-access,no-self-use
"""
src/upnp.py
===========
A simple upnp module to forward port for BitMessage
Complete UPnP port forwarding implementation in separate thread.
Reference: http://mattscodecave.com/posts/using-python-and-upnp-to-forward-a-port
"""
@ -21,8 +18,9 @@ import state
import tr
from bmconfigparser import BMConfigParser
from debug import logger
from helper_threading import StoppableThread
from network.connectionpool import BMConnectionPool
from network.threads import StoppableThread
from network.node import Peer
def createRequestXML(service, action, arguments=None):
@ -263,7 +261,7 @@ class uPnPThread(StoppableThread):
self.routers.append(newRouter)
self.createPortMapping(newRouter)
try:
self_peer = state.Peer(
self_peer = Peer(
newRouter.GetExternalIPAddress(),
self.extPort
)
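For reference, the request the thread ultimately sends for each mapping is a SOAP AddPortMapping call against the router's WANIPConnection service. A minimal sketch of the envelope `createRequestXML` would produce for it (argument names are from the UPnP spec; the HTTP POST with its SOAPAction header is omitted):

```python
def add_port_mapping_xml(external_port, internal_port, internal_client,
                         protocol='TCP', description='BitMessage'):
    """Build the SOAP body for a WANIPConnection AddPortMapping call."""
    arguments = (
        ('NewRemoteHost', ''),
        ('NewExternalPort', str(external_port)),
        ('NewProtocol', protocol),
        ('NewInternalPort', str(internal_port)),
        ('NewInternalClient', internal_client),
        ('NewEnabled', '1'),
        ('NewPortMappingDescription', description),
        ('NewLeaseDuration', '0'),
    )
    body = ''.join('<%s>%s</%s>' % (n, v, n) for n, v in arguments)
    return (
        '<?xml version="1.0"?>'
        '<s:Envelope'
        ' xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"'
        ' s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">'
        '<s:Body><u:AddPortMapping'
        ' xmlns:u="urn:schemas-upnp-org:service:WANIPConnection:1">'
        '%s</u:AddPortMapping></s:Body></s:Envelope>' % body
    )
```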