resolve conflicts

Navjot 2019-09-17 19:08:05 +05:30
commit 1177e17c81
No known key found for this signature in database
GPG Key ID: 9EE70AFD71357F1C
54 changed files with 1338 additions and 452 deletions

buildscripts/builder.sh Executable file
View File

@ -0,0 +1,173 @@
#!/bin/bash
# INIT
MACHINE_TYPE=`uname -m`
BASE_DIR=$(pwd)
PYTHON_VERSION=2.7.15
PYQT_VERSION=4-4.11.4-gpl-Py2.7-Qt4.8.7
OPENSSL_VERSION=1_0_2t
DIRECTORY32BIT=SoftwareDownloads32bit
DIRECTORY64BIT=SoftwareDownloads64bit
if [ ${MACHINE_TYPE} == 'x86_64' ]; then
if [ ! -d "$DIRECTORY64BIT" ]; then
mkdir SoftwareDownloads64bit
cd SoftwareDownloads64bit
else
echo "Directory already exists"
cd SoftwareDownloads64bit
fi
else
if [ ! -d "$DIRECTORY32BIT" ]; then
mkdir SoftwareDownloads32bit
cd SoftwareDownloads32bit
else
echo "Directory 32 bit alrready exists"
cd SoftwareDownloads32bit
fi
fi
#Functions
function install_wine {
wget -nc https://dl.winehq.org/wine-builds/Release.key --no-check-certificate
sudo apt-key add Release.key
sudo apt-add-repository 'https://dl.winehq.org/wine-builds/ubuntu/'
sudo apt-get -y update
sudo apt-get -y install wine1.8 winetricks
if [ ${MACHINE_TYPE} == 'x86_64' ]; then
sudo apt-get -y install wine64-development
env WINEPREFIX=$HOME/.wine64 WINEARCH=win64 winecfg
WINE="env WINEPREFIX=$HOME/.wine64 wine"
export WINEPREFIX
else
sudo apt-get -y install wine32-development
env WINEPREFIX=$HOME/.wine32 WINEARCH=win32 winecfg
WINE="env WINEPREFIX=$HOME/.wine32 wine"
export WINEPREFIX
fi
}
function install_python(){
echo "Download Python2.7"
if [ ${MACHINE_TYPE} == 'x86_64' ]; then
# For 64 bit machine
wget -nc http://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}.amd64.msi --no-check-certificate
echo "Install Python2.7 for 64 bit"
$WINE msiexec -i python-${PYTHON_VERSION}.amd64.msi /q /norestart
wget -nc https://download.microsoft.com/download/d/2/4/d242c3fb-da5a-4542-ad66-f9661d0a8d19/vcredist_x64.exe --no-check-certificate
$WINE vcredist_x64.exe /q /norestart
echo "Installed vcredist for 64 bit"
$WINE pip install --upgrade pip
else
# For 32 bit machine
wget -nc https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}.msi --no-check-certificate
echo "Install Python2.7 for 32 bit"
$WINE msiexec -i python-${PYTHON_VERSION}.msi /q /norestart
echo "Installing vc_redist for 32 bit "
wget -nc https://download.microsoft.com/download/1/1/1/1116b75a-9ec3-481a-a3c8-1777b5381140/vcredist_x86.exe --no-check-certificate
$WINE vcredist_x86.exe /q /norestart
# install msvcr120.dll for 32 bit system
wget -nc http://www.dll-found.com/zip/m/msvcr120.dll.zip --no-check-certificate
unzip msvcr120.dll.zip
sudo cp msvcr120.dll $HOME/.wine32/drive_c/windows/system32/
$WINE pip install --upgrade pip
fi
}
function install_pyqt(){
echo "Download PyQT"
if [ ${MACHINE_TYPE} == 'x86_64' ]; then
# For 64 bit machine
wget -nc --content-disposition https://github.com/Bitmessage/ThirdPartyLibraries/blob/master/PyQt4-4.11.4-gpl-Py2.7-Qt4.8.7-x64.exe?raw=true --no-check-certificate
$WINE PyQt4-4.11.4-gpl-Py2.7-Qt4.8.7-x64.exe /q /norestart /silent /verysilent /sp- /suppressmsgboxes
else
# For 32 bit machine
wget -nc --content-disposition https://github.com/Bitmessage/ThirdPartyLibraries/blob/master/PyQt4-4.11.4-gpl-Py2.7-Qt4.8.7-x32.exe?raw=true --no-check-certificate
$WINE PyQt4-4.11.4-gpl-Py2.7-Qt4.8.7-x32.exe /q /norestart /silent /verysilent /sp- /suppressmsgboxes
fi
}
function install_openssl(){
if [ ${MACHINE_TYPE} == 'x86_64' ]; then
wget -nc --content-disposition https://github.com/Bitmessage/ThirdPartyLibraries/blob/master/Win64OpenSSL-${OPENSSL_VERSION}.exe?raw=true --no-check-certificate
$WINE Win64OpenSSL-${OPENSSL_VERSION}.exe /q /norestart /silent /verysilent /sp- /suppressmsgboxes
else
wget -nc --content-disposition https://github.com/Bitmessage/ThirdPartyLibraries/blob/master/Win32OpenSSL-${OPENSSL_VERSION}.exe?raw=true --no-check-certificate
$WINE Win32OpenSSL-${OPENSSL_VERSION}.exe /q /norestart /silent /verysilent /sp- /suppressmsgboxes
echo "Install PyInstaller 32 bit"
fi
}
function install_pyinstaller()
{
$WINE pip install pyinstaller
echo "Install PyInstaller"
echo "Install Pyopencl"
if [ ${MACHINE_TYPE} == 'x86_64' ]; then
wget -nc --content-disposition https://github.com/Bitmessage/ThirdPartyLibraries/blob/master/pyopencl-2015.1-cp27-none-win_amd64.whl?raw=true --no-check-certificate
$WINE pip install pyopencl-2015.1-cp27-none-win_amd64.whl
$WINE pip install msgpack-python
else
wget -nc --content-disposition https://github.com/Bitmessage/ThirdPartyLibraries/blob/master/pyopencl-2015.1-cp27-none-win32.whl?raw=true --no-check-certificate
$WINE pip install msgpack-python
$WINE pip install pyopencl-2015.1-cp27-none-win32.whl
fi
echo "Install Message Pack"
}
function build_dll(){
cd $BASE_DIR
rm -rf master.zip
rm -rf PyBitmessage
git clone https://github.com/Bitmessage/PyBitmessage.git
cd PyBitmessage/src/bitmsghash
if [ ${MACHINE_TYPE} == 'x86_64' ]; then
# Do stuff for 64 bit machine
echo "Install MinGW"
sudo apt-get -y install mingw-w64
echo "Create dll"
x86_64-w64-mingw32-g++ -D_WIN32 -Wall -O3 -march=native -I$HOME/.wine64/drive_c/OpenSSL-Win64/include -I/usr/x86_64-w64-mingw32/include -L$HOME/.wine64/drive_c/OpenSSL-Win64/lib -c bitmsghash.cpp
x86_64-w64-mingw32-g++ -static-libgcc -shared bitmsghash.o -D_WIN32 -O3 -march=native -I$HOME/.wine64/drive_c/OpenSSL-Win64/include -L$HOME/.wine64/drive_c/OpenSSL-Win64 -L/usr/lib/x86_64-linux-gnu/wine -fPIC -shared -lcrypt32 -leay32 -lwsock32 -o bitmsghash64.dll -Wl,--out-implib,bitmsghash.a
echo "DLL generated successfully "
cd ..
cp -R bitmsghash ../../../src/
cd ../../../
cd packages/pyinstaller/
env WINEPREFIX=$HOME/.wine64 wine pyinstaller bitmessagemain.spec
else
echo "Install MinGW for 32 bit"
sudo apt-get install mingw-w64
echo "Create dll"
i686-w64-mingw32-g++ -D_WIN32 -Wall -m32 -O3 -march=native -I$HOME/.wine32/drive_c/OpenSSL-Win32/include -I/usr/i686-w64-mingw32/include -L$HOME/.wine32/drive_c/OpenSSL-Win32/lib -c bitmsghash.cpp
i686-w64-mingw32-g++ -static-libgcc -shared bitmsghash.o -D_WIN32 -O3 -march=native -I$HOME/.wine32/drive_c/OpenSSL-Win32/include -L$HOME/.wine32/drive_c/OpenSSL-Win32/lib/MinGW -fPIC -shared -lcrypt32 -leay32 -lwsock32 -o bitmsghash32.dll -Wl,--out-implib,bitmsghash.a
cd ..
cp -R bitmsghash ../../../src/
cd ../../../
cd packages/pyinstaller/
env WINEPREFIX=$HOME/.wine32 wine pyinstaller bitmessagemain.spec
fi
}
install_wine
install_python
install_pyqt
install_openssl
install_pyinstaller
build_dll

View File

@ -1,22 +1,42 @@
import ctypes
import os
import time
import sys
if ctypes.sizeof(ctypes.c_voidp) == 4:
arch=32
else:
arch=64
sslName = 'OpenSSL-Win%s' % ("32" if arch == 32 else "64")
site_root = os.path.abspath(HOMEPATH)
spec_root = os.path.abspath(SPECPATH)
cdrivePath= site_root[0:3]
srcPath = spec_root[:-20]+"src\\"
qtPath = site_root+"\\PyQt4\\"
openSSLPath = cdrivePath+sslName+"\\"
msvcrDllPath = cdrivePath+"windows\\system32\\"
pythonDllPath = cdrivePath+"Python27\\"
outPath = spec_root+"\\bitmessagemain"
importPath = srcPath
sys.path.insert(0,importPath)
os.chdir(sys.path[0])
from version import softwareVersion
srcPath = "C:\\src\\PyBitmessage\\src\\"
qtPath = "C:\\Qt-4.8.7\\"
openSSLPath = "C:\\OpenSSL-1.0.2j\\bin\\"
outPath = "C:\\src\\PyInstaller-3.2.1\\bitmessagemain"
today = time.strftime("%Y%m%d")
snapshot = False
os.rename(os.path.join(srcPath, '__init__.py'), os.path.join(srcPath, '__init__.py.backup'))
# -*- mode: python -*-
a = Analysis([srcPath + 'bitmessagemain.py'],
a = Analysis(
[srcPath + 'bitmessagemain.py'],
pathex=[outPath],
hiddenimports=[],
hiddenimports=['pyopencl','numpy', 'win32com' , 'setuptools.msvc' ,'_cffi_backend'],
hookspath=None,
runtime_hooks=None)
runtime_hooks=None
)
os.rename(os.path.join(srcPath, '__init__.py.backup'), os.path.join(srcPath, '__init__.py'))
@ -46,20 +66,17 @@ def addUIs():
a.datas += addTranslations()
a.datas += addUIs()
if ctypes.sizeof(ctypes.c_voidp) == 4:
arch=32
else:
arch=64
a.binaries += [('libeay32.dll', openSSLPath + 'libeay32.dll', 'BINARY'),
('python27.dll', pythonDllPath + 'python27.dll', 'BINARY'),
('msvcr120.dll', msvcrDllPath + 'msvcr120.dll','BINARY'),
(os.path.join('bitmsghash', 'bitmsghash%i.dll' % (arch)), os.path.join(srcPath, 'bitmsghash', 'bitmsghash%i.dll' % (arch)), 'BINARY'),
(os.path.join('bitmsghash', 'bitmsghash.cl'), os.path.join(srcPath, 'bitmsghash', 'bitmsghash.cl'), 'BINARY'),
(os.path.join('sslkeys', 'cert.pem'), os.path.join(srcPath, 'sslkeys', 'cert.pem'), 'BINARY'),
(os.path.join('sslkeys', 'key.pem'), os.path.join(srcPath, 'sslkeys', 'key.pem'), 'BINARY')
]
with open(os.path.join(srcPath, 'version.py'), 'rt') as f:
softwareVersion = f.readline().split('\'')[1]
fname = 'Bitmessage_%s_%s.exe' % ("x86" if arch == 32 else "x64", softwareVersion)
if snapshot:
@ -72,8 +89,18 @@ exe = EXE(pyz,
a.zipfiles,
a.datas,
a.binaries,
[],
name=fname,
debug=False,
strip=None,
upx=False,
console=False, icon= os.path.join(srcPath, 'images', 'can-icon.ico'))
upx=True,
console=True, icon= os.path.join(srcPath, 'images', 'can-icon.ico'))
coll = COLLECT(exe,
a.binaries,
a.zipfiles,
a.datas,
strip=False,
upx=True,
name='main')

View File

@ -16,6 +16,7 @@ EXTRAS_REQUIRE = {
'prctl': ['python_prctl'], # Named threads
'qrcode': ['qrcode'],
'sound;platform_system=="Windows"': ['winsound'],
'tor': ['stem'],
'docs': [
'sphinx', # fab build_docs
'graphviz', # fab build_docs
@ -147,6 +148,9 @@ if __name__ == "__main__":
'libmessaging ='
'pybitmessage.plugins.indicator_libmessaging [gir]'
],
'bitmessage.proxyconfig': [
'stem = pybitmessage.plugins.proxyconfig_stem [tor]'
],
# 'console_scripts': [
# 'pybitmessage = pybitmessage.bitmessagemain:main'
# ]
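The 'bitmessage.proxyconfig' entry point group added above is the one that Main.start_proxyconfig() (in the src/bitmessagemain.py hunk below) resolves by proxy type and then calls with the config object. A minimal, hypothetical sketch of how a named plugin in that group can be resolved with pkg_resources; this is an illustration only, not the project's plugins.plugin.get_plugin implementation:
# Hypothetical sketch: resolve a plugin from the 'bitmessage.proxyconfig'
# entry point group; load_proxyconfig_plugin is not part of this commit.
import pkg_resources
def load_proxyconfig_plugin(name):
    """Return the loaded object of the first matching entry point, or None."""
    for ep in pkg_resources.iter_entry_points('bitmessage.proxyconfig', name):
        return ep.load()
    return None
# plugin = load_proxyconfig_plugin('stem')
# if plugin:
#     plugin(config)  # start_proxyconfig() calls the plugin with the BMConfigParser instance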

View File

@ -21,7 +21,6 @@ import json
import random # nosec
import socket
import subprocess
import threading
import time
from binascii import hexlify, unhexlify
from SimpleXMLRPCServer import SimpleXMLRPCRequestHandler, SimpleXMLRPCServer
@ -32,7 +31,6 @@ from version import softwareVersion
import defaults
import helper_inbox
import helper_sent
import helper_threading
import network.stats
import proofofwork
import queues
@ -44,6 +42,7 @@ from bmconfigparser import BMConfigParser
from debug import logger
from helper_ackPayload import genAckPayload
from helper_sql import SqlBulkExecute, sqlExecute, sqlQuery, sqlStoredProcedure
from helper_threading import StoppableThread
from inventory import Inventory
str_chan = '[chan]'
@ -73,11 +72,10 @@ class StoppableXMLRPCServer(SimpleXMLRPCServer):
# This thread, of which there is only one, runs the API.
class singleAPI(threading.Thread, helper_threading.StoppableThread):
class singleAPI(StoppableThread):
"""API thread"""
def __init__(self):
threading.Thread.__init__(self, name="singleAPI")
self.initStop()
name = "singleAPI"
def stopThread(self):
super(singleAPI, self).stopThread()

View File

@ -88,10 +88,10 @@ def connectToStream(streamNumber):
with knownnodes.knownNodesLock:
if streamNumber not in knownnodes.knownNodes:
knownnodes.knownNodes[streamNumber] = {}
if streamNumber*2 not in knownnodes.knownNodes:
knownnodes.knownNodes[streamNumber*2] = {}
if streamNumber*2+1 not in knownnodes.knownNodes:
knownnodes.knownNodes[streamNumber*2+1] = {}
if streamNumber * 2 not in knownnodes.knownNodes:
knownnodes.knownNodes[streamNumber * 2] = {}
if streamNumber * 2 + 1 not in knownnodes.knownNodes:
knownnodes.knownNodes[streamNumber * 2 + 1] = {}
BMConnectionPool().connectToStream(streamNumber)
@ -185,11 +185,32 @@ def signal_handler(signum, frame):
class Main:
@staticmethod
def start_proxyconfig(config):
"""Check socksproxytype and start any proxy configuration plugin"""
proxy_type = config.safeGet('bitmessagesettings', 'socksproxytype')
if proxy_type not in ('none', 'SOCKS4a', 'SOCKS5'):
# pylint: disable=relative-import
from plugins.plugin import get_plugin
try:
proxyconfig_start = time.time()
get_plugin('proxyconfig', name=proxy_type)(config)
except TypeError:
logger.error(
'Failed to run proxy config plugin %s',
proxy_type, exc_info=True)
shutdown.doCleanShutdown()
sys.exit(2)
else:
logger.info(
'Started proxy config plugin %s in %s sec',
proxy_type, time.time() - proxyconfig_start)
def start(self):
_fixSocket()
daemon = BMConfigParser().safeGetBoolean(
'bitmessagesettings', 'daemon')
config = BMConfigParser()
daemon = config.safeGetBoolean('bitmessagesettings', 'daemon')
try:
opts, args = getopt.getopt(
@ -217,7 +238,6 @@ class Main:
# Fallback: in case when no api command was issued
state.last_api_response = time.time()
# Apply special settings
config = BMConfigParser()
config.set(
'bitmessagesettings', 'apienabled', 'true')
config.set(
@ -261,14 +281,14 @@ class Main:
helper_threading.set_thread_name("PyBitmessage")
state.dandelion = BMConfigParser().safeGetInt('network', 'dandelion')
state.dandelion = config.safeGetInt('network', 'dandelion')
# dandelion requires outbound connections, without them,
# stem objects will get stuck forever
if state.dandelion and not BMConfigParser().safeGetBoolean(
if state.dandelion and not config.safeGetBoolean(
'bitmessagesettings', 'sendoutgoingconnections'):
state.dandelion = 0
if state.testmode or BMConfigParser().safeGetBoolean(
if state.testmode or config.safeGetBoolean(
'bitmessagesettings', 'extralowdifficulty'):
defaults.networkDefaultProofOfWorkNonceTrialsPerByte = int(
defaults.networkDefaultProofOfWorkNonceTrialsPerByte / 100)
@ -302,15 +322,15 @@ class Main:
# Enable object processor and SMTP only if objproc enabled
if state.enableObjProc:
# SMTP delivery thread
if daemon and BMConfigParser().safeGet(
"bitmessagesettings", "smtpdeliver", '') != '':
if daemon and config.safeGet(
'bitmessagesettings', 'smtpdeliver', '') != '':
from class_smtpDeliver import smtpDeliver
smtpDeliveryThread = smtpDeliver()
smtpDeliveryThread.start()
# SMTP daemon thread
if daemon and BMConfigParser().safeGetBoolean(
"bitmessagesettings", "smtpd"):
if daemon and config.safeGetBoolean(
'bitmessagesettings', 'smtpd'):
from class_smtpServer import smtpServer
smtpServerThread = smtpServer()
smtpServerThread.start()
@ -332,7 +352,7 @@ class Main:
shared.reloadMyAddressHashes()
shared.reloadBroadcastSendersForWhichImWatching()
# API is also objproc dependent
if BMConfigParser().safeGetBoolean('bitmessagesettings', 'apienabled'):
if config.safeGetBoolean('bitmessagesettings', 'apienabled'):
import api # pylint: disable=relative-import
singleAPIThread = api.singleAPI()
# close the main program even if there are threads left
@ -340,11 +360,12 @@ class Main:
singleAPIThread.start()
# start network components if networking is enabled
if state.enableNetwork:
self.start_proxyconfig(config)
BMConnectionPool()
asyncoreThread = BMNetworkThread()
asyncoreThread.daemon = True
asyncoreThread.start()
for i in range(BMConfigParser().getint("threads", "receive")):
for i in range(config.getint('threads', 'receive')):
receiveQueueThread = ReceiveQueueThread(i)
receiveQueueThread.daemon = True
receiveQueueThread.start()
@ -365,8 +386,7 @@ class Main:
state.uploadThread.start()
connectToStream(1)
if BMConfigParser().safeGetBoolean(
'bitmessagesettings', 'upnp'):
if config.safeGetBoolean('bitmessagesettings', 'upnp'):
import upnp
upnpThread = upnp.uPnPThread()
upnpThread.start()
@ -382,7 +402,7 @@ class Main:
bitmessagecurses.runwrapper()
elif state.kivy:
BMConfigParser().remove_option('bitmessagesettings', 'dontconnect')
config.remove_option('bitmessagesettings', 'dontconnect')
from bitmessagekivy.mpybit import NavigateApp
state.kivyapp = NavigateApp()
state.kivyapp.run()
@ -390,13 +410,13 @@ class Main:
import bitmessageqt
bitmessageqt.run()
else:
BMConfigParser().remove_option('bitmessagesettings', 'dontconnect')
config.remove_option('bitmessagesettings', 'dontconnect')
if daemon:
while state.shutdown == 0:
time.sleep(1)
if (state.testmode and
time.time() - state.last_api_response >= 30):
if (
state.testmode and time.time() - state.last_api_response >= 30):
self.stop()
elif not state.enableGUI:
from tests import core as test_core # pylint: disable=relative-import

View File

@ -3270,8 +3270,8 @@ class MyForm(settingsmixin.SMainWindow):
tableWidget.model().removeRows(r.topRow(), r.bottomRow()-r.topRow()+1)
idCount = len(inventoryHashesToTrash)
sqlExecuteChunked(
"DELETE FROM inbox" if folder == "trash" or shifted else
"UPDATE inbox SET folder='trash'"
("DELETE FROM inbox" if folder == "trash" or shifted else
"UPDATE inbox SET folder='trash'") +
" WHERE msgid IN ({0})", idCount, *inventoryHashesToTrash)
tableWidget.selectRow(0 if currentRow == 0 else currentRow - 1)
tableWidget.setUpdatesEnabled(True)

View File

@ -1,12 +1,18 @@
from HTMLParser import HTMLParser
"""Subclass of HTMLParser.HTMLParser for MessageView widget"""
import inspect
import re
from urllib import quote, quote_plus
from HTMLParser import HTMLParser
from urllib import quote_plus
from urlparse import urlparse
class SafeHTMLParser(HTMLParser):
"""HTML parser with sanitisation"""
# from html5lib.sanitiser
acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area',
acceptable_elements = (
'a', 'abbr', 'acronym', 'address', 'area',
'article', 'aside', 'audio', 'b', 'big', 'blockquote', 'br', 'button',
'canvas', 'caption', 'center', 'cite', 'code', 'col', 'colgroup',
'command', 'datagrid', 'datalist', 'dd', 'del', 'details', 'dfn',
@ -18,34 +24,50 @@ class SafeHTMLParser(HTMLParser):
'p', 'pre', 'progress', 'q', 's', 'samp', 'section', 'select',
'small', 'sound', 'source', 'spacer', 'span', 'strike', 'strong',
'sub', 'sup', 'table', 'tbody', 'td', 'textarea', 'time', 'tfoot',
'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var', 'video']
replaces_pre = [["&", "&amp;"], ["\"", "&quot;"], ["<", "&lt;"], [">", "&gt;"]]
replaces_post = [["\n", "<br/>"], ["\t", "&nbsp;&nbsp;&nbsp;&nbsp;"], [" ", "&nbsp; "], [" ", "&nbsp; "], ["<br/> ", "<br/>&nbsp;"]]
src_schemes = [ "data" ]
#uriregex1 = re.compile(r'(?i)\b((?:(https?|ftp|bitcoin):(?:/{1,3}|[a-z0-9%])|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'".,<>?]))')
uriregex1 = re.compile(r'((https?|ftp|bitcoin):(?:/{1,3}|[a-z0-9%])(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)')
'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var', 'video'
)
replaces_pre = (
("&", "&amp;"), ("\"", "&quot;"), ("<", "&lt;"), (">", "&gt;"))
replaces_post = (
("\n", "<br/>"), ("\t", "&nbsp;&nbsp;&nbsp;&nbsp;"),
(" ", "&nbsp; "), (" ", "&nbsp; "), ("<br/> ", "<br/>&nbsp;"))
src_schemes = ["data"]
# uriregex1 = re.compile(
# r'(?i)\b((?:(https?|ftp|bitcoin):(?:/{1,3}|[a-z0-9%])'
# r'|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)'
# r'(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))'
# r'+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'".,<>?]))')
uriregex1 = re.compile(
r'((https?|ftp|bitcoin):(?:/{1,3}|[a-z0-9%])'
r'(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)'
)
uriregex2 = re.compile(r'<a href="([^"]+)&amp;')
emailregex = re.compile(r'\b([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,})\b')
emailregex = re.compile(
r'\b([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,})\b')
@staticmethod
def replace_pre(text):
"""Perform substring replacement before regex replacements"""
for a in SafeHTMLParser.replaces_pre:
text = text.replace(a[0], a[1])
text = text.replace(*a)
return text
@staticmethod
def replace_post(text):
"""Perform substring replacement after regex replacements"""
for a in SafeHTMLParser.replaces_post:
text = text.replace(a[0], a[1])
text = text.replace(*a)
if len(text) > 1 and text[0] == " ":
text = "&nbsp;" + text[1:]
return text
def __init__(self, *args, **kwargs):
HTMLParser.__init__(self, *args, **kwargs)
self.reset()
self.reset_safe()
def reset_safe(self):
"""Reset runtime variables specific to this class"""
self.elements = set()
self.raw = u""
self.sanitised = u""
@ -53,8 +75,9 @@ class SafeHTMLParser(HTMLParser):
self.allow_picture = False
self.allow_external_src = False
def add_if_acceptable(self, tag, attrs = None):
if tag not in SafeHTMLParser.acceptable_elements:
def add_if_acceptable(self, tag, attrs=None):
"""Add tag if it passes sanitisation"""
if tag not in self.acceptable_elements:
return
self.sanitised += "<"
if inspect.stack()[1][3] == "handle_endtag":
@ -66,7 +89,7 @@ class SafeHTMLParser(HTMLParser):
val = ""
elif attr == "src" and not self.allow_external_src:
url = urlparse(val)
if url.scheme not in SafeHTMLParser.src_schemes:
if url.scheme not in self.src_schemes:
val = ""
self.sanitised += " " + quote_plus(attr)
if not (val is None):
@ -76,7 +99,7 @@ class SafeHTMLParser(HTMLParser):
self.sanitised += ">"
def handle_starttag(self, tag, attrs):
if tag in SafeHTMLParser.acceptable_elements:
if tag in self.acceptable_elements:
self.has_html = True
self.add_if_acceptable(tag, attrs)
@ -84,7 +107,7 @@ class SafeHTMLParser(HTMLParser):
self.add_if_acceptable(tag)
def handle_startendtag(self, tag, attrs):
if tag in SafeHTMLParser.acceptable_elements:
if tag in self.acceptable_elements:
self.has_html = True
self.add_if_acceptable(tag, attrs)
@ -104,15 +127,14 @@ class SafeHTMLParser(HTMLParser):
data = unicode(data, 'utf-8', errors='replace')
HTMLParser.feed(self, data)
tmp = SafeHTMLParser.replace_pre(data)
tmp = SafeHTMLParser.uriregex1.sub(
r'<a href="\1">\1</a>',
tmp)
tmp = SafeHTMLParser.uriregex2.sub(r'<a href="\1&', tmp)
tmp = SafeHTMLParser.emailregex.sub(r'<a href="mailto:\1">\1</a>', tmp)
tmp = self.uriregex1.sub(r'<a href="\1">\1</a>', tmp)
tmp = self.uriregex2.sub(r'<a href="\1&', tmp)
tmp = self.emailregex.sub(r'<a href="mailto:\1">\1</a>', tmp)
tmp = SafeHTMLParser.replace_post(tmp)
self.raw += tmp
def is_html(self, text = None, allow_picture = False):
def is_html(self, text=None, allow_picture=False):
"""Detect if string contains HTML tags"""
if text:
self.reset()
self.reset_safe()
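For orientation, a short, hypothetical usage sketch of the refactored SafeHTMLParser, using only the names visible in this diff (feed, has_html, sanitised, raw); the import path is assumed and the snippet is not part of the commit:
# Hypothetical usage sketch of SafeHTMLParser; assumes src/ is on sys.path.
from safehtmlparser import SafeHTMLParser
parser = SafeHTMLParser()
parser.feed('<p>Hello <b>world</b>, see https://bitmessage.org</p>')
print parser.has_html   # True once an acceptable tag was seen
print parser.sanitised  # markup rebuilt from acceptable elements only
print parser.raw        # escaped text with URLs and e-mail addresses turned into links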

View File

@ -1,3 +1,4 @@
"""Building osx."""
from glob import glob
import os
from PyQt4 import QtCore
@ -17,15 +18,15 @@ DATA_FILES = [
]
setup(
name = name,
version = version,
app = mainscript,
data_files = DATA_FILES,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
name=name,
version=version,
app=mainscript,
data_files=DATA_FILES,
setup_requires=["py2app"],
options=dict(
py2app=dict(
includes=['sip', 'PyQt4._qt'],
iconfile="images/bitmessage.icns"
)
)
)

View File

@ -1,6 +1,5 @@
import time
import threading
import hashlib
from binascii import hexlify
from pyelliptic import arithmetic
@ -18,12 +17,9 @@ from fallback import RIPEMD160Hash
from helper_threading import StoppableThread
class addressGenerator(threading.Thread, StoppableThread):
class addressGenerator(StoppableThread):
def __init__(self):
# QThread.__init__(self, parent)
threading.Thread.__init__(self, name="addressGenerator")
self.initStop()
name = "addressGenerator"
def stopThread(self):
try:

View File

@ -35,12 +35,13 @@ class objectProcessor(threading.Thread):
objects (msg, broadcast, pubkey, getpubkey) from the receiveDataThreads.
"""
def __init__(self):
threading.Thread.__init__(self, name="objectProcessor")
random.seed()
# It may be the case that the last time Bitmessage was running,
# the user closed it before it finished processing everything in the
# objectProcessorQueue. Assuming that Bitmessage wasn't closed
# forcefully, it should have saved the data in the queue into the
# objectprocessorqueue table. Let's pull it out.
threading.Thread.__init__(self, name="objectProcessor")
queryreturn = sqlQuery(
'''SELECT objecttype, data FROM objectprocessorqueue''')
for row in queryreturn:

View File

@ -21,7 +21,6 @@ resends msg messages in 5 days (then 10 days, then 20 days, etc...)
import gc
import os
import shared
import threading
import time
import tr
@ -36,14 +35,11 @@ import queues
import state
class singleCleaner(threading.Thread, StoppableThread):
class singleCleaner(StoppableThread):
name = "singleCleaner"
cycleLength = 300
expireDiscoveredPeers = 300
def __init__(self):
threading.Thread.__init__(self, name="singleCleaner")
self.initStop()
def run(self):
gc.disable()
timeWeLastClearedInventoryAndPubkeysTables = 0

View File

@ -7,7 +7,6 @@ src/class_singleWorker.py
from __future__ import division
import hashlib
import threading
import time
from binascii import hexlify, unhexlify
from struct import pack
@ -45,12 +44,11 @@ def sizeof_fmt(num, suffix='h/s'):
return "%.1f%s%s" % (num, 'Yi', suffix)
class singleWorker(threading.Thread, StoppableThread):
class singleWorker(StoppableThread):
"""Thread for performing PoW"""
def __init__(self):
threading.Thread.__init__(self, name="singleWorker")
self.initStop()
super(singleWorker, self).__init__(name="singleWorker")
proofofwork.init()
def stopThread(self):

View File

@ -6,7 +6,6 @@ src/class_smtpDeliver.py
import smtplib
import sys
import threading
import urlparse
from email.header import Header
from email.mime.text import MIMEText
@ -20,14 +19,11 @@ from helper_threading import StoppableThread
SMTPDOMAIN = "bmaddr.lan"
class smtpDeliver(threading.Thread, StoppableThread):
class smtpDeliver(StoppableThread):
"""SMTP client thread for delivery"""
name = "smtpDeliver"
_instance = None
def __init__(self):
threading.Thread.__init__(self, name="smtpDeliver")
self.initStop()
def stopThread(self):
try:
queues.UISignallerQueue.put(("stopThread", "data")) # pylint: disable=no-member

View File

@ -154,10 +154,10 @@ class smtpServerPyBitmessage(smtpd.SMTPServer):
continue
return
class smtpServer(threading.Thread, StoppableThread):
class smtpServer(StoppableThread):
def __init__(self, parent=None):
threading.Thread.__init__(self, name="smtpServerThread")
self.initStop()
super(smtpServer, self).__init__(name="smtpServerThread")
self.server = smtpServerPyBitmessage(('127.0.0.1', LISTENPORT), None)
def stopThread(self):

View File

@ -6,6 +6,11 @@ from pyelliptic.openssl import OpenSSL
NoneType = type(None)
def seed():
"""Initialize random number generator"""
random.seed()
def randomBytes(n):
"""Method randomBytes."""
try:

View File

@ -1,7 +1,9 @@
"""Helper threading perform all the threading operations."""
from contextlib import contextmanager
import threading
from contextlib import contextmanager
import helper_random
try:
import prctl
@ -22,7 +24,16 @@ else:
threading.Thread._Thread__bootstrap = _thread_name_hack
class StoppableThread(object):
class StoppableThread(threading.Thread):
name = None
def __init__(self, name=None):
if name:
self.name = name
super(StoppableThread, self).__init__(name=self.name)
self.initStop()
helper_random.seed()
def initStop(self):
self.stop = threading.Event()
self._stopped = False
@ -35,6 +46,7 @@ class StoppableThread(object):
class BusyError(threading.ThreadError):
pass
@contextmanager
def nonBlocking(lock):
locked = lock.acquire(False)
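The subclass pattern used throughout the rest of this commit follows from this base class: thread classes drop the threading.Thread mixin, stop calling initStop() themselves, and either set a class-level name or pass one to the super constructor. A minimal, hypothetical sketch of the migrated shape (ExampleThread is illustrative only; the real subclasses are in the other hunks of this commit):
# Hypothetical subclass illustrating the pattern introduced by StoppableThread.
from helper_threading import StoppableThread
class ExampleThread(StoppableThread):
    name = "Example"              # picked up by StoppableThread.__init__
    def run(self):
        while not self._stopped:  # set to True by stopThread()
            self.stop.wait(1)     # threading.Event created in initStop()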

Binary file not shown.


View File

@ -98,7 +98,7 @@ def saveKnownNodes(dirName=None):
def addKnownNode(stream, peer, lastseen=None, is_self=False):
knownNodes[stream][peer] = {
"lastseen": lastseen or time.time(),
"rating": 0,
"rating": 1 if is_self else 0,
"self": is_self,
}

View File

@ -1,18 +1,13 @@
import Queue
import threading
import addresses
from helper_threading import StoppableThread
from network.connectionpool import BMConnectionPool
from queues import addrQueue
import protocol
import state
class AddrThread(threading.Thread, StoppableThread):
def __init__(self):
threading.Thread.__init__(self, name="AddrBroadcaster")
self.initStop()
self.name = "AddrBroadcaster"
class AddrThread(StoppableThread):
name = "AddrBroadcaster"
def run(self):
while not state.shutdown:
@ -28,7 +23,7 @@ class AddrThread(threading.Thread, StoppableThread):
except KeyError:
continue
#finish
# finish
addrQueue.iterate()
for i in range(len(chunk)):

View File

@ -1,4 +1,7 @@
import threading
"""
src/network/announcethread.py
=================================
"""
import time
from bmconfigparser import BMConfigParser
@ -9,11 +12,11 @@ from network.connectionpool import BMConnectionPool
from network.udp import UDPSocket
import state
class AnnounceThread(threading.Thread, StoppableThread):
class AnnounceThread(StoppableThread):
"""A thread to manage regular announcing of this node"""
def __init__(self):
threading.Thread.__init__(self, name="Announcer")
self.initStop()
self.name = "Announcer"
super(AnnounceThread, self).__init__(name="Announcer")
logger.info("init announce thread")
def run(self):
@ -26,10 +29,15 @@ class AnnounceThread(threading.Thread, StoppableThread):
if processed == 0:
self.stop.wait(10)
def announceSelf(self):
@staticmethod
def announceSelf():
"""Announce our presence"""
for connection in BMConnectionPool().udpSockets.values():
if not connection.announcing:
continue
for stream in state.streamsInWhichIAmParticipating:
addr = (stream, state.Peer('127.0.0.1', BMConfigParser().safeGetInt("bitmessagesettings", "port")), time.time())
addr = (
stream,
state.Peer('127.0.0.1', BMConfigParser().safeGetInt("bitmessagesettings", "port")),
time.time())
connection.append_write_buf(BMProto.assembleAddr([addr]))

View File

@ -1,3 +1,8 @@
"""
src/network/bmproto.py
==================================
"""
# pylint: disable=attribute-defined-outside-init
import base64
import hashlib
import socket
@ -43,6 +48,7 @@ class BMProtoExcessiveDataError(BMProtoError):
class BMProto(AdvancedDispatcher, ObjectTracker):
"""A parser for the Bitmessage Protocol"""
# pylint: disable=too-many-instance-attributes, too-many-public-methods
# ~1.6 MB which is the maximum possible size of an inv message.
maxMessageSize = 1600100
# 2**18 = 256kB is the maximum size of an object payload
@ -57,7 +63,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
maxTimeOffset = 3600
timeOffsetWrongCount = 0
def __init__(self, address=None, sock=None):
def __init__(self, address=None, sock=None): # pylint: disable=unused-argument, super-init-not-called
AdvancedDispatcher.__init__(self, sock)
self.isOutbound = False
# packet/connection from a local IP
@ -97,7 +103,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
length=protocol.Header.size, expectBytes=self.payloadLength)
return True
def state_bm_command(self):
def state_bm_command(self): # pylint: disable=too-many-branches
"""Process incoming command"""
self.payload = self.read_buf[:self.payloadLength]
if self.checksum != hashlib.sha512(self.payload).digest()[0:4]:
@ -181,7 +187,8 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
return Node(services, host, port)
def decode_payload_content(self, pattern="v"):
def decode_payload_content(self, pattern="v"): # pylint: disable=too-many-branches, too-many-statements
"""
Decode the payload depending on pattern:
@ -197,7 +204,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
, = end of array
"""
def decode_simple(self, char="v"):
def decode_simple(self, char="v"): # pylint: disable=inconsistent-return-statements
"""Decode the payload using one char pattern"""
if char == "v":
return self.decode_payload_varint()
@ -251,6 +258,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
for j in range(parserStack[-1][4], len(parserStack[-1][3])):
if parserStack[-1][3][j] not in "lL0123456789":
break
# pylint: disable=undefined-loop-variable
parserStack.append([
size, size, isArray,
parserStack[-1][3][parserStack[-1][4]:j + 1], 0, []
@ -422,7 +430,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
def bm_command_addr(self):
"""Incoming addresses, process them"""
addresses = self._decode_addr()
addresses = self._decode_addr() # pylint: disable=redefined-outer-name
for i in addresses:
seenTime, stream, services, ip, port = i
decodedIP = protocol.checkIPAddress(str(ip))
@ -462,7 +470,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
self.append_write_buf(protocol.CreatePacket('pong'))
return True
def bm_command_pong(self):
def bm_command_pong(self): # pylint: disable=no-self-use
"""
Incoming pong.
Ignore it. PyBitmessage pings connections after about 5 minutes
@ -530,7 +538,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
length=self.payloadLength, expectBytes=0)
return False
def peerValidityChecks(self):
def peerValidityChecks(self): # pylint: disable=too-many-return-statements
"""Check the validity of the peer"""
if self.remoteProtocolVersion < 3:
self.append_write_buf(protocol.assembleErrorMessage(

View File

@ -1,3 +1,7 @@
"""
src/network/connectionpool.py
==================================
"""
import errno
import re
import socket
@ -20,6 +24,7 @@ from udp import UDPSocket
@Singleton
# pylint: disable=too-many-instance-attributes
class BMConnectionPool(object):
"""Pool of all existing connections"""
def __init__(self):
@ -113,7 +118,8 @@ class BMConnectionPool(object):
pass
connection.handle_close()
def getListeningIP(self):
@staticmethod
def getListeningIP():
"""What IP are we supposed to be listening on?"""
if BMConfigParser().safeGet(
"bitmessagesettings", "onionhostname").endswith(".onion"):
@ -154,7 +160,7 @@ class BMConnectionPool(object):
udpSocket = UDPSocket(host=bind, announcing=True)
self.udpSockets[udpSocket.listening.host] = udpSocket
def loop(self):
def loop(self): # pylint: disable=too-many-branches, too-many-statements
"""Main Connectionpool's loop"""
# defaults to empty loop if outbound connections are maxed
spawnConnections = False
@ -176,6 +182,7 @@ class BMConnectionPool(object):
'bitmessagesettings', 'onionhostname', '')):
acceptConnections = False
# pylint: disable=too-many-nested-blocks
if spawnConnections:
if not knownnodes.knownNodesActual:
helper_bootstrap.dns()
@ -253,7 +260,7 @@ class BMConnectionPool(object):
self.startListening()
else:
for bind in re.sub(
"[^\w.]+", " ",
'[^\w.]+', ' ', # pylint: disable=anomalous-backslash-in-string
BMConfigParser().safeGet('network', 'bind')
).split():
self.startListening(bind)
@ -263,7 +270,7 @@ class BMConnectionPool(object):
self.startUDPSocket()
else:
for bind in re.sub(
"[^\w.]+", " ",
'[^\w.]+', ' ', # pylint: disable=anomalous-backslash-in-string
BMConfigParser().safeGet('network', 'bind')
).split():
self.startUDPSocket(bind)

View File

@ -1,3 +1,7 @@
"""
src/network/dandelion.py
========================
"""
from collections import namedtuple
from random import choice, sample, expovariate
from threading import RLock
@ -22,7 +26,7 @@ Stem = namedtuple('Stem', ['child', 'stream', 'timeout'])
@Singleton
class Dandelion():
class Dandelion(): # pylint: disable=old-style-class
"""Dandelion class for tracking stem/fluff stages."""
def __init__(self):
# currently assignable child stems
@ -35,7 +39,8 @@ class Dandelion():
self.refresh = time() + REASSIGN_INTERVAL
self.lock = RLock()
def poissonTimeout(self, start=None, average=0):
@staticmethod
def poissonTimeout(start=None, average=0):
"""Generate deadline using Poisson distribution"""
if start is None:
start = time()

View File

@ -1,4 +1,7 @@
import threading
"""
src/network/downloadthread.py
=============================
"""
import time
import addresses
@ -12,7 +15,8 @@ from network.connectionpool import BMConnectionPool
from objectracker import missingObjects
class DownloadThread(threading.Thread, StoppableThread):
class DownloadThread(StoppableThread):
"""Thread-based class for downloading from connections"""
minPending = 200
maxRequestChunk = 1000
requestTimeout = 60
@ -20,13 +24,12 @@ class DownloadThread(threading.Thread, StoppableThread):
requestExpires = 3600
def __init__(self):
threading.Thread.__init__(self, name="Downloader")
self.initStop()
self.name = "Downloader"
super(DownloadThread, self).__init__(name="Downloader")
logger.info("init download thread")
self.lastCleaned = time.time()
def cleanPending(self):
"""Expire pending downloads eventually"""
deadline = time.time() - DownloadThread.requestExpires
try:
toDelete = [k for k, v in missingObjects.iteritems() if v < deadline]
@ -41,7 +44,10 @@ class DownloadThread(threading.Thread, StoppableThread):
while not self._stopped:
requested = 0
# Choose downloading peers randomly
connections = [x for x in BMConnectionPool().inboundConnections.values() + BMConnectionPool().outboundConnections.values() if x.fullyEstablished]
connections = [
x for x in
BMConnectionPool().inboundConnections.values() + BMConnectionPool().outboundConnections.values()
if x.fullyEstablished]
helper_random.randomshuffle(connections)
try:
requestChunk = max(int(min(DownloadThread.maxRequestChunk, len(missingObjects)) / len(connections)), 1)

View File

@ -1,3 +1,7 @@
"""
src/network/http-old.py
=======================
"""
import asyncore
import socket
import time
@ -8,6 +12,7 @@ duration = 60
class HTTPClient(asyncore.dispatcher):
"""An asyncore dispatcher"""
port = 12345
def __init__(self, host, path, connect=True):
@ -19,31 +24,33 @@ class HTTPClient(asyncore.dispatcher):
self.buffer = 'GET %s HTTP/1.0\r\n\r\n' % path
def handle_close(self):
# pylint: disable=global-statement
global requestCount
requestCount += 1
self.close()
def handle_read(self):
# print self.recv(8192)
# print self.recv(8192)
self.recv(8192)
def writable(self):
return (len(self.buffer) > 0)
return len(self.buffer) > 0
def handle_write(self):
sent = self.send(self.buffer)
self.buffer = self.buffer[sent:]
if __name__ == "__main__":
# initial fill
for i in range(parallel):
HTTPClient('127.0.0.1', '/')
start = time.time()
while (time.time() - start < duration):
if (len(asyncore.socket_map) < parallel):
while time.time() - start < duration:
if len(asyncore.socket_map) < parallel:
for i in range(parallel - len(asyncore.socket_map)):
HTTPClient('127.0.0.1', '/')
print "Active connections: %i" % (len(asyncore.socket_map))
asyncore.loop(count=len(asyncore.socket_map)/2)
asyncore.loop(count=len(asyncore.socket_map) / 2)
if requestCount % 100 == 0:
print "Processed %i total messages" % (requestCount)

View File

@ -2,15 +2,17 @@ import socket
from advanceddispatcher import AdvancedDispatcher
import asyncore_pollchoose as asyncore
from proxy import Proxy, ProxyError, GeneralProxyError
from socks5 import Socks5Connection, Socks5Resolver, Socks5AuthError, Socks5Error
from socks4a import Socks4aConnection, Socks4aResolver, Socks4aError
from proxy import ProxyError
from socks5 import Socks5Connection, Socks5Resolver
from socks4a import Socks4aConnection, Socks4aResolver
class HttpError(ProxyError): pass
class HttpError(ProxyError):
pass
class HttpConnection(AdvancedDispatcher):
def __init__(self, host, path="/"):
def __init__(self, host, path="/"): # pylint: disable=redefined-outer-name
AdvancedDispatcher.__init__(self)
self.path = path
self.destination = (host, 80)
@ -19,13 +21,15 @@ class HttpConnection(AdvancedDispatcher):
print "connecting in background to %s:%i" % (self.destination[0], self.destination[1])
def state_init(self):
self.append_write_buf("GET %s HTTP/1.1\r\nHost: %s\r\nConnection: close\r\n\r\n" % (self.path, self.destination[0]))
self.append_write_buf(
"GET %s HTTP/1.1\r\nHost: %s\r\nConnection: close\r\n\r\n" % (
self.path, self.destination[0]))
print "Sending %ib" % (len(self.write_buf))
self.set_state("http_request_sent", 0)
return False
def state_http_request_sent(self):
if len(self.read_buf) > 0:
if self.read_buf:
print "Received %ib" % (len(self.read_buf))
self.read_buf = b""
if not self.connected:
@ -34,7 +38,7 @@ class HttpConnection(AdvancedDispatcher):
class Socks5HttpConnection(Socks5Connection, HttpConnection):
def __init__(self, host, path="/"):
def __init__(self, host, path="/"): # pylint: disable=super-init-not-called, redefined-outer-name
self.path = path
Socks5Connection.__init__(self, address=(host, 80))
@ -44,7 +48,7 @@ class Socks5HttpConnection(Socks5Connection, HttpConnection):
class Socks4aHttpConnection(Socks4aConnection, HttpConnection):
def __init__(self, host, path="/"):
def __init__(self, host, path="/"): # pylint: disable=super-init-not-called, redefined-outer-name
Socks4aConnection.__init__(self, address=(host, 80))
self.path = path
@ -55,32 +59,31 @@ class Socks4aHttpConnection(Socks4aConnection, HttpConnection):
if __name__ == "__main__":
# initial fill
for host in ("bootstrap8080.bitmessage.org", "bootstrap8444.bitmessage.org"):
proxy = Socks5Resolver(host=host)
while len(asyncore.socket_map) > 0:
while asyncore.socket_map:
print "loop %s, len %i" % (proxy.state, len(asyncore.socket_map))
asyncore.loop(timeout=1, count=1)
proxy.resolved()
proxy = Socks4aResolver(host=host)
while len(asyncore.socket_map) > 0:
while asyncore.socket_map:
print "loop %s, len %i" % (proxy.state, len(asyncore.socket_map))
asyncore.loop(timeout=1, count=1)
proxy.resolved()
for host in ("bitmessage.org",):
direct = HttpConnection(host)
while len(asyncore.socket_map) > 0:
# print "loop, state = %s" % (direct.state)
while asyncore.socket_map:
# print "loop, state = %s" % (direct.state)
asyncore.loop(timeout=1, count=1)
proxy = Socks5HttpConnection(host)
while len(asyncore.socket_map) > 0:
# print "loop, state = %s" % (proxy.state)
while asyncore.socket_map:
# print "loop, state = %s" % (proxy.state)
asyncore.loop(timeout=1, count=1)
proxy = Socks4aHttpConnection(host)
while len(asyncore.socket_map) > 0:
# print "loop, state = %s" % (proxy.state)
while asyncore.socket_map:
# print "loop, state = %s" % (proxy.state)
asyncore.loop(timeout=1, count=1)

View File

@ -1,28 +1,34 @@
"""
src/network/httpd.py
=======================
"""
import asyncore
import socket
from tls import TLSHandshake
class HTTPRequestHandler(asyncore.dispatcher):
"""Handling HTTP request"""
response = """HTTP/1.0 200 OK\r
Date: Sun, 23 Oct 2016 18:02:00 GMT\r
Content-Type: text/html; charset=UTF-8\r
Content-Encoding: UTF-8\r
Content-Length: 136\r
Last-Modified: Wed, 08 Jan 2003 23:11:55 GMT\r
Server: Apache/1.3.3.7 (Unix) (Red-Hat/Linux)\r
ETag: "3f80f-1b6-3e1cb03b"\r
Accept-Ranges: bytes\r
Connection: close\r
\r
<html>
<head>
Date: Sun, 23 Oct 2016 18:02:00 GMT\r
Content-Type: text/html; charset=UTF-8\r
Content-Encoding: UTF-8\r
Content-Length: 136\r
Last-Modified: Wed, 08 Jan 2003 23:11:55 GMT\r
Server: Apache/1.3.3.7 (Unix) (Red-Hat/Linux)\r
ETag: "3f80f-1b6-3e1cb03b"\r
Accept-Ranges: bytes\r
Connection: close\r
\r
<html>
<head>
<title>An Example Page</title>
</head>
</head>
<body>
Hello World, this is a very simple HTML document.
</body>
</html>"""
</html>"""
def __init__(self, sock):
if not hasattr(self, '_map'):
@ -62,11 +68,17 @@ Connection: close\r
class HTTPSRequestHandler(HTTPRequestHandler, TLSHandshake):
"""Handling HTTPS request"""
def __init__(self, sock):
if not hasattr(self, '_map'):
asyncore.dispatcher.__init__(self, sock)
# self.tlsDone = False
TLSHandshake.__init__(self, sock=sock, certfile='/home/shurdeek/src/PyBitmessage/src/sslkeys/cert.pem', keyfile='/home/shurdeek/src/PyBitmessage/src/sslkeys/key.pem', server_side=True)
asyncore.dispatcher.__init__(self, sock) # pylint: disable=non-parent-init-called
# self.tlsDone = False
TLSHandshake.__init__(
self,
sock=sock,
certfile='/home/shurdeek/src/PyBitmessage/src/sslkeys/cert.pem',
keyfile='/home/shurdeek/src/PyBitmessage/src/sslkeys/key.pem',
server_side=True)
HTTPRequestHandler.__init__(self, sock)
def handle_connect(self):
@ -81,7 +93,6 @@ class HTTPSRequestHandler(HTTPRequestHandler, TLSHandshake):
def readable(self):
if self.tlsDone:
return HTTPRequestHandler.readable(self)
else:
return TLSHandshake.readable(self)
def handle_read(self):
@ -93,7 +104,6 @@ class HTTPSRequestHandler(HTTPRequestHandler, TLSHandshake):
def writable(self):
if self.tlsDone:
return HTTPRequestHandler.writable(self)
else:
return TLSHandshake.writable(self)
def handle_write(self):
@ -104,6 +114,7 @@ class HTTPSRequestHandler(HTTPRequestHandler, TLSHandshake):
class HTTPServer(asyncore.dispatcher):
"""Handling HTTP Server"""
port = 12345
def __init__(self):
@ -119,14 +130,15 @@ class HTTPServer(asyncore.dispatcher):
pair = self.accept()
if pair is not None:
sock, addr = pair
# print 'Incoming connection from %s' % repr(addr)
# print 'Incoming connection from %s' % repr(addr)
self.connections += 1
# if self.connections % 1000 == 0:
# print "Processed %i connections, active %i" % (self.connections, len(asyncore.socket_map))
# if self.connections % 1000 == 0:
# print "Processed %i connections, active %i" % (self.connections, len(asyncore.socket_map))
HTTPRequestHandler(sock)
class HTTPSServer(HTTPServer):
"""Handling HTTPS Server"""
port = 12345
def __init__(self):
@ -137,12 +149,13 @@ class HTTPSServer(HTTPServer):
pair = self.accept()
if pair is not None:
sock, addr = pair
# print 'Incoming connection from %s' % repr(addr)
# print 'Incoming connection from %s' % repr(addr)
self.connections += 1
# if self.connections % 1000 == 0:
# print "Processed %i connections, active %i" % (self.connections, len(asyncore.socket_map))
# if self.connections % 1000 == 0:
# print "Processed %i connections, active %i" % (self.connections, len(asyncore.socket_map))
HTTPSRequestHandler(sock)
if __name__ == "__main__":
client = HTTPSServer()
asyncore.loop()

View File

@ -1,10 +1,18 @@
import asyncore
from http import HTTPClient
import paths
from tls import TLSHandshake
# self.sslSock = ssl.wrap_socket(self.sock, keyfile = os.path.join(paths.codePath(), 'sslkeys', 'key.pem'), certfile = os.path.join(paths.codePath(), 'sslkeys', 'cert.pem'), server_side = not self.initiatedConnection, ssl_version=ssl.PROTOCOL_TLSv1, do_handshake_on_connect=False, ciphers='AECDH-AES256-SHA')
"""
self.sslSock = ssl.wrap_socket(
self.sock,
keyfile=os.path.join(paths.codePath(), 'sslkeys', 'key.pem'),
certfile=os.path.join(paths.codePath(), 'sslkeys', 'cert.pem'),
server_side=not self.initiatedConnection,
ssl_version=ssl.PROTOCOL_TLSv1,
do_handshake_on_connect=False,
ciphers='AECDH-AES256-SHA')
"""
class HTTPSClient(HTTPClient, TLSHandshake):
@ -12,7 +20,15 @@ class HTTPSClient(HTTPClient, TLSHandshake):
if not hasattr(self, '_map'):
asyncore.dispatcher.__init__(self)
self.tlsDone = False
# TLSHandshake.__init__(self, address=(host, 443), certfile='/home/shurdeek/src/PyBitmessage/sslsrc/keys/cert.pem', keyfile='/home/shurdeek/src/PyBitmessage/src/sslkeys/key.pem', server_side=False, ciphers='AECDH-AES256-SHA')
"""
TLSHandshake.__init__(
self,
address=(host, 443),
certfile='/home/shurdeek/src/PyBitmessage/sslsrc/keys/cert.pem',
keyfile='/home/shurdeek/src/PyBitmessage/src/sslkeys/key.pem',
server_side=False,
ciphers='AECDH-AES256-SHA')
"""
HTTPClient.__init__(self, host, path, connect=False)
TLSHandshake.__init__(self, address=(host, 443), server_side=False)
@ -49,6 +65,7 @@ class HTTPSClient(HTTPClient, TLSHandshake):
else:
TLSHandshake.handle_write(self)
if __name__ == "__main__":
client = HTTPSClient('anarchy.economicsofbitcoin.com', '/')
asyncore.loop()

View File

@ -1,16 +1,18 @@
"""
src/network/invthread.py
========================
"""
import Queue
from random import randint, shuffle
import threading
import random
from time import time
import addresses
from bmconfigparser import BMConfigParser
import protocol
import state
from helper_threading import StoppableThread
from network.connectionpool import BMConnectionPool
from network.dandelion import Dandelion
from queues import invQueue
import protocol
import state
def handleExpiredDandelion(expired):
@ -33,13 +35,14 @@ def handleExpiredDandelion(expired):
i.objectsNewToThem[hashid] = time()
class InvThread(threading.Thread, StoppableThread):
def __init__(self):
threading.Thread.__init__(self, name="InvBroadcaster")
self.initStop()
self.name = "InvBroadcaster"
class InvThread(StoppableThread):
"""A thread to send inv annoucements."""
def handleLocallyGenerated(self, stream, hashId):
name = "InvBroadcaster"
@staticmethod
def handleLocallyGenerated(stream, hashId):
"""Locally generated inventory items require special handling"""
Dandelion().addHash(hashId, stream=stream)
for connection in \
BMConnectionPool().inboundConnections.values() + \
@ -48,8 +51,8 @@ class InvThread(threading.Thread, StoppableThread):
continue
connection.objectsNewToThem[hashId] = time()
def run(self):
while not state.shutdown:
def run(self): # pylint: disable=too-many-branches
while not state.shutdown: # pylint: disable=too-many-nested-blocks
chunk = []
while True:
# Dandelion fluff trigger by expiration
@ -80,7 +83,7 @@ class InvThread(threading.Thread, StoppableThread):
if connection == Dandelion().objectChildStem(inv[1]):
# Fluff trigger by RNG
# auto-ignore if config set to 0, i.e. dandelion is off
if randint(1, 100) >= state.dandelion:
if random.randint(1, 100) >= state.dandelion:
fluffs.append(inv[1])
# send a dinv only if the stem node supports dandelion
elif connection.services & protocol.NODE_DANDELION > 0:
@ -91,13 +94,15 @@ class InvThread(threading.Thread, StoppableThread):
fluffs.append(inv[1])
if fluffs:
shuffle(fluffs)
connection.append_write_buf(protocol.CreatePacket('inv', \
addresses.encodeVarint(len(fluffs)) + "".join(fluffs)))
random.shuffle(fluffs)
connection.append_write_buf(protocol.CreatePacket(
'inv', addresses.encodeVarint(len(fluffs)) +
"".join(fluffs)))
if stems:
shuffle(stems)
connection.append_write_buf(protocol.CreatePacket('dinv', \
addresses.encodeVarint(len(stems)) + "".join(stems)))
random.shuffle(stems)
connection.append_write_buf(protocol.CreatePacket(
'dinv', addresses.encodeVarint(len(stems)) +
"".join(stems)))
invQueue.iterate()
for i in range(len(chunk)):

View File

@ -1,5 +1,7 @@
import threading
"""
src/network/networkthread.py
============================
"""
import network.asyncore_pollchoose as asyncore
import state
from debug import logger
@ -8,11 +10,10 @@ from network.connectionpool import BMConnectionPool
from queues import excQueue
class BMNetworkThread(threading.Thread, StoppableThread):
class BMNetworkThread(StoppableThread):
"""A thread to handle network concerns"""
def __init__(self):
threading.Thread.__init__(self, name="Asyncore")
self.initStop()
self.name = "Asyncore"
super(BMNetworkThread, self).__init__(name="Asyncore")
logger.info("init asyncore thread")
def run(self):

View File

@ -1,3 +1,7 @@
"""
src/network/node.py
===================
"""
import collections
Node = collections.namedtuple('Node', ['services', 'host', 'port'])

View File

@ -1,3 +1,7 @@
"""
src/network/objectracker.py
===========================
"""
import time
from threading import RLock
@ -27,6 +31,7 @@ missingObjects = {}
class ObjectTracker(object):
"""Object tracker mixin"""
invCleanPeriod = 300
invInitialCapacity = 50000
invErrorRate = 0.03
@ -42,21 +47,24 @@ class ObjectTracker(object):
self.lastCleaned = time.time()
def initInvBloom(self):
"""Init bloom filter for tracking. WIP."""
if haveBloom:
# lock?
self.invBloom = BloomFilter(capacity=ObjectTracker.invInitialCapacity,
error_rate=ObjectTracker.invErrorRate)
def initAddrBloom(self):
"""Init bloom filter for tracking addrs, WIP. This either needs to be moved to addrthread.py or removed."""
if haveBloom:
# lock?
self.addrBloom = BloomFilter(capacity=ObjectTracker.invInitialCapacity,
error_rate=ObjectTracker.invErrorRate)
def clean(self):
"""Clean up tracking to prevent memory bloat"""
if self.lastCleaned < time.time() - ObjectTracker.invCleanPeriod:
if haveBloom:
if len(missingObjects) == 0:
if not missingObjects:
self.initInvBloom()
self.initAddrBloom()
else:
@ -67,12 +75,13 @@ class ObjectTracker(object):
self.lastCleaned = time.time()
def hasObj(self, hashid):
"""Do we already have object?"""
if haveBloom:
return hashid in self.invBloom
else:
return hashid in self.objectsNewToMe
def handleReceivedInventory(self, hashId):
"""Handling received inventory"""
if haveBloom:
self.invBloom.add(hashId)
try:
@ -85,15 +94,16 @@ class ObjectTracker(object):
self.objectsNewToMe[hashId] = True
def handleReceivedObject(self, streamNumber, hashid):
for i in network.connectionpool.BMConnectionPool().inboundConnections.values() + network.connectionpool.BMConnectionPool().outboundConnections.values():
"""Handling received object"""
for i in network.connectionpool.BMConnectionPool().inboundConnections.values(
) + network.connectionpool.BMConnectionPool().outboundConnections.values():
if not i.fullyEstablished:
continue
try:
del i.objectsNewToMe[hashid]
except KeyError:
if streamNumber in i.streams and \
(not Dandelion().hasHash(hashid) or \
Dandelion().objectChildStem(hashid) == i):
if streamNumber in i.streams and (
not Dandelion().hasHash(hashid) or Dandelion().objectChildStem(hashid) == i):
with i.objectsNewToThemLock:
i.objectsNewToThem[hashid] = time.time()
# update stream number, which we didn't have when we just received the dinv
@ -109,23 +119,12 @@ class ObjectTracker(object):
self.objectsNewToMe.setLastObject()
def hasAddr(self, addr):
"""WIP, should be moved to addrthread.py or removed"""
if haveBloom:
return addr in self.invBloom
return None
def addAddr(self, hashid):
"""WIP, should be moved to addrthread.py or removed"""
if haveBloom:
self.addrBloom.add(hashid)
# addr sending -> per node upload queue, and flush every minute or so
# inv sending -> if not in bloom, inv immediately, otherwise put into a per node upload queue and flush every minute or so
# data sending -> a simple queue
# no bloom
# - if inv arrives
# - if we don't have it, add tracking and download queue
# - if we do have it, remove from tracking
# tracking downloads
# - per node hash of items the node has but we don't
# tracking inv
# - per node hash of items that neither the remote node nor we have
#

View File

@ -1,3 +1,8 @@
"""
src/network/proxy.py
====================
"""
# pylint: disable=protected-access
import socket
import time
@ -106,6 +111,7 @@ class Proxy(AdvancedDispatcher):
self.destination = address
self.isOutbound = True
self.fullyEstablished = False
self.connectedAt = 0
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
if BMConfigParser().safeGetBoolean(
"bitmessagesettings", "socksauthentication"):
@ -138,5 +144,5 @@ class Proxy(AdvancedDispatcher):
def state_proxy_handshake_done(self):
"""Handshake is complete at this point"""
self.connectedAt = time.time()
self.connectedAt = time.time() # pylint: disable=attribute-defined-outside-init
return False

View File

@ -1,27 +1,18 @@
import errno
import Queue
import socket
import sys
import threading
import time
import addresses
from bmconfigparser import BMConfigParser
from debug import logger
from helper_threading import StoppableThread
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from network.bmproto import BMProto
from network.advanceddispatcher import UnknownStateError
from queues import receiveDataQueue
import protocol
import state
class ReceiveQueueThread(threading.Thread, StoppableThread):
class ReceiveQueueThread(StoppableThread):
def __init__(self, num=0):
threading.Thread.__init__(self, name="ReceiveQueue_%i" %(num))
self.initStop()
self.name = "ReceiveQueue_%i" % (num)
super(ReceiveQueueThread, self).__init__(name="ReceiveQueue_%i" % num)
logger.info("init receive queue thread %i", num)
def run(self):

View File

@ -1,3 +1,8 @@
"""
src/network/socks4a.py
=================================
"""
# pylint: disable=attribute-defined-outside-init
import socket
import struct
@ -82,7 +87,7 @@ class Socks4aConnection(Socks4a):
self.append_write_buf(self.ipaddr)
except socket.error:
# Well it's not an IP number, so it's probably a DNS name.
if Proxy._remote_dns:
if self._remote_dns:
# Resolve remotely
rmtrslv = True
self.ipaddr = None
@ -118,6 +123,7 @@ class Socks4aResolver(Socks4a):
Socks4a.__init__(self, address=(self.host, self.port))
def state_auth_done(self):
"""Request connection to be made"""
# Now we can request the actual connection
self.append_write_buf(
struct.pack('>BBH', 0x04, 0xF0, self.destination[1]))

View File

@ -66,10 +66,9 @@ class Socks5(Proxy):
elif ret[1] == 2:
# username/password
self.append_write_buf(
struct.pack('BB', 1, len(self._auth[0])) +
self._auth[0] + struct.pack('B', len(self._auth[1])) +
self._auth[1]
)
struct.pack(
'BB', 1, len(self._auth[0])) + self._auth[0] + struct.pack(
'B', len(self._auth[1])) + self._auth[1])
self.set_state("auth_needed", length=2, expectBytes=2)
else:
if ret[1] == 0xff:
@ -178,11 +177,8 @@ class Socks5Connection(Socks5):
if Proxy._remote_dns: # pylint: disable=protected-access
# Resolve remotely
self.ipaddr = None
self.append_write_buf(
chr(0x03).encode() +
chr(len(self.destination[0])).encode() +
self.destination[0]
)
self.append_write_buf(chr(0x03).encode() + chr(
len(self.destination[0])).encode() + self.destination[0])
else:
# Resolve locally
self.ipaddr = socket.inet_aton(
@ -212,10 +208,8 @@ class Socks5Resolver(Socks5):
"""Perform resolving"""
# Now we can request the actual connection
self.append_write_buf(struct.pack('BBB', 0x05, 0xF0, 0x00))
self.append_write_buf(
chr(0x03).encode() + chr(len(self.host)).encode() +
str(self.host)
)
self.append_write_buf(chr(0x03).encode() + chr(
len(self.host)).encode() + str(self.host))
self.append_write_buf(struct.pack(">H", self.port))
self.set_state("pre_connect", length=0, expectBytes=4)
return True

View File

@ -1,9 +1,14 @@
"""
src/network/stats.py
====================
"""
import time
import asyncore_pollchoose as asyncore
from network.connectionpool import BMConnectionPool
from objectracker import missingObjects
lastReceivedTimestamp = time.time()
lastReceivedBytes = 0
currentReceivedSpeed = 0
@ -11,7 +16,9 @@ lastSentTimestamp = time.time()
lastSentBytes = 0
currentSentSpeed = 0
def connectedHostsList():
"""List of all the connected hosts"""
retval = []
for i in BMConnectionPool().inboundConnections.values() + \
BMConnectionPool().outboundConnections.values():
@ -23,10 +30,15 @@ def connectedHostsList():
pass
return retval
def sentBytes():
"""Sending Bytes"""
return asyncore.sentBytes
def uploadSpeed():
"""Getting upload speed"""
# pylint: disable=global-statement
global lastSentTimestamp, lastSentBytes, currentSentSpeed
currentTimestamp = time.time()
if int(lastSentTimestamp) < int(currentTimestamp):
@ -36,35 +48,44 @@ def uploadSpeed():
lastSentTimestamp = currentTimestamp
return currentSentSpeed
def receivedBytes():
"""Receiving Bytes"""
return asyncore.receivedBytes
def downloadSpeed():
"""Getting download speed"""
# pylint: disable=global-statement
global lastReceivedTimestamp, lastReceivedBytes, currentReceivedSpeed
currentTimestamp = time.time()
if int(lastReceivedTimestamp) < int(currentTimestamp):
currentReceivedBytes = asyncore.receivedBytes
currentReceivedSpeed = int((currentReceivedBytes - lastReceivedBytes) /
(currentTimestamp - lastReceivedTimestamp))
currentReceivedSpeed = int(
(currentReceivedBytes - lastReceivedBytes) / (currentTimestamp - lastReceivedTimestamp))
lastReceivedBytes = currentReceivedBytes
lastReceivedTimestamp = currentTimestamp
return currentReceivedSpeed
def pendingDownload():
"""Getting pending downloads"""
return len(missingObjects)
#tmp = {}
#for connection in BMConnectionPool().inboundConnections.values() + \
# tmp = {}
# for connection in BMConnectionPool().inboundConnections.values() + \
# BMConnectionPool().outboundConnections.values():
# for k in connection.objectsNewToMe.keys():
# tmp[k] = True
#return len(tmp)
# return len(tmp)
def pendingUpload():
#tmp = {}
#for connection in BMConnectionPool().inboundConnections.values() + \
"""Getting pending uploads"""
# tmp = {}
# for connection in BMConnectionPool().inboundConnections.values() + \
# BMConnectionPool().outboundConnections.values():
# for k in connection.objectsNewToThem.keys():
# tmp[k] = True
#This probably isn't the correct logic so it's disabled
#return len(tmp)
# This probably isn't the correct logic so it's disabled
# return len(tmp)
return 0
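As a rough usage sketch (an assumption for illustration, not part of this commit), the helpers in src/network/stats.py can simply be polled once a second, since uploadSpeed() and downloadSpeed() recompute their rates whenever the integer timestamp advances:

    # illustrative only; assumes PyBitmessage's src/ directory is on sys.path
    import time
    from network import stats

    while True:
        print("peers: %d  up: %d B/s  down: %d B/s  pending dl: %d" % (
            len(stats.connectedHostsList()), stats.uploadSpeed(),
            stats.downloadSpeed(), stats.pendingDownload()))
        time.sleep(1)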


@ -38,7 +38,9 @@ else:
sslProtocolCiphers = "AECDH-AES256-SHA"
class TLSDispatcher(AdvancedDispatcher):
class TLSDispatcher(AdvancedDispatcher): # pylint: disable=too-many-instance-attributes
"""TLS functionality for classes derived from AdvancedDispatcher"""
# pylint: disable=too-many-arguments, super-init-not-called, unused-argument
def __init__(
self, address=None, sock=None, certfile=None, keyfile=None,
server_side=False, ciphers=sslProtocolCiphers
@ -60,19 +62,24 @@ class TLSDispatcher(AdvancedDispatcher):
self.isSSL = False
def state_tls_init(self):
"""Prepare sockets for TLS handshake"""
# pylint: disable=attribute-defined-outside-init
self.isSSL = True
self.tlsStarted = True
# Once the connection has been established, it's safe to wrap the
# socket.
if sys.version_info >= (2,7,9):
context = ssl.create_default_context(purpose = ssl.Purpose.SERVER_AUTH if self.server_side else ssl.Purpose.CLIENT_AUTH)
if sys.version_info >= (2, 7, 9):
context = ssl.create_default_context(
purpose=ssl.Purpose.SERVER_AUTH if self.server_side else ssl.Purpose.CLIENT_AUTH)
context.set_ciphers(self.ciphers)
context.set_ecdh_curve("secp256k1")
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
# also exclude TLSv1 and TLSv1.1 in the future
context.options = ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_SINGLE_ECDH_USE | ssl.OP_CIPHER_SERVER_PREFERENCE
self.sslSocket = context.wrap_socket(self.socket, server_side = self.server_side, do_handshake_on_connect=False)
context.options = ssl.OP_ALL | ssl.OP_NO_SSLv2 |\
ssl.OP_NO_SSLv3 | ssl.OP_SINGLE_ECDH_USE | ssl.OP_CIPHER_SERVER_PREFERENCE
self.sslSocket = context.wrap_socket(
self.socket, server_side=self.server_side, do_handshake_on_connect=False)
else:
self.sslSocket = ssl.wrap_socket(
self.socket, server_side=self.server_side,
@ -86,10 +93,13 @@ class TLSDispatcher(AdvancedDispatcher):
# if hasattr(self.socket, "context"):
# self.socket.context.set_ecdh_curve("secp256k1")
def state_tls_handshake(self):
@staticmethod
def state_tls_handshake():
"""Do nothing while TLS handshake is pending, as during this phase we need to react to callbacks instead"""
return False
def writable(self):
"""Handle writable checks for TLS-enabled sockets"""
try:
if self.tlsStarted and not self.tlsDone and not self.write_buf:
return self.want_write
@ -98,10 +108,11 @@ class TLSDispatcher(AdvancedDispatcher):
return AdvancedDispatcher.writable(self)
def readable(self):
"""Handle readable check for TLS-enabled sockets"""
try:
# during TLS handshake, and after flushing write buffer, return status of last handshake attempt
if self.tlsStarted and not self.tlsDone and not self.write_buf:
#print "tls readable, %r" % (self.want_read)
# print "tls readable, %r" % (self.want_read)
return self.want_read
# prior to TLS handshake, receiveDataThread should emulate synchronous behaviour
elif not self.fullyEstablished and (self.expectBytes == 0 or not self.write_buf_empty()):
@ -110,14 +121,18 @@ class TLSDispatcher(AdvancedDispatcher):
except AttributeError:
return AdvancedDispatcher.readable(self)
def handle_read(self):
def handle_read(self): # pylint: disable=inconsistent-return-statements
"""
Handle reads for sockets during TLS handshake. Requires special treatment as during the handshake, buffers must
remain empty and normal reads must be ignored
"""
try:
# wait for write buffer flush
if self.tlsStarted and not self.tlsDone and not self.write_buf:
#logger.debug("%s:%i TLS handshaking (read)", self.destination.host, self.destination.port)
# logger.debug("%s:%i TLS handshaking (read)", self.destination.host, self.destination.port)
self.tls_handshake()
else:
#logger.debug("%s:%i Not TLS handshaking (read)", self.destination.host, self.destination.port)
# logger.debug("%s:%i Not TLS handshaking (read)", self.destination.host, self.destination.port)
return AdvancedDispatcher.handle_read(self)
except AttributeError:
return AdvancedDispatcher.handle_read(self)
@ -131,14 +146,18 @@ class TLSDispatcher(AdvancedDispatcher):
self.handle_close()
return
def handle_write(self):
def handle_write(self): # pylint: disable=inconsistent-return-statements
"""
Handle writes for sockets during TLS handshake. Requires special treatment as during the handshake, buffers
must remain empty and normal writes must be ignored
"""
try:
# wait for write buffer flush
if self.tlsStarted and not self.tlsDone and not self.write_buf:
#logger.debug("%s:%i TLS handshaking (write)", self.destination.host, self.destination.port)
# logger.debug("%s:%i TLS handshaking (write)", self.destination.host, self.destination.port)
self.tls_handshake()
else:
#logger.debug("%s:%i Not TLS handshaking (write)", self.destination.host, self.destination.port)
# logger.debug("%s:%i Not TLS handshaking (write)", self.destination.host, self.destination.port)
return AdvancedDispatcher.handle_write(self)
except AttributeError:
return AdvancedDispatcher.handle_write(self)
@ -153,26 +172,27 @@ class TLSDispatcher(AdvancedDispatcher):
return
def tls_handshake(self):
"""Perform TLS handshake and handle its stages"""
# wait for flush
if self.write_buf:
return False
# Perform the handshake.
try:
#print "handshaking (internal)"
# print "handshaking (internal)"
self.sslSocket.do_handshake()
except ssl.SSLError as err:
#print "%s:%i: handshake fail" % (self.destination.host, self.destination.port)
# print "%s:%i: handshake fail" % (self.destination.host, self.destination.port)
self.want_read = self.want_write = False
if err.args[0] == ssl.SSL_ERROR_WANT_READ:
#print "want read"
# print "want read"
self.want_read = True
if err.args[0] == ssl.SSL_ERROR_WANT_WRITE:
#print "want write"
# print "want write"
self.want_write = True
if not (self.want_write or self.want_read):
raise
except socket.error as err:
if err.errno in asyncore._DISCONNECTED:
if err.errno in asyncore._DISCONNECTED: # pylint: disable=protected-access
self.handle_close()
else:
raise


@ -1,3 +1,7 @@
"""
src/network/udp.py
==================
"""
import time
import socket
@ -9,15 +13,16 @@ from objectracker import ObjectTracker
from queues import receiveDataQueue
class UDPSocket(BMProto):
class UDPSocket(BMProto): # pylint: disable=too-many-instance-attributes
"""Bitmessage protocol over UDP (class)"""
port = 8444
announceInterval = 60
def __init__(self, host=None, sock=None, announcing=False):
super(BMProto, self).__init__(sock=sock)
super(BMProto, self).__init__(sock=sock) # pylint: disable=bad-super-call
self.verackReceived = True
self.verackSent = True
# TODO sort out streams
# .. todo:: sort out streams
self.streams = [1]
self.fullyEstablished = True
self.connectedAt = 0
@ -44,6 +49,7 @@ class UDPSocket(BMProto):
self.set_state("bm_header", expectBytes=protocol.Header.size)
def set_socket_reuse(self):
"""Set socket reuse option"""
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
@ -73,8 +79,7 @@ class UDPSocket(BMProto):
decodedIP = protocol.checkIPAddress(str(ip))
if stream not in state.streamsInWhichIAmParticipating:
continue
if (seenTime < time.time() - self.maxTimeOffset or
seenTime > time.time() + self.maxTimeOffset):
if (seenTime < time.time() - self.maxTimeOffset or seenTime > time.time() + self.maxTimeOffset):
continue
if decodedIP is False:
# if the address isn't local, interpret it as
@ -124,10 +129,7 @@ class UDPSocket(BMProto):
self.destination = state.Peer(*addr)
encodedAddr = protocol.encodeHost(addr[0])
if protocol.checkIPAddress(encodedAddr, True):
self.local = True
else:
self.local = False
self.local = bool(protocol.checkIPAddress(encodedAddr, True))
# overwrite the old buffer to avoid mixing data and so that
# self.local works correctly
self.read_buf[0:] = recdata


@ -2,7 +2,6 @@
src/network/uploadthread.py
"""
# pylint: disable=unsubscriptable-object
import threading
import time
import helper_random
@ -15,14 +14,12 @@ from network.dandelion import Dandelion
from randomtrackingdict import RandomTrackingDict
class UploadThread(threading.Thread, StoppableThread):
class UploadThread(StoppableThread):
"""This is a thread that uploads the objects that the peers requested from me """
maxBufSize = 2097152 # 2MB
def __init__(self):
threading.Thread.__init__(self, name="Uploader")
self.initStop()
self.name = "Uploader"
super(UploadThread, self).__init__(name="Uploader")
logger.info("init upload thread")
def run(self):


@ -42,9 +42,7 @@ def lookupAppdataFolder():
print stringToLog
sys.exit()
elif platform == 'android':
# dataFolder = path.join(os.path.dirname(os.path.abspath("__file__")), "PyBitmessage") + '/'
dataFolder = path.join(os.environ['ANDROID_PRIVATE'] + '/', APPNAME) + '/'
# dataFolder = path.join('/sdcard/', 'DCIM/', APPNAME) + '/'
elif 'win32' in sys.platform or 'win64' in sys.platform:
dataFolder = path.join(environ['APPDATA'].decode(sys.getfilesystemencoding(), 'ignore'), APPNAME) + path.sep


@ -0,0 +1,110 @@
# -*- coding: utf-8 -*-
import os
import logging
import random # noseq
import tempfile
import stem
import stem.control
import stem.process
class DebugLogger(object):
"""Safe logger wrapper for tor and plugin's logs"""
# pylint: disable=too-few-public-methods
def __init__(self):
self._logger = logging.getLogger(__name__.split('.', 1)[0])
self._levels = {
'err': 40,
'warn': 30,
'notice': 20
}
def __call__(self, line):
try:
level, line = line.split('[', 1)[1].split(']')
except IndexError:
# Plugin's debug or unexpected log line from tor
self._logger.debug(line)
else:
self._logger.log(self._levels.get(level, 10), '(tor)' + line)
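# Example (an assumed tor log line, not taken from this commit): a line like
# "May 17 19:08:05.000 [notice] Bootstrapped 100%: Done" is forwarded at
# level 20 (INFO); anything without a "[level]" tag falls back to DEBUG.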
def connect_plugin(config):
"""Run stem proxy configurator"""
logwrite = DebugLogger()
if config.safeGet('bitmessagesettings', 'sockshostname') not in (
'localhost', '127.0.0.1', ''
):
# remote proxy is chosen for outbound connections,
# nothing to do here, but need to set socksproxytype to SOCKS5!
logwrite(
'sockshostname is set to remote address,'
' aborting stem proxy configuration')
return
datadir = tempfile.mkdtemp()
control_socket = os.path.join(datadir, 'control')
tor_config = {
'SocksPort': '9050',
# 'DataDirectory': datadir, # had an exception with control socket
'ControlSocket': control_socket
}
port = config.safeGet('bitmessagesettings', 'socksport', '9050')
for attempt in range(50):
if attempt > 0:
port = random.randint(32767, 65535)
tor_config['SocksPort'] = str(port)
# It's recommended to use separate tor instance for hidden services.
# So if there is a system wide tor, use it for outbound connections.
try:
stem.process.launch_tor_with_config(
tor_config, take_ownership=True, init_msg_handler=logwrite)
except OSError:
continue
else:
logwrite('Started tor on port %s' % port)
break
if config.safeGetBoolean('bitmessagesettings', 'sockslisten'):
# need a hidden service for inbound connections
try:
controller = stem.control.Controller.from_socket_file(
control_socket)
controller.authenticate()
except stem.SocketError:
# something went wrong
logwrite('Failed to instantiate or authenticate on controller')
return
onionhostname = config.safeGet('bitmessagesettings', 'onionhostname')
onionkey = config.safeGet(onionhostname, 'privsigningkey')
if onionhostname and not onionkey:
logwrite('The hidden service found in config ): %s' % onionhostname)
onionkeytype = config.safeGet(onionhostname, 'keytype')
response = controller.create_ephemeral_hidden_service(
config.safeGetInt('bitmessagesettings', 'onionport', 8444),
key_type=(onionkeytype or 'NEW'),
key_content=(onionkey or onionhostname and 'ED25519-V3' or 'BEST')
)
if not response.is_ok():
logwrite('Bad response from controller ):')
return
if not onionkey:
logwrite('Started hidden service %s.onion' % response.service_id)
# only save new service keys if onionhostname was not set previously
if not onionhostname:
onionhostname = response.service_id + '.onion'
config.set(
'bitmessagesettings', 'onionhostname', onionhostname)
config.add_section(onionhostname)
config.set(
onionhostname, 'privsigningkey', response.private_key)
config.set(
onionhostname, 'keytype', response.private_key_type)
config.save()
config.set('bitmessagesettings', 'socksproxytype', 'SOCKS5')
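For context, a minimal keys.dat fragment driving this plugin might look as follows (values are illustrative assumptions, not taken from this commit); with sockshostname left local, the plugin launches its own tor on socksport, sets socksproxytype to SOCKS5 itself once tor is running, and, if sockslisten is enabled, creates an ephemeral hidden service on onionport:

    [bitmessagesettings]
    sockshostname = localhost
    socksport = 9050
    sockslisten = true
    onionport = 8444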


@ -290,7 +290,6 @@ def assembleVersionMessage(remoteHost, remotePort, participatingStreams, server=
else: # no extport and not incoming over Tor
payload += pack('>H', BMConfigParser().getint('bitmessagesettings', 'port'))
random.seed()
if nodeid is not None:
payload += nodeid[0:8]
else:


@ -2,18 +2,20 @@
# Author: Yann GUIBET
# Contact: <yannguibet@gmail.com>
from .openssl import OpenSSL
from .ecc import ECC
from .eccblind import ECCBlind
from .cipher import Cipher
from .hash import hmac_sha256, hmac_sha512, pbkdf2
__version__ = '1.3'
__all__ = [
'OpenSSL',
'ECC',
'ECCBlind',
'Cipher',
'hmac_sha256',
'hmac_sha512',
'pbkdf2'
]
from .openssl import OpenSSL
from .ecc import ECC
from .cipher import Cipher
from .hash import hmac_sha256, hmac_sha512, pbkdf2


@ -4,7 +4,7 @@
# Copyright (C) 2011 Yann GUIBET <yannguibet@gmail.com>
# See LICENSE for details.
from pyelliptic.openssl import OpenSSL
from openssl import OpenSSL
class Cipher:


@ -12,9 +12,9 @@ src/pyelliptic/ecc.py
from hashlib import sha512
from struct import pack, unpack
from pyelliptic.cipher import Cipher
from pyelliptic.hash import equals, hmac_sha256
from pyelliptic.openssl import OpenSSL
from cipher import Cipher
from hash import equals, hmac_sha256
from openssl import OpenSSL
class ECC(object):

210
src/pyelliptic/eccblind.py Normal file

@ -0,0 +1,210 @@
#!/usr/bin/env python
"""
ECC blind signature functionality based on "An Efficient Blind Signature Scheme
Based on the Elliptic Curve Discrete Logarithm Problem" by Morteza Nikooghadama
<mnikooghadam@sbu.ac.ir> and Ali Zakerolhosseini <a-zaker@sbu.ac.ir>,
http://www.isecure-journal.com/article_39171_47f9ec605dd3918c2793565ec21fcd7a.pdf
"""
# variable names are based on the math in the paper, so they don't conform
# to PEP8
# pylint: disable=invalid-name
from .openssl import OpenSSL
class ECCBlind(object): # pylint: disable=too-many-instance-attributes
"""
Class for ECC blind signature functionality
"""
# init
k = None
R = None
keypair = None
F = None
Q = None
a = None
b = None
c = None
binv = None
r = None
m = None
m_ = None
s_ = None
signature = None
@staticmethod
def ec_get_random(group, ctx):
"""
Random point from finite field
"""
order = OpenSSL.BN_new()
OpenSSL.EC_GROUP_get_order(group, order, ctx)
OpenSSL.BN_rand(order, OpenSSL.BN_num_bits(order), 0, 0)
return order
@staticmethod
def ec_invert(group, a, ctx):
"""
ECC inversion
"""
order = OpenSSL.BN_new()
OpenSSL.EC_GROUP_get_order(group, order, ctx)
inverse = OpenSSL.BN_mod_inverse(0, a, order, ctx)
return inverse
@staticmethod
def ec_gen_keypair(group, ctx):
"""
Generate an ECC keypair
"""
d = ECCBlind.ec_get_random(group, ctx)
Q = OpenSSL.EC_POINT_new(group)
OpenSSL.EC_POINT_mul(group, Q, d, 0, 0, 0)
return (d, Q)
@staticmethod
def ec_Ftor(F, group, ctx):
"""
x0 coordinate of F
"""
# F = (x0, y0)
x0 = OpenSSL.BN_new()
y0 = OpenSSL.BN_new()
OpenSSL.EC_POINT_get_affine_coordinates_GFp(group, F, x0, y0,
ctx)
return x0
def __init__(self, curve="secp256k1", pubkey=None):
self.ctx = OpenSSL.BN_CTX_new()
if pubkey:
self.group, self.G, self.n, self.Q = pubkey
else:
self.group = OpenSSL.EC_GROUP_new_by_curve_name(OpenSSL.get_curve(curve))
# Order n
self.n = OpenSSL.BN_new()
OpenSSL.EC_GROUP_get_order(self.group, self.n, self.ctx)
# Generator G
self.G = OpenSSL.EC_GROUP_get0_generator(self.group)
# new keypair
self.keypair = ECCBlind.ec_gen_keypair(self.group, self.ctx)
self.Q = self.keypair[1]
self.pubkey = (self.group, self.G, self.n, self.Q)
# Identity O (infinity)
self.iO = OpenSSL.EC_POINT_new(self.group)
OpenSSL.EC_POINT_set_to_infinity(self.group, self.iO)
def signer_init(self):
"""
Init signer
"""
# Signer: Random integer k
self.k = ECCBlind.ec_get_random(self.group, self.ctx)
# R = kG
self.R = OpenSSL.EC_POINT_new(self.group)
OpenSSL.EC_POINT_mul(self.group, self.R, self.k, 0, 0, 0)
return self.R
def create_signing_request(self, R, msg):
"""
Requester creates a new signing request
"""
self.R = R
# Requester: 3 random blinding factors
self.F = OpenSSL.EC_POINT_new(self.group)
OpenSSL.EC_POINT_set_to_infinity(self.group, self.F)
temp = OpenSSL.EC_POINT_new(self.group)
abinv = OpenSSL.BN_new()
# F != O
while OpenSSL.EC_POINT_cmp(self.group, self.F, self.iO, self.ctx) == 0:
self.a = ECCBlind.ec_get_random(self.group, self.ctx)
self.b = ECCBlind.ec_get_random(self.group, self.ctx)
self.c = ECCBlind.ec_get_random(self.group, self.ctx)
# F = b^-1 * R...
self.binv = ECCBlind.ec_invert(self.group, self.b, self.ctx)
OpenSSL.EC_POINT_mul(self.group, temp, 0, self.R, self.binv, 0)
OpenSSL.EC_POINT_copy(self.F, temp)
# ... + a*b^-1 * Q...
OpenSSL.BN_mul(abinv, self.a, self.binv, self.ctx)
OpenSSL.EC_POINT_mul(self.group, temp, 0, self.Q, abinv, 0)
OpenSSL.EC_POINT_add(self.group, self.F, self.F, temp, 0)
# ... + c*G
OpenSSL.EC_POINT_mul(self.group, temp, 0, self.G, self.c, 0)
OpenSSL.EC_POINT_add(self.group, self.F, self.F, temp, 0)
# F = (x0, y0)
self.r = ECCBlind.ec_Ftor(self.F, self.group, self.ctx)
# Requester: Blinding (m' = br(m) + a)
self.m = OpenSSL.BN_new()
OpenSSL.BN_bin2bn(msg, len(msg), self.m)
self.m_ = OpenSSL.BN_new()
OpenSSL.BN_mod_mul(self.m_, self.b, self.r, self.n, self.ctx)
OpenSSL.BN_mod_mul(self.m_, self.m_, self.m, self.n, self.ctx)
OpenSSL.BN_mod_add(self.m_, self.m_, self.a, self.n, self.ctx)
return self.m_
def blind_sign(self, m_):
"""
Signer blind-signs the request
"""
self.m_ = m_
self.s_ = OpenSSL.BN_new()
OpenSSL.BN_mod_mul(self.s_, self.keypair[0], self.m_, self.n, self.ctx)
OpenSSL.BN_mod_add(self.s_, self.s_, self.k, self.n, self.ctx)
return self.s_
def unblind(self, s_):
"""
Requester unblinds the signature
"""
self.s_ = s_
s = OpenSSL.BN_new()
OpenSSL.BN_mod_mul(s, self.binv, self.s_, self.n, self.ctx)
OpenSSL.BN_mod_add(s, s, self.c, self.n, self.ctx)
self.signature = (s, self.F)
return self.signature
def verify(self, msg, signature):
"""
Verify signature with certifier's pubkey
"""
# convert msg to BIGNUM
self.m = OpenSSL.BN_new()
OpenSSL.BN_bin2bn(msg, len(msg), self.m)
# init
s, self.F = signature
if self.r is None:
self.r = ECCBlind.ec_Ftor(self.F, self.group, self.ctx)
lhs = OpenSSL.EC_POINT_new(self.group)
rhs = OpenSSL.EC_POINT_new(self.group)
OpenSSL.EC_POINT_mul(self.group, lhs, s, 0, 0, 0)
OpenSSL.EC_POINT_mul(self.group, rhs, 0, self.Q, self.m, 0)
OpenSSL.EC_POINT_mul(self.group, rhs, 0, rhs, self.r, 0)
OpenSSL.EC_POINT_add(self.group, rhs, rhs, self.F, self.ctx)
retval = OpenSSL.EC_POINT_cmp(self.group, lhs, rhs, self.ctx)
if retval == -1:
raise RuntimeError("EC_POINT_cmp returned an error")
else:
return retval == 0
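For reference, the scheme the class above implements can be summarized as follows (notation taken from the code, with d the signer's private key, Q = dG, and all scalar arithmetic mod n; this is a restatement of the code, not a quotation from the cited paper):

\[
\begin{aligned}
&R = kG, \qquad F = b^{-1}R + ab^{-1}Q + cG, \qquad r = x_0(F),\\
&m' = brm + a, \qquad s' = dm' + k, \qquad s = b^{-1}s' + c,\\
&\text{signature} = (s, F), \qquad \text{verification: } sG = F + rmQ.
\end{aligned}
\]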


@ -4,7 +4,7 @@
# Copyright (C) 2011 Yann GUIBET <yannguibet@gmail.com>
# See LICENSE for details.
from pyelliptic.openssl import OpenSSL
from openssl import OpenSSL
# For python3


@ -19,7 +19,9 @@ class CipherName:
self._blocksize = blocksize
def __str__(self):
return "Cipher : " + self._name + " | Blocksize : " + str(self._blocksize) + " | Fonction pointer : " + str(self._pointer)
return "Cipher : " + self._name + \
" | Blocksize : " + str(self._blocksize) + \
" | Function pointer : " + str(self._pointer)
def get_pointer(self):
return self._pointer()
@ -36,7 +38,7 @@ def get_version(library):
hexversion = None
cflags = None
try:
#OpenSSL 1.1
# OpenSSL 1.1
OPENSSL_VERSION = 0
OPENSSL_CFLAGS = 1
library.OpenSSL_version.argtypes = [ctypes.c_int]
@ -47,7 +49,7 @@ def get_version(library):
hexversion = library.OpenSSL_version_num()
except AttributeError:
try:
#OpenSSL 1.0
# OpenSSL 1.0
SSLEAY_VERSION = 0
SSLEAY_CFLAGS = 2
library.SSLeay.restype = ctypes.c_long
@ -57,7 +59,7 @@ def get_version(library):
cflags = library.SSLeay_version(SSLEAY_CFLAGS)
hexversion = library.SSLeay()
except AttributeError:
#raise NotImplementedError('Cannot determine version of this OpenSSL library.')
# raise NotImplementedError('Cannot determine version of this OpenSSL library.')
pass
return (version, hexversion, cflags)
@ -130,7 +132,11 @@ class _OpenSSL:
self.EC_POINT_get_affine_coordinates_GFp = self._lib.EC_POINT_get_affine_coordinates_GFp
self.EC_POINT_get_affine_coordinates_GFp.restype = ctypes.c_int
self.EC_POINT_get_affine_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
self.EC_POINT_get_affine_coordinates_GFp.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key
self.EC_KEY_set_private_key.restype = ctypes.c_int
@ -144,11 +150,16 @@ class _OpenSSL:
self.EC_KEY_set_group = self._lib.EC_KEY_set_group
self.EC_KEY_set_group.restype = ctypes.c_int
self.EC_KEY_set_group.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self.EC_KEY_set_group.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self.EC_POINT_set_affine_coordinates_GFp = self._lib.EC_POINT_set_affine_coordinates_GFp
self.EC_POINT_set_affine_coordinates_GFp.restype = ctypes.c_int
self.EC_POINT_set_affine_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
self.EC_POINT_set_affine_coordinates_GFp.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.EC_POINT_new = self._lib.EC_POINT_new
self.EC_POINT_new.restype = ctypes.c_void_p
@ -164,7 +175,11 @@ class _OpenSSL:
self.EC_POINT_mul = self._lib.EC_POINT_mul
self.EC_POINT_mul.restype = None
self.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
self.EC_POINT_mul.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key
self.EC_KEY_set_private_key.restype = ctypes.c_int
@ -178,7 +193,8 @@ class _OpenSSL:
self.EC_KEY_set_method = self._lib.EC_KEY_set_method
self._lib.EC_KEY_set_method.restype = ctypes.c_int
self._lib.EC_KEY_set_method.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self._lib.EC_KEY_set_method.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
else:
self.ECDH_OpenSSL = self._lib.ECDH_OpenSSL
self._lib.ECDH_OpenSSL.restype = ctypes.c_void_p
@ -186,7 +202,8 @@ class _OpenSSL:
self.ECDH_set_method = self._lib.ECDH_set_method
self._lib.ECDH_set_method.restype = ctypes.c_int
self._lib.ECDH_set_method.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self._lib.ECDH_set_method.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self.BN_CTX_new = self._lib.BN_CTX_new
self._lib.BN_CTX_new.restype = ctypes.c_void_p
@ -195,12 +212,15 @@ class _OpenSSL:
self.ECDH_compute_key = self._lib.ECDH_compute_key
self.ECDH_compute_key.restype = ctypes.c_int
self.ECDH_compute_key.argtypes = [ctypes.c_void_p,
ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p]
ctypes.c_int,
ctypes.c_void_p,
ctypes.c_void_p]
self.EVP_CipherInit_ex = self._lib.EVP_CipherInit_ex
self.EVP_CipherInit_ex.restype = ctypes.c_int
self.EVP_CipherInit_ex.argtypes = [ctypes.c_void_p,
ctypes.c_void_p, ctypes.c_void_p]
ctypes.c_void_p,
ctypes.c_void_p]
self.EVP_CIPHER_CTX_new = self._lib.EVP_CIPHER_CTX_new
self.EVP_CIPHER_CTX_new.restype = ctypes.c_void_p
@ -223,13 +243,13 @@ class _OpenSSL:
self.EVP_aes_256_cbc.restype = ctypes.c_void_p
self.EVP_aes_256_cbc.argtypes = []
#self.EVP_aes_128_ctr = self._lib.EVP_aes_128_ctr
#self.EVP_aes_128_ctr.restype = ctypes.c_void_p
#self.EVP_aes_128_ctr.argtypes = []
# self.EVP_aes_128_ctr = self._lib.EVP_aes_128_ctr
# self.EVP_aes_128_ctr.restype = ctypes.c_void_p
# self.EVP_aes_128_ctr.argtypes = []
#self.EVP_aes_256_ctr = self._lib.EVP_aes_256_ctr
#self.EVP_aes_256_ctr.restype = ctypes.c_void_p
#self.EVP_aes_256_ctr.argtypes = []
# self.EVP_aes_256_ctr = self._lib.EVP_aes_256_ctr
# self.EVP_aes_256_ctr.restype = ctypes.c_void_p
# self.EVP_aes_256_ctr.argtypes = []
self.EVP_aes_128_ofb = self._lib.EVP_aes_128_ofb
self.EVP_aes_128_ofb.restype = ctypes.c_void_p
@ -377,6 +397,124 @@ class _OpenSSL:
ctypes.c_int, ctypes.c_void_p,
ctypes.c_int, ctypes.c_void_p]
# Blind signature requirements
self.BN_CTX_new = self._lib.BN_CTX_new
self.BN_CTX_new.restype = ctypes.c_void_p
self.BN_CTX_new.argtypes = []
self.BN_dup = self._lib.BN_dup
self.BN_dup.restype = ctypes.c_void_p
self.BN_dup.argtypes = [ctypes.c_void_p]
self.BN_rand = self._lib.BN_rand
self.BN_rand.restype = ctypes.c_int
self.BN_rand.argtypes = [ctypes.c_void_p,
ctypes.c_int,
ctypes.c_int]
self.BN_set_word = self._lib.BN_set_word
self.BN_set_word.restype = ctypes.c_int
self.BN_set_word.argtypes = [ctypes.c_void_p,
ctypes.c_ulong]
self.BN_mul = self._lib.BN_mul
self.BN_mul.restype = ctypes.c_int
self.BN_mul.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.BN_mod_add = self._lib.BN_mod_add
self.BN_mod_add.restype = ctypes.c_int
self.BN_mod_add.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.BN_mod_inverse = self._lib.BN_mod_inverse
self.BN_mod_inverse.restype = ctypes.c_void_p
self.BN_mod_inverse.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.BN_mod_mul = self._lib.BN_mod_mul
self.BN_mod_mul.restype = ctypes.c_int
self.BN_mod_mul.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.BN_lshift = self._lib.BN_lshift
self.BN_lshift.restype = ctypes.c_int
self.BN_lshift.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_int]
self.BN_sub_word = self._lib.BN_sub_word
self.BN_sub_word.restype = ctypes.c_int
self.BN_sub_word.argtypes = [ctypes.c_void_p,
ctypes.c_ulong]
self.BN_cmp = self._lib.BN_cmp
self.BN_cmp.restype = ctypes.c_int
self.BN_cmp.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self.BN_bn2dec = self._lib.BN_bn2dec
self.BN_bn2dec.restype = ctypes.c_char_p
self.BN_bn2dec.argtypes = [ctypes.c_void_p]
self.BN_CTX_free = self._lib.BN_CTX_free
self.BN_CTX_free.argtypes = [ctypes.c_void_p]
self.EC_GROUP_new_by_curve_name = self._lib.EC_GROUP_new_by_curve_name
self.EC_GROUP_new_by_curve_name.restype = ctypes.c_void_p
self.EC_GROUP_new_by_curve_name.argtypes = [ctypes.c_int]
self.EC_GROUP_get_order = self._lib.EC_GROUP_get_order
self.EC_GROUP_get_order.restype = ctypes.c_int
self.EC_GROUP_get_order.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.EC_GROUP_get_cofactor = self._lib.EC_GROUP_get_cofactor
self.EC_GROUP_get_cofactor.restype = ctypes.c_int
self.EC_GROUP_get_cofactor.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.EC_GROUP_get0_generator = self._lib.EC_GROUP_get0_generator
self.EC_GROUP_get0_generator.restype = ctypes.c_void_p
self.EC_GROUP_get0_generator.argtypes = [ctypes.c_void_p]
self.EC_POINT_copy = self._lib.EC_POINT_copy
self.EC_POINT_copy.restype = ctypes.c_int
self.EC_POINT_copy.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self.EC_POINT_add = self._lib.EC_POINT_add
self.EC_POINT_add.restype = ctypes.c_int
self.EC_POINT_add.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.EC_POINT_cmp = self._lib.EC_POINT_cmp
self.EC_POINT_cmp.restype = ctypes.c_int
self.EC_POINT_cmp.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.EC_POINT_set_to_infinity = self._lib.EC_POINT_set_to_infinity
self.EC_POINT_set_to_infinity.restype = ctypes.c_int
self.EC_POINT_set_to_infinity.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self._set_ciphers()
self._set_curves()
@ -388,8 +526,8 @@ class _OpenSSL:
'aes-256-cfb': CipherName('aes-256-cfb', self.EVP_aes_256_cfb128, 16),
'aes-128-ofb': CipherName('aes-128-ofb', self._lib.EVP_aes_128_ofb, 16),
'aes-256-ofb': CipherName('aes-256-ofb', self._lib.EVP_aes_256_ofb, 16),
#'aes-128-ctr': CipherName('aes-128-ctr', self._lib.EVP_aes_128_ctr, 16),
#'aes-256-ctr': CipherName('aes-256-ctr', self._lib.EVP_aes_256_ctr, 16),
# 'aes-128-ctr': CipherName('aes-128-ctr', self._lib.EVP_aes_128_ctr, 16),
# 'aes-256-ctr': CipherName('aes-256-ctr', self._lib.EVP_aes_256_ctr, 16),
'bf-cfb': CipherName('bf-cfb', self.EVP_bf_cfb64, 8),
'bf-cbc': CipherName('bf-cbc', self.EVP_bf_cbc, 8),
'rc4': CipherName('rc4', self.EVP_rc4, 128), # 128 is the initialisation size not block size
@ -494,21 +632,22 @@ class _OpenSSL:
buffer = self.create_string_buffer(size)
return buffer
def loadOpenSSL():
global OpenSSL
from os import path, environ
from ctypes.util import find_library
libdir = []
if getattr(sys,'frozen', None):
if getattr(sys, 'frozen', None):
if 'darwin' in sys.platform:
libdir.extend([
path.join(environ['RESOURCEPATH'], '..', 'Frameworks','libcrypto.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks','libcrypto.1.1.0.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks','libcrypto.1.0.2.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks','libcrypto.1.0.1.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks','libcrypto.1.0.0.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks','libcrypto.0.9.8.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks', 'libcrypto.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks', 'libcrypto.1.1.0.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks', 'libcrypto.1.0.2.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks', 'libcrypto.1.0.1.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks', 'libcrypto.1.0.0.dylib'),
path.join(environ['RESOURCEPATH'], '..', 'Frameworks', 'libcrypto.0.9.8.dylib'),
])
elif 'win32' in sys.platform or 'win64' in sys.platform:
libdir.append(path.join(sys._MEIPASS, 'libeay32.dll'))
@ -536,7 +675,6 @@ def loadOpenSSL():
libdir.append('libssl1.0.2p.so')
libdir.append('libcrypto1.1.so')
libdir.append('libssl1.1.so')
else:
libdir.append('libcrypto.so')
libdir.append('libssl.so')
@ -557,4 +695,5 @@ def loadOpenSSL():
pass
raise Exception("Couldn't find and load the OpenSSL library. You must install it.")
loadOpenSSL()


@ -0,0 +1,52 @@
"""
Test for ECC blind signatures
"""
import os
import unittest
from ctypes import cast, c_char_p
from pybitmessage.pyelliptic.eccblind import ECCBlind
from pybitmessage.pyelliptic.openssl import OpenSSL
class TestBlindSig(unittest.TestCase):
"""
Test case for ECC blind signature
"""
def test_blind_sig(self):
"""Test full sequence using a random certifier key and a random message"""
# See page 127 of the paper
# (1) Initialization
signer_obj = ECCBlind()
point_r = signer_obj.signer_init()
# (2) Request
requester_obj = ECCBlind(pubkey=signer_obj.pubkey)
# only 64 byte messages are planned to be used in Bitmessage
msg = os.urandom(64)
msg_blinded = requester_obj.create_signing_request(point_r, msg)
# check
msg_blinded_str = OpenSSL.malloc(0, OpenSSL.BN_num_bytes(msg_blinded))
OpenSSL.BN_bn2bin(msg_blinded, msg_blinded_str)
self.assertNotEqual(msg, cast(msg_blinded_str, c_char_p).value)
# (3) Signature Generation
signature_blinded = signer_obj.blind_sign(msg_blinded)
# (4) Extraction
signature = requester_obj.unblind(signature_blinded)
# check
signature_blinded_str = OpenSSL.malloc(0,
OpenSSL.BN_num_bytes(
signature_blinded))
signature_str = OpenSSL.malloc(0, OpenSSL.BN_num_bytes(signature[0]))
OpenSSL.BN_bn2bin(signature_blinded, signature_blinded_str)
OpenSSL.BN_bn2bin(signature[0], signature_str)
self.assertNotEqual(cast(signature_str, c_char_p).value,
cast(signature_blinded_str, c_char_p).value)
# (5) Verification
verifier_obj = ECCBlind(pubkey=signer_obj.pubkey)
self.assertTrue(verifier_obj.verify(msg, signature))


@ -9,13 +9,13 @@ Reference: http://mattscodecave.com/posts/using-python-and-upnp-to-forward-a-por
import httplib
import socket
import threading
import time
import urllib2
from random import randint
from urlparse import urlparse
from xml.dom.minidom import Document, parseString
import knownnodes
import queues
import state
import tr
@ -166,9 +166,11 @@ class Router: # pylint: disable=old-style-class
def GetExternalIPAddress(self):
"""Get the external address"""
resp = self.soapRequest(self.upnp_schema + ':1', 'GetExternalIPAddress')
dom = parseString(resp)
return dom.getElementsByTagName('NewExternalIPAddress')[0].childNodes[0].data
resp = self.soapRequest(
self.upnp_schema + ':1', 'GetExternalIPAddress')
dom = parseString(resp.read())
return dom.getElementsByTagName(
'NewExternalIPAddress')[0].childNodes[0].data
def soapRequest(self, service, action, arguments=None):
"""Make a request to a router"""
@ -198,7 +200,7 @@ class Router: # pylint: disable=old-style-class
return resp
class uPnPThread(threading.Thread, StoppableThread):
class uPnPThread(StoppableThread):
"""Start a thread to handle UPnP activity"""
SSDP_ADDR = "239.255.255.250"
@ -208,7 +210,7 @@ class uPnPThread(threading.Thread, StoppableThread):
SSDP_ST = "urn:schemas-upnp-org:device:InternetGatewayDevice:1"
def __init__(self):
threading.Thread.__init__(self, name="uPnPThread")
super(uPnPThread, self).__init__(name="uPnPThread")
try:
self.extPort = BMConfigParser().getint('bitmessagesettings', 'extport')
except:
@ -220,7 +222,6 @@ class uPnPThread(threading.Thread, StoppableThread):
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
self.sock.settimeout(5)
self.sendSleep = 60
self.initStop()
def run(self):
"""Start the thread to manage UPnP activity"""
@ -261,6 +262,17 @@ class uPnPThread(threading.Thread, StoppableThread):
logger.debug("Found UPnP router at %s", ip)
self.routers.append(newRouter)
self.createPortMapping(newRouter)
try:
self_peer = state.Peer(
newRouter.GetExternalIPAddress(),
self.extPort
)
except:
logger.debug('Failed to get external IP')
else:
with knownnodes.knownNodesLock:
knownnodes.addKnownNode(
1, self_peer, is_self=True)
queues.UISignalQueue.put(('updateStatusBar', tr._translate(
"MainWindow", 'UPnP port mapping established on port %1'
).arg(str(self.extPort))))