Compare commits

..

4 Commits

Author SHA1 Message Date
550fe7daa3
Make bootstrap collectd restart conditional
- only restart if the config file changes
2024-02-29 07:38:06 +08:00
664e57db37
Change bootstrap api user
- from `pybitmessage` to `api``
2024-02-28 23:00:23 +08:00
17d1e901c0
Bootstrap config upgrade
- only IPVS for access, no docker's portforwarding
- generate random password
- generate config for collectd's curl_json
2024-02-28 22:46:37 +08:00
d4c6db4997
Add code to run bootstrap server 2024-02-28 12:45:59 +08:00
143 changed files with 1159 additions and 2057 deletions

View File

@ -22,10 +22,7 @@ RUN apt-get -y update -qq \
RUN apt-get -y install -qq --no-install-recommends openjdk-17-jdk \
&& apt-get -y autoremove
# pyzbar dependencies
RUN apt-get -y install -qq --no-install-recommends libzbar0 libtool gettext
RUN pip install buildozer cython virtualenv
RUN pip install pip install buildozer cython virtualenv
ENV ANDROID_NDK_HOME="${ANDROID_HOME}/android-ndk"

View File

@ -1,32 +1,9 @@
#!/bin/bash
export LC_ALL=en_US.UTF-8
export LANG=en_US.UTF-8
# buildozer OOM workaround
mkdir -p ~/.gradle
echo "org.gradle.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8" \
> ~/.gradle/gradle.properties
# workaround for symlink
rm -rf src/pybitmessage
mkdir -p src/pybitmessage
cp src/*.py src/pybitmessage
cp -r src/bitmessagekivy src/backend src/mockbm src/images src/pybitmessage
pushd packages/android
BUILDMODE=debug
if [ "$BUILDBOT_JOBNAME" = "android" -a \
"$BUILDBOT_REPOSITORY" = "https://github.com/Bitmessage/PyBitmessage" -a \
"$BUILDBOT_BRANCH" = "v0.6" ]; then
sed -e 's/android.release_artifact *=.*/release_artifact = aab/' -i "" buildozer.spec
BUILDMODE=release
fi
buildozer android $BUILDMODE || exit $?
buildozer android debug || exit $?
popd
mkdir -p ../out
RELEASE_ARTIFACT=$(grep release_artifact packages/android/buildozer.spec |cut -d= -f2|tr -Cd 'a-z')
cp packages/android/bin/*.${RELEASE_ARTIFACT} ../out
cp packages/android/bin/*.apk ../out

View File

@ -1,11 +1,5 @@
#!/bin/bash
RELEASE_ARTIFACT=$(grep release_artifact packages/android/buildozer.spec |cut -d= -f2|tr -Cd 'a-z')
if [ $RELEASE_ARTIFACT = "aab" ]; then
exit
fi
unzip -p packages/android/bin/*.apk assets/private.tar \
| tar --list -z > package.list
cat package.list

View File

@ -24,5 +24,3 @@ RUN wget -qO appimage-builder-x86_64.AppImage \
https://github.com/AppImageCrafters/appimage-builder/releases/download/v1.1.0/appimage-builder-1.1.0-x86_64.AppImage
ADD . .
CMD .buildbot/tox-bionic/build.sh

View File

@ -4,11 +4,7 @@ export APPIMAGE_EXTRACT_AND_RUN=1
BUILDER=appimage-builder-x86_64.AppImage
RECIPE=packages/AppImage/AppImageBuilder.yml
git remote add -f upstream https://github.com/Bitmessage/PyBitmessage.git
HEAD="$(git rev-parse HEAD)"
UPSTREAM="$(git merge-base --fork-point upstream/v0.6)"
export APP_VERSION=$(git describe --tags | cut -d- -f1,3 | tr -d v)
[ "$HEAD" != "$UPSTREAM" ] && APP_VERSION="${APP_VERSION}-alpha"
function set_sourceline {
if [ ${ARCH} == amd64 ]; then
@ -18,45 +14,36 @@ function set_sourceline {
fi
}
function build_appimage {
set_sourceline
./${BUILDER} --recipe ${RECIPE} || exit 1
rm -rf build
}
[ -f ${BUILDER} ] || wget -qO ${BUILDER} \
https://github.com/AppImageCrafters/appimage-builder/releases/download/v1.1.0/appimage-builder-1.1.0-x86_64.AppImage \
&& chmod +x ${BUILDER}
chmod 1777 /tmp
export ARCH=amd64
export APPIMAGE_ARCH=x86_64
export RUNTIME=${APPIMAGE_ARCH}
set_sourceline
build_appimage
./${BUILDER} --recipe ${RECIPE} || exit 1
export ARCH=armhf
export APPIMAGE_ARCH=${ARCH}
export RUNTIME=gnueabihf
export CC=arm-linux-gnueabihf-gcc
export CXX=${CC}
set_sourceline
build_appimage
./${BUILDER} --recipe ${RECIPE} || exit 1
export ARCH=arm64
export APPIMAGE_ARCH=aarch64
export RUNTIME=${APPIMAGE_ARCH}
export CC=aarch64-linux-gnu-gcc
export CXX=${CC}
set_sourceline
build_appimage
./${BUILDER} --recipe ${RECIPE}
EXISTING_OWNER=$(stat -c %u ../out) || mkdir -p ../out
sha256sum PyBitmessage*.AppImage >> ../out/SHA256SUMS
mkdir -p ../out
sha256sum PyBitmessage*.AppImage > ../out/SHA256SUMS
cp PyBitmessage*.AppImage ../out
if [ ${EXISTING_OWNER} ]; then
chown ${EXISTING_OWNER} ../out/PyBitmessage*.AppImage ../out/SHA256SUMS
fi

View File

@ -15,4 +15,4 @@ RUN apt-get install -yq \
RUN ln -sf /usr/bin/python3 /usr/bin/python
RUN pip3 install --upgrade 'setuptools<71' pip
RUN pip3 install --upgrade setuptools pip

View File

@ -1,12 +1,5 @@
#!/bin/bash
git remote add -f upstream https://github.com/Bitmessage/PyBitmessage.git
HEAD="$(git rev-parse HEAD)"
UPSTREAM="$(git merge-base --fork-point upstream/v0.6)"
SNAP_DIFF="$(git diff upstream/v0.6 -- packages/snap .buildbot/snap)"
[ -z "${SNAP_DIFF}" ] && [ $HEAD != $UPSTREAM ] && exit 0
pushd packages && snapcraft || exit 1
popd

View File

@ -20,7 +20,3 @@ RUN python3.8 -m pip install --upgrade pip tox virtualenv
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
ADD . .
CMD .buildbot/tox-bionic/test.sh

View File

@ -1,4 +1,4 @@
#!/bin/sh
tox -e lint-basic || exit 1
tox -e lint-basic # || exit 1
tox

View File

@ -13,5 +13,3 @@ RUN apt-get install -yq --no-install-suggests --no-install-recommends \
RUN python3.9 -m pip install --upgrade pip tox virtualenv
ADD . .
CMD .buildbot/tox-focal/test.sh

View File

@ -10,7 +10,3 @@ RUN apt-get install -yq --no-install-suggests --no-install-recommends \
python3-dev python3-pip language-pack-en qt5dxcb-plugin tor xvfb
RUN pip install tox
ADD . .
CMD .buildbot/tox-jammy/test.sh

View File

@ -1,4 +1,4 @@
#!/bin/sh
tox -e lint || exit 1
tox -e lint-basic # || exit 1
tox -e py310

View File

@ -0,0 +1,50 @@
FROM ubuntu:xenial
RUN apt-get update
# Common apt packages
RUN apt-get install -yq --no-install-suggests --no-install-recommends \
software-properties-common build-essential libcap-dev libffi-dev \
libssl-dev python-all-dev python-pip python-setuptools python3-dev
# wget
RUN apt-get install -yq --no-install-suggests --no-install-recommends \
libgmp-dev m4 pkgconf wget
RUN wget "https://ftp.gnu.org/gnu/nettle/nettle-3.9.1.tar.gz" \
&& tar -zxf nettle-3.9.1.tar.gz
RUN cd nettle-3.9.1 \
&& ./configure --disable-openssl --enable-shared \
&& make && make install
RUN wget "https://www.gnupg.org/ftp/gcrypt/gnutls/v3.6/gnutls-3.6.16.tar.xz" \
&& tar -Jxf gnutls-3.6.16.tar.xz
RUN apt-get remove -yq libgnutls30
RUN cd gnutls-3.6.16 \
&& ./configure --prefix=/usr --without-p11-kit \
--with-included-libtasn1 --with-included-unistring --without-idn \
&& make && make install
RUN wget "https://ftp.gnu.org/gnu/wget/wget2-2.1.0.tar.gz" \
&& tar -zxf wget2-2.1.0.tar.gz
RUN apt-get remove -yq wget
RUN cd wget2-2.1.0 \
&& ./configure --without-libpsl --prefix=/usr \
GNUTLS_CFLAGS=-I/usr/include/gnutls/ GNUTLS_LIBS=-L/usr/lib \
&& make && make install \
&& mv /usr/bin/wget2 /usr/bin/wget
RUN wget -O /usr/local/bin/buildbot_entrypoint.sh http://git.bitmessage.org/Bitmessage/buildbot-scripts/raw/branch/master/docker/xenial/entrypoint.sh
RUN pip install --upgrade pip==20.0.1
RUN pip install --upgrade setuptools
RUN pip install tox
ADD . .
CMD .buildbot/tox-xenial/test.sh

View File

@ -0,0 +1 @@
../tox-bionic/test.sh

View File

@ -1,4 +1,4 @@
FROM ubuntu:jammy
FROM ubuntu:latest
ARG USERNAME=user
ARG USER_UID=1000
@ -15,9 +15,6 @@ RUN DEBIAN_FRONTEND=noninteractive apt-get install -y \
libcap-dev \
libssl-dev \
pylint \
python-setuptools \
python2.7 \
python2.7-dev \
python3 \
python3-dev \
python3-flake8 \
@ -29,6 +26,9 @@ RUN DEBIAN_FRONTEND=noninteractive apt-get install -y \
RUN apt-add-repository ppa:deadsnakes/ppa
RUN DEBIAN_FRONTEND=noninteractive apt-get install -y \
python2.7 python2.7-dev
RUN pip install 'tox<4' 'virtualenv<20.22.0'
RUN groupadd --gid $USER_GID $USERNAME \

View File

@ -16,7 +16,6 @@
],
"dockerFile": "Dockerfile",
"postCreateCommand": "pip3 install -r requirements.txt",
"updateContentCommand": "python2.7 setup.py install --user",
"remoteEnv": {
"PATH": "${containerEnv:PATH}:/home/user/.local/bin"
},

View File

@ -5,4 +5,3 @@ __pycache__
.buildozer
.tox
mprofile_*
**.so

23
.travis.yml Normal file
View File

@ -0,0 +1,23 @@
language: python
cache: pip
dist: bionic
python:
- "2.7_with_system_site_packages"
- "3.7"
addons:
apt:
packages:
- build-essential
- libcap-dev
- python-qt4
- python-pyqt5
- tor
- xvfb
install:
- pip install -r requirements.txt
- python setup.py install
- export PYTHONWARNINGS=all
script:
- python checkdeps.py
- python src/bitmessagemain.py -t
- python -bm tests

View File

@ -1,43 +1,12 @@
# PyBitmessage Installation Instructions
- Binary (64bit, no separate installation of dependencies required)
- Windows: https://artifacts.bitmessage.at/winebuild/
- Windows: https://download.bitmessage.org/snapshots/
- Linux AppImages: https://artifacts.bitmessage.at/appimage/
- Linux snaps: https://artifacts.bitmessage.at/snap/
- Linux snaps: https://artifacts.bitmessage.at/snap/
- Mac (not up to date): https://github.com/Bitmessage/PyBitmessage/releases/tag/v0.6.1
- Source
`git clone git://github.com/Bitmessage/PyBitmessage.git`
## Notes on the AppImages
The [AppImage](https://docs.appimage.org/introduction/index.html)
is a bundle, built by the
[appimage-builder](https://github.com/AppImageCrafters/appimage-builder) from
the Ubuntu Bionic deb files, the sources and `bitmsghash.so`, precompiled for
3 architectures, using the `packages/AppImage/AppImageBuilder.yml` recipe.
When you run the appimage the bundle is loop mounted to a location like
`/tmp/.mount_PyBitm97wj4K` with `squashfs-tools`.
The appimage name has several informational filds:
```
PyBitmessage-<VERSION>-g<COMMITHASH>[-alpha]-<ARCH>.AppImage
```
E.g. `PyBitmessage-0.6.3.2-ge571ba8a-x86_64.AppImage` is an appimage, built from
the `v0.6` for x86_64 and `PyBitmessage-0.6.3.2-g9de2aaf1-alpha-aarch64.AppImage`
is one, built from some development branch for arm64.
You can also build the appimage with local code. For that you need installed
docker:
```
$ docker build -t bm-appimage -f .buildbot/appimage/Dockerfile .
$ docker run -t --rm -v "$(pwd)"/dist:/out bm-appimage .buildbot/appimage/build.sh
```
The appimages should be in the dist dir.
## Helper Script for building from source
Go to the directory with PyBitmessage source code and run:
```

View File

@ -0,0 +1,9 @@
version: "3"
name: pybitmessage
services:
bootstrap:
image: pybitmessage/bootstrap:latest
build: ..
env_file: .env
deploy:
replicas: $THREADS

View File

@ -0,0 +1,70 @@
#!/bin/bash
apt -y install curl jq ipvsadm libyajl2
EXTIP=$(curl -s telnetmyip.com|jq -r .ip)
if [ ! -e .env ]; then
THREADS=$(nproc --all)
PASSWORD=$(tr -dc a-zA-Z0-9 < /dev/urandom | head -c32 && echo)
cat > .env << EOF
THREADS=$THREADS
PASSWORD=$PASSWORD
EOF
else
. .env
fi
ipvsadm -C
ipvsadm -A -t ${EXTIP}:8444 -s rr
ipvsadm -A -t ${EXTIP}:8080 -s rr
docker compose up -d
CF=/etc/collectd/collectd.conf.d/curl_json.conf.new
CF_LIVE=/etc/collectd/collectd.conf.d/curl_json.conf
echo "LoadPlugin curl_json" > $CF
echo "<Plugin curl_json>" >> $CF
for i in `seq 1 $THREADS`; do
cont="pybitmessage-bootstrap-${i}"
IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $cont 2>/dev/null)
[ -z "$IP" ] && continue
echo "Adding $IP"
ipvsadm -a -t ${EXTIP}:8444 -r ${IP}:8444 -m
ipvsadm -a -t ${EXTIP}:8080 -r ${IP}:8444 -m
INSTANCE=$(echo $cont|tr - _)
cat >> $CF << EOF
<URL "http://$IP:8442/">
Plugin "pybitmessagestatus"
Instance "$INSTANCE"
User "api"
Password "$PASSWORD"
Post "{\"jsonrpc\":\"2.0\",\"id\":\"id\",\"method\":\"clientStatus\",\"params\":[]}"
<Key "result/networkConnections">
Type "gauge"
Instance "networkconnections"
</Key>
<Key "result/numberOfPubkeysProcessed">
Type "counter"
Instance "numberofpubkeysprocessed"
</Key>
<Key "result/numberOfMessagesProcessed">
Type "counter"
Instance "numberofmessagesprocessed"
</Key>
<Key "result/numberOfBroadcastsProcessed">
Type "counter"
Instance "numberofbroadcastsprocessed"
</Key>
</URL>
EOF
done
echo "</Plugin>" >> $CF
if ! cmp -s $CF $CF_LIVE; then
mv $CF $CF_LIVE
systemctl restart collectd
fi
ipvsadm -l -n

View File

@ -1,11 +0,0 @@
#!/bin/sh
DOCKERFILE=.buildbot/tox-bionic/Dockerfile
docker build -t pybm/tox -f $DOCKERFILE .
if [ $? -gt 0 ]; then
docker build --no-cache -t pybm/tox -f $DOCKERFILE .
fi
docker run --rm -it pybm/tox

View File

@ -203,7 +203,7 @@ autodoc_mock_imports = [
'pybitmessage.bitmessagekivy',
'pybitmessage.bitmessageqt.foldertree',
'pybitmessage.helper_startup',
'pybitmessage.mockbm',
'pybitmessage.mock',
'pybitmessage.network.httpd',
'pybitmessage.network.https',
'ctypes',
@ -232,7 +232,7 @@ apidoc_excluded_paths = [
'bitmessageqt/addressvalidator.py', 'bitmessageqt/foldertree.py',
'bitmessageqt/migrationwizard.py', 'bitmessageqt/newaddresswizard.py',
'helper_startup.py',
'kivymd', 'mockbm', 'main.py', 'navigationdrawer', 'network/http*',
'kivymd', 'mock', 'main.py', 'navigationdrawer', 'network/http*',
'src', 'tests', 'version.py'
]
apidoc_module_first = True

View File

@ -1,5 +1,4 @@
mistune<=0.8.4
m2r<=0.2.1
sphinx_rtd_theme
m2r
sphinxcontrib-apidoc
docutils<=0.17.1

View File

@ -1,11 +1,6 @@
kivy-garden.qrcode
kivymd==1.0.2
kivy==2.1.0
opencv-python
pyzbar
git+https://github.com/tito/telenium@9b54ff1#egg=telenium
Pillow==9.4.0
jaraco.collections==3.8.0
jaraco.classes==3.2.3
pytz==2022.7.1
pydantic==1.10.6
Pillow

View File

@ -1,19 +1,19 @@
[app]
# (str) Title of your application
title = PyBitmessage Mock
title = mockone
# (str) Package name
package.name = pybitmessagemock
package.name = mock
# (str) Package domain (needed for android/ios packaging)
package.domain = at.bitmessage
package.domain = org.mock
# (str) Source code where the main.py live
source.dir = ../../src
# (list) Source files to include (let empty to include all the files)
source.include_exts = py,png,jpg,kv,atlas,tflite,sql,json
source.include_exts = py,png,jpg,kv,atlas,tflite,sql
# (list) List of inclusions using pattern matching
#source.include_patterns = assets/*,images/*.png
@ -28,7 +28,7 @@ source.include_exts = py,png,jpg,kv,atlas,tflite,sql,json
#source.exclude_patterns = license,images/*/*.jpg
# (str) Application versioning (method 1)
version = 0.1.1
version = 0.1
# (str) Application versioning (method 2)
# version.regex = __version__ = ['"](.*)['"]
@ -36,7 +36,7 @@ version = 0.1.1
# (list) Application requirements
# comma separated e.g. requirements = sqlite3,kivy
requirements = python3,kivy,sqlite3,kivymd==1.0.2,Pillow,opencv,kivy-garden.qrcode,qrcode,typing_extensions,pypng,pyzbar,xcamera,zbarcam
requirements = python3,kivy
# (str) Custom source folders for requirements
# Sets custom source for any requirements with recipes
@ -92,7 +92,7 @@ fullscreen = 0
# (int) Android API to use (targetSdkVersion AND compileSdkVersion)
# note: when changing, Dockerfile also needs to be changed to install corresponding build tools
android.api = 33
android.api = 28
# (int) Minimum API required. You will need to set the android.ndk_api to be as low as this value.
android.minapi = 21
@ -243,8 +243,6 @@ android.allow_backup = True
# Usage example : android.manifest_placeholders = [myCustomUrl:\"org.kivy.customurl\"]
# android.manifest_placeholders = [:]
android.release_artifact = apk
#
# Python for android (p4a) specific
#

View File

@ -3,13 +3,15 @@
# Setup the environment for docker container
APIUSER=${USER:-api}
APIPASS=${PASSWORD:-$(tr -dc a-zA-Z0-9 < /dev/urandom | head -c32 && echo)}
IP=$(hostname -i)
echo "\napiusername: $APIUSER\napipassword: $APIPASS"
sed -i -e "s|\(apiinterface = \).*|\10\.0\.0\.0|g" \
sed -i -e "s|\(apiinterface = \).*|\1$IP|g" \
-e "s|\(apivariant = \).*|\1json|g" \
-e "s|\(apiusername = \).*|\1$APIUSER|g" \
-e "s|\(apipassword = \).*|\1$APIPASS|g" \
-e "s|\(bind = \).*|\1$IP|g" \
-e "s|apinotifypath = .*||g" ${BITMESSAGE_HOME}/keys.dat
# Run

View File

@ -28,7 +28,7 @@ parts:
source: https://github.com/Bitmessage/PyBitmessage.git
override-pull: |
snapcraftctl pull
snapcraftctl set-version $(git describe --tags | cut -d- -f1,3 | tr -d v)
snapcraftctl set-version $(git describe --tags --abbrev=0 | tr -d v)
plugin: python
python-version: python2
build-packages:

View File

@ -1,8 +1,7 @@
coverage
psutil
pycryptodome
PyQt5;python_version>="3.7" and platform_machine=="x86_64"
mock;python_version<="2.7"
PyQt5;python_version>="3.7"
python_prctl;platform_system=="Linux"
six
xvfbwrapper;platform_system=="Linux"

13
run-tests-in-docker.sh Executable file
View File

@ -0,0 +1,13 @@
#!/bin/sh
DOCKERFILE=packages/docker/Dockerfile.bionic
# explicitly mark appimage stage because it builds in any case
docker build --target appimage -t pybm/appimage -f $DOCKERFILE .
if [ $? -gt 0 ]; then
docker build --no-cache --target appimage -t pybm/appimage -f $DOCKERFILE .
fi
docker build --target tox -t pybm/tox -f $DOCKERFILE .
docker run --rm -t pybm/tox

View File

@ -13,7 +13,7 @@ from src.version import softwareVersion
EXTRAS_REQUIRE = {
'docs': ['sphinx'],
'docs': ['sphinx', 'sphinx_rtd_theme'],
'gir': ['pygobject'],
'json': ['jsonrpclib'],
'notify2': ['notify2'],
@ -92,7 +92,7 @@ if __name__ == "__main__":
)
if os.environ.get('INSTALL_TESTS', False):
packages.extend(['pybitmessage.mockbm', 'pybitmessage.backend', 'pybitmessage.bitmessagekivy.tests'])
packages.extend(['pybitmessage.mock', 'pybitmessage.backend', 'pybitmessage.bitmessagekivy.tests'])
package_data[''].extend(['bitmessagekivy/tests/sampleData/*.dat'])
# this will silently accept alternative providers of msgpack

View File

@ -2,16 +2,11 @@
Operations with addresses
"""
# pylint: disable=inconsistent-return-statements
import hashlib
import logging
from binascii import hexlify, unhexlify
from struct import pack, unpack
try:
from highlevelcrypto import double_sha512
except ImportError:
from .highlevelcrypto import double_sha512
logger = logging.getLogger('default')
@ -139,6 +134,15 @@ def decodeVarint(data):
return (encodedValue, 9)
def calculateInventoryHash(data):
"""Calculate inventory hash from object data"""
sha = hashlib.new('sha512')
sha2 = hashlib.new('sha512')
sha.update(data)
sha2.update(sha.digest())
return sha2.digest()[0:32]
def encodeAddress(version, stream, ripe):
"""Convert ripe to address"""
if version >= 2 and version < 4:
@ -162,7 +166,12 @@ def encodeAddress(version, stream, ripe):
storedBinaryData = encodeVarint(version) + encodeVarint(stream) + ripe
# Generate the checksum
checksum = double_sha512(storedBinaryData)[0:4]
sha = hashlib.new('sha512')
sha.update(storedBinaryData)
currentHash = sha.digest()
sha = hashlib.new('sha512')
sha.update(currentHash)
checksum = sha.digest()[0:4]
# FIXME: encodeBase58 should take binary data, to reduce conversions
# encodeBase58(storedBinaryData + checksum)
@ -198,7 +207,13 @@ def decodeAddress(address):
data = unhexlify(hexdata)
checksum = data[-4:]
if checksum != double_sha512(data[:-4])[0:4]:
sha = hashlib.new('sha512')
sha.update(data[:-4])
currentHash = sha.digest()
sha = hashlib.new('sha512')
sha.update(currentHash)
if checksum != sha.digest()[0:4]:
status = 'checksumfailed'
return status, 0, 0, ''

View File

@ -71,9 +71,9 @@ from struct import pack, unpack
import six
from six.moves import configparser, http_client, xmlrpc_server
import defaults
import helper_inbox
import helper_sent
import protocol
import proofofwork
import queues
import shared
@ -82,25 +82,23 @@ import shutdown
import state
from addresses import (
addBMIfNotPresent,
calculateInventoryHash,
decodeAddress,
decodeVarint,
varintDecodeError
)
from bmconfigparser import config
from debug import logger
from defaults import (
networkDefaultProofOfWorkNonceTrialsPerByte,
networkDefaultPayloadLengthExtraBytes)
from helper_sql import (
SqlBulkExecute, sqlExecute, sqlQuery, sqlStoredProcedure, sql_ready)
from highlevelcrypto import calculateInventoryHash
from inventory import Inventory
try:
from network import connectionpool
from network import BMConnectionPool
except ImportError:
connectionpool = None
BMConnectionPool = None
from network import stats, StoppableThread, invQueue
from network import stats, StoppableThread
from version import softwareVersion
try: # TODO: write tests for XML vulnerabilities
@ -658,11 +656,13 @@ class BMRPCDispatcher(object):
nonceTrialsPerByte = self.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte'
) if not totalDifficulty else int(
networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
defaults.networkDefaultProofOfWorkNonceTrialsPerByte
* totalDifficulty)
payloadLengthExtraBytes = self.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes'
) if not smallMessageDifficulty else int(
networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
defaults.networkDefaultPayloadLengthExtraBytes
* smallMessageDifficulty)
if not isinstance(eighteenByteRipe, bool):
raise APIError(
@ -704,11 +704,13 @@ class BMRPCDispatcher(object):
nonceTrialsPerByte = self.config.get(
'bitmessagesettings', 'defaultnoncetrialsperbyte'
) if not totalDifficulty else int(
networkDefaultProofOfWorkNonceTrialsPerByte * totalDifficulty)
defaults.networkDefaultProofOfWorkNonceTrialsPerByte
* totalDifficulty)
payloadLengthExtraBytes = self.config.get(
'bitmessagesettings', 'defaultpayloadlengthextrabytes'
) if not smallMessageDifficulty else int(
networkDefaultPayloadLengthExtraBytes * smallMessageDifficulty)
defaults.networkDefaultPayloadLengthExtraBytes
* smallMessageDifficulty)
if not passphrase:
raise APIError(1, 'The specified passphrase is blank.')
@ -1281,73 +1283,68 @@ class BMRPCDispatcher(object):
})
return {'subscriptions': data}
@command('disseminatePreEncryptedMsg', 'disseminatePreparedObject')
def HandleDisseminatePreparedObject(
self, encryptedPayload,
nonceTrialsPerByte=networkDefaultProofOfWorkNonceTrialsPerByte,
payloadLengthExtraBytes=networkDefaultPayloadLengthExtraBytes
):
"""
Handle a request to disseminate an encrypted message.
@command('disseminatePreEncryptedMsg')
def HandleDisseminatePreEncryptedMsg( # pylint: disable=too-many-locals
self, encryptedPayload, requiredAverageProofOfWorkNonceTrialsPerByte,
requiredPayloadLengthExtraBytes):
"""Handle a request to disseminate an encrypted message"""
The device issuing this command to PyBitmessage supplies an object
that has already been encrypted but which may still need the PoW
to be done. PyBitmessage accepts this object and sends it out
to the rest of the Bitmessage network as if it had generated
the message itself.
*encryptedPayload* is a hex encoded string starting with the nonce,
8 zero bytes in case of no PoW done.
"""
# The device issuing this command to PyBitmessage supplies a msg
# object that has already been encrypted but which still needs the POW
# to be done. PyBitmessage accepts this msg object and sends it out
# to the rest of the Bitmessage network as if it had generated
# the message itself. Please do not yet add this to the api doc.
encryptedPayload = self._decode(encryptedPayload, "hex")
nonce, = unpack('>Q', encryptedPayload[:8])
objectType, toStreamNumber, expiresTime = \
protocol.decodeObjectParameters(encryptedPayload)
if nonce == 0: # Let us do the POW and attach it to the front
encryptedPayload = encryptedPayload[8:]
TTL = expiresTime - time.time() + 300 # a bit of extra padding
# Let us do the POW and attach it to the front
logger.debug("expiresTime: %s", expiresTime)
logger.debug("TTL: %s", TTL)
logger.debug("objectType: %s", objectType)
logger.info(
'(For msg message via API) Doing proof of work. Total required'
' difficulty: %s\nRequired small message difficulty: %s',
float(nonceTrialsPerByte)
/ networkDefaultProofOfWorkNonceTrialsPerByte,
float(payloadLengthExtraBytes)
/ networkDefaultPayloadLengthExtraBytes,
)
powStartTime = time.time()
target = 2**64 / (
nonceTrialsPerByte * (
len(encryptedPayload) + 8 + payloadLengthExtraBytes + ((
TTL * (
len(encryptedPayload) + 8 + payloadLengthExtraBytes
)) / (2 ** 16))
))
initialHash = hashlib.sha512(encryptedPayload).digest()
trialValue, nonce = proofofwork.run(target, initialHash)
logger.info(
'(For msg message via API) Found proof of work %s\nNonce: %s\n'
'POW took %s seconds. %s nonce trials per second.',
trialValue, nonce, int(time.time() - powStartTime),
nonce / (time.time() - powStartTime)
)
encryptedPayload = pack('>Q', nonce) + encryptedPayload
expiresTime = unpack('>Q', encryptedPayload[0:8])[0]
objectType = unpack('>I', encryptedPayload[8:12])[0]
TTL = expiresTime - time.time() + 300 # a bit of extra padding
# Let us do the POW and attach it to the front
target = 2**64 / (
requiredAverageProofOfWorkNonceTrialsPerByte * (
len(encryptedPayload) + 8
+ requiredPayloadLengthExtraBytes + ((
TTL * (
len(encryptedPayload) + 8
+ requiredPayloadLengthExtraBytes
)) / (2 ** 16))
))
logger.debug("expiresTime: %s", expiresTime)
logger.debug("TTL: %s", TTL)
logger.debug("objectType: %s", objectType)
logger.info(
'(For msg message via API) Doing proof of work. Total required'
' difficulty: %s\nRequired small message difficulty: %s',
float(requiredAverageProofOfWorkNonceTrialsPerByte)
/ defaults.networkDefaultProofOfWorkNonceTrialsPerByte,
float(requiredPayloadLengthExtraBytes)
/ defaults.networkDefaultPayloadLengthExtraBytes,
)
powStartTime = time.time()
initialHash = hashlib.sha512(encryptedPayload).digest()
trialValue, nonce = proofofwork.run(target, initialHash)
logger.info(
'(For msg message via API) Found proof of work %s\nNonce: %s\n'
'POW took %s seconds. %s nonce trials per second.',
trialValue, nonce, int(time.time() - powStartTime),
nonce / (time.time() - powStartTime)
)
encryptedPayload = pack('>Q', nonce) + encryptedPayload
parserPos = 20
_, objectVersionLength = decodeVarint(
encryptedPayload[parserPos:parserPos + 10])
parserPos += objectVersionLength
toStreamNumber, _ = decodeVarint(
encryptedPayload[parserPos:parserPos + 10])
inventoryHash = calculateInventoryHash(encryptedPayload)
state.Inventory[inventoryHash] = (
Inventory()[inventoryHash] = (
objectType, toStreamNumber, encryptedPayload,
expiresTime, b''
expiresTime, ''
)
logger.info(
'Broadcasting inv for msg(API disseminatePreEncryptedMsg'
' command): %s', hexlify(inventoryHash))
invQueue.put((toStreamNumber, inventoryHash))
return hexlify(inventoryHash).decode()
queues.invQueue.put((toStreamNumber, inventoryHash))
return hexlify(inventoryHash)
@command('trashSentMessageByAckData')
def HandleTrashSentMessageByAckDAta(self, ackdata):
@ -1370,8 +1367,8 @@ class BMRPCDispatcher(object):
# Let us do the POW
target = 2 ** 64 / ((
len(payload) + networkDefaultPayloadLengthExtraBytes + 8
) * networkDefaultProofOfWorkNonceTrialsPerByte)
len(payload) + defaults.networkDefaultPayloadLengthExtraBytes + 8
) * defaults.networkDefaultProofOfWorkNonceTrialsPerByte)
logger.info('(For pubkey message via API) Doing proof of work...')
initialHash = hashlib.sha512(payload).digest()
trialValue, nonce = proofofwork.run(target, initialHash)
@ -1395,13 +1392,13 @@ class BMRPCDispatcher(object):
inventoryHash = calculateInventoryHash(payload)
objectType = 1 # .. todo::: support v4 pubkeys
TTL = 28 * 24 * 60 * 60
state.Inventory[inventoryHash] = (
Inventory()[inventoryHash] = (
objectType, pubkeyStreamNumber, payload, int(time.time()) + TTL, ''
)
logger.info(
'broadcasting inv within API command disseminatePubkey with'
' hash: %s', hexlify(inventoryHash))
invQueue.put((pubkeyStreamNumber, inventoryHash))
queues.invQueue.put((pubkeyStreamNumber, inventoryHash))
@command(
'getMessageDataByDestinationHash', 'getMessageDataByDestinationTag')
@ -1475,18 +1472,18 @@ class BMRPCDispatcher(object):
Returns bitmessage connection information as dict with keys *inbound*,
*outbound*.
"""
if connectionpool is None:
if BMConnectionPool is None:
raise APIError(21, 'Could not import BMConnectionPool.')
inboundConnections = []
outboundConnections = []
for i in connectionpool.pool.inboundConnections.values():
for i in BMConnectionPool().inboundConnections.values():
inboundConnections.append({
'host': i.destination.host,
'port': i.destination.port,
'fullyEstablished': i.fullyEstablished,
'userAgent': str(i.userAgent)
})
for i in connectionpool.pool.outboundConnections.values():
for i in BMConnectionPool().outboundConnections.values():
outboundConnections.append({
'host': i.destination.host,
'port': i.destination.port,

View File

@ -30,6 +30,7 @@ import state
from addresses import addBMIfNotPresent, decodeAddress
from bmconfigparser import config
from helper_sql import sqlExecute, sqlQuery
from inventory import Inventory
# pylint: disable=global-statement
@ -144,8 +145,8 @@ def scrollbox(d, text, height=None, width=None):
def resetlookups():
"""Reset the Inventory Lookups"""
global inventorydata
inventorydata = state.Inventory.numberOfInventoryLookupsPerformed
state.Inventory.numberOfInventoryLookupsPerformed = 0
inventorydata = Inventory().numberOfInventoryLookupsPerformed
Inventory().numberOfInventoryLookupsPerformed = 0
Timer(1, resetlookups, ()).start()

View File

@ -6,12 +6,14 @@ allmail.py
==============
All mails are managed in allmail screen
"""
import logging
from kivy.clock import Clock
from kivy.properties import ListProperty, StringProperty
from kivy.properties import (
ListProperty,
StringProperty
)
from kivy.uix.screenmanager import Screen
from kivy.app import App
@ -19,52 +21,47 @@ from pybitmessage.bitmessagekivy.baseclass.common import (
show_limited_cnt, empty_screen_label, kivy_state_variables,
)
import logging
logger = logging.getLogger('default')
class AllMails(Screen):
"""AllMails Screen for Kivy UI"""
class Allmails(Screen):
"""Allmails Screen for kivy Ui"""
data = ListProperty()
has_refreshed = True
all_mails = ListProperty()
account = StringProperty()
label_str = 'No messages for this account.'
label_str = 'yet no message for this account!!!!!!!!!!!!!'
def __init__(self, *args, **kwargs):
"""Initialize the AllMails screen."""
super().__init__(*args, **kwargs) # pylint: disable=missing-super-argument
"""Method Parsing the address"""
super(Allmails, self).__init__(*args, **kwargs)
self.kivy_state = kivy_state_variables()
self._initialize_selected_address()
if self.kivy_state.selected_address == '':
if App.get_running_app().identity_list:
self.kivy_state.selected_address = App.get_running_app().identity_list[0]
Clock.schedule_once(self.init_ui, 0)
def _initialize_selected_address(self):
"""Initialize the selected address from the identity list."""
if not self.kivy_state.selected_address and App.get_running_app().identity_list:
self.kivy_state.selected_address = App.get_running_app().identity_list[0]
def init_ui(self, dt=0):
"""Initialize the UI by loading the message list."""
self.load_message_list()
logger.debug("UI initialized after %s seconds.", dt)
"""Clock Schdule for method all mails"""
self.loadMessagelist()
logger.debug(dt)
def load_message_list(self):
"""Load the Inbox, Sent, and Draft message lists."""
def loadMessagelist(self):
"""Load Inbox, Sent anf Draft list of messages"""
self.account = self.kivy_state.selected_address
self.ids.tag_label.text = 'All Mails' if self.all_mails else ''
self._update_mail_count()
def _update_mail_count(self):
"""Update the mail count and handle empty states."""
self.ids.tag_label.text = ''
if self.all_mails:
total_count = int(self.kivy_state.sent_count) + int(self.kivy_state.inbox_count)
self.kivy_state.all_count = str(total_count)
self.set_all_mail_count(self.kivy_state.all_count)
self.ids.tag_label.text = 'All Mails'
self.kivy_state.all_count = str(
int(self.kivy_state.sent_count) + int(self.kivy_state.inbox_count))
self.set_AllmailCnt(self.kivy_state.all_count)
else:
self.set_all_mail_count('0')
self.set_AllmailCnt('0')
self.ids.ml.add_widget(empty_screen_label(self.label_str))
@staticmethod
def set_all_mail_count(count):
"""Set the message count for all mails."""
allmail_count_widget = App.get_running_app().root.ids.content_drawer.ids.allmail_cnt
allmail_count_widget.ids.badge_txt.text = show_limited_cnt(int(count))
def set_AllmailCnt(Count):
"""This method is used to set allmails message count"""
allmailCnt_obj = App.get_running_app().root.ids.content_drawer.ids.allmail_cnt
allmailCnt_obj.ids.badge_txt.text = show_limited_cnt(int(Count))

View File

@ -5,50 +5,54 @@
draft.py
==============
Draft screen for managing draft messages in Kivy UI.
Draft screen
"""
from kivy.clock import Clock
from kivy.properties import ListProperty, StringProperty
from kivy.properties import (
ListProperty,
StringProperty
)
from kivy.uix.screenmanager import Screen
from kivy.app import App
from pybitmessage.bitmessagekivy.baseclass.common import (
show_limited_cnt, empty_screen_label, kivy_state_variables
show_limited_cnt, empty_screen_label,
kivy_state_variables
)
import logging
logger = logging.getLogger('default')
class Draft(Screen):
"""Draft screen class for Kivy UI"""
"""Draft screen class for kivy Ui"""
data = ListProperty()
account = StringProperty()
queryreturn = ListProperty()
has_refreshed = True
label_str = "Yet no message for this account!"
label_str = "yet no message for this account!!!!!!!!!!!!!"
def __init__(self, *args, **kwargs):
"""Initialize the Draft screen and set the default account"""
super().__init__(*args, **kwargs)
"""Method used for storing draft messages"""
super(Draft, self).__init__(*args, **kwargs)
self.kivy_state = kivy_state_variables()
if not self.kivy_state.selected_address:
if self.kivy_state.selected_address == '':
if App.get_running_app().identity_list:
self.kivy_state.selected_address = App.get_running_app().identity_list[0]
Clock.schedule_once(self.init_ui, 0)
def init_ui(self, dt=0):
"""Initialize the UI and load draft messages"""
"""Clock Schedule for method draft accounts"""
self.load_draft()
logger.debug(f"UI initialized with dt: {dt}") # noqa: E999
logger.debug(dt)
def load_draft(self, where="", what=""):
"""Load the list of draft messages"""
"""Load draft list for Draft messages"""
self.set_draft_count('0')
self.ids.ml.add_widget(empty_screen_label(self.label_str))
@staticmethod
def set_draft_count(count):
"""Set the count of draft messages in the UI"""
draft_count_obj = App.get_running_app().root.ids.content_drawer.ids.draft_cnt
draft_count_obj.ids.badge_txt.text = show_limited_cnt(int(count))
def set_draft_count(Count):
"""Set the count of draft mails"""
draftCnt_obj = App.get_running_app().root.ids.content_drawer.ids.draft_cnt
draftCnt_obj.ids.badge_txt.text = show_limited_cnt(int(Count))

View File

@ -6,14 +6,18 @@
Kivy UI for inbox screen
"""
from kivy.clock import Clock
from kivy.properties import ListProperty, StringProperty
from kivy.properties import (
ListProperty,
StringProperty
)
from kivy.app import App
from kivy.uix.screenmanager import Screen
from pybitmessage.bitmessagekivy.baseclass.common import kivy_state_variables, load_image_path
class Inbox(Screen):
"""Inbox Screen class for Kivy UI"""
"""Inbox Screen class for kivy Ui"""
queryreturn = ListProperty()
has_refreshed = True
@ -22,32 +26,33 @@ class Inbox(Screen):
label_str = "Yet no message for this account!"
def __init__(self, *args, **kwargs):
"""Initialize Kivy variables and set up the UI"""
super().__init__(*args, **kwargs) # pylint: disable=missing-super-argument
"""Initialize kivy variables"""
super(Inbox, self).__init__(*args, **kwargs)
self.kivy_running_app = App.get_running_app()
self.kivy_state = kivy_state_variables()
self.image_dir = load_image_path()
Clock.schedule_once(self.init_ui, 0)
def set_default_address(self):
"""Set the default address if none is selected"""
if not self.kivy_state.selected_address and self.kivy_running_app.identity_list:
self.kivy_state.selected_address = self.kivy_running_app.identity_list[0]
def set_defaultAddress(self):
"""Set default address"""
if self.kivy_state.selected_address == "":
if self.kivy_running_app.identity_list:
self.kivy_state.selected_address = self.kivy_running_app.identity_list[0]
def init_ui(self, dt=0):
"""Initialize UI and load message list"""
"""loadMessagelist() call at specific interval"""
self.loadMessagelist()
def loadMessagelist(self, where="", what=""):
"""Load inbox messages"""
self.set_default_address()
"""Load inbox list for inbox messages"""
self.set_defaultAddress()
self.account = self.kivy_state.selected_address
def refresh_callback(self, *args):
"""Refresh the inbox messages while showing a loading spinner"""
"""Load inbox messages while wating-loader spins & called in inbox.kv"""
def refresh_on_scroll_down(interval):
"""Reset search fields and reload data on scroll"""
"""Reset fields and load data on scrolling upside down"""
self.kivy_state.searching_text = ""
self.children[2].children[1].ids.search_field.text = ""
self.ids.ml.clear_widgets()

View File

@ -174,7 +174,7 @@ class MailDetail(Screen): # pylint: disable=too-many-instance-attributes
self.parent.screens[3].clear_widgets()
self.parent.screens[3].add_widget(Factory.Trash())
self.parent.screens[14].clear_widgets()
self.parent.screens[14].add_widget(Factory.AllMails())
self.parent.screens[14].add_widget(Factory.Allmails())
Clock.schedule_once(self.callback_for_delete, 4)
def callback_for_delete(self, dt=0):

View File

@ -14,7 +14,7 @@ from kivy.uix.screenmanager import Screen
from pybitmessage import state
if os.environ.get('INSTALL_TESTS', False) and not state.backend_py3_compatible:
from pybitmessage.mockbm import kivy_main
from pybitmessage.mock import kivy_main
stats = kivy_main.network.stats
objectracker = kivy_main.network.objectracker
else:

View File

@ -40,8 +40,7 @@ def generate_hash(string):
try:
# make input case insensitive
string = str.lower(string)
hash_object = hashlib.md5( # nosec B324, B303
str.encode(string))
hash_object = hashlib.md5(str.encode(string)) # nosec B303
print(hash_object.hexdigest())
# returned object is a hex string
return hash_object.hexdigest()

View File

@ -1,4 +1,4 @@
<AllMails>:
<Allmails>:
name: 'allmails'
BoxLayout:
orientation: 'vertical'

View File

@ -250,7 +250,7 @@ MDNavigationLayout:
id:id_sent
Trash:
id:id_trash
AllMails:
Allmails:
id:id_allmail
Draft:
id:id_draft

View File

@ -45,7 +45,7 @@ from pybitmessage.bitmessagekivy.baseclass.popup import (
from pybitmessage.bitmessagekivy.baseclass.login import * # noqa: F401, F403
from pybitmessage.bitmessagekivy.uikivysignaler import UIkivySignaler
from pybitmessage.mockbm.helper_startup import loadConfig, total_encrypted_messages_per_month
from pybitmessage.mock.helper_startup import loadConfig, total_encrypted_messages_per_month
logger = logging.getLogger('default')

View File

@ -59,7 +59,7 @@
"All Mails": {
"kv_string": "allmails",
"name_screen": "allmails",
"Import": "from pybitmessage.bitmessagekivy.baseclass.allmail import AllMails"
"Import": "from pybitmessage.bitmessagekivy.baseclass.allmail import Allmails"
},
"MailDetail": {
"kv_string": "maildetail",

View File

@ -7,8 +7,6 @@ import shutil
import tempfile
from time import time, sleep
from requests.exceptions import ChunkedEncodingError
from telenium.tests import TeleniumTestCase
from telenium.client import TeleniumHttpException
@ -34,7 +32,7 @@ def cleanup(files=_files):
class TeleniumTestProcess(TeleniumTestCase):
"""Setting Screen Functionality Testing"""
cmd_entrypoint = [os.path.join(os.path.abspath(os.getcwd()), 'src', 'mockbm', 'kivy_main.py')]
cmd_entrypoint = [os.path.join(os.path.abspath(os.getcwd()), 'src', 'mock', 'kivy_main.py')]
@classmethod
def setUpClass(cls):
@ -56,10 +54,7 @@ class TeleniumTestProcess(TeleniumTestCase):
def tearDownClass(cls):
"""Ensures that pybitmessage stopped and removes files"""
# pylint: disable=no-member
try:
super(TeleniumTestProcess, cls).tearDownClass()
except ChunkedEncodingError:
pass
super(TeleniumTestProcess, cls).tearDownClass()
cleanup()
def assert_wait_no_except(self, selector, timeout=-1, value='inbox'):

View File

@ -12,7 +12,6 @@ The PyBitmessage startup script
import os
import sys
try:
import pathmagic
except ImportError:
@ -157,6 +156,13 @@ class Main(object):
set_thread_name("PyBitmessage")
state.dandelion = config.safeGetInt('network', 'dandelion')
# dandelion requires outbound connections, without them,
# stem objects will get stuck forever
if state.dandelion and not config.safeGetBoolean(
'bitmessagesettings', 'sendoutgoingconnections'):
state.dandelion = 0
if state.testmode or config.safeGetBoolean(
'bitmessagesettings', 'extralowdifficulty'):
defaults.networkDefaultProofOfWorkNonceTrialsPerByte = int(
@ -170,7 +176,8 @@ class Main(object):
# The closeEvent should command this thread to exit gracefully.
sqlLookup.daemon = False
sqlLookup.start()
state.Inventory = Inventory() # init
Inventory() # init
if state.enableObjProc: # Not needed if objproc is disabled
# Start the address generation thread
@ -231,7 +238,8 @@ class Main(object):
upnpThread = upnp.uPnPThread()
upnpThread.start()
else:
network.connectionpool.pool.connectToStream(1)
# Populate with hardcoded value (same as connectToStream above)
state.streamsInWhichIAmParticipating.append(1)
if not daemon and state.enableGUI:
if state.curses:

View File

@ -62,9 +62,6 @@ except ImportError:
get_plugins = False
is_windows = sys.platform.startswith('win')
# TODO: rewrite
def powQueueSize():
"""Returns the size of queues.workerQueue including current unfinished work"""
@ -83,7 +80,7 @@ def openKeysFile():
keysfile = os.path.join(state.appdata, 'keys.dat')
if 'linux' in sys.platform:
subprocess.call(["xdg-open", keysfile])
elif is_windows:
elif sys.platform.startswith('win'):
os.startfile(keysfile) # pylint: disable=no-member
@ -871,7 +868,7 @@ class MyForm(settingsmixin.SMainWindow):
"""
startonlogon = config.safeGetBoolean(
'bitmessagesettings', 'startonlogon')
if is_windows: # Auto-startup for Windows
if sys.platform.startswith('win'): # Auto-startup for Windows
RUN_PATH = "HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Run"
settings = QtCore.QSettings(
RUN_PATH, QtCore.QSettings.NativeFormat)
@ -4236,14 +4233,6 @@ class BitmessageQtApplication(QtGui.QApplication):
# Unique identifier for this application
uuid = '6ec0149b-96e1-4be1-93ab-1465fb3ebf7c'
@staticmethod
def get_windowstyle():
"""Get window style set in config or default"""
return config.safeGet(
'bitmessagesettings', 'windowstyle',
'Windows' if is_windows else 'GTK+'
)
def __init__(self, *argv):
super(BitmessageQtApplication, self).__init__(*argv)
id = BitmessageQtApplication.uuid
@ -4252,14 +4241,6 @@ class BitmessageQtApplication(QtGui.QApplication):
QtCore.QCoreApplication.setOrganizationDomain("bitmessage.org")
QtCore.QCoreApplication.setApplicationName("pybitmessageqt")
self.setStyle(self.get_windowstyle())
font = config.safeGet('bitmessagesettings', 'font')
if font:
# family, size, weight = font.split(',')
family, size = font.split(',')
self.setFont(QtGui.QFont(family, int(size)))
self.server = None
self.is_running = False

View File

@ -9,10 +9,10 @@ from PyQt4 import QtCore, QtGui
import queues
import widgets
import state
from account import AccountMixin, GatewayAccount, MailchuckAccount, accountClass
from addresses import addBMIfNotPresent, decodeAddress, encodeVarint
from bmconfigparser import config as global_config
from inventory import Inventory
from tr import _translate
@ -190,13 +190,13 @@ class NewSubscriptionDialog(AddressDataDialog):
" broadcasts."
))
else:
state.Inventory.flush()
Inventory().flush()
doubleHashOfAddressData = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersion)
+ encodeVarint(streamNumber) + ripe
).digest()).digest()
tag = doubleHashOfAddressData[32:]
self.recent = state.Inventory.by_type_and_tag(3, tag)
self.recent = Inventory().by_type_and_tag(3, tag)
count = len(self.recent)
if count == 0:
self.checkBoxDisplayMessagesAlreadyInInventory.setText(

View File

@ -61,8 +61,7 @@ class Ui_MainWindow(object):
self.tabWidget.setMinimumSize(QtCore.QSize(0, 0))
self.tabWidget.setBaseSize(QtCore.QSize(0, 0))
font = QtGui.QFont()
base_size = QtGui.QApplication.instance().font().pointSize()
font.setPointSize(int(base_size * 0.75))
font.setPointSize(9)
self.tabWidget.setFont(font)
self.tabWidget.setTabPosition(QtGui.QTabWidget.North)
self.tabWidget.setTabShape(QtGui.QTabWidget.Rounded)

View File

@ -10,7 +10,8 @@ import l10n
import network.stats
import state
import widgets
from network import connectionpool, knownnodes
from inventory import Inventory
from network import BMConnectionPool, knownnodes
from retranslateui import RetranslateMixin
from tr import _translate
from uisignaler import UISignaler
@ -49,7 +50,7 @@ class NetworkStatus(QtGui.QWidget, RetranslateMixin):
def startUpdate(self):
"""Start a timer to update counters every 2 seconds"""
state.Inventory.numberOfInventoryLookupsPerformed = 0
Inventory().numberOfInventoryLookupsPerformed = 0
self.runEveryTwoSeconds()
self.timer.start(2000) # milliseconds
@ -148,16 +149,16 @@ class NetworkStatus(QtGui.QWidget, RetranslateMixin):
# pylint: disable=too-many-branches,undefined-variable
if outbound:
try:
c = connectionpool.pool.outboundConnections[destination]
c = BMConnectionPool().outboundConnections[destination]
except KeyError:
if add:
return
else:
try:
c = connectionpool.pool.inboundConnections[destination]
c = BMConnectionPool().inboundConnections[destination]
except KeyError:
try:
c = connectionpool.pool.inboundConnections[destination.host]
c = BMConnectionPool().inboundConnections[destination.host]
except KeyError:
if add:
return
@ -201,7 +202,7 @@ class NetworkStatus(QtGui.QWidget, RetranslateMixin):
self.tableWidgetConnectionCount.item(0, 0).setData(QtCore.Qt.UserRole, destination)
self.tableWidgetConnectionCount.item(0, 1).setData(QtCore.Qt.UserRole, outbound)
else:
if not connectionpool.pool.inboundConnections:
if not BMConnectionPool().inboundConnections:
self.window().setStatusIcon('yellow')
for i in range(self.tableWidgetConnectionCount.rowCount()):
if self.tableWidgetConnectionCount.item(i, 0).data(QtCore.Qt.UserRole).toPyObject() != destination:
@ -228,8 +229,8 @@ class NetworkStatus(QtGui.QWidget, RetranslateMixin):
def runEveryTwoSeconds(self):
"""Updates counters, runs every 2 seconds if the timer is running"""
self.labelLookupsPerSecond.setText(_translate("networkstatus", "Inventory lookups per second: %1").arg(
str(state.Inventory.numberOfInventoryLookupsPerformed / 2)))
state.Inventory.numberOfInventoryLookupsPerformed = 0
str(Inventory().numberOfInventoryLookupsPerformed / 2)))
Inventory().numberOfInventoryLookupsPerformed = 0
self.updateNumberOfBytes()
self.updateNumberOfObjectsToBeSynced()

View File

@ -20,8 +20,7 @@ import widgets
from bmconfigparser import config as config_obj
from helper_sql import sqlExecute, sqlStoredProcedure
from helper_startup import start_proxyconfig
from network import connectionpool, knownnodes
from network.announcethread import AnnounceThread
from network import knownnodes, AnnounceThread
from network.asyncore_pollchoose import set_rates
from tr import _translate
@ -41,17 +40,14 @@ def getSOCKSProxyType(config):
class SettingsDialog(QtGui.QDialog):
"""The "Settings" dialog"""
# pylint: disable=too-many-instance-attributes
def __init__(self, parent=None, firstrun=False):
super(SettingsDialog, self).__init__(parent)
widgets.load('settings.ui', self)
self.app = QtGui.QApplication.instance()
self.parent = parent
self.firstrun = firstrun
self.config = config_obj
self.net_restart_needed = False
self.font_setting = None
self.timer = QtCore.QTimer()
if self.config.safeGetBoolean('bitmessagesettings', 'dontconnect'):
@ -88,15 +84,6 @@ class SettingsDialog(QtGui.QDialog):
def adjust_from_config(self, config):
"""Adjust all widgets state according to config settings"""
# pylint: disable=too-many-branches,too-many-statements
current_style = self.app.get_windowstyle()
for i, sk in enumerate(QtGui.QStyleFactory.keys()):
self.comboBoxStyle.addItem(sk)
if sk == current_style:
self.comboBoxStyle.setCurrentIndex(i)
self.save_font_setting(self.app.font())
if not self.parent.tray.isSystemTrayAvailable():
self.groupBoxTray.setEnabled(False)
self.groupBoxTray.setTitle(_translate(
@ -149,7 +136,7 @@ class SettingsDialog(QtGui.QDialog):
"MainWindow",
"Tray notifications not yet supported on your OS."))
if not sys.platform.startswith('win') and not self.parent.desktop:
if 'win' not in sys.platform and not self.parent.desktop:
self.checkBoxStartOnLogon.setDisabled(True)
self.checkBoxStartOnLogon.setText(_translate(
"MainWindow", "Start-on-login not yet supported on your OS."))
@ -177,7 +164,7 @@ class SettingsDialog(QtGui.QDialog):
if self._proxy_type:
for node, info in six.iteritems(
knownnodes.knownNodes.get(
min(connectionpool.pool.streams), [])
min(state.streamsInWhichIAmParticipating), [])
):
if (
node.host.endswith('.onion') and len(node.host) > 22
@ -334,18 +321,6 @@ class SettingsDialog(QtGui.QDialog):
if status == 'success':
self.parent.namecoin = nc
def save_font_setting(self, font):
"""Save user font setting and set the buttonFont text"""
font_setting = (font.family(), font.pointSize())
self.buttonFont.setText('{} {}'.format(*font_setting))
self.font_setting = '{},{}'.format(*font_setting)
def choose_font(self):
"""Show the font selection dialog"""
font, valid = QtGui.QFontDialog.getFont()
if valid:
self.save_font_setting(font)
def accept(self):
"""A callback for accepted event of buttonBox (OK button pressed)"""
# pylint: disable=too-many-branches,too-many-statements
@ -372,20 +347,6 @@ class SettingsDialog(QtGui.QDialog):
self.config.set('bitmessagesettings', 'replybelow', str(
self.checkBoxReplyBelow.isChecked()))
window_style = str(self.comboBoxStyle.currentText())
if self.app.get_windowstyle() != window_style or self.config.safeGet(
'bitmessagesettings', 'font'
) != self.font_setting:
self.config.set('bitmessagesettings', 'windowstyle', window_style)
self.config.set('bitmessagesettings', 'font', self.font_setting)
queues.UISignalQueue.put((
'updateStatusBar', (
_translate(
"MainWindow",
"You need to restart the application to apply"
" the window style or default font."), 1)
))
lang = str(self.languageComboBox.itemData(
self.languageComboBox.currentIndex()).toString())
self.config.set('bitmessagesettings', 'userlocale', lang)

View File

@ -147,32 +147,6 @@
</property>
</widget>
</item>
<item row="9" column="0">
<widget class="QGroupBox" name="groupBoxStyle">
<property name="title">
<string>Custom Style</string>
</property>
<layout class="QHBoxLayout">
<item>
<widget class="QComboBox" name="comboBoxStyle">
<property name="minimumSize">
<size>
<width>100</width>
<height>0</height>
</size>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="buttonFont">
<property name="text">
<string>Font</string>
</property>
</widget>
</item>
</layout>
</widget>
</item>
<item row="9" column="1">
<widget class="QGroupBox" name="groupBox">
<property name="title">
@ -1228,11 +1202,5 @@
</hint>
</hints>
</connection>
<connection>
<sender>buttonFont</sender>
<signal>clicked()</signal>
<receiver>settingsDialog</receiver>
<slot>choose_font</slot>
</connection>
</connections>
</ui>

View File

@ -1,9 +1,9 @@
"""bitmessageqt tests"""
from .addressbook import TestAddressbook
from .main import TestMain, TestUISignaler
from .settings import TestSettings
from .support import TestSupport
from addressbook import TestAddressbook
from main import TestMain, TestUISignaler
from settings import TestSettings
from support import TestSupport
__all__ = [
"TestAddressbook", "TestMain", "TestSettings", "TestSupport",

View File

@ -1,23 +1,19 @@
"""Common definitions for bitmessageqt tests"""
import Queue
import sys
import unittest
from PyQt4 import QtCore, QtGui
from six.moves import queue
import bitmessageqt
from bitmessageqt import _translate, config, queues
import queues
from tr import _translate
class TestBase(unittest.TestCase):
"""Base class for bitmessageqt test case"""
@classmethod
def setUpClass(cls):
"""Provide the UI test cases with common settings"""
cls.config = config
def setUp(self):
self.app = (
QtGui.QApplication.instance()
@ -28,21 +24,14 @@ class TestBase(unittest.TestCase):
self.window.appIndicatorInit(self.app)
def tearDown(self):
"""Search for exceptions in closures called by timer and fail if any"""
# self.app.deleteLater()
concerning = []
while True:
try:
thread, exc = queues.excQueue.get(block=False)
except queue.Empty:
break
except Queue.Empty:
return
if thread == 'tests':
concerning.append(exc)
if concerning:
self.fail(
'Exceptions found in the main thread:\n%s' % '\n'.join((
str(e) for e in concerning
)))
self.fail('Exception in the main thread: %s' % exc)
class TestMain(unittest.TestCase):

View File

@ -1,14 +1,10 @@
"""Tests for PyBitmessage settings"""
import threading
import time
from PyQt4 import QtCore, QtGui, QtTest
from main import TestBase
from bmconfigparser import config
from bitmessageqt import settings
from .main import TestBase
class TestSettings(TestBase):
"""A test case for the "Settings" dialog"""
@ -18,7 +14,8 @@ class TestSettings(TestBase):
def test_udp(self):
"""Test the effect of checkBoxUDP"""
udp_setting = config.safeGetBoolean('bitmessagesettings', 'udp')
udp_setting = config.safeGetBoolean(
'bitmessagesettings', 'udp')
self.assertEqual(udp_setting, self.dialog.checkBoxUDP.isChecked())
self.dialog.checkBoxUDP.setChecked(not udp_setting)
self.dialog.accept()
@ -35,44 +32,3 @@ class TestSettings(TestBase):
else:
if not udp_setting:
self.fail('No Announcer thread found while udp set to True')
def test_styling(self):
"""Test custom windows style and font"""
style_setting = config.safeGet('bitmessagesettings', 'windowstyle')
font_setting = config.safeGet('bitmessagesettings', 'font')
self.assertIs(style_setting, None)
self.assertIs(font_setting, None)
style_control = self.dialog.comboBoxStyle
self.assertEqual(
style_control.currentText(), self.app.get_windowstyle())
def call_font_dialog():
"""A function to get the open font dialog and accept it"""
font_dialog = QtGui.QApplication.activeModalWidget()
self.assertTrue(isinstance(font_dialog, QtGui.QFontDialog))
selected_font = font_dialog.currentFont()
self.assertEqual(
config.safeGet('bitmessagesettings', 'font'), '{},{}'.format(
selected_font.family(), selected_font.pointSize()))
font_dialog.accept()
self.dialog.accept()
self.assertEqual(
config.safeGet('bitmessagesettings', 'windowstyle'),
style_control.currentText())
def click_font_button():
"""Use QtTest to click the button"""
QtTest.QTest.mouseClick(
self.dialog.buttonFont, QtCore.Qt.LeftButton)
style_count = style_control.count()
self.assertGreater(style_count, 1)
for i in range(style_count):
if i != style_control.currentIndex():
style_control.setCurrentIndex(i)
break
QtCore.QTimer.singleShot(30, click_font_button)
QtCore.QTimer.singleShot(60, call_font_dialog)
time.sleep(2)

View File

@ -1,7 +1,7 @@
"""
A thread for creating addresses
"""
import hashlib
import time
from binascii import hexlify
@ -14,7 +14,10 @@ import shared
import state
from addresses import decodeAddress, encodeAddress, encodeVarint
from bmconfigparser import config
from fallback import RIPEMD160Hash
from network import StoppableThread
from pyelliptic import arithmetic
from pyelliptic.openssl import OpenSSL
from tr import _translate
@ -127,13 +130,18 @@ class addressGenerator(StoppableThread):
# the \x00 or \x00\x00 bytes thus making the address shorter.
startTime = time.time()
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix = 0
privSigningKey, pubSigningKey = highlevelcrypto.random_keys()
potentialPrivSigningKey = OpenSSL.rand(32)
potentialPubSigningKey = highlevelcrypto.pointMult(
potentialPrivSigningKey)
while True:
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix += 1
potentialPrivEncryptionKey, potentialPubEncryptionKey = \
highlevelcrypto.random_keys()
ripe = highlevelcrypto.to_ripe(
pubSigningKey, potentialPubEncryptionKey)
potentialPrivEncryptionKey = OpenSSL.rand(32)
potentialPubEncryptionKey = highlevelcrypto.pointMult(
potentialPrivEncryptionKey)
sha = hashlib.new('sha512')
sha.update(
potentialPubSigningKey + potentialPubEncryptionKey)
ripe = RIPEMD160Hash(sha.digest()).digest()
if (
ripe[:numberOfNullBytesDemandedOnFrontOfRipeHash]
== b'\x00' * numberOfNullBytesDemandedOnFrontOfRipeHash
@ -156,10 +164,20 @@ class addressGenerator(StoppableThread):
address = encodeAddress(
addressVersionNumber, streamNumber, ripe)
privSigningKeyWIF = highlevelcrypto.encodeWalletImportFormat(
privSigningKey)
privEncryptionKeyWIF = highlevelcrypto.encodeWalletImportFormat(
potentialPrivEncryptionKey)
# An excellent way for us to store our keys
# is in Wallet Import Format. Let us convert now.
# https://en.bitcoin.it/wiki/Wallet_import_format
privSigningKey = b'\x80' + potentialPrivSigningKey
checksum = hashlib.sha256(hashlib.sha256(
privSigningKey).digest()).digest()[0:4]
privSigningKeyWIF = arithmetic.changebase(
privSigningKey + checksum, 256, 58)
privEncryptionKey = b'\x80' + potentialPrivEncryptionKey
checksum = hashlib.sha256(hashlib.sha256(
privEncryptionKey).digest()).digest()[0:4]
privEncryptionKeyWIF = arithmetic.changebase(
privEncryptionKey + checksum, 256, 58)
config.add_section(address)
config.set(address, 'label', label)
@ -231,19 +249,24 @@ class addressGenerator(StoppableThread):
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix = 0
while True:
numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix += 1
potentialPrivSigningKey, potentialPubSigningKey = \
highlevelcrypto.deterministic_keys(
deterministicPassphrase,
encodeVarint(signingKeyNonce))
potentialPrivEncryptionKey, potentialPubEncryptionKey = \
highlevelcrypto.deterministic_keys(
deterministicPassphrase,
encodeVarint(encryptionKeyNonce))
potentialPrivSigningKey = hashlib.sha512(
deterministicPassphrase
+ encodeVarint(signingKeyNonce)
).digest()[:32]
potentialPrivEncryptionKey = hashlib.sha512(
deterministicPassphrase
+ encodeVarint(encryptionKeyNonce)
).digest()[:32]
potentialPubSigningKey = highlevelcrypto.pointMult(
potentialPrivSigningKey)
potentialPubEncryptionKey = highlevelcrypto.pointMult(
potentialPrivEncryptionKey)
signingKeyNonce += 2
encryptionKeyNonce += 2
ripe = highlevelcrypto.to_ripe(
potentialPubSigningKey, potentialPubEncryptionKey)
sha = hashlib.new('sha512')
sha.update(
potentialPubSigningKey + potentialPubEncryptionKey)
ripe = RIPEMD160Hash(sha.digest()).digest()
if (
ripe[:numberOfNullBytesDemandedOnFrontOfRipeHash]
== b'\x00' * numberOfNullBytesDemandedOnFrontOfRipeHash
@ -280,12 +303,21 @@ class addressGenerator(StoppableThread):
saveAddressToDisk = False
if saveAddressToDisk and live:
privSigningKeyWIF = \
highlevelcrypto.encodeWalletImportFormat(
potentialPrivSigningKey)
privEncryptionKeyWIF = \
highlevelcrypto.encodeWalletImportFormat(
potentialPrivEncryptionKey)
# An excellent way for us to store our keys is
# in Wallet Import Format. Let us convert now.
# https://en.bitcoin.it/wiki/Wallet_import_format
privSigningKey = b'\x80' + potentialPrivSigningKey
checksum = hashlib.sha256(hashlib.sha256(
privSigningKey).digest()).digest()[0:4]
privSigningKeyWIF = arithmetic.changebase(
privSigningKey + checksum, 256, 58)
privEncryptionKey = b'\x80' + \
potentialPrivEncryptionKey
checksum = hashlib.sha256(hashlib.sha256(
privEncryptionKey).digest()).digest()[0:4]
privEncryptionKeyWIF = arithmetic.changebase(
privEncryptionKey + checksum, 256, 58)
try:
config.add_section(address)
@ -337,10 +369,10 @@ class addressGenerator(StoppableThread):
highlevelcrypto.makeCryptor(
hexlify(potentialPrivEncryptionKey))
shared.myAddressesByHash[ripe] = address
tag = highlevelcrypto.double_sha512(
tag = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber)
+ encodeVarint(streamNumber) + ripe
)[32:]
).digest()).digest()[32:]
shared.myAddressesByTag[tag] = address
if addressVersionNumber == 3:
# If this is a chan address,

View File

@ -24,13 +24,14 @@ import queues
import shared
import state
from addresses import (
decodeAddress, decodeVarint,
calculateInventoryHash, decodeAddress, decodeVarint,
encodeAddress, encodeVarint, varintDecodeError
)
from bmconfigparser import config
from fallback import RIPEMD160Hash
from helper_sql import (
sql_ready, sql_timeout, SqlBulkExecute, sqlExecute, sqlQuery)
from network import knownnodes, invQueue
from network import bmproto, knownnodes
from network.node import Peer
from tr import _translate
@ -63,6 +64,7 @@ class objectProcessor(threading.Thread):
logger.debug(
'Loaded %s objects from disk into the objectProcessorQueue.',
len(queryreturn))
self._ack_obj = bmproto.BMStringParser()
self.successfullyDecryptMessageTimings = []
def run(self):
@ -299,20 +301,23 @@ class objectProcessor(threading.Thread):
'(within processpubkey) payloadLength less than 146.'
' Sanity check failed.')
readPosition += 4
pubSigningKey = '\x04' + data[readPosition:readPosition + 64]
publicSigningKey = data[readPosition:readPosition + 64]
# Is it possible for a public key to be invalid such that trying to
# encrypt or sign with it will cause an error? If it is, it would
# be easiest to test them here.
readPosition += 64
pubEncryptionKey = '\x04' + data[readPosition:readPosition + 64]
if len(pubEncryptionKey) < 65:
publicEncryptionKey = data[readPosition:readPosition + 64]
if len(publicEncryptionKey) < 64:
return logger.debug(
'publicEncryptionKey length less than 64. Sanity check'
' failed.')
readPosition += 64
# The data we'll store in the pubkeys table.
dataToStore = data[20:readPosition]
ripe = highlevelcrypto.to_ripe(pubSigningKey, pubEncryptionKey)
sha = hashlib.new('sha512')
sha.update(
'\x04' + publicSigningKey + '\x04' + publicEncryptionKey)
ripe = RIPEMD160Hash(sha.digest()).digest()
if logger.isEnabledFor(logging.DEBUG):
logger.debug(
@ -320,7 +325,7 @@ class objectProcessor(threading.Thread):
'\nripe %s\npublicSigningKey in hex: %s'
'\npublicEncryptionKey in hex: %s',
addressVersion, streamNumber, hexlify(ripe),
hexlify(pubSigningKey), hexlify(pubEncryptionKey)
hexlify(publicSigningKey), hexlify(publicEncryptionKey)
)
address = encodeAddress(addressVersion, streamNumber, ripe)
@ -350,9 +355,9 @@ class objectProcessor(threading.Thread):
' Sanity check failed.')
return
readPosition += 4
pubSigningKey = '\x04' + data[readPosition:readPosition + 64]
publicSigningKey = '\x04' + data[readPosition:readPosition + 64]
readPosition += 64
pubEncryptionKey = '\x04' + data[readPosition:readPosition + 64]
publicEncryptionKey = '\x04' + data[readPosition:readPosition + 64]
readPosition += 64
specifiedNonceTrialsPerByteLength = decodeVarint(
data[readPosition:readPosition + 10])[1]
@ -369,13 +374,15 @@ class objectProcessor(threading.Thread):
signature = data[readPosition:readPosition + signatureLength]
if highlevelcrypto.verify(
data[8:endOfSignedDataPosition],
signature, hexlify(pubSigningKey)):
signature, hexlify(publicSigningKey)):
logger.debug('ECDSA verify passed (within processpubkey)')
else:
logger.warning('ECDSA verify failed (within processpubkey)')
return
ripe = highlevelcrypto.to_ripe(pubSigningKey, pubEncryptionKey)
sha = hashlib.new('sha512')
sha.update(publicSigningKey + publicEncryptionKey)
ripe = RIPEMD160Hash(sha.digest()).digest()
if logger.isEnabledFor(logging.DEBUG):
logger.debug(
@ -383,7 +390,7 @@ class objectProcessor(threading.Thread):
'\nripe %s\npublicSigningKey in hex: %s'
'\npublicEncryptionKey in hex: %s',
addressVersion, streamNumber, hexlify(ripe),
hexlify(pubSigningKey), hexlify(pubEncryptionKey)
hexlify(publicSigningKey), hexlify(publicEncryptionKey)
)
address = encodeAddress(addressVersion, streamNumber, ripe)
@ -449,7 +456,7 @@ class objectProcessor(threading.Thread):
streamNumberAsClaimedByMsg, streamNumberAsClaimedByMsgLength = \
decodeVarint(data[readPosition:readPosition + 9])
readPosition += streamNumberAsClaimedByMsgLength
inventoryHash = highlevelcrypto.calculateInventoryHash(data)
inventoryHash = calculateInventoryHash(data)
initialDecryptionSuccessful = False
# This is not an acknowledgement bound for me. See if it is a message
@ -579,10 +586,13 @@ class objectProcessor(threading.Thread):
helper_bitcoin.calculateTestnetAddressFromPubkey(pubSigningKey)
)
# Used to detect and ignore duplicate messages in our inbox
sigHash = highlevelcrypto.double_sha512(signature)[32:]
sigHash = hashlib.sha512(
hashlib.sha512(signature).digest()).digest()[32:]
# calculate the fromRipe.
ripe = highlevelcrypto.to_ripe(pubSigningKey, pubEncryptionKey)
sha = hashlib.new('sha512')
sha.update(pubSigningKey + pubEncryptionKey)
ripe = RIPEMD160Hash(sha.digest()).digest()
fromAddress = encodeAddress(
sendersAddressVersionNumber, sendersStreamNumber, ripe)
@ -676,7 +686,8 @@ class objectProcessor(threading.Thread):
apiNotifyPath = config.safeGet(
'bitmessagesettings', 'apinotifypath')
if apiNotifyPath:
subprocess.call([apiNotifyPath, "newMessage"]) # nosec B603
subprocess.call( # nosec B603
[apiNotifyPath, "newMessage"])
# Let us now check and see whether our receiving address is
# behaving as a mailing list
@ -723,13 +734,7 @@ class objectProcessor(threading.Thread):
and not config.safeGetBoolean(toAddress, 'dontsendack')
and not config.safeGetBoolean(toAddress, 'chan')
):
ackPayload = ackData[24:]
objectType, toStreamNumber, expiresTime = \
protocol.decodeObjectParameters(ackPayload)
inventoryHash = highlevelcrypto.calculateInventoryHash(ackPayload)
state.Inventory[inventoryHash] = (
objectType, toStreamNumber, ackPayload, expiresTime, b'')
invQueue.put((toStreamNumber, inventoryHash))
self._ack_obj.send_data(ackData[24:])
# Display timing data
timeRequiredToAttemptToDecryptMessage = time.time(
@ -753,7 +758,7 @@ class objectProcessor(threading.Thread):
state.numberOfBroadcastsProcessed += 1
queues.UISignalQueue.put((
'updateNumberOfBroadcastsProcessed', 'no data'))
inventoryHash = highlevelcrypto.calculateInventoryHash(data)
inventoryHash = calculateInventoryHash(data)
readPosition = 20 # bypass the nonce, time, and object type
broadcastVersion, broadcastVersionLength = decodeVarint(
data[readPosition:readPosition + 9])
@ -875,8 +880,9 @@ class objectProcessor(threading.Thread):
requiredPayloadLengthExtraBytes)
endOfPubkeyPosition = readPosition
calculatedRipe = highlevelcrypto.to_ripe(
sendersPubSigningKey, sendersPubEncryptionKey)
sha = hashlib.new('sha512')
sha.update(sendersPubSigningKey + sendersPubEncryptionKey)
calculatedRipe = RIPEMD160Hash(sha.digest()).digest()
if broadcastVersion == 4:
if toRipe != calculatedRipe:
@ -886,10 +892,10 @@ class objectProcessor(threading.Thread):
' itself. Ignoring message.'
)
elif broadcastVersion == 5:
calculatedTag = highlevelcrypto.double_sha512(
calculatedTag = hashlib.sha512(hashlib.sha512(
encodeVarint(sendersAddressVersion)
+ encodeVarint(sendersStream) + calculatedRipe
)[32:]
).digest()).digest()[32:]
if calculatedTag != embeddedTag:
return logger.debug(
'The tag and encryption key used to encrypt this'
@ -919,7 +925,8 @@ class objectProcessor(threading.Thread):
return
logger.debug('ECDSA verify passed')
# Used to detect and ignore duplicate messages in our inbox
sigHash = highlevelcrypto.double_sha512(signature)[32:]
sigHash = hashlib.sha512(
hashlib.sha512(signature).digest()).digest()[32:]
fromAddress = encodeAddress(
sendersAddressVersion, sendersStream, calculatedRipe)
@ -993,10 +1000,10 @@ class objectProcessor(threading.Thread):
# Let us create the tag from the address and see if we were waiting
# for it.
elif addressVersion >= 4:
tag = highlevelcrypto.double_sha512(
tag = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersion) + encodeVarint(streamNumber)
+ ripe
)[32:]
).digest()).digest()[32:]
if tag in state.neededPubkeys:
del state.neededPubkeys[tag]
self.sendMessages(address)

View File

@ -27,7 +27,8 @@ import queues
import state
from bmconfigparser import config
from helper_sql import sqlExecute, sqlQuery
from network import connectionpool, knownnodes, StoppableThread
from inventory import Inventory
from network import BMConnectionPool, knownnodes, StoppableThread
from tr import _translate
@ -68,7 +69,7 @@ class singleCleaner(StoppableThread):
'updateStatusBar',
'Doing housekeeping (Flushing inventory in memory to disk...)'
))
state.Inventory.flush()
Inventory().flush()
queues.UISignalQueue.put(('updateStatusBar', ''))
# If we are running as a daemon then we are going to fill up the UI
@ -81,7 +82,7 @@ class singleCleaner(StoppableThread):
tick = int(time.time())
if timeWeLastClearedInventoryAndPubkeysTables < tick - 7380:
timeWeLastClearedInventoryAndPubkeysTables = tick
state.Inventory.clean()
Inventory().clean()
queues.workerQueue.put(('sendOnionPeerObj', ''))
# pubkeys
sqlExecute(
@ -108,7 +109,7 @@ class singleCleaner(StoppableThread):
# Cleanup knownnodes and handle possible severe exception
# while writing it to disk
if state.enableNetwork:
knownnodes.cleanupKnownNodes(connectionpool.pool)
knownnodes.cleanupKnownNodes()
except Exception as err:
if "Errno 28" in str(err):
self.logger.fatal(
@ -129,7 +130,7 @@ class singleCleaner(StoppableThread):
os._exit(1) # pylint: disable=protected-access
# inv/object tracking
for connection in connectionpool.pool.connections():
for connection in BMConnectionPool().connections():
connection.clean()
# discovery tracking

View File

@ -25,10 +25,13 @@ import queues
import shared
import state
import tr
from addresses import decodeAddress, decodeVarint, encodeVarint
from addresses import (
calculateInventoryHash, decodeAddress, decodeVarint, encodeVarint
)
from bmconfigparser import config
from helper_sql import sqlExecute, sqlQuery
from network import knownnodes, StoppableThread, invQueue
from inventory import Inventory
from network import knownnodes, StoppableThread
from six.moves import configparser, queue
@ -47,8 +50,6 @@ class singleWorker(StoppableThread):
def __init__(self):
super(singleWorker, self).__init__(name="singleWorker")
self.digestAlg = config.safeGet(
'bitmessagesettings', 'digestalg', 'sha256')
proofofwork.init()
def stopThread(self):
@ -72,16 +73,18 @@ class singleWorker(StoppableThread):
queryreturn = sqlQuery(
'''SELECT DISTINCT toaddress FROM sent'''
''' WHERE (status='awaitingpubkey' AND folder='sent')''')
for toAddress, in queryreturn:
toAddressVersionNumber, toStreamNumber, toRipe = \
decodeAddress(toAddress)[1:]
for row in queryreturn:
toAddress, = row
# toStatus
_, toAddressVersionNumber, toStreamNumber, toRipe = \
decodeAddress(toAddress)
if toAddressVersionNumber <= 3:
state.neededPubkeys[toAddress] = 0
elif toAddressVersionNumber >= 4:
doubleHashOfAddressData = highlevelcrypto.double_sha512(
doubleHashOfAddressData = hashlib.sha512(hashlib.sha512(
encodeVarint(toAddressVersionNumber)
+ encodeVarint(toStreamNumber) + toRipe
)
).digest()).digest()
# Note that this is the first half of the sha512 hash.
privEncryptionKey = doubleHashOfAddressData[:32]
tag = doubleHashOfAddressData[32:]
@ -116,7 +119,7 @@ class singleWorker(StoppableThread):
# For the case if user deleted knownnodes
# but is still having onionpeer objects in inventory
if not knownnodes.knownNodesActual:
for item in state.Inventory.by_type_and_tag(protocol.OBJECT_ONIONPEER):
for item in Inventory().by_type_and_tag(protocol.OBJECT_ONIONPEER):
queues.objectProcessorQueue.put((
protocol.OBJECT_ONIONPEER, item.payload
))
@ -192,19 +195,15 @@ class singleWorker(StoppableThread):
self.logger.info("Quitting...")
def _getKeysForAddress(self, address):
try:
privSigningKeyBase58 = config.get(address, 'privsigningkey')
privEncryptionKeyBase58 = config.get(address, 'privencryptionkey')
except (configparser.NoSectionError, configparser.NoOptionError):
self.logger.error(
'Could not read or decode privkey for address %s', address)
raise ValueError
privSigningKeyBase58 = config.get(
address, 'privsigningkey')
privEncryptionKeyBase58 = config.get(
address, 'privencryptionkey')
privSigningKeyHex = hexlify(highlevelcrypto.decodeWalletImportFormat(
privSigningKeyBase58.encode()))
privEncryptionKeyHex = hexlify(
highlevelcrypto.decodeWalletImportFormat(
privEncryptionKeyBase58.encode()))
privSigningKeyHex = hexlify(shared.decodeWalletImportFormat(
privSigningKeyBase58))
privEncryptionKeyHex = hexlify(shared.decodeWalletImportFormat(
privEncryptionKeyBase58))
# The \x04 on the beginning of the public keys are not sent.
# This way there is only one acceptable way to encode
@ -255,7 +254,9 @@ class singleWorker(StoppableThread):
message once it is done with the POW"""
# Look up my stream number based on my address hash
myAddress = shared.myAddressesByHash[adressHash]
addressVersionNumber, streamNumber = decodeAddress(myAddress)[1:3]
# status
_, addressVersionNumber, streamNumber, adressHash = (
decodeAddress(myAddress))
# 28 days from now plus or minus five minutes
TTL = int(28 * 24 * 60 * 60 + helper_random.randomrandrange(-300, 300))
@ -268,15 +269,17 @@ class singleWorker(StoppableThread):
payload += protocol.getBitfield(myAddress)
try:
pubSigningKey, pubEncryptionKey = self._getKeysForAddress(
myAddress)[2:]
except ValueError:
return
except Exception: # pylint:disable=broad-exception-caught
# privSigningKeyHex, privEncryptionKeyHex
_, _, pubSigningKey, pubEncryptionKey = \
self._getKeysForAddress(myAddress)
except (configparser.NoSectionError, configparser.NoOptionError) as err:
self.logger.warning("Section or Option did not found: %s", err)
except Exception as err:
self.logger.error(
'Error within doPOWForMyV2Pubkey. Could not read'
' the keys from the keys.dat file for a requested'
' address. %s\n', exc_info=True)
' address. %s\n', err
)
return
payload += pubSigningKey + pubEncryptionKey
@ -285,15 +288,15 @@ class singleWorker(StoppableThread):
payload = self._doPOWDefaults(
payload, TTL, log_prefix='(For pubkey message)')
inventoryHash = highlevelcrypto.calculateInventoryHash(payload)
inventoryHash = calculateInventoryHash(payload)
objectType = 1
state.Inventory[inventoryHash] = (
Inventory()[inventoryHash] = (
objectType, streamNumber, payload, embeddedTime, '')
self.logger.info(
'broadcasting inv with hash: %s', hexlify(inventoryHash))
invQueue.put((streamNumber, inventoryHash))
queues.invQueue.put((streamNumber, inventoryHash))
queues.UISignalQueue.put(('updateStatusBar', ''))
try:
config.set(
@ -315,8 +318,8 @@ class singleWorker(StoppableThread):
try:
myAddress = shared.myAddressesByHash[adressHash]
except KeyError:
self.logger.warning( # The address has been deleted.
"Can't find %s in myAddressByHash", hexlify(adressHash))
# The address has been deleted.
self.logger.warning("Can't find %s in myAddressByHash", hexlify(adressHash))
return
if config.safeGetBoolean(myAddress, 'chan'):
self.logger.info('This is a chan address. Not sending pubkey.')
@ -348,13 +351,14 @@ class singleWorker(StoppableThread):
# , privEncryptionKeyHex
privSigningKeyHex, _, pubSigningKey, pubEncryptionKey = \
self._getKeysForAddress(myAddress)
except ValueError:
return
except Exception: # pylint:disable=broad-exception-caught
except (configparser.NoSectionError, configparser.NoOptionError) as err:
self.logger.warning("Section or Option did not found: %s", err)
except Exception as err:
self.logger.error(
'Error within sendOutOrStoreMyV3Pubkey. Could not read'
' the keys from the keys.dat file for a requested'
' address. %s\n', exc_info=True)
' address. %s\n', err
)
return
payload += pubSigningKey + pubEncryptionKey
@ -364,8 +368,7 @@ class singleWorker(StoppableThread):
payload += encodeVarint(config.getint(
myAddress, 'payloadlengthextrabytes'))
signature = highlevelcrypto.sign(
payload, privSigningKeyHex, self.digestAlg)
signature = highlevelcrypto.sign(payload, privSigningKeyHex)
payload += encodeVarint(len(signature))
payload += signature
@ -373,15 +376,15 @@ class singleWorker(StoppableThread):
payload = self._doPOWDefaults(
payload, TTL, log_prefix='(For pubkey message)')
inventoryHash = highlevelcrypto.calculateInventoryHash(payload)
inventoryHash = calculateInventoryHash(payload)
objectType = 1
state.Inventory[inventoryHash] = (
Inventory()[inventoryHash] = (
objectType, streamNumber, payload, embeddedTime, '')
self.logger.info(
'broadcasting inv with hash: %s', hexlify(inventoryHash))
invQueue.put((streamNumber, inventoryHash))
queues.invQueue.put((streamNumber, inventoryHash))
queues.UISignalQueue.put(('updateStatusBar', ''))
try:
config.set(
@ -422,13 +425,14 @@ class singleWorker(StoppableThread):
# , privEncryptionKeyHex
privSigningKeyHex, _, pubSigningKey, pubEncryptionKey = \
self._getKeysForAddress(myAddress)
except ValueError:
return
except Exception: # pylint:disable=broad-exception-caught
except (configparser.NoSectionError, configparser.NoOptionError) as err:
self.logger.warning("Section or Option did not found: %s", err)
except Exception as err:
self.logger.error(
'Error within sendOutOrStoreMyV4Pubkey. Could not read'
' the keys from the keys.dat file for a requested'
' address. %s\n', exc_info=True)
' address. %s\n', err
)
return
dataToEncrypt += pubSigningKey + pubEncryptionKey
@ -445,13 +449,14 @@ class singleWorker(StoppableThread):
# unencrypted, the pubkey with part of the hash so that nodes
# know which pubkey object to try to decrypt
# when they want to send a message.
doubleHashOfAddressData = highlevelcrypto.double_sha512(
doubleHashOfAddressData = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber)
+ encodeVarint(streamNumber) + addressHash
)
).digest()).digest()
payload += doubleHashOfAddressData[32:] # the tag
signature = highlevelcrypto.sign(
payload + dataToEncrypt, privSigningKeyHex, self.digestAlg)
payload + dataToEncrypt, privSigningKeyHex
)
dataToEncrypt += encodeVarint(len(signature))
dataToEncrypt += signature
@ -464,9 +469,9 @@ class singleWorker(StoppableThread):
payload = self._doPOWDefaults(
payload, TTL, log_prefix='(For pubkey message)')
inventoryHash = highlevelcrypto.calculateInventoryHash(payload)
inventoryHash = calculateInventoryHash(payload)
objectType = 1
state.Inventory[inventoryHash] = (
Inventory()[inventoryHash] = (
objectType, streamNumber, payload, embeddedTime,
doubleHashOfAddressData[32:]
)
@ -474,7 +479,7 @@ class singleWorker(StoppableThread):
self.logger.info(
'broadcasting inv with hash: %s', hexlify(inventoryHash))
invQueue.put((streamNumber, inventoryHash))
queues.invQueue.put((streamNumber, inventoryHash))
queues.UISignalQueue.put(('updateStatusBar', ''))
try:
config.set(
@ -500,9 +505,9 @@ class singleWorker(StoppableThread):
objectType = protocol.OBJECT_ONIONPEER
# FIXME: ideally the objectPayload should be signed
objectPayload = encodeVarint(peer.port) + protocol.encodeHost(peer.host)
tag = highlevelcrypto.calculateInventoryHash(objectPayload)
tag = calculateInventoryHash(objectPayload)
if state.Inventory.by_type_and_tag(objectType, tag):
if Inventory().by_type_and_tag(objectType, tag):
return # not expired
payload = pack('>Q', embeddedTime)
@ -514,15 +519,15 @@ class singleWorker(StoppableThread):
payload = self._doPOWDefaults(
payload, TTL, log_prefix='(For onionpeer object)')
inventoryHash = highlevelcrypto.calculateInventoryHash(payload)
state.Inventory[inventoryHash] = (
objectType, streamNumber, buffer(payload), # noqa: F821
embeddedTime, buffer(tag) # noqa: F821
inventoryHash = calculateInventoryHash(payload)
Inventory()[inventoryHash] = (
objectType, streamNumber, buffer(payload),
embeddedTime, buffer(tag)
)
self.logger.info(
'sending inv (within sendOnionPeerObj function) for object: %s',
hexlify(inventoryHash))
invQueue.put((streamNumber, inventoryHash))
queues.invQueue.put((streamNumber, inventoryHash))
def sendBroadcast(self):
"""Send a broadcast-type object (assemble the object, perform PoW and put it to the inv announcement queue)"""
@ -553,7 +558,8 @@ class singleWorker(StoppableThread):
# , privEncryptionKeyHex
privSigningKeyHex, _, pubSigningKey, pubEncryptionKey = \
self._getKeysForAddress(fromaddress)
except ValueError:
except (configparser.NoSectionError, configparser.NoOptionError) as err:
self.logger.warning("Section or Option did not found: %s", err)
queues.UISignalQueue.put((
'updateSentItemStatusByAckdata', (
ackdata,
@ -562,7 +568,6 @@ class singleWorker(StoppableThread):
"Error! Could not find sender address"
" (your address) in the keys.dat file."))
))
continue
except Exception as err:
self.logger.error(
'Error within sendBroadcast. Could not read'
@ -608,10 +613,10 @@ class singleWorker(StoppableThread):
payload += encodeVarint(streamNumber)
if addressVersionNumber >= 4:
doubleHashOfAddressData = highlevelcrypto.double_sha512(
doubleHashOfAddressData = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber)
+ encodeVarint(streamNumber) + ripe
)
).digest()).digest()
tag = doubleHashOfAddressData[32:]
payload += tag
else:
@ -636,7 +641,7 @@ class singleWorker(StoppableThread):
dataToSign = payload + dataToEncrypt
signature = highlevelcrypto.sign(
dataToSign, privSigningKeyHex, self.digestAlg)
dataToSign, privSigningKeyHex)
dataToEncrypt += encodeVarint(len(signature))
dataToEncrypt += signature
@ -681,16 +686,16 @@ class singleWorker(StoppableThread):
)
continue
inventoryHash = highlevelcrypto.calculateInventoryHash(payload)
inventoryHash = calculateInventoryHash(payload)
objectType = 3
state.Inventory[inventoryHash] = (
Inventory()[inventoryHash] = (
objectType, streamNumber, payload, embeddedTime, tag)
self.logger.info(
'sending inv (within sendBroadcast function)'
' for object: %s',
hexlify(inventoryHash)
)
invQueue.put((streamNumber, inventoryHash))
queues.invQueue.put((streamNumber, inventoryHash))
queues.UISignalQueue.put((
'updateSentItemStatusByAckdata', (
@ -790,10 +795,10 @@ class singleWorker(StoppableThread):
if toAddressVersionNumber <= 3:
toTag = ''
else:
toTag = highlevelcrypto.double_sha512(
toTag = hashlib.sha512(hashlib.sha512(
encodeVarint(toAddressVersionNumber)
+ encodeVarint(toStreamNumber) + toRipe
)[32:]
).digest()).digest()[32:]
if toaddress in state.neededPubkeys or \
toTag in state.neededPubkeys:
# We already sent a request for the pubkey
@ -827,11 +832,11 @@ class singleWorker(StoppableThread):
# already contains the toAddress and cryptor
# object associated with the tag for this toAddress.
if toAddressVersionNumber >= 4:
doubleHashOfToAddressData = \
highlevelcrypto.double_sha512(
encodeVarint(toAddressVersionNumber)
+ encodeVarint(toStreamNumber) + toRipe
)
doubleHashOfToAddressData = hashlib.sha512(
hashlib.sha512(
encodeVarint(toAddressVersionNumber) + encodeVarint(toStreamNumber) + toRipe
).digest()
).digest()
# The first half of the sha512 hash.
privEncryptionKey = doubleHashOfToAddressData[:32]
# The second half of the sha512 hash.
@ -842,7 +847,7 @@ class singleWorker(StoppableThread):
hexlify(privEncryptionKey))
)
for value in state.Inventory.by_type_and_tag(1, toTag):
for value in Inventory().by_type_and_tag(1, toTag):
# if valid, this function also puts it
# in the pubkeys table.
if protocol.decryptAndCheckPubkeyPayload(
@ -1111,9 +1116,8 @@ class singleWorker(StoppableThread):
' from the keys.dat file for our own address. %s\n',
err)
continue
privEncryptionKeyHex = hexlify(
highlevelcrypto.decodeWalletImportFormat(
privEncryptionKeyBase58.encode()))
privEncryptionKeyHex = hexlify(shared.decodeWalletImportFormat(
privEncryptionKeyBase58))
pubEncryptionKeyBase256 = unhexlify(highlevelcrypto.privToPub(
privEncryptionKeyHex))[1:]
requiredAverageProofOfWorkNonceTrialsPerByte = \
@ -1142,7 +1146,8 @@ class singleWorker(StoppableThread):
privSigningKeyHex, privEncryptionKeyHex, \
pubSigningKey, pubEncryptionKey = self._getKeysForAddress(
fromaddress)
except ValueError:
except (configparser.NoSectionError, configparser.NoOptionError) as err:
self.logger.warning("Section or Option did not found: %s", err)
queues.UISignalQueue.put((
'updateSentItemStatusByAckdata', (
ackdata,
@ -1151,7 +1156,6 @@ class singleWorker(StoppableThread):
"Error! Could not find sender address"
" (your address) in the keys.dat file."))
))
continue
except Exception as err:
self.logger.error(
'Error within sendMsg. Could not read'
@ -1219,8 +1223,7 @@ class singleWorker(StoppableThread):
payload += fullAckPayload
dataToSign = pack('>Q', embeddedTime) + '\x00\x00\x00\x02' + \
encodeVarint(1) + encodeVarint(toStreamNumber) + payload
signature = highlevelcrypto.sign(
dataToSign, privSigningKeyHex, self.digestAlg)
signature = highlevelcrypto.sign(dataToSign, privSigningKeyHex)
payload += encodeVarint(len(signature))
payload += signature
@ -1298,9 +1301,9 @@ class singleWorker(StoppableThread):
)
continue
inventoryHash = highlevelcrypto.calculateInventoryHash(encryptedPayload)
inventoryHash = calculateInventoryHash(encryptedPayload)
objectType = 2
state.Inventory[inventoryHash] = (
Inventory()[inventoryHash] = (
objectType, toStreamNumber, encryptedPayload, embeddedTime, '')
if config.has_section(toaddress) or \
not protocol.checkBitfield(behaviorBitfield, protocol.BITFIELD_DOESACK):
@ -1326,7 +1329,7 @@ class singleWorker(StoppableThread):
'Broadcasting inv for my msg(within sendmsg function): %s',
hexlify(inventoryHash)
)
invQueue.put((toStreamNumber, inventoryHash))
queues.invQueue.put((toStreamNumber, inventoryHash))
# Update the sent message in the sent table with the
# necessary information.
@ -1348,7 +1351,8 @@ class singleWorker(StoppableThread):
# the message in our own inbox.
if config.has_section(toaddress):
# Used to detect and ignore duplicate messages in our inbox
sigHash = highlevelcrypto.double_sha512(signature)[32:]
sigHash = hashlib.sha512(hashlib.sha512(
signature).digest()).digest()[32:]
t = (inventoryHash, toaddress, fromaddress, subject, int(
time.time()), message, 'inbox', encoding, 0, sigHash)
helper_inbox.insert(t)
@ -1368,7 +1372,7 @@ class singleWorker(StoppableThread):
if apiNotifyPath:
# There is no additional risk of remote exploitation or
# privilege escalation
call([apiNotifyPath, "newMessage"]) # nosec B603
call([apiNotifyPath, "newMessage"]) # nosec:B603
def requestPubKey(self, toAddress):
"""Send a getpubkey object"""
@ -1405,13 +1409,16 @@ class singleWorker(StoppableThread):
# neededPubkeys dictionary. But if we are recovering
# from a restart of the client then we have to put it in now.
doubleHashOfAddressData = highlevelcrypto.double_sha512(
# Note that this is the first half of the sha512 hash.
privEncryptionKey = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber)
+ encodeVarint(streamNumber) + ripe
)
privEncryptionKey = doubleHashOfAddressData[:32]
).digest()).digest()[:32]
# Note that this is the second half of the sha512 hash.
tag = doubleHashOfAddressData[32:]
tag = hashlib.sha512(hashlib.sha512(
encodeVarint(addressVersionNumber)
+ encodeVarint(streamNumber) + ripe
).digest()).digest()[32:]
if tag not in state.neededPubkeys:
# We'll need this for when we receive a pubkey reply:
# it will be encrypted and we'll need to decrypt it.
@ -1454,12 +1461,12 @@ class singleWorker(StoppableThread):
payload = self._doPOWDefaults(payload, TTL)
inventoryHash = highlevelcrypto.calculateInventoryHash(payload)
inventoryHash = calculateInventoryHash(payload)
objectType = 1
state.Inventory[inventoryHash] = (
Inventory()[inventoryHash] = (
objectType, streamNumber, payload, embeddedTime, '')
self.logger.info('sending inv (for the getpubkey message)')
invQueue.put((streamNumber, inventoryHash))
queues.invQueue.put((streamNumber, inventoryHash))
# wait 10% past expiration
sleeptill = int(time.time() + TTL * 1.1)

View File

@ -287,7 +287,7 @@ def check_openssl():
path = ctypes.util.find_library('ssl')
if path not in paths:
paths.append(path)
except: # nosec B110 # pylint:disable=bare-except
except: # nosec:B110 pylint:disable=bare-except
pass
openssl_version = None
@ -361,7 +361,7 @@ def check_curses():
return False
try:
subprocess.check_call(['which', 'dialog']) # nosec B603, B607
subprocess.check_call(['which', 'dialog'])
except subprocess.CalledProcessError:
logger.error(
'Curses requires the `dialog` command to be installed as well as'

View File

@ -22,26 +22,26 @@ def genAckPayload(streamNumber=1, stealthLevel=0):
- level 1: a getpubkey request for a (random) dummy key hash
- level 2: a standard message, encrypted to a random pubkey
"""
if stealthLevel == 2: # Generate privacy-enhanced payload
if stealthLevel == 2: # Generate privacy-enhanced payload
# Generate a dummy privkey and derive the pubkey
dummyPubKeyHex = highlevelcrypto.privToPub(
hexlify(highlevelcrypto.randomBytes(32)))
hexlify(helper_random.randomBytes(32)))
# Generate a dummy message of random length
# (the smallest possible standard-formatted message is 234 bytes)
dummyMessage = highlevelcrypto.randomBytes(
dummyMessage = helper_random.randomBytes(
helper_random.randomrandrange(234, 801))
# Encrypt the message using standard BM encryption (ECIES)
ackdata = highlevelcrypto.encrypt(dummyMessage, dummyPubKeyHex)
acktype = 2 # message
version = 1
elif stealthLevel == 1: # Basic privacy payload (random getpubkey)
ackdata = highlevelcrypto.randomBytes(32)
elif stealthLevel == 1: # Basic privacy payload (random getpubkey)
ackdata = helper_random.randomBytes(32)
acktype = 0 # getpubkey
version = 4
else: # Minimum viable payload (non stealth)
ackdata = highlevelcrypto.randomBytes(32)
ackdata = helper_random.randomBytes(32)
acktype = 2 # message
version = 1

View File

@ -1,7 +1,12 @@
"""Convenience functions for random operations. Not suitable for security / cryptography operations."""
import os
import random
try:
from pyelliptic.openssl import OpenSSL
except ImportError:
from .pyelliptic.openssl import OpenSSL
NoneType = type(None)
@ -11,6 +16,14 @@ def seed():
random.seed()
def randomBytes(n):
"""Method randomBytes."""
try:
return os.urandom(n)
except NotImplementedError:
return OpenSSL.rand(n)
def randomshuffle(population):
"""Method randomShuffle.

View File

@ -12,7 +12,6 @@ import sys
import time
from distutils.version import StrictVersion
from struct import pack
from six.moves import configparser
try:
import defaults
@ -219,8 +218,7 @@ def updateConfig():
config.set(
addressInKeysFile, 'payloadlengthextrabytes',
str(int(previousSmallMessageDifficulty * 1000)))
except (ValueError, TypeError, configparser.NoSectionError,
configparser.NoOptionError):
except Exception:
continue
config.set('bitmessagesettings', 'maxdownloadrate', '0')
config.set('bitmessagesettings', 'maxuploadrate', '0')

View File

@ -7,104 +7,25 @@ High level cryptographic functions based on `.pyelliptic` OpenSSL bindings.
`More discussion. <https://github.com/yann2192/pyelliptic/issues/32>`_
"""
import hashlib
import os
from binascii import hexlify
try:
import pyelliptic
from fallback import RIPEMD160Hash
from pyelliptic import OpenSSL
from pyelliptic import arithmetic as a
except ImportError:
from pybitmessage import pyelliptic
from pybitmessage.fallback import RIPEMD160Hash
from pybitmessage.pyelliptic import OpenSSL
from pybitmessage.pyelliptic import arithmetic as a
import pyelliptic
from pyelliptic import OpenSSL
from pyelliptic import arithmetic as a
from bmconfigparser import config
__all__ = ['encrypt', 'makeCryptor', 'pointMult', 'privToPub', 'sign', 'verify']
__all__ = [
'decodeWalletImportFormat', 'deterministic_keys',
'double_sha512', 'calculateInventoryHash', 'encodeWalletImportFormat',
'encrypt', 'makeCryptor', 'pointMult', 'privToPub', 'randomBytes',
'random_keys', 'sign', 'to_ripe', 'verify']
# WIF (uses arithmetic ):
def decodeWalletImportFormat(WIFstring):
"""
Convert private key from base58 that's used in the config file to
8-bit binary string.
"""
fullString = a.changebase(WIFstring, 58, 256)
privkey = fullString[:-4]
if fullString[-4:] != \
hashlib.sha256(hashlib.sha256(privkey).digest()).digest()[:4]:
raise ValueError('Checksum failed')
elif privkey[0:1] == b'\x80': # checksum passed
return privkey[1:]
raise ValueError('No hex 80 prefix')
# An excellent way for us to store our keys
# is in Wallet Import Format. Let us convert now.
# https://en.bitcoin.it/wiki/Wallet_import_format
def encodeWalletImportFormat(privKey):
"""
Convert private key from binary 8-bit string into base58check WIF string.
"""
privKey = b'\x80' + privKey
checksum = hashlib.sha256(hashlib.sha256(privKey).digest()).digest()[0:4]
return a.changebase(privKey + checksum, 256, 58)
# Random
def randomBytes(n):
"""Get n random bytes"""
try:
return os.urandom(n)
except NotImplementedError:
return OpenSSL.rand(n)
# Hashes
def _bm160(data):
"""RIPEME160(SHA512(data)) -> bytes"""
return RIPEMD160Hash(hashlib.sha512(data).digest()).digest()
def to_ripe(signing_key, encryption_key):
"""Convert two public keys to a ripe hash"""
return _bm160(signing_key + encryption_key)
def double_sha512(data):
"""Binary double SHA512 digest"""
return hashlib.sha512(hashlib.sha512(data).digest()).digest()
def calculateInventoryHash(data):
"""Calculate inventory hash from object data"""
return double_sha512(data)[:32]
# Keys
def random_keys():
"""Return a pair of keys, private and public"""
priv = randomBytes(32)
pub = pointMult(priv)
return priv, pub
def deterministic_keys(passphrase, nonce):
"""Generate keys from *passphrase* and *nonce* (encoded as varint)"""
priv = hashlib.sha512(passphrase + nonce).digest()[:32]
pub = pointMult(priv)
return priv, pub
def makeCryptor(privkey, curve='secp256k1'):
"""Return a private `.pyelliptic.ECC` instance"""
private_key = a.changebase(privkey, 16, 256, minlen=32)
public_key = pointMult(private_key)
cryptor = pyelliptic.ECC(
pubkey_x=public_key[1:-32], pubkey_y=public_key[-32:],
raw_privkey=private_key, curve=curve)
return cryptor
def hexToPubkey(pubkey):
@ -114,6 +35,12 @@ def hexToPubkey(pubkey):
return pubkey_bin
def makePubCryptor(pubkey):
"""Return a public `.pyelliptic.ECC` instance"""
pubkey_bin = hexToPubkey(pubkey)
return pyelliptic.ECC(curve='secp256k1', pubkey=pubkey_bin)
def privToPub(privkey):
"""Converts hex private key into hex public key"""
private_key = a.changebase(privkey, 16, 256, minlen=32)
@ -121,6 +48,63 @@ def privToPub(privkey):
return hexlify(public_key)
def encrypt(msg, hexPubkey):
"""Encrypts message with hex public key"""
return pyelliptic.ECC(curve='secp256k1').encrypt(
msg, hexToPubkey(hexPubkey))
def decrypt(msg, hexPrivkey):
"""Decrypts message with hex private key"""
return makeCryptor(hexPrivkey).decrypt(msg)
def decryptFast(msg, cryptor):
"""Decrypts message with an existing `.pyelliptic.ECC` object"""
return cryptor.decrypt(msg)
def sign(msg, hexPrivkey):
"""
Signs with hex private key using SHA1 or SHA256 depending on
"digestalg" setting
"""
digestAlg = config.safeGet(
'bitmessagesettings', 'digestalg', 'sha256')
if digestAlg == "sha1":
# SHA1, this will eventually be deprecated
return makeCryptor(hexPrivkey).sign(
msg, digest_alg=OpenSSL.digest_ecdsa_sha1)
elif digestAlg == "sha256":
# SHA256. Eventually this will become the default
return makeCryptor(hexPrivkey).sign(msg, digest_alg=OpenSSL.EVP_sha256)
else:
raise ValueError("Unknown digest algorithm %s" % digestAlg)
def verify(msg, sig, hexPubkey):
"""Verifies with hex public key using SHA1 or SHA256"""
# As mentioned above, we must upgrade gracefully to use SHA256. So
# let us check the signature using both SHA1 and SHA256 and if one
# of them passes then we will be satisfied. Eventually this can
# be simplified and we'll only check with SHA256.
try:
# old SHA1 algorithm.
sigVerifyPassed = makePubCryptor(hexPubkey).verify(
sig, msg, digest_alg=OpenSSL.digest_ecdsa_sha1)
except:
sigVerifyPassed = False
if sigVerifyPassed:
# The signature check passed using SHA1
return True
# The signature check using SHA1 failed. Let us try it with SHA256.
try:
return makePubCryptor(hexPubkey).verify(
sig, msg, digest_alg=OpenSSL.EVP_sha256)
except:
return False
def pointMult(secret):
"""
Does an EC point multiplication; turns a private key into a public key.
@ -158,81 +142,3 @@ def pointMult(secret):
OpenSSL.EC_POINT_free(pub_key)
OpenSSL.BN_free(priv_key)
OpenSSL.EC_KEY_free(k)
# Encryption
def makeCryptor(privkey, curve='secp256k1'):
"""Return a private `.pyelliptic.ECC` instance"""
private_key = a.changebase(privkey, 16, 256, minlen=32)
public_key = pointMult(private_key)
cryptor = pyelliptic.ECC(
pubkey_x=public_key[1:-32], pubkey_y=public_key[-32:],
raw_privkey=private_key, curve=curve)
return cryptor
def makePubCryptor(pubkey):
"""Return a public `.pyelliptic.ECC` instance"""
pubkey_bin = hexToPubkey(pubkey)
return pyelliptic.ECC(curve='secp256k1', pubkey=pubkey_bin)
def encrypt(msg, hexPubkey):
"""Encrypts message with hex public key"""
return pyelliptic.ECC(curve='secp256k1').encrypt(
msg, hexToPubkey(hexPubkey))
def decrypt(msg, hexPrivkey):
"""Decrypts message with hex private key"""
return makeCryptor(hexPrivkey).decrypt(msg)
def decryptFast(msg, cryptor):
"""Decrypts message with an existing `.pyelliptic.ECC` object"""
return cryptor.decrypt(msg)
# Signatures
def _choose_digest_alg(name):
"""
Choose openssl digest constant by name raises ValueError if not appropriate
"""
if name not in ("sha1", "sha256"):
raise ValueError("Unknown digest algorithm %s" % name)
return (
# SHA1, this will eventually be deprecated
OpenSSL.digest_ecdsa_sha1 if name == "sha1" else OpenSSL.EVP_sha256)
def sign(msg, hexPrivkey, digestAlg="sha256"):
    """
    Signs with hex private key using SHA1 or SHA256 depending on
    *digestAlg* keyword.
    """
    # Resolve the digest first so an invalid name raises ValueError
    # before any key material is processed.
    digest = _choose_digest_alg(digestAlg)
    cryptor = makeCryptor(hexPrivkey)
    return cryptor.sign(msg, digest_alg=digest)
def verify(msg, sig, hexPubkey, digestAlg=None):
    """
    Verifies *sig* over *msg* with the hex public key *hexPubkey*.
    With *digestAlg* of "sha1" or "sha256" only that digest is tried;
    with the default None, SHA1 is tried first and SHA256 as fallback.
    Returns True on success, False otherwise.
    """
    # As mentioned above, we must upgrade gracefully to use SHA256. So
    # let us check the signature using both SHA1 and SHA256 and if one
    # of them passes then we will be satisfied. Eventually this can
    # be simplified and we'll only check with SHA256.
    if digestAlg is None:
        # old SHA1 algorithm.
        sigVerifyPassed = verify(msg, sig, hexPubkey, "sha1")
        if sigVerifyPassed:
            # The signature check passed using SHA1
            return True
        # The signature check using SHA1 failed. Let us try it with SHA256.
        return verify(msg, sig, hexPubkey, "sha256")
    try:
        return makePubCryptor(hexPubkey).verify(
            sig, msg, digest_alg=_choose_digest_alg(digestAlg))
    except Exception:
        # Treat any verification error (including an unknown digest
        # name) as a failed check. A bare "except:" here would also
        # swallow SystemExit and KeyboardInterrupt, so catch
        # Exception explicitly.
        return False

View File

@ -1,9 +1,10 @@
"""The Inventory"""
"""The Inventory singleton"""
# TODO make this dynamic, and watch out for frozen, like with messagetypes
import storage.filesystem
import storage.sqlite
from bmconfigparser import config
from singleton import Singleton
def create_inventory_instance(backend="sqlite"):
@ -16,9 +17,10 @@ def create_inventory_instance(backend="sqlite"):
"{}Inventory".format(backend.title()))()
class Inventory:
@Singleton
class Inventory():
"""
Inventory class which uses storage backends
Inventory singleton class which uses storage backends
to manage the inventory.
"""
def __init__(self):
@ -43,6 +45,3 @@ class Inventory:
# hint for pylint: this is dictionary like object
def __getitem__(self, key):
return self._realInventory[key]
def __setitem__(self, key, value):
self._realInventory[key] = value

View File

@ -1,13 +0,0 @@
"""This module is for thread start."""
import state
import sys
from bitmessagemain import main
from termcolor import colored
print(colored('kivy is not supported at the moment for this version..', 'red'))
sys.exit()
if __name__ == '__main__':
state.kivy = True
print("Kivy Loading......")
main()

View File

@ -1,31 +1,13 @@
# pylint: disable=unused-import, wrong-import-position, ungrouped-imports
# flake8: noqa:E401, E402
"""Mock kivy app with mock threads."""
import os
from kivy.config import Config
from mockbm import multiqueue
"""This module is for thread start."""
import state
from mockbm.class_addressGenerator import FakeAddressGenerator # noqa:E402
from bitmessagekivy.mpybit import NavigateApp # noqa:E402
from mockbm import network # noqa:E402
stats = network.stats
objectracker = network.objectracker
import sys
from bitmessagemain import main
from termcolor import colored
print(colored('kivy is not supported at the moment for this version..', 'red'))
sys.exit()
def main():
"""main method for starting threads"""
addressGeneratorThread = FakeAddressGenerator()
addressGeneratorThread.daemon = True
addressGeneratorThread.start()
state.kivyapp = NavigateApp()
state.kivyapp.run()
addressGeneratorThread.stopThread()
if __name__ == "__main__":
os.environ['INSTALL_TESTS'] = "True"
if __name__ == '__main__':
state.kivy = True
print("Kivy Loading......")
main()

View File

@ -5,7 +5,7 @@
import os
from kivy.config import Config
from pybitmessage.mockbm import multiqueue
from pybitmessage.mock import multiqueue
from pybitmessage import state
if os.environ.get("INSTALL_TESTS", False):
@ -16,9 +16,9 @@ if os.environ.get("INSTALL_TESTS", False):
Config.set("graphics", "left", 0)
from pybitmessage.mockbm.class_addressGenerator import FakeAddressGenerator # noqa:E402
from pybitmessage.mock.class_addressGenerator import FakeAddressGenerator # noqa:E402
from pybitmessage.bitmessagekivy.mpybit import NavigateApp # noqa:E402
from pybitmessage.mockbm import network # noqa:E402
from pybitmessage.mock import network # noqa:E402
stats = network.stats
objectracker = network.objectracker

View File

@ -2,11 +2,16 @@
A queue with multiple internal subqueues.
Elements are added into a random subqueue, and retrieval rotates
"""
import random
from collections import deque
from six.moves import queue
try:
import helper_random
except ImportError:
from . import helper_random
class MultiQueue(queue.Queue):
"""A base queue class"""
@ -33,7 +38,7 @@ class MultiQueue(queue.Queue):
# Put a new item in the queue
def _put(self, item):
# self.queue.append(item)
self.queues[random.randrange(self.queueCount)].append( # nosec B311
self.queues[helper_random.randomrandrange(self.queueCount)].append(
(item))
# Get an item from the queue

View File

@ -1,27 +1,23 @@
"""
Network subsystem package
"""
from six.moves import queue
from .dandelion import Dandelion
try:
from .announcethread import AnnounceThread
from .connectionpool import BMConnectionPool
except ImportError:
AnnounceThread = None
BMConnectionPool = None
from .threads import StoppableThread
from .multiqueue import MultiQueue
dandelion_ins = Dandelion()
# network queues
invQueue = MultiQueue()
addrQueue = MultiQueue()
portCheckerQueue = queue.Queue()
receiveDataQueue = queue.Queue()
__all__ = ["StoppableThread"]
__all__ = ["AnnounceThread", "BMConnectionPool", "StoppableThread"]
def start(config, state):
"""Start network threads"""
from .announcethread import AnnounceThread
import connectionpool # pylint: disable=relative-import
from .addrthread import AddrThread
from .dandelion import Dandelion
from .downloadthread import DownloadThread
from .invthread import InvThread
from .networkthread import BMNetworkThread
@ -29,13 +25,10 @@ def start(config, state):
from .receivequeuethread import ReceiveQueueThread
from .uploadthread import UploadThread
# check and set dandelion enabled value at network startup
dandelion_ins.init_dandelion_enabled(config)
# pass pool instance into dandelion class instance
dandelion_ins.init_pool(connectionpool.pool)
readKnownNodes()
connectionpool.pool.connectToStream(1)
# init, needs to be early because other thread may access it early
Dandelion()
BMConnectionPool().connectToStream(1)
for thread in (
BMNetworkThread(), InvThread(), AddrThread(),
DownloadThread(), UploadThread()

View File

@ -1,13 +1,14 @@
"""
Announce addresses as they are received from other hosts
"""
import random
from six.moves import queue
# magic imports!
import connectionpool
import state
from helper_random import randomshuffle
from protocol import assembleAddrMessage
from network import addrQueue # FIXME: init with queue
from queues import addrQueue # FIXME: init with queue
from network.connectionpool import BMConnectionPool
from threads import StoppableThread
@ -17,7 +18,7 @@ class AddrThread(StoppableThread):
name = "AddrBroadcaster"
def run(self):
while not self._stopped:
while not state.shutdown:
chunk = []
while True:
try:
@ -28,10 +29,10 @@ class AddrThread(StoppableThread):
if chunk:
# Choose peers randomly
connections = connectionpool.pool.establishedConnections()
random.shuffle(connections)
connections = BMConnectionPool().establishedConnections()
randomshuffle(connections)
for i in connections:
random.shuffle(chunk)
randomshuffle(chunk)
filtered = []
for stream, peer, seen, destination in chunk:
# peer's own address or address received from peer

View File

@ -4,9 +4,10 @@ Announce myself (node address)
import time
# magic imports!
import connectionpool
import state
from bmconfigparser import config
from protocol import assembleAddrMessage
from network.connectionpool import BMConnectionPool
from node import Peer
from threads import StoppableThread
@ -19,7 +20,7 @@ class AnnounceThread(StoppableThread):
def run(self):
lastSelfAnnounced = 0
while not self._stopped:
while not self._stopped and state.shutdown == 0:
processed = 0
if lastSelfAnnounced < time.time() - self.announceInterval:
self.announceSelf()
@ -30,10 +31,10 @@ class AnnounceThread(StoppableThread):
@staticmethod
def announceSelf():
"""Announce our presence"""
for connection in connectionpool.pool.udpSockets.values():
for connection in BMConnectionPool().udpSockets.values():
if not connection.announcing:
continue
for stream in connectionpool.pool.streams:
for stream in state.streamsInWhichIAmParticipating:
addr = (
stream,
Peer(

View File

@ -9,7 +9,6 @@ Basic infrastructure for asynchronous socket service clients and servers.
import os
import select
import socket
import random
import sys
import time
import warnings
@ -20,6 +19,7 @@ from errno import (
)
from threading import current_thread
import helper_random
try:
from errno import WSAEWOULDBLOCK
@ -233,13 +233,13 @@ def select_poller(timeout=0.0, map=None):
if err.args[0] in (WSAENOTSOCK, ):
return
for fd in random.sample(r, len(r)):
for fd in helper_random.randomsample(r, len(r)):
obj = map.get(fd)
if obj is None:
continue
read(obj)
for fd in random.sample(w, len(w)):
for fd in helper_random.randomsample(w, len(w)):
obj = map.get(fd)
if obj is None:
continue
@ -297,7 +297,7 @@ def poll_poller(timeout=0.0, map=None):
except socket.error as err:
if err.args[0] in (EBADF, WSAENOTSOCK, EINTR):
return
for fd, flags in random.sample(r, len(r)):
for fd, flags in helper_random.randomsample(r, len(r)):
obj = map.get(fd)
if obj is None:
continue
@ -357,7 +357,7 @@ def epoll_poller(timeout=0.0, map=None):
if err.args[0] != EINTR:
raise
r = []
for fd, flags in random.sample(r, len(r)):
for fd, flags in helper_random.randomsample(r, len(r)):
obj = map.get(fd)
if obj is None:
continue
@ -420,7 +420,7 @@ def kqueue_poller(timeout=0.0, map=None):
events = kqueue_poller.pollster.control(updates, selectables, timeout)
if len(events) > 1:
events = random.sample(events, len(events))
events = helper_random.randomsample(events, len(events))
for event in events:
fd = event.ident

View File

@ -6,9 +6,9 @@ import time
import protocol
import state
import connectionpool
from network import dandelion_ins
from highlevelcrypto import calculateInventoryHash
from addresses import calculateInventoryHash
from inventory import Inventory
from network.dandelion import Dandelion
logger = logging.getLogger('default')
@ -100,7 +100,7 @@ class BMObject(object): # pylint: disable=too-many-instance-attributes
logger.warning(
'The object has invalid stream: %s', self.streamNumber)
raise BMObjectInvalidError()
if self.streamNumber not in connectionpool.pool.streams:
if self.streamNumber not in state.streamsInWhichIAmParticipating:
logger.debug(
'The streamNumber %i isn\'t one we are interested in.',
self.streamNumber)
@ -113,9 +113,9 @@ class BMObject(object): # pylint: disable=too-many-instance-attributes
or advertise it unnecessarily)
"""
# if it's a stem duplicate, pretend we don't have it
if dandelion_ins.hasHash(self.inventoryHash):
if Dandelion().hasHash(self.inventoryHash):
return
if self.inventoryHash in state.Inventory:
if self.inventoryHash in Inventory():
raise BMObjectAlreadyHaveError()
def checkObjectByType(self):

View File

@ -9,15 +9,17 @@ import re
import socket
import struct
import time
from binascii import hexlify
# magic imports!
import addresses
import connectionpool
import knownnodes
import protocol
import state
import connectionpool
from bmconfigparser import config
from queues import objectProcessorQueue
from inventory import Inventory
from queues import invQueue, objectProcessorQueue, portCheckerQueue
from randomtrackingdict import RandomTrackingDict
from network.advanceddispatcher import AdvancedDispatcher
from network.bmobject import (
@ -25,8 +27,9 @@ from network.bmobject import (
BMObjectInsufficientPOWError, BMObjectInvalidError,
BMObjectUnwantedStreamError
)
from network.dandelion import Dandelion
from network.proxy import ProxyError
from network import dandelion_ins, invQueue, portCheckerQueue
from node import Node, Peer
from objectracker import ObjectTracker, missingObjects
@ -337,27 +340,27 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
self.pendingUpload[str(i)] = now
return True
def _command_inv(self, extend_dandelion_stem=False):
def _command_inv(self, dandelion=False):
"""
Common inv announce implementation:
both inv and dinv depending on *extend_dandelion_stem* kwarg
both inv and dinv depending on *dandelion* kwarg
"""
items = self.decode_payload_content("l32s")
if len(items) > protocol.MAX_OBJECT_COUNT:
logger.error(
'Too many items in %sinv message!', 'd' if extend_dandelion_stem else '')
'Too many items in %sinv message!', 'd' if dandelion else '')
raise BMProtoExcessiveDataError()
# ignore dinv if dandelion turned off
if extend_dandelion_stem and not dandelion_ins.enabled:
if dandelion and not state.dandelion:
return True
for i in map(str, items):
if i in state.Inventory and not dandelion_ins.hasHash(i):
if i in Inventory() and not Dandelion().hasHash(i):
continue
if extend_dandelion_stem and not dandelion_ins.hasHash(i):
dandelion_ins.addHash(i, self)
if dandelion and not Dandelion().hasHash(i):
Dandelion().addHash(i, self)
self.handleReceivedInventory(i)
return True
@ -410,7 +413,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
try:
self.object.checkObjectByType()
objectProcessorQueue.put((
self.object.objectType, buffer(self.object.data))) # noqa: F821
self.object.objectType, buffer(self.object.data)))
except BMObjectInvalidError:
BMProto.stopDownloadingObject(self.object.inventoryHash, True)
else:
@ -419,15 +422,15 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
except KeyError:
pass
if self.object.inventoryHash in state.Inventory and dandelion_ins.hasHash(
if self.object.inventoryHash in Inventory() and Dandelion().hasHash(
self.object.inventoryHash):
dandelion_ins.removeHash(
Dandelion().removeHash(
self.object.inventoryHash, "cycle detection")
state.Inventory[self.object.inventoryHash] = (
Inventory()[self.object.inventoryHash] = (
self.object.objectType, self.object.streamNumber,
buffer(self.payload[objectOffset:]), self.object.expiresTime, # noqa: F821
buffer(self.object.tag) # noqa: F821
buffer(self.payload[objectOffset:]), self.object.expiresTime,
buffer(self.object.tag)
)
self.handleReceivedObject(
self.object.streamNumber, self.object.inventoryHash)
@ -445,7 +448,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
for seenTime, stream, _, ip, port in self._decode_addr():
ip = str(ip)
if (
stream not in connectionpool.pool.streams
stream not in state.streamsInWhichIAmParticipating
# FIXME: should check against complete list
or ip.startswith('bootstrap')
):
@ -540,7 +543,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
if not self.isOutbound:
self.append_write_buf(protocol.assembleVersionMessage(
self.destination.host, self.destination.port,
connectionpool.pool.streams, dandelion_ins.enabled, True,
connectionpool.BMConnectionPool().streams, True,
nodeid=self.nodeid))
logger.debug(
'%(host)s:%(port)i sending version',
@ -596,7 +599,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
'Closed connection to %s because there is no overlapping'
' interest in streams.', self.destination)
return False
if connectionpool.pool.inboundConnections.get(
if connectionpool.BMConnectionPool().inboundConnections.get(
self.destination):
try:
if not protocol.checkSocksIP(self.destination.host):
@ -607,15 +610,15 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
'Closed connection to %s because we are already'
' connected to that IP.', self.destination)
return False
except Exception: # nosec B110 # pylint:disable=broad-exception-caught
except Exception: # TODO: exception types
pass
if not self.isOutbound:
# incoming from a peer we're connected to as outbound,
# or server full report the same error to counter deanonymisation
if (
Peer(self.destination.host, self.peerNode.port)
in connectionpool.pool.inboundConnections
or len(connectionpool.pool)
in connectionpool.BMConnectionPool().inboundConnections
or len(connectionpool.BMConnectionPool())
> config.safeGetInt(
'bitmessagesettings', 'maxtotalconnections')
+ config.safeGetInt(
@ -627,7 +630,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
'Closed connection to %s due to server full'
' or duplicate inbound/outbound.', self.destination)
return False
if connectionpool.pool.isAlreadyConnected(self.nonce):
if connectionpool.BMConnectionPool().isAlreadyConnected(self.nonce):
self.append_write_buf(protocol.assembleErrorMessage(
errorText="I'm connected to myself. Closing connection.",
fatal=2))
@ -641,7 +644,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
@staticmethod
def stopDownloadingObject(hashId, forwardAnyway=False):
"""Stop downloading object *hashId*"""
for connection in connectionpool.pool.connections():
for connection in connectionpool.BMConnectionPool().connections():
try:
del connection.objectsNewToMe[hashId]
except KeyError:
@ -675,3 +678,32 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
except AttributeError:
logger.debug('Disconnected socket closing')
AdvancedDispatcher.handle_close(self)
class BMStringParser(BMProto):
    """
    A special case of BMProto used by objectProcessor to send ACK
    """
    def __init__(self):
        super(BMStringParser, self).__init__()
        # NOTE(review): appears to be a dummy local destination for this
        # pseudo-connection — confirm no real socket is ever opened here.
        self.destination = Peer('127.0.0.1', 8444)
        # Raw object bytes to parse; set by send_data() before use.
        self.payload = None
        ObjectTracker.__init__(self)

    def send_data(self, data):
        """Send object given by the data string"""
        # This class is introduced specially for ACK sending, please
        # change log strings if you are going to use it for something else
        self.bm_proto_reset()
        self.payload = data
        try:
            # Feed the payload through the normal object-command path.
            self.bm_command_object()
        except BMObjectAlreadyHaveError:
            pass  # maybe the same msg received on different nodes
        except BMObjectExpiredError:
            # Expired ACKs are logged and dropped, not retried.
            logger.debug(
                'Sending ACK failure (expired): %s', hexlify(data))
        except Exception as e:
            # Best-effort send: any other failure is logged with traceback.
            logger.debug(
                'Exception of type %s while sending ACK',
                type(e), exc_info=True)

View File

@ -5,14 +5,11 @@ Select which node to connect to
import logging
import random
from six.moves import queue
import knownnodes
import protocol
import state
from bmconfigparser import config
from network import portCheckerQueue
from queues import queue, portCheckerQueue
logger = logging.getLogger('default')

View File

@ -7,9 +7,9 @@ import re
import socket
import sys
import time
import random
import asyncore_pollchoose as asyncore
import helper_random
import knownnodes
import protocol
import state
@ -17,6 +17,7 @@ from bmconfigparser import config
from connectionchooser import chooseConnection
from node import Peer
from proxy import Proxy
from singleton import Singleton
from tcp import (
bootstrap, Socks4aBMConnection, Socks5BMConnection,
TCPConnection, TCPServer)
@ -25,6 +26,7 @@ from udp import UDPSocket
logger = logging.getLogger('default')
@Singleton
class BMConnectionPool(object):
"""Pool of all existing connections"""
# pylint: disable=too-many-instance-attributes
@ -88,6 +90,7 @@ class BMConnectionPool(object):
def connectToStream(self, streamNumber):
"""Connect to a bitmessage stream"""
self.streams.append(streamNumber)
state.streamsInWhichIAmParticipating.append(streamNumber)
def getConnectionByAddr(self, addr):
"""
@ -210,7 +213,7 @@ class BMConnectionPool(object):
connection_base = TCPConnection
elif proxy_type == 'SOCKS5':
connection_base = Socks5BMConnection
hostname = random.choice([ # nosec B311
hostname = helper_random.randomchoice([
'quzwelsuziwqgpt2.onion', None
])
elif proxy_type == 'SOCKS4a':
@ -222,7 +225,7 @@ class BMConnectionPool(object):
bootstrapper = bootstrap(connection_base)
if not hostname:
port = random.choice([8080, 8444]) # nosec B311
port = helper_random.randomchoice([8080, 8444])
hostname = 'bootstrap%s.bitmessage.org' % port
else:
port = 8444
@ -289,7 +292,7 @@ class BMConnectionPool(object):
state.maximumNumberOfHalfOpenConnections - pending):
try:
chosen = self.trustedPeer or chooseConnection(
random.choice(self.streams)) # nosec B311
helper_random.randomchoice(self.streams))
except ValueError:
continue
if chosen in self.outboundConnections:
@ -400,6 +403,3 @@ class BMConnectionPool(object):
pass
for i in reaper:
self.removeConnection(i)
pool = BMConnectionPool()

View File

@ -7,6 +7,10 @@ from random import choice, expovariate, sample
from threading import RLock
from time import time
import connectionpool
import state
from queues import invQueue
from singleton import Singleton
# randomise routes after 600 seconds
REASSIGN_INTERVAL = 600
@ -22,6 +26,7 @@ Stem = namedtuple('Stem', ['child', 'stream', 'timeout'])
logger = logging.getLogger('default')
@Singleton
class Dandelion: # pylint: disable=old-style-class
"""Dandelion class for tracking stem/fluff stages."""
def __init__(self):
@ -34,8 +39,6 @@ class Dandelion: # pylint: disable=old-style-class
# when to rerandomise routes
self.refresh = time() + REASSIGN_INTERVAL
self.lock = RLock()
self.enabled = None
self.pool = None
@staticmethod
def poissonTimeout(start=None, average=0):
@ -46,23 +49,10 @@ class Dandelion: # pylint: disable=old-style-class
average = FLUFF_TRIGGER_MEAN_DELAY
return start + expovariate(1.0 / average) + FLUFF_TRIGGER_FIXED_DELAY
def init_pool(self, pool):
"""pass pool instance"""
self.pool = pool
def init_dandelion_enabled(self, config):
"""Check if Dandelion is enabled and set value in enabled attribute"""
dandelion_enabled = config.safeGetInt('network', 'dandelion')
# dandelion requires outbound connections, without them,
# stem objects will get stuck forever
if not config.safeGetBoolean(
'bitmessagesettings', 'sendoutgoingconnections'):
dandelion_enabled = 0
self.enabled = dandelion_enabled
def addHash(self, hashId, source=None, stream=1):
"""Add inventory vector to dandelion stem return status of dandelion enabled"""
assert self.enabled is not None
"""Add inventory vector to dandelion stem"""
if not state.dandelion:
return
with self.lock:
self.hashMap[hashId] = Stem(
self.getNodeStem(source),
@ -101,7 +91,7 @@ class Dandelion: # pylint: disable=old-style-class
"""Child (i.e. next) node for an inventory vector during stem mode"""
return self.hashMap[hashId].child
def maybeAddStem(self, connection, invQueue):
def maybeAddStem(self, connection):
"""
If we had too few outbound connections, add the current one to the
current stem list. Dandelion as designed by the authors should
@ -175,7 +165,7 @@ class Dandelion: # pylint: disable=old-style-class
self.nodeMap[node] = self.pickStem(node)
return self.nodeMap[node]
def expire(self, invQueue):
def expire(self):
"""Switch expired objects from stem to fluff mode"""
with self.lock:
deadline = time()
@ -191,18 +181,16 @@ class Dandelion: # pylint: disable=old-style-class
def reRandomiseStems(self):
"""Re-shuffle stem mapping (parent <-> child pairs)"""
assert self.pool is not None
if self.refresh > time():
return
with self.lock:
try:
# random two connections
self.stem = sample(
self.pool.outboundConnections.values(), MAX_STEMS)
connectionpool.BMConnectionPool(
).outboundConnections.values(), MAX_STEMS)
# not enough stems available
except ValueError:
self.stem = self.pool.outboundConnections.values()
self.stem = connectionpool.BMConnectionPool(
).outboundConnections.values()
self.nodeMap = {}
# hashMap stays to cater for pending stems
self.refresh = time() + REASSIGN_INTERVAL

View File

@ -2,12 +2,13 @@
`DownloadThread` class definition
"""
import time
import random
import state
import addresses
import helper_random
import protocol
import connectionpool
from network import dandelion_ins
from dandelion import Dandelion
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from objectracker import missingObjects
from threads import StoppableThread
@ -42,8 +43,8 @@ class DownloadThread(StoppableThread):
while not self._stopped:
requested = 0
# Choose downloading peers randomly
connections = connectionpool.pool.establishedConnections()
random.shuffle(connections)
connections = BMConnectionPool().establishedConnections()
helper_random.randomshuffle(connections)
requestChunk = max(int(
min(self.maxRequestChunk, len(missingObjects))
/ len(connections)), 1) if connections else 1
@ -60,7 +61,7 @@ class DownloadThread(StoppableThread):
payload = bytearray()
chunkCount = 0
for chunk in request:
if chunk in state.Inventory and not dandelion_ins.hasHash(chunk):
if chunk in Inventory() and not Dandelion().hasHash(chunk):
try:
del i.objectsNewToMe[chunk]
except KeyError:

View File

@ -8,8 +8,9 @@ from time import time
import addresses
import protocol
import state
import connectionpool
from network import dandelion_ins, invQueue
from network.connectionpool import BMConnectionPool
from network.dandelion import Dandelion
from queues import invQueue
from threads import StoppableThread
@ -18,7 +19,7 @@ def handleExpiredDandelion(expired):
the object"""
if not expired:
return
for i in connectionpool.pool.connections():
for i in BMConnectionPool().connections():
if not i.fullyEstablished:
continue
for x in expired:
@ -39,10 +40,10 @@ class InvThread(StoppableThread):
@staticmethod
def handleLocallyGenerated(stream, hashId):
"""Locally generated inventory items require special handling"""
dandelion_ins.addHash(hashId, stream=stream)
for connection in connectionpool.pool.connections():
if dandelion_ins.enabled and connection != \
dandelion_ins.objectChildStem(hashId):
Dandelion().addHash(hashId, stream=stream)
for connection in BMConnectionPool().connections():
if state.dandelion and connection != \
Dandelion().objectChildStem(hashId):
continue
connection.objectsNewToThem[hashId] = time()
@ -51,7 +52,7 @@ class InvThread(StoppableThread):
chunk = []
while True:
# Dandelion fluff trigger by expiration
handleExpiredDandelion(dandelion_ins.expire(invQueue))
handleExpiredDandelion(Dandelion().expire())
try:
data = invQueue.get(False)
chunk.append((data[0], data[1]))
@ -62,7 +63,7 @@ class InvThread(StoppableThread):
break
if chunk:
for connection in connectionpool.pool.connections():
for connection in BMConnectionPool().connections():
fluffs = []
stems = []
for inv in chunk:
@ -74,10 +75,10 @@ class InvThread(StoppableThread):
except KeyError:
continue
try:
if connection == dandelion_ins.objectChildStem(inv[1]):
if connection == Dandelion().objectChildStem(inv[1]):
# Fluff trigger by RNG
# auto-ignore if config set to 0, i.e. dandelion is off
if random.randint(1, 100) >= dandelion_ins.enabled: # nosec B311
if random.randint(1, 100) >= state.dandelion: # nosec:B311
fluffs.append(inv[1])
# send a dinv only if the stem node supports dandelion
elif connection.services & protocol.NODE_DANDELION > 0:
@ -104,6 +105,7 @@ class InvThread(StoppableThread):
for _ in range(len(chunk)):
invQueue.task_done()
dandelion_ins.reRandomiseStems()
if Dandelion().refresh < time():
Dandelion().reRandomiseStems()
self.stop.wait(1)

View File

@ -85,7 +85,7 @@ def pickle_deserialize_old_knownnodes(source):
the new format is {Peer:{"lastseen":i, "rating":f}}
"""
global knownNodes
knownNodes = pickle.load(source) # nosec B301
knownNodes = pickle.load(source)
for stream in knownNodes.keys():
for node, params in knownNodes[stream].iteritems():
if isinstance(params, (float, int)):
@ -226,7 +226,7 @@ def dns():
1, Peer('bootstrap%s.bitmessage.org' % port, port))
def cleanupKnownNodes(pool):
def cleanupKnownNodes():
"""
Cleanup knownnodes: remove old nodes and nodes with low rating
"""
@ -236,7 +236,7 @@ def cleanupKnownNodes(pool):
with knownNodesLock:
for stream in knownNodes:
if stream not in pool.streams:
if stream not in state.streamsInWhichIAmParticipating:
continue
keys = knownNodes[stream].keys()
for node in keys:

View File

@ -2,7 +2,8 @@
A thread to handle network concerns
"""
import network.asyncore_pollchoose as asyncore
import connectionpool
import state
from network.connectionpool import BMConnectionPool
from queues import excQueue
from threads import StoppableThread
@ -13,28 +14,28 @@ class BMNetworkThread(StoppableThread):
def run(self):
try:
while not self._stopped:
connectionpool.pool.loop()
while not self._stopped and state.shutdown == 0:
BMConnectionPool().loop()
except Exception as e:
excQueue.put((self.name, e))
raise
def stopThread(self):
super(BMNetworkThread, self).stopThread()
for i in connectionpool.pool.listeningSockets.values():
for i in BMConnectionPool().listeningSockets.values():
try:
i.close()
except: # nosec B110 # pylint:disable=bare-except
except: # nosec:B110 pylint:disable=bare-except
pass
for i in connectionpool.pool.outboundConnections.values():
for i in BMConnectionPool().outboundConnections.values():
try:
i.close()
except: # nosec B110 # pylint:disable=bare-except
except: # nosec:B110 pylint:disable=bare-except
pass
for i in connectionpool.pool.inboundConnections.values():
for i in BMConnectionPool().inboundConnections.values():
try:
i.close()
except: # nosec B110 # pylint:disable=bare-except
except: # nosec:B110 pylint:disable=bare-except
pass
# just in case

View File

@ -4,8 +4,8 @@ Module for tracking objects
import time
from threading import RLock
import connectionpool
from network import dandelion_ins
import network.connectionpool
from network.dandelion import Dandelion
from randomtrackingdict import RandomTrackingDict
haveBloom = False
@ -100,21 +100,21 @@ class ObjectTracker(object):
def handleReceivedObject(self, streamNumber, hashid):
"""Handling received object"""
for i in connectionpool.pool.connections():
for i in network.connectionpool.BMConnectionPool().connections():
if not i.fullyEstablished:
continue
try:
del i.objectsNewToMe[hashid]
except KeyError:
if streamNumber in i.streams and (
not dandelion_ins.hasHash(hashid)
or dandelion_ins.objectChildStem(hashid) == i):
not Dandelion().hasHash(hashid)
or Dandelion().objectChildStem(hashid) == i):
with i.objectsNewToThemLock:
i.objectsNewToThem[hashid] = time.time()
# update stream number,
# which we didn't have when we just received the dinv
# also resets expiration of the stem mode
dandelion_ins.setHashStream(hashid, streamNumber)
Dandelion().setHashStream(hashid, streamNumber)
if i == self:
try:

View File

@ -5,9 +5,10 @@ import errno
import Queue
import socket
import connectionpool
import state
from network.advanceddispatcher import UnknownStateError
from network import receiveDataQueue
from network.connectionpool import BMConnectionPool
from queues import receiveDataQueue
from threads import StoppableThread
@ -18,13 +19,13 @@ class ReceiveQueueThread(StoppableThread):
super(ReceiveQueueThread, self).__init__(name="ReceiveQueue_%i" % num)
def run(self):
while not self._stopped:
while not self._stopped and state.shutdown == 0:
try:
dest = receiveDataQueue.get(block=True, timeout=1)
except Queue.Empty:
continue
if self._stopped:
if self._stopped or state.shutdown:
break
# cycle as long as there is data
@ -35,7 +36,7 @@ class ReceiveQueueThread(StoppableThread):
# enough data, or the connection is to be aborted
try:
connection = connectionpool.pool.getConnectionByAddr(dest)
connection = BMConnectionPool().getConnectionByAddr(dest)
# connection object not found
except KeyError:
receiveDataQueue.task_done()

View File

@ -4,7 +4,7 @@ Network statistics
import time
import asyncore_pollchoose as asyncore
import connectionpool
from network.connectionpool import BMConnectionPool
from objectracker import missingObjects
@ -18,7 +18,7 @@ currentSentSpeed = 0
def connectedHostsList():
"""List of all the connected hosts"""
return connectionpool.pool.establishedConnections()
return BMConnectionPool().establishedConnections()
def sentBytes():
@ -69,8 +69,8 @@ def pendingDownload():
def pendingUpload():
"""Getting pending uploads"""
# tmp = {}
# for connection in connectionpool.pool.inboundConnections.values() + \
# connectionpool.pool.outboundConnections.values():
# for connection in BMConnectionPool().inboundConnections.values() + \
# BMConnectionPool().outboundConnections.values():
# for k in connection.objectsNewToThem.keys():
# tmp[k] = True
# This probably isn't the correct logic so it's disabled

View File

@ -11,20 +11,22 @@ import time
# magic imports!
import addresses
import helper_random
import l10n
import protocol
import state
import connectionpool
from bmconfigparser import config
from highlevelcrypto import randomBytes
from network import dandelion_ins, invQueue, receiveDataQueue
from queues import UISignalQueue
from helper_random import randomBytes
from inventory import Inventory
from queues import invQueue, receiveDataQueue, UISignalQueue
from tr import _translate
import asyncore_pollchoose as asyncore
import connectionpool
import knownnodes
from network.advanceddispatcher import AdvancedDispatcher
from network.bmproto import BMProto
from network.dandelion import Dandelion
from network.objectracker import ObjectTracker
from network.socks4a import Socks4aConnection
from network.socks5 import Socks5Connection
@ -168,7 +170,7 @@ class TCPConnection(BMProto, TLSDispatcher):
knownnodes.increaseRating(self.destination)
knownnodes.addKnownNode(
self.streams, self.destination, time.time())
dandelion_ins.maybeAddStem(self, invQueue)
Dandelion().maybeAddStem(self)
self.sendAddr()
self.sendBigInv()
@ -200,7 +202,7 @@ class TCPConnection(BMProto, TLSDispatcher):
elemCount = min(
len(filtered),
maxAddrCount / 2 if n else maxAddrCount)
addrs[s] = random.sample(filtered, elemCount)
addrs[s] = helper_random.randomsample(filtered, elemCount)
for substream in addrs:
for peer, params in addrs[substream]:
templist.append((substream, peer, params["lastseen"]))
@ -228,9 +230,9 @@ class TCPConnection(BMProto, TLSDispatcher):
# may lock for a long time, but I think it's better than
# thousands of small locks
with self.objectsNewToThemLock:
for objHash in state.Inventory.unexpired_hashes_by_stream(stream):
for objHash in Inventory().unexpired_hashes_by_stream(stream):
# don't advertise stem objects on bigInv
if dandelion_ins.hasHash(objHash):
if Dandelion().hasHash(objHash):
continue
bigInvList[objHash] = 0
objectCount = 0
@ -267,7 +269,7 @@ class TCPConnection(BMProto, TLSDispatcher):
self.append_write_buf(
protocol.assembleVersionMessage(
self.destination.host, self.destination.port,
connectionpool.pool.streams, dandelion_ins.enabled,
connectionpool.BMConnectionPool().streams,
False, nodeid=self.nodeid))
self.connectedAt = time.time()
receiveDataQueue.put(self.destination)
@ -292,7 +294,7 @@ class TCPConnection(BMProto, TLSDispatcher):
if host_is_global:
knownnodes.addKnownNode(
self.streams, self.destination, time.time())
dandelion_ins.maybeRemoveStem(self)
Dandelion().maybeRemoveStem(self)
else:
self.checkTimeOffsetNotification()
if host_is_global:
@ -318,7 +320,7 @@ class Socks5BMConnection(Socks5Connection, TCPConnection):
self.append_write_buf(
protocol.assembleVersionMessage(
self.destination.host, self.destination.port,
connectionpool.pool.streams, dandelion_ins.enabled,
connectionpool.BMConnectionPool().streams,
False, nodeid=self.nodeid))
self.set_state("bm_header", expectBytes=protocol.Header.size)
return True
@ -342,7 +344,7 @@ class Socks4aBMConnection(Socks4aConnection, TCPConnection):
self.append_write_buf(
protocol.assembleVersionMessage(
self.destination.host, self.destination.port,
connectionpool.pool.streams, dandelion_ins.enabled,
connectionpool.BMConnectionPool().streams,
False, nodeid=self.nodeid))
self.set_state("bm_header", expectBytes=protocol.Header.size)
return True
@ -430,7 +432,7 @@ class TCPServer(AdvancedDispatcher):
state.ownAddresses[Peer(*sock.getsockname())] = True
if (
len(connectionpool.pool)
len(connectionpool.BMConnectionPool())
> config.safeGetInt(
'bitmessagesettings', 'maxtotalconnections')
+ config.safeGetInt(
@ -442,7 +444,7 @@ class TCPServer(AdvancedDispatcher):
sock.close()
return
try:
connectionpool.pool.addConnection(
connectionpool.BMConnectionPool().addConnection(
TCPConnection(sock=sock))
except socket.error:
pass

View File

@ -10,7 +10,7 @@ import sys
import network.asyncore_pollchoose as asyncore
import paths
from network.advanceddispatcher import AdvancedDispatcher
from network import receiveDataQueue
from queues import receiveDataQueue
logger = logging.getLogger('default')

View File

@ -8,9 +8,8 @@ import time
# magic imports!
import protocol
import state
import connectionpool
from queues import receiveDataQueue
from network import receiveDataQueue
from bmproto import BMProto
from node import Peer
from objectracker import ObjectTracker
@ -82,7 +81,7 @@ class UDPSocket(BMProto): # pylint: disable=too-many-instance-attributes
remoteport = False
for seenTime, stream, _, ip, port in addresses:
decodedIP = protocol.checkIPAddress(str(ip))
if stream not in connectionpool.pool.streams:
if stream not in state.streamsInWhichIAmParticipating:
continue
if (seenTime < time.time() - protocol.MAX_TIME_OFFSET
or seenTime > time.time() + protocol.MAX_TIME_OFFSET):

View File

@ -3,12 +3,12 @@
"""
import time
import random
import helper_random
import protocol
import state
import connectionpool
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from network.dandelion import Dandelion
from randomtrackingdict import RandomTrackingDict
from network import dandelion_ins
from threads import StoppableThread
@ -23,8 +23,8 @@ class UploadThread(StoppableThread):
while not self._stopped:
uploaded = 0
# Choose uploading peers randomly
connections = connectionpool.pool.establishedConnections()
random.shuffle(connections)
connections = BMConnectionPool().establishedConnections()
helper_random.randomshuffle(connections)
for i in connections:
now = time.time()
# avoid unnecessary delay
@ -41,8 +41,8 @@ class UploadThread(StoppableThread):
chunk_count = 0
for chunk in request:
del i.pendingUpload[chunk]
if dandelion_ins.hasHash(chunk) and \
i != dandelion_ins.objectChildStem(chunk):
if Dandelion().hasHash(chunk) and \
i != Dandelion().objectChildStem(chunk):
i.antiIntersectionDelay()
self.logger.info(
'%s asked for a stem object we didn\'t offer to it.',
@ -50,7 +50,7 @@ class UploadThread(StoppableThread):
break
try:
payload.extend(protocol.CreatePacket(
'object', state.Inventory[chunk].payload))
'object', Inventory()[chunk].payload))
chunk_count += 1
except KeyError:
i.antiIntersectionDelay()

View File

@ -47,7 +47,7 @@ def initCL():
device_type=cl.device_type.GPU))
if platform.vendor not in vendors:
vendors.append(platform.vendor)
except: # nosec B110 # noqa:E722 # pylint:disable=bare-except
except: # nosec:B110 noqa:E722 pylint:disable=bare-except
pass
if enabledGpus:
ctx = cl.Context(devices=enabledGpus)

View File

@ -11,14 +11,14 @@ try:
winsound.PlaySound(sound_file, winsound.SND_FILENAME)
except ImportError:
import os
import subprocess # nosec B404
import subprocess
play_cmd = {}
def _subprocess(*args):
FNULL = open(os.devnull, 'wb')
subprocess.call(
args, stdout=FNULL, stderr=subprocess.STDOUT, close_fds=True) # nosec B603
args, stdout=FNULL, stderr=subprocess.STDOUT, close_fds=True)
def connect_plugin(sound_file):
"""This function implements the entry point."""

View File

@ -4,14 +4,14 @@ Proof of work calculation
"""
import ctypes
import hashlib
import os
import subprocess # nosec B404
import sys
import tempfile
import time
from struct import pack, unpack
from subprocess import call
import highlevelcrypto
import openclpow
import paths
import queues
@ -82,25 +82,18 @@ def _set_idle():
pid = win32api.GetCurrentProcessId()
handle = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, True, pid)
win32process.SetPriorityClass(handle, win32process.IDLE_PRIORITY_CLASS)
except: # nosec B110 # noqa:E722 # pylint:disable=bare-except
except: # nosec:B110 noqa:E722 pylint:disable=bare-except
# Windows 64-bit
pass
def trial_value(nonce, initialHash):
"""Calculate PoW trial value"""
trialValue, = unpack(
'>Q', highlevelcrypto.double_sha512(
pack('>Q', nonce) + initialHash)[0:8])
return trialValue
def _pool_worker(nonce, initialHash, target, pool_size):
_set_idle()
trialValue = float('inf')
while trialValue > target:
nonce += pool_size
trialValue = trial_value(nonce, initialHash)
trialValue, = unpack('>Q', hashlib.sha512(hashlib.sha512(
pack('>Q', nonce) + initialHash).digest()).digest()[0:8])
return [trialValue, nonce]
@ -110,9 +103,10 @@ def _doSafePoW(target, initialHash):
trialValue = float('inf')
while trialValue > target and state.shutdown == 0:
nonce += 1
trialValue = trial_value(nonce, initialHash)
trialValue, = unpack('>Q', hashlib.sha512(hashlib.sha512(
pack('>Q', nonce) + initialHash).digest()).digest()[0:8])
if state.shutdown != 0:
raise StopIteration("Interrupted")
raise StopIteration("Interrupted") # pylint: misplaced-bare-raise
logger.debug("Safe PoW done")
return [trialValue, nonce]
@ -141,7 +135,7 @@ def _doFastPoW(target, initialHash):
try:
pool.terminate()
pool.join()
except: # nosec B110 # noqa:E722 # pylint:disable=bare-except
except: # noqa:E722
pass
raise StopIteration("Interrupted")
for i in range(pool_size):
@ -169,7 +163,7 @@ def _doCPoW(target, initialHash):
logger.debug("C PoW start")
nonce = bmpow(out_h, out_m)
trialValue = trial_value(nonce, initialHash)
trialValue, = unpack('>Q', hashlib.sha512(hashlib.sha512(pack('>Q', nonce) + initialHash).digest()).digest()[0:8])
if state.shutdown != 0:
raise StopIteration("Interrupted")
logger.debug("C PoW done")
@ -179,7 +173,7 @@ def _doCPoW(target, initialHash):
def _doGPUPoW(target, initialHash):
logger.debug("GPU PoW start")
nonce = openclpow.do_opencl_pow(initialHash.encode("hex"), target)
trialValue = trial_value(nonce, initialHash)
trialValue, = unpack('>Q', hashlib.sha512(hashlib.sha512(pack('>Q', nonce) + initialHash).digest()).digest()[0:8])
if trialValue > target:
deviceNames = ", ".join(gpu.name for gpu in openclpow.enabledGpus)
queues.UISignalQueue.put((
@ -278,26 +272,16 @@ def buildCPoW():
try:
if "bsd" in sys.platform:
# BSD make
subprocess.check_call([ # nosec B607, B603
"make", "-C", os.path.join(paths.codePath(), "bitmsghash"),
'-f', 'Makefile.bsd'])
call(["make", "-C", os.path.join(paths.codePath(), "bitmsghash"), '-f', 'Makefile.bsd'])
else:
# GNU make
subprocess.check_call([ # nosec B607, B603
"make", "-C", os.path.join(paths.codePath(), "bitmsghash")])
if os.path.exists(
os.path.join(paths.codePath(), "bitmsghash", "bitmsghash.so")
):
call(["make", "-C", os.path.join(paths.codePath(), "bitmsghash")])
if os.path.exists(os.path.join(paths.codePath(), "bitmsghash", "bitmsghash.so")):
init()
notifyBuild(True)
else:
notifyBuild(True)
except (OSError, subprocess.CalledProcessError):
notifyBuild(True)
except: # noqa:E722
logger.warning(
'Unexpected exception rised when tried to build bitmsghash lib',
exc_info=True)
notifyBuild(True)
@ -312,14 +296,14 @@ def run(target, initialHash):
return _doGPUPoW(target, initialHash)
except StopIteration:
raise
except: # nosec B110 # noqa:E722 # pylint:disable=bare-except
except: # nosec:B110 noqa:E722 pylint:disable=bare-except
pass # fallback
if bmpow:
try:
return _doCPoW(target, initialHash)
except StopIteration:
raise
except: # nosec B110 # noqa:E722 # pylint:disable=bare-except
except: # nosec:B110 noqa:E722 pylint:disable=bare-except
pass # fallback
if paths.frozen == "macosx_app" or not paths.frozen:
# on my (Peter Surda) Windows 10, Windows Defender
@ -331,13 +315,13 @@ def run(target, initialHash):
except StopIteration:
logger.error("Fast PoW got StopIteration")
raise
except: # noqa:E722 # pylint:disable=bare-except
except: # noqa:E722 pylint:disable=bare-except
logger.error("Fast PoW got exception:", exc_info=True)
try:
return _doSafePoW(target, initialHash)
except StopIteration:
raise
except: # nosec B110 # noqa:E722 # pylint:disable=bare-except
except: # nosec:B110 noqa:E722 pylint:disable=bare-except
pass # fallback

View File

@ -20,6 +20,7 @@ from addresses import (
encodeVarint, decodeVarint, decodeAddress, varintDecodeError)
from bmconfigparser import config
from debug import logger
from fallback import RIPEMD160Hash
from helper_sql import sqlExecute
from network.node import Peer
from version import softwareVersion
@ -289,11 +290,12 @@ def isProofOfWorkSufficient(
if payloadLengthExtraBytes < defaults.networkDefaultPayloadLengthExtraBytes:
payloadLengthExtraBytes = defaults.networkDefaultPayloadLengthExtraBytes
endOfLifeTime, = unpack('>Q', data[8:16])
TTL = endOfLifeTime - int(recvTime if recvTime else time.time())
TTL = endOfLifeTime - (int(recvTime) if recvTime else int(time.time()))
if TTL < 300:
TTL = 300
POW, = unpack('>Q', highlevelcrypto.double_sha512(
data[:8] + hashlib.sha512(data[8:]).digest())[0:8])
POW, = unpack('>Q', hashlib.sha512(hashlib.sha512(
data[:8] + hashlib.sha512(data[8:]).digest()
).digest()).digest()[0:8])
return POW <= 2 ** 64 / (
nonceTrialsPerByte * (
len(data) + payloadLengthExtraBytes
@ -336,8 +338,8 @@ def assembleAddrMessage(peerList):
return retval
def assembleVersionMessage( # pylint: disable=too-many-arguments
remoteHost, remotePort, participatingStreams, dandelion_enabled=True, server=False, nodeid=None,
def assembleVersionMessage(
remoteHost, remotePort, participatingStreams, server=False, nodeid=None
):
"""
Construct the payload of a version message,
@ -350,7 +352,7 @@ def assembleVersionMessage( # pylint: disable=too-many-arguments
'>q',
NODE_NETWORK
| (NODE_SSL if haveSSL(server) else 0)
| (NODE_DANDELION if dandelion_enabled else 0)
| (NODE_DANDELION if state.dandelion else 0)
)
payload += pack('>q', int(time.time()))
@ -374,7 +376,7 @@ def assembleVersionMessage( # pylint: disable=too-many-arguments
'>q',
NODE_NETWORK
| (NODE_SSL if haveSSL(server) else 0)
| (NODE_DANDELION if dandelion_enabled else 0)
| (NODE_DANDELION if state.dandelion else 0)
)
# = 127.0.0.1. This will be ignored by the remote host.
# The actual remote connected IP will be used.
@ -434,17 +436,6 @@ def assembleErrorMessage(fatal=0, banTime=0, inventoryVector='', errorText=''):
# Packet decoding
def decodeObjectParameters(data):
"""Decode the parameters of a raw object needed to put it in inventory"""
# BMProto.decode_payload_content("QQIvv")
expiresTime = unpack('>Q', data[8:16])[0]
objectType = unpack('>I', data[16:20])[0]
parserPos = 20 + decodeVarint(data[20:30])[1]
toStreamNumber = decodeVarint(data[parserPos:parserPos + 10])[0]
return objectType, toStreamNumber, expiresTime
def decryptAndCheckPubkeyPayload(data, address):
"""
Version 4 pubkeys are encrypted. This function is run when we
@ -511,9 +502,9 @@ def decryptAndCheckPubkeyPayload(data, address):
readPosition = 0
# bitfieldBehaviors = decryptedData[readPosition:readPosition + 4]
readPosition += 4
pubSigningKey = '\x04' + decryptedData[readPosition:readPosition + 64]
publicSigningKey = '\x04' + decryptedData[readPosition:readPosition + 64]
readPosition += 64
pubEncryptionKey = '\x04' + decryptedData[readPosition:readPosition + 64]
publicEncryptionKey = '\x04' + decryptedData[readPosition:readPosition + 64]
readPosition += 64
specifiedNonceTrialsPerByteLength = decodeVarint(
decryptedData[readPosition:readPosition + 10])[1]
@ -529,7 +520,7 @@ def decryptAndCheckPubkeyPayload(data, address):
signature = decryptedData[readPosition:readPosition + signatureLength]
if not highlevelcrypto.verify(
signedData, signature, hexlify(pubSigningKey)):
signedData, signature, hexlify(publicSigningKey)):
logger.info(
'ECDSA verify failed (within decryptAndCheckPubkeyPayload)')
return 'failed'
@ -537,7 +528,9 @@ def decryptAndCheckPubkeyPayload(data, address):
logger.info(
'ECDSA verify passed (within decryptAndCheckPubkeyPayload)')
embeddedRipe = highlevelcrypto.to_ripe(pubSigningKey, pubEncryptionKey)
sha = hashlib.new('sha512')
sha.update(publicSigningKey + publicEncryptionKey)
embeddedRipe = RIPEMD160Hash(sha.digest()).digest()
if embeddedRipe != ripe:
# Although this pubkey object had the tag were were looking for
@ -555,7 +548,7 @@ def decryptAndCheckPubkeyPayload(data, address):
'addressVersion: %s, streamNumber: %s\nripe %s\n'
'publicSigningKey in hex: %s\npublicEncryptionKey in hex: %s',
addressVersion, streamNumber, hexlify(ripe),
hexlify(pubSigningKey), hexlify(pubEncryptionKey)
hexlify(publicSigningKey), hexlify(publicEncryptionKey)
)
t = (address, addressVersion, storedData, int(time.time()), 'yes')

View File

@ -805,10 +805,6 @@ def loadOpenSSL():
'libcrypto.dylib', '/usr/local/opt/openssl/lib/libcrypto.dylib'])
elif 'win32' in sys.platform or 'win64' in sys.platform:
libdir.append('libeay32.dll')
# kivy
elif 'ANDROID_ARGUMENT' in environ:
libdir.append('libcrypto1.1.so')
libdir.append('libssl1.1.so')
else:
libdir.append('libcrypto.so')
libdir.append('libssl.so')
@ -823,7 +819,7 @@ def loadOpenSSL():
try:
OpenSSL = _OpenSSL(library)
return
except Exception: # nosec B110
except Exception: # nosec:B110
pass
raise Exception(
"Couldn't find and load the OpenSSL library. You must install it.")

View File

@ -2,43 +2,6 @@
from binascii import unhexlify
# These keys are from addresses test script
sample_pubsigningkey = (
b'044a367f049ec16cb6b6118eb734a9962d10b8db59c890cd08f210c43ff08bdf09d'
b'16f502ca26cd0713f38988a1237f1fc8fa07b15653c996dc4013af6d15505ce')
sample_pubencryptionkey = (
b'044597d59177fc1d89555d38915f581b5ff2286b39d022ca0283d2bdd5c36be5d3c'
b'e7b9b97792327851a562752e4b79475d1f51f5a71352482b241227f45ed36a9')
sample_privsigningkey = \
b'93d0b61371a54b53df143b954035d612f8efa8a3ed1cf842c2186bfd8f876665'
sample_privencryptionkey = \
b'4b0b73a54e19b059dc274ab69df095fe699f43b17397bca26fdf40f4d7400a3a'
# [chan] bitmessage
sample_privsigningkey_wif = \
b'5K42shDERM5g7Kbi3JT5vsAWpXMqRhWZpX835M2pdSoqQQpJMYm'
sample_privencryptionkey_wif = \
b'5HwugVWm31gnxtoYcvcK7oywH2ezYTh6Y4tzRxsndAeMi6NHqpA'
sample_wif_privsigningkey = \
b'a2e8b841a531c1c558ee0680c396789c7a2ea3ac4795ae3f000caf9fe367d144'
sample_wif_privencryptionkey = \
b'114ec0e2dca24a826a0eed064b0405b0ac148abc3b1d52729697f4d7b873fdc6'
sample_factor = \
66858749573256452658262553961707680376751171096153613379801854825275240965733
# G * sample_factor
sample_point = (
33567437183004486938355437500683826356288335339807546987348409590129959362313,
94730058721143827257669456336351159718085716196507891067256111928318063085006
)
sample_deterministic_addr3 = b'2DBPTgeSawWYZceFD69AbDT5q4iUWtj1ZN'
sample_deterministic_addr4 = b'2cWzSnwjJ7yRP3nLEWUV5LisTZyREWSzUK'
sample_daddr3_512 = 18875720106589866286514488037355423395410802084648916523381
sample_daddr4_512 = 25152821841976547050350277460563089811513157529113201589004
# pubkey K
sample_pubkey = unhexlify(
'0409d4e5c0ab3d25fe'

Some files were not shown because too many files have changed in this diff Show More