Changes based on style and lint checks. (final_code_quality_3) #1356

Merged
coffeedogs merged 1 commit from final_code_quality_3 into v0.6 2018-10-31 14:21:20 +01:00
3 changed files with 102 additions and 24 deletions
Showing only changes of commit c875769b15

View File

@@ -8,6 +8,7 @@ import traceback
HOST = "127.0.0.1"
PORT = 8912
def sslProtocolVersion():
# sslProtocolVersion
if sys.version_info >= (2, 7, 13):
@@ -23,16 +24,19 @@ def sslProtocolVersion():
# "TLSv1.2" in < 2.7.9
return ssl.PROTOCOL_TLSv1
def sslProtocolCiphers():
if ssl.OPENSSL_VERSION_NUMBER >= 0x10100000:
return "AECDH-AES256-SHA@SECLEVEL=0"
else:
return "AECDH-AES256-SHA"
def connect():
sock = socket.create_connection((HOST, PORT))
return sock
def listen():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@@ -40,6 +44,7 @@ def listen():
sock.listen(0)
return sock
def sslHandshake(sock, server=False):
if sys.version_info >= (2, 7, 9):
context = ssl.SSLContext(sslProtocolVersion())
@@ -50,7 +55,10 @@ def sslHandshake(sock, server=False):
context.options = ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_SINGLE_ECDH_USE | ssl.OP_CIPHER_SERVER_PREFERENCE
sslSock = context.wrap_socket(sock, server_side=server, do_handshake_on_connect=False)
else:
sslSock = ssl.wrap_socket(sock, keyfile = os.path.join('src', 'sslkeys', 'key.pem'), certfile = os.path.join('src', 'sslkeys', 'cert.pem'), server_side = server, ssl_version=sslProtocolVersion(), do_handshake_on_connect=False, ciphers='AECDH-AES256-SHA')
sslSock = ssl.wrap_socket(sock, keyfile=os.path.join('src', 'sslkeys', 'key.pem'),
certfile=os.path.join('src', 'sslkeys', 'cert.pem'),
server_side=server, ssl_version=sslProtocolVersion(),
do_handshake_on_connect=False, ciphers='AECDH-AES256-SHA')
while True:
try:
@@ -69,6 +77,7 @@ def sslHandshake(sock, server=False):
print "Success!"
return sslSock
if __name__ == "__main__":
if len(sys.argv) != 2:
print "Usage: ssltest.py client|server"

View File

@@ -1,19 +1,32 @@
"""
src/network/advanceddispatcher.py
=================================
"""
# pylint: disable=attribute-defined-outside-init
import socket
import threading
import time
import asyncore_pollchoose as asyncore
import network.asyncore_pollchoose as asyncore
import state
from debug import logger
from helper_threading import BusyError, nonBlocking
import state
class ProcessingError(Exception):
"""General class for protocol parser exception, use as a base for others."""
pass
class UnknownStateError(ProcessingError):
"""Parser points to an unknown (unimplemented) state."""
pass
class AdvancedDispatcher(asyncore.dispatcher):
"""Improved version of asyncore dispatcher, with buffers and protocol state."""
# pylint: disable=too-many-instance-attributes
_buf_len = 131072 # 128kB
def __init__(self, sock=None):
@@ -31,6 +44,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
self.processingLock = threading.RLock()
def append_write_buf(self, data):
"""Append binary data to the end of stream write buffer."""
if data:
if isinstance(data, list):
with self.writeLock:
@@ -41,6 +55,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
self.write_buf.extend(data)
def slice_write_buf(self, length=0):
"""Cut the beginning of the stream write buffer."""
if length > 0:
with self.writeLock:
if length >= len(self.write_buf):
@@ -49,6 +64,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
del self.write_buf[0:length]
def slice_read_buf(self, length=0):
"""Cut the beginning of the stream read buffer."""
if length > 0:
with self.readLock:
if length >= len(self.read_buf):
@@ -57,6 +73,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
del self.read_buf[0:length]
def process(self):
"""Process (parse) data that's in the buffer, as long as there is enough data and the connection is open."""
while self.connected and not state.shutdown:
try:
with nonBlocking(self.processingLock):
@@ -68,19 +85,21 @@ class AdvancedDispatcher(asyncore.dispatcher):
cmd = getattr(self, "state_" + str(self.state))
except AttributeError:
logger.error("Unknown state %s", self.state, exc_info=True)
raise UnknownState(self.state)
raise UnknownStateError(self.state)
if not cmd():
break
except BusyError:
return False
return False
def set_state(self, state, length=0, expectBytes=0):
def set_state(self, state_str, length=0, expectBytes=0):
"""Set the next processing state."""
self.expectBytes = expectBytes
self.slice_read_buf(length)
self.state = state
self.state = state_str
def writable(self):
"""Is data from the write buffer ready to be sent to the network?"""
self.uploadChunk = AdvancedDispatcher._buf_len
if asyncore.maxUploadRate > 0:
self.uploadChunk = int(asyncore.uploadBucket)
@@ -89,6 +108,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
(self.connecting or (self.connected and self.uploadChunk > 0))
def readable(self):
"""Is the read buffer ready to accept data from the network?"""
self.downloadChunk = AdvancedDispatcher._buf_len
if asyncore.maxDownloadRate > 0:
self.downloadChunk = int(asyncore.downloadBucket)
@@ -103,6 +123,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
(self.connecting or self.accepting or (self.connected and self.downloadChunk > 0))
def handle_read(self):
"""Append incoming data to the read buffer."""
self.lastTx = time.time()
newData = self.recv(self.downloadChunk)
self.receivedBytes += len(newData)
@@ -111,6 +132,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
self.read_buf.extend(newData)
def handle_write(self):
"""Send outgoing data from write buffer."""
self.lastTx = time.time()
written = self.send(self.write_buf[0:self.uploadChunk])
asyncore.update_sent(written)
@@ -118,19 +140,24 @@ class AdvancedDispatcher(asyncore.dispatcher):
self.slice_write_buf(written)
def handle_connect_event(self):
"""Callback for connection established event."""
try:
asyncore.dispatcher.handle_connect_event(self)
except socket.error as e:
if e.args[0] not in asyncore._DISCONNECTED:
if e.args[0] not in asyncore._DISCONNECTED: # pylint: disable=protected-access
raise
def handle_connect(self):
"""Method for handling connection established implementations."""
self.lastTx = time.time()
def state_close(self):
"""Signal to the processing loop to end."""
# pylint: disable=no-self-use
return False
def handle_close(self):
"""Callback for connection being closed, but can also be called directly when you want connection to close."""
with self.readLock:
self.read_buf = bytearray()
with self.writeLock:
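
To make the buffer and state machinery documented above concrete, here is a hedged sketch (not part of this commit) of a protocol class built on AdvancedDispatcher; the class name is invented and the initial "init" state is an assumption:

# Hypothetical subclass: process() looks up "state_" + self.state and calls it
# until the handler returns False or raises BusyError/UnknownStateError.
class EchoDispatcher(AdvancedDispatcher):
    """Illustrative only: echo whatever arrives back to the peer."""

    def state_init(self):
        if not self.read_buf:
            return False                        # nothing buffered yet; stop the loop
        # queue the received bytes for sending and consume them from the read buffer
        self.append_write_buf(bytes(self.read_buf))
        self.set_state("init", length=len(self.read_buf))
        return True                             # tell process() to keep going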

View File

@@ -1,44 +1,68 @@
from binascii import hexlify
"""
src/network/bmobject.py
======================
"""
import time
import protocol
import state
from addresses import calculateInventoryHash
from debug import logger
from inventory import Inventory
from network.dandelion import Dandelion
import protocol
import state
class BMObjectInsufficientPOWError(Exception):
"""Exception indicating the object doesn't have sufficient proof of work."""
errorCodes = ("Insufficient proof of work")
class BMObjectInvalidDataError(Exception):
"""Exception indicating the data being parsed does not match the specification."""
errorCodes = ("Data invalid")
class BMObjectExpiredError(Exception):
"""Exception indicating the object's lifetime has expired."""
errorCodes = ("Object expired")
class BMObjectUnwantedStreamError(Exception):
"""Exception indicating the object is in a stream we didn't advertise as being interested in."""
errorCodes = ("Object in unwanted stream")
class BMObjectInvalidError(Exception):
"""The object's data does not match object specification."""
errorCodes = ("Invalid object")
class BMObjectAlreadyHaveError(Exception):
"""We received a duplicate object (one we already have)"""
errorCodes = ("Already have this object")
class BMObject(object):
"""Bitmessage Object as a class."""
# pylint: disable=too-many-instance-attributes
# max TTL, 28 days and 3 hours
maxTTL = 28 * 24 * 60 * 60 + 10800
# min TTL, 3 hour (in the past
minTTL = -3600
def __init__(self, nonce, expiresTime, objectType, version, streamNumber, data, payloadOffset):
def __init__(
self,
nonce,
expiresTime,
objectType,
version,
streamNumber,
data,
payloadOffset
): # pylint: disable=too-many-arguments
self.nonce = nonce
self.expiresTime = expiresTime
self.objectType = objectType
@@ -50,29 +74,39 @@ class BMObject(object):
self.tag = self.data[payloadOffset:payloadOffset + 32]
def checkProofOfWorkSufficient(self):
"""Perform a proof of work check for sufficiency."""
# Let us check to make sure that the proof of work is sufficient.
if not protocol.isProofOfWorkSufficient(self.data):
logger.info('Proof of work is insufficient.')
raise BMObjectInsufficientPOWError()
def checkEOLSanity(self):
"""Check if object's lifetime isn't ridiculously far in the past or future."""
# EOL sanity check
if self.expiresTime - int(time.time()) > BMObject.maxTTL:
logger.info('This object\'s End of Life time is too far in the future. Ignoring it. Time is %i', self.expiresTime)
# TODO: remove from download queue
logger.info(
'This object\'s End of Life time is too far in the future. Ignoring it. Time is %i',
self.expiresTime)
# .. todo:: remove from download queue
raise BMObjectExpiredError()
if self.expiresTime - int(time.time()) < BMObject.minTTL:
logger.info('This object\'s End of Life time was too long ago. Ignoring the object. Time is %i', self.expiresTime)
# TODO: remove from download queue
logger.info(
'This object\'s End of Life time was too long ago. Ignoring the object. Time is %i',
self.expiresTime)
# .. todo:: remove from download queue
raise BMObjectExpiredError()
def checkStream(self):
"""Check if object's stream matches streams we are interested in"""
if self.streamNumber not in state.streamsInWhichIAmParticipating:
logger.debug('The streamNumber %i isn\'t one we are interested in.', self.streamNumber)
raise BMObjectUnwantedStreamError()
def checkAlreadyHave(self):
"""
Check if we already have the object (so that we don't duplicate it in inventory or advertise it unnecessarily)
"""
# if it's a stem duplicate, pretend we don't have it
if Dandelion().hasHash(self.inventoryHash):
return
@@ -80,6 +114,7 @@ class BMObject(object):
raise BMObjectAlreadyHaveError()
def checkObjectByType(self):
"""Call a object type specific check (objects can have additional checks based on their types)"""
if self.objectType == protocol.OBJECT_GETPUBKEY:
self.checkGetpubkey()
elif self.objectType == protocol.OBJECT_PUBKEY:
@@ -91,21 +126,28 @@ class BMObject(object):
# other objects don't require other types of tests
def checkMessage(self):
""""Message" object type checks."""
# pylint: disable=no-self-use
return
def checkGetpubkey(self):
""""Getpubkey" object type checks."""
if len(self.data) < 42:
logger.info('getpubkey message doesn\'t contain enough data. Ignoring.')
raise BMObjectInvalidError()
def checkPubkey(self):
""""Pubkey" object type checks."""
if len(self.data) < 146 or len(self.data) > 440: # sanity check
logger.info('pubkey object too short or too long. Ignoring.')
raise BMObjectInvalidError()
def checkBroadcast(self):
""""Broadcast" object type checks."""
if len(self.data) < 180:
logger.debug('The payload length of this broadcast packet is unreasonably low. Someone is probably trying funny business. Ignoring message.')
logger.debug(
'The payload length of this broadcast packet is unreasonably low.'
' Someone is probably trying funny business. Ignoring message.')
raise BMObjectInvalidError()
# this isn't supported anymore
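
Finally, a hedged sketch (not part of this commit) of how a caller such as the protocol parser might run these checks and use the exception classes documented above; the helper name and the exact handling are assumptions for illustration:

# Hypothetical helper: run the standard BMObject checks, return True if the
# object is usable, logging the errorCodes string carried by each exception.
def runObjectChecks(obj):
    try:
        obj.checkProofOfWorkSufficient()
        obj.checkEOLSanity()
        obj.checkStream()
        obj.checkAlreadyHave()
        obj.checkObjectByType()
    except BMObjectAlreadyHaveError:
        return False                            # duplicate object; nothing more to do
    except (BMObjectInsufficientPOWError, BMObjectExpiredError,
            BMObjectUnwantedStreamError, BMObjectInvalidError) as e:
        logger.debug('Object check failed: %s', e.errorCodes)
        return False
    return True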