Changes based on style and lint checks. (final_code_quality_3)

coffeedogs 2018-10-04 15:23:01 +01:00
parent 2d4cb1e2ce
commit c875769b15
No known key found for this signature in database
GPG Key ID: 9D818C503D0B7E70
3 changed files with 102 additions and 24 deletions


@@ -8,6 +8,7 @@ import traceback
HOST = "127.0.0.1"
PORT = 8912
def sslProtocolVersion():
# sslProtocolVersion
if sys.version_info >= (2, 7, 13):
@@ -23,16 +24,19 @@ def sslProtocolVersion():
# "TLSv1.2" in < 2.7.9
return ssl.PROTOCOL_TLSv1
def sslProtocolCiphers():
if ssl.OPENSSL_VERSION_NUMBER >= 0x10100000:
return "AECDH-AES256-SHA@SECLEVEL=0"
else:
return "AECDH-AES256-SHA"
def connect():
sock = socket.create_connection((HOST, PORT))
return sock
def listen():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@@ -40,6 +44,7 @@ def listen():
sock.listen(0)
return sock
def sslHandshake(sock, server=False):
if sys.version_info >= (2, 7, 9):
context = ssl.SSLContext(sslProtocolVersion())
@@ -50,7 +55,10 @@ def sslHandshake(sock, server=False):
context.options = ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_SINGLE_ECDH_USE | ssl.OP_CIPHER_SERVER_PREFERENCE
sslSock = context.wrap_socket(sock, server_side=server, do_handshake_on_connect=False)
else:
sslSock = ssl.wrap_socket(sock, keyfile = os.path.join('src', 'sslkeys', 'key.pem'), certfile = os.path.join('src', 'sslkeys', 'cert.pem'), server_side = server, ssl_version=sslProtocolVersion(), do_handshake_on_connect=False, ciphers='AECDH-AES256-SHA')
sslSock = ssl.wrap_socket(sock, keyfile=os.path.join('src', 'sslkeys', 'key.pem'),
certfile=os.path.join('src', 'sslkeys', 'cert.pem'),
server_side=server, ssl_version=sslProtocolVersion(),
do_handshake_on_connect=False, ciphers='AECDH-AES256-SHA')
while True:
try:
@@ -69,6 +77,7 @@ def sslHandshake(sock, server=False):
print "Success!"
return sslSock
if __name__ == "__main__":
if len(sys.argv) != 2:
print "Usage: ssltest.py client|server"

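For orientation, here is a minimal sketch, not part of this commit, of how the helpers above could be driven. The real __main__ block is truncated in this diff, so the main() wrapper, the accept() call and the exit handling below are assumptions; the module-level imports (sys, socket, ssl, os) are already present in ssltest.py.

def main():
    # hypothetical driver built only from connect(), listen() and sslHandshake() above
    if len(sys.argv) != 2:
        print("Usage: ssltest.py client|server")
        sys.exit(1)
    if sys.argv[1] == "server":
        listener = listen()                    # plain TCP socket, bound and listening
        sock, _addr = listener.accept()        # assumption: the caller performs the accept
        sslHandshake(sock, server=True)        # then negotiate TLS on the accepted socket
    else:
        sslHandshake(connect(), server=False)  # client: connect, then negotiate TLS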

@@ -1,19 +1,32 @@
"""
src/network/advanceddispatcher.py
=================================
"""
# pylint: disable=attribute-defined-outside-init
import socket
import threading
import time
import asyncore_pollchoose as asyncore
import network.asyncore_pollchoose as asyncore
import state
from debug import logger
from helper_threading import BusyError, nonBlocking
import state
class ProcessingError(Exception):
"""General class for protocol parser exception, use as a base for others."""
pass
class UnknownStateError(ProcessingError):
"""Parser points to an unknown (unimplemented) state."""
pass
class AdvancedDispatcher(asyncore.dispatcher):
"""Improved version of asyncore dispatcher, with buffers and protocol state."""
# pylint: disable=too-many-instance-attributes
_buf_len = 131072 # 128kB
def __init__(self, sock=None):
@@ -31,6 +44,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
self.processingLock = threading.RLock()
def append_write_buf(self, data):
"""Append binary data to the end of stream write buffer."""
if data:
if isinstance(data, list):
with self.writeLock:
@@ -41,6 +55,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
self.write_buf.extend(data)
def slice_write_buf(self, length=0):
"""Cut the beginning of the stream write buffer."""
if length > 0:
with self.writeLock:
if length >= len(self.write_buf):
@@ -49,6 +64,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
del self.write_buf[0:length]
def slice_read_buf(self, length=0):
"""Cut the beginning of the stream read buffer."""
if length > 0:
with self.readLock:
if length >= len(self.read_buf):
@@ -57,6 +73,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
del self.read_buf[0:length]
def process(self):
"""Process (parse) data that's in the buffer, as long as there is enough data and the connection is open."""
while self.connected and not state.shutdown:
try:
with nonBlocking(self.processingLock):
@@ -68,19 +85,21 @@ class AdvancedDispatcher(asyncore.dispatcher):
cmd = getattr(self, "state_" + str(self.state))
except AttributeError:
logger.error("Unknown state %s", self.state, exc_info=True)
raise UnknownState(self.state)
raise UnknownStateError(self.state)
if not cmd():
break
except BusyError:
return False
return False
def set_state(self, state, length=0, expectBytes=0):
def set_state(self, state_str, length=0, expectBytes=0):
"""Set the next processing state."""
self.expectBytes = expectBytes
self.slice_read_buf(length)
self.state = state
self.state = state_str
def writable(self):
"""Is data from the write buffer ready to be sent to the network?"""
self.uploadChunk = AdvancedDispatcher._buf_len
if asyncore.maxUploadRate > 0:
self.uploadChunk = int(asyncore.uploadBucket)
@@ -89,6 +108,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
(self.connecting or (self.connected and self.uploadChunk > 0))
def readable(self):
"""Is the read buffer ready to accept data from the network?"""
self.downloadChunk = AdvancedDispatcher._buf_len
if asyncore.maxDownloadRate > 0:
self.downloadChunk = int(asyncore.downloadBucket)
@@ -103,6 +123,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
(self.connecting or self.accepting or (self.connected and self.downloadChunk > 0))
def handle_read(self):
"""Append incoming data to the read buffer."""
self.lastTx = time.time()
newData = self.recv(self.downloadChunk)
self.receivedBytes += len(newData)
@@ -111,6 +132,7 @@ class AdvancedDispatcher(asyncore.dispatcher):
self.read_buf.extend(newData)
def handle_write(self):
"""Send outgoing data from write buffer."""
self.lastTx = time.time()
written = self.send(self.write_buf[0:self.uploadChunk])
asyncore.update_sent(written)
@@ -118,19 +140,24 @@ class AdvancedDispatcher(asyncore.dispatcher):
self.slice_write_buf(written)
def handle_connect_event(self):
"""Callback for connection established event."""
try:
asyncore.dispatcher.handle_connect_event(self)
except socket.error as e:
if e.args[0] not in asyncore._DISCONNECTED:
if e.args[0] not in asyncore._DISCONNECTED: # pylint: disable=protected-access
raise
def handle_connect(self):
"""Method for handling connection established implementations."""
self.lastTx = time.time()
def state_close(self):
"""Signal to the processing loop to end."""
# pylint: disable=no-self-use
return False
def handle_close(self):
"""Callback for connection being closed, but can also be called directly when you want connection to close."""
with self.readLock:
self.read_buf = bytearray()
with self.writeLock:

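To illustrate the state machine these docstrings describe, here is a minimal sketch, not part of this commit, of a hypothetical subclass: process() looks up "state_" + self.state and keeps calling it while it returns True, and set_state() advances to the next state. The EchoDispatcher name and the assumption that dispatchers start in the "init" state are illustrative only; the initial state is set in the __init__ body, which this diff does not show.

class EchoDispatcher(AdvancedDispatcher):
    """Hypothetical protocol class that echoes back whatever it receives."""

    def state_init(self):
        self.set_state("echo")       # move to the next state without consuming any bytes
        return True                  # tell process() to keep going

    def state_echo(self):
        if not self.read_buf:
            return False             # not enough data yet; wait for the next handle_read()
        self.append_write_buf(bytes(self.read_buf))  # queue the received bytes for sending
        self.slice_read_buf(len(self.read_buf))      # and drop them from the read buffer
        return True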

@@ -1,44 +1,68 @@
from binascii import hexlify
"""
src/network/bmobject.py
=======================
"""
import time
import protocol
import state
from addresses import calculateInventoryHash
from debug import logger
from inventory import Inventory
from network.dandelion import Dandelion
import protocol
import state
class BMObjectInsufficientPOWError(Exception):
"""Exception indicating the object doesn't have sufficient proof of work."""
errorCodes = ("Insufficient proof of work")
class BMObjectInvalidDataError(Exception):
"""Exception indicating the data being parsed does not match the specification."""
errorCodes = ("Data invalid")
class BMObjectExpiredError(Exception):
"""Exception indicating the object's lifetime has expired."""
errorCodes = ("Object expired")
class BMObjectUnwantedStreamError(Exception):
"""Exception indicating the object is in a stream we didn't advertise as being interested in."""
errorCodes = ("Object in unwanted stream")
class BMObjectInvalidError(Exception):
"""The object's data does not match object specification."""
errorCodes = ("Invalid object")
class BMObjectAlreadyHaveError(Exception):
"""We received a duplicate object (one we already have)"""
errorCodes = ("Already have this object")
class BMObject(object):
"""Bitmessage Object as a class."""
# pylint: disable=too-many-instance-attributes
# max TTL, 28 days and 3 hours
maxTTL = 28 * 24 * 60 * 60 + 10800
# min TTL, 1 hour (in the past)
minTTL = -3600
def __init__(self, nonce, expiresTime, objectType, version, streamNumber, data, payloadOffset):
def __init__(
self,
nonce,
expiresTime,
objectType,
version,
streamNumber,
data,
payloadOffset
): # pylint: disable=too-many-arguments
self.nonce = nonce
self.expiresTime = expiresTime
self.objectType = objectType
@@ -50,29 +74,39 @@ class BMObject(object):
self.tag = self.data[payloadOffset:payloadOffset + 32]
def checkProofOfWorkSufficient(self):
"""Perform a proof of work check for sufficiency."""
# Let us check to make sure that the proof of work is sufficient.
if not protocol.isProofOfWorkSufficient(self.data):
logger.info('Proof of work is insufficient.')
raise BMObjectInsufficientPOWError()
def checkEOLSanity(self):
"""Check if object's lifetime isn't ridiculously far in the past or future."""
# EOL sanity check
if self.expiresTime - int(time.time()) > BMObject.maxTTL:
logger.info('This object\'s End of Life time is too far in the future. Ignoring it. Time is %i', self.expiresTime)
# TODO: remove from download queue
logger.info(
'This object\'s End of Life time is too far in the future. Ignoring it. Time is %i',
self.expiresTime)
# .. todo:: remove from download queue
raise BMObjectExpiredError()
if self.expiresTime - int(time.time()) < BMObject.minTTL:
logger.info('This object\'s End of Life time was too long ago. Ignoring the object. Time is %i', self.expiresTime)
# TODO: remove from download queue
logger.info(
'This object\'s End of Life time was too long ago. Ignoring the object. Time is %i',
self.expiresTime)
# .. todo:: remove from download queue
raise BMObjectExpiredError()
def checkStream(self):
"""Check if object's stream matches streams we are interested in"""
if self.streamNumber not in state.streamsInWhichIAmParticipating:
logger.debug('The streamNumber %i isn\'t one we are interested in.', self.streamNumber)
raise BMObjectUnwantedStreamError()
def checkAlreadyHave(self):
"""
Check if we already have the object (so that we don't duplicate it in inventory or advertise it unnecessarily)
"""
# if it's a stem duplicate, pretend we don't have it
if Dandelion().hasHash(self.inventoryHash):
return
@@ -80,6 +114,7 @@ class BMObject(object):
raise BMObjectAlreadyHaveError()
def checkObjectByType(self):
"""Call a object type specific check (objects can have additional checks based on their types)"""
if self.objectType == protocol.OBJECT_GETPUBKEY:
self.checkGetpubkey()
elif self.objectType == protocol.OBJECT_PUBKEY:
@@ -91,21 +126,28 @@ class BMObject(object):
# other objects don't require other types of tests
def checkMessage(self):
""""Message" object type checks."""
# pylint: disable=no-self-use
return
def checkGetpubkey(self):
""""Getpubkey" object type checks."""
if len(self.data) < 42:
logger.info('getpubkey message doesn\'t contain enough data. Ignoring.')
raise BMObjectInvalidError()
def checkPubkey(self):
""""Pubkey" object type checks."""
if len(self.data) < 146 or len(self.data) > 440: # sanity check
logger.info('pubkey object too short or too long. Ignoring.')
raise BMObjectInvalidError()
def checkBroadcast(self):
""""Broadcast" object type checks."""
if len(self.data) < 180:
logger.debug('The payload length of this broadcast packet is unreasonably low. Someone is probably trying funny business. Ignoring message.')
logger.debug(
'The payload length of this broadcast packet is unreasonably low.'
' Someone is probably trying funny business. Ignoring message.')
raise BMObjectInvalidError()
# this isn't supported anymore
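To show how these per-object checks compose, here is a minimal sketch, not part of this commit, of a hypothetical caller that runs them in order and maps the exception classes defined above onto an accept/reject decision; validate_object is an illustrative name, not an existing function.

def validate_object(obj):
    """Hypothetical helper: return True if obj, a parsed BMObject, passes every check."""
    try:
        obj.checkProofOfWorkSufficient()
        obj.checkEOLSanity()
        obj.checkStream()
        obj.checkAlreadyHave()
        obj.checkObjectByType()
    except BMObjectAlreadyHaveError:
        return False    # duplicate: already in the inventory, nothing more to do
    except (BMObjectInsufficientPOWError, BMObjectExpiredError,
            BMObjectUnwantedStreamError, BMObjectInvalidError):
        return False    # failed a check: reject the object
    return True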