use bytes as hashtable keys instead of hexlified strings

Author: Kashiko Koibumi
Date: 2024-05-16 23:45:10 +09:00
Commit: 1b3ce71f19 (parent ec91d9f20c)
GPG Key ID: 8F06E069E37C40C4 (no known key found for this signature in database)

15 changed files with 130 additions and 160 deletions
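The pattern applied throughout the diffs below: wherever an inventory hash, ripe, tag or ackdata used to be hexlified into a string before being used as a dictionary key, the raw value is now wrapped in bytes(). A minimal sketch (not taken from the diff; names are illustrative) of why the wrapper is needed: slices often arrive as bytearray or memoryview objects, which are not reliably hashable, so bytes() normalizes them into an immutable key and hex is kept only for logging.

    from binascii import hexlify

    inventory = {}

    def put(hash_id, value):
        # bytes() copies bytearray/memoryview slices into an immutable, hashable key
        inventory[bytes(hash_id)] = value

    def get(hash_id):
        return inventory[bytes(hash_id)]

    payload = bytearray(range(32))
    put(memoryview(payload)[0:32], 'object')            # store via a memoryview slice
    assert get(bytes(payload)) == 'object'              # look up via plain bytes
    print('stored', hexlify(bytes(payload)).decode())   # hex is only for logging now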

View File

@@ -1347,7 +1347,7 @@ class BMRPCDispatcher(object):
             'Broadcasting inv for msg(API disseminatePreEncryptedMsg'
             ' command): %s', hexlify(inventoryHash))
         queues.invQueue.put((toStreamNumber, inventoryHash))
-        return hexlify(inventoryHash).decode('ascii')
+        return hexlify(inventoryHash).decode()

     @command('trashSentMessageByAckData')
     def HandleTrashSentMessageByAckDAta(self, ackdata):

View File

@@ -1769,7 +1769,7 @@ class MyForm(settingsmixin.SMainWindow):
             if rect.width() > 20:
                 txt = "+"
                 fontSize = 15
-            font = QtGui.QFont(fontName, fontSize, QtGui.QFont.Bold)
+            font = QtGui.QFont(fontName, fontSize, QtGui.QFont.Weight.Bold)
             fontMetrics = QtGui.QFontMetrics(font)
             rect = fontMetrics.boundingRect(txt)
             # draw text

View File

@@ -172,10 +172,10 @@ class addressGenerator(StoppableThread):
                 config.set(address, 'payloadlengthextrabytes', str(
                     payloadLengthExtraBytes))
                 config.set(
-                    address, 'privsigningkey', privSigningKeyWIF.decode('ascii'))
+                    address, 'privsigningkey', privSigningKeyWIF.decode())
                 config.set(
                     address, 'privencryptionkey',
-                    privEncryptionKeyWIF.decode('ascii'))
+                    privEncryptionKeyWIF.decode())
                 config.save()
                 # The API and the join and create Chan functionality
@@ -325,10 +325,10 @@ class addressGenerator(StoppableThread):
                     str(payloadLengthExtraBytes))
                 config.set(
                     address, 'privsigningkey',
-                    privSigningKeyWIF.decode('ascii'))
+                    privSigningKeyWIF.decode())
                 config.set(
                     address, 'privencryptionkey',
-                    privEncryptionKeyWIF.decode('ascii'))
+                    privEncryptionKeyWIF.decode())
                 config.save()
                 queues.UISignalQueue.put((
@@ -340,14 +340,12 @@ class addressGenerator(StoppableThread):
                 shared.myECCryptorObjects[ripe] = \
                     highlevelcrypto.makeCryptor(
                         hexlify(potentialPrivEncryptionKey))
-                hex_ripe = hexlify(ripe).decode('ascii')
-                shared.myAddressesByHash[hex_ripe] = address
+                shared.myAddressesByHash[bytes(ripe)] = address
                 tag = highlevelcrypto.double_sha512(
                     encodeVarint(addressVersionNumber)
                     + encodeVarint(streamNumber) + ripe
                 )[32:]
-                hex_tag = hexlify(tag).decode('ascii')
-                shared.myAddressesByTag[hex_tag] = address
+                shared.myAddressesByTag[bytes(tag)] = address
                 if addressVersionNumber == 3:
                     # If this is a chan address,
                     # the worker thread won't send out

View File

@@ -141,10 +141,10 @@ class objectProcessor(threading.Thread):
         # bypass nonce and time, retain object type/version/stream + body
         readPosition = 16
-        hex_data = hexlify(data[readPosition:]).decode('ascii')
-        if hex_data in state.ackdataForWhichImWatching:
+        data_bytes = bytes(data[readPosition:])
+        if data_bytes in state.ackdataForWhichImWatching:
             logger.info('This object is an acknowledgement bound for me.')
-            del state.ackdataForWhichImWatching[hex_data]
+            del state.ackdataForWhichImWatching[data_bytes]
             sqlExecute(
                 "UPDATE sent SET status='ackreceived', lastactiontime=?"
                 " WHERE ackdata=?", int(time.time()), data[readPosition:])
@@ -214,24 +214,25 @@ class objectProcessor(threading.Thread):
                 return logger.debug(
                     'The length of the requested hash is not 20 bytes.'
                     ' Something is wrong. Ignoring.')
-            hex_hash = hexlify(requestedHash).decode('ascii')
             logger.info(
                 'the hash requested in this getpubkey request is: %s',
-                hex_hash)
+                hexlify(requestedHash).decode())
+            requestedHash_bytes = bytes(requestedHash)
             # if this address hash is one of mine
-            if hex_hash in shared.myAddressesByHash:
-                myAddress = shared.myAddressesByHash[hex_hash]
+            if requestedHash_bytes in shared.myAddressesByHash:
+                myAddress = shared.myAddressesByHash[requestedHash_bytes]
         elif requestedAddressVersionNumber >= 4:
             requestedTag = data[readPosition:readPosition + 32]
             if len(requestedTag) != 32:
                 return logger.debug(
                     'The length of the requested tag is not 32 bytes.'
                     ' Something is wrong. Ignoring.')
-            hex_tag = hexlify(requestedTag).decode('ascii')
             logger.debug(
-                'the tag requested in this getpubkey request is: %s', hex_tag)
-            if hex_tag in shared.myAddressesByTag:
-                myAddress = shared.myAddressesByTag[hex_tag]
+                'the tag requested in this getpubkey request is: %s',
+                hexlify(requestedTag).decode())
+            requestedTag_bytes = bytes(requestedTag)
+            if requestedTag_bytes in shared.myAddressesByTag:
+                myAddress = shared.myAddressesByTag[requestedTag_bytes]

         if myAddress == '':
             logger.info('This getpubkey request is not for any of my keys.')
@@ -421,13 +422,13 @@ class objectProcessor(threading.Thread):
                 ' Sanity check failed.')

         tag = data[readPosition:readPosition + 32]
-        hex_tag = 'tag-' + hexlify(tag).decode('ascii')
-        if hex_tag not in state.neededPubkeys:
+        tag_bytes = bytes(tag)
+        if tag_bytes not in state.neededPubkeys:
             return logger.info(
                 'We don\'t need this v4 pubkey. We didn\'t ask for it.')

         # Let us try to decrypt the pubkey
-        toAddress = state.neededPubkeys[hex_tag][0]
+        toAddress = state.neededPubkeys[tag_bytes][0]
         if protocol.decryptAndCheckPubkeyPayload(data, toAddress) == \
                 'successful':
             # At this point we know that we have been waiting on this
@@ -492,8 +493,7 @@ class objectProcessor(threading.Thread):
             # This is a message bound for me.
             # Look up my address based on the RIPE hash.
-            hex_ripe = hexlify(toRipe).decode('ascii')
-            toAddress = shared.myAddressesByHash[hex_ripe]
+            toAddress = shared.myAddressesByHash[bytes(toRipe)]

         readPosition = 0
         sendersAddressVersionNumber, sendersAddressVersionNumberLength = \
             decodeVarint(decryptedData[readPosition:readPosition + 10])
@@ -803,11 +803,11 @@ class objectProcessor(threading.Thread):
                     # of the sender's address to verify that it was
                     # encrypted by with their key rather than some
                     # other key.
-                    toRipe = unhexlify(key)
+                    toRipe = key
                     initialDecryptionSuccessful = True
                     logger.info(
                         'EC decryption successful using key associated'
-                        ' with ripe hash: %s', key)
+                        ' with ripe hash: %s', hexlify(key).decode())
                 except Exception as ex:
                     logger.debug(
                         'cryptorObject.decrypt Exception: {}'.format(ex))
@@ -820,14 +820,14 @@ class objectProcessor(threading.Thread):
         elif broadcastVersion == 5:
             embeddedTag = data[readPosition:readPosition + 32]
             readPosition += 32
-            hex_tag = hexlify(embeddedTag).decode('ascii')
-            if hex_tag not in shared.MyECSubscriptionCryptorObjects:
+            embeddedTag_bytes = bytes(embeddedTag)
+            if embeddedTag_bytes not in shared.MyECSubscriptionCryptorObjects:
                 logger.debug('We\'re not interested in this broadcast.')
                 return
             # We are interested in this broadcast because of its tag.
             # We're going to add some more data which is signed further down.
             signedData = bytes(data[8:readPosition])
-            cryptorObject = shared.MyECSubscriptionCryptorObjects[hex_tag]
+            cryptorObject = shared.MyECSubscriptionCryptorObjects[embeddedTag_bytes]
             try:
                 decryptedData = cryptorObject.decrypt(data[readPosition:])
                 logger.debug('EC decryption successful')
@@ -1011,9 +1011,9 @@ class objectProcessor(threading.Thread):
             encodeVarint(addressVersion) + encodeVarint(streamNumber)
             + ripe
         )[32:]
-        hex_tag = 'tag-' + hexlify(tag).decode('ascii')
-        if hex_tag in state.neededPubkeys:
-            del state.neededPubkeys[hex_tag]
+        tag_bytes = bytes(tag)
+        if tag_bytes in state.neededPubkeys:
+            del state.neededPubkeys[tag_bytes]
         self.sendMessages(address)

     @staticmethod
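One detail worth noting in the hunks above: the old code prefixed hex tags with 'tag-' before using them as state.neededPubkeys keys, while plain address strings live in the same dict. With raw bytes keys the prefix is gone; a str address and a bytes tag can never compare equal, so the two kinds of entries still occupy separate namespaces. A rough sketch of that idea (the address and cryptor values are placeholders, not real data):

    neededPubkeys = {}

    toAddress = 'BM-exampleAddress'        # placeholder address string
    tag = b'\x00' * 32                     # placeholder 32-byte tag

    neededPubkeys[toAddress] = (toAddress, None)    # keyed by the address itself
    neededPubkeys[bytes(tag)] = (toAddress, None)   # keyed by the raw tag bytes

    assert toAddress in neededPubkeys
    assert bytes(tag) in neededPubkeys
    assert len(neededPubkeys) == 2         # str and bytes keys never collide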

View File

@@ -87,8 +87,7 @@ class singleWorker(StoppableThread):
                 tag = doubleHashOfAddressData[32:]
                 # We'll need this for when we receive a pubkey reply:
                 # it will be encrypted and we'll need to decrypt it.
-                hex_tag = 'tag-' + hexlify(tag).decode('ascii')
-                state.neededPubkeys[hex_tag] = (
+                state.neededPubkeys[bytes(tag)] = (
                     toAddress,
                     highlevelcrypto.makeCryptor(
                         hexlify(privEncryptionKey))
@@ -99,23 +98,20 @@ class singleWorker(StoppableThread):
             '''SELECT ackdata FROM sent WHERE status = 'msgsent' AND folder = 'sent' ''')
         for row in queryreturn:
             ackdata, = row
-            self.logger.info('Watching for ackdata %s', hexlify(ackdata))
-            hex_ackdata = hexlify(ackdata).decode('ascii')
-            state.ackdataForWhichImWatching[hex_ackdata] = 0
+            self.logger.info('Watching for ackdata %s', hexlify(ackdata).decode())
+            state.ackdataForWhichImWatching[bytes(ackdata)] = 0

         # Fix legacy (headerless) watched ackdata to include header
-        for hex_oldack in state.ackdataForWhichImWatching:
-            oldack = unhexlify(hex_oldack)
+        for oldack in state.ackdataForWhichImWatching:
             if len(oldack) == 32:
                 # attach legacy header, always constant (msg/1/1)
                 newack = b'\x00\x00\x00\x02\x01\x01' + oldack
-                hex_newack = hexlify(newack).decode('ascii')
-                state.ackdataForWhichImWatching[hex_newack] = 0
+                state.ackdataForWhichImWatching[bytes(newack)] = 0
                 sqlExecute(
                     '''UPDATE sent SET ackdata=? WHERE ackdata=? AND folder = 'sent' ''',
                     newack, oldack
                 )
-                del state.ackdataForWhichImWatching[hex_oldack]
+                del state.ackdataForWhichImWatching[oldack]

         # For the case if user deleted knownnodes
         # but is still having onionpeer objects in inventory
@@ -701,7 +697,7 @@ class singleWorker(StoppableThread):
                 ackdata,
                 tr._translate(
                     "MainWindow",
-                    "Broadcast sent on %1"
+                    "Broadcast sent on {0}"
                 ).format(l10n.formatTimestamp()))
         ))
@@ -798,9 +794,9 @@ class singleWorker(StoppableThread):
                     encodeVarint(toAddressVersionNumber)
                     + encodeVarint(toStreamNumber) + toRipe
                 )[32:]
-                hex_tag = 'tag-' + hexlify(toTag).decode('ascii')
+                toTag_bytes = bytes(toTag)
                 if toaddress in state.neededPubkeys or \
-                        hex_tag in state.neededPubkeys:
+                        toTag_bytes in state.neededPubkeys:
                     # We already sent a request for the pubkey
                     sqlExecute(
                         '''UPDATE sent SET status='awaitingpubkey', '''
@@ -841,8 +837,8 @@ class singleWorker(StoppableThread):
                     privEncryptionKey = doubleHashOfToAddressData[:32]
                     # The second half of the sha512 hash.
                     tag = doubleHashOfToAddressData[32:]
-                    hex_tag = 'tag-' + hexlify(tag).decode('ascii')
-                    state.neededPubkeys[hex_tag] = (
+                    tag_bytes = bytes(tag)
+                    state.neededPubkeys[tag_bytes] = (
                         toaddress,
                         highlevelcrypto.makeCryptor(
                             hexlify(privEncryptionKey))
@@ -865,7 +861,7 @@ class singleWorker(StoppableThread):
                             ''' status='doingpubkeypow') AND '''
                             ''' folder='sent' ''',
                             toaddress)
-                        del state.neededPubkeys[hex_tag]
+                        del state.neededPubkeys[tag_bytes]
                         break
                     # else:
                     #     There was something wrong with this
@@ -907,8 +903,7 @@ class singleWorker(StoppableThread):
             # if we aren't sending this to ourselves or a chan
             if not config.has_section(toaddress):
-                hex_ackdata = hexlify(ackdata).decode('ascii')
-                state.ackdataForWhichImWatching[hex_ackdata] = 0
+                state.ackdataForWhichImWatching[bytes(ackdata)] = 0
                 queues.UISignalQueue.put((
                     'updateSentItemStatusByAckdata', (
                         ackdata,
@@ -976,7 +971,7 @@ class singleWorker(StoppableThread):
                             " device who requests that the"
                             " destination be included in the"
                             " message but this is disallowed in"
-                            " your settings. %1"
+                            " your settings. {0}"
                         ).format(l10n.formatTimestamp()))
                 ))
                 # if the human changes their setting and then
@@ -1315,7 +1310,7 @@ class singleWorker(StoppableThread):
                     ackdata,
                     tr._translate(
                         "MainWindow",
-                        "Message sent. Sent at %1"
+                        "Message sent. Sent at {0}"
                     ).format(l10n.formatTimestamp()))))
         else:
             # not sending to a chan or one of my addresses
@@ -1418,11 +1413,11 @@ class singleWorker(StoppableThread):
             privEncryptionKey = doubleHashOfAddressData[:32]
             # Note that this is the second half of the sha512 hash.
             tag = doubleHashOfAddressData[32:]
-            hex_tag = 'tag-' + hexlify(tag).decode('ascii')
-            if hex_tag not in state.neededPubkeys:
+            tag_bytes = bytes(tag)
+            if tag_bytes not in state.neededPubkeys:
                 # We'll need this for when we receive a pubkey reply:
                 # it will be encrypted and we'll need to decrypt it.
-                state.neededPubkeys[hex_tag] = (
+                state.neededPubkeys[tag_bytes] = (
                     toAddress,
                     highlevelcrypto.makeCryptor(hexlify(privEncryptionKey))
                 )
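The legacy-ackdata fix-up above keeps working with bytes keys: 32-byte headerless ackdata gets the constant msg/1/1 header prepended before it is re-registered. A compact sketch of that normalization, assuming a plain dict stands in for state.ackdataForWhichImWatching:

    watching = {}   # stands in for state.ackdataForWhichImWatching

    def watch(ackdata):
        ackdata = bytes(ackdata)
        if len(ackdata) == 32:
            # legacy, headerless ackdata: attach the constant msg/1/1 header
            ackdata = b'\x00\x00\x00\x02\x01\x01' + ackdata
        watching[ackdata] = 0

    watch(bytearray(b'\x11' * 32))
    assert b'\x00\x00\x00\x02\x01\x01' + b'\x11' * 32 in watching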

View File

@@ -9,7 +9,6 @@ import re
 import socket
 import struct
 import time
-from binascii import hexlify

 # magic imports!
 import addresses
@@ -111,16 +110,16 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
                 b"error", b"version", b"verack"):
             logger.error(
                 'Received command %s before connection was fully'
-                ' established, ignoring', self.command.decode('ascii', 'backslashreplace'))
+                ' established, ignoring', self.command.decode('utf-8', 'replace'))
             self.invalid = True
         if not self.invalid:
             try:
                 retval = getattr(
-                    self, "bm_command_" + self.command.decode('ascii', 'backslashreplace').lower())()
+                    self, "bm_command_" + self.command.decode('utf-8', 'replace').lower())()
             except AttributeError as err:
-                logger.debug('command = {}, err = {}'.format(self.command, err))
+                logger.debug('command = {}, err = {}'.format(self.command.decode('utf-8', 'replace'), err))
                 # unimplemented command
-                logger.debug('unimplemented command %s', self.command.decode('ascii', 'backslashreplace'))
+                logger.debug('unimplemented command %s', self.command.decode('utf-8', 'replace'))
             except BMProtoInsufficientDataError:
                 logger.debug('packet length too short, skipping')
             except BMProtoExcessiveDataError:
@@ -143,8 +142,8 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
                 # broken read, ignore
                 pass
             else:
-                logger.debug('Closing due to invalid command %s', self.command.decode('ascii', 'backslashreplace'))
-                self.close_reason = "Invalid command %s" % self.command.decode('ascii', 'backslashreplace')
+                logger.debug('Closing due to invalid command %s', self.command.decode('utf-8', 'replace'))
+                self.close_reason = "Invalid command %s" % self.command.decode('utf-8', 'replace')
                 self.set_state("close")
                 return False
         if retval:
@@ -417,8 +416,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
             BMProto.stopDownloadingObject(self.object.inventoryHash, True)
         else:
             try:
-                hex_hash = hexlify(self.object.inventoryHash).decode('ascii')
-                del missingObjects[hex_hash]
+                del missingObjects[bytes(self.object.inventoryHash)]
             except KeyError:
                 pass
@@ -446,16 +444,12 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
         """Incoming addresses, process them"""
         # not using services
         for seenTime, stream, _, ip, port in self._decode_addr():
-            if (stream not in state.streamsInWhichIAmParticipating):
+            if (
+                stream not in state.streamsInWhichIAmParticipating
+                # FIXME: should check against complete list
+                or ip.decode('utf-8', 'replace').startswith('bootstrap')
+            ):
                 continue
-            try:
-                if (
-                    # FIXME: should check against complete list
-                    ip.decode('ascii', 'backslashreplace').startswith('bootstrap')
-                ):
-                    continue
-            except UnicodeDecodeError:
-                pass
             decodedIP = protocol.checkIPAddress(ip)
             if (
                 decodedIP and time.time() - seenTime > 0
@@ -532,7 +526,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
         logger.debug(
             'remote node incoming address: %s:%i',
             self.destination.host, self.peerNode.port)
-        logger.debug('user agent: %s', self.userAgent.decode('utf-8', 'backslashreplace'))
+        logger.debug('user agent: %s', self.userAgent.decode('utf-8', 'replace'))
         logger.debug('streams: [%s]', ','.join(map(str, self.streams)))
         if not self.peerValidityChecks():
             # ABORT afterwards
@@ -540,7 +534,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
         self.append_write_buf(protocol.CreatePacket(b'verack'))
         self.verackSent = True
         ua_valid = re.match(
-            r'^/[a-zA-Z]+:[0-9]+\.?[\w\s\(\)\./:;-]*/$', self.userAgent.decode('utf-8', 'backslashreplace'))
+            r'^/[a-zA-Z]+:[0-9]+\.?[\w\s\(\)\./:;-]*/$', self.userAgent.decode('utf-8', 'replace'))
         if not ua_valid:
             self.userAgent = b'/INVALID:0/'
         if not self.isOutbound:
@@ -659,8 +653,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
             except KeyError:
                 pass
             try:
-                hex_hash = hexlify(hashId).decode('ascii')
-                del missingObjects[hex_hash]
+                del missingObjects[bytes(hashId)]
             except KeyError:
                 pass
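The decode('utf-8', 'replace') calls above make sure a malformed command field can still be logged and dispatched without raising. A stripped-down sketch of the dispatch pattern (class and handler names here are illustrative, not the real BMProto API):

    class Proto(object):
        def bm_command_ping(self):
            return True

        def dispatch(self, command):
            # decode defensively: undecodable bytes become U+FFFD instead of raising
            name = command.rstrip(b'\x00').decode('utf-8', 'replace').lower()
            try:
                handler = getattr(self, 'bm_command_' + name)
            except AttributeError:
                print('unimplemented command %s' % name)
                return False
            return handler()

    assert Proto().dispatch(b'ping\x00\x00\x00\x00\x00\x00\x00\x00') is True
    assert Proto().dispatch(b'\xffbogus') is False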

View File

@@ -6,7 +6,7 @@ from collections import namedtuple
 from random import choice, expovariate, sample
 from threading import RLock
 from time import time
-from binascii import hexlify, unhexlify
+from binascii import hexlify

 import network.connectionpool as connectionpool
 import state
@@ -53,8 +53,7 @@ class Dandelion: # pylint: disable=old-style-class
         if not state.dandelion_enabled:
             return
         with self.lock:
-            hex_hash = hexlify(hashId).decode('ascii')
-            self.hashMap[hex_hash] = Stem(
+            self.hashMap[bytes(hashId)] = Stem(
                 self.getNodeStem(source),
                 stream,
                 self.poissonTimeout())
@@ -65,34 +64,31 @@ class Dandelion: # pylint: disable=old-style-class
        include streams, we only learn this after receiving the object)
        """
        with self.lock:
-            hex_hash = hexlify(hashId).decode('ascii')
-            if hex_hash in self.hashMap:
-                self.hashMap[hex_hash] = Stem(
-                    self.hashMap[hex_hash].child,
+            hashId_bytes = bytes(hashId)
+            if hashId_bytes in self.hashMap:
+                self.hashMap[hashId_bytes] = Stem(
+                    self.hashMap[hashId_bytes].child,
                    stream,
                    self.poissonTimeout())

    def removeHash(self, hashId, reason="no reason specified"):
        """Switch inventory vector from stem to fluff mode"""
-        hex_hash = hexlify(hashId).decode('ascii')
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug(
-                '%s entering fluff mode due to %s.', hex_hash, reason)
+                '%s entering fluff mode due to %s.', hexlify(hashId).decode(), reason)
        with self.lock:
            try:
-                del self.hashMap[hex_hash]
+                del self.hashMap[bytes(hashId)]
            except KeyError:
                pass

    def hasHash(self, hashId):
        """Is inventory vector in stem mode?"""
-        hex_hash = hexlify(hashId).decode('ascii')
-        return hex_hash in self.hashMap
+        return bytes(hashId) in self.hashMap

    def objectChildStem(self, hashId):
        """Child (i.e. next) node for an inventory vector during stem mode"""
-        hex_hash = hexlify(hashId).decode('ascii')
-        return self.hashMap[hex_hash].child
+        return self.hashMap[bytes(hashId)].child
@@ -112,7 +108,7 @@ class Dandelion: # pylint: disable=old-style-class
             }.items():
                 self.hashMap[k] = Stem(
                     connection, v.stream, self.poissonTimeout())
-                invQueue.put((v.stream, unhexlify(k), v.child))
+                invQueue.put((v.stream, k, v.child))

     def maybeRemoveStem(self, connection):
         """
@@ -173,7 +169,7 @@ class Dandelion: # pylint: disable=old-style-class
         with self.lock:
             deadline = time()
             toDelete = [
-                [v.stream, unhexlify(k), v.child] for k, v in self.hashMap.items()
+                [v.stream, k, v.child] for k, v in self.hashMap.items()
                 if v.timeout < deadline
             ]
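In the Dandelion bookkeeping above, hashMap is now keyed by the raw inventory hash, so entries can be handed straight to invQueue without an unhexlify round-trip. A simplified sketch of that mapping and the expiry sweep (the Stem fields mirror the ones used above; timings are arbitrary):

    from collections import namedtuple
    from time import time

    Stem = namedtuple('Stem', ['child', 'stream', 'timeout'])
    hashMap = {}

    def addHash(hashId, child, stream, timeout=30):
        hashMap[bytes(hashId)] = Stem(child, stream, time() + timeout)

    def expire():
        deadline = time()
        # each entry goes back out with its raw hash, no unhexlify needed
        return [(v.stream, k, v.child)
                for k, v in hashMap.items() if v.timeout < deadline]

    addHash(bytearray(32), child=None, stream=1, timeout=-1)   # already expired
    assert expire() == [(1, bytes(32), None)]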

View File

@@ -9,7 +9,6 @@ import protocol
 import network.connectionpool as connectionpool
 from .objectracker import missingObjects
 from .threads import StoppableThread
-from binascii import hexlify


 class DownloadThread(StoppableThread):
@@ -68,8 +67,7 @@ class DownloadThread(StoppableThread):
                     continue
                 payload.extend(chunk)
                 chunkCount += 1
-                hex_chunk = hexlify(chunk).decode('ascii')
-                missingObjects[hex_chunk] = now
+                missingObjects[bytes(chunk)] = now
             if not chunkCount:
                 continue
             payload[0:0] = addresses.encodeVarint(chunkCount)

View File

@@ -3,7 +3,6 @@ Module for tracking objects
 """
 import time
 from threading import RLock
-from binascii import hexlify

 import state
 import network.connectionpool as connectionpool
@@ -82,27 +81,28 @@ class ObjectTracker(object):
     def hasObj(self, hashid):
         """Do we already have object?"""
+        hashid_bytes = bytes(hashid)
         if haveBloom:
-            return hashid in self.invBloom
-        return hashid in self.objectsNewToMe
+            return hashid_bytes in self.invBloom
+        return hashid_bytes in self.objectsNewToMe

     def handleReceivedInventory(self, hashId):
         """Handling received inventory"""
-        hex_hash = hexlify(hashId).decode('ascii')
+        hashId_bytes = bytes(hashId)
         if haveBloom:
-            self.invBloom.add(hex_hash)
+            self.invBloom.add(hashId_bytes)
         try:
             with self.objectsNewToThemLock:
-                del self.objectsNewToThem[hex_hash]
+                del self.objectsNewToThem[hashId_bytes]
         except KeyError:
             pass
-        if hex_hash not in missingObjects:
-            missingObjects[hex_hash] = time.time()
+        if hashId_bytes not in missingObjects:
+            missingObjects[hashId_bytes] = time.time()
         self.objectsNewToMe[hashId] = True

     def handleReceivedObject(self, streamNumber, hashid):
         """Handling received object"""
-        hex_hash = hexlify(hashid).decode('ascii')
+        hashid_bytes = bytes(hashid)
         for i in connectionpool.pool.connections():
             if not i.fullyEstablished:
                 continue
@@ -113,7 +113,7 @@ class ObjectTracker(object):
                         not state.Dandelion.hasHash(hashid)
                         or state.Dandelion.objectChildStem(hashid) == i):
                     with i.objectsNewToThemLock:
-                        i.objectsNewToThem[hex_hash] = time.time()
+                        i.objectsNewToThem[hashid_bytes] = time.time()
                     # update stream number,
                     # which we didn't have when we just received the dinv
                     # also resets expiration of the stem mode
@@ -122,7 +122,7 @@ class ObjectTracker(object):
             if i == self:
                 try:
                     with i.objectsNewToThemLock:
-                        del i.objectsNewToThem[hex_hash]
+                        del i.objectsNewToThem[hashid_bytes]
                 except KeyError:
                     pass
         self.objectsNewToMe.setLastObject()
@@ -136,4 +136,4 @@ class ObjectTracker(object):
     def addAddr(self, hashid):
         """WIP, should be moved to addrthread.py or removed"""
         if haveBloom:
-            self.addrBloom.add(hashid)
+            self.addrBloom.add(bytes(hashid))

View File

@@ -487,8 +487,7 @@ def decryptAndCheckPubkeyPayload(data, address):
         encryptedData = data[readPosition:]

         # Let us try to decrypt the pubkey
-        hex_tag = 'tag-' + hexlify(tag).decode('ascii')
-        toAddress, cryptorObject = state.neededPubkeys[hex_tag]
+        toAddress, cryptorObject = state.neededPubkeys[bytes(tag)]
         if toAddress != address:
             logger.critical(
                 'decryptAndCheckPubkeyPayload failed due to toAddress'

View File

@@ -39,12 +39,10 @@ class RandomTrackingDict(object):
         return self.len

     def __contains__(self, key):
-        hex_key = hexlify(key).decode('ascii')
-        return hex_key in self.dictionary
+        return bytes(key) in self.dictionary

     def __getitem__(self, key):
-        hex_key = hexlify(key).decode('ascii')
-        return self.dictionary[hex_key][1]
+        return self.dictionary[bytes(key)][1]

     def _swap(self, i1, i2):
         with self.lock:
@@ -52,30 +50,28 @@ class RandomTrackingDict(object):
             key2 = self.indexDict[i2]
             self.indexDict[i1] = key2
             self.indexDict[i2] = key1
-            hex_key1 = hexlify(key1).decode('ascii')
-            hex_key2 = hexlify(key2).decode('ascii')
-            self.dictionary[hex_key1][0] = i2
-            self.dictionary[hex_key2][0] = i1
+            self.dictionary[bytes(key1)][0] = i2
+            self.dictionary[bytes(key2)][0] = i1
             # for quick reassignment
             return i2

     def __setitem__(self, key, value):
         with self.lock:
-            hex_key = hexlify(key).decode('ascii')
-            if hex_key in self.dictionary:
-                self.dictionary[hex_key][1] = value
+            key_bytes = bytes(key)
+            if key_bytes in self.dictionary:
+                self.dictionary[key_bytes][1] = value
             else:
                 self.indexDict.append(key)
-                self.dictionary[hex_key] = [self.len, value]
+                self.dictionary[key_bytes] = [self.len, value]
                 self._swap(self.len, self.len - self.pendingLen)
                 self.len += 1

     def __delitem__(self, key):
-        hex_key = hexlify(key).decode('ascii')
-        if hex_key not in self.dictionary:
+        key_bytes = bytes(key)
+        if key_bytes not in self.dictionary:
             raise KeyError
         with self.lock:
-            index = self.dictionary[hex_key][0]
+            index = self.dictionary[key_bytes][0]
             # not pending
             if index < self.len - self.pendingLen:
                 # left of pending part
@@ -89,7 +85,7 @@ class RandomTrackingDict(object):
             # operation can improve 4x, but it's already very fast so we'll
             # ignore it for the time being
             del self.indexDict[-1]
-            del self.dictionary[hex_key]
+            del self.dictionary[key_bytes]
             self.len -= 1

     def setMaxPending(self, maxPending):
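RandomTrackingDict above keeps two structures in sync: dictionary, keyed by bytes(key), and indexDict, which stores the original key objects by position for random sampling. A toy reduction of that layout (deliberately much simpler than the real class) showing that bytes and bytearray lookups hit the same entry:

    class TinyTrackingDict(object):
        def __init__(self):
            self.dictionary = {}   # bytes(key) -> [position, value]
            self.indexDict = []    # position   -> key as originally inserted

        def __setitem__(self, key, value):
            key_bytes = bytes(key)
            if key_bytes in self.dictionary:
                self.dictionary[key_bytes][1] = value
            else:
                self.indexDict.append(key)
                self.dictionary[key_bytes] = [len(self.indexDict) - 1, value]

        def __getitem__(self, key):
            return self.dictionary[bytes(key)][1]

    d = TinyTrackingDict()
    d[bytearray(b'\xaa' * 32)] = 'payload'
    assert d[b'\xaa' * 32] == 'payload'    # bytes and bytearray resolve to one entry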

View File

@@ -114,13 +114,11 @@ def reloadMyAddressHashes():
             if len(privEncryptionKey) == 64:
                 myECCryptorObjects[hashobj] = \
                     highlevelcrypto.makeCryptor(privEncryptionKey)
-                hex_hash = hexlify(hashobj).decode('ascii')
-                myAddressesByHash[hex_hash] = addressInKeysFile
+                myAddressesByHash[bytes(hashobj)] = addressInKeysFile
                 tag = highlevelcrypto.double_sha512(
                     encodeVarint(addressVersionNumber)
                     + encodeVarint(streamNumber) + hashobj)[32:]
-                hex_tag = hexlify(tag).decode('ascii')
-                myAddressesByTag[hex_tag] = addressInKeysFile
+                myAddressesByTag[bytes(tag)] = addressInKeysFile

     if not keyfileSecure:
         fixSensitiveFilePermissions(os.path.join(
@@ -151,8 +149,7 @@ def reloadBroadcastSendersForWhichImWatching():
                 encodeVarint(addressVersionNumber)
                 + encodeVarint(streamNumber) + hashobj
             ).digest()[:32]
-            hex_hash = hexlify(hashobj).decode('ascii')
-            MyECSubscriptionCryptorObjects[hex_hash] = \
+            MyECSubscriptionCryptorObjects[bytes(hashobj)] = \
                 highlevelcrypto.makeCryptor(hexlify(privEncryptionKey))
         else:
             doubleHashOfAddressData = highlevelcrypto.double_sha512(
@@ -161,8 +158,7 @@ def reloadBroadcastSendersForWhichImWatching():
             )
             tag = doubleHashOfAddressData[32:]
             privEncryptionKey = doubleHashOfAddressData[:32]
-            hex_tag = hexlify(tag).decode('ascii')
-            MyECSubscriptionCryptorObjects[hex_tag] = \
+            MyECSubscriptionCryptorObjects[bytes(tag)] = \
                 highlevelcrypto.makeCryptor(hexlify(privEncryptionKey))

View File

@@ -70,7 +70,7 @@ class FilesystemInventory(InventoryStorage):
             os.makedirs(os.path.join(
                 self.baseDir,
                 FilesystemInventory.objectDir,
-                hexlify(hashval).decode('ascii')))
+                hexlify(hashval).decode()))
         except OSError:
             pass
         try:
@@ -78,7 +78,7 @@ class FilesystemInventory(InventoryStorage):
                 os.path.join(
                     self.baseDir,
                     FilesystemInventory.objectDir,
-                    hexlify(hashval).decode('ascii'),
+                    hexlify(hashval).decode(),
                     FilesystemInventory.metadataFilename,
                 ),
                 "w",
@@ -87,12 +87,12 @@ class FilesystemInventory(InventoryStorage):
                     value.type,
                     value.stream,
                     value.expires,
-                    hexlify(value.tag).decode('ascii')))
+                    hexlify(value.tag).decode()))
             with open(
                 os.path.join(
                     self.baseDir,
                     FilesystemInventory.objectDir,
-                    hexlify(hashval).decode('ascii'),
+                    hexlify(hashval).decode(),
                     FilesystemInventory.dataFilename,
                 ),
                 "wb",
@@ -119,7 +119,7 @@ class FilesystemInventory(InventoryStorage):
                 os.path.join(
                     self.baseDir,
                     FilesystemInventory.objectDir,
-                    hexlify(hashval).decode('ascii'),
+                    hexlify(hashval).decode(),
                     FilesystemInventory.metadataFilename))
         except IOError:
             pass
@@ -128,7 +128,7 @@ class FilesystemInventory(InventoryStorage):
                 os.path.join(
                     self.baseDir,
                     FilesystemInventory.objectDir,
-                    hexlify(hashval).decode('ascii'),
+                    hexlify(hashval).decode(),
                     FilesystemInventory.dataFilename))
         except IOError:
             pass
@@ -136,7 +136,7 @@ class FilesystemInventory(InventoryStorage):
             os.rmdir(os.path.join(
                 self.baseDir,
                 FilesystemInventory.objectDir,
-                hexlify(hashval).decode('ascii')))
+                hexlify(hashval).decode()))
         except IOError:
             pass
@@ -186,7 +186,7 @@ class FilesystemInventory(InventoryStorage):
                 os.path.join(
                     self.baseDir,
                     FilesystemInventory.objectDir,
-                    hexlify(hashId).decode('ascii'),
+                    hexlify(hashId).decode(),
                     FilesystemInventory.dataFilename,
                 ),
                 "r",
@@ -202,7 +202,7 @@ class FilesystemInventory(InventoryStorage):
                 os.path.join(
                     self.baseDir,
                     FilesystemInventory.objectDir,
-                    hexlify(hashId).decode('ascii'),
+                    hexlify(hashId).decode(),
                     FilesystemInventory.metadataFilename,
                 ),
                 "r",

View File

@@ -4,7 +4,6 @@ Sqlite Inventory
 import sqlite3
 import time
 from threading import RLock
-from binascii import hexlify, unhexlify

 from helper_sql import SqlBulkExecute, sqlExecute, sqlQuery
 from .storage import InventoryItem, InventoryStorage
@@ -30,21 +29,21 @@ class SqliteInventory(InventoryStorage):
     def __contains__(self, hash_):
         with self.lock:
-            hex_hash = hexlify(hash_).decode('ascii')
-            if hex_hash in self._objects:
+            hash_bytes = bytes(hash_)
+            if hash_bytes in self._objects:
                 return True
             rows = sqlQuery(
                 'SELECT streamnumber FROM inventory WHERE hash=?', hash_)
             if not rows:
                 return False
-            self._objects[hex_hash] = rows[0][0]
+            self._objects[hash_bytes] = rows[0][0]
             return True

     def __getitem__(self, hash_):
         with self.lock:
-            hex_hash = hexlify(hash_).decode('ascii')
-            if hex_hash in self._inventory:
-                return self._inventory[hex_hash]
+            hash_bytes = bytes(hash_)
+            if hash_bytes in self._inventory:
+                return self._inventory[hash_bytes]
             rows = sqlQuery(
                 'SELECT objecttype, streamnumber, payload, expirestime, tag'
                 ' FROM inventory WHERE hash=?', hash_)
@@ -55,16 +54,16 @@ class SqliteInventory(InventoryStorage):
     def __setitem__(self, hash_, value):
         with self.lock:
             value = InventoryItem(*value)
-            hex_hash = hexlify(hash_).decode('ascii')
-            self._inventory[hex_hash] = value
-            self._objects[hex_hash] = value.stream
+            hash_bytes = bytes(hash_)
+            self._inventory[hash_bytes] = value
+            self._objects[hash_bytes] = value.stream

     def __delitem__(self, hash_):
         raise NotImplementedError

     def __iter__(self):
         with self.lock:
-            hashes = map(unhexlify, self._inventory.keys()[:])
+            hashes = [] + self._inventory.keys()
             hashes += (x for x, in sqlQuery('SELECT hash FROM inventory'))
             return hashes.__iter__()
@@ -96,7 +95,7 @@ class SqliteInventory(InventoryStorage):
         """Return unexpired inventory vectors filtered by stream"""
         with self.lock:
             t = int(time.time())
-            hashes = [unhexlify(x) for x, value in self._inventory.items()
+            hashes = [x for x, value in self._inventory.items()
                       if value.stream == stream and value.expires > t]
             hashes += (payload for payload, in sqlQuery(
                 'SELECT hash FROM inventory WHERE streamnumber=?'
@@ -112,7 +111,7 @@ class SqliteInventory(InventoryStorage):
             for objectHash, value in self._inventory.items():
                 sql.execute(
                     'INSERT INTO inventory VALUES (?, ?, ?, ?, ?, ?)',
-                    unhexlify(objectHash), *value)
+                    objectHash, *value)
             self._inventory.clear()

     def clean(self):
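__contains__ above is a read-through cache: the in-memory _objects map (now keyed by bytes) is checked first, and a hit coming back from SQLite is cached under the same bytes key. A reduced sketch of that flow, with sqlQuery stubbed out rather than the real helper:

    _objects = {}

    def sqlQuery(query, *args):
        return []   # stub standing in for the real SQL helper

    def contains(hash_):
        hash_bytes = bytes(hash_)
        if hash_bytes in _objects:
            return True
        rows = sqlQuery('SELECT streamnumber FROM inventory WHERE hash=?', hash_)
        if not rows:
            return False
        _objects[hash_bytes] = rows[0][0]   # cache the stream number for next time
        return True

    assert contains(bytearray(32)) is False   # stub DB: nothing is ever found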

View File

@@ -82,12 +82,12 @@ class TestAPIThread(TestPartialRun):
         proofofwork.init()
         self.assertEqual(
             unhexlify(self.api.disseminatePreparedObject(
-                hexlify(sample_object_data).decode('ascii'))),
+                hexlify(sample_object_data).decode())),
             calculateInventoryHash(sample_object_data))

         update_object = b'\x00' * 8 + pack(
             '>Q', int(time.time() + 7200)) + sample_object_data[16:]
         invhash = unhexlify(self.api.disseminatePreEncryptedMsg(
-            hexlify(update_object).decode('ascii')
+            hexlify(update_object).decode()
         ))
         obj_type, obj_stream, obj_data = state.Inventory[invhash][:3]
         self.assertEqual(obj_type, 42)