Added changes for inventory and inbox issues
This commit is contained in:
parent 17971d6860
commit eb2b948576
@@ -22,12 +22,16 @@ def search_sql(
sqlStatementBase = '''SELECT label, address From addressbook '''
else:
sqlStatementBase = (
'''SELECT folder, msgid, toaddress, message, fromaddress,'''
'''SELECT folder, toaddress, message, fromaddress,'''
''' subject, received, read FROM inbox '''
)
sqlStatementParts = []
sqlArguments = []
if account is not None:
#xAddress = 'toaddress'
#where = ['subject', 'message']
#what = None
#unreadOnly = False
if xAddress == 'both':
sqlStatementParts.append("(fromaddress = ? OR toaddress = ?)")
sqlArguments.append(account)
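The hunk above narrows the inbox SELECT (dropping msgid) and builds the query from a base statement plus WHERE fragments and bound arguments. A minimal, self-contained sketch of that assembly pattern, with illustrative names rather than the real module's helpers:

    # Sketch: compose a parameterized inbox search from parts and arguments.
    def build_inbox_search(account=None, what=None):
        sql = ("SELECT folder, toaddress, message, fromaddress,"
               " subject, received, read FROM inbox ")
        parts, args = [], []
        if account is not None:
            # match either direction of the conversation
            parts.append("(fromaddress = ? OR toaddress = ?)")
            args.extend([account, account])
        if what is not None:
            parts.append("(subject LIKE ? OR message LIKE ?)")
            args.extend(["%" + what + "%", "%" + what + "%"])
        if parts:
            sql += "WHERE " + " AND ".join(parts)
        return sql, args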
@@ -48,7 +48,7 @@ class objectProcessor(threading.Thread):
def __init__(self):
threading.Thread.__init__(self, name="objectProcessor")
random.seed()
# It may be the case that the last time Bitmessage was running,
# It may be the case that the last time Bitmes0sage was running,
# the user closed it before it finished processing everything in the
# objectProcessorQueue. Assuming that Bitmessage wasn't closed
# forcefully, it should have saved the data in the queue into the

@@ -69,9 +69,7 @@ class objectProcessor(threading.Thread):
"""Process the objects from `.queues.objectProcessorQueue`"""
while True:
objectType, data = queues.objectProcessorQueue.get()
self.checkackdata(data)
try:
if objectType == protocol.OBJECT_GETPUBKEY:
self.processgetpubkey(data)

@@ -236,8 +234,9 @@ class objectProcessor(threading.Thread):
logger.debug(
'the tag requested in this getpubkey request is: %s',
hexlify(requestedTag))
# import pdb;pdb.set_trace()
if bytes(requestedTag) in shared.myAddressesByTag:
myAddress = shared.myAddressesByTag[requestedTag]
myAddress = shared.myAddressesByTag[bytes(requestedTag)]
if myAddress == '':
logger.info('This getpubkey request is not for any of my keys.')

@@ -436,18 +435,21 @@ class objectProcessor(threading.Thread):
return
tag = data[readPosition:readPosition + 32]
if tag not in bytes(state.neededPubkeys):
if bytes(tag) not in state.neededPubkeys:
logger.info(
'We don\'t need this v4 pubkey. We didn\'t ask for it.')
return
# Let us try to decrypt the pubkey
toAddress, _ = state.neededPubkeys[tag]
if protocol.decryptAndCheckPubkeyPayload(data, toAddress) == \
print("TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT#################################################", tag)
toAddress, _ = state.neededPubkeys[bytes(tag)] #check with py2
# import pdb;pdb.set_trace()
if protocol.decryptAndCheckPubkeyPayload(bytes(data), toAddress) == \
'successful':
# At this point we know that we have been waiting on this
# pubkey. This function will command the workerThread
# to start work on the messages that require it.
print("decryptAndCheckPubkeyPayload completed#########################################################")
self.possibleNewPubkey(toAddress)
# Display timing data
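The bytes() wrappers above (bytes(requestedTag), bytes(tag), bytes(data)) all address the same Python 3 porting issue: network data arrives as bytes-like objects, while shared.myAddressesByTag and state.neededPubkeys are keyed by bytes, so keys must be normalized before the lookup. A small illustration with made-up values:

    # str keys never match bytes keys, and bytearray keys are not hashable,
    # so either case silently breaks a lookup against a bytes-keyed dict.
    needed = {b'\x01\x02\x03': ('BM-exampleAddress', None)}

    assert b'\x01\x02\x03' in needed          # bytes key: found
    assert '\x01\x02\x03' not in needed       # str key: not found on Python 3
    try:
        bytearray(b'\x01\x02\x03') in needed
    except TypeError:
        pass                                  # bytearray key: unhashable
    # hence the pattern in this commit: state.neededPubkeys[bytes(tag)]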
@@ -679,6 +679,7 @@ class singleWorker(StoppableThread):
"""Send a message-type object (assemble the object, perform PoW and put it to the inv announcement queue)"""
# pylint: disable=too-many-nested-blocks
# Reset just in case
# import pdb;pdb.set_trace()
sqlExecute(
'''UPDATE sent SET status='msgqueued' '''
''' WHERE status IN ('doingpubkeypow', 'doingmsgpow')''')

@@ -689,6 +690,7 @@ class singleWorker(StoppableThread):
''' and folder LIKE '%sent%' ''')
# while we have a msg that needs some work
for row in queryreturn:
print(row, "rowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowrowv")
toaddress, fromaddress, subject, message, \
ackdata, status, TTL, retryNumber, encoding = row
# toStatus

@@ -730,6 +732,7 @@ class singleWorker(StoppableThread):
toaddress
)
# If we have the needed pubkey in the pubkey table already,
print("sendmsg line no 734#####################################################################")
if queryreturn != []:
# set the status of this msg to doingmsgpow
sqlExecute(

@@ -750,6 +753,7 @@ class singleWorker(StoppableThread):
)
# We don't have the needed pubkey in the pubkeys table already.
else:
print("sendmsg line no 756#####################################################################")
if toAddressVersionNumber <= 3:
toTag = ''
else:

@@ -759,14 +763,16 @@ class singleWorker(StoppableThread):
).digest()).digest()[32:]
if toaddress in state.neededPubkeys or \
toTag in state.neededPubkeys:
print("sendmsg line no 766#####################################################################")
# We already sent a request for the pubkey
sqlExecute(
'''UPDATE sent SET status='awaitingpubkey', '''
''' sleeptill=? WHERE toaddress=? '''
''' AND status='msgqueued' ''',
int(time.time()) + 2.5 * 24 * 60 * 60,
int(10),
toaddress
)
print("sendmsg line no 774#####################################################################")
queues.UISignalQueue.put((
'updateSentItemStatusByToAddress', (
toaddress,

@@ -774,9 +780,11 @@ class singleWorker(StoppableThread):
"MainWindow",
"Encryption key was requested earlier."))
))
print("sendmsg line no 783#####################################################################")
# on with the next msg on which we can do some work
continue
else:
print("sendmsg line no 785#####################################################################")
# We have not yet sent a request for the pubkey
needToRequestPubkey = True
# If we are trying to send to address

@@ -850,19 +858,19 @@ class singleWorker(StoppableThread):
self.requestPubKey(toaddress)
# on with the next msg on which we can do some work
continue

print("sendmsg line no 856#####################################################################")
# At this point we know that we have the necessary pubkey
# in the pubkeys table.

TTL *= 2**retryNumber
if TTL > 28 * 24 * 60 * 60:
TTL = 28 * 24 * 60 * 60
# add some randomness to the TTL
TTL = int(TTL + helper_random.randomrandrange(-300, 300))
embeddedTime = int(time.time() + TTL)

print("sendmsg line no 870#####################################################################")
# if we aren't sending this to ourselves or a chan
if not BMConfigParser().has_section(toaddress):
print("sendmsg line no 873#####################################################################")
shared.ackdataForWhichImWatching[ackdata] = 0
queues.UISignalQueue.put((
'updateSentItemStatusByAckdata', (
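The TTL handling in the hunk above doubles the object lifetime on every retry, caps it at 28 days, and then jitters it by up to five minutes so resends do not happen on an exact schedule. A standalone sketch of that backoff (random.randrange standing in for helper_random.randomrandrange):

    import random
    import time

    def next_embedded_time(base_ttl, retry_number):
        """Double the TTL per retry, cap at 28 days, add +/- 5 minutes of jitter."""
        ttl = base_ttl * 2 ** retry_number
        ttl = min(ttl, 28 * 24 * 60 * 60)
        ttl = int(ttl + random.randrange(-300, 300))
        return ttl, int(time.time() + ttl)

    # e.g. a 4-day initial TTL on the third retry
    ttl, embedded_time = next_embedded_time(4 * 24 * 60 * 60, 3)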
@@ -1047,6 +1055,7 @@ class singleWorker(StoppableThread):
l10n.formatTimestamp()))))
continue
else: # if we are sending a message to ourselves or a chan..
print("sendmsg line no 1058#####################################################################")
self.logger.info('Sending a message.')
self.logger.debug(
'First 150 characters of message: %r', message[:150])

@@ -1088,7 +1097,7 @@ class singleWorker(StoppableThread):
"MainWindow",
"Doing work necessary to send message."))
))

print("sendmsg line no 1093#####################################################################")
# Now we can start to assemble our message.
payload = encodeVarint(fromAddressVersionNumber)
payload += encodeVarint(fromStreamNumber)

@@ -1096,9 +1105,10 @@ class singleWorker(StoppableThread):
# that can be expected from me. (See
# https://bitmessage.org/wiki/Protocol_specification#Pubkey_bitfield_features)
payload += protocol.getBitfield(fromaddress)

print("sendmsg line no 1101#####################################################################")
# We need to convert our private keys to public keys in order
# to include them.
# import pdb; pdb.set_trace()
try:
privSigningKeyHex, privEncryptionKeyHex, \
pubSigningKey, pubEncryptionKey = self._getKeysForAddress(

@@ -1113,7 +1123,7 @@ class singleWorker(StoppableThread):
" (your address) in the keys.dat file."))
))
continue

print("sendmsg line no 1119#####################################################################")
payload += pubSigningKey + pubEncryptionKey

if fromAddressVersionNumber >= 3:

@@ -1128,16 +1138,17 @@ class singleWorker(StoppableThread):
payload += encodeVarint(
defaults.networkDefaultPayloadLengthExtraBytes)
else:
payload += encodeVarint(BMConfigParser().getint(
fromaddress, 'noncetrialsperbyte'))
payload += encodeVarint(BMConfigParser().getint(
fromaddress, 'payloadlengthextrabytes'))

payload += encodeVarint(int(BMConfigParser().get(
fromaddress, 'noncetrialsperbyte')))
payload += encodeVarint(int(BMConfigParser().get(
fromaddress, 'payloadlengthextrabytes')))
print('@@@@@@@@@@@@@@ before payload creating@@@@@@@@@@@@@@@@')
# This hash will be checked by the receiver of the message
# to verify that toRipe belongs to them. This prevents
# a Surreptitious Forwarding Attack.
payload += toRipe
payload += encodeVarint(encoding) # message encoding type
# import pdb;pdb.set_trace()
encodedMessage = helper_msgcoding.MsgEncode(
{"subject": subject, "body": message}, encoding
)

@@ -1163,8 +1174,8 @@ class singleWorker(StoppableThread):
fullAckPayload = self.generateFullAckMessage(
ackdata, toStreamNumber, TTL)
payload += encodeVarint(len(fullAckPayload))
payload += fullAckPayload
dataToSign = pack('>Q', embeddedTime) + '\x00\x00\x00\x02' + \
payload += fullAckPayload.encode()
dataToSign = pack('>Q', embeddedTime) + '\x00\x00\x00\x02'.encode() + \
encodeVarint(1) + encodeVarint(toStreamNumber) + payload
signature = highlevelcrypto.sign(dataToSign, privSigningKeyHex)
payload += encodeVarint(len(signature))
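The .encode() calls introduced above exist because pack() and encodeVarint() return bytes on Python 3, and bytes cannot be concatenated with str literals as they could on Python 2. A short illustration of assembling the signed header entirely from bytes (the varint for a small value such as 1 is a single byte, shown here literally; the stream number is assumed to be 1):

    from struct import pack

    embeddedTime = 1700000000
    dataToSign = pack('>Q', embeddedTime) + b'\x00\x00\x00\x02'   # object type: msg
    dataToSign += b'\x01'        # encodeVarint(1): msg version
    dataToSign += b'\x01'        # encodeVarint(1): stream number, assumed 1 here

    assert isinstance(dataToSign, bytes)
    # pack('>Q', embeddedTime) + '\x00\x00\x00\x02' raises TypeError on Python 3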
@@ -1173,7 +1184,7 @@ class singleWorker(StoppableThread):
# We have assembled the data that will be encrypted.
try:
encrypted = highlevelcrypto.encrypt(
payload, "04" + hexlify(pubEncryptionKeyBase256)
payload, "04".encode() + hexlify(pubEncryptionKeyBase256)
)
except:
sqlExecute(

@@ -1190,9 +1201,9 @@ class singleWorker(StoppableThread):
).arg(l10n.formatTimestamp()))
))
continue

print('@@@@@@@@@@@@@@ before encryptedPayload creating@@@@@@@@@@@@@@@@')
encryptedPayload = pack('>Q', embeddedTime)
encryptedPayload += '\x00\x00\x00\x02' # object type: msg
encryptedPayload += '\x00\x00\x00\x02'.encode() # object type: msg
encryptedPayload += encodeVarint(1) # msg version
encryptedPayload += encodeVarint(toStreamNumber) + encrypted
target = 2 ** 64 / (

@@ -1206,15 +1217,17 @@ class singleWorker(StoppableThread):
))
self.logger.info(
'(For msg message) Doing proof of work. Total required'
' difficulty: %f. Required small message difficulty: %f.',
float(requiredAverageProofOfWorkNonceTrialsPerByte) /
' difficulty: {}. Required small message difficulty: {}.'.format
(float(requiredAverageProofOfWorkNonceTrialsPerByte) /
defaults.networkDefaultProofOfWorkNonceTrialsPerByte,
float(requiredPayloadLengthExtraBytes) /
defaults.networkDefaultPayloadLengthExtraBytes
defaults.networkDefaultPayloadLengthExtraBytes)
)

powStartTime = time.time()
initialHash = hashlib.sha512(encryptedPayload).digest()
# import pdb; pdb.set_trace()

trialValue, nonce = proofofwork.run(target, initialHash)
print("nonce calculated value#############################", nonce)
self.logger.info(

@@ -1229,7 +1242,8 @@ class singleWorker(StoppableThread):
)
except:
pass

print("line no 1234#########################################")
# import pdb; pdb.set_trace()
encryptedPayload = pack('>Q', nonce) + encryptedPayload

# Sanity check. The encryptedPayload size should never be

@@ -1243,11 +1257,18 @@ class singleWorker(StoppableThread):
len(encryptedPayload)
)
continue

print("line no 1248#########################################")
inventoryHash = calculateInventoryHash(encryptedPayload)
print("line no 1250248#########################################")
objectType = 2
Inventory()[inventoryHash] = (
objectType, toStreamNumber, encryptedPayload, embeddedTime, '')
print("line no 1252#########################################")
# import pdb; pdb.set_trace()
inventoryHashlist = (
objectType, toStreamNumber,encryptedPayload, embeddedTime, '')
print("line no 1255#########################################")
# import pdb; pdb.set_trace()
Inventory()[inventoryHashlist]
print("line no 1257#########################################")
if BMConfigParser().has_section(toaddress) or \
not protocol.checkBitfield(behaviorBitfield, protocol.BITFIELD_DOESACK):
queues.UISignalQueue.put((
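For reference, the Inventory object is used as a mapping from a 32-byte inventory hash to a five-field tuple (objectType, streamNumber, payload, embeddedTime, tag), so storing a freshly built msg object is an item assignment keyed by the hash. A toy mapping-style sketch of that contract, not the real storage backend:

    import collections

    InventoryItem = collections.namedtuple('InventoryItem',
                                           'type stream payload expires tag')

    class TinyInventory(dict):
        """Dict keyed by a 32-byte inventory hash, holding InventoryItem tuples."""
        def __setitem__(self, inv_hash, value):
            super().__setitem__(bytes(inv_hash), InventoryItem(*value))

    inv = TinyInventory()
    inventoryHash = b'\x00' * 32
    inv[inventoryHash] = (2, 1, b'encrypted-payload', 1700000000, '')  # objectType 2 = msg
    assert inv[inventoryHash].stream == 1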
@@ -1255,7 +1276,7 @@ class singleWorker(StoppableThread):
ackdata,
tr._translate(
"MainWindow",
"Message sent. Sent at %1"
"Mobileessage sent. Sent at %1"
).arg(l10n.formatTimestamp()))))
else:
# not sending to a chan or one of my addresses

@@ -1268,14 +1289,15 @@ class singleWorker(StoppableThread):
" Sent on %1"
).arg(l10n.formatTimestamp()))
))
print("line no 1282#########################################")
self.logger.info(
'Broadcasting inv for my msg(within sendmsg function): %s',
hexlify(inventoryHash)
)
queues.invQueue.put((toStreamNumber, inventoryHash))

# Update the sent message in the sent table with the
# necessary information.
print("line no 1290#########################################")
if BMConfigParser().has_section(toaddress) or \
not protocol.checkBitfield(behaviorBitfield, protocol.BITFIELD_DOESACK):
newStatus = 'msgsentnoackexpected'

@@ -1289,9 +1311,9 @@ class singleWorker(StoppableThread):
inventoryHash, newStatus, retryNumber + 1,
sleepTill, int(time.time()), ackdata
)

# If we are sending to ourselves or a chan, let's put
# the message in our own inbox.
print("line no 1306#########################################")
if BMConfigParser().has_section(toaddress):
# Used to detect and ignore duplicate messages in our inbox
sigHash = hashlib.sha512(hashlib.sha512(

@@ -1318,6 +1340,7 @@ class singleWorker(StoppableThread):

def requestPubKey(self, toAddress):
"""Send a getpubkey object"""
# import pdb;pdb.set_trace()
toStatus, addressVersionNumber, streamNumber, ripe = decodeAddress(
toAddress)
if toStatus != 'success':

@@ -1378,7 +1401,7 @@ class singleWorker(StoppableThread):
TTL = TTL + helper_random.randomrandrange(-300, 300)
embeddedTime = int(time.time() + TTL)
payload = pack('>Q', embeddedTime)
payload += '\x00\x00\x00\x00' # object type: getpubkey
payload += '\x00\x00\x00\x00'.encode() # object type: getpubkey
payload += encodeVarint(addressVersionNumber)
payload += encodeVarint(streamNumber)
if addressVersionNumber <= 3:

@@ -1406,8 +1429,9 @@ class singleWorker(StoppableThread):

inventoryHash = calculateInventoryHash(payload)
objectType = 1
Inventory()[inventoryHash] = (
inventoryHashlist = (
objectType, streamNumber, payload, embeddedTime, '')
Inventory()[inventoryHashlist]
self.logger.info('sending inv (for the getpubkey message)')
queues.invQueue.put((streamNumber, inventoryHash))

@@ -1437,7 +1461,7 @@ class singleWorker(StoppableThread):
).arg(l10n.formatTimestamp()))
))

def generateFullAckMessage(self, ackdata, _, TTL):
def generateFullAckMessage(self, ackdata, _, TTL):
"""
It might be perfectly fine to just use the same TTL for the ackdata that we use for the message. But I would
rather it be more difficult for attackers to associate ackData with the associated msg object. However, users
@@ -573,6 +573,8 @@ class sqlThread(threading.Thread):
rowcount = 0
# print 'item', item
# print 'parameters', parameters
# if 'inbox' in item:
# import pdb; pdb.set_trace()
try:
self.cur.execute(item, parameters)
rowcount = self.cur.rowcount
@@ -54,6 +54,7 @@ def encrypt(msg, hexPubkey):


def decrypt(msg, hexPrivkey):
print("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS#################################################")
"""Decrypts message with hex private key"""
return makeCryptor(hexPrivkey).decrypt(msg)
@@ -24,6 +24,8 @@ class Inventory():

# cheap inheritance copied from asyncore
def __getattr__(self, attr):
# attr = '__contains__'
print('$$$$$$$$$$$$$ inside the __getattr__ item $$$$$$$$$$$$$$$$')
if attr == "__contains__":
self.numberOfInventoryLookupsPerformed += 1
try:

@@ -38,4 +40,5 @@ class Inventory():

# hint for pylint: this is dictionary like object
def __getitem__(self, key):
print('@@@@@@@@@@@@@@@@@@ inside the __getitem__ item @@@@@@@@@@@@@@@')
return self._realInventory[key]
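The "cheap inheritance" above forwards ordinary attribute access to the wrapped storage object, but dunder methods such as __getitem__ are looked up on the class itself, which is why an explicit __getitem__ delegating to self._realInventory is needed. A compact sketch of that wrapper pattern, with an invented backing class:

    class RealInventory(dict):
        def unexpired_hashes_by_stream(self, stream):
            return [h for h, item in self.items() if item[1] == stream]

    class InventoryProxy:
        """Forward plain attributes to the wrapped object; dunders need explicit stubs."""
        def __init__(self):
            self._realInventory = RealInventory()

        def __getattr__(self, attr):
            # called only for attributes not found on the proxy itself
            return getattr(self._realInventory, attr)

        def __getitem__(self, key):
            # special methods bypass __getattr__, so forward them explicitly
            return self._realInventory[key]

        def __setitem__(self, key, value):
            self._realInventory[key] = value

    proxy = InventoryProxy()
    proxy[b'\x01' * 32] = ('pubkey', 1)
    assert proxy.unexpired_hashes_by_stream(1) == [b'\x01' * 32]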
@@ -123,7 +123,8 @@ def formatTimestamp(timestamp=None, as_unicode=True):
timestring = time.strftime(time_format)

if as_unicode:
return unicode(timestring, encoding)
return (timestring.encode('utf-8'))
# return unicode(timestring, encoding)
return timestring
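On Python 3 there is no unicode() built-in and time.strftime() already returns text, so the commented-out unicode(timestring, encoding) branch is replaced by returning the string directly (or encoding it to UTF-8 when a byte string is wanted). A minimal sketch of the portable behaviour:

    import time

    def format_timestamp(timestamp=None, time_format='%c'):
        """strftime already returns str on Python 3; encode only if bytes are needed."""
        ts = time.localtime(timestamp) if timestamp is not None else time.localtime()
        return time.strftime(time_format, ts)

    assert isinstance(format_timestamp(), str)
    assert isinstance(format_timestamp().encode('utf-8'), bytes)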
@@ -364,7 +364,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
if now < self.skipUntil:
return True
for i in items:
self.pendingUpload[str(i)] = now
self.pendingUpload[bytes(i)] = now
return True

def _command_inv(self, dandelion=False):

@@ -380,6 +380,7 @@ class BMProto(AdvancedDispatcher, ObjectTracker):
return True

for i in map(bytes, items):
import pdb;pdb.set_trace()
if i in Inventory() and not Dandelion().hasHash(i):
continue
if dandelion and not Dandelion().hasHash(i):

@@ -443,13 +444,26 @@ class BMProto(AdvancedDispatcher, ObjectTracker):

if self.object.inventoryHash in Inventory() and Dandelion().hasHash(self.object.inventoryHash):
Dandelion().removeHash(self.object.inventoryHash, "cycle detection")
[self.object.inventoryHash] = (
# import pdb; pdb.set_trace()
self.object.objectType, self.object.streamNumber,
inventoryHash_list = [self.object.objectType, self.object.streamNumber,
memoryview(self.payload[objectOffset:]), self.object.expiresTime,
memoryview(self.object.tag)
)
Inventory()[self.object.inventoryHash]
memoryview(self.object.tag)]
# [self.object.inventoryHash] = (
# self.object.objectType, self.object.streamNumber,
# memoryview(self.payload[objectOffset:]), self.object.expiresTime,
# memoryview(self.object.tag)
# )

# Inventory()[self.object.inventoryHash] = (self.object.objectType, self.object.streamNumber,
# buffer(self.payload[objectOffset:]), self.object.expiresTime,
# buffer(self.object.tag))

self.handleReceivedObject(
self.object.streamNumber, self.object.inventoryHash)
invQueue.put((
@@ -87,19 +87,19 @@ class InvThread(StoppableThread):
fluffs.append(inv[1])
except KeyError:
fluffs.append(inv[1])

if fluffs:
# import pdb; pdb.set_trace()
random.shuffle(fluffs)
connection.append_write_buf(protocol.CreatePacket(
'inv',
addresses.encodeVarint(
len(fluffs)) + ''.join(fluffs)))
len(fluffs)) + ('').encode().join([x for x in fluffs]))) #compare result with python2
if stems:
random.shuffle(stems)
connection.append_write_buf(protocol.CreatePacket(
'dinv',
addresses.encodeVarint(
len(stems)) + ''.join(stems)))
len(stems)) + ('').encode().join([x for x in stems]))) #compare result with python2

invQueue.iterate()
for _ in range(len(chunk)):
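The join changes above are needed because the inventory vectors collected in fluffs and stems are bytes on Python 3 and str.join() rejects bytes items; ('').encode().join([x for x in fluffs]) is equivalent to the shorter b''.join(fluffs). A quick illustration:

    hashes = [b'\x11' * 32, b'\x22' * 32]        # 32-byte inventory vectors

    payload = b''.join(hashes)                    # idiomatic Python 3 spelling
    assert payload == ('').encode().join([x for x in hashes])
    assert len(payload) == 64

    try:
        ''.join(hashes)                           # str separator with bytes items
    except TypeError:
        pass                                      # raises on Python 3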
@@ -211,6 +211,7 @@ class TCPConnection(BMProto, TLSDispatcher):
# may lock for a long time, but I think it's better than
# thousands of small locks
with self.objectsNewToThemLock:
# import pdb;pdb.set_trace()
for objHash in Inventory().unexpired_hashes_by_stream(stream):
# don't advertise stem objects on bigInv
if Dandelion().hasHash(objHash):

@@ -220,18 +221,18 @@ class TCPConnection(BMProto, TLSDispatcher):
payload = bytes()
# Now let us start appending all of these hashes together. They will be
# sent out in a big inv message to our new peer.
if len(bigInvList) is not 0:
for obj_hash, _ in bigInvList.items():
payload += obj_hash
objectCount += 1

for obj_hash, _ in bigInvList.items():
payload += obj_hash
objectCount += 1

# Remove -1 below when sufficient time has passed for users to
# upgrade to versions of PyBitmessage that accept inv with 50,000
# items
if objectCount >= MAX_OBJECT_COUNT - 1:
sendChunk()
payload = b''
objectCount = 0
# Remove -1 below when sufficient time has passed for users to
# upgrade to versions of PyBitmessage that accept inv with 50,000
# items
if objectCount >= MAX_OBJECT_COUNT - 1:
sendChunk()
payload = b''
objectCount = 0

# flush
sendChunk()
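The loop above batches the big-inv advertisement: each 32-byte hash is appended to a bytes payload, a chunk is flushed once the count approaches MAX_OBJECT_COUNT, and a final flush sends the remainder. A standalone sketch of that chunking logic (the limit value and the send callback are stand-ins):

    MAX_OBJECT_COUNT = 50000          # assumed per-inv item limit for this sketch

    def send_big_inv(hashes, send_chunk):
        """Batch 32-byte hashes into inv-sized chunks, flushing the remainder at the end."""
        payload = b''
        objectCount = 0
        for obj_hash in hashes:
            payload += obj_hash
            objectCount += 1
            if objectCount >= MAX_OBJECT_COUNT - 1:
                send_chunk(objectCount, payload)
                payload = b''
                objectCount = 0
        if objectCount:
            send_chunk(objectCount, payload)      # flush

    sent = []
    send_big_inv([b'\x00' * 32] * 3, lambda n, p: sent.append((n, len(p))))
    assert sent == [(3, 96)]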
@@ -45,6 +45,7 @@ class UploadThread(StoppableThread):
if Dandelion().hasHash(chunk) and \
i != Dandelion().objectChildStem(chunk):
i.antiIntersectionDelay()
print
self.logger.info(
'%s asked for a stem object we didn\'t offer to it.',
i.destination)
@@ -108,13 +108,13 @@ def _doFastPoW(target, initialHash):
logger.debug("Fast PoW done")
return result[0], result[1]
time.sleep(0.2)


def _doCPoW(target, initialHash):
h = initialHash
m = target
out_h = ctypes.pointer(ctypes.create_string_buffer(h, 64))
out_m = ctypes.c_ulonglong(m)
# import pdb; pdb.set_trace()
out_h = ctypes.pointer(ctypes.create_string_buffer(initialHash, 64))
out_m = ctypes.c_ulonglong(target)
logger.debug("C PoW start")
nonce = bmpow(out_h, out_m)
trialValue, = unpack('>Q', hashlib.sha512(hashlib.sha512(pack('>Q', nonce) + initialHash).digest()).digest()[0:8])
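_doCPoW now passes initialHash and target straight into the ctypes wrappers instead of going through the h/m aliases; the nonce returned by the C library is then checked with the same double-SHA-512 trial value shown on the last line above. A pure-Python sketch of that check, which is the computation the C extension accelerates:

    import hashlib
    from struct import pack, unpack

    def trial_value(nonce, initial_hash):
        """First 8 bytes of sha512(sha512(nonce || initialHash)) as a big-endian integer."""
        digest = hashlib.sha512(
            hashlib.sha512(pack('>Q', nonce) + initial_hash).digest()).digest()
        return unpack('>Q', digest[:8])[0]

    initial_hash = hashlib.sha512(b'example payload').digest()
    target = 2 ** 60                 # easy target so the loop finishes quickly
    nonce = 0
    while trial_value(nonce, initial_hash) > target:
        nonce += 1                   # brute-force search over nonces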
@@ -241,7 +241,6 @@ def buildCPoW():

def run(target, initialHash):
"""Run the proof of work thread"""

if state.shutdown != 0:
raise # pylint: disable=misplaced-bare-raise
target = int(target)

@@ -254,6 +253,7 @@ def run(target, initialHash):
pass # fallback
if bmpow:
try:
print('-------------inside the proofofwork-----------------')
return _doCPoW(target, initialHash)
except StopIteration:
raise

@@ -294,6 +294,8 @@ def init():

openclpow.initCL()
if sys.platform == "win32":
print('@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@')
print('inside the sys.platform == "win32"')
if ctypes.sizeof(ctypes.c_voidp) == 4:
bitmsglib = 'bitmsghash32.dll'
else:

@@ -326,13 +328,16 @@ def init():
bso = None

else:
# import pdb; pdb.set_trace()
print('####################################')
print('else else else eles else ')
try:
bso = ctypes.CDLL(os.path.join(paths.codePath(), "bitmsghash", bitmsglib))
except OSError:
import glob
try:
bso = ctypes.CDLL(glob.glob(os.path.join(
paths.codePath(), "bitmsghash", "bitmsghash*.so"
paths.codePath(), " ", "bitmsghash*.so"
))[0])
except (OSError, IndexError):
bso = None

@@ -349,4 +354,6 @@ def init():
else:
bmpow = None
if bmpow is None:
print('$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$')
print('intailed the bmpow')
buildCPoW()
@@ -431,7 +431,7 @@ def decryptAndCheckPubkeyPayload(data, address):
encryptedData = data[readPosition:]

# Let us try to decrypt the pubkey
toAddress, cryptorObject = state.neededPubkeys[tag]
toAddress, cryptorObject = state.neededPubkeys[bytes(tag)]
if toAddress != address:
logger.critical(
'decryptAndCheckPubkeyPayload failed due to toAddress'

@@ -444,6 +444,7 @@ def decryptAndCheckPubkeyPayload(data, address):
# That sort of address-malleability should have been caught
# by the UI or API and an error given to the user.
return 'failed'
print("WWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWW#################################################")
try:
decryptedData = cryptorObject.decrypt(encryptedData)
except:

@@ -451,13 +452,13 @@ def decryptAndCheckPubkeyPayload(data, address):
# but tagged it with a tag for which we are watching.
logger.info('Pubkey decryption was unsuccessful.')
return 'failed'

readPosition = 0
# bitfieldBehaviors = decryptedData[readPosition:readPosition + 4]
readPosition += 4
publicSigningKey = '\x04' + decryptedData[readPosition:readPosition + 64]
print("working fine till here#################################################################")
publicSigningKey = '\x04'.encode() + decryptedData[readPosition:readPosition + 64]
readPosition += 64
publicEncryptionKey = '\x04' + decryptedData[readPosition:readPosition + 64]
publicEncryptionKey = '\x04'.encode() + decryptedData[readPosition:readPosition + 64]
readPosition += 64
specifiedNonceTrialsPerByteLength = decodeVarint(
decryptedData[readPosition:readPosition + 10])[1]

@@ -471,7 +472,6 @@ def decryptAndCheckPubkeyPayload(data, address):
decryptedData[readPosition:readPosition + 10])
readPosition += signatureLengthLength
signature = decryptedData[readPosition:readPosition + signatureLength]

if not highlevelcrypto.verify(
signedData, signature, hexlify(publicSigningKey)):
logger.info(

@@ -480,11 +480,9 @@ def decryptAndCheckPubkeyPayload(data, address):

logger.info(
'ECDSA verify passed (within decryptAndCheckPubkeyPayload)')

sha = hashlib.new('sha512')
sha.update(publicSigningKey + publicEncryptionKey)
embeddedRipe = RIPEMD160Hash(sha.digest()).digest()

if embeddedRipe != ripe:
# Although this pubkey object had the tag were were looking for
# and was encrypted with the correct encryption key,

@@ -503,9 +501,9 @@ def decryptAndCheckPubkeyPayload(data, address):
addressVersion, streamNumber, hexlify(ripe),
hexlify(publicSigningKey), hexlify(publicEncryptionKey)
)

t = (address, addressVersion, storedData, int(time.time()), 'yes')
sqlExecute('''INSERT INTO pubkeys VALUES (?,?,?,?,?)''', *t)
print("successful Insertion of pubkey hurray#################################################")
return 'successful'
except varintDecodeError:
logger.info(
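The '\x04'.encode() prefixes above rebuild the uncompressed-point form of the keys: the decrypted pubkey data carries two raw 64-byte keys, and the leading 0x04 byte marks an uncompressed ECC public key before it is hashed or used for signature verification. A small sketch of that slice-and-prefix step over a fabricated buffer:

    readPosition = 4                 # skip the 4-byte behaviour bitfield
    decryptedData = b'\x00' * 4 + b'\xaa' * 64 + b'\xbb' * 64    # fabricated layout

    publicSigningKey = b'\x04' + decryptedData[readPosition:readPosition + 64]
    readPosition += 64
    publicEncryptionKey = b'\x04' + decryptedData[readPosition:readPosition + 64]
    readPosition += 64

    assert len(publicSigningKey) == len(publicEncryptionKey) == 65
    assert publicSigningKey[0] == 0x04          # uncompressed-point marker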
@@ -13,6 +13,7 @@ from storage.storage import InventoryStorage, InventoryItem
class SqliteInventory(InventoryStorage): # pylint: disable=too-many-ancestors
"""Inventory using SQLite"""
def __init__(self):
# import pdb;pdb.set_trace()
super(SqliteInventory, self).__init__()
# of objects (like msg payloads and pubkey payloads)
# Does not include protocol headers (the first 24 bytes of each packet).

@@ -29,7 +30,8 @@ class SqliteInventory(InventoryStorage): # pylint: disable=too-many-ancestors
self.lock = RLock()

def __contains__(self, hash_):
print('----------contains------------------')
print('__contains__(self, hash_)__contains__(self, hash_)__contains__(self, hash_) ',hash_)
hash_ = str(hash_).encode() if type(hash_) == int else hash_
with self.lock:
if hash_ in self._objects:
return True

@@ -41,23 +43,38 @@ class SqliteInventory(InventoryStorage): # pylint: disable=too-many-ancestors
self._objects[hash_] = rows[0][0]
return True

# def __getitem__(self, hash_):
# raw = [None]
# # some think broke
# if hash_ == 0:
# hash_ = bytes()
# with self.lock:
# try:
# if hash_ in self._inventory:
# return self._inventory[hash_]
# rows = sqlQuery(
# 'SELECT objecttype, streamnumber, payload, expirestime, tag FROM inventory WHERE hash=?',
# sqlite3.Binary(hash_))
# if not rows:
# # raise KeyError(hash_)
# pass
# except:
# rows = [hash_]
# return InventoryItem(*rows[0])

def __getitem__(self, hash_):
if hash_ == 0:
hash_ = bytes()
# import pdb;pdb.set_trace()
with self.lock:
try:
if hash_ in self._inventory:
return self._inventory[hash_]
rows = sqlQuery(
'SELECT objecttype, streamnumber, payload, expirestime, tag FROM inventory WHERE hash=?',
sqlite3.Binary(hash_))
if not rows:
pass
# raise KeyError(hash_)
except:
pass
if hash_ in self._inventory:
return self._inventory[hash_]
rows = sqlQuery(
'SELECT objecttype, streamnumber, payload, expirestime, tag'
' FROM inventory WHERE hash=?', sqlite3.Binary(bytes(hash_)))
if not rows:
raise KeyError(hash_)
return InventoryItem(*rows[0])

def __setitem__(self, hash_, value):
print('----------__setitem__------------------')
with self.lock:
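The rewritten __getitem__ above converts the hash to real bytes before wrapping it in sqlite3.Binary and maps an empty result to KeyError, preserving the mapping contract. A self-contained sketch of that lookup against an in-memory database with a reduced schema:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE inventory (hash BLOB, objecttype INT, payload BLOB)')
    conn.execute('INSERT INTO inventory VALUES (?, ?, ?)',
                 (b'\x01' * 32, 2, b'msg payload'))

    def get_item(hash_):
        rows = conn.execute(
            'SELECT objecttype, payload FROM inventory WHERE hash=?',
            (sqlite3.Binary(bytes(hash_)),)).fetchall()
        if not rows:
            raise KeyError(hash_)               # dict-like: missing key raises
        return rows[0]

    assert get_item(memoryview(b'\x01' * 32))[0] == 2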
@@ -95,13 +112,17 @@ class SqliteInventory(InventoryStorage): # pylint: disable=too-many-ancestors

def unexpired_hashes_by_stream(self, stream):
"""Return unexpired inventory vectors filtered by stream"""
# print ('self._inventory.items() self._inventory.items() self._inventory.items()' ,self._inventory.items())
# import pdb;pdb.set_trace()
with self.lock:
t = int(time.time())
hashes = [x for x, value in self._inventory.items()
if value.stream == stream and value.expires > t]
hashes += (str(payload) for payload, in sqlQuery(
# print ('hasheshasheshasheshasheshasheshasheshasheshashes',hashes)
hashes += (payload for payload, in sqlQuery(
'SELECT hash FROM inventory WHERE streamnumber=?'
' AND expirestime>?', stream, t))
# print ('hasheshasheshasheshasheshasheshasheshasheshashes aaaaaaaaffter',hashes)
return hashes

def flush(self):