Refactor Inventory

parent c372adc92d
commit 814edd06df
@@ -850,7 +850,6 @@ class MySimpleXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
         TTL = 2.5 * 24 * 60 * 60
         shared.inventory[inventoryHash] = (
             objectType, toStreamNumber, encryptedPayload, int(time.time()) + TTL,'')
-        shared.inventorySets[toStreamNumber].add(inventoryHash)
         with shared.printLock:
             print 'Broadcasting inv for msg(API disseminatePreEncryptedMsg command):', inventoryHash.encode('hex')
         shared.broadcastToSendDataQueues((
@@ -898,7 +897,6 @@ class MySimpleXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
         TTL = 28 * 24 * 60 * 60
         shared.inventory[inventoryHash] = (
             objectType, pubkeyStreamNumber, payload, int(time.time()) + TTL,'')
-        shared.inventorySets[pubkeyStreamNumber].add(inventoryHash)
         with shared.printLock:
             print 'broadcasting inv within API command disseminatePubkey with hash:', inventoryHash.encode('hex')
         shared.broadcastToSendDataQueues((
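These two API hunks can drop the explicit shared.inventorySets[...].add(inventoryHash) calls because, after this refactor, assigning into shared.inventory indexes the hash by stream as a side effect (see Inventory.__setitem__ in the src/shared.py hunk further down). A minimal standalone sketch of that side effect, using a hypothetical class name rather than the project's module:

import collections
import time

InventoryItem = collections.namedtuple('InventoryItem', 'type stream payload expires tag')

class InMemoryInventorySketch(object):
    # Toy stand-in that shows the write path only.
    def __init__(self):
        self._inventory = {}
        self._streams = collections.defaultdict(set)

    def __setitem__(self, hash, value):
        value = InventoryItem(*value)
        self._inventory[hash] = value
        # This add() is what lets callers stop maintaining a separate inventorySets dict.
        self._streams[value.stream].add(hash)

inv = InMemoryInventorySketch()
inv['somehash'] = (1, 1, 'payload', int(time.time()) + 28 * 24 * 60 * 60, '')
assert 'somehash' in inv._streams[1]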
@@ -48,11 +48,6 @@ from helper_threading import *
 def connectToStream(streamNumber):
     shared.streamsInWhichIAmParticipating[streamNumber] = 'no data'
     selfInitiatedConnections[streamNumber] = {}
-    shared.inventorySets[streamNumber] = set()
-    queryData = sqlQuery('''SELECT hash FROM inventory WHERE streamnumber=?''', streamNumber)
-    for row in queryData:
-        shared.inventorySets[streamNumber].add(row[0])
-

     if isOurOperatingSystemLimitedToHavingVeryFewHalfOpenConnections():
         # Some XP and Vista systems can only have 10 outgoing connections at a time.
@@ -2306,16 +2306,12 @@ class MyForm(settingsmixin.SMainWindow):
             # in the objectProcessorQueue to be processed
             if self.NewSubscriptionDialogInstance.ui.checkBoxDisplayMessagesAlreadyInInventory.isChecked():
                 status, addressVersion, streamNumber, ripe = decodeAddress(address)
-                shared.flushInventory()
+                shared.inventory.flush()
                 doubleHashOfAddressData = hashlib.sha512(hashlib.sha512(encodeVarint(
                     addressVersion) + encodeVarint(streamNumber) + ripe).digest()).digest()
                 tag = doubleHashOfAddressData[32:]
-                queryreturn = sqlQuery(
-                    '''select payload from inventory where objecttype=3 and tag=?''', tag)
-                for row in queryreturn:
-                    payload, = row
-                    objectType = 3
-                    shared.objectProcessorQueue.put((objectType,payload))
+                for value in shared.inventory.by_type_and_tag(3, tag):
+                    shared.objectProcessorQueue.put((value.type, value.payload))

     def click_pushButtonStatusIcon(self):
         logger.debug('click_pushButtonStatusIcon')
@@ -4197,23 +4193,22 @@ class NewSubscriptionDialog(QtGui.QDialog):
                 self.ui.checkBoxDisplayMessagesAlreadyInInventory.setText(
                     _translate("MainWindow", "Address is an old type. We cannot display its past broadcasts."))
             else:
-                shared.flushInventory()
+                shared.inventory.flush()
                 doubleHashOfAddressData = hashlib.sha512(hashlib.sha512(encodeVarint(
                     addressVersion) + encodeVarint(streamNumber) + ripe).digest()).digest()
                 tag = doubleHashOfAddressData[32:]
-                queryreturn = sqlQuery(
-                    '''select hash from inventory where objecttype=3 and tag=?''', tag)
-                if len(queryreturn) == 0:
+                count = len(shared.inventory.by_type_and_tag(3, tag))
+                if count == 0:
                     self.ui.checkBoxDisplayMessagesAlreadyInInventory.setText(
                         _translate("MainWindow", "There are no recent broadcasts from this address to display."))
-                elif len(queryreturn) == 1:
+                elif count == 1:
                     self.ui.checkBoxDisplayMessagesAlreadyInInventory.setEnabled(True)
                     self.ui.checkBoxDisplayMessagesAlreadyInInventory.setText(
-                        _translate("MainWindow", "Display the %1 recent broadcast from this address.").arg(str(len(queryreturn))))
+                        _translate("MainWindow", "Display the %1 recent broadcast from this address.").arg(count))
                 else:
                     self.ui.checkBoxDisplayMessagesAlreadyInInventory.setEnabled(True)
                     self.ui.checkBoxDisplayMessagesAlreadyInInventory.setText(
-                        _translate("MainWindow", "Display the %1 recent broadcasts from this address.").arg(str(len(queryreturn))))
+                        _translate("MainWindow", "Display the %1 recent broadcasts from this address.").arg(count))


 class NewAddressDialog(QtGui.QDialog):
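Both UI hunks above replace the flush-then-query pattern with a single shared.inventory.by_type_and_tag(3, tag) call; the tag they pass is the second half of the double SHA-512 of the encoded address data, computed a few lines earlier. A standalone sketch of that computation, with a hypothetical fixed-width stand-in for encodeVarint:

import hashlib
import struct

def encode_varint_sketch(n):
    # Hypothetical stand-in for encodeVarint(); the real encoder is variable-width.
    return struct.pack('>Q', n)

address_version, stream_number, ripe = 4, 1, b'\x00' * 20
double_hash = hashlib.sha512(hashlib.sha512(
    encode_varint_sketch(address_version) +
    encode_varint_sketch(stream_number) + ripe).digest()).digest()
tag = double_hash[32:]   # what the dialog passes to shared.inventory.by_type_and_tag(3, tag)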
@@ -216,14 +216,8 @@ class receiveDataThread(threading.Thread):
             objectHash, = random.sample(
                 self.objectsThatWeHaveYetToGetFromThisPeer, 1)
             if objectHash in shared.inventory:
-                logger.debug('Inventory (in memory) already has object listed in inv message.')
-                del self.objectsThatWeHaveYetToGetFromThisPeer[
-                    objectHash]
-            elif shared.isInSqlInventory(objectHash):
-                if shared.verbose >= 3:
-                    logger.debug('Inventory (SQL on disk) already has object listed in inv message.')
-                del self.objectsThatWeHaveYetToGetFromThisPeer[
-                    objectHash]
+                logger.debug('Inventory already has object listed in inv message.')
+                del self.objectsThatWeHaveYetToGetFromThisPeer[objectHash]
             else:
                 # We don't have the object in our inventory. Let's request it.
                 self.sendgetdata(objectHash)
@@ -318,23 +312,10 @@ class receiveDataThread(threading.Thread):

     def sendBigInv(self):
         # Select all hashes for objects in this stream.
-        queryreturn = sqlQuery(
-            '''SELECT hash FROM inventory WHERE expirestime>? and streamnumber=?''',
-            int(time.time()),
-            self.streamNumber)
         bigInvList = {}
-        for row in queryreturn:
-            hash, = row
+        for hash in shared.inventory.unexpired_hashes_by_stream(self.streamNumber):
             if hash not in self.someObjectsOfWhichThisRemoteNodeIsAlreadyAware and not self.objectHashHolderInstance.hasHash(hash):
                 bigInvList[hash] = 0
-        # We also have messages in our inventory in memory (which is a python
-        # dictionary). Let's fetch those too.
-        with shared.inventoryLock:
-            for hash, storedValue in shared.inventory.items():
-                if hash not in self.someObjectsOfWhichThisRemoteNodeIsAlreadyAware:
-                    objectType, streamNumber, payload, expiresTime, tag = storedValue
-                    if streamNumber == self.streamNumber and expiresTime > int(time.time()):
-                        bigInvList[hash] = 0
         numberOfObjectsInInvMessage = 0
         payload = ''
         # Now let us start appending all of these hashes together. They will be
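sendBigInv previously had to merge two sources by hand: an SQL query over the inventory table and a locked walk of the in-memory dict. The new unexpired_hashes_by_stream() does that merge in one place. A standalone sketch of the pattern, with memory_inventory and sql_query as injected stand-ins for the real shared state and the sqlQuery helper:

import time

def unexpired_hashes_by_stream_sketch(memory_inventory, sql_query, stream):
    now = int(time.time())
    # Hashes still held only in memory...
    hashes = [h for h, value in memory_inventory.items()
              if value.stream == stream and value.expires > now]
    # ...plus hashes already flushed to the inventory table.
    hashes += [row[0] for row in sql_query(
        'SELECT hash FROM inventory WHERE streamnumber=? AND expirestime>?',
        stream, now)]
    return hashes

print(unexpired_hashes_by_stream_sketch({}, lambda *args: [], 1))   # -> []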
@@ -440,9 +421,7 @@ class receiveDataThread(threading.Thread):
                         data[lengthOfVarint:32 + lengthOfVarint]] = 0
                 shared.numberOfInventoryLookupsPerformed += 1
                 if data[lengthOfVarint:32 + lengthOfVarint] in shared.inventory:
-                    logger.debug('Inventory (in memory) has inventory item already.')
-                elif shared.isInSqlInventory(data[lengthOfVarint:32 + lengthOfVarint]):
-                    logger.debug('Inventory (SQL on disk) has inventory item already.')
+                    logger.debug('Inventory has inventory item already.')
                 else:
                     self.sendgetdata(data[lengthOfVarint:32 + lengthOfVarint])
             else:
@@ -453,7 +432,7 @@ class receiveDataThread(threading.Thread):
         advertisedSet = set()
         for i in range(numberOfItemsInInv):
             advertisedSet.add(data[lengthOfVarint + (32 * i):32 + lengthOfVarint + (32 * i)])
-        objectsNewToMe = advertisedSet - shared.inventorySets[self.streamNumber]
+        objectsNewToMe = advertisedSet - shared.inventory.hashes_by_stream(self.streamNumber)
         logger.info('inv message lists %s objects. Of those %s are new to me. It took %s seconds to figure that out.', numberOfItemsInInv, len(objectsNewToMe), time.time()-startTime)
         for item in objectsNewToMe:
             if totalNumberOfobjectsThatWeHaveYetToGetFromAllPeers > 200000 and len(self.objectsThatWeHaveYetToGetFromThisPeer) > 1000 and shared.trustedPeer == None: # inv flooding attack mitigation
@@ -491,20 +470,10 @@ class receiveDataThread(threading.Thread):
             if self.objectHashHolderInstance.hasHash(hash):
                 shared.inventoryLock.release()
                 self.antiIntersectionDelay()
-            elif hash in shared.inventory:
-                objectType, streamNumber, payload, expiresTime, tag = shared.inventory[hash]
-                shared.inventoryLock.release()
-                self.sendObject(payload)
             else:
                 shared.inventoryLock.release()
-                queryreturn = sqlQuery(
-                    '''select payload from inventory where hash=? and expirestime>=?''',
-                    hash,
-                    int(time.time()))
-                if queryreturn != []:
-                    for row in queryreturn:
-                        payload, = row
-                        self.sendObject(payload)
+                if hash in shared.inventory:
+                    self.sendObject(shared.inventory[hash].payload)
                 else:
                     self.antiIntersectionDelay()
                     logger.warning('%s asked for an object with a getdata which is not in either our memory inventory or our SQL inventory. We probably cleaned it out after advertising it but before they got around to asking for it.' % (self.peer,))
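The getdata path above now leans on the mapping protocol: hash in shared.inventory consults memory first and then the on-disk table, and shared.inventory[hash].payload fetches from whichever tier holds the object. A standalone sketch of that two-tier fallback, with sql_lookup as a stand-in for the sqlQuery calls made inside __contains__ and __getitem__:

class TwoTierLookupSketch(object):
    def __init__(self, sql_lookup):
        self._memory = {}
        self._sql_lookup = sql_lookup   # callable returning a list of matching rows

    def __contains__(self, hash):
        return hash in self._memory or bool(self._sql_lookup(hash))

    def __getitem__(self, hash):
        if hash in self._memory:
            return self._memory[hash]
        rows = self._sql_lookup(hash)
        if not rows:
            raise KeyError(hash)
        return rows[0]

store = TwoTierLookupSketch(lambda h: [])
print('abc' in store)   # False: neither tier has the hash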
@@ -46,19 +46,7 @@ class singleCleaner(threading.Thread, StoppableThread):
         while shared.shutdown == 0:
             shared.UISignalQueue.put((
                 'updateStatusBar', 'Doing housekeeping (Flushing inventory in memory to disk...)'))
-            with shared.inventoryLock: # If you use both the inventoryLock and the sqlLock, always use the inventoryLock OUTSIDE of the sqlLock.
-                with SqlBulkExecute() as sql:
-                    for hash, storedValue in shared.inventory.items():
-                        objectType, streamNumber, payload, expiresTime, tag = storedValue
-                        sql.execute(
-                            '''INSERT INTO inventory VALUES (?,?,?,?,?,?)''',
-                            hash,
-                            objectType,
-                            streamNumber,
-                            payload,
-                            expiresTime,
-                            tag)
-                        del shared.inventory[hash]
+            shared.inventory.flush()
             shared.UISignalQueue.put(('updateStatusBar', ''))

             shared.broadcastToSendDataQueues((
@@ -70,9 +58,7 @@ class singleCleaner(threading.Thread, StoppableThread):
                 shared.UISignalQueue.queue.clear()
             if timeWeLastClearedInventoryAndPubkeysTables < int(time.time()) - 7380:
                 timeWeLastClearedInventoryAndPubkeysTables = int(time.time())
-                sqlExecute(
-                    '''DELETE FROM inventory WHERE expirestime<? ''',
-                    int(time.time()) - (60 * 60 * 3))
+                shared.inventory.clean()
                 # pubkeys
                 sqlExecute(
                     '''DELETE FROM pubkeys WHERE time<? AND usedpersonally='no' ''',
@@ -94,20 +80,6 @@ class singleCleaner(threading.Thread, StoppableThread):
                         elif status == 'msgsent':
                             resendMsg(ackData)

-            # Let's also clear and reload shared.inventorySets to keep it from
-            # taking up an unnecessary amount of memory.
-            for streamNumber in shared.inventorySets:
-                shared.inventorySets[streamNumber] = set()
-                queryData = sqlQuery('''SELECT hash FROM inventory WHERE streamnumber=?''', streamNumber)
-                for row in queryData:
-                    shared.inventorySets[streamNumber].add(row[0])
-            with shared.inventoryLock:
-                for hash, storedValue in shared.inventory.items():
-                    objectType, streamNumber, payload, expiresTime, tag = storedValue
-                    if not streamNumber in shared.inventorySets:
-                        shared.inventorySets[streamNumber] = set()
-                    shared.inventorySets[streamNumber].add(hash)
-
             # Let us write out the knowNodes to disk if there is anything new to write out.
             if shared.needToWriteKnownNodesToDisk:
                 shared.knownNodesLock.acquire()
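The three singleCleaner hunks above collapse the periodic housekeeping into two calls: shared.inventory.flush() bulk-inserts the in-memory items and clears the dict, and shared.inventory.clean() drops expired rows and rebuilds the per-stream hash index, so the hand-rolled INSERT loop and the inventorySets reload disappear. A standalone sketch of the clean step, with execute standing in for sqlExecute and all_items standing in for Inventory.items():

import collections
import time

def clean_sketch(all_items, execute):
    # Drop rows that expired more than three hours ago, mirroring the old DELETE.
    execute('DELETE FROM inventory WHERE expirestime<?', int(time.time()) - (60 * 60 * 3))
    # Rebuild the stream -> hashes index from whatever items remain.
    streams = collections.defaultdict(set)
    for hash, item in all_items:
        streams[item.stream].add(hash)
    return streams

Item = collections.namedtuple('Item', 'stream')
print(clean_sketch([('h1', Item(1))], lambda *args: None))   # defaultdict mapping stream 1 to a set containing 'h1'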
@@ -158,7 +158,6 @@ class singleWorker(threading.Thread, StoppableThread):
             objectType = 1
             shared.inventory[inventoryHash] = (
                 objectType, streamNumber, payload, embeddedTime,'')
-            shared.inventorySets[streamNumber].add(inventoryHash)

             logger.info('broadcasting inv with hash: ' + inventoryHash.encode('hex'))
@@ -249,7 +248,6 @@ class singleWorker(threading.Thread, StoppableThread):
             objectType = 1
             shared.inventory[inventoryHash] = (
                 objectType, streamNumber, payload, embeddedTime,'')
-            shared.inventorySets[streamNumber].add(inventoryHash)

             logger.info('broadcasting inv with hash: ' + inventoryHash.encode('hex'))
@@ -340,7 +338,6 @@ class singleWorker(threading.Thread, StoppableThread):
             objectType = 1
             shared.inventory[inventoryHash] = (
                 objectType, streamNumber, payload, embeddedTime, doubleHashOfAddressData[32:])
-            shared.inventorySets[streamNumber].add(inventoryHash)

             logger.info('broadcasting inv with hash: ' + inventoryHash.encode('hex'))
@@ -463,7 +460,6 @@ class singleWorker(threading.Thread, StoppableThread):
             objectType = 3
             shared.inventory[inventoryHash] = (
                 objectType, streamNumber, payload, embeddedTime, tag)
-            shared.inventorySets[streamNumber].add(inventoryHash)
             logger.info('sending inv (within sendBroadcast function) for object: ' + inventoryHash.encode('hex'))
             shared.broadcastToSendDataQueues((
                 streamNumber, 'advertiseobject', inventoryHash))
@@ -559,12 +555,8 @@ class singleWorker(threading.Thread, StoppableThread):
             tag = doubleHashOfToAddressData[32:] # The second half of the sha512 hash.
             shared.neededPubkeys[tag] = (toaddress, highlevelcrypto.makeCryptor(privEncryptionKey.encode('hex')))

-        queryreturn = sqlQuery(
-            '''SELECT payload FROM inventory WHERE objecttype=1 and tag=? ''', toTag)
-        if queryreturn != []: # if there are any pubkeys in our inventory with the correct tag..
-            for row in queryreturn:
-                payload, = row
-                if shared.decryptAndCheckPubkeyPayload(payload, toaddress) == 'successful':
+            for value in shared.inventory.by_type_and_tag(1, toTag):
+                if shared.decryptAndCheckPubkeyPayload(value.payload, toaddress) == 'successful': #if valid, this function also puts it in the pubkeys table.
                     needToRequestPubkey = False
                     sqlExecute(
                         '''UPDATE sent SET status='doingmsgpow', retrynumber=0 WHERE toaddress=? AND (status='msgqueued' or status='awaitingpubkey' or status='doingpubkeypow')''',
@@ -576,19 +568,6 @@ class singleWorker(threading.Thread, StoppableThread):
                     # of malicious behavior or a badly programmed client. If
                     # there are any other pubkeys in our inventory with the correct
                     # tag then we'll try to decrypt those.
-
-        if needToRequestPubkey: # Obviously we had no success looking in the sql inventory. Let's look through the memory inventory.
-            with shared.inventoryLock:
-                for hash, storedValue in shared.inventory.items():
-                    objectType, streamNumber, payload, expiresTime, tag = storedValue
-                    if objectType == 1 and tag == toTag:
-                        if shared.decryptAndCheckPubkeyPayload(payload, toaddress) == 'successful': #if valid, this function also puts it in the pubkeys table.
-                            needToRequestPubkey = False
-                            sqlExecute(
-                                '''UPDATE sent SET status='doingmsgpow', retrynumber=0 WHERE toaddress=? AND (status='msgqueued' or status='awaitingpubkey' or status='doingpubkeypow')''',
-                                toaddress)
-                            del shared.neededPubkeys[tag]
-                            break
         if needToRequestPubkey:
             sqlExecute(
                 '''UPDATE sent SET status='doingpubkeypow' WHERE toaddress=? AND status='msgqueued' ''',
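The two singleWorker hunks above replace both pubkey searches, the SQL query keyed on objecttype=1 and tag as well as the fallback scan of the in-memory dict, with one shared.inventory.by_type_and_tag(1, toTag) call. A standalone sketch of how such a method can merge the two sources, with sql_query as a stand-in for sqlQuery:

import collections

InventoryItem = collections.namedtuple('InventoryItem', 'type stream payload expires tag')

def by_type_and_tag_sketch(memory_inventory, sql_query, type, tag):
    # Candidates still held in memory...
    values = [v for v in memory_inventory.values() if v.type == type and v.tag == tag]
    # ...plus candidates previously flushed to the inventory table.
    values += [InventoryItem(*row) for row in sql_query(
        'SELECT objecttype, streamnumber, payload, expirestime, tag '
        'FROM inventory WHERE objecttype=? AND tag=?', type, tag)]
    return values

print(by_type_and_tag_sketch({}, lambda *args: [], 1, 'toTag'))   # -> []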
@@ -811,7 +790,6 @@ class singleWorker(threading.Thread, StoppableThread):
             objectType = 2
             shared.inventory[inventoryHash] = (
                 objectType, toStreamNumber, encryptedPayload, embeddedTime, '')
-            shared.inventorySets[toStreamNumber].add(inventoryHash)
             if shared.config.has_section(toaddress) or not checkBitfield(behaviorBitfield, shared.BITFIELD_DOESACK):
                 shared.UISignalQueue.put(('updateSentItemStatusByAckdata', (ackdata, tr.translateText("MainWindow", "Message sent. Sent at %1").arg(l10n.formatTimestamp()))))
             else:
@@ -921,7 +899,6 @@ class singleWorker(threading.Thread, StoppableThread):
             objectType = 1
             shared.inventory[inventoryHash] = (
                 objectType, streamNumber, payload, embeddedTime, '')
-            shared.inventorySets[streamNumber].add(inventoryHash)
             logger.info('sending inv (for the getpubkey message)')
             shared.broadcastToSendDataQueues((
                 streamNumber, 'advertiseobject', inventoryHash))
src/shared.py (125 changed lines)
@@ -49,8 +49,7 @@ addressGeneratorQueue = Queue.Queue()
 knownNodesLock = threading.Lock()
 knownNodes = {}
 sendDataQueues = [] #each sendData thread puts its queue in this list.
-inventory = {} #of objects (like msg payloads and pubkey payloads) Does not include protocol headers (the first 24 bytes of each packet).
-inventoryLock = threading.Lock() #Guarantees that two receiveDataThreads don't receive and process the same message concurrently (probably sent by a malicious individual)
+inventoryLock = threading.RLock() #Guarantees that two receiveDataThreads don't receive and process the same message concurrently (probably sent by a malicious individual)
 printLock = threading.Lock()
 appdata = '' #holds the location of the application data storage directory
 statusIconColor = 'red'
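inventoryLock becomes reentrant because call sites such as the _checkAndShare* helpers further down acquire it and then store into shared.inventory, whose methods take the same lock again; with a plain Lock the nested acquire would deadlock the owning thread. A minimal demonstration of the difference:

import threading

lock = threading.RLock()

def outer():
    with lock:       # caller already holds the lock, as _checkAndShareMsgWithPeers does
        return inner()

def inner():
    with lock:       # same thread re-acquires, as Inventory.__setitem__ does
        return True

assert outer()       # completes; with threading.Lock() the nested acquire would block forever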
@@ -85,7 +84,6 @@ lastTimeWeResetBytesSent = 0 # used for the bandwidth rate limit
 sendDataLock = threading.Lock() # used for the bandwidth rate limit
 receiveDataLock = threading.Lock() # used for the bandwidth rate limit
 daemon = False
-inventorySets = {1: set()} # key = streamNumer, value = a set which holds the inventory object hashes that we are aware of. This is used whenever we receive an inv message from a peer to check to see what items are new to us. We don't delete things out of it; instead, the singleCleaner thread clears and refills it every couple hours.
 needToWriteKnownNodesToDisk = False # If True, the singleCleaner will write it to disk eventually.
 maximumLengthOfTimeToBotherResendingMessages = 0
 objectProcessorQueue = ObjectProcessorQueue() # receiveDataThreads dump objects they hear on the network into this queue to be processed.
@@ -135,6 +133,88 @@ NODE_SSL = 2
 #Bitfield flags
 BITFIELD_DOESACK = 1

+import collections
+
+InventoryItem = collections.namedtuple('InventoryItem', 'type stream payload expires tag')
+
+
+class Inventory(collections.MutableMapping):
+    def __init__(self):
+        super(Inventory, self).__init__()
+        self._inventory = {} #of objects (like msg payloads and pubkey payloads) Does not include protocol headers (the first 24 bytes of each packet).
+        self._streams = collections.defaultdict(set) # key = streamNumer, value = a set which holds the inventory object hashes that we are aware of. This is used whenever we receive an inv message from a peer to check to see what items are new to us. We don't delete things out of it; instead, the singleCleaner thread clears and refills it every couple hours.
+
+    def __contains__(self, hash):
+        global numberOfInventoryLookupsPerformed
+        with inventoryLock:
+            numberOfInventoryLookupsPerformed += 1
+            if hash in self._inventory:
+                return True
+            return bool(sqlQuery('SELECT 1 FROM inventory WHERE hash=?', hash))
+
+    def __getitem__(self, hash):
+        with inventoryLock:
+            if hash in self._inventory:
+                return self._inventory[hash]
+            rows = sqlQuery('SELECT objecttype, streamnumber, payload, expirestime, tag FROM inventory WHERE hash=?', hash)
+            if not rows:
+                raise KeyError(hash)
+            return InventoryItem(*rows[0])
+
+    def __setitem__(self, hash, value):
+        with inventoryLock:
+            value = InventoryItem(*value)
+            self._inventory[hash] = value
+            self._streams[value.stream].add(hash)
+
+    def __delitem__(self, hash):
+        raise NotImplementedError
+
+    def __iter__(self):
+        with inventoryLock:
+            hashes = self._inventory.keys()[:]
+            hashes += (hash for hash, in sqlQuery('SELECT hash FROM inventory'))
+            return hashes.__iter__()
+
+    def __len__(self):
+        with inventoryLock:
+            return len(self._inventory) + sqlQuery('SELECT count(*) FROM inventory')[0][0]
+
+    def by_type_and_tag(self, type, tag):
+        with inventoryLock:
+            values = [value for value in self._inventory.values() if value.type == type and value.tag == tag]
+            values += (InventoryItem(*value) for value in sqlQuery('SELECT objecttype, streamnumber, payload, expirestime, tag FROM inventory WHERE objecttype=? AND tag=?', type, tag))
+            return values
+
+    def hashes_by_stream(self, stream):
+        with inventoryLock:
+            return self._streams[stream]
+
+    def unexpired_hashes_by_stream(self, stream):
+        with inventoryLock:
+            t = int(time.time())
+            hashes = [hash for hash, value in self._inventory.items() if value.stream == stream and value.expires > t]
+            hashes += (payload for payload, in sqlQuery('SELECT hash FROM inventory WHERE streamnumber=? AND expirestime>?', stream, t))
+            return hashes
+
+    def flush(self):
+        with inventoryLock: # If you use both the inventoryLock and the sqlLock, always use the inventoryLock OUTSIDE of the sqlLock.
+            with SqlBulkExecute() as sql:
+                for hash, value in self._inventory.items():
+                    sql.execute('INSERT INTO inventory VALUES (?, ?, ?, ?, ?, ?)', hash, *value)
+                self._inventory.clear()
+
+    def clean(self):
+        with inventoryLock:
+            sqlExecute('DELETE FROM inventory WHERE expirestime<?',int(time.time()) - (60 * 60 * 3))
+            self._streams.clear()
+            for hash, value in self.items():
+                self._streams[value.stream].add(hash)
+
+
+inventory = Inventory()
+
+
 #Create a packet
 def CreatePacket(command, payload=''):
     payload_length = len(payload)
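Because Inventory subclasses collections.MutableMapping, implementing __getitem__, __setitem__, __delitem__, __iter__ and __len__ is enough to inherit items(), values(), get() and the rest of the dict interface, which is what lets clean() above walk self.items() across memory and disk together (here __contains__ is overridden as well, to avoid a full scan on membership tests). A minimal standalone illustration of that inheritance:

try:
    from collections.abc import MutableMapping   # Python 3
except ImportError:
    from collections import MutableMapping        # Python 2, as used in this diff

class TinyMapping(MutableMapping):
    def __init__(self):
        self._data = {}
    def __getitem__(self, key):
        return self._data[key]
    def __setitem__(self, key, value):
        self._data[key] = value
    def __delitem__(self, key):
        del self._data[key]
    def __iter__(self):
        return iter(self._data)
    def __len__(self):
        return len(self._data)

m = TinyMapping()
m['a'] = 1
print(list(m.items()))   # [('a', 1)] -- items() comes from MutableMapping for free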
@@ -145,9 +225,6 @@ def CreatePacket(command, payload=''):
     b[Header.size:] = payload
     return bytes(b)

-def isInSqlInventory(hash):
-    queryreturn = sqlQuery('''select hash from inventory where hash=?''', hash)
-    return queryreturn != []

 def encodeHost(host):
     if host.find('.onion') > -1:
@@ -417,7 +494,7 @@ def doCleanShutdown():
     UISignalQueue.put((
         'updateStatusBar',
         'Flushing inventory in memory out to disk. This should normally only take a second...'))
-    flushInventory()
+    inventory.flush()

     # Verify that the objectProcessor has finished exiting. It should have incremented the
     # shutdown variable from 1 to 2. This must finish before we command the sqlThread to exit.
@@ -453,15 +530,6 @@ def broadcastToSendDataQueues(data):
     for q in sendDataQueues:
         q.put(data)

-def flushInventory():
-    #Note that the singleCleanerThread clears out the inventory dictionary from time to time, although it only clears things that have been in the dictionary for a long time. This clears the inventory dictionary Now.
-    with SqlBulkExecute() as sql:
-        for hash, storedValue in inventory.items():
-            objectType, streamNumber, payload, expiresTime, tag = storedValue
-            sql.execute('''INSERT INTO inventory VALUES (?,?,?,?,?,?)''',
-                       hash,objectType,streamNumber,payload,expiresTime,tag)
-            del inventory[hash]
-
 def fixPotentiallyInvalidUTF8Data(text):
     try:
         unicode(text,'utf-8')
@@ -703,14 +771,9 @@ def _checkAndShareUndefinedObjectWithPeers(data):
         logger.debug('We have already received this undefined object. Ignoring.')
         inventoryLock.release()
         return
-    elif isInSqlInventory(inventoryHash):
-        logger.debug('We have already received this undefined object (it is stored on disk in the SQL inventory). Ignoring it.')
-        inventoryLock.release()
-        return
     objectType, = unpack('>I', data[16:20])
     inventory[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime,'')
-    inventorySets[streamNumber].add(inventoryHash)
     inventoryLock.release()
     logger.debug('advertising inv with hash: %s' % inventoryHash.encode('hex'))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
@@ -735,15 +798,10 @@ def _checkAndShareMsgWithPeers(data):
         logger.debug('We have already received this msg message. Ignoring.')
         inventoryLock.release()
         return
-    elif isInSqlInventory(inventoryHash):
-        logger.debug('We have already received this msg message (it is stored on disk in the SQL inventory). Ignoring it.')
-        inventoryLock.release()
-        return
     # This msg message is valid. Let's let our peers know about it.
     objectType = 2
     inventory[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime,'')
-    inventorySets[streamNumber].add(inventoryHash)
     inventoryLock.release()
     logger.debug('advertising inv with hash: %s' % inventoryHash.encode('hex'))
     broadcastToSendDataQueues((streamNumber, 'advertiseobject', inventoryHash))
@@ -776,15 +834,10 @@ def _checkAndShareGetpubkeyWithPeers(data):
         logger.debug('We have already received this getpubkey request. Ignoring it.')
         inventoryLock.release()
         return
-    elif isInSqlInventory(inventoryHash):
-        logger.debug('We have already received this getpubkey request (it is stored on disk in the SQL inventory). Ignoring it.')
-        inventoryLock.release()
-        return

     objectType = 0
     inventory[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime,'')
-    inventorySets[streamNumber].add(inventoryHash)
     inventoryLock.release()
     # This getpubkey request is valid. Forward to peers.
     logger.debug('advertising inv with hash: %s' % inventoryHash.encode('hex'))
@@ -820,14 +873,9 @@ def _checkAndSharePubkeyWithPeers(data):
         logger.debug('We have already received this pubkey. Ignoring it.')
         inventoryLock.release()
         return
-    elif isInSqlInventory(inventoryHash):
-        logger.debug('We have already received this pubkey (it is stored on disk in the SQL inventory). Ignoring it.')
-        inventoryLock.release()
-        return
     objectType = 1
     inventory[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime, tag)
-    inventorySets[streamNumber].add(inventoryHash)
     inventoryLock.release()
     # This object is valid. Forward it to peers.
     logger.debug('advertising inv with hash: %s' % inventoryHash.encode('hex'))
@@ -864,15 +912,10 @@ def _checkAndShareBroadcastWithPeers(data):
         logger.debug('We have already received this broadcast object. Ignoring.')
         inventoryLock.release()
         return
-    elif isInSqlInventory(inventoryHash):
-        logger.debug('We have already received this broadcast object (it is stored on disk in the SQL inventory). Ignoring it.')
-        inventoryLock.release()
-        return
     # It is valid. Let's let our peers know about it.
     objectType = 3
     inventory[inventoryHash] = (
         objectType, streamNumber, data, embeddedTime, tag)
-    inventorySets[streamNumber].add(inventoryHash)
     inventoryLock.release()
     # This object is valid. Forward it to peers.
     logger.debug('advertising inv with hash: %s' % inventoryHash.encode('hex'))