add transaction hashes and filter call function

This commit is contained in:
Robert Miller 2021-06-28 17:28:28 -04:00
parent 1427c952e2
commit f1719ab114
2 changed files with 6272 additions and 332 deletions

View File

@ -2,7 +2,8 @@ from web3 import Web3
from pathlib import Path from pathlib import Path
import json import json
***REMOVED***base_provider = Web3.HTTPProvider(taarush_Node) rpc_end_point = ""
base_provider = Web3.HTTPProvider(rpc_end_point)
w3 = Web3(base_provider) w3 = Web3(base_provider)
cache_directoty = './cache' cache_directoty = './cache'
@ -14,12 +15,27 @@ class BlockData:
self.receipts = receipts self.receipts = receipts
self.calls = calls self.calls = calls
self.logs = logs self.logs = logs
self.transaction_hashes = self.get_transaction_hashes()
pass pass
## Gets a list of unique transaction hashes in the calls of this block
def get_transaction_hashes(self):
    """Return the unique transaction hashes found in this block's calls.

    Calls of type 'reward' carry no transaction, so they are skipped.
    Hashes are returned in order of first appearance.

    Returns:
        list: unique transaction hash values from self.calls.
    """
    seen = set()
    result = []
    for call in self.calls:
        # Reward (e.g. block/uncle reward) entries have no transaction hash.
        if call['type'] == 'reward':
            continue
        tx_hash = call['transactionHash']
        # Set membership is O(1); the original scanned the result list (O(n) per call).
        if tx_hash not in seen:
            seen.add(tx_hash)
            result.append(tx_hash)
    return result
## Makes a nicely formatted JSON object out of this data object.
def toJSON(self): def toJSON(self):
return json.dumps(self, default=lambda o: o.__dict__, return json.dumps(self, default=lambda o: o.__dict__,
sort_keys=True, indent=4) sort_keys=True, indent=4)
## Writes this object to a JSON file for loading later
def writeJSON(self): def writeJSON(self):
json_data = self.toJSON() json_data = self.toJSON()
cache_file = '{cacheDirectory}/{blockNumber}.json'.format(cacheDirectory=cache_directoty, blockNumber=self.block_number) cache_file = '{cacheDirectory}/{blockNumber}.json'.format(cacheDirectory=cache_directoty, blockNumber=self.block_number)
@ -32,11 +48,25 @@ class BlockData:
f = open(cache_file, "x") f = open(cache_file, "x")
f.write(json_data) f.write(json_data)
f.close() f.close()
## Gets all the calls associated with a transaction hash
def get_filtered_calls(self, hash):
    """Return every call in this block whose transactionHash equals *hash*.

    Args:
        hash: the transaction hash to filter self.calls by.

    Returns:
        list: the matching call dicts, in their original order.
    """
    return [call for call in self.calls if call['transactionHash'] == hash]
## Creates a block object, either from the cache or from the chain itself
def createFromBlockNumber(block_number): def createFromBlockNumber(block_number):
cache_file = '{cacheDirectory}/{blockNumber}.json'.format(cacheDirectory=cache_directoty, blockNumber=block_number) cache_file = '{cacheDirectory}/{blockNumber}.json'.format(cacheDirectory=cache_directoty, blockNumber=block_number)
## Check to see if the data already exists in the cache
## if it exists load the data from cache
## If not then get the data from the chain and save it to the cache
if (Path(cache_file).is_file()): if (Path(cache_file).is_file()):
print("Cache for this block exists, loading again") print("Cache for this block exists, loading again")
block_file = open(cache_file) block_file = open(cache_file)
@ -47,7 +77,7 @@ def createFromBlockNumber(block_number):
print("Cache for this block did not exist, getting data") print("Cache for this block did not exist, getting data")
## Get block data ## Get block data
block_data = w3.eth.get_block(block_number, False) block_data = w3.eth.get_block(block_number, True)
## Get the block receipts ## Get the block receipts
## TODO: evaluate whether or not this is sufficient or if gas used needs to be converted to a proper big number. ## TODO: evaluate whether or not this is sufficient or if gas used needs to be converted to a proper big number.
@ -60,6 +90,10 @@ def createFromBlockNumber(block_number):
## Get the logs ## Get the logs
block_hash = (block_data.hash).hex() block_hash = (block_data.hash).hex()
block_logs = w3.eth.get_logs({'blockHash': block_hash}) block_logs = w3.eth.get_logs({'blockHash': block_hash})
## Create a new object
block = BlockData(block_number, block_data, block_receipts_raw, block_calls, block_logs) block = BlockData(block_number, block_data, block_receipts_raw, block_calls, block_logs)
## Write the result to a JSON file for loading in the future
block.writeJSON() block.writeJSON()
return block return block

6564
cache/12412732.json vendored

File diff suppressed because it is too large Load Diff