Skip to content
This repository has been archived by the owner on Jul 12, 2021. It is now read-only.

flake8 cleanup #44

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion backends/bitcoind/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
from blockchain_processor import BlockchainProcessor
from blockchain_processor import BlockchainProcessor # noqa
71 changes: 28 additions & 43 deletions backends/bitcoind/blockchain_processor.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,18 @@
import ast
import hashlib
from json import dumps, loads
from Queue import Queue
import hashlib
import leveldb
import os
from Queue import Queue
import random
import sys
import time
import threading
import time
import traceback
import urllib

from backends.bitcoind import deserialize
from processor import Processor, print_log
from utils import *
from utils import * # todo: import * is generally frowned upon. should import just what is used


class BlockchainProcessor(Processor):
Expand All @@ -38,7 +37,7 @@ def __init__(self, config, shared):
self.address_queue = Queue()
self.dbpath = config.get('leveldb', 'path')
self.pruning_limit = config.getint('leveldb', 'pruning_limit')
self.db_version = 1 # increase this when database needs to be updated
self.db_version = 1 # increase this when database needs to be updated

self.dblock = threading.Lock()
try:
Expand Down Expand Up @@ -81,7 +80,7 @@ def __init__(self, config, shared):

# check version
if self.db_version != db_version:
print_log("Your database '%s' is deprecated. Please create a new database"%self.dbpath)
print_log("Your database '%s' is deprecated. Please create a new database" % self.dbpath)
self.shared.stop()
return

Expand Down Expand Up @@ -123,10 +122,10 @@ def serialize(self, h):
return s

def serialize_item(self, txid, txpos, height, spent=chr(0)):
    """Pack one 40-byte history item for an address.

    Layout: 32-byte raw txid + 4-byte tx position + 3-byte block height
    (all hex-decoded), followed by the 1-byte spent marker.
    NOTE(review): Python 2 only -- str.decode('hex') does not exist in
    Python 3; int_to_hex/rev_hex come from the project's utils module.
    """
    # The diff span contained this assignment twice (old + new diff line);
    # a single copy is the intended code.
    s = (txid + int_to_hex(txpos, 4) + int_to_hex(height, 3)).decode('hex') + spent
    return s

def deserialize_item(self,s):
def deserialize_item(self, s):
txid = s[0:32].encode('hex')
txpos = int(rev_hex(s[32:36].encode('hex')), 16)
height = int(rev_hex(s[36:39].encode('hex')), 16)
Expand Down Expand Up @@ -342,7 +341,6 @@ def get_merkle(self, tx_hash, height):

return {"block_height": height, "merkle": s, "pos": tx_pos}


def add_to_history(self, addr, tx_hash, tx_pos, tx_height):
# keep it sorted
s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40*chr(0)
Expand All @@ -366,55 +364,54 @@ def add_to_history(self, addr, tx_hash, tx_pos, tx_height):
txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
self.batch_txio[txo] = addr



def revert_add_to_history(self, addr, tx_hash, tx_pos, tx_height):
    """Undo a previous add_to_history: remove this tx's serialized item
    from the address's in-memory history blob (self.batch_list)."""
    serialized_hist = self.batch_list[addr]
    s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40*chr(0)
    if serialized_hist.find(s) == -1:
        # NOTE(review): a bare `raise` with no active exception is itself an
        # error at runtime; an explicit exception with a message describing
        # the missing item would be clearer. Left as-is to preserve the
        # exception type callers may rely on.
        raise
    serialized_hist = serialized_hist.replace(s, '')
    self.batch_list[addr] = serialized_hist



def prune_history(self, addr, undo):
    """Drop spent history items for `addr` down to the pruning limit.

    Items whose spent bit (byte 39) is set are removed from the 80-byte-
    record history blob; each removed item is appended (in order) to
    undo[addr] so revert_prune_history can restore them.
    """
    if undo.get(addr) is None:
        undo[addr] = []

    serialized_hist = self.batch_list[addr]
    l = len(serialized_hist)/80  # history blob is a sequence of 80-byte records
    for i in range(l):
        # stop once the history is short enough to keep entirely
        if len(serialized_hist)/80 < self.pruning_limit:
            break
        item = serialized_hist[80*i:80*(i+1)]
        if item[39:40] == chr(1):
            # spent items must carry the chr(2) end marker
            assert item[79:80] == chr(2)
            serialized_hist = serialized_hist[0:80*i] + serialized_hist[80*(i+1):]
            undo[addr].append(item)  # items are ordered
    self.batch_list[addr] = serialized_hist


def revert_prune_history(self, addr, undo):
# restore removed items
serialized_hist = self.batch_list[addr]

if undo.get(addr) is not None:
if undo.get(addr) is not None:
itemlist = undo.pop(addr)
else:
return
return

if not itemlist: return
if not itemlist:
return

l = len(serialized_hist)/80
tx_item = ''
for i in range(l-1, -1, -1):
if tx_item == '':
if not itemlist:
if not itemlist:
break
else:
tx_item = itemlist.pop(-1) # get the last element
tx_item = itemlist.pop(-1) # get the last element
tx_height = int(rev_hex(tx_item[36:39].encode('hex')), 16)

item = serialized_hist[80*i:80*(i+1)]
item_height = int(rev_hex(item[36:39].encode('hex')), 16)

Expand All @@ -427,7 +424,6 @@ def revert_prune_history(self, addr, undo):

self.batch_list[addr] = serialized_hist


def set_spent_bit(self, addr, txi, is_spent, txid=None, index=None, height=None):
serialized_hist = self.batch_list[addr]
l = len(serialized_hist)/80
Expand All @@ -437,7 +433,7 @@ def set_spent_bit(self, addr, txi, is_spent, txid=None, index=None, height=None)
if is_spent:
new_item = item[0:39] + chr(1) + self.serialize_item(txid, index, height, chr(2))
else:
new_item = item[0:39] + chr(0) + chr(0)*40
new_item = item[0:39] + chr(0) + chr(0)*40
serialized_hist = serialized_hist[0:80*i] + new_item + serialized_hist[80*(i+1):]
break
else:
Expand All @@ -447,12 +443,10 @@ def set_spent_bit(self, addr, txi, is_spent, txid=None, index=None, height=None)

self.batch_list[addr] = serialized_hist


def unset_spent_bit(self, addr, txi):
    """Mark outpoint `txi` as unspent again for `addr`.

    Records which address the outpoint belongs to, then clears its spent
    flag in the serialized history.
    """
    self.batch_txio[txi] = addr
    self.set_spent_bit(addr, txi, False)


def deserialize_block(self, block):
txlist = block.get('tx')
tx_hashes = [] # ordered txids
Expand Down Expand Up @@ -501,7 +495,6 @@ def import_block(self, block, block_hash, block_height, sync, revert=False):
else:
undo_info = {}


if not revert:
# read addresses of tx inputs
for tx in txdict.values():
Expand Down Expand Up @@ -529,17 +522,13 @@ def import_block(self, block, block_hash, block_height, sync, revert=False):
for x in tx.get('outputs'):
txo = (txid + int_to_hex(x.get('index'), 4)).decode('hex')
block_outputs.append(txo)
addr_to_read.append( x.get('address') )
addr_to_read.append(x.get('address'))

undo = undo_info.get(txid)
for i, x in enumerate(tx.get('inputs')):
addr = undo['prev_addr'][i]
addr_to_read.append(addr)





# read histories of addresses
for txid, tx in txdict.items():
for x in tx.get('outputs'):
Expand All @@ -556,20 +545,18 @@ def import_block(self, block, block_hash, block_height, sync, revert=False):
self.shared.stop()
raise


# process
t1 = time.time()

if revert:
tx_hashes = tx_hashes[::-1]


for txid in tx_hashes: # must be ordered
tx = txdict[txid]
if not revert:

undo = { 'prev_addr':[] } # contains the list of pruned items for each address in the tx; also, 'prev_addr' is a list of prev addresses
undo = {'prev_addr': []} # contains the list of pruned items for each address in the tx; also, 'prev_addr' is a list of prev addresses

prev_addr = []
for i, x in enumerate(tx.get('inputs')):
txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
Expand All @@ -583,7 +570,7 @@ def import_block(self, block, block_hash, block_height, sync, revert=False):
self.prune_history(addr, undo)
prev_addr.append(addr)

undo['prev_addr'] = prev_addr
undo['prev_addr'] = prev_addr

# here I add only the outputs to history; maybe I want to add inputs too (that's in the other loop)
for x in tx.get('outputs'):
Expand Down Expand Up @@ -611,10 +598,9 @@ def import_block(self, block, block_hash, block_height, sync, revert=False):

assert undo == {}

if revert:
if revert:
assert undo_info == {}


# write
max_len = 0
max_addr = ''
Expand Down Expand Up @@ -914,7 +900,6 @@ def memorypool_update(self):
for addr in touched_addresses:
self.invalidate_cache(addr)


def invalidate_cache(self, address):
with self.cache_lock:
if address in self.history_cache:
Expand Down
47 changes: 22 additions & 25 deletions backends/bitcoind/deserialize.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
# this code comes from ABE. it can probably be simplified
#
#

import mmap
import string
import struct
import types

from utils import *
from utils import * # todo: import * is generally frowned upon. should import just what is used


class SerializationError(Exception):
Expand Down Expand Up @@ -304,20 +302,20 @@ def script_GetOp(bytes):
(nSize,) = struct.unpack_from('<I', bytes, i)
i += 4
if i+nSize > len(bytes):
vch = "_INVALID_"+bytes[i:]
i = len(bytes)
vch = "_INVALID_"+bytes[i:]
i = len(bytes)
else:
vch = bytes[i:i+nSize]
i += nSize
vch = bytes[i:i+nSize]
i += nSize

yield (opcode, vch, i)


def script_GetOpName(opcode):
    """Return a human-readable name for a script opcode (e.g. "DUP").

    Unknown opcodes yield "InvalidOp_<n>" instead of raising.
    """
    # The diff span contained both the old (8-space) and new (4-space)
    # indented copies of this function; this is the single intended copy.
    try:
        return (opcodes.whatis(opcode)).replace("OP_", "")
    except KeyError:
        return "InvalidOp_"+str(opcode)


def decode_script(bytes):
Expand All @@ -344,52 +342,51 @@ def match_decoded(decoded, to_match):
return True



def get_address_from_input_script(bytes):
    """Extract (pubkeys, signatures, address) from a TxIn script.

    Returns:
        - (None, None, address) for a standard sig+pubkey input,
        - (pubkeys, signatures, p2sh_address) for a recognized 2-of-2 or
          2-of-3 multisig redeem script,
        - ([], [], None) for coinbase or unrecognized scripts.
    NOTE(review): shadows the `bytes` builtin; Python 2 only
    (str.encode('hex'), map returning a list).
    """
    try:
        decoded = [x for x in script_GetOp(bytes)]
    except:
        # coinbase transactions raise an exception
        return [], [], None

    # non-generated TxIn transactions push a signature
    # (seventy-something bytes) and then their public key
    # (33 or 65 bytes) onto the stack:
    match = [opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4]
    if match_decoded(decoded, match):
        return None, None, public_key_to_bc_address(decoded[1][1])

    # p2sh transaction, 2 of n: OP_0 followed by the signatures and the
    # serialized redeem script as the final push
    match = [opcodes.OP_0]
    while len(match) < len(decoded):
        match.append(opcodes.OP_PUSHDATA4)

    if match_decoded(decoded, match):
        redeemScript = decoded[-1][1]
        signatures = map(lambda x: x[1].encode('hex'), decoded[1:-1])
        dec2 = [x for x in script_GetOp(redeemScript)]

        # 2 of 2
        match2 = [opcodes.OP_2, opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_2, opcodes.OP_CHECKMULTISIG]
        if match_decoded(dec2, match2):
            pubkeys = [dec2[1][1].encode('hex'), dec2[2][1].encode('hex')]
            return pubkeys, signatures, hash_160_to_bc_address(hash_160(redeemScript), 5)

        # 2 of 3
        match2 = [opcodes.OP_2, opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_3, opcodes.OP_CHECKMULTISIG]
        if match_decoded(dec2, match2):
            pubkeys = [dec2[1][1].encode('hex'), dec2[2][1].encode('hex'), dec2[3][1].encode('hex')]
            return pubkeys, signatures, hash_160_to_bc_address(hash_160(redeemScript), 5)

    return [], [], None


def get_address_from_output_script(bytes):
try:
decoded = [ x for x in script_GetOp(bytes) ]
decoded = [x for x in script_GetOp(bytes)]
except:
return "None"

Expand Down Expand Up @@ -417,9 +414,9 @@ def get_address_from_output_script(bytes):
return hash_160_to_bc_address(decoded[2][1])

# p2sh
match = [ opcodes.OP_HASH160, opcodes.OP_PUSHDATA4, opcodes.OP_EQUAL ]
match = [opcodes.OP_HASH160, opcodes.OP_PUSHDATA4, opcodes.OP_EQUAL]
if match_decoded(decoded, match):
addr = hash_160_to_bc_address(decoded[1][1],5)
addr = hash_160_to_bc_address(decoded[1][1], 5)
return addr

return "None"
Loading