diff --git a/raiden/api/wamp_server.py b/raiden/api/wamp_server.py index 30be30c7e0..dcc0c372f5 100644 --- a/raiden/api/wamp_server.py +++ b/raiden/api/wamp_server.py @@ -1,5 +1,5 @@ +# -*- coding: utf-8 -*- import os -import json import types import inspect @@ -21,7 +21,7 @@ def register_pubsub_with_callback(func=None): if isinstance(func, types.FunctionType): - func._callback_pubsub = func.__name__ + "_status" + func._callback_pubsub = func.__name__ + '_status' # pylint: disable=protected-access return func @@ -39,8 +39,8 @@ def __init__(self, ws, extra): def register_pubsub(self, topic): assert isinstance(topic, str) - self.protocol.register_pubsub( - "http://localhost:{}/raiden#{}".format(self.port, topic)) + url = 'http://localhost:{}/raiden#{}'.format(self.port, topic) + self.protocol.register_pubsub(url) # pylint: disable=no-member print 'Publish URI created: /raiden#{}'.format(topic) def on_open(self): @@ -50,18 +50,20 @@ def on_open(self): # this is the place where the event topics are defined that don't correspond to # a callback event - self.protocol.register_object( - "http://localhost:{}/raiden#".format(self.port), self) + url = "http://localhost:{}/raiden#".format(self.port) + self.protocol.register_object(url, self) # pylint: disable=no-member for topic in self.event_topics: self.register_pubsub(topic) + # register all functions in self decorated with @register_callback_pubsub # the uri will be the method name suffixed with '_status' # e.g. 
topic for 'transfer()' status will be '#transfer_status' for k in inspect.getmembers(self, inspect.ismethod): if '_callback_pubsub' in k[1].__dict__: - self.register_pubsub(k[1]._callback_pubsub) - print "WAMP registration complete\n" + self.register_pubsub(k[1]._callback_pubsub) # pylint: disable=protected-access + + print 'WAMP registration complete\n' def on_message(self, message): # FIXME: handle client reload/reconnect @@ -71,7 +73,7 @@ def on_message(self, message): return super(WebSocketAPI, self).on_message(message) - def on_close(self, reason): + def on_close(self, reason): # pylint: disable=unused-argument,arguments-differ print "closed" # application: @@ -80,13 +82,18 @@ def status_callback(self, _, status, id, topic, reason=None): topic name guidelines: 'transfer_callb' - for inititated transfers (used in webui-JS) """ + # pylint: disable=redefined-builtin, invalid-name + data = [id, status, reason] + # 7 - 'publish' message = [7, "http://localhost:{}/raiden#{}".format(self.port, topic), data] self.publish(message) return True def callback_with_exception(self, id, topic, reason=None): + # pylint: disable=redefined-builtin, invalid-name + if not reason: reason = 'UNKNOWN' self.status_callback(None, False, id, topic, reason=reason) @@ -97,8 +104,8 @@ def publish(self, message): 7 - 'WAMP publish' """ print message - assert type(message) is list and len(message) == 3 - self.protocol.pubsub_action(message) + assert isinstance(message, list) and len(message) == 3 + self.protocol.pubsub_action(message) # pylint: disable=no-member # refactor to API @export_rpc @@ -110,15 +117,17 @@ def get_assets(self): def get_address(self): return self.address - def print_callback(_, status): + def print_callback(self, status): # pylint: disable=no-self-use print status - - @register_pubsub_with_callback + @register_pubsub_with_callback # noqa @export_rpc def transfer(self, asset_address, amount, target, callback_id): - """ wraps around the APIs transfer() method to 
introduce additional PubSub and callback features - To get access to the raw API method, this method would have to be renamed + """ + Wraps around the APIs transfer() method to introduce additional + PubSub and callback features. + + To get access to the raw API method, this method would have to be renamed """ # TODO: check all possible errors and pass them to the WAMP-Protocol publish_topic = 'transfer_status' @@ -161,7 +170,7 @@ def transfer(self, asset_address, amount, target, callback_id): self.callback_with_exception(callback_id, publish_topic, reason='UNKNOWN') raise - def _dispatch_additional_instance_methods(self, instance): + def _dispatch_additional_instance_methods(self, instance): # pylint: disable=invalid-name """ dispatches all methods from the api that aren't already defined in WebSocketAPI""" # self_methods = set([attr for attr in dir(self) if is_method(self, attr)]) self_methods = [k[0] for k in inspect.getmembers(self, inspect.ismethod) @@ -171,8 +180,9 @@ def _dispatch_additional_instance_methods(self, instance): if '_callback_pubsub' in k[1].__dict__] methods_difference = list(set(instance_methods) - set(self_methods)) map(export_rpc, methods_difference) - self.protocol.register_object( - "http://localhost:{}/raiden#".format(self.port), instance) # XXX check for name collisions + + url = 'http://localhost:{}/raiden#'.format(self.port) + self.protocol.register_object(url, instance) # XXX check for name collisions class WAMPRouter(object): @@ -185,10 +195,9 @@ def __init__(self, raiden, port, events=None): self.port = port self.events = events or [] # XXX check syntax - def make_static_application(self, basepath, staticdir): + def make_static_application(self, basepath, staticdir): # pylint: disable=no-self-use def content_type(path): - """Guess mime-type - """ + """Guess mime-type. 
""" if path.endswith(".css"): return "text/css" @@ -201,7 +210,7 @@ def content_type(path): else: return "application/octet-stream" - def not_found(environ, start_response): + def not_found(environ, start_response): # pylint: disable=unused-argument start_response('404 Not Found', [('content-type', 'text/html')]) return ["""

Page not Found

That page is unknown. Return to @@ -223,7 +232,7 @@ def app(environ, start_response): return not_found(environ, start_response) return app - def serve_index(self, environ, start_response): + def serve_index(self, environ, start_response): # pylint: disable=unused-argument path = os.path.join(self.path, 'webui/index.html') start_response("200 OK", [("Content-Type", "text/html")]) return open(path).readlines() @@ -231,23 +240,30 @@ def serve_index(self, environ, start_response): def run(self): static_path = os.path.join(self.path, 'webui') # XXX naming - routes = [('^/static/', self.make_static_application('/static/', static_path)), - ('^/$', self.serve_index), - ('^/ws$', WebSocketAPI) - ] + routes = [ + ('^/static/', self.make_static_application('/static/', static_path)), + ('^/$', self.serve_index), + ('^/ws$', WebSocketAPI) + ] + + data = { + 'raiden': self.raiden, + 'port': self.port, + 'events': self.events + } - data = {'raiden': self.raiden, - 'port': self.port, - 'events': self.events - } resource = Resource(routes, extra=data) - server = WebSocketServer(("", self.port), resource, debug=True) + host_port = ('', self.port) + server = WebSocketServer( + host_port, + resource, + debug=True, + ) server.serve_forever() - def stop(): + def stop(self): raise NotImplementedError() -""" -Tuple index out of range when the receivers address is shorter than 40(?) chars -""" + +# Tuple index out of range when the receivers address is shorter than 40(?) 
chars diff --git a/raiden/app.py b/raiden/app.py index bdcaecab40..bc41584664 100644 --- a/raiden/app.py +++ b/raiden/app.py @@ -144,7 +144,10 @@ def app(address, accmgr = AccountManager(keystore_path) if not accmgr.address_in_keystore(address): addresses = list(accmgr.accounts.keys()) - formatted_addresses = ["[{:3d}] - 0x{}".format(idx, addr) for idx, addr in enumerate(addresses)] + formatted_addresses = [ + '[{:3d}] - 0x{}'.format(idx, addr) + for idx, addr in enumerate(addresses) + ] should_prompt = True while should_prompt: diff --git a/raiden/benchmark/utils.py b/raiden/benchmark/utils.py index 58d1702135..647803b15c 100644 --- a/raiden/benchmark/utils.py +++ b/raiden/benchmark/utils.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from __future__ import print_function def print_serialization(pstats): # pylint: disable=too-many-locals @@ -16,7 +17,7 @@ def print_serialization(pstats): # pylint: disable=too-many-locals # total calls count recursion # total time is the time for the function itself (excluding subcalls) # accumulated_time is the time of the function plus the subcalls - primitive_calls, total_calls, total_time, acc_time, callers = data # pylint: disable=unused-variable + primitive_calls, total_calls, total_time, acc_time, _ = data if primitive_calls != total_calls: calls = '{}/{}'.format(total_calls, primitive_calls) diff --git a/raiden/encoding/encoders.py b/raiden/encoding/encoders.py index 79109626a5..d89080734e 100644 --- a/raiden/encoding/encoders.py +++ b/raiden/encoding/encoders.py @@ -23,7 +23,9 @@ def validate(self, value): raise ValueError('value is not an integer') if self.minimum > value or self.maximum < value: - msg = '{} is outside the valide range [{},{}]'.format(value, self.minimum, self.maximum) + msg = ( + '{} is outside the valide range [{},{}]' + ).format(value, self.minimum, self.maximum) raise ValueError(msg) if PY2: diff --git a/raiden/encoding/format.py b/raiden/encoding/format.py index e5ce9c7022..72a26cf467 100644 --- 
a/raiden/encoding/format.py +++ b/raiden/encoding/format.py @@ -117,7 +117,11 @@ def __setattr__(self, name, value): length = len(value) if length > field.size_bytes: - raise ValueError('value with length {length} for {attr} is to big'.format(length=length, attr=name)) + msg = 'value with length {length} for {attr} is too big'.format( + length=length, + attr=name, + ) + raise ValueError(msg) elif length < field.size_bytes: pad_size = field.size_bytes - length pad_value = b'\x00' * pad_size diff --git a/raiden/messages.py b/raiden/messages.py index 2d23225fe9..189b07cbee 100644 --- a/raiden/messages.py +++ b/raiden/messages.py @@ -586,7 +586,9 @@ def __init__(self, identifier, nonce, asset, transferred_amount, recipient, self.initiator = initiator def __repr__(self): - return '<{} [asset:{} nonce:{} transferred_amount:{} lock_amount:{} hash:{} locksroot:{}]>'.format( + representation = ( + '<{} [asset:{} nonce:{} transferred_amount:{} lock_amount:{} hash:{} locksroot:{}]>' + ).format( self.__class__.__name__, pex(self.asset), self.nonce, @@ -596,6 +598,8 @@ def __repr__(self): pex(self.locksroot), ) + return representation + @staticmethod def unpack(packed): lock = Lock( diff --git a/raiden/network/discovery.py b/raiden/network/discovery.py index 71baea331e..071373a8fd 100644 --- a/raiden/network/discovery.py +++ b/raiden/network/discovery.py @@ -45,12 +45,15 @@ class ContractDiscovery(Discovery): """On chain smart contract raiden node discovery: allows to register endpoints (host, port) for your ethereum-/raiden-address and looking up endpoints for other ethereum-/raiden-addressess. 
""" + def __init__( self, blockchainservice, discovery_contract_address, poll_timeout=DEFAULT_POLL_TIMEOUT): + super(ContractDiscovery, self).__init__() + self.chain = blockchainservice self.poll_timeout = poll_timeout diff --git a/raiden/network/nat.py b/raiden/network/nat.py index 75c7bf5ee2..39be4e5dd5 100644 --- a/raiden/network/nat.py +++ b/raiden/network/nat.py @@ -1,10 +1,11 @@ +# -*- coding: utf-8 -*- import miniupnpc from ethereum import slogging MAX_PORT = 65535 RAIDEN_IDENTIFICATOR = "raiden-network udp service" -log = slogging.getLogger(__name__) +log = slogging.getLogger(__name__) # pylint: disable=invalid-name def connect(): @@ -13,25 +14,25 @@ def connect(): u (miniupnc.UPnP): the connected upnp-instance router (string): the connection information """ - u = miniupnpc.UPnP() - u.discoverdelay = 200 - providers = u.discover() + upnp = miniupnpc.UPnP() + upnp.discoverdelay = 200 + providers = upnp.discover() if providers > 1: log.warning("multiple upnp providers found", num_providers=providers) elif providers < 1: log.error("no upnp providers found") return - router = u.selectigd() + router = upnp.selectigd() log.debug("connected", router=router) - if u.lanaddr == '0.0.0.0': + if upnp.lanaddr == '0.0.0.0': log.error("could not query your lanaddr") return - if u.externalipaddress() == '0.0.0.0' or u.externalipaddress() is None: + if upnp.externalipaddress() == '0.0.0.0' or upnp.externalipaddress() is None: log.error("could not query your externalipaddress") return - return u, router + return upnp, router def open_port(internal_port, external_start_port=None): @@ -39,22 +40,28 @@ def open_port(internal_port, external_start_port=None): UPnP. 
Args: internal_port (int): the target port of the raiden service - external_start_port (int): query for an external port starting here (default: internal_port) + external_start_port (int): query for an external port starting here + (default: internal_port) Returns: external_ip_address, external_port (tuple(str, int)): if successful or None """ if external_start_port is None: external_start_port = internal_port - u, router = connect() - if u is None: + + upnp, _ = connect() + + if upnp is None: return - register = lambda internal, external: u.addportmapping(internal, - 'UDP', - u.lanaddr, - external, - RAIDEN_IDENTIFICATOR, - '') + def register(internal, external): + return upnp.addportmapping( + internal, + 'UDP', + upnp.lanaddr, + external, + RAIDEN_IDENTIFICATOR, + '', + ) external_port = external_start_port success = register(internal_port, external_port) @@ -63,30 +70,46 @@ def open_port(internal_port, external_start_port=None): success = register(internal_port, external_port) if success: - log.info("registered port-mapping per upnp", - internal="{}:{}".format(u.lanaddr, internal_port), - external="{}:{}".format(u.externalipaddress(), external_port)) - return (u.externalipaddress(), external_port) - else: - log.error("could not register a port-mapping", router='FIXME') - return + internal = '{}:{}'.format(upnp.lanaddr, internal_port) + external = '{}:{}'.format(upnp.externalipaddress(), external_port) + + log.info( + 'registered port-mapping per upnp', + internal=internal, + external=external, + ) + + return (upnp.externalipaddress(), external_port) + + log.error( + 'could not register a port-mapping', + router='FIXME', + ) + return def release_port(internal_port): """Try to release the port mapping for `internal_port`. + Args: internal_port (int): the port that was previously forwarded to. + Returns: success (boolean): if the release was successful. 
""" - u, router = connect() - mapping = u.getspecificportmapping(internal_port, 'UDP') + upnp, router = connect() + mapping = upnp.getspecificportmapping(internal_port, 'UDP') + if mapping is None: - log.error("could not find a port mapping", router=router) + log.error('could not find a port mapping', router=router) return False - if u.deleteportmapping(internal_port, 'UDP'): - log.info("successfully released port mapping", router=router) + + if upnp.deleteportmapping(internal_port, 'UDP'): + log.info('successfully released port mapping', router=router) return True - else: - log.warning("could not release port mapping, check your router for stale mappings", router=router) - return False + + log.warning( + 'could not release port mapping, check your router for stale mappings', + router=router, + ) + return False diff --git a/raiden/network/rpc/client.py b/raiden/network/rpc/client.py index 99d4d2d068..53545b03ef 100644 --- a/raiden/network/rpc/client.py +++ b/raiden/network/rpc/client.py @@ -67,7 +67,13 @@ def send_transaction(sender, to, value=0, data='', startgas=GAS_LIMIT, This is necessary to support other remotes that don't support pyethapp's extended specs. 
@see https://github.com/ethereum/pyethapp/blob/develop/pyethapp/rpc_client.py#L359 """ - nonce = int(client.call('eth_getTransactionCount', encode_hex(sender), 'pending'), 16) + nonce_offset + pending_transactions_hex = client.call( + 'eth_getTransactionCount', + encode_hex(sender), + 'pending', + ) + pending_transactions = int(pending_transactions_hex, 16) + nonce = pending_transactions + nonce_offset tx = Transaction(nonce, gasprice, startgas, to, value, data) assert hasattr(client, 'privkey') and client.privkey @@ -285,8 +291,15 @@ def uninstall(self): class Asset(object): - def __init__(self, jsonrpc_client, asset_address, startgas=GAS_LIMIT, # pylint: disable=too-many-arguments - gasprice=GAS_PRICE, poll_timeout=DEFAULT_POLL_TIMEOUT): + def __init__( + self, + jsonrpc_client, + asset_address, + startgas=GAS_LIMIT, + gasprice=GAS_PRICE, + poll_timeout=DEFAULT_POLL_TIMEOUT): + # pylint: disable=too-many-arguments + result = jsonrpc_client.call( 'eth_getCode', address_encoder(asset_address), @@ -340,8 +353,10 @@ def transfer(self, to_address, amount): class Registry(object): - def __init__(self, jsonrpc_client, registry_address, startgas=GAS_LIMIT, # pylint: disable=too-many-arguments + def __init__(self, jsonrpc_client, registry_address, startgas=GAS_LIMIT, gasprice=GAS_PRICE, poll_timeout=DEFAULT_POLL_TIMEOUT): + # pylint: disable=too-many-arguments + result = jsonrpc_client.call( 'eth_getCode', address_encoder(registry_address), @@ -418,8 +433,15 @@ def assetadded_filter(self): class ChannelManager(object): - def __init__(self, jsonrpc_client, manager_address, startgas=GAS_LIMIT, # pylint: disable=too-many-arguments - gasprice=GAS_PRICE, poll_timeout=DEFAULT_POLL_TIMEOUT): + def __init__( + self, + jsonrpc_client, + manager_address, + startgas=GAS_LIMIT, + gasprice=GAS_PRICE, + poll_timeout=DEFAULT_POLL_TIMEOUT): + # pylint: disable=too-many-arguments + result = jsonrpc_client.call( 'eth_getCode', address_encoder(manager_address), @@ -538,8 +560,15 @@ def 
channelnew_filter(self): # pylint: disable=unused-argument class NettingChannel(object): - def __init__(self, jsonrpc_client, channel_address, startgas=GAS_LIMIT, # pylint: disable=too-many-arguments - gasprice=GAS_PRICE, poll_timeout=DEFAULT_POLL_TIMEOUT): + def __init__( + self, + jsonrpc_client, + channel_address, + startgas=GAS_LIMIT, + gasprice=GAS_PRICE, + poll_timeout=DEFAULT_POLL_TIMEOUT): + # pylint: disable=too-many-arguments + result = jsonrpc_client.call( 'eth_getCode', address_encoder(channel_address), @@ -671,7 +700,12 @@ def close(self, our_address, first_transfer, second_transfer): ) self.client.poll(transaction_hash.decode('hex'), timeout=self.poll_timeout) - log.info('close called', contract=pex(self.address), first_transfer=first_transfer, second_transfer=second_transfer) + log.info( + 'close called', + contract=pex(self.address), + first_transfer=first_transfer, + second_transfer=second_transfer, + ) elif first_transfer: first_encoded = first_transfer.encode() @@ -712,10 +746,12 @@ def update_transfer(self, our_address, transfer): ) self.client.poll(transaction_hash.decode('hex'), timeout=self.poll_timeout) log.info('update_transfer called', contract=pex(self.address), transfer=transfer) - # TODO: check if the ChannelSecretRevealed event was emitted and if it wasn't raise an error + # TODO: check if the ChannelSecretRevealed event was emitted and if + # it wasn't raise an error def unlock(self, our_address, unlock_proofs): - unlock_proofs = list(unlock_proofs) # force a list to get the length (could be a generator) + # force a list to get the length (could be a generator) + unlock_proofs = list(unlock_proofs) log.info( '%s locks to unlock', len(unlock_proofs), @@ -736,11 +772,17 @@ def unlock(self, our_address, unlock_proofs): gasprice=self.gasprice, ) self.client.poll(transaction_hash.decode('hex'), timeout=self.poll_timeout) - # TODO: check if the ChannelSecretRevealed event was emitted and if it wasn't raise an error + # TODO: check if the 
ChannelSecretRevealed event was emitted and if + # it wasn't raise an error # if log.getEffectiveLevel() >= logging.INFO: # only decode the lock if need to lock = messages.Lock.from_bytes(locked_encoded) - log.info('unlock called', contract=pex(self.address), lock=lock, secret=encode_hex(secret)) + log.info( + 'unlock called', + contract=pex(self.address), + lock=lock, + secret=encode_hex(secret), + ) def settle(self): transaction_hash = self.proxy.settle.transact( diff --git a/raiden/patches/geventwebsocket.py b/raiden/patches/geventwebsocket.py index 4a5be590f4..91de6b43e0 100644 --- a/raiden/patches/geventwebsocket.py +++ b/raiden/patches/geventwebsocket.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import import json @@ -6,12 +7,14 @@ from geventwebsocket.resource import Resource as ResourceBase - # monkey patch: gevent-websocket to support 'extra' argument -class Resource(ResourceBase): +class Resource(ResourceBase): # pylint: disable=too-few-public-methods + def __init__(self, apps=None, extra=None): super(Resource, self).__init__(apps) - assert type(extra) is dict or None + + assert isinstance(extra, (dict, type(None))) + if extra is not None: self.extra = extra @@ -24,21 +27,22 @@ def __call__(self, environ, start_response): raise Exception("No apps defined") if is_websocket_call: - ws = environ['wsgi.websocket'] + websocket = environ['wsgi.websocket'] extra = self.extra # here the WebSocketApplication objects get constructed - current_app = current_app(ws, extra) - current_app.ws = ws # TODO: needed? + current_app = current_app(websocket, extra) + current_app.ws = websocket # TODO: needed? 
current_app.handle() # Always return something, calling WSGI middleware may rely on it return [] else: return current_app(environ, start_response) + class WampProtocol(WampProtocolBase): def __init__(self, *args, **kwargs): - super(WampProtocol,self).__init__(*args, **kwargs) + super(WampProtocol, self).__init__(*args, **kwargs) def on_message(self, message): # FIX: handle when ws is already closed (message is None) diff --git a/raiden/profiling/profiler.py b/raiden/profiling/profiler.py index 78c5b13c40..16317955f9 100644 --- a/raiden/profiling/profiler.py +++ b/raiden/profiling/profiler.py @@ -5,14 +5,12 @@ import sys import threading import time -import weakref from collections import OrderedDict, namedtuple from itertools import chain, izip_longest -from UserDict import UserDict import greenlet -from stack import get_trace_info, get_trace_from_frame +from raiden.profiling.stack import get_trace_info, get_trace_from_frame # TODO: @@ -45,7 +43,7 @@ # PEP-0418 # perf_counter = It does include time elapsed during sleep and is system-wide. 
try: - clock = time.perf_counter # pylint: disable=no-member except: clock = time.clock @@ -280,7 +278,7 @@ def thread_profiler(frame, event, arg): now = clock() # measure once and reuse it - current_greenlet = greenlet.getcurrent() + current_greenlet = greenlet.getcurrent() # pylint: disable=no-member current_state = ensure_thread_state(current_greenlet, frame) if _state.last != current_state: @@ -336,13 +334,13 @@ def start_profiler(): _state = GlobalState() frame = sys._getframe(0) - current_greenlet = greenlet.getcurrent() + current_greenlet = greenlet.getcurrent() # pylint: disable=no-member thread_state = ensure_thread_state(current_greenlet, frame) _state.last = thread_state # this needs to be instantiate before the handler is installed - greenlet.settrace(greenlet_profiler) + greenlet.settrace(greenlet_profiler) # pylint: disable=no-member sys.setprofile(thread_profiler) threading.setprofile(thread_profiler) @@ -354,7 +352,7 @@ def stop_profiler(): # measurements in the end sys.setprofile(None) threading.setprofile(None) - greenlet.settrace(None) + greenlet.settrace(None) # pylint: disable=no-member @contextlib.contextmanager @@ -461,7 +459,6 @@ def equal(first_node, second_node): return (module and function) or runtime_id - calltrees = [thread_state.calltree for thread_state in threadstates] tree = [ (1, {}, [state.calltree for state in threadstates]) ] diff --git a/raiden/profiling/stack.py b/raiden/profiling/stack.py index 08ecb16aa0..cd9bdb18c5 100644 --- a/raiden/profiling/stack.py +++ b/raiden/profiling/stack.py @@ -28,6 +28,21 @@ def _getitem_from_frame(f_locals, key, default=None): return default +def to_dict(dictish): + """ + Given something that closely resembles a dictionary, we attempt + to coerce it into a proper dictionary. 
+ """ + if hasattr(dictish, 'iterkeys'): + method = dictish.iterkeys + elif hasattr(dictish, 'keys'): + method = dictish.keys + else: + raise ValueError(dictish) + + return dict((k, dictish[k]) for k in method()) + + def get_lines_from_file(filename, lineno, context_lines, loader=None, module_name=None): """ Returns context_lines before and after lineno from file. @@ -39,20 +54,6 @@ def get_lines_from_file(filename, lineno, context_lines, loader=None, module_nam try: source = loader.get_source(module_name) except ImportError: - # Traceback (most recent call last): - # File "/Users/dcramer/Development/django-sentry/sentry/client/handlers.py", line 31, in emit - # get_client().create_from_record(record, request=request) - # File "/Users/dcramer/Development/django-sentry/sentry/client/base.py", line 325, in create_from_record - # data['__sentry__']['frames'] = varmap(shorten, get_stack_info(stack)) - # File "/Users/dcramer/Development/django-sentry/sentry/utils/stacks.py", line 112, in get_stack_info - # pre_context_lineno, pre_context, context_line, post_context = get_lines_from_file(filename, lineno, 7, loader, module_name) # noqa - # File "/Users/dcramer/Development/django-sentry/sentry/utils/stacks.py", line 24, in get_lines_from_file - # source = loader.get_source(module_name) - # File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/pkgutil.py", line 287, in get_source # noqa - # fullname = self._fix_name(fullname) - # File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/pkgutil.py", line 262, in _fix_nam: # noqa - # "module %s" % (self.fullname, fullname)) - # ImportError: Loader for module cProfile cannot handle module __main__ source = None if source is not None: source = source.splitlines() @@ -113,7 +114,8 @@ def get_stack_info(frame): loader = _getitem_from_frame(f_globals, '__loader__') if lineno is not None and abs_path: - pre_context, context_line, post_context = get_lines_from_file(abs_path, lineno - 1, 5, 
loader, module_name) + line_data = get_lines_from_file(abs_path, lineno - 1, 5, loader, module_name) + pre_context, context_line, post_context = line_data frame_result.update({ 'pre_context': pre_context, diff --git a/raiden/tasks.py b/raiden/tasks.py index 8b527957cf..36e5e3c4ee 100644 --- a/raiden/tasks.py +++ b/raiden/tasks.py @@ -572,6 +572,7 @@ def _run(self): # noqa fee = self.fee originating_transfer = self.originating_transfer + raiden = self.raiden assetmanager = raiden.get_manager_by_asset_address(self.asset_address) transfermanager = assetmanager.transfermanager from_address = originating_transfer.sender diff --git a/raiden/tests/conftest.py b/raiden/tests/conftest.py index 8b575a566e..5062dbe98c 100644 --- a/raiden/tests/conftest.py +++ b/raiden/tests/conftest.py @@ -5,7 +5,101 @@ from ethereum import slogging from ethereum.keys import PBKDF2_CONSTANTS -from raiden.tests.fixtures import * +from raiden.tests.fixtures import ( + token_abi, + registry_abi, + channel_manager_abi, + netting_channel_abi, + + assets_addresses, + blockchain_services, + blockchain_backend, + + raiden_chain, + raiden_network, + + tester_blockgas_limit, + tester_events, + tester_state, + tester_token_address, + tester_nettingchannel_library_address, + tester_channelmanager_library_address, + tester_registry_address, + tester_token_raw, + tester_token, + tester_registry, + tester_channelmanager, + tester_nettingcontracts, + tester_channels, + + settle_timeout, + reveal_timeout, + events_poll_timeout, + deposit, + number_of_assets, + number_of_nodes, + channels_per_node, + poll_timeout, + transport_class, + privatekey_seed, + asset_amount, + private_keys, + blockchain_type, + blockchain_number_of_nodes, + blockchain_key_seed, + blockchain_private_keys, + blockchain_p2p_base_port, +) + +__all__ = ( + 'token_abi', + 'registry_abi', + 'channel_manager_abi', + 'netting_channel_abi', + + 'assets_addresses', + 'blockchain_services', + 'blockchain_backend', + + 'raiden_chain', + 
'raiden_network', + + 'tester_blockgas_limit', + 'tester_events', + 'tester_state', + 'tester_token_address', + 'tester_nettingchannel_library_address', + 'tester_channelmanager_library_address', + 'tester_registry_address', + 'tester_token_raw', + 'tester_token', + 'tester_registry', + 'tester_channelmanager', + 'tester_nettingcontracts', + 'tester_channels', + + 'settle_timeout', + 'reveal_timeout', + 'events_poll_timeout', + 'deposit', + 'number_of_assets', + 'number_of_nodes', + 'channels_per_node', + 'poll_timeout', + 'transport_class', + 'privatekey_seed', + 'asset_amount', + 'private_keys', + 'blockchain_type', + 'blockchain_number_of_nodes', + 'blockchain_key_seed', + 'blockchain_private_keys', + 'blockchain_p2p_base_port', + + 'pytest_addoption', + 'logging_level', + 'enable_greenlet_debugger', +) gevent.monkey.patch_socket() gevent.get_hub().SYSTEM_ERROR = BaseException diff --git a/raiden/tests/fixtures/__init__.py b/raiden/tests/fixtures/__init__.py index 152b7fbbed..94c2aaa0be 100644 --- a/raiden/tests/fixtures/__init__.py +++ b/raiden/tests/fixtures/__init__.py @@ -1,6 +1,101 @@ # -*- coding: utf-8 -*- -from raiden.tests.fixtures.abi import * -from raiden.tests.fixtures.blockchain import * -from raiden.tests.fixtures.raiden_network import * -from raiden.tests.fixtures.tester import * -from raiden.tests.fixtures.variables import * + +from raiden.tests.fixtures.abi import ( + token_abi, + registry_abi, + channel_manager_abi, + netting_channel_abi, +) + +from raiden.tests.fixtures.blockchain import ( + assets_addresses, + blockchain_services, + blockchain_backend, +) + +from raiden.tests.fixtures.raiden_network import ( + raiden_chain, + raiden_network, +) + +from raiden.tests.fixtures.tester import ( + tester_blockgas_limit, + tester_events, + tester_state, + tester_token_address, + tester_nettingchannel_library_address, + tester_channelmanager_library_address, + tester_registry_address, + tester_token_raw, + tester_token, + tester_registry, + 
tester_channelmanager, + tester_nettingcontracts, + tester_channels, +) + +from raiden.tests.fixtures.variables import ( + settle_timeout, + reveal_timeout, + events_poll_timeout, + deposit, + number_of_assets, + number_of_nodes, + channels_per_node, + poll_timeout, + transport_class, + privatekey_seed, + asset_amount, + private_keys, + blockchain_type, + blockchain_number_of_nodes, + blockchain_key_seed, + blockchain_private_keys, + blockchain_p2p_base_port, +) + +__all__ = ( + 'token_abi', + 'registry_abi', + 'channel_manager_abi', + 'netting_channel_abi', + + 'assets_addresses', + 'blockchain_services', + 'blockchain_backend', + + 'raiden_chain', + 'raiden_network', + + 'tester_blockgas_limit', + 'tester_events', + 'tester_state', + 'tester_token_address', + 'tester_nettingchannel_library_address', + 'tester_channelmanager_library_address', + 'tester_registry_address', + 'tester_token_raw', + 'tester_token', + 'tester_registry', + 'tester_channelmanager', + 'tester_nettingcontracts', + 'tester_channels', + + 'settle_timeout', + 'reveal_timeout', + 'events_poll_timeout', + 'deposit', + 'number_of_assets', + 'number_of_nodes', + 'channels_per_node', + 'poll_timeout', + 'transport_class', + 'privatekey_seed', + 'asset_amount', + 'private_keys', + 'blockchain_type', + 'blockchain_number_of_nodes', + 'blockchain_key_seed', + 'blockchain_private_keys', + 'blockchain_p2p_base_port', +) diff --git a/raiden/tests/integration/test_blockchainservice.py b/raiden/tests/integration/test_blockchainservice.py index 90531bfa49..f3edc9b2d1 100644 --- a/raiden/tests/integration/test_blockchainservice.py +++ b/raiden/tests/integration/test_blockchainservice.py @@ -100,7 +100,9 @@ def test_new_netting_contract(raiden_network, asset_amount, settle_timeout): for channel in channel_list: assert sorted(channel) in expected_channels - assert sorted(manager0.channels_by_participant(peer0_address)) == sorted([netting_address_01, netting_address_02]) + result0 = 
sorted(manager0.channels_by_participant(peer0_address)) + result1 = sorted([netting_address_01, netting_address_02]) + assert result0 == result1 assert manager0.channels_by_participant(peer1_address) == [netting_address_01] assert manager0.channels_by_participant(peer2_address) == [netting_address_02] @@ -140,34 +142,6 @@ def test_new_netting_contract(raiden_network, asset_amount, settle_timeout): assert netting_channel_02.detail(peer0_address)['our_balance'] == 70 assert netting_channel_02.detail(peer2_address)['our_balance'] == 130 - # TODO: - # we need to allow the settlement of the channel even if no transfers were - # made - # peer1_last_sent_transfer = None - # peer2_last_sent_transfer = None - # netting_channel_01.close( - # peer0_address, - # peer1_last_sent_transfer, - # peer2_last_sent_transfer, - # ) - - # with pytest.raises(Exception): - # blockchain_service0.close(asset_address, netting_address_02, peer0_address, peer1_last_sent_transfers) - - # assert netting_channel_01.isopen() is False - # assert netting_channel_02.isopen() is True - - # app2.raiden.chain.asset(asset_address).approve(netting_address_02, 21) - # app2.raiden.chain.netting_channel(netting_address_02).deposit(peer2_address, 21) - - # assert netting_channel_01.isopen() is False - # assert netting_channel_02.isopen() is True - - # netting_channel_01.update_transfer(peer1_address, peer2_last_sent_transfer) - - # assert netting_channel_01.isopen() is False - # assert netting_channel_02.isopen() is True - @pytest.mark.parametrize('blockchain_type', ['geth']) @pytest.mark.parametrize('privatekey_seed', ['blockchain:{}']) @@ -242,7 +216,9 @@ def test_blockchain(blockchain_backend, private_keys, number_of_nodes, poll_time ) assert len(log_list) == 1 - channel_manager_address_encoded = registry_proxy.channelManagerByAsset.call(token_proxy.address) + channel_manager_address_encoded = registry_proxy.channelManagerByAsset.call( + token_proxy.address, + ) channel_manager_address = 
channel_manager_address_encoded.decode('hex') log = log_list[0] diff --git a/raiden/tests/smart_contracts/test_channel_manager.py b/raiden/tests/smart_contracts/test_channel_manager.py index e20e46681e..431c6f5043 100644 --- a/raiden/tests/smart_contracts/test_channel_manager.py +++ b/raiden/tests/smart_contracts/test_channel_manager.py @@ -63,7 +63,8 @@ def test_channelmanager(tester_state, tester_token, tester_events, } ) - assert len(channel_manager.getChannelsParticipants()) == 0, 'newly deployed contract must be empty' + participants_count = len(channel_manager.getChannelsParticipants()) + assert participants_count == 0, 'newly deployed contract must be empty' netting_channel_translator = ContractTranslator(netting_channel_abi) diff --git a/raiden/tests/smart_contracts/test_registry.py b/raiden/tests/smart_contracts/test_registry.py index 0696654118..60adfd0329 100644 --- a/raiden/tests/smart_contracts/test_registry.py +++ b/raiden/tests/smart_contracts/test_registry.py @@ -22,7 +22,11 @@ def test_registry(tester_registry, tester_events): with pytest.raises(tester.TransactionFailed): tester_registry.addAsset(asset_address1, sender=privatekey0) - channel_manager_address = tester_registry.channelManagerByAsset(asset_address1, sender=privatekey0) + channel_manager_address = tester_registry.channelManagerByAsset( + asset_address1, + sender=privatekey0, + ) + assert channel_manager_address == contract_address1 with pytest.raises(tester.TransactionFailed): diff --git a/raiden/tests/test_mtree.py b/raiden/tests/test_mtree.py index 2a46a527f9..d89f409380 100644 --- a/raiden/tests/test_mtree.py +++ b/raiden/tests/test_mtree.py @@ -29,7 +29,10 @@ def test_duplicates(): hash_1 = keccak('y') assert merkleroot([hash_0, hash_0]) == hash_0, 'duplicates should be removed' - assert merkleroot([hash_0, hash_1, hash_0]) == merkleroot([hash_0, hash_1]), 'duplicates should be removed' + + result0 = merkleroot([hash_0, hash_1, hash_0]) + result1 = merkleroot([hash_0, hash_1]) + 
assert result0 == result1, 'duplicates should be removed' def test_one(): @@ -75,7 +78,10 @@ def sort_join(first, second): merkle_tree = [hash_0, hash_1, hash_2] - hash_01 = b'me\xef\x9c\xa9=5\x16\xa4\xd3\x8a\xb7\xd9\x89\xc2\xb5\x00\xe2\xfc\x89\xcc\xdc\xf8x\xf9\xc4m\xaa\xf6\xad\r[' + hash_01 = ( + b'me\xef\x9c\xa9=5\x16\xa4\xd3\x8a\xb7\xd9\x89\xc2\xb5\x00' + b'\xe2\xfc\x89\xcc\xdc\xf8x\xf9\xc4m\xaa\xf6\xad\r[' + ) assert keccak(hash_0 + hash_1) == hash_01 calculated_root = keccak(hash_2 + hash_01) diff --git a/raiden/tests/utils/mock_client.py b/raiden/tests/utils/mock_client.py index 873ab9d7de..9fc6df66fd 100644 --- a/raiden/tests/utils/mock_client.py +++ b/raiden/tests/utils/mock_client.py @@ -315,8 +315,9 @@ def channelnew_filter(self): class NettingChannelMock(object): - def __init__(self, asset_address, peer1, peer2, settle_timeout, # pylint: disable=too-many-arguments - address=None): + def __init__(self, asset_address, peer1, peer2, settle_timeout, address=None): + # pylint: disable=too-many-arguments + self.address = address or make_address() self.contract = NettingChannelContract( @@ -354,7 +355,12 @@ def deposit(self, our_address, amount): 'balance': our_data.deposit, 'block_number': BlockChainServiceMock.block_number(), } - event = ethereum_event(CHANNELNEWBALANCE_EVENTID, CHANNELNEWBALANCE_EVENT, data, self.address) + event = ethereum_event( + CHANNELNEWBALANCE_EVENTID, + CHANNELNEWBALANCE_EVENT, + data, + self.address, + ) for filter_ in BlockChainServiceMock.filters[self.address]: filter_.event(event) @@ -448,7 +454,12 @@ def unlock(self, our_address, unlock_proofs): '_event_type': 'ChannelSecretRevealed', 'secret': secret, } - event = ethereum_event(CHANNELSECRETREVEALED_EVENTID, CHANNELSECRETREVEALED_EVENT, data, self.address) + event = ethereum_event( + CHANNELSECRETREVEALED_EVENTID, + CHANNELSECRETREVEALED_EVENT, + data, + self.address, + ) for filter_ in BlockChainServiceMock.filters[self.address]: filter_.event(event) diff --git 
a/requirements.txt b/requirements.txt index 23f14eedb8..c7671cfcd3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,6 +7,7 @@ gevent==1.1.0 rlp>=0.4.3 secp256k1==0.12.1 pycryptodome>=3.3.1 +miniupnpc networkx ethereum>=1.3.2 ethereum-serpent diff --git a/setup.cfg b/setup.cfg index 1cf30e3fb4..918dc52dda 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,6 @@ [flake8] ignore = E731 +max-line-length = 99 [pep8] max-line-length = 99 diff --git a/tools/config_builder.py b/tools/config_builder.py index 1c773428a5..8a7e7782a8 100755 --- a/tools/config_builder.py +++ b/tools/config_builder.py @@ -1,8 +1,10 @@ #!/usr/bin/env python -import click import json import random + +import click + from genesis_builder import generate_accounts, mk_genesis from create_compilation_dump import deploy_all from startcluster import RAIDEN_PORT as START_PORT @@ -63,8 +65,12 @@ def nodes(ctx, hosts, nodes_per_host): def genesis(ctx, hosts, nodes_per_host): pretty = ctx.obj['pretty'] node_list = build_node_list(hosts, nodes_per_host) - accounts = generate_accounts(node_list) - genesis = mk_genesis([acc['address'] for acc in accounts.values()]) + + accounts = generate_accounts(node_list) # pylint: disable=redefined-outer-name + all_addresses = [account['address'] for account in accounts.values()] + + genesis = mk_genesis(all_addresses) # pylint: disable=redefined-outer-name + print json.dumps(genesis, indent=2 if pretty else None) @@ -99,15 +105,18 @@ def accounts(ctx, hosts, nodes_per_host): @cli.command() @click.pass_context def build_scenario(ctx, hosts, nodes_per_host): + # pylint: disable=too-many-locals + pretty = ctx.obj['pretty'] node_list = build_node_list(hosts, nodes_per_host) - accounts = generate_accounts(node_list) + accounts = generate_accounts(node_list) # pylint: disable=redefined-outer-name addresses = [] - for node, data in accounts.items(): - for k, v in data.items(): - if k == 'address': - addresses.append(v) + for data in accounts.values(): + value = 
data.get('address') + + if value: + addresses.append(value) random.shuffle(addresses) @@ -150,16 +159,26 @@ def geth_commands(ctx, geth_hosts, datadir): (because they can use the content of `static_nodes` as `static-nodes.json`). """ pretty = ctx.obj['pretty'] - nodes = [] + nodes = [] # pylint: disable=redefined-outer-name + for i, host in enumerate(geth_hosts): nodes.append(create_node_configuration(host=host, node_key_seed=i)) + for node in nodes: node.pop('unlock') node.pop('rpcport') + config = {'{host}'.format(**node): ' '.join(to_cmd(node, datadir=datadir)) for node in nodes} config['static_nodes'] = [node['enode'] for node in nodes] - print json.dumps(config, - indent=2 if pretty else None) + + indent = None + if pretty: + indent = 2 + + print json.dumps( + config, + indent=indent, + ) @click.argument( @@ -174,14 +193,20 @@ def geth_commands(ctx, geth_hosts, datadir): @click.pass_context def merge(ctx, genesis_json, state_json): pretty = ctx.obj['pretty'] - genesis = json.load(genesis_json) + + genesis = json.load(genesis_json) # pylint: disable=redefined-outer-name state = json.load(state_json) assert 'alloc' in genesis - accounts = [key for key in genesis['alloc']] + + accounts = [key for key in genesis['alloc']] # pylint: disable=redefined-outer-name + for account, data in state['accounts'].items(): if account not in accounts: - [data.pop(key) for key in "nonce root codeHash".split()] + for key in ('nonce', 'root', 'codeHash'): + data.pop(key, None) + genesis['alloc'][account] = data + print json.dumps(genesis, indent=2 if pretty else None) @@ -208,28 +233,38 @@ def merge(ctx, genesis_json, state_json): @cli.command() @click.pass_context def full_genesis(ctx, hosts, nodes_per_host, scenario): + # pylint: disable=too-many-locals + pretty = ctx.obj['pretty'] node_list = build_node_list(hosts, nodes_per_host) - accounts = generate_accounts(node_list) - genesis = mk_genesis([acc['address'] for acc in accounts.values()]) + + accounts = 
generate_accounts(node_list) # pylint: disable=redefined-outer-name + + all_addresses = [ + account['address'] for account in accounts.values() + ] + + genesis = mk_genesis(all_addresses) # pylint: disable=redefined-outer-name if scenario is not None: - with open(scenario) as f: - script = json.load(f) - token_groups = {asset['name']: asset['channels'] - for asset in script['assets'] - } + with open(scenario) as handler: + script = json.load(handler) + + token_groups = { + asset['name']: asset['channels'] + for asset in script['assets'] + } else: # create tokens for addresses x addresses token_groups = { - account['address']: [acc['address'] for acc in accounts.values()] - for account in accounts.values() - } + account['address']: all_addresses + for account in accounts.values() + } dump, blockchain_config = deploy_all(token_groups=token_groups) for account, data in dump.items(): - if not account in genesis['alloc']: + if account not in genesis['alloc']: genesis['alloc'][account] = data genesis['config']['raidenFlags'] = blockchain_config['raiden_flags'] @@ -238,8 +273,9 @@ def full_genesis(ctx, hosts, nodes_per_host, scenario): if scenario is not None: for asset in script['assets']: asset['token_address'] = blockchain_config['token_groups'][asset['name']] - with open(scenario, 'w') as f: - json.dump(script, f) + + with open(scenario, 'w') as handler: + json.dump(script, handler) print json.dumps(genesis, indent=2 if pretty else None) @@ -252,28 +288,33 @@ def account_file(): @cli.command() def usage(): - print "Example usage:" - print "==============\n" - print "\tconfig_builder.py genesis 5 127.0.0.1 127.0.0.2" - print "\t-> create a genesis json with funding for 10 accounts on the two hosts (see also 'accounts')." - print "\n" - print "\tconfig_builder.py nodes 5 127.0.0.1 127.0.0.2" - print "\t-> create json list 10 raiden endpoint addresses on the two hosts." 
- print "\n" - print "\tconfig_builder.py accounts 5 127.0.0.1 127.0.0.2" - print "\t-> create full account-spec {endpoint: (privatekey, address)} for 10 nodes on the two hosts." - print "\n" - print "\tconfig_builder.py geth_commands /tmp/foo 127.0.0.1 127.0.0.2" - print "\t-> create commands for geth nodes on both hosts with the datadir set to /tmp/foo." - print "\n" - print "\tconfig_builder.py geth_static_nodes 127.0.0.1 127.0.0.2" - print "\t-> outputs geth compatible static-nodes.json contents for a private blockchain." - print "\n" - print "\tconfig_builder.py account_file" - print "\t-> create an account file that can be used as etherbase in geth instances." - print "\n" - print "\tconfig_builder.py merge state_dump.json genesis.json" - print "\t-> merge the deployed contracts of state_dump.json into genesis.json and create a new genesis.json." + usage_text = """\ +Example usage: +============== +config_builder.py genesis 5 127.0.0.1 127.0.0.2 +-> create a genesis json with funding for 10 accounts on the two hosts (see also 'accounts'). + +config_builder.py nodes 5 127.0.0.1 127.0.0.2 +-> create json list 10 raiden endpoint addresses on the two hosts. + +config_builder.py accounts 5 127.0.0.1 127.0.0.2 +-> create full account-spec {endpoint: (privatekey, address)} for 10 nodes on the two hosts. + +config_builder.py geth_commands /tmp/foo 127.0.0.1 127.0.0.2 +-> create commands for geth nodes on both hosts with the datadir set to /tmp/foo. + +config_builder.py geth_static_nodes 127.0.0.1 127.0.0.2 +-> outputs geth compatible static-nodes.json contents for a private blockchain. + +config_builder.py account_file +-> create an account file that can be used as etherbase in geth instances. 
+ +config_builder.py merge state_dump.json genesis.json +-> merge the deployed contracts of state_dump.json into genesis.json and create +a new genesis.json.""" + + print usage_text + if __name__ == '__main__': - cli(obj={}) + cli(obj={}) # pylint: disable=unexpected-keyword-arg,no-value-for-parameter diff --git a/tools/create_compilation_dump.py b/tools/create_compilation_dump.py index 6b5dd4cc06..dbc0d2a042 100755 --- a/tools/create_compilation_dump.py +++ b/tools/create_compilation_dump.py @@ -1,14 +1,16 @@ #!/usr/bin/env python # -*- coding: utf-8 import json + from ethereum import tester from ethereum.utils import remove_0x_head +from ethereum import slogging + from raiden.utils import privatekey_to_address, get_contract_path -from ethereum import slogging slogging.configure(":INFO") -log = slogging.getLogger(__name__) +log = slogging.getLogger(__name__) # pylint: disable=invalid-name TARGETS = dict( registry='Registry.sol', @@ -20,17 +22,44 @@ DEFAULT_ACCOUNT = privatekey_to_address(DEFAULT_KEY) -def deploy_all(token_groups=dict()): - log.DEV("default key", raw=tester.DEFAULT_KEY, enc=tester.DEFAULT_KEY.encode('hex')) - log.DEV("default account", raw=tester.DEFAULT_ACCOUNT, enc=tester.DEFAULT_ACCOUNT.encode('hex')) +def deploy_all(token_groups=None): + if not token_groups: + token_groups = dict() + + log.DEV( # pylint: disable=no-member + 'default key', + raw=tester.DEFAULT_KEY, + enc=tester.DEFAULT_KEY.encode('hex'), + ) + log.DEV( # pylint: disable=no-member + 'default account', + raw=tester.DEFAULT_ACCOUNT, + enc=tester.DEFAULT_ACCOUNT.encode('hex'), + ) + tester.DEFAULT_KEY = DEFAULT_KEY tester.DEFAULT_ACCOUNT = DEFAULT_ACCOUNT tester.keys[0] = DEFAULT_KEY tester.accounts[0] = DEFAULT_ACCOUNT - log.DEV("default key", raw=tester.DEFAULT_KEY, enc=tester.DEFAULT_KEY.encode('hex')) - log.DEV("default account", raw=tester.DEFAULT_ACCOUNT, enc=tester.DEFAULT_ACCOUNT.encode('hex')) + + log.DEV( # pylint: disable=no-member + 'default key', + raw=tester.DEFAULT_KEY, 
+ enc=tester.DEFAULT_KEY.encode('hex'), + ) + log.DEV( # pylint: disable=no-member + 'default account', + raw=tester.DEFAULT_ACCOUNT, + enc=tester.DEFAULT_ACCOUNT.encode('hex'), + ) + state = tester.state(num_accounts=1) - log.DEV('state', coinbase=state.block.coinbase.encode('hex'), balance=state.block.get_balance(DEFAULT_ACCOUNT)) + + log.DEV( # pylint: disable=no-member + 'state', + coinbase=state.block.coinbase.encode('hex'), + balance=state.block.get_balance(DEFAULT_ACCOUNT), + ) tester.gas_limit = 10 * 10 ** 6 state.block.number = 1158001 @@ -68,9 +97,13 @@ def deploy_all(token_groups=dict()): dump.update({account: state.block.account_to_dict(account)}) cleanup(dump) + raiden_flags = ( + '--registry_contract_address {Registry}' + ' --discovery_contract_address {EndpointRegistry}' + ).format(**deployed) + blockchain_config = dict( - raiden_flags='--registry_contract_address {Registry} --discovery_contract_address {EndpointRegistry}' - .format(**deployed), + raiden_flags=raiden_flags, token_groups=tokens, ) blockchain_config['contract_addresses'] = deployed @@ -100,35 +133,44 @@ def create_and_distribute_token(state, return (name, proxy.address.encode('hex')) -def deploy_with_dependencies(contract_name, state, libraries=dict()): - dependencies = find_dependencies( - get_contract_path(contract_name)) +def deploy_with_dependencies(contract_name, state, libraries=None): + if not libraries: + libraries = dict() + + dependencies = find_dependencies(get_contract_path(contract_name)) dependency_names = [d.split('.')[0] for d in dependencies] for key in list(libraries.keys()): - if not key in dependency_names: + if key not in dependency_names: libraries.pop(key) - log.DEV("in deploy_with_dependencies", contract=contract_name, dependencies=dependencies) + log.DEV( # pylint: disable=no-member + 'in deploy_with_dependencies', + contract=contract_name, + dependencies=dependencies, + ) for dependency in dependencies: # 'Contract's are included in 'Registry' and should not 
be deployed alone if 'Contract' in dependency: continue - log.DEV('deploying dependency', name=dependency) - log.DEV('known libraries', libraries=libraries) - deployed = state.abi_contract(None, - path=get_contract_path(dependency), - listen=False, - language='solidity', - libraries=libraries, - sender=DEFAULT_KEY, - ) + log.DEV('deploying dependency', name=dependency) # pylint: disable=no-member + log.DEV('known libraries', libraries=libraries) # pylint: disable=no-member + + deployed = state.abi_contract( + None, + path=get_contract_path(dependency), + listen=False, + language='solidity', + libraries=libraries, + sender=DEFAULT_KEY, + ) + libraries[dependency.split('.')[0]] = deployed.address.encode('hex') state.mine() - log.DEV('deploying target', name=contract_name) - log.DEV('known libraries', libraries=libraries) + log.DEV('deploying target', name=contract_name) # pylint: disable=no-member + log.DEV('known libraries', libraries=libraries) # pylint: disable=no-member contract = state.abi_contract( None, @@ -147,8 +189,8 @@ def find_dependencies(contract_file): """Resolve solidity dependencies depth first. 
""" dependencies = [] - with open(contract_file) as f: - for line in f.readlines(): + with open(contract_file) as handler: + for line in handler.readlines(): if line.startswith("import"): dependency = line.split()[1].split('"')[1] dependency = dependency.rsplit('/', 1)[-1] @@ -168,13 +210,17 @@ def strip_hex(val): return remove_0x_head(val) return val - for account, alloc in dump.items(): + for alloc in dump.values(): for key, value in alloc.items(): alloc[key] = strip_hex(value) -if __name__ == '__main__': +def main(): pretty = False dump, blockchain_config = deploy_all() print json.dumps(dump, indent=2 if pretty else None) print json.dumps(blockchain_config, indent=2 if pretty else None) + + +if __name__ == '__main__': + main() diff --git a/tools/deploy.py b/tools/deploy.py index f0ba71eebc..5cfd973e38 100755 --- a/tools/deploy.py +++ b/tools/deploy.py @@ -12,7 +12,12 @@ # ordered list of solidity files to deploy for the raiden registry -RAIDEN_CONTRACT_FILES = ['Token.sol', 'NettingChannelLibrary.sol', 'ChannelManagerLibrary.sol', 'Registry.sol'] +RAIDEN_CONTRACT_FILES = [ + 'Token.sol', + 'NettingChannelLibrary.sol', + 'ChannelManagerLibrary.sol', + 'Registry.sol', +] DISCOVERY_CONTRACT_FILES = ['EndpointRegistry.sol'] diff --git a/tools/scenario_runner.py b/tools/scenario_runner.py index 9b20759750..f4e73c04f6 100644 --- a/tools/scenario_runner.py +++ b/tools/scenario_runner.py @@ -1,30 +1,33 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import print_function -from gevent import monkey -monkey.patch_all() - import signal -import gevent -import click import json + +import click +import gevent +from gevent import monkey from ethereum import slogging + from raiden.console import ConsoleTools from raiden.app import app as orig_app from raiden.app import options + +monkey.patch_all() log = slogging.get_logger(__name__) # pylint: disable=invalid-name -@click.option( +@click.option( # noqa '--scenario', help='path to scenario.json', 
type=click.File() ) @options @click.command() -@click.pass_context -def run(ctx, scenario, **kwargs): +@click.pass_context # pylint: disable=too-many-locals +def run(ctx, scenario, **kwargs): # pylint: disable=unused-argument ctx.params.pop('scenario') app = ctx.invoke(orig_app) if scenario: @@ -92,4 +95,4 @@ def transfer_(): if __name__ == '__main__': - run() + run() # pylint: disable=no-value-for-parameter diff --git a/tox.ini b/tox.ini index e262a89945..4515adf739 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27,coverage,flake8 +envlist = py27,coverage [testenv:devenv] envdir = devenv @@ -9,7 +9,9 @@ changedir = {toxinidir}/raiden deps = -rrequirements.txt pdbpp + flake8 commands = + flake8 . py.test --blockchain-type=tester --exitfirst --ignore tests/test_webui.py --ignore tests/integration {posargs} py.test --blockchain-type=tester --exitfirst --ignore tests/test_webui.py tests/integration {posargs} @@ -18,7 +20,9 @@ changedir = {toxinidir}/raiden deps = -rrequirements.txt coverage==4.0 + flake8 commands = + flake8 . py.test --exitfirst --ignore tests/test_webui.py --ignore tests/integration {posargs} py.test --exitfirst --ignore tests/test_webui.py tests/integration {posargs} coverage run --source raiden --branch -m py.test --ignore tests/test_webui.py {posargs} @@ -28,13 +32,3 @@ basepython = python2.7 skip_install = True deps = coverage==4.0 commands = coverage report --show-missing - -[testenv:flake8] -basepython = python2.7 -skip_install = True -deps = flake8==2.4.1 -commands = flake8 raiden - -[flake8] -max-line-length = 99 -max-complexity = 10