From 9938f4438dce6700afd58d33af9dc5ed65a2f4d0 Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Mon, 26 Jul 2021 00:26:55 +0200 Subject: [PATCH 01/11] partial bitcoin#22550: improve `test_signing_with_{csv,cltv}` subtests partial: - 746f203f (only changes in test_framework/util.py) --- test/functional/test_framework/util.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index 3577e98a2013a..f2c9448f8e4ac 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -624,6 +624,17 @@ def mine_large_block(node, utxos=None): node.generate(1) +def generate_to_height(node, target_height): + """Generates blocks until a given target block height has been reached. + To prevent timeouts, only up to 200 blocks are generated per RPC call. + Can be used to activate certain soft-forks (e.g. CSV, CLTV).""" + current_height = node.getblockcount() + while current_height < target_height: + nblocks = min(200, target_height - current_height) + current_height += len(node.generate(nblocks)) + assert_equal(node.getblockcount(), target_height) + + def find_vout_for_address(node, txid, addr): """ Locate the vout index of the given transaction sending to the From c17fd8bc59bd4842315a27b9ff0da57c5da4bdd7 Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Thu, 26 Sep 2024 19:13:22 +0000 Subject: [PATCH 02/11] merge bitcoin#22741: Add generate* calls to test framework --- test/functional/feature_dbcrash.py | 5 +- test/functional/feature_governance.py | 2 +- test/functional/feature_maxuploadtarget.py | 4 +- test/functional/interface_zmq.py | 6 +- test/functional/mempool_limit.py | 2 +- test/functional/mempool_unbroadcast.py | 2 +- .../mining_getblocktemplate_longpoll.py | 6 +- .../mining_prioritisetransaction.py | 2 +- test/functional/p2p_instantsend.py | 4 +- test/functional/p2p_unrequested_blocks.py | 2 +- .../test_framework/test_framework.py | 67 ++++++++++++------- test/functional/test_framework/util.py | 14 ++-- test/functional/wallet_descriptor.py | 2 +- test/functional/wallet_importdescriptors.py | 4 +- 14 files changed, 70 insertions(+), 52 deletions(-) diff --git a/test/functional/feature_dbcrash.py b/test/functional/feature_dbcrash.py index b40152e3b6e4c..49785cb6e93cb 100755 --- a/test/functional/feature_dbcrash.py +++ b/test/functional/feature_dbcrash.py @@ -217,7 +217,7 @@ def run_test(self): # Start by creating a lot of utxos on node3 initial_height = self.nodes[3].getblockcount() - utxo_list = create_confirmed_utxos(self.nodes[3].getnetworkinfo()['relayfee'], self.nodes[3], 5000) + utxo_list = create_confirmed_utxos(self, self.nodes[3].getnetworkinfo()['relayfee'], self.nodes[3], 5000) self.log.info("Prepped %d utxo entries", len(utxo_list)) # Sync these blocks with the other nodes @@ -253,7 +253,8 @@ def run_test(self): self.log.debug("Mining longer tip") block_hashes = [] while current_height + 1 > self.nodes[3].getblockcount(): - block_hashes.extend(self.nodes[3].generatetoaddress( + block_hashes.extend(self.generatetoaddress( + self.nodes[3], nblocks=min(10, current_height + 1 - self.nodes[3].getblockcount()), # new address to avoid mining a block that has just been invalidated address=self.nodes[3].getnewaddress(), diff --git a/test/functional/feature_governance.py b/test/functional/feature_governance.py index 58e4188824e25..4d02556416bf0 100755 --- 
a/test/functional/feature_governance.py +++ b/test/functional/feature_governance.py @@ -193,7 +193,7 @@ def run_test(self): isolated = self.nodes[payee_idx] self.log.info("Move 1 block inside the Superblock maturity window on the isolated node") - isolated.generate(1) + self.generate(isolated, 1) self.bump_mocktime(1) self.log.info("The isolated 'winner' should submit new trigger and vote for it") self.wait_until(lambda: len(isolated.gobject("list", "valid", "triggers")) == 1, timeout=5) diff --git a/test/functional/feature_maxuploadtarget.py b/test/functional/feature_maxuploadtarget.py index 7772739bb2d01..11408573478eb 100755 --- a/test/functional/feature_maxuploadtarget.py +++ b/test/functional/feature_maxuploadtarget.py @@ -71,7 +71,7 @@ def run_test(self): p2p_conns.append(self.nodes[0].add_p2p_connection(TestP2PConn())) # Now mine a big block - mine_large_block(self.nodes[0], self.utxo_cache) + mine_large_block(self, self.nodes[0], self.utxo_cache) # Store the hash; we'll request this later big_old_block = self.nodes[0].getbestblockhash() @@ -82,7 +82,7 @@ def run_test(self): self.nodes[0].setmocktime(current_mocktime - 2*60*60*24) # Mine one more block, so that the prior block looks old - mine_large_block(self.nodes[0], self.utxo_cache) + mine_large_block(self, self.nodes[0], self.utxo_cache) # We'll be requesting this new block too big_new_block = self.nodes[0].getbestblockhash() diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py index 7dbdf45e98c42..8f77686fe6f2b 100755 --- a/test/functional/interface_zmq.py +++ b/test/functional/interface_zmq.py @@ -78,8 +78,8 @@ class ZMQTestSetupBlock: raw transaction data. """ - def __init__(self, node): - self.block_hash = node.generate(1)[0] + def __init__(self, test_framework, node): + self.block_hash = test_framework.generate(node, 1)[0] coinbase = node.getblock(self.block_hash, 2)['tx'][0] self.tx_hash = coinbase['txid'] self.raw_tx = coinbase['hex'] @@ -149,7 +149,7 @@ def setup_zmq_test(self, services, *, recv_timeout=60, sync_blocks=True, ipv6=Fa for sub in subscribers: sub.socket.set(zmq.RCVTIMEO, 1000) while True: - test_block = ZMQTestSetupBlock(self.nodes[0]) + test_block = ZMQTestSetupBlock(self, self.nodes[0]) recv_failed = False for sub in subscribers: try: diff --git a/test/functional/mempool_limit.py b/test/functional/mempool_limit.py index 65bc129ef9bea..3229537737008 100755 --- a/test/functional/mempool_limit.py +++ b/test/functional/mempool_limit.py @@ -32,7 +32,7 @@ def run_test(self): assert_equal(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000')) txids = [] - utxos = create_confirmed_utxos(relayfee, self.nodes[0], 491) + utxos = create_confirmed_utxos(self, relayfee, self.nodes[0], 491) self.log.info('Create a mempool tx that will be evicted') us0 = utxos.pop() diff --git a/test/functional/mempool_unbroadcast.py b/test/functional/mempool_unbroadcast.py index 7d9e6c306d8ac..be03be24ec122 100755 --- a/test/functional/mempool_unbroadcast.py +++ b/test/functional/mempool_unbroadcast.py @@ -32,7 +32,7 @@ def test_broadcast(self): node = self.nodes[0] min_relay_fee = node.getnetworkinfo()["relayfee"] - utxos = create_confirmed_utxos(min_relay_fee, node, 10) + utxos = create_confirmed_utxos(self, min_relay_fee, node, 10) self.disconnect_nodes(0, 1) diff --git a/test/functional/mining_getblocktemplate_longpoll.py b/test/functional/mining_getblocktemplate_longpoll.py index d8151f472fa54..e880e5e4faa88 100755 --- a/test/functional/mining_getblocktemplate_longpoll.py +++ 
b/test/functional/mining_getblocktemplate_longpoll.py @@ -47,9 +47,9 @@ def run_test(self): thr.join(5) # wait 5 seconds or until thread exits assert thr.is_alive() - miniwallets = [ MiniWallet(node) for node in self.nodes ] + miniwallets = [MiniWallet(node) for node in self.nodes] self.log.info("Test that longpoll will terminate if another node generates a block") - miniwallets[1].generate(1) # generate a block on another node + self.generate(miniwallets[1], 1) # generate a block on another node # check that thread will exit now that new transaction entered mempool thr.join(5) # wait 5 seconds or until thread exits assert not thr.is_alive() @@ -57,7 +57,7 @@ def run_test(self): self.log.info("Test that longpoll will terminate if we generate a block ourselves") thr = LongpollThread(self.nodes[0]) thr.start() - miniwallets[0].generate(1) # generate a block on own node + self.generate(miniwallets[0], 1) # generate a block on own node thr.join(5) # wait 5 seconds or until thread exits assert not thr.is_alive() diff --git a/test/functional/mining_prioritisetransaction.py b/test/functional/mining_prioritisetransaction.py index 9af7acdc2089b..8439ccb9c51a1 100755 --- a/test/functional/mining_prioritisetransaction.py +++ b/test/functional/mining_prioritisetransaction.py @@ -42,7 +42,7 @@ def run_test(self): self.relayfee = self.nodes[0].getnetworkinfo()['relayfee'] utxo_count = 90 - utxos = create_confirmed_utxos(self.relayfee, self.nodes[0], utxo_count) + utxos = create_confirmed_utxos(self, self.relayfee, self.nodes[0], utxo_count) base_fee = self.relayfee*100 # our transactions are smaller than 100kb txids = [] diff --git a/test/functional/p2p_instantsend.py b/test/functional/p2p_instantsend.py index d7dec82adbc2d..c5966d6eb1cd8 100755 --- a/test/functional/p2p_instantsend.py +++ b/test/functional/p2p_instantsend.py @@ -67,11 +67,11 @@ def test_block_doublespend(self): dblspnd_txid = isolated.sendrawtransaction(dblspnd_tx['hex']) # generate block on isolated node with doublespend transaction self.bump_mocktime(599) - wrong_early_block = isolated.generate(1)[0] + wrong_early_block = self.generate(isolated, 1)[0] assert not "confirmation" in isolated.getrawtransaction(dblspnd_txid, 1) isolated.invalidateblock(wrong_early_block) self.bump_mocktime(1) - wrong_block = isolated.generate(1)[0] + wrong_block = self.generate(isolated, 1)[0] assert_equal(isolated.getrawtransaction(dblspnd_txid, 1)["confirmations"], 1) # connect isolated block to network self.reconnect_isolated_node(self.isolated_idx, 0) diff --git a/test/functional/p2p_unrequested_blocks.py b/test/functional/p2p_unrequested_blocks.py index 7ed5c3645a555..6d5da0a3ba10c 100755 --- a/test/functional/p2p_unrequested_blocks.py +++ b/test/functional/p2p_unrequested_blocks.py @@ -73,7 +73,7 @@ def run_test(self): min_work_node = self.nodes[1].add_p2p_connection(P2PInterface()) # 1. Have nodes mine a block (leave IBD) - [n.generatetoaddress(1, n.get_deterministic_priv_key().address) for n in self.nodes] + [self.generatetoaddress(n, 1, n.get_deterministic_priv_key().address) for n in self.nodes] tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes] # 2. Send one block that builds on each tip. 
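For context before the test_framework.py diff that follows: the new generate* helpers are thin delegating wrappers. A minimal sketch of the pattern, assuming the generator argument is any object that exposes the matching RPC method (a TestNode, a wallet RPC handle, or a MiniWallet); this is an illustration, not part of the patch:

    class BitcoinTestFramework:
        def generate(self, generator, *args, **kwargs):
            # Forward to the generator's own generate(); routing call sites
            # through the framework allows behaviour such as automatic block
            # syncing to be centralised in one place later.
            return generator.generate(*args, **kwargs)

        def generatetoaddress(self, generator, *args, **kwargs):
            return generator.generatetoaddress(*args, **kwargs)

Call sites accordingly change from node.generate(n) to self.generate(node, n), which is the bulk of the hunks in this commit and the next.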
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 0eeed8ba3a71f..17c3c9ccd8609 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -455,7 +455,7 @@ def setup_nodes(self): if not self.disable_mocktime: self.log.debug('Generate a block with current mocktime') self.bump_mocktime(156 * 200, update_schedulers=False) - block_hash = self.nodes[0].generate(1)[0] + block_hash = self.generate(self.nodes[0], 1)[0] block = self.nodes[0].getblock(blockhash=block_hash, verbosity=0) for n in self.nodes: n.submitblock(block) @@ -766,6 +766,22 @@ def join_network(self): self.connect_nodes(1, 2) self.sync_all() + def generate(self, generator, *args, **kwargs): + blocks = generator.generate(*args, **kwargs) + return blocks + + def generateblock(self, generator, *args, **kwargs): + blocks = generator.generateblock(*args, **kwargs) + return blocks + + def generatetoaddress(self, generator, *args, **kwargs): + blocks = generator.generatetoaddress(*args, **kwargs) + return blocks + + def generatetodescriptor(self, generator, *args, **kwargs): + blocks = generator.generatetodescriptor(*args, **kwargs) + return blocks + def sync_blocks(self, nodes=None, wait=1, timeout=60): """ Wait until everybody has the same tip. @@ -932,7 +948,8 @@ def _initialize_chain(self): assert_equal(len(gen_addresses), 4) for i in range(8): self.bump_mocktime((25 if i != 7 else 24) * 156, update_schedulers=False) - cache_node.generatetoaddress( + self.generatetoaddress( + cache_node, nblocks=25 if i != 7 else 24, address=gen_addresses[i % len(gen_addresses)], ) @@ -1161,22 +1178,22 @@ def activate_by_name(self, name, expected_activation_height=None): # Hence the last block prior to the activation is (expected_activation_height - 2). 
while expected_activation_height - height - 2 > batch_size: self.bump_mocktime(batch_size) - self.nodes[0].generate(batch_size) + self.generate(self.nodes[0], batch_size) height += batch_size self.sync_blocks() blocks_left = expected_activation_height - height - 2 assert blocks_left <= batch_size self.bump_mocktime(blocks_left) - self.nodes[0].generate(blocks_left) + self.generate(self.nodes[0], blocks_left) self.sync_blocks() assert not softfork_active(self.nodes[0], name) self.bump_mocktime(1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() else: while not softfork_active(self.nodes[0], name): self.bump_mocktime(batch_size) - self.nodes[0].generate(batch_size) + self.generate(self.nodes[0], batch_size) self.sync_blocks() assert softfork_active(self.nodes[0], name) @@ -1265,7 +1282,7 @@ def dynamically_prepare_masternode(self, idx, node_p2p_port, evo=False, rnd=None outputs = {collateral_address: collateral_amount, funds_address: 1} collateral_txid = self.nodes[0].sendmany("", outputs) self.wait_for_instantlock(collateral_txid, self.nodes[0]) - tip = self.nodes[0].generate(1)[0] + tip = self.generate(self.nodes[0], 1)[0] self.sync_all(self.nodes) rawtx = self.nodes[0].getrawtransaction(collateral_txid, 1, tip) @@ -1287,7 +1304,7 @@ def dynamically_prepare_masternode(self, idx, node_p2p_port, evo=False, rnd=None protx_result = self.nodes[0].protx("register", collateral_txid, collateral_vout, ipAndPort, owner_address, bls['public'], voting_address, operatorReward, reward_address, funds_address, True) self.wait_for_instantlock(protx_result, self.nodes[0]) - tip = self.nodes[0].generate(1)[0] + tip = self.generate(self.nodes[0], 1)[0] self.sync_all(self.nodes) assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1) @@ -1310,7 +1327,7 @@ def dynamically_evo_update_service(self, evo_info, rnd=None, should_be_rejected= fund_txid = self.nodes[0].sendtoaddress(funds_address, 1) self.wait_for_instantlock(fund_txid, self.nodes[0]) - tip = self.nodes[0].generate(1)[0] + tip = self.generate(self.nodes[0], 1)[0] assert_equal(self.nodes[0].getrawtransaction(fund_txid, 1, tip)['confirmations'], 1) self.sync_all(self.nodes) @@ -1318,7 +1335,7 @@ def dynamically_evo_update_service(self, evo_info, rnd=None, should_be_rejected= try: protx_result = self.nodes[0].protx('update_service_evo', evo_info.proTxHash, evo_info.addr, evo_info.keyOperator, platform_node_id, platform_p2p_port, platform_http_port, operator_reward_address, funds_address) self.wait_for_instantlock(protx_result, self.nodes[0]) - tip = self.nodes[0].generate(1)[0] + tip = self.generate(self.nodes[0], 1)[0] assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1) self.sync_all(self.nodes) self.log.info("Updated EvoNode %s: platformNodeID=%s, platformP2PPort=%s, platformHTTPPort=%s" % (evo_info.proTxHash, platform_node_id, platform_p2p_port, platform_http_port)) @@ -1369,7 +1386,7 @@ def prepare_masternode(self, idx): if register_fund: protx_result = self.nodes[0].protx('register_fund', address, ipAndPort, ownerAddr, bls['public'], votingAddr, operatorReward, rewardsAddr, address, submit) else: - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) protx_result = self.nodes[0].protx('register', txid, collateral_vout, ipAndPort, ownerAddr, bls['public'], votingAddr, operatorReward, rewardsAddr, address, submit) if submit: @@ -1378,7 +1395,7 @@ def prepare_masternode(self, idx): proTxHash = self.nodes[0].sendrawtransaction(protx_result) if 
operatorReward > 0: - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) operatorPayoutAddress = self.nodes[0].getnewaddress() self.nodes[0].protx('update_service', proTxHash, ipAndPort, bls['secret'], operatorPayoutAddress, address) @@ -1391,7 +1408,7 @@ def remove_masternode(self, idx): rawtx = self.nodes[0].createrawtransaction([{"txid": mn.collateral_txid, "vout": mn.collateral_vout}], {self.nodes[0].getnewaddress(): 999.9999}) rawtx = self.nodes[0].signrawtransactionwithwallet(rawtx) self.nodes[0].sendrawtransaction(rawtx["hex"]) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() self.mninfo.remove(mn) @@ -1470,7 +1487,7 @@ def setup_nodes(self): self.log.info("Generating %d coins" % required_balance) while self.nodes[0].getbalance() < required_balance: self.bump_mocktime(1) - self.nodes[0].generate(10) + self.generate(self.nodes[0], 10) # create masternodes self.prepare_masternodes() @@ -1488,7 +1505,7 @@ def setup_network(self): self.start_masternodes() self.bump_mocktime(1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # sync nodes self.sync_all() for i in range(0, num_simple_nodes): @@ -1763,7 +1780,7 @@ def wait_func(): if quorum_hash in self.nodes[0].quorum("list")[llmq_type_name]: return True self.bump_mocktime(sleep, nodes=nodes) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(nodes) return False wait_until_helper(wait_func, timeout=timeout, sleep=sleep) @@ -1775,14 +1792,14 @@ def wait_func(): if quorum_hash_1 in self.nodes[0].quorum("list")[llmq_type_name]: return True self.bump_mocktime(sleep, nodes=nodes) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(nodes) return False wait_until_helper(wait_func, timeout=timeout, sleep=sleep) def move_blocks(self, nodes, num_blocks): self.bump_mocktime(1, nodes=nodes) - self.nodes[0].generate(num_blocks) + self.generate(self.nodes[0], num_blocks) self.sync_blocks(nodes) def mine_quorum(self, llmq_type_name="llmq_test", llmq_type=100, expected_connections=None, expected_members=None, expected_contributions=None, expected_complaints=0, expected_justifications=0, expected_commitments=None, mninfos_online=None, mninfos_valid=None): @@ -1812,7 +1829,7 @@ def mine_quorum(self, llmq_type_name="llmq_test", llmq_type=100, expected_connec skip_count = 24 - (self.nodes[0].getblockcount() % 24) if skip_count != 0: self.bump_mocktime(1, nodes=nodes) - self.nodes[0].generate(skip_count) + self.generate(self.nodes[0], skip_count) self.sync_blocks(nodes) q = self.nodes[0].getbestblockhash() @@ -1854,7 +1871,7 @@ def mine_quorum(self, llmq_type_name="llmq_test", llmq_type=100, expected_connec self.log.info("Mining final commitment") self.bump_mocktime(1, nodes=nodes) self.nodes[0].getblocktemplate() # this calls CreateNewBlock - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(nodes) self.log.info("Waiting for quorum to appear in the list") @@ -1865,7 +1882,7 @@ def mine_quorum(self, llmq_type_name="llmq_test", llmq_type=100, expected_connec quorum_info = self.nodes[0].quorum("info", llmq_type, new_quorum) # Mine 8 (SIGN_HEIGHT_OFFSET) more blocks to make sure that the new quorum gets eligible for signing sessions - self.nodes[0].generate(8) + self.generate(self.nodes[0], 8) self.sync_blocks(nodes) @@ -1975,7 +1992,7 @@ def mine_cycle_quorum(self, llmq_type_name="llmq_test_dip0024", llmq_type=103, self.log.info("Mining final commitments") self.bump_mocktime(1, nodes=nodes) self.nodes[0].getblocktemplate() # this 
calls CreateNewBlock - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(nodes) self.log.info("Waiting for quorum(s) to appear in the list") @@ -1984,7 +2001,7 @@ def mine_cycle_quorum(self, llmq_type_name="llmq_test_dip0024", llmq_type=103, quorum_info_0 = self.nodes[0].quorum("info", llmq_type, q_0) quorum_info_1 = self.nodes[0].quorum("info", llmq_type, q_1) # Mine 8 (SIGN_HEIGHT_OFFSET) more blocks to make sure that the new quorum gets eligible for signing sessions - self.nodes[0].generate(8) + self.generate(self.nodes[0], 8) self.sync_blocks(nodes) self.log.info("New quorum: height=%d, quorumHash=%s, quorumIndex=%d, minedBlock=%s" % (quorum_info_0["height"], q_0, quorum_info_0["quorumIndex"], quorum_info_0["minedBlock"])) @@ -2010,7 +2027,7 @@ def move_to_next_cycle(self): skip_count = cycle_length - (cur_block % cycle_length) if skip_count != 0: self.bump_mocktime(1, nodes=nodes) - self.nodes[0].generate(skip_count) + self.generate(self.nodes[0], skip_count) self.sync_blocks(nodes) self.log.info('Moved from block %d to %d' % (cur_block, self.nodes[0].getblockcount())) @@ -2072,7 +2089,7 @@ def test_mns(): if recover: if self.mocktime % 2: self.bump_mocktime(self.quorum_data_request_expiration_timeout + 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() else: self.bump_mocktime(self.quorum_data_thread_request_timeout_seconds + 1) diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index f2c9448f8e4ac..8b038dee83d8c 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -514,10 +514,10 @@ def find_output(node, txid, amount, *, blockhash=None): # Helper to create at least "count" utxos # Pass in a fee that is sufficient for relay and mining new transactions. -def create_confirmed_utxos(fee, node, count): +def create_confirmed_utxos(test_framework, fee, node, count): to_generate = int(0.5 * count) + 101 while to_generate > 0: - node.generate(min(25, to_generate)) + test_framework.generate(node, min(25, to_generate)) to_generate -= 25 utxos = node.listunspent() iterations = count - len(utxos) @@ -538,7 +538,7 @@ def create_confirmed_utxos(fee, node, count): node.sendrawtransaction(signed_tx) while (node.getmempoolinfo()['size'] > 0): - node.generate(1) + test_framework.generate(node, 1) utxos = node.listunspent() assert len(utxos) >= count @@ -610,7 +610,7 @@ def create_lots_of_big_transactions(node, txouts, utxos, num, fee): return txids -def mine_large_block(node, utxos=None): +def mine_large_block(test_framework, node, utxos=None): # generate a 66k transaction, # and 14 of them is close to the 1MB block limit num = 14 @@ -621,17 +621,17 @@ def mine_large_block(node, utxos=None): utxos.extend(node.listunspent()) fee = 100 * node.getnetworkinfo()["relayfee"] create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee) - node.generate(1) + test_framework.generate(node, 1) -def generate_to_height(node, target_height): +def generate_to_height(test_framework, node, target_height): """Generates blocks until a given target block height has been reached. To prevent timeouts, only up to 200 blocks are generated per RPC call. Can be used to activate certain soft-forks (e.g. 
CSV, CLTV).""" current_height = node.getblockcount() while current_height < target_height: nblocks = min(200, target_height - current_height) - current_height += len(node.generate(nblocks)) + current_height += len(test_framework.generate(node, nblocks)) assert_equal(node.getblockcount(), target_height) diff --git a/test/functional/wallet_descriptor.py b/test/functional/wallet_descriptor.py index 1331dba392742..7c8ae73641888 100755 --- a/test/functional/wallet_descriptor.py +++ b/test/functional/wallet_descriptor.py @@ -63,7 +63,7 @@ def run_test(self): send_wrpc = self.nodes[0].get_wallet_rpc("desc1") # Generate some coins - send_wrpc.generatetoaddress(101, send_wrpc.getnewaddress()) + self.generatetoaddress(send_wrpc, 101, send_wrpc.getnewaddress()) # Make transactions self.log.info("Test sending and receiving") diff --git a/test/functional/wallet_importdescriptors.py b/test/functional/wallet_importdescriptors.py index 0b08038cf0a0f..246d7c6b3eb34 100755 --- a/test/functional/wallet_importdescriptors.py +++ b/test/functional/wallet_importdescriptors.py @@ -73,7 +73,7 @@ def run_test(self): assert_equal(wpriv.getwalletinfo()['keypoolsize'], 0) self.log.info('Mining coins') - w0.generatetoaddress(101, w0.getnewaddress()) + self.generatetoaddress(w0, 101, w0.getnewaddress()) # RPC importdescriptors ----------------------------------------------- @@ -380,7 +380,7 @@ def run_test(self): solvable=True, ismine=True) txid = w0.sendtoaddress(address, 49.99995540) - w0.generatetoaddress(6, w0.getnewaddress()) + self.generatetoaddress(w0, 6, w0.getnewaddress()) self.sync_blocks() tx = wpriv.createrawtransaction([{"txid": txid, "vout": 0}], {w0.getnewaddress(): 49.999}) signed_tx = wpriv.signrawtransactionwithwallet(tx) From 7d3c3b4b648c7a059ab3efc9245c7c57699f34c4 Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Tue, 1 Oct 2024 19:25:52 +0000 Subject: [PATCH 03/11] merge bitcoin#22788: Use generate* from TestFramework --- test/functional/example_test.py | 2 +- test/functional/feature_abortnode.py | 6 +-- test/functional/feature_addressindex.py | 30 ++++++------ test/functional/feature_asset_locks.py | 46 +++++++++--------- .../feature_backwards_compatibility.py | 4 +- test/functional/feature_bip68_sequence.py | 16 +++---- .../feature_blockfilterindex_prune.py | 6 +-- test/functional/feature_blocksdir.py | 2 +- test/functional/feature_cltv.py | 4 +- test/functional/feature_coinstatsindex.py | 16 +++---- test/functional/feature_csv_activation.py | 4 +- test/functional/feature_dersig.py | 2 +- test/functional/feature_dip0020_activation.py | 8 ++-- .../feature_dip3_deterministicmns.py | 30 ++++++------ test/functional/feature_dip3_v19.py | 8 ++-- .../feature_dip4_coinbasemerkleroots.py | 6 +-- test/functional/feature_fee_estimation.py | 10 ++-- test/functional/feature_governance.py | 30 ++++++------ test/functional/feature_governance_cl.py | 16 +++---- test/functional/feature_llmq_chainlocks.py | 36 +++++++------- test/functional/feature_llmq_connections.py | 2 +- test/functional/feature_llmq_data_recovery.py | 6 +-- test/functional/feature_llmq_dkgerrors.py | 2 +- test/functional/feature_llmq_evo.py | 8 ++-- .../feature_llmq_is_cl_conflicts.py | 8 ++-- .../functional/feature_llmq_is_retroactive.py | 12 ++--- test/functional/feature_llmq_rotation.py | 10 ++-- test/functional/feature_llmq_simplepose.py | 8 ++-- test/functional/feature_loadblock.py | 2 +- test/functional/feature_maxuploadtarget.py | 2 +- test/functional/feature_minchainwork.py | 4 +- 
test/functional/feature_mnehf.py | 20 ++++---- .../feature_new_quorum_type_activation.py | 6 +-- test/functional/feature_notifications.py | 4 +- test/functional/feature_nulldummy.py | 4 +- test/functional/feature_pruning.py | 14 +++--- test/functional/feature_reindex.py | 2 +- test/functional/feature_spentindex.py | 6 +-- test/functional/feature_sporks.py | 2 +- test/functional/feature_timestampindex.py | 2 +- test/functional/feature_txindex.py | 4 +- test/functional/feature_utxo_set_hash.py | 4 +- .../functional/feature_versionbits_warning.py | 12 ++--- test/functional/interface_bitcoin_cli.py | 6 +-- test/functional/interface_rest.py | 14 +++--- test/functional/interface_usdt_utxocache.py | 6 +-- test/functional/interface_usdt_validation.py | 2 +- test/functional/interface_zmq.py | 26 +++++----- test/functional/interface_zmq_dash.py | 10 ++-- test/functional/mempool_accept.py | 4 +- test/functional/mempool_compatibility.py | 4 +- test/functional/mempool_expiry.py | 4 +- test/functional/mempool_package_limits.py | 16 +++---- test/functional/mempool_package_onemore.py | 2 +- test/functional/mempool_packages.py | 8 ++-- test/functional/mempool_persist.py | 2 +- test/functional/mempool_reorg.py | 4 +- test/functional/mempool_resurrect.py | 10 ++-- test/functional/mempool_spend_coinbase.py | 2 +- test/functional/mempool_unbroadcast.py | 2 +- test/functional/mempool_updatefromblock.py | 2 +- test/functional/mining_basic.py | 6 +-- .../mining_getblocktemplate_longpoll.py | 4 +- .../mining_prioritisetransaction.py | 4 +- test/functional/p2p_block_sync.py | 2 +- test/functional/p2p_blockfilters.py | 6 +-- test/functional/p2p_blocksonly.py | 2 +- test/functional/p2p_compactblocks.py | 16 +++---- .../p2p_compactblocks_blocksonly.py | 2 +- test/functional/p2p_compactblocks_hb.py | 4 +- test/functional/p2p_eviction.py | 2 +- test/functional/p2p_filter.py | 8 ++-- test/functional/p2p_fingerprint.py | 4 +- test/functional/p2p_ibd_txrelay.py | 2 +- test/functional/p2p_instantsend.py | 8 ++-- test/functional/p2p_invalid_block.py | 2 +- test/functional/p2p_invalid_locator.py | 2 +- test/functional/p2p_invalid_tx.py | 2 +- test/functional/p2p_leak.py | 2 +- test/functional/p2p_leak_tx.py | 4 +- test/functional/p2p_node_network_limited.py | 4 +- test/functional/p2p_permissions.py | 2 +- test/functional/p2p_quorum_data.py | 2 +- test/functional/p2p_sendheaders.py | 8 ++-- test/functional/p2p_sendheaders_compressed.py | 8 ++-- test/functional/rpc_addresses_deprecation.py | 2 +- test/functional/rpc_blockchain.py | 8 ++-- test/functional/rpc_createmultisig.py | 8 ++-- test/functional/rpc_deprecated.py | 2 +- test/functional/rpc_dumptxoutset.py | 2 +- test/functional/rpc_fundrawtransaction.py | 26 +++++----- test/functional/rpc_generateblock.py | 24 +++++----- test/functional/rpc_getblockfilter.py | 4 +- test/functional/rpc_getblockfrompeer.py | 4 +- test/functional/rpc_getblockstats.py | 6 +-- test/functional/rpc_getchaintips.py | 4 +- test/functional/rpc_invalidateblock.py | 16 +++---- test/functional/rpc_masternode.py | 8 ++-- test/functional/rpc_net.py | 6 +-- test/functional/rpc_packages.py | 4 +- test/functional/rpc_preciousblock.py | 12 ++--- test/functional/rpc_psbt.py | 10 ++-- test/functional/rpc_rawtransaction.py | 24 +++++----- test/functional/rpc_scantxoutset.py | 6 +-- test/functional/rpc_txoutproof.py | 8 ++-- test/functional/rpc_verifychainlock.py | 8 ++-- test/functional/rpc_verifyislock.py | 4 +- test/functional/rpc_wipewallettxes.py | 4 +- test/functional/test-shell.md | 2 +- 
test/functional/tool_wallet.py | 2 +- test/functional/wallet_abandonconflict.py | 6 +-- test/functional/wallet_avoidreuse.py | 20 ++++---- test/functional/wallet_backup.py | 12 ++--- test/functional/wallet_balance.py | 16 +++---- test/functional/wallet_basic.py | 48 +++++++++---------- test/functional/wallet_change_address.py | 4 +- test/functional/wallet_coinbase_category.py | 6 +-- test/functional/wallet_create_tx.py | 2 +- test/functional/wallet_createwallet.py | 2 +- test/functional/wallet_disable.py | 4 +- test/functional/wallet_dump.py | 2 +- test/functional/wallet_fallbackfee.py | 2 +- test/functional/wallet_groups.py | 16 +++---- test/functional/wallet_hd.py | 12 ++--- test/functional/wallet_import_rescan.py | 6 +-- test/functional/wallet_importdescriptors.py | 8 ++-- test/functional/wallet_importmulti.py | 20 ++++---- test/functional/wallet_importprunedfunds.py | 8 ++-- test/functional/wallet_keypool_hd.py | 2 +- test/functional/wallet_keypool_topup.py | 6 +-- test/functional/wallet_labels.py | 16 +++---- test/functional/wallet_listreceivedby.py | 10 ++-- test/functional/wallet_listsinceblock.py | 26 +++++----- test/functional/wallet_listtransactions.py | 4 +- test/functional/wallet_multiwallet.py | 8 ++-- test/functional/wallet_orphanedreward.py | 10 ++-- test/functional/wallet_reorgsrestore.py | 14 +++--- test/functional/wallet_send.py | 8 ++-- test/functional/wallet_txn_clone.py | 6 +-- test/functional/wallet_txn_doublespend.py | 6 +-- test/functional/wallet_upgradetohd.py | 2 +- test/functional/wallet_upgradewallet.py | 4 +- test/functional/wallet_watchonly.py | 4 +- 143 files changed, 590 insertions(+), 590 deletions(-) diff --git a/test/functional/example_test.py b/test/functional/example_test.py index 1beaac3ba75d6..f9a00d3b5ad7c 100755 --- a/test/functional/example_test.py +++ b/test/functional/example_test.py @@ -148,7 +148,7 @@ def run_test(self): peer_messaging = self.nodes[0].add_p2p_connection(BaseNode()) # Generating a block on one of the nodes will get us out of IBD - blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)] + blocks = [int(self.generate(self.nodes[0], nblocks=1)[0], 16)] self.sync_all(self.nodes[0:2]) # Notice above how we called an RPC by calling a method with the same diff --git a/test/functional/feature_abortnode.py b/test/functional/feature_abortnode.py index ca135d23344b7..e849b855a8f5d 100755 --- a/test/functional/feature_abortnode.py +++ b/test/functional/feature_abortnode.py @@ -26,7 +26,7 @@ def setup_network(self): # We'll connect the nodes later def run_test(self): - self.nodes[0].generate(3) + self.generate(self.nodes[0], 3) datadir = get_datadir_path(self.options.tmpdir, 0) # Deleting the undo file will result in reorg failure @@ -34,10 +34,10 @@ def run_test(self): # Connecting to a node with a more work chain will trigger a reorg # attempt. 
- self.nodes[1].generate(3) + self.generate(self.nodes[1], 3) with self.nodes[0].assert_debug_log(["Failed to disconnect block"]): self.connect_nodes(0, 1) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) # Check that node0 aborted self.log.info("Waiting for crash") diff --git a/test/functional/feature_addressindex.py b/test/functional/feature_addressindex.py index 7def5b31fbf3e..c64f2b61cfddf 100755 --- a/test/functional/feature_addressindex.py +++ b/test/functional/feature_addressindex.py @@ -52,7 +52,7 @@ def run_test(self): self.log.info("Mining blocks...") mining_address = self.nodes[0].getnewaddress() - self.nodes[0].generatetoaddress(105, mining_address) + self.generatetoaddress(self.nodes[0], 105, mining_address) self.sync_all() chain_height = self.nodes[1].getblockcount() @@ -72,22 +72,22 @@ def run_test(self): self.log.info("Testing p2pkh and p2sh address index...") txid0 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 10) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) txidb0 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 10) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) txid1 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 15) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) txidb1 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 15) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) txid2 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 20) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) txidb2 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 20) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() @@ -141,7 +141,7 @@ def run_test(self): signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() txidsmany = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB") @@ -170,7 +170,7 @@ def run_test(self): tx.rehash() signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) spending_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() balance1 = self.nodes[1].getaddressbalance(address2) assert_equal(balance1["balance"], amount) @@ -184,7 +184,7 @@ def run_test(self): signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() balance2 = self.nodes[1].getaddressbalance(address2) @@ -233,12 +233,12 @@ def run_test(self): assert_equal(utxos2[0]["satoshis"], amount) # Check sorting of utxos - self.nodes[2].generate(150) + self.generate(self.nodes[2], 150) self.nodes[2].sendtoaddress(address2, 50) - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.nodes[2].sendtoaddress(address2, 50) - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all() utxos3 = self.nodes[1].getaddressutxos({"addresses": [address2]}) @@ -291,7 +291,7 @@ def run_test(self): assert_equal(mempool[2]["txid"], memtxid2) assert_equal(mempool[2]["index"], 1) - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all() mempool2 = self.nodes[2].getaddressmempool({"addresses": [address3]}) assert_equal(len(mempool2), 0) 
@@ -322,7 +322,7 @@ def run_test(self): address1script = CScript([OP_DUP, OP_HASH160, address1hash, OP_EQUALVERIFY, OP_CHECKSIG]) self.nodes[0].sendtoaddress(address1, 10) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() utxos = self.nodes[1].getaddressutxos({"addresses": [address1]}) diff --git a/test/functional/feature_asset_locks.py b/test/functional/feature_asset_locks.py index 930415ee81d4f..3a19b5f229be1 100755 --- a/test/functional/feature_asset_locks.py +++ b/test/functional/feature_asset_locks.py @@ -230,7 +230,7 @@ def generate_batch(self, count): batch = min(50, count) count -= batch self.bump_mocktime(batch) - self.nodes[1].generate(batch) + self.generate(self.nodes[1], batch) self.sync_all() # This functional test intentionally setup only 2 MN and only 2 Evo nodes @@ -256,11 +256,11 @@ def run_test(self): for _ in range(2): self.dynamically_add_masternode(evo=True) - node.generate(8) + self.generate(node, 8) self.sync_blocks() self.set_sporks() - node.generate(1) + self.generate(node, 1) self.sync_all() self.mempool_size = 0 @@ -295,10 +295,10 @@ def test_asset_locks(self, node_wallet, node, pubkey): assert_equal(rpc_tx["assetLockTx"]["creditOutputs"][0]["scriptPubKey"]["hex"], key_to_p2pkh_script(pubkey).hex()) assert_equal(rpc_tx["assetLockTx"]["creditOutputs"][1]["scriptPubKey"]["hex"], key_to_p2pkh_script(pubkey).hex()) self.validate_credit_pool_balance(0) - node.generate(1) + self.generate(node, 1) assert_equal(self.get_credit_pool_balance(node=node), locked_1) self.log.info("Generate a number of blocks to ensure this is the longest chain for later in the test when we reconsiderblock") - node.generate(12) + self.generate(node, 12) self.sync_all() self.validate_credit_pool_balance(locked_1) @@ -309,14 +309,14 @@ def test_asset_locks(self, node_wallet, node, pubkey): for inode in self.nodes: inode.invalidateblock(self.block_hash_1) assert_equal(self.get_credit_pool_balance(node=inode), 0) - node.generate(3) + self.generate(node, 3) self.sync_all() self.validate_credit_pool_balance(0) self.log.info("Resubmit asset lock tx to new chain...") # NEW tx appears asset_lock_tx_2 = self.create_assetlock(coin, locked_2, pubkey) txid_in_block = self.send_tx(asset_lock_tx_2) - node.generate(1) + self.generate(node, 1) self.sync_all() self.validate_credit_pool_balance(locked_2) self.log.info("Reconsider old blocks...") @@ -401,7 +401,7 @@ def test_asset_unlocks(self, node_wallet, node, pubkey): self.mempool_size += 2 self.check_mempool_size() self.validate_credit_pool_balance(locked) - node.generate(1) + self.generate(node, 1) self.sync_all() assert_equal(rawtx["instantlock"], False) assert_equal(rawtx["chainlock"], False) @@ -424,14 +424,14 @@ def test_asset_unlocks(self, node_wallet, node, pubkey): self.log.info("Checking credit pool amount still is same...") self.validate_credit_pool_balance(locked - 1 * COIN) self.send_tx(asset_unlock_tx_late) - node.generate(1) + self.generate(node, 1) self.sync_all() self.validate_credit_pool_balance(locked - 2 * COIN) self.log.info("Generating many blocks to make quorum far behind (even still active)...") self.generate_batch(too_late_height - node.getblockcount() - 1) self.check_mempool_result(tx=asset_unlock_tx_too_late, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}}) - node.generate(1) + self.generate(node, 1) self.sync_all() self.check_mempool_result(tx=asset_unlock_tx_too_late, result_expected={'allowed': False, 'reject-reason' : 'bad-assetunlock-too-late'}) @@ -456,7 +456,7 
@@ def test_asset_unlocks(self, node_wallet, node, pubkey): self.log.info("Forcibly mining asset_unlock_tx_too_late and ensure block is invalid") self.create_and_check_block([asset_unlock_tx_too_late], expected_error = "bad-assetunlock-not-active-quorum") - node.generate(1) + self.generate(node, 1) self.sync_all() self.validate_credit_pool_balance(locked - 2 * COIN) @@ -476,7 +476,7 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): self.check_mempool_result(tx=asset_unlock_tx_full, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}}) txid_in_block = self.send_tx(asset_unlock_tx_full) - node.generate(1) + self.generate(node, 1) self.sync_all() self.ensure_tx_is_not_mined(txid_in_block) @@ -490,7 +490,7 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): txid_in_block = self.send_tx(asset_unlock_tx_full) expected_balance = (Decimal(self.get_credit_pool_balance()) - Decimal(tiny_amount)) - node.generate(1) + self.generate(node, 1) self.sync_all() self.log.info("Check txid_in_block was mined") block = node.getblock(node.getbestblockhash()) @@ -508,7 +508,7 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): self.check_mempool_result(tx=spend_withdrawal, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}}) spend_txid_in_block = self.send_tx(spend_withdrawal) - node.generate(1) + self.generate(node, 1) block = node.getblock(node.getbestblockhash()) assert spend_txid_in_block in block['tx'] @@ -528,7 +528,7 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): self.send_tx_simple(tx) self.log.info(f"Collecting coins in pool... Collected {total}/{10_901 * COIN}") self.sync_mempools() - node.generate(1) + self.generate(node, 1) self.sync_all() credit_pool_balance_1 = self.get_credit_pool_balance() assert_greater_than(credit_pool_balance_1, 10_901 * COIN) @@ -548,7 +548,7 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): node.prioritisetransaction(last_txid, next_amount // 10000) self.sync_mempools() - node.generate(1) + self.generate(node, 1) self.sync_all() new_total = self.get_credit_pool_balance() @@ -561,7 +561,7 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): assert_greater_than_or_equal(limit_amount_1, amount_actually_withdrawn) assert_equal(amount_actually_withdrawn, 900 * COIN + 10001) - node.generate(1) + self.generate(node, 1) self.sync_all() self.log.info("Checking that exactly 1 tx stayed in mempool...") self.mempool_size = 1 @@ -575,7 +575,7 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): asset_unlock_tx = self.create_assetunlock(index, amount_to_withdraw_2, pubkey) self.send_tx_simple(asset_unlock_tx) self.sync_mempools() - node.generate(1) + self.generate(node, 1) self.sync_all() new_total = self.get_credit_pool_balance() amount_actually_withdrawn = total - new_total @@ -599,10 +599,10 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): index += 1 asset_unlock_tx = self.create_assetunlock(index, limit_amount_2, pubkey) self.send_tx(asset_unlock_tx) - node.generate(1) + self.generate(node, 1) self.sync_all() assert_equal(new_total, self.get_credit_pool_balance()) - node.generate(1) + self.generate(node, 1) self.sync_all() new_total -= limit_amount_2 assert_equal(new_total, self.get_credit_pool_balance()) @@ -610,7 +610,7 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): index += 1 asset_unlock_tx = self.create_assetunlock(index, COIN, pubkey) self.send_tx(asset_unlock_tx) - node.generate(1) + 
self.generate(node, 1) self.sync_all() tip = self.nodes[0].getblockcount() @@ -644,7 +644,7 @@ def test_mn_rr(self, node_wallet, node, pubkey): assert_equal(platform_reward, all_mn_rewards * 375 // 1000) # 0.375 platform share assert_equal(platform_reward, 34371430) assert_equal(locked, self.get_credit_pool_balance()) - node.generate(1) + self.generate(node, 1) self.sync_all() locked += platform_reward assert_equal(locked, self.get_credit_pool_balance()) @@ -653,7 +653,7 @@ def test_mn_rr(self, node_wallet, node, pubkey): coin = coins.pop() self.send_tx(self.create_assetlock(coin, COIN, pubkey)) locked += platform_reward + COIN - node.generate(1) + self.generate(node, 1) self.sync_all() assert_equal(locked, self.get_credit_pool_balance()) diff --git a/test/functional/feature_backwards_compatibility.py b/test/functional/feature_backwards_compatibility.py index 0a61a284503a7..f9dee4e19e539 100755 --- a/test/functional/feature_backwards_compatibility.py +++ b/test/functional/feature_backwards_compatibility.py @@ -63,7 +63,7 @@ def setup_nodes(self): self.import_deterministic_coinbase_privkeys() def run_test(self): - self.nodes[0].generatetoaddress(101, self.nodes[0].getnewaddress()) + self.generatetoaddress(self.nodes[0], 101, self.nodes[0].getnewaddress()) self.sync_blocks() @@ -92,7 +92,7 @@ def run_test(self): address = wallet.getnewaddress() self.nodes[0].sendtoaddress(address, 1) self.sync_mempools() - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() # w1_v19: regular wallet, created with v0.19 diff --git a/test/functional/feature_bip68_sequence.py b/test/functional/feature_bip68_sequence.py index b66390feb6f79..25e3e56632316 100755 --- a/test/functional/feature_bip68_sequence.py +++ b/test/functional/feature_bip68_sequence.py @@ -54,7 +54,7 @@ def run_test(self): self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"] # Generate some coins - self.nodes[0].generate(110) + self.generate(self.nodes[0], 110) self.log.info("Running test disable flag") self.test_disable_flag() @@ -142,7 +142,7 @@ def test_sequence_lock_confirmed_inputs(self): for i in range(num_outputs): outputs[addresses[i]] = random.randint(1, 20)*0.01 self.nodes[0].sendmany("", outputs) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) utxos = self.nodes[0].listunspent() @@ -272,7 +272,7 @@ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): cur_time = self.mocktime for _ in range(10): self.nodes[0].setmocktime(cur_time + 600) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) cur_time += 600 assert tx2.hash in self.nodes[0].getrawmempool() @@ -287,7 +287,7 @@ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): self.nodes[0].setmocktime(cur_time+600) # Save block template now to use for the reorg later tmpl = self.nodes[0].getblocktemplate(NORMAL_GBT_REQUEST_PARAMS) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) assert tx2.hash not in self.nodes[0].getrawmempool() # Now that tx2 is not in the mempool, a sequence locked spend should @@ -295,7 +295,7 @@ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): tx3 = test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False) assert tx3.hash in self.nodes[0].getrawmempool() - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) assert tx3.hash not in self.nodes[0].getrawmempool() # One more test, this time using height locks @@ -348,7 +348,7 @@ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): # Reset the chain and get rid of the 
mocktimed-blocks self.nodes[0].setmocktime(self.mocktime) self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height+1)) - self.nodes[0].generate(10) + self.generate(self.nodes[0], 10) # Make sure that BIP68 isn't being used to validate blocks prior to # activation height. If more blocks are mined prior to this test @@ -401,9 +401,9 @@ def activateCSV(self): min_activation_height = 432 height = self.nodes[0].getblockcount() assert_greater_than(min_activation_height - height, 2) - self.nodes[0].generate(min_activation_height - height - 2) + self.generate(self.nodes[0], min_activation_height - height - 2) assert not softfork_active(self.nodes[0], 'csv') - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) assert softfork_active(self.nodes[0], 'csv') self.sync_blocks() diff --git a/test/functional/feature_blockfilterindex_prune.py b/test/functional/feature_blockfilterindex_prune.py index 2f60d214563cb..cbf7efc282a7c 100755 --- a/test/functional/feature_blockfilterindex_prune.py +++ b/test/functional/feature_blockfilterindex_prune.py @@ -26,9 +26,9 @@ def run_test(self): self.sync_index(height=200) assert_greater_than(len(self.nodes[0].getblockfilter(self.nodes[0].getbestblockhash())['filter']), 0) # Mine two batches of blocks to avoid hitting NODE_NETWORK_LIMITED_MIN_BLOCKS disconnection - self.nodes[0].generate(250) + self.generate(self.nodes[0], 250) self.sync_all() - self.nodes[0].generate(250) + self.generate(self.nodes[0], 250) self.sync_all() self.sync_index(height=700) @@ -47,7 +47,7 @@ def run_test(self): self.log.info("make sure accessing the blockfilters throws an error") assert_raises_rpc_error(-1, "Index is not enabled for filtertype basic", self.nodes[0].getblockfilter, self.nodes[0].getblockhash(2)) - self.nodes[0].generate(1000) + self.generate(self.nodes[0], 1000) self.log.info("prune below the blockfilterindexes best block while blockfilters are disabled") pruneheight_new = self.nodes[0].pruneblockchain(1000) diff --git a/test/functional/feature_blocksdir.py b/test/functional/feature_blocksdir.py index ebe9bbaee7588..6ceb723a85adf 100755 --- a/test/functional/feature_blocksdir.py +++ b/test/functional/feature_blocksdir.py @@ -30,7 +30,7 @@ def run_test(self): self.log.info("Starting with existing blocksdir ...") self.start_node(0, ["-blocksdir=" + blocksdir_path]) self.log.info("mining blocks..") - self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address) assert os.path.isfile(os.path.join(blocksdir_path, self.chain, "blocks", "blk00000.dat")) assert os.path.isdir(os.path.join(self.nodes[0].datadir, self.chain, "blocks", "index")) diff --git a/test/functional/feature_cltv.py b/test/functional/feature_cltv.py index fda6037bc96e6..7e1a69f8ac5c5 100755 --- a/test/functional/feature_cltv.py +++ b/test/functional/feature_cltv.py @@ -110,8 +110,8 @@ def run_test(self): self.test_cltv_info(is_active=False) self.log.info("Mining %d blocks", CLTV_HEIGHT - 2) - wallet.generate(10) - self.nodes[0].generate(CLTV_HEIGHT - 2 - 10) + self.generate(wallet, 10) + self.generate(self.nodes[0], CLTV_HEIGHT - 2 - 10) assert_equal(self.nodes[0].getblockcount(), CLTV_HEIGHT - 2) self.log.info("Test that invalid-according-to-CLTV transactions can still appear in a block") diff --git a/test/functional/feature_coinstatsindex.py b/test/functional/feature_coinstatsindex.py index 0807aacf5fdf2..439307b4a4c09 100755 --- a/test/functional/feature_coinstatsindex.py 
+++ b/test/functional/feature_coinstatsindex.py @@ -81,10 +81,10 @@ def _test_coin_stats_index(self): index_hash_options = ['none', 'muhash'] # Generate a normal transaction and mine it - node.generate(101) + self.generate(node, 101) address = self.nodes[0].get_deterministic_priv_key().address node.sendtoaddress(address=address, amount=10, subtractfeefromamount=True) - node.generate(1) + self.generate(node, 1) self.sync_blocks(timeout=120) @@ -106,7 +106,7 @@ def _test_coin_stats_index(self): self.log.info("Test that gettxoutsetinfo() can get fetch data on specific heights with index") # Generate a new tip - node.generate(5) + self.generate(node, 5) for hash_option in index_hash_options: # Fetch old stats by height @@ -183,7 +183,7 @@ def _test_coin_stats_index(self): self.nodes[0].sendrawtransaction(tx2_hex) # Include both txs in a block - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() for hash_option in index_hash_options: @@ -242,7 +242,7 @@ def _test_coin_stats_index(self): res9 = index_node.gettxoutsetinfo('muhash') assert_equal(res8, res9) - index_node.generate(1) + self.generate(index_node, 1) res10 = index_node.gettxoutsetinfo('muhash') assert(res8['txouts'] < res10['txouts']) @@ -261,14 +261,14 @@ def _test_reorg_index(self): # Generate two block, let the index catch up, then invalidate the blocks index_node = self.nodes[1] - reorg_blocks = index_node.generatetoaddress(2, index_node.getnewaddress()) + reorg_blocks = self.generatetoaddress(index_node, 2, index_node.getnewaddress()) reorg_block = reorg_blocks[1] res_invalid = index_node.gettxoutsetinfo('muhash') index_node.invalidateblock(reorg_blocks[0]) assert_equal(index_node.gettxoutsetinfo('muhash')['height'], 110) # Add two new blocks - block = index_node.generate(2)[1] + block = self.generate(index_node, 2)[1] res = index_node.gettxoutsetinfo(hash_type='muhash', hash_or_height=None, use_index=False) # Test that the result of the reorged block is not returned for its old block height @@ -284,7 +284,7 @@ def _test_reorg_index(self): # Add another block, so we don't depend on reconsiderblock remembering which # blocks were touched by invalidateblock - index_node.generate(1) + self.generate(index_node, 1) self.sync_all() # Ensure that removing and re-adding blocks yields consistent results diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py index 4502c74a29652..092bb707e8de6 100755 --- a/test/functional/feature_csv_activation.py +++ b/test/functional/feature_csv_activation.py @@ -198,7 +198,7 @@ def run_test(self): self.miniwallet = MiniWallet(self.nodes[0], mode=MiniWalletMode.RAW_P2PK) self.log.info("Generate blocks in the past for coinbase outputs.") - self.coinbase_blocks = self.miniwallet.generate(COINBASE_BLOCK_COUNT) # blocks generated for inputs + self.coinbase_blocks = self.generate(self.miniwallet, COINBASE_BLOCK_COUNT) # blocks generated for inputs # set time so that there was enough time to build up to 1000 blocks 10 minutes apart on top of the last one # without worrying about getting into the future self.nodes[0].setmocktime(TIME_GENESIS_BLOCK + 600 * 1000 + 100) @@ -246,7 +246,7 @@ def run_test(self): bip113input = self.send_generic_input_tx(self.coinbase_blocks) self.nodes[0].setmocktime(self.last_block_time + 600) - inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 431 + inputblockhash = self.generate(self.nodes[0], 1)[0] # 1 block generated for inputs to be in chain at height 431 
self.nodes[0].setmocktime(TIME_GENESIS_BLOCK + 600 * 1000 + 100) self.tip = int(inputblockhash, 16) self.tipheight += 1 diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py index 351edefdea404..0a6d1b5a71dc5 100755 --- a/test/functional/feature_dersig.py +++ b/test/functional/feature_dersig.py @@ -78,7 +78,7 @@ def run_test(self): self.test_dersig_info(is_active=False) self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2) - self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.miniwallet.generate(DERSIG_HEIGHT - 2)] + self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.generate(self.miniwallet, DERSIG_HEIGHT - 2)] self.log.info("Test that a transaction with non-DER signature can still appear in a block") diff --git a/test/functional/feature_dip0020_activation.py b/test/functional/feature_dip0020_activation.py index cf6898bcf3497..6f375f0649772 100755 --- a/test/functional/feature_dip0020_activation.py +++ b/test/functional/feature_dip0020_activation.py @@ -58,7 +58,7 @@ def run_test(self): # This tx should be completely valid, should be included in mempool and mined in the next block assert txid in set(node.getrawmempool()) - node.generate(1) + self.generate(node, 1) assert txid not in set(node.getrawmempool()) # Create spending tx @@ -83,9 +83,9 @@ def run_test(self): helper_peer.send_blocks_and_test([test_block], node, success=False, reject_reason='block-validation-failed', expect_disconnect=True) self.log.info("Generate enough blocks to activate DIP0020 opcodes") - node.generate(97) + self.generate(node, 97) assert not softfork_active(node, 'dip0020') - node.generate(1) + self.generate(node, 1) assert softfork_active(node, 'dip0020') # flush state to disk before potential crashes below @@ -103,7 +103,7 @@ def run_test(self): # txes spending new opcodes still won't be accepted into mempool if we roll back to the previous tip node.invalidateblock(node.getbestblockhash()) assert tx0id not in set(node.getrawmempool()) - node.generate(1) + self.generate(node, 1) self.log.info("Transactions spending coins with new opcodes are accepted one block after DIP0020 activation block") node.sendrawtransaction(tx0_hex) diff --git a/test/functional/feature_dip3_deterministicmns.py b/test/functional/feature_dip3_deterministicmns.py index 66cbd48c19377..e31f74d97fbd2 100755 --- a/test/functional/feature_dip3_deterministicmns.py +++ b/test/functional/feature_dip3_deterministicmns.py @@ -49,7 +49,7 @@ def start_controller_node(self): def run_test(self): self.log.info("funding controller node") while self.nodes[0].getbalance() < (self.num_initial_mn + 3) * 1000: - self.nodes[0].generate(10) # generate enough for collaterals + self.generate(self.nodes[0], 10) # generate enough for collaterals self.log.info("controller node has {} dash".format(self.nodes[0].getbalance())) # Make sure we're below block 135 (which activates dip3) @@ -65,11 +65,11 @@ def run_test(self): mns.append(before_dip3_mn) # block 150 starts enforcing DIP3 MN payments - self.nodes[0].generate(150 - self.nodes[0].getblockcount()) + self.generate(self.nodes[0], 150 - self.nodes[0].getblockcount()) assert self.nodes[0].getblockcount() == 150 self.log.info("mining final block for DIP3 activation") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # We have hundreds of blocks to sync here, give it more time self.log.info("syncing blocks for all nodes") @@ -101,7 +101,7 @@ def run_test(self): self.log.info("register %s" % mn.alias) self.register_mn(self.nodes[0], mn) 
- self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) if not start: self.start_mn(mn) @@ -125,7 +125,7 @@ def run_test(self): assert_equal(rpc_collateral_address, old_collateral_address) dummy_txin = self.spend_mn_collateral(mns[i], with_dummy_input_output=True) dummy_txins.append(dummy_txin) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() mns_tmp.remove(mns[i]) self.assert_mnlists(mns_tmp) @@ -144,7 +144,7 @@ def run_test(self): self.log.info("cause a reorg with a double spend and check that mnlists are still correct on all nodes") self.mine_double_spend(mns, self.nodes[0], dummy_txins, self.nodes[0].getnewaddress()) - self.nodes[0].generate(spend_mns_count) + self.generate(self.nodes[0], spend_mns_count) self.sync_all() self.assert_mnlists(mns_tmp) @@ -152,7 +152,7 @@ def run_test(self): for i in range(20): node = self.nodes[i % len(self.nodes)] self.test_invalid_mn_payment(mns, node) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() self.log.info("testing ProUpServTx") @@ -175,7 +175,7 @@ def run_test(self): bt = self.nodes[0].getblocktemplate() expected_payee = bt['masternode'][0]['payee'] expected_amount = bt['masternode'][0]['amount'] - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() if expected_payee == multisig: block = self.nodes[0].getblock(self.nodes[0].getbestblockhash()) @@ -198,7 +198,7 @@ def run_test(self): self.register_mn(self.nodes[0], new_mn) mns[i] = new_mn - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() self.assert_mnlists(mns) self.log.info("restarting MN %s" % new_mn.alias) @@ -217,7 +217,7 @@ def run_test(self): # also check if funds from payout address are used when no fee source address is specified node.sendtoaddress(mn.rewards_address, 0.001) node.protx('update_registrar', mn.protx_hash, "", new_voting_address, "") - node.generate(1) + self.generate(node, 1) self.sync_all() new_dmnState = mn.node.masternode("status")["dmnState"] new_voting_address_from_rpc = new_dmnState["votingAddress"] @@ -245,7 +245,7 @@ def create_mn_collateral(self, node, mn): mn.collateral_address = node.getnewaddress() mn.collateral_txid = node.sendtoaddress(mn.collateral_address, 1000) mn.collateral_vout = None - node.generate(1) + self.generate(node, 1) rawtx = node.getrawtransaction(mn.collateral_txid, 1) for txout in rawtx['vout']: @@ -277,7 +277,7 @@ def register_mn(self, node, mn): mn.rewards_address = node.getnewaddress() mn.protx_hash = node.protx('register', mn.collateral_txid, mn.collateral_vout, '127.0.0.1:%d' % mn.p2p_port, mn.ownerAddr, mn.operatorAddr, mn.votingAddr, mn.operator_reward, mn.rewards_address, mn.fundsAddr) - node.generate(1) + self.generate(node, 1) def start_mn(self, mn): if len(self.nodes) <= mn.idx: @@ -295,7 +295,7 @@ def spend_mn_collateral(self, mn, with_dummy_input_output=False): def update_mn_payee(self, mn, payee): self.nodes[0].sendtoaddress(mn.fundsAddr, 0.001) self.nodes[0].protx('update_registrar', mn.protx_hash, '', '', payee, mn.fundsAddr) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() info = self.nodes[0].protx('info', mn.protx_hash) assert info['state']['payoutAddress'] == payee @@ -303,7 +303,7 @@ def update_mn_payee(self, mn, payee): def test_protx_update_service(self, mn): self.nodes[0].sendtoaddress(mn.fundsAddr, 0.001) self.nodes[0].protx('update_service', mn.protx_hash, '127.0.0.2:%d' % mn.p2p_port, mn.blsMnkey, "", mn.fundsAddr) - self.nodes[0].generate(1) + 
self.generate(self.nodes[0], 1) self.sync_all() for node in self.nodes: protx_info = node.protx('info', mn.protx_hash) @@ -313,7 +313,7 @@ def test_protx_update_service(self, mn): # undo self.nodes[0].protx('update_service', mn.protx_hash, '127.0.0.1:%d' % mn.p2p_port, mn.blsMnkey, "", mn.fundsAddr) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) def assert_mnlists(self, mns): for node in self.nodes: diff --git a/test/functional/feature_dip3_v19.py b/test/functional/feature_dip3_v19.py index 41e3ce900bd60..5c72e4d23d093 100755 --- a/test/functional/feature_dip3_v19.py +++ b/test/functional/feature_dip3_v19.py @@ -86,7 +86,7 @@ def run_test(self): evo_info_0 = self.dynamically_add_masternode(evo=True, rnd=7) assert evo_info_0 is not None - self.nodes[0].generate(8) + self.generate(self.nodes[0], 8) self.sync_blocks(self.nodes) self.log.info("Checking that protxs with duplicate EvoNodes fields are rejected") @@ -97,7 +97,7 @@ def run_test(self): assert evo_info_2 is None evo_info_3 = self.dynamically_add_masternode(evo=True, rnd=9) assert evo_info_3 is not None - self.nodes[0].generate(8) + self.generate(self.nodes[0], 8) self.sync_blocks(self.nodes) self.dynamically_evo_update_service(evo_info_0, 9, should_be_rejected=True) @@ -126,13 +126,13 @@ def test_revoke_protx(self, node_idx, revoke_protx, revoke_keyoperator): funds_address = self.nodes[0].getnewaddress() fund_txid = self.nodes[0].sendtoaddress(funds_address, 1) self.wait_for_instantlock(fund_txid, self.nodes[0]) - tip = self.nodes[0].generate(1)[0] + tip = self.generate(self.nodes[0], 1)[0] assert_equal(self.nodes[0].getrawtransaction(fund_txid, 1, tip)['confirmations'], 1) self.sync_all(self.nodes) protx_result = self.nodes[0].protx('revoke', revoke_protx, revoke_keyoperator, 1, funds_address) self.wait_for_instantlock(protx_result, self.nodes[0]) - tip = self.nodes[0].generate(1)[0] + tip = self.generate(self.nodes[0], 1)[0] assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1) # Revoking a MN results in disconnects. Wait for disconnects to actually happen # and then reconnect the corresponding node back to let sync_blocks finish correctly. 
diff --git a/test/functional/feature_dip4_coinbasemerkleroots.py b/test/functional/feature_dip4_coinbasemerkleroots.py index d4fbf60795fc3..28d01b445ca7b 100755 --- a/test/functional/feature_dip4_coinbasemerkleroots.py +++ b/test/functional/feature_dip4_coinbasemerkleroots.py @@ -91,7 +91,7 @@ def run_test(self): ############################# # Now start testing quorum commitment merkle roots - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) oldhash = self.nodes[0].getbestblockhash() # Test DIP8 activation once with a pre-existing quorum and once without (we don't know in which order it will activate on mainnet) @@ -248,7 +248,7 @@ def activate_dip8(self, slow_mode=False): self.log.info("Wait for dip0008 activation") while self.nodes[0].getblockcount() < DIP0008_HEIGHT: self.bump_mocktime(10) - self.nodes[0].generate(10) + self.generate(self.nodes[0], 10) if slow_mode: self.sync_blocks() self.sync_blocks() @@ -301,7 +301,7 @@ def confirm_mns(self): break if not found_unconfirmed: break - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() if __name__ == '__main__': diff --git a/test/functional/feature_fee_estimation.py b/test/functional/feature_fee_estimation.py index ddf3096095392..59f30a439bd0f 100755 --- a/test/functional/feature_fee_estimation.py +++ b/test/functional/feature_fee_estimation.py @@ -207,7 +207,7 @@ def transact_and_mine(self, numblocks, mining_node): tx_kbytes = (len(txhex) // 2) / 1000.0 self.fees_per_kb.append(float(fee) / tx_kbytes) self.sync_mempools(wait=.1) - mined = mining_node.getblock(mining_node.generate(1)[0], True)["tx"] + mined = mining_node.getblock(self.generate(mining_node, 1)[0], True)["tx"] self.sync_blocks(wait=.1) # update which txouts are confirmed newmem = [] @@ -231,7 +231,7 @@ def run_test(self): # Mine while len(self.nodes[0].getrawmempool()) > 0: - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # Repeatedly split those 2 outputs, doubling twice for each rep # Use txouts to monitor the available utxo, since these won't be tracked in wallet @@ -241,12 +241,12 @@ def run_test(self): while len(self.txouts) > 0: split_inputs(self.nodes[0], self.txouts, self.txouts2) while len(self.nodes[0].getrawmempool()) > 0: - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # Double txouts2 to txouts while len(self.txouts2) > 0: split_inputs(self.nodes[0], self.txouts2, self.txouts) while len(self.nodes[0].getrawmempool()) > 0: - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) reps += 1 self.log.info("Finished splitting") @@ -279,7 +279,7 @@ def run_test(self): # Finish by mining a normal-sized block: while len(self.nodes[1].getrawmempool()) > 0: - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_blocks(self.nodes[0:3], wait=.1) self.log.info("Final estimates after emptying mempools") diff --git a/test/functional/feature_governance.py b/test/functional/feature_governance.py index 4d02556416bf0..9d4497e73d5c9 100755 --- a/test/functional/feature_governance.py +++ b/test/functional/feature_governance.py @@ -89,7 +89,7 @@ def run_test(self): assert_equal(len(self.nodes[0].gobject("list-prepared")), 0) self.log.info("Check 1st superblock before v20") - self.nodes[0].generate(3) + self.generate(self.nodes[0], 3) self.bump_mocktime(3) self.sync_blocks() assert_equal(self.nodes[0].getblockcount(), 130) @@ -97,7 +97,7 @@ def run_test(self): self.check_superblockbudget(False) self.log.info("Check 2nd superblock before v20") - self.nodes[0].generate(10) + 
self.generate(self.nodes[0], 10) self.bump_mocktime(10) self.sync_blocks() assert_equal(self.nodes[0].getblockcount(), 140) @@ -117,7 +117,7 @@ def run_test(self): p1_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_1", self.p1_amount, self.p1_payout_address) p2_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_2", self.p2_amount, self.p2_payout_address) - self.nodes[0].generate(6) + self.generate(self.nodes[0], 6) self.bump_mocktime(6) self.sync_blocks() @@ -165,7 +165,7 @@ def run_test(self): self.log.info("v20 is expected to be activate since block 160") assert block_count + n < 160 for _ in range(n - 1): - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.sync_blocks() self.check_superblockbudget(False) @@ -203,7 +203,7 @@ def run_test(self): assert_equal(more_votes, False) self.log.info("Move 1 block enabling the Superblock maturity window on non-isolated nodes") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) assert_equal(self.nodes[0].getblockcount(), 150) assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["active"], False) @@ -214,7 +214,7 @@ def run_test(self): assert_equal(has_trigger, False) self.log.info("Move 1 block inside the Superblock maturity window on non-isolated nodes") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.log.info("There is now new 'winner' who should submit new trigger and vote for it") @@ -232,7 +232,7 @@ def run_test(self): assert(amount_str in payment_amounts_expected) self.log.info("Move another block inside the Superblock maturity window on non-isolated nodes") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.log.info("Every non-isolated MN should vote for the same trigger now, no new triggers should be created") @@ -268,7 +268,7 @@ def sync_gov(node): assert_equal(more_triggers, False) self.log.info("Move another block inside the Superblock maturity window") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.sync_blocks() @@ -284,7 +284,7 @@ def sync_gov(node): self.log.info("Move another block inside the Superblock maturity window") with self.nodes[1].assert_debug_log(["CGovernanceManager::VoteGovernanceTriggers"]): - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.sync_blocks() @@ -298,7 +298,7 @@ def sync_gov(node): self.log.info("Move remaining n blocks until actual Superblock") for i in range(n): - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.sync_blocks() # comparing to 159 because bip9 forks are active when the tip is one block behind the activation height @@ -310,7 +310,7 @@ def sync_gov(node): self.log.info("Move a few block past the recent superblock height and make sure we have no new votes") for _ in range(5): with self.nodes[1].assert_debug_log("", [f"Voting NO-FUNDING for trigger:{winning_trigger_hash} success"]): - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.sync_blocks() # Votes on both triggers should NOT change @@ -322,13 +322,13 @@ def sync_gov(node): self.log.info("Move remaining n blocks until the next Superblock") for _ in range(n - 1): - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.sync_blocks() self.log.info("Wait for new trigger and votes") 
self.wait_until(lambda: have_trigger_for_height(self.nodes, 180)) self.log.info("Mine superblock") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.sync_blocks() assert_equal(self.nodes[0].getblockcount(), 180) @@ -337,14 +337,14 @@ def sync_gov(node): self.log.info("Mine and check a couple more superblocks") for i in range(2): for _ in range(sb_cycle - 1): - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.sync_blocks() # Wait for new trigger and votes sb_block_height = 180 + (i + 1) * sb_cycle self.wait_until(lambda: have_trigger_for_height(self.nodes, sb_block_height)) # Mine superblock - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(1) self.sync_blocks() assert_equal(self.nodes[0].getblockcount(), sb_block_height) diff --git a/test/functional/feature_governance_cl.py b/test/functional/feature_governance_cl.py index 718bdc80d3456..fdc9f01ce4bce 100755 --- a/test/functional/feature_governance_cl.py +++ b/test/functional/feature_governance_cl.py @@ -69,7 +69,7 @@ def run_test(self): n = sb_cycle - self.nodes[0].getblockcount() % sb_cycle for _ in range(n): self.bump_mocktime(156) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() self.log.info("Prepare proposals") @@ -84,7 +84,7 @@ def run_test(self): p1_collateral_prepare = self.prepare_object(1, uint256_to_string(0), proposal_time, 1, "Proposal_1", self.p1_amount, self.p1_payout_address) self.bump_mocktime(60 * 10 + 1) - self.nodes[0].generate(6) + self.generate(self.nodes[0], 6) self.bump_mocktime(6 * 156) self.sync_blocks() @@ -116,7 +116,7 @@ def run_test(self): assert n >= 0 for _ in range(n + 1): self.bump_mocktime(156) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(self.nodes[0:5]) self.log.info("Wait for new trigger and votes on non-isolated nodes") @@ -130,7 +130,7 @@ def run_test(self): self.log.info("Move remaining n blocks until the next Superblock") for _ in range(n - 1): self.bump_mocktime(156) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(self.nodes[0:5]) # Confirm all is good @@ -138,20 +138,20 @@ def run_test(self): self.log.info("Mine superblock") self.bump_mocktime(156) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(self.nodes[0:5]) self.wait_for_chainlocked_block(self.nodes[0], self.nodes[0].getbestblockhash()) self.log.info("Mine (superblock cycle + 1) blocks on non-isolated nodes to forget about this trigger") for _ in range(sb_cycle): self.bump_mocktime(156) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(self.nodes[0:5]) # Should still have at least 1 trigger for the old sb cycle and 0 for the current one assert len(self.nodes[0].gobject("list", "valid", "triggers")) >= 1 assert not have_trigger_for_height(self.nodes[0:5], sb_block_height + sb_cycle) self.bump_mocktime(156) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(self.nodes[0:5]) # Trigger scheduler to mark old triggers for deletion self.bump_mocktime(5 * 60) @@ -164,7 +164,7 @@ def run_test(self): self.log.info("Reconnect isolated node and confirm the next ChainLock will let it sync") self.reconnect_isolated_node(5, 0) assert_equal(self.nodes[5].mnsync("status")["IsSynced"], False) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # NOTE: bumping mocktime too much after recent reconnect can result in "timeout downloading block" self.bump_mocktime(1) 
self.sync_blocks() diff --git a/test/functional/feature_llmq_chainlocks.py b/test/functional/feature_llmq_chainlocks.py index dec019386817b..d56409032d67a 100755 --- a/test/functional/feature_llmq_chainlocks.py +++ b/test/functional/feature_llmq_chainlocks.py @@ -38,7 +38,7 @@ def run_test(self): self.test_coinbase_best_cl(self.nodes[0], expected_cl_in_cb=False) # v20 is active, no quorums, no CLs - null CL in CbTx - nocl_block_hash = self.nodes[0].generate(1)[0] + nocl_block_hash = self.generate(self.nodes[0], 1)[0] self.test_coinbase_best_cl(self.nodes[0], expected_cl_in_cb=True, expected_null_cl=True) cbtx = self.nodes[0].getspecialtxes(nocl_block_hash, 5, 1, 0, 2)[0] assert_equal(cbtx["instantlock"], False) @@ -59,7 +59,7 @@ def run_test(self): self.log.info("Mine single block, wait for chainlock") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) self.test_coinbase_best_cl(self.nodes[0]) @@ -70,7 +70,7 @@ def run_test(self): assert_equal(cbtx["chainlock"], True) self.log.info("Mine many blocks, wait for chainlock") - self.nodes[0].generate(20) + self.generate(self.nodes[0], 20) # We need more time here due to 20 blocks being generated at once self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash(), timeout=30) self.test_coinbase_best_cl(self.nodes[0]) @@ -90,7 +90,7 @@ def run_test(self): self.log.info("Generate new blocks and verify that they are not chainlocked") previous_block_hash = self.nodes[0].getbestblockhash() for _ in range(2): - block_hash = self.nodes[0].generate(1)[0] + block_hash = self.generate(self.nodes[0], 1)[0] self.wait_for_chainlocked_block_all_nodes(block_hash, expected=False) assert self.nodes[0].getblock(previous_block_hash)["chainlock"] @@ -101,18 +101,18 @@ def run_test(self): self.isolate_node(0) node0_mining_addr = self.nodes[0].getnewaddress() node0_tip = self.nodes[0].getbestblockhash() - self.nodes[1].generatetoaddress(5, node0_mining_addr) + self.generatetoaddress(self.nodes[1], 5, node0_mining_addr) self.wait_for_chainlocked_block(self.nodes[1], self.nodes[1].getbestblockhash()) self.test_coinbase_best_cl(self.nodes[0]) assert self.nodes[0].getbestblockhash() == node0_tip self.reconnect_isolated_node(0, 1) - self.nodes[1].generatetoaddress(1, node0_mining_addr) + self.generatetoaddress(self.nodes[1], 1, node0_mining_addr) self.wait_for_chainlocked_block_all_nodes(self.nodes[1].getbestblockhash()) self.test_coinbase_best_cl(self.nodes[0]) self.log.info("Isolate node, mine on another, reconnect and submit CL via RPC") self.isolate_node(0) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.wait_for_chainlocked_block(self.nodes[1], self.nodes[1].getbestblockhash()) best_0 = self.nodes[0].getbestchainlock() best_1 = self.nodes[1].getbestchainlock() @@ -134,13 +134,13 @@ def run_test(self): self.log.info("Isolate node, mine on both parts of the network, and reconnect") self.isolate_node(0) - bad_tip = self.nodes[0].generate(5)[-1] - self.nodes[1].generatetoaddress(1, node0_mining_addr) + bad_tip = self.generate(self.nodes[0], 5)[-1] + self.generatetoaddress(self.nodes[1], 1, node0_mining_addr) good_tip = self.nodes[1].getbestblockhash() self.wait_for_chainlocked_block(self.nodes[1], good_tip) assert not self.nodes[0].getblock(self.nodes[0].getbestblockhash())["chainlock"] self.reconnect_isolated_node(0, 1) - self.nodes[1].generatetoaddress(1, node0_mining_addr) + self.generatetoaddress(self.nodes[1], 1, node0_mining_addr) 
self.wait_for_chainlocked_block_all_nodes(self.nodes[1].getbestblockhash()) self.test_coinbase_best_cl(self.nodes[0]) assert self.nodes[0].getblock(self.nodes[0].getbestblockhash())["previousblockhash"] == good_tip @@ -163,10 +163,10 @@ def run_test(self): assert self.nodes[0].getbestblockhash() == good_tip self.nodes[0].invalidateblock(good_tip) self.log.info("Now try to reorg the chain") - self.nodes[0].generate(2) + self.generate(self.nodes[0], 2) time.sleep(6) assert self.nodes[1].getbestblockhash() == good_tip - bad_tip = self.nodes[0].generate(2)[-1] + bad_tip = self.generate(self.nodes[0], 2)[-1] time.sleep(6) assert self.nodes[0].getbestblockhash() == bad_tip assert self.nodes[1].getbestblockhash() == good_tip @@ -175,7 +175,7 @@ def run_test(self): self.nodes[0].reconsiderblock(good_tip) assert self.nodes[0].getbestblockhash() != good_tip good_fork = good_tip - good_tip = self.nodes[1].generatetoaddress(1, node0_mining_addr)[-1] # this should mark bad_tip as conflicting + good_tip = self.generatetoaddress(self.nodes[1], 1, node0_mining_addr)[-1] # this should mark bad_tip as conflicting self.wait_for_chainlocked_block_all_nodes(good_tip) self.test_coinbase_best_cl(self.nodes[0]) assert self.nodes[0].getbestblockhash() == good_tip @@ -203,7 +203,7 @@ def run_test(self): txs.append(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)) txs += self.create_chained_txs(self.nodes[0], 1) self.log.info("Assert that after block generation these TXs are NOT included (as they are \"unsafe\")") - node0_tip = self.nodes[0].generate(1)[-1] + node0_tip = self.generate(self.nodes[0], 1)[-1] for txid in txs: tx = self.nodes[0].getrawtransaction(txid, 1) assert "confirmations" not in tx @@ -214,7 +214,7 @@ def run_test(self): self.log.info("Disable LLMQ based InstantSend for a very short time (this never gets propagated to other nodes)") self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 4070908800) self.log.info("Now the TXs should be included") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 0) self.log.info("Assert that TXs got included now") for txid in txs: @@ -238,7 +238,7 @@ def run_test(self): self.log.info("Test that new node can mine without Chainlock info") tip_0 = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2) - self.nodes[added_idx].generate(1) + self.generate(self.nodes[added_idx], 1) self.sync_blocks(self.nodes) tip_1 = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2) assert_equal(tip_1['cbTx']['bestCLSignature'], tip_0['cbTx']['bestCLSignature']) @@ -297,12 +297,12 @@ def test_coinbase_best_cl(self, node, expected_cl_in_cb=True, expected_null_cl=F def test_bestCLHeightDiff(self, mn_rr_active): # We need 2 blocks we can grab clsigs from for _ in range(2): - self.wait_for_chainlocked_block_all_nodes(self.nodes[0].generate(1)[0]) + self.wait_for_chainlocked_block_all_nodes(self.generate(self.nodes[0], 1)[0]) assert_equal(softfork_active(self.nodes[1], "mn_rr"), mn_rr_active) tip1_hash = self.nodes[1].getbestblockhash() self.isolate_node(1) - tip0_hash = self.nodes[0].generate(1)[0] + tip0_hash = self.generate(self.nodes[0], 1)[0] block_hex = self.nodes[0].getblock(tip0_hash, 0) mal_block = CBlock() mal_block.deserialize(BytesIO(bytes.fromhex(block_hex))) diff --git a/test/functional/feature_llmq_connections.py b/test/functional/feature_llmq_connections.py index f75edc94d5e2c..10aab3789dcf4 100755 --- a/test/functional/feature_llmq_connections.py +++ 
b/test/functional/feature_llmq_connections.py @@ -44,7 +44,7 @@ def run_test(self): self.wait_for_sporks_same() self.log.info("mining one block and waiting for all members to connect to each other") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) for mn in self.get_quorum_masternodes(q): self.wait_for_mnauth(mn.node, 4) diff --git a/test/functional/feature_llmq_data_recovery.py b/test/functional/feature_llmq_data_recovery.py index 97c12c6fd027d..e7f2bed482fd7 100755 --- a/test/functional/feature_llmq_data_recovery.py +++ b/test/functional/feature_llmq_data_recovery.py @@ -46,7 +46,7 @@ def restart_mn(self, mn, reindex=False, qvvec_sync=None, qdata_recovery_enabled= self.connect_nodes(mn.node.index, 0) if qdata_recovery_enabled: # trigger recovery threads and wait for them to start - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.bump_mocktime(self.quorum_data_thread_request_timeout_seconds + 1) time.sleep(1) @@ -177,14 +177,14 @@ def run_test(self): self.test_mns(llmq_test_v17, quorum_hash_recover, valid_mns=[last_resort_v17], all_mns=member_mns_recover_v17) # If recovery would be enabled it would trigger after the mocktime bump / mined block self.bump_mocktime(self.quorum_data_request_expiration_timeout + 1) - node.generate(1) + self.generate(node, 1) time.sleep(10) # Make sure they are still invalid self.test_mns(llmq_test, quorum_hash_recover, valid_mns=[last_resort_test], all_mns=member_mns_recover_test) self.test_mns(llmq_test_v17, quorum_hash_recover, valid_mns=[last_resort_v17], all_mns=member_mns_recover_v17) # Mining a block should not result in a chainlock now because the responsible quorum shouldn't have enough # valid members. - self.wait_for_chainlocked_block(node, node.generate(1)[0], False, 5) + self.wait_for_chainlocked_block(node, self.generate(node, 1)[0], False, 5) # Now restart with recovery enabled self.restart_mns(mns=recover_members, exclude=exclude_members, reindex=True, qdata_recovery_enabled=True) # Validate that all invalid members recover. 
Note: recover=True leads to mocktime bumps and mining while waiting diff --git a/test/functional/feature_llmq_dkgerrors.py b/test/functional/feature_llmq_dkgerrors.py index cbf461e03ca90..f3151ef02f6bc 100755 --- a/test/functional/feature_llmq_dkgerrors.py +++ b/test/functional/feature_llmq_dkgerrors.py @@ -85,7 +85,7 @@ def heal_masternodes(self, blockCount): self.wait_for_sporks_same() for _ in range(blockCount): self.bump_mocktime(1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0) self.wait_for_sporks_same() diff --git a/test/functional/feature_llmq_evo.py b/test/functional/feature_llmq_evo.py index 921d8c7303a02..1734595cbef15 100755 --- a/test/functional/feature_llmq_evo.py +++ b/test/functional/feature_llmq_evo.py @@ -89,7 +89,7 @@ def run_test(self): for i in range(self.evo_count): evo_info = self.dynamically_add_masternode(evo=True) evo_protxhash_list.append(evo_info.proTxHash) - self.nodes[0].generate(8) + self.generate(self.nodes[0], 8) self.sync_blocks(self.nodes) expectedUpdated.append(evo_info.proTxHash) @@ -116,7 +116,7 @@ def run_test(self): # Generate a few blocks to make EvoNode/MN analysis on a pure MN RewardReallocation window self.bump_mocktime(1) - self.nodes[0].generate(4) + self.generate(self.nodes[0], 4) self.sync_blocks() self.log.info("Test that EvoNodes are paid 1 block in a row after MN RewardReallocation activation") @@ -167,7 +167,7 @@ def test_evo_payments(self, window_analysis, mnrr_active): current_evo = None consecutive_payments = 0 - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) if i % 8 == 0: self.sync_blocks() @@ -215,7 +215,7 @@ def test_evo_is_rejected_before_v19(self): collateral_amount = 4000 outputs = {collateral_address: collateral_amount, funds_address: 1} collateral_txid = self.nodes[0].sendmany("", outputs) - self.nodes[0].generate(8) + self.generate(self.nodes[0], 8) self.sync_all(self.nodes) rawtx = self.nodes[0].getrawtransaction(collateral_txid, 1) diff --git a/test/functional/feature_llmq_is_cl_conflicts.py b/test/functional/feature_llmq_is_cl_conflicts.py index 333826f860bec..d81f15431a237 100755 --- a/test/functional/feature_llmq_is_cl_conflicts.py +++ b/test/functional/feature_llmq_is_cl_conflicts.py @@ -71,7 +71,7 @@ def run_test(self): self.mine_cycle_quorum(llmq_type_name='llmq_test_dip0024', llmq_type=103) # mine single block, wait for chainlock - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) self.test_chainlock_overrides_islock(False) @@ -118,7 +118,7 @@ def test_chainlock_overrides_islock(self, test_block_conflict, mine_confllicting cl = self.create_chainlock(self.nodes[0].getblockcount() + 1, block) if mine_confllicting: - islock_tip = self.nodes[0].generate(1)[-1] + islock_tip = self.generate(self.nodes[0], 1)[-1] # Make sure we won't sent clsig too early self.sync_blocks() @@ -223,7 +223,7 @@ def test_chainlock_overrides_islock_overrides_nonchainlock(self): # Mine the conflicting TX into a block good_tip = self.nodes[0].getbestblockhash() - self.nodes[0].generate(2) + self.generate(self.nodes[0], 2) self.sync_all() # Assert that the conflicting tx got mined and the locked TX is not valid @@ -255,7 +255,7 @@ def test_chainlock_overrides_islock_overrides_nonchainlock(self): # Should not allow competing txes now assert_raises_rpc_error(-26, "tx-txlock-conflict", self.nodes[0].sendrawtransaction, rawtx1) - islock_tip = 
self.nodes[0].generate(1)[0] + islock_tip = self.generate(self.nodes[0], 1)[0] self.sync_all() for node in self.nodes: diff --git a/test/functional/feature_llmq_is_retroactive.py b/test/functional/feature_llmq_is_retroactive.py index f026091c84abc..68e1aadccea39 100755 --- a/test/functional/feature_llmq_is_retroactive.py +++ b/test/functional/feature_llmq_is_retroactive.py @@ -54,7 +54,7 @@ def run_test(self): self.wait_for_sporks_same() # We have to wait in order to include tx in block self.bump_mocktime(10 * 60 + 1) - block = self.nodes[0].generate(1)[0] + block = self.generate(self.nodes[0], 1)[0] self.wait_for_instantlock(txid, self.nodes[0]) self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 0) self.wait_for_sporks_same() @@ -70,7 +70,7 @@ def run_test(self): # are the only "neighbours" in intra-quorum connections for one of them. self.wait_for_instantlock(txid, self.nodes[0]) self.bump_mocktime(1) - block = self.nodes[0].generate(1)[0] + block = self.generate(self.nodes[0], 1)[0] self.wait_for_chainlocked_block_all_nodes(block) self.log.info("testing normal signing with partially known TX") @@ -100,7 +100,7 @@ def run_test(self): txid = self.nodes[3].sendrawtransaction(rawtx) # Make node 3 consider the TX as safe self.bump_mocktime(10 * 60 + 1) - block = self.nodes[3].generatetoaddress(1, self.nodes[0].getnewaddress())[0] + block = self.generatetoaddress(self.nodes[3], 1, self.nodes[0].getnewaddress())[0] self.reconnect_isolated_node(3, 0) self.wait_for_chainlocked_block_all_nodes(block) self.nodes[0].setmocktime(self.mocktime) @@ -120,7 +120,7 @@ def run_test(self): self.wait_for_instantlock(txid, self.nodes[0], False, 5) # Make node0 consider the TX as safe self.bump_mocktime(10 * 60 + 1) - block = self.nodes[0].generate(1)[0] + block = self.generate(self.nodes[0], 1)[0] assert txid in self.nodes[0].getblock(block, 1)['tx'] self.wait_for_chainlocked_block_all_nodes(block) @@ -166,7 +166,7 @@ def test_all_nodes_session_timeout(self, do_cycle_llmqs): self.wait_for_instantlock(txid, self.nodes[0], False, 5) # Make node 0 consider the TX as safe self.bump_mocktime(10 * 60 + 1) - block = self.nodes[0].generate(1)[0] + block = self.generate(self.nodes[0], 1)[0] assert txid in self.nodes[0].getblock(block, 1)['tx'] self.wait_for_chainlocked_block_all_nodes(block) @@ -198,7 +198,7 @@ def test_single_node_session_timeout(self, do_cycle_llmqs): self.wait_for_instantlock(txid, self.nodes[0], False, 5) # Make node 0 consider the TX as safe self.bump_mocktime(10 * 60 + 1) - block = self.nodes[0].generate(1)[0] + block = self.generate(self.nodes[0], 1)[0] assert txid in self.nodes[0].getblock(block, 1)['tx'] self.wait_for_chainlocked_block_all_nodes(block) diff --git a/test/functional/feature_llmq_rotation.py b/test/functional/feature_llmq_rotation.py index e2567c757926d..709e85b46136e 100755 --- a/test/functional/feature_llmq_rotation.py +++ b/test/functional/feature_llmq_rotation.py @@ -88,7 +88,7 @@ def run_test(self): h_104_1 = QuorumId(104, int(h_1, 16)) self.log.info("Mine single block, wait for chainlock") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) b_h_1 = self.nodes[0].getbestblockhash() @@ -119,7 +119,7 @@ def run_test(self): assert_equal(projected_activation_height, softfork_info['height']) # v20 is active for the next block, not for the tip - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.log.info("Wait for chainlock") 
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) @@ -144,14 +144,14 @@ def run_test(self): # At this point, we want to wait for CLs just before the self.mine_cycle_quorum to diversify the CLs in CbTx. # Although because here a new quorum cycle is starting, and we don't want to mine them now, mine 8 blocks (to skip all DKG phases) nodes = [self.nodes[0]] + [mn.node for mn in self.mninfo.copy()] - self.nodes[0].generate(8) + self.generate(self.nodes[0], 8) self.sync_blocks(nodes) self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) # And for the remaining blocks, enforce new CL in CbTx skip_count = 23 - (self.nodes[0].getblockcount() % 24) for _ in range(skip_count): - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(nodes) self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) @@ -202,7 +202,7 @@ def run_test(self): self.sync_blocks(nodes) quorum_list = self.nodes[0].quorum("list", llmq_type) quorum_blockhash = self.nodes[0].getbestblockhash() - fallback_blockhash = self.nodes[0].generate(1)[0] + fallback_blockhash = self.generate(self.nodes[0], 1)[0] self.log.info("h("+str(self.nodes[0].getblockcount())+") quorum_list:"+str(quorum_list)) assert_greater_than_or_equal(len(intersection(quorum_members_0_0, quorum_members_1_0)), 3) diff --git a/test/functional/feature_llmq_simplepose.py b/test/functional/feature_llmq_simplepose.py index 432664c5337d2..72b1b3eec7b67 100755 --- a/test/functional/feature_llmq_simplepose.py +++ b/test/functional/feature_llmq_simplepose.py @@ -107,7 +107,7 @@ def mine_quorum_less_checks(self, expected_good_nodes, mninfos_online): skip_count = 24 - (self.nodes[0].getblockcount() % 24) if skip_count != 0: self.bump_mocktime(skip_count, nodes=nodes) - self.nodes[0].generate(skip_count) + self.generate(self.nodes[0], skip_count) self.sync_blocks(nodes) q = self.nodes[0].getbestblockhash() @@ -141,7 +141,7 @@ def mine_quorum_less_checks(self, expected_good_nodes, mninfos_online): self.log.info("Mining final commitment") self.bump_mocktime(1, nodes=nodes) self.nodes[0].getblocktemplate() # this calls CreateNewBlock - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(nodes) self.log.info("Waiting for quorum to appear in the list") @@ -153,7 +153,7 @@ def mine_quorum_less_checks(self, expected_good_nodes, mninfos_online): # Mine 8 (SIGN_HEIGHT_OFFSET) more blocks to make sure that the new quorum gets eligible for signing sessions self.bump_mocktime(8) - self.nodes[0].generate(8) + self.generate(self.nodes[0], 8) self.sync_blocks(nodes) self.log.info("New quorum: height=%d, quorumHash=%s, quorumIndex=%d, minedBlock=%s" % (quorum_info["height"], new_quorum, quorum_info["quorumIndex"], quorum_info["minedBlock"])) @@ -213,7 +213,7 @@ def repair_masternodes(self, restart): # Make sure protxes are "safe" to mine even when InstantSend and ChainLocks are no longer functional self.bump_mocktime(60 * 10 + 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # Isolate and re-connect all MNs (otherwise there might be open connections with no MNAUTH for MNs which were banned before) diff --git a/test/functional/feature_loadblock.py b/test/functional/feature_loadblock.py index dd0acaf578f5b..ddfbffe144c1e 100755 --- a/test/functional/feature_loadblock.py +++ b/test/functional/feature_loadblock.py @@ -29,7 +29,7 @@ def set_test_params(self): def run_test(self): self.nodes[1].setnetworkactive(state=False) - 
self.nodes[0].generate(COINBASE_MATURITY) + self.generate(self.nodes[0], COINBASE_MATURITY) # Parsing the url of our node to get settings for config file data_dir = self.nodes[0].datadir diff --git a/test/functional/feature_maxuploadtarget.py b/test/functional/feature_maxuploadtarget.py index 11408573478eb..8b2f8f4cf570a 100755 --- a/test/functional/feature_maxuploadtarget.py +++ b/test/functional/feature_maxuploadtarget.py @@ -60,7 +60,7 @@ def run_test(self): self.nodes[0].setmocktime(old_mocktime) # Generate some old blocks - self.nodes[0].generate(130) + self.generate(self.nodes[0], 130) # p2p_conns[0] will only request old blocks # p2p_conns[1] will only request new blocks diff --git a/test/functional/feature_minchainwork.py b/test/functional/feature_minchainwork.py index 81eca9545ee55..d8d8e43f100ac 100755 --- a/test/functional/feature_minchainwork.py +++ b/test/functional/feature_minchainwork.py @@ -57,7 +57,7 @@ def run_test(self): num_blocks_to_generate = int((self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK) self.log.info("Generating %d blocks on node0", num_blocks_to_generate) - hashes = self.nodes[0].generatetoaddress(num_blocks_to_generate, + hashes = self.generatetoaddress(self.nodes[0], num_blocks_to_generate, self.nodes[0].get_deterministic_priv_key().address) self.log.info("Node0 current chain work: %s", self.nodes[0].getblockheader(hashes[-1])['chainwork']) @@ -88,7 +88,7 @@ def run_test(self): assert ("headers" not in peer.last_message or len(peer.last_message["headers"].headers) == 0) self.log.info("Generating one more block") - self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[0], 1, self.nodes[0].get_deterministic_priv_key().address) self.log.info("Verifying nodes are all synced") diff --git a/test/functional/feature_mnehf.py b/test/functional/feature_mnehf.py index 6a4ac76a4f980..d650dfd5f8568 100755 --- a/test/functional/feature_mnehf.py +++ b/test/functional/feature_mnehf.py @@ -148,7 +148,7 @@ def run_test(self): ehf_unknown_tx_sent = self.send_tx(ehf_unknown_tx) self.log.info(f"unknown ehf tx: {ehf_unknown_tx_sent}") self.sync_all() - ehf_blockhash = self.nodes[1].generate(1)[0] + ehf_blockhash = self.generate(self.nodes[1], 1)[0] self.sync_blocks() self.sync_all() @@ -163,7 +163,7 @@ def run_test(self): while (node.getblockcount() + 1) % 4 != 0: self.check_fork('defined') - node.generate(1) + self.generate(node, 1) self.sync_all() @@ -171,13 +171,13 @@ def run_test(self): for _ in range(4): self.check_fork('started') - node.generate(1) + self.generate(node, 1) self.sync_all() for i in range(4): self.check_fork('locked_in') - node.generate(1) + self.generate(node, 1) self.sync_all() if i == 7: self.restart_all_nodes() @@ -192,13 +192,13 @@ def run_test(self): self.log.info("Expecting for fork to be defined in next blocks because no MnEHF tx here") for _ in range(4): self.check_fork('defined') - node.generate(1) + self.generate(node, 1) self.sync_all() self.log.info("Re-sending MnEHF for new fork") tx_sent_2 = self.send_tx(ehf_tx) - ehf_blockhash_2 = node.generate(1)[0] + ehf_blockhash_2 = self.generate(node, 1)[0] self.sync_all() self.log.info(f"Check MnEhfTx again {tx_sent_2} was mined in {ehf_blockhash_2}") @@ -206,7 +206,7 @@ def run_test(self): self.log.info(f"Generate some more block to jump to `started` status") for _ in range(4): - node.generate(1) + self.generate(node, 1) self.check_fork('started') self.restart_all_nodes() self.check_fork('started') @@ -223,14 
+223,14 @@ def run_test(self): self.log.info("Testing duplicate EHF signal with same bit") ehf_tx_duplicate = self.send_tx(self.create_mnehf(28, pubkey)) - tip_blockhash = node.generate(1)[0] + tip_blockhash = self.generate(node, 1)[0] self.sync_blocks() block = node.getblock(tip_blockhash) assert ehf_tx_duplicate in node.getrawmempool() and ehf_tx_duplicate not in block['tx'] self.log.info("Testing EHF signal with same bit but with newer start time") self.bump_mocktime(int(60 * 60 * 24 * 14), update_schedulers=False) - node.generate(1) + self.generate(node, 1) self.sync_blocks() self.restart_all_nodes(params=[self.mocktime, self.mocktime + 1000000]) self.check_fork('defined') @@ -243,7 +243,7 @@ def run_test(self): for _ in range(4 * 4): time.sleep(1) self.bump_mocktime(1) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_all() self.check_fork('active') diff --git a/test/functional/feature_new_quorum_type_activation.py b/test/functional/feature_new_quorum_type_activation.py index dbbcd89218a7f..85fa97a5f90dd 100755 --- a/test/functional/feature_new_quorum_type_activation.py +++ b/test/functional/feature_new_quorum_type_activation.py @@ -22,17 +22,17 @@ def set_test_params(self): def run_test(self): self.log.info(get_bip9_details(self.nodes[0], 'testdummy')) assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'defined') - self.nodes[0].generate(9) + self.generate(self.nodes[0], 9) assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'started') ql = self.nodes[0].quorum("list") assert_equal(len(ql), 3) assert "llmq_test_v17" not in ql - self.nodes[0].generate(10) + self.generate(self.nodes[0], 10) assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'locked_in') ql = self.nodes[0].quorum("list") assert_equal(len(ql), 3) assert "llmq_test_v17" not in ql - self.nodes[0].generate(10) + self.generate(self.nodes[0], 10) assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'active') ql = self.nodes[0].quorum("list") assert_equal(len(ql), 4) diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py index 5dee940fa562d..76061cd2291fe 100755 --- a/test/functional/feature_notifications.py +++ b/test/functional/feature_notifications.py @@ -64,7 +64,7 @@ def run_test(self): self.log.info("test -blocknotify") block_count = 10 - blocks = self.nodes[1].generatetoaddress(block_count, self.nodes[1].getnewaddress() if self.is_wallet_compiled() else ADDRESS_BCRT1_UNSPENDABLE) + blocks = self.generatetoaddress(self.nodes[1], block_count, self.nodes[1].getnewaddress() if self.is_wallet_compiled() else ADDRESS_BCRT1_UNSPENDABLE) # wait at most 10 seconds for expected number of files before reading the content self.wait_until(lambda: len(os.listdir(self.blocknotify_dir)) == block_count, timeout=10) @@ -116,7 +116,7 @@ def run_test(self): self.log.info("Mine single block, wait for chainlock") self.bump_mocktime(1) - tip = self.nodes[0].generate(1)[-1] + tip = self.generate(self.nodes[0], 1)[-1] self.wait_for_chainlocked_block_all_nodes(tip) # directory content should equal the chainlocked block hash assert_equal([tip], sorted(os.listdir(self.chainlocknotify_dir))) diff --git a/test/functional/feature_nulldummy.py b/test/functional/feature_nulldummy.py index dd8c3e0ca4eb3..f23f750e62cc5 100755 --- a/test/functional/feature_nulldummy.py +++ b/test/functional/feature_nulldummy.py @@ -67,11 +67,11 @@ def run_test(self): # Legacy wallets need to import these so that they are watched by the 
wallet. This is unnecessary (and does not need to be tested) for descriptor wallets wmulti.importaddress(self.ms_address) - self.coinbase_blocks = self.nodes[0].generate(2) # block height = 2 + self.coinbase_blocks = self.generate(self.nodes[0], 2) # block height = 2 coinbase_txid = [] for i in self.coinbase_blocks: coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0]) - self.nodes[0].generate(COINBASE_MATURITY) # block height = COINBASE_MATURITY + 2 + self.generate(self.nodes[0], COINBASE_MATURITY) # block height = COINBASE_MATURITY + 2 self.lastblockhash = self.nodes[0].getbestblockhash() self.lastblockheight = COINBASE_MATURITY + 2 self.lastblocktime = self.mocktime + self.lastblockheight diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py index e722c282267d2..1a82d57a2255c 100755 --- a/test/functional/feature_pruning.py +++ b/test/functional/feature_pruning.py @@ -120,9 +120,9 @@ def setup_nodes(self): def create_big_chain(self): # Start by creating some coinbases we can spend later - self.nodes[1].generate(200) + self.generate(self.nodes[1], 200) self.sync_blocks(self.nodes[0:2]) - self.nodes[0].generate(150) + self.generate(self.nodes[0], 150) # Then mine enough full blocks to create more than 550MiB of data mine_large_blocks(self.nodes[0], 645) @@ -214,13 +214,13 @@ def reorg_test(self): self.log.info("New best height: %d" % self.nodes[1].getblockcount()) # Mine one block to avoid automatic recovery from forks on restart - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) # Disconnect node1 and generate the new chain self.disconnect_nodes(0, 1) self.disconnect_nodes(1, 2) self.log.info("Generating new longer chain of 300 more blocks") - self.nodes[1].generate(299) + self.generate(self.nodes[1], 299) self.log.info("Reconnect nodes") self.connect_nodes(0, 1) @@ -272,7 +272,7 @@ def reorg_back(self): self.nodes[0].invalidateblock(curchainhash) assert_equal(self.nodes[0].getblockcount(), self.mainchainheight) assert_equal(self.nodes[0].getbestblockhash(), self.mainchainhash2) - goalbesthash = self.nodes[0].generate(blocks_to_mine)[-1] + goalbesthash = self.generate(self.nodes[0], blocks_to_mine)[-1] goalbestheight = first_reorg_height + 1 self.log.info("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload") @@ -315,7 +315,7 @@ def has_block(index): assert_equal(block1_details["nTx"], len(block1_details["tx"])) # mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight) - node.generate(6) + self.generate(node, 6) assert_equal(node.getblockchaininfo()["blocks"], 1001) # prune parameter in the future (block or timestamp) should raise an exception @@ -353,7 +353,7 @@ def has_block(index): assert has_block(2), "blk00002.dat is still there, should be pruned by now" # advance the tip so blk00002.dat and blk00003.dat can be pruned (the last 288 blocks should now be in blk00004.dat) - node.generate(288) + self.generate(node, 288) prune(1000) assert not has_block(2), "blk00002.dat is still there, should be pruned by now" assert not has_block(3), "blk00003.dat is still there, should be pruned by now" diff --git a/test/functional/feature_reindex.py b/test/functional/feature_reindex.py index 0131b851e9701..844af1b16852c 100755 --- a/test/functional/feature_reindex.py +++ b/test/functional/feature_reindex.py @@ -19,7 +19,7 @@ def set_test_params(self): self.num_nodes = 1 def reindex(self, justchainstate=False): - self.nodes[0].generatetoaddress(3, self.nodes[0].get_deterministic_priv_key().address) + 
self.generatetoaddress(self.nodes[0], 3, self.nodes[0].get_deterministic_priv_key().address) blockcount = self.nodes[0].getblockcount() self.stop_nodes() extra_args = [["-reindex-chainstate" if justchainstate else "-reindex"]] diff --git a/test/functional/feature_spentindex.py b/test/functional/feature_spentindex.py index 512ea21ee92e6..363f5e7f71007 100755 --- a/test/functional/feature_spentindex.py +++ b/test/functional/feature_spentindex.py @@ -53,7 +53,7 @@ def run_test(self): self.sync_all() self.log.info("Mining blocks...") - self.nodes[0].generate(105) + self.generate(self.nodes[0], 105) self.sync_all() chain_height = self.nodes[1].getblockcount() @@ -76,7 +76,7 @@ def run_test(self): signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() self.log.info("Testing getspentinfo method...") @@ -120,7 +120,7 @@ def run_test(self): assert_equal(txVerbose3["vin"][0]["valueSat"], amount) # Check the database index - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() txVerbose4 = self.nodes[3].getrawtransaction(txid2, 1) diff --git a/test/functional/feature_sporks.py b/test/functional/feature_sporks.py index 572c2cd28cf2a..a958721567a84 100755 --- a/test/functional/feature_sporks.py +++ b/test/functional/feature_sporks.py @@ -53,7 +53,7 @@ def run_test(self): assert self.get_test_spork_state(self.nodes[1]) == spork_new_state # Generate one block to kick off masternode sync, which also starts sporks syncing for node2 - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) # connect new node and check spork propagation after restoring from cache self.connect_nodes(1, 2) diff --git a/test/functional/feature_timestampindex.py b/test/functional/feature_timestampindex.py index 454cdc71efda6..63d22baa76d7e 100755 --- a/test/functional/feature_timestampindex.py +++ b/test/functional/feature_timestampindex.py @@ -46,7 +46,7 @@ def run_test(self): self.sync_all() self.log.info("Mining 5 blocks...") - blockhashes = self.nodes[0].generate(5) + blockhashes = self.generate(self.nodes[0], 5) low = self.nodes[0].getblock(blockhashes[0])["time"] high = self.nodes[0].getblock(blockhashes[4])["time"] self.sync_all() diff --git a/test/functional/feature_txindex.py b/test/functional/feature_txindex.py index 64f3c76584ec0..e1a9633f8af24 100755 --- a/test/functional/feature_txindex.py +++ b/test/functional/feature_txindex.py @@ -38,7 +38,7 @@ def setup_network(self): def run_test(self): self.log.info("Mining blocks...") - self.nodes[0].generate(105) + self.generate(self.nodes[0], 105) self.sync_all() chain_height = self.nodes[1].getblockcount() @@ -58,7 +58,7 @@ def run_test(self): signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # Check verbose raw transaction results diff --git a/test/functional/feature_utxo_set_hash.py b/test/functional/feature_utxo_set_hash.py index 8b0e39fc5c647..5f36f5dc98176 100755 --- a/test/functional/feature_utxo_set_hash.py +++ b/test/functional/feature_utxo_set_hash.py @@ -31,13 +31,13 @@ def test_muhash_implementation(self): # Generate 100 blocks and remove the first since we plan to spend its # coinbase - block_hashes = wallet.generate(1) + node.generate(99) + block_hashes = self.generate(wallet, 1) + self.generate(node, 99) blocks 
= list(map(lambda block: from_hex(CBlock(), node.getblock(block, False)), block_hashes)) blocks.pop(0) # Create a spending transaction and mine a block which includes it txid = wallet.send_self_transfer(from_node=node)['txid'] - tx_block = node.generateblock(output=wallet.get_address(), transactions=[txid])['hash'] + tx_block = self.generateblock(node, output=wallet.get_address(), transactions=[txid])['hash'] blocks.append(from_hex(CBlock(), node.getblock(tx_block, False))) # Serialize the outputs that should be in the UTXO set and add them to diff --git a/test/functional/feature_versionbits_warning.py b/test/functional/feature_versionbits_warning.py index 0f2417db48dd3..cf02a08bf9688 100755 --- a/test/functional/feature_versionbits_warning.py +++ b/test/functional/feature_versionbits_warning.py @@ -65,12 +65,12 @@ def run_test(self): node_deterministic_address = node.get_deterministic_priv_key().address # Mine one period worth of blocks - node.generatetoaddress(VB_PERIOD, node_deterministic_address) + self.generatetoaddress(node, VB_PERIOD, node_deterministic_address) self.log.info("Check that there is no warning if previous VB_BLOCKS have =VB_THRESHOLD blocks with unknown versionbits version.") # Mine a period worth of expected blocks so the generic block-version warning # is cleared. This will move the versionbit state to ACTIVE. - node.generatetoaddress(VB_PERIOD, node_deterministic_address) + self.generatetoaddress(node, VB_PERIOD, node_deterministic_address) # Stop-start the node. This is required because dashd will only warn once about unknown versions or unknown rules activating. self.restart_node(0) # Generating one block guarantees that we'll get out of IBD - node.generatetoaddress(1, node_deterministic_address) + self.generatetoaddress(node, 1, node_deterministic_address) self.wait_until(lambda: not node.getblockchaininfo()['initialblockdownload']) # Generating one more block will be enough to generate an error. - node.generatetoaddress(1, node_deterministic_address) + self.generatetoaddress(node, 1, node_deterministic_address) # Check that get*info() shows the versionbits unknown rules warning assert WARN_UNKNOWN_RULES_ACTIVE in node.getmininginfo()["warnings"] assert WARN_UNKNOWN_RULES_ACTIVE in node.getnetworkinfo()["warnings"] diff --git a/test/functional/interface_bitcoin_cli.py b/test/functional/interface_bitcoin_cli.py index c3de072521b38..d72cd31f577e4 100755 --- a/test/functional/interface_bitcoin_cli.py +++ b/test/functional/interface_bitcoin_cli.py @@ -77,7 +77,7 @@ def skip_test_if_missing_module(self): def run_test(self): """Main test logic""" - self.nodes[0].generate(BLOCKS) + self.generate(self.nodes[0], BLOCKS) self.log.info("Compare responses from getblockchaininfo RPC and `dash-cli getblockchaininfo`") cli_response = self.nodes[0].cli.getblockchaininfo() @@ -175,7 +175,7 @@ def run_test(self): w1.sendtoaddress(w3.getnewaddress(), amounts[2]) # Mine a block to confirm; adds a block reward (500 DASH) to the default wallet. 
- self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.log.info("Test -getinfo with multiple wallets and -rpcwallet returns specified wallet balance") for i in range(len(wallets)): @@ -308,7 +308,7 @@ def run_test(self): assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 1, 2, 3).echo) else: self.log.info("*** Wallet not compiled; cli getwalletinfo and -getinfo wallet tests skipped") - self.nodes[0].generate(25) # maintain block parity with the wallet_compiled conditional branch + self.generate(self.nodes[0], 25) # maintain block parity with the wallet_compiled conditional branch self.log.info("Test -version with node stopped") self.stop_node(0) diff --git a/test/functional/interface_rest.py b/test/functional/interface_rest.py index 809134e72d86b..22017fc721f3e 100755 --- a/test/functional/interface_rest.py +++ b/test/functional/interface_rest.py @@ -84,9 +84,9 @@ def run_test(self): # Random address so node1's balance doesn't increase not_related_address = "yj949n1UH6fDhw6HtVE5VMj2iSTaSWBMcW" - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() - self.nodes[1].generatetoaddress(100, not_related_address) + self.generatetoaddress(self.nodes[1], 100, not_related_address) self.sync_all() assert_equal(self.nodes[0].getbalance(), 500) @@ -117,7 +117,7 @@ def run_test(self): self.log.info("Query an unspent TXO using the /getutxos URI") - self.nodes[1].generatetoaddress(1, not_related_address) + self.generatetoaddress(self.nodes[1], 1, not_related_address) self.sync_all() bb_hash = self.nodes[0].getbestblockhash() @@ -192,7 +192,7 @@ def run_test(self): json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spent)) assert_equal(len(json_obj['utxos']), 0) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) @@ -213,7 +213,7 @@ def run_test(self): long_uri = '/'.join(['{}-{}'.format(txid, n) for n in range(15)]) self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=200) - self.nodes[0].generate(1) # generate block to not affect upcoming tests + self.generate(self.nodes[0], 1) # generate block to not affect upcoming tests self.sync_all() self.log.info("Test the /block, /blockhashbyheight and /headers URIs") @@ -284,7 +284,7 @@ def run_test(self): assert_equal(json_obj[0][key], rpc_block_json[key]) # See if we can get 5 headers in one response - self.nodes[1].generate(5) + self.generate(self.nodes[1], 5) self.sync_all() json_obj = self.test_rest_request("/headers/5/{}".format(bb_hash)) assert_equal(len(json_obj), 5) # now we should have 5 header objects @@ -319,7 +319,7 @@ def run_test(self): assert_equal(json_obj[tx]['depends'], txs[i - 1:i]) # Now mine the transactions - newblockhash = self.nodes[1].generate(1) + newblockhash = self.generate(self.nodes[1], 1) self.sync_all() # Check if the 3 tx show up in the new block diff --git a/test/functional/interface_usdt_utxocache.py b/test/functional/interface_usdt_utxocache.py index 0098e9c98b07f..9ae1cba1e5868 100755 --- a/test/functional/interface_usdt_utxocache.py +++ b/test/functional/interface_usdt_utxocache.py @@ -145,7 +145,7 @@ def skip_test_if_missing_module(self): def run_test(self): self.wallet = MiniWallet(self.nodes[0]) - self.wallet.generate(101) + self.generate(self.wallet, 101) self.test_uncache() self.test_add_spent() @@ -235,7 +235,7 @@ def test_add_spent(self): # mining, we invalidate the block, start the tracing, 
and then trace the cache
        # changes to the active utxo cache.
         self.log.info("mine and invalidate a block that is later reconsidered")
-        block_hash = self.wallet.generate(1)[0]
+        block_hash = self.generate(self.wallet, 1)[0]
         self.nodes[0].invalidateblock(block_hash)
         self.log.info(
@@ -385,7 +385,7 @@ def handle_utxocache_flush(_, data, __):
         BLOCKS_TO_MINE = 450
         self.log.info(f"mine {BLOCKS_TO_MINE} blocks to be able to prune")
-        self.wallet.generate(BLOCKS_TO_MINE)
+        self.generate(self.wallet, BLOCKS_TO_MINE)
         # we added BLOCKS_TO_MINE coinbase UTXOs to the cache
         possible_cache_sizes = {BLOCKS_TO_MINE}
         expected_flushes.append(
diff --git a/test/functional/interface_usdt_validation.py b/test/functional/interface_usdt_validation.py
index 6c7b4fc927ed7..71e997fdca401 100755
--- a/test/functional/interface_usdt_validation.py
+++ b/test/functional/interface_usdt_validation.py
@@ -120,7 +120,7 @@ def handle_blockconnected(_, data, __):
                                    handle_blockconnected)
         self.log.info(f"mine {BLOCKS_EXPECTED} blocks")
-        block_hashes = self.nodes[0].generatetoaddress(
+        block_hashes = self.generatetoaddress(self.nodes[0],
             BLOCKS_EXPECTED, ADDRESS_BCRT1_UNSPENDABLE)
         for block_hash in block_hashes:
             expected_blocks[block_hash] = self.nodes[0].getblock(block_hash, 2)
diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py
index 8f77686fe6f2b..35220806d8144 100755
--- a/test/functional/interface_zmq.py
+++ b/test/functional/interface_zmq.py
@@ -188,7 +188,7 @@ def test_basic(self):
         num_blocks = 5
         self.log.info("Generate %(n)d blocks (and %(n)d coinbase txes)" % {"n": num_blocks})
-        genhashes = self.nodes[0].generatetoaddress(num_blocks, ADDRESS_BCRT1_UNSPENDABLE)
+        genhashes = self.generatetoaddress(self.nodes[0], num_blocks, ADDRESS_BCRT1_UNSPENDABLE)
         self.sync_all()
@@ -229,7 +229,7 @@ def test_basic(self):
         # Mining the block with this tx should result in second notification
         # after coinbase tx notification
-        self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
         hashtx.receive()
         txid = hashtx.receive()
         assert_equal(payment_txid, txid.hex())
@@ -261,14 +261,14 @@ def test_reorg(self):
         # Generate 1 block in nodes[0] with 1 mempool tx and receive all notifications
         payment_txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
-        disconnect_block = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
+        disconnect_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)[0]
         disconnect_cb = self.nodes[0].getblock(disconnect_block)["tx"][0]
         assert_equal(self.nodes[0].getbestblockhash(), hashblock.receive().hex())
         assert_equal(hashtx.receive().hex(), payment_txid)
         assert_equal(hashtx.receive().hex(), disconnect_cb)
         # Generate 2 blocks in nodes[1] to a different address to ensure split
-        connect_blocks = self.nodes[1].generatetoaddress(2, ADDRESS_BCRT1_P2SH_OP_TRUE)
+        connect_blocks = self.generatetoaddress(self.nodes[1], 2, ADDRESS_BCRT1_P2SH_OP_TRUE)
         # nodes[0] will reorg chain after connecting back nodes[1]
         self.connect_nodes(0, 1)
@@ -312,13 +312,13 @@ def test_sequence(self):
         seq_num = 1
         # Generate 1 block in nodes[0] and receive all notifications
-        dc_block = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
+        dc_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)[0]
         # Note: We are not notified of any block transactions, coinbase or mined
         assert_equal((self.nodes[0].getbestblockhash(), "C", None), seq.receive_sequence())
         # Generate 2 blocks in nodes[1] to a different address to ensure a chain split
-        self.nodes[1].generatetoaddress(2, ADDRESS_BCRT1_P2SH_OP_TRUE)
+        self.generatetoaddress(self.nodes[1], 2, ADDRESS_BCRT1_P2SH_OP_TRUE)
         # nodes[0] will reorg chain after connecting back nodes[1]
         self.connect_nodes(0, 1)
@@ -344,7 +344,7 @@ def test_sequence(self):
         # though the mempool sequence number does go up by the number of transactions
         # removed from the mempool by the block mining it.
         mempool_size = len(self.nodes[0].getrawmempool())
-        c_block = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
+        c_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)[0]
         self.sync_all()
         # Make sure the number of mined transactions matches the number of txs out of mempool
         mempool_size_delta = mempool_size - len(self.nodes[0].getrawmempool())
@@ -384,7 +384,7 @@ def test_sequence(self):
         # Other things may happen but aren't wallet-deterministic so we don't test for them currently
         self.nodes[0].reconsiderblock(best_hash)
-        self.nodes[1].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+        self.generatetoaddress(self.nodes[1], 1, ADDRESS_BCRT1_UNSPENDABLE)
         self.sync_all()
         self.log.info("Evict mempool transaction by block conflict")
@@ -429,7 +429,7 @@ def test_sequence(self):
         # Last tx
         assert_equal((orig_txid_2, "A", mempool_seq), seq.receive_sequence())
         mempool_seq += 1
-        self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
         self.sync_all()  # want to make sure we didn't break "consensus" for other tests
     def test_mempool_sync(self):
@@ -479,7 +479,7 @@ def test_mempool_sync(self):
         for _ in range(num_txs):
             txids.append(self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=0.1))
         self.sync_all()
-        self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
         final_txid = self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=0.1)
         # 3) Consume ZMQ backlog until we get to "now" for the mempool snapshot
@@ -535,7 +535,7 @@ def test_mempool_sync(self):
         # 5) If you miss a zmq/mempool sequence number, go back to step (2)
-        self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
     def test_multiple_interfaces(self):
         # Set up two subscribers with different addresses
@@ -548,7 +548,7 @@ def test_multiple_interfaces(self):
         ], sync_blocks=False)
         # Generate 1 block in nodes[0] and receive all notifications
-        self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
         # Should receive the same block hash on both subscribers
         assert_equal(self.nodes[0].getbestblockhash(), subscribers[0].receive().hex())
@@ -565,7 +565,7 @@ def test_ipv6(self):
         ], ipv6=True)
         # Generate 1 block in nodes[0]
-        self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+        self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)
         # Should receive the same block hash
         assert_equal(self.nodes[0].getbestblockhash(), subscribers[0].receive().hex())
diff --git a/test/functional/interface_zmq_dash.py b/test/functional/interface_zmq_dash.py
index d5e2a9852ab86..508590af3a87f 100755
--- a/test/functional/interface_zmq_dash.py
+++ b/test/functional/interface_zmq_dash.py
@@ -170,7 +170,7 @@ def run_test(self):
     def generate_blocks(self, num_blocks):
         mninfos_online = self.mninfo.copy()
         nodes = [self.nodes[0]] + [mn.node for mn in mninfos_online]
-        self.nodes[0].generate(num_blocks)
+        self.generate(self.nodes[0], num_blocks)
         self.sync_blocks(nodes)
     def subscribe(self, publishers):
@@ -214,7 +214,7 @@ def validate_recovered_sig(request_id, msg_hash):
         # Subscribe to recovered signature messages
         self.subscribe(recovered_sig_publishers)
         # Generate a ChainLock and make sure this leads to valid recovered sig ZMQ messages
-        rpc_last_block_hash = self.nodes[0].generate(1)[0]
+        rpc_last_block_hash = self.generate(self.nodes[0], 1)[0]
         self.wait_for_chainlocked_block_all_nodes(rpc_last_block_hash)
         height = self.nodes[0].getblockcount()
         rpc_request_id = hash256(ser_string(b"clsig") + struct.pack(" sizes[0] + sizes[1]):
-            self.nodes[0].generate(1)
+            self.generate(self.nodes[0], 1)
         # High fee transaction should not have been mined, but other high fee rate
         # transactions should have been.
diff --git a/test/functional/p2p_block_sync.py b/test/functional/p2p_block_sync.py
index 0506735971285..d821edc1b1814 100755
--- a/test/functional/p2p_block_sync.py
+++ b/test/functional/p2p_block_sync.py
@@ -29,7 +29,7 @@ def setup_network(self):
     def run_test(self):
         self.log.info("Setup network: node0->node1->node2")
         self.log.info("Mining one block on node0 and verify all nodes sync")
-        self.nodes[0].generate(1)
+        self.generate(self.nodes[0], 1)
         self.log.info("Success!")
diff --git a/test/functional/p2p_blockfilters.py b/test/functional/p2p_blockfilters.py
index 3b987f7276bc9..ea215784846ef 100755
--- a/test/functional/p2p_blockfilters.py
+++ b/test/functional/p2p_blockfilters.py
@@ -56,17 +56,17 @@ def run_test(self):
         peer_1 = self.nodes[1].add_p2p_connection(FiltersClient())
         # Nodes 0 & 1 share the same first 999 blocks in the chain.
-        self.nodes[0].generate(999)
+        self.generate(self.nodes[0], 999)
         self.sync_blocks(timeout=600)
         # Stale blocks by disconnecting nodes 0 & 1, mining, then reconnecting
         self.disconnect_nodes(0, 1)
-        stale_block_hash = self.nodes[0].generate(1)[0]
+        stale_block_hash = self.generate(self.nodes[0], 1)[0]
         self.nodes[0].syncwithvalidationinterfacequeue()
         assert_equal(self.nodes[0].getblockcount(), 1000)
-        self.nodes[1].generate(1001)
+        self.generate(self.nodes[1], 1001)
         assert_equal(self.nodes[1].getblockcount(), 2000)
         # Check that nodes have signalled NODE_COMPACT_FILTERS correctly.
diff --git a/test/functional/p2p_blocksonly.py b/test/functional/p2p_blocksonly.py index e7f9592e916c5..95030b2428ce2 100755 --- a/test/functional/p2p_blocksonly.py +++ b/test/functional/p2p_blocksonly.py @@ -78,7 +78,7 @@ def blocksonly_mode_tests(self): self.log.info("Relay-permission peer's transaction is accepted and relayed") self.nodes[0].disconnect_p2ps() - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) def blocks_relay_conn_tests(self): self.log.info('Tests with node in normal mode with block-relay-only connections') diff --git a/test/functional/p2p_compactblocks.py b/test/functional/p2p_compactblocks.py index 839a034d71664..96f69e0eea91d 100755 --- a/test/functional/p2p_compactblocks.py +++ b/test/functional/p2p_compactblocks.py @@ -155,7 +155,7 @@ def make_utxos(self): block = self.build_block_on_tip(self.nodes[0]) self.test_node.send_and_ping(msg_block(block)) assert int(self.nodes[0].getbestblockhash(), 16) == block.sha256 - self.nodes[0].generatetoaddress(COINBASE_MATURITY, self.nodes[0].getnewaddress()) + self.generatetoaddress(self.nodes[0], COINBASE_MATURITY, self.nodes[0].getnewaddress()) total_value = block.vtx[0].vout[0].nValue out_value = total_value // 10 @@ -198,7 +198,7 @@ def received_sendcmpct(): def check_announcement_of_new_block(node, peer, predicate): peer.clear_block_announcement() - block_hash = int(node.generate(1)[0], 16) + block_hash = int(self.generate(node, 1)[0], 16) peer.wait_for_block_announcement(block_hash, timeout=30) assert peer.block_announced @@ -261,7 +261,7 @@ def check_announcement_of_new_block(node, peer, predicate): # This test actually causes dashd to (reasonably!) disconnect us, so do this last. def test_invalid_cmpctblock_message(self): - self.nodes[0].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[0], COINBASE_MATURITY + 1) block = self.build_block_on_tip(self.nodes[0]) cmpct_block = P2PHeaderAndShortIDs() @@ -278,7 +278,7 @@ def test_invalid_cmpctblock_message(self): def test_compactblock_construction(self, test_node): node = self.nodes[0] # Generate a bunch of transactions. - node.generate(COINBASE_MATURITY + 1) + self.generate(node, COINBASE_MATURITY + 1) num_transactions = 25 address = node.getnewaddress() @@ -296,7 +296,7 @@ def test_compactblock_construction(self, test_node): # Now mine a block, and look at the resulting compact block. test_node.clear_block_announcement() - block_hash = int(node.generate(1)[0], 16) + block_hash = int(self.generate(node, 1)[0], 16) # Store the raw block in our internal format. 
block = from_hex(CBlock(), node.getblock("%064x" % block_hash, False)) @@ -613,7 +613,7 @@ def test_compactblocks_not_at_tip(self, test_node): new_blocks = [] for _ in range(MAX_CMPCTBLOCK_DEPTH + 1): test_node.clear_block_announcement() - new_blocks.append(node.generate(1)[0]) + new_blocks.append(self.generate(node, 1)[0]) test_node.wait_until(test_node.received_block_announcement, timeout=30) test_node.clear_block_announcement() @@ -621,7 +621,7 @@ def test_compactblocks_not_at_tip(self, test_node): test_node.wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30) test_node.clear_block_announcement() - node.generate(1) + self.generate(node, 1) test_node.wait_until(test_node.received_block_announcement, timeout=30) test_node.clear_block_announcement() with p2p_lock: @@ -789,7 +789,7 @@ def assert_highbandwidth_states(node, hb_to, hb_from): def run_test(self): # Get the nodes out of IBD - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # Setup the p2p connections self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn()) diff --git a/test/functional/p2p_compactblocks_blocksonly.py b/test/functional/p2p_compactblocks_blocksonly.py index 0d4d8ce22f55c..b6cf2fe5a33ba 100755 --- a/test/functional/p2p_compactblocks_blocksonly.py +++ b/test/functional/p2p_compactblocks_blocksonly.py @@ -32,7 +32,7 @@ def setup_network(self): self.sync_all() def build_block_on_tip(self): - blockhash = self.nodes[2].generate(1)[0] + blockhash = self.generate(self.nodes[2], 1)[0] block_hex = self.nodes[2].getblock(blockhash=blockhash, verbosity=0) block = from_hex(CBlock(), block_hex) block.rehash() diff --git a/test/functional/p2p_compactblocks_hb.py b/test/functional/p2p_compactblocks_hb.py index a3d30a6f0423f..72b3897b4f81c 100755 --- a/test/functional/p2p_compactblocks_hb.py +++ b/test/functional/p2p_compactblocks_hb.py @@ -30,7 +30,7 @@ def setup_network(self): def relay_block_through(self, peer): """Relay a new block through peer peer, and return HB status between 1 and [2,3,4,5].""" self.connect_nodes(peer, 0) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() self.disconnect_nodes(peer, 0) status_to = [self.peer_info(1, i)['bip152_hb_to'] for i in range(2, 6)] @@ -44,7 +44,7 @@ def run_test(self): # Connect everyone to node 0, and mine some blocks to get all nodes out of IBD. 
for i in range(1, 6): self.connect_nodes(i, 0) - self.nodes[0].generate(2) + self.generate(self.nodes[0], 2) self.sync_blocks() for i in range(1, 6): self.disconnect_nodes(i, 0) diff --git a/test/functional/p2p_eviction.py b/test/functional/p2p_eviction.py index a648d7620b980..a8c1eb425e56b 100755 --- a/test/functional/p2p_eviction.py +++ b/test/functional/p2p_eviction.py @@ -53,7 +53,7 @@ def run_test(self): protected_peers = set() # peers that we expect to be protected from eviction current_peer = -1 node = self.nodes[0] - node.generatetoaddress(COINBASE_MATURITY + 1, node.get_deterministic_priv_key().address) + self.generatetoaddress(node, COINBASE_MATURITY + 1, node.get_deterministic_priv_key().address) self.log.info("Create 4 peers and protect them from eviction by sending us a block") for _ in range(4): diff --git a/test/functional/p2p_filter.py b/test/functional/p2p_filter.py index 359cfb9c346ef..a040665fbac5b 100755 --- a/test/functional/p2p_filter.py +++ b/test/functional/p2p_filter.py @@ -149,7 +149,7 @@ def test_frelay_false(self, filter_peer): assert not filter_peer.tx_received # Clear the mempool so that this transaction does not impact subsequent tests - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) def test_filter(self, filter_peer): # Set the bloomfilter using filterload @@ -159,14 +159,14 @@ def test_filter(self, filter_peer): filter_address = self.nodes[0].decodescript(filter_peer.watch_script_pubkey)['address'] self.log.info('Check that we receive merkleblock and tx if the filter matches a tx in a block') - block_hash = self.nodes[0].generatetoaddress(1, filter_address)[0] + block_hash = self.generatetoaddress(self.nodes[0], 1, filter_address)[0] txid = self.nodes[0].getblock(block_hash)['tx'][0] filter_peer.wait_for_merkleblock(block_hash) filter_peer.wait_for_tx(txid) self.log.info('Check that we only receive a merkleblock if the filter does not match a tx in a block') filter_peer.tx_received = False - block_hash = self.nodes[0].generatetoaddress(1, self.nodes[0].getnewaddress())[0] + block_hash = self.generatetoaddress(self.nodes[0], 1, self.nodes[0].getnewaddress())[0] filter_peer.wait_for_merkleblock(block_hash) assert not filter_peer.tx_received @@ -194,7 +194,7 @@ def test_filter(self, filter_peer): filter_peer.merkleblock_received = False filter_peer.tx_received = False with self.nodes[0].assert_debug_log(expected_msgs=['received getdata']): - block_hash = self.nodes[0].generatetoaddress(1, self.nodes[0].getnewaddress())[0] + block_hash = self.generatetoaddress(self.nodes[0], 1, self.nodes[0].getnewaddress())[0] filter_peer.wait_for_inv([CInv(MSG_BLOCK, int(block_hash, 16))]) filter_peer.sync_with_ping() assert not filter_peer.merkleblock_received diff --git a/test/functional/p2p_fingerprint.py b/test/functional/p2p_fingerprint.py index a23f741723752..e992b7475decd 100755 --- a/test/functional/p2p_fingerprint.py +++ b/test/functional/p2p_fingerprint.py @@ -69,7 +69,7 @@ def run_test(self): self.nodes[0].setmocktime(int(time.time()) - 60 * 24 * 60 * 60) # Generating a chain of 10 blocks - block_hashes = self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address) + block_hashes = self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address) # Create longer chain starting 2 blocks before current tip height = len(block_hashes) - 2 @@ -98,7 +98,7 @@ def run_test(self): # Longest chain is extended so stale is much older than chain tip self.nodes[0].setmocktime(0) - block_hash = 
int(self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)[-1], 16) + block_hash = int(self.generatetoaddress(self.nodes[0], 1, self.nodes[0].get_deterministic_priv_key().address)[-1], 16) assert_equal(self.nodes[0].getblockcount(), 14) node0.wait_for_block(block_hash, timeout=3) diff --git a/test/functional/p2p_ibd_txrelay.py b/test/functional/p2p_ibd_txrelay.py index 1af89d6a2ffd7..bc1da72c2db32 100755 --- a/test/functional/p2p_ibd_txrelay.py +++ b/test/functional/p2p_ibd_txrelay.py @@ -66,7 +66,7 @@ def run_test(self): self.nodes[0].disconnect_p2ps() # Come out of IBD by generating a block - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() self.log.info("Check that nodes process the same transaction, even when unsolicited, when no longer in IBD") diff --git a/test/functional/p2p_instantsend.py b/test/functional/p2p_instantsend.py index c5966d6eb1cd8..347eb3fa26b11 100755 --- a/test/functional/p2p_instantsend.py +++ b/test/functional/p2p_instantsend.py @@ -47,7 +47,7 @@ def test_block_doublespend(self): for node in self.nodes: self.wait_for_instantlock(is_id, node) self.bump_mocktime(1) - self.nodes[0].generate(2) + self.generate(self.nodes[0], 2) self.sync_all() # create doublespending transaction, but don't relay it @@ -92,7 +92,7 @@ def test_block_doublespend(self): self.bump_mocktime(1) # make sure the above TX is on node0 self.sync_mempools([n for n in self.nodes if n is not isolated]) - self.nodes[0].generate(2) + self.generate(self.nodes[0], 2) self.sync_all() def test_mempool_doublespend(self): @@ -108,7 +108,7 @@ def test_mempool_doublespend(self): for node in self.nodes: self.wait_for_instantlock(is_id, node) self.bump_mocktime(1) - self.nodes[0].generate(2) + self.generate(self.nodes[0], 2) self.sync_all() # create doublespending transaction, but don't relay it @@ -141,7 +141,7 @@ def test_mempool_doublespend(self): assert_equal(receiver.getwalletinfo()["balance"], 0) # mine more blocks self.bump_mocktime(1) - self.nodes[0].generate(2) + self.generate(self.nodes[0], 2) self.sync_all() if __name__ == '__main__': diff --git a/test/functional/p2p_invalid_block.py b/test/functional/p2p_invalid_block.py index bc8d90d589788..dafef7412df27 100755 --- a/test/functional/p2p_invalid_block.py +++ b/test/functional/p2p_invalid_block.py @@ -51,7 +51,7 @@ def run_test(self): peer.send_blocks_and_test([block1], node, success=True) self.log.info("Mature the block.") - node.generatetoaddress(100, node.get_deterministic_priv_key().address) + self.generatetoaddress(node, 100, node.get_deterministic_priv_key().address) best_block = node.getblock(node.getbestblockhash()) tip = int(node.getbestblockhash(), 16) diff --git a/test/functional/p2p_invalid_locator.py b/test/functional/p2p_invalid_locator.py index f884cf90ff037..a586b48d4c00b 100755 --- a/test/functional/p2p_invalid_locator.py +++ b/test/functional/p2p_invalid_locator.py @@ -16,7 +16,7 @@ def set_test_params(self): def run_test(self): node = self.nodes[0] # convenience reference to the node - node.generatetoaddress(1, node.get_deterministic_priv_key().address) # Get node out of IBD + self.generatetoaddress(node, 1, node.get_deterministic_priv_key().address) # Get node out of IBD self.log.info('Test max locator size') block_count = node.getblockcount() diff --git a/test/functional/p2p_invalid_tx.py b/test/functional/p2p_invalid_tx.py index 3a4fd8919b7c8..f7a6b644c7e05 100755 --- a/test/functional/p2p_invalid_tx.py +++ b/test/functional/p2p_invalid_tx.py @@ -76,7 +76,7 @@ def 
run_test(self): node.p2ps[0].send_blocks_and_test([block1, block2], node, success=True) self.log.info("Mature the block.") - self.nodes[0].generatetoaddress(100, self.nodes[0].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[0], 100, self.nodes[0].get_deterministic_priv_key().address) # Iterate through a list of known invalid transaction types, ensuring each is # rejected. Some are consensus invalid and some just violate policy. diff --git a/test/functional/p2p_leak.py b/test/functional/p2p_leak.py index da037b60aa51f..2db2c31f1face 100755 --- a/test/functional/p2p_leak.py +++ b/test/functional/p2p_leak.py @@ -111,7 +111,7 @@ def run_test(self): no_verack_idle_peer.wait_until(lambda: no_verack_idle_peer.version_received) # Mine a block and make sure that it's not sent to the connected peers - self.nodes[0].generate(nblocks=1) + self.generate(self.nodes[0], nblocks=1) #Give the node enough time to possibly leak out a message time.sleep(5) diff --git a/test/functional/p2p_leak_tx.py b/test/functional/p2p_leak_tx.py index 6aa74a4f3e523..df30ab74844da 100755 --- a/test/functional/p2p_leak_tx.py +++ b/test/functional/p2p_leak_tx.py @@ -26,8 +26,8 @@ def run_test(self): gen_node = self.nodes[0] # The block and tx generating node miniwallet = MiniWallet(gen_node) # Add enough mature utxos to the wallet, so that all txs spend confirmed coins - miniwallet.generate(1) - gen_node.generate(100) + self.generate(miniwallet, 1) + self.generate(gen_node, 100) inbound_peer = self.nodes[0].add_p2p_connection(P2PNode()) # An "attacking" inbound peer diff --git a/test/functional/p2p_node_network_limited.py b/test/functional/p2p_node_network_limited.py index 16862fb9185ec..f3286df85289b 100755 --- a/test/functional/p2p_node_network_limited.py +++ b/test/functional/p2p_node_network_limited.py @@ -57,7 +57,7 @@ def run_test(self): self.log.info("Mine enough blocks to reach the NODE_NETWORK_LIMITED range.") self.connect_nodes(0, 1) - blocks = self.nodes[1].generatetoaddress(292, self.nodes[1].get_deterministic_priv_key().address) + blocks = self.generatetoaddress(self.nodes[1], 292, self.nodes[1].get_deterministic_priv_key().address) self.sync_blocks([self.nodes[0], self.nodes[1]]) self.log.info("Make sure we can max retrieve block at tip-288.") @@ -89,7 +89,7 @@ def run_test(self): self.disconnect_all() # mine 10 blocks on node 0 (pruned node) - self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address) # connect node1 (non pruned) with node0 (pruned) and check if the can sync self.connect_nodes(0, 1) diff --git a/test/functional/p2p_permissions.py b/test/functional/p2p_permissions.py index 1401c642088a0..98ae52fb48c7a 100755 --- a/test/functional/p2p_permissions.py +++ b/test/functional/p2p_permissions.py @@ -115,7 +115,7 @@ def run_test(self): self.nodes[1].assert_start_raises_init_error(["-whitebind=noban@127.0.0.1", "-bind=127.0.0.1", "-listen=0"], "Cannot set -bind or -whitebind together with -listen=0", match=ErrorMatch.PARTIAL_REGEX) def check_tx_relay(self): - block_op_true = self.nodes[0].getblock(self.nodes[0].generatetoaddress(100, ADDRESS_BCRT1_P2SH_OP_TRUE)[0]) + block_op_true = self.nodes[0].getblock(self.generatetoaddress(self.nodes[0], 100, ADDRESS_BCRT1_P2SH_OP_TRUE)[0]) self.sync_all() self.log.debug("Create a connection from a forcerelay peer that rebroadcasts raw txs") diff --git a/test/functional/p2p_quorum_data.py 
b/test/functional/p2p_quorum_data.py index e736b118717c9..d72894d21750d 100755 --- a/test/functional/p2p_quorum_data.py +++ b/test/functional/p2p_quorum_data.py @@ -135,7 +135,7 @@ def force_request_expire(bump_seconds=self.quorum_data_request_expiration_timeou self.bump_mocktime(bump_seconds) # Test with/without expired request cleanup if self.cleanup: - node0.generate(1) + self.generate(node0, 1) self.sync_blocks() def test_basics(): diff --git a/test/functional/p2p_sendheaders.py b/test/functional/p2p_sendheaders.py index b115fbbb390a0..45d4e95113b76 100755 --- a/test/functional/p2p_sendheaders.py +++ b/test/functional/p2p_sendheaders.py @@ -204,7 +204,7 @@ def mine_blocks(self, count): # Clear out block announcements from each p2p listener [x.clear_block_announcements() for x in self.nodes[0].p2ps] - self.nodes[0].generatetoaddress(count, self.nodes[0].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[0], count, self.nodes[0].get_deterministic_priv_key().address) return int(self.nodes[0].getbestblockhash(), 16) def mine_reorg(self, length): @@ -215,7 +215,7 @@ def mine_reorg(self, length): return the list of block hashes newly mined.""" # make sure all invalidated blocks are node0's - self.nodes[0].generatetoaddress(length, self.nodes[0].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[0], length, self.nodes[0].get_deterministic_priv_key().address) self.sync_blocks(self.nodes, wait=0.1) for x in self.nodes[0].p2ps: x.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16)) @@ -224,7 +224,7 @@ def mine_reorg(self, length): tip_height = self.nodes[1].getblockcount() hash_to_invalidate = self.nodes[1].getblockhash(tip_height - (length - 1)) self.nodes[1].invalidateblock(hash_to_invalidate) - all_hashes = self.nodes[1].generatetoaddress(length + 1, self.nodes[1].get_deterministic_priv_key().address) # Must be longer than the orig chain + all_hashes = self.generatetoaddress(self.nodes[1], length + 1, self.nodes[1].get_deterministic_priv_key().address) # Must be longer than the orig chain self.sync_blocks(self.nodes, wait=0.1) return [int(x, 16) for x in all_hashes] @@ -239,7 +239,7 @@ def run_test(self): self.test_nonnull_locators(test_node, inv_node) def test_null_locators(self, test_node, inv_node): - tip = self.nodes[0].getblockheader(self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)[0]) + tip = self.nodes[0].getblockheader(self.generatetoaddress(self.nodes[0], 1, self.nodes[0].get_deterministic_priv_key().address)[0]) tip_hash = int(tip["hash"], 16) inv_node.check_last_inv_announcement(inv=[tip_hash]) diff --git a/test/functional/p2p_sendheaders_compressed.py b/test/functional/p2p_sendheaders_compressed.py index 181a4e634acb1..0897357006e3c 100755 --- a/test/functional/p2p_sendheaders_compressed.py +++ b/test/functional/p2p_sendheaders_compressed.py @@ -139,7 +139,7 @@ def mine_blocks(self, count): # Clear out block announcements from each p2p listener [p2p.clear_block_announcements() for p2p in self.nodes[0].p2ps] - self.nodes[0].generate(count) + self.generate(self.nodes[0], count) return int(self.nodes[0].getbestblockhash(), 16) def mine_reorg(self, length): @@ -149,7 +149,7 @@ def mine_reorg(self, length): to-be-reorged-out blocks are mined, so that we don't break later tests. 
return the list of block hashes newly mined.""" - self.nodes[0].generate(length) # make sure all invalidated blocks are node0's + self.generate(self.nodes[0], length) # make sure all invalidated blocks are node0's self.sync_blocks(self.nodes, wait=0.1) for p2p in self.nodes[0].p2ps: p2p.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16)) @@ -158,7 +158,7 @@ def mine_reorg(self, length): tip_height = self.nodes[1].getblockcount() hash_to_invalidate = self.nodes[1].getblockhash(tip_height - (length - 1)) self.nodes[1].invalidateblock(hash_to_invalidate) - all_hashes = self.nodes[1].generate(length + 1) # Must be longer than the orig chain + all_hashes = self.generate(self.nodes[1], length + 1) # Must be longer than the orig chain self.sync_blocks(self.nodes, wait=0.1) return [int(hash_value, 16) for hash_value in all_hashes] @@ -183,7 +183,7 @@ def test_null_locators(self, test_node, inv_node): a block which hasn't been validated. Verifies only the first request returns headers. """ - tip = self.nodes[0].getblockheader(self.nodes[0].generate(1)[0]) + tip = self.nodes[0].getblockheader(self.generate(self.nodes[0], 1)[0]) tip_hash = int(tip["hash"], 16) inv_node.check_last_inv_announcement(inv=[tip_hash]) diff --git a/test/functional/rpc_addresses_deprecation.py b/test/functional/rpc_addresses_deprecation.py index dfeba3ab445d7..1028f4b55382c 100755 --- a/test/functional/rpc_addresses_deprecation.py +++ b/test/functional/rpc_addresses_deprecation.py @@ -41,7 +41,7 @@ def test_addresses_deprecation(self): txid = node.sendrawtransaction(hexstring=tx_signed, maxfeerate=0) self.log.info("Test RPCResult scriptPubKey no longer returns the fields addresses or reqSigs by default") - hash = node.generateblock(output=node.getnewaddress(), transactions=[txid])['hash'] + hash = self.generateblock(node, output=node.getnewaddress(), transactions=[txid])['hash'] # Ensure both nodes have the newly generated block on disk. 
self.sync_blocks() script_pub_key = node.getblock(blockhash=hash, verbose=2)['tx'][-1]['vout'][0]['scriptPubKey'] diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py index c2b4fcf9013eb..b29f633442490 100755 --- a/test/functional/rpc_blockchain.py +++ b/test/functional/rpc_blockchain.py @@ -86,7 +86,7 @@ def mine_chain(self): for t in range(TIME_GENESIS_BLOCK, TIME_RANGE_END, TIME_RANGE_STEP): # 156 sec steps from genesis block time set_node_times(self.nodes, t) - self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_P2SH_OP_TRUE) + self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_P2SH_OP_TRUE) assert_equal(self.nodes[0].getblockchaininfo()['blocks'], 200) def _test_getblockchaininfo(self): @@ -347,12 +347,12 @@ def _test_getnetworkhashps(self): def _test_stopatheight(self): assert_equal(self.nodes[0].getblockcount(), HEIGHT) - self.nodes[0].generatetoaddress(6, ADDRESS_BCRT1_P2SH_OP_TRUE) + self.generatetoaddress(self.nodes[0], 6, ADDRESS_BCRT1_P2SH_OP_TRUE) assert_equal(self.nodes[0].getblockcount(), HEIGHT + 6) self.log.debug('Node should not stop at this height') assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3)) try: - self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_P2SH_OP_TRUE) + self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_P2SH_OP_TRUE) except (ConnectionError, http.client.BadStatusLine): pass # The node already shut down before response self.log.debug('Node should stop at this height...') @@ -408,7 +408,7 @@ def _test_getblock(self): fee_per_kb = 1000 * fee_per_byte miniwallet.send_self_transfer(fee_rate=fee_per_kb, from_node=node) - blockhash = node.generate(1)[0] + blockhash = self.generate(node, 1)[0] self.log.info("Test getblock with verbosity 1 doesn't include fee") block = node.getblock(blockhash, 1) diff --git a/test/functional/rpc_createmultisig.py b/test/functional/rpc_createmultisig.py index fd8450567c359..5bb08cdb746f8 100755 --- a/test/functional/rpc_createmultisig.py +++ b/test/functional/rpc_createmultisig.py @@ -44,7 +44,7 @@ def run_test(self): self.check_addmultisigaddress_errors() self.log.info('Generating blocks ...') - node0.generate(149) + self.generate(node0, 149) self.sync_all() self.moved = 0 @@ -101,7 +101,7 @@ def check_addmultisigaddress_errors(self): def checkbalances(self): node0, node1, node2 = self.nodes - node0.generate(1) + self.generate(node0, 1) self.sync_all() bal0 = node0.getbalance() @@ -158,7 +158,7 @@ def do_multisig(self): value = tx["vout"][vout]["value"] prevtxs = [{"txid": txid, "vout": vout, "scriptPubKey": scriptPubKey, "redeemScript": mredeem, "amount": value}] - node0.generate(1) + self.generate(node0, 1) outval = value - decimal.Decimal("0.00001000") rawtx = node2.createrawtransaction([{"txid": txid, "vout": vout}], [{self.final: outval}]) @@ -181,7 +181,7 @@ def do_multisig(self): self.moved += outval tx = node0.sendrawtransaction(rawtx3["hex"], 0) - blk = node0.generate(1)[0] + blk = self.generate(node0, 1)[0] assert tx in node0.getblock(blk)["tx"] txinfo = node0.getrawtransaction(tx, True, blk) diff --git a/test/functional/rpc_deprecated.py b/test/functional/rpc_deprecated.py index 2649c299c0cb4..a839eda5d108d 100755 --- a/test/functional/rpc_deprecated.py +++ b/test/functional/rpc_deprecated.py @@ -22,7 +22,7 @@ def run_test(self): # In run_test: # self.log.info("Test generate RPC") # assert_raises_rpc_error(-32, 'The wallet generate rpc method is deprecated', self.nodes[0].rpc.generate, 1) - # self.nodes[1].generate(1) + # 
self.generate(self.nodes[1], 1) self.log.info("No tested deprecated RPC methods") if __name__ == '__main__': diff --git a/test/functional/rpc_dumptxoutset.py b/test/functional/rpc_dumptxoutset.py index ecd93593c104b..c0e5519233f1f 100755 --- a/test/functional/rpc_dumptxoutset.py +++ b/test/functional/rpc_dumptxoutset.py @@ -23,7 +23,7 @@ def run_test(self): node = self.nodes[0] mocktime = node.getblockheader(node.getblockhash(0))['time'] + 1 node.setmocktime(mocktime) - node.generate(COINBASE_MATURITY) + self.generate(node, COINBASE_MATURITY) FILENAME = 'txoutset.dat' out = node.dumptxoutset(FILENAME) diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py index a1c1334e94140..e009cb8e2eb44 100755 --- a/test/functional/rpc_fundrawtransaction.py +++ b/test/functional/rpc_fundrawtransaction.py @@ -67,9 +67,9 @@ def run_test(self): # = 2 bytes * minRelayTxFeePerByte self.fee_tolerance = 2 * self.min_relay_tx_fee / 1000 - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all() - self.nodes[0].generate(121) + self.generate(self.nodes[0], 121) self.sync_all() self.test_change_position() @@ -127,7 +127,7 @@ def test_change_position(self): self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 50) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() wwatch.unloadwallet() @@ -491,7 +491,7 @@ def test_spend_2of2(self): # send 12 DASH to msig addr self.nodes[0].sendtoaddress(mSigObj, 12) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() oldBalance = self.nodes[1].getbalance() @@ -502,7 +502,7 @@ def test_spend_2of2(self): signed_psbt = w2.walletprocesspsbt(funded_psbt) final_psbt = w2.finalizepsbt(signed_psbt['psbt']) self.nodes[2].sendrawtransaction(final_psbt['hex']) - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all() # Make sure funds are received at node1. @@ -564,7 +564,7 @@ def test_locked_wallet(self): self.nodes[1].walletpassphrase("test", 600) signedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex']) self.nodes[1].sendrawtransaction(signedTx['hex']) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_all() # Make sure funds are received at node1. @@ -576,12 +576,12 @@ def test_many_inputs_fee(self): # Empty node1, send some small coins from node0 to node1. self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_all() for _ in range(20): self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # Fund a tx with ~20 small inputs. @@ -604,12 +604,12 @@ def test_many_inputs_send(self): # Again, empty node1, send some small coins from node0 to node1. self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_all() for _ in range(20): self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # Fund a tx with ~20 small inputs. 
@@ -621,7 +621,7 @@ def test_many_inputs_send(self): fundedTx = self.nodes[1].fundrawtransaction(rawtx) fundedAndSignedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex']) self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex']) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_all() assert_equal(oldBalance+Decimal('500.19000000'), self.nodes[0].getbalance()) #0.19+block reward @@ -699,7 +699,7 @@ def test_all_watched_funds(self): signedtx = self.nodes[0].signrawtransactionwithwallet(signedtx["hex"]) assert signedtx["complete"] self.nodes[0].sendrawtransaction(signedtx["hex"]) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() wwatch.unloadwallet() @@ -936,7 +936,7 @@ def test_include_unsafe(self): wallet.sendrawtransaction(signedtx['hex']) # And we can also use them once they're confirmed. - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) rawtx = wallet.createrawtransaction([], [{self.nodes[2].getnewaddress(): 3}]) fundedtx = wallet.fundrawtransaction(rawtx, {"include_unsafe": True}) tx_dec = wallet.decoderawtransaction(fundedtx['hex']) diff --git a/test/functional/rpc_generateblock.py b/test/functional/rpc_generateblock.py index 671fa65ad9f9f..6d880d430af2b 100755 --- a/test/functional/rpc_generateblock.py +++ b/test/functional/rpc_generateblock.py @@ -23,13 +23,13 @@ def run_test(self): self.log.info('Generate an empty block to address') address = node.getnewaddress() - hash = node.generateblock(address, [])['hash'] + hash = self.generateblock(node, address, [])['hash'] block = node.getblock(hash, 2) assert_equal(len(block['tx']), 1) assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'], address) self.log.info('Generate an empty block to a descriptor') - hash = node.generateblock('addr(' + address + ')', [])['hash'] + hash = self.generateblock(node, 'addr(' + address + ')', [])['hash'] block = node.getblock(hash, 2) assert_equal(len(block['tx']), 1) assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'], address) @@ -37,13 +37,13 @@ def run_test(self): self.log.info('Generate an empty block to a combo descriptor with compressed pubkey') combo_key = '0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798' combo_address = 'yWziQMcwmKjRdzi7eWjwiQX8EjWcd6dSg6' - hash = node.generateblock('combo(' + combo_key + ')', [])['hash'] + hash = self.generateblock(node, 'combo(' + combo_key + ')', [])['hash'] block = node.getblock(hash, 2) assert_equal(len(block['tx']), 1) assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'], combo_address) # Generate 110 blocks to spend - node.generatetoaddress(110, address) + self.generatetoaddress(node, 110, address) # Generate some extra mempool transactions to verify they don't get mined for _ in range(10): @@ -51,7 +51,7 @@ def run_test(self): self.log.info('Generate block with txid') txid = node.sendtoaddress(address, 1) - hash = node.generateblock(address, [txid])['hash'] + hash = self.generateblock(node, address, [txid])['hash'] block = node.getblock(hash, 1) assert_equal(len(block['tx']), 2) assert_equal(block['tx'][1], txid) @@ -60,7 +60,7 @@ def run_test(self): utxos = node.listunspent(addresses=[address]) raw = node.createrawtransaction([{'txid':utxos[0]['txid'], 'vout':utxos[0]['vout']}],[{address:1}]) signed_raw = node.signrawtransactionwithwallet(raw)['hex'] - hash = node.generateblock(address, [signed_raw])['hash'] + hash = self.generateblock(node, address, [signed_raw])['hash'] block = node.getblock(hash, 1) 
assert_equal(len(block['tx']), 2) txid = block['tx'][1] @@ -72,26 +72,26 @@ def run_test(self): txid1 = node.sendrawtransaction(signed_raw1) raw2 = node.createrawtransaction([{'txid':txid1, 'vout':0}],[{address:0.999}]) signed_raw2 = node.signrawtransactionwithwallet(raw2)['hex'] - assert_raises_rpc_error(-25, 'TestBlockValidity failed: bad-txns-inputs-missingorspent', node.generateblock, address, [signed_raw2, txid1]) + assert_raises_rpc_error(-25, 'TestBlockValidity failed: bad-txns-inputs-missingorspent', self.generateblock, node, address, [signed_raw2, txid1]) self.log.info('Fail to generate block with txid not in mempool') missing_txid = '0000000000000000000000000000000000000000000000000000000000000000' - assert_raises_rpc_error(-5, 'Transaction ' + missing_txid + ' not in mempool.', node.generateblock, address, [missing_txid]) + assert_raises_rpc_error(-5, 'Transaction ' + missing_txid + ' not in mempool.', self.generateblock, node, address, [missing_txid]) self.log.info('Fail to generate block with invalid raw tx') invalid_raw_tx = '0000' - assert_raises_rpc_error(-22, 'Transaction decode failed for ' + invalid_raw_tx, node.generateblock, address, [invalid_raw_tx]) + assert_raises_rpc_error(-22, 'Transaction decode failed for ' + invalid_raw_tx, self.generateblock, node, address, [invalid_raw_tx]) self.log.info('Fail to generate block with invalid address/descriptor') - assert_raises_rpc_error(-5, 'Invalid address or descriptor', node.generateblock, '1234', []) + assert_raises_rpc_error(-5, 'Invalid address or descriptor', self.generateblock, node, '1234', []) self.log.info('Fail to generate block with a ranged descriptor') ranged_descriptor = 'pkh(tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp/0/*)' - assert_raises_rpc_error(-8, 'Ranged descriptor not accepted. Maybe pass through deriveaddresses first?', node.generateblock, ranged_descriptor, []) + assert_raises_rpc_error(-8, 'Ranged descriptor not accepted. 
Maybe pass through deriveaddresses first?', self.generateblock, node, ranged_descriptor, []) self.log.info('Fail to generate block with a descriptor missing a private key') child_descriptor = 'pkh(tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp/0\'/0)' - assert_raises_rpc_error(-5, 'Cannot derive script without private keys', node.generateblock, child_descriptor, []) + assert_raises_rpc_error(-5, 'Cannot derive script without private keys', self.generateblock, node, child_descriptor, []) if __name__ == '__main__': GenerateBlockTest().main() diff --git a/test/functional/rpc_getblockfilter.py b/test/functional/rpc_getblockfilter.py index a99e50f29f87d..4d860d0f36c33 100755 --- a/test/functional/rpc_getblockfilter.py +++ b/test/functional/rpc_getblockfilter.py @@ -21,8 +21,8 @@ def run_test(self): # Create two chains by disconnecting nodes 0 & 1, mining, then reconnecting self.disconnect_nodes(0, 1) - self.nodes[0].generate(3) - self.nodes[1].generate(4) + self.generate(self.nodes[0], 3) + self.generate(self.nodes[1], 4) assert_equal(self.nodes[0].getblockcount(), 3) chain0_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)] diff --git a/test/functional/rpc_getblockfrompeer.py b/test/functional/rpc_getblockfrompeer.py index 2988e0d967cd1..a37888cbbe03d 100755 --- a/test/functional/rpc_getblockfrompeer.py +++ b/test/functional/rpc_getblockfrompeer.py @@ -27,11 +27,11 @@ def check_for_block(self, hash): def run_test(self): self.log.info("Mine 4 blocks on Node 0") - self.nodes[0].generate(4) + self.generate(self.nodes[0], 4) assert_equal(self.nodes[0].getblockcount(), 204) self.log.info("Mine competing 3 blocks on Node 1") - self.nodes[1].generate(3) + self.generate(self.nodes[1], 3) assert_equal(self.nodes[1].getblockcount(), 203) short_tip = self.nodes[1].getbestblockhash() diff --git a/test/functional/rpc_getblockstats.py b/test/functional/rpc_getblockstats.py index 19295b70e5f1e..55bebd870b385 100755 --- a/test/functional/rpc_getblockstats.py +++ b/test/functional/rpc_getblockstats.py @@ -43,11 +43,11 @@ def get_stats(self): def generate_test_data(self, filename): self.nodes[0].setmocktime(self.mocktime) - self.nodes[0].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[0], COINBASE_MATURITY + 1) address = self.nodes[0].get_deterministic_priv_key().address self.nodes[0].sendtoaddress(address=address, amount=10, subtractfeefromamount=True) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() self.nodes[0].sendtoaddress(address=address, amount=10, subtractfeefromamount=True) @@ -55,7 +55,7 @@ def generate_test_data(self, filename): self.nodes[0].settxfee(amount=0.003) self.nodes[0].sendtoaddress(address=address, amount=1, subtractfeefromamount=True) self.sync_all() - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.expected_stats = self.get_stats() diff --git a/test/functional/rpc_getchaintips.py b/test/functional/rpc_getchaintips.py index e39332bbc65c6..9e3defa256966 100755 --- a/test/functional/rpc_getchaintips.py +++ b/test/functional/rpc_getchaintips.py @@ -26,8 +26,8 @@ def run_test(self): # Split the network and build two chains of different lengths. 
self.split_network() - self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address) - self.nodes[2].generatetoaddress(20, self.nodes[2].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[2], 20, self.nodes[2].get_deterministic_priv_key().address) self.sync_all(self.nodes[:2]) self.sync_all(self.nodes[2:]) diff --git a/test/functional/rpc_invalidateblock.py b/test/functional/rpc_invalidateblock.py index 54dc8ffb2211c..36bfbb64c82ca 100755 --- a/test/functional/rpc_invalidateblock.py +++ b/test/functional/rpc_invalidateblock.py @@ -23,12 +23,12 @@ def setup_network(self): def run_test(self): self.log.info("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:") self.log.info("Mine 4 blocks on Node 0") - self.nodes[0].generatetoaddress(4, self.nodes[0].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[0], 4, self.nodes[0].get_deterministic_priv_key().address) assert_equal(self.nodes[0].getblockcount(), 4) besthash_n0 = self.nodes[0].getbestblockhash() self.log.info("Mine competing 6 blocks on Node 1") - self.nodes[1].generatetoaddress(6, self.nodes[1].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[1], 6, self.nodes[1].get_deterministic_priv_key().address) assert_equal(self.nodes[1].getblockcount(), 6) self.log.info("Connect nodes to force a reorg") @@ -54,7 +54,7 @@ def run_test(self): self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3)) assert_equal(self.nodes[2].getblockcount(), 2) self.log.info("..and then mine a block") - self.nodes[2].generatetoaddress(1, self.nodes[2].get_deterministic_priv_key().address) + self.generatetoaddress(self.nodes[2], 1, self.nodes[2].get_deterministic_priv_key().address) self.log.info("Verify all nodes are at the right height") self.wait_until(lambda: self.nodes[2].getblockcount() == 3, timeout=5) self.wait_until(lambda: self.nodes[0].getblockcount() == 4, timeout=5) @@ -64,13 +64,13 @@ def run_test(self): self.restart_node(0, extra_args=["-checkblocks=5"]) self.restart_node(1, extra_args=["-checkblocks=5"]) self.connect_nodes(0, 1) - self.nodes[0].generate(10) + self.generate(self.nodes[0], 10) self.sync_blocks(self.nodes[0:2]) newheight = self.nodes[0].getblockcount() for j in range(2): self.restart_node(0, extra_args=["-checkblocks=5"]) - tip = self.nodes[0].generate(10)[-1] - self.nodes[1].generate(9) + tip = self.generate(self.nodes[0], 10)[-1] + self.generate(self.nodes[1], 9) self.connect_nodes(0, 1) self.sync_blocks(self.nodes[0:2]) assert_equal(self.nodes[0].getblockcount(), newheight + 10 * (j + 1)) @@ -87,7 +87,7 @@ def run_test(self): assert_equal(tip, self.nodes[1].getbestblockhash()) self.log.info("Verify that we reconsider all ancestors as well") - blocks = self.nodes[1].generatetodescriptor(10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR) + blocks = self.generatetodescriptor(self.nodes[1], 10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR) assert_equal(self.nodes[1].getbestblockhash(), blocks[-1]) # Invalidate the two blocks at the tip self.nodes[1].invalidateblock(blocks[-1]) @@ -99,7 +99,7 @@ def run_test(self): assert_equal(self.nodes[1].getbestblockhash(), blocks[-1]) self.log.info("Verify that we reconsider all descendants") - blocks = self.nodes[1].generatetodescriptor(10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR) + blocks = self.generatetodescriptor(self.nodes[1], 10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR) 
assert_equal(self.nodes[1].getbestblockhash(), blocks[-1]) # Invalidate the two blocks at the tip self.nodes[1].invalidateblock(blocks[-2]) diff --git a/test/functional/rpc_masternode.py b/test/functional/rpc_masternode.py index 158b8cbb67d86..f9d1b8666a164 100755 --- a/test/functional/rpc_masternode.py +++ b/test/functional/rpc_masternode.py @@ -23,7 +23,7 @@ def run_test(self): checked_0_operator_reward = False checked_non_0_operator_reward = False while not checked_0_operator_reward or not checked_non_0_operator_reward: - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) bi = self.nodes[0].getblockchaininfo() height = bi["blocks"] blockhash = bi["bestblockhash"] @@ -59,7 +59,7 @@ def run_test(self): self.log.info("test that `masternode payments` results at chaintip match `getblocktemplate` results for that block") gbt_masternode = self.nodes[0].getblocktemplate()["masternode"] - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) payments_masternode = self.nodes[0].masternode("payments")[0]["masternodes"][0] for i in range(0, len(gbt_masternode)): assert_equal(gbt_masternode[i]["payee"], payments_masternode["payees"][i]["address"]) @@ -84,13 +84,13 @@ def run_test(self): protx_info["state"]["operatorPayoutAddress"] == payments_masternode["payees"][0]["address"] assert option1 or option2 checked_non_0_operator_reward = True - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.log.info("test that `masternode outputs` show correct list") addr1 = self.nodes[0].getnewaddress() addr2 = self.nodes[0].getnewaddress() self.nodes[0].sendmany('', {addr1: 1000, addr2: 1000}) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # we have 3 masternodes that are running already and 2 new outputs we just created assert_equal(len(self.nodes[0].masternode("outputs")), 5) diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py index 1b7c6311bf7e1..72cef90ffdb03 100755 --- a/test/functional/rpc_net.py +++ b/test/functional/rpc_net.py @@ -47,9 +47,9 @@ def set_test_params(self): def run_test(self): # We need miniwallet to make a transaction self.wallet = MiniWallet(self.nodes[0]) - self.wallet.generate(1) + self.generate(self.wallet, 1) # Get out of IBD for the getpeerinfo tests. - self.nodes[0].generate(101) + self.generate(self.nodes[0], 101) # By default, the test framework sets up an addnode connection from # node 1 --> node0. By connecting node0 --> node 1, we're left with @@ -78,7 +78,7 @@ def test_getpeerinfo(self): self.log.info("Test getpeerinfo") # Create a few getpeerinfo last_block/last_transaction values. 
self.wallet.send_self_transfer(from_node=self.nodes[0]) # Make a transaction so we can see it in the getpeerinfo results - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_all() time_now = self.mocktime peer_info = [x.getpeerinfo() for x in self.nodes] diff --git a/test/functional/rpc_packages.py b/test/functional/rpc_packages.py index 1b207061df22d..740a8c02750f7 100755 --- a/test/functional/rpc_packages.py +++ b/test/functional/rpc_packages.py @@ -48,7 +48,7 @@ def run_test(self): self.address = node.get_deterministic_priv_key().address self.coins = [] # The last 100 coinbase transactions are premature - for b in node.generatetoaddress(200, self.address)[:100]: + for b in self.generatetoaddress(node, 200, self.address)[:100]: coinbase = node.getblock(blockhash=b, verbosity=2)["tx"][0] self.coins.append({ "txid": coinbase["txid"], @@ -148,7 +148,7 @@ def test_chain(self): assert_equal(testres_single, testres_multiple) # Clean up by clearing the mempool - node.generate(1) + self.generate(node, 1) def test_multiple_children(self): node = self.nodes[0] diff --git a/test/functional/rpc_preciousblock.py b/test/functional/rpc_preciousblock.py index 7d3a5cc9ce1d8..3a00992ddc3ed 100755 --- a/test/functional/rpc_preciousblock.py +++ b/test/functional/rpc_preciousblock.py @@ -43,18 +43,18 @@ def setup_network(self): def run_test(self): self.log.info("Ensure submitblock can in principle reorg to a competing chain") gen_address = lambda i: self.nodes[i].get_deterministic_priv_key().address # A non-wallet address to mine to - self.nodes[0].generatetoaddress(1, gen_address(0)) + self.generatetoaddress(self.nodes[0], 1, gen_address(0)) assert_equal(self.nodes[0].getblockcount(), 1) - hashZ = self.nodes[1].generatetoaddress(2, gen_address(1))[-1] + hashZ = self.generatetoaddress(self.nodes[1], 2, gen_address(1))[-1] assert_equal(self.nodes[1].getblockcount(), 2) node_sync_via_rpc(self.nodes[0:3]) assert_equal(self.nodes[0].getbestblockhash(), hashZ) self.log.info("Mine blocks A-B-C on Node 0") - hashC = self.nodes[0].generatetoaddress(3, gen_address(0))[-1] + hashC = self.generatetoaddress(self.nodes[0], 3, gen_address(0))[-1] assert_equal(self.nodes[0].getblockcount(), 5) self.log.info("Mine competing blocks E-F-G on Node 1") - hashG = self.nodes[1].generatetoaddress(3, gen_address(1))[-1] + hashG = self.generatetoaddress(self.nodes[1], 3, gen_address(1))[-1] assert_equal(self.nodes[1].getblockcount(), 5) assert hashC != hashG self.log.info("Connect nodes and check no reorg occurs") @@ -83,7 +83,7 @@ def run_test(self): self.nodes[1].preciousblock(hashC) assert_equal(self.nodes[1].getbestblockhash(), hashC) self.log.info("Mine another block (E-F-G-)H on Node 0 and reorg Node 1") - self.nodes[0].generatetoaddress(1, gen_address(0)) + self.generatetoaddress(self.nodes[0], 1, gen_address(0)) assert_equal(self.nodes[0].getblockcount(), 6) self.sync_blocks(self.nodes[0:2]) hashH = self.nodes[0].getbestblockhash() @@ -92,7 +92,7 @@ def run_test(self): self.nodes[1].preciousblock(hashC) assert_equal(self.nodes[1].getbestblockhash(), hashH) self.log.info("Mine competing blocks I-J-K-L on Node 2") - self.nodes[2].generatetoaddress(4, gen_address(2)) + self.generatetoaddress(self.nodes[2], 4, gen_address(2)) assert_equal(self.nodes[2].getblockcount(), 6) hashL = self.nodes[2].getbestblockhash() self.log.info("Connect nodes and check no reorg occurs") diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py index e05ccad04ff72..dd6c97ea9f58c 100755 --- 
a/test/functional/rpc_psbt.py +++ b/test/functional/rpc_psbt.py @@ -83,7 +83,7 @@ def run_test(self): rawtx = self.nodes[0].fundrawtransaction(rawtx, {"changePosition":1}) signed_tx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex'])['hex'] txid = self.nodes[0].sendrawtransaction(signed_tx) - self.nodes[0].generate(6) + self.generate(self.nodes[0], 6) self.sync_all() # Find the output pos @@ -227,7 +227,7 @@ def run_test(self): node2_addr = self.nodes[2].getnewaddress() txid1 = self.nodes[0].sendtoaddress(node1_addr, 13) txid2 = self.nodes[0].sendtoaddress(node2_addr, 13) - blockhash = self.nodes[0].generate(6)[0] + blockhash = self.generate(self.nodes[0], 6)[0] self.sync_all() vout1 = find_output(self.nodes[1], txid1, 13, blockhash=blockhash) vout2 = find_output(self.nodes[2], txid2, 13, blockhash=blockhash) @@ -255,7 +255,7 @@ def run_test(self): combined = self.nodes[0].combinepsbt([psbt1, psbt2]) finalized = self.nodes[0].finalizepsbt(combined)['hex'] self.nodes[0].sendrawtransaction(finalized) - self.nodes[0].generate(6) + self.generate(self.nodes[0], 6) self.sync_all() # Make sure change address wallet does not have P2SH innerscript access to results in success @@ -394,7 +394,7 @@ def test_psbt_input_keys(psbt_input, keys): addr4 = self.nodes[1].getnewaddress() txid4 = self.nodes[0].sendtoaddress(addr4, 5) vout4 = find_output(self.nodes[0], txid4, 5) - self.nodes[0].generate(6) + self.generate(self.nodes[0], 6) self.sync_all() psbt2 = self.nodes[1].createpsbt([{"txid":txid4, "vout":vout4}], {self.nodes[0].getnewaddress():Decimal('4.999')}) psbt2 = self.nodes[1].walletprocesspsbt(psbt2)['psbt'] @@ -407,7 +407,7 @@ def test_psbt_input_keys(psbt_input, keys): # Newly created PSBT needs UTXOs and updating addr = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 7) - self.nodes[0].generate(6) + self.generate(self.nodes[0], 6) self.sync_all() vout = find_output(self.nodes[0], txid, 7) psbt = self.nodes[1].createpsbt([{"txid":txid, "vout":vout}], {self.nodes[0].getnewaddress():Decimal('6.999')}) diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py index a696a25101425..e059d9be95d35 100755 --- a/test/functional/rpc_rawtransaction.py +++ b/test/functional/rpc_rawtransaction.py @@ -71,15 +71,15 @@ def setup_network(self): def run_test(self): self.log.info('prepare some coins for multiple *rawtransaction commands') - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all() - self.nodes[0].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[0], COINBASE_MATURITY + 1) self.sync_all() self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0) self.sync_all() - self.nodes[0].generate(5) + self.generate(self.nodes[0], 5) self.sync_all() self.log.info('Test getrawtransaction on genesis block coinbase returns an error') @@ -166,7 +166,7 @@ def run_test(self): # make a tx by sending then generate 2 blocks; block1 has the tx in it tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1) - block1, block2 = self.nodes[2].generate(2) + block1, block2 = self.generate(self.nodes[2], 2) self.sync_all() # We should be able to get the raw transaction by providing the correct block gottx = self.nodes[0].getrawtransaction(tx, True, block1) @@ -217,7 +217,7 @@ def run_test(self): # send 1.2 BTC to msig adr txId = self.nodes[0].sendtoaddress(mSigObj, 1.2) self.sync_all() - 
self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance @@ -238,7 +238,7 @@ def run_test(self): decTx = self.nodes[0].gettransaction(txId) rawTx = self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() #THIS IS AN INCOMPLETE FEATURE @@ -261,7 +261,7 @@ def run_test(self): self.nodes[2].sendrawtransaction(rawTxSigned['hex']) rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex']) self.sync_all() - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() assert_equal(self.nodes[0].getbalance(), bal+Decimal('500.00000000')+Decimal('2.19000000')) #block reward + tx @@ -281,7 +281,7 @@ def run_test(self): decTx = self.nodes[0].gettransaction(txId) rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable @@ -306,7 +306,7 @@ def run_test(self): self.nodes[2].sendrawtransaction(rawTxComb) rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb) self.sync_all() - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() assert_equal(self.nodes[0].getbalance(), bal+Decimal('500.00000000')+Decimal('2.19000000')) #block reward + tx @@ -314,13 +314,13 @@ def run_test(self): # Basic signrawtransaction test addr = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 10) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() vout = find_vout_for_address(self.nodes[1], txid, addr) rawTx = self.nodes[1].createrawtransaction([{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): 9.999}) rawTxSigned = self.nodes[1].signrawtransactionwithwallet(rawTx) txId = self.nodes[1].sendrawtransaction(rawTxSigned['hex']) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # getrawtransaction tests @@ -449,7 +449,7 @@ def run_test(self): self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'], maxfeerate='0.20000000') self.log.info('sendrawtransaction/testmempoolaccept with tx that is already in the chain') - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_blocks() for node in self.nodes: testres = node.testmempoolaccept([rawTxSigned['hex']])[0] diff --git a/test/functional/rpc_scantxoutset.py b/test/functional/rpc_scantxoutset.py index 5d0fbe7c9600a..fdff1617e13c8 100755 --- a/test/functional/rpc_scantxoutset.py +++ b/test/functional/rpc_scantxoutset.py @@ -23,7 +23,7 @@ def skip_test_if_missing_module(self): def run_test(self): self.log.info("Mining blocks...") - self.nodes[0].generate(110) + self.generate(self.nodes[0], 110) addr1 = self.nodes[0].getnewaddress("") pubk1 = self.nodes[0].getaddressinfo(addr1)['pubkey'] @@ -50,14 +50,14 @@ def run_test(self): self.nodes[0].sendtoaddress("yVCdQxPXJ3SrtTLv8FuLXDNaynz6kmjPNq", 16.384) # (m/1/1/1500) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.log.info("Stop node, remove wallet, mine again some blocks...") self.stop_node(0) shutil.rmtree(os.path.join(self.nodes[0].datadir, self.chain, 'wallets')) self.start_node(0, ['-nowallet']) self.import_deterministic_coinbase_privkeys() - self.nodes[0].generate(110) + self.generate(self.nodes[0], 110) scan = 
self.nodes[0].scantxoutset("start", []) info = self.nodes[0].gettxoutsetinfo() diff --git a/test/functional/rpc_txoutproof.py b/test/functional/rpc_txoutproof.py index 3e5831c2c1329..5fa902caf3f48 100755 --- a/test/functional/rpc_txoutproof.py +++ b/test/functional/rpc_txoutproof.py @@ -31,8 +31,8 @@ def set_test_params(self): def run_test(self): miniwallet = MiniWallet(self.nodes[0]) # Add enough mature utxos to the wallet, so that all txs spend confirmed coins - miniwallet.generate(5) - self.nodes[0].generate(COINBASE_MATURITY) + self.generate(miniwallet, 5) + self.generate(self.nodes[0], COINBASE_MATURITY) self.sync_all() chain_height = self.nodes[1].getblockcount() @@ -43,7 +43,7 @@ def run_test(self): # This will raise an exception because the transaction is not yet in a block assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1]) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) blockhash = self.nodes[0].getblockhash(chain_height + 1) self.sync_all() @@ -59,7 +59,7 @@ def run_test(self): txin_spent = miniwallet.get_utxo() # Get the change from txid2 tx3 = miniwallet.send_self_transfer(from_node=self.nodes[0], utxo_to_spend=txin_spent) txid3 = tx3['txid'] - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() txid_spent = txin_spent["txid"] diff --git a/test/functional/rpc_verifychainlock.py b/test/functional/rpc_verifychainlock.py index 42d05597331df..34af1660f9649 100755 --- a/test/functional/rpc_verifychainlock.py +++ b/test/functional/rpc_verifychainlock.py @@ -31,7 +31,7 @@ def run_test(self): self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0) self.wait_for_sporks_same() self.mine_quorum() - self.wait_for_chainlocked_block(node0, node0.generate(1)[0]) + self.wait_for_chainlocked_block(node0, self.generate(node0, 1)[0]) chainlock = node0.getbestchainlock() block_hash = chainlock["blockhash"] height = chainlock["height"] @@ -47,8 +47,8 @@ def run_test(self): self.wait_for_chainlocked_block_all_nodes(block_hash) # Isolate node1, mine a block on node0 and wait for its ChainLock node1.setnetworkactive(False) - node0.generate(1) - self.wait_for_chainlocked_block(node0, node0.generate(1)[0]) + self.generate(node0, 1) + self.wait_for_chainlocked_block(node0, self.generate(node0, 1)[0]) chainlock = node0.getbestchainlock() assert chainlock != node1.getbestchainlock() block_hash = chainlock["blockhash"] @@ -61,7 +61,7 @@ def run_test(self): assert node0.verifychainlock(block_hash, chainlock_signature, height) assert node1.verifychainlock(block_hash, chainlock_signature, height) - node1.generate(1) + self.generate(node1, 1) height1 = node1.getblockcount() tx0 = node0.getblock(node0.getbestblockhash())['tx'][0] tx1 = node1.getblock(node1.getbestblockhash())['tx'][0] diff --git a/test/functional/rpc_verifyislock.py b/test/functional/rpc_verifyislock.py index 10309b6e584f0..81800d4b6cc8c 100755 --- a/test/functional/rpc_verifyislock.py +++ b/test/functional/rpc_verifyislock.py @@ -44,7 +44,7 @@ def run_test(self): self.mine_cycle_quorum(llmq_type_name='llmq_test_dip0024', llmq_type=103) self.bump_mocktime(1) - self.nodes[0].generate(8) + self.generate(self.nodes[0], 8) self.sync_blocks() txid = node.sendtoaddress(node.getnewaddress(), 1) @@ -57,7 +57,7 @@ def run_test(self): assert node.verifyislock(request_id, txid, rec_sig) # Not mined, should use maxHeight assert not node.verifyislock(request_id, txid, rec_sig, 1) - node.generate(1) + self.generate(node, 1) assert txid not in node.getrawmempool() # Mined 
but at higher height, should use maxHeight assert not node.verifyislock(request_id, txid, rec_sig, 1) diff --git a/test/functional/rpc_wipewallettxes.py b/test/functional/rpc_wipewallettxes.py index ff1d252d43710..e45f57df514f8 100755 --- a/test/functional/rpc_wipewallettxes.py +++ b/test/functional/rpc_wipewallettxes.py @@ -18,9 +18,9 @@ def skip_test_if_missing_module(self): def run_test(self): self.log.info("Test that wipewallettxes removes txes and rescanblockchain is able to recover them") - self.nodes[0].generate(101) + self.generate(self.nodes[0], 101) txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) assert_equal(self.nodes[0].getwalletinfo()["txcount"], 103) self.nodes[0].wipewallettxes() assert_equal(self.nodes[0].getwalletinfo()["txcount"], 0) diff --git a/test/functional/test-shell.md b/test/functional/test-shell.md index b8e899d6758f7..78737509cb749 100644 --- a/test/functional/test-shell.md +++ b/test/functional/test-shell.md @@ -94,7 +94,7 @@ rewards to a wallet address owned by the mining node. ``` >>> address = test.nodes[0].getnewaddress() ->>> test.nodes[0].generatetoaddress(101, address) +>>> test.generatetoaddress(test.nodes[0], 101, address) ['2b98dd0044aae6f1cca7f88a0acf366a4bfe053c7f7b00da3c0d115f03d67efb', ... ``` Since the two nodes are both initialized by default to establish an outbound diff --git a/test/functional/tool_wallet.py b/test/functional/tool_wallet.py index 1e1b773c44340..153193930f8e0 100755 --- a/test/functional/tool_wallet.py +++ b/test/functional/tool_wallet.py @@ -242,7 +242,7 @@ def test_tool_wallet_info_after_transaction(self): """ self.start_node(0) self.log.info('Generating transaction to mutate wallet') - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.stop_node(0) self.log.info('Calling wallet tool info after generating a transaction, testing output') diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py index e41b0b26383c7..8f54e505983cf 100755 --- a/test/functional/wallet_abandonconflict.py +++ b/test/functional/wallet_abandonconflict.py @@ -29,14 +29,14 @@ def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): - self.nodes[1].generate(COINBASE_MATURITY) + self.generate(self.nodes[1], COINBASE_MATURITY) self.sync_blocks() balance = self.nodes[0].getbalance() txA = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) txB = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) txC = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) self.sync_mempools() - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) # Can not abandon non-wallet transaction assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', lambda: self.nodes[0].abandontransaction(txid='ff' * 32)) @@ -167,7 +167,7 @@ def run_test(self): tx = self.nodes[0].createrawtransaction(inputs, outputs) signed = self.nodes[0].signrawtransactionwithwallet(tx) self.nodes[1].sendrawtransaction(signed["hex"]) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.connect_nodes(0, 1) self.sync_blocks() diff --git a/test/functional/wallet_avoidreuse.py b/test/functional/wallet_avoidreuse.py index 39b243f62b426..3ca8075e94a4e 100755 --- a/test/functional/wallet_avoidreuse.py +++ b/test/functional/wallet_avoidreuse.py @@ -79,7 +79,7 @@ def run_test(self): self.test_persistence() self.test_immutable() - self.nodes[0].generate(110) + 
self.generate(self.nodes[0], 110) self.sync_all() self.test_change_remains_change(self.nodes[1]) reset_balance(self.nodes[1], self.nodes[0].getnewaddress()) @@ -171,7 +171,7 @@ def test_sending_from_reused_address_without_avoid_reuse(self): retaddr = self.nodes[0].getnewaddress() self.nodes[0].sendtoaddress(fundaddr, 10) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # listunspent should show 1 single, unused 10 btc output @@ -182,7 +182,7 @@ def test_sending_from_reused_address_without_avoid_reuse(self): assert("used" not in self.nodes[0].getbalances()["mine"]) self.nodes[1].sendtoaddress(retaddr, 5) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # listunspent should show 1 single, unused 5 btc output @@ -191,7 +191,7 @@ def test_sending_from_reused_address_without_avoid_reuse(self): assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5}) self.nodes[0].sendtoaddress(fundaddr, 10) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10) @@ -225,7 +225,7 @@ def test_sending_from_reused_address_fails(self): retaddr = self.nodes[0].getnewaddress() self.nodes[0].sendtoaddress(fundaddr, 10) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # listunspent should show 1 single, unused 10 btc output @@ -234,7 +234,7 @@ def test_sending_from_reused_address_fails(self): assert_balances(self.nodes[1], mine={"used": 0, "trusted": 10}) self.nodes[1].sendtoaddress(retaddr, 5) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # listunspent should show 1 single, unused 5 btc output @@ -244,7 +244,7 @@ def test_sending_from_reused_address_fails(self): if not self.options.descriptors: self.nodes[0].sendtoaddress(fundaddr, 10) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10) @@ -287,7 +287,7 @@ def test_getbalances_used(self): for _ in range(101): self.nodes[0].sendtoaddress(new_addr, 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # send transaction that should not use all the available outputs @@ -319,7 +319,7 @@ def test_full_destination_group_is_preferred(self): for _ in range(101): self.nodes[0].sendtoaddress(new_addr, 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # Sending a transaction that is smaller than each one of the @@ -348,7 +348,7 @@ def test_all_destination_groups_are_used(self): for _ in range(202): self.nodes[0].sendtoaddress(new_addr, 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # Sending a transaction that needs to use the full groups diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py index cdb772b9ca209..310b56de95a08 100755 --- a/test/functional/wallet_backup.py +++ b/test/functional/wallet_backup.py @@ -84,7 +84,7 @@ def do_one_round(self): # Have the miner (node3) mine a block. # Must sync mempools before mining. self.sync_mempools() - self.nodes[3].generate(1) + self.generate(self.nodes[3], 1) self.sync_blocks() # As above, this mirrors the original bash test. 
@@ -141,13 +141,13 @@ def init_three(self): def run_test(self): self.log.info("Generating initial blockchain") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_blocks() - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_blocks() - self.nodes[3].generate(COINBASE_MATURITY) + self.generate(self.nodes[3], COINBASE_MATURITY) self.sync_blocks() assert_equal(self.nodes[0].getbalance(), 500) @@ -176,7 +176,7 @@ def run_test(self): self.do_one_round() # Generate 101 more blocks, so any fees paid mature - self.nodes[3].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[3], COINBASE_MATURITY + 1) self.sync_all() balance0 = self.nodes[0].getbalance() diff --git a/test/functional/wallet_balance.py b/test/functional/wallet_balance.py index e1976568fb138..2a6c4ba41ab18 100755 --- a/test/functional/wallet_balance.py +++ b/test/functional/wallet_balance.py @@ -70,10 +70,10 @@ def run_test(self): assert 'watchonly' not in self.nodes[1].getbalances() self.log.info("Mining blocks ...") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() - self.nodes[1].generate(1) - self.nodes[1].generatetoaddress(COINBASE_MATURITY + 1, ADDRESS_WATCHONLY) + self.generate(self.nodes[1], 1) + self.generatetoaddress(self.nodes[1], COINBASE_MATURITY + 1, ADDRESS_WATCHONLY) self.sync_all() if not self.options.descriptors: @@ -200,7 +200,7 @@ def test_balances(*, fee_node_1=0): self.log.info("Test getbalance and getbalances.mine.untrusted_pending with conflicted unconfirmed inputs") # test_balances(fee_node_1=Decimal('0.02')) # disabled, no RBF in Dash - self.nodes[1].generatetoaddress(1, ADDRESS_WATCHONLY) + self.generatetoaddress(self.nodes[1], 1, ADDRESS_WATCHONLY) self.sync_all() # balances are correct after the transactions are confirmed @@ -214,7 +214,7 @@ def test_balances(*, fee_node_1=0): # Send total balance away from node 1 txs = create_transactions(self.nodes[1], self.nodes[0].getnewaddress(), Decimal('29.98'), [Decimal('0.01')]) self.nodes[1].sendrawtransaction(txs[0]['hex']) - self.nodes[1].generatetoaddress(2, ADDRESS_WATCHONLY) + self.generatetoaddress(self.nodes[1], 2, ADDRESS_WATCHONLY) self.sync_all() # getbalance with a minconf incorrectly excludes coins that have been spent more recently than the minconf blocks ago @@ -261,7 +261,7 @@ def test_balances(*, fee_node_1=0): self.nodes[1].sendrawtransaction(hexstring=tx_replace, maxfeerate=0) # Now confirm tx_replace - block_reorg = self.nodes[1].generatetoaddress(1, ADDRESS_WATCHONLY)[0] + block_reorg = self.generatetoaddress(self.nodes[1], 1, ADDRESS_WATCHONLY)[0] self.sync_all() assert_equal(self.nodes[0].getbalance(minconf=0), total_amount) @@ -269,7 +269,7 @@ def test_balances(*, fee_node_1=0): self.nodes[0].invalidateblock(block_reorg) self.nodes[1].invalidateblock(block_reorg) assert_equal(self.nodes[0].getbalance(minconf=0), 0) # wallet txs not in the mempool are untrusted - self.nodes[0].generatetoaddress(1, ADDRESS_WATCHONLY) + self.generatetoaddress(self.nodes[0], 1, ADDRESS_WATCHONLY) assert_equal(self.nodes[0].getbalance(minconf=0), 0) # wallet txs not in the mempool are untrusted # Now confirm tx_orig @@ -278,7 +278,7 @@ def test_balances(*, fee_node_1=0): self.connect_nodes(1, 0) self.sync_blocks() self.nodes[1].sendrawtransaction(tx_orig) - self.nodes[1].generatetoaddress(1, ADDRESS_WATCHONLY) + self.generatetoaddress(self.nodes[1], 1, ADDRESS_WATCHONLY) self.sync_all() 
assert_equal(self.nodes[0].getbalance(minconf=0), total_amount + 1) # The reorg recovered our fee of 1 coin diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py index f9d82ed4a86ee..3dac8aacf3b60 100755 --- a/test/functional/wallet_basic.py +++ b/test/functional/wallet_basic.py @@ -66,14 +66,14 @@ def run_test(self): self.log.info("Mining blocks...") - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) walletinfo = self.nodes[0].getwalletinfo() assert_equal(walletinfo['immature_balance'], 500) assert_equal(walletinfo['balance'], 0) self.sync_all(self.nodes[0:3]) - self.nodes[1].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[1], COINBASE_MATURITY + 1) self.sync_all(self.nodes[0:3]) assert_equal(self.nodes[0].getbalance(), 500) @@ -124,7 +124,7 @@ def run_test(self): assert_equal(walletinfo['immature_balance'], 0) # Have node0 mine a block, thus it will collect its own fee. - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all(self.nodes[0:3]) # Exercise locking of unspent outputs @@ -168,7 +168,7 @@ def run_test(self): assert_equal(len(self.nodes[1].listlockunspent()), 0) # Have node1 generate 100 blocks (so node0 can recover the fee) - self.nodes[1].generate(COINBASE_MATURITY) + self.generate(self.nodes[1], COINBASE_MATURITY) self.sync_all(self.nodes[0:3]) # node0 should end up with 1000 DASH in block rewards plus fees, but @@ -200,7 +200,7 @@ def run_test(self): self.nodes[1].sendrawtransaction(hexstring=txns_to_send[1]["hex"], maxfeerate=0) # Have node1 mine a block to confirm transactions: - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_all(self.nodes[0:3]) assert_equal(self.nodes[0].getbalance(), 0) @@ -215,14 +215,14 @@ def run_test(self): fee_per_byte = Decimal('0.00001') / 1000 self.nodes[2].settxfee(fee_per_byte * 1000) txid = self.nodes[2].sendtoaddress(address, 100, "", "", False) - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all(self.nodes[0:3]) node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('900') - totalfee, fee_per_byte, count_bytes(self.nodes[2].gettransaction(txid)['hex'])) assert_equal(self.nodes[0].getbalance(), Decimal('100')) # Send 100 DASH with subtract fee from amount txid = self.nodes[2].sendtoaddress(address, 100, "", "", True) - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all(self.nodes[0:3]) node_2_bal -= Decimal('100') assert_equal(self.nodes[2].getbalance(), node_2_bal) @@ -232,7 +232,7 @@ def run_test(self): # Sendmany 100 DASH txid = self.nodes[2].sendmany('', {address: 100}, 0, False, "", []) - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all(self.nodes[0:3]) node_0_bal += Decimal('100') node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('100'), fee_per_byte, count_bytes(self.nodes[2].gettransaction(txid)['hex'])) @@ -240,7 +240,7 @@ def run_test(self): # Sendmany 100 DASH with subtract fee from amount txid = self.nodes[2].sendmany('', {address: 100}, 0, False, "", [address]) - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all(self.nodes[0:3]) node_2_bal -= Decimal('100') assert_equal(self.nodes[2].getbalance(), node_2_bal) @@ -253,7 +253,7 @@ def run_test(self): # Test passing fee_rate as a string txid = self.nodes[2].sendmany(amounts={address: 10}, fee_rate=str(fee_rate_sat_vb)) - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all(self.nodes[0:3]) balance = self.nodes[2].getbalance() 
node_2_bal = self.check_fee_amount(balance, node_2_bal - Decimal('10'), explicit_fee_rate_btc_kvb, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) @@ -264,7 +264,7 @@ def run_test(self): # Test passing fee_rate as an integer amount = Decimal("0.0001") txid = self.nodes[2].sendmany(amounts={address: amount}, fee_rate=fee_rate_sat_vb) - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) self.sync_all(self.nodes[0:3]) balance = self.nodes[2].getbalance() node_2_bal = self.check_fee_amount(balance, node_2_bal - amount, explicit_fee_rate_btc_kvb, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) @@ -326,7 +326,7 @@ def run_test(self): self.nodes[1].sendrawtransaction(signed_raw_tx['hex']) self.sync_all() - self.nodes[1].generate(1) # mine a block + self.generate(self.nodes[1], 1) # mine a block self.sync_all() unspent_txs = self.nodes[0].listunspent() # zero value tx must be in listunspents output @@ -349,13 +349,13 @@ def run_test(self): txid_not_broadcast = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2) tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast) - self.nodes[1].generate(1) # mine a block, tx should not be in there + self.generate(self.nodes[1], 1) # mine a block, tx should not be in there self.sync_all(self.nodes[0:3]) assert_equal(self.nodes[2].getbalance(), node_2_bal) # should not be changed because tx was not broadcasted # now broadcast from another node, mine a block, sync, and check the balance self.nodes[1].sendrawtransaction(tx_obj_not_broadcast['hex']) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_all(self.nodes[0:3]) node_2_bal += 2 tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast) @@ -374,7 +374,7 @@ def run_test(self): self.connect_nodes(0, 2) self.sync_blocks(self.nodes[0:3]) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(self.nodes[0:3]) node_2_bal += 2 @@ -403,7 +403,7 @@ def run_test(self): assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4") # This will raise an exception since generate does not accept a string - assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2") + assert_raises_rpc_error(-1, "not an integer", self.generate, self.nodes[0], "2") if not self.options.descriptors: @@ -436,7 +436,7 @@ def run_test(self): # 1. 
Send some coins to generate new UTXO address_to_import = self.nodes[2].getnewaddress() txid = self.nodes[0].sendtoaddress(address_to_import, 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all(self.nodes[0:3]) self.log.info("Test sendtoaddress with fee_rate param (explicit fee rate in duff/B)") @@ -449,7 +449,7 @@ def run_test(self): # Test passing fee_rate as an integer txid = self.nodes[2].sendtoaddress(address=address, amount=amount, fee_rate=fee_rate_sat_vb) tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex']) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all(self.nodes[0:3]) postbalance = self.nodes[2].getbalance() fee = prebalance - postbalance - Decimal(amount) @@ -462,7 +462,7 @@ def run_test(self): # Test passing fee_rate as a string txid = self.nodes[2].sendtoaddress(address=address, amount=amount, fee_rate=str(fee_rate_sat_vb)) tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex']) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all(self.nodes[0:3]) postbalance = self.nodes[2].getbalance() fee = prebalance - postbalance - amount @@ -526,7 +526,7 @@ def run_test(self): # Mine a block from node0 to an address from node1 coinbase_addr = self.nodes[1].getnewaddress() - block_hash = self.nodes[0].generatetoaddress(1, coinbase_addr)[0] + block_hash = self.generatetoaddress(self.nodes[0], 1, coinbase_addr)[0] coinbase_txid = self.nodes[0].getblock(block_hash)['tx'][0] self.sync_all(self.nodes[0:3]) @@ -535,7 +535,7 @@ def run_test(self): # check if wallet or blockchain maintenance changes the balance self.sync_all(self.nodes[0:3]) - blocks = self.nodes[0].generate(2) + blocks = self.generate(self.nodes[0], 2) self.sync_all(self.nodes[0:3]) balance_nodes = [self.nodes[i].getbalance() for i in range(3)] block_count = self.nodes[0].getblockcount() @@ -583,13 +583,13 @@ def run_test(self): # Get all non-zero utxos together chain_addrs = [self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()] singletxid = self.nodes[0].sendtoaddress(chain_addrs[0], self.nodes[0].getbalance(), "", "", True) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) node0_balance = self.nodes[0].getbalance() # Split into two chains rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {chain_addrs[0]: node0_balance // 2 - Decimal('0.01'), chain_addrs[1]: node0_balance // 2 - Decimal('0.01')}) signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx) singletxid = self.nodes[0].sendrawtransaction(hexstring=signedtx["hex"], maxfeerate=0) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # Make a long chain of unconfirmed payments without hitting mempool limit # Each tx we make leaves only one output of change on a chain 1 longer @@ -640,7 +640,7 @@ def run_test(self): assert not address_info["ischange"] # Test getaddressinfo 'ischange' field on change address. 
- self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) destination = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(destination, 0.123) tx = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(txid)['hex']) diff --git a/test/functional/wallet_change_address.py b/test/functional/wallet_change_address.py index c396de8932dca..a9a4a8696de00 100755 --- a/test/functional/wallet_change_address.py +++ b/test/functional/wallet_change_address.py @@ -48,7 +48,7 @@ def assert_change_pos(self, wallet, tx, pos): def run_test(self): self.log.info("Setting up") # Mine some coins - self.nodes[0].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[0], COINBASE_MATURITY + 1) # Get some addresses from the two nodes addr1 = [self.nodes[1].getnewaddress() for _ in range(3)] @@ -58,7 +58,7 @@ def run_test(self): # Send 1 + 0.5 coin to each address [self.nodes[0].sendtoaddress(addr, 10) for addr in addrs] [self.nodes[0].sendtoaddress(addr, 5) for addr in addrs] - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) for i in range(20): for n in [1, 2]: diff --git a/test/functional/wallet_coinbase_category.py b/test/functional/wallet_coinbase_category.py index 92c99e4ea26f4..ad66ee8d17b2c 100755 --- a/test/functional/wallet_coinbase_category.py +++ b/test/functional/wallet_coinbase_category.py @@ -34,7 +34,7 @@ def assert_category(self, category, address, txid, skip): def run_test(self): # Generate one block to an address address = self.nodes[0].getnewaddress() - self.nodes[0].generatetoaddress(1, address) + self.generatetoaddress(self.nodes[0], 1, address) hash = self.nodes[0].getbestblockhash() txid = self.nodes[0].getblock(hash)["tx"][0] @@ -42,12 +42,12 @@ def run_test(self): self.assert_category("immature", address, txid, 0) # Mine another 99 blocks on top - self.nodes[0].generate(99) + self.generate(self.nodes[0], 99) # Coinbase transaction is still immature after 100 confirmations self.assert_category("immature", address, txid, 99) # Mine one more block - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # Coinbase transaction is now matured, so category is "generate" self.assert_category("generate", address, txid, 100) diff --git a/test/functional/wallet_create_tx.py b/test/functional/wallet_create_tx.py index 730e3140c05df..bd880bb40126a 100755 --- a/test/functional/wallet_create_tx.py +++ b/test/functional/wallet_create_tx.py @@ -30,7 +30,7 @@ def test_anti_fee_sniping(self): assert_equal(tx['locktime'], 0) self.log.info('Check that anti-fee-sniping is enabled when we mine a recent block') - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) tx = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(txid)['hex']) assert 0 < tx['locktime'] <= 201 diff --git a/test/functional/wallet_createwallet.py b/test/functional/wallet_createwallet.py index d310b2d8a7530..8f50d180e4388 100755 --- a/test/functional/wallet_createwallet.py +++ b/test/functional/wallet_createwallet.py @@ -24,7 +24,7 @@ def skip_test_if_missing_module(self): def run_test(self): node = self.nodes[0] - node.generate(1) # Leave IBD for sethdseed + self.generate(node, 1) # Leave IBD for sethdseed self.nodes[0].createwallet(wallet_name='w0') w0 = node.get_wallet_rpc('w0') diff --git a/test/functional/wallet_disable.py b/test/functional/wallet_disable.py index 3ebc5dd0b6c7d..de8cd0af0b242 100755 --- a/test/functional/wallet_disable.py +++ b/test/functional/wallet_disable.py @@ -28,8 +28,8 @@ def 
run_test (self): # Checking mining to an address without a wallet. Generating to a valid address should succeed # but generating to an invalid address will fail. - self.nodes[0].generatetoaddress(1, 'ycwedq2f3sz2Yf9JqZsBCQPxp18WU3Hp4J') - assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].generatetoaddress, 1, '7TSBtVu959hGEGPKyHjJz9k55RpWrPffXz') + self.generatetoaddress(self.nodes[0], 1, 'ycwedq2f3sz2Yf9JqZsBCQPxp18WU3Hp4J') + assert_raises_rpc_error(-5, "Invalid address", self.generatetoaddress, self.nodes[0], 1, '7TSBtVu959hGEGPKyHjJz9k55RpWrPffXz') if __name__ == '__main__': DisableWalletTest ().main () diff --git a/test/functional/wallet_dump.py b/test/functional/wallet_dump.py index fe585b1f9ba00..947b8f1cf83d1 100755 --- a/test/functional/wallet_dump.py +++ b/test/functional/wallet_dump.py @@ -120,7 +120,7 @@ def run_test(self): self.log.info('Mine a block one second before the wallet is dumped') dump_time = int(time.time()) self.nodes[0].setmocktime(dump_time - 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.nodes[0].setmocktime(dump_time) dump_time_str = '# * Created on {}Z'.format( datetime.datetime.fromtimestamp( diff --git a/test/functional/wallet_fallbackfee.py b/test/functional/wallet_fallbackfee.py index b28f3ecebc4dc..674c37dc73c53 100755 --- a/test/functional/wallet_fallbackfee.py +++ b/test/functional/wallet_fallbackfee.py @@ -17,7 +17,7 @@ def skip_test_if_missing_module(self): self.skip_if_no_wallet() def run_test(self): - self.nodes[0].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[0], COINBASE_MATURITY + 1) # sending a transaction without fee estimations must be possible by default on regtest self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) diff --git a/test/functional/wallet_groups.py b/test/functional/wallet_groups.py index d30a7c0a58156..c01f52de17210 100755 --- a/test/functional/wallet_groups.py +++ b/test/functional/wallet_groups.py @@ -34,7 +34,7 @@ def skip_test_if_missing_module(self): def run_test(self): self.log.info("Setting up") # Mine some coins - self.nodes[0].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[0], COINBASE_MATURITY + 1) # Get some addresses from the two nodes addr1 = [self.nodes[1].getnewaddress() for _ in range(3)] @@ -45,7 +45,7 @@ def run_test(self): [self.nodes[0].sendtoaddress(addr, 1.0) for addr in addrs] [self.nodes[0].sendtoaddress(addr, 0.5) for addr in addrs] - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # For each node, send 0.2 coins back to 0; @@ -77,7 +77,7 @@ def run_test(self): self.log.info("Test avoiding partial spends if warranted, even if avoidpartialspends is disabled") self.sync_all() - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # Nodes 1-2 now have confirmed UTXOs (letters denote destinations): # Node #1: Node #2: # - A 1.0 - D0 1.0 @@ -113,7 +113,7 @@ def run_test(self): addr_aps = self.nodes[3].getnewaddress() self.nodes[0].sendtoaddress(addr_aps, 1.0) self.nodes[0].sendtoaddress(addr_aps, 1.0) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() with self.nodes[3].assert_debug_log(['Fee non-grouped = 225, grouped = 372, using grouped']): txid4 = self.nodes[3].sendtoaddress(self.nodes[0].getnewaddress(), 0.1) @@ -125,7 +125,7 @@ def run_test(self): addr_aps2 = self.nodes[3].getnewaddress() [self.nodes[0].sendtoaddress(addr_aps2, 1.0) for _ in range(5)] - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() with self.nodes[3].assert_debug_log(['Fee 
non-grouped = 519, grouped = 813, using non-grouped']): txid5 = self.nodes[3].sendtoaddress(self.nodes[0].getnewaddress(), 2.95) @@ -139,7 +139,7 @@ def run_test(self): self.log.info("Test wallet option maxapsfee threshold from non-grouped to grouped") addr_aps3 = self.nodes[4].getnewaddress() [self.nodes[0].sendtoaddress(addr_aps3, 1.0) for _ in range(5)] - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() with self.nodes[4].assert_debug_log(['Fee non-grouped = 519, grouped = 813, using grouped']): txid6 = self.nodes[4].sendtoaddress(self.nodes[0].getnewaddress(), 2.95) @@ -151,7 +151,7 @@ def run_test(self): # Empty out node2's wallet self.nodes[2].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=self.nodes[2].getbalance(), subtractfeefromamount=True) self.sync_all() - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.log.info("Fill a wallet with 10,000 outputs corresponding to the same scriptPubKey") for _ in range(5): @@ -162,7 +162,7 @@ def run_test(self): funded_tx = self.nodes[0].fundrawtransaction(tx.serialize().hex()) signed_tx = self.nodes[0].signrawtransactionwithwallet(funded_tx['hex']) self.nodes[0].sendrawtransaction(signed_tx['hex']) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # Check that we can create a transaction that only requires ~100 of our diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py index a9b7e052e878f..7f191aa4d3fdc 100755 --- a/test/functional/wallet_hd.py +++ b/test/functional/wallet_hd.py @@ -55,7 +55,7 @@ def run_test(self): # Derive some HD addresses and remember the last # Also send funds to each add - self.nodes[0].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[0], COINBASE_MATURITY + 1) hd_add = None NUM_HD_ADDS = 10 for i in range(1, NUM_HD_ADDS + 1): @@ -64,9 +64,9 @@ def run_test(self): assert_equal(hd_info["hdkeypath"], "m/44'/1'/0'/0/" + str(i)) assert_equal(hd_info["hdmasterfingerprint"], hd_fingerprint) self.nodes[0].sendtoaddress(hd_add, 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.nodes[0].sendtoaddress(non_hd_add, 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # create an internal key (again) change_addr = self.nodes[1].getrawchangeaddress() @@ -186,7 +186,7 @@ def run_test(self): assert_raises_rpc_error(-5, "Already have this key", wallet_no_seed.sethdseed, False, non_hd_key) self.log.info('Test sethdseed restoring with keys outside of the initial keypool') - self.nodes[0].generate(10) + self.generate(self.nodes[0], 10) # Restart node 1 with keypool of 3 and a different wallet self.nodes[1].createwallet(wallet_name='origin', blank=True) self.restart_node(1, extra_args=['-keypool=3', '-wallet=origin']) @@ -233,7 +233,7 @@ def run_test(self): # The wallet that has set a new seed (restore_rpc) should not detect this transaction. txid = self.nodes[0].sendtoaddress(addr, 1) origin_rpc.sendrawtransaction(self.nodes[0].gettransaction(txid)['hex']) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() origin_rpc.gettransaction(txid) assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore_rpc.gettransaction, txid) @@ -244,7 +244,7 @@ def run_test(self): # The previous transaction (out_of_kp_txid) should still not be detected as a rescan is required. 
txid = self.nodes[0].sendtoaddress(last_addr, 1) origin_rpc.sendrawtransaction(self.nodes[0].gettransaction(txid)['hex']) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() origin_rpc.gettransaction(txid) restore_rpc.gettransaction(txid) diff --git a/test/functional/wallet_import_rescan.py b/test/functional/wallet_import_rescan.py index 4b1969afb2bef..1a47f63263189 100755 --- a/test/functional/wallet_import_rescan.py +++ b/test/functional/wallet_import_rescan.py @@ -166,7 +166,7 @@ def run_test(self): variant.key = self.nodes[1].dumpprivkey(variant.address["address"]) variant.initial_amount = get_rand_amount() variant.initial_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.initial_amount) - self.nodes[0].generate(1) # Generate one block for each send + self.generate(self.nodes[0], 1) # Generate one block for each send variant.confirmation_height = self.nodes[0].getblockcount() variant.timestamp = self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"] self.sync_all() # Conclude sync before calling setmocktime to avoid timeouts @@ -177,7 +177,7 @@ def run_test(self): self.nodes, self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"] + TIMESTAMP_WINDOW + 1, ) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # For each variation of wallet key import, invoke the import RPC and @@ -200,7 +200,7 @@ def run_test(self): for i, variant in enumerate(IMPORT_VARIANTS): variant.sent_amount = get_rand_amount() variant.sent_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.sent_amount) - self.nodes[0].generate(1) # Generate one block for each send + self.generate(self.nodes[0], 1) # Generate one block for each send variant.confirmation_height = self.nodes[0].getblockcount() assert_equal(self.nodes[0].getrawmempool(), []) diff --git a/test/functional/wallet_importdescriptors.py b/test/functional/wallet_importdescriptors.py index 246d7c6b3eb34..ea5ba770484ad 100755 --- a/test/functional/wallet_importdescriptors.py +++ b/test/functional/wallet_importdescriptors.py @@ -426,10 +426,10 @@ def run_test(self): assert_equal(change_addr, '91WxMwg2NHD1PwHChhbAkeCN6nQ8ikdLEx') assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1000) txid = w0.sendtoaddress(addr, 10) - self.nodes[0].generate(6) + self.generate(self.nodes[0], 6) self.sync_all() wmulti_priv.sendtoaddress(w0.getnewaddress(), 8) # uses change 1 - self.nodes[0].generate(6) + self.generate(self.nodes[0], 6) self.sync_all() self.nodes[1].createwallet(wallet_name="wmulti_pub", disable_private_keys=True, blank=True, descriptors=True) @@ -467,7 +467,7 @@ def run_test(self): txid2 = w0.sendtoaddress(addr2, 10) vout2 = find_vout_for_address(self.nodes[0], txid2, addr2) - self.nodes[0].generate(6) + self.generate(self.nodes[0], 6) self.sync_all() assert_equal(wmulti_pub.getbalance(), wmulti_priv.getbalance()) @@ -558,7 +558,7 @@ def run_test(self): addr = multi_priv_big.getnewaddress("") w0.sendtoaddress(addr, 10) - self.nodes[0].generate(6) + self.generate(self.nodes[0], 6) self.sync_all() # It is standard and would relay. 
txid = multi_priv_big.sendtoaddress(w0.getnewaddress(), 10, "", "", diff --git a/test/functional/wallet_importmulti.py b/test/functional/wallet_importmulti.py index 3de9ee36d449c..47b19ecfab995 100755 --- a/test/functional/wallet_importmulti.py +++ b/test/functional/wallet_importmulti.py @@ -61,8 +61,8 @@ def test_importmulti(self, req, success, error_code=None, error_message=None, wa def run_test(self): self.log.info("Mining blocks...") - self.nodes[0].generate(1) - self.nodes[1].generate(1) + self.generate(self.nodes[0], 1) + self.generate(self.nodes[1], 1) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] node0_address1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) @@ -254,9 +254,9 @@ def run_test(self): # P2SH address multisig = get_multisig(self.nodes[0]) - self.nodes[1].generate(COINBASE_MATURITY) + self.generate(self.nodes[1], COINBASE_MATURITY) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh") @@ -274,9 +274,9 @@ def run_test(self): # P2SH + Redeem script multisig = get_multisig(self.nodes[0]) - self.nodes[1].generate(COINBASE_MATURITY) + self.generate(self.nodes[1], COINBASE_MATURITY) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh with respective redeem script") @@ -294,9 +294,9 @@ def run_test(self): # P2SH + Redeem script + Private Keys + !Watchonly multisig = get_multisig(self.nodes[0]) - self.nodes[1].generate(COINBASE_MATURITY) + self.generate(self.nodes[1], COINBASE_MATURITY) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh with respective redeem script and private keys") @@ -319,9 +319,9 @@ def run_test(self): # P2SH + Redeem script + Private Keys + Watchonly multisig = get_multisig(self.nodes[0]) - self.nodes[1].generate(COINBASE_MATURITY) + self.generate(self.nodes[1], COINBASE_MATURITY) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh with respective redeem script and private keys") diff --git a/test/functional/wallet_importprunedfunds.py b/test/functional/wallet_importprunedfunds.py index 2cce6f88d220b..7ecfb408ebb04 100755 --- a/test/functional/wallet_importprunedfunds.py +++ b/test/functional/wallet_importprunedfunds.py @@ -25,7 +25,7 @@ def skip_test_if_missing_module(self): def run_test(self): self.log.info("Mining blocks...") - self.nodes[0].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[0], COINBASE_MATURITY + 1) self.sync_all() @@ -64,17 +64,17 @@ def run_test(self): # Send funds to self txnid1 = self.nodes[0].sendtoaddress(address1, 0.1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex'] proof1 = self.nodes[0].gettxoutproof([txnid1]) txnid2 = self.nodes[0].sendtoaddress(address2, 0.05) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex'] 
proof2 = self.nodes[0].gettxoutproof([txnid2]) txnid3 = self.nodes[0].sendtoaddress(address3, 0.025) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex'] proof3 = self.nodes[0].gettxoutproof([txnid3]) diff --git a/test/functional/wallet_keypool_hd.py b/test/functional/wallet_keypool_hd.py index de3f01650dfc2..7ef2e16401038 100755 --- a/test/functional/wallet_keypool_hd.py +++ b/test/functional/wallet_keypool_hd.py @@ -179,7 +179,7 @@ def run_test(self): w1.walletpassphrase('test', 100) res = w1.sendtoaddress(address=address, amount=0.00010000) - nodes[0].generate(1) + self.generate(nodes[0], 1) destination = addr.pop() # Using a fee rate (10 sat / byte) well above the minimum relay rate diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py index 3eb496da21ffa..67f90b9ce1095 100755 --- a/test/functional/wallet_keypool_topup.py +++ b/test/functional/wallet_keypool_topup.py @@ -32,7 +32,7 @@ def skip_test_if_missing_module(self): def run_test(self): wallet_path = os.path.join(self.nodes[1].datadir, self.chain, "wallets", self.default_wallet_name, self.wallet_data_filename) wallet_backup_path = os.path.join(self.nodes[1].datadir, "wallet.bak") - self.nodes[0].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[0], COINBASE_MATURITY + 1) self.log.info("Make backup of wallet") self.stop_node(1) @@ -58,9 +58,9 @@ def run_test(self): self.log.info("Send funds to wallet") self.nodes[0].sendtoaddress(addr_oldpool, 10) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.nodes[0].sendtoaddress(addr_extpool, 5) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() self.log.info("Restart node with wallet backup") diff --git a/test/functional/wallet_labels.py b/test/functional/wallet_labels.py index f57a16d9c3392..0b8511af038df 100755 --- a/test/functional/wallet_labels.py +++ b/test/functional/wallet_labels.py @@ -32,8 +32,8 @@ def run_test(self): # Note each time we call generate, all generated coins go into # the same address, so we call twice to get two addresses w/500 each - node.generatetoaddress(nblocks=1, address=node.getnewaddress(label='coinbase')) - node.generatetoaddress(nblocks=COINBASE_MATURITY + 1, address=node.getnewaddress(label='coinbase')) + self.generatetoaddress(node, nblocks=1, address=node.getnewaddress(label='coinbase')) + self.generatetoaddress(node, nblocks=COINBASE_MATURITY + 1, address=node.getnewaddress(label='coinbase')) assert_equal(node.getbalance(), 1000) # there should be 2 address groups @@ -67,7 +67,7 @@ def run_test(self): assert_equal(set([a[0] for a in address_groups[0]]), linked_addresses) assert_equal([a[1] for a in address_groups[0]], [0, 0]) - node.generate(1) + self.generate(node, 1) # we want to reset so that the "" label has what's expected. # otherwise we're off by exactly the fee amount as that's mined @@ -97,7 +97,7 @@ def run_test(self): label.verify(node) # Check the amounts received. 
- node.generate(1) + self.generate(node, 1) for label in labels: assert_equal( node.getreceivedbyaddress(label.addresses[0]), amount_to_send) @@ -106,14 +106,14 @@ def run_test(self): for i, label in enumerate(labels): to_label = labels[(i + 1) % len(labels)] node.sendtoaddress(to_label.addresses[0], amount_to_send) - node.generate(1) + self.generate(node, 1) for label in labels: address = node.getnewaddress(label.name) label.add_receive_address(address) label.verify(node) assert_equal(node.getreceivedbylabel(label.name), 2) label.verify(node) - node.generate(COINBASE_MATURITY + 1) + self.generate(node, COINBASE_MATURITY + 1) # Check that setlabel can assign a label to a new unused address. for label in labels: @@ -133,7 +133,7 @@ def run_test(self): label.add_address(multisig_address) label.purpose[multisig_address] = "send" label.verify(node) - node.generate(COINBASE_MATURITY + 1) + self.generate(node, COINBASE_MATURITY + 1) # Check that setlabel can change the label of an address from a # different label. @@ -158,7 +158,7 @@ def run_test(self): for l in VALID: ad = VALID[l] wallet_watch_only.importaddress(label=l, rescan=False, address=ad) - node.generatetoaddress(1, ad) + self.generatetoaddress(node, 1, ad) assert_equal(wallet_watch_only.getaddressesbylabel(label=l), {ad: {'purpose': 'receive'}}) assert_equal(wallet_watch_only.getreceivedbylabel(label=l), 0) for l in INVALID: diff --git a/test/functional/wallet_listreceivedby.py b/test/functional/wallet_listreceivedby.py index 1b169832b9225..27950fb4cb1ea 100755 --- a/test/functional/wallet_listreceivedby.py +++ b/test/functional/wallet_listreceivedby.py @@ -24,7 +24,7 @@ def skip_test_if_missing_module(self): def run_test(self): # Generate block to get out of IBD - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() # save the number of coinbase reward addresses so far @@ -43,7 +43,7 @@ def run_test(self): {}, True) # Bury Tx under 10 block so it will be returned by listreceivedbyaddress - self.nodes[1].generate(10) + self.generate(self.nodes[1], 10) self.sync_all() assert_array_result(self.nodes[1].listreceivedbyaddress(), {"address": addr}, @@ -78,7 +78,7 @@ def run_test(self): assert_equal(len(res), 2 + num_cb_reward_addresses) # Right now 2 entries other_addr = self.nodes[1].getnewaddress() txid2 = self.nodes[0].sendtoaddress(other_addr, 0.1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_all() # Same test as above should still pass expected = {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 11, "txids": [txid, ]} @@ -115,7 +115,7 @@ def run_test(self): assert_equal(balance, Decimal("0.1")) # Bury Tx under 10 block so it will be returned by the default getreceivedbyaddress - self.nodes[1].generate(10) + self.generate(self.nodes[1], 10) self.sync_all() balance = self.nodes[1].getreceivedbyaddress(addr) assert_equal(balance, Decimal("0.1")) @@ -144,7 +144,7 @@ def run_test(self): balance = self.nodes[1].getreceivedbylabel(label) assert_equal(balance, balance_by_label) - self.nodes[1].generate(10) + self.generate(self.nodes[1], 10) self.sync_all() # listreceivedbylabel should return updated received list assert_array_result(self.nodes[1].listreceivedbylabel(), diff --git a/test/functional/wallet_listsinceblock.py b/test/functional/wallet_listsinceblock.py index 24c9f9eef56fd..71d738b2400c2 100755 --- a/test/functional/wallet_listsinceblock.py +++ b/test/functional/wallet_listsinceblock.py @@ -30,7 +30,7 @@ def run_test(self): # All nodes are in IBD from 
genesis, so they'll need the miner (node2) to be an outbound connection, or have # only one connection. (See fPreferredDownload in net_processing) self.connect_nodes(1, 2) - self.nodes[2].generate(COINBASE_MATURITY + 1) + self.generate(self.nodes[2], COINBASE_MATURITY + 1) self.sync_all() self.test_no_blockhash() @@ -44,7 +44,7 @@ def run_test(self): def test_no_blockhash(self): self.log.info("Test no blockhash") txid = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1) - blockhash, = self.nodes[2].generate(1) + blockhash, = self.generate(self.nodes[2], 1) blockheight = self.nodes[2].getblockheader(blockhash)['height'] self.sync_all() @@ -86,7 +86,7 @@ def test_targetconfirmations(self): a -8 invalid parameter error is thrown. ''' self.log.info("Test target_confirmations") - blockhash, = self.nodes[2].generate(1) + blockhash, = self.generate(self.nodes[2], 1) blockheight = self.nodes[2].getblockheader(blockhash)['height'] self.sync_all() @@ -136,8 +136,8 @@ def test_reorg(self): senttx = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1) # generate on both sides - nodes1_last_blockhash = self.nodes[1].generate(6)[-1] - nodes2_first_blockhash = self.nodes[2].generate(7)[0] + nodes1_last_blockhash = self.generate(self.nodes[1], 6)[-1] + nodes2_first_blockhash = self.generate(self.nodes[2], 7)[0] self.log.debug("nodes[1] last blockhash = {}".format(nodes1_last_blockhash)) self.log.debug("nodes[2] first blockhash = {}".format(nodes2_first_blockhash)) @@ -191,7 +191,7 @@ def test_double_spend(self): privkey = bytes_to_wif(eckey.get_bytes()) address = key_to_p2pkh(eckey.get_pubkey().get_bytes()) self.nodes[2].sendtoaddress(address, 10) - self.nodes[2].generate(6) + self.generate(self.nodes[2], 6) self.sync_all() self.nodes[2].importprivkey(privkey) utxos = self.nodes[2].listunspent() @@ -225,8 +225,8 @@ def test_double_spend(self): self.nodes[2].createrawtransaction(utxo_dicts, recipient_dict2))['hex']) # generate on both sides - lastblockhash = self.nodes[1].generate(3)[2] - self.nodes[2].generate(4) + lastblockhash = self.generate(self.nodes[1], 3)[2] + self.generate(self.nodes[2], 4) self.join_network() @@ -297,7 +297,7 @@ def test_double_send(self): txid1 = self.nodes[1].sendrawtransaction(signedtx) # generate bb1-bb2 on right side - self.nodes[2].generate(2) + self.generate(self.nodes[2], 2) # send from nodes[2]; this will end up in bb3 txid2 = self.nodes[2].sendrawtransaction(signedtx) @@ -305,8 +305,8 @@ def test_double_send(self): assert_equal(txid1, txid2) # generate on both sides - lastblockhash = self.nodes[1].generate(3)[2] - self.nodes[2].generate(2) + lastblockhash = self.generate(self.nodes[1], 3)[2] + self.generate(self.nodes[2], 2) self.join_network() @@ -360,7 +360,7 @@ def double_spends_filtered(self): double_signedtx = spending_node.signrawtransactionwithwallet(double_rawtx) dbl_tx_id = double_spending_node.sendrawtransaction(double_signedtx["hex"]) double_tx = double_spending_node.getrawtransaction(dbl_tx_id, 1) - lastblockhash = double_spending_node.generate(1)[0] + lastblockhash = self.generate(double_spending_node, 1)[0] self.reconnect_isolated_node(3, 2) self.sync_all() @@ -379,7 +379,7 @@ def double_spends_filtered(self): assert_equal(original_found, True) assert_equal(double_found, True) - lastblockhash = spending_node.generate(1)[0] + lastblockhash = self.generate(spending_node, 1)[0] # check that neither transaction exists block_hash = spending_node.listsinceblock(lastblockhash) diff --git a/test/functional/wallet_listtransactions.py 
b/test/functional/wallet_listtransactions.py index dc0106a4831fa..a8a0a70170b48 100755 --- a/test/functional/wallet_listtransactions.py +++ b/test/functional/wallet_listtransactions.py @@ -32,7 +32,7 @@ def run_test(self): {"txid": txid}, {"category": "receive", "amount": Decimal("0.1"), "confirmations": 0}) self.log.info("Test confirmations change after mining a block") - blockhash = self.nodes[0].generate(1)[0] + blockhash = self.generate(self.nodes[0], 1)[0] blockheight = self.nodes[0].getblockheader(blockhash)['height'] self.sync_all() assert_array_result(self.nodes[0].listtransactions(), @@ -90,7 +90,7 @@ def run_test(self): multisig = self.nodes[1].createmultisig(1, [pubkey]) self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True) txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.sync_all() assert_equal(len(self.nodes[0].listtransactions(label="watchonly", include_watchonly=True)), 1) assert_equal(len(self.nodes[0].listtransactions(dummy="watchonly", include_watchonly=True)), 1) diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py index e2b6650c60079..42ce63d36397e 100755 --- a/test/functional/wallet_multiwallet.py +++ b/test/functional/wallet_multiwallet.py @@ -183,7 +183,7 @@ def wallet_file(name): self.nodes[0].createwallet("w5") assert_equal(set(node.listwallets()), {"w4", "w5"}) w5 = wallet("w5") - node.generatetoaddress(nblocks=1, address=w5.getnewaddress()) + self.generatetoaddress(node, nblocks=1, address=w5.getnewaddress()) # now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded os.rename(wallet_dir2, wallet_dir()) @@ -215,7 +215,7 @@ def wallet_file(name): wallet_bad = wallet("bad") # check wallet names and balances - node.generatetoaddress(nblocks=1, address=wallets[0].getnewaddress()) + self.generatetoaddress(node, nblocks=1, address=wallets[0].getnewaddress()) for wallet_name, wallet in zip(wallet_names, wallets): info = wallet.getwalletinfo() assert_equal(info['immature_balance'], 500 if wallet is wallets[0] else 0) @@ -228,7 +228,7 @@ def wallet_file(name): assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo) w1, w2, w3, w4, *_ = wallets - node.generatetoaddress(nblocks=COINBASE_MATURITY + 1, address=w1.getnewaddress()) + self.generatetoaddress(node, nblocks=COINBASE_MATURITY + 1, address=w1.getnewaddress()) assert_equal(w1.getbalance(), 1000) assert_equal(w2.getbalance(), 0) assert_equal(w3.getbalance(), 0) @@ -237,7 +237,7 @@ def wallet_file(name): w1.sendtoaddress(w2.getnewaddress(), 1) w1.sendtoaddress(w3.getnewaddress(), 2) w1.sendtoaddress(w4.getnewaddress(), 3) - node.generatetoaddress(nblocks=1, address=w1.getnewaddress()) + self.generatetoaddress(node, nblocks=1, address=w1.getnewaddress()) assert_equal(w2.getbalance(), 1) assert_equal(w3.getbalance(), 2) assert_equal(w4.getbalance(), 3) diff --git a/test/functional/wallet_orphanedreward.py b/test/functional/wallet_orphanedreward.py index 0c7fd58e1798b..3e4922022e472 100755 --- a/test/functional/wallet_orphanedreward.py +++ b/test/functional/wallet_orphanedreward.py @@ -19,19 +19,19 @@ def skip_test_if_missing_module(self): def run_test(self): # Generate some blocks and obtain some coins on node 0. We send # some balance to node 1, which will hold it as a single coin. 
- self.nodes[0].generate(150) + self.generate(self.nodes[0], 150) self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # Get a block reward with node 1 and remember the block so we can orphan # it later. self.sync_blocks() - blk = self.nodes[1].generate(1)[0] + blk = self.generate(self.nodes[1], 1)[0] self.sync_blocks() # Let the block reward mature and send coins including both # the existing balance and the block reward. - self.nodes[0].generate(150) + self.generate(self.nodes[0], 150) self.sync_blocks() assert_equal(self.nodes[1].getbalance(), Decimal("474.28571429")) txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 30) @@ -39,7 +39,7 @@ def run_test(self): # Orphan the block reward and make sure that the original coins # from the wallet can still be spent. self.nodes[0].invalidateblock(blk) - self.nodes[0].generate(152) + self.generate(self.nodes[0], 152) self.sync_blocks() # Without the following abandontransaction call, the coins are # not considered available yet. diff --git a/test/functional/wallet_reorgsrestore.py b/test/functional/wallet_reorgsrestore.py index 9a5866a36112a..1f452f8337883 100755 --- a/test/functional/wallet_reorgsrestore.py +++ b/test/functional/wallet_reorgsrestore.py @@ -32,7 +32,7 @@ def skip_test_if_missing_module(self): def run_test(self): # Send a tx from which to conflict outputs later txid_conflict_from = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() # Disconnect node1 from others to reorg its chain later @@ -43,7 +43,7 @@ def run_test(self): # Send a tx to be unconfirmed later txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) tx = self.nodes[0].gettransaction(txid) - self.nodes[0].generate(4) + self.generate(self.nodes[0], 4) tx_before_reorg = self.nodes[0].gettransaction(txid) assert_equal(tx_before_reorg["confirmations"], 4) @@ -62,9 +62,9 @@ def run_test(self): conflicting = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs_2)) conflicted_txid = self.nodes[0].sendrawtransaction(conflicted["hex"]) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) conflicting_txid = self.nodes[2].sendrawtransaction(conflicting["hex"]) - self.nodes[2].generate(9) + self.generate(self.nodes[2], 9) # Reconnect node0 and node2 and check that conflicted_txid is effectively conflicted self.connect_nodes(0, 2) @@ -78,11 +78,11 @@ def run_test(self): self.restart_node(0) # The block chain re-orgs and the tx is included in a different block - self.nodes[1].generate(9) + self.generate(self.nodes[1], 9) self.nodes[1].sendrawtransaction(tx["hex"]) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) self.nodes[1].sendrawtransaction(conflicted["hex"]) - self.nodes[1].generate(1) + self.generate(self.nodes[1], 1) # Node0 wallet file is loaded on longest sync'ed node1 self.stop_node(1) diff --git a/test/functional/wallet_send.py b/test/functional/wallet_send.py index 81292298cdca6..1fcd9ba725471 100755 --- a/test/functional/wallet_send.py +++ b/test/functional/wallet_send.py @@ -236,7 +236,7 @@ def run_test(self): assert_equal(res, [{"success": True}, {"success": True}]) w0.sendtoaddress(a2_receive, 10) # fund w3 - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() if not self.options.descriptors: @@ -255,7 +255,7 @@ def run_test(self): assert_equal(res, [{"success": 
True}]) w0.sendtoaddress(a2_receive, 10) # fund w4 - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks() self.log.info("Send to address...") @@ -430,14 +430,14 @@ def run_test(self): assert not res[0]["allowed"] assert_equal(res[0]["reject-reason"], "non-final") # It shouldn't be confirmed in the next block - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 0) # The mempool should allow it now: res = self.nodes[0].testmempoolaccept([hex]) assert res[0]["allowed"] # Don't wait for wallet to add it to the mempool: res = self.nodes[0].sendrawtransaction(hex) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 1) self.sync_all() diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py index 820f73513cac0..36dd258507283 100755 --- a/test/functional/wallet_txn_clone.py +++ b/test/functional/wallet_txn_clone.py @@ -77,7 +77,7 @@ def run_test(self): # Have node0 mine a block, if requested: if (self.options.mine_block): - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(self.nodes[0:2]) tx1 = self.nodes[0].gettransaction(txid1) @@ -103,13 +103,13 @@ def run_test(self): self.nodes[2].sendrawtransaction(node0_tx1["hex"]) txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"]) # ... mine a block... - self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) # Reconnect the split network, and sync chain: self.connect_nodes(1, 2) self.nodes[2].sendrawtransaction(node0_tx2["hex"]) self.nodes[2].sendrawtransaction(tx2["hex"]) - self.nodes[2].generate(1) # Mine another block to make sure we sync + self.generate(self.nodes[2], 1) # Mine another block to make sure we sync self.sync_blocks() # Re-fetch transaction info: diff --git a/test/functional/wallet_txn_doublespend.py b/test/functional/wallet_txn_doublespend.py index 322a4bebb9b6f..107127473a12a 100755 --- a/test/functional/wallet_txn_doublespend.py +++ b/test/functional/wallet_txn_doublespend.py @@ -82,7 +82,7 @@ def run_test(self): # Have node0 mine a block: if (self.options.mine_block): - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) self.sync_blocks(self.nodes[0:2]) tx1 = self.nodes[0].gettransaction(txid1) @@ -111,11 +111,11 @@ def run_test(self): self.nodes[2].sendrawtransaction(fund_bar_tx["hex"]) doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"]) # ... mine a block... 
- self.nodes[2].generate(1) + self.generate(self.nodes[2], 1) # Reconnect the split network, and sync chain: self.connect_nodes(1, 2) - self.nodes[2].generate(1) # Mine another block to make sure we sync + self.generate(self.nodes[2], 1) # Mine another block to make sure we sync self.sync_blocks() assert_equal(self.nodes[0].gettransaction(doublespend_txid)["confirmations"], 2) diff --git a/test/functional/wallet_upgradetohd.py b/test/functional/wallet_upgradetohd.py index b1d09534b87e8..b6dedce0f817e 100755 --- a/test/functional/wallet_upgradetohd.py +++ b/test/functional/wallet_upgradetohd.py @@ -65,7 +65,7 @@ def run_test(self): assert_equal(keypath, "m/44'/1'/0'/1/%d" % i) self.bump_mocktime(1) - node.generate(1) + self.generate(node, 1) self.log.info("Should no longer be able to start it with HD disabled") self.stop_node(0) diff --git a/test/functional/wallet_upgradewallet.py b/test/functional/wallet_upgradewallet.py index f7cf9ee8da5c5..9f40375c91843 100755 --- a/test/functional/wallet_upgradewallet.py +++ b/test/functional/wallet_upgradewallet.py @@ -96,7 +96,7 @@ def test_upgradewallet_error(self, wallet, previous_version, requested_version, assert_equal(wallet.getwalletinfo()["walletversion"], previous_version) def run_test(self): - self.nodes[0].generatetoaddress(COINBASE_MATURITY + 1, self.nodes[0].getnewaddress()) + self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, self.nodes[0].getnewaddress()) self.dumb_sync_blocks() # # Sanity check the test framework: res = self.nodes[0].getblockchaininfo() @@ -108,7 +108,7 @@ def run_test(self): # Send coins to old wallets for later conversion checks. v18_2_wallet = v18_2_node.get_wallet_rpc(self.default_wallet_name) v18_2_address = v18_2_wallet.getnewaddress() - node_master.generatetoaddress(COINBASE_MATURITY + 1, v18_2_address) + self.generatetoaddress(node_master, COINBASE_MATURITY + 1, v18_2_address) self.dumb_sync_blocks() v18_2_balance = v18_2_wallet.getbalance() diff --git a/test/functional/wallet_watchonly.py b/test/functional/wallet_watchonly.py index c3ac93229f844..7532cf9a5b2bb 100755 --- a/test/functional/wallet_watchonly.py +++ b/test/functional/wallet_watchonly.py @@ -38,11 +38,11 @@ def run_test(self): wo_wallet.importpubkey(pubkey=def_wallet.getaddressinfo(wo_change)['pubkey']) # generate some btc for testing - node.generatetoaddress(COINBASE_MATURITY + 1, a1) + self.generatetoaddress(node, COINBASE_MATURITY + 1, a1) # send 1 btc to our watch-only address txid = def_wallet.sendtoaddress(wo_addr, 1) - self.nodes[0].generate(1) + self.generate(self.nodes[0], 1) # getbalance self.log.info('include_watchonly should default to true for watch-only wallets') From 3dcd87506e832e108daa54677f51e7f9871b5ce1 Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Tue, 1 Oct 2024 15:56:48 +0000 Subject: [PATCH 04/11] merge bitcoin#23207: Delete generate* calls from TestNode --- test/functional/test_framework/test_framework.py | 8 ++++---- test/functional/test_framework/test_node.py | 16 ++++++++++++++-- test/functional/test_framework/wallet.py | 4 ++-- test/functional/wallet_descriptor.py | 2 +- test/functional/wallet_importdescriptors.py | 4 ++-- 5 files changed, 23 insertions(+), 11 deletions(-) diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 17c3c9ccd8609..a75c0f708beb0 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -767,19 +767,19 @@ def 
join_network(self): self.sync_all() def generate(self, generator, *args, **kwargs): - blocks = generator.generate(*args, **kwargs) + blocks = generator.generate(*args, invalid_call=False, **kwargs) return blocks def generateblock(self, generator, *args, **kwargs): - blocks = generator.generateblock(*args, **kwargs) + blocks = generator.generateblock(*args, invalid_call=False, **kwargs) return blocks def generatetoaddress(self, generator, *args, **kwargs): - blocks = generator.generatetoaddress(*args, **kwargs) + blocks = generator.generatetoaddress(*args, invalid_call=False, **kwargs) return blocks def generatetodescriptor(self, generator, *args, **kwargs): - blocks = generator.generatetodescriptor(*args, **kwargs) + blocks = generator.generatetodescriptor(*args, invalid_call=False, **kwargs) return blocks def sync_blocks(self, nodes=None, wait=1, timeout=60): diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py index dbdeb255debfe..ebd78f755e570 100755 --- a/test/functional/test_framework/test_node.py +++ b/test/functional/test_framework/test_node.py @@ -317,9 +317,21 @@ def wait_for_cookie_credentials(self): time.sleep(1.0 / poll_per_s) self._raise_assertion_error("Unable to retrieve cookie credentials after {}s".format(self.rpc_timeout)) - def generate(self, nblocks, maxtries=1000000): + def generate(self, nblocks, maxtries=1000000, **kwargs): self.log.debug("TestNode.generate() dispatches `generate` call to `generatetoaddress`") - return self.generatetoaddress(nblocks=nblocks, address=self.get_deterministic_priv_key().address, maxtries=maxtries) + return self.generatetoaddress(nblocks=nblocks, address=self.get_deterministic_priv_key().address, maxtries=maxtries, **kwargs) + + def generateblock(self, *args, invalid_call, **kwargs): + assert not invalid_call + return self.__getattr__('generateblock')(*args, **kwargs) + + def generatetoaddress(self, *args, invalid_call, **kwargs): + assert not invalid_call + return self.__getattr__('generatetoaddress')(*args, **kwargs) + + def generatetodescriptor(self, *args, invalid_call, **kwargs): + assert not invalid_call + return self.__getattr__('generatetodescriptor')(*args, **kwargs) def get_wallet_rpc(self, wallet_name): if self.use_cli: diff --git a/test/functional/test_framework/wallet.py b/test/functional/test_framework/wallet.py index 16d2594568cb3..6d494ee2c6584 100644 --- a/test/functional/test_framework/wallet.py +++ b/test/functional/test_framework/wallet.py @@ -114,9 +114,9 @@ def sign_tx(self, tx, fixed_length=True): break tx.vin[0].scriptSig = CScript([der_sig + bytes(bytearray([SIGHASH_ALL]))]) - def generate(self, num_blocks): + def generate(self, num_blocks, **kwargs): """Generate blocks with coinbase outputs to the internal address, and append the outputs to the internal list""" - blocks = self._test_node.generatetodescriptor(num_blocks, self.get_descriptor()) + blocks = self._test_node.generatetodescriptor(num_blocks, self.get_descriptor(), **kwargs) for b in blocks: block_info = self._test_node.getblock(blockhash=b, verbosity=2) cb_tx = block_info['tx'][0] diff --git a/test/functional/wallet_descriptor.py b/test/functional/wallet_descriptor.py index 7c8ae73641888..ca7d8875c1dd9 100755 --- a/test/functional/wallet_descriptor.py +++ b/test/functional/wallet_descriptor.py @@ -63,7 +63,7 @@ def run_test(self): send_wrpc = self.nodes[0].get_wallet_rpc("desc1") # Generate some coins - self.generatetoaddress(send_wrpc, 101, send_wrpc.getnewaddress()) + 
self.generatetoaddress(self.nodes[0], 101, send_wrpc.getnewaddress()) # Make transactions self.log.info("Test sending and receiving") diff --git a/test/functional/wallet_importdescriptors.py b/test/functional/wallet_importdescriptors.py index ea5ba770484ad..d58b7fe83f0f6 100755 --- a/test/functional/wallet_importdescriptors.py +++ b/test/functional/wallet_importdescriptors.py @@ -73,7 +73,7 @@ def run_test(self): assert_equal(wpriv.getwalletinfo()['keypoolsize'], 0) self.log.info('Mining coins') - self.generatetoaddress(w0, 101, w0.getnewaddress()) + self.generatetoaddress(self.nodes[0], 101, w0.getnewaddress()) # RPC importdescriptors ----------------------------------------------- @@ -380,7 +380,7 @@ def run_test(self): solvable=True, ismine=True) txid = w0.sendtoaddress(address, 49.99995540) - self.generatetoaddress(w0, 6, w0.getnewaddress()) + self.generatetoaddress(self.nodes[0], 6, w0.getnewaddress()) self.sync_blocks() tx = wpriv.createrawtransaction([{"txid": txid, "vout": 0}], {w0.getnewaddress(): 49.999}) signed_tx = wpriv.signrawtransactionwithwallet(tx) From e913a45eafae18f2ef42d83748e38f7b010a5dde Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Tue, 1 Oct 2024 16:06:39 +0000 Subject: [PATCH 05/11] test: remove redundant `self.nodes` from `self.sync_`{`blocks`,`all`} Co-authored-by: UdjinM6 --- test/functional/feature_dip3_deterministicmns.py | 2 +- test/functional/feature_dip3_v19.py | 8 ++++---- test/functional/feature_llmq_chainlocks.py | 2 +- test/functional/feature_llmq_evo.py | 4 ++-- test/functional/p2p_sendheaders.py | 4 ++-- test/functional/p2p_sendheaders_compressed.py | 4 ++-- test/functional/test_framework/test_framework.py | 10 +++++----- 7 files changed, 17 insertions(+), 17 deletions(-) diff --git a/test/functional/feature_dip3_deterministicmns.py b/test/functional/feature_dip3_deterministicmns.py index e31f74d97fbd2..b5e24d6b16063 100755 --- a/test/functional/feature_dip3_deterministicmns.py +++ b/test/functional/feature_dip3_deterministicmns.py @@ -73,7 +73,7 @@ def run_test(self): # We have hundreds of blocks to sync here, give it more time self.log.info("syncing blocks for all nodes") - self.sync_blocks(self.nodes, timeout=120) + self.sync_blocks(timeout=120) # DIP3 is fully enforced here diff --git a/test/functional/feature_dip3_v19.py b/test/functional/feature_dip3_v19.py index 5c72e4d23d093..0bdfad5b211c9 100755 --- a/test/functional/feature_dip3_v19.py +++ b/test/functional/feature_dip3_v19.py @@ -87,7 +87,7 @@ def run_test(self): evo_info_0 = self.dynamically_add_masternode(evo=True, rnd=7) assert evo_info_0 is not None self.generate(self.nodes[0], 8) - self.sync_blocks(self.nodes) + self.sync_blocks() self.log.info("Checking that protxs with duplicate EvoNodes fields are rejected") evo_info_1 = self.dynamically_add_masternode(evo=True, rnd=7, should_be_rejected=True) @@ -98,7 +98,7 @@ def run_test(self): evo_info_3 = self.dynamically_add_masternode(evo=True, rnd=9) assert evo_info_3 is not None self.generate(self.nodes[0], 8) - self.sync_blocks(self.nodes) + self.sync_blocks() self.dynamically_evo_update_service(evo_info_0, 9, should_be_rejected=True) revoke_protx = self.mninfo[-1].proTxHash @@ -128,7 +128,7 @@ def test_revoke_protx(self, node_idx, revoke_protx, revoke_keyoperator): self.wait_for_instantlock(fund_txid, self.nodes[0]) tip = self.generate(self.nodes[0], 1)[0] assert_equal(self.nodes[0].getrawtransaction(fund_txid, 1, tip)['confirmations'], 1) - self.sync_all(self.nodes) + 
self.sync_all() protx_result = self.nodes[0].protx('revoke', revoke_protx, revoke_keyoperator, 1, funds_address) self.wait_for_instantlock(protx_result, self.nodes[0]) @@ -138,7 +138,7 @@ def test_revoke_protx(self, node_idx, revoke_protx, revoke_keyoperator): # and then reconnect the corresponding node back to let sync_blocks finish correctly. self.wait_until(lambda: self.nodes[node_idx].getconnectioncount() == 0) self.connect_nodes(node_idx, 0) - self.sync_all(self.nodes) + self.sync_all() self.log.info(f"Successfully revoked={revoke_protx}") for mn in self.mninfo: if mn.proTxHash == revoke_protx: diff --git a/test/functional/feature_llmq_chainlocks.py b/test/functional/feature_llmq_chainlocks.py index d56409032d67a..fa90a7e2331ff 100755 --- a/test/functional/feature_llmq_chainlocks.py +++ b/test/functional/feature_llmq_chainlocks.py @@ -239,7 +239,7 @@ def run_test(self): self.log.info("Test that new node can mine without Chainlock info") tip_0 = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2) self.generate(self.nodes[added_idx], 1) - self.sync_blocks(self.nodes) + self.sync_blocks() tip_1 = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2) assert_equal(tip_1['cbTx']['bestCLSignature'], tip_0['cbTx']['bestCLSignature']) assert_equal(tip_1['cbTx']['bestCLHeightDiff'], tip_0['cbTx']['bestCLHeightDiff'] + 1) diff --git a/test/functional/feature_llmq_evo.py b/test/functional/feature_llmq_evo.py index 1734595cbef15..524aee231456e 100755 --- a/test/functional/feature_llmq_evo.py +++ b/test/functional/feature_llmq_evo.py @@ -90,7 +90,7 @@ def run_test(self): evo_info = self.dynamically_add_masternode(evo=True) evo_protxhash_list.append(evo_info.proTxHash) self.generate(self.nodes[0], 8) - self.sync_blocks(self.nodes) + self.sync_blocks() expectedUpdated.append(evo_info.proTxHash) b_i = self.nodes[0].getbestblockhash() @@ -216,7 +216,7 @@ def test_evo_is_rejected_before_v19(self): outputs = {collateral_address: collateral_amount, funds_address: 1} collateral_txid = self.nodes[0].sendmany("", outputs) self.generate(self.nodes[0], 8) - self.sync_all(self.nodes) + self.sync_all() rawtx = self.nodes[0].getrawtransaction(collateral_txid, 1) collateral_vout = 0 diff --git a/test/functional/p2p_sendheaders.py b/test/functional/p2p_sendheaders.py index 45d4e95113b76..c0be5cfee1acf 100755 --- a/test/functional/p2p_sendheaders.py +++ b/test/functional/p2p_sendheaders.py @@ -216,7 +216,7 @@ def mine_reorg(self, length): # make sure all invalidated blocks are node0's self.generatetoaddress(self.nodes[0], length, self.nodes[0].get_deterministic_priv_key().address) - self.sync_blocks(self.nodes, wait=0.1) + self.sync_blocks(wait=0.1) for x in self.nodes[0].p2ps: x.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16)) x.clear_block_announcements() @@ -225,7 +225,7 @@ def mine_reorg(self, length): hash_to_invalidate = self.nodes[1].getblockhash(tip_height - (length - 1)) self.nodes[1].invalidateblock(hash_to_invalidate) all_hashes = self.generatetoaddress(self.nodes[1], length + 1, self.nodes[1].get_deterministic_priv_key().address) # Must be longer than the orig chain - self.sync_blocks(self.nodes, wait=0.1) + self.sync_blocks(wait=0.1) return [int(x, 16) for x in all_hashes] def run_test(self): diff --git a/test/functional/p2p_sendheaders_compressed.py b/test/functional/p2p_sendheaders_compressed.py index 0897357006e3c..dc5beb17027ee 100755 --- a/test/functional/p2p_sendheaders_compressed.py +++ b/test/functional/p2p_sendheaders_compressed.py @@ -150,7 +150,7 @@ def 
mine_reorg(self, length): return the list of block hashes newly mined.""" self.generate(self.nodes[0], length) # make sure all invalidated blocks are node0's - self.sync_blocks(self.nodes, wait=0.1) + self.sync_blocks(wait=0.1) for p2p in self.nodes[0].p2ps: p2p.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16)) p2p.clear_block_announcements() @@ -159,7 +159,7 @@ def mine_reorg(self, length): hash_to_invalidate = self.nodes[1].getblockhash(tip_height - (length - 1)) self.nodes[1].invalidateblock(hash_to_invalidate) all_hashes = self.generate(self.nodes[1], length + 1) # Must be longer than the orig chain - self.sync_blocks(self.nodes, wait=0.1) + self.sync_blocks(wait=0.1) return [int(hash_value, 16) for hash_value in all_hashes] def run_test(self): diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index a75c0f708beb0..552271c68f0f8 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -1260,7 +1260,7 @@ def dynamically_add_masternode(self, evo=False, rnd=None, should_be_rejected=Fal self.connect_nodes(mn_idx, 0) self.wait_for_sporks_same() - self.sync_blocks(self.nodes) + self.sync_blocks() force_finish_mnsync(self.nodes[mn_idx]) self.log.info("Successfully started and synced proTx:"+str(created_mn_info.proTxHash)) @@ -1283,7 +1283,7 @@ def dynamically_prepare_masternode(self, idx, node_p2p_port, evo=False, rnd=None collateral_txid = self.nodes[0].sendmany("", outputs) self.wait_for_instantlock(collateral_txid, self.nodes[0]) tip = self.generate(self.nodes[0], 1)[0] - self.sync_all(self.nodes) + self.sync_all() rawtx = self.nodes[0].getrawtransaction(collateral_txid, 1, tip) assert_equal(rawtx['confirmations'], 1) @@ -1305,7 +1305,7 @@ def dynamically_prepare_masternode(self, idx, node_p2p_port, evo=False, rnd=None self.wait_for_instantlock(protx_result, self.nodes[0]) tip = self.generate(self.nodes[0], 1)[0] - self.sync_all(self.nodes) + self.sync_all() assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1) mn_info = MasternodeInfo(protx_result, owner_address, voting_address, reward_address, operatorReward, bls['public'], bls['secret'], collateral_address, collateral_txid, collateral_vout, ipAndPort, evo) @@ -1329,7 +1329,7 @@ def dynamically_evo_update_service(self, evo_info, rnd=None, should_be_rejected= self.wait_for_instantlock(fund_txid, self.nodes[0]) tip = self.generate(self.nodes[0], 1)[0] assert_equal(self.nodes[0].getrawtransaction(fund_txid, 1, tip)['confirmations'], 1) - self.sync_all(self.nodes) + self.sync_all() protx_success = False try: @@ -1337,7 +1337,7 @@ def dynamically_evo_update_service(self, evo_info, rnd=None, should_be_rejected= self.wait_for_instantlock(protx_result, self.nodes[0]) tip = self.generate(self.nodes[0], 1)[0] assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1) - self.sync_all(self.nodes) + self.sync_all() self.log.info("Updated EvoNode %s: platformNodeID=%s, platformP2PPort=%s, platformHTTPPort=%s" % (evo_info.proTxHash, platform_node_id, platform_p2p_port, platform_http_port)) protx_success = True except: From 9b3fbdde101ca1de4f4344b05f34bebef2331d13 Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Thu, 26 Sep 2024 19:17:04 +0000 Subject: [PATCH 06/11] merge bitcoin#23300: Implicitly sync after generate*, unless opted out --- test/functional/example_test.py | 3 +- 
test/functional/feature_abortnode.py | 6 +- test/functional/feature_bip68_sequence.py | 8 +-- test/functional/feature_coinstatsindex.py | 4 +- test/functional/feature_dbcrash.py | 3 +- test/functional/feature_fee_estimation.py | 6 +- test/functional/feature_loadblock.py | 2 +- test/functional/feature_minchainwork.py | 4 +- test/functional/feature_notifications.py | 2 +- test/functional/feature_pruning.py | 13 ++-- test/functional/interface_zmq.py | 15 ++-- test/functional/mempool_compatibility.py | 4 +- test/functional/mempool_persist.py | 2 +- test/functional/mempool_reorg.py | 3 +- test/functional/mempool_unbroadcast.py | 3 +- test/functional/mining_basic.py | 2 +- .../mining_prioritisetransaction.py | 2 +- test/functional/p2p_blockfilters.py | 4 +- .../p2p_compactblocks_blocksonly.py | 2 +- test/functional/p2p_node_network_limited.py | 4 +- test/functional/p2p_unrequested_blocks.py | 2 +- test/functional/rpc_blockchain.py | 2 +- test/functional/rpc_getblockfilter.py | 4 +- test/functional/rpc_getblockfrompeer.py | 4 +- test/functional/rpc_getchaintips.py | 6 +- test/functional/rpc_invalidateblock.py | 17 +++-- test/functional/rpc_preciousblock.py | 13 ++-- test/functional/rpc_txoutproof.py | 1 - test/functional/rpc_wipewallettxes.py | 4 +- .../test_framework/test_framework.py | 68 ++++++++----------- test/functional/test_framework/util.py | 6 +- test/functional/wallet_abandonconflict.py | 3 +- test/functional/wallet_balance.py | 2 +- test/functional/wallet_basic.py | 62 ++++++----------- test/functional/wallet_import_rescan.py | 1 - test/functional/wallet_importmulti.py | 20 +++--- test/functional/wallet_listsinceblock.py | 25 +++---- test/functional/wallet_listtransactions.py | 1 - test/functional/wallet_multiwallet.py | 8 +-- test/functional/wallet_reorgsrestore.py | 12 ++-- test/functional/wallet_send.py | 1 - test/functional/wallet_txn_clone.py | 5 +- test/functional/wallet_txn_doublespend.py | 5 +- test/functional/wallet_upgradetohd.py | 2 +- test/functional/wallet_upgradewallet.py | 6 +- 45 files changed, 161 insertions(+), 211 deletions(-) diff --git a/test/functional/example_test.py b/test/functional/example_test.py index f9a00d3b5ad7c..17ca92d00776e 100755 --- a/test/functional/example_test.py +++ b/test/functional/example_test.py @@ -148,8 +148,7 @@ def run_test(self): peer_messaging = self.nodes[0].add_p2p_connection(BaseNode()) # Generating a block on one of the nodes will get us out of IBD - blocks = [int(self.generate(self.nodes[0], nblocks=1)[0], 16)] - self.sync_all(self.nodes[0:2]) + blocks = [int(self.generate(self.nodes[0], sync_fun=lambda: self.sync_all(self.nodes[0:2]), nblocks=1)[0], 16)] # Notice above how we called an RPC by calling a method with the same # name on the node object. Notice also how we used a keyword argument diff --git a/test/functional/feature_abortnode.py b/test/functional/feature_abortnode.py index e849b855a8f5d..19431d2baaf86 100755 --- a/test/functional/feature_abortnode.py +++ b/test/functional/feature_abortnode.py @@ -26,7 +26,7 @@ def setup_network(self): # We'll connect the nodes later def run_test(self): - self.generate(self.nodes[0], 3) + self.generate(self.nodes[0], 3, sync_fun=self.no_op) datadir = get_datadir_path(self.options.tmpdir, 0) # Deleting the undo file will result in reorg failure @@ -34,10 +34,10 @@ def run_test(self): # Connecting to a node with a more work chain will trigger a reorg # attempt. 
- self.generate(self.nodes[1], 3) + self.generate(self.nodes[1], 3, sync_fun=self.no_op) with self.nodes[0].assert_debug_log(["Failed to disconnect block"]): self.connect_nodes(0, 1) - self.generate(self.nodes[1], 1) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) # Check that node0 aborted self.log.info("Waiting for crash") diff --git a/test/functional/feature_bip68_sequence.py b/test/functional/feature_bip68_sequence.py index 25e3e56632316..66587e7e14a72 100755 --- a/test/functional/feature_bip68_sequence.py +++ b/test/functional/feature_bip68_sequence.py @@ -272,7 +272,7 @@ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): cur_time = self.mocktime for _ in range(10): self.nodes[0].setmocktime(cur_time + 600) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) cur_time += 600 assert tx2.hash in self.nodes[0].getrawmempool() @@ -348,7 +348,7 @@ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): # Reset the chain and get rid of the mocktimed-blocks self.nodes[0].setmocktime(self.mocktime) self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height+1)) - self.generate(self.nodes[0], 10) + self.generate(self.nodes[0], 10, sync_fun=self.no_op) # Make sure that BIP68 isn't being used to validate blocks prior to # activation height. If more blocks are mined prior to this test @@ -401,9 +401,9 @@ def activateCSV(self): min_activation_height = 432 height = self.nodes[0].getblockcount() assert_greater_than(min_activation_height - height, 2) - self.generate(self.nodes[0], min_activation_height - height - 2) + self.generate(self.nodes[0], min_activation_height - height - 2, sync_fun=self.no_op) assert not softfork_active(self.nodes[0], 'csv') - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) assert softfork_active(self.nodes[0], 'csv') self.sync_blocks() diff --git a/test/functional/feature_coinstatsindex.py b/test/functional/feature_coinstatsindex.py index 439307b4a4c09..54a656a9c4fca 100755 --- a/test/functional/feature_coinstatsindex.py +++ b/test/functional/feature_coinstatsindex.py @@ -242,7 +242,7 @@ def _test_coin_stats_index(self): res9 = index_node.gettxoutsetinfo('muhash') assert_equal(res8, res9) - self.generate(index_node, 1) + self.generate(index_node, 1, sync_fun=self.no_op) res10 = index_node.gettxoutsetinfo('muhash') assert(res8['txouts'] < res10['txouts']) @@ -268,7 +268,7 @@ def _test_reorg_index(self): assert_equal(index_node.gettxoutsetinfo('muhash')['height'], 110) # Add two new blocks - block = self.generate(index_node, 2)[1] + block = self.generate(index_node, 2, sync_fun=self.no_op)[1] res = index_node.gettxoutsetinfo(hash_type='muhash', hash_or_height=None, use_index=False) # Test that the result of the reorged block is not returned for its old block height diff --git a/test/functional/feature_dbcrash.py b/test/functional/feature_dbcrash.py index 49785cb6e93cb..01e0ffccc57ab 100755 --- a/test/functional/feature_dbcrash.py +++ b/test/functional/feature_dbcrash.py @@ -217,7 +217,7 @@ def run_test(self): # Start by creating a lot of utxos on node3 initial_height = self.nodes[3].getblockcount() - utxo_list = create_confirmed_utxos(self, self.nodes[3].getnetworkinfo()['relayfee'], self.nodes[3], 5000) + utxo_list = create_confirmed_utxos(self, self.nodes[3].getnetworkinfo()['relayfee'], self.nodes[3], 5000, sync_fun=self.no_op) self.log.info("Prepped %d utxo entries", len(utxo_list)) # Sync these blocks with the other nodes @@ -258,6 +258,7 @@ def 
run_test(self): nblocks=min(10, current_height + 1 - self.nodes[3].getblockcount()), # new address to avoid mining a block that has just been invalidated address=self.nodes[3].getnewaddress(), + sync_fun=self.no_op, )) self.log.debug("Syncing %d new blocks...", len(block_hashes)) self.sync_node3blocks(block_hashes) diff --git a/test/functional/feature_fee_estimation.py b/test/functional/feature_fee_estimation.py index 59f30a439bd0f..12efe8dc395d0 100755 --- a/test/functional/feature_fee_estimation.py +++ b/test/functional/feature_fee_estimation.py @@ -231,7 +231,7 @@ def run_test(self): # Mine while len(self.nodes[0].getrawmempool()) > 0: - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) # Repeatedly split those 2 outputs, doubling twice for each rep # Use txouts to monitor the available utxo, since these won't be tracked in wallet @@ -241,12 +241,12 @@ def run_test(self): while len(self.txouts) > 0: split_inputs(self.nodes[0], self.txouts, self.txouts2) while len(self.nodes[0].getrawmempool()) > 0: - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) # Double txouts2 to txouts while len(self.txouts2) > 0: split_inputs(self.nodes[0], self.txouts2, self.txouts) while len(self.nodes[0].getrawmempool()) > 0: - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) reps += 1 self.log.info("Finished splitting") diff --git a/test/functional/feature_loadblock.py b/test/functional/feature_loadblock.py index ddfbffe144c1e..c4f3003d63611 100755 --- a/test/functional/feature_loadblock.py +++ b/test/functional/feature_loadblock.py @@ -29,7 +29,7 @@ def set_test_params(self): def run_test(self): self.nodes[1].setnetworkactive(state=False) - self.generate(self.nodes[0], COINBASE_MATURITY) + self.generate(self.nodes[0], COINBASE_MATURITY, sync_fun=self.no_op) # Parsing the url of our node to get settings for config file data_dir = self.nodes[0].datadir diff --git a/test/functional/feature_minchainwork.py b/test/functional/feature_minchainwork.py index d8d8e43f100ac..1e54dacea1461 100755 --- a/test/functional/feature_minchainwork.py +++ b/test/functional/feature_minchainwork.py @@ -58,7 +58,7 @@ def run_test(self): num_blocks_to_generate = int((self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK) self.log.info("Generating %d blocks on node0", num_blocks_to_generate) hashes = self.generatetoaddress(self.nodes[0], num_blocks_to_generate, - self.nodes[0].get_deterministic_priv_key().address) + self.nodes[0].get_deterministic_priv_key().address, sync_fun=self.no_op) self.log.info("Node0 current chain work: %s", self.nodes[0].getblockheader(hashes[-1])['chainwork']) @@ -88,7 +88,7 @@ def run_test(self): assert ("headers" not in peer.last_message or len(peer.last_message["headers"].headers) == 0) self.log.info("Generating one more block") - self.generatetoaddress(self.nodes[0], 1, self.nodes[0].get_deterministic_priv_key().address) + self.generate(self.nodes[0], 1) self.log.info("Verifying nodes are all synced") diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py index 76061cd2291fe..d8f09b5e2ebb8 100755 --- a/test/functional/feature_notifications.py +++ b/test/functional/feature_notifications.py @@ -116,7 +116,7 @@ def run_test(self): self.log.info("Mine single block, wait for chainlock") self.bump_mocktime(1) - tip = self.generate(self.nodes[0], 1)[-1] + tip = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[-1] 
self.wait_for_chainlocked_block_all_nodes(tip) # directory content should equal the chainlocked block hash assert_equal([tip], sorted(os.listdir(self.chainlocknotify_dir))) diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py index 1a82d57a2255c..67173dada75c1 100755 --- a/test/functional/feature_pruning.py +++ b/test/functional/feature_pruning.py @@ -120,9 +120,8 @@ def setup_nodes(self): def create_big_chain(self): # Start by creating some coinbases we can spend later - self.generate(self.nodes[1], 200) - self.sync_blocks(self.nodes[0:2]) - self.generate(self.nodes[0], 150) + self.generate(self.nodes[1], 200, sync_fun=lambda: self.sync_blocks(self.nodes[0:2])) + self.generate(self.nodes[0], 150, sync_fun=self.no_op) # Then mine enough full blocks to create more than 550MiB of data mine_large_blocks(self.nodes[0], 645) @@ -220,7 +219,7 @@ def reorg_test(self): self.disconnect_nodes(1, 2) self.log.info("Generating new longer chain of 300 more blocks") - self.generate(self.nodes[1], 299) + self.generate(self.nodes[1], 299, sync_fun=self.no_op) self.log.info("Reconnect nodes") self.connect_nodes(0, 1) @@ -272,7 +271,7 @@ def reorg_back(self): self.nodes[0].invalidateblock(curchainhash) assert_equal(self.nodes[0].getblockcount(), self.mainchainheight) assert_equal(self.nodes[0].getbestblockhash(), self.mainchainhash2) - goalbesthash = self.generate(self.nodes[0], blocks_to_mine)[-1] + goalbesthash = self.generate(self.nodes[0], blocks_to_mine, sync_fun=self.no_op)[-1] goalbestheight = first_reorg_height + 1 self.log.info("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload") @@ -315,7 +314,7 @@ def has_block(index): assert_equal(block1_details["nTx"], len(block1_details["tx"])) # mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight) - self.generate(node, 6) + self.generate(node, 6, sync_fun=self.no_op) assert_equal(node.getblockchaininfo()["blocks"], 1001) # prune parameter in the future (block or timestamp) should raise an exception @@ -353,7 +352,7 @@ def has_block(index): assert has_block(2), "blk00002.dat is still there, should be pruned by now" # advance the tip so blk00002.dat and blk00003.dat can be pruned (the last 288 blocks should now be in blk00004.dat) - self.generate(node, 288) + self.generate(node, 288, sync_fun=self.no_op) prune(1000) assert not has_block(2), "blk00002.dat is still there, should be pruned by now" assert not has_block(3), "blk00003.dat is still there, should be pruned by now" diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py index 35220806d8144..430687ca4bd96 100755 --- a/test/functional/interface_zmq.py +++ b/test/functional/interface_zmq.py @@ -77,9 +77,8 @@ class ZMQTestSetupBlock: the generated block's hash, it's (coinbase) transaction id, the raw block or raw transaction data. 
""" - def __init__(self, test_framework, node): - self.block_hash = test_framework.generate(node, 1)[0] + self.block_hash = test_framework.generate(node, 1, sync_fun=test_framework.no_op)[0] coinbase = node.getblock(self.block_hash, 2)['tx'][0] self.tx_hash = coinbase['txid'] self.raw_tx = coinbase['hex'] @@ -261,14 +260,14 @@ def test_reorg(self): # Generate 1 block in nodes[0] with 1 mempool tx and receive all notifications payment_txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1.0) - disconnect_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)[0] + disconnect_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE, sync_fun=self.no_op)[0] disconnect_cb = self.nodes[0].getblock(disconnect_block)["tx"][0] assert_equal(self.nodes[0].getbestblockhash(), hashblock.receive().hex()) assert_equal(hashtx.receive().hex(), payment_txid) assert_equal(hashtx.receive().hex(), disconnect_cb) # Generate 2 blocks in nodes[1] to a different address to ensure split - connect_blocks = self.generatetoaddress(self.nodes[1], 2, ADDRESS_BCRT1_P2SH_OP_TRUE) + connect_blocks = self.generatetoaddress(self.nodes[1], 2, ADDRESS_BCRT1_P2SH_OP_TRUE, sync_fun=self.no_op) # nodes[0] will reorg chain after connecting back nodes[1] self.connect_nodes(0, 1) @@ -312,13 +311,13 @@ def test_sequence(self): seq_num = 1 # Generate 1 block in nodes[0] and receive all notifications - dc_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)[0] + dc_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE, sync_fun=self.no_op)[0] # Note: We are not notified of any block transactions, coinbase or mined assert_equal((self.nodes[0].getbestblockhash(), "C", None), seq.receive_sequence()) # Generate 2 blocks in nodes[1] to a different address to ensure a chain split - self.generatetoaddress(self.nodes[1], 2, ADDRESS_BCRT1_P2SH_OP_TRUE) + self.generatetoaddress(self.nodes[1], 2, ADDRESS_BCRT1_P2SH_OP_TRUE, sync_fun=self.no_op) # nodes[0] will reorg chain after connecting back nodes[1] self.connect_nodes(0, 1) @@ -458,7 +457,7 @@ def test_mempool_sync(self): # 1) Consume backlog until we get a mempool sequence number (hash_str, label, zmq_mem_seq) = seq.receive_sequence() while zmq_mem_seq is None: - (hash_str, label, zmq_mem_seq) = seq.receive_sequence() + (hash_str, label, zmq_mem_seq) = seq.receive_sequence() assert label == "A" or label == "R" assert hash_str is not None @@ -548,7 +547,7 @@ def test_multiple_interfaces(self): ], sync_blocks=False) # Generate 1 block in nodes[0] and receive all notifications - self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE) + self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE, sync_fun=self.no_op) # Should receive the same block hash on both subscribers assert_equal(self.nodes[0].getbestblockhash(), subscribers[0].receive().hex()) diff --git a/test/functional/mempool_compatibility.py b/test/functional/mempool_compatibility.py index ecff300b67545..91f08964d9402 100755 --- a/test/functional/mempool_compatibility.py +++ b/test/functional/mempool_compatibility.py @@ -41,8 +41,8 @@ def run_test(self): old_node, new_node = self.nodes new_wallet = MiniWallet(new_node) - self.generate(new_wallet, 1) - self.generate(new_node, 100) + self.generate(new_wallet, 1, sync_fun=self.no_op) + self.generate(new_node, 100, sync_fun=self.no_op) # Sync the nodes to ensure old_node has the block that contains the coinbase that new_wallet will spend. 
# Otherwise, because coinbases are only valid in a block and not as loose txns, if the nodes aren't synced # unbroadcasted_tx won't pass old_node's `MemPoolAccept::PreChecks`. diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py index 9e2ebfe0588b1..a40c7f4e72d9f 100755 --- a/test/functional/mempool_persist.py +++ b/test/functional/mempool_persist.py @@ -173,7 +173,7 @@ def test_persist_unbroadcast(self): self.start_node(0) # clear out mempool - self.generate(node0, 1) + self.generate(node0, 1, sync_fun=self.no_op) # ensure node0 doesn't have any connections # make a transaction that will remain in the unbroadcast set diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py index 0ee6af62f61ae..781b00fea426c 100755 --- a/test/functional/mempool_reorg.py +++ b/test/functional/mempool_reorg.py @@ -79,9 +79,8 @@ def run_test(self): spend_3_1_id = self.nodes[0].sendrawtransaction(spend_3_1['hex']) self.log.info("Generate a block") last_block = self.generate(self.nodes[0], 1) - # Sync blocks, so that peer 1 gets the block before timelock_tx + # generate() implicitly syncs blocks, so that peer 1 gets the block before timelock_tx # Otherwise, peer 1 would put the timelock_tx in m_recent_rejects - self.sync_all() self.log.info("The time-locked transaction can now be spent") timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx) diff --git a/test/functional/mempool_unbroadcast.py b/test/functional/mempool_unbroadcast.py index e81ba281e9c1d..d200689352249 100755 --- a/test/functional/mempool_unbroadcast.py +++ b/test/functional/mempool_unbroadcast.py @@ -111,7 +111,8 @@ def test_txn_removal(self): # a block removal_reason = "Removed {} from set of unbroadcast txns before confirmation that txn was sent out".format(txhsh) with node.assert_debug_log([removal_reason]): - self.generate(node, 1) + self.generate(node, 1, sync_fun=self.no_op) + if __name__ == "__main__": MempoolUnbroadcastTest().main() diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py index ee65cea91d6af..03097805c3d2f 100755 --- a/test/functional/mining_basic.py +++ b/test/functional/mining_basic.py @@ -52,7 +52,7 @@ def mine_chain(self): self.log.info('Create some old blocks') for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 156, 156): self.bump_mocktime(156) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) mining_info = self.nodes[0].getmininginfo() assert_equal(mining_info['blocks'], 200) assert_equal(mining_info['currentblocktx'], 0) diff --git a/test/functional/mining_prioritisetransaction.py b/test/functional/mining_prioritisetransaction.py index 96e966bbdb7ab..31b0c15adb094 100755 --- a/test/functional/mining_prioritisetransaction.py +++ b/test/functional/mining_prioritisetransaction.py @@ -99,7 +99,7 @@ def run_test(self): # the other high fee transactions. Keep mining until our mempool has # decreased by all the high fee size that we calculated above. while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]): - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) # High fee transaction should not have been mined, but other high fee rate # transactions should have been. 
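The conversions above and below all follow the same three call-site patterns enabled by the test_framework.py changes later in this patch: rely on the default implicit sync_all(), pass sync_fun=self.no_op to skip syncing, or pass a lambda to sync only a subset of nodes. A minimal sketch of the three variants, assuming those framework changes are in place; the class name, node count and block counts are illustrative only:

    from test_framework.test_framework import BitcoinTestFramework

    class SyncFunExampleTest(BitcoinTestFramework):  # hypothetical test, for illustration
        def set_test_params(self):
            self.num_nodes = 3

        def run_test(self):
            # Default: mine a block, then implicitly sync_all() across self.nodes.
            self.generate(self.nodes[0], 1)
            # Opt out of syncing entirely, e.g. while the network is deliberately split.
            self.generate(self.nodes[0], 3, sync_fun=self.no_op)
            # Sync only a subset of nodes after mining.
            self.generate(self.nodes[1], 2, sync_fun=lambda: self.sync_blocks(self.nodes[0:2]))

    if __name__ == '__main__':
        SyncFunExampleTest().main()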
diff --git a/test/functional/p2p_blockfilters.py b/test/functional/p2p_blockfilters.py index ea215784846ef..2f52a226fe8ff 100755 --- a/test/functional/p2p_blockfilters.py +++ b/test/functional/p2p_blockfilters.py @@ -62,11 +62,11 @@ def run_test(self): # Stale blocks by disconnecting nodes 0 & 1, mining, then reconnecting self.disconnect_nodes(0, 1) - stale_block_hash = self.generate(self.nodes[0], 1)[0] + stale_block_hash = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] self.nodes[0].syncwithvalidationinterfacequeue() assert_equal(self.nodes[0].getblockcount(), 1000) - self.generate(self.nodes[1], 1001) + self.generate(self.nodes[1], 1001, sync_fun=self.no_op) assert_equal(self.nodes[1].getblockcount(), 2000) # Check that nodes have signalled NODE_COMPACT_FILTERS correctly. diff --git a/test/functional/p2p_compactblocks_blocksonly.py b/test/functional/p2p_compactblocks_blocksonly.py index b6cf2fe5a33ba..3b9473856df4e 100755 --- a/test/functional/p2p_compactblocks_blocksonly.py +++ b/test/functional/p2p_compactblocks_blocksonly.py @@ -32,7 +32,7 @@ def setup_network(self): self.sync_all() def build_block_on_tip(self): - blockhash = self.generate(self.nodes[2], 1)[0] + blockhash = self.generate(self.nodes[2], 1, sync_fun=self.no_op)[0] block_hex = self.nodes[2].getblock(blockhash=blockhash, verbosity=0) block = from_hex(CBlock(), block_hex) block.rehash() diff --git a/test/functional/p2p_node_network_limited.py b/test/functional/p2p_node_network_limited.py index f3286df85289b..ae54419352c06 100755 --- a/test/functional/p2p_node_network_limited.py +++ b/test/functional/p2p_node_network_limited.py @@ -57,7 +57,7 @@ def run_test(self): self.log.info("Mine enough blocks to reach the NODE_NETWORK_LIMITED range.") self.connect_nodes(0, 1) - blocks = self.generatetoaddress(self.nodes[1], 292, self.nodes[1].get_deterministic_priv_key().address) + blocks = self.generate(self.nodes[1], 292, sync_fun=self.no_op) self.sync_blocks([self.nodes[0], self.nodes[1]]) self.log.info("Make sure we can max retrieve block at tip-288.") @@ -89,7 +89,7 @@ def run_test(self): self.disconnect_all() # mine 10 blocks on node 0 (pruned node) - self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address) + self.generate(self.nodes[0], 10, sync_fun=self.no_op) # connect node1 (non pruned) with node0 (pruned) and check if the can sync self.connect_nodes(0, 1) diff --git a/test/functional/p2p_unrequested_blocks.py b/test/functional/p2p_unrequested_blocks.py index 6d5da0a3ba10c..6dec380072082 100755 --- a/test/functional/p2p_unrequested_blocks.py +++ b/test/functional/p2p_unrequested_blocks.py @@ -73,7 +73,7 @@ def run_test(self): min_work_node = self.nodes[1].add_p2p_connection(P2PInterface()) # 1. Have nodes mine a block (leave IBD) - [self.generatetoaddress(n, 1, n.get_deterministic_priv_key().address) for n in self.nodes] + [self.generate(n, 1, sync_fun=self.no_op) for n in self.nodes] tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes] # 2. Send one block that builds on each tip. 
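All of these call sites go through the framework-level generate* helpers rather than the TestNode RPC wrappers; the bitcoin#23207 commit earlier in this series enforces that with a keyword-only invalid_call argument that only the framework supplies. A rough standalone sketch of that guard together with the implicit sync added here, with both classes reduced to stubs (FakeNode, FakeFramework and the stubbed return value are illustrative, not the real test framework):

    class FakeNode:
        def generatetoaddress(self, *args, invalid_call, **kwargs):
            assert not invalid_call        # framework helpers pass invalid_call=False
            return ['stub_block_hash']     # stands in for forwarding to the actual RPC

    class FakeFramework:
        def __init__(self):
            self.node = FakeNode()
        def no_op(self):
            pass
        def sync_all(self):
            pass                           # stubbed; the real helper syncs blocks and mempools
        def generatetoaddress(self, generator, *args, sync_fun=None, **kwargs):
            blocks = generator.generatetoaddress(*args, invalid_call=False, **kwargs)
            sync_fun() if sync_fun else self.sync_all()
            return blocks

    tf = FakeFramework()
    tf.generatetoaddress(tf.node, 1, 'addr')   # ok: routed through the framework helper
    try:
        tf.node.generatetoaddress(1, 'addr')   # direct node call omits invalid_call
    except TypeError:
        pass                                   # rejected: missing keyword-only argument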
diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py index b29f633442490..a36e881c68a3f 100755 --- a/test/functional/rpc_blockchain.py +++ b/test/functional/rpc_blockchain.py @@ -352,7 +352,7 @@ def _test_stopatheight(self): self.log.debug('Node should not stop at this height') assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3)) try: - self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_P2SH_OP_TRUE) + self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_P2SH_OP_TRUE, sync_fun=self.no_op) except (ConnectionError, http.client.BadStatusLine): pass # The node already shut down before response self.log.debug('Node should stop at this height...') diff --git a/test/functional/rpc_getblockfilter.py b/test/functional/rpc_getblockfilter.py index 4d860d0f36c33..1c456a5b82ba0 100755 --- a/test/functional/rpc_getblockfilter.py +++ b/test/functional/rpc_getblockfilter.py @@ -21,8 +21,8 @@ def run_test(self): # Create two chains by disconnecting nodes 0 & 1, mining, then reconnecting self.disconnect_nodes(0, 1) - self.generate(self.nodes[0], 3) - self.generate(self.nodes[1], 4) + self.generate(self.nodes[0], 3, sync_fun=self.no_op) + self.generate(self.nodes[1], 4, sync_fun=self.no_op) assert_equal(self.nodes[0].getblockcount(), 3) chain0_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)] diff --git a/test/functional/rpc_getblockfrompeer.py b/test/functional/rpc_getblockfrompeer.py index a37888cbbe03d..983d08730bfee 100755 --- a/test/functional/rpc_getblockfrompeer.py +++ b/test/functional/rpc_getblockfrompeer.py @@ -27,11 +27,11 @@ def check_for_block(self, hash): def run_test(self): self.log.info("Mine 4 blocks on Node 0") - self.generate(self.nodes[0], 4) + self.generate(self.nodes[0], 4, sync_fun=self.no_op) assert_equal(self.nodes[0].getblockcount(), 204) self.log.info("Mine competing 3 blocks on Node 1") - self.generate(self.nodes[1], 3) + self.generate(self.nodes[1], 3, sync_fun=self.no_op) assert_equal(self.nodes[1].getblockcount(), 203) short_tip = self.nodes[1].getbestblockhash() diff --git a/test/functional/rpc_getchaintips.py b/test/functional/rpc_getchaintips.py index 9e3defa256966..c6833ef4759fb 100755 --- a/test/functional/rpc_getchaintips.py +++ b/test/functional/rpc_getchaintips.py @@ -26,10 +26,8 @@ def run_test(self): # Split the network and build two chains of different lengths. 
self.split_network() - self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address) - self.generatetoaddress(self.nodes[2], 20, self.nodes[2].get_deterministic_priv_key().address) - self.sync_all(self.nodes[:2]) - self.sync_all(self.nodes[2:]) + self.generate(self.nodes[0], 10, sync_fun=lambda: self.sync_all(self.nodes[:2])) + self.generate(self.nodes[2], 20, sync_fun=lambda: self.sync_all(self.nodes[2:])) tips = self.nodes[1].getchaintips () assert_equal (len (tips), 1) diff --git a/test/functional/rpc_invalidateblock.py b/test/functional/rpc_invalidateblock.py index 36bfbb64c82ca..1fdff5f101e23 100755 --- a/test/functional/rpc_invalidateblock.py +++ b/test/functional/rpc_invalidateblock.py @@ -23,12 +23,12 @@ def setup_network(self): def run_test(self): self.log.info("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:") self.log.info("Mine 4 blocks on Node 0") - self.generatetoaddress(self.nodes[0], 4, self.nodes[0].get_deterministic_priv_key().address) + self.generate(self.nodes[0], 4, sync_fun=self.no_op) assert_equal(self.nodes[0].getblockcount(), 4) besthash_n0 = self.nodes[0].getbestblockhash() self.log.info("Mine competing 6 blocks on Node 1") - self.generatetoaddress(self.nodes[1], 6, self.nodes[1].get_deterministic_priv_key().address) + self.generate(self.nodes[1], 6, sync_fun=self.no_op) assert_equal(self.nodes[1].getblockcount(), 6) self.log.info("Connect nodes to force a reorg") @@ -54,7 +54,7 @@ def run_test(self): self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3)) assert_equal(self.nodes[2].getblockcount(), 2) self.log.info("..and then mine a block") - self.generatetoaddress(self.nodes[2], 1, self.nodes[2].get_deterministic_priv_key().address) + self.generate(self.nodes[2], 1, sync_fun=self.no_op) self.log.info("Verify all nodes are at the right height") self.wait_until(lambda: self.nodes[2].getblockcount() == 3, timeout=5) self.wait_until(lambda: self.nodes[0].getblockcount() == 4, timeout=5) @@ -64,13 +64,12 @@ def run_test(self): self.restart_node(0, extra_args=["-checkblocks=5"]) self.restart_node(1, extra_args=["-checkblocks=5"]) self.connect_nodes(0, 1) - self.generate(self.nodes[0], 10) - self.sync_blocks(self.nodes[0:2]) + self.generate(self.nodes[0], 10, sync_fun=lambda: self.sync_blocks(self.nodes[0:2])) newheight = self.nodes[0].getblockcount() for j in range(2): self.restart_node(0, extra_args=["-checkblocks=5"]) - tip = self.generate(self.nodes[0], 10)[-1] - self.generate(self.nodes[1], 9) + tip = self.generate(self.nodes[0], 10, sync_fun=self.no_op)[-1] + self.generate(self.nodes[1], 9, sync_fun=self.no_op) self.connect_nodes(0, 1) self.sync_blocks(self.nodes[0:2]) assert_equal(self.nodes[0].getblockcount(), newheight + 10 * (j + 1)) @@ -87,7 +86,7 @@ def run_test(self): assert_equal(tip, self.nodes[1].getbestblockhash()) self.log.info("Verify that we reconsider all ancestors as well") - blocks = self.generatetodescriptor(self.nodes[1], 10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR) + blocks = self.generatetodescriptor(self.nodes[1], 10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR, sync_fun=self.no_op) assert_equal(self.nodes[1].getbestblockhash(), blocks[-1]) # Invalidate the two blocks at the tip self.nodes[1].invalidateblock(blocks[-1]) @@ -99,7 +98,7 @@ def run_test(self): assert_equal(self.nodes[1].getbestblockhash(), blocks[-1]) self.log.info("Verify that we reconsider all descendants") - blocks = self.generatetodescriptor(self.nodes[1], 10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR) + blocks = 
self.generatetodescriptor(self.nodes[1], 10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR, sync_fun=self.no_op) assert_equal(self.nodes[1].getbestblockhash(), blocks[-1]) # Invalidate the two blocks at the tip self.nodes[1].invalidateblock(blocks[-2]) diff --git a/test/functional/rpc_preciousblock.py b/test/functional/rpc_preciousblock.py index 3a00992ddc3ed..2e526efd9abd5 100755 --- a/test/functional/rpc_preciousblock.py +++ b/test/functional/rpc_preciousblock.py @@ -42,19 +42,18 @@ def setup_network(self): def run_test(self): self.log.info("Ensure submitblock can in principle reorg to a competing chain") - gen_address = lambda i: self.nodes[i].get_deterministic_priv_key().address # A non-wallet address to mine to - self.generatetoaddress(self.nodes[0], 1, gen_address(0)) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) assert_equal(self.nodes[0].getblockcount(), 1) - hashZ = self.generatetoaddress(self.nodes[1], 2, gen_address(1))[-1] + hashZ = self.generate(self.nodes[1], 2, sync_fun=self.no_op)[-1] assert_equal(self.nodes[1].getblockcount(), 2) node_sync_via_rpc(self.nodes[0:3]) assert_equal(self.nodes[0].getbestblockhash(), hashZ) self.log.info("Mine blocks A-B-C on Node 0") - hashC = self.generatetoaddress(self.nodes[0], 3, gen_address(0))[-1] + hashC = self.generate(self.nodes[0], 3, sync_fun=self.no_op)[-1] assert_equal(self.nodes[0].getblockcount(), 5) self.log.info("Mine competing blocks E-F-G on Node 1") - hashG = self.generatetoaddress(self.nodes[1], 3, gen_address(1))[-1] + hashG = self.generate(self.nodes[1], 3, sync_fun=self.no_op)[-1] assert_equal(self.nodes[1].getblockcount(), 5) assert hashC != hashG self.log.info("Connect nodes and check no reorg occurs") @@ -83,7 +82,7 @@ def run_test(self): self.nodes[1].preciousblock(hashC) assert_equal(self.nodes[1].getbestblockhash(), hashC) self.log.info("Mine another block (E-F-G-)H on Node 0 and reorg Node 1") - self.generatetoaddress(self.nodes[0], 1, gen_address(0)) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) assert_equal(self.nodes[0].getblockcount(), 6) self.sync_blocks(self.nodes[0:2]) hashH = self.nodes[0].getbestblockhash() @@ -92,7 +91,7 @@ def run_test(self): self.nodes[1].preciousblock(hashC) assert_equal(self.nodes[1].getbestblockhash(), hashH) self.log.info("Mine competing blocks I-J-K-L on Node 2") - self.generatetoaddress(self.nodes[2], 4, gen_address(2)) + self.generate(self.nodes[2], 4, sync_fun=self.no_op) assert_equal(self.nodes[2].getblockcount(), 6) hashL = self.nodes[2].getbestblockhash() self.log.info("Connect nodes and check no reorg occurs") diff --git a/test/functional/rpc_txoutproof.py b/test/functional/rpc_txoutproof.py index 5fa902caf3f48..2b0bad984b1b7 100755 --- a/test/functional/rpc_txoutproof.py +++ b/test/functional/rpc_txoutproof.py @@ -45,7 +45,6 @@ def run_test(self): self.generate(self.nodes[0], 1) blockhash = self.nodes[0].getblockhash(chain_height + 1) - self.sync_all() txlist = [] blocktxn = self.nodes[0].getblock(blockhash, True)["tx"] diff --git a/test/functional/rpc_wipewallettxes.py b/test/functional/rpc_wipewallettxes.py index e45f57df514f8..856a15f4acf1d 100755 --- a/test/functional/rpc_wipewallettxes.py +++ b/test/functional/rpc_wipewallettxes.py @@ -18,9 +18,9 @@ def skip_test_if_missing_module(self): def run_test(self): self.log.info("Test that wipewallettxes removes txes and rescanblockchain is able to recover them") - self.generate(self.nodes[0], 101) + self.generate(self.nodes[0], 101, sync_fun=self.no_op) txid = 
self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) assert_equal(self.nodes[0].getwalletinfo()["txcount"], 103) self.nodes[0].wipewallettxes() assert_equal(self.nodes[0].getwalletinfo()["txcount"], 0) diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 552271c68f0f8..067f4ad01df8b 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -455,7 +455,7 @@ def setup_nodes(self): if not self.disable_mocktime: self.log.debug('Generate a block with current mocktime') self.bump_mocktime(156 * 200, update_schedulers=False) - block_hash = self.generate(self.nodes[0], 1)[0] + block_hash = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] block = self.nodes[0].getblock(blockhash=block_hash, verbosity=0) for n in self.nodes: n.submitblock(block) @@ -766,20 +766,27 @@ def join_network(self): self.connect_nodes(1, 2) self.sync_all() - def generate(self, generator, *args, **kwargs): + def no_op(self): + pass + + def generate(self, generator, *args, sync_fun=None, **kwargs): blocks = generator.generate(*args, invalid_call=False, **kwargs) + sync_fun() if sync_fun else self.sync_all() return blocks - def generateblock(self, generator, *args, **kwargs): + def generateblock(self, generator, *args, sync_fun=None, **kwargs): blocks = generator.generateblock(*args, invalid_call=False, **kwargs) + sync_fun() if sync_fun else self.sync_all() return blocks - def generatetoaddress(self, generator, *args, **kwargs): + def generatetoaddress(self, generator, *args, sync_fun=None, **kwargs): blocks = generator.generatetoaddress(*args, invalid_call=False, **kwargs) + sync_fun() if sync_fun else self.sync_all() return blocks - def generatetodescriptor(self, generator, *args, **kwargs): + def generatetodescriptor(self, generator, *args, sync_fun=None, **kwargs): blocks = generator.generatetodescriptor(*args, invalid_call=False, **kwargs) + sync_fun() if sync_fun else self.sync_all() return blocks def sync_blocks(self, nodes=None, wait=1, timeout=60): @@ -1178,23 +1185,19 @@ def activate_by_name(self, name, expected_activation_height=None): # Hence the last block prior to the activation is (expected_activation_height - 2). 
while expected_activation_height - height - 2 > batch_size: self.bump_mocktime(batch_size) - self.generate(self.nodes[0], batch_size) + self.generate(self.nodes[0], batch_size, sync_fun=lambda: self.sync_blocks()) height += batch_size - self.sync_blocks() blocks_left = expected_activation_height - height - 2 assert blocks_left <= batch_size self.bump_mocktime(blocks_left) - self.generate(self.nodes[0], blocks_left) - self.sync_blocks() + self.generate(self.nodes[0], blocks_left, sync_fun=lambda: self.sync_blocks()) assert not softfork_active(self.nodes[0], name) self.bump_mocktime(1) - self.generate(self.nodes[0], 1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) else: while not softfork_active(self.nodes[0], name): self.bump_mocktime(batch_size) - self.generate(self.nodes[0], batch_size) - self.sync_blocks() + self.generate(self.nodes[0], batch_size, sync_fun=lambda: self.sync_blocks()) assert softfork_active(self.nodes[0], name) @@ -1283,7 +1286,6 @@ def dynamically_prepare_masternode(self, idx, node_p2p_port, evo=False, rnd=None collateral_txid = self.nodes[0].sendmany("", outputs) self.wait_for_instantlock(collateral_txid, self.nodes[0]) tip = self.generate(self.nodes[0], 1)[0] - self.sync_all() rawtx = self.nodes[0].getrawtransaction(collateral_txid, 1, tip) assert_equal(rawtx['confirmations'], 1) @@ -1305,7 +1307,6 @@ def dynamically_prepare_masternode(self, idx, node_p2p_port, evo=False, rnd=None self.wait_for_instantlock(protx_result, self.nodes[0]) tip = self.generate(self.nodes[0], 1)[0] - self.sync_all() assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1) mn_info = MasternodeInfo(protx_result, owner_address, voting_address, reward_address, operatorReward, bls['public'], bls['secret'], collateral_address, collateral_txid, collateral_vout, ipAndPort, evo) @@ -1327,7 +1328,7 @@ def dynamically_evo_update_service(self, evo_info, rnd=None, should_be_rejected= fund_txid = self.nodes[0].sendtoaddress(funds_address, 1) self.wait_for_instantlock(fund_txid, self.nodes[0]) - tip = self.generate(self.nodes[0], 1)[0] + tip = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] assert_equal(self.nodes[0].getrawtransaction(fund_txid, 1, tip)['confirmations'], 1) self.sync_all() @@ -1335,7 +1336,7 @@ def dynamically_evo_update_service(self, evo_info, rnd=None, should_be_rejected= try: protx_result = self.nodes[0].protx('update_service_evo', evo_info.proTxHash, evo_info.addr, evo_info.keyOperator, platform_node_id, platform_p2p_port, platform_http_port, operator_reward_address, funds_address) self.wait_for_instantlock(protx_result, self.nodes[0]) - tip = self.generate(self.nodes[0], 1)[0] + tip = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1) self.sync_all() self.log.info("Updated EvoNode %s: platformNodeID=%s, platformP2PPort=%s, platformHTTPPort=%s" % (evo_info.proTxHash, platform_node_id, platform_p2p_port, platform_http_port)) @@ -1487,7 +1488,7 @@ def setup_nodes(self): self.log.info("Generating %d coins" % required_balance) while self.nodes[0].getbalance() < required_balance: self.bump_mocktime(1) - self.generate(self.nodes[0], 10) + self.generate(self.nodes[0], 10, sync_fun=self.no_op) # create masternodes self.prepare_masternodes() @@ -1506,8 +1507,6 @@ def setup_network(self): self.bump_mocktime(1) self.generate(self.nodes[0], 1) - # sync nodes - self.sync_all() for i in range(0, num_simple_nodes): 
force_finish_mnsync(self.nodes[i + 1]) @@ -1780,8 +1779,7 @@ def wait_func(): if quorum_hash in self.nodes[0].quorum("list")[llmq_type_name]: return True self.bump_mocktime(sleep, nodes=nodes) - self.generate(self.nodes[0], 1) - self.sync_blocks(nodes) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(nodes)) return False wait_until_helper(wait_func, timeout=timeout, sleep=sleep) @@ -1792,15 +1790,13 @@ def wait_func(): if quorum_hash_1 in self.nodes[0].quorum("list")[llmq_type_name]: return True self.bump_mocktime(sleep, nodes=nodes) - self.generate(self.nodes[0], 1) - self.sync_blocks(nodes) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(nodes)) return False wait_until_helper(wait_func, timeout=timeout, sleep=sleep) def move_blocks(self, nodes, num_blocks): self.bump_mocktime(1, nodes=nodes) - self.generate(self.nodes[0], num_blocks) - self.sync_blocks(nodes) + self.generate(self.nodes[0], num_blocks, sync_fun=lambda: self.sync_blocks(nodes)) def mine_quorum(self, llmq_type_name="llmq_test", llmq_type=100, expected_connections=None, expected_members=None, expected_contributions=None, expected_complaints=0, expected_justifications=0, expected_commitments=None, mninfos_online=None, mninfos_valid=None): spork21_active = self.nodes[0].spork('show')['SPORK_21_QUORUM_ALL_CONNECTED'] <= 1 @@ -1829,7 +1825,7 @@ def mine_quorum(self, llmq_type_name="llmq_test", llmq_type=100, expected_connec skip_count = 24 - (self.nodes[0].getblockcount() % 24) if skip_count != 0: self.bump_mocktime(1, nodes=nodes) - self.generate(self.nodes[0], skip_count) + self.generate(self.nodes[0], skip_count, sync_fun=self.no_op) self.sync_blocks(nodes) q = self.nodes[0].getbestblockhash() @@ -1871,8 +1867,7 @@ def mine_quorum(self, llmq_type_name="llmq_test", llmq_type=100, expected_connec self.log.info("Mining final commitment") self.bump_mocktime(1, nodes=nodes) self.nodes[0].getblocktemplate() # this calls CreateNewBlock - self.generate(self.nodes[0], 1) - self.sync_blocks(nodes) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(nodes)) self.log.info("Waiting for quorum to appear in the list") self.wait_for_quorum_list(q, nodes, llmq_type_name=llmq_type_name) @@ -1882,9 +1877,7 @@ def mine_quorum(self, llmq_type_name="llmq_test", llmq_type=100, expected_connec quorum_info = self.nodes[0].quorum("info", llmq_type, new_quorum) # Mine 8 (SIGN_HEIGHT_OFFSET) more blocks to make sure that the new quorum gets eligible for signing sessions - self.generate(self.nodes[0], 8) - - self.sync_blocks(nodes) + self.generate(self.nodes[0], 8, sync_fun=lambda: self.sync_blocks(nodes)) self.log.info("New quorum: height=%d, quorumHash=%s, quorumIndex=%d, minedBlock=%s" % (quorum_info["height"], new_quorum, quorum_info["quorumIndex"], quorum_info["minedBlock"])) @@ -1992,8 +1985,7 @@ def mine_cycle_quorum(self, llmq_type_name="llmq_test_dip0024", llmq_type=103, self.log.info("Mining final commitments") self.bump_mocktime(1, nodes=nodes) self.nodes[0].getblocktemplate() # this calls CreateNewBlock - self.generate(self.nodes[0], 1) - self.sync_blocks(nodes) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(nodes)) self.log.info("Waiting for quorum(s) to appear in the list") self.wait_for_quorums_list(q_0, q_1, nodes, llmq_type_name) @@ -2001,9 +1993,8 @@ def mine_cycle_quorum(self, llmq_type_name="llmq_test_dip0024", llmq_type=103, quorum_info_0 = self.nodes[0].quorum("info", llmq_type, q_0) quorum_info_1 = self.nodes[0].quorum("info", llmq_type, q_1) # Mine 8 
(SIGN_HEIGHT_OFFSET) more blocks to make sure that the new quorum gets eligible for signing sessions - self.generate(self.nodes[0], 8) + self.generate(self.nodes[0], 8, sync_fun=lambda: self.sync_blocks(nodes)) - self.sync_blocks(nodes) self.log.info("New quorum: height=%d, quorumHash=%s, quorumIndex=%d, minedBlock=%s" % (quorum_info_0["height"], q_0, quorum_info_0["quorumIndex"], quorum_info_0["minedBlock"])) self.log.info("New quorum: height=%d, quorumHash=%s, quorumIndex=%d, minedBlock=%s" % (quorum_info_1["height"], q_1, quorum_info_1["quorumIndex"], quorum_info_1["minedBlock"])) @@ -2027,7 +2018,7 @@ def move_to_next_cycle(self): skip_count = cycle_length - (cur_block % cycle_length) if skip_count != 0: self.bump_mocktime(1, nodes=nodes) - self.generate(self.nodes[0], skip_count) + self.generate(self.nodes[0], skip_count, sync_fun=self.no_op) self.sync_blocks(nodes) self.log.info('Moved from block %d to %d' % (cur_block, self.nodes[0].getblockcount())) @@ -2089,8 +2080,7 @@ def test_mns(): if recover: if self.mocktime % 2: self.bump_mocktime(self.quorum_data_request_expiration_timeout + 1) - self.generate(self.nodes[0], 1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) else: self.bump_mocktime(self.quorum_data_thread_request_timeout_seconds + 1) diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index 8b038dee83d8c..c2326780ecb67 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -514,10 +514,10 @@ def find_output(node, txid, amount, *, blockhash=None): # Helper to create at least "count" utxos # Pass in a fee that is sufficient for relay and mining new transactions. -def create_confirmed_utxos(test_framework, fee, node, count): +def create_confirmed_utxos(test_framework, fee, node, count, **kwargs): to_generate = int(0.5 * count) + 101 while to_generate > 0: - test_framework.generate(node, min(25, to_generate)) + test_framework.generate(node, min(25, to_generate), **kwargs) to_generate -= 25 utxos = node.listunspent() iterations = count - len(utxos) @@ -538,7 +538,7 @@ def create_confirmed_utxos(test_framework, fee, node, count): node.sendrawtransaction(signed_tx) while (node.getmempoolinfo()['size'] > 0): - test_framework.generate(node, 1) + test_framework.generate(node, 1, **kwargs) utxos = node.listunspent() assert len(utxos) >= count diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py index 8f54e505983cf..edb3779f82285 100755 --- a/test/functional/wallet_abandonconflict.py +++ b/test/functional/wallet_abandonconflict.py @@ -43,7 +43,6 @@ def run_test(self): # Can not abandon confirmed transaction assert_raises_rpc_error(-5, 'Transaction not eligible for abandonment', lambda: self.nodes[0].abandontransaction(txid=txA)) - self.sync_blocks() newbalance = self.nodes[0].getbalance() assert balance - newbalance < Decimal("0.001") #no more than fees lost balance = newbalance @@ -167,7 +166,7 @@ def run_test(self): tx = self.nodes[0].createrawtransaction(inputs, outputs) signed = self.nodes[0].signrawtransactionwithwallet(tx) self.nodes[1].sendrawtransaction(signed["hex"]) - self.generate(self.nodes[1], 1) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) self.connect_nodes(0, 1) self.sync_blocks() diff --git a/test/functional/wallet_balance.py b/test/functional/wallet_balance.py index 2a6c4ba41ab18..9f5d36ac8fc23 100755 --- a/test/functional/wallet_balance.py +++ b/test/functional/wallet_balance.py @@ 
-269,7 +269,7 @@ def test_balances(*, fee_node_1=0): self.nodes[0].invalidateblock(block_reorg) self.nodes[1].invalidateblock(block_reorg) assert_equal(self.nodes[0].getbalance(minconf=0), 0) # wallet txs not in the mempool are untrusted - self.generatetoaddress(self.nodes[0], 1, ADDRESS_WATCHONLY) + self.generatetoaddress(self.nodes[0], 1, ADDRESS_WATCHONLY, sync_fun=self.no_op) assert_equal(self.nodes[0].getbalance(minconf=0), 0) # wallet txs not in the mempool are untrusted # Now confirm tx_orig diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py index 3dac8aacf3b60..9b27dccadb126 100755 --- a/test/functional/wallet_basic.py +++ b/test/functional/wallet_basic.py @@ -66,15 +66,14 @@ def run_test(self): self.log.info("Mining blocks...") - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) walletinfo = self.nodes[0].getwalletinfo() assert_equal(walletinfo['immature_balance'], 500) assert_equal(walletinfo['balance'], 0) self.sync_all(self.nodes[0:3]) - self.generate(self.nodes[1], COINBASE_MATURITY + 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[1], COINBASE_MATURITY + 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) assert_equal(self.nodes[0].getbalance(), 500) assert_equal(self.nodes[1].getbalance(), 500) @@ -124,8 +123,7 @@ def run_test(self): assert_equal(walletinfo['immature_balance'], 0) # Have node0 mine a block, thus it will collect its own fee. - self.generate(self.nodes[0], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) # Exercise locking of unspent outputs unspent_0 = self.nodes[2].listunspent()[0] @@ -168,8 +166,7 @@ def run_test(self): assert_equal(len(self.nodes[1].listlockunspent()), 0) # Have node1 generate 100 blocks (so node0 can recover the fee) - self.generate(self.nodes[1], COINBASE_MATURITY) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[1], COINBASE_MATURITY, sync_fun=lambda: self.sync_all(self.nodes[0:3])) # node0 should end up with 1000 DASH in block rewards plus fees, but # minus the 210 plus fees sent to node2 @@ -200,8 +197,7 @@ def run_test(self): self.nodes[1].sendrawtransaction(hexstring=txns_to_send[1]["hex"], maxfeerate=0) # Have node1 mine a block to confirm transactions: - self.generate(self.nodes[1], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[1], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) assert_equal(self.nodes[0].getbalance(), 0) assert_equal(self.nodes[2].getbalance(), 1000 - totalfee) @@ -215,15 +211,13 @@ def run_test(self): fee_per_byte = Decimal('0.00001') / 1000 self.nodes[2].settxfee(fee_per_byte * 1000) txid = self.nodes[2].sendtoaddress(address, 100, "", "", False) - self.generate(self.nodes[2], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[2], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('900') - totalfee, fee_per_byte, count_bytes(self.nodes[2].gettransaction(txid)['hex'])) assert_equal(self.nodes[0].getbalance(), Decimal('100')) # Send 100 DASH with subtract fee from amount txid = self.nodes[2].sendtoaddress(address, 100, "", "", True) - self.generate(self.nodes[2], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[2], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) node_2_bal -= Decimal('100') assert_equal(self.nodes[2].getbalance(), node_2_bal) node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('200'), fee_per_byte, 
count_bytes(self.nodes[2].gettransaction(txid)['hex'])) @@ -232,16 +226,14 @@ def run_test(self): # Sendmany 100 DASH txid = self.nodes[2].sendmany('', {address: 100}, 0, False, "", []) - self.generate(self.nodes[2], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[2], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) node_0_bal += Decimal('100') node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('100'), fee_per_byte, count_bytes(self.nodes[2].gettransaction(txid)['hex'])) assert_equal(self.nodes[0].getbalance(), node_0_bal) # Sendmany 100 DASH with subtract fee from amount txid = self.nodes[2].sendmany('', {address: 100}, 0, False, "", [address]) - self.generate(self.nodes[2], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[2], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) node_2_bal -= Decimal('100') assert_equal(self.nodes[2].getbalance(), node_2_bal) node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('100'), fee_per_byte, count_bytes(self.nodes[2].gettransaction(txid)['hex'])) @@ -253,8 +245,7 @@ def run_test(self): # Test passing fee_rate as a string txid = self.nodes[2].sendmany(amounts={address: 10}, fee_rate=str(fee_rate_sat_vb)) - self.generate(self.nodes[2], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[2], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) balance = self.nodes[2].getbalance() node_2_bal = self.check_fee_amount(balance, node_2_bal - Decimal('10'), explicit_fee_rate_btc_kvb, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) assert_equal(balance, node_2_bal) @@ -264,8 +255,7 @@ def run_test(self): # Test passing fee_rate as an integer amount = Decimal("0.0001") txid = self.nodes[2].sendmany(amounts={address: amount}, fee_rate=fee_rate_sat_vb) - self.generate(self.nodes[2], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[2], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) balance = self.nodes[2].getbalance() node_2_bal = self.check_fee_amount(balance, node_2_bal - amount, explicit_fee_rate_btc_kvb, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) assert_equal(balance, node_2_bal) @@ -327,7 +317,6 @@ def run_test(self): self.sync_all() self.generate(self.nodes[1], 1) # mine a block - self.sync_all() unspent_txs = self.nodes[0].listunspent() # zero value tx must be in listunspents output found = False @@ -349,14 +338,12 @@ def run_test(self): txid_not_broadcast = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2) tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast) - self.generate(self.nodes[1], 1) # mine a block, tx should not be in there - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[1], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) # mine a block, tx should not be in there assert_equal(self.nodes[2].getbalance(), node_2_bal) # should not be changed because tx was not broadcasted # now broadcast from another node, mine a block, sync, and check the balance self.nodes[1].sendrawtransaction(tx_obj_not_broadcast['hex']) - self.generate(self.nodes[1], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[1], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) node_2_bal += 2 tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast) assert_equal(self.nodes[2].getbalance(), node_2_bal) @@ -374,8 +361,7 @@ def run_test(self): self.connect_nodes(0, 2) self.sync_blocks(self.nodes[0:3]) - self.generate(self.nodes[0], 1) - 
self.sync_blocks(self.nodes[0:3]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(self.nodes[0:3])) node_2_bal += 2 # tx should be added to balance because after restarting the nodes tx should be broadcast @@ -436,8 +422,7 @@ def run_test(self): # 1. Send some coins to generate new UTXO address_to_import = self.nodes[2].getnewaddress() txid = self.nodes[0].sendtoaddress(address_to_import, 1) - self.generate(self.nodes[0], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) self.log.info("Test sendtoaddress with fee_rate param (explicit fee rate in duff/B)") prebalance = self.nodes[2].getbalance() @@ -449,8 +434,7 @@ def run_test(self): # Test passing fee_rate as an integer txid = self.nodes[2].sendtoaddress(address=address, amount=amount, fee_rate=fee_rate_sat_vb) tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex']) - self.generate(self.nodes[0], 1) - self.sync_all(self.nodes[0:3]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) postbalance = self.nodes[2].getbalance() fee = prebalance - postbalance - Decimal(amount) assert_fee_amount(fee, tx_size, Decimal(fee_rate_btc_kvb)) @@ -462,7 +446,7 @@ def run_test(self): # Test passing fee_rate as a string txid = self.nodes[2].sendtoaddress(address=address, amount=amount, fee_rate=str(fee_rate_sat_vb)) tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex']) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_all(self.nodes[0:3])) self.sync_all(self.nodes[0:3]) postbalance = self.nodes[2].getbalance() fee = prebalance - postbalance - amount @@ -526,17 +510,15 @@ def run_test(self): # Mine a block from node0 to an address from node1 coinbase_addr = self.nodes[1].getnewaddress() - block_hash = self.generatetoaddress(self.nodes[0], 1, coinbase_addr)[0] + block_hash = self.generatetoaddress(self.nodes[0], 1, coinbase_addr, sync_fun=lambda: self.sync_all(self.nodes[0:3]))[0] coinbase_txid = self.nodes[0].getblock(block_hash)['tx'][0] - self.sync_all(self.nodes[0:3]) # Check that the txid and balance is found by node1 self.nodes[1].gettransaction(coinbase_txid) # check if wallet or blockchain maintenance changes the balance self.sync_all(self.nodes[0:3]) - blocks = self.generate(self.nodes[0], 2) - self.sync_all(self.nodes[0:3]) + blocks = self.generate(self.nodes[0], 2, sync_fun=lambda: self.sync_all(self.nodes[0:3])) balance_nodes = [self.nodes[i].getbalance() for i in range(3)] block_count = self.nodes[0].getblockcount() @@ -583,13 +565,13 @@ def run_test(self): # Get all non-zero utxos together chain_addrs = [self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()] singletxid = self.nodes[0].sendtoaddress(chain_addrs[0], self.nodes[0].getbalance(), "", "", True) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) node0_balance = self.nodes[0].getbalance() # Split into two chains rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {chain_addrs[0]: node0_balance // 2 - Decimal('0.01'), chain_addrs[1]: node0_balance // 2 - Decimal('0.01')}) signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx) singletxid = self.nodes[0].sendrawtransaction(hexstring=signedtx["hex"], maxfeerate=0) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) # Make a long chain of unconfirmed payments without hitting mempool limit # Each tx we make leaves only one output of change 
on a chain 1 longer @@ -640,7 +622,7 @@ def run_test(self): assert not address_info["ischange"] # Test getaddressinfo 'ischange' field on change address. - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) destination = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(destination, 0.123) tx = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(txid)['hex']) diff --git a/test/functional/wallet_import_rescan.py b/test/functional/wallet_import_rescan.py index 1a47f63263189..3de36972b812e 100755 --- a/test/functional/wallet_import_rescan.py +++ b/test/functional/wallet_import_rescan.py @@ -169,7 +169,6 @@ def run_test(self): self.generate(self.nodes[0], 1) # Generate one block for each send variant.confirmation_height = self.nodes[0].getblockcount() variant.timestamp = self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"] - self.sync_all() # Conclude sync before calling setmocktime to avoid timeouts # Generate a block further in the future (past the rescan window). assert_equal(self.nodes[0].getrawmempool(), []) diff --git a/test/functional/wallet_importmulti.py b/test/functional/wallet_importmulti.py index 47b19ecfab995..606ccb1ffc6b5 100755 --- a/test/functional/wallet_importmulti.py +++ b/test/functional/wallet_importmulti.py @@ -61,8 +61,8 @@ def test_importmulti(self, req, success, error_code=None, error_message=None, wa def run_test(self): self.log.info("Mining blocks...") - self.generate(self.nodes[0], 1) - self.generate(self.nodes[1], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] node0_address1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) @@ -254,9 +254,9 @@ def run_test(self): # P2SH address multisig = get_multisig(self.nodes[0]) - self.generate(self.nodes[1], COINBASE_MATURITY) + self.generate(self.nodes[1], COINBASE_MATURITY, sync_fun=self.no_op) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) - self.generate(self.nodes[1], 1) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh") @@ -274,9 +274,9 @@ def run_test(self): # P2SH + Redeem script multisig = get_multisig(self.nodes[0]) - self.generate(self.nodes[1], COINBASE_MATURITY) + self.generate(self.nodes[1], COINBASE_MATURITY, sync_fun=self.no_op) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) - self.generate(self.nodes[1], 1) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh with respective redeem script") @@ -294,9 +294,9 @@ def run_test(self): # P2SH + Redeem script + Private Keys + !Watchonly multisig = get_multisig(self.nodes[0]) - self.generate(self.nodes[1], COINBASE_MATURITY) + self.generate(self.nodes[1], COINBASE_MATURITY, sync_fun=self.no_op) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) - self.generate(self.nodes[1], 1) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh with respective redeem script and private keys") @@ -319,9 +319,9 @@ def run_test(self): # P2SH + Redeem script + Private Keys + Watchonly multisig = get_multisig(self.nodes[0]) - self.generate(self.nodes[1], 
COINBASE_MATURITY) + self.generate(self.nodes[1], COINBASE_MATURITY, sync_fun=self.no_op) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) - self.generate(self.nodes[1], 1) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] self.log.info("Should import a p2sh with respective redeem script and private keys") diff --git a/test/functional/wallet_listsinceblock.py b/test/functional/wallet_listsinceblock.py index 71d738b2400c2..969d981a777aa 100755 --- a/test/functional/wallet_listsinceblock.py +++ b/test/functional/wallet_listsinceblock.py @@ -31,7 +31,6 @@ def run_test(self): # only one connection. (See fPreferredDownload in net_processing) self.connect_nodes(1, 2) self.generate(self.nodes[2], COINBASE_MATURITY + 1) - self.sync_all() self.test_no_blockhash() self.test_invalid_blockhash() @@ -46,7 +45,6 @@ def test_no_blockhash(self): txid = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1) blockhash, = self.generate(self.nodes[2], 1) blockheight = self.nodes[2].getblockheader(blockhash)['height'] - self.sync_all() txs = self.nodes[0].listtransactions() assert_array_result(txs, {"txid": txid}, { @@ -88,7 +86,6 @@ def test_targetconfirmations(self): self.log.info("Test target_confirmations") blockhash, = self.generate(self.nodes[2], 1) blockheight = self.nodes[2].getblockheader(blockhash)['height'] - self.sync_all() assert_equal( self.nodes[0].getblockhash(0), @@ -136,14 +133,11 @@ def test_reorg(self): senttx = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1) # generate on both sides - nodes1_last_blockhash = self.generate(self.nodes[1], 6)[-1] - nodes2_first_blockhash = self.generate(self.nodes[2], 7)[0] + nodes1_last_blockhash = self.generate(self.nodes[1], 6, sync_fun=lambda: self.sync_all(self.nodes[:2]))[-1] + nodes2_first_blockhash = self.generate(self.nodes[2], 7, sync_fun=lambda: self.sync_all(self.nodes[2:]))[0] self.log.debug("nodes[1] last blockhash = {}".format(nodes1_last_blockhash)) self.log.debug("nodes[2] first blockhash = {}".format(nodes2_first_blockhash)) - self.sync_all(self.nodes[:2]) - self.sync_all(self.nodes[2:]) - self.join_network() # listsinceblock(nodes1_last_blockhash) should now include tx as seen from nodes[0] @@ -192,7 +186,6 @@ def test_double_spend(self): address = key_to_p2pkh(eckey.get_pubkey().get_bytes()) self.nodes[2].sendtoaddress(address, 10) self.generate(self.nodes[2], 6) - self.sync_all() self.nodes[2].importprivkey(privkey) utxos = self.nodes[2].listunspent() utxo = [u for u in utxos if u["address"] == address][0] @@ -225,8 +218,8 @@ def test_double_spend(self): self.nodes[2].createrawtransaction(utxo_dicts, recipient_dict2))['hex']) # generate on both sides - lastblockhash = self.generate(self.nodes[1], 3)[2] - self.generate(self.nodes[2], 4) + lastblockhash = self.generate(self.nodes[1], 3, sync_fun=self.no_op)[2] + self.generate(self.nodes[2], 4, sync_fun=self.no_op) self.join_network() @@ -297,7 +290,7 @@ def test_double_send(self): txid1 = self.nodes[1].sendrawtransaction(signedtx) # generate bb1-bb2 on right side - self.generate(self.nodes[2], 2) + self.generate(self.nodes[2], 2, sync_fun=self.no_op) # send from nodes[2]; this will end up in bb3 txid2 = self.nodes[2].sendrawtransaction(signedtx) @@ -305,8 +298,8 @@ def test_double_send(self): assert_equal(txid1, txid2) # generate on both sides - lastblockhash = self.generate(self.nodes[1], 3)[2] - self.generate(self.nodes[2], 2) + lastblockhash = self.generate(self.nodes[1], 3, 
sync_fun=self.no_op)[2] + self.generate(self.nodes[2], 2, sync_fun=self.no_op) self.join_network() @@ -360,7 +353,7 @@ def double_spends_filtered(self): double_signedtx = spending_node.signrawtransactionwithwallet(double_rawtx) dbl_tx_id = double_spending_node.sendrawtransaction(double_signedtx["hex"]) double_tx = double_spending_node.getrawtransaction(dbl_tx_id, 1) - lastblockhash = self.generate(double_spending_node, 1)[0] + lastblockhash = self.generate(double_spending_node, 1, sync_fun=self.no_op)[0] self.reconnect_isolated_node(3, 2) self.sync_all() @@ -379,7 +372,7 @@ def double_spends_filtered(self): assert_equal(original_found, True) assert_equal(double_found, True) - lastblockhash = self.generate(spending_node, 1)[0] + lastblockhash = self.generate(spending_node, 1, sync_fun=self.no_op)[0] # check that neither transaction exists block_hash = spending_node.listsinceblock(lastblockhash) diff --git a/test/functional/wallet_listtransactions.py b/test/functional/wallet_listtransactions.py index a8a0a70170b48..f486e9bf7956b 100755 --- a/test/functional/wallet_listtransactions.py +++ b/test/functional/wallet_listtransactions.py @@ -34,7 +34,6 @@ def run_test(self): self.log.info("Test confirmations change after mining a block") blockhash = self.generate(self.nodes[0], 1)[0] blockheight = self.nodes[0].getblockheader(blockhash)['height'] - self.sync_all() assert_array_result(self.nodes[0].listtransactions(), {"txid": txid}, {"category": "send", "amount": Decimal("-0.1"), "confirmations": 1, "blockhash": blockhash, "blockheight": blockheight}) diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py index 42ce63d36397e..87eeba6bf9ef3 100755 --- a/test/functional/wallet_multiwallet.py +++ b/test/functional/wallet_multiwallet.py @@ -183,7 +183,7 @@ def wallet_file(name): self.nodes[0].createwallet("w5") assert_equal(set(node.listwallets()), {"w4", "w5"}) w5 = wallet("w5") - self.generatetoaddress(node, nblocks=1, address=w5.getnewaddress()) + self.generatetoaddress(node, nblocks=1, address=w5.getnewaddress(), sync_fun=self.no_op) # now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded os.rename(wallet_dir2, wallet_dir()) @@ -215,7 +215,7 @@ def wallet_file(name): wallet_bad = wallet("bad") # check wallet names and balances - self.generatetoaddress(node, nblocks=1, address=wallets[0].getnewaddress()) + self.generatetoaddress(node, nblocks=1, address=wallets[0].getnewaddress(), sync_fun=self.no_op) for wallet_name, wallet in zip(wallet_names, wallets): info = wallet.getwalletinfo() assert_equal(info['immature_balance'], 500 if wallet is wallets[0] else 0) @@ -228,7 +228,7 @@ def wallet_file(name): assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo) w1, w2, w3, w4, *_ = wallets - self.generatetoaddress(node, nblocks=COINBASE_MATURITY + 1, address=w1.getnewaddress()) + self.generatetoaddress(node, nblocks=COINBASE_MATURITY + 1, address=w1.getnewaddress(), sync_fun=self.no_op) assert_equal(w1.getbalance(), 1000) assert_equal(w2.getbalance(), 0) assert_equal(w3.getbalance(), 0) @@ -237,7 +237,7 @@ def wallet_file(name): w1.sendtoaddress(w2.getnewaddress(), 1) w1.sendtoaddress(w3.getnewaddress(), 2) w1.sendtoaddress(w4.getnewaddress(), 3) - self.generatetoaddress(node, nblocks=1, address=w1.getnewaddress()) + self.generatetoaddress(node, nblocks=1, address=w1.getnewaddress(), sync_fun=self.no_op) assert_equal(w2.getbalance(), 1) assert_equal(w3.getbalance(), 2) 
assert_equal(w4.getbalance(), 3) diff --git a/test/functional/wallet_reorgsrestore.py b/test/functional/wallet_reorgsrestore.py index 1f452f8337883..9a86ede5f94f2 100755 --- a/test/functional/wallet_reorgsrestore.py +++ b/test/functional/wallet_reorgsrestore.py @@ -43,7 +43,7 @@ def run_test(self): # Send a tx to be unconfirmed later txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) tx = self.nodes[0].gettransaction(txid) - self.generate(self.nodes[0], 4) + self.generate(self.nodes[0], 4, sync_fun=self.no_op) tx_before_reorg = self.nodes[0].gettransaction(txid) assert_equal(tx_before_reorg["confirmations"], 4) @@ -62,9 +62,9 @@ def run_test(self): conflicting = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs_2)) conflicted_txid = self.nodes[0].sendrawtransaction(conflicted["hex"]) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) conflicting_txid = self.nodes[2].sendrawtransaction(conflicting["hex"]) - self.generate(self.nodes[2], 9) + self.generate(self.nodes[2], 9, sync_fun=self.no_op) # Reconnect node0 and node2 and check that conflicted_txid is effectively conflicted self.connect_nodes(0, 2) @@ -78,11 +78,11 @@ def run_test(self): self.restart_node(0) # The block chain re-orgs and the tx is included in a different block - self.generate(self.nodes[1], 9) + self.generate(self.nodes[1], 9, sync_fun=self.no_op) self.nodes[1].sendrawtransaction(tx["hex"]) - self.generate(self.nodes[1], 1) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) self.nodes[1].sendrawtransaction(conflicted["hex"]) - self.generate(self.nodes[1], 1) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) # Node0 wallet file is loaded on longest sync'ed node1 self.stop_node(1) diff --git a/test/functional/wallet_send.py b/test/functional/wallet_send.py index 1fcd9ba725471..033d38c1914d9 100755 --- a/test/functional/wallet_send.py +++ b/test/functional/wallet_send.py @@ -439,7 +439,6 @@ def run_test(self): res = self.nodes[0].sendrawtransaction(hex) self.generate(self.nodes[0], 1) assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 1) - self.sync_all() self.log.info("Lock unspents...") utxo1 = w0.listunspent()[0] diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py index 36dd258507283..283860ab027ab 100755 --- a/test/functional/wallet_txn_clone.py +++ b/test/functional/wallet_txn_clone.py @@ -77,8 +77,7 @@ def run_test(self): # Have node0 mine a block, if requested: if (self.options.mine_block): - self.generate(self.nodes[0], 1) - self.sync_blocks(self.nodes[0:2]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(self.nodes[0:2])) tx1 = self.nodes[0].gettransaction(txid1) tx2 = self.nodes[0].gettransaction(txid2) @@ -103,7 +102,7 @@ def run_test(self): self.nodes[2].sendrawtransaction(node0_tx1["hex"]) txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"]) # ... mine a block... 
- self.generate(self.nodes[2], 1) + self.generate(self.nodes[2], 1, sync_fun=self.no_op) # Reconnect the split network, and sync chain: self.connect_nodes(1, 2) diff --git a/test/functional/wallet_txn_doublespend.py b/test/functional/wallet_txn_doublespend.py index 107127473a12a..08b29c70fa089 100755 --- a/test/functional/wallet_txn_doublespend.py +++ b/test/functional/wallet_txn_doublespend.py @@ -82,8 +82,7 @@ def run_test(self): # Have node0 mine a block: if (self.options.mine_block): - self.generate(self.nodes[0], 1) - self.sync_blocks(self.nodes[0:2]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(self.nodes[0:2])) tx1 = self.nodes[0].gettransaction(txid1) tx2 = self.nodes[0].gettransaction(txid2) @@ -111,7 +110,7 @@ def run_test(self): self.nodes[2].sendrawtransaction(fund_bar_tx["hex"]) doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"]) # ... mine a block... - self.generate(self.nodes[2], 1) + self.generate(self.nodes[2], 1, sync_fun=self.no_op) # Reconnect the split network, and sync chain: self.connect_nodes(1, 2) diff --git a/test/functional/wallet_upgradetohd.py b/test/functional/wallet_upgradetohd.py index b6dedce0f817e..0fc1c7e4d12b6 100755 --- a/test/functional/wallet_upgradetohd.py +++ b/test/functional/wallet_upgradetohd.py @@ -65,7 +65,7 @@ def run_test(self): assert_equal(keypath, "m/44'/1'/0'/1/%d" % i) self.bump_mocktime(1) - self.generate(node, 1) + self.generate(node, 1, sync_fun=self.no_op) self.log.info("Should no longer be able to start it with HD disabled") self.stop_node(0) diff --git a/test/functional/wallet_upgradewallet.py b/test/functional/wallet_upgradewallet.py index 9f40375c91843..a662fbd387697 100755 --- a/test/functional/wallet_upgradewallet.py +++ b/test/functional/wallet_upgradewallet.py @@ -96,8 +96,7 @@ def test_upgradewallet_error(self, wallet, previous_version, requested_version, assert_equal(wallet.getwalletinfo()["walletversion"], previous_version) def run_test(self): - self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, self.nodes[0].getnewaddress()) - self.dumb_sync_blocks() + self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, self.nodes[0].getnewaddress(), sync_fun=lambda: self.dumb_sync_blocks()) # # Sanity check the test framework: res = self.nodes[0].getblockchaininfo() assert_equal(res['blocks'], COINBASE_MATURITY + 1) @@ -108,8 +107,7 @@ def run_test(self): # Send coins to old wallets for later conversion checks. 
v18_2_wallet = v18_2_node.get_wallet_rpc(self.default_wallet_name) v18_2_address = v18_2_wallet.getnewaddress() - self.generatetoaddress(node_master, COINBASE_MATURITY + 1, v18_2_address) - self.dumb_sync_blocks() + self.generatetoaddress(node_master, COINBASE_MATURITY + 1, v18_2_address, sync_fun=lambda: self.dumb_sync_blocks()) v18_2_balance = v18_2_wallet.getbalance() self.log.info("Test upgradewallet RPC...") From 82da45a8bf8c07fb76a799202fecf0df2546dbf5 Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Tue, 1 Oct 2024 16:08:19 +0000 Subject: [PATCH 07/11] test: move differing sync logic into `sync_fun` lambda in Dash tests Co-authored-by: UdjinM6 --- test/functional/feature_asset_locks.py | 3 +-- test/functional/feature_dip3_v19.py | 6 ++---- test/functional/feature_governance_cl.py | 18 ++++++------------ test/functional/feature_llmq_chainlocks.py | 3 +-- test/functional/feature_llmq_evo.py | 6 ++---- .../functional/feature_llmq_is_cl_conflicts.py | 3 +-- test/functional/feature_llmq_rotation.py | 6 ++---- test/functional/feature_llmq_simplepose.py | 6 ++---- test/functional/feature_mnehf.py | 6 ++---- test/functional/interface_zmq_dash.py | 6 ++---- test/functional/p2p_quorum_data.py | 3 +-- test/functional/p2p_sendheaders_compressed.py | 8 ++++---- test/functional/rpc_verifyislock.py | 3 +-- 13 files changed, 27 insertions(+), 50 deletions(-) diff --git a/test/functional/feature_asset_locks.py b/test/functional/feature_asset_locks.py index 3a19b5f229be1..8070b151dc2af 100755 --- a/test/functional/feature_asset_locks.py +++ b/test/functional/feature_asset_locks.py @@ -256,8 +256,7 @@ def run_test(self): for _ in range(2): self.dynamically_add_masternode(evo=True) - self.generate(node, 8) - self.sync_blocks() + self.generate(node, 8, sync_fun=lambda: self.sync_blocks()) self.set_sporks() self.generate(node, 1) diff --git a/test/functional/feature_dip3_v19.py b/test/functional/feature_dip3_v19.py index 0bdfad5b211c9..56d663013b801 100755 --- a/test/functional/feature_dip3_v19.py +++ b/test/functional/feature_dip3_v19.py @@ -86,8 +86,7 @@ def run_test(self): evo_info_0 = self.dynamically_add_masternode(evo=True, rnd=7) assert evo_info_0 is not None - self.generate(self.nodes[0], 8) - self.sync_blocks() + self.generate(self.nodes[0], 8, sync_fun=lambda: self.sync_blocks()) self.log.info("Checking that protxs with duplicate EvoNodes fields are rejected") evo_info_1 = self.dynamically_add_masternode(evo=True, rnd=7, should_be_rejected=True) @@ -97,8 +96,7 @@ def run_test(self): assert evo_info_2 is None evo_info_3 = self.dynamically_add_masternode(evo=True, rnd=9) assert evo_info_3 is not None - self.generate(self.nodes[0], 8) - self.sync_blocks() + self.generate(self.nodes[0], 8, sync_fun=lambda: self.sync_blocks()) self.dynamically_evo_update_service(evo_info_0, 9, should_be_rejected=True) revoke_protx = self.mninfo[-1].proTxHash diff --git a/test/functional/feature_governance_cl.py b/test/functional/feature_governance_cl.py index fdc9f01ce4bce..fb7bee08f68b1 100755 --- a/test/functional/feature_governance_cl.py +++ b/test/functional/feature_governance_cl.py @@ -69,8 +69,7 @@ def run_test(self): n = sb_cycle - self.nodes[0].getblockcount() % sb_cycle for _ in range(n): self.bump_mocktime(156) - self.generate(self.nodes[0], 1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) self.log.info("Prepare proposals") @@ -116,8 +115,7 @@ def run_test(self): assert n >= 0 for _ in range(n + 1): 
self.bump_mocktime(156) - self.generate(self.nodes[0], 1) - self.sync_blocks(self.nodes[0:5]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(self.nodes[0:5])) self.log.info("Wait for new trigger and votes on non-isolated nodes") sb_block_height = self.nodes[0].getblockcount() // sb_cycle * sb_cycle + sb_cycle @@ -130,29 +128,25 @@ def run_test(self): self.log.info("Move remaining n blocks until the next Superblock") for _ in range(n - 1): self.bump_mocktime(156) - self.generate(self.nodes[0], 1) - self.sync_blocks(self.nodes[0:5]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(self.nodes[0:5])) # Confirm all is good self.wait_until(lambda: have_trigger_for_height(self.nodes[0:5], sb_block_height), timeout=5) self.log.info("Mine superblock") self.bump_mocktime(156) - self.generate(self.nodes[0], 1) - self.sync_blocks(self.nodes[0:5]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(self.nodes[0:5])) self.wait_for_chainlocked_block(self.nodes[0], self.nodes[0].getbestblockhash()) self.log.info("Mine (superblock cycle + 1) blocks on non-isolated nodes to forget about this trigger") for _ in range(sb_cycle): self.bump_mocktime(156) - self.generate(self.nodes[0], 1) - self.sync_blocks(self.nodes[0:5]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(self.nodes[0:5])) # Should still have at least 1 trigger for the old sb cycle and 0 for the current one assert len(self.nodes[0].gobject("list", "valid", "triggers")) >= 1 assert not have_trigger_for_height(self.nodes[0:5], sb_block_height + sb_cycle) self.bump_mocktime(156) - self.generate(self.nodes[0], 1) - self.sync_blocks(self.nodes[0:5]) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(self.nodes[0:5])) # Trigger scheduler to mark old triggers for deletion self.bump_mocktime(5 * 60) # Let it do the job diff --git a/test/functional/feature_llmq_chainlocks.py b/test/functional/feature_llmq_chainlocks.py index fa90a7e2331ff..10bb909390c3e 100755 --- a/test/functional/feature_llmq_chainlocks.py +++ b/test/functional/feature_llmq_chainlocks.py @@ -238,8 +238,7 @@ def run_test(self): self.log.info("Test that new node can mine without Chainlock info") tip_0 = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2) - self.generate(self.nodes[added_idx], 1) - self.sync_blocks() + self.generate(self.nodes[added_idx], 1, sync_fun=lambda: self.sync_blocks()) tip_1 = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2) assert_equal(tip_1['cbTx']['bestCLSignature'], tip_0['cbTx']['bestCLSignature']) assert_equal(tip_1['cbTx']['bestCLHeightDiff'], tip_0['cbTx']['bestCLHeightDiff'] + 1) diff --git a/test/functional/feature_llmq_evo.py b/test/functional/feature_llmq_evo.py index 524aee231456e..b2b60bdb29bbd 100755 --- a/test/functional/feature_llmq_evo.py +++ b/test/functional/feature_llmq_evo.py @@ -89,8 +89,7 @@ def run_test(self): for i in range(self.evo_count): evo_info = self.dynamically_add_masternode(evo=True) evo_protxhash_list.append(evo_info.proTxHash) - self.generate(self.nodes[0], 8) - self.sync_blocks() + self.generate(self.nodes[0], 8, sync_fun=lambda: self.sync_blocks()) expectedUpdated.append(evo_info.proTxHash) b_i = self.nodes[0].getbestblockhash() @@ -116,8 +115,7 @@ def run_test(self): # Generate a few blocks to make EvoNode/MN analysis on a pure MN RewardReallocation window self.bump_mocktime(1) - self.generate(self.nodes[0], 4) - self.sync_blocks() + self.generate(self.nodes[0], 4, sync_fun=lambda: self.sync_blocks()) 
self.log.info("Test that EvoNodes are paid 1 block in a row after MN RewardReallocation activation") self.test_evo_payments(window_analysis=48, mnrr_active=True) diff --git a/test/functional/feature_llmq_is_cl_conflicts.py b/test/functional/feature_llmq_is_cl_conflicts.py index d81f15431a237..2369c59fa4aca 100755 --- a/test/functional/feature_llmq_is_cl_conflicts.py +++ b/test/functional/feature_llmq_is_cl_conflicts.py @@ -118,9 +118,8 @@ def test_chainlock_overrides_islock(self, test_block_conflict, mine_confllicting cl = self.create_chainlock(self.nodes[0].getblockcount() + 1, block) if mine_confllicting: - islock_tip = self.generate(self.nodes[0], 1)[-1] # Make sure we won't sent clsig too early - self.sync_blocks() + islock_tip = self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks())[-1] self.test_node.send_clsig(cl) diff --git a/test/functional/feature_llmq_rotation.py b/test/functional/feature_llmq_rotation.py index 709e85b46136e..8dd187f7aee76 100755 --- a/test/functional/feature_llmq_rotation.py +++ b/test/functional/feature_llmq_rotation.py @@ -144,15 +144,13 @@ def run_test(self): # At this point, we want to wait for CLs just before the self.mine_cycle_quorum to diversify the CLs in CbTx. # Although because here a new quorum cycle is starting, and we don't want to mine them now, mine 8 blocks (to skip all DKG phases) nodes = [self.nodes[0]] + [mn.node for mn in self.mninfo.copy()] - self.generate(self.nodes[0], 8) - self.sync_blocks(nodes) + self.generate(self.nodes[0], 8, sync_fun=lambda: self.sync_blocks(nodes)) self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) # And for the remaining blocks, enforce new CL in CbTx skip_count = 23 - (self.nodes[0].getblockcount() % 24) for _ in range(skip_count): - self.generate(self.nodes[0], 1) - self.sync_blocks(nodes) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(nodes)) self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) diff --git a/test/functional/feature_llmq_simplepose.py b/test/functional/feature_llmq_simplepose.py index 72b1b3eec7b67..b984b89bc4aa5 100755 --- a/test/functional/feature_llmq_simplepose.py +++ b/test/functional/feature_llmq_simplepose.py @@ -141,8 +141,7 @@ def mine_quorum_less_checks(self, expected_good_nodes, mninfos_online): self.log.info("Mining final commitment") self.bump_mocktime(1, nodes=nodes) self.nodes[0].getblocktemplate() # this calls CreateNewBlock - self.generate(self.nodes[0], 1) - self.sync_blocks(nodes) + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks(nodes)) self.log.info("Waiting for quorum to appear in the list") self.wait_for_quorum_list(q, nodes) @@ -153,8 +152,7 @@ def mine_quorum_less_checks(self, expected_good_nodes, mninfos_online): # Mine 8 (SIGN_HEIGHT_OFFSET) more blocks to make sure that the new quorum gets eligible for signing sessions self.bump_mocktime(8) - self.generate(self.nodes[0], 8) - self.sync_blocks(nodes) + self.generate(self.nodes[0], 8, sync_fun=lambda: self.sync_blocks(nodes)) self.log.info("New quorum: height=%d, quorumHash=%s, quorumIndex=%d, minedBlock=%s" % (quorum_info["height"], new_quorum, quorum_info["quorumIndex"], quorum_info["minedBlock"])) return new_quorum diff --git a/test/functional/feature_mnehf.py b/test/functional/feature_mnehf.py index d650dfd5f8568..3c77a69cafbb3 100755 --- a/test/functional/feature_mnehf.py +++ b/test/functional/feature_mnehf.py @@ -223,15 +223,13 @@ def run_test(self): self.log.info("Testing duplicate EHF signal with same bit") 
ehf_tx_duplicate = self.send_tx(self.create_mnehf(28, pubkey)) - tip_blockhash = self.generate(node, 1)[0] - self.sync_blocks() + tip_blockhash = self.generate(node, 1, sync_fun=lambda: self.sync_blocks())[0] block = node.getblock(tip_blockhash) assert ehf_tx_duplicate in node.getrawmempool() and ehf_tx_duplicate not in block['tx'] self.log.info("Testing EHF signal with same bit but with newer start time") self.bump_mocktime(int(60 * 60 * 24 * 14), update_schedulers=False) - self.generate(node, 1) - self.sync_blocks() + self.generate(node, 1, sync_fun=lambda: self.sync_blocks()) self.restart_all_nodes(params=[self.mocktime, self.mocktime + 1000000]) self.check_fork('defined') diff --git a/test/functional/interface_zmq_dash.py b/test/functional/interface_zmq_dash.py index 508590af3a87f..6483b12460490 100755 --- a/test/functional/interface_zmq_dash.py +++ b/test/functional/interface_zmq_dash.py @@ -170,8 +170,7 @@ def run_test(self): def generate_blocks(self, num_blocks): mninfos_online = self.mninfo.copy() nodes = [self.nodes[0]] + [mn.node for mn in mninfos_online] - self.generate(self.nodes[0], num_blocks) - self.sync_blocks(nodes) + self.generate(self.nodes[0], num_blocks, sync_fun=lambda: self.sync_blocks(nodes)) def subscribe(self, publishers): import zmq @@ -375,8 +374,7 @@ def test_governance_publishers(self): proposal_hex = ''.join(format(x, '02x') for x in json.dumps(proposal_data).encode()) collateral = self.nodes[0].gobject("prepare", "0", proposal_rev, proposal_time, proposal_hex) self.wait_for_instantlock(collateral, self.nodes[0]) - self.generate(self.nodes[0], 6) - self.sync_blocks() + self.generate(self.nodes[0], 6, sync_fun=lambda: self.sync_blocks()) rpc_proposal_hash = self.nodes[0].gobject("submit", "0", proposal_rev, proposal_time, proposal_hex, collateral) # Validate hashgovernanceobject zmq_governance_object_hash = self.subscribers[ZMQPublisher.hash_governance_object].receive().read(32).hex() diff --git a/test/functional/p2p_quorum_data.py b/test/functional/p2p_quorum_data.py index d72894d21750d..20f451eac76c2 100755 --- a/test/functional/p2p_quorum_data.py +++ b/test/functional/p2p_quorum_data.py @@ -135,8 +135,7 @@ def force_request_expire(bump_seconds=self.quorum_data_request_expiration_timeou self.bump_mocktime(bump_seconds) # Test with/without expired request cleanup if self.cleanup: - self.generate(node0, 1) - self.sync_blocks() + self.generate(node0, 1, sync_fun=lambda: self.sync_blocks()) def test_basics(): self.log.info("Testing basics of QGETDATA/QDATA") diff --git a/test/functional/p2p_sendheaders_compressed.py b/test/functional/p2p_sendheaders_compressed.py index dc5beb17027ee..c31c52aa806c8 100755 --- a/test/functional/p2p_sendheaders_compressed.py +++ b/test/functional/p2p_sendheaders_compressed.py @@ -149,8 +149,8 @@ def mine_reorg(self, length): to-be-reorged-out blocks are mined, so that we don't break later tests. 
return the list of block hashes newly mined.""" - self.generate(self.nodes[0], length) # make sure all invalidated blocks are node0's - self.sync_blocks(wait=0.1) + # make sure all invalidated blocks are node0's + self.generate(self.nodes[0], length, sync_fun=lambda: self.sync_blocks(wait=0.1)) for p2p in self.nodes[0].p2ps: p2p.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16)) p2p.clear_block_announcements() @@ -158,8 +158,8 @@ def mine_reorg(self, length): tip_height = self.nodes[1].getblockcount() hash_to_invalidate = self.nodes[1].getblockhash(tip_height - (length - 1)) self.nodes[1].invalidateblock(hash_to_invalidate) - all_hashes = self.generate(self.nodes[1], length + 1) # Must be longer than the orig chain - self.sync_blocks(wait=0.1) + # Must be longer than the orig chain + all_hashes = self.generate(self.nodes[1], length + 1, sync_fun=lambda: self.sync_blocks(wait=0.1)) return [int(hash_value, 16) for hash_value in all_hashes] def run_test(self): diff --git a/test/functional/rpc_verifyislock.py b/test/functional/rpc_verifyislock.py index 81800d4b6cc8c..b268f7ea070c8 100755 --- a/test/functional/rpc_verifyislock.py +++ b/test/functional/rpc_verifyislock.py @@ -44,8 +44,7 @@ def run_test(self): self.mine_cycle_quorum(llmq_type_name='llmq_test_dip0024', llmq_type=103) self.bump_mocktime(1) - self.generate(self.nodes[0], 8) - self.sync_blocks() + self.generate(self.nodes[0], 8, sync_fun=lambda: self.sync_blocks()) txid = node.sendtoaddress(node.getnewaddress(), 1) self.wait_for_instantlock(txid, node) From 1367115f7bf2f3862a79e023dc16417b9c25020f Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Tue, 1 Oct 2024 19:22:56 +0000 Subject: [PATCH 08/11] test: opt-out of post-`generate*` syncing in some Dash tests Co-authored-by: UdjinM6 --- test/functional/feature_addressindex.py | 14 ++++---- test/functional/feature_asset_locks.py | 4 +-- test/functional/feature_dip0020_activation.py | 8 ++--- .../feature_dip3_deterministicmns.py | 14 ++++---- test/functional/feature_dip3_v19.py | 2 +- .../feature_dip4_coinbasemerkleroots.py | 6 ++-- test/functional/feature_governance.py | 32 ++++++++--------- test/functional/feature_governance_cl.py | 4 +-- test/functional/feature_llmq_chainlocks.py | 34 +++++++++---------- test/functional/feature_llmq_connections.py | 2 +- test/functional/feature_llmq_data_recovery.py | 6 ++-- test/functional/feature_llmq_dkgerrors.py | 2 +- test/functional/feature_llmq_evo.py | 2 +- .../feature_llmq_is_cl_conflicts.py | 2 +- .../functional/feature_llmq_is_retroactive.py | 12 +++---- test/functional/feature_llmq_rotation.py | 6 ++-- test/functional/feature_llmq_simplepose.py | 2 +- .../feature_new_quorum_type_activation.py | 6 ++-- test/functional/feature_sporks.py | 2 +- test/functional/interface_zmq_dash.py | 6 ++-- test/functional/p2p_instantsend.py | 4 +-- test/functional/p2p_sendheaders_compressed.py | 4 +-- test/functional/rpc_masternode.py | 8 ++--- test/functional/rpc_verifychainlock.py | 8 ++--- test/functional/rpc_verifyislock.py | 2 +- .../test_framework/test_framework.py | 8 ++--- 26 files changed, 100 insertions(+), 100 deletions(-) diff --git a/test/functional/feature_addressindex.py b/test/functional/feature_addressindex.py index c64f2b61cfddf..0a281cbb80a61 100755 --- a/test/functional/feature_addressindex.py +++ b/test/functional/feature_addressindex.py @@ -72,19 +72,19 @@ def run_test(self): self.log.info("Testing p2pkh and p2sh address index...") txid0 =
self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 10) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) txidb0 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 10) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) txid1 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 15) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) txidb1 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 15) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) txid2 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 20) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) txidb2 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 20) self.generate(self.nodes[0], 1) @@ -233,10 +233,10 @@ def run_test(self): assert_equal(utxos2[0]["satoshis"], amount) # Check sorting of utxos - self.generate(self.nodes[2], 150) + self.generate(self.nodes[2], 150, sync_fun=self.no_op) self.nodes[2].sendtoaddress(address2, 50) - self.generate(self.nodes[2], 1) + self.generate(self.nodes[2], 1, sync_fun=self.no_op) self.nodes[2].sendtoaddress(address2, 50) self.generate(self.nodes[2], 1) self.sync_all() diff --git a/test/functional/feature_asset_locks.py b/test/functional/feature_asset_locks.py index 8070b151dc2af..89680fb254c31 100755 --- a/test/functional/feature_asset_locks.py +++ b/test/functional/feature_asset_locks.py @@ -294,7 +294,7 @@ def test_asset_locks(self, node_wallet, node, pubkey): assert_equal(rpc_tx["assetLockTx"]["creditOutputs"][0]["scriptPubKey"]["hex"], key_to_p2pkh_script(pubkey).hex()) assert_equal(rpc_tx["assetLockTx"]["creditOutputs"][1]["scriptPubKey"]["hex"], key_to_p2pkh_script(pubkey).hex()) self.validate_credit_pool_balance(0) - self.generate(node, 1) + self.generate(node, 1, sync_fun=self.no_op) assert_equal(self.get_credit_pool_balance(node=node), locked_1) self.log.info("Generate a number of blocks to ensure this is the longest chain for later in the test when we reconsiderblock") self.generate(node, 12) @@ -507,7 +507,7 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): self.check_mempool_result(tx=spend_withdrawal, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}}) spend_txid_in_block = self.send_tx(spend_withdrawal) - self.generate(node, 1) + self.generate(node, 1, sync_fun=self.no_op) block = node.getblock(node.getbestblockhash()) assert spend_txid_in_block in block['tx'] diff --git a/test/functional/feature_dip0020_activation.py b/test/functional/feature_dip0020_activation.py index 6f375f0649772..b091a30ddbaad 100755 --- a/test/functional/feature_dip0020_activation.py +++ b/test/functional/feature_dip0020_activation.py @@ -58,7 +58,7 @@ def run_test(self): # This tx should be completely valid, should be included in mempool and mined in the next block assert txid in set(node.getrawmempool()) - self.generate(node, 1) + self.generate(node, 1, sync_fun=self.no_op) assert txid not in set(node.getrawmempool()) # Create spending tx @@ -83,9 +83,9 @@ def run_test(self): helper_peer.send_blocks_and_test([test_block], node, success=False, reject_reason='block-validation-failed', expect_disconnect=True) self.log.info("Generate enough blocks to activate DIP0020 opcodes") - self.generate(node, 97) + self.generate(node, 97, sync_fun=self.no_op) assert not softfork_active(node, 
'dip0020') - self.generate(node, 1) + self.generate(node, 1, sync_fun=self.no_op) assert softfork_active(node, 'dip0020') # flush state to disk before potential crashes below @@ -103,7 +103,7 @@ def run_test(self): # txes spending new opcodes still won't be accepted into mempool if we roll back to the previous tip node.invalidateblock(node.getbestblockhash()) assert tx0id not in set(node.getrawmempool()) - self.generate(node, 1) + self.generate(node, 1, sync_fun=self.no_op) self.log.info("Transactions spending coins with new opcodes are accepted one block after DIP0020 activation block") node.sendrawtransaction(tx0_hex) diff --git a/test/functional/feature_dip3_deterministicmns.py b/test/functional/feature_dip3_deterministicmns.py index b5e24d6b16063..4a88f8170bad0 100755 --- a/test/functional/feature_dip3_deterministicmns.py +++ b/test/functional/feature_dip3_deterministicmns.py @@ -49,7 +49,7 @@ def start_controller_node(self): def run_test(self): self.log.info("funding controller node") while self.nodes[0].getbalance() < (self.num_initial_mn + 3) * 1000: - self.generate(self.nodes[0], 10) # generate enough for collaterals + self.generate(self.nodes[0], 10, sync_fun=self.no_op) # generate enough for collaterals self.log.info("controller node has {} dash".format(self.nodes[0].getbalance())) # Make sure we're below block 135 (which activates dip3) @@ -65,11 +65,11 @@ def run_test(self): mns.append(before_dip3_mn) # block 150 starts enforcing DIP3 MN payments - self.generate(self.nodes[0], 150 - self.nodes[0].getblockcount()) + self.generate(self.nodes[0], 150 - self.nodes[0].getblockcount(), sync_fun=self.no_op) assert self.nodes[0].getblockcount() == 150 self.log.info("mining final block for DIP3 activation") - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) # We have hundreds of blocks to sync here, give it more time self.log.info("syncing blocks for all nodes") @@ -101,7 +101,7 @@ def run_test(self): self.log.info("register %s" % mn.alias) self.register_mn(self.nodes[0], mn) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) if not start: self.start_mn(mn) @@ -245,7 +245,7 @@ def create_mn_collateral(self, node, mn): mn.collateral_address = node.getnewaddress() mn.collateral_txid = node.sendtoaddress(mn.collateral_address, 1000) mn.collateral_vout = None - self.generate(node, 1) + self.generate(node, 1, sync_fun=self.no_op) rawtx = node.getrawtransaction(mn.collateral_txid, 1) for txout in rawtx['vout']: @@ -277,7 +277,7 @@ def register_mn(self, node, mn): mn.rewards_address = node.getnewaddress() mn.protx_hash = node.protx('register', mn.collateral_txid, mn.collateral_vout, '127.0.0.1:%d' % mn.p2p_port, mn.ownerAddr, mn.operatorAddr, mn.votingAddr, mn.operator_reward, mn.rewards_address, mn.fundsAddr) - self.generate(node, 1) + self.generate(node, 1, sync_fun=self.no_op) def start_mn(self, mn): if len(self.nodes) <= mn.idx: @@ -313,7 +313,7 @@ def test_protx_update_service(self, mn): # undo self.nodes[0].protx('update_service', mn.protx_hash, '127.0.0.1:%d' % mn.p2p_port, mn.blsMnkey, "", mn.fundsAddr) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) def assert_mnlists(self, mns): for node in self.nodes: diff --git a/test/functional/feature_dip3_v19.py b/test/functional/feature_dip3_v19.py index 56d663013b801..7da604aedc4c2 100755 --- a/test/functional/feature_dip3_v19.py +++ b/test/functional/feature_dip3_v19.py @@ -130,7 +130,7 @@ def test_revoke_protx(self, node_idx, 
revoke_protx, revoke_keyoperator): protx_result = self.nodes[0].protx('revoke', revoke_protx, revoke_keyoperator, 1, funds_address) self.wait_for_instantlock(protx_result, self.nodes[0]) - tip = self.generate(self.nodes[0], 1)[0] + tip = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1) # Revoking a MN results in disconnects. Wait for disconnects to actually happen # and then reconnect the corresponding node back to let sync_blocks finish correctly. diff --git a/test/functional/feature_dip4_coinbasemerkleroots.py b/test/functional/feature_dip4_coinbasemerkleroots.py index 28d01b445ca7b..844e26ea61192 100755 --- a/test/functional/feature_dip4_coinbasemerkleroots.py +++ b/test/functional/feature_dip4_coinbasemerkleroots.py @@ -91,7 +91,7 @@ def run_test(self): ############################# # Now start testing quorum commitment merkle roots - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) oldhash = self.nodes[0].getbestblockhash() # Test DIP8 activation once with a pre-existing quorum and once without (we don't know in which order it will activate on mainnet) @@ -248,7 +248,7 @@ def activate_dip8(self, slow_mode=False): self.log.info("Wait for dip0008 activation") while self.nodes[0].getblockcount() < DIP0008_HEIGHT: self.bump_mocktime(10) - self.generate(self.nodes[0], 10) + self.generate(self.nodes[0], 10, sync_fun=self.no_op) if slow_mode: self.sync_blocks() self.sync_blocks() @@ -301,7 +301,7 @@ def confirm_mns(self): break if not found_unconfirmed: break - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.sync_blocks() if __name__ == '__main__': diff --git a/test/functional/feature_governance.py b/test/functional/feature_governance.py index 9d4497e73d5c9..c53c6210d905e 100755 --- a/test/functional/feature_governance.py +++ b/test/functional/feature_governance.py @@ -89,7 +89,7 @@ def run_test(self): assert_equal(len(self.nodes[0].gobject("list-prepared")), 0) self.log.info("Check 1st superblock before v20") - self.generate(self.nodes[0], 3) + self.generate(self.nodes[0], 3, sync_fun=self.no_op) self.bump_mocktime(3) self.sync_blocks() assert_equal(self.nodes[0].getblockcount(), 130) @@ -97,7 +97,7 @@ def run_test(self): self.check_superblockbudget(False) self.log.info("Check 2nd superblock before v20") - self.generate(self.nodes[0], 10) + self.generate(self.nodes[0], 10, sync_fun=self.no_op) self.bump_mocktime(10) self.sync_blocks() assert_equal(self.nodes[0].getblockcount(), 140) @@ -117,7 +117,7 @@ def run_test(self): p1_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_1", self.p1_amount, self.p1_payout_address) p2_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_2", self.p2_amount, self.p2_payout_address) - self.generate(self.nodes[0], 6) + self.generate(self.nodes[0], 6, sync_fun=self.no_op) self.bump_mocktime(6) self.sync_blocks() @@ -165,7 +165,7 @@ def run_test(self): self.log.info("v20 is expected to be activate since block 160") assert block_count + n < 160 for _ in range(n - 1): - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) self.sync_blocks() self.check_superblockbudget(False) @@ -193,8 +193,8 @@ def run_test(self): isolated = self.nodes[payee_idx] self.log.info("Move 1 block inside the Superblock maturity window on the isolated 
node") - self.generate(isolated, 1) self.bump_mocktime(1) + self.generate(isolated, 1, sync_fun=self.no_op) self.log.info("The isolated 'winner' should submit new trigger and vote for it") self.wait_until(lambda: len(isolated.gobject("list", "valid", "triggers")) == 1, timeout=5) isolated_trigger_hash = list(isolated.gobject("list", "valid", "triggers").keys())[0] @@ -203,8 +203,8 @@ def run_test(self): assert_equal(more_votes, False) self.log.info("Move 1 block enabling the Superblock maturity window on non-isolated nodes") - self.generate(self.nodes[0], 1) self.bump_mocktime(1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) assert_equal(self.nodes[0].getblockcount(), 150) assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["active"], False) self.check_superblockbudget(False) @@ -214,8 +214,8 @@ def run_test(self): assert_equal(has_trigger, False) self.log.info("Move 1 block inside the Superblock maturity window on non-isolated nodes") - self.generate(self.nodes[0], 1) self.bump_mocktime(1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.log.info("There is now new 'winner' who should submit new trigger and vote for it") self.wait_until(lambda: len(self.nodes[0].gobject("list", "valid", "triggers")) == 1, timeout=5) @@ -232,8 +232,8 @@ def run_test(self): assert(amount_str in payment_amounts_expected) self.log.info("Move another block inside the Superblock maturity window on non-isolated nodes") - self.generate(self.nodes[0], 1) self.bump_mocktime(1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.log.info("Every non-isolated MN should vote for the same trigger now, no new triggers should be created") self.wait_until(lambda: list(self.nodes[0].gobject("list", "valid", "triggers").values())[0]['YesCount'] == self.mn_count - 1, timeout=5) @@ -268,7 +268,7 @@ def sync_gov(node): assert_equal(more_triggers, False) self.log.info("Move another block inside the Superblock maturity window") - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) self.sync_blocks() @@ -284,7 +284,7 @@ def sync_gov(node): self.log.info("Move another block inside the Superblock maturity window") with self.nodes[1].assert_debug_log(["CGovernanceManager::VoteGovernanceTriggers"]): - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) self.sync_blocks() @@ -298,7 +298,7 @@ def sync_gov(node): self.log.info("Move remaining n blocks until actual Superblock") for i in range(n): - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) self.sync_blocks() # comparing to 159 because bip9 forks are active when the tip is one block behind the activation height @@ -310,7 +310,7 @@ def sync_gov(node): self.log.info("Move a few block past the recent superblock height and make sure we have no new votes") for _ in range(5): with self.nodes[1].assert_debug_log("", [f"Voting NO-FUNDING for trigger:{winning_trigger_hash} success"]): - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) self.sync_blocks() # Votes on both triggers should NOT change @@ -322,13 +322,13 @@ def sync_gov(node): self.log.info("Move remaining n blocks until the next Superblock") for _ in range(n - 1): - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) self.sync_blocks() self.log.info("Wait for new trigger and votes") 
self.wait_until(lambda: have_trigger_for_height(self.nodes, 180)) self.log.info("Mine superblock") - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) self.sync_blocks() assert_equal(self.nodes[0].getblockcount(), 180) @@ -337,14 +337,14 @@ def sync_gov(node): self.log.info("Mine and check a couple more superblocks") for i in range(2): for _ in range(sb_cycle - 1): - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) self.sync_blocks() # Wait for new trigger and votes sb_block_height = 180 + (i + 1) * sb_cycle self.wait_until(lambda: have_trigger_for_height(self.nodes, sb_block_height)) # Mine superblock - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) self.sync_blocks() assert_equal(self.nodes[0].getblockcount(), sb_block_height) diff --git a/test/functional/feature_governance_cl.py b/test/functional/feature_governance_cl.py index fb7bee08f68b1..bf5e6530bd769 100755 --- a/test/functional/feature_governance_cl.py +++ b/test/functional/feature_governance_cl.py @@ -83,7 +83,7 @@ def run_test(self): p1_collateral_prepare = self.prepare_object(1, uint256_to_string(0), proposal_time, 1, "Proposal_1", self.p1_amount, self.p1_payout_address) self.bump_mocktime(60 * 10 + 1) - self.generate(self.nodes[0], 6) + self.generate(self.nodes[0], 6, sync_fun=self.no_op) self.bump_mocktime(6 * 156) self.sync_blocks() @@ -158,7 +158,7 @@ def run_test(self): self.log.info("Reconnect isolated node and confirm the next ChainLock will let it sync") self.reconnect_isolated_node(5, 0) assert_equal(self.nodes[5].mnsync("status")["IsSynced"], False) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) # NOTE: bumping mocktime too much after recent reconnect can result in "timeout downloading block" self.bump_mocktime(1) self.sync_blocks() diff --git a/test/functional/feature_llmq_chainlocks.py b/test/functional/feature_llmq_chainlocks.py index 10bb909390c3e..03807ddd2ba30 100755 --- a/test/functional/feature_llmq_chainlocks.py +++ b/test/functional/feature_llmq_chainlocks.py @@ -38,7 +38,7 @@ def run_test(self): self.test_coinbase_best_cl(self.nodes[0], expected_cl_in_cb=False) # v20 is active, no quorums, no CLs - null CL in CbTx - nocl_block_hash = self.generate(self.nodes[0], 1)[0] + nocl_block_hash = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] self.test_coinbase_best_cl(self.nodes[0], expected_cl_in_cb=True, expected_null_cl=True) cbtx = self.nodes[0].getspecialtxes(nocl_block_hash, 5, 1, 0, 2)[0] assert_equal(cbtx["instantlock"], False) @@ -59,7 +59,7 @@ def run_test(self): self.log.info("Mine single block, wait for chainlock") - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) self.test_coinbase_best_cl(self.nodes[0]) @@ -70,7 +70,7 @@ def run_test(self): assert_equal(cbtx["chainlock"], True) self.log.info("Mine many blocks, wait for chainlock") - self.generate(self.nodes[0], 20) + self.generate(self.nodes[0], 20, sync_fun=self.no_op) # We need more time here due to 20 blocks being generated at once self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash(), timeout=30) self.test_coinbase_best_cl(self.nodes[0]) @@ -90,7 +90,7 @@ def run_test(self): self.log.info("Generate new blocks and verify that they are not chainlocked") previous_block_hash = 
self.nodes[0].getbestblockhash() for _ in range(2): - block_hash = self.generate(self.nodes[0], 1)[0] + block_hash = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] self.wait_for_chainlocked_block_all_nodes(block_hash, expected=False) assert self.nodes[0].getblock(previous_block_hash)["chainlock"] @@ -101,18 +101,18 @@ def run_test(self): self.isolate_node(0) node0_mining_addr = self.nodes[0].getnewaddress() node0_tip = self.nodes[0].getbestblockhash() - self.generatetoaddress(self.nodes[1], 5, node0_mining_addr) + self.generatetoaddress(self.nodes[1], 5, node0_mining_addr, sync_fun=self.no_op) self.wait_for_chainlocked_block(self.nodes[1], self.nodes[1].getbestblockhash()) self.test_coinbase_best_cl(self.nodes[0]) assert self.nodes[0].getbestblockhash() == node0_tip self.reconnect_isolated_node(0, 1) - self.generatetoaddress(self.nodes[1], 1, node0_mining_addr) + self.generatetoaddress(self.nodes[1], 1, node0_mining_addr, sync_fun=self.no_op) self.wait_for_chainlocked_block_all_nodes(self.nodes[1].getbestblockhash()) self.test_coinbase_best_cl(self.nodes[0]) self.log.info("Isolate node, mine on another, reconnect and submit CL via RPC") self.isolate_node(0) - self.generate(self.nodes[1], 1) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) self.wait_for_chainlocked_block(self.nodes[1], self.nodes[1].getbestblockhash()) best_0 = self.nodes[0].getbestchainlock() best_1 = self.nodes[1].getbestchainlock() @@ -134,13 +134,13 @@ def run_test(self): self.log.info("Isolate node, mine on both parts of the network, and reconnect") self.isolate_node(0) - bad_tip = self.generate(self.nodes[0], 5)[-1] - self.generatetoaddress(self.nodes[1], 1, node0_mining_addr) + bad_tip = self.generate(self.nodes[0], 5, sync_fun=self.no_op)[-1] + self.generatetoaddress(self.nodes[1], 1, node0_mining_addr, sync_fun=self.no_op) good_tip = self.nodes[1].getbestblockhash() self.wait_for_chainlocked_block(self.nodes[1], good_tip) assert not self.nodes[0].getblock(self.nodes[0].getbestblockhash())["chainlock"] self.reconnect_isolated_node(0, 1) - self.generatetoaddress(self.nodes[1], 1, node0_mining_addr) + self.generatetoaddress(self.nodes[1], 1, node0_mining_addr, sync_fun=self.no_op) self.wait_for_chainlocked_block_all_nodes(self.nodes[1].getbestblockhash()) self.test_coinbase_best_cl(self.nodes[0]) assert self.nodes[0].getblock(self.nodes[0].getbestblockhash())["previousblockhash"] == good_tip @@ -163,10 +163,10 @@ def run_test(self): assert self.nodes[0].getbestblockhash() == good_tip self.nodes[0].invalidateblock(good_tip) self.log.info("Now try to reorg the chain") - self.generate(self.nodes[0], 2) + self.generate(self.nodes[0], 2, sync_fun=self.no_op) time.sleep(6) assert self.nodes[1].getbestblockhash() == good_tip - bad_tip = self.generate(self.nodes[0], 2)[-1] + bad_tip = self.generate(self.nodes[0], 2, sync_fun=self.no_op)[-1] time.sleep(6) assert self.nodes[0].getbestblockhash() == bad_tip assert self.nodes[1].getbestblockhash() == good_tip @@ -175,7 +175,7 @@ def run_test(self): self.nodes[0].reconsiderblock(good_tip) assert self.nodes[0].getbestblockhash() != good_tip good_fork = good_tip - good_tip = self.generatetoaddress(self.nodes[1], 1, node0_mining_addr)[-1] # this should mark bad_tip as conflicting + good_tip = self.generatetoaddress(self.nodes[1], 1, node0_mining_addr, sync_fun=self.no_op)[-1] # this should mark bad_tip as conflicting self.wait_for_chainlocked_block_all_nodes(good_tip) self.test_coinbase_best_cl(self.nodes[0]) assert self.nodes[0].getbestblockhash() == good_tip @@ 
-203,7 +203,7 @@ def run_test(self): txs.append(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)) txs += self.create_chained_txs(self.nodes[0], 1) self.log.info("Assert that after block generation these TXs are NOT included (as they are \"unsafe\")") - node0_tip = self.generate(self.nodes[0], 1)[-1] + node0_tip = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[-1] for txid in txs: tx = self.nodes[0].getrawtransaction(txid, 1) assert "confirmations" not in tx @@ -214,7 +214,7 @@ def run_test(self): self.log.info("Disable LLMQ based InstantSend for a very short time (this never gets propagated to other nodes)") self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 4070908800) self.log.info("Now the TXs should be included") - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.nodes[0].sporkupdate("SPORK_2_INSTANTSEND_ENABLED", 0) self.log.info("Assert that TXs got included now") for txid in txs: @@ -296,12 +296,12 @@ def test_coinbase_best_cl(self, node, expected_cl_in_cb=True, expected_null_cl=F def test_bestCLHeightDiff(self, mn_rr_active): # We need 2 blocks we can grab clsigs from for _ in range(2): - self.wait_for_chainlocked_block_all_nodes(self.generate(self.nodes[0], 1)[0]) + self.wait_for_chainlocked_block_all_nodes(self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0]) assert_equal(softfork_active(self.nodes[1], "mn_rr"), mn_rr_active) tip1_hash = self.nodes[1].getbestblockhash() self.isolate_node(1) - tip0_hash = self.generate(self.nodes[0], 1)[0] + tip0_hash = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] block_hex = self.nodes[0].getblock(tip0_hash, 0) mal_block = CBlock() mal_block.deserialize(BytesIO(bytes.fromhex(block_hex))) diff --git a/test/functional/feature_llmq_connections.py b/test/functional/feature_llmq_connections.py index 10aab3789dcf4..29a335378a62f 100755 --- a/test/functional/feature_llmq_connections.py +++ b/test/functional/feature_llmq_connections.py @@ -44,7 +44,7 @@ def run_test(self): self.wait_for_sporks_same() self.log.info("mining one block and waiting for all members to connect to each other") - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) for mn in self.get_quorum_masternodes(q): self.wait_for_mnauth(mn.node, 4) diff --git a/test/functional/feature_llmq_data_recovery.py b/test/functional/feature_llmq_data_recovery.py index e7f2bed482fd7..4dd6cf7c8313e 100755 --- a/test/functional/feature_llmq_data_recovery.py +++ b/test/functional/feature_llmq_data_recovery.py @@ -46,7 +46,7 @@ def restart_mn(self, mn, reindex=False, qvvec_sync=None, qdata_recovery_enabled= self.connect_nodes(mn.node.index, 0) if qdata_recovery_enabled: # trigger recovery threads and wait for them to start - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(self.quorum_data_thread_request_timeout_seconds + 1) time.sleep(1) @@ -177,14 +177,14 @@ def run_test(self): self.test_mns(llmq_test_v17, quorum_hash_recover, valid_mns=[last_resort_v17], all_mns=member_mns_recover_v17) # If recovery would be enabled it would trigger after the mocktime bump / mined block self.bump_mocktime(self.quorum_data_request_expiration_timeout + 1) - self.generate(node, 1) + self.generate(node, 1, sync_fun=self.no_op) time.sleep(10) # Make sure they are still invalid self.test_mns(llmq_test, quorum_hash_recover, valid_mns=[last_resort_test], all_mns=member_mns_recover_test) self.test_mns(llmq_test_v17, quorum_hash_recover, 
valid_mns=[last_resort_v17], all_mns=member_mns_recover_v17) # Mining a block should not result in a chainlock now because the responsible quorum shouldn't have enough # valid members. - self.wait_for_chainlocked_block(node, self.generate(node, 1)[0], False, 5) + self.wait_for_chainlocked_block(node, self.generate(node, 1, sync_fun=self.no_op)[0], False, 5) # Now restart with recovery enabled self.restart_mns(mns=recover_members, exclude=exclude_members, reindex=True, qdata_recovery_enabled=True) # Validate that all invalid members recover. Note: recover=True leads to mocktime bumps and mining while waiting diff --git a/test/functional/feature_llmq_dkgerrors.py b/test/functional/feature_llmq_dkgerrors.py index f3151ef02f6bc..380474544efad 100755 --- a/test/functional/feature_llmq_dkgerrors.py +++ b/test/functional/feature_llmq_dkgerrors.py @@ -85,7 +85,7 @@ def heal_masternodes(self, blockCount): self.wait_for_sporks_same() for _ in range(blockCount): self.bump_mocktime(1) - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.sync_all() self.nodes[0].sporkupdate("SPORK_17_QUORUM_DKG_ENABLED", 0) self.wait_for_sporks_same() diff --git a/test/functional/feature_llmq_evo.py b/test/functional/feature_llmq_evo.py index b2b60bdb29bbd..81f7549312ab4 100755 --- a/test/functional/feature_llmq_evo.py +++ b/test/functional/feature_llmq_evo.py @@ -165,7 +165,7 @@ def test_evo_payments(self, window_analysis, mnrr_active): current_evo = None consecutive_payments = 0 - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) if i % 8 == 0: self.sync_blocks() diff --git a/test/functional/feature_llmq_is_cl_conflicts.py b/test/functional/feature_llmq_is_cl_conflicts.py index 2369c59fa4aca..db1188cfcc91e 100755 --- a/test/functional/feature_llmq_is_cl_conflicts.py +++ b/test/functional/feature_llmq_is_cl_conflicts.py @@ -71,7 +71,7 @@ def run_test(self): self.mine_cycle_quorum(llmq_type_name='llmq_test_dip0024', llmq_type=103) # mine single block, wait for chainlock - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) self.test_chainlock_overrides_islock(False) diff --git a/test/functional/feature_llmq_is_retroactive.py b/test/functional/feature_llmq_is_retroactive.py index 68e1aadccea39..78ca8737c3c33 100755 --- a/test/functional/feature_llmq_is_retroactive.py +++ b/test/functional/feature_llmq_is_retroactive.py @@ -54,7 +54,7 @@ def run_test(self): self.wait_for_sporks_same() # We have to wait in order to include tx in block self.bump_mocktime(10 * 60 + 1) - block = self.generate(self.nodes[0], 1)[0] + block = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] self.wait_for_instantlock(txid, self.nodes[0]) self.nodes[0].sporkupdate("SPORK_19_CHAINLOCKS_ENABLED", 0) self.wait_for_sporks_same() @@ -70,7 +70,7 @@ def run_test(self): # are the only "neighbours" in intra-quorum connections for one of them. 
self.wait_for_instantlock(txid, self.nodes[0]) self.bump_mocktime(1) - block = self.generate(self.nodes[0], 1)[0] + block = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] self.wait_for_chainlocked_block_all_nodes(block) self.log.info("testing normal signing with partially known TX") @@ -100,7 +100,7 @@ def run_test(self): txid = self.nodes[3].sendrawtransaction(rawtx) # Make node 3 consider the TX as safe self.bump_mocktime(10 * 60 + 1) - block = self.generatetoaddress(self.nodes[3], 1, self.nodes[0].getnewaddress())[0] + block = self.generatetoaddress(self.nodes[3], 1, self.nodes[0].getnewaddress(), sync_fun=self.no_op)[0] self.reconnect_isolated_node(3, 0) self.wait_for_chainlocked_block_all_nodes(block) self.nodes[0].setmocktime(self.mocktime) @@ -120,7 +120,7 @@ def run_test(self): self.wait_for_instantlock(txid, self.nodes[0], False, 5) # Make node0 consider the TX as safe self.bump_mocktime(10 * 60 + 1) - block = self.generate(self.nodes[0], 1)[0] + block = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] assert txid in self.nodes[0].getblock(block, 1)['tx'] self.wait_for_chainlocked_block_all_nodes(block) @@ -166,7 +166,7 @@ def test_all_nodes_session_timeout(self, do_cycle_llmqs): self.wait_for_instantlock(txid, self.nodes[0], False, 5) # Make node 0 consider the TX as safe self.bump_mocktime(10 * 60 + 1) - block = self.generate(self.nodes[0], 1)[0] + block = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] assert txid in self.nodes[0].getblock(block, 1)['tx'] self.wait_for_chainlocked_block_all_nodes(block) @@ -198,7 +198,7 @@ def test_single_node_session_timeout(self, do_cycle_llmqs): self.wait_for_instantlock(txid, self.nodes[0], False, 5) # Make node 0 consider the TX as safe self.bump_mocktime(10 * 60 + 1) - block = self.generate(self.nodes[0], 1)[0] + block = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] assert txid in self.nodes[0].getblock(block, 1)['tx'] self.wait_for_chainlocked_block_all_nodes(block) diff --git a/test/functional/feature_llmq_rotation.py b/test/functional/feature_llmq_rotation.py index 8dd187f7aee76..31e637686b3b5 100755 --- a/test/functional/feature_llmq_rotation.py +++ b/test/functional/feature_llmq_rotation.py @@ -88,7 +88,7 @@ def run_test(self): h_104_1 = QuorumId(104, int(h_1, 16)) self.log.info("Mine single block, wait for chainlock") - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) b_h_1 = self.nodes[0].getbestblockhash() @@ -119,7 +119,7 @@ def run_test(self): assert_equal(projected_activation_height, softfork_info['height']) # v20 is active for the next block, not for the tip - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.log.info("Wait for chainlock") self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash()) @@ -200,7 +200,7 @@ def run_test(self): self.sync_blocks(nodes) quorum_list = self.nodes[0].quorum("list", llmq_type) quorum_blockhash = self.nodes[0].getbestblockhash() - fallback_blockhash = self.generate(self.nodes[0], 1)[0] + fallback_blockhash = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] self.log.info("h("+str(self.nodes[0].getblockcount())+") quorum_list:"+str(quorum_list)) assert_greater_than_or_equal(len(intersection(quorum_members_0_0, quorum_members_1_0)), 3) diff --git a/test/functional/feature_llmq_simplepose.py b/test/functional/feature_llmq_simplepose.py index b984b89bc4aa5..4c8a6c92d8ee5 100755 --- 
a/test/functional/feature_llmq_simplepose.py +++ b/test/functional/feature_llmq_simplepose.py @@ -107,7 +107,7 @@ def mine_quorum_less_checks(self, expected_good_nodes, mninfos_online): skip_count = 24 - (self.nodes[0].getblockcount() % 24) if skip_count != 0: self.bump_mocktime(skip_count, nodes=nodes) - self.generate(self.nodes[0], skip_count) + self.generate(self.nodes[0], skip_count, sync_fun=self.no_op) self.sync_blocks(nodes) q = self.nodes[0].getbestblockhash() diff --git a/test/functional/feature_new_quorum_type_activation.py b/test/functional/feature_new_quorum_type_activation.py index 85fa97a5f90dd..9e7dc08b8c2b4 100755 --- a/test/functional/feature_new_quorum_type_activation.py +++ b/test/functional/feature_new_quorum_type_activation.py @@ -22,17 +22,17 @@ def set_test_params(self): def run_test(self): self.log.info(get_bip9_details(self.nodes[0], 'testdummy')) assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'defined') - self.generate(self.nodes[0], 9) + self.generate(self.nodes[0], 9, sync_fun=self.no_op) assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'started') ql = self.nodes[0].quorum("list") assert_equal(len(ql), 3) assert "llmq_test_v17" not in ql - self.generate(self.nodes[0], 10) + self.generate(self.nodes[0], 10, sync_fun=self.no_op) assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'locked_in') ql = self.nodes[0].quorum("list") assert_equal(len(ql), 3) assert "llmq_test_v17" not in ql - self.generate(self.nodes[0], 10) + self.generate(self.nodes[0], 10, sync_fun=self.no_op) assert_equal(get_bip9_details(self.nodes[0], 'testdummy')['status'], 'active') ql = self.nodes[0].quorum("list") assert_equal(len(ql), 4) diff --git a/test/functional/feature_sporks.py b/test/functional/feature_sporks.py index a958721567a84..4843c2d7503b2 100755 --- a/test/functional/feature_sporks.py +++ b/test/functional/feature_sporks.py @@ -53,7 +53,7 @@ def run_test(self): assert self.get_test_spork_state(self.nodes[1]) == spork_new_state # Generate one block to kick off masternode sync, which also starts sporks syncing for node2 - self.generate(self.nodes[1], 1) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) # connect new node and check spork propagation after restoring from cache self.connect_nodes(1, 2) diff --git a/test/functional/interface_zmq_dash.py b/test/functional/interface_zmq_dash.py index 6483b12460490..fdec9a474447a 100755 --- a/test/functional/interface_zmq_dash.py +++ b/test/functional/interface_zmq_dash.py @@ -213,7 +213,7 @@ def validate_recovered_sig(request_id, msg_hash): # Subscribe to recovered signature messages self.subscribe(recovered_sig_publishers) # Generate a ChainLock and make sure this leads to valid recovered sig ZMQ messages - rpc_last_block_hash = self.generate(self.nodes[0], 1)[0] + rpc_last_block_hash = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0] self.wait_for_chainlocked_block_all_nodes(rpc_last_block_hash) height = self.nodes[0].getblockcount() rpc_request_id = hash256(ser_string(b"clsig") + struct.pack(" 0: - self.generate(self.nodes[0], 1) + self.generate(self.nodes[0], 1, sync_fun=self.no_op) operatorPayoutAddress = self.nodes[0].getnewaddress() self.nodes[0].protx('update_service', proTxHash, ipAndPort, bls['secret'], operatorPayoutAddress, address) From a99a39ce8d2337d1d3d87d00945a1f508a4c0c7d Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Tue, 1 Oct 2024 16:08:01 +0000 Subject: [PATCH 09/11] test: remove redundant 
sync after `generate*` calls in Dash tests --- test/functional/feature_addressindex.py | 9 --------- test/functional/feature_asset_locks.py | 20 ------------------- .../feature_dip3_deterministicmns.py | 8 -------- test/functional/feature_dip3_v19.py | 1 - test/functional/feature_llmq_evo.py | 1 - .../feature_llmq_is_cl_conflicts.py | 2 -- test/functional/feature_llmq_simplepose.py | 1 - test/functional/feature_mnehf.py | 8 -------- test/functional/feature_spentindex.py | 3 --- test/functional/feature_timestampindex.py | 1 - test/functional/feature_txindex.py | 2 -- test/functional/p2p_instantsend.py | 4 ---- .../test_framework/test_framework.py | 3 --- 13 files changed, 63 deletions(-) diff --git a/test/functional/feature_addressindex.py b/test/functional/feature_addressindex.py index 0a281cbb80a61..213115b341256 100755 --- a/test/functional/feature_addressindex.py +++ b/test/functional/feature_addressindex.py @@ -53,7 +53,6 @@ def run_test(self): self.log.info("Mining blocks...") mining_address = self.nodes[0].getnewaddress() self.generatetoaddress(self.nodes[0], 105, mining_address) - self.sync_all() chain_height = self.nodes[1].getblockcount() assert_equal(chain_height, 105) @@ -89,8 +88,6 @@ def run_test(self): txidb2 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 20) self.generate(self.nodes[0], 1) - self.sync_all() - txids = self.nodes[1].getaddresstxids("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4") assert_equal(len(txids), 3) assert_equal(txids[0], txid0) @@ -142,7 +139,6 @@ def run_test(self): sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) self.generate(self.nodes[0], 1) - self.sync_all() txidsmany = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB") assert_equal(len(txidsmany), 4) @@ -171,7 +167,6 @@ def run_test(self): signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) spending_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) self.generate(self.nodes[0], 1) - self.sync_all() balance1 = self.nodes[1].getaddressbalance(address2) assert_equal(balance1["balance"], amount) @@ -185,7 +180,6 @@ def run_test(self): signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) self.generate(self.nodes[0], 1) - self.sync_all() balance2 = self.nodes[1].getaddressbalance(address2) assert_equal(balance2["balance"], change_amount) @@ -239,7 +233,6 @@ def run_test(self): self.generate(self.nodes[2], 1, sync_fun=self.no_op) self.nodes[2].sendtoaddress(address2, 50) self.generate(self.nodes[2], 1) - self.sync_all() utxos3 = self.nodes[1].getaddressutxos({"addresses": [address2]}) assert_equal(len(utxos3), 3) @@ -292,7 +285,6 @@ def run_test(self): assert_equal(mempool[2]["index"], 1) self.generate(self.nodes[2], 1) - self.sync_all() mempool2 = self.nodes[2].getaddressmempool({"addresses": [address3]}) assert_equal(len(mempool2), 0) @@ -323,7 +315,6 @@ def run_test(self): self.nodes[0].sendtoaddress(address1, 10) self.generate(self.nodes[0], 1) - self.sync_all() utxos = self.nodes[1].getaddressutxos({"addresses": [address1]}) assert_equal(len(utxos), 1) diff --git a/test/functional/feature_asset_locks.py b/test/functional/feature_asset_locks.py index 89680fb254c31..91480c000749a 100755 --- a/test/functional/feature_asset_locks.py +++ b/test/functional/feature_asset_locks.py @@ -231,7 +231,6 @@ def generate_batch(self, count): count -= batch self.bump_mocktime(batch) self.generate(self.nodes[1], batch) - self.sync_all() # This 
functional test intentionally setup only 2 MN and only 2 Evo nodes # to ensure that corner case of quorum with minimum amount of nodes as possible @@ -260,7 +259,6 @@ def run_test(self): self.set_sporks() self.generate(node, 1) - self.sync_all() self.mempool_size = 0 key = ECKey() @@ -298,7 +296,6 @@ def test_asset_locks(self, node_wallet, node, pubkey): assert_equal(self.get_credit_pool_balance(node=node), locked_1) self.log.info("Generate a number of blocks to ensure this is the longest chain for later in the test when we reconsiderblock") self.generate(node, 12) - self.sync_all() self.validate_credit_pool_balance(locked_1) @@ -309,14 +306,12 @@ def test_asset_locks(self, node_wallet, node, pubkey): inode.invalidateblock(self.block_hash_1) assert_equal(self.get_credit_pool_balance(node=inode), 0) self.generate(node, 3) - self.sync_all() self.validate_credit_pool_balance(0) self.log.info("Resubmit asset lock tx to new chain...") # NEW tx appears asset_lock_tx_2 = self.create_assetlock(coin, locked_2, pubkey) txid_in_block = self.send_tx(asset_lock_tx_2) self.generate(node, 1) - self.sync_all() self.validate_credit_pool_balance(locked_2) self.log.info("Reconsider old blocks...") for inode in self.nodes: @@ -401,7 +396,6 @@ def test_asset_unlocks(self, node_wallet, node, pubkey): self.check_mempool_size() self.validate_credit_pool_balance(locked) self.generate(node, 1) - self.sync_all() assert_equal(rawtx["instantlock"], False) assert_equal(rawtx["chainlock"], False) rawtx = node.getrawtransaction(txid, 1) @@ -424,14 +418,12 @@ def test_asset_unlocks(self, node_wallet, node, pubkey): self.validate_credit_pool_balance(locked - 1 * COIN) self.send_tx(asset_unlock_tx_late) self.generate(node, 1) - self.sync_all() self.validate_credit_pool_balance(locked - 2 * COIN) self.log.info("Generating many blocks to make quorum far behind (even still active)...") self.generate_batch(too_late_height - node.getblockcount() - 1) self.check_mempool_result(tx=asset_unlock_tx_too_late, result_expected={'allowed': True, 'fees': {'base': Decimal(str(tiny_amount / COIN))}}) self.generate(node, 1) - self.sync_all() self.check_mempool_result(tx=asset_unlock_tx_too_late, result_expected={'allowed': False, 'reject-reason' : 'bad-assetunlock-too-late'}) @@ -456,7 +448,6 @@ def test_asset_unlocks(self, node_wallet, node, pubkey): self.create_and_check_block([asset_unlock_tx_too_late], expected_error = "bad-assetunlock-not-active-quorum") self.generate(node, 1) - self.sync_all() self.validate_credit_pool_balance(locked - 2 * COIN) self.validate_credit_pool_balance(block_hash=self.block_hash_1, expected=locked) @@ -476,7 +467,6 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): txid_in_block = self.send_tx(asset_unlock_tx_full) self.generate(node, 1) - self.sync_all() self.ensure_tx_is_not_mined(txid_in_block) @@ -490,7 +480,6 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): txid_in_block = self.send_tx(asset_unlock_tx_full) expected_balance = (Decimal(self.get_credit_pool_balance()) - Decimal(tiny_amount)) self.generate(node, 1) - self.sync_all() self.log.info("Check txid_in_block was mined") block = node.getblock(node.getbestblockhash()) assert txid_in_block in block['tx'] @@ -528,7 +517,6 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): self.log.info(f"Collecting coins in pool... 
Collected {total}/{10_901 * COIN}") self.sync_mempools() self.generate(node, 1) - self.sync_all() credit_pool_balance_1 = self.get_credit_pool_balance() assert_greater_than(credit_pool_balance_1, 10_901 * COIN) limit_amount_1 = 1000 * COIN @@ -548,7 +536,6 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): self.sync_mempools() self.generate(node, 1) - self.sync_all() new_total = self.get_credit_pool_balance() amount_actually_withdrawn = total - new_total @@ -561,7 +548,6 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): assert_equal(amount_actually_withdrawn, 900 * COIN + 10001) self.generate(node, 1) - self.sync_all() self.log.info("Checking that exactly 1 tx stayed in mempool...") self.mempool_size = 1 self.check_mempool_size() @@ -575,7 +561,6 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): self.send_tx_simple(asset_unlock_tx) self.sync_mempools() self.generate(node, 1) - self.sync_all() new_total = self.get_credit_pool_balance() amount_actually_withdrawn = total - new_total assert_equal(limit_amount_1, amount_actually_withdrawn) @@ -599,10 +584,8 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): asset_unlock_tx = self.create_assetunlock(index, limit_amount_2, pubkey) self.send_tx(asset_unlock_tx) self.generate(node, 1) - self.sync_all() assert_equal(new_total, self.get_credit_pool_balance()) self.generate(node, 1) - self.sync_all() new_total -= limit_amount_2 assert_equal(new_total, self.get_credit_pool_balance()) self.log.info("Trying to withdraw more... expecting to fail") @@ -610,7 +593,6 @@ def test_withdrawal_limits(self, node_wallet, node, pubkey): asset_unlock_tx = self.create_assetunlock(index, COIN, pubkey) self.send_tx(asset_unlock_tx) self.generate(node, 1) - self.sync_all() tip = self.nodes[0].getblockcount() indexes_statuses_no_height = self.nodes[0].getassetunlockstatuses(["101", "102", "103"]) @@ -644,7 +626,6 @@ def test_mn_rr(self, node_wallet, node, pubkey): assert_equal(platform_reward, 34371430) assert_equal(locked, self.get_credit_pool_balance()) self.generate(node, 1) - self.sync_all() locked += platform_reward assert_equal(locked, self.get_credit_pool_balance()) @@ -653,7 +634,6 @@ def test_mn_rr(self, node_wallet, node, pubkey): self.send_tx(self.create_assetlock(coin, COIN, pubkey)) locked += platform_reward + COIN self.generate(node, 1) - self.sync_all() assert_equal(locked, self.get_credit_pool_balance()) diff --git a/test/functional/feature_dip3_deterministicmns.py b/test/functional/feature_dip3_deterministicmns.py index 4a88f8170bad0..1cc6e17e2db0b 100755 --- a/test/functional/feature_dip3_deterministicmns.py +++ b/test/functional/feature_dip3_deterministicmns.py @@ -126,7 +126,6 @@ def run_test(self): dummy_txin = self.spend_mn_collateral(mns[i], with_dummy_input_output=True) dummy_txins.append(dummy_txin) self.generate(self.nodes[0], 1) - self.sync_all() mns_tmp.remove(mns[i]) self.assert_mnlists(mns_tmp) new_rpc_info = self.nodes[0].protx("info", old_protx_hash, old_blockhash) @@ -145,7 +144,6 @@ def run_test(self): self.log.info("cause a reorg with a double spend and check that mnlists are still correct on all nodes") self.mine_double_spend(mns, self.nodes[0], dummy_txins, self.nodes[0].getnewaddress()) self.generate(self.nodes[0], spend_mns_count) - self.sync_all() self.assert_mnlists(mns_tmp) self.log.info("test mn payment enforcement with deterministic MNs") @@ -153,7 +151,6 @@ def run_test(self): node = self.nodes[i % len(self.nodes)] self.test_invalid_mn_payment(mns, node) 
self.generate(self.nodes[0], 1) - self.sync_all() self.log.info("testing ProUpServTx") for mn in mns: @@ -176,7 +173,6 @@ def run_test(self): expected_payee = bt['masternode'][0]['payee'] expected_amount = bt['masternode'][0]['amount'] self.generate(self.nodes[0], 1) - self.sync_all() if expected_payee == multisig: block = self.nodes[0].getblock(self.nodes[0].getbestblockhash()) cbtx = self.nodes[0].getrawtransaction(block['tx'][0], 1) @@ -199,7 +195,6 @@ def run_test(self): self.register_mn(self.nodes[0], new_mn) mns[i] = new_mn self.generate(self.nodes[0], 1) - self.sync_all() self.assert_mnlists(mns) self.log.info("restarting MN %s" % new_mn.alias) self.stop_node(new_mn.idx) @@ -218,7 +213,6 @@ def run_test(self): node.sendtoaddress(mn.rewards_address, 0.001) node.protx('update_registrar', mn.protx_hash, "", new_voting_address, "") self.generate(node, 1) - self.sync_all() new_dmnState = mn.node.masternode("status")["dmnState"] new_voting_address_from_rpc = new_dmnState["votingAddress"] assert new_voting_address_from_rpc == new_voting_address @@ -296,7 +290,6 @@ def update_mn_payee(self, mn, payee): self.nodes[0].sendtoaddress(mn.fundsAddr, 0.001) self.nodes[0].protx('update_registrar', mn.protx_hash, '', '', payee, mn.fundsAddr) self.generate(self.nodes[0], 1) - self.sync_all() info = self.nodes[0].protx('info', mn.protx_hash) assert info['state']['payoutAddress'] == payee @@ -304,7 +297,6 @@ def test_protx_update_service(self, mn): self.nodes[0].sendtoaddress(mn.fundsAddr, 0.001) self.nodes[0].protx('update_service', mn.protx_hash, '127.0.0.2:%d' % mn.p2p_port, mn.blsMnkey, "", mn.fundsAddr) self.generate(self.nodes[0], 1) - self.sync_all() for node in self.nodes: protx_info = node.protx('info', mn.protx_hash) mn_list = node.masternode('list') diff --git a/test/functional/feature_dip3_v19.py b/test/functional/feature_dip3_v19.py index 7da604aedc4c2..adeec5c818e84 100755 --- a/test/functional/feature_dip3_v19.py +++ b/test/functional/feature_dip3_v19.py @@ -126,7 +126,6 @@ def test_revoke_protx(self, node_idx, revoke_protx, revoke_keyoperator): self.wait_for_instantlock(fund_txid, self.nodes[0]) tip = self.generate(self.nodes[0], 1)[0] assert_equal(self.nodes[0].getrawtransaction(fund_txid, 1, tip)['confirmations'], 1) - self.sync_all() protx_result = self.nodes[0].protx('revoke', revoke_protx, revoke_keyoperator, 1, funds_address) self.wait_for_instantlock(protx_result, self.nodes[0]) diff --git a/test/functional/feature_llmq_evo.py b/test/functional/feature_llmq_evo.py index 81f7549312ab4..bc2d63707afb7 100755 --- a/test/functional/feature_llmq_evo.py +++ b/test/functional/feature_llmq_evo.py @@ -214,7 +214,6 @@ def test_evo_is_rejected_before_v19(self): outputs = {collateral_address: collateral_amount, funds_address: 1} collateral_txid = self.nodes[0].sendmany("", outputs) self.generate(self.nodes[0], 8) - self.sync_all() rawtx = self.nodes[0].getrawtransaction(collateral_txid, 1) collateral_vout = 0 diff --git a/test/functional/feature_llmq_is_cl_conflicts.py b/test/functional/feature_llmq_is_cl_conflicts.py index db1188cfcc91e..1f7c835d49cdf 100755 --- a/test/functional/feature_llmq_is_cl_conflicts.py +++ b/test/functional/feature_llmq_is_cl_conflicts.py @@ -223,7 +223,6 @@ def test_chainlock_overrides_islock_overrides_nonchainlock(self): # Mine the conflicting TX into a block good_tip = self.nodes[0].getbestblockhash() self.generate(self.nodes[0], 2) - self.sync_all() # Assert that the conflicting tx got mined and the locked TX is not valid assert 
self.nodes[0].getrawtransaction(rawtx1_txid, True)['confirmations'] > 0 @@ -255,7 +254,6 @@ def test_chainlock_overrides_islock_overrides_nonchainlock(self): assert_raises_rpc_error(-26, "tx-txlock-conflict", self.nodes[0].sendrawtransaction, rawtx1) islock_tip = self.generate(self.nodes[0], 1)[0] - self.sync_all() for node in self.nodes: self.wait_for_instantlock(rawtx2_txid, node) diff --git a/test/functional/feature_llmq_simplepose.py b/test/functional/feature_llmq_simplepose.py index 4c8a6c92d8ee5..ea4d51d381e2e 100755 --- a/test/functional/feature_llmq_simplepose.py +++ b/test/functional/feature_llmq_simplepose.py @@ -212,7 +212,6 @@ def repair_masternodes(self, restart): # Make sure protxes are "safe" to mine even when InstantSend and ChainLocks are no longer functional self.bump_mocktime(60 * 10 + 1) self.generate(self.nodes[0], 1) - self.sync_all() # Isolate and re-connect all MNs (otherwise there might be open connections with no MNAUTH for MNs which were banned before) for mn in self.mninfo: diff --git a/test/functional/feature_mnehf.py b/test/functional/feature_mnehf.py index 3c77a69cafbb3..9578da6767272 100755 --- a/test/functional/feature_mnehf.py +++ b/test/functional/feature_mnehf.py @@ -149,8 +149,6 @@ def run_test(self): self.log.info(f"unknown ehf tx: {ehf_unknown_tx_sent}") self.sync_all() ehf_blockhash = self.generate(self.nodes[1], 1)[0] - self.sync_blocks() - self.sync_all() self.log.info(f"Check MnEhfTx {ehf_tx_sent} was mined in {ehf_blockhash}") assert ehf_tx_sent in node.getblock(ehf_blockhash)['tx'] @@ -164,7 +162,6 @@ def run_test(self): while (node.getblockcount() + 1) % 4 != 0: self.check_fork('defined') self.generate(node, 1) - self.sync_all() self.restart_all_nodes() @@ -172,13 +169,11 @@ def run_test(self): for _ in range(4): self.check_fork('started') self.generate(node, 1) - self.sync_all() for i in range(4): self.check_fork('locked_in') self.generate(node, 1) - self.sync_all() if i == 7: self.restart_all_nodes() @@ -193,13 +188,11 @@ def run_test(self): for _ in range(4): self.check_fork('defined') self.generate(node, 1) - self.sync_all() self.log.info("Re-sending MnEHF for new fork") tx_sent_2 = self.send_tx(ehf_tx) ehf_blockhash_2 = self.generate(node, 1)[0] - self.sync_all() self.log.info(f"Check MnEhfTx again {tx_sent_2} was mined in {ehf_blockhash_2}") assert tx_sent_2 in node.getblock(ehf_blockhash_2)['tx'] @@ -242,7 +235,6 @@ def run_test(self): time.sleep(1) self.bump_mocktime(1) self.generate(self.nodes[1], 1) - self.sync_all() self.check_fork('active') diff --git a/test/functional/feature_spentindex.py b/test/functional/feature_spentindex.py index 363f5e7f71007..857d6f030fa4b 100755 --- a/test/functional/feature_spentindex.py +++ b/test/functional/feature_spentindex.py @@ -54,7 +54,6 @@ def run_test(self): self.log.info("Mining blocks...") self.generate(self.nodes[0], 105) - self.sync_all() chain_height = self.nodes[1].getblockcount() assert_equal(chain_height, 105) @@ -77,7 +76,6 @@ def run_test(self): signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) self.generate(self.nodes[0], 1) - self.sync_all() self.log.info("Testing getspentinfo method...") @@ -121,7 +119,6 @@ def run_test(self): # Check the database index self.generate(self.nodes[0], 1) - self.sync_all() txVerbose4 = self.nodes[3].getrawtransaction(txid2, 1) assert_equal(txVerbose4["vin"][0]["address"], address2) diff --git a/test/functional/feature_timestampindex.py 
b/test/functional/feature_timestampindex.py index 63d22baa76d7e..947c5021acadd 100755 --- a/test/functional/feature_timestampindex.py +++ b/test/functional/feature_timestampindex.py @@ -49,7 +49,6 @@ def run_test(self): blockhashes = self.generate(self.nodes[0], 5) low = self.nodes[0].getblock(blockhashes[0])["time"] high = self.nodes[0].getblock(blockhashes[4])["time"] - self.sync_all() self.log.info("Checking timestamp index...") hashes = self.nodes[1].getblockhashes(high, low) assert_equal(len(hashes), 5) diff --git a/test/functional/feature_txindex.py b/test/functional/feature_txindex.py index e1a9633f8af24..d2246cd20eff8 100755 --- a/test/functional/feature_txindex.py +++ b/test/functional/feature_txindex.py @@ -39,7 +39,6 @@ def setup_network(self): def run_test(self): self.log.info("Mining blocks...") self.generate(self.nodes[0], 105) - self.sync_all() chain_height = self.nodes[1].getblockcount() assert_equal(chain_height, 105) @@ -59,7 +58,6 @@ def run_test(self): signed_tx = self.nodes[0].signrawtransactionwithwallet(tx.serialize().hex()) txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], 0) self.generate(self.nodes[0], 1) - self.sync_all() # Check verbose raw transaction results verbose = self.nodes[3].getrawtransaction(txid, 1) diff --git a/test/functional/p2p_instantsend.py b/test/functional/p2p_instantsend.py index 88983142e4fac..8fe878ebd5428 100755 --- a/test/functional/p2p_instantsend.py +++ b/test/functional/p2p_instantsend.py @@ -48,7 +48,6 @@ def test_block_doublespend(self): self.wait_for_instantlock(is_id, node) self.bump_mocktime(1) self.generate(self.nodes[0], 2) - self.sync_all() # create doublespending transaction, but don't relay it dblspnd_tx = self.create_raw_tx(sender, isolated, 0.5, 1, 100) @@ -93,7 +92,6 @@ def test_block_doublespend(self): # make sure the above TX is on node0 self.sync_mempools([n for n in self.nodes if n is not isolated]) self.generate(self.nodes[0], 2) - self.sync_all() def test_mempool_doublespend(self): sender = self.nodes[self.sender_idx] @@ -109,7 +107,6 @@ def test_mempool_doublespend(self): self.wait_for_instantlock(is_id, node) self.bump_mocktime(1) self.generate(self.nodes[0], 2) - self.sync_all() # create doublespending transaction, but don't relay it dblspnd_tx = self.create_raw_tx(sender, isolated, 0.5, 1, 100) @@ -142,7 +139,6 @@ def test_mempool_doublespend(self): # mine more blocks self.bump_mocktime(1) self.generate(self.nodes[0], 2) - self.sync_all() if __name__ == '__main__': InstantSendTest().main() diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 24e679f42b303..4db465ae10453 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -1330,7 +1330,6 @@ def dynamically_evo_update_service(self, evo_info, rnd=None, should_be_rejected= self.wait_for_instantlock(fund_txid, self.nodes[0]) tip = self.generate(self.nodes[0], 1)[0] assert_equal(self.nodes[0].getrawtransaction(fund_txid, 1, tip)['confirmations'], 1) - self.sync_all() protx_success = False try: @@ -1338,7 +1337,6 @@ def dynamically_evo_update_service(self, evo_info, rnd=None, should_be_rejected= self.wait_for_instantlock(protx_result, self.nodes[0]) tip = self.generate(self.nodes[0], 1)[0] assert_equal(self.nodes[0].getrawtransaction(protx_result, 1, tip)['confirmations'], 1) - self.sync_all() self.log.info("Updated EvoNode %s: platformNodeID=%s, platformP2PPort=%s, platformHTTPPort=%s" % (evo_info.proTxHash, platform_node_id, 
platform_p2p_port, platform_http_port)) protx_success = True except: @@ -1410,7 +1408,6 @@ def remove_masternode(self, idx): rawtx = self.nodes[0].signrawtransactionwithwallet(rawtx) self.nodes[0].sendrawtransaction(rawtx["hex"]) self.generate(self.nodes[0], 1) - self.sync_all() self.mninfo.remove(mn) self.log.info("Removed masternode %d", idx) From dfeeb34d1884276980801524ad1bfb01d7a8c7a0 Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Thu, 26 Sep 2024 20:38:56 +0000 Subject: [PATCH 10/11] test: remove redundant sync after `generate*` calls in Bitcoin tests --- test/functional/feature_blockfilterindex_prune.py | 2 -- test/functional/feature_coinstatsindex.py | 2 -- test/functional/feature_minchainwork.py | 1 - test/functional/interface_rest.py | 7 ------- test/functional/interface_zmq.py | 5 ----- test/functional/mempool_packages.py | 1 - test/functional/p2p_ibd_txrelay.py | 1 - test/functional/p2p_permissions.py | 1 - test/functional/rpc_createmultisig.py | 2 -- test/functional/rpc_fundrawtransaction.py | 12 ------------ test/functional/rpc_getblockstats.py | 1 - test/functional/rpc_net.py | 1 - test/functional/rpc_psbt.py | 5 ----- test/functional/rpc_rawtransaction.py | 11 ----------- test/functional/rpc_txoutproof.py | 2 -- test/functional/wallet_avoidreuse.py | 10 ---------- test/functional/wallet_backup.py | 1 - test/functional/wallet_balance.py | 5 ----- test/functional/wallet_groups.py | 5 ----- test/functional/wallet_import_rescan.py | 1 - test/functional/wallet_importdescriptors.py | 4 ---- test/functional/wallet_importprunedfunds.py | 2 -- test/functional/wallet_listreceivedby.py | 4 ---- test/functional/wallet_listtransactions.py | 1 - 24 files changed, 87 deletions(-) diff --git a/test/functional/feature_blockfilterindex_prune.py b/test/functional/feature_blockfilterindex_prune.py index cbf7efc282a7c..f418f2c75dde7 100755 --- a/test/functional/feature_blockfilterindex_prune.py +++ b/test/functional/feature_blockfilterindex_prune.py @@ -27,9 +27,7 @@ def run_test(self): assert_greater_than(len(self.nodes[0].getblockfilter(self.nodes[0].getbestblockhash())['filter']), 0) # Mine two batches of blocks to avoid hitting NODE_NETWORK_LIMITED_MIN_BLOCKS disconnection self.generate(self.nodes[0], 250) - self.sync_all() self.generate(self.nodes[0], 250) - self.sync_all() self.sync_index(height=700) self.log.info("prune some blocks") diff --git a/test/functional/feature_coinstatsindex.py b/test/functional/feature_coinstatsindex.py index 54a656a9c4fca..91d66fc9c5306 100755 --- a/test/functional/feature_coinstatsindex.py +++ b/test/functional/feature_coinstatsindex.py @@ -184,7 +184,6 @@ def _test_coin_stats_index(self): # Include both txs in a block self.generate(self.nodes[0], 1) - self.sync_all() for hash_option in index_hash_options: # Check all amounts were registered correctly @@ -285,7 +284,6 @@ def _test_reorg_index(self): # Add another block, so we don't depend on reconsiderblock remembering which # blocks were touched by invalidateblock self.generate(index_node, 1) - self.sync_all() # Ensure that removing and re-adding blocks yields consistent results block = index_node.getblockhash(99) diff --git a/test/functional/feature_minchainwork.py b/test/functional/feature_minchainwork.py index 1e54dacea1461..72edc238af524 100755 --- a/test/functional/feature_minchainwork.py +++ b/test/functional/feature_minchainwork.py @@ -98,7 +98,6 @@ def run_test(self): # insufficient work chain, in which case we'd need to reconnect them to # 
continue the test. - self.sync_all() self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes]) self.log.info("Test that getheaders requests to node2 are not ignored") diff --git a/test/functional/interface_rest.py b/test/functional/interface_rest.py index 22017fc721f3e..2ece391bf759d 100755 --- a/test/functional/interface_rest.py +++ b/test/functional/interface_rest.py @@ -85,9 +85,7 @@ def run_test(self): not_related_address = "yj949n1UH6fDhw6HtVE5VMj2iSTaSWBMcW" self.generate(self.nodes[0], 1) - self.sync_all() self.generatetoaddress(self.nodes[1], 100, not_related_address) - self.sync_all() assert_equal(self.nodes[0].getbalance(), 500) @@ -118,7 +116,6 @@ def run_test(self): self.log.info("Query an unspent TXO using the /getutxos URI") self.generatetoaddress(self.nodes[1], 1, not_related_address) - self.sync_all() bb_hash = self.nodes[0].getbestblockhash() assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) @@ -193,7 +190,6 @@ def run_test(self): assert_equal(len(json_obj['utxos']), 0) self.generate(self.nodes[0], 1) - self.sync_all() json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) assert_equal(len(json_obj['utxos']), 1) @@ -214,7 +210,6 @@ def run_test(self): self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=200) self.generate(self.nodes[0], 1) # generate block to not affect upcoming tests - self.sync_all() self.log.info("Test the /block, /blockhashbyheight and /headers URIs") bb_hash = self.nodes[0].getbestblockhash() @@ -285,7 +280,6 @@ def run_test(self): # See if we can get 5 headers in one response self.generate(self.nodes[1], 5) - self.sync_all() json_obj = self.test_rest_request("/headers/5/{}".format(bb_hash)) assert_equal(len(json_obj), 5) # now we should have 5 header objects @@ -320,7 +314,6 @@ def run_test(self): # Now mine the transactions newblockhash = self.generate(self.nodes[1], 1) - self.sync_all() # Check if the 3 tx show up in the new block json_obj = self.test_rest_request("/block/{}".format(newblockhash[0])) diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py index 430687ca4bd96..d8565814c2bcb 100755 --- a/test/functional/interface_zmq.py +++ b/test/functional/interface_zmq.py @@ -189,8 +189,6 @@ def test_basic(self): self.log.info("Generate %(n)d blocks (and %(n)d coinbase txes)" % {"n": num_blocks}) genhashes = self.generatetoaddress(self.nodes[0], num_blocks, ADDRESS_BCRT1_UNSPENDABLE) - self.sync_all() - for x in range(num_blocks): # Should receive the coinbase txid. txid = hashtx.receive() @@ -344,7 +342,6 @@ def test_sequence(self): # removed from the mempool by the block mining it. 
mempool_size = len(self.nodes[0].getrawmempool()) c_block = self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE)[0] - self.sync_all() # Make sure the number of mined transactions matches the number of txs out of mempool mempool_size_delta = mempool_size - len(self.nodes[0].getrawmempool()) assert_equal(len(self.nodes[0].getblock(c_block)["tx"])-1, mempool_size_delta) @@ -384,7 +381,6 @@ def test_sequence(self): # Other things may happen but aren't wallet-deterministic so we don't test for them currently self.nodes[0].reconsiderblock(best_hash) self.generatetoaddress(self.nodes[1], 1, ADDRESS_BCRT1_UNSPENDABLE) - self.sync_all() self.log.info("Evict mempool transaction by block conflict") orig_txid = self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=1.0) @@ -429,7 +425,6 @@ def test_sequence(self): assert_equal((orig_txid_2, "A", mempool_seq), seq.receive_sequence()) mempool_seq += 1 self.generatetoaddress(self.nodes[0], 1, ADDRESS_BCRT1_UNSPENDABLE) - self.sync_all() # want to make sure we didn't break "consensus" for other tests def test_mempool_sync(self): """ diff --git a/test/functional/mempool_packages.py b/test/functional/mempool_packages.py index 8afa709ee9110..6b9096bbc5ac1 100755 --- a/test/functional/mempool_packages.py +++ b/test/functional/mempool_packages.py @@ -312,7 +312,6 @@ def run_test(self): # Mine these in a block self.generate(self.nodes[0], 1) - self.sync_all() # Now generate tx8, with a big fee inputs = [ {'txid' : tx1_id, 'vout': 0}, {'txid' : txid, 'vout': 0} ] diff --git a/test/functional/p2p_ibd_txrelay.py b/test/functional/p2p_ibd_txrelay.py index bc1da72c2db32..20fd41e5cad7b 100755 --- a/test/functional/p2p_ibd_txrelay.py +++ b/test/functional/p2p_ibd_txrelay.py @@ -67,7 +67,6 @@ def run_test(self): # Come out of IBD by generating a block self.generate(self.nodes[0], 1) - self.sync_all() self.log.info("Check that nodes process the same transaction, even when unsolicited, when no longer in IBD") peer_txer = self.nodes[0].add_p2p_connection(P2PInterface()) diff --git a/test/functional/p2p_permissions.py b/test/functional/p2p_permissions.py index 98ae52fb48c7a..2fc0c8e4cf9f8 100755 --- a/test/functional/p2p_permissions.py +++ b/test/functional/p2p_permissions.py @@ -116,7 +116,6 @@ def run_test(self): def check_tx_relay(self): block_op_true = self.nodes[0].getblock(self.generatetoaddress(self.nodes[0], 100, ADDRESS_BCRT1_P2SH_OP_TRUE)[0]) - self.sync_all() self.log.debug("Create a connection from a forcerelay peer that rebroadcasts raw txs") # A test framework p2p connection is needed to send the raw transaction directly. 
If a full node was used, it could only diff --git a/test/functional/rpc_createmultisig.py b/test/functional/rpc_createmultisig.py index 5bb08cdb746f8..cb87eca254c66 100755 --- a/test/functional/rpc_createmultisig.py +++ b/test/functional/rpc_createmultisig.py @@ -45,7 +45,6 @@ def run_test(self): self.log.info('Generating blocks ...') self.generate(node0, 149) - self.sync_all() self.moved = 0 for self.nkeys in [3, 5]: @@ -102,7 +101,6 @@ def check_addmultisigaddress_errors(self): def checkbalances(self): node0, node1, node2 = self.nodes self.generate(node0, 1) - self.sync_all() bal0 = node0.getbalance() bal1 = node1.getbalance() diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py index e009cb8e2eb44..6e14c9f44fe26 100755 --- a/test/functional/rpc_fundrawtransaction.py +++ b/test/functional/rpc_fundrawtransaction.py @@ -68,9 +68,7 @@ def run_test(self): self.fee_tolerance = 2 * self.min_relay_tx_fee / 1000 self.generate(self.nodes[2], 1) - self.sync_all() self.generate(self.nodes[0], 121) - self.sync_all() self.test_change_position() self.test_simple() @@ -128,7 +126,6 @@ def test_change_position(self): self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 50) self.generate(self.nodes[0], 1) - self.sync_all() wwatch.unloadwallet() @@ -492,7 +489,6 @@ def test_spend_2of2(self): # send 12 DASH to msig addr self.nodes[0].sendtoaddress(mSigObj, 12) self.generate(self.nodes[0], 1) - self.sync_all() oldBalance = self.nodes[1].getbalance() inputs = [] @@ -503,7 +499,6 @@ def test_spend_2of2(self): final_psbt = w2.finalizepsbt(signed_psbt['psbt']) self.nodes[2].sendrawtransaction(final_psbt['hex']) self.generate(self.nodes[2], 1) - self.sync_all() # Make sure funds are received at node1. assert_equal(oldBalance+Decimal('11.0000000'), self.nodes[1].getbalance()) @@ -565,7 +560,6 @@ def test_locked_wallet(self): signedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex']) self.nodes[1].sendrawtransaction(signedTx['hex']) self.generate(self.nodes[1], 1) - self.sync_all() # Make sure funds are received at node1. assert_equal(oldBalance+Decimal('511.0000000'), self.nodes[0].getbalance()) @@ -577,12 +571,10 @@ def test_many_inputs_fee(self): # Empty node1, send some small coins from node0 to node1. self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) self.generate(self.nodes[1], 1) - self.sync_all() for _ in range(20): self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) self.generate(self.nodes[0], 1) - self.sync_all() # Fund a tx with ~20 small inputs. inputs = [] @@ -605,12 +597,10 @@ def test_many_inputs_send(self): # Again, empty node1, send some small coins from node0 to node1. self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) self.generate(self.nodes[1], 1) - self.sync_all() for _ in range(20): self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) self.generate(self.nodes[0], 1) - self.sync_all() # Fund a tx with ~20 small inputs. 
oldBalance = self.nodes[0].getbalance() @@ -622,7 +612,6 @@ def test_many_inputs_send(self): fundedAndSignedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex']) self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex']) self.generate(self.nodes[1], 1) - self.sync_all() assert_equal(oldBalance+Decimal('500.19000000'), self.nodes[0].getbalance()) #0.19+block reward def test_op_return(self): @@ -700,7 +689,6 @@ def test_all_watched_funds(self): assert signedtx["complete"] self.nodes[0].sendrawtransaction(signedtx["hex"]) self.generate(self.nodes[0], 1) - self.sync_all() wwatch.unloadwallet() diff --git a/test/functional/rpc_getblockstats.py b/test/functional/rpc_getblockstats.py index 55bebd870b385..239e955713c3c 100755 --- a/test/functional/rpc_getblockstats.py +++ b/test/functional/rpc_getblockstats.py @@ -48,7 +48,6 @@ def generate_test_data(self, filename): address = self.nodes[0].get_deterministic_priv_key().address self.nodes[0].sendtoaddress(address=address, amount=10, subtractfeefromamount=True) self.generate(self.nodes[0], 1) - self.sync_all() self.nodes[0].sendtoaddress(address=address, amount=10, subtractfeefromamount=True) self.nodes[0].sendtoaddress(address=address, amount=10, subtractfeefromamount=False) diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py index 72cef90ffdb03..b7bb9655abe6f 100755 --- a/test/functional/rpc_net.py +++ b/test/functional/rpc_net.py @@ -79,7 +79,6 @@ def test_getpeerinfo(self): # Create a few getpeerinfo last_block/last_transaction values. self.wallet.send_self_transfer(from_node=self.nodes[0]) # Make a transaction so we can see it in the getpeerinfo results self.generate(self.nodes[1], 1) - self.sync_all() time_now = self.mocktime peer_info = [x.getpeerinfo() for x in self.nodes] # Verify last_block and last_transaction keys/values. 
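Every hunk in this group drops a `self.sync_all()` that immediately followed a `self.generate(...)` or `self.generatetoaddress(...)` call. That is safe because the test framework's generate* wrappers perform the sync themselves unless the caller overrides it. A minimal runnable sketch of that wrapper shape follows; it is an assumption modelled on the upstream bitcoin#22741 behaviour, and `DemoNode`/`DemoFramework` are illustrative stand-ins rather than classes from this series.

    # Minimal sketch, assuming the wrapper follows the upstream bitcoin#22741
    # shape; DemoNode and DemoFramework are illustrative stand-ins, not classes
    # from this patch series.
    class DemoNode:
        def __init__(self):
            self.height = 0

        def generate(self, nblocks, **kwargs):
            # stand-in for the node's generate RPC
            self.height += nblocks
            return ["hash_%d" % h for h in range(self.height - nblocks + 1, self.height + 1)]


    class DemoFramework:
        def __init__(self, nodes):
            self.nodes = nodes

        def sync_all(self):
            print("sync_all: blocks and mempools of all nodes")

        def no_op(self):
            pass

        def generate(self, generator, *args, sync_fun=None, **kwargs):
            blocks = generator.generate(*args, **kwargs)
            # the sync happens here, so an explicit self.sync_all() right after
            # self.generate(...) in a test becomes redundant
            sync_fun() if sync_fun else self.sync_all()
            return blocks


    framework = DemoFramework([DemoNode(), DemoNode()])
    framework.generate(framework.nodes[0], 1)                            # implicit sync_all()
    framework.generate(framework.nodes[0], 1, sync_fun=framework.no_op)  # opt out of syncing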
diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py index dd6c97ea9f58c..a23fb9372ad6e 100755 --- a/test/functional/rpc_psbt.py +++ b/test/functional/rpc_psbt.py @@ -84,7 +84,6 @@ def run_test(self): signed_tx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex'])['hex'] txid = self.nodes[0].sendrawtransaction(signed_tx) self.generate(self.nodes[0], 6) - self.sync_all() # Find the output pos p2sh_pos = -1 @@ -228,7 +227,6 @@ def run_test(self): txid1 = self.nodes[0].sendtoaddress(node1_addr, 13) txid2 = self.nodes[0].sendtoaddress(node2_addr, 13) blockhash = self.generate(self.nodes[0], 6)[0] - self.sync_all() vout1 = find_output(self.nodes[1], txid1, 13, blockhash=blockhash) vout2 = find_output(self.nodes[2], txid2, 13, blockhash=blockhash) @@ -256,7 +254,6 @@ def run_test(self): finalized = self.nodes[0].finalizepsbt(combined)['hex'] self.nodes[0].sendrawtransaction(finalized) self.generate(self.nodes[0], 6) - self.sync_all() # Make sure change address wallet does not have P2SH innerscript access to results in success # when attempting BnB coin selection @@ -395,7 +392,6 @@ def test_psbt_input_keys(psbt_input, keys): txid4 = self.nodes[0].sendtoaddress(addr4, 5) vout4 = find_output(self.nodes[0], txid4, 5) self.generate(self.nodes[0], 6) - self.sync_all() psbt2 = self.nodes[1].createpsbt([{"txid":txid4, "vout":vout4}], {self.nodes[0].getnewaddress():Decimal('4.999')}) psbt2 = self.nodes[1].walletprocesspsbt(psbt2)['psbt'] psbt2_decoded = self.nodes[0].decodepsbt(psbt2) @@ -408,7 +404,6 @@ def test_psbt_input_keys(psbt_input, keys): addr = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 7) self.generate(self.nodes[0], 6) - self.sync_all() vout = find_output(self.nodes[0], txid, 7) psbt = self.nodes[1].createpsbt([{"txid":txid, "vout":vout}], {self.nodes[0].getnewaddress():Decimal('6.999')}) analyzed = self.nodes[0].analyzepsbt(psbt) diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py index e059d9be95d35..db8da986f177c 100755 --- a/test/functional/rpc_rawtransaction.py +++ b/test/functional/rpc_rawtransaction.py @@ -72,15 +72,12 @@ def setup_network(self): def run_test(self): self.log.info('prepare some coins for multiple *rawtransaction commands') self.generate(self.nodes[2], 1) - self.sync_all() self.generate(self.nodes[0], COINBASE_MATURITY + 1) - self.sync_all() self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0) self.sync_all() self.generate(self.nodes[0], 5) - self.sync_all() self.log.info('Test getrawtransaction on genesis block coinbase returns an error') block = self.nodes[0].getblock(self.nodes[0].getblockhash(0)) @@ -167,7 +164,6 @@ def run_test(self): # make a tx by sending then generate 2 blocks; block1 has the tx in it tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1) block1, block2 = self.generate(self.nodes[2], 2) - self.sync_all() # We should be able to get the raw transaction by providing the correct block gottx = self.nodes[0].getrawtransaction(tx, True, block1) assert_equal(gottx['txid'], tx) @@ -218,7 +214,6 @@ def run_test(self): txId = self.nodes[0].sendtoaddress(mSigObj, 1.2) self.sync_all() self.generate(self.nodes[0], 1) - self.sync_all() assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance @@ -239,7 +234,6 @@ def run_test(self): rawTx = 
self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() self.generate(self.nodes[0], 1) - self.sync_all() #THIS IS AN INCOMPLETE FEATURE #NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION @@ -262,7 +256,6 @@ def run_test(self): rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex']) self.sync_all() self.generate(self.nodes[0], 1) - self.sync_all() assert_equal(self.nodes[0].getbalance(), bal+Decimal('500.00000000')+Decimal('2.19000000')) #block reward + tx # 2of2 test for combining transactions @@ -282,7 +275,6 @@ def run_test(self): rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() self.generate(self.nodes[0], 1) - self.sync_all() assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable @@ -307,7 +299,6 @@ def run_test(self): rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb) self.sync_all() self.generate(self.nodes[0], 1) - self.sync_all() assert_equal(self.nodes[0].getbalance(), bal+Decimal('500.00000000')+Decimal('2.19000000')) #block reward + tx @@ -315,13 +306,11 @@ def run_test(self): addr = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 10) self.generate(self.nodes[0], 1) - self.sync_all() vout = find_vout_for_address(self.nodes[1], txid, addr) rawTx = self.nodes[1].createrawtransaction([{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): 9.999}) rawTxSigned = self.nodes[1].signrawtransactionwithwallet(rawTx) txId = self.nodes[1].sendrawtransaction(rawTxSigned['hex']) self.generate(self.nodes[0], 1) - self.sync_all() # getrawtransaction tests # 1. valid parameters - only supply txid diff --git a/test/functional/rpc_txoutproof.py b/test/functional/rpc_txoutproof.py index 2b0bad984b1b7..34eb48a89ea41 100755 --- a/test/functional/rpc_txoutproof.py +++ b/test/functional/rpc_txoutproof.py @@ -33,7 +33,6 @@ def run_test(self): # Add enough mature utxos to the wallet, so that all txs spend confirmed coins self.generate(miniwallet, 5) self.generate(self.nodes[0], COINBASE_MATURITY) - self.sync_all() chain_height = self.nodes[1].getblockcount() assert_equal(chain_height, 5 + COINBASE_MATURITY) @@ -59,7 +58,6 @@ def run_test(self): tx3 = miniwallet.send_self_transfer(from_node=self.nodes[0], utxo_to_spend=txin_spent) txid3 = tx3['txid'] self.generate(self.nodes[0], 1) - self.sync_all() txid_spent = txin_spent["txid"] txid_unspent = txid1 # Input was change from txid2, so txid1 should be unspent diff --git a/test/functional/wallet_avoidreuse.py b/test/functional/wallet_avoidreuse.py index 3ca8075e94a4e..f87abb88ddcab 100755 --- a/test/functional/wallet_avoidreuse.py +++ b/test/functional/wallet_avoidreuse.py @@ -80,7 +80,6 @@ def run_test(self): self.test_immutable() self.generate(self.nodes[0], 110) - self.sync_all() self.test_change_remains_change(self.nodes[1]) reset_balance(self.nodes[1], self.nodes[0].getnewaddress()) self.test_sending_from_reused_address_without_avoid_reuse() @@ -172,7 +171,6 @@ def test_sending_from_reused_address_without_avoid_reuse(self): self.nodes[0].sendtoaddress(fundaddr, 10) self.generate(self.nodes[0], 1) - self.sync_all() # listunspent should show 1 single, unused 10 btc output assert_unspent(self.nodes[1], total_count=1, total_sum=10, reused_supported=True, reused_count=0) @@ -183,7 +181,6 @@ def test_sending_from_reused_address_without_avoid_reuse(self): self.nodes[1].sendtoaddress(retaddr, 5) self.generate(self.nodes[0], 1) - self.sync_all() # listunspent should show 1 single, 
unused 5 btc output assert_unspent(self.nodes[1], total_count=1, total_sum=5, reused_supported=True, reused_count=0) @@ -192,7 +189,6 @@ def test_sending_from_reused_address_without_avoid_reuse(self): self.nodes[0].sendtoaddress(fundaddr, 10) self.generate(self.nodes[0], 1) - self.sync_all() # listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10) assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10) @@ -226,7 +222,6 @@ def test_sending_from_reused_address_fails(self): self.nodes[0].sendtoaddress(fundaddr, 10) self.generate(self.nodes[0], 1) - self.sync_all() # listunspent should show 1 single, unused 10 btc output assert_unspent(self.nodes[1], total_count=1, total_sum=10, reused_supported=True, reused_count=0) @@ -235,7 +230,6 @@ def test_sending_from_reused_address_fails(self): self.nodes[1].sendtoaddress(retaddr, 5) self.generate(self.nodes[0], 1) - self.sync_all() # listunspent should show 1 single, unused 5 btc output assert_unspent(self.nodes[1], total_count=1, total_sum=5, reused_supported=True, reused_count=0) @@ -245,7 +239,6 @@ def test_sending_from_reused_address_fails(self): if not self.options.descriptors: self.nodes[0].sendtoaddress(fundaddr, 10) self.generate(self.nodes[0], 1) - self.sync_all() # listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10) assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10) @@ -288,7 +281,6 @@ def test_getbalances_used(self): self.nodes[0].sendtoaddress(new_addr, 1) self.generate(self.nodes[0], 1) - self.sync_all() # send transaction that should not use all the available outputs # per the current coin selection algorithm @@ -320,7 +312,6 @@ def test_full_destination_group_is_preferred(self): self.nodes[0].sendtoaddress(new_addr, 1) self.generate(self.nodes[0], 1) - self.sync_all() # Sending a transaction that is smaller than each one of the # available outputs @@ -349,7 +340,6 @@ def test_all_destination_groups_are_used(self): self.nodes[0].sendtoaddress(new_addr, 1) self.generate(self.nodes[0], 1) - self.sync_all() # Sending a transaction that needs to use the full groups # of 100 inputs but also the incomplete group of 2 inputs. 
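For context on what the implicit sync amounts to: a `sync_blocks`-style helper simply polls the nodes until their best block hashes agree. The sketch below is an assumption about the general shape of such a helper (names, timeout, and poll interval are illustrative), not the framework's actual implementation.

    # Hedged sketch of a sync_blocks-style helper: poll until every node
    # reports the same tip, or fail after a timeout.
    import time

    def sync_blocks(nodes, *, timeout=60, poll=0.5):
        deadline = time.time() + timeout
        tips = []
        while time.time() < deadline:
            tips = [node.getbestblockhash() for node in nodes]
            if tips.count(tips[0]) == len(tips):
                return
            time.sleep(poll)
        raise AssertionError(f"block sync timed out, tips: {tips}")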
diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py index 310b56de95a08..9371cee0f5223 100755 --- a/test/functional/wallet_backup.py +++ b/test/functional/wallet_backup.py @@ -177,7 +177,6 @@ def run_test(self): # Generate 101 more blocks, so any fees paid mature self.generate(self.nodes[3], COINBASE_MATURITY + 1) - self.sync_all() balance0 = self.nodes[0].getbalance() balance1 = self.nodes[1].getbalance() diff --git a/test/functional/wallet_balance.py b/test/functional/wallet_balance.py index 9f5d36ac8fc23..317b48ea9f0e2 100755 --- a/test/functional/wallet_balance.py +++ b/test/functional/wallet_balance.py @@ -71,10 +71,8 @@ def run_test(self): self.log.info("Mining blocks ...") self.generate(self.nodes[0], 1) - self.sync_all() self.generate(self.nodes[1], 1) self.generatetoaddress(self.nodes[1], COINBASE_MATURITY + 1, ADDRESS_WATCHONLY) - self.sync_all() if not self.options.descriptors: # Tests legacy watchonly behavior which is not present (and does not need to be tested) in descriptor wallets @@ -201,7 +199,6 @@ def test_balances(*, fee_node_1=0): # test_balances(fee_node_1=Decimal('0.02')) # disabled, no RBF in Dash self.generatetoaddress(self.nodes[1], 1, ADDRESS_WATCHONLY) - self.sync_all() # balances are correct after the transactions are confirmed balance_node0 = Decimal('969.99') # node 1's send plus change from node 0's send @@ -215,7 +212,6 @@ def test_balances(*, fee_node_1=0): txs = create_transactions(self.nodes[1], self.nodes[0].getnewaddress(), Decimal('29.98'), [Decimal('0.01')]) self.nodes[1].sendrawtransaction(txs[0]['hex']) self.generatetoaddress(self.nodes[1], 2, ADDRESS_WATCHONLY) - self.sync_all() # getbalance with a minconf incorrectly excludes coins that have been spent more recently than the minconf blocks ago # TODO: fix getbalance tracking of coin spentness depth @@ -279,7 +275,6 @@ def test_balances(*, fee_node_1=0): self.sync_blocks() self.nodes[1].sendrawtransaction(tx_orig) self.generatetoaddress(self.nodes[1], 1, ADDRESS_WATCHONLY) - self.sync_all() assert_equal(self.nodes[0].getbalance(minconf=0), total_amount + 1) # The reorg recovered our fee of 1 coin diff --git a/test/functional/wallet_groups.py b/test/functional/wallet_groups.py index c01f52de17210..b2b85a566b68e 100755 --- a/test/functional/wallet_groups.py +++ b/test/functional/wallet_groups.py @@ -46,7 +46,6 @@ def run_test(self): [self.nodes[0].sendtoaddress(addr, 0.5) for addr in addrs] self.generate(self.nodes[0], 1) - self.sync_all() # For each node, send 0.2 coins back to 0; # - node[1] should pick one 0.5 UTXO and leave the rest @@ -114,7 +113,6 @@ def run_test(self): self.nodes[0].sendtoaddress(addr_aps, 1.0) self.nodes[0].sendtoaddress(addr_aps, 1.0) self.generate(self.nodes[0], 1) - self.sync_all() with self.nodes[3].assert_debug_log(['Fee non-grouped = 225, grouped = 372, using grouped']): txid4 = self.nodes[3].sendtoaddress(self.nodes[0].getnewaddress(), 0.1) tx4 = self.nodes[3].getrawtransaction(txid4, True) @@ -126,7 +124,6 @@ def run_test(self): addr_aps2 = self.nodes[3].getnewaddress() [self.nodes[0].sendtoaddress(addr_aps2, 1.0) for _ in range(5)] self.generate(self.nodes[0], 1) - self.sync_all() with self.nodes[3].assert_debug_log(['Fee non-grouped = 519, grouped = 813, using non-grouped']): txid5 = self.nodes[3].sendtoaddress(self.nodes[0].getnewaddress(), 2.95) tx5 = self.nodes[3].getrawtransaction(txid5, True) @@ -140,7 +137,6 @@ def run_test(self): addr_aps3 = self.nodes[4].getnewaddress() [self.nodes[0].sendtoaddress(addr_aps3, 1.0) for _ in 
range(5)] self.generate(self.nodes[0], 1) - self.sync_all() with self.nodes[4].assert_debug_log(['Fee non-grouped = 519, grouped = 813, using grouped']): txid6 = self.nodes[4].sendtoaddress(self.nodes[0].getnewaddress(), 2.95) tx6 = self.nodes[4].getrawtransaction(txid6, True) @@ -163,7 +159,6 @@ def run_test(self): signed_tx = self.nodes[0].signrawtransactionwithwallet(funded_tx['hex']) self.nodes[0].sendrawtransaction(signed_tx['hex']) self.generate(self.nodes[0], 1) - self.sync_all() # Check that we can create a transaction that only requires ~100 of our # utxos, without pulling in all outputs and creating a transaction that diff --git a/test/functional/wallet_import_rescan.py b/test/functional/wallet_import_rescan.py index 3de36972b812e..f23afa2d68ad3 100755 --- a/test/functional/wallet_import_rescan.py +++ b/test/functional/wallet_import_rescan.py @@ -177,7 +177,6 @@ def run_test(self): self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"] + TIMESTAMP_WINDOW + 1, ) self.generate(self.nodes[0], 1) - self.sync_all() # For each variation of wallet key import, invoke the import RPC and # check the results from getbalance and listtransactions. diff --git a/test/functional/wallet_importdescriptors.py b/test/functional/wallet_importdescriptors.py index d58b7fe83f0f6..c3c11ee11fa7a 100755 --- a/test/functional/wallet_importdescriptors.py +++ b/test/functional/wallet_importdescriptors.py @@ -427,10 +427,8 @@ def run_test(self): assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1000) txid = w0.sendtoaddress(addr, 10) self.generate(self.nodes[0], 6) - self.sync_all() wmulti_priv.sendtoaddress(w0.getnewaddress(), 8) # uses change 1 self.generate(self.nodes[0], 6) - self.sync_all() self.nodes[1].createwallet(wallet_name="wmulti_pub", disable_private_keys=True, blank=True, descriptors=True) wmulti_pub = self.nodes[1].get_wallet_rpc("wmulti_pub") @@ -468,7 +466,6 @@ def run_test(self): vout2 = find_vout_for_address(self.nodes[0], txid2, addr2) self.generate(self.nodes[0], 6) - self.sync_all() assert_equal(wmulti_pub.getbalance(), wmulti_priv.getbalance()) # Make sure that descriptor wallets containing multiple xpubs in a single descriptor load correctly @@ -559,7 +556,6 @@ def run_test(self): addr = multi_priv_big.getnewaddress("") w0.sendtoaddress(addr, 10) self.generate(self.nodes[0], 6) - self.sync_all() # It is standard and would relay. 
txid = multi_priv_big.sendtoaddress(w0.getnewaddress(), 10, "", "", True) diff --git a/test/functional/wallet_importprunedfunds.py b/test/functional/wallet_importprunedfunds.py index 7ecfb408ebb04..57f67f8abc591 100755 --- a/test/functional/wallet_importprunedfunds.py +++ b/test/functional/wallet_importprunedfunds.py @@ -27,8 +27,6 @@ def run_test(self): self.log.info("Mining blocks...") self.generate(self.nodes[0], COINBASE_MATURITY + 1) - self.sync_all() - # address address1 = self.nodes[0].getnewaddress() # pubkey diff --git a/test/functional/wallet_listreceivedby.py b/test/functional/wallet_listreceivedby.py index 27950fb4cb1ea..040bc5e402518 100755 --- a/test/functional/wallet_listreceivedby.py +++ b/test/functional/wallet_listreceivedby.py @@ -44,7 +44,6 @@ def run_test(self): True) # Bury Tx under 10 block so it will be returned by listreceivedbyaddress self.generate(self.nodes[1], 10) - self.sync_all() assert_array_result(self.nodes[1].listreceivedbyaddress(), {"address": addr}, {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]}) @@ -79,7 +78,6 @@ def run_test(self): other_addr = self.nodes[1].getnewaddress() txid2 = self.nodes[0].sendtoaddress(other_addr, 0.1) self.generate(self.nodes[0], 1) - self.sync_all() # Same test as above should still pass expected = {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 11, "txids": [txid, ]} res = self.nodes[1].listreceivedbyaddress(0, True, True, True, addr) @@ -116,7 +114,6 @@ def run_test(self): # Bury Tx under 10 block so it will be returned by the default getreceivedbyaddress self.generate(self.nodes[1], 10) - self.sync_all() balance = self.nodes[1].getreceivedbyaddress(addr) assert_equal(balance, Decimal("0.1")) @@ -145,7 +142,6 @@ def run_test(self): assert_equal(balance, balance_by_label) self.generate(self.nodes[1], 10) - self.sync_all() # listreceivedbylabel should return updated received list assert_array_result(self.nodes[1].listreceivedbylabel(), {"label": label}, diff --git a/test/functional/wallet_listtransactions.py b/test/functional/wallet_listtransactions.py index f486e9bf7956b..77a58b0d2192e 100755 --- a/test/functional/wallet_listtransactions.py +++ b/test/functional/wallet_listtransactions.py @@ -90,7 +90,6 @@ def run_test(self): self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True) txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1) self.generate(self.nodes[1], 1) - self.sync_all() assert_equal(len(self.nodes[0].listtransactions(label="watchonly", include_watchonly=True)), 1) assert_equal(len(self.nodes[0].listtransactions(dummy="watchonly", include_watchonly=True)), 1) assert len(self.nodes[0].listtransactions(label="watchonly", count=100, include_watchonly=False)) == 0 From 131d16133c656adc66717bfc819c5751d59a7f6c Mon Sep 17 00:00:00 2001 From: Kittywhiskers Van Gogh <63189531+kwvg@users.noreply.github.com> Date: Tue, 1 Oct 2024 15:50:09 +0000 Subject: [PATCH 11/11] test: cleanup `generate` logic in some governance functional tests Co-authored-by: UdjinM6 --- test/functional/feature_governance.py | 36 ++++++++---------------- test/functional/feature_governance_cl.py | 7 ++--- 2 files changed, 14 insertions(+), 29 deletions(-) diff --git a/test/functional/feature_governance.py b/test/functional/feature_governance.py index c53c6210d905e..4e499ffa31ace 100755 --- a/test/functional/feature_governance.py +++ b/test/functional/feature_governance.py @@ -89,17 +89,15 @@ def run_test(self): 
assert_equal(len(self.nodes[0].gobject("list-prepared")), 0) self.log.info("Check 1st superblock before v20") - self.generate(self.nodes[0], 3, sync_fun=self.no_op) self.bump_mocktime(3) - self.sync_blocks() + self.generate(self.nodes[0], 3, sync_fun=lambda: self.sync_blocks()) assert_equal(self.nodes[0].getblockcount(), 130) assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["active"], False) self.check_superblockbudget(False) self.log.info("Check 2nd superblock before v20") - self.generate(self.nodes[0], 10, sync_fun=self.no_op) self.bump_mocktime(10) - self.sync_blocks() + self.generate(self.nodes[0], 10, sync_fun=lambda: self.sync_blocks()) assert_equal(self.nodes[0].getblockcount(), 140) assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["active"], False) self.check_superblockbudget(False) @@ -117,9 +115,8 @@ def run_test(self): p1_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_1", self.p1_amount, self.p1_payout_address) p2_collateral_prepare = prepare_object(self.nodes[0], 1, uint256_to_string(0), proposal_time, 1, "Proposal_2", self.p2_amount, self.p2_payout_address) - self.generate(self.nodes[0], 6, sync_fun=self.no_op) self.bump_mocktime(6) - self.sync_blocks() + self.generate(self.nodes[0], 6, sync_fun=lambda: self.sync_blocks()) assert_equal(len(self.nodes[0].gobject("list-prepared")), 3) assert_equal(len(self.nodes[0].gobject("list")), 0) @@ -165,9 +162,8 @@ def run_test(self): self.log.info("v20 is expected to be activate since block 160") assert block_count + n < 160 for _ in range(n - 1): - self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) self.check_superblockbudget(False) assert_equal(len(self.nodes[0].gobject("list", "valid", "triggers")), 0) @@ -268,9 +264,8 @@ def sync_gov(node): assert_equal(more_triggers, False) self.log.info("Move another block inside the Superblock maturity window") - self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) self.log.info("Should see NO votes on both triggers now") self.wait_until(lambda: self.nodes[0].gobject("list", "valid", "triggers")[winning_trigger_hash]['NoCount'] == 1, timeout=5) @@ -284,9 +279,8 @@ def sync_gov(node): self.log.info("Move another block inside the Superblock maturity window") with self.nodes[1].assert_debug_log(["CGovernanceManager::VoteGovernanceTriggers"]): - self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) self.log.info("Vote count should not change even though MNs are allowed to vote again") assert_equal(before, self.nodes[1].gobject("count")["votes"]) @@ -298,9 +292,8 @@ def sync_gov(node): self.log.info("Move remaining n blocks until actual Superblock") for i in range(n): - self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) # comparing to 159 because bip9 forks are active when the tip is one block behind the activation height self.check_superblockbudget(block_count + i + 1 >= 159) @@ -310,9 +303,8 @@ def sync_gov(node): self.log.info("Move a few block past the recent superblock height and make sure we have no new votes") for _ in range(5): with self.nodes[1].assert_debug_log("", [f"Voting NO-FUNDING for
trigger:{winning_trigger_hash} success"]): - self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) # Votes on both triggers should NOT change assert_equal(self.nodes[0].gobject("list", "valid", "triggers")[winning_trigger_hash]['NoCount'], 1) assert_equal(self.nodes[0].gobject("list", "valid", "triggers")[isolated_trigger_hash]['NoCount'], self.mn_count - 1) @@ -322,31 +314,27 @@ def sync_gov(node): self.log.info("Move remaining n blocks until the next Superblock") for _ in range(n - 1): - self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) self.log.info("Wait for new trigger and votes") self.wait_until(lambda: have_trigger_for_height(self.nodes, 180)) self.log.info("Mine superblock") - self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) assert_equal(self.nodes[0].getblockcount(), 180) assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["active"], True) self.log.info("Mine and check a couple more superblocks") for i in range(2): for _ in range(sb_cycle - 1): - self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) # Wait for new trigger and votes sb_block_height = 180 + (i + 1) * sb_cycle self.wait_until(lambda: have_trigger_for_height(self.nodes, sb_block_height)) # Mine superblock - self.generate(self.nodes[0], 1, sync_fun=self.no_op) self.bump_mocktime(1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) assert_equal(self.nodes[0].getblockcount(), sb_block_height) assert_equal(self.nodes[0].getblockchaininfo()["softforks"]["v20"]["active"], True) self.check_superblockbudget(True) diff --git a/test/functional/feature_governance_cl.py b/test/functional/feature_governance_cl.py index bf5e6530bd769..6b1dab92d0210 100755 --- a/test/functional/feature_governance_cl.py +++ b/test/functional/feature_governance_cl.py @@ -83,9 +83,7 @@ def run_test(self): p1_collateral_prepare = self.prepare_object(1, uint256_to_string(0), proposal_time, 1, "Proposal_1", self.p1_amount, self.p1_payout_address) self.bump_mocktime(60 * 10 + 1) - self.generate(self.nodes[0], 6, sync_fun=self.no_op) - self.bump_mocktime(6 * 156) - self.sync_blocks() + self.generate(self.nodes[0], 6, sync_fun=lambda: self.sync_blocks()) assert_equal(len(self.nodes[0].gobject("list-prepared")), 2) assert_equal(len(self.nodes[0].gobject("list")), 0) @@ -158,10 +156,9 @@ def run_test(self): self.log.info("Reconnect isolated node and confirm the next ChainLock will let it sync") self.reconnect_isolated_node(5, 0) assert_equal(self.nodes[5].mnsync("status")["IsSynced"], False) - self.generate(self.nodes[0], 1, sync_fun=self.no_op) # NOTE: bumping mocktime too much after recent reconnect can result in "timeout downloading block" self.bump_mocktime(1) - self.sync_blocks() + self.generate(self.nodes[0], 1, sync_fun=lambda: self.sync_blocks()) if __name__ == '__main__':
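The governance hunks above collapse the old three-step pattern (generate with `sync_fun=self.no_op`, bump mocktime, then `self.sync_blocks()`) into bumping mocktime first and letting a single `self.generate(..., sync_fun=lambda: self.sync_blocks())` call mine and sync in one step. A small runnable sketch of that loop shape follows; `DemoChain` and the print-based sync are illustrative stand-ins, assuming the wrapper invokes `sync_fun()` only after the blocks are mined.

    # Hedged sketch of the cleaned-up mining loop: advance the mock clock,
    # then mine, then sync. DemoChain is an illustrative stand-in, not part
    # of the framework.
    import time

    class DemoChain:
        def __init__(self):
            self.mocktime = int(time.time())
            self.height = 0

        def bump_mocktime(self, seconds):
            # advance the mock clock first so the next block's timestamp moves forward
            self.mocktime += seconds

        def generate(self, nblocks, sync_fun=None):
            self.height += nblocks
            # the sync callable runs only after mining, mirroring the wrapper's order
            sync_fun() if sync_fun else print("sync_all (default)")
            return self.height

    chain = DemoChain()
    for _ in range(3):
        chain.bump_mocktime(1)
        # sync_fun must be a callable; wrapping the sync in a lambda defers it
        # until after the block is mined
        chain.generate(1, sync_fun=lambda: print("sync_blocks at height", chain.height))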