# test/functional/test_framework/test_framework.py
   1  #!/usr/bin/env python3
   2  # Copyright (c) 2014-2022 The Bitcoin Core developers
   3  # Distributed under the MIT software license, see the accompanying
   4  # file COPYING or http://www.opensource.org/licenses/mit-license.php.
   5  """Base class for RPC testing."""
   6  
   7  import configparser
   8  from enum import Enum
   9  import argparse
  10  import logging
  11  import os
  12  import platform
  13  import pdb
  14  import random
  15  import re
  16  import shutil
  17  import subprocess
  18  import sys
  19  import tempfile
  20  import time
  21  
  22  from .address import create_deterministic_address_bcrt1_p2tr_op_true
  23  from .authproxy import JSONRPCException
  24  from . import coverage
  25  from .p2p import NetworkThread
  26  from .test_node import TestNode
  27  from .util import (
  28      MAX_NODES,
  29      PortSeed,
  30      assert_equal,
  31      check_json_precision,
  32      find_vout_for_address,
  33      get_datadir_path,
  34      initialize_datadir,
  35      p2p_port,
  36      wait_until_helper_internal,
  37  )
  38  
  39  
class TestStatus(Enum):
    """Outcome of a test run; mapped to a process exit code in shutdown()."""
    PASSED = 1
    FAILED = 2
    SKIPPED = 3
  44  
# Process exit codes reported to the test runner. 77 is the conventional
# "skipped" exit code used by automake-style test harnesses.
TEST_EXIT_PASSED = 0
TEST_EXIT_FAILED = 1
TEST_EXIT_SKIPPED = 77

# Prefix for temp directories created by tempfile.mkdtemp() when --tmpdir is not given.
TMPDIR_PREFIX = "bitcoin_func_test_"
  50  
  51  
  52  class SkipTest(Exception):
  53      """This exception is raised to skip a test"""
  54  
  55      def __init__(self, message):
  56          self.message = message
  57  
  58  
  59  class BitcoinTestMetaClass(type):
  60      """Metaclass for BitcoinTestFramework.
  61  
  62      Ensures that any attempt to register a subclass of `BitcoinTestFramework`
  63      adheres to a standard whereby the subclass overrides `set_test_params` and
  64      `run_test` but DOES NOT override either `__init__` or `main`. If any of
  65      those standards are violated, a ``TypeError`` is raised."""
  66  
  67      def __new__(cls, clsname, bases, dct):
  68          if not clsname == 'BitcoinTestFramework':
  69              if not ('run_test' in dct and 'set_test_params' in dct):
  70                  raise TypeError("BitcoinTestFramework subclasses must override "
  71                                  "'run_test' and 'set_test_params'")
  72              if '__init__' in dct or 'main' in dct:
  73                  raise TypeError("BitcoinTestFramework subclasses may not override "
  74                                  "'__init__' or 'main'")
  75  
  76          return super().__new__(cls, clsname, bases, dct)
  77  
  78  
  79  class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
  80      """Base class for a bitcoin test script.
  81  
  82      Individual bitcoin test scripts should subclass this class and override the set_test_params() and run_test() methods.
  83  
  84      Individual tests can also override the following methods to customize the test setup:
  85  
  86      - add_options()
  87      - setup_chain()
  88      - setup_network()
  89      - setup_nodes()
  90  
  91      The __init__() and main() methods should not be overridden.
  92  
  93      This class also contains various public and private helper methods."""
  94  
    def __init__(self) -> None:
        """Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method"""
        self.chain: str = 'regtest'
        self.setup_clean_chain: bool = False
        self.noban_tx_relay: bool = False
        self.nodes: list[TestNode] = []
        self.extra_args = None
        self.network_thread = None
        self.rpc_timeout = 60  # Wait for up to 60 seconds for the RPC server to respond
        self.supports_cli = True
        self.bind_to_localhost_only = True
        # Parse options before set_test_params(): the defaults below and the
        # subclass's set_test_params() may read self.options.
        self.parse_args()
        self.default_wallet_name = "default_wallet" if self.options.descriptors else ""
        self.wallet_data_filename = "wallet.dat"
        # Optional list of wallet names that can be set in set_test_params to
        # create and import keys to. If unset, default is len(nodes) *
        # [default_wallet_name]. If wallet names are None, wallet creation is
        # skipped. If list is truncated, wallet creation is skipped and keys
        # are not imported.
        self.wallet_names = None
        # By default the wallet is not required. Set to true by skip_if_no_wallet().
        # When False, we ignore wallet_names regardless of what it is.
        self._requires_wallet = False
        # Disable ThreadOpenConnections by default, so that adding entries to
        # addrman will not result in automatic connections to them.
        self.disable_autoconnect = True
        self.set_test_params()
        # wallet_names may have been set by set_test_params() above.
        assert self.wallet_names is None or len(self.wallet_names) <= self.num_nodes
        self.rpc_timeout = int(self.rpc_timeout * self.options.timeout_factor) # optionally, increase timeout by a factor
 124  
    def main(self):
        """Main function. This should not be overridden by the subclass test scripts.

        Runs setup() and run_test(), translates any raised exception into a
        TestStatus, then always shuts down and exits the process with the
        corresponding exit code."""

        assert hasattr(self, "num_nodes"), "Test must set self.num_nodes in set_test_params()"

        try:
            self.setup()
            self.run_test()
        except JSONRPCException:
            self.log.exception("JSONRPC error")
            self.success = TestStatus.FAILED
        except SkipTest as e:
            self.log.warning("Test Skipped: %s" % e.message)
            self.success = TestStatus.SKIPPED
        except AssertionError:
            self.log.exception("Assertion failed")
            self.success = TestStatus.FAILED
        except KeyError:
            self.log.exception("Key error")
            self.success = TestStatus.FAILED
        except subprocess.CalledProcessError as e:
            self.log.exception("Called Process failed with '{}'".format(e.output))
            self.success = TestStatus.FAILED
        except Exception:
            self.log.exception("Unexpected exception caught during testing")
            self.success = TestStatus.FAILED
        except KeyboardInterrupt:
            # Reachable even though it follows `except Exception`:
            # KeyboardInterrupt derives from BaseException, not Exception.
            self.log.warning("Exiting after keyboard interrupt")
            self.success = TestStatus.FAILED
        finally:
            # Always stop nodes/logging and exit with the mapped exit code,
            # regardless of how the test body terminated.
            exit_code = self.shutdown()
            sys.exit(exit_code)
 157  
 158      def parse_args(self):
 159          previous_releases_path = os.getenv("PREVIOUS_RELEASES_DIR") or os.getcwd() + "/releases"
 160          parser = argparse.ArgumentParser(usage="%(prog)s [options]")
 161          parser.add_argument("--nocleanup", dest="nocleanup", default=False, action="store_true",
 162                              help="Leave bitcoinds and test.* datadir on exit or error")
 163          parser.add_argument("--noshutdown", dest="noshutdown", default=False, action="store_true",
 164                              help="Don't stop bitcoinds after the test execution")
 165          parser.add_argument("--cachedir", dest="cachedir", default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"),
 166                              help="Directory for caching pregenerated datadirs (default: %(default)s)")
 167          parser.add_argument("--tmpdir", dest="tmpdir", help="Root directory for datadirs")
 168          parser.add_argument("-l", "--loglevel", dest="loglevel", default="INFO",
 169                              help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console. Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.")
 170          parser.add_argument("--tracerpc", dest="trace_rpc", default=False, action="store_true",
 171                              help="Print out all RPC calls as they are made")
 172          parser.add_argument("--portseed", dest="port_seed", default=os.getpid(), type=int,
 173                              help="The seed to use for assigning port numbers (default: current process id)")
 174          parser.add_argument("--previous-releases", dest="prev_releases", action="store_true",
 175                              default=os.path.isdir(previous_releases_path) and bool(os.listdir(previous_releases_path)),
 176                              help="Force test of previous releases (default: %(default)s)")
 177          parser.add_argument("--coveragedir", dest="coveragedir",
 178                              help="Write tested RPC commands into this directory")
 179          parser.add_argument("--configfile", dest="configfile",
 180                              default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../config.ini"),
 181                              help="Location of the test framework config file (default: %(default)s)")
 182          parser.add_argument("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true",
 183                              help="Attach a python debugger if test fails")
 184          parser.add_argument("--usecli", dest="usecli", default=False, action="store_true",
 185                              help="use bitcoin-cli instead of RPC for all commands")
 186          parser.add_argument("--perf", dest="perf", default=False, action="store_true",
 187                              help="profile running nodes with perf for the duration of the test")
 188          parser.add_argument("--valgrind", dest="valgrind", default=False, action="store_true",
 189                              help="run nodes under the valgrind memory error detector: expect at least a ~10x slowdown. valgrind 3.14 or later required. Does not apply to previous release binaries.")
 190          parser.add_argument("--randomseed", type=int,
 191                              help="set a random seed for deterministically reproducing a previous test run")
 192          parser.add_argument("--timeout-factor", dest="timeout_factor", type=float, help="adjust test timeouts by a factor. Setting it to 0 disables all timeouts")
 193          parser.add_argument("--v2transport", dest="v2transport", default=False, action="store_true",
 194                              help="use BIP324 v2 connections between all nodes by default")
 195          parser.add_argument("--v1transport", dest="v1transport", default=False, action="store_true",
 196                              help="Explicitly use v1 transport (can be used to overwrite global --v2transport option)")
 197  
 198          self.add_options(parser)
 199          # Running TestShell in a Jupyter notebook causes an additional -f argument
 200          # To keep TestShell from failing with an "unrecognized argument" error, we add a dummy "-f" argument
 201          # source: https://stackoverflow.com/questions/48796169/how-to-fix-ipykernel-launcher-py-error-unrecognized-arguments-in-jupyter/56349168#56349168
 202          parser.add_argument("-f", "--fff", help="a dummy argument to fool ipython", default="1")
 203          self.options = parser.parse_args()
 204          if self.options.timeout_factor == 0:
 205              self.options.timeout_factor = 99999
 206          self.options.timeout_factor = self.options.timeout_factor or (4 if self.options.valgrind else 1)
 207          self.options.previous_releases_path = previous_releases_path
 208  
 209          config = configparser.ConfigParser()
 210          config.read_file(open(self.options.configfile))
 211          self.config = config
 212          if self.options.v1transport:
 213              self.options.v2transport=False
 214  
 215          if "descriptors" not in self.options:
 216              # Wallet is not required by the test at all and the value of self.options.descriptors won't matter.
 217              # It still needs to exist and be None in order for tests to work however.
 218              # So set it to None to force -disablewallet, because the wallet is not needed.
 219              self.options.descriptors = None
 220          elif self.options.descriptors is None:
 221              # Some wallet is either required or optionally used by the test.
 222              # Prefer SQLite unless it isn't available
 223              if self.is_sqlite_compiled():
 224                  self.options.descriptors = True
 225              elif self.is_bdb_compiled():
 226                  self.options.descriptors = False
 227              else:
 228                  # If neither are compiled, tests requiring a wallet will be skipped and the value of self.options.descriptors won't matter
 229                  # It still needs to exist and be None in order for tests to work however.
 230                  # So set it to None, which will also set -disablewallet.
 231                  self.options.descriptors = None
 232  
 233          PortSeed.n = self.options.port_seed
 234  
 235      def set_binary_paths(self):
 236          """Update self.options with the paths of all binaries from environment variables or their default values"""
 237  
 238          binaries = {
 239              "bitcoind": ("bitcoind", "BITCOIND"),
 240              "bitcoin-cli": ("bitcoincli", "BITCOINCLI"),
 241              "bitcoin-util": ("bitcoinutil", "BITCOINUTIL"),
 242              "bitcoin-wallet": ("bitcoinwallet", "BITCOINWALLET"),
 243          }
 244          for binary, [attribute_name, env_variable_name] in binaries.items():
 245              default_filename = os.path.join(
 246                  self.config["environment"]["BUILDDIR"],
 247                  "src",
 248                  binary + self.config["environment"]["EXEEXT"],
 249              )
 250              setattr(self.options, attribute_name, os.getenv(env_variable_name, default=default_filename))
 251  
    def setup(self):
        """Call this method to start up the test framework object with options set."""

        check_json_precision()

        self.options.cachedir = os.path.abspath(self.options.cachedir)

        config = self.config

        self.set_binary_paths()

        # Prepend the build dirs to PATH so freshly-built binaries are found
        # before any system-installed ones.
        os.environ['PATH'] = os.pathsep.join([
            os.path.join(config['environment']['BUILDDIR'], 'src'),
            os.path.join(config['environment']['BUILDDIR'], 'src', 'qt'), os.environ['PATH']
        ])

        # Set up temp directory and start logging
        if self.options.tmpdir:
            self.options.tmpdir = os.path.abspath(self.options.tmpdir)
            # exist_ok=False: fail loudly rather than reuse a stale datadir.
            os.makedirs(self.options.tmpdir, exist_ok=False)
        else:
            self.options.tmpdir = tempfile.mkdtemp(prefix=TMPDIR_PREFIX)
        self._start_logging()

        # Seed the PRNG. Note that test runs are reproducible if and only if
        # a single thread accesses the PRNG. For more information, see
        # https://docs.python.org/3/library/random.html#notes-on-reproducibility.
        # The network thread shouldn't access random. If we need to change the
        # network thread to access randomness, it should instantiate its own
        # random.Random object.
        seed = self.options.randomseed

        if seed is None:
            seed = random.randrange(sys.maxsize)
        else:
            self.log.info("User supplied random seed {}".format(seed))

        random.seed(seed)
        self.log.info("PRNG seed is: {}".format(seed))

        self.log.debug('Setting up network thread')
        self.network_thread = NetworkThread()
        self.network_thread.start()

        if self.options.usecli:
            if not self.supports_cli:
                raise SkipTest("--usecli specified but test does not support using CLI")
            self.skip_if_no_cli()
        self.skip_test_if_missing_module()
        self.setup_chain()
        self.setup_network()

        # Assume success; main() overwrites this if an exception is raised.
        self.success = TestStatus.PASSED
 305  
    def shutdown(self):
        """Call this method to shut down the test framework object.

        Stops nodes (unless --noshutdown), optionally removes the temp
        directory, tears down log handlers, and returns the process exit code
        corresponding to self.success."""

        if self.success == TestStatus.FAILED and self.options.pdbonfailure:
            print("Testcase failed. Attaching python debugger. Enter ? for help")
            pdb.set_trace()

        self.log.debug('Closing down network thread')
        self.network_thread.close()
        if not self.options.noshutdown:
            self.log.info("Stopping nodes")
            if self.nodes:
                self.stop_nodes()
        else:
            # --noshutdown: leave bitcoinds running for post-mortem inspection.
            for node in self.nodes:
                node.cleanup_on_exit = False
            self.log.info("Note: bitcoinds were not stopped and may still be running")

        # Only delete the temp dir on a fully clean run: nodes stopped, test
        # did not fail, and no perf data that would be lost.
        should_clean_up = (
            not self.options.nocleanup and
            not self.options.noshutdown and
            self.success != TestStatus.FAILED and
            not self.options.perf
        )
        if should_clean_up:
            self.log.info("Cleaning up {} on exit".format(self.options.tmpdir))
            cleanup_tree_on_exit = True
        elif self.options.perf:
            self.log.warning("Not cleaning up dir {} due to perf data".format(self.options.tmpdir))
            cleanup_tree_on_exit = False
        else:
            self.log.warning("Not cleaning up dir {}".format(self.options.tmpdir))
            cleanup_tree_on_exit = False

        # Map the test outcome to a process exit code.
        if self.success == TestStatus.PASSED:
            self.log.info("Tests successful")
            exit_code = TEST_EXIT_PASSED
        elif self.success == TestStatus.SKIPPED:
            self.log.info("Test skipped")
            exit_code = TEST_EXIT_SKIPPED
        else:
            self.log.error("Test failed. Test logging available at %s/test_framework.log", self.options.tmpdir)
            self.log.error("")
            self.log.error("Hint: Call {} '{}' to consolidate all logs".format(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../combine_logs.py"), self.options.tmpdir))
            self.log.error("")
            self.log.error("If this failure happened unexpectedly or intermittently, please file a bug and provide a link or upload of the combined log.")
            self.log.error(self.config['environment']['PACKAGE_BUGREPORT'])
            self.log.error("")
            exit_code = TEST_EXIT_FAILED
        # Logging.shutdown will not remove stream- and filehandlers, so we must
        # do it explicitly. Handlers are removed so the next test run can apply
        # different log handler settings.
        # See: https://docs.python.org/3/library/logging.html#logging.shutdown
        for h in list(self.log.handlers):
            h.flush()
            h.close()
            self.log.removeHandler(h)
        rpc_logger = logging.getLogger("BitcoinRPC")
        for h in list(rpc_logger.handlers):
            h.flush()
            # NOTE(review): unlike self.log above, these handlers are removed
            # without being closed -- confirm whether that is intentional.
            rpc_logger.removeHandler(h)
        if cleanup_tree_on_exit:
            shutil.rmtree(self.options.tmpdir)

        self.nodes.clear()
        return exit_code
 372  
 373      # Methods to override in subclass test scripts.
    def set_test_params(self):
        """Tests must override this method to change default values for number of nodes, topology, etc.

        At minimum, subclasses must set self.num_nodes here (asserted in main())."""
        raise NotImplementedError
 377  
    def add_options(self, parser):
        """Override this method to add command-line options to the test.

        Called from parse_args() with the argparse.ArgumentParser before parsing."""
        pass
 381  
    def skip_test_if_missing_module(self):
        """Override this method to skip a test if a module is not compiled.

        Called from setup(); implementations typically raise SkipTest."""
        pass
 385  
 386      def setup_chain(self):
 387          """Override this method to customize blockchain setup"""
 388          self.log.info("Initializing test directory " + self.options.tmpdir)
 389          if self.setup_clean_chain:
 390              self._initialize_chain_clean()
 391          else:
 392              self._initialize_chain()
 393  
 394      def setup_network(self):
 395          """Override this method to customize test network topology"""
 396          self.setup_nodes()
 397  
 398          # Connect the nodes as a "chain".  This allows us
 399          # to split the network between nodes 1 and 2 to get
 400          # two halves that can work on competing chains.
 401          #
 402          # Topology looks like this:
 403          # node0 <-- node1 <-- node2 <-- node3
 404          #
 405          # If all nodes are in IBD (clean chain from genesis), node0 is assumed to be the source of blocks (miner). To
 406          # ensure block propagation, all nodes will establish outgoing connections toward node0.
 407          # See fPreferredDownload in net_processing.
 408          #
 409          # If further outbound connections are needed, they can be added at the beginning of the test with e.g.
 410          # self.connect_nodes(1, 2)
 411          for i in range(self.num_nodes - 1):
 412              self.connect_nodes(i + 1, i)
 413          self.sync_all()
 414  
    def setup_nodes(self):
        """Override this method to customize test node setup"""
        self.add_nodes(self.num_nodes, self.extra_args)
        self.start_nodes()
        if self._requires_wallet:
            self.import_deterministic_coinbase_privkeys()
        if not self.setup_clean_chain:
            # The cached chain loaded by _initialize_chain() has 199 blocks;
            # verify every node picked it up.
            for n in self.nodes:
                assert_equal(n.getblockchaininfo()["blocks"], 199)
            # To ensure that all nodes are out of IBD, the most recent block
            # must have a timestamp not too old (see IsInitialBlockDownload()).
            self.log.debug('Generate a block with current time')
            block_hash = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0]
            block = self.nodes[0].getblock(blockhash=block_hash, verbosity=0)
            # Submit the raw block to every node directly (no relay needed),
            # then confirm each has left IBD at height 200.
            for n in self.nodes:
                n.submitblock(block)
                chain_info = n.getblockchaininfo()
                assert_equal(chain_info["blocks"], 200)
                assert_equal(chain_info["initialblockdownload"], False)
 434  
 435      def import_deterministic_coinbase_privkeys(self):
 436          for i in range(self.num_nodes):
 437              self.init_wallet(node=i)
 438  
 439      def init_wallet(self, *, node):
 440          wallet_name = self.default_wallet_name if self.wallet_names is None else self.wallet_names[node] if node < len(self.wallet_names) else False
 441          if wallet_name is not False:
 442              n = self.nodes[node]
 443              if wallet_name is not None:
 444                  n.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors, load_on_startup=True)
 445              n.importprivkey(privkey=n.get_deterministic_priv_key().key, label='coinbase', rescan=True)
 446  
    def run_test(self):
        """Tests must override this method to define test logic.

        Invoked by main() after setup() has started the nodes."""
        raise NotImplementedError
 450  
 451      # Public helper methods. These can be accessed by the subclass test scripts.
 452  
 453      def add_wallet_options(self, parser, *, descriptors=True, legacy=True):
 454          kwargs = {}
 455          if descriptors + legacy == 1:
 456              # If only one type can be chosen, set it as default
 457              kwargs["default"] = descriptors
 458          group = parser.add_mutually_exclusive_group(
 459              # If only one type is allowed, require it to be set in test_runner.py
 460              required=os.getenv("REQUIRE_WALLET_TYPE_SET") == "1" and "default" in kwargs)
 461          if descriptors:
 462              group.add_argument("--descriptors", action='store_const', const=True, **kwargs,
 463                                 help="Run test using a descriptor wallet", dest='descriptors')
 464          if legacy:
 465              group.add_argument("--legacy-wallet", action='store_const', const=False, **kwargs,
 466                                 help="Run test using legacy wallets", dest='descriptors')
 467  
    def add_nodes(self, num_nodes: int, extra_args=None, *, rpchost=None, binary=None, binary_cli=None, versions=None):
        """Instantiate TestNode objects.

        Should only be called once after the nodes have been specified in
        set_test_params().

        Args:
            num_nodes: number of TestNode objects to create.
            extra_args: per-node list of extra bitcoind arguments.
            rpchost: host to bind RPC to (passed through to TestNode).
            binary / binary_cli: per-node bitcoind / bitcoin-cli paths;
                derived from `versions` and the current build when None.
            versions: per-node client version numbers; None entries mean the
                locally built binaries."""
        def get_bin_from_version(version, bin_name, bin_default):
            # Resolve a previous-release binary path from a client version
            # number, e.g. 170000 -> <releases>/v0.17.0/bin/<bin_name>.
            if not version:
                return bin_default
            if version > 219999:
                # Starting at client version 220000 the first two digits represent
                # the major version, e.g. v22.0 instead of v0.22.0.
                version *= 100
            return os.path.join(
                self.options.previous_releases_path,
                re.sub(
                    r'\.0$' if version <= 219999 else r'(\.0){1,2}$',
                    '', # Remove trailing dot for point releases, after 22.0 also remove double trailing dot.
                    'v{}.{}.{}.{}'.format(
                        (version % 100000000) // 1000000,
                        (version % 1000000) // 10000,
                        (version % 10000) // 100,
                        (version % 100) // 1,
                    ),
                ),
                'bin',
                bin_name,
            )

        if self.bind_to_localhost_only:
            extra_confs = [["bind=127.0.0.1"]] * num_nodes
        else:
            extra_confs = [[]] * num_nodes
        if extra_args is None:
            extra_args = [[]] * num_nodes
        # Whitelist peers to speed up tx relay / mempool sync. Don't use it if testing tx relay or timing.
        if self.noban_tx_relay:
            for i in range(len(extra_args)):
                extra_args[i] = extra_args[i] + ["-whitelist=noban,in,out@127.0.0.1"]
        if versions is None:
            versions = [None] * num_nodes
        if binary is None:
            binary = [get_bin_from_version(v, 'bitcoind', self.options.bitcoind) for v in versions]
        if binary_cli is None:
            binary_cli = [get_bin_from_version(v, 'bitcoin-cli', self.options.bitcoincli) for v in versions]
        # All per-node option lists must line up one-to-one with the nodes.
        assert_equal(len(extra_confs), num_nodes)
        assert_equal(len(extra_args), num_nodes)
        assert_equal(len(versions), num_nodes)
        assert_equal(len(binary), num_nodes)
        assert_equal(len(binary_cli), num_nodes)
        for i in range(num_nodes):
            args = list(extra_args[i])
            test_node_i = TestNode(
                i,
                get_datadir_path(self.options.tmpdir, i),
                chain=self.chain,
                rpchost=rpchost,
                timewait=self.rpc_timeout,
                timeout_factor=self.options.timeout_factor,
                bitcoind=binary[i],
                bitcoin_cli=binary_cli[i],
                version=versions[i],
                coverage_dir=self.options.coveragedir,
                cwd=self.options.tmpdir,
                extra_conf=extra_confs[i],
                extra_args=args,
                use_cli=self.options.usecli,
                start_perf=self.options.perf,
                use_valgrind=self.options.valgrind,
                descriptors=self.options.descriptors,
                v2transport=self.options.v2transport,
            )
            self.nodes.append(test_node_i)
            if not test_node_i.version_is_at_least(170000):
                # adjust conf for pre 17
                test_node_i.replace_in_config([('[regtest]', '')])
 543  
 544      def start_node(self, i, *args, **kwargs):
 545          """Start a bitcoind"""
 546  
 547          node = self.nodes[i]
 548  
 549          node.start(*args, **kwargs)
 550          node.wait_for_rpc_connection()
 551  
 552          if self.options.coveragedir is not None:
 553              coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
 554  
 555      def start_nodes(self, extra_args=None, *args, **kwargs):
 556          """Start multiple bitcoinds"""
 557  
 558          if extra_args is None:
 559              extra_args = [None] * self.num_nodes
 560          assert_equal(len(extra_args), self.num_nodes)
 561          try:
 562              for i, node in enumerate(self.nodes):
 563                  node.start(extra_args[i], *args, **kwargs)
 564              for node in self.nodes:
 565                  node.wait_for_rpc_connection()
 566          except Exception:
 567              # If one node failed to start, stop the others
 568              self.stop_nodes()
 569              raise
 570  
 571          if self.options.coveragedir is not None:
 572              for node in self.nodes:
 573                  coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
 574  
 575      def stop_node(self, i, expected_stderr='', wait=0):
 576          """Stop a bitcoind test node"""
 577          self.nodes[i].stop_node(expected_stderr, wait=wait)
 578  
 579      def stop_nodes(self, wait=0):
 580          """Stop multiple bitcoind test nodes"""
 581          for node in self.nodes:
 582              # Issue RPC to stop nodes
 583              node.stop_node(wait=wait, wait_until_stopped=False)
 584  
 585          for node in self.nodes:
 586              # Wait for nodes to stop
 587              node.wait_until_stopped()
 588  
 589      def restart_node(self, i, extra_args=None, clear_addrman=False):
 590          """Stop and start a test node"""
 591          self.stop_node(i)
 592          if clear_addrman:
 593              peers_dat = self.nodes[i].chain_path / "peers.dat"
 594              os.remove(peers_dat)
 595              with self.nodes[i].assert_debug_log(expected_msgs=[f'Creating peers.dat because the file was not found ("{peers_dat}")']):
 596                  self.start_node(i, extra_args)
 597          else:
 598              self.start_node(i, extra_args)
 599  
    def wait_for_node_exit(self, i, timeout):
        """Block until node i's process terminates.

        `timeout` (seconds) is passed to process.wait(); presumably a
        subprocess.Popen, which raises TimeoutExpired on expiry -- confirm
        against TestNode."""
        self.nodes[i].process.wait(timeout)
 602  
    def connect_nodes(self, a, b, *, peer_advertises_v2=None, wait_for_connect: bool = True):
        """
        Make an outbound p2p connection from node `a` to node `b`.

        Kwargs:
            peer_advertises_v2: if set, explicitly request (or decline) v2
                transport for this connection; if None, use the connecting
                node's own use_v2transport setting.
            wait_for_connect: if True, block until the nodes are verified as connected. You might
                want to disable this when using -stopatheight with one of the connected nodes,
                since there will be a race between the actual connection and performing
                the assertions before one node shuts down.
        """
        from_connection = self.nodes[a]
        to_connection = self.nodes[b]
        # Peer counts each side is expected to report once the new connection is up.
        from_num_peers = 1 + len(from_connection.getpeerinfo())
        to_num_peers = 1 + len(to_connection.getpeerinfo())
        ip_port = "127.0.0.1:" + str(p2p_port(b))

        if peer_advertises_v2 is None:
            peer_advertises_v2 = from_connection.use_v2transport

        if peer_advertises_v2 != from_connection.use_v2transport:
            from_connection.addnode(node=ip_port, command="onetry", v2transport=peer_advertises_v2)
        else:
            # skip the optional third argument if it matches the default, for
            # compatibility with older clients
            from_connection.addnode(ip_port, "onetry")

        if not wait_for_connect:
            return

        # poll until version handshake complete to avoid race conditions
        # with transaction relaying
        # See comments in net_processing:
        # * Must have a version message before anything else
        # * Must have a verack message before anything else
        self.wait_until(lambda: sum(peer['version'] != 0 for peer in from_connection.getpeerinfo()) == from_num_peers)
        self.wait_until(lambda: sum(peer['version'] != 0 for peer in to_connection.getpeerinfo()) == to_num_peers)
        # NOTE: `.pop(key, 0)` is used as a read-with-default; each getpeerinfo()
        # call returns fresh dicts, so the mutation has no lasting effect.
        self.wait_until(lambda: sum(peer['bytesrecv_per_msg'].pop('verack', 0) >= 21 for peer in from_connection.getpeerinfo()) == from_num_peers)
        self.wait_until(lambda: sum(peer['bytesrecv_per_msg'].pop('verack', 0) >= 21 for peer in to_connection.getpeerinfo()) == to_num_peers)
        # The message bytes are counted before processing the message, so make
        # sure it was fully processed by waiting for a ping.
        self.wait_until(lambda: sum(peer["bytesrecv_per_msg"].pop("pong", 0) >= 29 for peer in from_connection.getpeerinfo()) == from_num_peers)
        self.wait_until(lambda: sum(peer["bytesrecv_per_msg"].pop("pong", 0) >= 29 for peer in to_connection.getpeerinfo()) == to_num_peers)
 643  
 644      def disconnect_nodes(self, a, b):
 645          def disconnect_nodes_helper(node_a, node_b):
 646              def get_peer_ids(from_connection, node_num):
 647                  result = []
 648                  for peer in from_connection.getpeerinfo():
 649                      if "testnode{}".format(node_num) in peer['subver']:
 650                          result.append(peer['id'])
 651                  return result
 652  
 653              peer_ids = get_peer_ids(node_a, node_b.index)
 654              if not peer_ids:
 655                  self.log.warning("disconnect_nodes: {} and {} were not connected".format(
 656                      node_a.index,
 657                      node_b.index,
 658                  ))
 659                  return
 660              for peer_id in peer_ids:
 661                  try:
 662                      node_a.disconnectnode(nodeid=peer_id)
 663                  except JSONRPCException as e:
 664                      # If this node is disconnected between calculating the peer id
 665                      # and issuing the disconnect, don't worry about it.
 666                      # This avoids a race condition if we're mass-disconnecting peers.
 667                      if e.error['code'] != -29:  # RPC_CLIENT_NODE_NOT_CONNECTED
 668                          raise
 669  
 670              # wait to disconnect
 671              self.wait_until(lambda: not get_peer_ids(node_a, node_b.index), timeout=5)
 672              self.wait_until(lambda: not get_peer_ids(node_b, node_a.index), timeout=5)
 673  
 674          disconnect_nodes_helper(self.nodes[a], self.nodes[b])
 675  
 676      def split_network(self):
 677          """
 678          Split the network of four nodes into nodes 0/1 and 2/3.
 679          """
 680          self.disconnect_nodes(1, 2)
 681          self.sync_all(self.nodes[:2])
 682          self.sync_all(self.nodes[2:])
 683  
 684      def join_network(self):
 685          """
 686          Join the (previously split) network halves together.
 687          """
 688          self.connect_nodes(1, 2)
 689          self.sync_all()
 690  
 691      def no_op(self):
 692          pass
 693  
 694      def generate(self, generator, *args, sync_fun=None, **kwargs):
 695          blocks = generator.generate(*args, invalid_call=False, **kwargs)
 696          sync_fun() if sync_fun else self.sync_all()
 697          return blocks
 698  
 699      def generateblock(self, generator, *args, sync_fun=None, **kwargs):
 700          blocks = generator.generateblock(*args, invalid_call=False, **kwargs)
 701          sync_fun() if sync_fun else self.sync_all()
 702          return blocks
 703  
 704      def generatetoaddress(self, generator, *args, sync_fun=None, **kwargs):
 705          blocks = generator.generatetoaddress(*args, invalid_call=False, **kwargs)
 706          sync_fun() if sync_fun else self.sync_all()
 707          return blocks
 708  
 709      def generatetodescriptor(self, generator, *args, sync_fun=None, **kwargs):
 710          blocks = generator.generatetodescriptor(*args, invalid_call=False, **kwargs)
 711          sync_fun() if sync_fun else self.sync_all()
 712          return blocks
 713  
 714      def create_outpoints(self, node, *, outputs):
 715          """Send funds to a given list of `{address: amount}` targets using the bitcoind
 716          wallet and return the corresponding outpoints as a list of dictionaries
 717          `[{"txid": txid, "vout": vout1}, {"txid": txid, "vout": vout2}, ...]`.
 718          The result can be used to specify inputs for RPCs like `createrawtransaction`,
 719          `createpsbt`, `lockunspent` etc."""
 720          assert all(len(output.keys()) == 1 for output in outputs)
 721          send_res = node.send(outputs)
 722          assert send_res["complete"]
 723          utxos = []
 724          for output in outputs:
 725              address = list(output.keys())[0]
 726              vout = find_vout_for_address(node, send_res["txid"], address)
 727              utxos.append({"txid": send_res["txid"], "vout": vout})
 728          return utxos
 729  
 730      def sync_blocks(self, nodes=None, wait=1, timeout=60):
 731          """
 732          Wait until everybody has the same tip.
 733          sync_blocks needs to be called with an rpc_connections set that has least
 734          one node already synced to the latest, stable tip, otherwise there's a
 735          chance it might return before all nodes are stably synced.
 736          """
 737          rpc_connections = nodes or self.nodes
 738          timeout = int(timeout * self.options.timeout_factor)
 739          stop_time = time.time() + timeout
 740          while time.time() <= stop_time:
 741              best_hash = [x.getbestblockhash() for x in rpc_connections]
 742              if best_hash.count(best_hash[0]) == len(rpc_connections):
 743                  return
 744              # Check that each peer has at least one connection
 745              assert (all([len(x.getpeerinfo()) for x in rpc_connections]))
 746              time.sleep(wait)
 747          raise AssertionError("Block sync timed out after {}s:{}".format(
 748              timeout,
 749              "".join("\n  {!r}".format(b) for b in best_hash),
 750          ))
 751  
 752      def sync_mempools(self, nodes=None, wait=1, timeout=60, flush_scheduler=True):
 753          """
 754          Wait until everybody has the same transactions in their memory
 755          pools
 756          """
 757          rpc_connections = nodes or self.nodes
 758          timeout = int(timeout * self.options.timeout_factor)
 759          stop_time = time.time() + timeout
 760          while time.time() <= stop_time:
 761              pool = [set(r.getrawmempool()) for r in rpc_connections]
 762              if pool.count(pool[0]) == len(rpc_connections):
 763                  if flush_scheduler:
 764                      for r in rpc_connections:
 765                          r.syncwithvalidationinterfacequeue()
 766                  return
 767              # Check that each peer has at least one connection
 768              assert (all([len(x.getpeerinfo()) for x in rpc_connections]))
 769              time.sleep(wait)
 770          raise AssertionError("Mempool sync timed out after {}s:{}".format(
 771              timeout,
 772              "".join("\n  {!r}".format(m) for m in pool),
 773          ))
 774  
 775      def sync_all(self, nodes=None):
 776          self.sync_blocks(nodes)
 777          self.sync_mempools(nodes)
 778  
 779      def wait_until(self, test_function, timeout=60):
 780          return wait_until_helper_internal(test_function, timeout=timeout, timeout_factor=self.options.timeout_factor)
 781  
 782      # Private helper methods. These should not be accessed by the subclass test scripts.
 783  
    def _start_logging(self):
        """Set up `self.log` with a DEBUG file handler and a console handler at --loglevel."""
        # Add logger and logging handlers
        self.log = logging.getLogger('TestFramework')
        self.log.setLevel(logging.DEBUG)
        # Create file handler to log all messages
        fh = logging.FileHandler(self.options.tmpdir + '/test_framework.log', encoding='utf-8')
        fh.setLevel(logging.DEBUG)
        # Create console handler to log messages to stdout. By default this logs only error messages, but can be configured with --loglevel.
        ch = logging.StreamHandler(sys.stdout)
        # User can provide log level as a number or string (eg DEBUG). loglevel was caught as a string, so try to convert it to an int
        ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper()
        ch.setLevel(ll)
        # Format logs the same as bitcoind's debug.log with microprecision (so log files can be concatenated and sorted)
        formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000Z %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%dT%H:%M:%S')
        formatter.converter = time.gmtime  # timestamps in UTC, matching bitcoind
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)
        # add the handlers to the logger
        self.log.addHandler(fh)
        self.log.addHandler(ch)

        if self.options.trace_rpc:
            # Mirror raw RPC traffic to stdout when --tracerpc is given.
            rpc_logger = logging.getLogger("BitcoinRPC")
            rpc_logger.setLevel(logging.DEBUG)
            rpc_handler = logging.StreamHandler(sys.stdout)
            rpc_handler.setLevel(logging.DEBUG)
            rpc_logger.addHandler(rpc_handler)
 811  
    def _initialize_chain(self):
        """Initialize a pre-mined blockchain for use by the test.

        Create a cache of a 199-block-long chain
        Afterward, create num_nodes copies from the cache."""

        CACHE_NODE_ID = 0  # Use node 0 to create the cache for all other nodes
        cache_node_dir = get_datadir_path(self.options.cachedir, CACHE_NODE_ID)
        assert self.num_nodes <= MAX_NODES

        # Only build the cache once; later runs reuse the existing directory.
        if not os.path.isdir(cache_node_dir):
            self.log.debug("Creating cache directory {}".format(cache_node_dir))

            initialize_datadir(self.options.cachedir, CACHE_NODE_ID, self.chain, self.disable_autoconnect)
            self.nodes.append(
                TestNode(
                    CACHE_NODE_ID,
                    cache_node_dir,
                    chain=self.chain,
                    extra_conf=["bind=127.0.0.1"],
                    extra_args=['-disablewallet'],
                    rpchost=None,
                    timewait=self.rpc_timeout,
                    timeout_factor=self.options.timeout_factor,
                    bitcoind=self.options.bitcoind,
                    bitcoin_cli=self.options.bitcoincli,
                    coverage_dir=None,
                    cwd=self.options.tmpdir,
                    descriptors=self.options.descriptors,
                ))
            self.start_node(CACHE_NODE_ID)
            cache_node = self.nodes[CACHE_NODE_ID]

            # Wait for RPC connections to be ready
            cache_node.wait_for_rpc_connection()

            # Set a time in the past, so that blocks don't end up in the future
            cache_node.setmocktime(cache_node.getblockheader(cache_node.getbestblockhash())['time'])

            # Create a 199-block-long chain; each of the 3 first nodes
            # gets 25 mature blocks and 25 immature.
            # The 4th address gets 25 mature and only 24 immature blocks so that the very last
            # block in the cache does not age too much (have an old tip age).
            # This is needed so that we are out of IBD when the test starts,
            # see the tip age check in IsInitialBlockDownload().
            gen_addresses = [k.address for k in TestNode.PRIV_KEYS][:3] + [create_deterministic_address_bcrt1_p2tr_op_true()[0]]
            assert_equal(len(gen_addresses), 4)
            # 8 rounds of 25 blocks each, with 24 in the final round: 199 total.
            for i in range(8):
                self.generatetoaddress(
                    cache_node,
                    nblocks=25 if i != 7 else 24,
                    address=gen_addresses[i % len(gen_addresses)],
                )

            assert_equal(cache_node.getblockchaininfo()["blocks"], 199)

            # Shut it down, and clean up cache directories:
            self.stop_nodes()
            self.nodes = []

            def cache_path(*paths):
                # Path inside the cached node's chain-specific datadir.
                return os.path.join(cache_node_dir, self.chain, *paths)

            os.rmdir(cache_path('wallets'))  # Remove empty wallets dir
            for entry in os.listdir(cache_path()):
                if entry not in ['chainstate', 'blocks', 'indexes']:  # Only indexes, chainstate and blocks folders
                    os.remove(cache_path(entry))

        # Give every test node its own copy of the cached datadir.
        for i in range(self.num_nodes):
            self.log.debug("Copy cache directory {} to node {}".format(cache_node_dir, i))
            to_dir = get_datadir_path(self.options.tmpdir, i)
            shutil.copytree(cache_node_dir, to_dir)
            initialize_datadir(self.options.tmpdir, i, self.chain, self.disable_autoconnect)  # Overwrite port/rpcport in bitcoin.conf
 885  
 886      def _initialize_chain_clean(self):
 887          """Initialize empty blockchain for use by the test.
 888  
 889          Create an empty blockchain and num_nodes wallets.
 890          Useful if a test case wants complete control over initialization."""
 891          for i in range(self.num_nodes):
 892              initialize_datadir(self.options.tmpdir, i, self.chain, self.disable_autoconnect)
 893  
    def skip_if_no_py3_zmq(self) -> None:
        """Attempt to import the zmq package and skip the test if the import fails."""
        try:
            # The import only probes availability; the module is not used here.
            import zmq  # noqa
        except ImportError:
            raise SkipTest("python3-zmq module not available.")
 900  
    def skip_if_no_py_sqlite3(self) -> None:
        """Attempt to import the sqlite3 package and skip the test if the import fails."""
        try:
            # The import only probes availability; the module is not used here.
            import sqlite3  # noqa
        except ImportError:
            raise SkipTest("sqlite3 module not available.")
 907  
    def skip_if_no_python_bcc(self) -> None:
        """Attempt to import the bcc package and skip the tests if the import fails."""
        try:
            # The import only probes availability; the module is not used here.
            import bcc  # type: ignore[import] # noqa: F401
        except ImportError:
            raise SkipTest("bcc python module not available")
 914  
 915      def skip_if_no_bitcoind_tracepoints(self):
 916          """Skip the running test if bitcoind has not been compiled with USDT tracepoint support."""
 917          if not self.is_usdt_compiled():
 918              raise SkipTest("bitcoind has not been built with USDT tracepoints enabled.")
 919  
 920      def skip_if_no_bpf_permissions(self):
 921          """Skip the running test if we don't have permissions to do BPF syscalls and load BPF maps."""
 922          # check for 'root' permissions
 923          if os.geteuid() != 0:
 924              raise SkipTest("no permissions to use BPF (please review the tests carefully before running them with higher privileges)")
 925  
 926      def skip_if_platform_not_linux(self):
 927          """Skip the running test if we are not on a Linux platform"""
 928          if platform.system() != "Linux":
 929              raise SkipTest("not on a Linux system")
 930  
 931      def skip_if_platform_not_posix(self):
 932          """Skip the running test if we are not on a POSIX platform"""
 933          if os.name != 'posix':
 934              raise SkipTest("not on a POSIX system")
 935  
 936      def skip_if_no_bitcoind_zmq(self):
 937          """Skip the running test if bitcoind has not been compiled with zmq support."""
 938          if not self.is_zmq_compiled():
 939              raise SkipTest("bitcoind has not been built with zmq enabled.")
 940  
 941      def skip_if_no_wallet(self):
 942          """Skip the running test if wallet has not been compiled."""
 943          self._requires_wallet = True
 944          if not self.is_wallet_compiled():
 945              raise SkipTest("wallet has not been compiled.")
 946          if self.options.descriptors:
 947              self.skip_if_no_sqlite()
 948          else:
 949              self.skip_if_no_bdb()
 950  
 951      def skip_if_no_sqlite(self):
 952          """Skip the running test if sqlite has not been compiled."""
 953          if not self.is_sqlite_compiled():
 954              raise SkipTest("sqlite has not been compiled.")
 955  
 956      def skip_if_no_bdb(self):
 957          """Skip the running test if BDB has not been compiled."""
 958          if not self.is_bdb_compiled():
 959              raise SkipTest("BDB has not been compiled.")
 960  
 961      def skip_if_no_wallet_tool(self):
 962          """Skip the running test if bitcoin-wallet has not been compiled."""
 963          if not self.is_wallet_tool_compiled():
 964              raise SkipTest("bitcoin-wallet has not been compiled")
 965  
 966      def skip_if_no_bitcoin_util(self):
 967          """Skip the running test if bitcoin-util has not been compiled."""
 968          if not self.is_bitcoin_util_compiled():
 969              raise SkipTest("bitcoin-util has not been compiled")
 970  
 971      def skip_if_no_cli(self):
 972          """Skip the running test if bitcoin-cli has not been compiled."""
 973          if not self.is_cli_compiled():
 974              raise SkipTest("bitcoin-cli has not been compiled.")
 975  
 976      def skip_if_no_previous_releases(self):
 977          """Skip the running test if previous releases are not available."""
 978          if not self.has_previous_releases():
 979              raise SkipTest("previous releases not available or disabled")
 980  
 981      def has_previous_releases(self):
 982          """Checks whether previous releases are present and enabled."""
 983          if not os.path.isdir(self.options.previous_releases_path):
 984              if self.options.prev_releases:
 985                  raise AssertionError("Force test of previous releases but releases missing: {}".format(
 986                      self.options.previous_releases_path))
 987          return self.options.prev_releases
 988  
 989      def skip_if_no_external_signer(self):
 990          """Skip the running test if external signer support has not been compiled."""
 991          if not self.is_external_signer_compiled():
 992              raise SkipTest("external signer support has not been compiled.")
 993  
 994      def is_cli_compiled(self):
 995          """Checks whether bitcoin-cli was compiled."""
 996          return self.config["components"].getboolean("ENABLE_CLI")
 997  
 998      def is_external_signer_compiled(self):
 999          """Checks whether external signer support was compiled."""
1000          return self.config["components"].getboolean("ENABLE_EXTERNAL_SIGNER")
1001  
1002      def is_wallet_compiled(self):
1003          """Checks whether the wallet module was compiled."""
1004          return self.config["components"].getboolean("ENABLE_WALLET")
1005  
1006      def is_specified_wallet_compiled(self):
1007          """Checks whether wallet support for the specified type
1008             (legacy or descriptor wallet) was compiled."""
1009          if self.options.descriptors:
1010              return self.is_sqlite_compiled()
1011          else:
1012              return self.is_bdb_compiled()
1013  
1014      def is_wallet_tool_compiled(self):
1015          """Checks whether bitcoin-wallet was compiled."""
1016          return self.config["components"].getboolean("ENABLE_WALLET_TOOL")
1017  
1018      def is_bitcoin_util_compiled(self):
1019          """Checks whether bitcoin-util was compiled."""
1020          return self.config["components"].getboolean("ENABLE_BITCOIN_UTIL")
1021  
1022      def is_zmq_compiled(self):
1023          """Checks whether the zmq module was compiled."""
1024          return self.config["components"].getboolean("ENABLE_ZMQ")
1025  
1026      def is_usdt_compiled(self):
1027          """Checks whether the USDT tracepoints were compiled."""
1028          return self.config["components"].getboolean("ENABLE_USDT_TRACEPOINTS")
1029  
1030      def is_sqlite_compiled(self):
1031          """Checks whether the wallet module was compiled with Sqlite support."""
1032          return self.config["components"].getboolean("USE_SQLITE")
1033  
1034      def is_bdb_compiled(self):
1035          """Checks whether the wallet module was compiled with BDB support."""
1036          return self.config["components"].getboolean("USE_BDB")
1037  
1038      def has_blockfile(self, node, filenum: str):
1039          return (node.blocks_path/ f"blk{filenum}.dat").is_file()