# contrib/verify-binaries/verify.py
  1  #!/usr/bin/env python3
  2  # Copyright (c) 2020-present The Bitcoin Core developers
  3  # Distributed under the MIT software license, see the accompanying
  4  # file COPYING or http://www.opensource.org/licenses/mit-license.php.
  5  """Script for verifying Bitcoin Core release binaries.
  6  
  7  This script attempts to download the sum file SHA256SUMS and corresponding
  8  signature file SHA256SUMS.asc from bitcoincore.org and bitcoin.org and
  9  compares them.
 10  
 11  The sum-signature file is signed by a number of builder keys. This script
 12  ensures that there is a minimum threshold of signatures from pubkeys that
 13  we trust. This trust is articulated on the basis of configuration options
 14  here, but by default is based upon local GPG trust settings.
 15  
 16  The builder keys are available in the guix.sigs repo:
 17  
 18      https://github.com/bitcoin-core/guix.sigs/tree/main/builder-keys
 19  
 20  If a minimum good, trusted signature threshold is met on the sum file, we then
 21  download the files specified in SHA256SUMS, and check if the hashes of these
 22  files match those that are specified. The script returns 0 if everything passes
 23  the checks. It returns 1 if either the signature check or the hash check
 24  doesn't pass. If an error occurs the return value is >= 2.
 25  
 26  Logging output goes to stderr and final binary verification data goes to stdout.
 27  
 28  JSON output can by obtained by setting env BINVERIFY_JSON=1.
 29  """
 30  import argparse
 31  import difflib
 32  import json
 33  import logging
 34  import os
 35  import subprocess
 36  import typing as t
 37  import re
 38  import sys
 39  import shutil
 40  import tempfile
 41  import textwrap
 42  import enum
 43  from hashlib import sha256
 44  from pathlib import PurePath, Path
 45  
 46  # The primary host; this will fail if we can't retrieve files from here.
 47  HOST1 = "https://bitcoincore.org"
 48  HOST2 = "https://bitcoin.org"
 49  VERSIONPREFIX = "bitcoin-core-"
 50  SUMS_FILENAME = 'SHA256SUMS'
 51  SIGNATUREFILENAME = f"{SUMS_FILENAME}.asc"
 52  
 53  
class ReturnCode(enum.IntEnum):
    """Process exit codes for this script.

    Per the module docstring: 0 on success, 1 when the signature or hash
    integrity check fails, and values >= 2 for errors. Gaps in the
    numbering are values not produced by the current code.
    """
    SUCCESS = 0
    INTEGRITY_FAILURE = 1
    FILE_GET_FAILED = 4
    FILE_MISSING_FROM_ONE_HOST = 5
    FILES_NOT_EQUAL = 6
    NO_BINARIES_MATCH = 7
    NOT_ENOUGH_GOOD_SIGS = 9
    BINARY_DOWNLOAD_FAILED = 10
    BAD_VERSION = 11
 64  
 65  
 66  def set_up_logger(is_verbose: bool = True) -> logging.Logger:
 67      """Set up a logger that writes to stderr."""
 68      log = logging.getLogger(__name__)
 69      log.setLevel(logging.INFO if is_verbose else logging.WARNING)
 70      console = logging.StreamHandler(sys.stderr)  # log to stderr
 71      console.setLevel(logging.DEBUG)
 72      formatter = logging.Formatter('[%(levelname)s] %(message)s')
 73      console.setFormatter(formatter)
 74      log.addHandler(console)
 75      return log
 76  
 77  
# Module-level logger shared by every function in this script (stderr).
log = set_up_logger()
 79  
 80  
 81  def indent(output: str) -> str:
 82      return textwrap.indent(output, '  ')
 83  
 84  
 85  def bool_from_env(key, default=False) -> bool:
 86      if key not in os.environ:
 87          return default
 88      raw = os.environ[key]
 89  
 90      if raw.lower() in ('1', 'true'):
 91          return True
 92      elif raw.lower() in ('0', 'false'):
 93          return False
 94      raise ValueError(f"Unrecognized environment value {key}={raw!r}")
 95  
 96  
# Human-readable description of the accepted version argument; used in
# --help text and in error messages when parsing fails.
VERSION_FORMAT = "<major>.<minor>[.<patch>][-rc[0-9]][-platform]"
VERSION_EXAMPLE = "22.0 or 23.1-rc1-darwin.dmg or 27.0-x86_64-linux-gnu"
 99  
def parse_version_string(version_str):
    """Split a version argument into (base, rc, platform) strings.

    Examples:
        "23.1-rc1-darwin.dmg"   -> ("23.1", "rc1", "darwin.dmg")
        "22.0"                  -> ("22.0", "", "")
        "27.0-x86_64-linux-gnu" -> ("27.0", "", "x86_64-linux-gnu")
    """
    base, _sep, remainder = version_str.partition('-')
    if remainder.startswith("rc"):
        # "<version>-rcN[-platform]"
        rc, _sep, remainder = remainder.partition('-')
        return base, rc, remainder
    # "<version>" or "<version>-platform"
    return base, "", remainder
109  
110  
def download_with_wget(remote_file, local_file):
    """Fetch *remote_file* into *local_file* using the wget binary.

    Returns (success, output): success is True iff wget exited with 0,
    and output is wget's combined stdout/stderr, right-stripped.
    """
    proc = subprocess.run(
        ['wget', '-O', local_file, remote_file],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    return proc.returncode == 0, proc.stdout.decode().rstrip()
115  
116  
def verify_with_gpg(
    filename,
    signature_filename,
    output_filename: t.Optional[str] = None
) -> tuple[int, str]:
    """Run `gpg --verify` on *filename* against *signature_filename*.

    Returns (gpg_exit_code, status_text) where status_text is the
    machine-readable output gpg wrote via --status-file.
    """
    with tempfile.NamedTemporaryFile() as status_file:
        cmd = [
            'gpg', '--yes', '--verify',
            '--verify-options', 'show-primary-uid-only',
            "--status-file", status_file.name,
            '--output', output_filename if output_filename else '',
            signature_filename, filename]

        # Force English output so the status lines parse predictably.
        gpg_env = dict(os.environ, LANGUAGE='en')
        result = subprocess.run(cmd, stderr=subprocess.STDOUT,
                                stdout=subprocess.PIPE, env=gpg_env)

        status_text = status_file.read().decode().rstrip()

    log.debug(f'Result from GPG ({result.returncode}): {result.stdout.decode()}')
    log.debug(f"{status_text}")
    return result.returncode, status_text
135  
136  
class SigData:
    """One signature record parsed from GPG status output."""

    def __init__(self):
        self.key = None       # long key ID, None until a *SIG line is seen
        self.name = ""        # signer's primary UID
        self.trusted = False  # set from the TRUST_* status lines
        self.status = ""      # "", "expired" or "revoked"

    def __bool__(self):
        # A record is "truthy" once a key has been attached to it.
        return self.key is not None

    def __repr__(self):
        return (
            f"SigData({self.key!r}, {self.name!r}, "
            f"trusted={self.trusted}, status={self.status!r})")
152  
153  
def parse_gpg_result(
    output: list[str]
) -> tuple[list[SigData], list[SigData], list[SigData]]:
    """Returns good, unknown, and bad signatures from GPG stdout.

    *output* is the list of `[GNUPG:] ...` status lines produced via
    --status-file. Each NEWSIG line opens a new signature record; the
    following GOODSIG/BADSIG/ERRSIG/... line classifies it and TRUST_*
    lines set its trust flag. Raises RuntimeError if the number of
    classified records disagrees with the number of NEWSIG lines.
    """
    good_sigs: list[SigData] = []
    unknown_sigs: list[SigData] = []
    bad_sigs: list[SigData] = []
    total_resolved_sigs = 0

    # Ensure that all lines we match on include a prefix that prevents malicious input
    # from fooling the parser.
    def line_begins_with(patt: str, line: str) -> t.Optional[re.Match]:
        return re.match(r'^(\[GNUPG:\])\s+' + patt, line)

    curr_sigs = unknown_sigs
    curr_sigdata = SigData()

    for line in output:
        if line_begins_with(r"NEWSIG(?:\s|$)", line):
            total_resolved_sigs += 1
            # Flush the previous record (if one was started) into whichever
            # list the preceding status lines selected, then start fresh.
            if curr_sigdata:
                curr_sigs.append(curr_sigdata)
                curr_sigdata = SigData()
            # A NEWSIG line may carry the signer's UID as a third field.
            newsig_split = line.split()
            if len(newsig_split) == 3:
                curr_sigdata.name = newsig_split[2]

        elif line_begins_with(r"GOODSIG(?:\s|$)", line):
            curr_sigdata.key, curr_sigdata.name = line.split(maxsplit=3)[2:4]
            curr_sigs = good_sigs

        elif line_begins_with(r"EXPKEYSIG(?:\s|$)", line):
            # Expired keys still land in the "good" bucket, flagged by status.
            curr_sigdata.key, curr_sigdata.name = line.split(maxsplit=3)[2:4]
            curr_sigs = good_sigs
            curr_sigdata.status = "expired"

        elif line_begins_with(r"REVKEYSIG(?:\s|$)", line):
            # Revoked keys likewise stay "good" but carry a status marker.
            curr_sigdata.key, curr_sigdata.name = line.split(maxsplit=3)[2:4]
            curr_sigs = good_sigs
            curr_sigdata.status = "revoked"

        elif line_begins_with(r"BADSIG(?:\s|$)", line):
            curr_sigdata.key, curr_sigdata.name = line.split(maxsplit=3)[2:4]
            curr_sigs = bad_sigs

        elif line_begins_with(r"ERRSIG(?:\s|$)", line):
            # ERRSIG: the key ID is the third field; remaining fields unused.
            curr_sigdata.key, _, _, _, _, _ = line.split()[2:8]
            curr_sigs = unknown_sigs

        elif line_begins_with(r"TRUST_(UNDEFINED|NEVER)(?:\s|$)", line):
            curr_sigdata.trusted = False

        elif line_begins_with(r"TRUST_(MARGINAL|FULLY|ULTIMATE)(?:\s|$)", line):
            curr_sigdata.trusted = True

    # The last one won't have been added, so add it now
    assert curr_sigdata
    curr_sigs.append(curr_sigdata)

    all_found = len(good_sigs + bad_sigs + unknown_sigs)
    if all_found != total_resolved_sigs:
        raise RuntimeError(
            f"failed to evaluate all signatures: found {all_found} "
            f"but expected {total_resolved_sigs}")

    return (good_sigs, unknown_sigs, bad_sigs)
220  
221  
def files_are_equal(filename1, filename2):
    """Return True iff the two files have identical byte contents.

    When they differ, a unified diff of their text contents is logged as
    a warning to help identify the discrepancy.
    """
    equal = Path(filename1).read_bytes() == Path(filename2).read_bytes()

    if not equal:
        with open(filename1, 'r') as f1, \
                open(filename2, 'r') as f2:
            diff = indent(
                ''.join(difflib.unified_diff(f1.readlines(), f2.readlines())))
            log.warning(f"found diff in files ({filename1}, {filename2}):\n{diff}\n")

    return equal
240  
241  
def get_files_from_hosts_and_compare(
    hosts: list[str], path: str, filename: str, require_all: bool = False
) -> ReturnCode:
    """
    Retrieve the same file from a number of hosts and ensure they have the same contents.
    The first host given will be treated as the "primary" host, and is required to succeed.

    Args:
        hosts: base URLs; hosts[0] is the primary and must succeed.
        path: remote path of the file, appended to each host URL.
        filename: for writing the file locally.
        require_all: when True, every host must serve the file.

    Returns:
        ReturnCode.SUCCESS only when all retrieved copies are identical.
    """
    assert len(hosts) > 1
    primary_host = hosts[0]
    other_hosts = hosts[1:]
    got_files = []

    def join_url(host: str) -> str:
        return host.rstrip('/') + '/' + path.lstrip('/')

    url = join_url(primary_host)
    success, output = download_with_wget(url, filename)
    if not success:
        log.error(
            f"couldn't fetch file ({url}). "
            "Have you specified the version number in the following format?\n"
            f"{VERSION_FORMAT} "
            f"(example: {VERSION_EXAMPLE})\n"
            f"wget output:\n{indent(output)}")
        return ReturnCode.FILE_GET_FAILED
    else:
        # Fix: report the actual local filename (message previously printed
        # a literal "(unknown)").
        log.info(f"got file {url} as {filename}")
        got_files.append(filename)

    for i, host in enumerate(other_hosts):
        url = join_url(host)
        fname = filename + f'.{i + 2}'
        success, output = download_with_wget(url, fname)

        if require_all and not success:
            log.error(
                f"{host} failed to provide file ({url}), but {primary_host} did?\n"
                f"wget output:\n{indent(output)}")
            return ReturnCode.FILE_MISSING_FROM_ONE_HOST
        elif not success:
            log.warning(
                f"{host} failed to provide file ({url}). "
                f"Continuing based solely upon {primary_host}.")
        else:
            log.info(f"got file {url} as {fname}")
            got_files.append(fname)

    # Compare each adjacent pair of retrieved files; the local filenames are
    # distinct by construction, so pairwise comparison covers all copies.
    for got_file, compare_to in zip(got_files, got_files[1:]):
        if not files_are_equal(got_file, compare_to):
            log.error(f"files not equal: {got_file} and {compare_to}")
            return ReturnCode.FILES_NOT_EQUAL

    return ReturnCode.SUCCESS
302  
303  
def check_multisig(sums_file: str, sigfilename: str, args: argparse.Namespace) -> tuple[int, str, list[SigData], list[SigData], list[SigData]]:
    """Verify the sums file signature and classify every signature found.

    Returns (gpg_exit_code, gpg_status_output, good, unknown, bad). When
    --import-keys was given, offers to fetch each unknown key from the
    configured keyserver and re-runs verification afterwards.
    """
    # We don't write gpg output to a file because this command will almost
    # certainly fail with GPG exit code '2' (and so not writing to --output)
    # because of the likely presence of multiple untrusted signatures.
    retval, output = verify_with_gpg(sums_file, sigfilename)

    if args.verbose:
        log.info(f"gpg output:\n{indent(output)}")

    good, unknown, bad = parse_gpg_result(output.splitlines())

    if unknown and args.import_keys:
        # Offer to fetch each unknown key, then verify again against the
        # (possibly) enlarged keyring.
        for unsig in unknown:
            if prompt_yn(f" ? Retrieve key {unsig.key} ({unsig.name})? (y/N) "):
                fetch = subprocess.run(
                    ["gpg", "--keyserver", args.keyserver, "--recv-keys", unsig.key])

                if fetch.returncode != 0:
                    log.warning(f"failed to retrieve key {unsig.key}")

        retval, output = verify_with_gpg(sums_file, sigfilename)
        good, unknown, bad = parse_gpg_result(output.splitlines())

    return retval, output, good, unknown, bad
332  
333  
def prompt_yn(prompt) -> bool:
    """Prompt until the user answers 'y' or 'n'; return True for 'y'."""
    answer = ''
    while answer not in ('y', 'n'):
        answer = input(prompt).lower()
    return answer == 'y'
340  
def verify_shasums_signature(
    signature_file_path: str, sums_file_path: str, args: argparse.Namespace
) -> tuple[
   ReturnCode, list[SigData], list[SigData], list[SigData], list[SigData]
]:
    """Verify the sums-file signature and enforce the good-sig threshold.

    Returns (status, good_trusted, good_untrusted, unknown, bad); on any
    failure status the four signature lists are returned empty.
    """
    min_good_sigs = args.min_good_sigs
    gpg_allowed_codes = [0, 2]  # 2 is returned when untrusted signatures are present.

    gpg_retval, gpg_output, good, unknown, bad = check_multisig(sums_file_path, signature_file_path, args)

    if gpg_retval not in gpg_allowed_codes:
        if gpg_retval == 1:
            log.critical(f"Bad signature (code: {gpg_retval}).")
        else:
            log.critical(f"unexpected GPG exit code ({gpg_retval})")

        log.error(f"gpg output:\n{indent(gpg_output)}")
        return (ReturnCode.INTEGRITY_FAILURE, [], [], [], [])

    # Decide which keys we trust, though not "trust" in the GPG sense, but rather
    # which pubkeys convince us that this sums file is legitimate. In other words,
    # which pubkeys within the Bitcoin community do we trust for the purposes of
    # binary verification?
    trusted_keys = set()
    if args.trusted_keys:
        trusted_keys |= set(args.trusted_keys.split(','))

    # Tally signatures and make sure we have enough goods to fulfill
    # our threshold.
    good_trusted = [sig for sig in good if sig.trusted or sig.key in trusted_keys]
    good_untrusted = [sig for sig in good if sig not in good_trusted]
    # NOTE(review): despite its name, num_trusted counts *all* good
    # signatures (trusted and untrusted alike) toward the threshold —
    # confirm this is the intended policy before relying on it.
    num_trusted = len(good_trusted) + len(good_untrusted)
    log.info(f"got {num_trusted} good signatures")

    if num_trusted < min_good_sigs:
        log.info("Maybe you need to import "
                  f"(`gpg --keyserver {args.keyserver} --recv-keys <key-id>`) "
                  "some of the following keys: ")
        log.info('')
        for sig in unknown:
            log.info(f"    {sig.key} ({sig.name})")
        log.info('')
        log.error(
            "not enough trusted sigs to meet threshold "
            f"({num_trusted} vs. {min_good_sigs})")

        return (ReturnCode.NOT_ENOUGH_GOOD_SIGS, [], [], [], [])

    for sig in good_trusted:
        log.info(f"GOOD SIGNATURE: {sig}")

    for sig in good_untrusted:
        log.info(f"GOOD SIGNATURE (untrusted): {sig}")

    for sig in [sig for sig in good if sig.status == 'expired']:
        log.warning(f"key {sig.key} for {sig.name} is expired")

    for sig in bad:
        log.warning(f"BAD SIGNATURE: {sig}")

    for sig in unknown:
        log.warning(f"UNKNOWN SIGNATURE: {sig}")

    return (ReturnCode.SUCCESS, good_trusted, good_untrusted, unknown, bad)
405  
406  
def parse_sums_file(sums_file_path: str, filename_filter: list[str]) -> list[list[str]]:
    """Extract [hash, filename] pairs from a SHA256SUMS-style file.

    Each line has the format "<hash> <binary_filename>". When
    *filename_filter* is non-empty, only lines containing at least one of
    the filter strings (substring match) are returned.
    """
    with open(sums_file_path, 'r') as hash_file:
        rows = []
        for line in hash_file:
            if filename_filter and not any(f in line for f in filename_filter):
                continue
            rows.append(line.split()[:2])
        return rows
412  
413  
def verify_binary_hashes(hashes_to_verify: list[list[str]]) -> tuple[ReturnCode, dict[str, str]]:
    """Compare each (expected_hash, filename) pair against the file's SHA256.

    Returns (status, verified) where *verified* maps each matching filename
    to its computed hash. Status is INTEGRITY_FAILURE when any file's hash
    differs from the expected value; offending files are logged.
    """
    bad_files = []
    verified = {}

    for expected_hash, path in hashes_to_verify:
        with open(path, 'rb') as fh:
            actual_hash = sha256(fh.read()).hexdigest()
        if actual_hash == expected_hash:
            verified[path] = actual_hash
        else:
            bad_files.append(path)

    if bad_files:
        joined_files = '\n'.join(bad_files)
        log.critical(
            "Hashes don't match.\n"
            f"Offending files:\n{joined_files}")
        return (ReturnCode.INTEGRITY_FAILURE, verified)

    return (ReturnCode.SUCCESS, verified)
434  
435  
def verify_published_handler(args: argparse.Namespace) -> ReturnCode:
    """Handle the "pub" subcommand: download and verify a published release.

    Fetches SHA256SUMS(.asc) from both hosts, checks the signature
    threshold, downloads the matching binaries into a temp working
    directory and verifies their hashes. Returns a ReturnCode per the
    module docstring.
    """
    WORKINGDIR = Path(tempfile.gettempdir()) / f"bitcoin_verify_binaries.{args.version}"

    def cleanup():
        # chdir away first so removing the current working dir succeeds.
        log.info("cleaning up files")
        os.chdir(Path.home())
        shutil.rmtree(WORKINGDIR)

    # determine remote dir dependent on provided version string
    try:
        version_base, version_rc, os_filter = parse_version_string(args.version)
        version_tuple = [int(i) for i in version_base.split('.')]
    except Exception as e:
        log.debug(e)
        log.error(f"unable to parse version; expected format is {VERSION_FORMAT}")
        log.error(f"  e.g. {VERSION_EXAMPLE}")
        return ReturnCode.BAD_VERSION

    remote_dir = f"/bin/{VERSIONPREFIX}{version_base}/"
    if version_rc:
        remote_dir += f"test.{version_rc}/"
    remote_sigs_path = remote_dir + SIGNATUREFILENAME
    remote_sums_path = remote_dir + SUMS_FILENAME

    # create working directory
    os.makedirs(WORKINGDIR, exist_ok=True)
    os.chdir(WORKINGDIR)

    hosts = [HOST1, HOST2]

    got_sig_status = get_files_from_hosts_and_compare(
        hosts, remote_sigs_path, SIGNATUREFILENAME, args.require_all_hosts)
    if got_sig_status != ReturnCode.SUCCESS:
        return got_sig_status

    # Multi-sig verification is available after 22.0.
    if version_tuple[0] < 22:
        log.error("Version too old - single sig not supported. Use a previous "
                  "version of this script from the repo.")
        return ReturnCode.BAD_VERSION

    got_sums_status = get_files_from_hosts_and_compare(
        hosts, remote_sums_path, SUMS_FILENAME, args.require_all_hosts)
    if got_sums_status != ReturnCode.SUCCESS:
        return got_sums_status

    # Verify the signature on the SHA256SUMS file
    sigs_status, good_trusted, good_untrusted, unknown, bad = verify_shasums_signature(SIGNATUREFILENAME, SUMS_FILENAME, args)
    if sigs_status != ReturnCode.SUCCESS:
        if sigs_status == ReturnCode.INTEGRITY_FAILURE:
            cleanup()
        return sigs_status

    # Extract hashes and filenames
    hashes_to_verify = parse_sums_file(SUMS_FILENAME, [os_filter])
    if not hashes_to_verify:
        available_versions = ["-".join(line[1].split("-")[2:]) for line in parse_sums_file(SUMS_FILENAME, [])]
        # Guard against an empty sums file so the suggestion lookup cannot
        # raise IndexError (get_close_matches may return an empty list).
        closest = difflib.get_close_matches(os_filter, available_versions, cutoff=0, n=1)
        suggestion = f" Did you mean: {closest[0]}" if closest else ""
        log.error(f"No files matched the platform specified.{suggestion}")
        return ReturnCode.NO_BINARIES_MATCH

    # remove binaries that are known not to be hosted by bitcoincore.org
    fragments_to_remove = ['-unsigned', '-debug', '-codesignatures']
    for fragment in fragments_to_remove:
        nobinaries = [i for i in hashes_to_verify if fragment in i[1]]
        if nobinaries:
            remove_str = ', '.join(i[1] for i in nobinaries)
            log.info(
                f"removing *{fragment} binaries ({remove_str}) from verification "
                f"since {HOST1} does not host *{fragment} binaries")
            hashes_to_verify = [i for i in hashes_to_verify if fragment not in i[1]]

    # download binaries
    for _, binary_filename in hashes_to_verify:
        log.info(f"downloading {binary_filename} to {WORKINGDIR}")
        success, output = download_with_wget(
            HOST1 + remote_dir + binary_filename, binary_filename)

        if not success:
            log.error(
                f"failed to download {binary_filename}\n"
                f"wget output:\n{indent(output)}")
            return ReturnCode.BINARY_DOWNLOAD_FAILED

    # verify hashes
    hashes_status, files_to_hashes = verify_binary_hashes(hashes_to_verify)
    if hashes_status != ReturnCode.SUCCESS:
        return hashes_status

    if args.cleanup:
        cleanup()
    else:
        log.info(f"did not clean up {WORKINGDIR}")

    if args.json:
        output = {
            'good_trusted_sigs': [str(s) for s in good_trusted],
            'good_untrusted_sigs': [str(s) for s in good_untrusted],
            'unknown_sigs': [str(s) for s in unknown],
            'bad_sigs': [str(s) for s in bad],
            'verified_binaries': files_to_hashes,
        }
        print(json.dumps(output, indent=2))
    else:
        for filename in files_to_hashes:
            # Fix: print the verified filename (was a literal "(unknown)").
            print(f"VERIFIED: {filename}")

    return ReturnCode.SUCCESS
545  
546  
def verify_binaries_handler(args: argparse.Namespace) -> ReturnCode:
    """Handle the "bin" subcommand: verify local binaries against a local
    SHA256SUMS file (and its signature). Returns a ReturnCode per the
    module docstring.
    """
    # Map basename (as it appears in the sums file) -> user-supplied path.
    binary_to_basename = {}
    for file in args.binary:
        binary_to_basename[PurePath(file).name] = file

    sums_sig_path = None
    if args.sums_sig_file:
        sums_sig_path = Path(args.sums_sig_file)
    else:
        log.info(f"No signature file specified, assuming it is {args.sums_file}.asc")
        sums_sig_path = Path(args.sums_file).with_suffix(".asc")

    # Verify the signature on the SHA256SUMS file
    sigs_status, good_trusted, good_untrusted, unknown, bad = verify_shasums_signature(str(sums_sig_path), args.sums_file, args)
    if sigs_status != ReturnCode.SUCCESS:
        return sigs_status

    # Extract hashes and filenames (only keys are needed as the filter).
    hashes_to_verify = parse_sums_file(args.sums_file, list(binary_to_basename))
    if not hashes_to_verify:
        log.error(f"No files in {args.sums_file} match the specified binaries")
        return ReturnCode.NO_BINARIES_MATCH

    # Make sure all files are accounted for
    sums_file_path = Path(args.sums_file)
    missing_files = []
    files_to_hash = []
    if len(binary_to_basename) > 0:
        # NOTE(review): parse_sums_file matches by substring, so a sums line
        # whose filename merely *contains* a given basename would raise
        # KeyError below — confirm filter strings are always full basenames.
        for file_hash, file in hashes_to_verify:
            files_to_hash.append([file_hash, binary_to_basename[file]])
            del binary_to_basename[file]
        if len(binary_to_basename) > 0:
            log.error(f"Not all specified binaries are in {args.sums_file}")
            return ReturnCode.NO_BINARIES_MATCH
    else:
        log.info(f"No binaries specified, assuming all files specified in {args.sums_file} are located relatively")
        for file_hash, file in hashes_to_verify:
            file_path = Path(sums_file_path.parent.joinpath(file))
            if file_path.exists():
                files_to_hash.append([file_hash, str(file_path)])
            else:
                missing_files.append(file)

    # verify hashes
    hashes_status, files_to_hashes = verify_binary_hashes(files_to_hash)
    if hashes_status != ReturnCode.SUCCESS:
        return hashes_status

    if args.json:
        output = {
            'good_trusted_sigs': [str(s) for s in good_trusted],
            'good_untrusted_sigs': [str(s) for s in good_untrusted],
            'unknown_sigs': [str(s) for s in unknown],
            'bad_sigs': [str(s) for s in bad],
            'verified_binaries': files_to_hashes,
            "missing_binaries": missing_files,
        }
        print(json.dumps(output, indent=2))
    else:
        # Fix: print the actual filenames (both lines previously printed a
        # literal "(unknown)").
        for filename in files_to_hashes:
            print(f"VERIFIED: {filename}")
        for filename in missing_files:
            print(f"MISSING: {filename}")

    return ReturnCode.SUCCESS
612  
613  
def main():
    """Parse command-line arguments and dispatch to the chosen subcommand."""
    parser = argparse.ArgumentParser(description=__doc__)
    # Global options; every default can also be supplied via the
    # corresponding BINVERIFY_* environment variable.
    parser.add_argument(
        '-v', '--verbose', action='store_true',
        default=bool_from_env('BINVERIFY_VERBOSE'),
    )
    parser.add_argument(
        '-q', '--quiet', action='store_true',
        default=bool_from_env('BINVERIFY_QUIET'),
    )
    parser.add_argument(
        '--import-keys', action='store_true',
        default=bool_from_env('BINVERIFY_IMPORTKEYS'),
        help='if specified, ask to import each unknown builder key'
    )
    parser.add_argument(
        '--min-good-sigs', type=int, action='store', nargs='?',
        default=int(os.environ.get('BINVERIFY_MIN_GOOD_SIGS', 3)),
        help=(
            'The minimum number of good signatures to require successful termination.'),
    )
    parser.add_argument(
        '--keyserver', action='store', nargs='?',
        default=os.environ.get('BINVERIFY_KEYSERVER', 'hkps://keys.openpgp.org'),
        help='which keyserver to use',
    )
    parser.add_argument(
        '--trusted-keys', action='store', nargs='?',
        default=os.environ.get('BINVERIFY_TRUSTED_KEYS', ''),
        help='A list of trusted signer GPG keys, separated by commas. Not "trusted keys" in the GPG sense.',
    )
    parser.add_argument(
        '--json', action='store_true',
        default=bool_from_env('BINVERIFY_JSON'),
        help='If set, output the result as JSON',
    )

    # Subcommands: "pub" verifies a published release, "bin" verifies
    # local files; each sets args.func to its handler.
    subparsers = parser.add_subparsers(title="Commands", required=True, dest="command")

    pub_parser = subparsers.add_parser("pub", help="Verify a published release.")
    pub_parser.set_defaults(func=verify_published_handler)
    pub_parser.add_argument(
        'version', type=str, help=(
            f'version of the bitcoin release to download; of the format '
            f'{VERSION_FORMAT}. Example: {VERSION_EXAMPLE}')
    )
    pub_parser.add_argument(
        '--cleanup', action='store_true',
        default=bool_from_env('BINVERIFY_CLEANUP'),
        help='if specified, clean up files afterwards'
    )
    pub_parser.add_argument(
        '--require-all-hosts', action='store_true',
        default=bool_from_env('BINVERIFY_REQUIRE_ALL_HOSTS'),
        help=(
            f'If set, require all hosts ({HOST1}, {HOST2}) to provide signatures. '
            '(Sometimes bitcoin.org lags behind bitcoincore.org.)')
    )

    bin_parser = subparsers.add_parser("bin", help="Verify local binaries.")
    bin_parser.set_defaults(func=verify_binaries_handler)
    bin_parser.add_argument("--sums-sig-file", "-s", help="Path to the SHA256SUMS.asc file to verify")
    bin_parser.add_argument("sums_file", help="Path to the SHA256SUMS file to verify")
    bin_parser.add_argument(
        "binary", nargs="*",
        help="Path to a binary distribution file to verify. Can be specified multiple times for multiple files to verify."
    )

    args = parser.parse_args()
    # --quiet overrides the logger level chosen at module import time.
    if args.quiet:
        log.setLevel(logging.WARNING)

    return args.func(args)
687  
688  
689  if __name__ == '__main__':
690      sys.exit(main())