diff options
Diffstat (limited to 'tools/patman')
35 files changed, 15412 insertions, 0 deletions
diff --git a/tools/patman/.checkpatch.conf b/tools/patman/.checkpatch.conf new file mode 120000 index 00000000000..c0e2020afe4 --- /dev/null +++ b/tools/patman/.checkpatch.conf @@ -0,0 +1 @@ +../../.checkpatch.conf
\ No newline at end of file diff --git a/tools/patman/.gitignore b/tools/patman/.gitignore new file mode 100644 index 00000000000..0d20b6487c6 --- /dev/null +++ b/tools/patman/.gitignore @@ -0,0 +1 @@ +*.pyc diff --git a/tools/patman/README.rst b/tools/patman/README.rst new file mode 120000 index 00000000000..76368b95980 --- /dev/null +++ b/tools/patman/README.rst @@ -0,0 +1 @@ +patman.rst
#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
#
# Copyright (c) 2011 The Chromium OS Authors.
#

"""See README for more information"""

import os
import sys

# Allow 'from patman import xxx' to work
# pylint: disable=C0413
our_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(our_path, '..'))

# Our modules
from u_boot_pylib import test_util
from u_boot_pylib import tout
from patman import cmdline
from patman import control


def run_patman():
    """Run patman

    This is the main program. It collects arguments and runs either the
    tests or the control module.

    Returns:
        This function does not return: it always exits via sys.exit() with
        the appropriate exit code
    """
    args = cmdline.parse_args()

    # Show full tracebacks only when debugging is requested
    if not args.debug:
        sys.tracebacklimit = 0

    tout.init(tout.INFO if args.verbose else tout.WARNING)

    # Run our reasonably good tests
    if args.cmd == 'test':
        # Import test modules lazily so normal use does not need them
        # pylint: disable=C0415
        from patman import func_test
        from patman import test_checkpatch
        from patman import test_cseries

        to_run = args.testname if args.testname not in [None, 'test'] else None
        result = test_util.run_test_suites(
            'patman', False, args.verbose, args.no_capture,
            args.test_preserve_dirs, None, to_run, None,
            [test_checkpatch.TestPatch, func_test.TestFunctional, 'settings',
             test_cseries.TestCseries])
        sys.exit(0 if result.wasSuccessful() else 1)

    # Process commits, produce patches files, check them, email them
    else:
        exit_code = control.do_patman(args)
        sys.exit(exit_code)


if __name__ == "__main__":
    sys.exit(run_patman())
# Optional Emacs-style '<nnnn-name>.patch:<line>: ' prefix emitted when
# checkpatch output has been passed through certain tools (redeclared here so
# this section is self-contained; the value is identical)
EMACS_PREFIX = r'(?:[0-9]{4}.*\.patch:[0-9]+: )?'
# Optional checkpatch message type, e.g. 'SPACING:'
TYPE_NAME = r'([A-Z_]+:)?'

RE_ERROR = re.compile(r'ERROR:%s (.*)' % TYPE_NAME)
RE_WARNING = re.compile(EMACS_PREFIX + r'WARNING:%s (.*)' % TYPE_NAME)
RE_CHECK = re.compile(r'CHECK:%s (.*)' % TYPE_NAME)
RE_FILE = re.compile(r'#(\d+): (FILE: ([^:]*):(\d+):)?')
RE_NOTE = re.compile(r'NOTE: (.*)')


def find_check_patch():
    """Locate the checkpatch.pl script

    Looks in the current directory, its grandparent, the git top-level
    tools/ and scripts/ directories and ~/bin, then walks upwards looking
    for a Chrome OS tree layout.

    Returns:
        str: Full path to checkpatch.pl

    Raises:
        SystemExit: if the script cannot be found anywhere
    """
    top_level = gitutil.get_top_level() or ''
    try_list = [
        os.getcwd(),
        os.path.join(os.getcwd(), '..', '..'),
        os.path.join(top_level, 'tools'),
        os.path.join(top_level, 'scripts'),
        '%s/bin' % os.getenv('HOME'),
        ]
    # Look in the standard locations first
    for path in try_list:
        fname = os.path.join(path, 'checkpatch.pl')
        if os.path.isfile(fname):
            return fname

    # Look upwards for a Chrome OS tree
    while not os.path.ismount(path):
        fname = os.path.join(path, 'src', 'third_party', 'kernel', 'files',
                             'scripts', 'checkpatch.pl')
        if os.path.isfile(fname):
            return fname
        path = os.path.dirname(path)

    sys.exit('Cannot find checkpatch.pl - please put it in your ' +
             '~/bin directory or use --no-check')


def check_patch_parse_one_message(message):
    """Parse one checkpatch message

    Args:
        message (str): string to parse

    Returns:
        dict: empty if the message is a NOTE or cannot be parsed, else:
            'type': 'error', 'warning' or 'check'
            'cptype': checkpatch message type (e.g. 'SPACING:') or None
            'msg': text message
            'file': filename, or '<patch>' if the message has no file
            'line': line number (only present when a file/line was found)
    """
    if RE_NOTE.match(message):
        return {}

    item = {}

    err_match = RE_ERROR.match(message)
    warn_match = RE_WARNING.match(message)
    check_match = RE_CHECK.match(message)
    if err_match:
        item['cptype'] = err_match.group(1)
        item['msg'] = err_match.group(2)
        item['type'] = 'error'
    elif warn_match:
        item['cptype'] = warn_match.group(1)
        item['msg'] = warn_match.group(2)
        item['type'] = 'warning'
    elif check_match:
        item['cptype'] = check_match.group(1)
        item['msg'] = check_match.group(2)
        item['type'] = 'check'
    else:
        message_indent = '    '
        print('patman: failed to parse checkpatch message:\n%s' %
              (message_indent + message.replace('\n', '\n' + message_indent)),
              file=sys.stderr)
        return {}

    file_match = RE_FILE.search(message)
    # some messages have no file, catch those here
    no_file_match = any(s in message for s in [
        '\nSubject:', 'Missing Signed-off-by: line(s)',
        'does MAINTAINERS need updating'
    ])

    if file_match:
        err_fname = file_match.group(3)
        if err_fname:
            item['file'] = err_fname
            item['line'] = int(file_match.group(4))
        else:
            # No 'FILE:' part - the line refers to the patch itself
            item['file'] = '<patch>'
            item['line'] = int(file_match.group(1))
    elif no_file_match:
        item['file'] = '<patch>'
    else:
        message_indent = '    '
        print('patman: failed to find file / line information:\n%s' %
              (message_indent + message.replace('\n', '\n' + message_indent)),
              file=sys.stderr)

    return item


def check_patch_parse(checkpatch_output, verbose=False):
    """Parse checkpatch.pl output

    Args:
        checkpatch_output (str): string to parse
        verbose (bool): True to print out every line of the checkpatch output
            as it is parsed

    Returns:
        namedtuple containing:
            ok: False=failure, True=ok
            problems (list of problems): each a dict:
                'type'; error or warning
                'msg': text message
                'file' : filename
                'line': line number
            errors: Number of errors
            warnings: Number of warnings
            checks: Number of checks
            lines: Number of lines
            stdout: checkpatch_output
    """
    fields = ['ok', 'problems', 'errors', 'warnings', 'checks', 'lines',
              'stdout']
    # NOTE: attributes are set on the namedtuple class itself; a fresh class
    # is created per call so no state leaks between calls
    result = collections.namedtuple('CheckPatchResult', fields)
    result.stdout = checkpatch_output
    result.ok = False
    result.errors, result.warnings, result.checks = 0, 0, 0
    result.lines = 0
    result.problems = []

    # total: 0 errors, 0 warnings, 159 lines checked
    # or:
    # total: 0 errors, 2 warnings, 7 checks, 473 lines checked
    emacs_stats = r'(?:[0-9]{4}.*\.patch )?'
    re_stats = re.compile(emacs_stats +
                          r'total: (\d+) errors, (\d+) warnings, (\d+)')
    re_stats_full = re.compile(emacs_stats +
                               r'total: (\d+) errors, (\d+) warnings, (\d+)'
                               r' checks, (\d+)')
    re_ok = re.compile(r'.*has no obvious style problems')
    re_bad = re.compile(r'.*has style problems, please review')

    # A blank line indicates the end of a message
    for message in result.stdout.split('\n\n'):
        if verbose:
            print(message)

        # either find stats, the verdict, or delegate
        match = re_stats_full.match(message)
        if not match:
            match = re_stats.match(message)
        if match:
            result.errors = int(match.group(1))
            result.warnings = int(match.group(2))
            if len(match.groups()) == 4:
                result.checks = int(match.group(3))
                result.lines = int(match.group(4))
            else:
                result.lines = int(match.group(3))
        elif re_ok.match(message):
            result.ok = True
        elif re_bad.match(message):
            result.ok = False
        else:
            problem = check_patch_parse_one_message(message)
            if problem:
                result.problems.append(problem)

    return result


def check_patch(fname, verbose=False, show_types=False, use_tree=False,
                cwd=None):
    """Run checkpatch.pl on a file and parse the results.

    Args:
        fname (str): Filename to check
        verbose (bool): True to print out every line of the checkpatch output
            as it is parsed
        show_types (bool): Tell checkpatch to show the type (number) of each
            message
        use_tree (bool): If False we'll pass '--no-tree' to checkpatch
        cwd (str): Path to use for patch files (None to use current dir)

    Returns:
        namedtuple containing:
            ok: False=failure, True=ok
            problems: List of problems, each a dict:
                'type'; error or warning
                'msg': text message
                'file' : filename
                'line': line number
            errors: Number of errors
            warnings: Number of warnings
            checks: Number of checks
            lines: Number of lines
            stdout: Full output of checkpatch
    """
    chk = find_check_patch()
    args = [chk]
    if not use_tree:
        args.append('--no-tree')
    if show_types:
        args.append('--show-types')
    output = command.output(
        *args, os.path.join(cwd or '', fname), raise_on_error=False,
        capture_stderr=not use_tree)

    return check_patch_parse(output, verbose)


def get_warning_msg(col, msg_type, fname, line, msg):
    """Create a message for a given file/line

    Args:
        col (terminal.Color): Colour object to use for highlighting
        msg_type (str): Message type ('error', 'warning' or 'check')
        fname (str): Filename which reports the problem
        line (int): Line number where it was noticed, or None
        msg (str): Message to report

    Returns:
        str: Formatted 'fname:line: type: msg' string with a trailing newline
    """
    if msg_type == 'warning':
        msg_type = col.build(col.YELLOW, msg_type)
    elif msg_type == 'error':
        msg_type = col.build(col.RED, msg_type)
    elif msg_type == 'check':
        msg_type = col.build(col.MAGENTA, msg_type)
    line_str = '' if line is None else '%d' % line
    return '%s:%s: %s: %s\n' % (fname, line_str, msg_type, msg)


def check_patches(verbose, args, use_tree, cwd):
    """Run the checkpatch.pl script on each patch

    Args:
        verbose (bool): True to print all checkpatch output as it is parsed
        args (list of str): Filenames of patches to check
        use_tree (bool): If False we'll pass '--no-tree' to checkpatch
        cwd (str): Path to use for patch files (None to use current dir)

    Returns:
        bool: True if no errors, warnings or checks were found, else False
    """
    error_count, warning_count, check_count = 0, 0, 0
    col = terminal.Color()

    with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor:
        futures = []
        for fname in args:
            f = executor.submit(check_patch, fname, verbose, use_tree=use_tree,
                                cwd=cwd)
            futures.append(f)

        for fname, f in zip(args, futures):
            result = f.result()
            if not result.ok:
                error_count += result.errors
                warning_count += result.warnings
                check_count += result.checks
                print('%d errors, %d warnings, %d checks for %s:' % (
                    result.errors, result.warnings, result.checks,
                    col.build(col.BLUE, fname)))
                if (len(result.problems) != result.errors + result.warnings +
                        result.checks):
                    print("Internal error: some problems lost")
                # Python seems to get confused by this
                # pylint: disable=E1133
                for item in result.problems:
                    sys.stderr.write(
                        get_warning_msg(col, item.get('type', '<unknown>'),
                                        item.get('file', '<unknown>'),
                                        item.get('line', 0),
                                        item.get('msg', 'message')))
                # Fix: a bare 'print' expression is a no-op in Python 3; call
                # the function to emit the intended blank separator line
                print()
    if error_count or warning_count or check_count:
        # Renamed from 'str' to avoid shadowing the builtin
        fmt = 'checkpatch.pl found %d error(s), %d warning(s), %d checks(s)'
        color = col.GREEN
        if warning_count:
            color = col.YELLOW
        if error_count:
            color = col.RED
        print(col.build(color, fmt % (error_count, warning_count,
                                      check_count)))
        return False
    return True
def add_send_args(par):
    """Add arguments for the 'send' command

    These options are shared by 'patman send' and 'patman series send'.

    Arguments:
        par (ArgumentParser): Parser to add to
    """
    par.add_argument(
        '-c', '--count', dest='count', type=int, default=-1,
        help='Automatically create patches from top n commits')
    par.add_argument(
        '-e', '--end', type=int, default=0,
        help='Commits to skip at end of patch list')
    par.add_argument(
        '-i', '--ignore-errors', action='store_true',
        dest='ignore_errors', default=False,
        help='Send patches email even if patch errors are found')
    par.add_argument(
        '-l', '--limit-cc', dest='limit', type=int, default=None,
        help='Limit the cc list to LIMIT entries [default: %(default)s]')
    par.add_argument(
        '-m', '--no-maintainers', action='store_false',
        dest='add_maintainers', default=True,
        help="Don't cc the file maintainers automatically")
    # Default to the in-tree get_maintainer.pl when a git top level exists;
    # otherwise leave the default as None
    default_arg = None
    top_level = gitutil.get_top_level()
    if top_level:
        default_arg = os.path.join(top_level, 'scripts',
                                   'get_maintainer.pl') + ' --norolestats'
    par.add_argument(
        '--get-maintainer-script', dest='get_maintainer_script', type=str,
        action='store',
        default=default_arg,
        help='File name of the get_maintainer.pl (or compatible) script.')
    par.add_argument(
        '-r', '--in-reply-to', type=str, action='store',
        help="Message ID that this series is in reply to")
    par.add_argument(
        '-s', '--start', dest='start', type=int, default=0,
        help='Commit to start creating patches from (0 = HEAD)')
    par.add_argument(
        '-t', '--ignore-bad-tags', action='store_true', default=False,
        help='Ignore bad tags / aliases (default=warn)')
    par.add_argument(
        '--no-binary', action='store_true', dest='ignore_binary',
        default=False,
        help="Do not output contents of changes in binary files")
    par.add_argument(
        '--no-check', action='store_false', dest='check_patch',
        default=True,
        help="Don't check for patch compliance")
    par.add_argument(
        '--tree', dest='check_patch_use_tree', default=False,
        action='store_true',
        help=("Set `tree` to True. If `tree` is False then we'll pass "
              "'--no-tree' to checkpatch (default: tree=%(default)s)"))
    par.add_argument(
        '--no-tree', dest='check_patch_use_tree', action='store_false',
        help="Set `tree` to False")
    par.add_argument(
        '--no-tags', action='store_false', dest='process_tags', default=True,
        help="Don't process subject tags as aliases")
    par.add_argument(
        '--no-signoff', action='store_false', dest='add_signoff',
        default=True, help="Don't add Signed-off-by to patches")
    par.add_argument(
        '--smtp-server', type=str,
        help="Specify the SMTP server to 'git send-email'")
    par.add_argument(
        '--keep-change-id', action='store_true',
        help='Preserve Change-Id tags in patches to send.')


def _add_show_comments(parser):
    # Shared '-c' option used by several subcommands
    parser.add_argument('-c', '--show-comments', action='store_true',
                        help='Show comments from each patch')


def _add_show_cover_comments(parser):
    # Shared '-C' option used by several subcommands
    parser.add_argument('-C', '--show-cover-comments', action='store_true',
                        help='Show comments from the cover letter')


def add_patchwork_subparser(subparsers):
    """Add the 'patchwork' subparser

    Args:
        subparsers (argparse action): Subparser parent

    Return:
        ArgumentParser: patchwork subparser
    """
    patchwork = subparsers.add_parser(
        'patchwork', aliases=ALIASES['patchwork'],
        help='Manage patchwork connection')
    # Default arguments used when exercising this parser in tests
    patchwork.defaults_cmds = [
        ['set-project', 'U-Boot'],
    ]
    patchwork_subparsers = patchwork.add_subparsers(dest='subcmd')
    patchwork_subparsers.add_parser('get-project')
    uset = patchwork_subparsers.add_parser('set-project')
    uset.add_argument(
        'project_name', help="Patchwork project name, e.g. 'U-Boot'")
    return patchwork
def add_series_subparser(subparsers):
    """Add the 'series' subparser

    Args:
        subparsers (argparse action): Subparser parent

    Return:
        ArgumentParser: series subparser
    """
    # Local helpers for options shared by several 'series' subcommands
    def _add_allow_unmarked(parser):
        parser.add_argument('-M', '--allow-unmarked', action='store_true',
                            default=False,
                            help="Don't require commits to be marked")

    def _add_mark(parser):
        parser.add_argument(
            '-m', '--mark', action='store_true',
            help='Mark unmarked commits with a Change-Id field')

    def _add_update(parser):
        parser.add_argument('-u', '--update', action='store_true',
                            help='Update the branch commit')

    def _add_wait(parser, default_s):
        """Add a -w option to a parser

        Args:
            parser (ArgumentParser): Parser to adjust
            default_s (int): Default value to use, in seconds
        """
        parser.add_argument(
            '-w', '--autolink-wait', type=int, default=default_s,
            help='Seconds to wait for patchwork to get a sent series')

    def _upstream_add(parser):
        parser.add_argument('-U', '--upstream', help='Commit to end before')

    def _add_gather(parser):
        parser.add_argument(
            '-G', '--no-gather-tags', dest='gather_tags', default=True,
            action='store_false',
            help="Don't gather review/test tags / update local series")

    series = subparsers.add_parser('series', aliases=ALIASES['series'],
                                   help='Manage series of patches')
    # Default arguments used when exercising this parser in tests
    series.defaults_cmds = [
        ['set-link', 'fred'],
    ]
    series.add_argument(
        '-n', '--dry-run', action='store_true', dest='dry_run', default=False,
        help="Do a dry run (create but don't email patches)")
    series.add_argument('-s', '--series', help='Name of series')
    series.add_argument('-V', '--version', type=int,
                        help='Version number to link')
    series_subparsers = series.add_subparsers(dest='subcmd')

    # This causes problem at present, perhaps due to the 'defaults' handling in
    # settings
    # series_subparsers.required = True

    add = series_subparsers.add_parser('add')
    add.add_argument('-D', '--desc',
                     help='Series description / cover-letter title')
    add.add_argument(
        '-f', '--force-version', action='store_true',
        help='Change the Series-version on a series to match its branch')
    _add_mark(add)
    _add_allow_unmarked(add)
    _upstream_add(add)

    series_subparsers.add_parser('archive', aliases=ALIASES['archive'])

    auto = series_subparsers.add_parser('autolink',
                                        aliases=ALIASES['autolink'])
    _add_update(auto)
    _add_wait(auto, 0)

    aall = series_subparsers.add_parser('autolink-all')
    aall.add_argument('-a', '--link-all-versions', action='store_true',
                      help='Link all series versions, not just the latest')
    aall.add_argument('-r', '--replace-existing', action='store_true',
                      help='Replace existing links')
    _add_update(aall)

    series_subparsers.add_parser('dec')

    gat = series_subparsers.add_parser('gather', aliases=ALIASES['gather'])
    _add_gather(gat)
    _add_show_comments(gat)
    _add_show_cover_comments(gat)

    sall = series_subparsers.add_parser('gather-all')
    sall.add_argument(
        '-a', '--gather-all-versions', action='store_true',
        help='Gather tags from all series versions, not just the latest')
    _add_gather(sall)
    _add_show_comments(sall)
    _add_show_cover_comments(sall)

    series_subparsers.add_parser('get-link')
    series_subparsers.add_parser('inc')
    series_subparsers.add_parser('ls')

    mar = series_subparsers.add_parser('mark')
    mar.add_argument('-m', '--allow-marked', action='store_true',
                     default=False,
                     help="Don't require commits to be unmarked")

    series_subparsers.add_parser('open', aliases=ALIASES['open'])
    pat = series_subparsers.add_parser(
        'patches', epilog='Show a list of patches and optional details')
    pat.add_argument('-t', '--commit', action='store_true',
                     help='Show the commit and diffstat')
    pat.add_argument('-p', '--patch', action='store_true',
                     help='Show the patch body')

    prog = series_subparsers.add_parser('progress',
                                        aliases=ALIASES['progress'])
    prog.add_argument('-a', '--show-all-versions', action='store_true',
                      help='Show all series versions, not just the latest')
    prog.add_argument('-l', '--list-patches', action='store_true',
                      help='List patch subject and status')

    ren = series_subparsers.add_parser('rename')
    ren.add_argument('-N', '--new-name', help='New name for the series')

    series_subparsers.add_parser('rm')
    series_subparsers.add_parser('rm-version', aliases=ALIASES['rm-version'])

    scan = series_subparsers.add_parser('scan')
    _add_mark(scan)
    _add_allow_unmarked(scan)
    _upstream_add(scan)

    ssend = series_subparsers.add_parser('send')
    add_send_args(ssend)
    ssend.add_argument(
        '--no-autolink', action='store_false', default=True, dest='autolink',
        help='Monitor patchwork after sending so the series can be autolinked')
    _add_wait(ssend, 120)

    setl = series_subparsers.add_parser('set-link')
    _add_update(setl)

    setl.add_argument(
        'link', help='Link to use, i.e. patchwork series number (e.g. 452329)')
    stat = series_subparsers.add_parser('status', aliases=ALIASES['status'])
    _add_show_comments(stat)
    _add_show_cover_comments(stat)

    series_subparsers.add_parser('summary')

    series_subparsers.add_parser('unarchive', aliases=ALIASES['unarchive'])

    unm = series_subparsers.add_parser('unmark')
    _add_allow_unmarked(unm)

    ver = series_subparsers.add_parser(
        'version-change', help='Change a version to a different version')
    ver.add_argument('--new-version', type=int,
                     help='New version number to change this one too')

    return series


def add_send_subparser(subparsers):
    """Add the 'send' subparser

    Args:
        subparsers (argparse action): Subparser parent

    Return:
        ArgumentParser: send subparser
    """
    send = subparsers.add_parser(
        'send', help='Format, check and email patches (default command)')
    send.add_argument(
        '-b', '--branch', type=str,
        help="Branch to process (by default, the current branch)")
    send.add_argument(
        '-n', '--dry-run', action='store_true', dest='dry_run',
        default=False, help="Do a dry run (create but don't email patches)")
    send.add_argument(
        '--cc-cmd', dest='cc_cmd', type=str, action='store',
        default=None, help='Output cc list for patch file (used by git)')
    add_send_args(send)
    send.add_argument('patchfiles', nargs='*')
    return send
def add_status_subparser(subparsers):
    """Add the 'status' subparser

    Args:
        subparsers (argparse action): Subparser parent

    Return:
        ArgumentParser: status subparser
    """
    status = subparsers.add_parser('status', aliases=ALIASES['status'],
                                   help='Check status of patches in patchwork')
    _add_show_comments(status)
    status.add_argument(
        '-d', '--dest-branch', type=str,
        help='Name of branch to create with collected responses')
    status.add_argument('-f', '--force', action='store_true',
                        help='Force overwriting an existing branch')
    status.add_argument('-T', '--single-thread', action='store_true',
                        help='Disable multithreading when reading patchwork')
    return status


def add_upstream_subparser(subparsers):
    """Add the 'upstream' subparser

    (docstring previously said 'status' - a copy-paste error)

    Args:
        subparsers (argparse action): Subparser parent

    Return:
        ArgumentParser: upstream subparser
    """
    upstream = subparsers.add_parser('upstream', aliases=ALIASES['upstream'],
                                     help='Manage upstream destinations')
    # Default arguments used when exercising this parser in tests
    upstream.defaults_cmds = [
        ['add', 'us', 'http://fred'],
        ['delete', 'us'],
    ]
    upstream_subparsers = upstream.add_subparsers(dest='subcmd')
    uadd = upstream_subparsers.add_parser('add')
    uadd.add_argument('remote_name',
                      help="Git remote name used for this upstream, e.g. 'us'")
    uadd.add_argument(
        'url', help='URL to use for this upstream, e.g. '
        "'https://gitlab.denx.de/u-boot/u-boot.git'")
    udel = upstream_subparsers.add_parser('delete')
    udel.add_argument(
        'remote_name',
        help="Git remote name used for this upstream, e.g. 'us'")
    upstream_subparsers.add_parser('list')
    udef = upstream_subparsers.add_parser('default')
    udef.add_argument('-u', '--unset', action='store_true',
                      help='Unset the default upstream')
    udef.add_argument('remote_name', nargs='?',
                      help="Git remote name used for this upstream, e.g. 'us'")
    return upstream
def setup_parser():
    """Set up command-line parser

    Returns:
        dict of parsers:
            'main': ErrorCatchingArgumentParser: top-level parser
            'series' / 'patchwork' / 'upstream': subcommand parsers
    """
    epilog = '''Create patches from commits in a branch, check them and email
        them as specified by tags you place in the commits. Use -n to do a dry
        run first.'''

    parser = ErrorCatchingArgumentParser(epilog=epilog)
    parser.add_argument(
        '-D', '--debug', action='store_true',
        help='Enabling debugging (provides a full traceback on error)')
    parser.add_argument(
        '-N', '--no-capture', action='store_true',
        help='Disable capturing of console output in tests')
    parser.add_argument('-p', '--project', default=project.detect_project(),
                        help="Project name; affects default option values and "
                        "aliases [default: %(default)s]")
    parser.add_argument('-P', '--patchwork-url',
                        default='https://patchwork.ozlabs.org',
                        help='URL of patchwork server [default: %(default)s]')
    parser.add_argument(
        '-T', '--thread', action='store_true', dest='thread',
        default=False, help='Create patches as a single thread')
    parser.add_argument(
        '-v', '--verbose', action='store_true', dest='verbose', default=False,
        help='Verbose output of errors and warnings')
    parser.add_argument(
        '-X', '--test-preserve-dirs', action='store_true',
        help='Preserve and display test-created directories')
    parser.add_argument(
        '-H', '--full-help', action='store_true', dest='full_help',
        default=False, help='Display the README file')

    subparsers = parser.add_subparsers(dest='cmd')
    add_send_subparser(subparsers)
    patchwork = add_patchwork_subparser(subparsers)
    series = add_series_subparser(subparsers)
    add_status_subparser(subparsers)
    upstream = add_upstream_subparser(subparsers)

    # Only add the 'test' action if the test data files are available.
    if HAS_TESTS:
        test_parser = subparsers.add_parser('test', help='Run tests')
        test_parser.add_argument('testname', type=str, default=None, nargs='?',
                                 help="Specify the test to run")

    parsers = {
        'main': parser,
        'series': series,
        'patchwork': patchwork,
        'upstream': upstream,
    }
    return parsers


def parse_args(argv=None, config_fname=None, parsers=None):
    """Parse command line arguments from sys.argv[]

    Args:
        argv (str or None): Arguments to process, or None to use sys.argv[1:]
        config_fname (str): Config file to read, or None for default, or False
            for an empty config
        parsers (dict or None): Parsers from setup_parser(), or None to create
            them

    Returns:
        argparse.Namespace: parsed command-line arguments
    """
    if not parsers:
        parsers = setup_parser()
    parser = parsers['main']

    # Parse options twice: first to get the project and second to handle
    # defaults properly (which depends on project)
    # Use parse_known_args() in case 'cmd' is omitted
    if not argv:
        argv = sys.argv[1:]

    args, rest = parser.parse_known_args(argv)
    if hasattr(args, 'project'):
        settings.Setup(parser, args.project, argv, config_fname)
        args, rest = parser.parse_known_args(argv)

    # If we have a command, it is safe to parse all arguments
    if args.cmd:
        args = parser.parse_args(argv)
    elif not args.full_help:
        # No command, so insert 'send' after the known arguments and before
        # the ones that presumably relate to the 'send' subcommand.
        # Fix: guard against 'rest' being empty - argv[:-0] evaluates to []
        # and would silently drop every recognised argument
        nargs = len(rest)
        known = argv[:-nargs] if nargs else argv
        argv = known + ['send'] + rest
        args = parser.parse_args(argv)

    # Resolve aliases
    for full, aliases in ALIASES.items():
        if args.cmd in aliases:
            args.cmd = full
        if 'subcmd' in args and args.subcmd in aliases:
            args.subcmd = full
    # These commands need a subcommand; show their help if none was given
    if args.cmd in ['series', 'upstream', 'patchwork'] and not args.subcmd:
        parser.parse_args([args.cmd, '--help'])

    return args
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2011 The Chromium OS Authors.
#

import collections
import re

# Separates a tag: at the beginning of the subject from the rest of it
re_subject_tag = re.compile(r'([^:\s]*):\s*(.*)')


class Commit:
    """Holds information about a single commit/patch in the series.

    Args:
        hash: Commit hash (as a string)

    Variables:
        hash: Commit hash
        subject: Subject line
        tags: List of maintainer tag strings
        changes: Dict containing a list of changes (single line strings).
            The dict is indexed by change version (an integer)
        cc_list: List of people to aliases/emails to cc on this commit
        notes: List of lines in the commit (not series) notes
        change_id: the Change-Id: tag that was stripped from this commit
            and can be used to generate the Message-Id.
        rtags: Response tags (e.g. Reviewed-by) collected by the commit, dict:
            key: rtag type (e.g. 'Reviewed-by')
            value: Set of people who gave that rtag, each a name/email string
        warn: List of warnings for this commit, each a str
        patch (str): Filename of the patch file for this commit
        future (concurrent.futures.Future): Future object for processing this
            commit, or None
    """
    def __init__(self, hash):
        # 'hash' kept as the parameter name for interface compatibility,
        # even though it shadows the builtin
        # pylint: disable=redefined-builtin
        self.hash = hash
        self.subject = ''
        self.tags = []
        self.changes = {}
        self.cc_list = []
        self.signoff_set = set()
        self.notes = []
        self.change_id = None
        self.rtags = collections.defaultdict(set)
        self.warn = []
        self.patch = ''
        self.future = None

    def __str__(self):
        return self.subject

    def add_change(self, version, info):
        """Add a new change line to the change list for a version.

        Args:
            version: Patch set version (integer: 1, 2, 3)
            info: Description of change in this version
        """
        # setdefault avoids the separate existence check
        self.changes.setdefault(version, []).append(info)

    def check_tags(self):
        """Create a list of subject tags in the commit

        Subject tags look like this:

            propounder: fort: Change the widget to propound correctly

        Here the tags are propounder and fort. Multiple tags are supported.
        The list is updated in self.tags.

        Returns:
            None (always; the return value is kept for compatibility with
            callers that check it)
        """
        remaining = self.subject
        while True:
            m = re_subject_tag.match(remaining)
            if not m:
                break
            self.tags.append(m.group(1))
            remaining = m.group(2)
        return None

    def add_cc(self, cc_list):
        """Add a list of people to Cc when we send this patch.

        Args:
            cc_list: List of aliases or email addresses
        """
        self.cc_list += cc_list

    def check_duplicate_signoff(self, signoff):
        """Check a list of signoffs we have send for this patch

        Args:
            signoff: Signoff line
        Returns:
            True if this signoff is new, False if we have already seen it.
        """
        if signoff in self.signoff_set:
            return False
        self.signoff_set.add(signoff)
        return True

    def add_rtag(self, rtag_type, who):
        """Add a response tag to a commit

        Args:
            rtag_type: rtag type (e.g. 'Reviewed-by')
            who: Person who gave that rtag, e.g.
                'Fred Bloggs <fred@bloggs.org>'
        """
        self.rtags[rtag_type].add(who)
def patchwork_status(branch, count, start, end, dest_branch, force,
                     show_comments, url, single_thread=False):
    """Check the status of patches in patchwork

    This finds the series in patchwork using the Series-link tag, checks for
    new comments and review tags, displays them and creates a new branch with
    the review tags.

    Args:
        branch (str): Branch to create patches from (None = current)
        count (int): Number of patches to produce, or -1 to produce patches
            for the current branch back to the upstream commit
        start (int): Start patch to use (0=first / top of branch)
        end (int): End patch to use (0=last one in series, 1=one before that,
            etc.)
        dest_branch (str): Name of new branch to create with the updated tags
            (None to not create a branch)
        force (bool): With dest_branch, force overwriting an existing branch
        show_comments (bool): True to display snippets from the comments
            provided by reviewers
        url (str): URL of patchwork server, e.g.
            'https://patchwork.ozlabs.org'. This is ignored if the series
            provides a Series-patchwork-url tag.
        single_thread (bool): Passed through to Patchwork() to control its
            threading behaviour (used by tests)

    Raises:
        ValueError: if the branch has no Series-link value
    """
    if not branch:
        branch = gitutil.get_branch()
    if count == -1:
        # Work out how many patches to send if we can
        count = gitutil.count_commits_to_branch(branch) - start

    series = patchstream.get_metadata(branch, start, count - end)
    warnings = 0
    for cmt in series.commits:
        if cmt.warn:
            print('%d warnings for %s:' % (len(cmt.warn), cmt.hash))
            for warn in cmt.warn:
                print('\t', warn)
            warnings += 1
            # Bug fix: a bare 'print' is a no-op in Python 3; call it to emit
            # the intended blank line
            print()
    if warnings:
        raise ValueError('Please fix warnings before running status')
    links = series.get('links')
    if not links:
        raise ValueError("Branch has no Series-links value")

    _, version = cser_helper.split_name_version(branch)
    link = series.get_link_for_version(version, links)
    if not link:
        # Bug fix: this message lacked its f-string prefix, so the literal
        # '{version}' was shown to the user
        raise ValueError(f'Series-links has no link for v{version}')
    tout.debug(f"Link '{link}'")

    # Allow the series to override the URL
    if 'patchwork_url' in series:
        url = series.patchwork_url

    # Bug fix: create the Patchwork object exactly once; previously it was
    # re-created after the status import, discarding single_thread
    pwork = Patchwork(url, single_thread=single_thread)

    # Import this here to avoid failing on other commands if the dependencies
    # are not present
    from patman import status
    status.check_and_show_status(series, link, branch, dest_branch, force,
                                 show_comments, False, pwork)
if not proj: + raise ValueError( + "Please set project ID with 'patman patchwork set-project'") + _, proj_id, link_name = cser.project_get() + pwork.project_set(proj_id, link_name) + elif pwork and pwork is not True: + raise ValueError( + f"Internal error: command '{args.subcmd}' should not have patchwork") + if args.subcmd == 'add': + cser.add(args.series, args.desc, mark=args.mark, + allow_unmarked=args.allow_unmarked, end=args.upstream, + dry_run=args.dry_run) + elif args.subcmd == 'archive': + cser.archive(args.series) + elif args.subcmd == 'autolink': + cser.link_auto(pwork, args.series, args.version, args.update, + args.autolink_wait) + elif args.subcmd == 'autolink-all': + cser.link_auto_all(pwork, update_commit=args.update, + link_all_versions=args.link_all_versions, + replace_existing=args.replace_existing, + dry_run=args.dry_run, show_summary=True) + elif args.subcmd == 'dec': + cser.decrement(args.series, args.dry_run) + elif args.subcmd == 'gather': + cser.gather(pwork, args.series, args.version, args.show_comments, + args.show_cover_comments, args.gather_tags, + dry_run=args.dry_run) + elif args.subcmd == 'gather-all': + cser.gather_all( + pwork, args.show_comments, args.show_cover_comments, + args.gather_all_versions, args.gather_tags, args.dry_run) + elif args.subcmd == 'get-link': + link = cser.link_get(args.series, args.version) + print(link) + elif args.subcmd == 'inc': + cser.increment(args.series, args.dry_run) + elif args.subcmd == 'ls': + cser.series_list() + elif args.subcmd == 'open': + cser.open(pwork, args.series, args.version) + elif args.subcmd == 'mark': + cser.mark(args.series, args.allow_marked, dry_run=args.dry_run) + elif args.subcmd == 'patches': + cser.list_patches(args.series, args.version, args.commit, + args.patch) + elif args.subcmd == 'progress': + cser.progress(args.series, args.show_all_versions, + args.list_patches) + elif args.subcmd == 'rm': + cser.remove(args.series, dry_run=args.dry_run) + elif args.subcmd == 
def upstream(args, test_db=None):
    """Process an 'upstream' subcommand

    Args:
        args (Namespace): Arguments to process
        test_db (str or None): Directory containing the test database, None to
            use the normal one

    Raises:
        ValueError: args.subcmd is not a recognised upstream subcommand
    """
    cser = cseries.Cseries(test_db)
    try:
        cser.open_database()
        cmd = args.subcmd
        if cmd == 'default':
            # Three variants: unset the default, set it, or show it
            if args.unset:
                cser.upstream_set_default(None)
            elif args.remote_name:
                cser.upstream_set_default(args.remote_name)
            else:
                cur = cser.upstream_get_default()
                print(cur if cur else 'unset')
        elif cmd == 'add':
            cser.upstream_add(args.remote_name, args.url)
        elif cmd == 'delete':
            cser.upstream_delete(args.remote_name)
        elif cmd == 'list':
            cser.upstream_list()
        else:
            raise ValueError(f"Unknown upstream subcommand '{args.subcmd}'")
    finally:
        cser.close_database()
def patchwork(args, test_db=None, pwork=None):
    """Process a 'patchwork' subcommand

    Args:
        args (Namespace): Arguments to process
        test_db (str or None): Directory containing the test database, None to
            use the normal one
        pwork (Patchwork): Patchwork object to use
    """
    cser = cseries.Cseries(test_db)
    try:
        cser.open_database()
        cmd = args.subcmd
        if cmd == 'set-project':
            if not pwork:
                pwork = Patchwork(args.patchwork_url)
            cser.project_set(pwork, args.project_name)
        elif cmd == 'get-project':
            info = cser.project_get()
            if not info:
                raise ValueError("Project has not been set; use 'patman patchwork set-project'")
            name, pwid, link_name = info
            print(f"Project '{name}' patchwork-ID {pwid} link-name {link_name}")
        else:
            raise ValueError(f"Unknown patchwork subcommand '{args.subcmd}'")
    finally:
        cser.close_database()


def do_patman(args, test_db=None, pwork=None, cser=None):
    """Process a patman command

    Args:
        args (Namespace): Arguments to process
        test_db (str or None): Directory containing the test database, None to
            use the normal one
        pwork (Patchwork): Patchwork object to use, or None to create one
        cser (Cseries): Cseries object to use when executing the command,
            or None to create one

    Return:
        int: 0 on success, 1 on failure
    """
    if args.full_help:
        with resources.path('patman', 'README.rst') as readme:
            tools.print_full_help(str(readme))
        return 0

    if args.cmd == 'send':
        if args.cc_cmd:
            # Called from git with a patch filename as argument: print the
            # additional Cc recipients recorded for that patch
            re_line = re.compile(r'(\S*) (.*)')
            with open(args.cc_cmd, 'r', encoding='utf-8') as inf:
                for line in inf:
                    match = re_line.match(line)
                    if match and match.group(1) == args.patchfiles[0]:
                        for cca in match.group(2).split('\0'):
                            cca = cca.strip()
                            if cca:
                                print(cca)
        else:
            # If we are not processing tags, there is no need to warn about
            # bad ones
            if not args.process_tags:
                args.ignore_bad_tags = True
            do_send(args)
        return 0

    ret_code = 0
    try:
        if args.cmd == 'status':
            # Check status of patches in patchwork
            patchwork_status(args.branch, args.count, args.start, args.end,
                             args.dest_branch, args.force, args.show_comments,
                             args.patchwork_url)
        elif args.cmd == 'series':
            do_series(args, test_db, pwork, cser)
        elif args.cmd == 'upstream':
            upstream(args, test_db)
        elif args.cmd == 'patchwork':
            patchwork(args, test_db, pwork)
    except Exception as exc:
        terminal.tprint(f'patman: {type(exc).__name__}: {exc}',
                        colour=terminal.Color.RED)
        if args.debug:
            print()
            traceback.print_exc()
        ret_code = 1
    return ret_code
def split_name_version(in_name):
    """Split a branch name into its series name and its version

    For example:
        'series' returns ('series', None)
        'series3' returns ('series', 3)

    Args:
        in_name (str): Name to parse

    Return:
        tuple:
            str: series name
            int: series version, or None if there is none in in_name
    """
    # The pattern can always match (both groups may be empty), so the old
    # fallback branch for a failed match was unreachable and has been removed.
    # The docstring example for 'series' previously claimed version 1; the
    # code has always returned None when no digits are present.
    m_ver = re.match(r'([^0-9]*)(\d*)', in_name)
    name = m_ver.group(1)
    version = int(m_ver.group(2)) if m_ver.group(2) else None
    return name, version
+ The only argument is the (Float) time to sleep for; it returns + nothing + loop (asyncio event loop): Loop used for Patchwork operations + """ + self.topdir = topdir + self.gitdir = None + self.db = None + self.col = terminal.Color(colour) + self._fake_time = None + self._fake_sleep = None + self.fake_now = None + self.loop = asyncio.get_event_loop() + + def open_database(self): + """Open the database ready for use""" + if not self.topdir: + self.topdir = gitutil.get_top_level() + if not self.topdir: + raise ValueError('No git repo detected in current directory') + self.gitdir = os.path.join(self.topdir, '.git') + fname = f'{self.topdir}/.patman.db' + + # For the first instance, start it up with the expected schema + self.db, is_new = Database.get_instance(fname) + if is_new: + self.db.start() + else: + # If a previous test has already checked the schema, just open it + self.db.open_it() + + def close_database(self): + """Close the database""" + if self.db: + self.db.close() + + def commit(self): + """Commit changes to the database""" + self.db.commit() + + def rollback(self): + """Roll back changes to the database""" + self.db.rollback() + + def set_fake_time(self, fake_sleep): + """Setup the fake timer + + Args: + fake_sleep (func(float)): Function to call to fake a sleep + """ + self._fake_time = 0 + self._fake_sleep = fake_sleep + + def inc_fake_time(self, inc_s): + """Increment the fake time + + Args: + inc_s (float): Amount to increment the fake time by + """ + self._fake_time += inc_s + + def get_time(self): + """Get the current time, fake or real + + This function should always be used to read the time so that faking the + time works correctly in tests. 
+ + Return: + float: Fake time, if time is being faked, else real time + """ + if self._fake_time is not None: + return self._fake_time + return time.monotonic() + + def sleep(self, time_s): + """Sleep for a while + + This function should always be used to sleep so that faking the time + works correctly in tests. + + Args: + time_s (float): Amount of seconds to sleep for + """ + print(f'Sleeping for {time_s} seconds') + if self._fake_time is not None: + self._fake_sleep(time_s) + else: + time.sleep(time_s) + + def get_now(self): + """Get the time now + + This function should always be used to read the datetime, so that + faking the time works correctly in tests + + Return: + DateTime object + """ + if self.fake_now: + return self.fake_now + return datetime.now() + + def get_ser_ver_list(self): + """Get a list of patchwork entries from the database + + Return: + list of SER_VER + """ + return self.db.ser_ver_get_list() + + def get_ser_ver_dict(self): + """Get a dict of patchwork entries from the database + + Return: dict contain all records: + key (int): ser_ver id + value (SER_VER): Information about one ser_ver record + """ + svlist = self.get_ser_ver_list() + svdict = {} + for sver in svlist: + svdict[sver.idnum] = sver + return svdict + + def get_upstream_dict(self): + """Get a list of upstream entries from the database + + Return: + OrderedDict: + key (str): upstream name + value (str): url + """ + return self.db.upstream_get_dict() + + def get_pcommit_dict(self, find_svid=None): + """Get a dict of pcommits entries from the database + + Args: + find_svid (int): If not None, finds the records associated with a + particular series and version + + Return: + OrderedDict: + key (int): record ID if find_svid is None, else seq + value (PCOMMIT): record data + """ + pcdict = OrderedDict() + for rec in self.db.pcommit_get_list(find_svid): + if find_svid is not None: + pcdict[rec.seq] = rec + else: + pcdict[rec.idnum] = rec + return pcdict + + def _get_series_info(self, 
idnum): + """Get information for a series from the database + + Args: + idnum (int): Series ID to look up + + Return: tuple: + str: Series name + str: Series description + + Raises: + ValueError: Series is not found + """ + return self.db.series_get_info(idnum) + + def prep_series(self, name, end=None): + """Prepare to work with a series + + Args: + name (str): Branch name with version appended, e.g. 'fix2' + end (str or None): Commit to end at, e.g. 'my_branch~16'. Only + commits up to that are processed. None to process commits up to + the upstream branch + + Return: tuple: + str: Series name, e.g. 'fix' + Series: Collected series information, including name + int: Version number, e.g. 2 + str: Message to show + """ + ser, version = self._parse_series_and_version(name, None) + if not name: + name = self._get_branch_name(ser.name, version) + + # First check we have a branch with this name + if not gitutil.check_branch(name, git_dir=self.gitdir): + raise ValueError(f"No branch named '{name}'") + + count = gitutil.count_commits_to_branch(name, self.gitdir, end) + if not count: + raise ValueError('Cannot detect branch automatically: ' + 'Perhaps use -U <upstream-commit> ?') + + series = patchstream.get_metadata(name, 0, count, git_dir=self.gitdir) + self._copy_db_fields_to(series, ser) + msg = None + if end: + repo = pygit2.init_repository(self.gitdir) + target = repo.revparse_single(end) + first_line = target.message.splitlines()[0] + msg = f'Ending before {oid(target.id)} {first_line}' + + return name, series, version, msg + + def _copy_db_fields_to(self, series, in_series): + """Copy over fields used by Cseries from one series to another + + This copes desc, idnum and name + + Args: + series (Series): Series to copy to + in_series (Series): Series to copy from + """ + series.desc = in_series.desc + series.idnum = in_series.idnum + series.name = in_series.name + + def _handle_mark(self, branch_name, in_series, version, mark, + allow_unmarked, force_version, 
dry_run): + """Handle marking a series, checking for unmarked commits, etc. + + Args: + branch_name (str): Name of branch to sync, or None for current one + in_series (Series): Series object + version (int): branch version, e.g. 2 for 'mychange2' + mark (bool): True to mark each commit with a change ID + allow_unmarked (str): True to not require each commit to be marked + force_version (bool): True if ignore a Series-version tag that + doesn't match its branch name + dry_run (bool): True to do a dry run + + Returns: + Series: New series object, if the series was marked; + copy_db_fields_to() is used to copy fields over + + Raises: + ValueError: Series being unmarked when it should be marked, etc. + """ + series = in_series + if 'version' in series and int(series.version) != version: + msg = (f"Series name '{branch_name}' suggests version {version} " + f"but Series-version tag indicates {series.version}") + if not force_version: + raise ValueError(msg + ' (see --force-version)') + + tout.warning(msg) + tout.warning(f'Updating Series-version tag to version {version}') + self.update_series(branch_name, series, int(series.version), + new_name=None, dry_run=dry_run, + add_vers=version) + + # Collect the commits again, as the hashes have changed + series = patchstream.get_metadata(branch_name, 0, + len(series.commits), + git_dir=self.gitdir) + self._copy_db_fields_to(series, in_series) + + if mark: + add_oid = self._mark_series(branch_name, series, dry_run=dry_run) + + # Collect the commits again, as the hashes have changed + series = patchstream.get_metadata(add_oid, 0, len(series.commits), + git_dir=self.gitdir) + self._copy_db_fields_to(series, in_series) + + bad_count = 0 + for commit in series.commits: + if not commit.change_id: + bad_count += 1 + if bad_count and not allow_unmarked: + raise ValueError( + f'{bad_count} commit(s) are unmarked; please use -m or -M') + + return series + + def _add_series_commits(self, series, svid): + """Add a commits from a series 
into the database + + Args: + series (Series): Series containing commits to add + svid (int): ser_ver-table ID to use for each commit + """ + to_add = [Pcommit(None, seq, commit.subject, None, commit.change_id, + None, None, None) + for seq, commit in enumerate(series.commits)] + + self.db.pcommit_add_list(svid, to_add) + + def get_series_by_name(self, name, include_archived=False): + """Get a Series object from the database by name + + Args: + name (str): Name of series to get + include_archived (bool): True to search in archives series + + Return: + Series: Object containing series info, or None if none + """ + idnum = self.db.series_find_by_name(name, include_archived) + if not idnum: + return None + name, desc = self.db.series_get_info(idnum) + + return Series.from_fields(idnum, name, desc) + + def _get_branch_name(self, name, version): + """Get the branch name for a particular version + + Args: + name (str): Base name of branch + version (int): Version number to use + """ + return name + (f'{version}' if version > 1 else '') + + def _ensure_version(self, ser, version): + """Ensure that a version exists in a series + + Args: + ser (Series): Series information, with idnum and name used here + version (int): Version to check + + Returns: + list of int: List of versions + """ + versions = self._get_version_list(ser.idnum) + if version not in versions: + raise ValueError( + f"Series '{ser.name}' does not have a version {version}") + return versions + + def _set_link(self, ser_id, name, version, link, update_commit, + dry_run=False): + """Add / update a series-links link for a series + + Args: + ser_id (int): Series ID number + name (str): Series name (used to find the branch) + version (int): Version number (used to update the database) + link (str): Patchwork link-string for the series + update_commit (bool): True to update the current commit with the + link + dry_run (bool): True to do a dry run + + Return: + bool: True if the database was update, False if the 
ser_id or + version was not found + """ + if update_commit: + branch_name = self._get_branch_name(name, version) + _, ser, max_vers, _ = self.prep_series(branch_name) + self.update_series(branch_name, ser, max_vers, add_vers=version, + dry_run=dry_run, add_link=link) + if link is None: + link = '' + updated = 1 if self.db.ser_ver_set_link(ser_id, version, link) else 0 + if dry_run: + self.rollback() + else: + self.commit() + + return updated + + def _get_autolink_dict(self, sdict, link_all_versions): + """Get a dict of ser_vers to fetch, along with their patchwork links + + Note that this returns items that already have links, as well as those + without links + + Args: + sdict: + key: series ID + value: Series with idnum, name and desc filled out + link_all_versions (bool): True to sync all versions of a series, + False to sync only the latest version + + Return: tuple: + dict: + key (int): svid + value (tuple): + int: series ID + str: series name + int: series version + str: patchwork link for the series, or None if none + desc: cover-letter name / series description + """ + svdict = self.get_ser_ver_dict() + to_fetch = {} + + if link_all_versions: + for svinfo in self.get_ser_ver_list(): + ser = sdict[svinfo.series_id] + + pwc = self.get_pcommit_dict(svinfo.idnum) + count = len(pwc) + branch = self._join_name_version(ser.name, svinfo.version) + series = patchstream.get_metadata(branch, 0, count, + git_dir=self.gitdir) + self._copy_db_fields_to(series, ser) + + to_fetch[svinfo.idnum] = (svinfo.series_id, series.name, + svinfo.version, svinfo.link, series) + else: + # Find the maximum version for each series + max_vers = self._series_all_max_versions() + + # Get a list of links to fetch + for svid, ser_id, version in max_vers: + svinfo = svdict[svid] + ser = sdict[ser_id] + + pwc = self.get_pcommit_dict(svid) + count = len(pwc) + branch = self._join_name_version(ser.name, version) + series = patchstream.get_metadata(branch, 0, count, + git_dir=self.gitdir) + 
self._copy_db_fields_to(series, ser) + + to_fetch[svid] = (ser_id, series.name, version, svinfo.link, + series) + return to_fetch + + def _get_version_list(self, idnum): + """Get a list of the versions available for a series + + Args: + idnum (int): ID of series to look up + + Return: + str: List of versions + """ + if idnum is None: + raise ValueError('Unknown series idnum') + return self.db.series_get_version_list(idnum) + + def _join_name_version(self, in_name, version): + """Convert a series name plus a version into a branch name + + For example: + ('series', 1) returns 'series' + ('series', 3) returns 'series3' + + Args: + in_name (str): Series name + version (int): Version number + + Return: + str: associated branch name + """ + if version == 1: + return in_name + return f'{in_name}{version}' + + def _parse_series(self, name, include_archived=False): + """Parse the name of a series, or detect it from the current branch + + Args: + name (str or None): name of series + include_archived (bool): True to search in archives series + + Return: + Series: New object with the name set; idnum is also set if the + series exists in the database + """ + if not name: + name = gitutil.get_branch(self.gitdir) + name, _ = split_name_version(name) + ser = self.get_series_by_name(name, include_archived) + if not ser: + ser = Series() + ser.name = name + return ser + + def _parse_series_and_version(self, in_name, in_version): + """Parse name and version of a series, or detect from current branch + + Figures out the name from in_name, or if that is None, from the current + branch. + + Uses the version in_version, or if that is None, uses the int at the + end of the name (e.g. 
'series' is version 1, 'series4' is version 4) + + Args: + in_name (str or None): name of series + in_version (str or None): version of series + + Return: + tuple: + Series: New object with the name set; idnum is also set if the + series exists in the database + int: Series version-number detected from the name + (e.g. 'fred' is version 1, 'fred2' is version 2) + """ + name = in_name + if not name: + name = gitutil.get_branch(self.gitdir) + if not name: + raise ValueError('No branch detected: please use -s <series>') + name, version = split_name_version(name) + if not name: + raise ValueError(f"Series name '{in_name}' cannot be a number, " + f"use '<name><version>'") + if in_version: + if version and version != in_version: + tout.warning( + f"Version mismatch: -V has {in_version} but branch name " + f'indicates {version}') + version = in_version + if not version: + version = 1 + if version > 99: + raise ValueError(f"Version {version} exceeds 99") + ser = self.get_series_by_name(name) + if not ser: + ser = Series() + ser.name = name + return ser, version + + def _series_get_version_stats(self, idnum, vers): + """Get the stats for a series + + Args: + idnum (int): ID number of series to process + vers (int): Version number to process + + Return: + tuple: + str: Status string, '<accepted>/<count>' + OrderedDict: + key (int): record ID if find_svid is None, else seq + value (PCOMMIT): record data + """ + svid, link = self._get_series_svid_link(idnum, vers) + pwc = self.get_pcommit_dict(svid) + count = len(pwc.values()) + if link: + accepted = 0 + for pcm in pwc.values(): + accepted += pcm.state == 'accepted' + else: + accepted = '-' + return f'{accepted}/{count}', pwc + + def get_series_svid(self, series_id, version): + """Get the patchwork ID of a series version + + Args: + series_id (int): id of the series to look up + version (int): version number to look up + + Return: + str: link found + + Raises: + ValueError: No matching series found + """ + return 
self._get_series_svid_link(series_id, version)[0] + + def _get_series_svid_link(self, series_id, version): + """Get the patchwork ID of a series version + + Args: + series_id (int): series ID to look up + version (int): version number to look up + + Return: + tuple: + int: record id + str: link + """ + recs = self.get_ser_ver(series_id, version) + return recs.idnum, recs.link + + def get_ser_ver(self, series_id, version): + """Get the patchwork details for a series version + + Args: + series_id (int): series ID to look up + version (int): version number to look up + + Return: + SER_VER: Requested information + + Raises: + ValueError: There is no matching idnum/version + """ + return self.db.ser_ver_get_for_series(series_id, version) + + def _prepare_process(self, name, count, new_name=None, quiet=False): + """Get ready to process all commits in a branch + + Args: + name (str): Name of the branch to process + count (int): Number of commits + new_name (str or None): New name, if a new branch is to be created + quiet (bool): True to avoid output (used for testing) + + Return: tuple: + pygit2.repo: Repo to use + pygit2.oid: Upstream commit, onto which commits should be added + Pygit2.branch: Original branch, for later use + str: (Possibly new) name of branch to process + list of Commit: commits to process, in order + pygit2.Reference: Original head before processing started + """ + upstream_guess = gitutil.get_upstream(self.gitdir, name)[0] + + tout.debug(f"_process_series name '{name}' new_name '{new_name}' " + f"upstream_guess '{upstream_guess}'") + dirty = gitutil.check_dirty(self.gitdir, self.topdir) + if dirty: + raise ValueError( + f"Modified files exist: use 'git status' to check: " + f'{dirty[:5]}') + repo = pygit2.init_repository(self.gitdir) + + commit = None + upstream_name = None + if upstream_guess: + try: + upstream = repo.lookup_reference(upstream_guess) + upstream_name = upstream.name + commit = upstream.peel(pygit2.enums.ObjectType.COMMIT) + except 
KeyError: + pass + except pygit2.repository.InvalidSpecError as exc: + print(f"Error '{exc}'") + if not upstream_name: + upstream_name = f'{name}~{count}' + commit = repo.revparse_single(upstream_name) + + branch = repo.lookup_branch(name) + if not quiet: + tout.info( + f'Checking out upstream commit {upstream_name}: ' + f'{oid(commit.oid)}') + + old_head = repo.head + if old_head.shorthand == name: + old_head = None + else: + old_head = repo.head + + if new_name: + name = new_name + repo.set_head(commit.oid) + + commits = [] + cmt = repo.get(branch.target) + for _ in range(count): + commits.append(cmt) + cmt = cmt.parents[0] + + return (repo, repo.head, branch, name, commit, list(reversed(commits)), + old_head) + + def _pick_commit(self, repo, cmt): + """Apply a commit to the source tree, without committing it + + _prepare_process() must be called before starting to pick commits + + This function must be called before _finish_commit() + + Note that this uses a cherry-pick method, creating a new tree_id each + time, so can make source-code changes + + Args: + repo (pygit2.repo): Repo to use + cmt (Commit): Commit to apply + + Return: tuple: + tree_id (pygit2.oid): Oid of index with source-changes applied + commit (pygit2.oid): Old commit being cherry-picked + """ + tout.detail(f"- adding {oid(cmt.hash)} {cmt}") + repo.cherrypick(cmt.hash) + if repo.index.conflicts: + raise ValueError('Conflicts detected') + + tree_id = repo.index.write_tree() + cherry = repo.get(cmt.hash) + tout.detail(f"cherry {oid(cherry.oid)}") + return tree_id, cherry + + def _finish_commit(self, repo, tree_id, commit, cur, msg=None): + """Complete a commit + + This must be called after _pick_commit(). 
+ + Args: + repo (pygit2.repo): Repo to use + tree_id (pygit2.oid): Oid of index with source-changes applied; if + None then the existing commit.tree_id is used + commit (pygit2.oid): Old commit being cherry-picked + cur (pygit2.reference): Reference to parent to use for the commit + msg (str): Commit subject and message; None to use commit.message + """ + if msg is None: + msg = commit.message + if not tree_id: + tree_id = commit.tree_id + repo.create_commit('HEAD', commit.author, commit.committer, + msg, tree_id, [cur.target]) + return repo.head + + def _finish_process(self, repo, branch, name, cur, old_head, new_name=None, + switch=False, dry_run=False, quiet=False): + """Finish processing commits + + Args: + repo (pygit2.repo): Repo to use + branch (pygit2.branch): Branch returned by _prepare_process() + name (str): Name of the branch to process + new_name (str or None): New name, if a new branch is being created + switch (bool): True to switch to the new branch after processing; + otherwise HEAD remains at the original branch, as amended + dry_run (bool): True to do a dry run, restoring the original tree + afterwards + quiet (bool): True to avoid output (used for testing) + + Return: + pygit2.reference: Final commit after everything is completed + """ + repo.state_cleanup() + + # Update the branch + target = repo.revparse_single('HEAD') + if not quiet: + tout.info(f'Updating branch {name} from {oid(branch.target)} to ' + f'{str(target.oid)[:HASH_LEN]}') + if dry_run: + if new_name: + repo.head.set_target(branch.target) + else: + branch_oid = branch.peel(pygit2.enums.ObjectType.COMMIT).oid + repo.head.set_target(branch_oid) + repo.head.set_target(branch.target) + repo.set_head(branch.name) + else: + if new_name: + new_branch = repo.branches.create(new_name, target) + if branch.upstream: + new_branch.upstream = branch.upstream + branch = new_branch + else: + branch.set_target(cur.target) + repo.set_head(branch.name) + if old_head: + if not switch: + 
repo.set_head(old_head.name) + return target + + def make_change_id(self, commit): + """Make a Change ID for a commit + + This is similar to the gerrit script: + git var GIT_COMMITTER_IDENT ; echo "$refhash" ; cat "README"; } + | git hash-object --stdin) + + Args: + commit (pygit2.commit): Commit to process + + Return: + Change ID in hex format + """ + sig = commit.committer + val = hashlib.sha1() + to_hash = f'{sig.name} <{sig.email}> {sig.time} {sig.offset}' + val.update(to_hash.encode('utf-8')) + val.update(str(commit.tree_id).encode('utf-8')) + val.update(commit.message.encode('utf-8')) + return val.hexdigest() + + def _filter_commits(self, name, series, seq_to_drop): + """Filter commits to drop one + + This function rebases the current branch, dropping a single commit, + thus changing the resulting code in the tree. + + Args: + name (str): Name of the branch to process + series (Series): Series object + seq_to_drop (int): Commit sequence to drop; commits are numbered + from 0, which is the one after the upstream branch, to + count - 1 + """ + count = len(series.commits) + (repo, cur, branch, name, commit, _, _) = self._prepare_process( + name, count, quiet=True) + repo.checkout_tree(commit, strategy=CheckoutStrategy.FORCE | + CheckoutStrategy.RECREATE_MISSING) + repo.set_head(commit.oid) + for seq, cmt in enumerate(series.commits): + if seq != seq_to_drop: + tree_id, cherry = self._pick_commit(repo, cmt) + cur = self._finish_commit(repo, tree_id, cherry, cur) + self._finish_process(repo, branch, name, cur, None, quiet=True) + + def process_series(self, name, series, new_name=None, switch=False, + dry_run=False): + """Rewrite a series commit messages, leaving code alone + + This uses a 'vals' namespace to pass things to the controlling + function. 
+            seq (int): Current sequence number in the commits to apply (0..n-1)
Change-Id tags + + Args: + name (str): Name of the series to mark + series (Series): Series object + dry_run (bool): True to do a dry run, restoring the original tree + afterwards + + Return: + pygit.oid: oid of the new branch + """ + vals = None + for vals in self.process_series(name, series, dry_run=dry_run): + if CHANGE_ID_TAG not in vals.msg: + change_id = self.make_change_id(vals.commit) + vals.msg = vals.msg + f'\n{CHANGE_ID_TAG}: {change_id}' + tout.detail(" - adding mark") + vals.info = 'marked' + else: + vals.info = 'has mark' + + return vals.oid + + def update_series(self, branch_name, series, max_vers, new_name=None, + dry_run=False, add_vers=None, add_link=None, + add_rtags=None, switch=False): + """Rewrite a series to update the Series-version/Series-links lines + + This updates the series in git; it does not update the database + + Args: + branch_name (str): Name of the branch to process + series (Series): Series object + max_vers (int): Version number of the series being updated + new_name (str or None): New name, if a new branch is to be created + dry_run (bool): True to do a dry run, restoring the original tree + afterwards + add_vers (int or None): Version number to add to the series, if any + add_link (str or None): Link to add to the series, if any + add_rtags (list of dict): List of review tags to add, one item for + each commit, each a dict: + key: Response tag (e.g. 
'Reviewed-by') + value: Set of people who gave that response, each a name/email + string + switch (bool): True to switch to the new branch after processing; + otherwise HEAD remains at the original branch, as amended + + Return: + pygit.oid: oid of the new branch + """ + def _do_version(): + if add_vers: + if add_vers == 1: + vals.info += f'rm v{add_vers} ' + else: + vals.info += f'add v{add_vers} ' + out.append(f'Series-version: {add_vers}') + + def _do_links(new_links): + if add_link: + if 'add' not in vals.info: + vals.info += 'add ' + vals.info += f"links '{new_links}' " + else: + vals.info += f"upd links '{new_links}' " + out.append(f'Series-links: {new_links}') + + added_version = False + added_link = False + for vals in self.process_series(branch_name, series, new_name, switch, + dry_run): + out = [] + for line in vals.msg.splitlines(): + m_ver = re.match('Series-version:(.*)', line) + m_links = re.match('Series-links:(.*)', line) + if m_ver and add_vers: + if ('version' in series and + int(series.version) != max_vers): + tout.warning( + f'Branch {branch_name}: Series-version tag ' + f'{series.version} does not match expected ' + f'version {max_vers}') + _do_version() + added_version = True + elif m_links: + links = series.get_links(m_links.group(1), max_vers) + if add_link: + links[max_vers] = add_link + _do_links(series.build_links(links)) + added_link = True + else: + out.append(line) + if vals.final: + if not added_version and add_vers and add_vers > 1: + _do_version() + if not added_link and add_link: + _do_links(f'{max_vers}:{add_link}') + + vals.msg = '\n'.join(out) + '\n' + if add_rtags and add_rtags[vals.seq]: + lines = [] + for tag, people in add_rtags[vals.seq].items(): + for who in people: + lines.append(f'{tag}: {who}') + vals.msg = patchstream.insert_tags(vals.msg.rstrip(), + sorted(lines)) + vals.info += (f'added {len(lines)} ' + f"tag{'' if len(lines) == 1 else 's'}") + + def _build_col(self, state, prefix='', base_str=None): + """Build a 
patch-state string with colour + + Args: + state (str): State to colourise (also indicates the colour to use) + prefix (str): Prefix string to also colourise + base_str (str or None): String to show instead of state, or None to + show state + + Return: + str: String with ANSI colour characters + """ + bright = True + if state == 'accepted': + col = self.col.GREEN + elif state == 'awaiting-upstream': + bright = False + col = self.col.GREEN + elif state in ['changes-requested']: + col = self.col.CYAN + elif state in ['rejected', 'deferred', 'not-applicable', 'superseded', + 'handled-elsewhere']: + col = self.col.RED + elif not state: + state = 'unknown' + col = self.col.MAGENTA + else: + # under-review, rfc, needs-review-ack + col = self.col.WHITE + out = base_str or SHORTEN_STATE.get(state, state) + pad = ' ' * (10 - len(out)) + col_state = self.col.build(col, prefix + out, bright) + return col_state, pad + + def _get_patches(self, series, version): + """Get a Series object containing the patches in a series + + Args: + series (str): Name of series to use, or None to use current branch + version (int): Version number, or None to detect from name + + Return: tuple: + str: Name of branch, e.g. 'mary2' + Series: Series object containing the commits and idnum, desc, name + int: Version number of series, e.g. 
2 + OrderedDict: + key (int): record ID if find_svid is None, else seq + value (PCOMMIT): record data + str: series name (for this version) + str: patchwork link + str: cover_id + int: cover_num_comments + """ + ser, version = self._parse_series_and_version(series, version) + if not ser.idnum: + raise ValueError(f"Unknown series '{series}'") + self._ensure_version(ser, version) + svinfo = self.get_ser_ver(ser.idnum, version) + pwc = self.get_pcommit_dict(svinfo.idnum) + + count = len(pwc) + branch = self._join_name_version(ser.name, version) + series = patchstream.get_metadata(branch, 0, count, + git_dir=self.gitdir) + self._copy_db_fields_to(series, ser) + + return (branch, series, version, pwc, svinfo.name, svinfo.link, + svinfo.cover_id, svinfo.cover_num_comments) + + def _list_patches(self, branch, pwc, series, desc, cover_id, num_comments, + show_commit, show_patch, list_patches, state_totals): + """List patches along with optional status info + + Args: + branch (str): Branch name if self.show_progress + pwc (dict): pcommit records: + key (int): seq + value (PCOMMIT): Record from database + series (Series): Series to show, or None to just use the database + desc (str): Series title + cover_id (int): Cover-letter ID + num_comments (int): The number of comments on the cover letter + show_commit (bool): True to show the commit and diffstate + show_patch (bool): True to show the patch + list_patches (bool): True to list all patches for each series, + False to just show the series summary on a single line + state_totals (dict): Holds totals for each state across all patches + key (str): state name + value (int): Number of patches in that state + + Return: + bool: True if OK, False if any commit subjects don't match their + patchwork subjects + """ + lines = [] + states = defaultdict(int) + count = len(pwc) + ok = True + for seq, item in enumerate(pwc.values()): + if series: + cmt = series.commits[seq] + if cmt.subject != item.subject: + ok = False + + col_state, 
pad = self._build_col(item.state) + patch_id = item.patch_id if item.patch_id else '' + if item.num_comments: + comments = str(item.num_comments) + elif item.num_comments is None: + comments = '-' + else: + comments = '' + + if show_commit or show_patch: + subject = self.col.build(self.col.BLACK, item.subject, + bright=False, back=self.col.YELLOW) + else: + subject = item.subject + + line = (f'{seq:3} {col_state}{pad} {comments.rjust(3)} ' + f'{patch_id:7} {oid(cmt.hash)} {subject}') + lines.append(line) + states[item.state] += 1 + out = '' + for state, freq in states.items(): + out += ' ' + self._build_col(state, f'{freq}:')[0] + state_totals[state] += freq + name = '' + if not list_patches: + name = desc or series.desc + name = self.col.build(self.col.YELLOW, name[:41].ljust(41)) + if not ok: + out = '*' + out[1:] + print(f"{branch:16} {name} {len(pwc):5} {out}") + return ok + print(f"Branch '{branch}' (total {len(pwc)}):{out}{name}") + + print(self.col.build( + self.col.MAGENTA, + f"Seq State Com PatchId {'Commit'.ljust(HASH_LEN)} Subject")) + + comments = '' if num_comments is None else str(num_comments) + if desc or comments or cover_id: + cov = 'Cov' if cover_id else '' + print(self.col.build( + self.col.WHITE, + f"{cov:14} {comments.rjust(3)} {cover_id or '':7} " + f'{desc or series.desc}', + bright=False)) + for seq in range(count): + line = lines[seq] + print(line) + if show_commit or show_patch: + print() + cmt = series.commits[seq] if series else '' + msg = gitutil.show_commit( + cmt.hash, show_commit, True, show_patch, + colour=self.col.enabled(), git_dir=self.gitdir) + sys.stdout.write(msg) + if seq != count - 1: + print() + print() + + return ok + + def _find_matched_commit(self, commits, pcm): + """Find a commit in a list of possible matches + + Args: + commits (dict of Commit): Possible matches + key (int): sequence number of patch (from 0) + value (Commit): Commit object + pcm (PCOMMIT): Patch to check + + Return: + int: Sequence number of matching 
+            patches: dict of possible matches
+        Creates a new client session and calls _sync()
to_fetch[k].version)): + sync = to_fetch[svid] + ordered[svid] = sync + + return ordered, missing + + async def _sync_all(self, client, pwork, to_fetch): + """Sync all series status from patchwork + + Args: + pwork (Patchwork): Patchwork object to use + sync_all_versions (bool): True to sync all versions of a series, + False to sync only the latest version + gather_tags (bool): True to gather review/test tags + + Return: list of tuple: + COVER object, or None if none or not read_cover_comments + list of PATCH objects + """ + with pwork.collect_stats() as stats: + tasks = [pwork.series_get_state(client, sync.link, True, True) + for sync in to_fetch.values() if sync.link] + result = await asyncio.gather(*tasks) + return result, stats.request_count + + async def _do_series_sync_all(self, pwork, to_fetch): + async with aiohttp.ClientSession() as client: + return await self._sync_all(client, pwork, to_fetch) + + def _progress_one(self, ser, show_all_versions, list_patches, + state_totals): + """Show progress information for all versions in a series + + Args: + ser (Series): Series to use + show_all_versions (bool): True to show all versions of a series, + False to show only the final version + list_patches (bool): True to list all patches for each series, + False to just show the series summary on a single line + state_totals (dict): Holds totals for each state across all patches + key (str): state name + value (int): Number of patches in that state + + Return: tuple + int: Number of series shown + int: Number of patches shown + int: Number of version which need a 'scan' + """ + max_vers = self._series_max_version(ser.idnum) + name, desc = self._get_series_info(ser.idnum) + coloured = self.col.build(self.col.BLACK, desc, bright=False, + back=self.col.YELLOW) + versions = self._get_version_list(ser.idnum) + vstr = list(map(str, versions)) + + if list_patches: + print(f"{name}: {coloured} (versions: {' '.join(vstr)})") + add_blank_line = False + total_series = 0 + 
total_patches = 0 + need_scan = 0 + for ver in versions: + if not show_all_versions and ver != max_vers: + continue + if add_blank_line: + print() + _, pwc = self._series_get_version_stats(ser.idnum, ver) + count = len(pwc) + branch = self._join_name_version(ser.name, ver) + series = patchstream.get_metadata(branch, 0, count, + git_dir=self.gitdir) + svinfo = self.get_ser_ver(ser.idnum, ver) + self._copy_db_fields_to(series, ser) + + ok = self._list_patches( + branch, pwc, series, svinfo.name, svinfo.cover_id, + svinfo.cover_num_comments, False, False, list_patches, + state_totals) + if not ok: + need_scan += 1 + add_blank_line = list_patches + total_series += 1 + total_patches += count + return total_series, total_patches, need_scan + + def _summary_one(self, ser): + """Show summary information for the latest version in a series + + Args: + series (str): Name of series to use, or None to show progress for + all series + """ + max_vers = self._series_max_version(ser.idnum) + name, desc = self._get_series_info(ser.idnum) + stats, pwc = self._series_get_version_stats(ser.idnum, max_vers) + states = {x.state for x in pwc.values()} + state = 'accepted' + for val in ['awaiting-upstream', 'changes-requested', 'rejected', + 'deferred', 'not-applicable', 'superseded', + 'handled-elsewhere']: + if val in states: + state = val + state_str, pad = self._build_col(state, base_str=name) + print(f"{state_str}{pad} {stats.rjust(6)} {desc}") + + def _series_max_version(self, idnum): + """Find the latest version of a series + + Args: + idnum (int): Series ID to look up + + Return: + int: maximum version + """ + return self.db.series_get_max_version(idnum) + + def _series_all_max_versions(self): + """Find the latest version of all series + + Return: list of: + int: ser_ver ID + int: series ID + int: Maximum version + """ + return self.db.series_get_all_max_versions() diff --git a/tools/patman/cseries.py b/tools/patman/cseries.py new file mode 100644 index 00000000000..bcbc4963cea --- 
+            mark (bool): True to mark each commit with a change ID
+            allow_unmarked (bool): True to not require each commit to be marked
+            end (str): Add only commits up to but excluding this one
description') + desc = ser['cover'][0] + + ser = self._handle_mark(name, ser, version, mark, allow_unmarked, + force_version, dry_run) + link = ser.get_link_for_version(version) + + msg = 'Added' + added = False + series_id = self.db.series_find_by_name(ser.name) + if not series_id: + series_id = self.db.series_add(ser.name, desc) + added = True + msg += f" series '{ser.name}'" + + if version not in self._get_version_list(series_id): + svid = self.db.ser_ver_add(series_id, version, link) + msg += f" v{version}" + if not added: + msg += f" to existing series '{ser.name}'" + added = True + + self._add_series_commits(ser, svid) + count = len(ser.commits) + msg += f" ({count} commit{'s' if count > 1 else ''})" + if not added: + tout.info(f"Series '{ser.name}' v{version} already exists") + msg = None + elif not dry_run: + self.commit() + else: + self.rollback() + series_id = None + ser.desc = desc + ser.idnum = series_id + + if msg: + tout.info(msg) + if dry_run: + tout.info('Dry run completed') + + def decrement(self, series, dry_run=False): + """Decrement a series to the previous version and delete the branch + + Args: + series (str): Name of series to use, or None to use current branch + dry_run (bool): True to do a dry run + """ + ser = self._parse_series(series) + if not ser.idnum: + raise ValueError(f"Series '{ser.name}' not found in database") + + max_vers = self._series_max_version(ser.idnum) + if max_vers < 2: + raise ValueError(f"Series '{ser.name}' only has one version") + + tout.info(f"Removing series '{ser.name}' v{max_vers}") + + new_max = max_vers - 1 + + repo = pygit2.init_repository(self.gitdir) + if not dry_run: + name = self._get_branch_name(ser.name, new_max) + branch = repo.lookup_branch(name) + try: + repo.checkout(branch) + except pygit2.errors.GitError: + tout.warning(f"Failed to checkout branch {name}") + raise + + del_name = f'{ser.name}{max_vers}' + del_branch = repo.lookup_branch(del_name) + branch_oid = 
del_branch.peel(pygit2.enums.ObjectType.COMMIT).oid + del_branch.delete() + print(f"Deleted branch '{del_name}' {oid(branch_oid)}") + + self.db.ser_ver_remove(ser.idnum, max_vers) + if not dry_run: + self.commit() + else: + self.rollback() + + def increment(self, series_name, dry_run=False): + """Increment a series to the next version and create a new branch + + Args: + series_name (str): Name of series to use, or None to use current + branch + dry_run (bool): True to do a dry run + """ + ser = self._parse_series(series_name) + if not ser.idnum: + raise ValueError(f"Series '{ser.name}' not found in database") + + max_vers = self._series_max_version(ser.idnum) + + branch_name = self._get_branch_name(ser.name, max_vers) + on_branch = gitutil.get_branch(self.gitdir) == branch_name + svid = self.get_series_svid(ser.idnum, max_vers) + pwc = self.get_pcommit_dict(svid) + count = len(pwc.values()) + series = patchstream.get_metadata(branch_name, 0, count, + git_dir=self.gitdir) + tout.info(f"Increment '{ser.name}' v{max_vers}: {count} patches") + + # Create a new branch + vers = max_vers + 1 + new_name = self._join_name_version(ser.name, vers) + + self.update_series(branch_name, series, max_vers, new_name, dry_run, + add_vers=vers, switch=on_branch) + + old_svid = self.get_series_svid(ser.idnum, max_vers) + pcd = self.get_pcommit_dict(old_svid) + + svid = self.db.ser_ver_add(ser.idnum, vers) + self.db.pcommit_add_list(svid, pcd.values()) + if not dry_run: + self.commit() + else: + self.rollback() + + # repo.head.set_target(amended) + tout.info(f'Added new branch {new_name}') + if dry_run: + tout.info('Dry run completed') + + def link_set(self, series_name, version, link, update_commit): + """Add / update a series-links link for a series + + Args: + series_name (str): Name of series to use, or None to use current + branch + version (int): Version number, or None to detect from name + link (str): Patchwork link-string for the series + update_commit (bool): True to update 
+        """Search patchwork for the link for a series
+ version (int): Version to search for, or None for current version + detected from branch name + update_commit (bool): True to update the current commit with the + link + wait_s (int): Number of seconds to wait for the autolink to succeed + """ + start = self.get_time() + stop = start + wait_s + sleep_time = 5 + while True: + pws, options, name, version, desc = self.link_search( + pwork, series, version) + if pws: + if wait_s: + tout.info('Link completed after ' + f'{self.get_time() - start} seconds') + break + + print(f"Possible matches for '{name}' v{version} desc '{desc}':") + print(' Link Version Description') + for opt in options: + print(f"{opt['id']:6} {opt['version']:7} {opt['name']}") + if not wait_s or self.get_time() > stop: + delay = f' after {wait_s} seconds' if wait_s else '' + raise ValueError(f"Cannot find series '{desc}{delay}'") + + self.sleep(sleep_time) + + self.link_set(name, version, pws, update_commit) + + def link_auto_all(self, pwork, update_commit, link_all_versions, + replace_existing, dry_run, show_summary=True): + """Automatically find a series link by looking in patchwork + + Args: + pwork (Patchwork): Patchwork object to use + update_commit (bool): True to update the current commit with the + link + link_all_versions (bool): True to sync all versions of a series, + False to sync only the latest version + replace_existing (bool): True to sync a series even if it already + has a link + dry_run (bool): True to do a dry run + show_summary (bool): True to show a summary of how things went + + Return: + OrderedDict of summary info: + key (int): ser_ver ID + value (AUTOLINK): result of autolinking on this ser_ver + """ + sdict = self.db.series_get_dict_by_id() + all_ser_vers = self._get_autolink_dict(sdict, link_all_versions) + + # Get rid of things without a description + valid = {} + state = {} + no_desc = 0 + not_found = 0 + updated = 0 + failed = 0 + already = 0 + for svid, (ser_id, name, version, link, desc) in all_ser_vers.items(): + 
+        Lists all series along with their description, number of patches
self.db.series_get_dict() + print(f"{'Name':15} {'Description':40} Accepted Versions") + border = f"{'-' * 15} {'-' * 40} -------- {'-' * 15}" + print(border) + for name in sorted(sdict): + ser = sdict[name] + versions = self._get_version_list(ser.idnum) + stat = self._series_get_version_stats( + ser.idnum, self._series_max_version(ser.idnum))[0] + + vlist = ' '.join([str(ver) for ver in sorted(versions)]) + + print(f'{name:16.16} {ser.desc:41.41} {stat.rjust(8)} {vlist}') + print(border) + + def list_patches(self, series, version, show_commit=False, + show_patch=False): + """List patches in a series + + Args: + series (str): Name of series to use, or None to use current branch + version (int): Version number, or None to detect from name + show_commit (bool): True to show the commit and diffstate + show_patch (bool): True to show the patch + """ + branch, series, version, pwc, name, _, cover_id, num_comments = ( + self._get_patches(series, version)) + with terminal.pager(): + state_totals = defaultdict(int) + self._list_patches(branch, pwc, series, name, cover_id, + num_comments, show_commit, show_patch, True, + state_totals) + + def mark(self, in_name, allow_marked=False, dry_run=False): + """Add Change-Id tags to a series + + Args: + in_name (str): Name of the series to unmark + allow_marked (bool): Allow commits to be (already) marked + dry_run (bool): True to do a dry run, restoring the original tree + afterwards + + Return: + pygit.oid: oid of the new branch + """ + name, ser, _, _ = self.prep_series(in_name) + tout.info(f"Marking series '{name}': allow_marked {allow_marked}") + + if not allow_marked: + bad = [] + for cmt in ser.commits: + if cmt.change_id: + bad.append(cmt) + if bad: + print(f'{len(bad)} commit(s) already have marks') + for cmt in bad: + print(f' - {oid(cmt.hash)} {cmt.subject}') + raise ValueError( + f'Marked commits {len(bad)}/{len(ser.commits)}') + new_oid = self._mark_series(in_name, ser, dry_run=dry_run) + + if dry_run: + tout.info('Dry 
run completed') + return new_oid + + def unmark(self, name, allow_unmarked=False, dry_run=False): + """Remove Change-Id tags from a series + + Args: + name (str): Name of the series to unmark + allow_unmarked (bool): Allow commits to be (already) unmarked + dry_run (bool): True to do a dry run, restoring the original tree + afterwards + + Return: + pygit.oid: oid of the new branch + """ + name, ser, _, _ = self.prep_series(name) + tout.info( + f"Unmarking series '{name}': allow_unmarked {allow_unmarked}") + + if not allow_unmarked: + bad = [] + for cmt in ser.commits: + if not cmt.change_id: + bad.append(cmt) + if bad: + print(f'{len(bad)} commit(s) are missing marks') + for cmt in bad: + print(f' - {oid(cmt.hash)} {cmt.subject}') + raise ValueError( + f'Unmarked commits {len(bad)}/{len(ser.commits)}') + vals = None + for vals in self.process_series(name, ser, dry_run=dry_run): + if cser_helper.CHANGE_ID_TAG in vals.msg: + lines = vals.msg.splitlines() + updated = [line for line in lines + if not line.startswith(cser_helper.CHANGE_ID_TAG)] + vals.msg = '\n'.join(updated) + + tout.detail(" - removing mark") + vals.info = 'unmarked' + else: + vals.info = 'no mark' + + if dry_run: + tout.info('Dry run completed') + return vals.oid + + def open(self, pwork, name, version): + """Open the patchwork page for a series + + Args: + pwork (Patchwork): Patchwork object to use + name (str): Name of series to open + version (str): Version number to open + """ + ser, version = self._parse_series_and_version(name, version) + link = self.link_get(ser.name, version) + pwork.url = 'https://patchwork.ozlabs.org' + url = self.loop.run_until_complete(pwork.get_series_url(link)) + print(f'Opening {url}') + + # With Firefox, GTK produces lots of warnings, so suppress them + # Gtk-Message: 06:48:20.692: Failed to load module "xapp-gtk3-module" + # Gtk-Message: 06:48:20.692: Not loading module "atk-bridge": The + # functionality is provided by GTK natively. Please try to not load it. 
+ # Gtk-Message: 06:48:20.692: Failed to load module "appmenu-gtk-module" + # Gtk-Message: 06:48:20.692: Failed to load module "appmenu-gtk-module" + # [262145, Main Thread] WARNING: GTK+ module /snap/firefox/5987/ + # gnome-platform/usr/lib/gtk-2.0/modules/libcanberra-gtk-module.so + # cannot be loaded. + # GTK+ 2.x symbols detected. Using GTK+ 2.x and GTK+ 3 in the same + # process # is not supported.: 'glib warning', file /build/firefox/ + # parts/firefox/build/toolkit/xre/nsSigHandlers.cpp:201 + # + # (firefox_firefox:262145): Gtk-WARNING **: 06:48:20.728: GTK+ module + # /snap/firefox/5987/gnome-platform/usr/lib/gtk-2.0/modules/ + # libcanberra-gtk-module.so cannot be loaded. + # GTK+ 2.x symbols detected. Using GTK+ 2.x and GTK+ 3 in the same + # process is not supported. + # Gtk-Message: 06:48:20.728: Failed to load module + # "canberra-gtk-module" + # [262145, Main Thread] WARNING: GTK+ module /snap/firefox/5987/ + # gnome-platform/usr/lib/gtk-2.0/modules/libcanberra-gtk-module.so + # cannot be loaded. + # GTK+ 2.x symbols detected. Using GTK+ 2.x and GTK+ 3 in the same + # process is not supported.: 'glib warning', file /build/firefox/ + # parts/firefox/build/toolkit/xre/nsSigHandlers.cpp:201 + # + # (firefox_firefox:262145): Gtk-WARNING **: 06:48:20.729: GTK+ module + # /snap/firefox/5987/gnome-platform/usr/lib/gtk-2.0/modules/ + # libcanberra-gtk-module.so cannot be loaded. + # GTK+ 2.x symbols detected. Using GTK+ 2.x and GTK+ 3 in the same + # process is not supported. + # Gtk-Message: 06:48:20.729: Failed to load module + # "canberra-gtk-module" + # ATTENTION: default value of option mesa_glthread overridden by + # environment. 
+ cros_subprocess.Popen(['xdg-open', url]) + + def progress(self, series, show_all_versions, list_patches): + """Show progress information for all versions in a series + + Args: + series (str): Name of series to use, or None to show progress for + all series + show_all_versions (bool): True to show all versions of a series, + False to show only the final version + list_patches (bool): True to list all patches for each series, + False to just show the series summary on a single line + """ + with terminal.pager(): + state_totals = defaultdict(int) + if series is not None: + _, _, need_scan = self._progress_one( + self._parse_series(series), show_all_versions, + list_patches, state_totals) + if need_scan: + tout.warning( + 'Inconsistent commit-subject: Please use ' + "'patman series -s <branch> scan' to resolve this") + return + + total_patches = 0 + total_series = 0 + sdict = self.db.series_get_dict() + border = None + total_need_scan = 0 + if not list_patches: + print(self.col.build( + self.col.MAGENTA, + f"{'Name':16} {'Description':41} Count {'Status'}")) + border = f"{'-' * 15} {'-' * 40} ----- {'-' * 15}" + print(border) + for name in sorted(sdict): + ser = sdict[name] + num_series, num_patches, need_scan = self._progress_one( + ser, show_all_versions, list_patches, state_totals) + total_need_scan += need_scan + if list_patches: + print() + total_series += num_series + total_patches += num_patches + if not list_patches: + print(border) + total = f'{total_series} series' + out = '' + for state, freq in state_totals.items(): + out += ' ' + self._build_col(state, f'{freq}:')[0] + if total_need_scan: + out = '*' + out[1:] + + print(f"{total:15} {'':40} {total_patches:5} {out}") + if total_need_scan: + tout.info( + f'Series marked * ({total_need_scan}) have commit ' + 'subjects which mismatch their patches and need to be ' + 'scanned') + + def project_set(self, pwork, name, quiet=False): + """Set the name of the project + + Args: + pwork (Patchwork): Patchwork object 
to use + name (str): Name of the project to use in patchwork + quiet (bool): True to skip writing the message + """ + res = self.loop.run_until_complete(pwork.get_projects()) + proj_id = None + link_name = None + for proj in res: + if proj['name'] == name: + proj_id = proj['id'] + link_name = proj['link_name'] + if not proj_id: + raise ValueError(f"Unknown project name '{name}'") + self.db.settings_update(name, proj_id, link_name) + self.commit() + if not quiet: + tout.info(f"Project '{name}' patchwork-ID {proj_id} " + f'link-name {link_name}') + + def project_get(self): + """Get the details of the project + + Returns: + tuple or None if there are no settings: + name (str): Project name, e.g. 'U-Boot' + proj_id (int): Patchworks project ID for this project + link_name (str): Patchwork's link-name for the project + """ + return self.db.settings_get() + + def remove(self, name, dry_run=False): + """Remove a series from the database + + Args: + name (str): Name of series to remove, or None to use current one + dry_run (bool): True to do a dry run + """ + ser = self._parse_series(name) + name = ser.name + if not ser.idnum: + raise ValueError(f"No such series '{name}'") + + self.db.ser_ver_remove(ser.idnum, None) + if not dry_run: + self.commit() + else: + self.rollback() + + self.commit() + tout.info(f"Removed series '{name}'") + if dry_run: + tout.info('Dry run completed') + + def rename(self, series, name, dry_run=False): + """Rename a series + + Renames a series and changes the name of any branches which match + versions present in the database + + Args: + series (str): Name of series to use, or None to use current branch + name (str): new name to use (must not include version number) + dry_run (bool): True to do a dry run + """ + old_ser, _ = self._parse_series_and_version(series, None) + if not old_ser.idnum: + raise ValueError(f"Series '{old_ser.name}' not found in database") + if old_ser.name != series: + raise ValueError(f"Invalid series name '{series}': " + 
'did you use the branch name?') + chk, _ = cser_helper.split_name_version(name) + if chk != name: + raise ValueError( + f"Invalid series name '{name}': did you use the branch name?") + if chk == old_ser.name: + raise ValueError( + f"Cannot rename series '{old_ser.name}' to itself") + if self.get_series_by_name(name): + raise ValueError(f"Cannot rename: series '{name}' already exists") + + versions = self._get_version_list(old_ser.idnum) + missing = [] + exists = [] + todo = {} + for ver in versions: + ok = True + old_branch = self._get_branch_name(old_ser.name, ver) + if not gitutil.check_branch(old_branch, self.gitdir): + missing.append(old_branch) + ok = False + + branch = self._get_branch_name(name, ver) + if gitutil.check_branch(branch, self.gitdir): + exists.append(branch) + ok = False + + if ok: + todo[ver] = [old_branch, branch] + + if missing or exists: + msg = 'Cannot rename' + if missing: + msg += f": branches missing: {', '.join(missing)}" + if exists: + msg += f": branches exist: {', '.join(exists)}" + raise ValueError(msg) + + for old_branch, branch in todo.values(): + tout.info(f"Renaming branch '{old_branch}' to '{branch}'") + if not dry_run: + gitutil.rename_branch(old_branch, branch, self.gitdir) + + # Change the series name; nothing needs to change in ser_ver + self.db.series_set_name(old_ser.idnum, name) + + if not dry_run: + self.commit() + else: + self.rollback() + + tout.info(f"Renamed series '{series}' to '{name}'") + if dry_run: + tout.info('Dry run completed') + + def scan(self, branch_name, mark=False, allow_unmarked=False, end=None, + dry_run=False): + """Scan a branch and make updates to the database if it has changed + + Args: + branch_name (str): Name of branch to sync, or None for current one + mark (str): True to mark each commit with a change ID + allow_unmarked (str): True to not require each commit to be marked + end (str): Add only commits up to but exclu + dry_run (bool): True to do a dry run + """ + def _show_item(oper, seq, 
subject): + col = None + if oper == '+': + col = self.col.GREEN + elif oper == '-': + col = self.col.RED + out = self.col.build(col, subject) if col else subject + tout.info(f'{oper} {seq:3} {out}') + + name, ser, version, msg = self.prep_series(branch_name, end) + svid = self.get_ser_ver(ser.idnum, version).idnum + pcdict = self.get_pcommit_dict(svid) + + tout.info( + f"Syncing series '{name}' v{version}: mark {mark} " + f'allow_unmarked {allow_unmarked}') + if msg: + tout.info(msg) + + ser = self._handle_mark(name, ser, version, mark, allow_unmarked, + False, dry_run) + + # First check for new patches that are not in the database + to_add = dict(enumerate(ser.commits)) + for pcm in pcdict.values(): + tout.debug(f'pcm {pcm.subject}') + i = self._find_matched_commit(to_add, pcm) + if i is not None: + del to_add[i] + + # Now check for patches in the database that are not in the branch + to_remove = dict(enumerate(pcdict.values())) + for cmt in ser.commits: + tout.debug(f'cmt {cmt.subject}') + i = self._find_matched_patch(to_remove, cmt) + if i is not None: + del to_remove[i] + + for seq, cmt in enumerate(ser.commits): + if seq in to_remove: + _show_item('-', seq, to_remove[seq].subject) + del to_remove[seq] + if seq in to_add: + _show_item('+', seq, to_add[seq].subject) + del to_add[seq] + else: + _show_item(' ', seq, cmt.subject) + seq = len(ser.commits) + for cmt in to_add.items(): + _show_item('+', seq, cmt.subject) + seq += 1 + for seq, pcm in to_remove.items(): + _show_item('+', seq, pcm.subject) + + self.db.pcommit_delete(svid) + self._add_series_commits(ser, svid) + if not dry_run: + self.commit() + else: + self.rollback() + tout.info('Dry run completed') + + def send(self, pwork, name, autolink, autolink_wait, args): + """Send out a series + + Args: + pwork (Patchwork): Patchwork object to use + name (str): Series name to search for, or None for current series + that is checked out + autolink (bool): True to auto-link the series after sending + args 
(argparse.Namespace): 'send' arguments provided + autolink_wait (int): Number of seconds to wait for the autolink to + succeed + """ + ser, version = self._parse_series_and_version(name, None) + if not ser.idnum: + raise ValueError(f"Series '{ser.name}' not found in database") + + args.branch = self._get_branch_name(ser.name, version) + likely_sent = send.send(args, git_dir=self.gitdir, cwd=self.topdir) + + if likely_sent and autolink: + print(f'Autolinking with Patchwork ({autolink_wait} seconds)') + self.link_auto(pwork, name, version, True, wait_s=autolink_wait) + + def archive(self, series): + """Archive a series + + Args: + series (str): Name of series to use, or None to use current branch + """ + ser = self._parse_series(series, include_archived=True) + if not ser.idnum: + raise ValueError(f"Series '{ser.name}' not found in database") + + svlist = self.db.ser_ver_get_for_series(ser.idnum) + + # Figure out the tags we will create + tag_info = {} + now = self.get_now() + now_str = now.strftime('%d%b%y').lower() + for svi in svlist: + name = self._get_branch_name(ser.name, svi.version) + if not gitutil.check_branch(name, git_dir=self.gitdir): + raise ValueError(f"No branch named '{name}'") + tag_info[svi.version] = [svi.idnum, name, f'{name}-{now_str}'] + + # Create the tags + repo = pygit2.init_repository(self.gitdir) + for _, (idnum, name, tag_name) in tag_info.items(): + commit = repo.revparse_single(name) + repo.create_tag(tag_name, commit.hex, + pygit2.enums.ObjectType.COMMIT, + commit.author, commit.message) + + # Update the database + for idnum, name, tag_name in tag_info.values(): + self.db.ser_ver_set_archive_tag(idnum, tag_name) + + # Delete the branches + for idnum, name, tag_name in tag_info.values(): + # Detach HEAD from the branch if pointing to this branch + commit = repo.revparse_single(name) + if repo.head.target == commit.oid: + repo.set_head(commit.oid) + + repo.branches.delete(name) + + self.db.series_set_archived(ser.idnum, True) + 
self.commit() + + def unarchive(self, series): + """Unarchive a series + + Args: + series (str): Name of series to use, or None to use current branch + """ + ser = self._parse_series(series, include_archived=True) + if not ser.idnum: + raise ValueError(f"Series '{ser.name}' not found in database") + self.db.series_set_archived(ser.idnum, False) + + svlist = self.db.ser_ver_get_for_series(ser.idnum) + + # Collect the tags + repo = pygit2.init_repository(self.gitdir) + tag_info = {} + for svi in svlist: + name = self._get_branch_name(ser.name, svi.version) + target = repo.revparse_single(svi.archive_tag) + tag_info[svi.idnum] = name, svi.archive_tag, target + + # Make sure the branches don't exist + for name, tag_name, tag in tag_info.values(): + if name in repo.branches: + raise ValueError( + f"Cannot restore branch '{name}': already exists") + + # Recreate the branches + for name, tag_name, tag in tag_info.values(): + target = repo.get(tag.target) + repo.branches.create(name, target) + + # Delete the tags + for name, tag_name, tag in tag_info.values(): + repo.references.delete(f'refs/tags/{tag_name}') + + # Update the database + for idnum, (name, tag_name, tag) in tag_info.items(): + self.db.ser_ver_set_archive_tag(idnum, None) + + self.commit() + + def status(self, pwork, series, version, show_comments, + show_cover_comments=False): + """Show the series status from patchwork + + Args: + pwork (Patchwork): Patchwork object to use + series (str): Name of series to use, or None to use current branch + version (int): Version number, or None to detect from name + show_comments (bool): Show all comments on each patch + show_cover_comments (bool): Show all comments on the cover letter + """ + branch, series, version, _, _, link, _, _ = self._get_patches( + series, version) + if not link: + raise ValueError( + f"Series '{series.name}' v{version} has no patchwork link: " + f"Try 'patman series -s {branch} autolink'") + status.check_and_show_status( + series, link, branch, 
None, False, show_comments, + show_cover_comments, pwork, self.gitdir) + + def summary(self, series): + """Show summary information for all series + + Args: + series (str): Name of series to use + """ + print(f"{'Name':17} Status Description") + print(f"{'-' * 17} {'-' * 6} {'-' * 30}") + if series is not None: + self._summary_one(self._parse_series(series)) + return + + sdict = self.db.series_get_dict() + for ser in sdict.values(): + self._summary_one(ser) + + def gather(self, pwork, series, version, show_comments, + show_cover_comments, gather_tags, dry_run=False): + """Gather any new tags from Patchwork, optionally showing comments + + Args: + pwork (Patchwork): Patchwork object to use + series (str): Name of series to use, or None to use current branch + version (int): Version number, or None to detect from name + show_comments (bool): True to show the comments on each patch + show_cover_comments (bool): True to show the comments on the cover + letter + gather_tags (bool): True to gather review/test tags + dry_run (bool): True to do a dry run (database is not updated) + """ + ser, version = self._parse_series_and_version(series, version) + self._ensure_version(ser, version) + svid, link = self._get_series_svid_link(ser.idnum, version) + if not link: + raise ValueError( + "No patchwork link is available: use 'patman series autolink'") + tout.info( + f"Updating series '{ser.name}' version {version} " + f"from link '{link}'") + + loop = asyncio.get_event_loop() + with pwork.collect_stats() as stats: + cover, patches = loop.run_until_complete(self._gather( + pwork, link, show_cover_comments)) + + with terminal.pager(): + updated, updated_cover = self._sync_one( + svid, ser.name, version, show_comments, show_cover_comments, + gather_tags, cover, patches, dry_run) + tout.info(f"{updated} patch{'es' if updated != 1 else ''}" + f"{' and cover letter' if updated_cover else ''} " + f'updated ({stats.request_count} requests)') + + if not dry_run: + self.commit() + else: + 
self.rollback() + tout.info('Dry run completed') + + def gather_all(self, pwork, show_comments, show_cover_comments, + sync_all_versions, gather_tags, dry_run=False): + to_fetch, missing = self._get_fetch_dict(sync_all_versions) + + loop = asyncio.get_event_loop() + result, requests = loop.run_until_complete(self._do_series_sync_all( + pwork, to_fetch)) + + with terminal.pager(): + tot_updated = 0 + tot_cover = 0 + add_newline = False + for (svid, sync), (cover, patches) in zip(to_fetch.items(), + result): + if add_newline: + tout.info('') + tout.info(f"Syncing '{sync.series_name}' v{sync.version}") + updated, updated_cover = self._sync_one( + svid, sync.series_name, sync.version, show_comments, + show_cover_comments, gather_tags, cover, patches, dry_run) + tot_updated += updated + tot_cover += updated_cover + add_newline = gather_tags + + tout.info('') + tout.info( + f"{tot_updated} patch{'es' if tot_updated != 1 else ''} and " + f"{tot_cover} cover letter{'s' if tot_cover != 1 else ''} " + f'updated, {missing} missing ' + f"link{'s' if missing != 1 else ''} ({requests} requests)") + if not dry_run: + self.commit() + else: + self.rollback() + tout.info('Dry run completed') + + def upstream_add(self, name, url): + """Add a new upstream tree + + Args: + name (str): Name of the tree + url (str): URL for the tree + """ + self.db.upstream_add(name, url) + self.commit() + + def upstream_list(self): + """List the upstream repos + + Shows a list of the repos, obtained from the database + """ + udict = self.get_upstream_dict() + + for name, items in udict.items(): + url, is_default = items + default = 'default' if is_default else '' + print(f'{name:15.15} {default:8} {url}') + + def upstream_set_default(self, name): + """Set the default upstream target + + Args: + name (str): Name of the upstream remote to set as default, or None + for none + """ + self.db.upstream_set_default(name) + self.commit() + + def upstream_get_default(self): + """Get the default upstream target + 
+ Return: + str: Name of the upstream remote to set as default, or None if none + """ + return self.db.upstream_get_default() + + def upstream_delete(self, name): + """Delete an upstream target + + Args: + name (str): Name of the upstream remote to delete + """ + self.db.upstream_delete(name) + self.commit() + + def version_remove(self, name, version, dry_run=False): + """Remove a version of a series from the database + + Args: + name (str): Name of series to remove, or None to use current one + version (int): Version number to remove + dry_run (bool): True to do a dry run + """ + ser, version = self._parse_series_and_version(name, version) + name = ser.name + + versions = self._ensure_version(ser, version) + + if versions == [version]: + raise ValueError( + f"Series '{ser.name}' only has one version: remove the series") + + self.db.ser_ver_remove(ser.idnum, version) + if not dry_run: + self.commit() + else: + self.rollback() + + tout.info(f"Removed version {version} from series '{name}'") + if dry_run: + tout.info('Dry run completed') + + def version_change(self, name, version, new_version, dry_run=False): + """Change a version of a series to be a different version + + Args: + name (str): Name of series to remove, or None to use current one + version (int): Version number to change + new_version (int): New version + dry_run (bool): True to do a dry run + """ + ser, version = self._parse_series_and_version(name, version) + name = ser.name + + versions = self._ensure_version(ser, version) + vstr = list(map(str, versions)) + if version not in versions: + raise ValueError( + f"Series '{ser.name}' does not have v{version}: " + f"{' '.join(vstr)}") + + if not new_version: + raise ValueError('Please provide a new version number') + + if new_version in versions: + raise ValueError( + f"Series '{ser.name}' already has a v{new_version}: " + f"{' '.join(vstr)}") + + new_name = self._join_name_version(ser.name, new_version) + + svid = self.get_series_svid(ser.idnum, version) 
+ pwc = self.get_pcommit_dict(svid) + count = len(pwc.values()) + series = patchstream.get_metadata(name, 0, count, git_dir=self.gitdir) + + self.update_series(name, series, version, new_name, dry_run, + add_vers=new_version, switch=True) + self.db.ser_ver_set_version(svid, new_version) + + if not dry_run: + self.commit() + else: + self.rollback() + + tout.info(f"Changed version {version} in series '{ser.name}' " + f"to {new_version} named '{new_name}'") + if dry_run: + tout.info('Dry run completed') diff --git a/tools/patman/database.py b/tools/patman/database.py new file mode 100644 index 00000000000..9c25b04a720 --- /dev/null +++ b/tools/patman/database.py @@ -0,0 +1,823 @@ +# SPDX-License-Identifier: GPL-2.0+ +# +# Copyright 2025 Simon Glass <sjg@chromium.org> +# +"""Handles the patman database + +This uses sqlite3 with a local file. + +To adjsut the schema, increment LATEST, create a migrate_to_v<x>() function +and write some code in migrate_to() to call it. +""" + +from collections import namedtuple, OrderedDict +import os +import sqlite3 + +from u_boot_pylib import tools +from u_boot_pylib import tout +from patman.series import Series + +# Schema version (version 0 means there is no database yet) +LATEST = 4 + +# Information about a series/version record +SerVer = namedtuple( + 'SER_VER', + 'idnum,series_id,version,link,cover_id,cover_num_comments,name,' + 'archive_tag') + +# Record from the pcommit table: +# idnum (int): record ID +# seq (int): Patch sequence in series (0 is first) +# subject (str): patch subject +# svid (int): ID of series/version record in ser_ver table +# change_id (str): Change-ID value +# state (str): Current status in patchwork +# patch_id (int): Patchwork's patch ID for this patch +# num_comments (int): Number of comments attached to the commit +Pcommit = namedtuple( + 'PCOMMIT', + 'idnum,seq,subject,svid,change_id,state,patch_id,num_comments') + + +class Database: + """Database of information used by patman""" + + # dict of 
databases: + # key: filename + # value: Database object + instances = {} + + def __init__(self, db_path): + """Set up a new database object + + Args: + db_path (str): Path to the database + """ + if db_path in Database.instances: + # Two connections to the database can cause: + # sqlite3.OperationalError: database is locked + raise ValueError(f"There is already a database for '{db_path}'") + self.con = None + self.cur = None + self.db_path = db_path + self.is_open = False + Database.instances[db_path] = self + + @staticmethod + def get_instance(db_path): + """Get the database instance for a path + + This is provides to ensure that different callers can obtain the + same database object when accessing the same database file. + + Args: + db_path (str): Path to the database + + Return: + Database: Database instance, which is created if necessary + """ + db = Database.instances.get(db_path) + if db: + return db, False + return Database(db_path), True + + def start(self): + """Open the database read for use, migrate to latest schema""" + self.open_it() + self.migrate_to(LATEST) + + def open_it(self): + """Open the database, creating it if necessary""" + if self.is_open: + raise ValueError('Already open') + if not os.path.exists(self.db_path): + tout.warning(f'Creating new database {self.db_path}') + self.con = sqlite3.connect(self.db_path) + self.cur = self.con.cursor() + self.is_open = True + + def close(self): + """Close the database""" + if not self.is_open: + raise ValueError('Already closed') + self.con.close() + self.cur = None + self.con = None + self.is_open = False + + def create_v1(self): + """Create a database with the v1 schema""" + self.cur.execute( + 'CREATE TABLE series (id INTEGER PRIMARY KEY AUTOINCREMENT,' + 'name UNIQUE, desc, archived BIT)') + + # Provides a series_id/version pair, which is used to refer to a + # particular series version sent to patchwork. 
This stores the link + # to patchwork + self.cur.execute( + 'CREATE TABLE ser_ver (id INTEGER PRIMARY KEY AUTOINCREMENT,' + 'series_id INTEGER, version INTEGER, link,' + 'FOREIGN KEY (series_id) REFERENCES series (id))') + + self.cur.execute( + 'CREATE TABLE upstream (name UNIQUE, url, is_default BIT)') + + # change_id is the Change-Id + # patch_id is the ID of the patch on the patchwork server + self.cur.execute( + 'CREATE TABLE pcommit (id INTEGER PRIMARY KEY AUTOINCREMENT,' + 'svid INTEGER, seq INTEGER, subject, patch_id INTEGER, ' + 'change_id, state, num_comments INTEGER, ' + 'FOREIGN KEY (svid) REFERENCES ser_ver (id))') + + self.cur.execute( + 'CREATE TABLE settings (name UNIQUE, proj_id INT, link_name)') + + def _migrate_to_v2(self): + """Add a schema_version table""" + self.cur.execute('CREATE TABLE schema_version (version INTEGER)') + + def _migrate_to_v3(self): + """Store the number of cover-letter comments in the schema""" + self.cur.execute('ALTER TABLE ser_ver ADD COLUMN cover_id') + self.cur.execute('ALTER TABLE ser_ver ADD COLUMN cover_num_comments ' + 'INTEGER') + self.cur.execute('ALTER TABLE ser_ver ADD COLUMN name') + + def _migrate_to_v4(self): + """Add an archive tag for each ser_ver""" + self.cur.execute('ALTER TABLE ser_ver ADD COLUMN archive_tag') + + def migrate_to(self, dest_version): + """Migrate the database to the selected version + + Args: + dest_version (int): Version to migrate to + """ + while True: + version = self.get_schema_version() + if version == dest_version: + break + + self.close() + tools.write_file(f'{self.db_path}old.v{version}', + tools.read_file(self.db_path)) + + version += 1 + tout.info(f'Update database to v{version}') + self.open_it() + if version == 1: + self.create_v1() + elif version == 2: + self._migrate_to_v2() + elif version == 3: + self._migrate_to_v3() + elif version == 4: + self._migrate_to_v4() + + # Save the new version if we have a schema_version table + if version > 1: + self.cur.execute('DELETE FROM 
schema_version') + self.cur.execute( + 'INSERT INTO schema_version (version) VALUES (?)', + (version,)) + self.commit() + + def get_schema_version(self): + """Get the version of the database's schema + + Return: + int: Database version, 0 means there is no data; anything less than + LATEST means the schema is out of date and must be updated + """ + # If there is no database at all, assume v0 + version = 0 + try: + self.cur.execute('SELECT name FROM series') + except sqlite3.OperationalError: + return 0 + + # If there is no schema, assume v1 + try: + self.cur.execute('SELECT version FROM schema_version') + version = self.cur.fetchone()[0] + except sqlite3.OperationalError: + return 1 + return version + + def execute(self, query, parameters=()): + """Execute a database query + + Args: + query (str): Query string + parameters (list of values): Parameters to pass + + Return: + + """ + return self.cur.execute(query, parameters) + + def commit(self): + """Commit changes to the database""" + self.con.commit() + + def rollback(self): + """Roll back changes to the database""" + self.con.rollback() + + def lastrowid(self): + """Get the last row-ID reported by the database + + Return: + int: Value for lastrowid + """ + return self.cur.lastrowid + + def rowcount(self): + """Get the row-count reported by the database + + Return: + int: Value for rowcount + """ + return self.cur.rowcount + + def _get_series_list(self, include_archived): + """Get a list of Series objects from the database + + Args: + include_archived (bool): True to include archives series + + Return: + list of Series + """ + res = self.execute( + 'SELECT id, name, desc FROM series ' + + ('WHERE archived = 0' if not include_archived else '')) + return [Series.from_fields(idnum=idnum, name=name, desc=desc) + for idnum, name, desc in res.fetchall()] + + # series functions + + def series_get_dict_by_id(self, include_archived=False): + """Get a dict of Series objects from the database + + Args: + include_archived 
(bool): True to include archives series + + Return: + OrderedDict: + key: series ID + value: Series with idnum, name and desc filled out + """ + sdict = OrderedDict() + for ser in self._get_series_list(include_archived): + sdict[ser.idnum] = ser + return sdict + + def series_find_by_name(self, name, include_archived=False): + """Find a series and return its details + + Args: + name (str): Name to search for + include_archived (bool): True to include archives series + + Returns: + idnum, or None if not found + """ + res = self.execute( + 'SELECT id FROM series WHERE name = ?' + + ('AND archived = 0' if not include_archived else ''), (name,)) + recs = res.fetchall() + + # This shouldn't happen + assert len(recs) <= 1, 'Expected one match, but multiple found' + + if len(recs) != 1: + return None + return recs[0][0] + + def series_get_info(self, idnum): + """Get information for a series from the database + + Args: + idnum (int): Series ID to look up + + Return: tuple: + str: Series name + str: Series description + + Raises: + ValueError: Series is not found + """ + res = self.execute('SELECT name, desc FROM series WHERE id = ?', + (idnum,)) + recs = res.fetchall() + if len(recs) != 1: + raise ValueError(f'No series found (id {idnum} len {len(recs)})') + return recs[0] + + def series_get_dict(self, include_archived=False): + """Get a dict of Series objects from the database + + Args: + include_archived (bool): True to include archives series + + Return: + OrderedDict: + key: series name + value: Series with idnum, name and desc filled out + """ + sdict = OrderedDict() + for ser in self._get_series_list(include_archived): + sdict[ser.name] = ser + return sdict + + def series_get_version_list(self, series_idnum): + """Get a list of the versions available for a series + + Args: + series_idnum (int): ID of series to look up + + Return: + str: List of versions, which may be empty if the series is in the + process of being added + """ + res = self.execute('SELECT version FROM 
ser_ver WHERE series_id = ?', + (series_idnum,)) + return [x[0] for x in res.fetchall()] + + def series_get_max_version(self, series_idnum): + """Get the highest version number available for a series + + Args: + series_idnum (int): ID of series to look up + + Return: + int: Maximum version number + """ + res = self.execute( + 'SELECT MAX(version) FROM ser_ver WHERE series_id = ?', + (series_idnum,)) + return res.fetchall()[0][0] + + def series_get_all_max_versions(self): + """Find the latest version of all series + + Return: list of: + int: ser_ver ID + int: series ID + int: Maximum version + """ + res = self.execute( + 'SELECT id, series_id, MAX(version) FROM ser_ver ' + 'GROUP BY series_id') + return res.fetchall() + + def series_add(self, name, desc): + """Add a new series record + + The new record is set to not archived + + Args: + name (str): Series name + desc (str): Series description + + Return: + int: ID num of the new series record + """ + self.execute( + 'INSERT INTO series (name, desc, archived) ' + f"VALUES ('{name}', '{desc}', 0)") + return self.lastrowid() + + def series_remove(self, idnum): + """Remove a series from the database + + The series must exist + + Args: + idnum (int): ID num of series to remove + """ + self.execute('DELETE FROM series WHERE id = ?', (idnum,)) + assert self.rowcount() == 1 + + def series_remove_by_name(self, name): + """Remove a series from the database + + Args: + name (str): Name of series to remove + + Raises: + ValueError: Series does not exist (database is rolled back) + """ + self.execute('DELETE FROM series WHERE name = ?', (name,)) + if self.rowcount() != 1: + self.rollback() + raise ValueError(f"No such series '{name}'") + + def series_set_archived(self, series_idnum, archived): + """Update archive flag for a series + + Args: + series_idnum (int): ID num of the series + archived (bool): Whether to mark the series as archived or + unarchived + """ + self.execute( + 'UPDATE series SET archived = ? 
WHERE id = ?', + (archived, series_idnum)) + + def series_set_name(self, series_idnum, name): + """Update name for a series + + Args: + series_idnum (int): ID num of the series + name (str): new name to use + """ + self.execute( + 'UPDATE series SET name = ? WHERE id = ?', (name, series_idnum)) + + # ser_ver functions + + def ser_ver_get_link(self, series_idnum, version): + """Get the link for a series version + + Args: + series_idnum (int): ID num of the series + version (int): Version number to search for + + Return: + str: Patchwork link as a string, e.g. '12325', or None if none + + Raises: + ValueError: Multiple matches are found + """ + res = self.execute( + 'SELECT link FROM ser_ver WHERE ' + f"series_id = {series_idnum} AND version = '{version}'") + recs = res.fetchall() + if not recs: + return None + if len(recs) > 1: + raise ValueError('Expected one match, but multiple matches found') + return recs[0][0] + + def ser_ver_set_link(self, series_idnum, version, link): + """Set the link for a series version + + Args: + series_idnum (int): ID num of the series + version (int): Version number to search for + link (str): Patchwork link for the ser_ver + + Return: + bool: True if the record was found and updated, else False + """ + if link is None: + link = '' + self.execute( + 'UPDATE ser_ver SET link = ? WHERE series_id = ? AND version = ?', + (str(link), series_idnum, version)) + return self.rowcount() != 0 + + def ser_ver_set_info(self, info): + """Set the info for a series version + + Args: + info (SER_VER): Info to set. Only two options are supported: + 1: svid,cover_id,cover_num_comments,name + 2: svid,name + + Return: + bool: True if the record was found and updated, else False + """ + assert info.idnum is not None + if info.cover_id: + assert info.series_id is None + self.execute( + 'UPDATE ser_ver SET cover_id = ?, cover_num_comments = ?, ' + 'name = ? 
WHERE id = ?', + (info.cover_id, info.cover_num_comments, info.name, + info.idnum)) + else: + assert not info.cover_id + assert not info.cover_num_comments + assert not info.series_id + assert not info.version + assert not info.link + self.execute('UPDATE ser_ver SET name = ? WHERE id = ?', + (info.name, info.idnum)) + + return self.rowcount() != 0 + + def ser_ver_set_version(self, svid, version): + """Sets the version for a ser_ver record + + Args: + svid (int): Record ID to update + version (int): Version number to add + + Raises: + ValueError: svid was not found + """ + self.execute( + 'UPDATE ser_ver SET version = ? WHERE id = ?', (version, svid)) + if self.rowcount() != 1: + raise ValueError(f'No ser_ver updated (svid {svid})') + + def ser_ver_set_archive_tag(self, svid, tag): + """Sets the archive tag for a ser_ver record + + Args: + svid (int): Record ID to update + tag (tag): Tag to add + + Raises: + ValueError: svid was not found + """ + self.execute( + 'UPDATE ser_ver SET archive_tag = ? 
WHERE id = ?', (tag, svid)) + if self.rowcount() != 1: + raise ValueError(f'No ser_ver updated (svid {svid})') + + def ser_ver_add(self, series_idnum, version, link=None): + """Add a new ser_ver record + + Args: + series_idnum (int): ID num of the series which is getting a new + version + version (int): Version number to add + link (str): Patchwork link, or None if not known + + Return: + int: ID num of the new ser_ver record + """ + self.execute( + 'INSERT INTO ser_ver (series_id, version, link) VALUES (?, ?, ?)', + (series_idnum, version, link)) + return self.lastrowid() + + def ser_ver_get_for_series(self, series_idnum, version=None): + """Get a list of ser_ver records for a given series ID + + Args: + series_idnum (int): ID num of the series to search + version (int): Version number to search for, or None for all + + Return: + SER_VER: Requested information + + Raises: + ValueError: There is no matching idnum/version + """ + base = ('SELECT id, series_id, version, link, cover_id, ' + 'cover_num_comments, name, archive_tag FROM ser_ver ' + 'WHERE series_id = ?') + if version: + res = self.execute(base + ' AND version = ?', + (series_idnum, version)) + else: + res = self.execute(base, (series_idnum,)) + recs = res.fetchall() + if not recs: + raise ValueError( + f'No matching series for id {series_idnum} version {version}') + if version: + return SerVer(*recs[0]) + return [SerVer(*x) for x in recs] + + def ser_ver_get_ids_for_series(self, series_idnum, version=None): + """Get a list of ser_ver records for a given series ID + + Args: + series_idnum (int): ID num of the series to search + version (int): Version number to search for, or None for all + + Return: + list of int: List of svids for the matching records + """ + if version: + res = self.execute( + 'SELECT id FROM ser_ver WHERE series_id = ? 
AND version = ?', + (series_idnum, version)) + else: + res = self.execute( + 'SELECT id FROM ser_ver WHERE series_id = ?', (series_idnum,)) + return list(res.fetchall()[0]) + + def ser_ver_get_list(self): + """Get a list of patchwork entries from the database + + Return: + list of SER_VER + """ + res = self.execute( + 'SELECT id, series_id, version, link, cover_id, ' + 'cover_num_comments, name, archive_tag FROM ser_ver') + items = res.fetchall() + return [SerVer(*x) for x in items] + + def ser_ver_remove(self, series_idnum, version=None, remove_pcommits=True, + remove_series=True): + """Delete a ser_ver record + + Removes the record which has the given series ID num and version + + Args: + series_idnum (int): ID num of the series + version (int): Version number, or None to remove all versions + remove_pcommits (bool): True to remove associated pcommits too + remove_series (bool): True to remove the series if versions is None + """ + if remove_pcommits: + # Figure out svids to delete + svids = self.ser_ver_get_ids_for_series(series_idnum, version) + + self.pcommit_delete_list(svids) + + if version: + self.execute( + 'DELETE FROM ser_ver WHERE series_id = ? 
AND version = ?', + (series_idnum, version)) + else: + self.execute( + 'DELETE FROM ser_ver WHERE series_id = ?', + (series_idnum,)) + if not version and remove_series: + self.series_remove(series_idnum) + + # pcommit functions + + def pcommit_get_list(self, find_svid=None): + """Get a dict of pcommits entries from the database + + Args: + find_svid (int): If not None, finds the records associated with a + particular series and version; otherwise returns all records + + Return: + list of PCOMMIT: pcommit records + """ + query = ('SELECT id, seq, subject, svid, change_id, state, patch_id, ' + 'num_comments FROM pcommit') + if find_svid is not None: + query += f' WHERE svid = {find_svid}' + res = self.execute(query) + return [Pcommit(*rec) for rec in res.fetchall()] + + def pcommit_add_list(self, svid, pcommits): + """Add records to the pcommit table + + Args: + svid (int): ser_ver ID num + pcommits (list of PCOMMIT): Only seq, subject, change_id are + uses; svid comes from the argument passed in and the others + are assumed to be obtained from patchwork later + """ + for pcm in pcommits: + self.execute( + 'INSERT INTO pcommit (svid, seq, subject, change_id) VALUES ' + '(?, ?, ?, ?)', (svid, pcm.seq, pcm.subject, pcm.change_id)) + + def pcommit_delete(self, svid): + """Delete pcommit records for a given ser_ver ID + + Args_: + svid (int): ser_ver ID num of records to delete + """ + self.execute('DELETE FROM pcommit WHERE svid = ?', (svid,)) + + def pcommit_delete_list(self, svid_list): + """Delete pcommit records for a given set of ser_ver IDs + + Args_: + svid (list int): ser_ver ID nums of records to delete + """ + vals = ', '.join([str(x) for x in svid_list]) + self.execute('DELETE FROM pcommit WHERE svid IN (?)', (vals,)) + + def pcommit_update(self, pcm): + """Update a pcommit record + + Args: + pcm (PCOMMIT): Information to write; only the idnum, state, + patch_id and num_comments are used + + Return: + True if the data was written + """ + self.execute( + 
'UPDATE pcommit SET ' + 'patch_id = ?, state = ?, num_comments = ? WHERE id = ?', + (pcm.patch_id, pcm.state, pcm.num_comments, pcm.idnum)) + return self.rowcount() > 0 + + # upstream functions + + def upstream_add(self, name, url): + """Add a new upstream record + + Args: + name (str): Name of the tree + url (str): URL for the tree + + Raises: + ValueError if the name already exists in the database + """ + try: + self.execute( + 'INSERT INTO upstream (name, url) VALUES (?, ?)', (name, url)) + except sqlite3.IntegrityError as exc: + if 'UNIQUE constraint failed: upstream.name' in str(exc): + raise ValueError(f"Upstream '{name}' already exists") from exc + + def upstream_set_default(self, name): + """Mark (only) the given upstream as the default + + Args: + name (str): Name of the upstream remote to set as default, or None + + Raises: + ValueError if more than one name matches (should not happen); + database is rolled back + """ + self.execute("UPDATE upstream SET is_default = 0") + if name is not None: + self.execute( + 'UPDATE upstream SET is_default = 1 WHERE name = ?', (name,)) + if self.rowcount() != 1: + self.rollback() + raise ValueError(f"No such upstream '{name}'") + + def upstream_get_default(self): + """Get the name of the default upstream + + Return: + str: Default-upstream name, or None if there is no default + """ + res = self.execute( + "SELECT name FROM upstream WHERE is_default = 1") + recs = res.fetchall() + if len(recs) != 1: + return None + return recs[0][0] + + def upstream_delete(self, name): + """Delete an upstream target + + Args: + name (str): Name of the upstream remote to delete + + Raises: + ValueError: Upstream does not exist (database is rolled back) + """ + self.execute(f"DELETE FROM upstream WHERE name = '{name}'") + if self.rowcount() != 1: + self.rollback() + raise ValueError(f"No such upstream '{name}'") + + def upstream_get_dict(self): + """Get a list of upstream entries from the database + + Return: + OrderedDict: + key (str): 
upstream name + value (str): url + """ + res = self.execute('SELECT name, url, is_default FROM upstream') + udict = OrderedDict() + for name, url, is_default in res.fetchall(): + udict[name] = url, is_default + return udict + + # settings functions + + def settings_update(self, name, proj_id, link_name): + """Set the patchwork settings of the project + + Args: + name (str): Name of the project to use in patchwork + proj_id (int): Project ID for the project + link_name (str): Link name for the project + """ + self.execute('DELETE FROM settings') + self.execute( + 'INSERT INTO settings (name, proj_id, link_name) ' + 'VALUES (?, ?, ?)', (name, proj_id, link_name)) + + def settings_get(self): + """Get the patchwork settings of the project + + Returns: + tuple or None if there are no settings: + name (str): Project name, e.g. 'U-Boot' + proj_id (int): Patchworks project ID for this project + link_name (str): Patchwork's link-name for the project + """ + res = self.execute("SELECT name, proj_id, link_name FROM settings") + recs = res.fetchall() + if len(recs) != 1: + return None + return recs[0] diff --git a/tools/patman/func_test.py b/tools/patman/func_test.py new file mode 100644 index 00000000000..d029181765c --- /dev/null +++ b/tools/patman/func_test.py @@ -0,0 +1,1342 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: GPL-2.0+ +# +# Copyright 2017 Google, Inc +# + +"""Functional tests for checking that patman behaves correctly""" + +import asyncio +import contextlib +import os +import pathlib +import re +import shutil +import sys +import unittest + +import pygit2 + +from u_boot_pylib import command +from u_boot_pylib import gitutil +from u_boot_pylib import terminal +from u_boot_pylib import tools + +from patman.commit import Commit +from patman import control +from patman import patchstream +from patman.patchstream import PatchStream +from patman import patchwork +from patman import send +from patman.series import Series +from patman import status +from 
patman.test_common import TestCommon + +PATMAN_DIR = pathlib.Path(__file__).parent +TEST_DATA_DIR = PATMAN_DIR / 'test/' + + +@contextlib.contextmanager +def directory_excursion(directory): + """Change directory to `directory` for a limited to the context block.""" + current = os.getcwd() + try: + os.chdir(directory) + yield + finally: + os.chdir(current) + + +class TestFunctional(unittest.TestCase, TestCommon): + """Functional tests for checking that patman behaves correctly""" + fred = 'Fred Bloggs <f.bloggs@napier.net>' + joe = 'Joe Bloggs <joe@napierwallies.co.nz>' + mary = 'Mary Bloggs <mary@napierwallies.co.nz>' + commits = None + patches = None + + def setUp(self): + TestCommon.setUp(self) + self.repo = None + self._patman_pathname = sys.argv[0] + self._patman_dir = os.path.dirname(os.path.realpath(sys.argv[0])) + + def tearDown(self): + TestCommon.tearDown(self) + + @staticmethod + def _get_path(fname): + """Get the path to a test file + + Args: + fname (str): Filename to obtain + + Returns: + str: Full path to file in the test directory + """ + return TEST_DATA_DIR / fname + + @classmethod + def _get_text(cls, fname): + """Read a file as text + + Args: + fname (str): Filename to read + + Returns: + str: Contents of file + """ + return open(cls._get_path(fname), encoding='utf-8').read() + + @classmethod + def _get_patch_name(cls, subject): + """Get the filename of a patch given its subject + + Args: + subject (str): Patch subject + + Returns: + str: Filename for that patch + """ + fname = re.sub('[ :]', '-', subject) + return fname.replace('--', '-') + + def _create_patches_for_test(self, series): + """Create patch files for use by tests + + This copies patch files from the test directory as needed by the series + + Args: + series (Series): Series containing commits to convert + + Returns: + tuple: + str: Cover-letter filename, or None if none + fname_list: list of str, each a patch filename + """ + cover_fname = None + fname_list = [] + for i, commit in 
enumerate(series.commits): + clean_subject = self._get_patch_name(commit.subject) + src_fname = '%04d-%s.patch' % (i + 1, clean_subject[:52]) + fname = os.path.join(self.tmpdir, src_fname) + shutil.copy(self._get_path(src_fname), fname) + fname_list.append(fname) + if series.get('cover'): + src_fname = '0000-cover-letter.patch' + cover_fname = os.path.join(self.tmpdir, src_fname) + fname = os.path.join(self.tmpdir, src_fname) + shutil.copy(self._get_path(src_fname), fname) + + return cover_fname, fname_list + + def test_basic(self): + """Tests the basic flow of patman + + This creates a series from some hard-coded patches build from a simple + tree with the following metadata in the top commit: + + Series-to: u-boot + Series-prefix: RFC + Series-postfix: some-branch + Series-cc: Stefan Brüns <stefan.bruens@rwth-aachen.de> + Cover-letter-cc: Lord Mëlchett <clergy@palace.gov> + Series-version: 3 + Patch-cc: fred + Series-process-log: sort, uniq + Series-changes: 4 + - Some changes + - Multi + line + change + + Commit-changes: 2 + - Changes only for this commit + + Cover-changes: 4 + - Some notes for the cover letter + + Cover-letter: + test: A test patch series + This is a test of how the cover + letter + works + END + + and this in the first commit: + + Commit-changes: 2 + - second revision change + + Series-notes: + some notes + about some things + from the first commit + END + + Commit-notes: + Some notes about + the first commit + END + + with the following commands: + + git log -n2 --reverse >/path/to/tools/patman/test/test01.txt + git format-patch --subject-prefix RFC --cover-letter HEAD~2 + mv 00* /path/to/tools/patman/test + + It checks these aspects: + - git log can be processed by patchstream + - emailing patches uses the correct command + - CC file has information on each commit + - cover letter has the expected text and subject + - each patch has the correct subject + - dry-run information prints out correctly + - unicode is handled correctly + - 
Series-to, Series-cc, Series-prefix, Series-postfix, Cover-letter + - Cover-letter-cc, Series-version, Series-changes, Series-notes + - Commit-notes + """ + process_tags = True + ignore_bad_tags = False + stefan = (b'Stefan Br\xc3\xbcns <stefan.bruens@rwth-aachen.de>' + .decode('utf-8')) + rick = 'Richard III <richard@palace.gov>' + mel = b'Lord M\xc3\xablchett <clergy@palace.gov>'.decode('utf-8') + add_maintainers = [stefan, rick] + dry_run = True + in_reply_to = mel + count = 2 + alias = { + 'fdt': ['simon'], + 'u-boot': ['u-boot@lists.denx.de'], + 'simon': [self.leb], + 'fred': [self.fred], + 'joe': [self.joe], + } + + text = self._get_text('test01.txt') + series = patchstream.get_metadata_for_test(text) + series.base_commit = Commit('1a44532') + series.branch = 'mybranch' + cover_fname, args = self._create_patches_for_test(series) + get_maintainer_script = str(pathlib.Path(__file__).parent.parent.parent + / 'get_maintainer.pl') + ' --norolestats' + with terminal.capture() as out: + patchstream.fix_patches(series, args) + if cover_fname and series.get('cover'): + patchstream.insert_cover_letter(cover_fname, series, count) + series.DoChecks() + cc_file = series.MakeCcFile(process_tags, cover_fname, + not ignore_bad_tags, add_maintainers, + None, get_maintainer_script, alias) + cmd = gitutil.email_patches( + series, cover_fname, args, dry_run, not ignore_bad_tags, + cc_file, alias, in_reply_to=in_reply_to, thread=None) + series.ShowActions(args, cmd, process_tags, alias) + cc_lines = tools.read_file(cc_file, binary=False).splitlines() + os.remove(cc_file) + + itr = iter(out[0].getvalue().splitlines()) + self.assertEqual('Cleaned %s patches' % len(series.commits), + next(itr)) + self.assertEqual('Change log missing for v2', next(itr)) + self.assertEqual('Change log missing for v3', next(itr)) + self.assertEqual('Change log for unknown version v4', next(itr)) + self.assertEqual("Alias 'pci' not found", next(itr)) + while next(itr) != 'Cc processing complete': + pass 
+ self.assertIn('Dry run', next(itr)) + self.assertEqual('', next(itr)) + self.assertIn('Send a total of %d patches' % count, next(itr)) + prev = next(itr) + for i in range(len(series.commits)): + self.assertEqual(' %s' % args[i], prev) + while True: + prev = next(itr) + if 'Cc:' not in prev: + break + self.assertEqual('To: u-boot@lists.denx.de', prev) + self.assertEqual('Cc: %s' % stefan, next(itr)) + self.assertEqual('Version: 3', next(itr)) + self.assertEqual('Prefix:\t RFC', next(itr)) + self.assertEqual('Postfix:\t some-branch', next(itr)) + self.assertEqual('Cover: 4 lines', next(itr)) + self.assertEqual(' Cc: %s' % self.fred, next(itr)) + self.assertEqual(' Cc: %s' % self.joe, next(itr)) + self.assertEqual(' Cc: %s' % self.leb, + next(itr)) + self.assertEqual(' Cc: %s' % mel, next(itr)) + self.assertEqual(' Cc: %s' % rick, next(itr)) + expected = ('Git command: git send-email --annotate ' + '--in-reply-to="%s" --to u-boot@lists.denx.de ' + '--cc "%s" --cc-cmd "%s send --cc-cmd %s" %s %s' + % (in_reply_to, stefan, sys.argv[0], cc_file, cover_fname, + ' '.join(args))) + self.assertEqual(expected, next(itr)) + + self.assertEqual(('%s %s\0%s' % (args[0], rick, stefan)), cc_lines[0]) + self.assertEqual( + '%s %s\0%s\0%s\0%s\0%s' % (args[1], self.fred, self.joe, self.leb, + rick, stefan), + cc_lines[1]) + + expected = ''' +This is a test of how the cover +letter +works + +some notes +about some things +from the first commit + +Changes in v4: +- Multi + line + change +- Some changes +- Some notes for the cover letter +- fdt: Correct cast for sandbox in fdtdec_setup_mem_size_base() + +Simon Glass (2): + pci: Correct cast for sandbox + fdt: Correct cast for sandbox in fdtdec_setup_mem_size_base() + + cmd/pci.c | 3 ++- + fs/fat/fat.c | 1 + + lib/efi_loader/efi_memory.c | 1 + + lib/fdtdec.c | 3 ++- + 4 files changed, 6 insertions(+), 2 deletions(-) + +--\x20 +2.7.4 + +base-commit: 1a44532 +branch: mybranch +''' + lines = tools.read_file(cover_fname, 
binary=False).splitlines() + self.assertEqual( + 'Subject: [RFC PATCH some-branch v3 0/2] test: A test patch series', + lines[3]) + self.assertEqual(expected.splitlines(), lines[7:]) + + for i, fname in enumerate(args): + lines = tools.read_file(fname, binary=False).splitlines() + subject = [line for line in lines if line.startswith('Subject')] + self.assertEqual('Subject: [RFC %d/%d]' % (i + 1, count), + subject[0][:18]) + + # Check that we got our commit notes + start = 0 + expected = '' + + if i == 0: + start = 17 + expected = '''--- +Some notes about +the first commit + +(no changes since v2) + +Changes in v2: +- second revision change''' + elif i == 1: + start = 17 + expected = '''--- + +Changes in v4: +- Multi + line + change +- New +- Some changes + +Changes in v2: +- Changes only for this commit''' + + if expected: + expected = expected.splitlines() + self.assertEqual(expected, lines[start:(start+len(expected))]) + + def test_base_commit(self): + """Test adding a base commit with no cover letter""" + orig_text = self._get_text('test01.txt') + pos = orig_text.index( + 'commit 5ab48490f03051875ab13d288a4bf32b507d76fd') + text = orig_text[:pos] + series = patchstream.get_metadata_for_test(text) + series.base_commit = Commit('1a44532') + series.branch = 'mybranch' + cover_fname, args = self._create_patches_for_test(series) + self.assertFalse(cover_fname) + with terminal.capture() as out: + patchstream.fix_patches(series, args, insert_base_commit=True) + self.assertEqual('Cleaned 1 patch\n', out[0].getvalue()) + lines = tools.read_file(args[0], binary=False).splitlines() + pos = lines.index('-- ') + + # We expect these lines at the end: + # -- (with trailing space) + # 2.7.4 + # (empty) + # base-commit: xxx + # branch: xxx + self.assertEqual('base-commit: 1a44532', lines[pos + 3]) + self.assertEqual('branch: mybranch', lines[pos + 4]) + + def test_branch(self): + """Test creating patches from a branch""" + repo = self.make_git_tree() + target = 
repo.lookup_reference('refs/heads/first') + # pylint doesn't seem to find this + # pylint: disable=E1101 + self.repo.checkout(target, strategy=pygit2.GIT_CHECKOUT_FORCE) + control.setup() + orig_dir = os.getcwd() + try: + os.chdir(self.tmpdir) + + # Check that it can detect the current branch + self.assertEqual(2, gitutil.count_commits_to_branch(None)) + col = terminal.Color() + with terminal.capture() as _: + _, cover_fname, patch_files = send.prepare_patches( + col, branch=None, count=-1, start=0, end=0, + ignore_binary=False, signoff=True) + self.assertIsNone(cover_fname) + self.assertEqual(2, len(patch_files)) + + # Check that it can detect a different branch + self.assertEqual(3, gitutil.count_commits_to_branch('second')) + with terminal.capture() as _: + _, cover_fname, patch_files = send.prepare_patches( + col, branch='second', count=-1, start=0, end=0, + ignore_binary=False, signoff=True) + self.assertIsNotNone(cover_fname) + self.assertEqual(3, len(patch_files)) + + cover = tools.read_file(cover_fname, binary=False) + lines = cover.splitlines()[-2:] + base = repo.lookup_reference('refs/heads/base').target + self.assertEqual(f'base-commit: {base}', lines[0]) + self.assertEqual('branch: second', lines[1]) + + # Make sure that the base-commit is not present when it is in the + # cover letter + for fname in patch_files: + self.assertNotIn(b'base-commit:', tools.read_file(fname)) + + # Check that it can skip patches at the end + with terminal.capture() as _: + _, cover_fname, patch_files = send.prepare_patches( + col, branch='second', count=-1, start=0, end=1, + ignore_binary=False, signoff=True) + self.assertIsNotNone(cover_fname) + self.assertEqual(2, len(patch_files)) + + cover = tools.read_file(cover_fname, binary=False) + lines = cover.splitlines()[-2:] + base2 = repo.lookup_reference('refs/heads/second') + ref = base2.peel(pygit2.GIT_OBJ_COMMIT).parents[0].parents[0].id + self.assertEqual(f'base-commit: {ref}', lines[0]) + self.assertEqual('branch: 
second', lines[1]) + finally: + os.chdir(orig_dir) + + def test_custom_get_maintainer_script(self): + """Validate that a custom get_maintainer script gets used.""" + self.make_git_tree() + with directory_excursion(self.tmpdir): + # Setup git. + os.environ['GIT_CONFIG_GLOBAL'] = '/dev/null' + os.environ['GIT_CONFIG_SYSTEM'] = '/dev/null' + tools.run('git', 'config', 'user.name', 'Dummy') + tools.run('git', 'config', 'user.email', 'dumdum@dummy.com') + tools.run('git', 'branch', 'upstream') + tools.run('git', 'branch', '--set-upstream-to=upstream') + + # Setup patman configuration. + tools.write_file('.patman', '[settings]\n' + 'get_maintainer_script: dummy-script.sh\n' + 'check_patch: False\n' + 'add_maintainers: True\n', binary=False) + tools.write_file('dummy-script.sh', + '#!/usr/bin/env python3\n' + 'print("hello@there.com")\n', binary=False) + os.chmod('dummy-script.sh', 0x555) + tools.run('git', 'add', '.') + tools.run('git', 'commit', '-m', 'new commit') + + # Finally, do the test + with terminal.capture(): + output = tools.run(PATMAN_DIR / 'patman', '--dry-run') + # Assert the email address is part of the dry-run + # output. 
+ self.assertIn('hello@there.com', output) + + def test_tags(self): + """Test collection of tags in a patchstream""" + text = '''This is a patch + +Signed-off-by: Terminator +Reviewed-by: %s +Reviewed-by: %s +Tested-by: %s +''' % (self.joe, self.mary, self.leb) + pstrm = PatchStream.process_text(text) + self.assertEqual(pstrm.commit.rtags, { + 'Reviewed-by': {self.joe, self.mary}, + 'Tested-by': {self.leb}}) + + def test_invalid_tag(self): + """Test invalid tag in a patchstream""" + text = '''This is a patch + +Serie-version: 2 +''' + with self.assertRaises(ValueError) as exc: + PatchStream.process_text(text) + self.assertEqual("Line 3: Invalid tag = 'Serie-version: 2'", + str(exc.exception)) + + def test_missing_end(self): + """Test a missing END tag""" + text = '''This is a patch + +Cover-letter: +This is the title +missing END after this line +Signed-off-by: Fred +''' + pstrm = PatchStream.process_text(text) + self.assertEqual(["Missing 'END' in section 'cover'"], + pstrm.commit.warn) + + def test_missing_blank_line(self): + """Test a missing blank line after a tag""" + text = '''This is a patch + +Series-changes: 2 +- First line of changes +- Missing blank line after this line +Signed-off-by: Fred +''' + pstrm = PatchStream.process_text(text) + self.assertEqual(["Missing 'blank line' in section 'Series-changes'"], + pstrm.commit.warn) + + def test_invalid_commit_tag(self): + """Test an invalid Commit-xxx tag""" + text = '''This is a patch + +Commit-fred: testing +''' + pstrm = PatchStream.process_text(text) + self.assertEqual(["Line 3: Ignoring Commit-fred"], pstrm.commit.warn) + + def test_self_test(self): + """Test a tested by tag by this user""" + test_line = 'Tested-by: %s@napier.com' % os.getenv('USER') + text = '''This is a patch + +%s +''' % test_line + pstrm = PatchStream.process_text(text) + self.assertEqual(["Ignoring '%s'" % test_line], pstrm.commit.warn) + + def test_space_before_tab(self): + """Test a space before a tab""" + text = '''This is a 
patch + ++ \tSomething +''' + pstrm = PatchStream.process_text(text) + self.assertEqual(["Line 3/0 has space before tab"], pstrm.commit.warn) + + def test_lines_after_test(self): + """Test detecting lines after TEST= line""" + text = '''This is a patch + +TEST=sometest +more lines +here +''' + pstrm = PatchStream.process_text(text) + self.assertEqual(["Found 2 lines after TEST="], pstrm.commit.warn) + + def test_blank_line_at_end(self): + """Test detecting a blank line at the end of a file""" + text = '''This is a patch + +diff --git a/lib/fdtdec.c b/lib/fdtdec.c +index c072e54..942244f 100644 +--- a/lib/fdtdec.c ++++ b/lib/fdtdec.c +@@ -1200,7 +1200,8 @@ int fdtdec_setup_mem_size_base(void) + \t} + + \tgd->ram_size = (phys_size_t)(res.end - res.start + 1); +- debug("%s: Initial DRAM size %llx\n", __func__, (u64)gd->ram_size); ++ debug("%s: Initial DRAM size %llx\n", __func__, ++ (unsigned long long)gd->ram_size); ++ +diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c + +-- +2.7.4 + + ''' + pstrm = PatchStream.process_text(text) + self.assertEqual( + ["Found possible blank line(s) at end of file 'lib/fdtdec.c'"], + pstrm.commit.warn) + + def test_no_upstream(self): + """Test CountCommitsToBranch when there is no upstream""" + repo = self.make_git_tree() + target = repo.lookup_reference('refs/heads/base') + # pylint doesn't seem to find this + # pylint: disable=E1101 + self.repo.checkout(target, strategy=pygit2.GIT_CHECKOUT_FORCE) + + # Check that it can detect the current branch + orig_dir = os.getcwd() + try: + os.chdir(self.gitdir) + with self.assertRaises(ValueError) as exc: + gitutil.count_commits_to_branch(None) + self.assertIn( + "Failed to determine upstream: fatal: no upstream configured for branch 'base'", + str(exc.exception)) + finally: + os.chdir(orig_dir) + + def run_patman(self, *args): + """Run patman using the provided arguments + + This runs the patman executable from scratch, as opposed to calling + the control.do_patman() 
function. + + Args: + args (list of str): Arguments to pass (excluding argv[0]) + + Return: + CommandResult: Result of execution + """ + all_args = [self._patman_pathname] + list(args) + return command.run_one(*all_args, capture=True, capture_stderr=True) + + def test_full_help(self): + """Test getting full help""" + command.TEST_RESULT = None + result = self.run_patman('-H') + help_file = os.path.join(self._patman_dir, 'README.rst') + # Remove possible extraneous strings + extra = '::::::::::::::\n' + help_file + '\n::::::::::::::\n' + gothelp = result.stdout.replace(extra, '') + self.assertEqual(len(gothelp), os.path.getsize(help_file)) + self.assertEqual(0, len(result.stderr)) + self.assertEqual(0, result.return_code) + + def test_help(self): + """Test getting help with commands and arguments""" + command.TEST_RESULT = None + result = self.run_patman('-h') + self.assertTrue(len(result.stdout) > 1000) + self.assertEqual(0, len(result.stderr)) + self.assertEqual(0, result.return_code) + + @staticmethod + def _fake_patchwork(subpath): + """Fake Patchwork server for the function below + + This handles accessing a series, providing a list consisting of a + single patch + + Args: + subpath (str): URL subpath to use + """ + re_series = re.match(r'series/(\d*)/$', subpath) + if re_series: + series_num = re_series.group(1) + if series_num == '1234': + return {'patches': [ + {'id': '1', 'name': 'Some patch'}]} + raise ValueError('Fake Patchwork does not understand: %s' % subpath) + + def test_status_mismatch(self): + """Test Patchwork patches not matching the series""" + pwork = patchwork.Patchwork.for_testing(self._fake_patchwork) + with terminal.capture() as (_, err): + loop = asyncio.get_event_loop() + _, patches = loop.run_until_complete(status.check_status(1234, + pwork)) + status.check_patch_count(0, len(patches)) + self.assertIn('Warning: Patchwork reports 1 patches, series has 0', + err.getvalue()) + + def test_status_read_patch(self): + """Test handling a single 
patch in Patchwork""" + pwork = patchwork.Patchwork.for_testing(self._fake_patchwork) + loop = asyncio.get_event_loop() + _, patches = loop.run_until_complete(status.check_status(1234, pwork)) + self.assertEqual(1, len(patches)) + patch = patches[0] + self.assertEqual('1', patch.id) + self.assertEqual('Some patch', patch.raw_subject) + + def test_parse_subject(self): + """Test parsing of the patch subject""" + patch = patchwork.Patch('1') + + # Simple patch not in a series + patch.parse_subject('Testing') + self.assertEqual('Testing', patch.raw_subject) + self.assertEqual('Testing', patch.subject) + self.assertEqual(1, patch.seq) + self.assertEqual(1, patch.count) + self.assertEqual(None, patch.prefix) + self.assertEqual(None, patch.version) + + # First patch in a series + patch.parse_subject('[1/2] Testing') + self.assertEqual('[1/2] Testing', patch.raw_subject) + self.assertEqual('Testing', patch.subject) + self.assertEqual(1, patch.seq) + self.assertEqual(2, patch.count) + self.assertEqual(None, patch.prefix) + self.assertEqual(None, patch.version) + + # Second patch in a series + patch.parse_subject('[2/2] Testing') + self.assertEqual('Testing', patch.subject) + self.assertEqual(2, patch.seq) + self.assertEqual(2, patch.count) + self.assertEqual(None, patch.prefix) + self.assertEqual(None, patch.version) + + # With PATCH prefix + patch.parse_subject('[PATCH,2/5] Testing') + self.assertEqual('Testing', patch.subject) + self.assertEqual(2, patch.seq) + self.assertEqual(5, patch.count) + self.assertEqual('PATCH', patch.prefix) + self.assertEqual(None, patch.version) + + # RFC patch + patch.parse_subject('[RFC,3/7] Testing') + self.assertEqual('Testing', patch.subject) + self.assertEqual(3, patch.seq) + self.assertEqual(7, patch.count) + self.assertEqual('RFC', patch.prefix) + self.assertEqual(None, patch.version) + + # Version patch + patch.parse_subject('[v2,3/7] Testing') + self.assertEqual('Testing', patch.subject) + self.assertEqual(3, patch.seq) + 
self.assertEqual(7, patch.count) + self.assertEqual(None, patch.prefix) + self.assertEqual('v2', patch.version) + + # All fields + patch.parse_subject('[RESEND,v2,3/7] Testing') + self.assertEqual('Testing', patch.subject) + self.assertEqual(3, patch.seq) + self.assertEqual(7, patch.count) + self.assertEqual('RESEND', patch.prefix) + self.assertEqual('v2', patch.version) + + # RFC only + patch.parse_subject('[RESEND] Testing') + self.assertEqual('Testing', patch.subject) + self.assertEqual(1, patch.seq) + self.assertEqual(1, patch.count) + self.assertEqual('RESEND', patch.prefix) + self.assertEqual(None, patch.version) + + def test_compare_series(self): + """Test operation of compare_with_series()""" + commit1 = Commit('abcd') + commit1.subject = 'Subject 1' + commit2 = Commit('ef12') + commit2.subject = 'Subject 2' + commit3 = Commit('3456') + commit3.subject = 'Subject 2' + + patch1 = patchwork.Patch('1') + patch1.subject = 'Subject 1' + patch2 = patchwork.Patch('2') + patch2.subject = 'Subject 2' + patch3 = patchwork.Patch('3') + patch3.subject = 'Subject 2' + + series = Series() + series.commits = [commit1] + patches = [patch1] + patch_for_commit, commit_for_patch, warnings = ( + status.compare_with_series(series, patches)) + self.assertEqual(1, len(patch_for_commit)) + self.assertEqual(patch1, patch_for_commit[0]) + self.assertEqual(1, len(commit_for_patch)) + self.assertEqual(commit1, commit_for_patch[0]) + + series.commits = [commit1] + patches = [patch1, patch2] + patch_for_commit, commit_for_patch, warnings = ( + status.compare_with_series(series, patches)) + self.assertEqual(1, len(patch_for_commit)) + self.assertEqual(patch1, patch_for_commit[0]) + self.assertEqual(1, len(commit_for_patch)) + self.assertEqual(commit1, commit_for_patch[0]) + self.assertEqual(["Cannot find commit for patch 2 ('Subject 2')"], + warnings) + + series.commits = [commit1, commit2] + patches = [patch1] + patch_for_commit, commit_for_patch, warnings = ( + 
status.compare_with_series(series, patches)) + self.assertEqual(1, len(patch_for_commit)) + self.assertEqual(patch1, patch_for_commit[0]) + self.assertEqual(1, len(commit_for_patch)) + self.assertEqual(commit1, commit_for_patch[0]) + self.assertEqual(["Cannot find patch for commit 2 ('Subject 2')"], + warnings) + + series.commits = [commit1, commit2, commit3] + patches = [patch1, patch2] + patch_for_commit, commit_for_patch, warnings = ( + status.compare_with_series(series, patches)) + self.assertEqual(2, len(patch_for_commit)) + self.assertEqual(patch1, patch_for_commit[0]) + self.assertEqual(patch2, patch_for_commit[1]) + self.assertEqual(1, len(commit_for_patch)) + self.assertEqual(commit1, commit_for_patch[0]) + self.assertEqual(["Cannot find patch for commit 3 ('Subject 2')", + "Multiple commits match patch 2 ('Subject 2'):\n" + ' Subject 2\n Subject 2'], + warnings) + + series.commits = [commit1, commit2] + patches = [patch1, patch2, patch3] + patch_for_commit, commit_for_patch, warnings = ( + status.compare_with_series(series, patches)) + self.assertEqual(1, len(patch_for_commit)) + self.assertEqual(patch1, patch_for_commit[0]) + self.assertEqual(2, len(commit_for_patch)) + self.assertEqual(commit1, commit_for_patch[0]) + self.assertEqual(["Multiple patches match commit 2 ('Subject 2'):\n" + ' Subject 2\n Subject 2', + "Cannot find commit for patch 3 ('Subject 2')"], + warnings) + + def _fake_patchwork2(self, subpath): + """Fake Patchwork server for the function below + + This handles accessing series, patches and comments, providing the data + in self.patches to the caller + + Args: + subpath (str): URL subpath to use + """ + re_series = re.match(r'series/(\d*)/$', subpath) + re_patch = re.match(r'patches/(\d*)/$', subpath) + re_comments = re.match(r'patches/(\d*)/comments/$', subpath) + if re_series: + series_num = re_series.group(1) + if series_num == '1234': + return {'patches': self.patches} + elif re_patch: + patch_num = int(re_patch.group(1)) + patch 
= self.patches[patch_num - 1] + return patch + elif re_comments: + patch_num = int(re_comments.group(1)) + patch = self.patches[patch_num - 1] + return patch.comments + raise ValueError('Fake Patchwork does not understand: %s' % subpath) + + def test_find_new_responses(self): + """Test operation of find_new_responses()""" + commit1 = Commit('abcd') + commit1.subject = 'Subject 1' + commit2 = Commit('ef12') + commit2.subject = 'Subject 2' + + patch1 = patchwork.Patch('1') + patch1.parse_subject('[1/2] Subject 1') + patch1.name = patch1.raw_subject + patch1.content = 'This is my patch content' + comment1a = {'content': 'Reviewed-by: %s\n' % self.joe} + + patch1.comments = [comment1a] + + patch2 = patchwork.Patch('2') + patch2.parse_subject('[2/2] Subject 2') + patch2.name = patch2.raw_subject + patch2.content = 'Some other patch content' + comment2a = { + 'content': 'Reviewed-by: %s\nTested-by: %s\n' % + (self.mary, self.leb)} + comment2b = {'content': 'Reviewed-by: %s' % self.fred} + patch2.comments = [comment2a, comment2b] + + # This test works by setting up commits and patch for use by the fake + # Rest API function _fake_patchwork2(). 
It calls various functions in + # the status module after setting up tags in the commits, checking that + # things behaves as expected + self.commits = [commit1, commit2] + self.patches = [patch1, patch2] + + # Check that the tags are picked up on the first patch + new_rtags, _ = status.process_reviews(patch1.content, patch1.comments, + commit1.rtags) + self.assertEqual(new_rtags, {'Reviewed-by': {self.joe}}) + + # Now the second patch + new_rtags, _ = status.process_reviews(patch2.content, patch2.comments, + commit2.rtags) + self.assertEqual(new_rtags, { + 'Reviewed-by': {self.mary, self.fred}, + 'Tested-by': {self.leb}}) + + # Now add some tags to the commit, which means they should not appear as + # 'new' tags when scanning comments + commit1.rtags = {'Reviewed-by': {self.joe}} + new_rtags, _ = status.process_reviews(patch1.content, patch1.comments, + commit1.rtags) + self.assertEqual(new_rtags, {}) + + # For the second commit, add Ed and Fred, so only Mary should be left + commit2.rtags = { + 'Tested-by': {self.leb}, + 'Reviewed-by': {self.fred}} + new_rtags, _ = status.process_reviews(patch2.content, patch2.comments, + commit2.rtags) + self.assertEqual(new_rtags, {'Reviewed-by': {self.mary}}) + + # Check that the output patches expectations: + # 1 Subject 1 + # Reviewed-by: Joe Bloggs <joe@napierwallies.co.nz> + # 2 Subject 2 + # Tested-by: Lord Edmund Blackaddër <weasel@blackadder.org> + # Reviewed-by: Fred Bloggs <f.bloggs@napier.net> + # + Reviewed-by: Mary Bloggs <mary@napierwallies.co.nz> + # 1 new response available in patchwork + + series = Series() + series.commits = [commit1, commit2] + terminal.set_print_test_mode() + pwork = patchwork.Patchwork.for_testing(self._fake_patchwork2) + status.check_and_show_status(series, '1234', None, None, False, False, + False, pwork) + itr = iter(terminal.get_print_test_lines()) + col = terminal.Color() + self.assertEqual(terminal.PrintLine(' 1 Subject 1', col.YELLOW), + next(itr)) + self.assertEqual( + 
terminal.PrintLine(' Reviewed-by: ', col.GREEN, newline=False, + bright=False), + next(itr)) + self.assertEqual(terminal.PrintLine(self.joe, col.WHITE, bright=False), + next(itr)) + + self.assertEqual(terminal.PrintLine(' 2 Subject 2', col.YELLOW), + next(itr)) + self.assertEqual( + terminal.PrintLine(' Reviewed-by: ', col.GREEN, newline=False, + bright=False), + next(itr)) + self.assertEqual(terminal.PrintLine(self.fred, col.WHITE, + bright=False), next(itr)) + self.assertEqual( + terminal.PrintLine(' Tested-by: ', col.GREEN, newline=False, + bright=False), + next(itr)) + self.assertEqual(terminal.PrintLine(self.leb, col.WHITE, bright=False), + next(itr)) + self.assertEqual( + terminal.PrintLine(' + Reviewed-by: ', col.GREEN, newline=False), + next(itr)) + self.assertEqual(terminal.PrintLine(self.mary, col.WHITE), + next(itr)) + self.assertEqual(terminal.PrintLine( + '1 new response available in patchwork (use -d to write them to a new branch)', + None), next(itr)) + + def _fake_patchwork3(self, subpath): + """Fake Patchwork server for the function below + + This handles accessing series, patches and comments, providing the data + in self.patches to the caller + + Args: + subpath (str): URL subpath to use + """ + re_series = re.match(r'series/(\d*)/$', subpath) + re_patch = re.match(r'patches/(\d*)/$', subpath) + re_comments = re.match(r'patches/(\d*)/comments/$', subpath) + if re_series: + series_num = re_series.group(1) + if series_num == '1234': + return {'patches': self.patches} + elif re_patch: + patch_num = int(re_patch.group(1)) + patch = self.patches[patch_num - 1] + return patch + elif re_comments: + patch_num = int(re_comments.group(1)) + patch = self.patches[patch_num - 1] + return patch.comments + raise ValueError('Fake Patchwork does not understand: %s' % subpath) + + def test_create_branch(self): + """Test operation of create_branch()""" + repo = self.make_git_tree() + branch = 'first' + dest_branch = 'first2' + count = 2 + gitdir = self.gitdir + + # 
Set up the test git tree. We use branch 'first' which has two commits + # in it + series = patchstream.get_metadata_for_list(branch, gitdir, count) + self.assertEqual(2, len(series.commits)) + + patch1 = patchwork.Patch('1') + patch1.parse_subject('[1/2] %s' % series.commits[0].subject) + patch1.name = patch1.raw_subject + patch1.content = 'This is my patch content' + comment1a = {'content': 'Reviewed-by: %s\n' % self.joe} + + patch1.comments = [comment1a] + + patch2 = patchwork.Patch('2') + patch2.parse_subject('[2/2] %s' % series.commits[1].subject) + patch2.name = patch2.raw_subject + patch2.content = 'Some other patch content' + comment2a = { + 'content': 'Reviewed-by: %s\nTested-by: %s\n' % + (self.mary, self.leb)} + comment2b = { + 'content': 'Reviewed-by: %s' % self.fred} + patch2.comments = [comment2a, comment2b] + + # This test works by setting up patches for use by the fake Rest API + # function _fake_patchwork3(). The fake patch comments above should + # result in new review tags that are collected and added to the commits + # created in the destination branch. 
+ self.patches = [patch1, patch2] + count = 2 + + # Expected output: + # 1 i2c: I2C things + # + Reviewed-by: Joe Bloggs <joe@napierwallies.co.nz> + # 2 spi: SPI fixes + # + Reviewed-by: Fred Bloggs <f.bloggs@napier.net> + # + Reviewed-by: Mary Bloggs <mary@napierwallies.co.nz> + # + Tested-by: Lord Edmund Blackaddër <weasel@blackadder.org> + # 4 new responses available in patchwork + # 4 responses added from patchwork into new branch 'first2' + # <unittest.result.TestResult run=8 errors=0 failures=0> + + terminal.set_print_test_mode() + pwork = patchwork.Patchwork.for_testing(self._fake_patchwork3) + status.check_and_show_status( + series, '1234', branch, dest_branch, False, False, False, pwork, + repo) + lines = terminal.get_print_test_lines() + self.assertEqual(12, len(lines)) + self.assertEqual( + "4 responses added from patchwork into new branch 'first2'", + lines[11].text) + + # Check that the destination branch has the new tags + new_series = patchstream.get_metadata_for_list(dest_branch, gitdir, + count) + self.assertEqual( + {'Reviewed-by': {self.joe}}, + new_series.commits[0].rtags) + self.assertEqual( + {'Tested-by': {self.leb}, + 'Reviewed-by': {self.fred, self.mary}}, + new_series.commits[1].rtags) + + # Now check the actual test of the first commit message. We expect to + # see the new tags immediately below the old ones. 
+ stdout = patchstream.get_list(dest_branch, count=count, git_dir=gitdir) + itr = iter([line.strip() for line in stdout.splitlines() + if '-by:' in line]) + + # First patch should have the review tag + self.assertEqual('Reviewed-by: %s' % self.joe, next(itr)) + + # Second patch should have the sign-off then the tested-by and two + # reviewed-by tags + self.assertEqual('Signed-off-by: %s' % self.leb, next(itr)) + self.assertEqual('Reviewed-by: %s' % self.fred, next(itr)) + self.assertEqual('Reviewed-by: %s' % self.mary, next(itr)) + self.assertEqual('Tested-by: %s' % self.leb, next(itr)) + + def test_parse_snippets(self): + """Test parsing of review snippets""" + text = '''Hi Fred, + +This is a comment from someone. + +Something else + +On some recent date, Fred wrote: +> This is why I wrote the patch +> so here it is + +Now a comment about the commit message +A little more to say + +Even more + +> diff --git a/file.c b/file.c +> Some more code +> Code line 2 +> Code line 3 +> Code line 4 +> Code line 5 +> Code line 6 +> Code line 7 +> Code line 8 +> Code line 9 + +And another comment + +> @@ -153,8 +143,13 @@ def check_patch(fname, show_types=False): +> further down on the file +> and more code +> +Addition here +> +Another addition here +> codey +> more codey + +and another thing in same file + +> @@ -253,8 +243,13 @@ +> with no function context + +one more thing + +> diff --git a/tools/patman/main.py b/tools/patman/main.py +> +line of code +now a very long comment in a different file +line2 +line3 +line4 +line5 +line6 +line7 +line8 +''' + pstrm = PatchStream.process_text(text, True) + self.assertEqual([], pstrm.commit.warn) + + # We expect to the filename and up to 5 lines of code context before + # each comment. The 'On xxx wrote:' bit should be removed. 
+ self.assertEqual( + [['Hi Fred,', + 'This is a comment from someone.', + 'Something else'], + ['> This is why I wrote the patch', + '> so here it is', + 'Now a comment about the commit message', + 'A little more to say', 'Even more'], + ['> File: file.c', '> Code line 5', '> Code line 6', + '> Code line 7', '> Code line 8', '> Code line 9', + 'And another comment'], + ['> File: file.c', + '> Line: 153 / 143: def check_patch(fname, show_types=False):', + '> and more code', '> +Addition here', + '> +Another addition here', '> codey', '> more codey', + 'and another thing in same file'], + ['> File: file.c', '> Line: 253 / 243', + '> with no function context', 'one more thing'], + ['> File: tools/patman/main.py', '> +line of code', + 'now a very long comment in a different file', + 'line2', 'line3', 'line4', 'line5', 'line6', 'line7', 'line8']], + pstrm.snippets) + + def test_review_snippets(self): + """Test showing of review snippets""" + def _to_submitter(who): + m_who = re.match('(.*) <(.*)>', who) + return { + 'name': m_who.group(1), + 'email': m_who.group(2) + } + + commit1 = Commit('abcd') + commit1.subject = 'Subject 1' + commit2 = Commit('ef12') + commit2.subject = 'Subject 2' + + patch1 = patchwork.Patch('1') + patch1.parse_subject('[1/2] Subject 1') + patch1.name = patch1.raw_subject + patch1.content = 'This is my patch content' + comment1a = {'submitter': _to_submitter(self.joe), + 'content': '''Hi Fred, + +On some date Fred wrote: + +> diff --git a/file.c b/file.c +> Some code +> and more code + +Here is my comment above the above... 
+ + +Reviewed-by: %s +''' % self.joe} + + patch1.comments = [comment1a] + + patch2 = patchwork.Patch('2') + patch2.parse_subject('[2/2] Subject 2') + patch2.name = patch2.raw_subject + patch2.content = 'Some other patch content' + comment2a = { + 'content': 'Reviewed-by: %s\nTested-by: %s\n' % + (self.mary, self.leb)} + comment2b = {'submitter': _to_submitter(self.fred), + 'content': '''Hi Fred, + +On some date Fred wrote: + +> diff --git a/tools/patman/commit.py b/tools/patman/commit.py +> @@ -41,6 +41,9 @@ class Commit: +> self.rtags = collections.defaultdict(set) +> self.warn = [] +> +> + def __str__(self): +> + return self.subject +> + +> def add_change(self, version, info): +> """Add a new change line to the change list for a version. +> +A comment + +Reviewed-by: %s +''' % self.fred} + patch2.comments = [comment2a, comment2b] + + # This test works by setting up commits and patch for use by the fake + # Rest API function _fake_patchwork2(). It calls various functions in + # the status module after setting up tags in the commits, checking that + # things behaves as expected + self.commits = [commit1, commit2] + self.patches = [patch1, patch2] + + # Check that the output patches expectations: + # 1 Subject 1 + # Reviewed-by: Joe Bloggs <joe@napierwallies.co.nz> + # 2 Subject 2 + # Tested-by: Lord Edmund Blackaddër <weasel@blackadder.org> + # Reviewed-by: Fred Bloggs <f.bloggs@napier.net> + # + Reviewed-by: Mary Bloggs <mary@napierwallies.co.nz> + # 1 new response available in patchwork + + series = Series() + series.commits = [commit1, commit2] + terminal.set_print_test_mode() + pwork = patchwork.Patchwork.for_testing(self._fake_patchwork2) + status.check_and_show_status( + series, '1234', None, None, False, True, False, pwork) + itr = iter(terminal.get_print_test_lines()) + col = terminal.Color() + self.assertEqual(terminal.PrintLine(' 1 Subject 1', col.YELLOW), + next(itr)) + self.assertEqual( + terminal.PrintLine(' + Reviewed-by: ', col.GREEN, newline=False), 
+ next(itr)) + self.assertEqual(terminal.PrintLine(self.joe, col.WHITE), next(itr)) + + self.assertEqual(terminal.PrintLine('Review: %s' % self.joe, col.RED), + next(itr)) + self.assertEqual(terminal.PrintLine(' Hi Fred,', None), next(itr)) + self.assertEqual(terminal.PrintLine('', None), next(itr)) + self.assertEqual(terminal.PrintLine(' > File: file.c', col.MAGENTA), + next(itr)) + self.assertEqual(terminal.PrintLine(' > Some code', col.MAGENTA), + next(itr)) + self.assertEqual(terminal.PrintLine(' > and more code', + col.MAGENTA), + next(itr)) + self.assertEqual(terminal.PrintLine( + ' Here is my comment above the above...', None), next(itr)) + self.assertEqual(terminal.PrintLine('', None), next(itr)) + + self.assertEqual(terminal.PrintLine(' 2 Subject 2', col.YELLOW), + next(itr)) + self.assertEqual( + terminal.PrintLine(' + Reviewed-by: ', col.GREEN, newline=False), + next(itr)) + self.assertEqual(terminal.PrintLine(self.fred, col.WHITE), + next(itr)) + self.assertEqual( + terminal.PrintLine(' + Reviewed-by: ', col.GREEN, newline=False), + next(itr)) + self.assertEqual(terminal.PrintLine(self.mary, col.WHITE), + next(itr)) + self.assertEqual( + terminal.PrintLine(' + Tested-by: ', col.GREEN, newline=False), + next(itr)) + self.assertEqual(terminal.PrintLine(self.leb, col.WHITE), + next(itr)) + + self.assertEqual(terminal.PrintLine('Review: %s' % self.fred, col.RED), + next(itr)) + self.assertEqual(terminal.PrintLine(' Hi Fred,', None), next(itr)) + self.assertEqual(terminal.PrintLine('', None), next(itr)) + self.assertEqual(terminal.PrintLine( + ' > File: tools/patman/commit.py', col.MAGENTA), next(itr)) + self.assertEqual(terminal.PrintLine( + ' > Line: 41 / 41: class Commit:', col.MAGENTA), next(itr)) + self.assertEqual(terminal.PrintLine( + ' > + return self.subject', col.MAGENTA), next(itr)) + self.assertEqual(terminal.PrintLine( + ' > +', col.MAGENTA), next(itr)) + self.assertEqual( + terminal.PrintLine( + ' > def add_change(self, version, info):', + 
col.MAGENTA), + next(itr)) + self.assertEqual(terminal.PrintLine( + ' > """Add a new change line to the change list for a version.', + col.MAGENTA), next(itr)) + self.assertEqual(terminal.PrintLine( + ' >', col.MAGENTA), next(itr)) + self.assertEqual(terminal.PrintLine( + ' A comment', None), next(itr)) + self.assertEqual(terminal.PrintLine('', None), next(itr)) + + self.assertEqual(terminal.PrintLine( + '4 new responses available in patchwork (use -d to write them to a new branch)', + None), next(itr)) + + def test_insert_tags(self): + """Test inserting of review tags""" + msg = '''first line +second line.''' + tags = [ + 'Reviewed-by: Bin Meng <bmeng.cn@gmail.com>', + 'Tested-by: Bin Meng <bmeng.cn@gmail.com>' + ] + signoff = 'Signed-off-by: Simon Glass <sjg@chromium.com>' + tag_str = '\n'.join(tags) + + new_msg = patchstream.insert_tags(msg, tags) + self.assertEqual(msg + '\n\n' + tag_str, new_msg) + + new_msg = patchstream.insert_tags(msg + '\n', tags) + self.assertEqual(msg + '\n\n' + tag_str, new_msg) + + msg += '\n\n' + signoff + new_msg = patchstream.insert_tags(msg, tags) + self.assertEqual(msg + '\n' + tag_str, new_msg) diff --git a/tools/patman/get_maintainer.py b/tools/patman/get_maintainer.py new file mode 100644 index 00000000000..1c8fa726573 --- /dev/null +++ b/tools/patman/get_maintainer.py @@ -0,0 +1,64 @@ +# SPDX-License-Identifier: GPL-2.0+ +# Copyright (c) 2012 The Chromium OS Authors. +# Copyright (c) 2022 Maxim Cournoyer <maxim.cournoyer@savoirfairelinux.com> +# + +import os +import shlex +import shutil + +from u_boot_pylib import command +from u_boot_pylib import gitutil + + +def find_get_maintainer(script_file_name): + """Try to find where `script_file_name` is. + + It searches in PATH and falls back to a path relative to the top + of the current git repository. 
+ """ + get_maintainer = shutil.which(script_file_name) + if get_maintainer: + return get_maintainer + + git_relative_script = os.path.join(gitutil.get_top_level() or '', + script_file_name) + if os.path.exists(git_relative_script): + return git_relative_script + + +def get_maintainer(script_file_name, fname, verbose=False): + """Run `script_file_name` on a file. + + `script_file_name` should be a get_maintainer.pl-like script that + takes a patch file name as an input and return the email addresses + of the associated maintainers to standard output, one per line. + + If `script_file_name` does not exist we fail silently. + + Args: + script_file_name: The file name of the get_maintainer.pl script + (or compatible). + fname: File name of the patch to process with get_maintainer.pl. + + Returns: + A list of email addresses to CC to. + """ + # Expand `script_file_name` into a file name and its arguments, if + # any. + get_maintainer = None + arguments = None + if script_file_name: + cmd_args = shlex.split(script_file_name) + file_name = cmd_args[0] + arguments = cmd_args[1:] + + get_maintainer = find_get_maintainer(file_name) + if not get_maintainer: + if verbose: + print("WARNING: Couldn't find get_maintainer.pl") + return [] + + stdout = command.output(get_maintainer, *arguments, fname) + lines = stdout.splitlines() + return [x.replace('"', '') for x in lines] diff --git a/tools/patman/patchstream.py b/tools/patman/patchstream.py new file mode 100644 index 00000000000..45040877f8c --- /dev/null +++ b/tools/patman/patchstream.py @@ -0,0 +1,922 @@ +# SPDX-License-Identifier: GPL-2.0+ +# Copyright (c) 2011 The Chromium OS Authors. 
+# + +"""Handles parsing a stream of commits/emails from 'git log' or other source""" + +import collections +import datetime +import io +import math +import os +import re +import queue +import shutil +import tempfile + +from patman import commit +from patman.series import Series +from u_boot_pylib import command +from u_boot_pylib import gitutil + +# Tags that we detect and remove +RE_REMOVE = re.compile(r'^BUG=|^TEST=|^BRANCH=|^Review URL:' + r'|Reviewed-on:|Commit-\w*:') + +# Lines which are allowed after a TEST= line +RE_ALLOWED_AFTER_TEST = re.compile('^Signed-off-by:') + +# Signoffs +RE_SIGNOFF = re.compile('^Signed-off-by: *(.*)') + +# Cover letter tag +RE_COVER = re.compile('^Cover-([a-z-]*): *(.*)') + +# Patch series tag +RE_SERIES_TAG = re.compile('^Series-([a-z-]*): *(.*)') + +# Change-Id will be used to generate the Message-Id and then be stripped +RE_CHANGE_ID = re.compile('^Change-Id: *(.*)') + +# Commit series tag +RE_COMMIT_TAG = re.compile('^Commit-([a-z-]*): *(.*)') + +# Commit tags that we want to collect and keep +RE_TAG = re.compile('^(Tested-by|Acked-by|Reviewed-by|Patch-cc|Fixes): (.*)') + +# The start of a new commit in the git log +RE_COMMIT = re.compile('^commit ([0-9a-f]*)$') + +# We detect these since checkpatch doesn't always do it +RE_SPACE_BEFORE_TAB = re.compile(r'^[+].* \t') + +# Match indented lines for changes +RE_LEADING_WHITESPACE = re.compile(r'^\s') + +# Detect a 'diff' line +RE_DIFF = re.compile(r'^>.*diff --git a/(.*) b/(.*)$') + +# Detect a context line, like '> @@ -153,8 +153,13 @@ CheckPatch +RE_LINE = re.compile(r'>.*@@ \-(\d+),\d+ \+(\d+),\d+ @@ *(.*)') + +# Detect line with invalid TAG +RE_INV_TAG = re.compile('^Serie-([a-z-]*): *(.*)') + +# States we can be in - can we use range() and still have comments? 
+STATE_MSG_HEADER = 0 # Still in the message header +STATE_PATCH_SUBJECT = 1 # In patch subject (first line of log for a commit) +STATE_PATCH_HEADER = 2 # In patch header (after the subject) +STATE_DIFFS = 3 # In the diff part (past --- line) + + +class PatchStream: + """Class for detecting/injecting tags in a patch or series of patches + + We support processing the output of 'git log' to read out the tags we + are interested in. We can also process a patch file in order to remove + unwanted tags or inject additional ones. These correspond to the two + phases of processing. + + Args: + keep_change_id (bool): Keep the Change-Id tag + insert_base_commit (bool): True to add the base commit to the end + """ + def __init__(self, series, is_log=False, keep_change_id=False, + insert_base_commit=False): + self.skip_blank = False # True to skip a single blank line + self.found_test = False # Found a TEST= line + self.lines_after_test = 0 # Number of lines found after TEST= + self.linenum = 1 # Output line number we are up to + self.in_section = None # Name of start...END section we are in + self.notes = [] # Series notes + self.section = [] # The current section...END section + self.series = series # Info about the patch series + self.is_log = is_log # True if indent like git log + self.keep_change_id = keep_change_id # True to keep Change-Id tags + self.in_change = None # Name of the change list we are in + self.change_version = 0 # Non-zero if we are in a change list + self.change_lines = [] # Lines of the current change + self.blank_count = 0 # Number of blank lines stored up + self.state = STATE_MSG_HEADER # What state are we in? 
+ self.commit = None # Current commit + # List of unquoted test blocks, each a list of str lines + self.snippets = [] + self.cur_diff = None # Last 'diff' line seen (str) + self.cur_line = None # Last context (@@) line seen (str) + self.recent_diff = None # 'diff' line for current snippet (str) + self.recent_line = None # '@@' line for current snippet (str) + self.recent_quoted = collections.deque([], 5) + self.recent_unquoted = queue.Queue() + self.was_quoted = None + self.insert_base_commit = insert_base_commit + self.lines = [] # All lines in a commit message + self.msg = None # Full commit message including subject + + @staticmethod + def process_text(text, is_comment=False): + """Process some text through this class using a default Commit/Series + + Args: + text (str): Text to parse + is_comment (bool): True if this is a comment rather than a patch. + If True, PatchStream doesn't expect a patch subject at the + start, but jumps straight into the body + + Returns: + PatchStream: object with results + """ + pstrm = PatchStream(Series()) + pstrm.commit = commit.Commit(None) + infd = io.StringIO(text) + outfd = io.StringIO() + if is_comment: + pstrm.state = STATE_PATCH_HEADER + pstrm.process_stream(infd, outfd) + return pstrm + + def _add_warn(self, warn): + """Add a new warning to report to the user about the current commit + + The new warning is added to the current commit if not already present. + + Args: + warn (str): Warning to report + + Raises: + ValueError: Warning is generated with no commit associated + """ + if not self.commit: + print('Warning outside commit: %s' % warn) + elif warn not in self.commit.warn: + self.commit.warn.append(warn) + + def _add_to_series(self, line, name, value): + """Add a new Series-xxx tag. + + When a Series-xxx tag is detected, we come here to record it, if we + are scanning a 'git log'. 
+ + Args: + line (str): Source line containing tag (useful for debug/error + messages) + name (str): Tag name (part after 'Series-') + value (str): Tag value (part after 'Series-xxx: ') + """ + if name == 'notes': + self.in_section = name + self.skip_blank = False + if self.is_log: + warn = self.series.AddTag(self.commit, line, name, value) + if warn: + self.commit.warn.append(warn) + + def _add_to_commit(self, name): + """Add a new Commit-xxx tag. + + When a Commit-xxx tag is detected, we come here to record it. + + Args: + name (str): Tag name (part after 'Commit-') + """ + if name == 'notes': + self.in_section = 'commit-' + name + self.skip_blank = False + + def _add_commit_rtag(self, rtag_type, who): + """Add a response tag to the current commit + + Args: + rtag_type (str): rtag type (e.g. 'Reviewed-by') + who (str): Person who gave that rtag, e.g. + 'Fred Bloggs <fred@bloggs.org>' + """ + self.commit.add_rtag(rtag_type, who) + + def _close_commit(self, skip_last_line): + """Save the current commit into our commit list, and reset our state + + Args: + skip_last_line (bool): True to omit the final line of self.lines + when building the commit message. This is normally the blank + line between two commits, except at the end of the log, where + there is no blank line + """ + if self.commit and self.is_log: + # Skip the blank line before the subject + lines = self.lines[:-1] if skip_last_line else self.lines + self.commit.msg = '\n'.join(lines[1:]) + '\n' + self.series.AddCommit(self.commit) + self.commit = None + self.lines = [] + # If 'END' is missing in a 'Cover-letter' section, and that section + # happens to show up at the very end of the commit message, this is + # the chance for us to fix it up. 
+ if self.in_section == 'cover' and self.is_log: + self.series.cover = self.section + self.in_section = None + self.skip_blank = True + self.section = [] + + self.cur_diff = None + self.recent_diff = None + self.recent_line = None + + def _parse_version(self, value, line): + """Parse a version from a *-changes tag + + Args: + value (str): Tag value (part after 'xxx-changes: ' + line (str): Source line containing tag + + Returns: + int: The version as an integer + + Raises: + ValueError: the value cannot be converted + """ + try: + return int(value) + except ValueError: + raise ValueError("%s: Cannot decode version info '%s'" % + (self.commit.hash, line)) + + def _finalise_change(self): + """_finalise a (multi-line) change and add it to the series or commit""" + if not self.change_lines: + return + change = '\n'.join(self.change_lines) + + if self.in_change == 'Series': + self.series.AddChange(self.change_version, self.commit, change) + elif self.in_change == 'Cover': + self.series.AddChange(self.change_version, None, change) + elif self.in_change == 'Commit': + self.commit.add_change(self.change_version, change) + self.change_lines = [] + + def _finalise_snippet(self): + """Finish off a snippet and add it to the list + + This is called when we get to the end of a snippet, i.e. the we enter + the next block of quoted text: + + This is a comment from someone. 
+ + Something else + + > Now we have some code <----- end of snippet + > more code + + Now a comment about the above code + + This adds the snippet to our list + """ + quoted_lines = [] + while self.recent_quoted: + quoted_lines.append(self.recent_quoted.popleft()) + unquoted_lines = [] + valid = False + while not self.recent_unquoted.empty(): + text = self.recent_unquoted.get() + if not (text.startswith('On ') and text.endswith('wrote:')): + unquoted_lines.append(text) + if text: + valid = True + if valid: + lines = [] + if self.recent_diff: + lines.append('> File: %s' % self.recent_diff) + if self.recent_line: + out = '> Line: %s / %s' % self.recent_line[:2] + if self.recent_line[2]: + out += ': %s' % self.recent_line[2] + lines.append(out) + lines += quoted_lines + unquoted_lines + if lines: + self.snippets.append(lines) + + def process_line(self, line): + """Process a single line of a patch file or commit log + + This process a line and returns a list of lines to output. The list + may be empty or may contain multiple output lines. + + This is where all the complicated logic is located. The class's + state is used to move between different states and detect things + properly. + + We can be in one of two modes: + self.is_log == True: This is 'git log' mode, where most output is + indented by 4 characters and we are scanning for tags + + self.is_log == False: This is 'patch' mode, where we already have + all the tags, and are processing patches to remove junk we + don't want, and add things we think are required. + + Args: + line (str): text line to process + + Returns: + list: list of output lines, or [] if nothing should be output + + Raises: + ValueError: a fatal error occurred while parsing, e.g. an END + without a starting tag, or two commits with two change IDs + """ + # Initially we have no output. 
Prepare the input line string + out = [] + line = line.rstrip('\n') + + commit_match = RE_COMMIT.match(line) if self.is_log else None + + if self.is_log: + if line[:4] == ' ': + line = line[4:] + + # Handle state transition and skipping blank lines + series_tag_match = RE_SERIES_TAG.match(line) + change_id_match = RE_CHANGE_ID.match(line) + commit_tag_match = RE_COMMIT_TAG.match(line) + cover_match = RE_COVER.match(line) + signoff_match = RE_SIGNOFF.match(line) + leading_whitespace_match = RE_LEADING_WHITESPACE.match(line) + diff_match = RE_DIFF.match(line) + line_match = RE_LINE.match(line) + invalid_match = RE_INV_TAG.match(line) + tag_match = None + if self.state == STATE_PATCH_HEADER: + tag_match = RE_TAG.match(line) + is_blank = not line.strip() + if is_blank: + if (self.state == STATE_MSG_HEADER + or self.state == STATE_PATCH_SUBJECT): + self.state += 1 + + # We don't have a subject in the text stream of patch files + # It has its own line with a Subject: tag + if not self.is_log and self.state == STATE_PATCH_SUBJECT: + self.state += 1 + elif commit_match: + self.state = STATE_MSG_HEADER + if self.state != STATE_MSG_HEADER: + self.lines.append(line) + + # If a tag is detected, or a new commit starts + if series_tag_match or commit_tag_match or change_id_match or \ + cover_match or signoff_match or self.state == STATE_MSG_HEADER: + # but we are already in a section, this means 'END' is missing + # for that section, fix it up. 
+ if self.in_section: + self._add_warn("Missing 'END' in section '%s'" % self.in_section) + if self.in_section == 'cover': + self.series.cover = self.section + elif self.in_section == 'notes': + if self.is_log: + self.series.notes += self.section + elif self.in_section == 'commit-notes': + if self.is_log: + self.commit.notes += self.section + else: + # This should not happen + raise ValueError("Unknown section '%s'" % self.in_section) + self.in_section = None + self.skip_blank = True + self.section = [] + # but we are already in a change list, that means a blank line + # is missing, fix it up. + if self.in_change: + self._add_warn("Missing 'blank line' in section '%s-changes'" % + self.in_change) + self._finalise_change() + self.in_change = None + self.change_version = 0 + + # If we are in a section, keep collecting lines until we see END + if self.in_section: + if line == 'END': + if self.in_section == 'cover': + self.series.cover = self.section + elif self.in_section == 'notes': + if self.is_log: + self.series.notes += self.section + elif self.in_section == 'commit-notes': + if self.is_log: + self.commit.notes += self.section + else: + # This should not happen + raise ValueError("Unknown section '%s'" % self.in_section) + self.in_section = None + self.skip_blank = True + self.section = [] + else: + self.section.append(line) + + # If we are not in a section, it is an unexpected END + elif line == 'END': + raise ValueError("'END' wihout section") + + # Detect the commit subject + elif not is_blank and self.state == STATE_PATCH_SUBJECT: + self.commit.subject = line + + # Detect the tags we want to remove, and skip blank lines + elif RE_REMOVE.match(line) and not commit_tag_match: + self.skip_blank = True + + # TEST= should be the last thing in the commit, so remove + # everything after it + if line.startswith('TEST='): + self.found_test = True + elif self.skip_blank and is_blank: + self.skip_blank = False + + # Detect Cover-xxx tags + elif cover_match: + name = 
cover_match.group(1) + value = cover_match.group(2) + if name == 'letter': + self.in_section = 'cover' + self.skip_blank = False + elif name == 'letter-cc': + self._add_to_series(line, 'cover-cc', value) + elif name == 'changes': + self.in_change = 'Cover' + self.change_version = self._parse_version(value, line) + + # If we are in a change list, key collected lines until a blank one + elif self.in_change: + if is_blank: + # Blank line ends this change list + self._finalise_change() + self.in_change = None + self.change_version = 0 + elif line == '---': + self._finalise_change() + self.in_change = None + self.change_version = 0 + out = self.process_line(line) + elif self.is_log: + if not leading_whitespace_match: + self._finalise_change() + self.change_lines.append(line) + self.skip_blank = False + + # Detect Series-xxx tags + elif series_tag_match: + name = series_tag_match.group(1) + value = series_tag_match.group(2) + if name == 'changes': + # value is the version number: e.g. 1, or 2 + self.in_change = 'Series' + self.change_version = self._parse_version(value, line) + else: + self._add_to_series(line, name, value) + self.skip_blank = True + + # Detect Change-Id tags + elif change_id_match: + if self.keep_change_id: + out = [line] + value = change_id_match.group(1) + if self.is_log: + if self.commit.change_id: + raise ValueError( + "%s: Two Change-Ids: '%s' vs. 
'%s'" % + (self.commit.hash, self.commit.change_id, value)) + self.commit.change_id = value + self.skip_blank = True + + # Detect Commit-xxx tags + elif commit_tag_match: + name = commit_tag_match.group(1) + value = commit_tag_match.group(2) + if name == 'notes': + self._add_to_commit(name) + self.skip_blank = True + elif name == 'changes': + self.in_change = 'Commit' + self.change_version = self._parse_version(value, line) + elif name == 'cc': + self.commit.add_cc(value.split(',')) + elif name == 'added-in': + version = self._parse_version(value, line) + self.commit.add_change(version, '- New') + self.series.AddChange(version, None, '- %s' % + self.commit.subject) + else: + self._add_warn('Line %d: Ignoring Commit-%s' % + (self.linenum, name)) + + # Detect invalid tags + elif invalid_match: + raise ValueError("Line %d: Invalid tag = '%s'" % + (self.linenum, line)) + + # Detect the start of a new commit + elif commit_match: + self._close_commit(True) + self.commit = commit.Commit(commit_match.group(1)) + + # Detect tags in the commit message + elif tag_match: + rtag_type, who = tag_match.groups() + self._add_commit_rtag(rtag_type, who) + # Remove Tested-by self, since few will take much notice + if (rtag_type == 'Tested-by' and + who.find(os.getenv('USER') + '@') != -1): + self._add_warn("Ignoring '%s'" % line) + elif rtag_type == 'Patch-cc': + self.commit.add_cc(who.split(',')) + else: + out = [line] + + # Suppress duplicate signoffs + elif signoff_match: + if (self.is_log or not self.commit or + self.commit.check_duplicate_signoff(signoff_match.group(1))): + out = [line] + + # Well that means this is an ordinary line + else: + # Look for space before tab + mat = RE_SPACE_BEFORE_TAB.match(line) + if mat: + self._add_warn('Line %d/%d has space before tab' % + (self.linenum, mat.start())) + + # OK, we have a valid non-blank line + out = [line] + self.linenum += 1 + self.skip_blank = False + + if diff_match: + self.cur_diff = diff_match.group(1) + + # If this is 
quoted, keep recent lines + if not diff_match and self.linenum > 1 and line: + if line.startswith('>'): + if not self.was_quoted: + self._finalise_snippet() + self.recent_line = None + if not line_match: + self.recent_quoted.append(line) + self.was_quoted = True + self.recent_diff = self.cur_diff + else: + self.recent_unquoted.put(line) + self.was_quoted = False + + if line_match: + self.recent_line = line_match.groups() + + if self.state == STATE_DIFFS: + pass + + # If this is the start of the diffs section, emit our tags and + # change log + elif line == '---': + self.state = STATE_DIFFS + + # Output the tags (signoff first), then change list + out = [] + log = self.series.MakeChangeLog(self.commit) + out += [line] + if self.commit: + out += self.commit.notes + out += [''] + log + elif self.found_test: + if not RE_ALLOWED_AFTER_TEST.match(line): + self.lines_after_test += 1 + + return out + + def finalise(self): + """Close out processing of this patch stream""" + self._finalise_snippet() + self._finalise_change() + self._close_commit(False) + if self.lines_after_test: + self._add_warn('Found %d lines after TEST=' % self.lines_after_test) + + def _write_message_id(self, outfd): + """Write the Message-Id into the output. + + This is based on the Change-Id in the original patch, the version, + and the prefix. + + Args: + outfd (io.IOBase): Output stream file object + """ + if not self.commit.change_id: + return + + # If the count is -1 we're testing, so use a fixed time + if self.commit.count == -1: + time_now = datetime.datetime(1999, 12, 31, 23, 59, 59) + else: + time_now = datetime.datetime.now() + + # In theory there is email.utils.make_msgid() which would be nice + # to use, but it already produces something way too long and thus + # will produce ugly commit lines if someone throws this into + # a "Link:" tag in the final commit. So (sigh) roll our own. 
+ + # Start with the time; presumably we wouldn't send the same series + # with the same Change-Id at the exact same second. + parts = [time_now.strftime("%Y%m%d%H%M%S")] + + # These seem like they would be nice to include. + if 'prefix' in self.series: + parts.append(self.series['prefix']) + if 'postfix' in self.series: + parts.append(self.series['postfix']) + if 'version' in self.series: + parts.append("v%s" % self.series['version']) + + parts.append(str(self.commit.count + 1)) + + # The Change-Id must be last, right before the @ + parts.append(self.commit.change_id) + + # Join parts together with "." and write it out. + outfd.write('Message-Id: <%s@changeid>\n' % '.'.join(parts)) + + def process_stream(self, infd, outfd): + """Copy a stream from infd to outfd, filtering out unwanting things. + + This is used to process patch files one at a time. + + Args: + infd (io.IOBase): Input stream file object + outfd (io.IOBase): Output stream file object + """ + # Extract the filename from each diff, for nice warnings + fname = None + last_fname = None + re_fname = re.compile('diff --git a/(.*) b/.*') + + self._write_message_id(outfd) + + while True: + line = infd.readline() + if not line: + break + out = self.process_line(line) + + # Try to detect blank lines at EOF + for line in out: + match = re_fname.match(line) + if match: + last_fname = fname + fname = match.group(1) + if line == '+': + self.blank_count += 1 + else: + if self.blank_count and (line == '-- ' or match): + self._add_warn("Found possible blank line(s) at end of file '%s'" % + last_fname) + outfd.write('+\n' * self.blank_count) + outfd.write(line + '\n') + self.blank_count = 0 + self.finalise() + if self.insert_base_commit: + if self.series.base_commit: + print(f'base-commit: {self.series.base_commit.hash}', + file=outfd) + if self.series.branch: + print(f'branch: {self.series.branch}', file=outfd) + + +def insert_tags(msg, tags_to_emit): + """Add extra tags to a commit message + + The tags are added 
after an existing block of tags if found, otherwise at + the end. + + Args: + msg (str): Commit message + tags_to_emit (list): List of tags to emit, each a str + + Returns: + (str) new message + """ + out = [] + done = False + emit_tags = False + emit_blank = False + for line in msg.splitlines(): + if not done: + signoff_match = RE_SIGNOFF.match(line) + tag_match = RE_TAG.match(line) + if tag_match or signoff_match: + emit_tags = True + if emit_tags and not tag_match and not signoff_match: + out += tags_to_emit + emit_tags = False + done = True + emit_blank = not (signoff_match or tag_match) + else: + emit_blank = line + out.append(line) + if not done: + if emit_blank: + out.append('') + out += tags_to_emit + return '\n'.join(out) + +def get_list(commit_range, git_dir=None, count=None): + """Get a log of a list of comments + + This returns the output of 'git log' for the selected commits + + Args: + commit_range (str): Range of commits to count (e.g. 'HEAD..base') + git_dir (str): Path to git repositiory (None to use default) + count (int): Number of commits to list, or None for no limit + + Returns + str: String containing the contents of the git log + """ + params = gitutil.log_cmd(commit_range, reverse=True, count=count, + git_dir=git_dir) + return command.run_one(*params, capture=True).stdout + +def get_metadata_for_list(commit_range, git_dir=None, count=None, + series=None, allow_overwrite=False): + """Reads out patch series metadata from the commits + + This does a 'git log' on the relevant commits and pulls out the tags we + are interested in. + + Args: + commit_range (str): Range of commits to count (e.g. 'HEAD..base') + git_dir (str): Path to git repositiory (None to use default) + count (int): Number of commits to list, or None for no limit + series (Series): Object to add information into. By default a new series + is started. 
+ allow_overwrite (bool): Allow tags to overwrite an existing tag + + Returns: + Series: Object containing information about the commits. + """ + if not series: + series = Series() + series.allow_overwrite = allow_overwrite + stdout = get_list(commit_range, git_dir, count) + pst = PatchStream(series, is_log=True) + for line in stdout.splitlines(): + pst.process_line(line) + pst.finalise() + return series + +def get_metadata(branch, start, count, git_dir=None): + """Reads out patch series metadata from the commits + + This does a 'git log' on the relevant commits and pulls out the tags we + are interested in. + + Args: + branch (str): Branch to use (None for current branch) + start (int): Commit to start from: 0=branch HEAD, 1=next one, etc. + count (int): Number of commits to list + + Returns: + Series: Object containing information about the commits. + """ + top = f"{branch if branch else 'HEAD'}~{start}" + series = get_metadata_for_list(top, git_dir, count) + series.base_commit = commit.Commit( + gitutil.get_hash(f'{top}~{count}', git_dir)) + series.branch = branch or gitutil.get_branch() + series.top = top + return series + +def get_metadata_for_test(text): + """Process metadata from a file containing a git log. Used for tests + + Args: + text: + + Returns: + Series: Object containing information about the commits. + """ + series = Series() + pst = PatchStream(series, is_log=True) + for line in text.splitlines(): + pst.process_line(line) + pst.finalise() + return series + +def fix_patch(backup_dir, fname, series, cmt, keep_change_id=False, + insert_base_commit=False, cwd=None): + """Fix up a patch file, by adding/removing as required. + + We remove our tags from the patch file, insert changes lists, etc. + The patch file is processed in place, and overwritten. + + A backup file is put into backup_dir (if not None). 
+ + Args: + backup_dir (str): Path to directory to use to backup the file + fname (str): Filename to patch file to process + series (Series): Series information about this patch set + cmt (Commit): Commit object for this patch file + keep_change_id (bool): Keep the Change-Id tag. + insert_base_commit (bool): True to add the base commit to the end + cwd (str): Directory containing filename, or None for current + + Return: + list: A list of errors, each str, or [] if all ok. + """ + fname = os.path.join(cwd or '', fname) + handle, tmpname = tempfile.mkstemp() + outfd = os.fdopen(handle, 'w', encoding='utf-8') + infd = open(fname, 'r', encoding='utf-8') + pst = PatchStream(series, keep_change_id=keep_change_id, + insert_base_commit=insert_base_commit) + pst.commit = cmt + pst.process_stream(infd, outfd) + infd.close() + outfd.close() + + # Create a backup file if required + if backup_dir: + shutil.copy(fname, os.path.join(backup_dir, os.path.basename(fname))) + shutil.move(tmpname, fname) + return cmt.warn + +def fix_patches(series, fnames, keep_change_id=False, insert_base_commit=False, + cwd=None): + """Fix up a list of patches identified by filenames + + The patch files are processed in place, and overwritten. + + Args: + series (Series): The Series object + fnames (:type: list of str): List of patch files to process + keep_change_id (bool): Keep the Change-Id tag. 
+ insert_base_commit (bool): True to add the base commit to the end + cwd (str): Directory containing the patch files, or None for current + """ + # Current workflow creates patches, so we shouldn't need a backup + backup_dir = None #tempfile.mkdtemp('clean-patch') + count = 0 + for fname in fnames: + cmt = series.commits[count] + cmt.patch = fname + cmt.count = count + result = fix_patch(backup_dir, fname, series, cmt, + keep_change_id=keep_change_id, + insert_base_commit=insert_base_commit, cwd=cwd) + if result: + print('%d warning%s for %s:' % + (len(result), 's' if len(result) > 1 else '', fname)) + for warn in result: + print('\t%s' % warn) + print() + count += 1 + print('Cleaned %d patch%s' % (count, 'es' if count > 1 else '')) + +def insert_cover_letter(fname, series, count, cwd=None): + """Inserts a cover letter with the required info into patch 0 + + Args: + fname (str): Input / output filename of the cover letter file + series (Series): Series object + count (int): Number of patches in the series + cwd (str): Directory containing filename, or None for current + """ + fname = os.path.join(cwd or '', fname) + fil = open(fname, 'r') + lines = fil.readlines() + fil.close() + + fil = open(fname, 'w') + text = series.cover + prefix = series.GetPatchPrefix() + for line in lines: + if line.startswith('Subject:'): + # if more than 10 or 100 patches, it should say 00/xx, 000/xxx, etc + zero_repeat = int(math.log10(count)) + 1 + zero = '0' * zero_repeat + line = 'Subject: [%s %s/%d] %s\n' % (prefix, zero, count, text[0]) + + # Insert our cover letter + elif line.startswith('*** BLURB HERE ***'): + # First the blurb test + line = '\n'.join(text[1:]) + '\n' + if series.get('notes'): + line += '\n'.join(series.notes) + '\n' + + # Now the change list + out = series.MakeChangeLog(None) + line += '\n' + '\n'.join(out) + fil.write(line) + + # Insert the base commit and branch + if series.base_commit: + print(f'base-commit: {series.base_commit.hash}', file=fil) + if 
series.branch: + print(f'branch: {series.branch}', file=fil) + + fil.close() diff --git a/tools/patman/patchwork.py b/tools/patman/patchwork.py new file mode 100644 index 00000000000..d485648e467 --- /dev/null +++ b/tools/patman/patchwork.py @@ -0,0 +1,852 @@ +# SPDX-License-Identifier: GPL-2.0+ +# +# Copyright 2025 Simon Glass <sjg@chromium.org> +# +"""Provides a basic API for the patchwork server +""" + +import asyncio +import re + +import aiohttp +from collections import namedtuple + +from u_boot_pylib import terminal + +# Information passed to series_get_states() +# link (str): Patchwork link for series +# series_id (int): Series ID in database +# series_name (str): Series name +# version (int): Version number of series +# show_comments (bool): True to show comments +# show_cover_comments (bool): True to show cover-letter comments +STATE_REQ = namedtuple( + 'state_req', + 'link,series_id,series_name,version,show_comments,show_cover_comments') + +# Responses from series_get_states() +# int: ser_ver ID number +# COVER: Cover-letter info +# list of Patch: Information on each patch in the series +# list of dict: patches, see get_series()['patches'] +STATE_RESP = namedtuple('state_resp', 'svid,cover,patches,patch_list') + +# Information about a cover-letter on patchwork +# id (int): Patchwork ID of cover letter +# state (str): Current state, e.g. 'accepted' +# num_comments (int): Number of comments +# name (str): Series name +# comments (list of dict): Comments +COVER = namedtuple('cover', 'id,num_comments,name,comments') + +# Number of retries +RETRIES = 3 + +# Max concurrent request +MAX_CONCURRENT = 50 + +# Patches which are part of a multi-patch series are shown with a prefix like +# [prefix, version, sequence], for example '[RFC, v2, 3/5]'. All but the last +# part is optional. This decodes the string into groups. 
For single patches +# the [] part is not present: +# Groups: (ignore, ignore, ignore, prefix, version, sequence, subject) +RE_PATCH = re.compile(r'(\[(((.*),)?(.*),)?(.*)\]\s)?(.*)$') + +# This decodes the sequence string into a patch number and patch count +RE_SEQ = re.compile(r'(\d+)/(\d+)') + + +class Patch(dict): + """Models a patch in patchwork + + This class records information obtained from patchwork + + Some of this information comes from the 'Patch' column: + + [RFC,v2,1/3] dm: Driver and uclass changes for tiny-dm + + This shows the prefix, version, seq, count and subject. + + The other properties come from other columns in the display. + + Properties: + pid (str): ID of the patch (typically an integer) + seq (int): Sequence number within series (1=first) parsed from sequence + string + count (int): Number of patches in series, parsed from sequence string + raw_subject (str): Entire subject line, e.g. + "[1/2,v2] efi_loader: Sort header file ordering" + prefix (str): Prefix string or None (e.g. 'RFC') + version (str): Version string or None (e.g. 'v2') + raw_subject (str): Raw patch subject + subject (str): Patch subject with [..] 
part removed (same as commit + subject) + data (dict or None): Patch data: + """ + def __init__(self, pid, state=None, data=None, comments=None, + series_data=None): + super().__init__() + self.id = pid # Use 'id' to match what the Rest API provides + self.seq = None + self.count = None + self.prefix = None + self.version = None + self.raw_subject = None + self.subject = None + self.state = state + self.data = data + self.comments = comments + self.series_data = series_data + self.name = None + + # These make us more like a dictionary + def __setattr__(self, name, value): + self[name] = value + + def __getattr__(self, name): + return self[name] + + def __hash__(self): + return hash(frozenset(self.items())) + + def __str__(self): + return self.raw_subject + + def parse_subject(self, raw_subject): + """Parse the subject of a patch into its component parts + + See RE_PATCH for details. The parsed info is placed into seq, count, + prefix, version, subject + + Args: + raw_subject (str): Subject string to parse + + Raises: + ValueError: the subject cannot be parsed + """ + self.raw_subject = raw_subject.strip() + mat = RE_PATCH.search(raw_subject.strip()) + if not mat: + raise ValueError(f"Cannot parse subject '{raw_subject}'") + self.prefix, self.version, seq_info, self.subject = mat.groups()[3:] + mat_seq = RE_SEQ.match(seq_info) if seq_info else False + if mat_seq is None: + self.version = seq_info + seq_info = None + if self.version and not self.version.startswith('v'): + self.prefix = self.version + self.version = None + if seq_info: + if mat_seq: + self.seq = int(mat_seq.group(1)) + self.count = int(mat_seq.group(2)) + else: + self.seq = 1 + self.count = 1 + + +class Review: + """Represents a single review email collected in Patchwork + + Patches can attract multiple reviews. Each consists of an author/date and + a variable number of 'snippets', which are groups of quoted and unquoted + text. 
+ """ + def __init__(self, meta, snippets): + """Create new Review object + + Args: + meta (str): Text containing review author and date + snippets (list): List of snippets in th review, each a list of text + lines + """ + self.meta = ' : '.join([line for line in meta.splitlines() if line]) + self.snippets = snippets + + +class Patchwork: + """Class to handle communication with patchwork + """ + def __init__(self, url, show_progress=True, single_thread=False): + """Set up a new patchwork handler + + Args: + url (str): URL of patchwork server, e.g. + 'https://patchwork.ozlabs.org' + """ + self.url = url + self.fake_request = None + self.proj_id = None + self.link_name = None + self._show_progress = show_progress + self.semaphore = asyncio.Semaphore( + 1 if single_thread else MAX_CONCURRENT) + self.request_count = 0 + + async def _request(self, client, subpath): + """Call the patchwork API and return the result as JSON + + Args: + client (aiohttp.ClientSession): Session to use + subpath (str): URL subpath to use + + Returns: + dict: Json result + + Raises: + ValueError: the URL could not be read + """ + # print('subpath', subpath) + self.request_count += 1 + if self.fake_request: + return self.fake_request(subpath) + + full_url = f'{self.url}/api/1.2/{subpath}' + async with self.semaphore: + # print('full_url', full_url) + for i in range(RETRIES + 1): + try: + async with client.get(full_url) as response: + if response.status != 200: + raise ValueError( + f"Could not read URL '{full_url}'") + result = await response.json() + # print('- done', full_url) + return result + break + except aiohttp.client_exceptions.ServerDisconnectedError: + if i == RETRIES: + raise + + @staticmethod + def for_testing(func): + """Get an instance to use for testing + + Args: + func (function): Function to call to handle requests. 
The function + is passed a URL and is expected to return a dict with the + resulting data + + Returns: + Patchwork: testing instance + """ + pwork = Patchwork(None, show_progress=False) + pwork.fake_request = func + return pwork + + class _Stats: + def __init__(self, parent): + self.parent = parent + self.request_count = 0 + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.request_count = self.parent.request_count + + def collect_stats(self): + """Context manager to count requests across a range of patchwork calls + + Usage: + pwork = Patchwork(...) + with pwork.count_requests() as counter: + pwork.something() + print(f'{counter.count} requests') + """ + self.request_count = 0 + return self._Stats(self) + + async def get_projects(self): + """Get a list of projects on the server + + Returns: + list of dict, one for each project + 'name' (str): Project name, e.g. 'U-Boot' + 'id' (int): Project ID, e.g. 9 + 'link_name' (str): Project's link-name, e.g. 
'uboot' + """ + async with aiohttp.ClientSession() as client: + return await self._request(client, 'projects/') + + async def _query_series(self, client, desc): + """Query series by name + + Args: + client (aiohttp.ClientSession): Session to use + desc: String to search for + + Return: + list of series matches, each a dict, see get_series() + """ + query = desc.replace(' ', '+') + return await self._request( + client, f'series/?project={self.proj_id}&q={query}') + + async def _find_series(self, client, svid, ser_id, version, ser): + """Find a series on the server + + Args: + client (aiohttp.ClientSession): Session to use + svid (int): ser_ver ID + ser_id (int): series ID + version (int): Version number to search for + ser (Series): Contains description (cover-letter title) + + Returns: + tuple: + int: ser_ver ID (as passed in) + int: series ID (as passed in) + str: Series link, or None if not found + list of dict, or None if found + each dict is the server result from a possible series + """ + desc = ser.desc + name_found = [] + + # Do a series query on the description + res = await self._query_series(client, desc) + for pws in res: + if pws['name'] == desc: + if int(pws['version']) == version: + return svid, ser_id, pws['id'], None + name_found.append(pws) + + # When there is no cover letter, patchwork uses the first patch as the + # series name + cmt = ser.commits[0] + + res = await self._query_series(client, cmt.subject) + for pws in res: + patch = Patch(0) + patch.parse_subject(pws['name']) + if patch.subject == cmt.subject: + if int(pws['version']) == version: + return svid, ser_id, pws['id'], None + name_found.append(pws) + + return svid, ser_id, None, name_found or res + + async def find_series(self, ser, version): + """Find a series based on its description and version + + Args: + ser (Series): Contains description (cover-letter title) + version (int): Version number + + Return: tuple: + tuple: + str: Series ID, or None if not found + list of dict, or None 
if found + each dict is the server result from a possible series + int: number of server requests done + """ + async with aiohttp.ClientSession() as client: + # We don't know the svid and it isn't needed, so use -1 + _, _, link, options = await self._find_series(client, -1, -1, + version, ser) + return link, options + + async def find_series_list(self, to_find): + """Find the link for each series in a list + + Args: + to_find (dict of svids to sync): + key (int): ser_ver ID + value (tuple): + int: Series ID + int: Series version + str: Series link + str: Series description + + Return: tuple: + list of tuple, one for each item in to_find: + int: ser_ver_ID + int: series ID + int: Series version + str: Series link, or None if not found + list of dict, or None if found + each dict is the server result from a possible series + int: number of server requests done + """ + self.request_count = 0 + async with aiohttp.ClientSession() as client: + tasks = [asyncio.create_task( + self._find_series(client, svid, ser_id, version, desc)) + for svid, (ser_id, version, link, desc) in to_find.items()] + results = await asyncio.gather(*tasks) + + return results, self.request_count + + def project_set(self, project_id, link_name): + """Set the project ID + + The patchwork server has multiple projects. This allows the ID and + link_name of the relevant project to be selected + + This function is used for testing + + Args: + project_id (int): Project ID to use, e.g. 6 + link_name (str): Name to use for project URL links, e.g. 'uboot' + """ + self.proj_id = project_id + self.link_name = link_name + + async def get_series(self, client, link): + """Read information about a series + + Args: + client (aiohttp.ClientSession): Session to use + link (str): Patchwork series ID + + Returns: dict containing patchwork's series information + id (int): series ID unique across patchwork instance, e.g. 3 + url (str): Full URL, e.g. 
+ 'https://patchwork.ozlabs.org/api/1.2/series/3/' + web_url (str): Full URL, e.g. + 'https://patchwork.ozlabs.org/project/uboot/list/?series=3 + project (dict): project information (id, url, name, link_name, + list_id, list_email, etc. + name (str): Series name, e.g. '[U-Boot] moveconfig: fix error' + date (str): Date, e.g. '2017-08-27T08:00:51' + submitter (dict): id, url, name, email, e.g.: + "id": 6125, + "url": "https://patchwork.ozlabs.org/api/1.2/people/6125/", + "name": "Chris Packham", + "email": "judge.packham@gmail.com" + version (int): Version number + total (int): Total number of patches based on subject + received_total (int): Total patches received by patchwork + received_all (bool): True if all patches were received + mbox (str): URL of mailbox, e.g. + 'https://patchwork.ozlabs.org/series/3/mbox/' + cover_letter (dict) or None, e.g.: + "id": 806215, + "url": "https://patchwork.ozlabs.org/api/1.2/covers/806215/", + "web_url": "https://patchwork.ozlabs.org/project/uboot/cover/ + 20170827094411.8583-1-judge.packham@gmail.com/", + "msgid": "<20170827094411.8583-1-judge.packham@gmail.com>", + "list_archive_url": null, + "date": "2017-08-27T09:44:07", + "name": "[U-Boot,v2,0/4] usb: net: Migrate USB Ethernet", + "mbox": "https://patchwork.ozlabs.org/project/uboot/cover/ + 20170827094411.8583-1-judge.packham@gmail.com/mbox/" + patches (list of dict), each e.g.: + "id": 806202, + "url": "https://patchwork.ozlabs.org/api/1.2/patches/806202/", + "web_url": "https://patchwork.ozlabs.org/project/uboot/patch/ + 20170827080051.816-1-judge.packham@gmail.com/", + "msgid": "<20170827080051.816-1-judge.packham@gmail.com>", + "list_archive_url": null, + "date": "2017-08-27T08:00:51", + "name": "[U-Boot] moveconfig: fix error message do_autoconf()", + "mbox": "https://patchwork.ozlabs.org/project/uboot/patch/ + 20170827080051.816-1-judge.packham@gmail.com/mbox/" + """ + return await self._request(client, f'series/{link}/') + + async def get_patch(self, client, 
patch_id): + """Read information about a patch + + Args: + client (aiohttp.ClientSession): Session to use + patch_id (str): Patchwork patch ID + + Returns: dict containing patchwork's patch information + "id": 185, + "url": "https://patchwork.ozlabs.org/api/1.2/patches/185/", + "web_url": "https://patchwork.ozlabs.org/project/cbe-oss-dev/patch/ + 200809050416.27831.adetsch@br.ibm.com/", + project (dict): project information (id, url, name, link_name, + list_id, list_email, etc. + "msgid": "<200809050416.27831.adetsch@br.ibm.com>", + "list_archive_url": null, + "date": "2008-09-05T07:16:27", + "name": "powerpc/spufs: Fix possible scheduling of a context", + "commit_ref": "b2e601d14deb2083e2a537b47869ab3895d23a28", + "pull_url": null, + "state": "accepted", + "archived": false, + "hash": "bc1c0b80d7cff66c0d1e5f3f8f4d10eb36176f0d", + "submitter": { + "id": 93, + "url": "https://patchwork.ozlabs.org/api/1.2/people/93/", + "name": "Andre Detsch", + "email": "adetsch@br.ibm.com" + }, + "delegate": { + "id": 1, + "url": "https://patchwork.ozlabs.org/api/1.2/users/1/", + "username": "jk", + "first_name": "Jeremy", + "last_name": "Kerr", + "email": "jk@ozlabs.org" + }, + "mbox": "https://patchwork.ozlabs.org/project/cbe-oss-dev/patch/ + 200809050416.27831.adetsch@br.ibm.com/mbox/", + "series": [], + "comments": "https://patchwork.ozlabs.org/api/patches/185/ + comments/", + "check": "pending", + "checks": "https://patchwork.ozlabs.org/api/patches/185/checks/", + "tags": {}, + "related": [], + "headers": {...} + "content": "We currently have a race when scheduling a context + after we have found a runnable context in spusched_tick, the + context may have been scheduled by spu_activate(). + + This may result in a panic if we try to unschedule a context + been freed in the meantime. + + This change exits spu_schedule() if the context has already + scheduled, so we don't end up scheduling it twice. 
+ + Signed-off-by: Andre Detsch <adetsch@br.ibm.com>", + "diff": '''Index: spufs/arch/powerpc/platforms/cell/spufs/sched.c + ======================================================= + --- spufs.orig/arch/powerpc/platforms/cell/spufs/sched.c + +++ spufs/arch/powerpc/platforms/cell/spufs/sched.c + @@ -727,7 +727,8 @@ static void spu_schedule(struct spu *spu + \t/* not a candidate for interruptible because it's called + \t from the scheduler thread or from spu_deactivate */ + \tmutex_lock(&ctx->state_mutex); + -\t__spu_schedule(spu, ctx); + +\tif (ctx->state == SPU_STATE_SAVED) + +\t\t__spu_schedule(spu, ctx); + \tspu_release(ctx); + } + ''' + "prefixes": ["3/3", ...] + """ + return await self._request(client, f'patches/{patch_id}/') + + async def _get_patch_comments(self, client, patch_id): + """Read comments about a patch + + Args: + client (aiohttp.ClientSession): Session to use + patch_id (str): Patchwork patch ID + + Returns: list of dict: list of comments: + id (int): series ID unique across patchwork instance, e.g. 3331924 + web_url (str): Full URL, e.g. + 'https://patchwork.ozlabs.org/comment/3331924/' + msgid (str): Message ID, e.g. + '<d2526c98-8198-4b8b-ab10-20bda0151da1@gmx.de>' + list_archive_url: (unknown?) + date (str): Date, e.g. '2024-06-20T13:38:03' + subject (str): email subject, e.g. 'Re: [PATCH 3/5] buildman: + Support building within a Python venv' + date (str): Date, e.g. '2017-08-27T08:00:51' + submitter (dict): id, url, name, email, e.g.: + "id": 61270, + "url": "https://patchwork.ozlabs.org/api/people/61270/", + "name": "Heinrich Schuchardt", + "email": "xypron.glpk@gmx.de" + content (str): Content of email, e.g. 'On 20.06.24 15:19, + Simon Glass wrote: + >...' 
+ headers: dict: email headers, see get_cover() for an example + """ + return await self._request(client, f'patches/{patch_id}/comments/') + + async def get_cover(self, client, cover_id): + """Read information about a cover letter + + Args: + client (aiohttp.ClientSession): Session to use + cover_id (int): Patchwork cover-letter ID + + Returns: dict containing patchwork's cover-letter information: + id (int): series ID unique across patchwork instance, e.g. 3 + url (str): Full URL, e.g. https://patchwork.ozlabs.org/project/uboot/list/?series=3 + project (dict): project information (id, url, name, link_name, + list_id, list_email, etc. + url (str): Full URL, e.g. 'https://patchwork.ozlabs.org/api/1.2/covers/2054866/' + web_url (str): Full URL, e.g. 'https://patchwork.ozlabs.org/project/uboot/cover/20250304130947.109799-1-sjg@chromium.org/' + project (dict): project information (id, url, name, link_name, + list_id, list_email, etc. + msgid (str): Message ID, e.g. '20250304130947.109799-1-sjg@chromium.org>' + list_archive_url (?) + date (str): Date, e.g. '2017-08-27T08:00:51' + name (str): Series name, e.g. '[U-Boot] moveconfig: fix error' + submitter (dict): id, url, name, email, e.g.: + "id": 6170, + "url": "https://patchwork.ozlabs.org/api/1.2/people/6170/", + "name": "Simon Glass", + "email": "sjg@chromium.org" + mbox (str): URL to mailbox, e.g. 
'https://patchwork.ozlabs.org/project/uboot/cover/20250304130947.109799-1-sjg@chromium.org/mbox/' + series (list of dict) each e.g.: + "id": 446956, + "url": "https://patchwork.ozlabs.org/api/1.2/series/446956/", + "web_url": "https://patchwork.ozlabs.org/project/uboot/list/?series=446956", + "date": "2025-03-04T13:09:37", + "name": "binman: Check code-coverage requirements", + "version": 1, + "mbox": "https://patchwork.ozlabs.org/series/446956/mbox/" + comments: Web URL to comments: 'https://patchwork.ozlabs.org/api/covers/2054866/comments/' + headers: dict: e.g.: + "Return-Path": "<u-boot-bounces@lists.denx.de>", + "X-Original-To": "incoming@patchwork.ozlabs.org", + "Delivered-To": "patchwork-incoming@legolas.ozlabs.org", + "Authentication-Results": [ + "legolas.ozlabs.org; +\tdkim=pass (1024-bit key; + unprotected) header.d=chromium.org header.i=@chromium.org header.a=rsa-sha256 + header.s=google header.b=dG8yqtoK; +\tdkim-atps=neutral", + "legolas.ozlabs.org; + spf=pass (sender SPF authorized) smtp.mailfrom=lists.denx.de + (client-ip=85.214.62.61; helo=phobos.denx.de; + envelope-from=u-boot-bounces@lists.denx.de; receiver=patchwork.ozlabs.org)", + "phobos.denx.de; + dmarc=pass (p=none dis=none) header.from=chromium.org", + "phobos.denx.de; + spf=pass smtp.mailfrom=u-boot-bounces@lists.denx.de", + "phobos.denx.de; +\tdkim=pass (1024-bit key; + unprotected) header.d=chromium.org header.i=@chromium.org + header.b=\"dG8yqtoK\"; +\tdkim-atps=neutral", + "phobos.denx.de; + dmarc=pass (p=none dis=none) header.from=chromium.org", + "phobos.denx.de; + spf=pass smtp.mailfrom=sjg@chromium.org" + ], + "Received": [ + "from phobos.denx.de (phobos.denx.de [85.214.62.61]) +\t(using TLSv1.3 with cipher TLS_AES_256_GCM_SHA384 (256/256 bits) +\t key-exchange X25519 server-signature ECDSA (secp384r1)) +\t(No client certificate requested) +\tby legolas.ozlabs.org (Postfix) with ESMTPS id 4Z6bd50jLhz1yD0 +\tfor <incoming@patchwork.ozlabs.org>; Wed, 5 Mar 2025 00:10:00 +1100 
(AEDT)", + "from h2850616.stratoserver.net (localhost [IPv6:::1]) +\tby phobos.denx.de (Postfix) with ESMTP id 434E88144A; +\tTue, 4 Mar 2025 14:09:58 +0100 (CET)", + "by phobos.denx.de (Postfix, from userid 109) + id 8CBF98144A; Tue, 4 Mar 2025 14:09:57 +0100 (CET)", + "from mail-io1-xd2e.google.com (mail-io1-xd2e.google.com + [IPv6:2607:f8b0:4864:20::d2e]) + (using TLSv1.3 with cipher TLS_AES_128_GCM_SHA256 (128/128 bits)) + (No client certificate requested) + by phobos.denx.de (Postfix) with ESMTPS id 48AE281426 + for <u-boot@lists.denx.de>; Tue, 4 Mar 2025 14:09:55 +0100 (CET)", + "by mail-io1-xd2e.google.com with SMTP id + ca18e2360f4ac-85ae33109f6so128326139f.2 + for <u-boot@lists.denx.de>; Tue, 04 Mar 2025 05:09:55 -0800 (PST)", + "from chromium.org (c-73-203-119-151.hsd1.co.comcast.net. + [73.203.119.151]) by smtp.gmail.com with ESMTPSA id + ca18e2360f4ac-858753cd304sm287383839f.33.2025.03.04.05.09.49 + (version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256); + Tue, 04 Mar 2025 05:09:50 -0800 (PST)" + ], + "X-Spam-Checker-Version": "SpamAssassin 3.4.2 (2018-09-13) on phobos.denx.de", + "X-Spam-Level": "", + "X-Spam-Status": "No, score=-2.1 required=5.0 tests=BAYES_00,DKIMWL_WL_HIGH, + DKIM_SIGNED,DKIM_VALID,DKIM_VALID_AU,DKIM_VALID_EF, + RCVD_IN_DNSWL_BLOCKED,SPF_HELO_NONE,SPF_PASS autolearn=ham + autolearn_force=no version=3.4.2", + "DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed; + d=chromium.org; s=google; t=1741093792; x=1741698592; darn=lists.denx.de; + h=content-transfer-encoding:mime-version:message-id:date:subject:cc + :to:from:from:to:cc:subject:date:message-id:reply-to; + bh=B2zsLws430/BEZfatNjeaNnrcxmYUstVjp1pSXgNQjc=; + b=dG8yqtoKpSy15RHagnPcppzR8KbFCRXa2OBwXfwGoyN6M15tOJsUu2tpCdBFYiL5Mk + hQz5iDLV8p0Bs+fP4XtNEx7KeYfTZhiqcRFvdCLwYtGray/IHtOZaNoHLajrstic/OgE + 01ymu6gOEboU32eQ8uC8pdCYQ4UCkfKJwmiiU=", + "X-Google-DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed; + d=1e100.net; s=20230601; t=1741093792; x=1741698592; + 
h=content-transfer-encoding:mime-version:message-id:date:subject:cc + :to:from:x-gm-message-state:from:to:cc:subject:date:message-id + :reply-to; + bh=B2zsLws430/BEZfatNjeaNnrcxmYUstVjp1pSXgNQjc=; + b=eihzJf4i9gin9usvz4hnAvvbLV9/yB7hGPpwwW/amgnPUyWCeQstgvGL7WDLYYnukH + 161p4mt7+cCj7Hao/jSPvVZeuKiBNPkS4YCuP3QjXfdk2ziQ9IjloVmGarWZUOlYJ5iQ + dZnxypUkuFfLcEDSwUmRO1dvLi3nH8PDlae3yT2H87LeHaxhXWdzHxQdPc86rkYyCqCr + qBC2CTS31jqSuiaI+7qB3glvbJbSEXkunz0iDewTJDvZfmuloxTipWUjRJ1mg9UJcZt5 + 9xIuTq1n9aYf1RcQlrEOQhdBAQ0/IJgvmZtzPZi9L+ppBva1ER/xm06nMA7GEUtyGwun + c6pA==", + "X-Gm-Message-State": "AOJu0Yybx3b1+yClf/IfIbQd9u8sxzK9ixPP2HimXF/dGZfSiS7Cb+O5 + WrAkvtp7m3KPM/Mpv0sSZ5qrfTnKnb3WZyv6Oe5Q1iUjAftGNwbSxob5eJ/0y3cgrTdzE4sIWPE + =", + "X-Gm-Gg": "ASbGncu5gtgpXEPGrpbTRJulqFrFj1YPAAmKk4MiXA8/3J1A+25F0Uug2KeFUrZEjkG + KMdPg/C7e2emIvfM+Jl+mKv0ITBvhbyNCyY1q2U1s1cayZF05coZ9ewzGxXJGiEqLMG69uBmmIi + rBEvCnkXS+HVZobDQMtOsezpc+Ju8JRA7+y1R0WIlutl1mQARct6p0zTkuZp75QyB6dm/d0KYgd + iux/t/f0HC2CxstQlTlJYzKL6UJgkB5/UorY1lW/0NDRS6P1iemPQ7I3EPLJO8tM5ZrpJE7qgNP + xy0jXbUv44c48qJ1VszfY5USB8fRG7nwUYxNu6N1PXv9xWbl+z2xL68qNYUrFlHsB8ILTXAyzyr + Cdj+Sxg==", + "X-Google-Smtp-Source": " + AGHT+IFeVk5D4YEfJgPxOfg3ikO6Q7IhaDzABGkAPI6HA0ubK85OPhUHK08gV7enBQ8OdoE/ttqEjw==", + "X-Received": "by 2002:a05:6602:640f:b0:855:63c8:abb5 with SMTP id + ca18e2360f4ac-85881fdba3amr1839428939f.13.1741093792636; + Tue, 04 Mar 2025 05:09:52 -0800 (PST)", + "From": "Simon Glass <sjg@chromium.org>", + "To": "U-Boot Mailing List <u-boot@lists.denx.de>", + "Cc": "Simon Glass <sjg@chromium.org>, Alexander Kochetkov <al.kochet@gmail.com>, + Alper Nebi Yasak <alpernebiyasak@gmail.com>, + Brandon Maier <brandon.maier@collins.com>, + Jerome Forissier <jerome.forissier@linaro.org>, + Jiaxun Yang <jiaxun.yang@flygoat.com>, + Neha Malcom Francis <n-francis@ti.com>, + Patrick Rudolph <patrick.rudolph@9elements.com>, + Paul HENRYS <paul.henrys_ext@softathome.com>, Peng Fan <peng.fan@nxp.com>, + Philippe Reynes 
<philippe.reynes@softathome.com>, + Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>, + Tom Rini <trini@konsulko.com>", + "Subject": "[PATCH 0/7] binman: Check code-coverage requirements", + "Date": "Tue, 4 Mar 2025 06:09:37 -0700", + "Message-ID": "<20250304130947.109799-1-sjg@chromium.org>", + "X-Mailer": "git-send-email 2.43.0", + "MIME-Version": "1.0", + "Content-Transfer-Encoding": "8bit", + "X-BeenThere": "u-boot@lists.denx.de", + "X-Mailman-Version": "2.1.39", + "Precedence": "list", + "List-Id": "U-Boot discussion <u-boot.lists.denx.de>", + "List-Unsubscribe": "<https://lists.denx.de/options/u-boot>, + <mailto:u-boot-request@lists.denx.de?subject=unsubscribe>", + "List-Archive": "<https://lists.denx.de/pipermail/u-boot/>", + "List-Post": "<mailto:u-boot@lists.denx.de>", + "List-Help": "<mailto:u-boot-request@lists.denx.de?subject=help>", + "List-Subscribe": "<https://lists.denx.de/listinfo/u-boot>, + <mailto:u-boot-request@lists.denx.de?subject=subscribe>", + "Errors-To": "u-boot-bounces@lists.denx.de", + "Sender": "\"U-Boot\" <u-boot-bounces@lists.denx.de>", + "X-Virus-Scanned": "clamav-milter 0.103.8 at phobos.denx.de", + "X-Virus-Status": "Clean" + content (str): Email content, e.g. 'This series adds a cover-coverage check to CI for Binman. The iMX8 tests +are still not completed,...' + """ + async with aiohttp.ClientSession() as client: + return await self._request(client, f'covers/{cover_id}/') + + async def get_cover_comments(self, client, cover_id): + """Read comments about a cover letter + + Args: + client (aiohttp.ClientSession): Session to use + cover_id (str): Patchwork cover-letter ID + + Returns: list of dict: list of comments, each: + id (int): series ID unique across patchwork instance, e.g. 3472068 + web_url (str): Full URL, e.g. 'https://patchwork.ozlabs.org/comment/3472068/' + list_archive_url: (unknown?) + + project (dict): project information (id, url, name, link_name, + list_id, list_email, etc. 
+ url (str): Full URL, e.g. 'https://patchwork.ozlabs.org/api/1.2/covers/2054866/' + web_url (str): Full URL, e.g. 'https://patchwork.ozlabs.org/project/uboot/cover/20250304130947.109799-1-sjg@chromium.org/' + project (dict): project information (id, url, name, link_name, + list_id, list_email, etc. + date (str): Date, e.g. '2025-03-04T13:16:15' + subject (str): 'Re: [PATCH 0/7] binman: Check code-coverage requirements' + submitter (dict): id, url, name, email, e.g.: + "id": 6170, + "url": "https://patchwork.ozlabs.org/api/people/6170/", + "name": "Simon Glass", + "email": "sjg@chromium.org" + content (str): Email content, e.g. 'Hi, + +On Tue, 4 Mar 2025 at 06:09, Simon Glass <sjg@chromium.org> wrote: +> +> This '... + headers: dict: email headers, see get_cover() for an example + """ + return await self._request(client, f'covers/{cover_id}/comments/') + + async def get_series_url(self, link): + """Get the URL for a series + + Args: + link (str): Patchwork series ID + + Returns: + str: URL for the series page + """ + return f'{self.url}/project/{self.link_name}/list/?series={link}&state=*&archive=both' + + async def _get_patch_status(self, client, patch_id): + """Get the patch status + + Args: + client (aiohttp.ClientSession): Session to use + patch_id (int): Patch ID to look up in patchwork + + Return: + PATCH: Patch information + + Requests: + 1 for patch, 1 for patch comments + """ + data = await self.get_patch(client, patch_id) + state = data['state'] + comment_data = await self._get_patch_comments(client, patch_id) + + return Patch(patch_id, state, data, comment_data) + + async def get_series_cover(self, client, data): + """Get the cover information (including comments) + + Args: + client (aiohttp.ClientSession): Session to use + data (dict): Return value from self.get_series() + + Returns: + COVER object, or None if no cover letter + """ + # Patchwork should always provide this, but use get() so that we don't + # have to provide it in our fake patchwork 
_fake_patchwork_cser() + cover = data.get('cover_letter') + cover_id = None + if cover: + cover_id = cover['id'] + info = await self.get_cover_comments(client, cover_id) + cover = COVER(cover_id, len(info), cover['name'], info) + return cover + + async def series_get_state(self, client, link, read_comments, + read_cover_comments): + """Sync the series information against patchwork, to find patch status + + Args: + client (aiohttp.ClientSession): Session to use + link (str): Patchwork series ID + read_comments (bool): True to read the comments on the patches + read_cover_comments (bool): True to read the comments on the cover + letter + + Return: tuple: + COVER object, or None if none or not read_cover_comments + list of PATCH objects + """ + data = await self.get_series(client, link) + patch_list = list(data['patches']) + + count = len(patch_list) + patches = [] + if read_comments: + # Returns a list of Patch objects + tasks = [self._get_patch_status(client, patch_list[i]['id']) + for i in range(count)] + + patch_status = await asyncio.gather(*tasks) + for patch_data, status in zip(patch_list, patch_status): + status.series_data = patch_data + patches.append(status) + else: + for i in range(count): + info = patch_list[i] + pat = Patch(info['id'], series_data=info) + pat.raw_subject = info['name'] + patches.append(pat) + if self._show_progress: + terminal.print_clear() + + if read_cover_comments: + cover = await self.get_series_cover(client, data) + else: + cover = None + + return cover, patches diff --git a/tools/patman/patman b/tools/patman/patman new file mode 120000 index 00000000000..5a427d19424 --- /dev/null +++ b/tools/patman/patman @@ -0,0 +1 @@ +__main__.py
\ No newline at end of file diff --git a/tools/patman/patman.rst b/tools/patman/patman.rst new file mode 100644 index 00000000000..549e203c254 --- /dev/null +++ b/tools/patman/patman.rst @@ -0,0 +1,1023 @@ +.. SPDX-License-Identifier: GPL-2.0+ +.. Copyright (c) 2011 The Chromium OS Authors +.. Simon Glass <sjg@chromium.org> +.. Maxim Cournoyer <maxim.cournoyer@savoirfairelinux.com> +.. v1, v2, 19-Oct-11 +.. revised v3 24-Nov-11 +.. revised v4 Independence Day 2020, with Patchwork integration + +Patman patch manager +==================== + +This tool is a Python script which: + +- Creates patch directly from your branch +- Cleans them up by removing unwanted tags +- Inserts a cover letter with change lists +- Runs the patches through checkpatch.pl and its own checks +- Optionally emails them out to selected people +- Links the series automatically to Patchwork once sent + +It also has some Patchwork features: + +- Manage local series and their status on patchwork +- Show review tags from Patchwork and allows them to be gathered into commits +- List comments received on a series + +It is intended to automate patch creation and make it a less +error-prone process. It is useful for U-Boot and Linux work so far, +since they use the checkpatch.pl script. + +It is configured almost entirely by tags it finds in your commits. +This means that you can work on a number of different branches at +once, and keep the settings with each branch rather than having to +git format-patch, git send-email, etc. with the correct parameters +each time. So for example if you put:: + + Series-to: fred.blogs@napier.co.nz + +in one of your commits, the series will be sent there. + +In Linux and U-Boot this will also call get_maintainer.pl on each of your +patches automatically (unless you use -m to disable this). + + +Installation +------------ + +You can install patman using:: + + pip install patch-manager + +The name is chosen since patman conflicts with an existing package. 
+ +If you are using patman within the U-Boot tree, it may be easiest to add a +symlink from your local `~/.bin` directory to `/path/to/tools/patman/patman`. + +How to use this tool +-------------------- + +This tool requires a certain way of working: + +- Maintain a number of branches, one for each patch series you are + working on +- Add tags into the commits within each branch to indicate where the + series should be sent, cover letter, version, etc. Most of these are + normally in the top commit so it is easy to change them with 'git + commit --amend' +- Each branch tracks the upstream branch, so that this script can + automatically determine the number of commits in it (optional) +- Check out a branch, and run this script to create and send out your + patches. Weeks later, change the patches and repeat, knowing that you + will get a consistent result each time. + + +How to configure it +------------------- + +For most cases of using patman for U-Boot development, patman can use the +file 'doc/git-mailrc' in your U-Boot directory to supply the email aliases +you need. To make this work, tell git where to find the file by typing +this once:: + + git config sendemail.aliasesfile doc/git-mailrc + +For both Linux and U-Boot the 'scripts/get_maintainer.pl' handles +figuring out where to send patches pretty well. For other projects, +you may want to specify a different script to be run, for example via +a project-specific `.patman` file:: + + # .patman configuration file at the root of some project + + [settings] + get_maintainer_script: etc/teams.scm get-maintainer + +The `get_maintainer_script` option corresponds to the +`--get-maintainer-script` argument of the `send` command. It is +looked relatively to the root of the current git repository, as well +as on PATH. It can also be provided arguments, as shown above. The +contract is that the script should accept a patch file name and return +a list of email addresses, one per line, like `get_maintainer.pl` +does. 
+ +During the first run patman creates a config file for you by taking the default +user name and email address from the global .gitconfig file. + +To add your own, create a file `~/.patman` like this:: + + # patman alias file + + [alias] + me: Simon Glass <sjg@chromium.org> + + u-boot: U-Boot Mailing List <u-boot@lists.denx.de> + wolfgang: Wolfgang Denk <wd@denx.de> + others: Mike Frysinger <vapier@gentoo.org>, Fred Bloggs <f.bloggs@napier.net> + +As hinted above, Patman will also look for a `.patman` configuration +file at the root of the current project git repository, which makes it +possible to override the `project` settings variable or anything else +in a project-specific way. The values of this "local" configuration +file take precedence over those of the "global" one. + +Aliases are recursive. + +The checkpatch.pl in the U-Boot tools/ subdirectory will be located and +used. Failing that you can put it into your path or ~/bin/checkpatch.pl + +If you want to avoid sending patches to email addresses that are picked up +by patman but are known to bounce you can add a [bounces] section to your +.patman file. Unlike the [alias] section these are simple key: value pairs +that are not recursive:: + + [bounces] + gonefishing: Fred Bloggs <f.bloggs@napier.net> + + +If you want to change the defaults for patman's command-line arguments, +you can add a [settings] section to your .patman file. This can be used +for any command line option by referring to the "dest" for the option in +patman.py. For reference, the useful ones (at the moment) shown below +(all with the non-default setting):: + + [settings] + ignore_errors: True + process_tags: False + verbose: True + smtp_server: /path/to/sendmail + patchwork_url: https://patchwork.ozlabs.org + +If you want to adjust settings (or aliases) that affect just a single +project you can add a section that looks like [project_settings] or +[project_alias]. 
If you want to use tags for your linux work, you could do:: + + [linux_settings] + process_tags: True + + +How to run it +------------- + +First do a dry run: + +.. code-block:: bash + + ./tools/patman/patman send -n + +If it can't detect the upstream branch, try telling it how many patches +there are in your series + +.. code-block:: bash + + ./tools/patman/patman -c5 send -n + +This will create patch files in your current directory and tell you who +it is thinking of sending them to. Take a look at the patch files: + +.. code-block:: bash + + ./tools/patman/patman -c5 -s1 send -n + +Similar to the above, but skip the first commit and take the next 5. This +is useful if your top commit is for setting up testing. + + +How to install it +----------------- + +The most up to date version of patman can be found in the U-Boot sources. +However to use it on other projects it may be more convenient to install it as +a standalone application. A distutils installer is included, this can be used +to install patman: + +.. code-block:: bash + + cd tools/patman && python setup.py install + + +How to add tags +--------------- + +To make this script useful you must add tags like the following into any +commit. Most can only appear once in the whole series. + +Series-to: email / alias + Email address / alias to send patch series to (you can add this + multiple times) + +Series-cc: email / alias, ... + Email address / alias to Cc patch series to (you can add this + multiple times) + +Series-version: n + Sets the version number of this patch series + +Series-prefix: prefix + Sets the subject prefix. Normally empty but it can be RFC for + RFC patches, or RESEND if you are being ignored. The patch subject + is like [RFC PATCH] or [RESEND PATCH]. + In the meantime, git format.subjectprefix option will be added as + well. 
If your format.subjectprefix is set to InternalProject, then + the patch shows like: [InternalProject][RFC/RESEND PATCH] + +Series-postfix: postfix + Sets the subject "postfix". Normally empty, but can be the name of a + tree such as net or net-next if that needs to be specified. The patch + subject is like [PATCH net] or [PATCH net-next]. + +Series-name: name + Sets the name of the series. You don't need to have a name, and + patman does not yet use it, but it is convenient to put the branch + name here to help you keep track of multiple upstreaming efforts. + +Series-links: [id | version:id]... + Set the ID of the series in patchwork. You can set this after you send + out the series and look in patchwork for the resulting series. The + URL you want is the one for the series itself, not any particular patch. + E.g. for http://patchwork.ozlabs.org/project/uboot/list/?series=187331 + the series ID is 187331. This property can have a list of series IDs, + one for each version of the series, e.g. + + :: + + Series-links: 1:187331 2:188434 189372 + + Patman always uses the one without a version, since it assumes this is + the latest one. When this tag is provided, patman can compare your local + branch against patchwork to see what new reviews your series has + collected ('patman status'). + +Series-patchwork-url: url + This allows specifying the Patchwork URL for a branch. This overrides + both the setting files ("patchwork_url") and the command-line argument. + The URL should include the protocol and web site, with no trailing slash, + for example 'https://patchwork.ozlabs.org/project' + +Cover-letter: + Sets the cover letter contents for the series. 
The first line + will become the subject of the cover letter:: + + Cover-letter: + This is the patch set title + blah blah + more blah blah + END + +Cover-letter-cc: email / alias + Additional email addresses / aliases to send cover letter to (you + can add this multiple times) + +Series-notes: + Sets some notes for the patch series, which you don't want in + the commit messages, but do want to send, The notes are joined + together and put after the cover letter. Can appear multiple + times:: + + Series-notes: + blah blah + blah blah + more blah blah + END + +Commit-notes: + Similar, but for a single commit (patch). These notes will appear + immediately below the ``---`` cut in the patch file:: + + Commit-notes: + blah blah + blah blah + more blah blah + +Signed-off-by: Their Name <email> + A sign-off is added automatically to your patches (this is + probably a bug). If you put this tag in your patches, it will + override the default signoff that patman automatically adds. + Multiple duplicate signoffs will be removed. + +Tested-by / Reviewed-by / Acked-by + These indicate that someone has tested/reviewed/acked your patch. + When you get this reply on the mailing list, you can add this + tag to the relevant commit and the script will include it when + you send out the next version. If 'Tested-by:' is set to + yourself, it will be removed. No one will believe you. + + Example:: + + Tested-by: Their Name <fred@bloggs.com> + Reviewed-by: Their Name <email> + Acked-by: Their Name <email> + +Series-changes: n + This can appear in any commit. It lists the changes for a + particular version n of that commit. The change list is + created based on this information. Each commit gets its own + change list and also the whole thing is repeated in the cover + letter (where duplicate change lines are merged). + + By adding your change lists into your commits it is easier to + keep track of what happened. 
When you amend a commit, remember + to update the log there and then, knowing that the script will + do the rest. + + Example:: + + Series-changes: n + - Guinea pig moved into its cage + - Other changes ending with a blank line + <blank line> + +Commit-changes: n + This tag is like Series-changes, except changes in this changelog will + only appear in the changelog of the commit this tag is in. This is + useful when you want to add notes which may not make sense in the cover + letter. For example, you can have short changes such as "New" or + "Lint". + + Example:: + + Commit-changes: n + - This line will not appear in the cover-letter changelog + <blank line> + +Cover-changes: n + This tag is like Series-changes, except changes in this changelog will + only appear in the cover-letter changelog. This is useful to summarize + changes made with Commit-changes, or to add additional context to + changes. + + Example:: + + Cover-changes: n + - This line will only appear in the cover letter + <blank line> + +Commit-added-in: n + Add a change noting the version this commit was added in. This is + equivalent to:: + + Commit-changes: n + - New + + Cover-changes: n + - <commit subject> + + It is a convenient shorthand for suppressing the '(no changes in vN)' + message. + +Patch-cc / Commit-cc: Their Name <email> + This copies a single patch to another email address. Note that the + Cc: used by git send-email is ignored by patman, but will be + interpreted by git send-email if you use it. + +Series-process-log: sort, uniq + This tells patman to sort and/or uniq the change logs. Changes may be + multiple lines long, as long as each subsequent line of a change begins + with a whitespace character. For example, + + Example:: + + - This change + continues onto the next line + - But this change is separate + + Use 'sort' to sort the entries, and 'uniq' to include only + unique entries. If omitted, no change log processing is done. + Separate each tag with a comma. 
+ +Change-Id: + This tag is used to generate the Message-Id of the emails that + will be sent. When you keep the Change-Id the same you are + asserting that this is a slightly different version (but logically + the same patch) as other patches that have been sent out with the + same Change-Id. The Change-Id tag line is removed from outgoing + patches, unless the `keep_change_id` settings is set to `True`. + +Various other tags are silently removed, like these Chrome OS and +Gerrit tags:: + + BUG=... + TEST=... + Review URL: + Reviewed-on: + Commit-xxxx: (except Commit-notes) + +Exercise for the reader: Try adding some tags to one of your current +patch series and see how the patches turn out. + + +Where Patches Are Sent +---------------------- + +Once the patches are created, patman sends them using git send-email. The +whole series is sent to the recipients in Series-to: and Series-cc. +You can Cc individual patches to other people with the Patch-cc: tag. Tags +in the subject are also picked up to Cc patches. For example, a commit like +this:: + + commit 10212537b85ff9b6e09c82045127522c0f0db981 + Author: Mike Frysinger <vapier@gentoo.org> + Date: Mon Nov 7 23:18:44 2011 -0500 + + x86: arm: add a git mailrc file for maintainers + + This should make sending out e-mails to the right people easier. + + Patch-cc: sandbox, mikef, ag + Patch-cc: afleming + +will create a patch which is copied to x86, arm, sandbox, mikef, ag and +afleming. + +If you have a cover letter it will get sent to the union of the Patch-cc +lists of all of the other patches. If you want to sent it to additional +people you can add a tag:: + + Cover-letter-cc: <list of addresses> + +These people will get the cover letter even if they are not on the To/Cc +list for any of the patches. + + +Patchwork Integration +--------------------- + +Patman has a very basic integration with Patchwork. 
If you point patman to +your series on patchwork it can show you what new reviews have appeared since +you sent your series. + +To set this up, add a Series-link tag to one of the commits in your series +(see above). + +Then you can type: + +.. code-block:: bash + + patman status + +and patman will show you each patch and what review tags have been collected, +for example:: + + ... + 21 x86: mtrr: Update the command to use the new mtrr + Reviewed-by: Wolfgang Wallner <wolfgang.wallner@br-automation.com> + + Reviewed-by: Bin Meng <bmeng.cn@gmail.com> + 22 x86: mtrr: Restructure so command execution is in + Reviewed-by: Wolfgang Wallner <wolfgang.wallner@br-automation.com> + + Reviewed-by: Bin Meng <bmeng.cn@gmail.com> + ... + +This shows that patch 21 and 22 were sent out with one review but have since +attracted another review each. If the series needs changes, you can update +these commits with the new review tag before sending the next version of the +series. + +To automatically pull into these tags into a new branch, use the -d option: + +.. code-block:: bash + + patman status -d mtrr4 + +This will create a new 'mtrr4' branch which is the same as your current branch +but has the new review tags in it. The tags are added in alphabetic order and +are placed immediately after any existing ack/review/test/fixes tags, or at the +end. You can check that this worked with: + +.. code-block:: bash + + patman -b mtrr4 status + +which should show that there are no new responses compared to this new branch. + +There is also a -C option to list the comments received for each patch. + + +Example Work Flow +----------------- + +The basic workflow is to create your commits, add some tags to the top +commit, and type 'patman' to check and send them. + +Here is an example workflow for a series of 4 patches. 
Let's say you have +these rather contrived patches in the following order in branch us-cmd in +your tree where 'us' means your upstreaming activity (newest to oldest as +output by git log --oneline):: + + 7c7909c wip + 89234f5 Don't include standard parser if hush is used + 8d640a7 mmc: sparc: Stop using builtin_run_command() + 0c859a9 Rename run_command2() to run_command() + a74443f sandbox: Rename run_command() to builtin_run_command() + +The first patch is some test things that enable your code to be compiled, +but that you don't want to submit because there is an existing patch for it +on the list. So you can tell patman to create and check some patches +(skipping the first patch) with: + +.. code-block:: bash + + patman -s1 send -n + +If you want to do all of them including the work-in-progress one, then +(if you are tracking an upstream branch): + +.. code-block:: bash + + patman send -n + +Let's say that patman reports an error in the second patch. Then: + +.. code-block:: bash + + git rebase -i HEAD~6 + # change 'pick' to 'edit' in 89234f5 + # use editor to make code changes + git add -u + git rebase --continue + +Now you have an updated patch series. To check it: + +.. code-block:: bash + + patman -s1 send -n + +Let's say it is now clean and you want to send it. Now you need to set up +the destination. So amend the top commit with: + +.. code-block:: bash + + git commit --amend + +Use your editor to add some tags, so that the whole commit message is:: + + The current run_command() is really only one of the options, with + hush providing the other. It really shouldn't be called directly + in case the hush parser is bring used, so rename this function to + better explain its purpose:: + + Series-to: u-boot + Series-cc: bfin, marex + Series-prefix: RFC + Cover-letter: + Unified command execution in one place + + At present two parsers have similar code to execute commands. Also + cmd_usage() is called all over the place. 
This series adds a single + function which processes commands called cmd_process(). + END + + Change-Id: Ica71a14c1f0ecb5650f771a32fecb8d2eb9d8a17 + + +You want this to be an RFC and Cc the whole series to the bfin alias and +to Marek. Two of the patches have tags (those are the bits at the front of +the subject that say mmc: sparc: and sandbox:), so 8d640a7 will be Cc'd to +mmc and sparc, and the last one to sandbox. + +Now to send the patches, take off the -n flag: + +.. code-block:: bash + + patman -s1 send + +The patches will be created, shown in your editor, and then sent along with +the cover letter. Note that patman's tags are automatically removed so that +people on the list don't see your secret info. + +Of course patches often attract comments and you need to make some updates. +Let's say one person sent comments and you get an Acked-by: on one patch. +Also, the patch on the list that you were waiting for has been merged, +so you can drop your wip commit. + +Take a look on patchwork and find out the URL of the series. This will be +something like `http://patchwork.ozlabs.org/project/uboot/list/?series=187331` +Add this to a tag in your top commit:: + + Series-links: 187331 + +You can use then patman to collect the Acked-by tag to the correct commit, +creating a new 'version 2' branch for us-cmd: + +.. code-block:: bash + + patman status -d us-cmd2 + git checkout us-cmd2 + +You can look at the comments in Patchwork or with: + +.. code-block:: bash + + patman status -C + +Then you can resync with upstream: + +.. code-block:: bash + + git fetch origin # or whatever upstream is called + git rebase origin/master + +and use git rebase -i to edit the commits, dropping the wip one. + +Then update the `Series-cc:` in the top commit to add the person who reviewed +the v1 series:: + + Series-cc: bfin, marex, Heiko Schocher <hs@denx.de> + +and remove the Series-prefix: tag since it it isn't an RFC any more. 
The +series is now version two, so the series info in the top commit looks like +this:: + + Series-to: u-boot + Series-cc: bfin, marex, Heiko Schocher <hs@denx.de> + Series-version: 2 + Cover-letter: + ... + +Finally, you need to add a change log to the two commits you changed. You +add change logs to each individual commit where the changes happened, like +this:: + + Series-changes: 2 + - Updated the command decoder to reduce code size + - Wound the torque propounder up a little more + +(note the blank line at the end of the list) + +When you run patman it will collect all the change logs from the different +commits and combine them into the cover letter, if you have one. So finally +you have a new series of commits:: + + faeb973 Don't include standard parser if hush is used + 1b2f2fe mmc: sparc: Stop using builtin_run_command() + cfbe330 Rename run_command2() to run_command() + 0682677 sandbox: Rename run_command() to builtin_run_command() + +so to send them: + +.. code-block:: bash + + patman + +and it will create and send the version 2 series. + + +Series Management +----------------- + +Sometimes you might have several series in flight at the same time. Each of +these receives comments and you want to create a new version of each series with +those comments addressed. + +Patman provides a few subcommands which are helpful for managing series. + +Series and branches +~~~~~~~~~~~~~~~~~~~ + +'patman series' works with the concept of a series. It maintains a local +database (.patman.db in your top-level git tree) and uses that to keep track of +series and patches. + +Each series goes through muliple versions. Patman requires that the first +version of your series is in a branch without a numeric suffix. Branch names +like 'serial' and 'video' are OK, but 'part3' is not. This is because Patman +uses the number at the end of the branch name to indicate the version. 
+ +If your series name is 'video', then you can have a 'video' branch for version +1 of the series, 'video2' for version 2 and 'video3' for version 3. All three +branches are for the same series. Patman keeps track of these different +versions. It handles the branch naming automatically, but you need to be aware +of what it is doing. + +You will have an easier time if the branch names you use with 'patman series' +are short, no more than 15 characters. This is the amount of columnar space in +listings. You can add a longer description as the series description. If you +are used to having very descriptive branch names, remember that patman lets you +add metadata into commit which is automatically removed before sending. + +This documentation uses the term 'series' to mean all the versions of a series +and 'series/version' to mean a particular version of a series. + +Updating commits +~~~~~~~~~~~~~~~~ + +Since Patman provides quite a bit of automation, it updates your commits in +some cases, effectively doing a rebase of a branch in order to change the tags +in the commits. It never makes code changes. + +In extremis you can use 'git reflog' to revert something that Patman did. + + +Series subcommands +~~~~~~~~~~~~~~~~~~ + +Note that 'patman series ...' can be abbreviated as 'patman s' or 'patman ser'. + +Here is a short overview of the available subcommands: + + add + Add a new series. Use this on an existing branch to tell Patman about it. + + archive (ar) + Archive a series when you have finished upstreaming it. Archived series + are not shown by most commands. This creates a dated tag for each + version of the series, pointing to the series branch, then deletes the + branches. It puts the tag names in the database so that it can + 'unarchive' to restore things how they were. + + unarchive (unar) + Unarchive a series when you decide you need to do something more with + it. The branches are restored and tags deleted. 
+ + autolink (au) + Search patchwork for the series link for your series, so Patman can + track the status + + autolink-all + Same but for all series + + inc + Increase the series number, effectively creating a new branch with the + next highest version number. The new branch is created based on the + existing branch. So if you use 'patman series inc' on branch 'video2' + it will create branch 'video3' and add v3 into its database + + dec + Decrease the series number, thus deleting the current branch and + removing that version from the data. If you use this comment on branch + 'video3' Patman will delete version 3 and branch 'video3'. + + get-link + Shows the Patchwork link for a series/version + + ls + Lists the series in the database + + mark + Mark a series with 'Change-Id' tags so that Patman can track patches + even when the subject changes. Unmarked patches just use the subject to + decided which is which. + + unmark + Remove 'Change-Id' tags from a series. + + open (o) + Open a series in Patchwork using your web browser + + patches + Show the patches in a particular series/version + + progress (p) + Show upstream progress for your series, or for all series + + rm + Remove a series entirely, including all versions + + rm-version (rmv) + Remove a particular version of a series. This is similar to 'dec' + except that any version can be removed, not just the latest one. + + scan + Scan the local branch and update the database with the set of patches + in that branch. This throws away the old patches. + + send + Send a series out as patches. This is similar to 'patman send' except + that it can send any series, not just the current branch. It also + waits a little for patchwork to see the cover letter, so it can find + out the patchwork link for the series. + + set-link + Sets the Patchwork link for a series-version manually. + + status (st) + Run 'patman status' on a series. 
This is similar to 'patman status' + except that it can get status on any series, not just the current + branch + + summary + Shows a quick summary of series with their status and description. + + sync + Sync the status of a series with Pathwork, so that + 'patman series progress' can show the right information. + + sync-all + Sync the status of all series. + + +Patman series workflow +~~~~~~~~~~~~~~~~~~~~~~ + +Here is a run-through of how to incorporate 'patman series' into your workflow. + +Firstly, set up your project:: + + patman patchwork set-project U-Boot + +This just tells Patman to look on the Patchwork server for a project of that +name. Internally Patman stores the ID and URL 'link-name' for the project, so it +can access it. + +If you need to use a different patchwork server, use the `--patchwork-url` +option or put the URL in your Patman-settings file. + +Now create a branch. For our example we are going to send out a series related +to video so the branch will be called 'video'. The upstream remove is called +'us':: + + git checkout -b video us/master + +We now have a branch and so we can do some commits:: + + <edit files> + git add ... + <edit files> + git add -u + git commit ... + git commit ... + +We now have a few commits in our 'video' branch. Let's tell patman about it:: + + patman series add + +Like most commands, if no series is given (`patman series -s video add`) then +the current branch is assumed. Since the branch is called 'video' patman knows +that it is version one of the video series. + +You'll likely get a warning that there is no cover letter. Let's add some tags +to the top commit:: + + Series-to: u-boot + Series-cc: ... + Cover-letter: + video: Improve syncing performance with cyclic + +Trying again:: + + patman series add + +You'll likely get a warning that the commits are unmarked. You can either let +patman add Change-Id values itself with the `-m` flag, or tell it not to worry +about it with `-M`. 
You must choose one or the other. Let's leave the commits +unmarked:: + + patman series add -M + +Congratulations, you've now got a patman database! + +Now let's send out the series. We will add tags to the top commit. + +To send it:: + + patman series send + +You should send 'git send-email' start up and you can confirm the sending of +each email. + +After that, patman waits a bit to see if it can find your new series appearing +on Patchwork. With a bit of luck this will only take 20 seconds or so. Then your +series is linked. + +To gather tags (Reviewed-by ...) for your series from patchwork:: + + patman series gather + +Now you can check your progress:: + + patman series progress + +Later on you get some comments, or perhaps you just decide to make a change on +your own. You have several options. + +The first option is that you can just create a new branch:: + + git checkout -b video2 video + +then you can add this 'v2' series to Patman with:: + + patman series add + +The second option is to get patman to create the new 'video2' branch in one +step:: + + patman inc + +The third option is to collect some tags using the 'patman status' command and +put them in a new branch:: + + patman status -d video2 + +One day the fourth option will be to ask patman to collect tags as part of the +'patman inc' command. + +Again, you do your edits, perhaps adding/removing patches, rebasing on -master +and so on. Then, send your v2:: + + patman series send + +Let's say the patches are accepted. You can use:: + + patch series gather + patch series progress + +to check, or:: + + patman series status -cC + +to see comments. You can now archive the series:: + + patman series archive + +At this point you have the basics. Some of the subcommands useful options, so +be sure to check out the help. + +Here is a sample 'progress' view: + +.. image:: pics/patman.jpg + :width: 800 + :alt: Patman showing the progress view + +General points +-------------- + +#. 
When you change back to the us-cmd branch days or weeks later all your + information is still there, safely stored in the commits. You don't need + to remember what version you are up to, who you sent the last lot of patches + to, or anything about the change logs. +#. If you put tags in the subject, patman will Cc the maintainers + automatically in many cases. +#. If you want to keep the commits from each series you sent so that you can + compare change and see what you did, you can either create a new branch for + each version, or just tag the branch before you start changing it: + + .. code-block:: bash + + git tag sent/us-cmd-rfc + # ...later... + git tag sent/us-cmd-v2 + +#. If you want to modify the patches a little before sending, you can do + this in your editor, but be careful! +#. If you want to run git send-email yourself, use the -n flag which will + print out the command line patman would have used. +#. It is a good idea to add the change log info as you change the commit, + not later when you can't remember which patch you changed. You can always + go back and change or remove logs from commits. +#. Some mailing lists have size limits and when we add binary contents to + our patches it's easy to exceed the size limits. Use "--no-binary" to + generate patches without any binary contents. You are supposed to include + a link to a git repository in your "Commit-notes", "Series-notes" or + "Cover-letter" for maintainers to fetch the original commit. +#. Patches will have no changelog entries for revisions where they did not + change. For clarity, if there are no changes for this patch in the most + recent revision of the series, a note will be added. 
For example, a patch + with the following tags in the commit:: + + Series-version: 5 + Series-changes: 2 + - Some change + + Series-changes: 4 + - Another change + + would have a changelog of::: + + (no changes since v4) + + Changes in v4: + - Another change + + Changes in v2: + - Some change + + +Other thoughts +-------------- + +This script has been split into sensible files but still needs work. +Most of these are indicated by a TODO in the code. + +It would be nice if this could handle the In-reply-to side of things. + +The tests are incomplete, as is customary. Use the 'test' subcommand to run +them: + +.. code-block:: bash + + $ tools/patman/patman test + +Note that since the test suite depends on data files only available in +the git checkout, the `test` command is hidden unless `patman` is +invoked from the U-Boot git repository. + +Alternatively, you can run the test suite via Pytest: + +.. code-block:: bash + + $ cd tools/patman && pytest + +Error handling doesn't always produce friendly error messages - e.g. +putting an incorrect tag in a commit may provide a confusing message. + +There might be a few other features not mentioned in this README. They +might be bugs. In particular, tags are case sensitive which is probably +a bad thing. diff --git a/tools/patman/project.py b/tools/patman/project.py new file mode 100644 index 00000000000..e633401e9d6 --- /dev/null +++ b/tools/patman/project.py @@ -0,0 +1,27 @@ +# SPDX-License-Identifier: GPL-2.0+ +# Copyright (c) 2012 The Chromium OS Authors. +# + +import os.path + +from u_boot_pylib import gitutil + +def detect_project(): + """Autodetect the name of the current project. + + This looks for signature files/directories that are unlikely to exist except + in the given project. + + Returns: + The name of the project, like "linux" or "u-boot". Returns "unknown" + if we can't detect the project. 
+ """ + top_level = gitutil.get_top_level() + + if (not top_level or + os.path.exists(os.path.join(top_level, "include", "u-boot"))): + return "u-boot" + elif os.path.exists(os.path.join(top_level, "kernel")): + return "linux" + + return "unknown" diff --git a/tools/patman/pyproject.toml b/tools/patman/pyproject.toml new file mode 100644 index 00000000000..06e169cdf48 --- /dev/null +++ b/tools/patman/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +requires = ["setuptools>=61.0"] +build-backend = "setuptools.build_meta" + +[project] +name = "patch-manager" +version = "0.0.6" +authors = [ + { name="Simon Glass", email="sjg@chromium.org" }, +] +dependencies = ["u_boot_pylib >= 0.0.6", "aiohttp >= 3.9.1" ] +description = "Patman patch manager" +readme = "README.rst" +requires-python = ">=3.7" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)", + "Operating System :: OS Independent", +] + +[project.urls] +"Homepage" = "https://docs.u-boot.org/en/latest/develop/patman.html" +"Bug Tracker" = "https://source.denx.de/groups/u-boot/-/issues" + +[project.scripts] +patman = "patman.__main__:run_patman" + +[tool.setuptools.package-data] +patman = ["*.rst"] diff --git a/tools/patman/pytest.ini b/tools/patman/pytest.ini new file mode 100644 index 00000000000..df3eb518d0f --- /dev/null +++ b/tools/patman/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +addopts = --doctest-modules diff --git a/tools/patman/requirements.txt b/tools/patman/requirements.txt new file mode 100644 index 00000000000..d4fcb1061c2 --- /dev/null +++ b/tools/patman/requirements.txt @@ -0,0 +1,6 @@ +aiohttp==3.10.11 +ConfigParser==7.1.0 +importlib_resources==6.5.2 +pygit2==1.14.1 +requests==2.32.4 +setuptools==78.1.1 diff --git a/tools/patman/send.py b/tools/patman/send.py new file mode 100644 index 00000000000..08a916aff1a --- /dev/null +++ b/tools/patman/send.py @@ -0,0 +1,197 @@ +# SPDX-License-Identifier: GPL-2.0+ +# +# Copyright 
2025 Google LLC +# +"""Handles the 'send' subcommand +""" + +import os +import sys + +from patman import checkpatch +from patman import patchstream +from patman import settings +from u_boot_pylib import gitutil +from u_boot_pylib import terminal + + +def check_patches(series, patch_files, run_checkpatch, verbose, use_tree, cwd): + """Run some checks on a set of patches + + This santiy-checks the patman tags like Series-version and runs the patches + through checkpatch + + Args: + series (Series): Series object for this series (set of patches) + patch_files (list): List of patch filenames, each a string, e.g. + ['0001_xxx.patch', '0002_yyy.patch'] + run_checkpatch (bool): True to run checkpatch.pl + verbose (bool): True to print out every line of the checkpatch output as + it is parsed + use_tree (bool): If False we'll pass '--no-tree' to checkpatch. + cwd (str): Path to use for patch files (None to use current dir) + + Returns: + bool: True if the patches had no errors, False if they did + """ + # Do a few checks on the series + series.DoChecks() + + # Check the patches + if run_checkpatch: + ok = checkpatch.check_patches(verbose, patch_files, use_tree, cwd) + else: + ok = True + return ok + + +def email_patches(col, series, cover_fname, patch_files, process_tags, its_a_go, + ignore_bad_tags, add_maintainers, get_maintainer_script, limit, + dry_run, in_reply_to, thread, smtp_server, cwd=None): + """Email patches to the recipients + + This emails out the patches and cover letter using 'git send-email'. Each + patch is copied to recipients identified by the patch tag and output from + the get_maintainer.pl script. The cover letter is copied to all recipients + of any patch. + + To make this work a CC file is created holding the recipients for each patch + and the cover letter. See the main program 'cc_cmd' for this logic. 
+ + Args: + col (terminal.Color): Colour output object + series (Series): Series object for this series (set of patches) + cover_fname (str): Filename of the cover letter as a string (None if + none) + patch_files (list): List of patch filenames, each a string, e.g. + ['0001_xxx.patch', '0002_yyy.patch'] + process_tags (bool): True to process subject tags in each patch, e.g. + for 'dm: spi: Add SPI support' this would be 'dm' and 'spi'. The + tags are looked up in the configured sendemail.aliasesfile and also + in ~/.patman (see README) + its_a_go (bool): True if we are going to actually send the patches, + False if the patches have errors and will not be sent unless + @ignore_errors + ignore_bad_tags (bool): True to just print a warning for unknown tags, + False to halt with an error + add_maintainers (bool): Run the get_maintainer.pl script for each patch + get_maintainer_script (str): The script used to retrieve which + maintainers to cc + limit (int): Limit on the number of people that can be cc'd on a single + patch or the cover letter (None if no limit) + dry_run (bool): Don't actually email the patches, just print out what + would be sent + in_reply_to (str): If not None we'll pass this to git as --in-reply-to. + Should be a message ID that this is in reply to. 
+ thread (bool): True to add --thread to git send-email (make all patches + reply to cover-letter or first patch in series) + smtp_server (str): SMTP server to use to send patches (None for default) + cwd (str): Path to use for patch files (None to use current dir) + + Return: + Git command that was/would be run + """ + cc_file = series.MakeCcFile(process_tags, cover_fname, not ignore_bad_tags, + add_maintainers, limit, get_maintainer_script, + settings.alias, cwd) + + # Email the patches out (giving the user time to check / cancel) + cmd = '' + if its_a_go: + cmd = gitutil.email_patches( + series, cover_fname, patch_files, dry_run, not ignore_bad_tags, + cc_file, alias=settings.alias, in_reply_to=in_reply_to, + thread=thread, smtp_server=smtp_server, cwd=cwd) + else: + print(col.build(col.RED, "Not sending emails due to errors/warnings")) + + # For a dry run, just show our actions as a sanity check + if dry_run: + series.ShowActions(patch_files, cmd, process_tags, settings.alias) + if not its_a_go: + print(col.build(col.RED, "Email would not be sent")) + + os.remove(cc_file) + return cmd + + +def prepare_patches(col, branch, count, start, end, ignore_binary, signoff, + keep_change_id=False, git_dir=None, cwd=None): + """Figure out what patches to generate, then generate them + + The patch files are written to the current directory, e.g. 0001_xxx.patch + 0002_yyy.patch + + Args: + col (terminal.Color): Colour output object + branch (str): Branch to create patches from (None = current) + count (int): Number of patches to produce, or -1 to produce patches for + the current branch back to the upstream commit + start (int): Start patch to use (0=first / top of branch) + end (int): End patch to use (0=last one in series, 1=one before that, + etc.) + ignore_binary (bool): Don't generate patches for binary files + keep_change_id (bool): Preserve the Change-Id tag. 
+ git_dir (str): Path to git repository (None to use default) + cwd (str): Path to use for git operations (None to use current dir) + + Returns: + Tuple: + Series object for this series (set of patches) + Filename of the cover letter as a string (None if none) + patch_files: List of patch filenames, each a string, e.g. + ['0001_xxx.patch', '0002_yyy.patch'] + """ + if count == -1: + # Work out how many patches to send if we can + count = (gitutil.count_commits_to_branch(branch, git_dir=git_dir) - + start) + + if not count: + msg = 'No commits found to process - please use -c flag, or run:\n' \ + ' git branch --set-upstream-to remote/branch' + sys.exit(col.build(col.RED, msg)) + + # Read the metadata from the commits + to_do = count - end + series = patchstream.get_metadata(branch, start, to_do, git_dir) + cover_fname, patch_files = gitutil.create_patches( + branch, start, to_do, ignore_binary, series, signoff, git_dir=git_dir, + cwd=cwd) + + # Fix up the patch files to our liking, and insert the cover letter + patchstream.fix_patches(series, patch_files, keep_change_id, + insert_base_commit=not cover_fname, cwd=cwd) + if cover_fname and series.get('cover'): + patchstream.insert_cover_letter(cover_fname, series, to_do, cwd=cwd) + return series, cover_fname, patch_files + + +def send(args, git_dir=None, cwd=None): + """Create, check and send patches by email + + Args: + args (argparse.Namespace): Arguments to patman + cwd (str): Path to use for git operations + + Return: + bool: True if the patches were likely sent, else False + """ + col = terminal.Color() + series, cover_fname, patch_files = prepare_patches( + col, args.branch, args.count, args.start, args.end, + args.ignore_binary, args.add_signoff, + keep_change_id=args.keep_change_id, git_dir=git_dir, cwd=cwd) + ok = check_patches(series, patch_files, args.check_patch, + args.verbose, args.check_patch_use_tree, cwd) + + ok = ok and gitutil.check_suppress_cc_config() + + its_a_go = ok or args.ignore_errors + cmd 
= email_patches( + col, series, cover_fname, patch_files, args.process_tags, + its_a_go, args.ignore_bad_tags, args.add_maintainers, + args.get_maintainer_script, args.limit, args.dry_run, + args.in_reply_to, args.thread, args.smtp_server, cwd=cwd) + + return cmd and its_a_go and not args.dry_run diff --git a/tools/patman/series.py b/tools/patman/series.py new file mode 100644 index 00000000000..ad61bbfa399 --- /dev/null +++ b/tools/patman/series.py @@ -0,0 +1,487 @@ +# SPDX-License-Identifier: GPL-2.0+ +# Copyright (c) 2011 The Chromium OS Authors. +# + +from __future__ import print_function + +import collections +import concurrent.futures +import itertools +import os +import sys +import time + +from patman import get_maintainer +from patman import settings +from u_boot_pylib import gitutil +from u_boot_pylib import terminal +from u_boot_pylib import tools + +# Series-xxx tags that we understand +valid_series = ['to', 'cc', 'version', 'changes', 'prefix', 'notes', 'name', + 'cover_cc', 'process_log', 'links', 'patchwork_url', 'postfix'] + +class Series(dict): + """Holds information about a patch series, including all tags. 
+ + Vars: + cc (list of str): Aliases/emails to Cc all patches to + to (list of str): Aliases/emails to send patches to + commits (list of Commit): Commit objects, one for each patch + cover (list of str): Lines in the cover letter + notes (list of str): Lines in the notes + changes: (dict) List of changes for each version: + key (int): version number + value: tuple: + commit (Commit): Commit this relates to, or None if related to a + cover letter + info (str): change lines for this version (separated by \n) + allow_overwrite (bool): Allow tags to overwrite an existing tag + base_commit (Commit): Commit object at the base of this series + branch (str): Branch name of this series + desc (str): Description of the series (cover-letter title) + idnum (int or None): Database rowid + name (str): Series name, typically the branch name without any numeric + suffix + _generated_cc (dict) written in MakeCcFile() + key: name of patch file + value: list of email addresses + """ + def __init__(self): + self.cc = [] + self.to = [] + self.cover_cc = [] + self.commits = [] + self.cover = None + self.notes = [] + self.changes = {} + self.allow_overwrite = False + self.base_commit = None + self.branch = None + self.desc = '' + self.idnum = None + self.name = None + self._generated_cc = {} + + # These make us more like a dictionary + def __setattr__(self, name, value): + self[name] = value + + def __getattr__(self, name): + return self[name] + + @staticmethod + def from_fields(idnum, name, desc): + ser = Series() + ser.idnum = idnum + ser.name = name + ser.desc = desc + return ser + + def AddTag(self, commit, line, name, value): + """Add a new Series-xxx tag along with its value. 
+ + Args: + line: Source line containing tag (useful for debug/error messages) + name: Tag name (part after 'Series-') + value: Tag value (part after 'Series-xxx: ') + + Returns: + String warning if something went wrong, else None + """ + # If we already have it, then add to our list + name = name.replace('-', '_') + if name in self and not self.allow_overwrite: + values = value.split(',') + values = [str.strip() for str in values] + if type(self[name]) != type([]): + raise ValueError("In %s: line '%s': Cannot add another value " + "'%s' to series '%s'" % + (commit.hash, line, values, self[name])) + self[name] += values + + # Otherwise just set the value + elif name in valid_series: + if name=="notes": + self[name] = [value] + else: + self[name] = value + else: + return ("In %s: line '%s': Unknown 'Series-%s': valid " + "options are %s" % (commit.hash, line, name, + ', '.join(valid_series))) + return None + + def AddCommit(self, commit): + """Add a commit into our list of commits + + We create a list of tags in the commit subject also. + + Args: + commit: Commit object to add + """ + commit.check_tags() + self.commits.append(commit) + + def ShowActions(self, args, cmd, process_tags, alias): + """Show what actions we will/would perform + + Args: + args: List of patch files we created + cmd: The git command we would have run + process_tags: Process tags as if they were aliases + alias (dict): Alias dictionary + key: alias + value: list of aliases or email addresses + """ + to_set = set(gitutil.build_email_list(self.to, alias)); + cc_set = set(gitutil.build_email_list(self.cc, alias)); + + col = terminal.Color() + print('Dry run, so not doing much. But I would do this:') + print() + print('Send a total of %d patch%s with %scover letter.' 
% ( + len(args), '' if len(args) == 1 else 'es', + self.get('cover') and 'a ' or 'no ')) + + # TODO: Colour the patches according to whether they passed checks + for upto in range(len(args)): + commit = self.commits[upto] + print(col.build(col.GREEN, ' %s' % args[upto])) + cc_list = list(self._generated_cc[commit.patch]) + for email in sorted(set(cc_list) - to_set - cc_set): + if email == None: + email = col.build(col.YELLOW, '<alias not found>') + if email: + print(' Cc: ', email) + print + for item in sorted(to_set): + print('To:\t ', item) + for item in sorted(cc_set - to_set): + print('Cc:\t ', item) + print('Version: ', self.get('version')) + print('Prefix:\t ', self.get('prefix')) + print('Postfix:\t ', self.get('postfix')) + if self.cover: + print('Cover: %d lines' % len(self.cover)) + cover_cc = gitutil.build_email_list(self.get('cover_cc', ''), + alias) + all_ccs = itertools.chain(cover_cc, *self._generated_cc.values()) + for email in sorted(set(all_ccs) - to_set - cc_set): + print(' Cc: ', email) + if cmd: + print('Git command: %s' % cmd) + + def MakeChangeLog(self, commit): + """Create a list of changes for each version. 
+ + Return: + The change log as a list of strings, one per line + + Changes in v4: + - Jog the dial back closer to the widget + + Changes in v2: + - Fix the widget + - Jog the dial + + If there are no new changes in a patch, a note will be added + + (no changes since v2) + + Changes in v2: + - Fix the widget + - Jog the dial + """ + # Collect changes from the series and this commit + changes = collections.defaultdict(list) + for version, changelist in self.changes.items(): + changes[version] += changelist + if commit: + for version, changelist in commit.changes.items(): + changes[version] += [[commit, text] for text in changelist] + + versions = sorted(changes, reverse=True) + newest_version = 1 + if 'version' in self: + newest_version = max(newest_version, int(self.version)) + if versions: + newest_version = max(newest_version, versions[0]) + + final = [] + process_it = self.get('process_log', '').split(',') + process_it = [item.strip() for item in process_it] + need_blank = False + for version in versions: + out = [] + for this_commit, text in changes[version]: + if commit and this_commit != commit: + continue + if 'uniq' not in process_it or text not in out: + out.append(text) + if 'sort' in process_it: + out = sorted(out) + have_changes = len(out) > 0 + line = 'Changes in v%d:' % version + if have_changes: + out.insert(0, line) + if version < newest_version and len(final) == 0: + out.insert(0, '') + out.insert(0, '(no changes since v%d)' % version) + newest_version = 0 + # Only add a new line if we output something + if need_blank: + out.insert(0, '') + need_blank = False + final += out + need_blank = need_blank or have_changes + + if len(final) > 0: + final.append('') + elif newest_version != 1: + final = ['(no changes since v1)', ''] + return final + + def DoChecks(self): + """Check that each version has a change log + + Print an error if something is wrong. 
+ """ + col = terminal.Color() + if self.get('version'): + changes_copy = dict(self.changes) + for version in range(1, int(self.version) + 1): + if self.changes.get(version): + del changes_copy[version] + else: + if version > 1: + str = 'Change log missing for v%d' % version + print(col.build(col.RED, str)) + for version in changes_copy: + str = 'Change log for unknown version v%d' % version + print(col.build(col.RED, str)) + elif self.changes: + str = 'Change log exists, but no version is set' + print(col.build(col.RED, str)) + + def GetCcForCommit(self, commit, process_tags, warn_on_error, + add_maintainers, limit, get_maintainer_script, + all_skips, alias, cwd): + """Get the email CCs to use with a particular commit + + Uses subject tags and get_maintainers.pl script to find people to cc + on a patch + + Args: + commit (Commit): Commit to process + process_tags (bool): Process tags as if they were aliases + warn_on_error (bool): True to print a warning when an alias fails to + match, False to ignore it. + add_maintainers (bool or list of str): Either: + True/False to call the get_maintainers to CC maintainers + List of maintainers to include (for testing) + limit (int): Limit the length of the Cc list (None if no limit) + get_maintainer_script (str): The file name of the get_maintainer.pl + script (or compatible). + all_skips (set of str): Updated to include the set of bouncing email + addresses that were dropped from the output. This is essentially + a return value from this function. 
+ alias (dict): Alias dictionary + key: alias + value: list of aliases or email addresses + cwd (str): Path to use for patch filenames (None to use current dir) + + Returns: + list of str: List of email addresses to cc + """ + cc = [] + if process_tags: + cc += gitutil.build_email_list(commit.tags, alias, + warn_on_error=warn_on_error) + cc += gitutil.build_email_list(commit.cc_list, alias, + warn_on_error=warn_on_error) + if type(add_maintainers) == type(cc): + cc += add_maintainers + elif add_maintainers: + fname = os.path.join(cwd or '', commit.patch) + cc += get_maintainer.get_maintainer(get_maintainer_script, fname) + all_skips |= set(cc) & set(settings.bounces) + cc = list(set(cc) - set(settings.bounces)) + if limit is not None: + cc = cc[:limit] + return cc + + def MakeCcFile(self, process_tags, cover_fname, warn_on_error, + add_maintainers, limit, get_maintainer_script, alias, + cwd=None): + """Make a cc file for us to use for per-commit Cc automation + + Also stores in self._generated_cc to make ShowActions() faster. + + Args: + process_tags (bool): Process tags as if they were aliases + cover_fname (str): If non-None the name of the cover letter. + warn_on_error (bool): True to print a warning when an alias fails to + match, False to ignore it. + add_maintainers (bool or list of str): Either: + True/False to call the get_maintainers to CC maintainers + List of maintainers to include (for testing) + limit (int): Limit the length of the Cc list (None if no limit) + get_maintainer_script (str): The file name of the get_maintainer.pl + script (or compatible). 
+ alias (dict): Alias dictionary + key: alias + value: list of aliases or email addresses + cwd (str): Path to use for patch filenames (None to use current dir) + Return: + Filename of temp file created + """ + col = terminal.Color() + # Look for commit tags (of the form 'xxx:' at the start of the subject) + fname = '/tmp/patman.%d' % os.getpid() + fd = open(fname, 'w', encoding='utf-8') + all_ccs = [] + all_skips = set() + with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor: + for i, commit in enumerate(self.commits): + commit.seq = i + commit.future = executor.submit( + self.GetCcForCommit, commit, process_tags, warn_on_error, + add_maintainers, limit, get_maintainer_script, all_skips, + alias, cwd) + + # Show progress any commits that are taking forever + lastlen = 0 + while True: + left = [commit for commit in self.commits + if not commit.future.done()] + if not left: + break + names = ', '.join(f'{c.seq + 1}:{c.subject}' + for c in left[:2]) + out = f'\r{len(left)} remaining: {names}'[:79] + spaces = ' ' * (lastlen - len(out)) + if lastlen: # Don't print anything the first time + print(out, spaces, end='') + sys.stdout.flush() + lastlen = len(out) + time.sleep(.25) + print(f'\rdone{" " * lastlen}\r', end='') + print('Cc processing complete') + + for commit in self.commits: + cc = commit.future.result() + all_ccs += cc + print(commit.patch, '\0'.join(sorted(set(cc))), file=fd) + self._generated_cc[commit.patch] = cc + + for x in sorted(all_skips): + print(col.build(col.YELLOW, f'Skipping "{x}"')) + + if cover_fname: + cover_cc = gitutil.build_email_list( + self.get('cover_cc', ''), alias) + cover_cc = list(set(cover_cc + all_ccs)) + if limit is not None: + cover_cc = cover_cc[:limit] + cc_list = '\0'.join([x for x in sorted(cover_cc)]) + print(cover_fname, cc_list, file=fd) + + fd.close() + return fname + + def AddChange(self, version, commit, info): + """Add a new change line to a version. + + This will later appear in the change log. 
+ + Args: + version (int): version number to add change list to + commit (Commit): Commit this relates to, or None if related to a + cover letter + info (str): change lines for this version (separated by \n) + """ + if not self.changes.get(version): + self.changes[version] = [] + self.changes[version].append([commit, info]) + + def GetPatchPrefix(self): + """Get the patch version string + + Return: + Patch string, like 'RFC PATCH v5' or just 'PATCH' + """ + git_prefix = gitutil.get_default_subject_prefix() + if git_prefix: + git_prefix = '%s][' % git_prefix + else: + git_prefix = '' + + version = '' + if self.get('version'): + version = ' v%s' % self['version'] + + # Get patch name prefix + prefix = '' + if self.get('prefix'): + prefix = '%s ' % self['prefix'] + + postfix = '' + if self.get('postfix'): + postfix = ' %s' % self['postfix'] + return '%s%sPATCH%s%s' % (git_prefix, prefix, postfix, version) + + def get_links(self, links_str=None, cur_version=None): + """Look up the patchwork links for each version + + Args: + links_str (str): Links string to parse, or None to use self.links + cur_version (int): Default version to assume for un-versioned links, + or None to use self.version + + Return: + dict: + key (int): Version number + value (str): Link string + """ + if links_str is None: + links_str = self.links if 'links' in self else '' + if cur_version is None: + cur_version = int(self.version) if 'version' in self else 1 + assert isinstance(cur_version, int) + links = {} + for item in links_str.split(): + if ':' in item: + version, link = item.split(':') + links[int(version)] = link + else: + links[cur_version] = item + return links + + def build_links(self, links): + """Build a string containing the links + + Args: + links (dict): + key (int): Version number + value (str): Link string + + Return: + str: Link string, e.g. 
'2:4433 1:2872' + """ + out = '' + for vers in sorted(links.keys(), reverse=True): + out += f' {vers}:{links[vers]}' + return out[1:] + + def get_link_for_version(self, find_vers, links_str=None): + """Look up the patchwork link for a particular version + + Args: + find_vers (int): Version to find + links_str (str): Links string to parse, or None to use self.links + + Return: + str: Series-links entry for that version, or None if not found + """ + return self.get_links(links_str).get(find_vers) diff --git a/tools/patman/settings.py b/tools/patman/settings.py new file mode 100644 index 00000000000..17229e0d823 --- /dev/null +++ b/tools/patman/settings.py @@ -0,0 +1,444 @@ +# SPDX-License-Identifier: GPL-2.0+ +# Copyright (c) 2011 The Chromium OS Authors. +# Copyright (c) 2022 Maxim Cournoyer <maxim.cournoyer@savoirfairelinux.com> +# + +try: + import configparser as ConfigParser +except Exception: + import ConfigParser + +import argparse +from io import StringIO +import os +import re +import sys + +from u_boot_pylib import gitutil + +"""Default settings per-project. + +These are used by _ProjectConfigParser. Settings names should match +the "dest" of the option parser from patman.py. +""" +_default_settings = { + "u-boot": {}, + "linux": { + "process_tags": "False", + "check_patch_use_tree": "True", + }, + "gcc": { + "process_tags": "False", + "add_signoff": "False", + "check_patch": "False", + }, +} + + +class _ProjectConfigParser(ConfigParser.ConfigParser): + """ConfigParser that handles projects. + + There are two main goals of this class: + - Load project-specific default settings. + - Merge general default settings/aliases with project-specific ones. + + # Sample config used for tests below... + >>> from io import StringIO + >>> sample_config = ''' + ... [alias] + ... me: Peter P. <likesspiders@example.com> + ... enemies: Evil <evil@example.com> + ... + ... [sm_alias] + ... enemies: Green G. <ugly@example.com> + ... + ... [sm2_alias] + ... enemies: Doc O. 
<pus@example.com> + ... + ... [settings] + ... am_hero: True + ... ''' + + # Check to make sure that bogus project gets general alias. + >>> config = _ProjectConfigParser("zzz") + >>> config.read_file(StringIO(sample_config)) + >>> str(config.get("alias", "enemies")) + 'Evil <evil@example.com>' + + # Check to make sure that alias gets overridden by project. + >>> config = _ProjectConfigParser("sm") + >>> config.read_file(StringIO(sample_config)) + >>> str(config.get("alias", "enemies")) + 'Green G. <ugly@example.com>' + + # Check to make sure that settings get merged with project. + >>> config = _ProjectConfigParser("linux") + >>> config.read_file(StringIO(sample_config)) + >>> sorted((str(a), str(b)) for (a, b) in config.items("settings")) + [('am_hero', 'True'), ('check_patch_use_tree', 'True'), ('process_tags', 'False')] + + # Check to make sure that settings works with unknown project. + >>> config = _ProjectConfigParser("unknown") + >>> config.read_file(StringIO(sample_config)) + >>> sorted((str(a), str(b)) for (a, b) in config.items("settings")) + [('am_hero', 'True')] + """ + def __init__(self, project_name): + """Construct _ProjectConfigParser. + + In addition to standard ConfigParser initialization, this also + loads project defaults. + + Args: + project_name: The name of the project. + """ + self._project_name = project_name + ConfigParser.ConfigParser.__init__(self) + + # Update the project settings in the config based on + # the _default_settings global. + project_settings = "%s_settings" % project_name + if not self.has_section(project_settings): + self.add_section(project_settings) + project_defaults = _default_settings.get(project_name, {}) + for setting_name, setting_value in project_defaults.items(): + self.set(project_settings, setting_name, setting_value) + + def get(self, section, option, *args, **kwargs): + """Extend ConfigParser to try project_section before section. + + Args: + See ConfigParser. + Returns: + See ConfigParser. 
+ """ + try: + val = ConfigParser.ConfigParser.get( + self, "%s_%s" % (self._project_name, section), option, + *args, **kwargs + ) + except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): + val = ConfigParser.ConfigParser.get( + self, section, option, *args, **kwargs + ) + return val + + def items(self, section, *args, **kwargs): + """Extend ConfigParser to add project_section to section. + + Args: + See ConfigParser. + Returns: + See ConfigParser. + """ + project_items = [] + has_project_section = False + top_items = [] + + # Get items from the project section + try: + project_items = ConfigParser.ConfigParser.items( + self, "%s_%s" % (self._project_name, section), *args, **kwargs + ) + has_project_section = True + except ConfigParser.NoSectionError: + pass + + # Get top-level items + try: + top_items = ConfigParser.ConfigParser.items( + self, section, *args, **kwargs + ) + except ConfigParser.NoSectionError: + # If neither section exists raise the error on... + if not has_project_section: + raise + + item_dict = dict(top_items) + item_dict.update(project_items) + return {(item, val) for item, val in item_dict.items()} + + +def ReadGitAliases(fname): + """Read a git alias file. 
This is in the form used by git: + + alias uboot u-boot@lists.denx.de + alias wd Wolfgang Denk <wd@denx.de> + + Args: + fname: Filename to read + """ + try: + fd = open(fname, 'r', encoding='utf-8') + except IOError: + print("Warning: Cannot find alias file '%s'" % fname) + return + + re_line = re.compile(r'alias\s+(\S+)\s+(.*)') + for line in fd.readlines(): + line = line.strip() + if not line or line[0] == '#': + continue + + m = re_line.match(line) + if not m: + print("Warning: Alias file line '%s' not understood" % line) + continue + + list = alias.get(m.group(1), []) + for item in m.group(2).split(','): + item = item.strip() + if item: + list.append(item) + alias[m.group(1)] = list + + fd.close() + + +def CreatePatmanConfigFile(config_fname): + """Creates a config file under $(HOME)/.patman if it can't find one. + + Args: + config_fname: Default config filename i.e., $(HOME)/.patman + + Returns: + None + """ + name = gitutil.get_default_user_name() + if name is None: + name = input("Enter name: ") + + email = gitutil.get_default_user_email() + + if email is None: + email = input("Enter email: ") + + try: + f = open(config_fname, 'w') + except IOError: + print("Couldn't create patman config file\n") + raise + + print('''[alias] +me: %s <%s> + +[bounces] +nxp = Zhikang Zhang <zhikang.zhang@nxp.com> +''' % (name, email), file=f) + f.close() + + +def _UpdateDefaults(main_parser, config, argv): + """Update the given OptionParser defaults based on config. + + We'll walk through all of the settings from all parsers. + For each setting we'll look for a default in the option parser. + If it's found we'll update the option parser default. + + The idea here is that the .patman file should be able to update + defaults but that command line flags should still have the final + say. + + Args: + parser: An instance of an ArgumentParser whose defaults will be + updated. + config: An instance of _ProjectConfigParser that we will query + for settings. 
+ argv (list of str or None): Arguments to parse + """ + # Find all the parsers and subparsers + parsers = [main_parser] + parsers += [subparser for action in main_parser._actions + if isinstance(action, argparse._SubParsersAction) + for _, subparser in action.choices.items()] + + # Collect the defaults from each parser + defaults = {} + parser_defaults = [] + argv = list(argv) + orig_argv = argv + + bad = False + full_parser_list = [] + for parser in parsers: + argv_list = [orig_argv] + special_cases = [] + if hasattr(parser, 'defaults_cmds'): + special_cases = parser.defaults_cmds + for action in parser._actions: + if action.choices: + argv_list = [] + for choice in action.choices: + argv = None + for case in special_cases: + if case[0] == choice: + argv = case + argv_list.append(argv or [choice]) + + for argv in argv_list: + parser.message = None + old_val = parser.catch_error + try: + parser.catch_error = True + pdefs = parser.parse_known_args(argv)[0] + finally: + parser.catch_error = old_val + + # if parser.message: + # print('bad', argv, parser.message) + # bad = True + + parser_defaults.append(pdefs) + defaults.update(vars(pdefs)) + full_parser_list.append(parser) + if bad: + print('Internal parsing error') + sys.exit(1) + + # Go through the settings and collect defaults + for name, val in config.items('settings'): + if name in defaults: + default_val = defaults[name] + if isinstance(default_val, bool): + val = config.getboolean('settings', name) + elif isinstance(default_val, int): + val = config.getint('settings', name) + elif isinstance(default_val, str): + val = config.get('settings', name) + defaults[name] = val + else: + print("WARNING: Unknown setting %s" % name) + if 'cmd' in defaults: + del defaults['cmd'] + if 'subcmd' in defaults: + del defaults['subcmd'] + + # Set all the defaults and manually propagate them to subparsers + main_parser.set_defaults(**defaults) + assert len(full_parser_list) == len(parser_defaults) + for parser, pdefs in 
zip(full_parser_list, parser_defaults): + parser.set_defaults(**{k: v for k, v in defaults.items() + if k in pdefs}) + return defaults + + +def _ReadAliasFile(fname): + """Read in the U-Boot git alias file if it exists. + + Args: + fname: Filename to read. + """ + if os.path.exists(fname): + bad_line = None + with open(fname, encoding='utf-8') as fd: + linenum = 0 + for line in fd: + linenum += 1 + line = line.strip() + if not line or line.startswith('#'): + continue + words = line.split(None, 2) + if len(words) < 3 or words[0] != 'alias': + if not bad_line: + bad_line = "%s:%d:Invalid line '%s'" % (fname, linenum, + line) + continue + alias[words[1]] = [s.strip() for s in words[2].split(',')] + if bad_line: + print(bad_line) + + +def _ReadBouncesFile(fname): + """Read in the bounces file if it exists + + Args: + fname: Filename to read. + """ + if os.path.exists(fname): + with open(fname) as fd: + for line in fd: + if line.startswith('#'): + continue + bounces.add(line.strip()) + + +def GetItems(config, section): + """Get the items from a section of the config. + + Args: + config: _ProjectConfigParser object containing settings + section: name of section to retrieve + + Returns: + List of (name, value) tuples for the section + """ + try: + return config.items(section) + except ConfigParser.NoSectionError: + return [] + + +def Setup(parser, project_name, argv, config_fname=None): + """Set up the settings module by reading config files. + + Unless `config_fname` is specified, a `.patman` config file local + to the git repository is consulted, followed by the global + `$HOME/.patman`. If none exists, the later is created. Values + defined in the local config file take precedence over those + defined in the global one. + + Args: + parser: The parser to update. + project_name: Name of project that we're working on; we'll look + for sections named "project_section" as well. + config_fname: Config filename to read, or None for default, or False + for an empty config. 
An error is raised if it does not exist. + argv (list of str or None): Arguments to parse, or None for default + """ + # First read the git alias file if available + _ReadAliasFile('doc/git-mailrc') + config = _ProjectConfigParser(project_name) + + if config_fname and not os.path.exists(config_fname): + raise Exception(f'provided {config_fname} does not exist') + + if config_fname is None: + config_fname = '%s/.patman' % os.getenv('HOME') + git_local_config_fname = os.path.join(gitutil.get_top_level() or '', + '.patman') + + has_config = False + has_git_local_config = False + if config_fname is not False: + has_config = os.path.exists(config_fname) + has_git_local_config = os.path.exists(git_local_config_fname) + + # Read the git local config last, so that its values override + # those of the global config, if any. + if has_config: + config.read(config_fname) + if has_git_local_config: + config.read(git_local_config_fname) + + if config_fname is not False and not (has_config or has_git_local_config): + print("No config file found.\nCreating ~/.patman...\n") + CreatePatmanConfigFile(config_fname) + + for name, value in GetItems(config, 'alias'): + alias[name] = value.split(',') + + _ReadBouncesFile('doc/bounces') + for name, value in GetItems(config, 'bounces'): + bounces.add(value) + + return _UpdateDefaults(parser, config, argv) + + +# These are the aliases we understand, indexed by alias. Each member is a list. 
+alias = {} +bounces = set() + +if __name__ == "__main__": + import doctest + + doctest.testmod() diff --git a/tools/patman/setup.py b/tools/patman/setup.py new file mode 100644 index 00000000000..bcaad69a1c2 --- /dev/null +++ b/tools/patman/setup.py @@ -0,0 +1,11 @@ +# SPDX-License-Identifier: GPL-2.0+ + +from setuptools import setup +setup(name='patman', + version='1.0', + scripts=['patman'], + packages=['patman'], + package_dir={'patman': ''}, + package_data={'patman': ['README.rst']}, + classifiers=['Environment :: Console', + 'Topic :: Software Development']) diff --git a/tools/patman/status.py b/tools/patman/status.py new file mode 100644 index 00000000000..967fef3ad6e --- /dev/null +++ b/tools/patman/status.py @@ -0,0 +1,405 @@ +# SPDX-License-Identifier: GPL-2.0+ +# +# Copyright 2020 Google LLC +# +"""Talks to the patchwork service to figure out what patches have been reviewed +and commented on. Provides a way to display review tags and comments. +Allows creation of a new branch based on the old but with the review tags +collected from patchwork. +""" + +import asyncio +from collections import defaultdict +import concurrent.futures +from itertools import repeat + +import aiohttp +import pygit2 + +from u_boot_pylib import terminal +from u_boot_pylib import tout +from patman import patchstream +from patman import patchwork + + +def process_reviews(content, comment_data, base_rtags): + """Process and return review data + + Args: + content (str): Content text of the patch itself - see pwork.get_patch() + comment_data (list of dict): Comments for the patch - see + pwork._get_patch_comments() + base_rtags (dict): base review tags (before any comments) + key: Response tag (e.g. 'Reviewed-by') + value: Set of people who gave that response, each a name/email + string + + Return: tuple: + dict: new review tags (noticed since the base_rtags) + key: Response tag (e.g. 
'Reviewed-by') + value: Set of people who gave that response, each a name/email + string + list of patchwork.Review: reviews received on the patch + """ + pstrm = patchstream.PatchStream.process_text(content, True) + rtags = defaultdict(set) + for response, people in pstrm.commit.rtags.items(): + rtags[response].update(people) + + reviews = [] + for comment in comment_data: + pstrm = patchstream.PatchStream.process_text(comment['content'], True) + if pstrm.snippets: + submitter = comment['submitter'] + person = f"{submitter['name']} <{submitter['email']}>" + reviews.append(patchwork.Review(person, pstrm.snippets)) + for response, people in pstrm.commit.rtags.items(): + rtags[response].update(people) + + # Find the tags that are not in the commit + new_rtags = defaultdict(set) + for tag, people in rtags.items(): + for who in people: + is_new = (tag not in base_rtags or + who not in base_rtags[tag]) + if is_new: + new_rtags[tag].add(who) + return new_rtags, reviews + + +def compare_with_series(series, patches): + """Compare a list of patches with a series it came from + + This prints any problems as warnings + + Args: + series (Series): Series to compare against + patches (list of Patch): list of Patch objects to compare with + + Returns: + tuple + dict: + key: Commit number (0...n-1) + value: Patch object for that commit + dict: + key: Patch number (0...n-1) + value: Commit object for that patch + """ + # Check the names match + warnings = [] + patch_for_commit = {} + all_patches = set(patches) + for seq, cmt in enumerate(series.commits): + pmatch = [p for p in all_patches if p.subject == cmt.subject] + if len(pmatch) == 1: + patch_for_commit[seq] = pmatch[0] + all_patches.remove(pmatch[0]) + elif len(pmatch) > 1: + warnings.append("Multiple patches match commit %d ('%s'):\n %s" % + (seq + 1, cmt.subject, + '\n '.join([p.subject for p in pmatch]))) + else: + warnings.append("Cannot find patch for commit %d ('%s')" % + (seq + 1, cmt.subject)) + + # Check the names 
match + commit_for_patch = {} + all_commits = set(series.commits) + for seq, patch in enumerate(patches): + cmatch = [c for c in all_commits if c.subject == patch.subject] + if len(cmatch) == 1: + commit_for_patch[seq] = cmatch[0] + all_commits.remove(cmatch[0]) + elif len(cmatch) > 1: + warnings.append("Multiple commits match patch %d ('%s'):\n %s" % + (seq + 1, patch.subject, + '\n '.join([c.subject for c in cmatch]))) + else: + warnings.append("Cannot find commit for patch %d ('%s')" % + (seq + 1, patch.subject)) + + return patch_for_commit, commit_for_patch, warnings + + +def show_responses(col, rtags, indent, is_new): + """Show rtags collected + + Args: + col (terminal.Colour): Colour object to use + rtags (dict): review tags to show + key: Response tag (e.g. 'Reviewed-by') + value: Set of people who gave that response, each a name/email string + indent (str): Indentation string to write before each line + is_new (bool): True if this output should be highlighted + + Returns: + int: Number of review tags displayed + """ + count = 0 + for tag in sorted(rtags.keys()): + people = rtags[tag] + for who in sorted(people): + terminal.tprint(indent + '%s %s: ' % ('+' if is_new else ' ', tag), + newline=False, colour=col.GREEN, bright=is_new, + col=col) + terminal.tprint(who, colour=col.WHITE, bright=is_new, col=col) + count += 1 + return count + +def create_branch(series, new_rtag_list, branch, dest_branch, overwrite, + repo=None): + """Create a new branch with review tags added + + Args: + series (Series): Series object for the existing branch + new_rtag_list (list): List of review tags to add, one for each commit, + each a dict: + key: Response tag (e.g. 
'Reviewed-by') + value: Set of people who gave that response, each a name/email + string + branch (str): Existing branch to update + dest_branch (str): Name of new branch to create + overwrite (bool): True to force overwriting dest_branch if it exists + repo (pygit2.Repository): Repo to use (use None unless testing) + + Returns: + int: Total number of review tags added across all commits + + Raises: + ValueError: if the destination branch name is the same as the original + branch, or it already exists and @overwrite is False + """ + if branch == dest_branch: + raise ValueError( + 'Destination branch must not be the same as the original branch') + if not repo: + repo = pygit2.Repository('.') + count = len(series.commits) + new_br = repo.branches.get(dest_branch) + if new_br: + if not overwrite: + raise ValueError("Branch '%s' already exists (-f to overwrite)" % + dest_branch) + new_br.delete() + if not branch: + branch = 'HEAD' + target = repo.revparse_single('%s~%d' % (branch, count)) + repo.branches.local.create(dest_branch, target) + + num_added = 0 + for seq in range(count): + parent = repo.branches.get(dest_branch) + cherry = repo.revparse_single('%s~%d' % (branch, count - seq - 1)) + + repo.merge_base(cherry.oid, parent.target) + base_tree = cherry.parents[0].tree + + index = repo.merge_trees(base_tree, parent, cherry) + tree_id = index.write_tree(repo) + + lines = [] + if new_rtag_list[seq]: + for tag, people in new_rtag_list[seq].items(): + for who in people: + lines.append('%s: %s' % (tag, who)) + num_added += 1 + message = patchstream.insert_tags(cherry.message.rstrip(), + sorted(lines)) + + repo.create_commit( + parent.name, cherry.author, cherry.committer, message, tree_id, + [parent.target]) + return num_added + + +def check_patch_count(num_commits, num_patches): + """Check the number of commits and patches agree + + Args: + num_commits (int): Number of commits + num_patches (int): Number of patches + """ + if num_patches != num_commits: + 
tout.warning(f'Warning: Patchwork reports {num_patches} patches, ' + f'series has {num_commits}') + + +def do_show_status(series, cover, patches, show_comments, show_cover_comments, + col, warnings_on_stderr=True): + """Check the status of a series on Patchwork + + This finds review tags and comments for a series in Patchwork, displaying + them to show what is new compared to the local series. + + Args: + series (Series): Series object for the existing branch + cover (COVER): Cover letter info, or None if none + patches (list of Patch): Patches sorted by sequence number + show_comments (bool): True to show the comments on each patch + show_cover_comments (bool): True to show the comments on the + letter + col (terminal.Colour): Colour object + + Return: tuple: + int: Number of new review tags to add + list: List of review tags to add, one item for each commit, each a + dict: + key: Response tag (e.g. 'Reviewed-by') + value: Set of people who gave that response, each a name/email + string + """ + compare = [] + for pw_patch in patches: + patch = patchwork.Patch(pw_patch.id) + patch.parse_subject(pw_patch.series_data['name']) + compare.append(patch) + + count = len(series.commits) + new_rtag_list = [None] * count + review_list = [None] * count + + with terminal.pager(): + patch_for_commit, _, warnings = compare_with_series(series, compare) + for warn in warnings: + tout.do_output(tout.WARNING if warnings_on_stderr else tout.INFO, + warn) + + for seq, pw_patch in enumerate(patches): + compare[seq].patch = pw_patch + + for i in range(count): + pat = patch_for_commit.get(i) + if pat: + patch_data = pat.patch.data + comment_data = pat.patch.comments + new_rtag_list[i], review_list[i] = process_reviews( + patch_data['content'], comment_data, + series.commits[i].rtags) + num_to_add = _do_show_status( + series, cover, patch_for_commit, show_comments, + show_cover_comments, new_rtag_list, review_list, col) + + return num_to_add, new_rtag_list + + +def _do_show_status(series, 
cover, patch_for_commit, show_comments, + show_cover_comments, new_rtag_list, review_list, col): + if cover and show_cover_comments: + terminal.tprint(f'Cov {cover.name}', colour=col.BLACK, col=col, + bright=False, back=col.YELLOW) + for seq, comment in enumerate(cover.comments): + submitter = comment['submitter'] + person = '%s <%s>' % (submitter['name'], submitter['email']) + terminal.tprint(f"From: {person}: {comment['date']}", + colour=col.RED, col=col) + print(comment['content']) + print() + + num_to_add = 0 + for seq, cmt in enumerate(series.commits): + patch = patch_for_commit.get(seq) + if not patch: + continue + terminal.tprint('%3d %s' % (patch.seq, patch.subject[:50]), + colour=col.YELLOW, col=col) + cmt = series.commits[seq] + base_rtags = cmt.rtags + new_rtags = new_rtag_list[seq] + + indent = ' ' * 2 + show_responses(col, base_rtags, indent, False) + num_to_add += show_responses(col, new_rtags, indent, True) + if show_comments: + for review in review_list[seq]: + terminal.tprint('Review: %s' % review.meta, colour=col.RED, + col=col) + for snippet in review.snippets: + for line in snippet: + quoted = line.startswith('>') + terminal.tprint( + f' {line}', + colour=col.MAGENTA if quoted else None, col=col) + terminal.tprint() + return num_to_add + + +def show_status(series, branch, dest_branch, force, cover, patches, + show_comments, show_cover_comments, test_repo=None): + """Check the status of a series on Patchwork + + This finds review tags and comments for a series in Patchwork, displaying + them to show what is new compared to the local series. 
+ + Args: + client (aiohttp.ClientSession): Session to use + series (Series): Series object for the existing branch + branch (str): Existing branch to update, or None + dest_branch (str): Name of new branch to create, or None + force (bool): True to force overwriting dest_branch if it exists + cover (COVER): Cover letter info, or None if none + patches (list of Patch): Patches sorted by sequence number + show_comments (bool): True to show the comments on each patch + show_cover_comments (bool): True to show the comments on the letter + test_repo (pygit2.Repository): Repo to use (use None unless testing) + """ + col = terminal.Color() + check_patch_count(len(series.commits), len(patches)) + num_to_add, new_rtag_list = do_show_status( + series, cover, patches, show_comments, show_cover_comments, col) + + if not dest_branch and num_to_add: + msg = ' (use -d to write them to a new branch)' + else: + msg = '' + terminal.tprint( + f"{num_to_add} new response{'s' if num_to_add != 1 else ''} " + f'available in patchwork{msg}') + + if dest_branch: + num_added = create_branch(series, new_rtag_list, branch, + dest_branch, force, test_repo) + terminal.tprint( + f"{num_added} response{'s' if num_added != 1 else ''} added " + f"from patchwork into new branch '{dest_branch}'") + + +async def check_status(link, pwork, read_comments=False, + read_cover_comments=False): + """Set up an HTTP session and get the required state + + Args: + link (str): Patch series ID number + pwork (Patchwork): Patchwork object to use for reading + read_comments (bool): True to read comments and state for each patch + + Return: tuple: + COVER object, or None if none or not read_cover_comments + list of PATCH objects + """ + async with aiohttp.ClientSession() as client: + return await pwork.series_get_state(client, link, read_comments, + read_cover_comments) + + +def check_and_show_status(series, link, branch, dest_branch, force, + show_comments, show_cover_comments, pwork, + test_repo=None): + """Read 
the series status from patchwork and show it to the user + + Args: + series (Series): Series object for the existing branch + link (str): Patch series ID number + branch (str): Existing branch to update, or None + dest_branch (str): Name of new branch to create, or None + force (bool): True to force overwriting dest_branch if it exists + show_comments (bool): True to show the comments on each patch + show_cover_comments (bool): True to show the comments on the letter + pwork (Patchwork): Patchwork object to use for reading + test_repo (pygit2.Repository): Repo to use (use None unless testing) + """ + loop = asyncio.get_event_loop() + cover, patches = loop.run_until_complete(check_status( + link, pwork, True, show_cover_comments)) + + show_status(series, branch, dest_branch, force, cover, patches, + show_comments, show_cover_comments, test_repo=test_repo) diff --git a/tools/patman/test/0000-cover-letter.patch b/tools/patman/test/0000-cover-letter.patch new file mode 100644 index 00000000000..c99e635623f --- /dev/null +++ b/tools/patman/test/0000-cover-letter.patch @@ -0,0 +1,23 @@ +From 5ab48490f03051875ab13d288a4bf32b507d76fd Mon Sep 17 00:00:00 2001 +From: Simon Glass <sjg@chromium.org> +Date: Sat, 27 May 2017 20:52:11 -0600 +Subject: [RFC 0/2] *** SUBJECT HERE *** +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +*** BLURB HERE *** + +Simon Glass (2): + pci: Correct cast for sandbox + fdt: Correct cast for sandbox in fdtdec_setup_mem_size_base() + + cmd/pci.c | 3 ++- + fs/fat/fat.c | 1 + + lib/efi_loader/efi_memory.c | 1 + + lib/fdtdec.c | 3 ++- + 4 files changed, 6 insertions(+), 2 deletions(-) + +-- +2.7.4 + diff --git a/tools/patman/test/0001-pci-Correct-cast-for-sandbox.patch b/tools/patman/test/0001-pci-Correct-cast-for-sandbox.patch new file mode 100644 index 00000000000..038943c2c9b --- /dev/null +++ b/tools/patman/test/0001-pci-Correct-cast-for-sandbox.patch @@ -0,0 +1,51 @@ +From 
b9da5f937bd5ea4931ea17459bf79b2905d9594d Mon Sep 17 00:00:00 2001 +From: Simon Glass <sjg@chromium.org> +Date: Sat, 15 Apr 2017 15:39:08 -0600 +Subject: [RFC 1/2] pci: Correct cast for sandbox +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +This gives a warning with some native compilers: + +cmd/pci.c:152:11: warning: format ‘%llx’ expects argument of type + ‘long long unsigned int’, but argument 3 has type + ‘u64 {aka long unsigned int}’ [-Wformat=] + +Fix it with a cast. + +Signed-off-by: Simon Glass <sjg@chromium.org> +Commit-changes: 2 +- Changes only for this commit + +Series-notes: +some notes +about some things +from the first commit +END + +Commit-notes: +Some notes about +the first commit +END +--- + cmd/pci.c | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) + +diff --git a/cmd/pci.c b/cmd/pci.c +index 41b4fff..fe27b4f 100644 +--- a/cmd/pci.c ++++ b/cmd/pci.c +@@ -150,7 +150,8 @@ int pci_bar_show(struct udevice *dev) + if ((!is_64 && size_low) || (is_64 && size)) { + size = ~size + 1; + printf(" %d %#016llx %#016llx %d %s %s\n", +- bar_id, base, size, is_64 ? 64 : 32, ++ bar_id, (unsigned long long)base, ++ (unsigned long long)size, is_64 ? 64 : 32, + is_io ? "I/O" : "MEM", + prefetchable ? 
"Prefetchable" : ""); + } +-- +2.7.4 + diff --git a/tools/patman/test/0002-fdt-Correct-cast-for-sandbox-in-fdtdec_setup_mem_siz.patch b/tools/patman/test/0002-fdt-Correct-cast-for-sandbox-in-fdtdec_setup_mem_siz.patch new file mode 100644 index 00000000000..48ea1793b47 --- /dev/null +++ b/tools/patman/test/0002-fdt-Correct-cast-for-sandbox-in-fdtdec_setup_mem_siz.patch @@ -0,0 +1,85 @@ +From 5ab48490f03051875ab13d288a4bf32b507d76fd Mon Sep 17 00:00:00 2001 +From: Simon Glass <sjg@chromium.org> +Date: Sat, 15 Apr 2017 15:39:08 -0600 +Subject: [RFC 2/2] fdt: Correct cast for sandbox in fdtdec_setup_mem_size_base() +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +This gives a warning with some native compilers: + +lib/fdtdec.c:1203:8: warning: format ‘%llx’ expects argument of type + ‘long long unsigned int’, but argument 3 has type + ‘long unsigned int’ [-Wformat=] + +Fix it with a cast. + +Signed-off-by: Simon Glass <sjg@chromium.org> +Series-to: u-boot +Series-prefix: RFC +Series-cc: Stefan Brüns <stefan.bruens@rwth-aachen.de> +Cover-letter-cc: Lord Mëlchett <clergy@palace.gov> +Series-version: 3 +Patch-cc: fred +Commit-cc: joe +Series-process-log: sort, uniq +Commit-added-in: 4 +Series-changes: 4 +- Some changes +- Multi + line + change + +Commit-changes: 2 +- Changes only for this commit + +Cover-changes: 4 +- Some notes for the cover letter + +Cover-letter: +test: A test patch series +This is a test of how the cover +letter +works +END +--- + fs/fat/fat.c | 1 + + lib/efi_loader/efi_memory.c | 1 + + lib/fdtdec.c | 3 ++- + 3 files changed, 4 insertions(+), 1 deletion(-) + +diff --git a/fs/fat/fat.c b/fs/fat/fat.c +index a71bad1..ba169dc 100644 +--- a/fs/fat/fat.c ++++ b/fs/fat/fat.c +@@ -1,3 +1,4 @@ ++ + /* + * fat.c + * +diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c +index db2ae19..05f75d1 100644 +--- a/lib/efi_loader/efi_memory.c ++++ b/lib/efi_loader/efi_memory.c +@@ -1,3 +1,4 @@ ++ + /* + 
* EFI application memory management + * +diff --git a/lib/fdtdec.c b/lib/fdtdec.c +index c072e54..942244f 100644 +--- a/lib/fdtdec.c ++++ b/lib/fdtdec.c +@@ -1200,7 +1200,8 @@ int fdtdec_setup_mem_size_base(void) + } + + gd->ram_size = (phys_size_t)(res.end - res.start + 1); +- debug("%s: Initial DRAM size %llx\n", __func__, (u64)gd->ram_size); ++ debug("%s: Initial DRAM size %llx\n", __func__, ++ (unsigned long long)gd->ram_size); + + return 0; + } +-- +2.7.4 + diff --git a/tools/patman/test/test01.txt b/tools/patman/test/test01.txt new file mode 100644 index 00000000000..b2d73c5972c --- /dev/null +++ b/tools/patman/test/test01.txt @@ -0,0 +1,72 @@ +commit b9da5f937bd5ea4931ea17459bf79b2905d9594d +Author: Simon Glass <sjg@chromium.org> +Date: Sat Apr 15 15:39:08 2017 -0600 + + pci: Correct cast for sandbox + + This gives a warning with some native compilers: + + cmd/pci.c:152:11: warning: format ‘%llx’ expects argument of type + ‘long long unsigned int’, but argument 3 has type + ‘u64 {aka long unsigned int}’ [-Wformat=] + + Fix it with a cast. + + Signed-off-by: Simon Glass <sjg@chromium.org> + Commit-changes: 2 + - second revision change + + Series-notes: + some notes + about some things + from the first commit + END + + Commit-notes: + Some notes about + the first commit + END + +commit 5ab48490f03051875ab13d288a4bf32b507d76fd +Author: Simon Glass <sjg@chromium.org> +Date: Sat Apr 15 15:39:08 2017 -0600 + + fdt: Correct cast for sandbox in fdtdec_setup_mem_size_base() + + This gives a warning with some native compilers: + + lib/fdtdec.c:1203:8: warning: format ‘%llx’ expects argument of type + ‘long long unsigned int’, but argument 3 has type + ‘long unsigned int’ [-Wformat=] + + Fix it with a cast. 
+ + Signed-off-by: Simon Glass <sjg@chromium.org> + Series-to: u-boot + Series-prefix: RFC + Series-postfix: some-branch + Series-cc: Stefan Brüns <stefan.bruens@rwth-aachen.de> + Cover-letter-cc: Lord Mëlchett <clergy@palace.gov> + Series-version: 3 + Patch-cc: fred + Commit-cc: joe + Series-process-log: sort, uniq + Commit-added-in: 4 + Series-changes: 4 + - Some changes + - Multi + line + change + + Commit-changes: 2 + - Changes only for this commit + + Cover-changes: 4 + - Some notes for the cover letter + + Cover-letter: + test: A test patch series + This is a test of how the cover + letter + works + END diff --git a/tools/patman/test_checkpatch.py b/tools/patman/test_checkpatch.py new file mode 100644 index 00000000000..4e8d163184e --- /dev/null +++ b/tools/patman/test_checkpatch.py @@ -0,0 +1,532 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: GPL-2.0+ +# +# Tests for U-Boot-specific checkpatch.pl features +# +# Copyright (c) 2011 The Chromium OS Authors. +# + +import os +import tempfile +import unittest + +from patman import checkpatch +from patman import patchstream +from patman import series +from patman import commit +from u_boot_pylib import gitutil + + +class Line: + """Single changed line in one file in a patch + + Args: + fname (str): Filename containing the added line + text (str): Text of the added line + """ + def __init__(self, fname, text): + self.fname = fname + self.text = text + + +class PatchMaker: + """Makes a patch for checking with checkpatch.pl + + The idea here is to create a patch which adds one line in one file, + intended to provoke a checkpatch error or warning. The base patch is empty + (i.e. invalid), so you should call add_line() to add at least one line. + """ + def __init__(self): + """Set up the PatchMaker object + + Properties: + lines (list of Line): List of lines to add to the patch. 
Note that + each line has both a file and some text associated with it, + since for simplicity we just add a single line for each file + """ + self.lines = [] + + def add_line(self, fname, text): + """Add to the list of filename/line pairs""" + self.lines.append(Line(fname, text)) + + def get_patch_text(self): + """Build the patch text + + Takes a base patch and adds a diffstat and patch for each filename/line + pair in the list. + + Returns: + str: Patch text ready for submission to checkpatch + """ + base = '''From 125b77450f4c66b8fd9654319520bbe795c9ef31 Mon Sep 17 00:00:00 2001 +From: Simon Glass <sjg@chromium.org> +Date: Sun, 14 Jun 2020 09:45:14 -0600 +Subject: [PATCH] Test commit + +This is a test commit. + +Signed-off-by: Simon Glass <sjg@chromium.org> +--- + +''' + lines = base.splitlines() + + # Create the diffstat + change = 0 + insert = 0 + for line in self.lines: + lines.append(' %s | 1 +' % line.fname) + change += 1 + insert += 1 + lines.append(' %d files changed, %d insertions(+)' % (change, insert)) + lines.append('') + + # Create the patch info for each file + for line in self.lines: + lines.append('diff --git a/%s b/%s' % (line.fname, line.fname)) + lines.append('index 7837d459f18..5ba7840f68e 100644') + lines.append('--- a/%s' % line.fname) + lines.append('+++ b/%s' % line.fname) + lines += ('''@@ -121,6 +121,7 @@ enum uclass_id { + UCLASS_W1, /* Dallas 1-Wire bus */ + UCLASS_W1_EEPROM, /* one-wire EEPROMs */ + UCLASS_WDT, /* Watchdog Timer driver */ ++%s + + UCLASS_COUNT, + UCLASS_INVALID = -1, +''' % line.text).splitlines() + lines.append('---') + lines.append('2.17.1') + + return '\n'.join(lines) + + def get_patch(self): + """Get the patch text and write it into a temporary file + + Returns: + str: Filename containing the patch + """ + inhandle, inname = tempfile.mkstemp() + infd = os.fdopen(inhandle, 'w') + infd.write(self.get_patch_text()) + infd.close() + return inname + + def run_checkpatch(self): + """Run checkpatch on the patch file + + 
Returns: + namedtuple containing: + ok: False=failure, True=ok + problems: List of problems, each a dict: + 'type'; error or warning + 'msg': text message + 'file' : filename + 'line': line number + errors: Number of errors + warnings: Number of warnings + checks: Number of checks + lines: Number of lines + stdout: Full output of checkpatch + """ + return checkpatch.check_patch(self.get_patch(), show_types=True) + + +class TestPatch(unittest.TestCase): + """Test the u_boot_line() function in checkpatch.pl""" + + def test_filter(self): + """Test basic filter operation""" + data=''' + +From 656c9a8c31fa65859d924cd21da920d6ba537fad Mon Sep 17 00:00:00 2001 +From: Simon Glass <sjg@chromium.org> +Date: Thu, 28 Apr 2011 09:58:51 -0700 +Subject: [PATCH (resend) 3/7] Tegra2: Add more clock support + +This adds functions to enable/disable clocks and reset to on-chip peripherals. + +cmd/pci.c:152:11: warning: format ‘%llx’ expects argument of type + ‘long long unsigned int’, but argument 3 has type + ‘u64 {aka long unsigned int}’ [-Wformat=] + +BUG=chromium-os:13875 +TEST=build U-Boot for Seaboard, boot + +Change-Id: I80fe1d0c0b7dd10aa58ce5bb1d9290b6664d5413 + +Review URL: http://codereview.chromium.org/6900006 + +Signed-off-by: Simon Glass <sjg@chromium.org> +--- + arch/arm/cpu/armv7/tegra2/Makefile | 2 +- + arch/arm/cpu/armv7/tegra2/ap20.c | 57 ++---- + arch/arm/cpu/armv7/tegra2/clock.c | 163 +++++++++++++++++ +''' + expected='''Message-Id: <19991231235959.0.I80fe1d0c0b7dd10aa58ce5bb1d9290b6664d5413@changeid> + + +From 656c9a8c31fa65859d924cd21da920d6ba537fad Mon Sep 17 00:00:00 2001 +From: Simon Glass <sjg@chromium.org> +Date: Thu, 28 Apr 2011 09:58:51 -0700 +Subject: [PATCH (resend) 3/7] Tegra2: Add more clock support + +This adds functions to enable/disable clocks and reset to on-chip peripherals. 
+ +cmd/pci.c:152:11: warning: format ‘%llx’ expects argument of type + ‘long long unsigned int’, but argument 3 has type + ‘u64 {aka long unsigned int}’ [-Wformat=] + +Signed-off-by: Simon Glass <sjg@chromium.org> +--- + + arch/arm/cpu/armv7/tegra2/Makefile | 2 +- + arch/arm/cpu/armv7/tegra2/ap20.c | 57 ++---- + arch/arm/cpu/armv7/tegra2/clock.c | 163 +++++++++++++++++ +''' + out = '' + inhandle, inname = tempfile.mkstemp() + infd = os.fdopen(inhandle, 'w', encoding='utf-8') + infd.write(data) + infd.close() + + exphandle, expname = tempfile.mkstemp() + expfd = os.fdopen(exphandle, 'w', encoding='utf-8') + expfd.write(expected) + expfd.close() + + # Normally by the time we call fix_patch we've already collected + # metadata. Here, we haven't, but at least fake up something. + # Set the "count" to -1 which tells fix_patch to use a bogus/fixed + # time for generating the Message-Id. + com = commit.Commit('') + com.change_id = 'I80fe1d0c0b7dd10aa58ce5bb1d9290b6664d5413' + com.count = -1 + + patchstream.fix_patch(None, inname, series.Series(), com) + + rc = os.system('diff -u %s %s' % (inname, expname)) + self.assertEqual(rc, 0) + os.remove(inname) + + # Test whether the keep_change_id settings works. + inhandle, inname = tempfile.mkstemp() + infd = os.fdopen(inhandle, 'w', encoding='utf-8') + infd.write(data) + infd.close() + + patchstream.fix_patch(None, inname, series.Series(), com, + keep_change_id=True) + + with open(inname, 'r') as f: + content = f.read() + self.assertIn( + 'Change-Id: I80fe1d0c0b7dd10aa58ce5bb1d9290b6664d5413', + content) + + os.remove(inname) + os.remove(expname) + + def get_data(self, data_type): + data='''From 4924887af52713cabea78420eff03badea8f0035 Mon Sep 17 00:00:00 2001 +From: Simon Glass <sjg@chromium.org> +Date: Thu, 7 Apr 2011 10:14:41 -0700 +Subject: [PATCH 1/4] Add microsecond boot time measurement + +This defines the basics of a new boot time measurement feature. 
This allows +logging of very accurate time measurements as the boot proceeds, by using +an available microsecond counter. + +%s +--- + README | 11 ++++++++ + MAINTAINERS | 3 ++ + common/bootstage.c | 50 ++++++++++++++++++++++++++++++++++++ + include/bootstage.h | 71 +++++++++++++++++++++++++++++++++++++++++++++++++++ + include/common.h | 8 ++++++ + 5 files changed, 141 insertions(+), 0 deletions(-) + create mode 100644 common/bootstage.c + create mode 100644 include/bootstage.h + +diff --git a/README b/README +index 6f3748d..f9e4e65 100644 +--- a/README ++++ b/README +@@ -2026,6 +2026,17 @@ The following options need to be configured: + example, some LED's) on your board. At the moment, + the following checkpoints are implemented: + ++- Time boot progress ++ CONFIG_BOOTSTAGE ++ ++ Define this option to enable microsecond boot stage timing ++ on supported platforms. For this to work your platform ++ needs to define a function timer_get_us() which returns the ++ number of microseconds since reset. This would normally ++ be done in your SOC or board timer.c file. ++ ++ You can add calls to bootstage_mark() to set time markers. ++ + - Standalone program support: + CONFIG_STANDALONE_LOAD_ADDR + +diff --git a/MAINTAINERS b/MAINTAINERS +index b167b028ec..beb7dc634f 100644 +--- a/MAINTAINERS ++++ b/MAINTAINERS +@@ -474,3 +474,8 @@ S: Maintained + T: git git://git.denx.de/u-boot.git + F: * + F: */ ++ ++BOOTSTAGE ++M: Simon Glass <sjg@chromium.org> ++L: u-boot@lists.denx.de ++F: common/bootstage.c +diff --git a/common/bootstage.c b/common/bootstage.c +new file mode 100644 +index 0000000..2234c87 +--- /dev/null ++++ b/common/bootstage.c +@@ -0,0 +1,37 @@ ++%s ++/* ++ * Copyright (c) 2011, Google Inc. All rights reserved. ++ * ++ */ ++ ++/* ++ * This module records the progress of boot and arbitrary commands, and ++ * permits accurate timestamping of each. 
The records can optionally be ++ * passed to kernel in the ATAGs ++ */ ++ ++#include <config.h> ++ ++struct bootstage_record { ++ u32 time_us; ++ const char *name; ++}; ++ ++static struct bootstage_record record[BOOTSTAGE_COUNT]; ++ ++u32 bootstage_mark(enum bootstage_id id, const char *name) ++{ ++ struct bootstage_record *rec = &record[id]; ++ ++ /* Only record the first event for each */ ++%sif (!rec->name) { ++ rec->time_us = (u32)timer_get_us(); ++ rec->name = name; ++ } ++ if (!rec->name && ++ %ssomething_else) { ++ rec->time_us = (u32)timer_get_us(); ++ rec->name = name; ++ } ++%sreturn rec->time_us; ++} +-- +1.7.3.1 +''' + signoff = 'Signed-off-by: Simon Glass <sjg@chromium.org>\n' + license = '// SPDX-License-Identifier: GPL-2.0+' + tab = ' ' + indent = ' ' + if data_type == 'good': + pass + elif data_type == 'no-signoff': + signoff = '' + elif data_type == 'no-license': + license = '' + elif data_type == 'spaces': + tab = ' ' + elif data_type == 'indent': + indent = tab + else: + print('not implemented') + return data % (signoff, license, tab, indent, tab) + + def setup_data(self, data_type): + inhandle, inname = tempfile.mkstemp() + infd = os.fdopen(inhandle, 'w') + data = self.get_data(data_type) + infd.write(data) + infd.close() + return inname + + def test_good(self): + """Test checkpatch operation""" + inf = self.setup_data('good') + result = checkpatch.check_patch(inf) + self.assertEqual(result.ok, True) + self.assertEqual(result.problems, []) + self.assertEqual(result.errors, 0) + self.assertEqual(result.warnings, 0) + self.assertEqual(result.checks, 0) + self.assertEqual(result.lines, 62) + os.remove(inf) + + def test_no_signoff(self): + inf = self.setup_data('no-signoff') + result = checkpatch.check_patch(inf) + self.assertEqual(result.ok, False) + self.assertEqual(len(result.problems), 1) + self.assertEqual(result.errors, 1) + self.assertEqual(result.warnings, 0) + self.assertEqual(result.checks, 0) + self.assertEqual(result.lines, 62) + 
os.remove(inf) + + def test_no_license(self): + inf = self.setup_data('no-license') + result = checkpatch.check_patch(inf) + self.assertEqual(result.ok, False) + self.assertEqual(len(result.problems), 1) + self.assertEqual(result.errors, 0) + self.assertEqual(result.warnings, 1) + self.assertEqual(result.checks, 0) + self.assertEqual(result.lines, 62) + os.remove(inf) + + def test_spaces(self): + inf = self.setup_data('spaces') + result = checkpatch.check_patch(inf) + self.assertEqual(result.ok, False) + self.assertEqual(len(result.problems), 3) + self.assertEqual(result.errors, 0) + self.assertEqual(result.warnings, 3) + self.assertEqual(result.checks, 0) + self.assertEqual(result.lines, 62) + os.remove(inf) + + def test_indent(self): + inf = self.setup_data('indent') + result = checkpatch.check_patch(inf) + self.assertEqual(result.ok, False) + self.assertEqual(len(result.problems), 1) + self.assertEqual(result.errors, 0) + self.assertEqual(result.warnings, 0) + self.assertEqual(result.checks, 1) + self.assertEqual(result.lines, 62) + os.remove(inf) + + def check_single_message(self, pm, msg, pmtype = 'warning'): + """Helper function to run checkpatch and check the result + + Args: + pm: PatchMaker object to use + msg: Expected message (e.g. 
'LIVETREE') + pmtype: Type of problem ('error', 'warning') + """ + result = pm.run_checkpatch() + if pmtype == 'warning': + self.assertEqual(result.warnings, 1) + elif pmtype == 'error': + self.assertEqual(result.errors, 1) + if len(result.problems) != 1: + print(result.problems) + self.assertEqual(len(result.problems), 1) + self.assertIn(msg, result.problems[0]['cptype']) + + def test_uclass(self): + """Test for possible new uclass""" + pm = PatchMaker() + pm.add_line('include/dm/uclass-id.h', 'UCLASS_WIBBLE,') + self.check_single_message(pm, 'NEW_UCLASS') + + def test_livetree(self): + """Test for using the livetree API""" + pm = PatchMaker() + pm.add_line('common/main.c', 'fdtdec_do_something()') + self.check_single_message(pm, 'LIVETREE') + + def test_new_command(self): + """Test for adding a new command""" + pm = PatchMaker() + pm.add_line('common/main.c', 'do_wibble(struct cmd_tbl *cmd_tbl)') + self.check_single_message(pm, 'CMD_TEST') + + def test_prefer_if(self): + """Test for using #ifdef""" + pm = PatchMaker() + pm.add_line('common/main.c', '#ifdef CONFIG_YELLOW') + pm.add_line('common/init.h', '#ifdef CONFIG_YELLOW') + pm.add_line('fred.dtsi', '#ifdef CONFIG_YELLOW') + self.check_single_message(pm, "PREFER_IF") + + def test_command_use_defconfig(self): + """Test for enabling/disabling commands using preprocesor""" + pm = PatchMaker() + pm.add_line('common/main.c', '#undef CONFIG_CMD_WHICH') + self.check_single_message(pm, 'DEFINE_CONFIG_SYM', 'error') + + def test_barred_include_in_hdr(self): + """Test for using a barred include in a header file""" + pm = PatchMaker() + pm.add_line('include/myfile.h', '#include <dm.h>') + self.check_single_message(pm, 'BARRED_INCLUDE_IN_HDR', 'error') + + def test_barred_include_common_h(self): + """Test for adding common.h to a file""" + pm = PatchMaker() + pm.add_line('include/myfile.h', '#include <common.h>') + self.check_single_message(pm, 'BARRED_INCLUDE_COMMON_H', 'error') + + def 
test_config_is_enabled_config(self): + """Test for accidental CONFIG_IS_ENABLED(CONFIG_*) calls""" + pm = PatchMaker() + pm.add_line('common/main.c', 'if (CONFIG_IS_ENABLED(CONFIG_CLK))') + self.check_single_message(pm, 'CONFIG_IS_ENABLED_CONFIG', 'error') + + def check_struct(self, auto, suffix, warning): + """Check one of the warnings for struct naming + + Args: + auto: Auto variable name, e.g. 'per_child_auto' + suffix: Suffix to expect on member, e.g. '_priv' + warning: Warning name, e.g. 'PRIV_AUTO' + """ + pm = PatchMaker() + pm.add_line('common/main.c', '.%s = sizeof(struct(fred)),' % auto) + pm.add_line('common/main.c', '.%s = sizeof(struct(mary%s)),' % + (auto, suffix)) + self.check_single_message( + pm, warning, "struct 'fred' should have a %s suffix" % suffix) + + def test_dm_driver_auto(self): + """Check for the correct suffix on 'struct driver' auto members""" + self.check_struct('priv_auto', '_priv', 'PRIV_AUTO') + self.check_struct('plat_auto', '_plat', 'PLAT_AUTO') + self.check_struct('per_child_auto', '_priv', 'CHILD_PRIV_AUTO') + self.check_struct('per_child_plat_auto', '_plat', 'CHILD_PLAT_AUTO') + + def test_dm_uclass_auto(self): + """Check for the correct suffix on 'struct uclass' auto members""" + # Some of these are omitted since they match those from struct driver + self.check_struct('per_device_auto', '_priv', 'DEVICE_PRIV_AUTO') + self.check_struct('per_device_plat_auto', '_plat', 'DEVICE_PLAT_AUTO') + + def check_strl(self, func): + """Check one of the checks for strn(cpy|cat)""" + pm = PatchMaker() + pm.add_line('common/main.c', "strn%s(foo, bar, sizeof(foo));" % func) + self.check_single_message(pm, "STRL", + "strl%s is preferred over strn%s because it always produces a nul-terminated string\n" + % (func, func)) + + def test_strl(self): + """Check for uses of strn(cat|cpy)""" + self.check_strl("cat"); + self.check_strl("cpy"); + + def test_schema(self): + """Check for uses of strn(cat|cpy)""" + pm = PatchMaker() + 
pm.add_line('arch/sandbox/dts/sandbox.dtsi', '\tu-boot,dm-pre-proper;') + self.check_single_message(pm, 'PRE_SCHEMA', 'error') + +if __name__ == "__main__": + unittest.main() diff --git a/tools/patman/test_common.py b/tools/patman/test_common.py new file mode 100644 index 00000000000..7da995dda22 --- /dev/null +++ b/tools/patman/test_common.py @@ -0,0 +1,254 @@ +# SPDX-License-Identifier: GPL-2.0+ +# +# Copyright 2025 Simon Glass <sjg@chromium.org> +# +"""Functional tests for checking that patman behaves correctly""" + +import os +import shutil +import tempfile + +import pygit2 + +from u_boot_pylib import gitutil +from u_boot_pylib import terminal +from u_boot_pylib import tools +from u_boot_pylib import tout + + +class TestCommon: + """Contains common test functions""" + leb = (b'Lord Edmund Blackadd\xc3\xabr <weasel@blackadder.org>'. + decode('utf-8')) + + # Fake patchwork project ID for U-Boot + PROJ_ID = 6 + PROJ_LINK_NAME = 'uboot' + SERIES_ID_FIRST_V3 = 31 + SERIES_ID_SECOND_V1 = 456 + SERIES_ID_SECOND_V2 = 457 + TITLE_SECOND = 'Series for my board' + + verbosity = False + preserve_outdirs = False + + @classmethod + def setup_test_args(cls, preserve_indir=False, preserve_outdirs=False, + toolpath=None, verbosity=None, no_capture=False): + """Accept arguments controlling test execution + + Args: + preserve_indir (bool): not used by patman + preserve_outdirs (bool): Preserve the output directories used by + tests. Each test has its own, so this is normally only useful + when running a single test. 
+ toolpath (str): not used by patman + verbosity (int): verbosity to use (0 means tout.INIT, 1 means means + tout.DEBUG) + no_capture (bool): True to output all captured text after capturing + completes + """ + del preserve_indir + cls.preserve_outdirs = preserve_outdirs + cls.toolpath = toolpath + cls.verbosity = verbosity + cls.no_capture = no_capture + + def __init__(self): + super().__init__() + self.repo = None + self.tmpdir = None + self.gitdir = None + + def setUp(self): + """Set up the test temporary dir and git dir""" + self.tmpdir = tempfile.mkdtemp(prefix='patman.') + self.gitdir = os.path.join(self.tmpdir, '.git') + tout.init(tout.DEBUG if self.verbosity else tout.INFO, + allow_colour=False) + + def tearDown(self): + """Delete the temporary dir""" + if self.preserve_outdirs: + print(f'Output dir: {self.tmpdir}') + else: + shutil.rmtree(self.tmpdir) + terminal.set_print_test_mode(False) + + def make_commit_with_file(self, subject, body, fname, text): + """Create a file and add it to the git repo with a new commit + + Args: + subject (str): Subject for the commit + body (str): Body text of the commit + fname (str): Filename of file to create + text (str): Text to put into the file + """ + path = os.path.join(self.tmpdir, fname) + tools.write_file(path, text, binary=False) + index = self.repo.index + index.add(fname) + # pylint doesn't seem to find this + # pylint: disable=E1101 + author = pygit2.Signature('Test user', 'test@email.com') + committer = author + tree = index.write_tree() + message = subject + '\n' + body + self.repo.create_commit('HEAD', author, committer, message, tree, + [self.repo.head.target]) + + def make_git_tree(self): + """Make a simple git tree suitable for testing + + It has four branches: + 'base' has two commits: PCI, main + 'first' has base as upstream and two more commits: I2C, SPI + 'second' has base as upstream and three more: video, serial, bootm + 'third4' has second as upstream and four more: usb, main, test, lib + + 
Returns: + pygit2.Repository: repository + """ + os.environ['GIT_CONFIG_GLOBAL'] = '/dev/null' + os.environ['GIT_CONFIG_SYSTEM'] = '/dev/null' + + repo = pygit2.init_repository(self.gitdir) + self.repo = repo + new_tree = repo.TreeBuilder().write() + + common = ['git', f'--git-dir={self.gitdir}', 'config'] + tools.run(*(common + ['user.name', 'Dummy']), cwd=self.gitdir) + tools.run(*(common + ['user.email', 'dumdum@dummy.com']), + cwd=self.gitdir) + + # pylint doesn't seem to find this + # pylint: disable=E1101 + author = pygit2.Signature('Test user', 'test@email.com') + committer = author + _ = repo.create_commit('HEAD', author, committer, 'Created master', + new_tree, []) + + self.make_commit_with_file('Initial commit', ''' +Add a README + +''', 'README', '''This is the README file +describing this project +in very little detail''') + + self.make_commit_with_file('pci: PCI implementation', ''' +Here is a basic PCI implementation + +''', 'pci.c', '''This is a file +it has some contents +and some more things''') + self.make_commit_with_file('main: Main program', ''' +Hello here is the second commit. 
+''', 'main.c', '''This is the main file +there is very little here +but we can always add more later +if we want to + +Series-to: u-boot +Series-cc: Barry Crump <bcrump@whataroa.nz> +''') + base_target = repo.revparse_single('HEAD') + self.make_commit_with_file('i2c: I2C things', ''' +This has some stuff to do with I2C +''', 'i2c.c', '''And this is the file contents +with some I2C-related things in it''') + self.make_commit_with_file('spi: SPI fixes', f''' +SPI needs some fixes +and here they are + +Signed-off-by: {self.leb} + +Series-to: u-boot +Commit-notes: +title of the series +This is the cover letter for the series +with various details +END +''', 'spi.c', '''Some fixes for SPI in this +file to make SPI work +better than before''') + first_target = repo.revparse_single('HEAD') + + target = repo.revparse_single('HEAD~2') + # pylint doesn't seem to find this + # pylint: disable=E1101 + repo.reset(target.oid, pygit2.enums.ResetMode.HARD) + self.make_commit_with_file('video: Some video improvements', ''' +Fix up the video so that +it looks more purple. Purple is +a very nice colour. +''', 'video.c', '''More purple here +Purple and purple +Even more purple +Could not be any more purple''') + self.make_commit_with_file('serial: Add a serial driver', f''' +Here is the serial driver +for my chip. + +Cover-letter: +{self.TITLE_SECOND} +This series implements support +for my glorious board. 
+END +Series-to: u-boot +Series-links: {self.SERIES_ID_SECOND_V1} +''', 'serial.c', '''The code for the +serial driver is here''') + self.make_commit_with_file('bootm: Make it boot', ''' +This makes my board boot +with a fix to the bootm +command +''', 'bootm.c', '''Fix up the bootm +command to make the code as +complicated as possible''') + second_target = repo.revparse_single('HEAD') + + self.make_commit_with_file('usb: Try out the new DMA feature', ''' +This is just a fix that +ensures that DMA is enabled +''', 'usb-uclass.c', '''Here is the USB +implementation and as you can see it +it very nice''') + self.make_commit_with_file('main: Change to the main program', ''' +Here we adjust the main +program just a little bit +''', 'main.c', '''This is the text of the main program''') + self.make_commit_with_file('test: Check that everything works', ''' +This checks that all the +various things we've been +adding actually work. +''', 'test.c', '''Here is the test code and it seems OK''') + self.make_commit_with_file('lib: Sort out the extra library', ''' +The extra library is currently +broken. Fix it so that we can +use it in various place. 
+''', 'lib.c', '''Some library code is here +and a little more''') + third_target = repo.revparse_single('HEAD') + + repo.branches.local.create('first', first_target) + repo.config.set_multivar('branch.first.remote', '', '.') + repo.config.set_multivar('branch.first.merge', '', 'refs/heads/base') + + repo.branches.local.create('second', second_target) + repo.config.set_multivar('branch.second.remote', '', '.') + repo.config.set_multivar('branch.second.merge', '', 'refs/heads/base') + + repo.branches.local.create('base', base_target) + + repo.branches.local.create('third4', third_target) + repo.config.set_multivar('branch.third4.remote', '', '.') + repo.config.set_multivar('branch.third4.merge', '', + 'refs/heads/second') + + target = repo.lookup_reference('refs/heads/first') + repo.checkout(target, strategy=pygit2.GIT_CHECKOUT_FORCE) + target = repo.revparse_single('HEAD') + repo.reset(target.oid, pygit2.enums.ResetMode.HARD) + + self.assertFalse(gitutil.check_dirty(self.gitdir, self.tmpdir)) + return repo diff --git a/tools/patman/test_cseries.py b/tools/patman/test_cseries.py new file mode 100644 index 00000000000..4c211c8ee89 --- /dev/null +++ b/tools/patman/test_cseries.py @@ -0,0 +1,3684 @@ +# SPDX-License-Identifier: GPL-2.0+ + +# Copyright 2025 Simon Glass <sjg@chromium.org> +# +"""Functional tests for checking that patman behaves correctly""" + +import asyncio +from datetime import datetime +import os +import re +import unittest +from unittest import mock + +import pygit2 + +from u_boot_pylib import cros_subprocess +from u_boot_pylib import gitutil +from u_boot_pylib import terminal +from u_boot_pylib import tools +from patman import cmdline +from patman import control +from patman import cser_helper +from patman import cseries +from patman.database import Pcommit +from patman import database +from patman import patchstream +from patman.patchwork import Patchwork +from patman.test_common import TestCommon + +HASH_RE = r'[0-9a-f]+' +#pylint: 
disable=protected-access + +class Namespace: + """Simple namespace for use instead of argparse in tests""" + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +class TestCseries(unittest.TestCase, TestCommon): + """Test cases for the Cseries class + + In some cases there are tests for both direct Cseries calls and for + accessing the feature via the cmdline. It is possible to do this with mocks + but it is a bit painful to catch all cases that way. The approach here is + to create a check_...() function which yields back to the test routines to + make the call or run the command. The check_...() function typically yields + a Cseries while it is working and False when it is done, allowing the test + to check that everything is finished. + + Some subcommands don't have command tests, if it would be duplicative. Some + tests avoid using the check_...() function and just write the test out + twice, if it would be too confusing to use a coroutine. + + Note the -N flag which sort-of disables capturing of output, although in + fact it is still captured, just output at the end. When debugging the code + you may need to temporarily comment out the 'with terminal.capture()' + parts. 
+ """ + def setUp(self): + TestCommon.setUp(self) + self.autolink_extra = None + self.loop = asyncio.get_event_loop() + self.cser = None + + def tearDown(self): + TestCommon.tearDown(self) + + class _Stage: + def __init__(self, name): + self.name = name + + def __enter__(self): + if not terminal.USE_CAPTURE: + print(f"--- starting '{self.name}'") + + def __exit__(self, exc_type, exc_val, exc_tb): + if not terminal.USE_CAPTURE: + print(f"--- finished '{self.name}'\n") + + def stage(self, name): + """Context manager to count requests across a range of patchwork calls + + Args: + name (str): Stage name + + Return: + _Stage: contect object + + Usage: + with self.stage('name'): + ...do things + + Note that the output only appears if the -N flag is used + """ + return self._Stage(name) + + def assert_finished(self, itr): + """Assert that an iterator is finished + + Args: + itr (iter): Iterator to check + """ + self.assertFalse(list(itr)) + + def test_database_setup(self): + """Check setting up of the series database""" + cser = cseries.Cseries(self.tmpdir) + with terminal.capture() as (_, err): + cser.open_database() + self.assertEqual(f'Creating new database {self.tmpdir}/.patman.db', + err.getvalue().strip()) + res = cser.db.execute("SELECT name FROM series") + self.assertTrue(res) + cser.close_database() + + def get_database(self): + """Open the database and silence the warning output + + Return: + Cseries: Resulting Cseries object + """ + cser = cseries.Cseries(self.tmpdir, terminal.COLOR_NEVER) + with terminal.capture() as _: + cser.open_database() + self.cser = cser + return cser + + def get_cser(self): + """Set up a git tree and database + + Return: + Cseries: object + """ + self.make_git_tree() + return self.get_database() + + def db_close(self): + """Close the database if open""" + if self.cser and self.cser.db.cur: + self.cser.close_database() + return True + return False + + def db_open(self): + """Open the database if closed""" + if self.cser and not 
self.cser.db.cur: + self.cser.open_database() + + def run_args(self, *argv, expect_ret=0, pwork=None, cser=None): + """Run patman with the given arguments + + Args: + argv (list of str): List of arguments, excluding 'patman' + expect_ret (int): Expected return code, used to check errors + pwork (Patchwork): Patchwork object to use when executing the + command, or None to create one + cser (Cseries): Cseries object to use when executing the command, + or None to create one + """ + was_open = self.db_close() + args = cmdline.parse_args(['-D'] + list(argv), config_fname=False) + exit_code = control.do_patman(args, self.tmpdir, pwork, cser) + self.assertEqual(expect_ret, exit_code) + if was_open: + self.db_open() + + def test_series_add(self): + """Test adding a new cseries""" + cser = self.get_cser() + self.assertFalse(cser.db.series_get_dict()) + + with terminal.capture() as (out, _): + cser.add('first', 'my description', allow_unmarked=True) + lines = out.getvalue().strip().splitlines() + self.assertEqual( + "Adding series 'first' v1: mark False allow_unmarked True", + lines[0]) + self.assertEqual("Added series 'first' v1 (2 commits)", lines[1]) + self.assertEqual(2, len(lines)) + + slist = cser.db.series_get_dict() + self.assertEqual(1, len(slist)) + self.assertEqual('first', slist['first'].name) + self.assertEqual('my description', slist['first'].desc) + + svlist = cser.get_ser_ver_list() + self.assertEqual(1, len(svlist)) + self.assertEqual(1, svlist[0].idnum) + self.assertEqual(1, svlist[0].series_id) + self.assertEqual(1, svlist[0].version) + + pclist = cser.get_pcommit_dict() + self.assertEqual(2, len(pclist)) + self.assertIn(1, pclist) + self.assertEqual( + Pcommit(1, 0, 'i2c: I2C things', 1, None, None, None, None), + pclist[1]) + self.assertEqual( + Pcommit(2, 1, 'spi: SPI fixes', 1, None, None, None, None), + pclist[2]) + + def test_series_not_checked_out(self): + """Test adding a new cseries when a different one is checked out""" + cser = self.get_cser() 
+ self.assertFalse(cser.db.series_get_dict()) + + with terminal.capture() as (out, _): + cser.add('second', allow_unmarked=True) + lines = out.getvalue().strip().splitlines() + self.assertEqual( + "Adding series 'second' v1: mark False allow_unmarked True", + lines[0]) + self.assertEqual("Added series 'second' v1 (3 commits)", lines[1]) + self.assertEqual(2, len(lines)) + + def test_series_add_manual(self): + """Test adding a new cseries with a version number""" + cser = self.get_cser() + self.assertFalse(cser.db.series_get_dict()) + + repo = pygit2.init_repository(self.gitdir) + first_target = repo.revparse_single('first') + repo.branches.local.create('first2', first_target) + repo.config.set_multivar('branch.first2.remote', '', '.') + repo.config.set_multivar('branch.first2.merge', '', 'refs/heads/base') + + with terminal.capture() as (out, _): + cser.add('first2', 'description', allow_unmarked=True) + lines = out.getvalue().splitlines() + self.assertEqual( + "Adding series 'first' v2: mark False allow_unmarked True", + lines[0]) + self.assertEqual("Added series 'first' v2 (2 commits)", lines[1]) + self.assertEqual(2, len(lines)) + + slist = cser.db.series_get_dict() + self.assertEqual(1, len(slist)) + self.assertEqual('first', slist['first'].name) + + # We should have just one entry, with version 2 + svlist = cser.get_ser_ver_list() + self.assertEqual(1, len(svlist)) + self.assertEqual(1, svlist[0].idnum) + self.assertEqual(1, svlist[0].series_id) + self.assertEqual(2, svlist[0].version) + + def add_first2(self, checkout): + """Add a new first2 branch, a copy of first""" + repo = pygit2.init_repository(self.gitdir) + first_target = repo.revparse_single('first') + repo.branches.local.create('first2', first_target) + repo.config.set_multivar('branch.first2.remote', '', '.') + repo.config.set_multivar('branch.first2.merge', '', 'refs/heads/base') + + if checkout: + target = repo.lookup_reference('refs/heads/first2') + repo.checkout(target, 
strategy=pygit2.enums.CheckoutStrategy.FORCE) + + def test_series_add_different(self): + """Test adding a different version of a series from that checked out""" + cser = self.get_cser() + + self.add_first2(True) + + # Add first2 initially + with terminal.capture() as (out, _): + cser.add(None, 'description', allow_unmarked=True) + lines = out.getvalue().splitlines() + self.assertEqual( + "Adding series 'first' v2: mark False allow_unmarked True", + lines[0]) + self.assertEqual("Added series 'first' v2 (2 commits)", lines[1]) + self.assertEqual(2, len(lines)) + + # Now add first: it should be added as a new version + with terminal.capture() as (out, _): + cser.add('first', 'description', allow_unmarked=True) + lines = out.getvalue().splitlines() + self.assertEqual( + "Adding series 'first' v1: mark False allow_unmarked True", + lines[0]) + self.assertEqual( + "Added v1 to existing series 'first' (2 commits)", lines[1]) + self.assertEqual(2, len(lines)) + + slist = cser.db.series_get_dict() + self.assertEqual(1, len(slist)) + self.assertEqual('first', slist['first'].name) + + # We should have two entries, one of each version + svlist = cser.get_ser_ver_list() + self.assertEqual(2, len(svlist)) + self.assertEqual(1, svlist[0].idnum) + self.assertEqual(1, svlist[0].series_id) + self.assertEqual(2, svlist[0].version) + + self.assertEqual(2, svlist[1].idnum) + self.assertEqual(1, svlist[1].series_id) + self.assertEqual(1, svlist[1].version) + + def test_series_add_dup(self): + """Test adding a series twice""" + cser = self.get_cser() + with terminal.capture() as (out, _): + cser.add(None, 'description', allow_unmarked=True) + + with terminal.capture() as (out, _): + cser.add(None, 'description', allow_unmarked=True) + self.assertIn("Series 'first' v1 already exists", + out.getvalue().strip()) + + self.add_first2(False) + + with terminal.capture() as (out, _): + cser.add('first2', 'description', allow_unmarked=True) + lines = out.getvalue().splitlines() + 
self.assertEqual( + "Added v2 to existing series 'first' (2 commits)", lines[1]) + + def test_series_add_dup_reverse(self): + """Test adding a series twice, v2 then v1""" + cser = self.get_cser() + self.add_first2(True) + with terminal.capture() as (out, _): + cser.add(None, 'description', allow_unmarked=True) + self.assertIn("Added series 'first' v2", out.getvalue().strip()) + + with terminal.capture() as (out, _): + cser.add('first', 'description', allow_unmarked=True) + self.assertIn("Added v1 to existing series 'first'", + out.getvalue().strip()) + + def test_series_add_dup_reverse_cmdline(self): + """Test adding a series twice, v2 then v1""" + cser = self.get_cser() + self.add_first2(True) + with terminal.capture() as (out, _): + self.run_args('series', 'add', '-M', '-D', 'description', + pwork=True) + self.assertIn("Added series 'first' v2 (2 commits)", + out.getvalue().strip()) + + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'first', 'add', '-M', + '-D', 'description', pwork=True) + cser.add('first', 'description', allow_unmarked=True) + self.assertIn("Added v1 to existing series 'first'", + out.getvalue().strip()) + + def test_series_add_skip_version(self): + """Test adding a series which is v4 but has no earlier version""" + cser = self.get_cser() + with terminal.capture() as (out, _): + cser.add('third4', 'The glorious third series', mark=False, + allow_unmarked=True) + lines = out.getvalue().splitlines() + self.assertEqual( + "Adding series 'third' v4: mark False allow_unmarked True", + lines[0]) + self.assertEqual("Added series 'third' v4 (4 commits)", lines[1]) + self.assertEqual(2, len(lines)) + + sdict = cser.db.series_get_dict() + self.assertIn('third', sdict) + chk = sdict['third'] + self.assertEqual('third', chk['name']) + self.assertEqual('The glorious third series', chk['desc']) + + svid = cser.get_series_svid(chk['idnum'], 4) + self.assertEqual(4, len(cser.get_pcommit_dict(svid))) + + # Remove the series and add it 
again with just two commits + with terminal.capture(): + cser.remove('third4') + + with terminal.capture() as (out, _): + cser.add('third4', 'The glorious third series', mark=False, + allow_unmarked=True, end='third4~2') + lines = out.getvalue().splitlines() + self.assertEqual( + "Adding series 'third' v4: mark False allow_unmarked True", + lines[0]) + self.assertRegex( + lines[1], + 'Ending before .* main: Change to the main program') + self.assertEqual("Added series 'third' v4 (2 commits)", lines[2]) + + sdict = cser.db.series_get_dict() + self.assertIn('third', sdict) + chk = sdict['third'] + self.assertEqual('third', chk['name']) + self.assertEqual('The glorious third series', chk['desc']) + + svid = cser.get_series_svid(chk['idnum'], 4) + self.assertEqual(2, len(cser.get_pcommit_dict(svid))) + + def test_series_add_wrong_version(self): + """Test adding a series with an incorrect branch name or version + + This updates branch 'first' to have version 2, then tries to add it. + """ + cser = self.get_cser() + self.assertFalse(cser.db.series_get_dict()) + + with terminal.capture(): + _, ser, max_vers, _ = cser.prep_series('first') + cser.update_series('first', ser, max_vers, None, False, + add_vers=2) + + with self.assertRaises(ValueError) as exc: + with terminal.capture(): + cser.add('first', 'my description', allow_unmarked=True) + self.assertEqual( + "Series name 'first' suggests version 1 but Series-version tag " + 'indicates 2 (see --force-version)', str(exc.exception)) + + # Now try again with --force-version which should force version 1 + with terminal.capture() as (out, _): + cser.add('first', 'my description', allow_unmarked=True, + force_version=True) + itr = iter(out.getvalue().splitlines()) + self.assertEqual( + "Adding series 'first' v1: mark False allow_unmarked True", + next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual( + "Processing 2 commits from branch 'first'", next(itr)) + 
self.assertRegex(next(itr), + f'- {HASH_RE} as {HASH_RE} i2c: I2C things') + self.assertRegex(next(itr), + f'- rm v1: {HASH_RE} as {HASH_RE} spi: SPI fixes') + self.assertRegex(next(itr), + f'Updating branch first from {HASH_RE} to {HASH_RE}') + self.assertEqual("Added series 'first' v1 (2 commits)", next(itr)) + try: + self.assertEqual('extra line', next(itr)) + except StopIteration: + pass + + # Since this is v1 the Series-version tag should have been removed + series = patchstream.get_metadata('first', 0, 2, git_dir=self.gitdir) + self.assertNotIn('version', series) + + def _fake_patchwork_cser(self, subpath): + """Fake Patchwork server for the function below + + This handles accessing various things used by the tests below. It has + hard-coded data, about from self.autolink_extra which can be adjusted + by the test. + + Args: + subpath (str): URL subpath to use + """ + # Get a list of projects + if subpath == 'projects/': + return [ + {'id': self.PROJ_ID, 'name': 'U-Boot', + 'link_name': self.PROJ_LINK_NAME}, + {'id': 9, 'name': 'other', 'link_name': 'other'} + ] + + # Search for series by their cover-letter name + re_search = re.match(r'series/\?project=(\d+)&q=.*$', subpath) + if re_search: + result = [ + {'id': 56, 'name': 'contains first name', 'version': 1}, + {'id': 43, 'name': 'has first in it', 'version': 1}, + {'id': 1234, 'name': 'first series', 'version': 1}, + {'id': self.SERIES_ID_SECOND_V1, 'name': self.TITLE_SECOND, + 'version': 1}, + {'id': self.SERIES_ID_SECOND_V2, 'name': self.TITLE_SECOND, + 'version': 2}, + {'id': 12345, 'name': 'i2c: I2C things', 'version': 1}, + ] + if self.autolink_extra: + result += [self.autolink_extra] + return result + + # Read information about a series, given its link (patchwork series ID) + m_series = re.match(r'series/(\d+)/$', subpath) + series_id = int(m_series.group(1)) if m_series else '' + if series_id: + if series_id == self.SERIES_ID_SECOND_V1: + # series 'second' + return { + 'patches': [ + {'id': '10', + 
'name': '[PATCH,1/3] video: Some video improvements', + 'content': ''}, + {'id': '11', + 'name': '[PATCH,2/3] serial: Add a serial driver', + 'content': ''}, + {'id': '12', 'name': '[PATCH,3/3] bootm: Make it boot', + 'content': ''}, + ], + 'cover_letter': { + 'id': 39, + 'name': 'The name of the cover letter', + } + } + if series_id == self.SERIES_ID_SECOND_V2: + # series 'second2' + return { + 'patches': [ + {'id': '110', + 'name': + '[PATCH,v2,1/3] video: Some video improvements', + 'content': ''}, + {'id': '111', + 'name': '[PATCH,v2,2/3] serial: Add a serial driver', + 'content': ''}, + {'id': '112', + 'name': '[PATCH,v2,3/3] bootm: Make it boot', + 'content': ''}, + ], + 'cover_letter': { + 'id': 139, + 'name': 'The name of the cover letter', + } + } + if series_id == self.SERIES_ID_FIRST_V3: + # series 'first3' + return { + 'patches': [ + {'id': 20, 'name': '[PATCH,v3,1/2] i2c: I2C things', + 'content': ''}, + {'id': 21, 'name': '[PATCH,v3,2/2] spi: SPI fixes', + 'content': ''}, + ], + 'cover_letter': { + 'id': 29, + 'name': 'Cover letter for first', + } + } + if series_id == 123: + return { + 'patches': [ + {'id': 20, 'name': '[PATCH,1/2] i2c: I2C things', + 'content': ''}, + {'id': 21, 'name': '[PATCH,2/2] spi: SPI fixes', + 'content': ''}, + ], + } + if series_id == 1234: + return { + 'patches': [ + {'id': 20, 'name': '[PATCH,v2,1/2] i2c: I2C things', + 'content': ''}, + {'id': 21, 'name': '[PATCH,v2,2/2] spi: SPI fixes', + 'content': ''}, + ], + } + raise ValueError(f'Fake Patchwork unknown series_id: {series_id}') + + # Read patch status + m_pat = re.search(r'patches/(\d*)/$', subpath) + patch_id = int(m_pat.group(1)) if m_pat else '' + if patch_id: + if patch_id in [10, 110]: + return {'state': 'accepted', + 'content': + 'Reviewed-by: Fred Bloggs <fred@bloggs.com>'} + if patch_id in [11, 111]: + return {'state': 'changes-requested', 'content': ''} + if patch_id in [12, 112]: + return {'state': 'rejected', + 'content': "I don't like this at all, sorry"} 
+ if patch_id == 20: + return {'state': 'awaiting-upstream', 'content': ''} + if patch_id == 21: + return {'state': 'not-applicable', 'content': ''} + raise ValueError(f'Fake Patchwork unknown patch_id: {patch_id}') + + # Read comments a from patch + m_comm = re.search(r'patches/(\d*)/comments/', subpath) + patch_id = int(m_comm.group(1)) if m_comm else '' + if patch_id: + if patch_id in [10, 110]: + return [ + {'id': 1, 'content': ''}, + {'id': 2, + 'content': + '''On some date Mary Smith <msmith@wibble.com> wrote: +> This was my original patch +> which is being quoted + +I like the approach here and I would love to see more of it. + +Reviewed-by: Fred Bloggs <fred@bloggs.com> +''', + 'submitter': { + 'name': 'Fred Bloggs', + 'email': 'fred@bloggs.com', + } + }, + ] + if patch_id in [11, 111]: + return [] + if patch_id in [12, 112]: + return [ + {'id': 4, 'content': ''}, + {'id': 5, 'content': ''}, + {'id': 6, 'content': ''}, + ] + if patch_id == 20: + return [ + {'id': 7, 'content': + '''On some date Alex Miller <alex@country.org> wrote: + +> Sometimes we need to create a patch. 
+> This is one of those times + +Tested-by: Mary Smith <msmith@wibble.com> # yak +'''}, + {'id': 8, 'content': ''}, + ] + if patch_id == 21: + return [] + raise ValueError( + f'Fake Patchwork does not understand patch_id {patch_id}: ' + f'{subpath}') + + # Read comments from a cover letter + m_cover_id = re.search(r'covers/(\d*)/comments/', subpath) + cover_id = int(m_cover_id.group(1)) if m_cover_id else '' + if cover_id: + if cover_id in [39, 139]: + return [ + {'content': 'some comment', + 'submitter': { + 'name': 'A user', + 'email': 'user@user.com', + }, + 'date': 'Sun 13 Apr 14:06:02 MDT 2025', + }, + {'content': 'another comment', + 'submitter': { + 'name': 'Ghenkis Khan', + 'email': 'gk@eurasia.gov', + }, + 'date': 'Sun 13 Apr 13:06:02 MDT 2025', + }, + ] + if cover_id == 29: + return [] + + raise ValueError(f'Fake Patchwork unknown cover_id: {cover_id}') + + raise ValueError(f'Fake Patchwork does not understand: {subpath}') + + def setup_second(self, do_sync=True): + """Set up the 'second' series synced with the fake patchwork + + Args: + do_sync (bool): True to sync the series + + Return: tuple: + Cseries: New Cseries object + pwork: Patchwork object + """ + with self.stage('setup second'): + cser = self.get_cser() + pwork = Patchwork.for_testing(self._fake_patchwork_cser) + pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME) + + with terminal.capture() as (out, _): + cser.add('first', '', allow_unmarked=True) + cser.add('second', allow_unmarked=True) + + series = patchstream.get_metadata_for_list('second', self.gitdir, + 3) + self.assertEqual('456', series.links) + + with terminal.capture() as (out, _): + cser.increment('second') + + series = patchstream.get_metadata_for_list('second', self.gitdir, + 3) + self.assertEqual('456', series.links) + + series = patchstream.get_metadata_for_list('second2', self.gitdir, + 3) + self.assertEqual('1:456', series.links) + + if do_sync: + with terminal.capture() as (out, _): + cser.link_auto(pwork, 'second', 2, 
True) + with terminal.capture() as (out, _): + cser.gather(pwork, 'second', 2, False, True, False) + lines = out.getvalue().splitlines() + self.assertEqual( + "Updating series 'second' version 2 from link '457'", + lines[0]) + self.assertEqual( + '3 patches and cover letter updated (8 requests)', + lines[1]) + self.assertEqual(2, len(lines)) + + return cser, pwork + + def test_series_add_no_cover(self): + """Test patchwork when adding a series which has no cover letter""" + cser = self.get_cser() + pwork = Patchwork.for_testing(self._fake_patchwork_cser) + pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME) + + with terminal.capture() as (out, _): + cser.add('first', 'my name for this', mark=False, + allow_unmarked=True) + self.assertIn("Added series 'first' v1 (2 commits)", out.getvalue()) + + with terminal.capture() as (out, _): + cser.link_auto(pwork, 'first', 1, True) + self.assertIn("Setting link for series 'first' v1 to 12345", + out.getvalue()) + + def test_series_list(self): + """Test listing cseries""" + self.setup_second() + + self.db_close() + args = Namespace(subcmd='ls') + with terminal.capture() as (out, _): + control.do_series(args, test_db=self.tmpdir, pwork=True) + lines = out.getvalue().splitlines() + self.assertEqual(5, len(lines)) + self.assertEqual( + 'Name Description ' + 'Accepted Versions', lines[0]) + self.assertTrue(lines[1].startswith('--')) + self.assertEqual( + 'first ' + ' -/2 1', lines[2]) + self.assertEqual( + 'second Series for my board ' + ' 1/3 1 2', lines[3]) + self.assertTrue(lines[4].startswith('--')) + + def test_do_series_add(self): + """Add a new cseries""" + self.make_git_tree() + args = Namespace(subcmd='add', desc='my-description', series='first', + mark=False, allow_unmarked=True, upstream=None, + dry_run=False) + with terminal.capture() as (out, _): + control.do_series(args, test_db=self.tmpdir, pwork=True) + + cser = self.get_database() + slist = cser.db.series_get_dict() + self.assertEqual(1, len(slist)) + ser = 
slist.get('first') + self.assertTrue(ser) + self.assertEqual('first', ser.name) + self.assertEqual('my-description', ser.desc) + + self.db_close() + args.subcmd = 'ls' + with terminal.capture() as (out, _): + control.do_series(args, test_db=self.tmpdir, pwork=True) + lines = out.getvalue().splitlines() + self.assertEqual(4, len(lines)) + self.assertTrue(lines[1].startswith('--')) + self.assertEqual( + 'first my-description ' + '-/2 1', lines[2]) + + def test_do_series_add_cmdline(self): + """Add a new cseries using the cmdline""" + self.make_git_tree() + with terminal.capture(): + self.run_args('series', '-s', 'first', 'add', '-M', + '-D', 'my-description', pwork=True) + + cser = self.get_database() + slist = cser.db.series_get_dict() + self.assertEqual(1, len(slist)) + ser = slist.get('first') + self.assertTrue(ser) + self.assertEqual('first', ser.name) + self.assertEqual('my-description', ser.desc) + + def test_do_series_add_auto(self): + """Add a new cseries without any arguments""" + self.make_git_tree() + + # Use the 'second' branch, which has a cover letter + gitutil.checkout('second', self.gitdir, work_tree=self.tmpdir, + force=True) + args = Namespace(subcmd='add', series=None, mark=False, + allow_unmarked=True, upstream=None, dry_run=False, + desc=None) + with terminal.capture(): + control.do_series(args, test_db=self.tmpdir, pwork=True) + + cser = self.get_database() + slist = cser.db.series_get_dict() + self.assertEqual(1, len(slist)) + ser = slist.get('second') + self.assertTrue(ser) + self.assertEqual('second', ser.name) + self.assertEqual('Series for my board', ser.desc) + cser.close_database() + + def _check_inc(self, out): + """Check output from an 'increment' operation + + Args: + out (StringIO): Text to check + """ + itr = iter(out.getvalue().splitlines()) + + self.assertEqual("Increment 'first' v1: 2 patches", next(itr)) + self.assertRegex(next(itr), 'Checking out upstream commit .*') + self.assertEqual("Processing 2 commits from branch 
'first2'", + next(itr)) + self.assertRegex(next(itr), + f'- {HASH_RE} as {HASH_RE} i2c: I2C things') + self.assertRegex(next(itr), + f'- add v2: {HASH_RE} as {HASH_RE} spi: SPI fixes') + self.assertRegex( + next(itr), f'Updating branch first2 from {HASH_RE} to {HASH_RE}') + self.assertEqual('Added new branch first2', next(itr)) + return itr + + def test_series_link(self): + """Test adding a patchwork link to a cseries""" + cser = self.get_cser() + + repo = pygit2.init_repository(self.gitdir) + first = repo.lookup_branch('first').peel( + pygit2.enums.ObjectType.COMMIT).oid + base = repo.lookup_branch('base').peel( + pygit2.enums.ObjectType.COMMIT).oid + + gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir, + force=True) + + with terminal.capture() as (out, _): + cser.add('first', '', allow_unmarked=True) + + with self.assertRaises(ValueError) as exc: + cser.link_set('first', 2, '1234', True) + self.assertEqual("Series 'first' does not have a version 2", + str(exc.exception)) + + self.assertEqual('first', gitutil.get_branch(self.gitdir)) + with terminal.capture() as (out, _): + cser.increment('first') + self.assertTrue(repo.lookup_branch('first2')) + + with terminal.capture() as (out, _): + cser.link_set('first', 2, '2345', True) + + lines = out.getvalue().splitlines() + self.assertEqual(6, len(lines)) + self.assertRegex( + lines[0], 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual("Processing 2 commits from branch 'first2'", + lines[1]) + self.assertRegex( + lines[2], + f'- {HASH_RE} as {HASH_RE} i2c: I2C things') + self.assertRegex( + lines[3], + f"- add v2 links '2:2345': {HASH_RE} as {HASH_RE} spi: SPI fixes") + self.assertRegex( + lines[4], f'Updating branch first2 from {HASH_RE} to {HASH_RE}') + self.assertEqual("Setting link for series 'first' v2 to 2345", + lines[5]) + + self.assertEqual('2345', cser.link_get('first', 2)) + + series = patchstream.get_metadata_for_list('first2', self.gitdir, 2) + self.assertEqual('2:2345', 
series.links) + + self.assertEqual('first2', gitutil.get_branch(self.gitdir)) + + # Check the original series was left alone + self.assertEqual( + first, repo.lookup_branch('first').peel( + pygit2.enums.ObjectType.COMMIT).oid) + count = 2 + series1 = patchstream.get_metadata_for_list('first', self.gitdir, + count) + self.assertFalse('links' in series1) + self.assertFalse('version' in series1) + + # Check that base is left alone + self.assertEqual( + base, repo.lookup_branch('base').peel( + pygit2.enums.ObjectType.COMMIT).oid) + series1 = patchstream.get_metadata_for_list('base', self.gitdir, count) + self.assertFalse('links' in series1) + self.assertFalse('version' in series1) + + # Check out second and try to update first + gitutil.checkout('second', self.gitdir, work_tree=self.tmpdir, + force=True) + with terminal.capture(): + cser.link_set('first', 1, '16', True) + + # Overwrite the link + with terminal.capture(): + cser.link_set('first', 1, '17', True) + + series2 = patchstream.get_metadata_for_list('first', self.gitdir, + count) + self.assertEqual('1:17', series2.links) + + def test_series_link_cmdline(self): + """Test adding a patchwork link to a cseries using the cmdline""" + cser = self.get_cser() + + gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir, + force=True) + + with terminal.capture() as (out, _): + cser.add('first', '', allow_unmarked=True) + + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'first', '-V', '4', 'set-link', '-u', + '1234', expect_ret=1, pwork=True) + self.assertIn("Series 'first' does not have a version 4", + out.getvalue()) + + with self.assertRaises(ValueError) as exc: + cser.link_get('first', 4) + self.assertEqual("Series 'first' does not have a version 4", + str(exc.exception)) + + with terminal.capture() as (out, _): + cser.increment('first') + + with self.assertRaises(ValueError) as exc: + cser.link_get('first', 4) + self.assertEqual("Series 'first' does not have a version 4", + 
str(exc.exception)) + + with terminal.capture() as (out, _): + cser.increment('first') + cser.increment('first') + + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'first', '-V', '4', 'set-link', '-u', + '1234', pwork=True) + lines = out.getvalue().splitlines() + self.assertRegex( + lines[-3], + f"- add v4 links '4:1234': {HASH_RE} as {HASH_RE} spi: SPI fixes") + self.assertEqual("Setting link for series 'first' v4 to 1234", + lines[-1]) + + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'first', '-V', '4', 'get-link', + pwork=True) + self.assertIn('1234', out.getvalue()) + + series = patchstream.get_metadata_for_list('first4', self.gitdir, 1) + self.assertEqual('4:1234', series.links) + + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'first', '-V', '5', 'get-link', + expect_ret=1, pwork=True) + + self.assertIn("Series 'first' does not have a version 5", + out.getvalue()) + + # Checkout 'first' and try to get the link from 'first4' + gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir, + force=True) + + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'first4', 'get-link', pwork=True) + self.assertIn('1234', out.getvalue()) + + # This should get the link for 'first' + with terminal.capture() as (out, _): + self.run_args('series', 'get-link', pwork=True) + self.assertIn('None', out.getvalue()) + + # Checkout 'first4' again; this should get the link for 'first4' + gitutil.checkout('first4', self.gitdir, work_tree=self.tmpdir, + force=True) + + with terminal.capture() as (out, _): + self.run_args('series', 'get-link', pwork=True) + self.assertIn('1234', out.getvalue()) + + def test_series_link_auto_version(self): + """Test finding the patchwork link for a cseries automatically""" + cser = self.get_cser() + + with terminal.capture() as (out, _): + cser.add('second', allow_unmarked=True) + + # Make sure that the link is there + count = 3 + series = 
patchstream.get_metadata('second', 0, count, + git_dir=self.gitdir) + self.assertEqual(f'{self.SERIES_ID_SECOND_V1}', series.links) + + # Set link with detected version + with terminal.capture() as (out, _): + cser.link_set('second', None, f'{self.SERIES_ID_SECOND_V1}', True) + self.assertEqual( + "Setting link for series 'second' v1 to 456", + out.getvalue().splitlines()[-1]) + + # Make sure that the link was set + series = patchstream.get_metadata('second', 0, count, + git_dir=self.gitdir) + self.assertEqual(f'1:{self.SERIES_ID_SECOND_V1}', series.links) + + with terminal.capture(): + cser.increment('second') + + # Make sure that the new series gets the same link + series = patchstream.get_metadata('second2', 0, 3, + git_dir=self.gitdir) + + pwork = Patchwork.for_testing(self._fake_patchwork_cser) + pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME) + self.assertFalse(cser.project_get()) + cser.project_set(pwork, 'U-Boot', quiet=True) + + self.assertEqual( + (self.SERIES_ID_SECOND_V1, None, 'second', 1, + 'Series for my board'), + cser.link_search(pwork, 'second', 1)) + + with terminal.capture(): + cser.increment('second') + + self.assertEqual((457, None, 'second', 2, 'Series for my board'), + cser.link_search(pwork, 'second', 2)) + + def test_series_link_auto_name(self): + """Test finding the patchwork link for a cseries with auto name""" + cser = self.get_cser() + + with terminal.capture() as (out, _): + cser.add('first', '', allow_unmarked=True) + + # Set link with detected name + with self.assertRaises(ValueError) as exc: + cser.link_set(None, 2, '2345', True) + self.assertEqual( + "Series 'first' does not have a version 2", str(exc.exception)) + + with terminal.capture(): + cser.increment('first') + + with terminal.capture() as (out, _): + cser.link_set(None, 2, '2345', True) + self.assertEqual( + "Setting link for series 'first' v2 to 2345", + out.getvalue().splitlines()[-1]) + + svlist = cser.get_ser_ver_list() + self.assertEqual(2, len(svlist)) + 
self.assertEqual(1, svlist[0].idnum) + self.assertEqual(1, svlist[0].series_id) + self.assertEqual(1, svlist[0].version) + self.assertIsNone(svlist[0].link) + + self.assertEqual(2, svlist[1].idnum) + self.assertEqual(1, svlist[1].series_id) + self.assertEqual(2, svlist[1].version) + self.assertEqual('2345', svlist[1].link) + + def test_series_link_auto_name_version(self): + """Find patchwork link for a cseries with auto name + version""" + cser = self.get_cser() + + with terminal.capture() as (out, _): + cser.add('first', '', allow_unmarked=True) + + # Set link with detected name and version + with terminal.capture() as (out, _): + cser.link_set(None, None, '1234', True) + self.assertEqual( + "Setting link for series 'first' v1 to 1234", + out.getvalue().splitlines()[-1]) + + with terminal.capture(): + cser.increment('first') + + with terminal.capture() as (out, _): + cser.link_set(None, None, '2345', True) + self.assertEqual( + "Setting link for series 'first' v2 to 2345", + out.getvalue().splitlines()[-1]) + + svlist = cser.get_ser_ver_list() + self.assertEqual(2, len(svlist)) + self.assertEqual(1, svlist[0].idnum) + self.assertEqual(1, svlist[0].series_id) + self.assertEqual(1, svlist[0].version) + self.assertEqual('1234', svlist[0].link) + + self.assertEqual(2, svlist[1].idnum) + self.assertEqual(1, svlist[1].series_id) + self.assertEqual(2, svlist[1].version) + self.assertEqual('2345', svlist[1].link) + + def test_series_link_missing(self): + """Test finding patchwork link for a cseries but it is missing""" + cser = self.get_cser() + + with terminal.capture(): + cser.add('second', allow_unmarked=True) + + with terminal.capture(): + cser.increment('second') + cser.increment('second') + + pwork = Patchwork.for_testing(self._fake_patchwork_cser) + pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME) + self.assertFalse(cser.project_get()) + cser.project_set(pwork, 'U-Boot', quiet=True) + + self.assertEqual( + (self.SERIES_ID_SECOND_V1, None, 'second', 1, + 'Series 
for my board'), + cser.link_search(pwork, 'second', 1)) + self.assertEqual((457, None, 'second', 2, 'Series for my board'), + cser.link_search(pwork, 'second', 2)) + res = cser.link_search(pwork, 'second', 3) + self.assertEqual( + (None, + [{'id': self.SERIES_ID_SECOND_V1, 'name': 'Series for my board', + 'version': 1}, + {'id': 457, 'name': 'Series for my board', 'version': 2}], + 'second', 3, 'Series for my board'), + res) + + def check_series_autolink(self): + """Common code for autolink tests""" + cser = self.get_cser() + + with self.stage('setup'): + pwork = Patchwork.for_testing(self._fake_patchwork_cser) + pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME) + self.assertFalse(cser.project_get()) + cser.project_set(pwork, 'U-Boot', quiet=True) + + with terminal.capture(): + cser.add('first', '', allow_unmarked=True) + cser.add('second', allow_unmarked=True) + + with self.stage('autolink unset'): + with terminal.capture() as (out, _): + yield cser, pwork + self.assertEqual( + "Setting link for series 'second' v1 to " + f'{self.SERIES_ID_SECOND_V1}', + out.getvalue().splitlines()[-1]) + + svlist = cser.get_ser_ver_list() + self.assertEqual(2, len(svlist)) + self.assertEqual(1, svlist[0].idnum) + self.assertEqual(1, svlist[0].series_id) + self.assertEqual(1, svlist[0].version) + self.assertEqual(2, svlist[1].idnum) + self.assertEqual(2, svlist[1].series_id) + self.assertEqual(1, svlist[1].version) + self.assertEqual(str(self.SERIES_ID_SECOND_V1), svlist[1].link) + yield None + + def test_series_autolink(self): + """Test linking a cseries to its patchwork series by description""" + cor = self.check_series_autolink() + cser, pwork = next(cor) + + with self.assertRaises(ValueError) as exc: + cser.link_auto(pwork, 'first', None, True) + self.assertIn("Series 'first' has an empty description", + str(exc.exception)) + + # autolink unset + cser.link_auto(pwork, 'second', None, True) + + self.assertFalse(next(cor)) + cor.close() + + def 
test_series_autolink_cmdline(self): + """Test linking to patchwork series by description on cmdline""" + cor = self.check_series_autolink() + _, pwork = next(cor) + + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'first', 'autolink', expect_ret=1, + pwork=pwork) + self.assertEqual( + "patman: ValueError: Series 'first' has an empty description", + out.getvalue().strip()) + + # autolink unset + self.run_args('series', '-s', 'second', 'autolink', '-u', pwork=pwork) + + self.assertFalse(next(cor)) + cor.close() + + def _autolink_setup(self): + """Set things up for autolink tests + + Return: tuple: + Cseries object + Patchwork object + """ + cser = self.get_cser() + + pwork = Patchwork.for_testing(self._fake_patchwork_cser) + pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME) + self.assertFalse(cser.project_get()) + cser.project_set(pwork, 'U-Boot', quiet=True) + + with terminal.capture(): + cser.add('first', 'first series', allow_unmarked=True) + cser.add('second', allow_unmarked=True) + cser.increment('first') + return cser, pwork + + def test_series_link_auto_all(self): + """Test linking all cseries to their patchwork series by description""" + cser, pwork = self._autolink_setup() + with terminal.capture() as (out, _): + summary = cser.link_auto_all(pwork, update_commit=True, + link_all_versions=True, + replace_existing=False, dry_run=True, + show_summary=False) + self.assertEqual(3, len(summary)) + items = iter(summary.values()) + linked = next(items) + self.assertEqual( + ('first', 1, None, 'first series', 'linked:1234'), linked) + self.assertEqual( + ('first', 2, None, 'first series', 'not found'), next(items)) + self.assertEqual( + ('second', 1, f'{self.SERIES_ID_SECOND_V1}', 'Series for my board', + f'already:{self.SERIES_ID_SECOND_V1}'), + next(items)) + self.assertEqual('Dry run completed', out.getvalue().splitlines()[-1]) + + # A second dry run should do exactly the same thing + with terminal.capture() as (out2, _): + summary2 = 
cser.link_auto_all(pwork, update_commit=True, + link_all_versions=True, + replace_existing=False, dry_run=True, + show_summary=False) + self.assertEqual(out.getvalue(), out2.getvalue()) + self.assertEqual(summary, summary2) + + # Now do it for real + with terminal.capture(): + summary = cser.link_auto_all(pwork, update_commit=True, + link_all_versions=True, + replace_existing=False, dry_run=False, + show_summary=False) + + # Check the link was updated + pdict = cser.get_ser_ver_dict() + svid = list(summary)[0] + self.assertEqual('1234', pdict[svid].link) + + series = patchstream.get_metadata_for_list('first', self.gitdir, 2) + self.assertEqual('1:1234', series.links) + + def test_series_autolink_latest(self): + """Test linking the lastest versions""" + cser, pwork = self._autolink_setup() + with terminal.capture(): + summary = cser.link_auto_all(pwork, update_commit=True, + link_all_versions=False, + replace_existing=False, dry_run=False, + show_summary=False) + self.assertEqual(2, len(summary)) + items = iter(summary.values()) + self.assertEqual( + ('first', 2, None, 'first series', 'not found'), next(items)) + self.assertEqual( + ('second', 1, f'{self.SERIES_ID_SECOND_V1}', 'Series for my board', + f'already:{self.SERIES_ID_SECOND_V1}'), + next(items)) + + def test_series_autolink_no_update(self): + """Test linking the lastest versions without updating commits""" + cser, pwork = self._autolink_setup() + with terminal.capture(): + cser.link_auto_all(pwork, update_commit=False, + link_all_versions=True, replace_existing=False, + dry_run=False, + show_summary=False) + + series = patchstream.get_metadata_for_list('first', self.gitdir, 2) + self.assertNotIn('links', series) + + def test_series_autolink_replace(self): + """Test linking the lastest versions without updating commits""" + cser, pwork = self._autolink_setup() + with terminal.capture(): + summary = cser.link_auto_all(pwork, update_commit=True, + link_all_versions=True, + replace_existing=True, 
dry_run=False, + show_summary=False) + self.assertEqual(3, len(summary)) + items = iter(summary.values()) + linked = next(items) + self.assertEqual( + ('first', 1, None, 'first series', 'linked:1234'), linked) + self.assertEqual( + ('first', 2, None, 'first series', 'not found'), next(items)) + self.assertEqual( + ('second', 1, f'{self.SERIES_ID_SECOND_V1}', 'Series for my board', + f'linked:{self.SERIES_ID_SECOND_V1}'), + next(items)) + + def test_series_autolink_extra(self): + """Test command-line operation + + This just uses mocks for now since we can rely on the direct tests for + the actual operation. + """ + _, pwork = self._autolink_setup() + with (mock.patch.object(cseries.Cseries, 'link_auto_all', + return_value=None) as method): + self.run_args('series', 'autolink-all', pwork=True) + method.assert_called_once_with(True, update_commit=False, + link_all_versions=False, + replace_existing=False, dry_run=False, + show_summary=True) + + with (mock.patch.object(cseries.Cseries, 'link_auto_all', + return_value=None) as method): + self.run_args('series', 'autolink-all', '-a', pwork=True) + method.assert_called_once_with(True, update_commit=False, + link_all_versions=True, + replace_existing=False, dry_run=False, + show_summary=True) + + with (mock.patch.object(cseries.Cseries, 'link_auto_all', + return_value=None) as method): + self.run_args('series', 'autolink-all', '-a', '-r', pwork=True) + method.assert_called_once_with(True, update_commit=False, + link_all_versions=True, + replace_existing=True, dry_run=False, + show_summary=True) + + with (mock.patch.object(cseries.Cseries, 'link_auto_all', + return_value=None) as method): + self.run_args('series', '-n', 'autolink-all', '-r', pwork=True) + method.assert_called_once_with(True, update_commit=False, + link_all_versions=False, + replace_existing=True, dry_run=True, + show_summary=True) + + with (mock.patch.object(cseries.Cseries, 'link_auto_all', + return_value=None) as method): + self.run_args('series', 
'autolink-all', '-u', pwork=True) + method.assert_called_once_with(True, update_commit=True, + link_all_versions=False, + replace_existing=False, dry_run=False, + show_summary=True) + + # Now do a real one to check the patchwork handling and output + with terminal.capture() as (out, _): + self.run_args('series', 'autolink-all', '-a', pwork=pwork) + itr = iter(out.getvalue().splitlines()) + self.assertEqual( + '1 series linked, 1 already linked, 1 not found (3 requests)', + next(itr)) + self.assertEqual('', next(itr)) + self.assertEqual( + 'Name Version Description ' + ' Result', next(itr)) + self.assertTrue(next(itr).startswith('--')) + self.assertEqual( + 'first 1 first series ' + ' linked:1234', next(itr)) + self.assertEqual( + 'first 2 first series ' + ' not found', next(itr)) + self.assertEqual( + 'second 1 Series for my board ' + f' already:{self.SERIES_ID_SECOND_V1}', + next(itr)) + self.assertTrue(next(itr).startswith('--')) + self.assert_finished(itr) + + def check_series_archive(self): + """Coroutine to run the archive test""" + cser = self.get_cser() + with self.stage('setup'): + with terminal.capture(): + cser.add('first', '', allow_unmarked=True) + + # Check the series is visible in the list + slist = cser.db.series_get_dict() + self.assertEqual(1, len(slist)) + self.assertEqual('first', slist['first'].name) + + # Add a second branch + with terminal.capture(): + cser.increment('first') + + cser.fake_now = datetime(24, 9, 14) + repo = pygit2.init_repository(self.gitdir) + with self.stage('archive'): + expected_commit1 = repo.revparse_single('first') + expected_commit2 = repo.revparse_single('first2') + expected_tag1 = 'first-14sep24' + expected_tag2 = 'first2-14sep24' + + # Archive it and make sure it is invisible + yield cser + slist = cser.db.series_get_dict() + self.assertFalse(slist) + + # ...unless we include archived items + slist = cser.db.series_get_dict(include_archived=True) + self.assertEqual(1, len(slist)) + first = slist['first'] + 
self.assertEqual('first', first.name) + + # Make sure the branches have been tagged + svlist = cser.db.ser_ver_get_for_series(first.idnum) + self.assertEqual(expected_tag1, svlist[0].archive_tag) + self.assertEqual(expected_tag2, svlist[1].archive_tag) + + # Check that the tags were created and point to old branch commits + target1 = repo.revparse_single(expected_tag1) + self.assertEqual(expected_commit1, target1.get_object()) + target2 = repo.revparse_single(expected_tag2) + self.assertEqual(expected_commit2, target2.get_object()) + + # The branches should be deleted + self.assertFalse('first' in repo.branches) + self.assertFalse('first2' in repo.branches) + + with self.stage('unarchive'): + # or we unarchive it + yield cser + slist = cser.db.series_get_dict() + self.assertEqual(1, len(slist)) + + # Make sure the branches have been restored + branch1 = repo.branches['first'] + branch2 = repo.branches['first2'] + self.assertEqual(expected_commit1.oid, branch1.target) + self.assertEqual(expected_commit2.oid, branch2.target) + + # Make sure the tags were deleted + try: + target1 = repo.revparse_single(expected_tag1) + self.fail('target1 is still present') + except KeyError: + pass + try: + target1 = repo.revparse_single(expected_tag2) + self.fail('target2 is still present') + except KeyError: + pass + + # Make sure the tag information has been removed + svlist = cser.db.ser_ver_get_for_series(first.idnum) + self.assertFalse(svlist[0].archive_tag) + self.assertFalse(svlist[1].archive_tag) + + yield False + + def test_series_archive(self): + """Test marking a series as archived""" + cor = self.check_series_archive() + cser = next(cor) + + # Archive it and make sure it is invisible + cser.archive('first') + cser = next(cor) + cser.unarchive('first') + self.assertFalse(next(cor)) + cor.close() + + def test_series_archive_cmdline(self): + """Test marking a series as archived with cmdline""" + cor = self.check_series_archive() + cser = next(cor) + + # Archive it and make 
sure it is invisible + self.run_args('series', '-s', 'first', 'archive', pwork=True, + cser=cser) + next(cor) + self.run_args('series', '-s', 'first', 'unarchive', pwork=True, + cser=cser) + self.assertFalse(next(cor)) + cor.close() + + def check_series_inc(self): + """Coroutine to run the increment test""" + cser = self.get_cser() + + with self.stage('setup'): + gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir, + force=True) + with terminal.capture() as (out, _): + cser.add('first', '', allow_unmarked=True) + + with self.stage('increment'): + with terminal.capture() as (out, _): + yield cser + self._check_inc(out) + + slist = cser.db.series_get_dict() + self.assertEqual(1, len(slist)) + + svlist = cser.get_ser_ver_list() + self.assertEqual(2, len(svlist)) + self.assertEqual(1, svlist[0].idnum) + self.assertEqual(1, svlist[0].series_id) + self.assertEqual(1, svlist[0].version) + + self.assertEqual(2, svlist[1].idnum) + self.assertEqual(1, svlist[1].series_id) + self.assertEqual(2, svlist[1].version) + + series = patchstream.get_metadata_for_list('first2', self.gitdir, + 1) + self.assertEqual('2', series.version) + + series = patchstream.get_metadata_for_list('first', self.gitdir, 1) + self.assertNotIn('version', series) + + self.assertEqual('first2', gitutil.get_branch(self.gitdir)) + yield None + + def test_series_inc(self): + """Test incrementing the version""" + cor = self.check_series_inc() + cser = next(cor) + + cser.increment('first') + self.assertFalse(next(cor)) + + cor.close() + + def test_series_inc_cmdline(self): + """Test incrementing the version with cmdline""" + cor = self.check_series_inc() + next(cor) + + self.run_args('series', '-s', 'first', 'inc', pwork=True) + self.assertFalse(next(cor)) + cor.close() + + def test_series_inc_no_upstream(self): + """Increment a series which has no upstream branch""" + cser = self.get_cser() + + gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir, + force=True) + with terminal.capture(): + 
cser.add('first', '', allow_unmarked=True) + + repo = pygit2.init_repository(self.gitdir) + upstream = repo.lookup_branch('base') + upstream.delete() + with terminal.capture(): + cser.increment('first') + + slist = cser.db.series_get_dict() + self.assertEqual(1, len(slist)) + + def test_series_inc_dryrun(self): + """Test incrementing the version with cmdline""" + cser = self.get_cser() + + gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir, + force=True) + with terminal.capture() as (out, _): + cser.add('first', '', allow_unmarked=True) + + with terminal.capture() as (out, _): + cser.increment('first', dry_run=True) + itr = self._check_inc(out) + self.assertEqual('Dry run completed', next(itr)) + + # Make sure that nothing was added + svlist = cser.get_ser_ver_list() + self.assertEqual(1, len(svlist)) + self.assertEqual(1, svlist[0].idnum) + self.assertEqual(1, svlist[0].series_id) + self.assertEqual(1, svlist[0].version) + + # We should still be on the same branch + self.assertEqual('first', gitutil.get_branch(self.gitdir)) + + def test_series_dec(self): + """Test decrementing the version""" + cser = self.get_cser() + + gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir, + force=True) + with terminal.capture() as (out, _): + cser.add('first', '', allow_unmarked=True) + + pclist = cser.get_pcommit_dict() + self.assertEqual(2, len(pclist)) + + # Try decrementing when there is only one version + with self.assertRaises(ValueError) as exc: + cser.decrement('first') + self.assertEqual("Series 'first' only has one version", + str(exc.exception)) + + # Add a version; now there should be two + with terminal.capture() as (out, _): + cser.increment('first') + svdict = cser.get_ser_ver_dict() + self.assertEqual(2, len(svdict)) + + pclist = cser.get_pcommit_dict() + self.assertEqual(4, len(pclist)) + + # Remove version two, using dry run (i.e. 
no effect) + with terminal.capture() as (out, _): + cser.decrement('first', dry_run=True) + svdict = cser.get_ser_ver_dict() + self.assertEqual(2, len(svdict)) + + repo = pygit2.init_repository(self.gitdir) + branch = repo.lookup_branch('first2') + self.assertTrue(branch) + branch_oid = branch.peel(pygit2.enums.ObjectType.COMMIT).oid + + pclist = cser.get_pcommit_dict() + self.assertEqual(4, len(pclist)) + + # Now remove version two for real + with terminal.capture() as (out, _): + cser.decrement('first') + lines = out.getvalue().splitlines() + self.assertEqual(2, len(lines)) + self.assertEqual("Removing series 'first' v2", lines[0]) + self.assertEqual( + f"Deleted branch 'first2' {str(branch_oid)[:10]}", lines[1]) + + svdict = cser.get_ser_ver_dict() + self.assertEqual(1, len(svdict)) + + pclist = cser.get_pcommit_dict() + self.assertEqual(2, len(pclist)) + + branch = repo.lookup_branch('first2') + self.assertFalse(branch) + + # Removing the only version should not be allowed + with self.assertRaises(ValueError) as exc: + cser.decrement('first', dry_run=True) + self.assertEqual("Series 'first' only has one version", + str(exc.exception)) + + def test_upstream_add(self): + """Test adding an upsream""" + cser = self.get_cser() + + cser.upstream_add('us', 'https://one') + ulist = cser.get_upstream_dict() + self.assertEqual(1, len(ulist)) + self.assertEqual(('https://one', None), ulist['us']) + + cser.upstream_add('ci', 'git@two') + ulist = cser.get_upstream_dict() + self.assertEqual(2, len(ulist)) + self.assertEqual(('https://one', None), ulist['us']) + self.assertEqual(('git@two', None), ulist['ci']) + + # Try to add a duplicate + with self.assertRaises(ValueError) as exc: + cser.upstream_add('ci', 'git@three') + self.assertEqual("Upstream 'ci' already exists", str(exc.exception)) + + with terminal.capture() as (out, _): + cser.upstream_list() + lines = out.getvalue().splitlines() + self.assertEqual(2, len(lines)) + self.assertEqual('us https://one', lines[0]) + 
self.assertEqual('ci git@two', lines[1]) + + def test_upstream_add_cmdline(self): + """Test adding an upsream with cmdline""" + with terminal.capture(): + self.run_args('upstream', 'add', 'us', 'https://one') + + with terminal.capture() as (out, _): + self.run_args('upstream', 'list') + lines = out.getvalue().splitlines() + self.assertEqual(1, len(lines)) + self.assertEqual('us https://one', lines[0]) + + def test_upstream_default(self): + """Operation of the default upstream""" + cser = self.get_cser() + + with self.assertRaises(ValueError) as exc: + cser.upstream_set_default('us') + self.assertEqual("No such upstream 'us'", str(exc.exception)) + + cser.upstream_add('us', 'https://one') + cser.upstream_add('ci', 'git@two') + + self.assertIsNone(cser.upstream_get_default()) + + cser.upstream_set_default('us') + self.assertEqual('us', cser.upstream_get_default()) + + cser.upstream_set_default('us') + + cser.upstream_set_default('ci') + self.assertEqual('ci', cser.upstream_get_default()) + + with terminal.capture() as (out, _): + cser.upstream_list() + lines = out.getvalue().splitlines() + self.assertEqual(2, len(lines)) + self.assertEqual('us https://one', lines[0]) + self.assertEqual('ci default git@two', lines[1]) + + cser.upstream_set_default(None) + self.assertIsNone(cser.upstream_get_default()) + + def test_upstream_default_cmdline(self): + """Operation of the default upstream on cmdline""" + with terminal.capture() as (out, _): + self.run_args('upstream', 'default', 'us', expect_ret=1) + self.assertEqual("patman: ValueError: No such upstream 'us'", + out.getvalue().strip().splitlines()[-1]) + + self.run_args('upstream', 'add', 'us', 'https://one') + self.run_args('upstream', 'add', 'ci', 'git@two') + + with terminal.capture() as (out, _): + self.run_args('upstream', 'default') + self.assertEqual('unset', out.getvalue().strip()) + + self.run_args('upstream', 'default', 'us') + with terminal.capture() as (out, _): + self.run_args('upstream', 'default') + 
self.assertEqual('us', out.getvalue().strip()) + + self.run_args('upstream', 'default', 'ci') + with terminal.capture() as (out, _): + self.run_args('upstream', 'default') + self.assertEqual('ci', out.getvalue().strip()) + + with terminal.capture() as (out, _): + self.run_args('upstream', 'default', '--unset') + self.assertFalse(out.getvalue().strip()) + + with terminal.capture() as (out, _): + self.run_args('upstream', 'default') + self.assertEqual('unset', out.getvalue().strip()) + + def test_upstream_delete(self): + """Test operation of the default upstream""" + cser = self.get_cser() + + with self.assertRaises(ValueError) as exc: + cser.upstream_delete('us') + self.assertEqual("No such upstream 'us'", str(exc.exception)) + + cser.upstream_add('us', 'https://one') + cser.upstream_add('ci', 'git@two') + + cser.upstream_set_default('us') + cser.upstream_delete('us') + self.assertIsNone(cser.upstream_get_default()) + + cser.upstream_delete('ci') + ulist = cser.get_upstream_dict() + self.assertFalse(ulist) + + def test_upstream_delete_cmdline(self): + """Test deleting an upstream""" + with terminal.capture() as (out, _): + self.run_args('upstream', 'delete', 'us', expect_ret=1) + self.assertEqual("patman: ValueError: No such upstream 'us'", + out.getvalue().strip().splitlines()[-1]) + + self.run_args('us', 'add', 'us', 'https://one') + self.run_args('us', 'add', 'ci', 'git@two') + + self.run_args('upstream', 'default', 'us') + self.run_args('upstream', 'delete', 'us') + with terminal.capture() as (out, _): + self.run_args('upstream', 'default', 'us', expect_ret=1) + self.assertEqual("patman: ValueError: No such upstream 'us'", + out.getvalue().strip()) + + self.run_args('upstream', 'delete', 'ci') + with terminal.capture() as (out, _): + self.run_args('upstream', 'list') + self.assertFalse(out.getvalue().strip()) + + def test_series_add_mark(self): + """Test marking a cseries with Change-Id fields""" + cser = self.get_cser() + + with terminal.capture(): + 
cser.add('first', '', mark=True) + + pcdict = cser.get_pcommit_dict() + + series = patchstream.get_metadata('first', 0, 2, git_dir=self.gitdir) + self.assertEqual(2, len(series.commits)) + self.assertIn(1, pcdict) + self.assertEqual(1, pcdict[1].idnum) + self.assertEqual('i2c: I2C things', pcdict[1].subject) + self.assertEqual(1, pcdict[1].svid) + self.assertEqual(series.commits[0].change_id, pcdict[1].change_id) + + self.assertIn(2, pcdict) + self.assertEqual(2, pcdict[2].idnum) + self.assertEqual('spi: SPI fixes', pcdict[2].subject) + self.assertEqual(1, pcdict[2].svid) + self.assertEqual(series.commits[1].change_id, pcdict[2].change_id) + + def test_series_add_mark_fail(self): + """Test marking a cseries when the tree is dirty""" + cser = self.get_cser() + + tools.write_file(os.path.join(self.tmpdir, 'fname'), b'123') + with terminal.capture(): + cser.add('first', '', mark=True) + + tools.write_file(os.path.join(self.tmpdir, 'i2c.c'), b'123') + with self.assertRaises(ValueError) as exc: + with terminal.capture(): + cser.add('first', '', mark=True) + self.assertEqual( + "Modified files exist: use 'git status' to check: [' M i2c.c']", + str(exc.exception)) + + def test_series_add_mark_dry_run(self): + """Test marking a cseries with Change-Id fields""" + cser = self.get_cser() + + with terminal.capture() as (out, _): + cser.add('first', '', mark=True, dry_run=True) + itr = iter(out.getvalue().splitlines()) + self.assertEqual( + "Adding series 'first' v1: mark True allow_unmarked False", + next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual("Processing 2 commits from branch 'first'", + next(itr)) + self.assertRegex( + next(itr), f'- marked: {HASH_RE} as {HASH_RE} i2c: I2C things') + self.assertRegex( + next(itr), f'- marked: {HASH_RE} as {HASH_RE} spi: SPI fixes') + self.assertRegex( + next(itr), f'Updating branch first from {HASH_RE} to {HASH_RE}') + self.assertEqual("Added series 'first' v1 (2 
commits)", + next(itr)) + self.assertEqual('Dry run completed', next(itr)) + + # Doing another dry run should produce the same result + with terminal.capture() as (out2, _): + cser.add('first', '', mark=True, dry_run=True) + self.assertEqual(out.getvalue(), out2.getvalue()) + + tools.write_file(os.path.join(self.tmpdir, 'i2c.c'), b'123') + with terminal.capture() as (out, _): + with self.assertRaises(ValueError) as exc: + cser.add('first', '', mark=True, dry_run=True) + self.assertEqual( + "Modified files exist: use 'git status' to check: [' M i2c.c']", + str(exc.exception)) + + pcdict = cser.get_pcommit_dict() + self.assertFalse(pcdict) + + def test_series_add_mark_cmdline(self): + """Test marking a cseries with Change-Id fields using the cmdline""" + cser = self.get_cser() + + with terminal.capture(): + self.run_args('series', '-s', 'first', 'add', '-m', + '-D', 'my-description', pwork=True) + + pcdict = cser.get_pcommit_dict() + self.assertTrue(pcdict[1].change_id) + self.assertTrue(pcdict[2].change_id) + + def test_series_add_unmarked_cmdline(self): + """Test adding an unmarked cseries using the command line""" + cser = self.get_cser() + + with terminal.capture(): + self.run_args('series', '-s', 'first', 'add', '-M', + '-D', 'my-description', pwork=True) + + pcdict = cser.get_pcommit_dict() + self.assertFalse(pcdict[1].change_id) + self.assertFalse(pcdict[2].change_id) + + def test_series_add_unmarked_bad_cmdline(self): + """Test failure to add an unmarked cseries using a bad command line""" + self.get_cser() + + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'first', 'add', + '-D', 'my-description', expect_ret=1, pwork=True) + last_line = out.getvalue().splitlines()[-2] + self.assertEqual( + 'patman: ValueError: 2 commit(s) are unmarked; ' + 'please use -m or -M', last_line) + + def check_series_unmark(self): + """Checker for unmarking tests""" + cser = self.get_cser() + with self.stage('unmarked commits'): + yield cser + + with 
self.stage('mark commits'): + with terminal.capture() as (out, _): + yield cser + + with self.stage('unmark: dry run'): + with terminal.capture() as (out, _): + yield cser + + itr = iter(out.getvalue().splitlines()) + self.assertEqual( + "Unmarking series 'first': allow_unmarked False", + next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual("Processing 2 commits from branch 'first'", + next(itr)) + self.assertRegex( + next(itr), + f'- unmarked: {HASH_RE} as {HASH_RE} i2c: I2C things') + self.assertRegex( + next(itr), + f'- unmarked: {HASH_RE} as {HASH_RE} spi: SPI fixes') + self.assertRegex( + next(itr), f'Updating branch first from {HASH_RE} to {HASH_RE}') + self.assertEqual('Dry run completed', next(itr)) + + with self.stage('unmark'): + with terminal.capture() as (out, _): + yield cser + self.assertIn('- unmarked', out.getvalue()) + + with self.stage('unmark: allow unmarked'): + with terminal.capture() as (out, _): + yield cser + self.assertIn('- no mark', out.getvalue()) + + yield None + + def test_series_unmark(self): + """Test unmarking a cseries, i.e. 
removing Change-Id fields""" + cor = self.check_series_unmark() + cser = next(cor) + + # check the allow_unmarked flag + with terminal.capture(): + with self.assertRaises(ValueError) as exc: + cser.unmark('first', dry_run=True) + self.assertEqual('Unmarked commits 2/2', str(exc.exception)) + + # mark commits + cser = next(cor) + cser.add('first', '', mark=True) + + # unmark: dry run + cser = next(cor) + cser.unmark('first', dry_run=True) + + # unmark + cser = next(cor) + cser.unmark('first') + + # unmark: allow unmarked + cser = next(cor) + cser.unmark('first', allow_unmarked=True) + + self.assertFalse(next(cor)) + + def test_series_unmark_cmdline(self): + """Test the unmark command""" + cor = self.check_series_unmark() + next(cor) + + # check the allow_unmarked flag + with terminal.capture() as (out, _): + self.run_args('series', 'unmark', expect_ret=1, pwork=True) + self.assertIn('Unmarked commits 2/2', out.getvalue()) + + # mark commits + next(cor) + self.run_args('series', '-s', 'first', 'add', '-D', '', '--mark', + pwork=True) + + # unmark: dry run + next(cor) + self.run_args('series', '-s', 'first', '-n', 'unmark', pwork=True) + + # unmark + next(cor) + self.run_args('series', '-s', 'first', 'unmark', pwork=True) + + # unmark: allow unmarked + next(cor) + self.run_args('series', '-s', 'first', 'unmark', '--allow-unmarked', + pwork=True) + + self.assertFalse(next(cor)) + + def test_series_unmark_middle(self): + """Test unmarking with Change-Id fields not last in the commit""" + cser = self.get_cser() + with terminal.capture(): + cser.add('first', '', allow_unmarked=True) + + # Add some change IDs in the middle of the commit message + with terminal.capture(): + name, ser, _, _ = cser.prep_series('first') + old_msgs = [] + for vals in cser.process_series(name, ser): + old_msgs.append(vals.msg) + lines = vals.msg.splitlines() + change_id = cser.make_change_id(vals.commit) + extra = [f'{cser_helper.CHANGE_ID_TAG}: {change_id}'] + vals.msg = '\n'.join(lines[:2] + 
extra + lines[2:]) + '\n' + + with terminal.capture(): + cser.unmark('first') + + # We should get back the original commit message + series = patchstream.get_metadata('first', 0, 2, git_dir=self.gitdir) + self.assertEqual(old_msgs[0], series.commits[0].msg) + self.assertEqual(old_msgs[1], series.commits[1].msg) + + def check_series_mark(self): + """Checker for marking tests""" + cser = self.get_cser() + yield cser + + # Start with a dry run, which should do nothing + with self.stage('dry run'): + with terminal.capture(): + yield cser + + series = patchstream.get_metadata_for_list('first', self.gitdir, 2) + self.assertEqual(2, len(series.commits)) + self.assertFalse(series.commits[0].change_id) + self.assertFalse(series.commits[1].change_id) + + # Now do a real run + with self.stage('real run'): + with terminal.capture(): + yield cser + + series = patchstream.get_metadata_for_list('first', self.gitdir, 2) + self.assertEqual(2, len(series.commits)) + self.assertTrue(series.commits[0].change_id) + self.assertTrue(series.commits[1].change_id) + + # Try to mark again, which should fail + with self.stage('mark twice'): + with terminal.capture(): + with self.assertRaises(ValueError) as exc: + cser.mark('first', dry_run=False) + self.assertEqual('Marked commits 2/2', str(exc.exception)) + + # Use the --marked flag to make it succeed + with self.stage('mark twice with --marked'): + with terminal.capture(): + yield cser + self.assertEqual('Marked commits 2/2', str(exc.exception)) + + series2 = patchstream.get_metadata_for_list('first', self.gitdir, + 2) + self.assertEqual(2, len(series2.commits)) + self.assertEqual(series.commits[0].change_id, + series2.commits[0].change_id) + self.assertEqual(series.commits[1].change_id, + series2.commits[1].change_id) + + yield None + + def test_series_mark(self): + """Test marking a cseries, i.e. 
adding Change-Id fields""" + cor = self.check_series_mark() + cser = next(cor) + + # Start with a dry run, which should do nothing + cser = next(cor) + cser.mark('first', dry_run=True) + + # Now do a real run + cser = next(cor) + cser.mark('first', dry_run=False) + + # Try to mark again, which should fail + with terminal.capture(): + with self.assertRaises(ValueError) as exc: + cser.mark('first', dry_run=False) + self.assertEqual('Marked commits 2/2', str(exc.exception)) + + # Use the --allow-marked flag to make it succeed + cser = next(cor) + cser.mark('first', allow_marked=True, dry_run=False) + + self.assertFalse(next(cor)) + + def test_series_mark_cmdline(self): + """Test marking a cseries, i.e. adding Change-Id fields""" + cor = self.check_series_mark() + next(cor) + + # Start with a dry run, which should do nothing + next(cor) + self.run_args('series', '-n', '-s', 'first', 'mark', pwork=True) + + # Now do a real run + next(cor) + self.run_args('series', '-s', 'first', 'mark', pwork=True) + + # Try to mark again, which should fail + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'first', 'mark', expect_ret=1, + pwork=True) + self.assertIn('Marked commits 2/2', out.getvalue()) + + # Use the --allow-marked flag to make it succeed + next(cor) + self.run_args('series', '-s', 'first', 'mark', '--allow-marked', + pwork=True) + self.assertFalse(next(cor)) + + def test_series_remove(self): + """Test removing a series""" + cser = self.get_cser() + + with self.stage('remove non-existent series'): + with self.assertRaises(ValueError) as exc: + cser.remove('first') + self.assertEqual("No such series 'first'", str(exc.exception)) + + with self.stage('add'): + with terminal.capture() as (out, _): + cser.add('first', '', mark=True) + self.assertTrue(cser.db.series_get_dict()) + pclist = cser.get_pcommit_dict() + self.assertEqual(2, len(pclist)) + + with self.stage('remove'): + with terminal.capture() as (out, _): + cser.remove('first') + 
self.assertEqual("Removed series 'first'", out.getvalue().strip()) + self.assertFalse(cser.db.series_get_dict()) + + pclist = cser.get_pcommit_dict() + self.assertFalse(len(pclist)) + + def test_series_remove_cmdline(self): + """Test removing a series using the command line""" + cser = self.get_cser() + + with self.stage('remove non-existent series'): + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'first', 'rm', expect_ret=1, + pwork=True) + self.assertEqual("patman: ValueError: No such series 'first'", + out.getvalue().strip()) + + with self.stage('add'): + with terminal.capture() as (out, _): + cser.add('first', '', mark=True) + self.assertTrue(cser.db.series_get_dict()) + + with self.stage('remove'): + with terminal.capture() as (out, _): + cser.remove('first') + self.assertEqual("Removed series 'first'", out.getvalue().strip()) + self.assertFalse(cser.db.series_get_dict()) + + def check_series_remove_multiple(self): + """Check for removing a series with more than one version""" + cser = self.get_cser() + + with self.stage('setup'): + self.add_first2(True) + + with terminal.capture() as (out, _): + cser.add(None, '', mark=True) + cser.add('first', '', mark=True) + self.assertTrue(cser.db.series_get_dict()) + pclist = cser.get_pcommit_dict() + self.assertEqual(4, len(pclist)) + + # Do a dry-run removal + with self.stage('dry run'): + with terminal.capture() as (out, _): + yield cser + self.assertEqual("Removed version 1 from series 'first'\n" + 'Dry run completed', out.getvalue().strip()) + self.assertEqual({'first'}, cser.db.series_get_dict().keys()) + + svlist = cser.get_ser_ver_list() + self.assertEqual(2, len(svlist)) + self.assertEqual(1, svlist[0].idnum) + self.assertEqual(1, svlist[0].series_id) + self.assertEqual(2, svlist[0].version) + + self.assertEqual(2, svlist[1].idnum) + self.assertEqual(1, svlist[1].series_id) + self.assertEqual(1, svlist[1].version) + + # Now remove for real + with self.stage('real'): + with 
terminal.capture() as (out, _): + yield cser + self.assertEqual("Removed version 1 from series 'first'", + out.getvalue().strip()) + self.assertEqual({'first'}, cser.db.series_get_dict().keys()) + plist = cser.get_ser_ver_list() + self.assertEqual(1, len(plist)) + pclist = cser.get_pcommit_dict() + self.assertEqual(2, len(pclist)) + + with self.stage('remove only version'): + yield cser + self.assertEqual({'first'}, cser.db.series_get_dict().keys()) + + svlist = cser.get_ser_ver_list() + self.assertEqual(1, len(svlist)) + self.assertEqual(1, svlist[0].idnum) + self.assertEqual(1, svlist[0].series_id) + self.assertEqual(2, svlist[0].version) + + with self.stage('remove series (dry run'): + with terminal.capture() as (out, _): + yield cser + self.assertEqual("Removed series 'first'\nDry run completed", + out.getvalue().strip()) + self.assertTrue(cser.db.series_get_dict()) + self.assertTrue(cser.get_ser_ver_list()) + + with self.stage('remove series'): + with terminal.capture() as (out, _): + yield cser + self.assertEqual("Removed series 'first'", out.getvalue().strip()) + self.assertFalse(cser.db.series_get_dict()) + self.assertFalse(cser.get_ser_ver_list()) + + yield False + + def test_series_remove_multiple(self): + """Test removing a series with more than one version""" + cor = self.check_series_remove_multiple() + cser = next(cor) + + # Do a dry-run removal + cser.version_remove('first', 1, dry_run=True) + cser = next(cor) + + # Now remove for real + cser.version_remove('first', 1) + cser = next(cor) + + # Remove only version + with self.assertRaises(ValueError) as exc: + cser.version_remove('first', 2, dry_run=True) + self.assertEqual( + "Series 'first' only has one version: remove the series", + str(exc.exception)) + cser = next(cor) + + # Remove series (dry run) + cser.remove('first', dry_run=True) + cser = next(cor) + + # Remove series (real) + cser.remove('first') + + self.assertFalse(next(cor)) + cor.close() + + def 
test_series_remove_multiple_cmdline(self): + """Test removing a series with more than one version on cmdline""" + cor = self.check_series_remove_multiple() + next(cor) + + # Do a dry-run removal + self.run_args('series', '-n', '-s', 'first', '-V', '1', 'rm-version', + pwork=True) + next(cor) + + # Now remove for real + self.run_args('series', '-s', 'first', '-V', '1', 'rm-version', + pwork=True) + next(cor) + + # Remove only version + with terminal.capture() as (out, _): + self.run_args('series', '-n', '-s', 'first', '-V', '2', + 'rm-version', expect_ret=1, pwork=True) + self.assertIn( + "Series 'first' only has one version: remove the series", + out.getvalue().strip()) + next(cor) + + # Remove series (dry run) + self.run_args('series', '-n', '-s', 'first', 'rm', pwork=True) + next(cor) + + # Remove series (real) + self.run_args('series', '-s', 'first', 'rm', pwork=True) + + self.assertFalse(next(cor)) + cor.close() + + def test_patchwork_set_project(self): + """Test setting the project ID""" + cser = self.get_cser() + pwork = Patchwork.for_testing(self._fake_patchwork_cser) + with terminal.capture() as (out, _): + cser.project_set(pwork, 'U-Boot') + self.assertEqual( + f"Project 'U-Boot' patchwork-ID {self.PROJ_ID} link-name uboot", + out.getvalue().strip()) + + def test_patchwork_project_get(self): + """Test setting the project ID""" + cser = self.get_cser() + pwork = Patchwork.for_testing(self._fake_patchwork_cser) + self.assertFalse(cser.project_get()) + with terminal.capture() as (out, _): + cser.project_set(pwork, 'U-Boot') + self.assertEqual( + f"Project 'U-Boot' patchwork-ID {self.PROJ_ID} link-name uboot", + out.getvalue().strip()) + + name, pwid, link_name = cser.project_get() + self.assertEqual('U-Boot', name) + self.assertEqual(self.PROJ_ID, pwid) + self.assertEqual('uboot', link_name) + + def test_patchwork_project_get_cmdline(self): + """Test setting the project ID""" + cser = self.get_cser() + + self.assertFalse(cser.project_get()) + + pwork = 
Patchwork.for_testing(self._fake_patchwork_cser) + with terminal.capture() as (out, _): + self.run_args('-P', 'https://url', 'patchwork', 'set-project', + 'U-Boot', pwork=pwork) + self.assertEqual( + f"Project 'U-Boot' patchwork-ID {self.PROJ_ID} link-name uboot", + out.getvalue().strip()) + + name, pwid, link_name = cser.project_get() + self.assertEqual('U-Boot', name) + self.assertEqual(6, pwid) + self.assertEqual('uboot', link_name) + + with terminal.capture() as (out, _): + self.run_args('-P', 'https://url', 'patchwork', 'get-project') + self.assertEqual( + f"Project 'U-Boot' patchwork-ID {self.PROJ_ID} link-name uboot", + out.getvalue().strip()) + + def check_series_list_patches(self): + """Test listing the patches for a series""" + cser = self.get_cser() + + with self.stage('setup'): + with terminal.capture() as (out, _): + cser.add(None, '', allow_unmarked=True) + cser.add('second', allow_unmarked=True) + target = self.repo.lookup_reference('refs/heads/second') + self.repo.checkout( + target, strategy=pygit2.enums.CheckoutStrategy.FORCE) + cser.increment('second') + + with self.stage('list first'): + with terminal.capture() as (out, _): + yield cser + itr = iter(out.getvalue().splitlines()) + self.assertEqual("Branch 'first' (total 2): 2:unknown", next(itr)) + self.assertIn('PatchId', next(itr)) + self.assertRegex(next(itr), r' 0 .* i2c: I2C things') + self.assertRegex(next(itr), r' 1 .* spi: SPI fixes') + + with self.stage('list second2'): + with terminal.capture() as (out, _): + yield cser + itr = iter(out.getvalue().splitlines()) + self.assertEqual( + "Branch 'second2' (total 3): 3:unknown", next(itr)) + self.assertIn('PatchId', next(itr)) + self.assertRegex( + next(itr), ' 0 .* video: Some video improvements') + self.assertRegex(next(itr), ' 1 .* serial: Add a serial driver') + self.assertRegex(next(itr), ' 2 .* bootm: Make it boot') + + yield None + + def test_series_list_patches(self): + """Test listing the patches for a series""" + cor = 
self.check_series_list_patches() + cser = next(cor) + + # list first + cser.list_patches('first', 1) + cser = next(cor) + + # list second2 + cser.list_patches('second2', 2) + self.assertFalse(next(cor)) + cor.close() + + def test_series_list_patches_cmdline(self): + """Test listing the patches for a series using the cmdline""" + cor = self.check_series_list_patches() + next(cor) + + # list first + self.run_args('series', '-s', 'first', 'patches', pwork=True) + next(cor) + + # list second2 + self.run_args('series', '-s', 'second', '-V', '2', 'patches', + pwork=True) + self.assertFalse(next(cor)) + cor.close() + + def test_series_list_patches_detail(self): + """Test listing the patches for a series""" + cser = self.get_cser() + with terminal.capture(): + cser.add(None, '', allow_unmarked=True) + cser.add('second', allow_unmarked=True) + target = self.repo.lookup_reference('refs/heads/second') + self.repo.checkout( + target, strategy=pygit2.enums.CheckoutStrategy.FORCE) + cser.increment('second') + + with terminal.capture() as (out, _): + cser.list_patches('first', 1, show_commit=True) + expect = r'''Branch 'first' (total 2): 2:unknown +Seq State Com PatchId Commit Subject + 0 unknown - .* i2c: I2C things + +commit .* +Author: Test user <test@email.com> +Date: .* + + i2c: I2C things + + This has some stuff to do with I2C + + i2c.c | 2 ++ + 1 file changed, 2 insertions(+) + + + 1 unknown - .* spi: SPI fixes + +commit .* +Author: Test user <test@email.com> +Date: .* + + spi: SPI fixes + + SPI needs some fixes + and here they are + + Signed-off-by: Lord Edmund Blackaddër <weasel@blackadder.org> + + Series-to: u-boot + Commit-notes: + title of the series + This is the cover letter for the series + with various details + END + + spi.c | 3 +++ + 1 file changed, 3 insertions(+) +''' + itr = iter(out.getvalue().splitlines()) + for seq, eline in enumerate(expect.splitlines()): + line = next(itr).rstrip() + if '*' in eline: + self.assertRegex(line, eline, f'line {seq + 1}') + 
else: + self.assertEqual(eline, line, f'line {seq + 1}') + + # Show just the patch; this should exclude the commit message + with terminal.capture() as (out, _): + cser.list_patches('first', 1, show_patch=True) + chk = out.getvalue() + self.assertIn('SPI fixes', chk) # subject + self.assertNotIn('SPI needs some fixes', chk) # commit body + self.assertIn('make SPI work', chk) # patch body + + # Show both + with terminal.capture() as (out, _): + cser.list_patches('first', 1, show_commit=True, show_patch=True) + chk = out.getvalue() + self.assertIn('SPI fixes', chk) # subject + self.assertIn('SPI needs some fixes', chk) # commit body + self.assertIn('make SPI work', chk) # patch body + + def check_series_gather(self): + """Checker for gathering tags for a series""" + cser = self.get_cser() + with self.stage('setup'): + pwork = Patchwork.for_testing(self._fake_patchwork_cser) + self.assertFalse(cser.project_get()) + cser.project_set(pwork, 'U-Boot', quiet=True) + + with terminal.capture() as (out, _): + cser.add('second', 'description', allow_unmarked=True) + + ser = cser.get_series_by_name('second') + pwid = cser.get_series_svid(ser.idnum, 1) + + # First do a dry run + with self.stage('gather: dry run'): + with terminal.capture() as (out, _): + yield cser, pwork + lines = out.getvalue().splitlines() + self.assertEqual( + f"Updating series 'second' version 1 from link " + f"'{self.SERIES_ID_SECOND_V1}'", + lines[0]) + self.assertEqual('3 patches updated (7 requests)', lines[1]) + self.assertEqual('Dry run completed', lines[2]) + self.assertEqual(3, len(lines)) + + pwc = cser.get_pcommit_dict(pwid) + self.assertIsNone(pwc[0].state) + self.assertIsNone(pwc[1].state) + self.assertIsNone(pwc[2].state) + + # Now try it again, gathering tags + with self.stage('gather: dry run'): + with terminal.capture() as (out, _): + yield cser, pwork + lines = out.getvalue().splitlines() + itr = iter(lines) + self.assertEqual( + f"Updating series 'second' version 1 from link " + 
f"'{self.SERIES_ID_SECOND_V1}'", + next(itr)) + self.assertEqual(' 1 video: Some video improvements', next(itr)) + self.assertEqual(' + Reviewed-by: Fred Bloggs <fred@bloggs.com>', + next(itr)) + self.assertEqual(' 2 serial: Add a serial driver', next(itr)) + self.assertEqual(' 3 bootm: Make it boot', next(itr)) + + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual("Processing 3 commits from branch 'second'", + next(itr)) + self.assertRegex( + next(itr), + f'- added 1 tag: {HASH_RE} as {HASH_RE} ' + 'video: Some video improvements') + self.assertRegex( + next(itr), + f"- upd links '1:456': {HASH_RE} as {HASH_RE} " + 'serial: Add a serial driver') + self.assertRegex( + next(itr), + f'- {HASH_RE} as {HASH_RE} ' + 'bootm: Make it boot') + self.assertRegex( + next(itr), + f'Updating branch second from {HASH_RE} to {HASH_RE}') + self.assertEqual('3 patches updated (7 requests)', next(itr)) + self.assertEqual('Dry run completed', next(itr)) + self.assert_finished(itr) + + # Make sure that no tags were added to the branch + series = patchstream.get_metadata_for_list('second', self.gitdir, + 3) + for cmt in series.commits: + self.assertFalse(cmt.rtags, + 'Commit {cmt.subject} rtags {cmt.rtags}') + + # Now do it for real + with self.stage('gather: real'): + with terminal.capture() as (out, _): + yield cser, pwork + lines2 = out.getvalue().splitlines() + self.assertEqual(lines2, lines[:-1]) + + # Make sure that the tags were added to the branch + series = patchstream.get_metadata_for_list('second', self.gitdir, + 3) + self.assertEqual( + {'Reviewed-by': {'Fred Bloggs <fred@bloggs.com>'}}, + series.commits[0].rtags) + self.assertFalse(series.commits[1].rtags) + self.assertFalse(series.commits[2].rtags) + + # Make sure the status was updated + pwc = cser.get_pcommit_dict(pwid) + self.assertEqual('accepted', pwc[0].state) + self.assertEqual('changes-requested', pwc[1].state) + self.assertEqual('rejected', pwc[2].state) + + 
yield None + + def test_series_gather(self): + """Test gathering tags for a series""" + cor = self.check_series_gather() + cser, pwork = next(cor) + + # sync (dry_run) + cser.gather(pwork, 'second', None, False, False, False, dry_run=True) + cser, pwork = next(cor) + + # gather (dry_run) + cser.gather(pwork, 'second', None, False, False, True, dry_run=True) + cser, pwork = next(cor) + + # gather (real) + cser.gather(pwork, 'second', None, False, False, True) + + self.assertFalse(next(cor)) + + def test_series_gather_cmdline(self): + """Test gathering tags for a series with cmdline""" + cor = self.check_series_gather() + _, pwork = next(cor) + + # sync (dry_run) + self.run_args( + 'series', '-n', '-s', 'second', 'gather', '-G', pwork=pwork) + + # gather (dry_run) + _, pwork = next(cor) + self.run_args('series', '-n', '-s', 'second', 'gather', pwork=pwork) + + # gather (real) + _, pwork = next(cor) + self.run_args('series', '-s', 'second', 'gather', pwork=pwork) + + self.assertFalse(next(cor)) + + def check_series_gather_all(self): + """Gather all series at once""" + with self.stage('setup'): + cser, pwork = self.setup_second(False) + + with terminal.capture(): + cser.add('first', 'description', allow_unmarked=True) + cser.increment('first') + cser.increment('first') + cser.link_set('first', 1, '123', True) + cser.link_set('first', 2, '1234', True) + cser.link_set('first', 3, f'{self.SERIES_ID_FIRST_V3}', True) + cser.link_auto(pwork, 'second', 2, True) + + with self.stage('no options'): + with terminal.capture() as (out, _): + yield cser, pwork + self.assertEqual( + "Syncing 'first' v3\n" + "Syncing 'second' v2\n" + '\n' + '5 patches and 2 cover letters updated, 0 missing links ' + '(14 requests)\n' + 'Dry run completed', + out.getvalue().strip()) + + with self.stage('gather'): + with terminal.capture() as (out, _): + yield cser, pwork + lines = out.getvalue().splitlines() + itr = iter(lines) + self.assertEqual("Syncing 'first' v3", next(itr)) + self.assertEqual(' 1 
i2c: I2C things', next(itr)) + self.assertEqual( + ' + Tested-by: Mary Smith <msmith@wibble.com> # yak', + next(itr)) + self.assertEqual(' 2 spi: SPI fixes', next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual( + "Processing 2 commits from branch 'first3'", next(itr)) + self.assertRegex( + next(itr), + f'- added 1 tag: {HASH_RE} as {HASH_RE} i2c: I2C things') + self.assertRegex( + next(itr), + f"- upd links '3:31': {HASH_RE} as {HASH_RE} spi: SPI fixes") + self.assertRegex( + next(itr), + f'Updating branch first3 from {HASH_RE} to {HASH_RE}') + self.assertEqual('', next(itr)) + + self.assertEqual("Syncing 'second' v2", next(itr)) + self.assertEqual(' 1 video: Some video improvements', next(itr)) + self.assertEqual( + ' + Reviewed-by: Fred Bloggs <fred@bloggs.com>', next(itr)) + self.assertEqual(' 2 serial: Add a serial driver', next(itr)) + self.assertEqual(' 3 bootm: Make it boot', next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual( + "Processing 3 commits from branch 'second2'", next(itr)) + self.assertRegex( + next(itr), + f'- added 1 tag: {HASH_RE} as {HASH_RE} ' + 'video: Some video improvements') + self.assertRegex( + next(itr), + f"- upd links '2:457 1:456': {HASH_RE} as {HASH_RE} " + 'serial: Add a serial driver') + self.assertRegex( + next(itr), + f'- {HASH_RE} as {HASH_RE} ' + 'bootm: Make it boot') + self.assertRegex( + next(itr), + f'Updating branch second2 from {HASH_RE} to {HASH_RE}') + self.assertEqual('', next(itr)) + self.assertEqual( + '5 patches and 2 cover letters updated, 0 missing links ' + '(14 requests)', + next(itr)) + self.assertEqual('Dry run completed', next(itr)) + self.assert_finished(itr) + + with self.stage('gather, patch comments,!dry_run'): + with terminal.capture() as (out, _): + yield cser, pwork + lines = out.getvalue().splitlines() + itr = iter(lines) + self.assertEqual("Syncing 'first' v1", next(itr)) 
+ self.assertEqual(' 1 i2c: I2C things', next(itr)) + self.assertEqual( + ' + Tested-by: Mary Smith <msmith@wibble.com> # yak', + next(itr)) + self.assertEqual(' 2 spi: SPI fixes', next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual( + "Processing 2 commits from branch 'first'", next(itr)) + self.assertRegex( + next(itr), + f'- added 1 tag: {HASH_RE} as {HASH_RE} i2c: I2C things') + self.assertRegex( + next(itr), + f"- upd links '1:123': {HASH_RE} as {HASH_RE} spi: SPI fixes") + self.assertRegex( + next(itr), + f'Updating branch first from {HASH_RE} to {HASH_RE}') + self.assertEqual('', next(itr)) + + self.assertEqual("Syncing 'first' v2", next(itr)) + self.assertEqual(' 1 i2c: I2C things', next(itr)) + self.assertEqual( + ' + Tested-by: Mary Smith <msmith@wibble.com> # yak', + next(itr)) + self.assertEqual(' 2 spi: SPI fixes', next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual( + "Processing 2 commits from branch 'first2'", next(itr)) + self.assertRegex( + next(itr), + f'- added 1 tag: {HASH_RE} as {HASH_RE} ' + 'i2c: I2C things') + self.assertRegex( + next(itr), + f"- upd links '2:1234': {HASH_RE} as {HASH_RE} spi: SPI fixes") + self.assertRegex( + next(itr), + f'Updating branch first2 from {HASH_RE} to {HASH_RE}') + self.assertEqual('', next(itr)) + self.assertEqual("Syncing 'first' v3", next(itr)) + self.assertEqual(' 1 i2c: I2C things', next(itr)) + self.assertEqual( + ' + Tested-by: Mary Smith <msmith@wibble.com> # yak', + next(itr)) + self.assertEqual(' 2 spi: SPI fixes', next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual( + "Processing 2 commits from branch 'first3'", next(itr)) + self.assertRegex( + next(itr), + f'- added 1 tag: {HASH_RE} as {HASH_RE} i2c: I2C things') + self.assertRegex( + next(itr), + f"- upd links '3:31': {HASH_RE} as {HASH_RE} spi: SPI fixes") + 
self.assertRegex( + next(itr), + f'Updating branch first3 from {HASH_RE} to {HASH_RE}') + self.assertEqual('', next(itr)) + + self.assertEqual("Syncing 'second' v1", next(itr)) + self.assertEqual(' 1 video: Some video improvements', next(itr)) + self.assertEqual( + ' + Reviewed-by: Fred Bloggs <fred@bloggs.com>', next(itr)) + self.assertEqual( + 'Review: Fred Bloggs <fred@bloggs.com>', next(itr)) + self.assertEqual(' > This was my original patch', next(itr)) + self.assertEqual(' > which is being quoted', next(itr)) + self.assertEqual( + ' I like the approach here and I would love to see more ' + 'of it.', next(itr)) + self.assertEqual('', next(itr)) + self.assertEqual(' 2 serial: Add a serial driver', next(itr)) + self.assertEqual(' 3 bootm: Make it boot', next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual( + "Processing 3 commits from branch 'second'", next(itr)) + self.assertRegex( + next(itr), + f'- added 1 tag: {HASH_RE} as {HASH_RE} ' + 'video: Some video improvements') + self.assertRegex( + next(itr), + f"- upd links '1:456': {HASH_RE} as {HASH_RE} " + 'serial: Add a serial driver') + self.assertRegex( + next(itr), + f'- {HASH_RE} as {HASH_RE} ' + 'bootm: Make it boot') + self.assertRegex( + next(itr), + f'Updating branch second from {HASH_RE} to {HASH_RE}') + self.assertEqual('', next(itr)) + + self.assertEqual("Syncing 'second' v2", next(itr)) + self.assertEqual(' 1 video: Some video improvements', next(itr)) + self.assertEqual( + ' + Reviewed-by: Fred Bloggs <fred@bloggs.com>', next(itr)) + self.assertEqual( + 'Review: Fred Bloggs <fred@bloggs.com>', next(itr)) + self.assertEqual(' > This was my original patch', next(itr)) + self.assertEqual(' > which is being quoted', next(itr)) + self.assertEqual( + ' I like the approach here and I would love to see more ' + 'of it.', next(itr)) + self.assertEqual('', next(itr)) + self.assertEqual(' 2 serial: Add a serial driver', next(itr)) + 
self.assertEqual(' 3 bootm: Make it boot', next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual( + "Processing 3 commits from branch 'second2'", next(itr)) + self.assertRegex( + next(itr), + f'- added 1 tag: {HASH_RE} as {HASH_RE} ' + 'video: Some video improvements') + self.assertRegex( + next(itr), + f"- upd links '2:457 1:456': {HASH_RE} as {HASH_RE} " + 'serial: Add a serial driver') + self.assertRegex( + next(itr), + f'- {HASH_RE} as {HASH_RE} ' + 'bootm: Make it boot') + self.assertRegex( + next(itr), + f'Updating branch second2 from {HASH_RE} to {HASH_RE}') + self.assertEqual('', next(itr)) + self.assertEqual( + '12 patches and 3 cover letters updated, 0 missing links ' + '(32 requests)', next(itr)) + self.assert_finished(itr) + + yield None + + def test_series_gather_all(self): + """Gather all series at once""" + cor = self.check_series_gather_all() + cser, pwork = next(cor) + + # no options + cser.gather_all(pwork, False, True, False, False, dry_run=True) + cser, pwork = next(cor) + + # gather + cser.gather_all(pwork, False, False, False, True, dry_run=True) + cser, pwork = next(cor) + + # gather, patch comments, !dry_run + cser.gather_all(pwork, True, False, True, True) + + self.assertFalse(next(cor)) + + def test_series_gather_all_cmdline(self): + """Sync all series at once using cmdline""" + cor = self.check_series_gather_all() + _, pwork = next(cor) + + # no options + self.run_args('series', '-n', '-s', 'second', 'gather-all', '-G', + pwork=pwork) + _, pwork = next(cor) + + # gather + self.run_args('series', '-n', '-s', 'second', 'gather-all', + pwork=pwork) + _, pwork = next(cor) + + # gather, patch comments, !dry_run + self.run_args('series', '-s', 'second', 'gather-all', '-a', '-c', + pwork=pwork) + + self.assertFalse(next(cor)) + + def _check_second(self, itr, show_all): + """Check output from a 'progress' command + + Args: + itr (Iterator): Contains the output lines to check + show_all 
(bool): True if all versions are being shown, not just + latest + """ + self.assertEqual('second: Series for my board (versions: 1 2)', + next(itr)) + if show_all: + self.assertEqual("Branch 'second' (total 3): 3:unknown", + next(itr)) + self.assertIn('PatchId', next(itr)) + self.assertRegex( + next(itr), + ' 0 unknown - .* video: Some video improvements') + self.assertRegex( + next(itr), + ' 1 unknown - .* serial: Add a serial driver') + self.assertRegex( + next(itr), + ' 2 unknown - .* bootm: Make it boot') + self.assertEqual('', next(itr)) + self.assertEqual( + "Branch 'second2' (total 3): 1:accepted 1:changes 1:rejected", + next(itr)) + self.assertIn('PatchId', next(itr)) + self.assertEqual( + 'Cov 2 139 ' + 'The name of the cover letter', next(itr)) + self.assertRegex( + next(itr), + ' 0 accepted 2 110 .* video: Some video improvements') + self.assertRegex( + next(itr), + ' 1 changes 111 .* serial: Add a serial driver') + self.assertRegex( + next(itr), + ' 2 rejected 3 112 .* bootm: Make it boot') + + def test_series_progress(self): + """Test showing progress for a cseries""" + self.setup_second() + self.db_close() + + with self.stage('latest versions'): + args = Namespace(subcmd='progress', series='second', + show_all_versions=False, list_patches=True) + with terminal.capture() as (out, _): + control.do_series(args, test_db=self.tmpdir, pwork=True) + lines = iter(out.getvalue().splitlines()) + self._check_second(lines, False) + + with self.stage('all versions'): + args.show_all_versions = True + with terminal.capture() as (out, _): + control.do_series(args, test_db=self.tmpdir, pwork=True) + lines = iter(out.getvalue().splitlines()) + self._check_second(lines, True) + + def _check_first(self, itr): + """Check output from the progress command + + Args: + itr (Iterator): Contains the output lines to check + """ + self.assertEqual('first: (versions: 1)', next(itr)) + self.assertEqual("Branch 'first' (total 2): 2:unknown", next(itr)) + self.assertIn('PatchId', 
next(itr)) + self.assertRegex( + next(itr), + ' 0 unknown - .* i2c: I2C things') + self.assertRegex( + next(itr), + ' 1 unknown - .* spi: SPI fixes') + self.assertEqual('', next(itr)) + + def test_series_progress_all(self): + """Test showing progress for all cseries""" + self.setup_second() + self.db_close() + + with self.stage('progress with patches'): + args = Namespace(subcmd='progress', series=None, + show_all_versions=False, list_patches=True) + with terminal.capture() as (out, _): + control.do_series(args, test_db=self.tmpdir, pwork=True) + lines = iter(out.getvalue().splitlines()) + self._check_first(lines) + self._check_second(lines, False) + + with self.stage('all versions'): + args.show_all_versions = True + with terminal.capture() as (out, _): + control.do_series(args, test_db=self.tmpdir, pwork=True) + lines = iter(out.getvalue().splitlines()) + self._check_first(lines) + self._check_second(lines, True) + + def test_series_progress_no_patches(self): + """Test showing progress for all cseries without patches""" + self.setup_second() + + with terminal.capture() as (out, _): + self.run_args('series', 'progress', pwork=True) + itr = iter(out.getvalue().splitlines()) + self.assertEqual( + 'Name Description ' + 'Count Status', next(itr)) + self.assertTrue(next(itr).startswith('--')) + self.assertEqual( + 'first ' + ' 2 2:unknown', next(itr)) + self.assertEqual( + 'second2 The name of the cover letter ' + ' 3 1:accepted 1:changes 1:rejected', next(itr)) + self.assertTrue(next(itr).startswith('--')) + self.assertEqual( + ['2', 'series', '5', '2:unknown', '1:accepted', '1:changes', + '1:rejected'], + next(itr).split()) + self.assert_finished(itr) + + def test_series_progress_all_no_patches(self): + """Test showing progress for all cseries versions without patches""" + self.setup_second() + + with terminal.capture() as (out, _): + self.run_args('series', 'progress', '--show-all-versions', + pwork=True) + itr = iter(out.getvalue().splitlines()) + self.assertEqual( 
+ 'Name Description ' + 'Count Status', next(itr)) + self.assertTrue(next(itr).startswith('--')) + self.assertEqual( + 'first ' + ' 2 2:unknown', next(itr)) + self.assertEqual( + 'second Series for my board ' + ' 3 3:unknown', next(itr)) + self.assertEqual( + 'second2 The name of the cover letter ' + ' 3 1:accepted 1:changes 1:rejected', next(itr)) + self.assertTrue(next(itr).startswith('--')) + self.assertEqual( + ['3', 'series', '8', '5:unknown', '1:accepted', '1:changes', + '1:rejected'], + next(itr).split()) + self.assert_finished(itr) + + def test_series_summary(self): + """Test showing a summary of series status""" + self.setup_second() + + self.db_close() + args = Namespace(subcmd='summary', series=None) + with terminal.capture() as (out, _): + control.do_series(args, test_db=self.tmpdir, pwork=True) + lines = out.getvalue().splitlines() + self.assertEqual( + 'Name Status Description', + lines[0]) + self.assertEqual( + '----------------- ------ ------------------------------', + lines[1]) + self.assertEqual('first -/2 ', lines[2]) + self.assertEqual('second 1/3 Series for my board', lines[3]) + + def test_series_open(self): + """Test opening a series in a web browser""" + cser = self.get_cser() + pwork = Patchwork.for_testing(self._fake_patchwork_cser) + self.assertFalse(cser.project_get()) + pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME) + + with terminal.capture(): + cser.add('second', allow_unmarked=True) + cser.increment('second') + cser.link_auto(pwork, 'second', 2, True) + cser.gather(pwork, 'second', 2, False, False, False) + + with mock.patch.object(cros_subprocess.Popen, '__init__', + return_value=None) as method: + with terminal.capture() as (out, _): + cser.open(pwork, 'second2', 2) + + url = ('https://patchwork.ozlabs.org/project/uboot/list/?series=457' + '&state=*&archive=both') + method.assert_called_once_with(['xdg-open', url]) + self.assertEqual(f'Opening {url}', out.getvalue().strip()) + + def test_name_version(self): + """Test 
handling of series names and versions""" + cser = self.get_cser() + repo = self.repo + + self.assertEqual(('fred', None), + cser_helper.split_name_version('fred')) + self.assertEqual(('mary', 2), cser_helper.split_name_version('mary2')) + + ser, version = cser._parse_series_and_version(None, None) + self.assertEqual('first', ser.name) + self.assertEqual(1, version) + + ser, version = cser._parse_series_and_version('first', None) + self.assertEqual('first', ser.name) + self.assertEqual(1, version) + + ser, version = cser._parse_series_and_version('first', 2) + self.assertEqual('first', ser.name) + self.assertEqual(2, version) + + with self.assertRaises(ValueError) as exc: + cser._parse_series_and_version('123', 2) + self.assertEqual( + "Series name '123' cannot be a number, use '<name><version>'", + str(exc.exception)) + + with self.assertRaises(ValueError) as exc: + cser._parse_series_and_version('first', 100) + self.assertEqual("Version 100 exceeds 99", str(exc.exception)) + + with terminal.capture() as (_, err): + cser._parse_series_and_version('mary3', 4) + self.assertIn('Version mismatch: -V has 4 but branch name indicates 3', + err.getvalue()) + + ser, version = cser._parse_series_and_version('mary', 4) + self.assertEqual('mary', ser.name) + self.assertEqual(4, version) + + # Move off the branch and check for a sensible error + commit = repo.revparse_single('first~') + repo.checkout_tree(commit) + repo.set_head(commit.oid) + + with self.assertRaises(ValueError) as exc: + cser._parse_series_and_version(None, None) + self.assertEqual('No branch detected: please use -s <series>', + str(exc.exception)) + + def test_name_version_extra(self): + """More tests for some corner cases""" + cser, _ = self.setup_second() + target = self.repo.lookup_reference('refs/heads/second2') + self.repo.checkout( + target, strategy=pygit2.enums.CheckoutStrategy.FORCE) + + ser, version = cser._parse_series_and_version(None, None) + self.assertEqual('second', ser.name) + 
self.assertEqual(2, version) + + ser, version = cser._parse_series_and_version('second2', None) + self.assertEqual('second', ser.name) + self.assertEqual(2, version) + + def test_migrate(self): + """Test migration to later schema versions""" + db = database.Database(f'{self.tmpdir}/.patman.db') + with terminal.capture() as (out, err): + db.open_it() + self.assertEqual( + f'Creating new database {self.tmpdir}/.patman.db', + err.getvalue().strip()) + + self.assertEqual(0, db.get_schema_version()) + + for version in range(1, database.LATEST + 1): + with terminal.capture() as (out, _): + db.migrate_to(version) + self.assertTrue(os.path.exists( + f'{self.tmpdir}/.patman.dbold.v{version - 1}')) + self.assertEqual(f'Update database to v{version}', + out.getvalue().strip()) + self.assertEqual(version, db.get_schema_version()) + self.assertEqual(4, database.LATEST) + + def test_series_scan(self): + """Test scanning a series for updates""" + cser, _ = self.setup_second() + target = self.repo.lookup_reference('refs/heads/second2') + self.repo.checkout( + target, strategy=pygit2.enums.CheckoutStrategy.FORCE) + + # Add a new commit + self.repo = pygit2.init_repository(self.gitdir) + self.make_commit_with_file( + 'wip: Try out a new thing', 'Just checking', 'wibble.c', + '''changes to wibble''') + target = self.repo.revparse_single('HEAD') + self.repo.reset(target.oid, pygit2.enums.ResetMode.HARD) + + # name = gitutil.get_branch(self.gitdir) + # upstream_name = gitutil.get_upstream(self.gitdir, name) + name, ser, version, _ = cser.prep_series(None) + + # We now have 4 commits numbered 0 (second~3) to 3 (the one we just + # added). 
Drop commit 1 (the 'serial' one) from the branch + cser._filter_commits(name, ser, 1) + svid = cser.get_ser_ver(ser.idnum, version).idnum + old_pcdict = cser.get_pcommit_dict(svid).values() + + expect = '''Syncing series 'second2' v2: mark False allow_unmarked True + 0 video: Some video improvements +- 1 serial: Add a serial driver + 1 bootm: Make it boot ++ 2 Just checking +''' + with terminal.capture() as (out, _): + self.run_args('series', '-n', 'scan', '-M', pwork=True) + self.assertEqual(expect + 'Dry run completed\n', out.getvalue()) + + new_pcdict = cser.get_pcommit_dict(svid).values() + self.assertEqual(list(old_pcdict), list(new_pcdict)) + + with terminal.capture() as (out, _): + self.run_args('series', 'scan', '-M', pwork=True) + self.assertEqual(expect, out.getvalue()) + + new_pcdict = cser.get_pcommit_dict(svid).values() + self.assertEqual(len(old_pcdict), len(new_pcdict)) + chk = list(new_pcdict) + self.assertNotEqual(list(old_pcdict), list(new_pcdict)) + self.assertEqual('video: Some video improvements', chk[0].subject) + self.assertEqual('bootm: Make it boot', chk[1].subject) + self.assertEqual('Just checking', chk[2].subject) + + def test_series_send(self): + """Test sending a series""" + cser, pwork = self.setup_second() + + # Create a third version + with terminal.capture(): + cser.increment('second') + series = patchstream.get_metadata_for_list('second3', self.gitdir, 3) + self.assertEqual('2:457 1:456', series.links) + self.assertEqual('3', series.version) + + with terminal.capture() as (out, err): + self.run_args('series', '-n', '-s', 'second3', 'send', + '--no-autolink', pwork=pwork) + self.assertIn('Send a total of 3 patches with a cover letter', + out.getvalue()) + self.assertIn( + 'video.c:1: warning: Missing or malformed SPDX-License-Identifier ' + 'tag in line 1', err.getvalue()) + self.assertIn( + '<patch>:19: warning: added, moved or deleted file(s), does ' + 'MAINTAINERS need updating?', err.getvalue()) + self.assertIn('bootm.c:1: 
check: Avoid CamelCase: <Fix>', + err.getvalue()) + self.assertIn( + 'Cc: Anatolij Gustschin <ag.dev.uboot@gmail.com>', out.getvalue()) + + self.assertTrue(os.path.exists(os.path.join( + self.tmpdir, '0001-video-Some-video-improvements.patch'))) + self.assertTrue(os.path.exists(os.path.join( + self.tmpdir, '0002-serial-Add-a-serial-driver.patch'))) + self.assertTrue(os.path.exists(os.path.join( + self.tmpdir, '0003-bootm-Make-it-boot.patch'))) + + def test_series_send_and_link(self): + """Test sending a series and then adding its link to the database""" + def h_sleep(time_s): + if cser.get_time() > 25: + self.autolink_extra = {'id': 500, + 'name': 'Series for my board', + 'version': 3} + cser.inc_fake_time(time_s) + + cser, pwork = self.setup_second() + + # Create a third version + with terminal.capture(): + cser.increment('second') + series = patchstream.get_metadata_for_list('second3', self.gitdir, 3) + self.assertEqual('2:457 1:456', series.links) + self.assertEqual('3', series.version) + + with terminal.capture(): + self.run_args('series', '-n', 'send', pwork=pwork) + + cser.set_fake_time(h_sleep) + with terminal.capture() as (out, _): + cser.link_auto(pwork, 'second3', 3, True, 50) + itr = iter(out.getvalue().splitlines()) + for i in range(7): + self.assertEqual( + "Possible matches for 'second' v3 desc 'Series for my board':", + next(itr), f'failed at i={i}') + self.assertEqual(' Link Version Description', next(itr)) + self.assertEqual(' 456 1 Series for my board', next(itr)) + self.assertEqual(' 457 2 Series for my board', next(itr)) + self.assertEqual('Sleeping for 5 seconds', next(itr)) + self.assertEqual('Link completed after 35 seconds', next(itr)) + self.assertRegex( + next(itr), 'Checking out upstream commit refs/heads/base: .*') + self.assertEqual( + "Processing 3 commits from branch 'second3'", next(itr)) + self.assertRegex( + next(itr), + f'- {HASH_RE} as {HASH_RE} ' + 'video: Some video improvements') + self.assertRegex( + next(itr), + f"- add 
links '3:500 2:457 1:456': {HASH_RE} as {HASH_RE} " + 'serial: Add a serial driver') + self.assertRegex( + next(itr), + f'- add v3: {HASH_RE} as {HASH_RE} ' + 'bootm: Make it boot') + self.assertRegex( + next(itr), + f'Updating branch second3 from {HASH_RE} to {HASH_RE}') + self.assertEqual( + "Setting link for series 'second' v3 to 500", next(itr)) + + def _check_status(self, out, has_comments, has_cover_comments): + """Check output from the status command + + Args: + itr (Iterator): Contains the output lines to check + """ + itr = iter(out.getvalue().splitlines()) + if has_cover_comments: + self.assertEqual('Cov The name of the cover letter', next(itr)) + self.assertEqual( + 'From: A user <user@user.com>: Sun 13 Apr 14:06:02 MDT 2025', + next(itr)) + self.assertEqual('some comment', next(itr)) + self.assertEqual('', next(itr)) + + self.assertEqual( + 'From: Ghenkis Khan <gk@eurasia.gov>: Sun 13 Apr 13:06:02 ' + 'MDT 2025', + next(itr)) + self.assertEqual('another comment', next(itr)) + self.assertEqual('', next(itr)) + + self.assertEqual(' 1 video: Some video improvements', next(itr)) + self.assertEqual(' + Reviewed-by: Fred Bloggs <fred@bloggs.com>', + next(itr)) + if has_comments: + self.assertEqual( + 'Review: Fred Bloggs <fred@bloggs.com>', next(itr)) + self.assertEqual(' > This was my original patch', next(itr)) + self.assertEqual(' > which is being quoted', next(itr)) + self.assertEqual( + ' I like the approach here and I would love to see more ' + 'of it.', next(itr)) + self.assertEqual('', next(itr)) + + self.assertEqual(' 2 serial: Add a serial driver', next(itr)) + self.assertEqual(' 3 bootm: Make it boot', next(itr)) + self.assertEqual( + '1 new response available in patchwork (use -d to write them to ' + 'a new branch)', next(itr)) + + def test_series_status(self): + """Test getting the status of a series, including comments""" + cser, pwork = self.setup_second() + + # Use single threading for easy debugging, but the multithreaded + # version should 
produce the same output + with self.stage('status second2: single-threaded'): + with terminal.capture() as (out, _): + cser.status(pwork, 'second', 2, False) + self._check_status(out, False, False) + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + with self.stage('status second2 (normal)'): + with terminal.capture() as (out2, _): + cser.status(pwork, 'second', 2, False) + self.assertEqual(out.getvalue(), out2.getvalue()) + self._check_status(out, False, False) + + with self.stage('with comments'): + with terminal.capture() as (out, _): + cser.status(pwork, 'second', 2, show_comments=True) + self._check_status(out, True, False) + + with self.stage('with comments and cover comments'): + with terminal.capture() as (out, _): + cser.status(pwork, 'second', 2, show_comments=True, + show_cover_comments=True) + self._check_status(out, True, True) + + def test_series_status_cmdline(self): + """Test getting the status of a series, including comments""" + cser, pwork = self.setup_second() + + with self.stage('status second2'): + with terminal.capture() as (out, _): + self.run_args('series', '-s', 'second', '-V', '2', 'status', + pwork=pwork) + self._check_status(out, False, False) + + with self.stage('status second2 (normal)'): + with terminal.capture() as (out, _): + cser.status(pwork, 'second', 2, show_comments=True) + self._check_status(out, True, False) + + with self.stage('with comments and cover comments'): + with terminal.capture() as (out, _): + cser.status(pwork, 'second', 2, show_comments=True, + show_cover_comments=True) + self._check_status(out, True, True) + + def test_series_no_subcmd(self): + """Test handling of things without a subcommand""" + parsers = cmdline.setup_parser() + parsers['series'].catch_error = True + with terminal.capture() as (out, _): + cmdline.parse_args(['series'], parsers=parsers) + self.assertIn('usage: patman series', out.getvalue()) + + parsers['patchwork'].catch_error = True + with terminal.capture() as 
(out, _): + cmdline.parse_args(['patchwork'], parsers=parsers) + self.assertIn('usage: patman patchwork', out.getvalue()) + + parsers['upstream'].catch_error = True + with terminal.capture() as (out, _): + cmdline.parse_args(['upstream'], parsers=parsers) + self.assertIn('usage: patman upstream', out.getvalue()) + + def check_series_rename(self): + """Check renaming a series""" + cser = self.get_cser() + with self.stage('setup'): + with terminal.capture() as (out, _): + cser.add('first', 'my name', allow_unmarked=True) + + # Remember the old series + old = cser.get_series_by_name('first') + + self.assertEqual('first', gitutil.get_branch(self.gitdir)) + with terminal.capture() as (out, _): + cser.increment('first') + self.assertEqual('first2', gitutil.get_branch(self.gitdir)) + + with terminal.capture() as (out, _): + cser.increment('first') + self.assertEqual('first3', gitutil.get_branch(self.gitdir)) + + # Do the dry run + with self.stage('rename - dry run'): + with terminal.capture() as (out, _): + yield cser + lines = out.getvalue().splitlines() + itr = iter(lines) + self.assertEqual("Renaming branch 'first' to 'newname'", next(itr)) + self.assertEqual( + "Renaming branch 'first2' to 'newname2'", next(itr)) + self.assertEqual( + "Renaming branch 'first3' to 'newname3'", next(itr)) + self.assertEqual("Renamed series 'first' to 'newname'", next(itr)) + self.assertEqual("Dry run completed", next(itr)) + self.assert_finished(itr) + + # Check nothing changed + self.assertEqual('first3', gitutil.get_branch(self.gitdir)) + sdict = cser.db.series_get_dict() + self.assertIn('first', sdict) + + # Now do it for real + with self.stage('rename - real'): + with terminal.capture() as (out2, _): + yield cser + lines2 = out2.getvalue().splitlines() + self.assertEqual(lines[:-1], lines2) + + self.assertEqual('newname3', gitutil.get_branch(self.gitdir)) + + # Check the series ID did not change + ser = cser.get_series_by_name('newname') + self.assertEqual(old.idnum, ser.idnum) + + 
yield None + + def test_series_rename(self): + """Test renaming of a series""" + cor = self.check_series_rename() + cser = next(cor) + + # Rename (dry run) + cser.rename('first', 'newname', dry_run=True) + cser = next(cor) + + # Rename (real) + cser.rename('first', 'newname') + self.assertFalse(next(cor)) + + def test_series_rename_cmdline(self): + """Test renaming of a series with the cmdline""" + cor = self.check_series_rename() + next(cor) + + # Rename (dry run) + self.run_args('series', '-n', '-s', 'first', 'rename', '-N', 'newname', + pwork=True) + next(cor) + + # Rename (real) + self.run_args('series', '-s', 'first', 'rename', '-N', 'newname', + pwork=True) + + self.assertFalse(next(cor)) + + def test_series_rename_bad(self): + """Test renaming when it is not allowed""" + cser = self.get_cser() + with terminal.capture(): + cser.add('first', 'my name', allow_unmarked=True) + cser.increment('first') + cser.increment('first') + + with self.assertRaises(ValueError) as exc: + cser.rename('first', 'first') + self.assertEqual("Cannot rename series 'first' to itself", + str(exc.exception)) + + with self.assertRaises(ValueError) as exc: + cser.rename('first2', 'newname') + self.assertEqual( + "Invalid series name 'first2': did you use the branch name?", + str(exc.exception)) + + with self.assertRaises(ValueError) as exc: + cser.rename('first', 'newname2') + self.assertEqual( + "Invalid series name 'newname2': did you use the branch name?", + str(exc.exception)) + + with self.assertRaises(ValueError) as exc: + cser.rename('first', 'second') + self.assertEqual("Cannot rename: branches exist: second", + str(exc.exception)) + + with terminal.capture(): + cser.add('second', 'another name', allow_unmarked=True) + cser.increment('second') + + with self.assertRaises(ValueError) as exc: + cser.rename('first', 'second') + self.assertEqual("Cannot rename: series 'second' already exists", + str(exc.exception)) + + # Rename second2 so that it gets in the way of the rename + 
gitutil.rename_branch('second2', 'newname2', self.gitdir) + with self.assertRaises(ValueError) as exc: + cser.rename('first', 'newname') + self.assertEqual("Cannot rename: branches exist: newname2", + str(exc.exception)) + + # Rename first3 and make sure it stops the rename + gitutil.rename_branch('first3', 'tempbranch', self.gitdir) + with self.assertRaises(ValueError) as exc: + cser.rename('first', 'newname') + self.assertEqual( + "Cannot rename: branches missing: first3: branches exist: " + 'newname2', str(exc.exception)) + + def test_version_change(self): + """Test changing a version of a series to a different version number""" + cser = self.get_cser() + + with self.stage('setup'): + with terminal.capture(): + cser.add('first', 'my description', allow_unmarked=True) + + with self.stage('non-existent version'): + # Check changing a non-existent version + with self.assertRaises(ValueError) as exc: + cser.version_change('first', 2, 3, dry_run=True) + self.assertEqual("Series 'first' does not have a version 2", + str(exc.exception)) + + with self.stage('new version missing'): + with self.assertRaises(ValueError) as exc: + cser.version_change('first', None, None, dry_run=True) + self.assertEqual("Please provide a new version number", + str(exc.exception)) + + # Change v1 to v2 (dry run) + with self.stage('v1 -> 2 dry run'): + with terminal.capture(): + self.assertTrue(gitutil.check_branch('first', self.gitdir)) + cser.version_change('first', 1, 3, dry_run=True) + self.assertTrue(gitutil.check_branch('first', self.gitdir)) + self.assertFalse(gitutil.check_branch('first3', self.gitdir)) + + # Check that nothing actually happened + series = patchstream.get_metadata('first', 0, 2, + git_dir=self.gitdir) + self.assertNotIn('version', series) + + svlist = cser.get_ser_ver_list() + self.assertEqual(1, len(svlist)) + item = svlist[0] + self.assertEqual(1, item.version) + + with self.stage('increment twice'): + # Increment so that we get first3 + with terminal.capture(): + 
cser.increment('first') + cser.increment('first') + + with self.stage('existing version'): + # Check changing to an existing version + with self.assertRaises(ValueError) as exc: + cser.version_change('first', 1, 3, dry_run=True) + self.assertEqual("Series 'first' already has a v3: 1 2 3", + str(exc.exception)) + + # Change v1 to v4 (for real) + with self.stage('v1 -> 4'): + with terminal.capture(): + self.assertTrue(gitutil.check_branch('first', self.gitdir)) + cser.version_change('first', 1, 4) + self.assertTrue(gitutil.check_branch('first', self.gitdir)) + self.assertTrue(gitutil.check_branch('first4', self.gitdir)) + + series = patchstream.get_metadata('first4', 0, 2, + git_dir=self.gitdir) + self.assertIn('version', series) + self.assertEqual('4', series.version) + + svdict = cser.get_ser_ver_dict() + self.assertEqual(3, len(svdict)) + item = svdict[item.idnum] + self.assertEqual(4, item.version) + + with self.stage('increment'): + # Now try to increment first again + with terminal.capture(): + cser.increment('first') + + ser = cser.get_series_by_name('first') + self.assertIn(5, cser._get_version_list(ser.idnum)) + + def test_version_change_cmdline(self): + """Check changing a version on the cmdline""" + self.get_cser() + with (mock.patch.object(cseries.Cseries, 'version_change', + return_value=None) as method): + self.run_args('series', '-s', 'first', 'version-change', + pwork=True) + method.assert_called_once_with('first', None, None, dry_run=False) + + with (mock.patch.object(cseries.Cseries, 'version_change', + return_value=None) as method): + self.run_args('series', '-s', 'first', 'version-change', + '--new-version', '3', pwork=True) + method.assert_called_once_with('first', None, 3, dry_run=False) diff --git a/tools/patman/test_settings.py b/tools/patman/test_settings.py new file mode 100644 index 00000000000..c117836de31 --- /dev/null +++ b/tools/patman/test_settings.py @@ -0,0 +1,67 @@ +# SPDX-License-Identifier: GPL-2.0+ +# +# Copyright (c) 2022 Maxim 
Cournoyer <maxim.cournoyer@savoirfairelinux.com> +# + +import argparse +import contextlib +import os +import sys +import tempfile + +from patman import settings +from u_boot_pylib import tools + + +@contextlib.contextmanager +def empty_git_repository(): + with tempfile.TemporaryDirectory() as tmpdir: + os.chdir(tmpdir) + tools.run('git', 'init', raise_on_error=True) + yield tmpdir + + +@contextlib.contextmanager +def cleared_command_line_args(): + old_value = sys.argv[:] + sys.argv = [sys.argv[0]] + try: + yield + finally: + sys.argv = old_value + + +def test_git_local_config(): + # Clearing the command line arguments is required, otherwise + # arguments passed to the test running such as in 'pytest -k + # filter' would be processed by _UpdateDefaults and fail. + with cleared_command_line_args(): + with empty_git_repository(): + with tempfile.NamedTemporaryFile() as global_config: + global_config.write(b'[settings]\n' + b'project=u-boot\n') + global_config.flush() + parser = argparse.ArgumentParser() + parser.add_argument('-p', '--project', default='unknown') + subparsers = parser.add_subparsers(dest='cmd') + send = subparsers.add_parser('send') + send.add_argument('--no-check', action='store_false', + dest='check_patch', default=True) + + # Test "global" config is used. + settings.Setup(parser, 'unknown', None, global_config.name) + args, _ = parser.parse_known_args([]) + assert args.project == 'u-boot' + send_args, _ = send.parse_known_args([]) + assert send_args.check_patch + + # Test local config can shadow it. + with open('.patman', 'w', buffering=1) as f: + f.write('[settings]\n' + 'project: guix-patches\n' + 'check_patch: False\n') + settings.Setup(parser, 'unknown', global_config.name) + args, _ = parser.parse_known_args([]) + assert args.project == 'guix-patches' + send_args, _ = send.parse_known_args([]) + assert not send_args.check_patch |