Diffstat (limited to 'tools')
-rw-r--r--  tools/Kconfig | 2
-rw-r--r--  tools/binman/binman.rst | 7
-rw-r--r--  tools/binman/bintool_test.py | 20
-rw-r--r--  tools/binman/btool/openssl.py | 16
-rwxr-xr-x  tools/binman/cbfs_util_test.py | 15
-rw-r--r--  tools/binman/control.py | 104
-rw-r--r--  tools/binman/elf_test.py | 3
-rw-r--r--  tools/binman/entries.rst | 16
-rw-r--r--  tools/binman/entry.py | 17
-rw-r--r--  tools/binman/etype/atf_bl1.py | 23
-rw-r--r--  tools/binman/etype/blob.py | 9
-rw-r--r--  tools/binman/etype/blob_ext_list.py | 4
-rw-r--r--  tools/binman/etype/cbfs.py | 3
-rw-r--r--  tools/binman/etype/fit.py | 3
-rw-r--r--  tools/binman/etype/mkimage.py | 2
-rw-r--r--  tools/binman/etype/renesas_rcar4_sa0.py | 46
-rw-r--r--  tools/binman/etype/section.py | 22
-rw-r--r--  tools/binman/etype/ti_secure.py | 1
-rw-r--r--  tools/binman/etype/ti_secure_rom.py | 1
-rw-r--r--  tools/binman/etype/u_boot_spl_pubkey_dtb.py | 2
-rw-r--r--  tools/binman/etype/x509_cert.py | 7
-rwxr-xr-x  tools/binman/fip_util_test.py | 11
-rw-r--r--  tools/binman/ftest.py | 235
-rw-r--r--  tools/binman/image.py | 2
-rw-r--r--  tools/binman/image_test.py | 4
-rwxr-xr-x  tools/binman/main.py | 4
-rw-r--r--  tools/binman/missing-blob-help | 59
-rw-r--r--  tools/binman/test/170_fit_fdt.dts | 14
-rw-r--r--  tools/binman/test/220_fit_subentry_bintool.dts | 2
-rw-r--r--  tools/binman/test/223_fit_fdt_oper.dts | 14
-rw-r--r--  tools/binman/test/284_fit_fdt_list.dts | 14
-rw-r--r--  tools/binman/test/333_fit_fdt_dir.dts | 14
-rw-r--r--  tools/binman/test/334_fit_fdt_compat.dts | 14
-rw-r--r--  tools/binman/test/335_fit_fdt_phase.dts | 14
-rw-r--r--  tools/binman/test/345_fit_fdt_name.dts | 14
-rw-r--r--  tools/binman/test/347_bl1.dts | 13
-rw-r--r--  tools/binman/test/347_bootph_prop.dts | 21
-rw-r--r--  tools/binman/test/347_key_name_hint_dir_fit_signature.dts | 98
-rw-r--r--  tools/binman/test/348_key_name_hint_dir_spl_pubkey_dtb.dts | 16
-rw-r--r--  tools/binman/test/348_renesas_rcar4_sa0.dts | 15
-rw-r--r--  tools/binman/test/349_renesas_rcar4_sa0_size.dts | 20
-rw-r--r--  tools/buildman/builder.py | 11
-rw-r--r--  tools/buildman/builderthread.py | 58
-rw-r--r--  tools/buildman/buildman.rst | 12
-rw-r--r--  tools/buildman/cmdline.py | 3
-rw-r--r--  tools/buildman/control.py | 22
-rw-r--r--  tools/buildman/func_test.py | 20
-rwxr-xr-x  tools/buildman/main.py | 2
-rw-r--r--  tools/buildman/test.py | 6
-rwxr-xr-x  tools/dtoc/main.py | 5
-rwxr-xr-x  tools/dtoc/test_dtoc.py | 11
-rwxr-xr-x  tools/dtoc/test_fdt.py | 2
-rw-r--r--  tools/dtoc/test_src_scan.py | 25
-rw-r--r--  tools/envcrc.c | 2
-rw-r--r--  tools/fit_image.c | 122
-rw-r--r--  tools/ifdtool.c | 8
-rw-r--r--  tools/mkimage.c | 7
-rw-r--r--  tools/patman/__init__.py | 9
-rwxr-xr-x  tools/patman/__main__.py | 67
-rw-r--r--  tools/patman/checkpatch.py | 15
-rw-r--r--  tools/patman/cmdline.py | 527
-rw-r--r--  tools/patman/control.py | 424
-rw-r--r--  tools/patman/cser_helper.py | 1524
-rw-r--r--  tools/patman/cseries.py | 1165
-rw-r--r--  tools/patman/database.py | 823
-rw-r--r--  tools/patman/func_test.py | 544
-rw-r--r--  tools/patman/get_maintainer.py | 13
-rw-r--r--  tools/patman/patchstream.py | 44
-rw-r--r--  tools/patman/patchwork.py | 852
-rw-r--r--  tools/patman/patman.rst | 283
-rw-r--r--  tools/patman/project.py | 3
-rw-r--r--  tools/patman/pyproject.toml | 2
-rw-r--r--  tools/patman/requirements.txt | 3
-rw-r--r--  tools/patman/send.py | 197
-rw-r--r--  tools/patman/series.py | 143
-rw-r--r--  tools/patman/settings.py | 77
-rw-r--r--  tools/patman/status.py | 488
-rw-r--r--  tools/patman/test_checkpatch.py | 2
-rw-r--r--  tools/patman/test_common.py | 254
-rw-r--r--  tools/patman/test_cseries.py | 3684
-rw-r--r--  tools/patman/test_settings.py | 2
-rwxr-xr-x  tools/rmboard.py | 6
-rw-r--r--  tools/stm32image.c | 215
-rw-r--r--  tools/termios_linux.h | 6
-rwxr-xr-x  tools/u_boot_pylib/__main__.py | 2
-rw-r--r--  tools/u_boot_pylib/command.py | 5
-rw-r--r--  tools/u_boot_pylib/gitutil.py | 255
-rw-r--r--  tools/u_boot_pylib/terminal.py | 105
-rw-r--r--  tools/u_boot_pylib/test_util.py | 29
-rw-r--r--  tools/u_boot_pylib/tout.py | 27
90 files changed, 11656 insertions(+), 1376 deletions(-)
diff --git a/tools/Kconfig b/tools/Kconfig
index 8e272ee99a8..652b0f22557 100644
--- a/tools/Kconfig
+++ b/tools/Kconfig
@@ -137,7 +137,7 @@ config DEVICE_TYPE
default 0x01
depends on FSPI_CONF_HEADER
help
- Flash type: Serial NOR (0X01) and Serial NAND (0x02)
+ Flash type: Serial NOR (0x01) and Serial NAND (0x02)
config FLASH_PAD_TYPE
hex "Flash Pad Type"
diff --git a/tools/binman/binman.rst b/tools/binman/binman.rst
index 84b1331df5c..392e507d449 100644
--- a/tools/binman/binman.rst
+++ b/tools/binman/binman.rst
@@ -1143,6 +1143,13 @@ Optional entries
Some entries need to exist only if certain conditions are met. For example, an
entry may want to appear in the image only if a file has a particular format.
+Also, the ``optional`` property may be used to mark entries as optional::
+
+ tee-os {
+ filename = "tee.bin";
+ optional;
+ };
+
Obviously the entry must exist in the image description for it to be processed
at all, so a way needs to be found to have the entry remove itself.
diff --git a/tools/binman/bintool_test.py b/tools/binman/bintool_test.py
index 949d6f4c8a9..7e8dafea94e 100644
--- a/tools/binman/bintool_test.py
+++ b/tools/binman/bintool_test.py
@@ -55,14 +55,14 @@ class TestBintool(unittest.TestCase):
def test_version(self):
"""Check handling of a tool being present or absent"""
btest = Bintool.create('_testing')
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
btest.show()
self.assertFalse(btest.is_present())
self.assertIn('-', stdout.getvalue())
btest.present = True
self.assertTrue(btest.is_present())
self.assertEqual('123', btest.version())
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
btest.show()
self.assertIn('123', stdout.getvalue())
@@ -90,7 +90,7 @@ class TestBintool(unittest.TestCase):
col = terminal.Color()
with unittest.mock.patch.object(tools, 'download',
side_effect=fake_download):
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
btest.fetch_tool(method, col, False)
return stdout.getvalue()
@@ -144,7 +144,7 @@ class TestBintool(unittest.TestCase):
with unittest.mock.patch.object(bintool.Bintool, 'tooldir', destdir):
with unittest.mock.patch.object(tools, 'download',
side_effect=handle_download):
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
Bintool.fetch_tools(bintool.FETCH_ANY, ['_testing'] * 2)
self.assertTrue(os.path.exists(dest_fname))
data = tools.read_file(dest_fname)
@@ -177,7 +177,7 @@ class TestBintool(unittest.TestCase):
self.count = collections.defaultdict(int)
with unittest.mock.patch.object(bintool.Bintool, 'fetch_tool',
side_effect=fake_fetch):
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
Bintool.fetch_tools(method, ['all'])
lines = stdout.getvalue().splitlines()
self.assertIn(f'{self.count[bintool.FETCHED]}: ', lines[-2])
@@ -220,7 +220,7 @@ class TestBintool(unittest.TestCase):
side_effect=[all_tools]):
with unittest.mock.patch.object(bintool.Bintool, 'create',
side_effect=self.btools.values()):
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
Bintool.fetch_tools(bintool.FETCH_ANY, ['missing'])
lines = stdout.getvalue().splitlines()
num_tools = len(self.btools)
@@ -255,7 +255,7 @@ class TestBintool(unittest.TestCase):
with unittest.mock.patch.object(bintool.Bintool, 'tooldir',
self._indir):
with unittest.mock.patch.object(tools, 'run', side_effect=fake_run):
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
btest.fetch_tool(bintool.FETCH_BUILD, col, False)
fname = os.path.join(self._indir, '_testing')
return fname if write_file else self.fname, stdout.getvalue()
@@ -278,7 +278,7 @@ class TestBintool(unittest.TestCase):
btest.install = True
col = terminal.Color()
with unittest.mock.patch.object(tools, 'run', return_value=None):
- with test_util.capture_sys_output() as _:
+ with terminal.capture() as _:
result = btest.fetch_tool(bintool.FETCH_BIN, col, False)
self.assertEqual(bintool.FETCHED, result)
@@ -287,7 +287,7 @@ class TestBintool(unittest.TestCase):
btest = Bintool.create('_testing')
btest.disable = True
col = terminal.Color()
- with test_util.capture_sys_output() as _:
+ with terminal.capture() as _:
result = btest.fetch_tool(bintool.FETCH_BIN, col, False)
self.assertEqual(bintool.FAIL, result)
@@ -314,7 +314,7 @@ class TestBintool(unittest.TestCase):
with unittest.mock.patch.object(tools, 'run', side_effect=fake_run):
with unittest.mock.patch.object(tools, 'download',
side_effect=handle_download):
- with test_util.capture_sys_output() as _:
+ with terminal.capture() as _:
for name in Bintool.get_tool_list():
btool = Bintool.create(name)
for method in range(bintool.FETCH_COUNT):
diff --git a/tools/binman/btool/openssl.py b/tools/binman/btool/openssl.py
index c6df64c5316..b26f087c447 100644
--- a/tools/binman/btool/openssl.py
+++ b/tools/binman/btool/openssl.py
@@ -153,7 +153,7 @@ numFirewallRegions = INTEGER:{firewall_cert_data['num_firewalls']}
def x509_cert_rom(self, cert_fname, input_fname, key_fname, sw_rev,
config_fname, req_dist_name_dict, cert_type, bootcore,
- bootcore_opts, load_addr, sha):
+ bootcore_opts, load_addr, sha, debug):
"""Create a certificate
Args:
@@ -221,9 +221,13 @@ emailAddress = {req_dist_name_dict['emailAddress']}
# iterationCnt = INTEGER:TEST_IMAGE_KEY_DERIVE_INDEX
# salt = FORMAT:HEX,OCT:TEST_IMAGE_KEY_DERIVE_SALT
+ # When debugging low-level boot firmware it can be useful to have ROM or TIFS
+ # unlock JTAG access to the misbehaving CPUs. However, in a production setting
+ # this can lead to code modification by outside parties after it has been
+ # authenticated. To gain JTAG access, add the 'debug' flag to the binman config
[ debug ]
debugUID = FORMAT:HEX,OCT:0000000000000000000000000000000000000000000000000000000000000000
- debugType = INTEGER:4
+ debugType = INTEGER:{ "4" if debug else "0" }
coreDbgEn = INTEGER:0
coreDbgSecEn = INTEGER:0
''', file=outf)
@@ -238,7 +242,7 @@ emailAddress = {req_dist_name_dict['emailAddress']}
imagesize_sbl, hashval_sbl, load_addr_sysfw, imagesize_sysfw,
hashval_sysfw, load_addr_sysfw_data, imagesize_sysfw_data,
hashval_sysfw_data, sysfw_inner_cert_ext_boot_block,
- dm_data_ext_boot_block, bootcore_opts):
+ dm_data_ext_boot_block, bootcore_opts, debug):
"""Create a certificate
Args:
@@ -324,9 +328,13 @@ compSize = INTEGER:{imagesize_sysfw_data}
shaType = OID:{sha_type}
shaValue = FORMAT:HEX,OCT:{hashval_sysfw_data}
+# When debugging low-level boot firmware it can be useful to have ROM or TIFS
+# unlock JTAG access to the misbehaving CPUs. However, in a production setting
+# this can lead to code modification by outside parties after it has been
+# authenticated. To gain JTAG access, add the 'debug' flag to the binman config
[ debug ]
debugUID = FORMAT:HEX,OCT:0000000000000000000000000000000000000000000000000000000000000000
-debugType = INTEGER:4
+debugType = INTEGER:{ "4" if debug else "0" }
coreDbgEn = INTEGER:0
coreDbgSecEn = INTEGER:0
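
For reference, a minimal sketch of how the new ``debug`` flag would appear in an image description (the ``content`` and ``keyfile`` values are illustrative; the flag itself is read by the ti-secure entry types updated below)::

    ti-secure {
        content = <&unsecure_binary>;
        keyfile = "custMpk.pem";
        debug;
    };

Without the ``debug`` property, debugType stays 0 and the certificate leaves JTAG locked.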
diff --git a/tools/binman/cbfs_util_test.py b/tools/binman/cbfs_util_test.py
index 4c415b7ce94..2494a6b9405 100755
--- a/tools/binman/cbfs_util_test.py
+++ b/tools/binman/cbfs_util_test.py
@@ -20,6 +20,7 @@ from binman import bintool
from binman import cbfs_util
from binman.cbfs_util import CbfsWriter
from binman import elf
+from u_boot_pylib import terminal
from u_boot_pylib import test_util
from u_boot_pylib import tools
@@ -314,7 +315,7 @@ class TestCbfs(unittest.TestCase):
newdata = data[:-4] + struct.pack('<I', cbw._header_offset + 1)
# We should still be able to find the master header by searching
- with test_util.capture_sys_output() as (stdout, _stderr):
+ with terminal.capture() as (stdout, _stderr):
cbfs = cbfs_util.CbfsReader(newdata)
self.assertIn('Relative offset seems wrong', stdout.getvalue())
self.assertIn('u-boot', cbfs.files)
@@ -330,7 +331,7 @@ class TestCbfs(unittest.TestCase):
# Drop most of the header and try reading the modified CBFS
newdata = data[:cbw._header_offset + 4]
- with test_util.capture_sys_output() as (stdout, _stderr):
+ with terminal.capture() as (stdout, _stderr):
with self.assertRaises(ValueError) as e:
cbfs_util.CbfsReader(newdata)
self.assertIn('Relative offset seems wrong', stdout.getvalue())
@@ -351,7 +352,7 @@ class TestCbfs(unittest.TestCase):
# Remove all but 4 bytes of the file header and try to read the file
newdata = data[:pos + 4]
- with test_util.capture_sys_output() as (stdout, _stderr):
+ with terminal.capture() as (stdout, _stderr):
with io.BytesIO(newdata) as fd:
fd.seek(pos)
self.assertEqual(False, cbr._read_next_file(fd))
@@ -373,7 +374,7 @@ class TestCbfs(unittest.TestCase):
# Create a new CBFS with only the first 16 bytes of the file name, then
# try to read the file
newdata = data[:pos + cbfs_util.FILE_HEADER_LEN + 16]
- with test_util.capture_sys_output() as (stdout, _stderr):
+ with terminal.capture() as (stdout, _stderr):
with io.BytesIO(newdata) as fd:
fd.seek(pos)
self.assertEqual(False, cbr._read_next_file(fd))
@@ -389,7 +390,7 @@ class TestCbfs(unittest.TestCase):
try:
cbfs_util.DEBUG = True
- with test_util.capture_sys_output() as (stdout, _stderr):
+ with terminal.capture() as (stdout, _stderr):
cbfs_util.CbfsReader(data)
self.assertEqual('name u-boot\nftype 50\ndata %s\n' % U_BOOT_DATA,
stdout.getvalue())
@@ -416,7 +417,7 @@ class TestCbfs(unittest.TestCase):
# Create a new CBFS with the tag changed to something invalid
newdata = data[:pos] + struct.pack('>I', 0x123) + data[pos + 4:]
- with test_util.capture_sys_output() as (stdout, _stderr):
+ with terminal.capture() as (stdout, _stderr):
cbfs_util.CbfsReader(newdata)
self.assertEqual('Unknown attribute tag 123\n', stdout.getvalue())
@@ -441,7 +442,7 @@ class TestCbfs(unittest.TestCase):
tag_pos = (4 + pos + cbfs_util.FILE_HEADER_LEN +
cbfs_util.ATTRIBUTE_ALIGN)
newdata = data[:tag_pos + 4]
- with test_util.capture_sys_output() as (stdout, _stderr):
+ with terminal.capture() as (stdout, _stderr):
with io.BytesIO(newdata) as fd:
fd.seek(pos)
self.assertEqual(False, cbr._read_next_file(fd))
diff --git a/tools/binman/control.py b/tools/binman/control.py
index 81f61e3e152..1307222591d 100644
--- a/tools/binman/control.py
+++ b/tools/binman/control.py
@@ -8,12 +8,11 @@
from collections import OrderedDict
import glob
try:
- import importlib.resources
+ import importlib.resources as importlib_resources
except ImportError: # pragma: no cover
# for Python 3.6
import importlib_resources
import os
-import pkg_resources
import re
import sys
@@ -96,8 +95,8 @@ def _ReadMissingBlobHelp():
msg = ''
return tag, msg
- my_data = pkg_resources.resource_string(__name__, 'missing-blob-help')
- re_tag = re.compile('^([-a-z0-9]+):$')
+ my_data = importlib_resources.files(__package__).joinpath('missing-blob-help').read_bytes()
+ re_tag = re.compile(r"^([-\.a-z0-9]+):$")
result = {}
tag = None
msg = ''
@@ -151,8 +150,9 @@ def GetEntryModules(include_testing=True):
Returns:
Set of paths to entry class filenames
"""
- glob_list = pkg_resources.resource_listdir(__name__, 'etype')
- glob_list = [fname for fname in glob_list if fname.endswith('.py')]
+ entries = importlib_resources.files(__package__).joinpath('etype')
+ glob_list = [entry.name for entry in entries.iterdir()
+ if entry.name.endswith('.py') and entry.is_file()]
return set([os.path.splitext(os.path.basename(item))[0]
for item in glob_list
if include_testing or '_testing' not in item])
@@ -530,6 +530,57 @@ def _RemoveTemplates(parent):
for node in del_nodes:
node.Delete()
+def propagate_prop(node, prop):
+ """Propagate the provided property to all the parent nodes up the hierarchy
+
+ Args:
+ node (fdt.Node): Node at which to start; the property is added to
+ this node and to all its parent nodes up to the root.
+ prop (str): Boolean property to propagate
+
+ Return:
+ True if any change was made, else False
+ """
+ changed = False
+ while node:
+ if prop not in node.props:
+ node.AddEmptyProp(prop, 0)
+ changed = True
+ node = node.parent
+ return changed
+
+def scan_and_prop_bootph(node):
+ """Propagate bootph properties from children to parents
+
+ The bootph schema indicates that bootph properties in children should be
+ implied in their parents, all the way up the hierarchy. This is expensive
+ to implement in U-Boot before relocation at runtime, so this function
+ explicitly propagates these bootph properties upwards during build time.
+
+ This is used to set the bootph-all and bootph-some-ram properties in the parent
+ node if the respective property is found in any of the parent's subnodes.
+ The other bootph-* properties are associated with the SPL stage and hence
+ handled by fdtgrep.c.
+
+ Args:
+ node (fdt.Node): Node to scan for bootph-all and bootph-some-ram
+ property
+
+ Return:
+ True if any change was made, else False
+
+ """
+ bootph_prop = {'bootph-all', 'bootph-some-ram'}
+
+ changed = False
+ for prop in bootph_prop:
+ if prop in node.props:
+ changed |= propagate_prop(node.parent, prop)
+
+ for subnode in node.subnodes:
+ changed |= scan_and_prop_bootph(subnode)
+ return changed
+
def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded, indir):
"""Prepare the images to be processed and select the device tree
@@ -589,6 +640,9 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded, ind
fname = tools.get_output_filename('u-boot.dtb.tmpl2')
tools.write_file(fname, dtb.GetContents())
+ if scan_and_prop_bootph(dtb.GetRoot()):
+ dtb.Sync(True)
+
images = _ReadImageDesc(node, use_expanded)
if select_images:
@@ -645,14 +699,27 @@ def CheckForProblems(image):
_ShowHelpForMissingBlobs(tout.ERROR, missing_list)
faked_list = []
+ faked_optional_list = []
+ faked_required_list = []
image.CheckFakedBlobs(faked_list)
- if faked_list:
+ for e in faked_list:
+ if e.optional:
+ faked_optional_list.append(e)
+ else:
+ faked_required_list.append(e)
+ if faked_required_list:
tout.warning(
"Image '%s' has faked external blobs and is non-functional: %s\n" %
(image.name, ' '.join([os.path.basename(e.GetDefaultFilename())
- for e in faked_list])))
+ for e in faked_required_list])))
optional_list = []
+ # For optional blobs, we should inform the user when the blob is not present. This comes
+ # as a warning, since it may not otherwise be apparent that something is missing, e.g.
+ # the user thinks they supplied a blob, but there is no indication to the contrary if
+ # they made an error.
+ # Faked optional blobs are not relevant for final images (they are dropped anyway), so we
+ # omit the message at the default verbosity.
image.CheckOptional(optional_list)
if optional_list:
tout.warning(
@@ -660,6 +727,12 @@ def CheckForProblems(image):
(image.name, ' '.join([e.name for e in optional_list])))
_ShowHelpForMissingBlobs(tout.WARNING, optional_list)
+ if faked_optional_list:
+ tout.info(
+ "Image '%s' has faked optional external blobs but is still functional: %s\n" %
+ (image.name, ' '.join([os.path.basename(e.GetDefaultFilename())
+ for e in faked_optional_list])))
+
missing_bintool_list = []
image.check_missing_bintools(missing_bintool_list)
if missing_bintool_list:
@@ -667,7 +740,7 @@ def CheckForProblems(image):
"Image '%s' has missing bintools and is non-functional: %s\n" %
(image.name, ' '.join([os.path.basename(bintool.name)
for bintool in missing_bintool_list])))
- return any([missing_list, faked_list, missing_bintool_list])
+ return any([missing_list, faked_required_list, missing_bintool_list])
def ProcessImage(image, update_fdt, write_map, get_contents=True,
allow_resize=True, allow_missing=False,
@@ -697,7 +770,6 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True,
image.SetAllowMissing(allow_missing)
image.SetAllowFakeBlob(allow_fake_blobs)
image.GetEntryContents()
- image.drop_absent()
image.GetEntryOffsets()
# We need to pack the entries to figure out where everything
@@ -736,12 +808,12 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True,
image.Raise('Entries changed size after packing (tried %s passes)' %
passes)
+ has_problems = CheckForProblems(image)
+
image.BuildImage()
if write_map:
image.WriteMap()
- has_problems = CheckForProblems(image)
-
image.WriteAlternates()
return has_problems
@@ -759,7 +831,7 @@ def Binman(args):
global state
if args.full_help:
- with importlib.resources.path('binman', 'README.rst') as readme:
+ with importlib_resources.path('binman', 'README.rst') as readme:
tools.print_full_help(str(readme))
return 0
@@ -777,7 +849,7 @@ def Binman(args):
if args.cmd in ['ls', 'extract', 'replace', 'tool', 'sign']:
try:
- tout.init(args.verbosity)
+ tout.init(args.verbosity + 1)
if args.cmd == 'replace':
tools.prepare_output_dir(args.outdir, args.preserve)
else:
@@ -835,9 +907,9 @@ def Binman(args):
args.indir.append(board_pathname)
try:
- tout.init(args.verbosity)
+ tout.init(args.verbosity + 1)
elf.debug = args.debug
- cbfs_util.VERBOSE = args.verbosity > 2
+ cbfs_util.VERBOSE = args.verbosity > tout.NOTICE
state.use_fake_dtb = args.fake_dtb
# Normally we replace the 'u-boot' etype with 'u-boot-expanded', etc.
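
A worked example of the bootph propagation above (node names are hypothetical): tagging only a leaf node causes every ancestor to gain the empty property when the image description is processed::

    /* before scan_and_prop_bootph() */
    soc {
        spi@1000 {
            flash@0 {
                bootph-all;
            };
        };
    };

    /* after: the property is implied upwards */
    soc {
        bootph-all;
        spi@1000 {
            bootph-all;
            flash@0 {
                bootph-all;
            };
        };
    };

The root node is tagged as well, since propagation continues to the top of the tree.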
diff --git a/tools/binman/elf_test.py b/tools/binman/elf_test.py
index 2f22639dffc..5b173392898 100644
--- a/tools/binman/elf_test.py
+++ b/tools/binman/elf_test.py
@@ -13,6 +13,7 @@ import unittest
from binman import elf
from u_boot_pylib import command
+from u_boot_pylib import terminal
from u_boot_pylib import test_util
from u_boot_pylib import tools
from u_boot_pylib import tout
@@ -187,7 +188,7 @@ class TestElf(unittest.TestCase):
entry = FakeEntry(24)
section = FakeSection()
elf_fname = self.ElfTestFile('u_boot_binman_syms')
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
elf.LookupAndWriteSymbols(elf_fname, entry, section)
self.assertTrue(len(stdout.getvalue()) > 0)
finally:
diff --git a/tools/binman/entries.rst b/tools/binman/entries.rst
index 4f05aa0a323..12a39d070e4 100644
--- a/tools/binman/entries.rst
+++ b/tools/binman/entries.rst
@@ -53,6 +53,22 @@ respecting the `bootph-xxx` tags in the devicetree.
+.. _etype_atf_bl1:
+
+Entry: atf-bl1: AP Trusted ROM (TF-A) BL1 blob
+-----------------------------------------------------
+
+Properties / Entry arguments:
+ - atf-bl1-path: Filename of file to read into entry. This is typically
+ called bl1.bin
+
+This entry holds the AP Trusted ROM firmware typically used by an SoC to
+help initialize the SoC before the SPL or U-Boot is started. See
+https://github.com/TrustedFirmware-A/trusted-firmware-a for more information
+about Boot Loader stage 1 (BL1) or about Trusted Firmware (TF-A).
+
+
+
.. _etype_atf_bl31:
Entry: atf-bl31: ARM Trusted Firmware (ATF) BL31 blob
diff --git a/tools/binman/entry.py b/tools/binman/entry.py
index bdc60e47fca..ce7ef28e94b 100644
--- a/tools/binman/entry.py
+++ b/tools/binman/entry.py
@@ -88,6 +88,7 @@ class Entry(object):
updated with a hash of the entry contents
comp_bintool: Bintools used for compress and decompress data
fake_fname: Fake filename, if one was created, else None
+ faked (bool): True if the entry is absent and faked
required_props (dict of str): Properties which must be present. This can
be added to by subclasses
elf_fname (str): Filename of the ELF file, if this entry holds an ELF
@@ -759,7 +760,7 @@ class Entry(object):
self.image_pos)
# pylint: disable=assignment-from-none
- def GetEntries(self):
+ def GetEntries(self) -> None:
"""Return a list of entries contained by this entry
Returns:
@@ -1120,7 +1121,7 @@ features to produce new behaviours.
if self.missing and not self.optional:
missing_list.append(self)
- def check_fake_fname(self, fname, size=0):
+ def check_fake_fname(self, fname: str, size: int = 0) -> str:
"""If the file is missing and the entry allows fake blobs, fake it
Sets self.faked to True if faked
@@ -1130,9 +1131,7 @@ features to produce new behaviours.
size (int): Size of fake file to create
Returns:
- tuple:
- fname (str): Filename of faked file
- bool: True if the blob was faked, False if not
+ fname (str): Filename of faked file
"""
if self.allow_fake and not pathlib.Path(fname).is_file():
if not self.fake_fname:
@@ -1142,8 +1141,8 @@ features to produce new behaviours.
tout.info(f"Entry '{self._node.path}': Faked blob '{outfname}'")
self.fake_fname = outfname
self.faked = True
- return self.fake_fname, True
- return fname, False
+ return self.fake_fname
+ return fname
def CheckFakedBlobs(self, faked_blobs_list):
"""Check if any entries in this section have faked external blobs
@@ -1352,6 +1351,10 @@ features to produce new behaviours.
os.mkdir(cls.fake_dir)
tout.notice(f"Fake-blob dir is '{cls.fake_dir}'")
+ def drop_absent_optional(self) -> None:
+ """Entries don't have any entries, do nothing"""
+ pass
+
def ensure_props(self):
"""Raise an exception if properties are missing
diff --git a/tools/binman/etype/atf_bl1.py b/tools/binman/etype/atf_bl1.py
new file mode 100644
index 00000000000..7adf10e693c
--- /dev/null
+++ b/tools/binman/etype/atf_bl1.py
@@ -0,0 +1,23 @@
+# SPDX-License-Identifier: GPL-2.0+
+# Copyright 2025 Texas Instruments Incorporated
+#
+# Entry-type module for Application Processor Trusted ROM (BL1)
+#
+
+from binman.etype.blob_named_by_arg import Entry_blob_named_by_arg
+
+class Entry_atf_bl1(Entry_blob_named_by_arg):
+ """Application Processor (AP) Trusted ROM BL1 blob
+
+ Properties / Entry arguments:
+ - atf-bl1-path: Filename of file to read into entry. This is typically
+ called bl1.bin or bl1.elf
+
+ This entry holds the initial boot code, such as exception vectors and
+ processor and platform initialization.
+
+ See https://github.com/TrustedFirmware-A/trusted-firmware-a for more information.
+ """
+ def __init__(self, section, etype, node):
+ super().__init__(section, etype, node, 'atf-bl1')
+ self.external = True
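
A minimal usage sketch for the new entry type (assuming the blob is supplied as an entry argument, as with other blob-named-by-arg entries)::

    binman {
        atf-bl1 {
        };
    };

with the file passed on the binman command line, e.g. ``-a atf-bl1-path=bl1.bin``.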
diff --git a/tools/binman/etype/blob.py b/tools/binman/etype/blob.py
index 041e1122953..acd9ae34074 100644
--- a/tools/binman/etype/blob.py
+++ b/tools/binman/etype/blob.py
@@ -42,7 +42,7 @@ class Entry_blob(Entry):
if fdt_util.GetBool(self._node, 'write-symbols'):
self.auto_write_symbols = True
- def ObtainContents(self, fake_size=0):
+ def ObtainContents(self, fake_size: int = 0) -> bool:
self._filename = self.GetDefaultFilename()
self._pathname = tools.get_input_filename(self._filename,
self.external and (self.optional or self.section.GetAllowMissing()))
@@ -50,10 +50,11 @@ class Entry_blob(Entry):
if not self._pathname:
if not fake_size and self.assume_size:
fake_size = self.assume_size
- self._pathname, faked = self.check_fake_fname(self._filename,
- fake_size)
+ self._pathname = self.check_fake_fname(self._filename, fake_size)
self.missing = True
- if not faked:
+ if self.optional:
+ self.mark_absent("missing but optional")
+ if not self.faked:
content_size = 0
if self.assume_size: # Ensure we get test coverage on next line
content_size = self.assume_size
diff --git a/tools/binman/etype/blob_ext_list.py b/tools/binman/etype/blob_ext_list.py
index 1bfcf6733a7..a8b5a24c3a1 100644
--- a/tools/binman/etype/blob_ext_list.py
+++ b/tools/binman/etype/blob_ext_list.py
@@ -33,11 +33,11 @@ class Entry_blob_ext_list(Entry_blob):
self._filenames = fdt_util.GetStringList(self._node, 'filenames')
self._pathnames = []
- def ObtainContents(self):
+ def ObtainContents(self) -> bool:
missing = False
pathnames = []
for fname in self._filenames:
- fname, _ = self.check_fake_fname(fname)
+ fname = self.check_fake_fname(fname)
pathname = tools.get_input_filename(
fname, self.external and self.section.GetAllowMissing())
# Allow the file to be missing
diff --git a/tools/binman/etype/cbfs.py b/tools/binman/etype/cbfs.py
index 124fa1e4ffc..5879f377231 100644
--- a/tools/binman/etype/cbfs.py
+++ b/tools/binman/etype/cbfs.py
@@ -276,7 +276,8 @@ class Entry_cbfs(Entry):
for entry in self._entries.values():
entry.ListEntries(entries, indent + 1)
- def GetEntries(self):
+ def GetEntries(self) -> dict[str, Entry]:
+ """Returns the entries (tree children) of this section"""
return self._entries
def ReadData(self, decomp=True, alt_format=None):
diff --git a/tools/binman/etype/fit.py b/tools/binman/etype/fit.py
index ed3cac4ee7e..db40479d30e 100644
--- a/tools/binman/etype/fit.py
+++ b/tools/binman/etype/fit.py
@@ -557,12 +557,15 @@ class Entry_fit(Entry_section):
Raises:
ValueError: Filename 'rsa2048.key' not found in input path
ValueError: Multiple key paths found
+ ValueError: 'dir/rsa2048' is a path not a filename
"""
def _find_keys_dir(node):
for subnode in node.subnodes:
if (subnode.name.startswith('signature') or
subnode.name.startswith('cipher')):
hint = subnode.props['key-name-hint'].value
+ if '/' in hint:
+ self.Raise(f"'{hint}' is a path not a filename")
name = tools.get_input_filename(
f"{hint}.key" if subnode.name.startswith('signature')
else f"{hint}.bin")
diff --git a/tools/binman/etype/mkimage.py b/tools/binman/etype/mkimage.py
index 6ae5d0c8a4f..75e59c3d3a3 100644
--- a/tools/binman/etype/mkimage.py
+++ b/tools/binman/etype/mkimage.py
@@ -205,7 +205,7 @@ class Entry_mkimage(Entry_section):
self.record_missing_bintool(self.mkimage)
return data
- def GetEntries(self):
+ def GetEntries(self) -> dict[str, Entry]:
# Make a copy so we don't change the original
entries = OrderedDict(self._entries)
if self._imagename:
diff --git a/tools/binman/etype/renesas_rcar4_sa0.py b/tools/binman/etype/renesas_rcar4_sa0.py
new file mode 100644
index 00000000000..3a7c0988fdc
--- /dev/null
+++ b/tools/binman/etype/renesas_rcar4_sa0.py
@@ -0,0 +1,46 @@
+# SPDX-License-Identifier: GPL-2.0+
+# Copyright 2025 Marek Vasut <marek.vasut+renesas@mailbox.org>
+#
+# Entry-type module for generating the Renesas R-Car Gen4 SA0 header.
+#
+
+import os
+import struct
+
+from binman.etype.section import Entry_section
+from dtoc import fdt_util
+from u_boot_pylib import tools
+
+RCAR_GEN4_SF_HEADER_SIZE = 0x40000
+RCAR_GEN4_SF_MAX_LOAD_SIZE = 0xec000
+
+class Entry_renesas_rcar4_sa0(Entry_section):
+ """Renesas R-Car Gen4 SA0 generator"""
+
+ def __init__(self, section, etype, node):
+ super().__init__(section, etype, node)
+ self.required_props = ['renesas,loader-address']
+
+ def ReadNode(self):
+ self.loader_address = fdt_util.GetInt(self._node, 'renesas,loader-address')
+ super().ReadNode()
+
+ def BuildSectionData(self, required):
+ data = super().BuildSectionData(required)[RCAR_GEN4_SF_HEADER_SIZE:]
+
+ # Offset 0x0000 / Value 0x00000000
+ header = struct.pack('<I', 0)
+ header += tools.get_bytes(0xff, 0x3008)
+ # Offset 0x300c / Value 0x00000000
+ header += struct.pack('<I', 0)
+ header += tools.get_bytes(0xff, 0x144)
+ # Offset 0x3154 / Value (payload load address)
+ header += struct.pack('<I', self.loader_address)
+ header += tools.get_bytes(0xff, 0x10c)
+ # Offset 0x3264 / Value (payload size in 4-byte words, aligned to 4k)
+ header += struct.pack('<I', int(tools.align(len(data), 0x1000) / 4))
+ header += tools.get_bytes(0xff, 0x3cd98)
+ if len(data) > RCAR_GEN4_SF_MAX_LOAD_SIZE:
+ self.Raise(f'SRAM data longer than {RCAR_GEN4_SF_MAX_LOAD_SIZE} Bytes')
+
+ return header + data
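
A minimal image description for this entry type might look as follows (the loader address and payload entry are illustrative; ``renesas,loader-address`` is the one required property)::

    binman {
        renesas-rcar4-sa0 {
            renesas,loader-address = <0xe6338000>;
            u-boot-spl {
            };
        };
    };

The generated 0x40000-byte header records the load address at offset 0x3154 and the payload size, in 32-bit words after 4KiB alignment, at offset 0x3264.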
diff --git a/tools/binman/etype/section.py b/tools/binman/etype/section.py
index 4c4c8c417f8..03c4f7c6ec7 100644
--- a/tools/binman/etype/section.py
+++ b/tools/binman/etype/section.py
@@ -189,7 +189,7 @@ class Entry_section(Entry):
self._sort = fdt_util.GetBool(self._node, 'sort-by-offset')
self._end_at_4gb = fdt_util.GetBool(self._node, 'end-at-4gb')
self._skip_at_start = fdt_util.GetInt(self._node, 'skip-at-start')
- if self._end_at_4gb:
+ if self._end_at_4gb and self.GetImage().copy_to_orig:
if not self.size:
self.Raise("Section size must be provided when using end-at-4gb")
if self._skip_at_start is not None:
@@ -263,6 +263,8 @@ class Entry_section(Entry):
super().AddMissingProperties(have_image_pos)
if self.compress != 'none':
have_image_pos = False
+ if self._end_at_4gb:
+ state.AddZeroProp(self._node, 'skip-at-start')
for entry in self._entries.values():
entry.AddMissingProperties(have_image_pos)
@@ -505,6 +507,8 @@ class Entry_section(Entry):
def SetCalculatedProperties(self):
super().SetCalculatedProperties()
+ if self._end_at_4gb:
+ state.SetInt(self._node, 'skip-at-start', self._skip_at_start)
for entry in self._entries.values():
entry.SetCalculatedProperties()
@@ -533,7 +537,7 @@ class Entry_section(Entry):
for entry in self._entries.values():
entry.WriteMap(fd, indent + 1)
- def GetEntries(self):
+ def GetEntries(self) -> dict[str, Entry]:
return self._entries
def GetContentsByPhandle(self, phandle, source_entry, required):
@@ -768,9 +772,17 @@ class Entry_section(Entry):
todo)
return True
- def drop_absent(self):
- """Drop entries which are absent"""
- self._entries = {n: e for n, e in self._entries.items() if not e.absent}
+ def drop_absent_optional(self) -> None:
+ """Drop entries which are absent.
+ Call for all nodes in the tree. Leaf nodes will do nothing per
+ definition. Sections however have _entries and should drop all children
+ which are absent.
+ """
+ self._entries = {n: e for n, e in self._entries.items() if not (e.absent and e.optional)}
+ # Drop nodes first before traversing children to avoid superfluous calls
+ # to children of absent nodes.
+ for e in self.GetEntries().values():
+ e.drop_absent_optional()
def _SetEntryOffsetSize(self, name, offset, size):
"""Set the offset and size of an entry
diff --git a/tools/binman/etype/ti_secure.py b/tools/binman/etype/ti_secure.py
index 420ee263e4f..f6caa0286d9 100644
--- a/tools/binman/etype/ti_secure.py
+++ b/tools/binman/etype/ti_secure.py
@@ -124,6 +124,7 @@ class Entry_ti_secure(Entry_x509_cert):
'OU': 'Processors',
'CN': 'TI Support',
'emailAddress': 'support@ti.com'}
+ self.debug = fdt_util.GetBool(self._node, 'debug', False)
def ReadFirewallNode(self):
self.firewall_cert_data['certificate'] = ""
diff --git a/tools/binman/etype/ti_secure_rom.py b/tools/binman/etype/ti_secure_rom.py
index f6fc3f90f84..7e90c655940 100644
--- a/tools/binman/etype/ti_secure_rom.py
+++ b/tools/binman/etype/ti_secure_rom.py
@@ -87,6 +87,7 @@ class Entry_ti_secure_rom(Entry_x509_cert):
'OU': 'Processors',
'CN': 'TI Support',
'emailAddress': 'support@ti.com'}
+ self.debug = fdt_util.GetBool(self._node, 'debug', False)
def NonCombinedGetCertificate(self, required):
"""Generate certificate for legacy boot flow
diff --git a/tools/binman/etype/u_boot_spl_pubkey_dtb.py b/tools/binman/etype/u_boot_spl_pubkey_dtb.py
index cb196061de2..3061c4bcdc4 100644
--- a/tools/binman/etype/u_boot_spl_pubkey_dtb.py
+++ b/tools/binman/etype/u_boot_spl_pubkey_dtb.py
@@ -87,6 +87,8 @@ class Entry_u_boot_spl_pubkey_dtb(Entry_blob_dtb):
dir=tools.get_output_dir())\
as pubkey_tdb:
tools.write_file(pubkey_tdb.name, self.GetData())
+ if '/' in self._key_name_hint:
+ self.Raise(f"'{self._key_name_hint}' is a path not a filename")
keyname = tools.get_input_filename(self._key_name_hint + ".crt")
self.fdt_add_pubkey.run(pubkey_tdb.name,
os.path.dirname(keyname),
diff --git a/tools/binman/etype/x509_cert.py b/tools/binman/etype/x509_cert.py
index 25e6808b7f9..b6e8b0b4fb0 100644
--- a/tools/binman/etype/x509_cert.py
+++ b/tools/binman/etype/x509_cert.py
@@ -52,6 +52,7 @@ class Entry_x509_cert(Entry_collection):
self.sysfw_inner_cert_ext_boot_block = None
self.dm_data_ext_boot_block = None
self.firewall_cert_data = None
+ self.debug = False
def ReadNode(self):
super().ReadNode()
@@ -114,7 +115,8 @@ class Entry_x509_cert(Entry_collection):
bootcore=self.bootcore,
bootcore_opts=self.bootcore_opts,
load_addr=self.load_addr,
- sha=self.sha
+ sha=self.sha,
+ debug=self.debug
)
elif type == 'rom-combined':
stdout = self.openssl.x509_cert_rom_combined(
@@ -140,7 +142,8 @@ class Entry_x509_cert(Entry_collection):
hashval_sysfw_data=self.hashval_sysfw_data,
sysfw_inner_cert_ext_boot_block=self.sysfw_inner_cert_ext_boot_block,
dm_data_ext_boot_block=self.dm_data_ext_boot_block,
- bootcore_opts=self.bootcore_opts
+ bootcore_opts=self.bootcore_opts,
+ debug=self.debug
)
if stdout is not None:
data = tools.read_file(output_fname)
diff --git a/tools/binman/fip_util_test.py b/tools/binman/fip_util_test.py
index 56aa56f4643..cb4001be020 100755
--- a/tools/binman/fip_util_test.py
+++ b/tools/binman/fip_util_test.py
@@ -22,6 +22,7 @@ sys.path.insert(2, os.path.join(OUR_PATH, '..'))
# pylint: disable=C0413
from binman import bintool
from binman import fip_util
+from u_boot_pylib import terminal
from u_boot_pylib import test_util
from u_boot_pylib import tools
@@ -215,7 +216,7 @@ toc_entry_t toc_entries[] = {
macros = fip_util.parse_macros(self._indir)
names = fip_util.parse_names(self._indir)
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
fip_util.create_code_output(macros, names)
self.assertIn(
"UUID 'UUID_TRUSTED_OS_FW_KEY_CERT' is not mentioned in tbbr_config.c file",
@@ -239,7 +240,7 @@ FIP_TYPE_LIST = [
] # end
blah de blah
''', binary=False)
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
fip_util.main(self.args, self.src_file)
self.assertIn('Needs update', stdout.getvalue())
@@ -256,7 +257,7 @@ FIP_TYPE_LIST = [
0x9d, 0xf3, 0x19, 0xed, 0xa1, 0x1f, 0x68, 0x01]),
] # end
blah blah''', binary=False)
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
fip_util.main(self.args, self.src_file)
self.assertIn('is up-to-date', stdout.getvalue())
@@ -269,7 +270,7 @@ blah blah''', binary=False)
args = self.args.copy()
args.remove('-D')
tools.write_file(self.src_file, '', binary=False)
- with test_util.capture_sys_output():
+ with terminal.capture():
fip_util.main(args, self.src_file)
@unittest.skipIf(not HAVE_FIPTOOL, 'No fiptool available')
@@ -389,7 +390,7 @@ Trusted Boot Firmware BL2: offset=0xC0, size=0xE, cmdline="--tb-fw"
def test_fiptool_errors(self):
"""Check some error reporting from fiptool"""
with self.assertRaises(Exception) as err:
- with test_util.capture_sys_output():
+ with terminal.capture():
FIPTOOL.create_bad()
self.assertIn("unrecognized option '--fred'", str(err.exception))
diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py
index fa174900014..a90db3c9351 100644
--- a/tools/binman/ftest.py
+++ b/tools/binman/ftest.py
@@ -36,6 +36,7 @@ from binman.etype import fdtmap
from binman.etype import image_header
from binman.image import Image
from u_boot_pylib import command
+from u_boot_pylib import terminal
from u_boot_pylib import test_util
from u_boot_pylib import tools
from u_boot_pylib import tout
@@ -83,10 +84,12 @@ FILES_DATA = (b"sorry I'm late\nOh, don't bother apologising, I'm " +
b"sorry you're alive\n")
COMPRESS_DATA = b'compress xxxxxxxxxxxxxxxxxxxxxx data'
COMPRESS_DATA_BIG = COMPRESS_DATA * 2
+MISSING_DATA = b'missing'
REFCODE_DATA = b'refcode'
FSP_M_DATA = b'fsp_m'
FSP_S_DATA = b'fsp_s'
FSP_T_DATA = b'fsp_t'
+ATF_BL1_DATA = b'bl1'
ATF_BL31_DATA = b'bl31'
TEE_OS_DATA = b'this is some tee OS data'
TI_DM_DATA = b'tidmtidm'
@@ -225,6 +228,7 @@ class TestFunctional(unittest.TestCase):
TestFunctional._MakeInputFile('compress', COMPRESS_DATA)
TestFunctional._MakeInputFile('compress_big', COMPRESS_DATA_BIG)
+ TestFunctional._MakeInputFile('bl1.bin', ATF_BL1_DATA)
TestFunctional._MakeInputFile('bl31.bin', ATF_BL31_DATA)
TestFunctional._MakeInputFile('tee-pager.bin', TEE_OS_DATA)
TestFunctional._MakeInputFile('dm.bin', TI_DM_DATA)
@@ -247,7 +251,7 @@ class TestFunctional(unittest.TestCase):
# ATF and OP_TEE
TestFunctional._MakeInputFile('bl31.elf',
tools.read_file(cls.ElfTestFile('elf_sections')))
- TestFunctional._MakeInputFile('tee.elf',
+ TestFunctional.tee_elf_path = TestFunctional._MakeInputFile('tee.elf',
tools.read_file(cls.ElfTestFile('elf_sections')))
# Newer OP_TEE file in v1 binary format
@@ -273,7 +277,7 @@ class TestFunctional(unittest.TestCase):
@classmethod
def setup_test_args(cls, preserve_indir=False, preserve_outdirs=False,
- toolpath=None, verbosity=None):
+ toolpath=None, verbosity=None, no_capture=False):
"""Accept arguments controlling test execution
Args:
@@ -282,12 +286,13 @@ class TestFunctional(unittest.TestCase):
preserve_outdir: Preserve the output directories used by tests. Each
test has its own, so this is normally only useful when running a
single test.
- toolpath: ist of paths to use for tools
+ toolpath: list of paths to use for tools
"""
cls.preserve_indir = preserve_indir
cls.preserve_outdirs = preserve_outdirs
cls.toolpath = toolpath
cls.verbosity = verbosity
+ cls.no_capture = no_capture
def _CheckBintool(self, bintool):
if not bintool.is_present():
@@ -510,9 +515,9 @@ class TestFunctional(unittest.TestCase):
return dtb.GetContents()
def _DoReadFileDtb(self, fname, use_real_dtb=False, use_expanded=False,
- verbosity=None, map=False, update_dtb=False,
- entry_args=None, reset_dtbs=True, extra_indirs=None,
- threads=None):
+ verbosity=None, allow_fake_blobs=True, map=False,
+ update_dtb=False, entry_args=None, reset_dtbs=True,
+ extra_indirs=None, threads=None):
"""Run binman and return the resulting image
This runs binman with a given test file and then reads the resulting
@@ -530,6 +535,7 @@ class TestFunctional(unittest.TestCase):
use_expanded: True to use expanded entries where available, e.g.
'u-boot-expanded' instead of 'u-boot'
verbosity: Verbosity level to use (0-3, None=don't set it)
+ allow_fake_blobs: whether binman should fake missing ext blobs
map: True to output map files for the images
update_dtb: Update the offset and size of each entry in the device
tree before packing it into the image
@@ -567,7 +573,7 @@ class TestFunctional(unittest.TestCase):
retcode = self._DoTestFile(fname, map=map, update_dtb=update_dtb,
entry_args=entry_args, use_real_dtb=use_real_dtb,
use_expanded=use_expanded, verbosity=verbosity,
- extra_indirs=extra_indirs,
+ allow_fake_blobs=allow_fake_blobs, extra_indirs=extra_indirs,
threads=threads)
self.assertEqual(0, retcode)
out_dtb_fname = tools.get_output_filename('u-boot.dtb.out')
@@ -1796,14 +1802,14 @@ class TestFunctional(unittest.TestCase):
def testEntryDocs(self):
"""Test for creation of entry documentation"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
control.WriteEntryDocs(control.GetEntryModules())
self.assertTrue(len(stdout.getvalue()) > 0)
def testEntryDocsMissing(self):
"""Test handling of missing entry documentation"""
with self.assertRaises(ValueError) as e:
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
control.WriteEntryDocs(control.GetEntryModules(), 'u_boot')
self.assertIn('Documentation is missing for modules: u_boot',
str(e.exception))
@@ -1918,7 +1924,7 @@ class TestFunctional(unittest.TestCase):
entry_args = {
'keydir': 'devkeys',
}
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoTestFile('071_gbb.dts', force_missing_bintools='futility',
entry_args=entry_args)
err = stderr.getvalue()
@@ -2014,7 +2020,7 @@ class TestFunctional(unittest.TestCase):
entry_args = {
'keydir': 'devkeys',
}
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoTestFile('074_vblock.dts',
force_missing_bintools='futility',
entry_args=entry_args)
@@ -2058,7 +2064,7 @@ class TestFunctional(unittest.TestCase):
# We should only get the expected message in verbose mode
for verbosity in (0, 2):
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
retcode = self._DoTestFile('006_dual_image.dts',
verbosity=verbosity,
images=['image2'])
@@ -2247,7 +2253,7 @@ class TestFunctional(unittest.TestCase):
def testExtendSizeBad(self):
"""Test an extending entry which fails to provide contents"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
with self.assertRaises(ValueError) as e:
self._DoReadFileDtb('089_extend_size_bad.dts', map=True)
self.assertIn("Node '/binman/_testing': Cannot obtain contents when "
@@ -2376,7 +2382,7 @@ class TestFunctional(unittest.TestCase):
def testPackOverlapMap(self):
"""Test that overlapping regions are detected"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
with self.assertRaises(ValueError) as e:
self._DoTestFile('014_pack_overlap.dts', map=True)
map_fname = tools.get_output_filename('image.map')
@@ -2570,7 +2576,7 @@ class TestFunctional(unittest.TestCase):
def testIfwiMissing(self):
"""Test that binman still produces an image if ifwitool is missing"""
self._SetupIfwi('fitimage.bin')
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoTestFile('111_x86_rom_ifwi.dts',
force_missing_bintools='ifwitool')
err = stderr.getvalue()
@@ -2914,7 +2920,7 @@ class TestFunctional(unittest.TestCase):
tmpdir = None
try:
tmpdir, updated_fname = self._SetupImageInTmpdir()
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoBinman('ls', '-i', updated_fname)
finally:
if tmpdir:
@@ -3078,7 +3084,7 @@ class TestFunctional(unittest.TestCase):
tmpdir = None
try:
tmpdir, updated_fname = self._SetupImageInTmpdir()
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoBinman('extract', '-i', updated_fname, 'u-boot',
'-f', fname)
finally:
@@ -3729,7 +3735,7 @@ class TestFunctional(unittest.TestCase):
u_boot_fname1 = os.path.join(outdir, 'u-boot')
os.remove(u_boot_fname1)
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
control.ReplaceEntries(updated_fname, None, outdir, [])
self.assertIn("Skipping entry '/u-boot' from missing file",
stderr.getvalue())
@@ -3870,7 +3876,7 @@ class TestFunctional(unittest.TestCase):
def testMkimageMissing(self):
"""Test that binman still produces an image if mkimage is missing"""
self._SetupSplElf()
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoTestFile('156_mkimage.dts',
force_missing_bintools='mkimage')
err = stderr.getvalue()
@@ -3890,7 +3896,7 @@ class TestFunctional(unittest.TestCase):
def testExtblobMissingOk(self):
"""Test an image with an missing external blob that is allowed"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
ret = self._DoTestFile('158_blob_ext_missing.dts',
allow_missing=True)
self.assertEqual(103, ret)
@@ -3901,7 +3907,7 @@ class TestFunctional(unittest.TestCase):
def testExtblobMissingOkFlag(self):
"""Test an image with an missing external blob allowed with -W"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
ret = self._DoTestFile('158_blob_ext_missing.dts',
allow_missing=True, ignore_missing=True)
self.assertEqual(0, ret)
@@ -3912,7 +3918,7 @@ class TestFunctional(unittest.TestCase):
def testExtblobMissingOkSect(self):
"""Test an image with an missing external blob that is allowed"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('159_blob_ext_missing_sect.dts',
allow_missing=True)
err = stderr.getvalue()
@@ -3920,7 +3926,7 @@ class TestFunctional(unittest.TestCase):
def testPackX86RomMeMissingDesc(self):
"""Test that an missing Intel descriptor entry is allowed"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('164_x86_rom_me_missing.dts', allow_missing=True)
err = stderr.getvalue()
self.assertRegex(err, "Image 'image'.*missing.*: intel-descriptor")
@@ -3930,7 +3936,7 @@ class TestFunctional(unittest.TestCase):
self._SetupIfwi('fitimage.bin')
pathname = os.path.join(self._indir, 'fitimage.bin')
os.remove(pathname)
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('111_x86_rom_ifwi.dts', allow_missing=True)
err = stderr.getvalue()
self.assertRegex(err, "Image 'image'.*missing.*: intel-ifwi")
@@ -4013,7 +4019,7 @@ class TestFunctional(unittest.TestCase):
self.assertEqual({
'image-pos': 0,
'offset': 0,
- 'size': 1890,
+ 'size': 1378,
'u-boot:image-pos': 0,
'u-boot:offset': 0,
@@ -4021,7 +4027,7 @@ class TestFunctional(unittest.TestCase):
'fit:image-pos': 4,
'fit:offset': 4,
- 'fit:size': 1840,
+ 'fit:size': 1328,
'fit/images/kernel:image-pos': 304,
'fit/images/kernel:offset': 300,
@@ -4039,8 +4045,8 @@ class TestFunctional(unittest.TestCase):
'fit/images/fdt-1/u-boot-spl-dtb:offset': 0,
'fit/images/fdt-1/u-boot-spl-dtb:size': 6,
- 'u-boot-nodtb:image-pos': 1844,
- 'u-boot-nodtb:offset': 1844,
+ 'u-boot-nodtb:image-pos': 1332,
+ 'u-boot-nodtb:offset': 1332,
'u-boot-nodtb:size': 46,
}, props)
@@ -4152,7 +4158,7 @@ class TestFunctional(unittest.TestCase):
def testFitMissingOK(self):
"""Test that binman still produces a FIT image if mkimage is missing"""
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoTestFile('162_fit_external.dts', allow_missing=True,
force_missing_bintools='mkimage')
err = stderr.getvalue()
@@ -4226,7 +4232,7 @@ class TestFunctional(unittest.TestCase):
def testFitExtblobMissingOk(self):
"""Test a FIT with a missing external blob that is allowed"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('168_fit_missing_blob.dts',
allow_missing=True)
err = stderr.getvalue()
@@ -4395,7 +4401,7 @@ class TestFunctional(unittest.TestCase):
control.missing_blob_help = control._ReadMissingBlobHelp()
control.missing_blob_help['wibble'] = 'Wibble test'
control.missing_blob_help['another'] = 'Another test'
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('168_fit_missing_blob.dts',
allow_missing=True)
err = stderr.getvalue()
@@ -4664,7 +4670,7 @@ class TestFunctional(unittest.TestCase):
def testLz4Missing(self):
"""Test that binman still produces an image if lz4 is missing"""
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoTestFile('185_compress_section.dts',
force_missing_bintools='lz4')
err = stderr.getvalue()
@@ -5061,7 +5067,7 @@ class TestFunctional(unittest.TestCase):
def testTiming(self):
"""Test output of timing information"""
data = self._DoReadFile('055_sections.dts')
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
state.TimingShow()
self.assertIn('read:', stdout.getvalue())
self.assertIn('compress:', stdout.getvalue())
@@ -5156,7 +5162,7 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(version, state.GetVersion(self._indir))
with self.assertRaises(SystemExit):
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoBinman('-V')
self.assertEqual('Binman %s\n' % version, stderr.getvalue())
@@ -5176,7 +5182,7 @@ class TestFunctional(unittest.TestCase):
try:
tmpdir, updated_fname = self._SetupImageInTmpdir()
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
self._DoBinman('extract', '-i', updated_fname, '-F', 'list')
self.assertEqual(
'''Flag (-F) Entry type Description
@@ -5206,21 +5212,23 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testExtblobList(self):
"""Test an image with an external blob list"""
- data = self._DoReadFile('215_blob_ext_list.dts')
- self.assertEqual(REFCODE_DATA + FSP_M_DATA, data)
+ data = self._DoReadFileDtb('215_blob_ext_list.dts',
+ allow_fake_blobs=False)
+ self.assertEqual(REFCODE_DATA + FSP_M_DATA, data[0])
def testExtblobListMissing(self):
"""Test an image with a missing external blob"""
with self.assertRaises(ValueError) as e:
- self._DoReadFile('216_blob_ext_list_missing.dts')
+ self._DoReadFileDtb('216_blob_ext_list_missing.dts',
+ allow_fake_blobs=False)
self.assertIn("Filename 'missing-file' not found in input path",
str(e.exception))
def testExtblobListMissingOk(self):
"""Test an image with an missing external blob that is allowed"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('216_blob_ext_list_missing.dts',
- allow_missing=True)
+ allow_missing=True, allow_fake_blobs=False)
err = stderr.getvalue()
self.assertRegex(err, "Image 'image'.*missing.*: blob-ext")
@@ -5295,7 +5303,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
tmpdir = None
try:
tmpdir, updated_fname = self._SetupImageInTmpdir()
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoBinman('ls', '-i', updated_fname)
finally:
if tmpdir:
@@ -5378,7 +5386,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self.assertEqual(True, fent.valid)
def testFipMissing(self):
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('209_fip_missing.dts', allow_missing=True)
err = stderr.getvalue()
self.assertRegex(err, "Image 'image'.*missing.*: rmm-fw")
@@ -5432,7 +5440,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFakeBlob(self):
"""Test handling of faking an external blob"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('217_fake_blob.dts', allow_missing=True,
allow_fake_blobs=True)
err = stderr.getvalue()
@@ -5442,7 +5450,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testExtblobListFaked(self):
"""Test an extblob with missing external blob that are faked"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('216_blob_ext_list_missing.dts',
allow_fake_blobs=True)
err = stderr.getvalue()
@@ -5450,7 +5458,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testListBintools(self):
args = ['tool', '--list']
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
self._DoBinman(*args)
out = stdout.getvalue().splitlines()
self.assertTrue(len(out) >= 2)
@@ -5474,20 +5482,20 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
args = ['tool', '--fetch', '_testing']
with unittest.mock.patch.object(tools, 'download',
side_effect=fail_download):
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
self._DoBinman(*args)
self.assertIn('failed to fetch with all methods', stdout.getvalue())
def testBintoolDocs(self):
"""Test for creation of bintool documentation"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
control.write_bintool_docs(control.bintool.Bintool.get_tool_list())
self.assertTrue(len(stdout.getvalue()) > 0)
def testBintoolDocsMissing(self):
"""Test handling of missing bintool documentation"""
with self.assertRaises(ValueError) as e:
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
control.write_bintool_docs(
control.bintool.Bintool.get_tool_list(), 'mkimage')
self.assertIn('Documentation is missing for modules: mkimage',
@@ -5507,7 +5515,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
tmpdir = None
try:
tmpdir, updated_fname = self._SetupImageInTmpdir()
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._RunBinman('ls', '-i', updated_fname)
finally:
if tmpdir:
@@ -5532,7 +5540,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
entry_args = {
'keydir': 'devkeys',
}
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoTestFile('220_fit_subentry_bintool.dts',
force_missing_bintools='futility', entry_args=entry_args)
err = stderr.getvalue()
@@ -5573,6 +5581,22 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
data = self._DoReadFile('225_ti_dm.dts')
self.assertEqual(TI_DM_DATA, data[:len(TI_DM_DATA)])
+ def testPackBl1(self):
+ """test if an image with a bl1 binary can be created"""
+ data = self._DoReadFile('347_bl1.dts')
+ self.assertEqual(ATF_BL1_DATA, data[:len(ATF_BL1_DATA)])
+
+ def testRenesasRCarGen4SA0Image(self):
+ """Test that binman can produce an Renesas R-Car Gen4 SA0 image"""
+ self._DoTestFile('348_renesas_rcar4_sa0.dts')
+
+ def testRenesasRCarGen4SA0ImageSize(self):
+ """Test that binman can not produce large Renesas R-Car Gen4 SA0 image"""
+ with self.assertRaises(ValueError) as exc:
+ self._DoTestFile('349_renesas_rcar4_sa0_size.dts')
+ self.assertIn("Node '/binman/renesas-rcar4-sa0': SRAM data longer than 966656 Bytes",
+ str(exc.exception))
+
def testFitFdtOper(self):
"""Check handling of a specified FIT operation"""
entry_args = {
@@ -5729,7 +5753,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
'tee-os-path': 'missing.elf',
}
test_subdir = os.path.join(self._indir, TEST_FDT_SUBDIR)
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile(
'226_fit_split_elf.dts', entry_args=entry_args,
extra_indirs=[test_subdir], verbosity=3, **kwargs)
@@ -5757,10 +5781,10 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFitSplitElfMissing(self):
- """Test an split-elf FIT with a missing ELF file"""
+ """Test an split-elf FIT with a missing ELF file. Don't fake the file."""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
- out, err = self.checkFitSplitElf(allow_missing=True)
+ out, err = self.checkFitSplitElf(allow_missing=True, allow_fake_blobs=False)
self.assertRegex(
err,
"Image '.*' is missing external blobs and is non-functional: .*")
@@ -5784,7 +5808,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testMkimageMissingBlob(self):
"""Test using mkimage to build an image"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('229_mkimage_missing.dts', allow_missing=True,
allow_fake_blobs=True)
err = stderr.getvalue()
@@ -6449,16 +6473,18 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testAbsent(self):
"""Check handling of absent entries"""
data = self._DoReadFile('262_absent.dts')
- self.assertEqual(U_BOOT_DATA + U_BOOT_IMG_DATA, data)
+ self.assertEqual(U_BOOT_DATA + b'aa' + U_BOOT_IMG_DATA, data)
- def testPackTeeOsOptional(self):
- """Test that an image with an optional TEE binary can be created"""
+ def testPackTeeOsElf(self):
+ """Test that an image with a TEE elf binary can be created"""
entry_args = {
'tee-os-path': 'tee.elf',
}
+ tee_path = self.tee_elf_path
data = self._DoReadFileDtb('263_tee_os_opt.dts',
entry_args=entry_args)[0]
- self.assertEqual(U_BOOT_DATA + U_BOOT_IMG_DATA, data)
+ self.assertEqual(U_BOOT_DATA + tools.read_file(tee_path) +
+ U_BOOT_IMG_DATA, data)
def checkFitTee(self, dts, tee_fname):
"""Check that a tee-os entry works and returns data
@@ -6497,12 +6523,15 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
fdt_util.fdt32_to_cpu(node.props['entry'].value))
self.assertEqual(U_BOOT_DATA, node.props['data'].bytes)
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self.checkFitTee('264_tee_os_opt_fit.dts', '')
err = stderr.getvalue()
self.assertRegex(
err,
"Image '.*' is missing optional external blobs but is still functional: tee-os")
+ self.assertNotRegex(
+ err,
+ "Image '.*' has faked external blobs and is non-functional: tee-os")
def testFitTeeOsOptionalFitBad(self):
"""Test an image with a FIT with an optional OP-TEE binary"""
@@ -6528,15 +6557,26 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
"Node '/binman/fit/images/@tee-SEQ/tee-os': Invalid OP-TEE file: size mismatch (expected 0x4, have 0xe)",
str(exc.exception))
- def testExtblobOptional(self):
+ def testExtblobMissingOptional(self):
"""Test an image with an external blob that is optional"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
+ data = self._DoReadFileDtb('266_blob_ext_opt.dts',
+ allow_fake_blobs=False)[0]
+ self.assertEqual(REFCODE_DATA, data)
+ self.assertNotIn(MISSING_DATA, data)
+
+ def testExtblobFakedOptional(self):
+ """Test an image with an external blob that is optional"""
+ with terminal.capture() as (stdout, stderr):
data = self._DoReadFile('266_blob_ext_opt.dts')
self.assertEqual(REFCODE_DATA, data)
err = stderr.getvalue()
self.assertRegex(
err,
"Image '.*' is missing optional external blobs but is still functional: missing")
+ self.assertNotRegex(
+ err,
+ "Image '.*' has faked external blobs and is non-functional: missing")
def testSectionInner(self):
"""Test an inner section with a size"""
@@ -6686,7 +6726,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
'tee-os-path': 'missing.bin',
}
test_subdir = os.path.join(self._indir, TEST_FDT_SUBDIR)
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
data = self._DoReadFileDtb(
'276_fit_firmware_loadables.dts',
entry_args=entry_args,
@@ -6717,12 +6757,12 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
node = dtb.GetNode('/configurations/conf-missing-tee-1')
self.assertEqual('atf-1', node.props['firmware'].value)
- self.assertEqual(['u-boot', 'atf-2'],
+ self.assertEqual(['u-boot', 'tee', 'atf-2'],
fdt_util.GetStringList(node, 'loadables'))
def testTooldir(self):
"""Test that we can specify the tooldir"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self.assertEqual(0, self._DoBinman('--tooldir', 'fred',
'tool', '-l'))
self.assertEqual('fred', bintool.Bintool.tooldir)
@@ -6731,7 +6771,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self.assertEqual(['fred'], tools.tool_search_paths)
# Try with a few toolpaths; the tooldir should be at the end
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self.assertEqual(0, self._DoBinman(
'--toolpath', 'mary', '--toolpath', 'anna', '--tooldir', 'fred',
'tool', '-l'))
@@ -6836,7 +6876,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
entry_args = {
'keyfile': 'keyfile',
}
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoTestFile('279_x509_cert.dts',
force_missing_bintools='openssl',
entry_args=entry_args)
@@ -6850,7 +6890,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testMkimageMissingBlobMultiple(self):
"""Test missing blob with mkimage entry and multiple-data-files"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('292_mkimage_missing_multiple.dts', allow_missing=True)
err = stderr.getvalue()
self.assertIn("is missing external blobs and is non-functional", err)
@@ -7196,7 +7236,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
entry_args = {
'keyfile': keyfile,
}
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoTestFile('296_ti_secure.dts',
force_missing_bintools='openssl',
entry_args=entry_args)
@@ -7281,6 +7321,13 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
tools.to_bytes(''.join(node.props['key'].value)))
self.assertNotIn('key-source', node.props)
+ def testKeyNameHintIsPathSplPubkeyDtb(self):
+ """Test that binman errors out on key-name-hint being a path"""
+ with self.assertRaises(ValueError) as e:
+ self._DoReadFile('348_key_name_hint_dir_spl_pubkey_dtb.dts')
+ self.assertIn(
+ 'Node \'/binman/u-boot-spl-pubkey-dtb\': \'keys/key\' is a path not a filename',
+ str(e.exception))
def testSplPubkeyDtb(self):
"""Test u_boot_spl_pubkey_dtb etype"""
@@ -7372,7 +7419,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self._MakeInputFile("ssk.pem", data)
self._SetupPmuFwlElf()
self._SetupSplElf()
- with test_util.capture_sys_output() as (_, stderr):
+ with terminal.capture() as (_, stderr):
self._DoTestFile('307_xilinx_bootgen_sign.dts',
force_missing_bintools='bootgen')
err = stderr.getvalue()
@@ -7575,7 +7622,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def test_assume_size_ok(self):
"""Test handling of the assume-size where it fits OK"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('327_assume_size_ok.dts', allow_missing=True,
allow_fake_blobs=True)
err = stderr.getvalue()
@@ -7585,7 +7632,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def test_assume_size_no_fake(self):
"""Test handling of the assume-size where it fits OK"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self._DoTestFile('327_assume_size_ok.dts', allow_missing=True)
err = stderr.getvalue()
self.assertRegex(
@@ -7817,7 +7864,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testMkeficapsuleMissingOk(self):
"""Test that binman deals with mkeficapsule being missing"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
ret = self._DoTestFile('311_capsule.dts',
force_missing_bintools='mkeficapsule',
allow_missing=True)
@@ -7842,7 +7889,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testSymbolsCompressed(self):
"""Test binman complains about symbols from a compressed section"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self.checkSymbols('338_symbols_comp.dts', U_BOOT_SPL_DATA, None)
out = stdout.getvalue()
self.assertIn('Symbol-writing: no value for /binman/section/u-boot',
@@ -7954,6 +8001,24 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
entry_args=entry_args,
extra_indirs=[test_subdir])[0]
+ def testKeyNameHintIsPathSimpleFit(self):
+ """Test that binman errors out on key-name-hint being a path"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
+ entry_args = {
+ 'of-list': 'test-fdt1',
+ 'default-dt': 'test-fdt1',
+ 'atf-bl31-path': 'bl31.elf',
+ }
+ test_subdir = os.path.join(self._indir, TEST_FDT_SUBDIR)
+ with self.assertRaises(ValueError) as e:
+ self._DoReadFileDtb(
+ '347_key_name_hint_dir_fit_signature.dts',
+ entry_args=entry_args,
+ extra_indirs=[test_subdir])
+ self.assertIn(
+ 'Node \'/binman/fit\': \'keys/rsa2048\' is a path not a filename',
+ str(e.exception))
def testSimpleFitEncryptedData(self):
"""Test an image with a FIT containing data to be encrypted"""
@@ -8011,5 +8076,29 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self._DoTestFile('346_remove_template.dts',
force_missing_bintools='openssl',)
+ def testBootphPropagation(self):
+ """Test that bootph-* properties are propagated correctly to supernodes"""
+ _, _, _, out_dtb_fname = self._DoReadFileDtb(
+ '347_bootph_prop.dts', use_real_dtb=True, update_dtb=True)
+ dtb = fdt.Fdt(out_dtb_fname)
+ dtb.Scan()
+ root = dtb.GetRoot()
+ parent_node = root.FindNode('dummy-parent')
+ subnode1 = parent_node.FindNode('subnode-1')
+ subnode2 = subnode1.FindNode('subnode-2')
+ subnode3 = subnode1.FindNode('subnode-3')
+ subnode4 = subnode3.FindNode('subnode-4')
+
+ self.assertIn('bootph-some-ram', subnode1.props,
+ "Child node is missing 'bootph-some-ram' property")
+ self.assertIn('bootph-all', subnode1.props,
+ "Child node is missing 'bootph-all' property")
+ self.assertIn('bootph-some-ram', parent_node.props,
+ "Parent node is missing 'bootph-some-ram' property")
+ self.assertIn('bootph-all', parent_node.props,
+ "Parent node is missing 'bootph-all' property")
+ self.assertEqual(len(subnode4.props), 0,
+ "subnode shouldn't have any properties")
+
if __name__ == "__main__":
unittest.main()
diff --git a/tools/binman/image.py b/tools/binman/image.py
index 24ce0af7c72..698cfa4148e 100644
--- a/tools/binman/image.py
+++ b/tools/binman/image.py
@@ -183,6 +183,8 @@ class Image(section.Entry_section):
fname = tools.get_output_filename(self._filename)
tout.info("Writing image to '%s'" % fname)
with open(fname, 'wb') as fd:
+ # For final image, don't write absent blobs to file
+ self.drop_absent_optional()
data = self.GetPaddedData()
fd.write(data)
tout.info("Wrote %#x bytes" % len(data))
diff --git a/tools/binman/image_test.py b/tools/binman/image_test.py
index 7d65e2d589a..26e161c91fc 100644
--- a/tools/binman/image_test.py
+++ b/tools/binman/image_test.py
@@ -7,7 +7,7 @@
import unittest
from binman.image import Image
-from u_boot_pylib.test_util import capture_sys_output
+from u_boot_pylib import terminal
class TestImage(unittest.TestCase):
def testInvalidFormat(self):
@@ -29,7 +29,7 @@ class TestImage(unittest.TestCase):
def testMissingSymbolOptional(self):
image = Image('name', 'node', test=True)
image._entries = {}
- with capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
val = image.GetSymbolValue('_binman_type_prop_pname', True, 'msg', 0)
self.assertEqual(val, None)
self.assertEqual("Warning: msg: Entry 'type' not found in list ()\n",
diff --git a/tools/binman/main.py b/tools/binman/main.py
index 326f5c93155..fa5ad79ca0e 100755
--- a/tools/binman/main.py
+++ b/tools/binman/main.py
@@ -77,8 +77,8 @@ def RunTests(debug, verbosity, processes, test_preserve_dirs, args, toolpath):
# Run the entry tests first, since these need to be the first to import the
# 'entry' module.
result = test_util.run_test_suites(
- 'binman', debug, verbosity, test_preserve_dirs, processes, test_name,
- toolpath,
+ 'binman', debug, verbosity, False, test_preserve_dirs, processes,
+ test_name, toolpath,
[bintool_test.TestBintool, entry_test.TestEntry, ftest.TestFunctional,
fdt_test.TestFdt, elf_test.TestElf, image_test.TestImage,
cbfs_util_test.TestCbfs, fip_util_test.TestFip])
diff --git a/tools/binman/missing-blob-help b/tools/binman/missing-blob-help
index ab0023eb9fb..d2ed35bef4d 100644
--- a/tools/binman/missing-blob-help
+++ b/tools/binman/missing-blob-help
@@ -14,15 +14,6 @@ atf-bl31-sunxi:
Please read the section on ARM Trusted Firmware (ATF) in
board/sunxi/README.sunxi64
-scp-sunxi:
-SCP firmware is required for system suspend, but is otherwise optional.
-Please read the section on SCP firmware in board/sunxi/README.sunxi64
-
-iot2050-seboot:
-See the documentation for IOT2050 board. Your image is missing SEBoot
-which is mandatory for board startup. Prebuilt SEBoot located at
-meta-iot2050/tree/master/recipes-bsp/u-boot/files/prebuild/seboot_pg*.bin.
-
iot2050-otpcmd:
See the documentation for IOT2050 board. Your image is missing OTP command data
block which is used for provisioning the customer keys to the board.
@@ -31,22 +22,62 @@ meta-iot2050/tree/master/recipes-bsp/secure-boot-otp-provisioning/files/make-otp
for how to generate this binary. If you are not using secure boot or do not
intend to provision the keys, disable CONFIG_IOT2050_EMBED_OTPCMD.
+iot2050-seboot:
+See the documentation for IOT2050 board. Your image is missing SEBoot
+which is mandatory for board startup. A prebuilt SEBoot is located at
+meta-iot2050/tree/master/recipes-bsp/u-boot/files/prebuild/seboot_pg*.bin.
+
k3-rti-wdt-firmware:
If CONFIG_WDT_K3_RTI_LOAD_FW is enabled, a firmware image is needed for
the R5F core(s) to trigger the system reset. One possible source is
https://github.com/siemens/k3-rti-wdt.
+opensbi:
+See the documentation for your board. The OpenSBI git repo is at
+https://github.com/riscv/opensbi.git
+You may need to build fw_dynamic.bin first and re-build u-boot with
+OPENSBI=/path/to/fw_dynamic.bin
+
rockchip-tpl:
An external TPL is required to initialize DRAM. Get the external TPL
binary and build with ROCKCHIP_TPL=/path/to/ddr.bin. One possible source
for the external TPL binary is https://github.com/rockchip-linux/rkbin.
+scp-sunxi:
+SCP firmware is required for system suspend, but is otherwise optional.
+Please read the section on SCP firmware in board/sunxi/README.sunxi64
+
+sysfw-inner-cert:
+You are missing the inner certificate for TI's Foundational Security (TIFS)
+firmware which is critical to authenticating the TIFS firmware during boot.
+HS-FS and HS-SE parts will not boot without this certificate.
+
+Have a look at your board's documentation to find the latest TIFS
+certificate blobs and how to include them in the build.
+
+ https://docs.u-boot.org/en/latest/board/ti/k3.html
+
tee-os:
See the documentation for your board. You may need to build Open Portable
Trusted Execution Environment (OP-TEE) and build with TEE=/path/to/tee.bin
-opensbi:
-See the documentation for your board. The OpenSBI git repo is at
-https://github.com/riscv/opensbi.git
-You may need to build fw_dynamic.bin first and re-build u-boot with
-OPENSBI=/path/to/fw_dynamic.bin
+ti-dm:
+You are missing TI's Device Management (DM) firmware, which is critical to
+providing resource and power management services for your board. Your board
+will not boot without this firmware.
+
+Have a look at your board's documentation to find the latest version of
+the DM firmware binary and how to include it in the build.
+
+ https://docs.u-boot.org/en/latest/board/ti/k3.html
+
+ti-fs-enc.bin:
+You are missing TI's Foundational Security (TIFS) firmware, which is
+critical to providing foundational security services such as authenticated
+boot and firewall management for the SoC. Your board will not boot without
+this firmware.
+
+Have a look at your board's documentation to find the latest version of the
+TIFS firmware binary and how to include it in the build.
+
+ https://docs.u-boot.org/en/latest/board/ti/k3.html
diff --git a/tools/binman/test/170_fit_fdt.dts b/tools/binman/test/170_fit_fdt.dts
index 0197ffd1597..4b1e9b41ec0 100644
--- a/tools/binman/test/170_fit_fdt.dts
+++ b/tools/binman/test/170_fit_fdt.dts
@@ -15,6 +15,20 @@
fit,fdt-list = "of-list";
images {
+ atf {
+ description = "atf firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
+ uboot {
+ description = "U-Boot firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
kernel {
description = "Vanilla Linux kernel";
type = "kernel";
diff --git a/tools/binman/test/220_fit_subentry_bintool.dts b/tools/binman/test/220_fit_subentry_bintool.dts
index 6e29d41eeb3..b1d8fb0feae 100644
--- a/tools/binman/test/220_fit_subentry_bintool.dts
+++ b/tools/binman/test/220_fit_subentry_bintool.dts
@@ -12,7 +12,7 @@
#address-cells = <1>;
images {
- test {
+ kernel {
description = "Something using a bintool";
type = "kernel";
arch = "arm";
diff --git a/tools/binman/test/223_fit_fdt_oper.dts b/tools/binman/test/223_fit_fdt_oper.dts
index e630165acf4..cb3b31e36f6 100644
--- a/tools/binman/test/223_fit_fdt_oper.dts
+++ b/tools/binman/test/223_fit_fdt_oper.dts
@@ -15,6 +15,20 @@
fit,fdt-list = "of-list";
images {
+ atf {
+ description = "atf firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
+ uboot {
+ description = "U-Boot firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
kernel {
description = "Vanilla Linux kernel";
type = "kernel";
diff --git a/tools/binman/test/284_fit_fdt_list.dts b/tools/binman/test/284_fit_fdt_list.dts
index 8885313f5b8..70cdb326708 100644
--- a/tools/binman/test/284_fit_fdt_list.dts
+++ b/tools/binman/test/284_fit_fdt_list.dts
@@ -15,6 +15,20 @@
fit,fdt-list-val = "test-fdt1", "test-fdt2";
images {
+ atf {
+ description = "atf firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
+ uboot {
+ description = "U-Boot firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
kernel {
description = "Vanilla Linux kernel";
type = "kernel";
diff --git a/tools/binman/test/333_fit_fdt_dir.dts b/tools/binman/test/333_fit_fdt_dir.dts
index aa778451a4b..71971de4232 100644
--- a/tools/binman/test/333_fit_fdt_dir.dts
+++ b/tools/binman/test/333_fit_fdt_dir.dts
@@ -15,6 +15,20 @@
fit,fdt-list-dir = "fdts";
images {
+ atf {
+ description = "atf firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
+ uboot {
+ description = "U-Boot firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
kernel {
description = "Vanilla Linux kernel";
type = "kernel";
diff --git a/tools/binman/test/334_fit_fdt_compat.dts b/tools/binman/test/334_fit_fdt_compat.dts
index 3bf45c710db..bf1b5a4a94a 100644
--- a/tools/binman/test/334_fit_fdt_compat.dts
+++ b/tools/binman/test/334_fit_fdt_compat.dts
@@ -15,6 +15,20 @@
fit,fdt-list = "of-list";
images {
+ atf {
+ description = "atf firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
+ uboot {
+ description = "U-Boot firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
kernel {
description = "Vanilla Linux kernel";
type = "kernel";
diff --git a/tools/binman/test/335_fit_fdt_phase.dts b/tools/binman/test/335_fit_fdt_phase.dts
index f8d0740a394..c20bcad651a 100644
--- a/tools/binman/test/335_fit_fdt_phase.dts
+++ b/tools/binman/test/335_fit_fdt_phase.dts
@@ -15,6 +15,20 @@
fit,fdt-list = "of-list";
images {
+ atf {
+ description = "atf firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
+ uboot {
+ description = "U-Boot firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
kernel {
description = "Vanilla Linux kernel";
type = "kernel";
diff --git a/tools/binman/test/345_fit_fdt_name.dts b/tools/binman/test/345_fit_fdt_name.dts
index 631a8e5f59b..0ef2e1934a0 100644
--- a/tools/binman/test/345_fit_fdt_name.dts
+++ b/tools/binman/test/345_fit_fdt_name.dts
@@ -15,6 +15,20 @@
fit,fdt-list = "of-list";
images {
+ atf {
+ description = "atf firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
+ uboot {
+ description = "U-Boot firmware";
+ type = "firmware";
+ compression = "none";
+ load = <00000000>;
+ entry = <00000000>;
+ };
kernel {
description = "Vanilla Linux kernel";
type = "kernel";
diff --git a/tools/binman/test/347_bl1.dts b/tools/binman/test/347_bl1.dts
new file mode 100644
index 00000000000..1a109956204
--- /dev/null
+++ b/tools/binman/test/347_bl1.dts
@@ -0,0 +1,13 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+ binman {
+ atf-bl1 {
+ filename = "bl1.bin";
+ };
+ };
+};
diff --git a/tools/binman/test/347_bootph_prop.dts b/tools/binman/test/347_bootph_prop.dts
new file mode 100644
index 00000000000..91d4e4ad600
--- /dev/null
+++ b/tools/binman/test/347_bootph_prop.dts
@@ -0,0 +1,21 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+/ {
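+	/*
+	 * The bootph-all and bootph-some-ram properties set on the leaf
+	 * subnodes are expected to propagate up to subnode-1 and
+	 * dummy-parent; subnode-4 sets nothing and must stay empty.
+	 */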
+ dummy-parent {
+ subnode-1 {
+ subnode-2 {
+ bootph-all;
+ };
+ subnode-3 {
+ bootph-some-ram;
+ subnode-4 {
+ };
+ };
+ };
+ };
+
+ binman: binman {
+ };
+};
+
diff --git a/tools/binman/test/347_key_name_hint_dir_fit_signature.dts b/tools/binman/test/347_key_name_hint_dir_fit_signature.dts
new file mode 100644
index 00000000000..96e2126dadb
--- /dev/null
+++ b/tools/binman/test/347_key_name_hint_dir_fit_signature.dts
@@ -0,0 +1,98 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ fit {
+ description = "test desc";
+ #address-cells = <1>;
+ fit,fdt-list = "of-list";
+ fit,sign;
+
+ images {
+ u-boot {
+ description = "test u-boot";
+ type = "standalone";
+ arch = "arm64";
+ os = "u-boot";
+ compression = "none";
+ load = <0x00000000>;
+ entry = <0x00000000>;
+
+ u-boot-nodtb {
+ };
+
+ hash {
+ algo = "sha256";
+ };
+
+ signature {
+ algo = "sha256,rsa2048";
+ key-name-hint = "keys/rsa2048";
+ };
+ };
+ @atf-SEQ {
+ fit,operation = "split-elf";
+ description = "test tf-a";
+ type = "firmware";
+ arch = "arm64";
+ os = "arm-trusted-firmware";
+ compression = "none";
+ fit,load;
+ fit,entry;
+ fit,data;
+
+ atf-bl31 {
+ };
+
+ hash {
+ algo = "sha256";
+ };
+
+ signature {
+ algo = "sha256,rsa2048";
+ key-name-hint = "keys/rsa2048";
+ };
+ };
+ @fdt-SEQ {
+ description = "test fdt";
+ type = "flat_dt";
+ compression = "none";
+
+ hash {
+ algo = "sha256";
+ };
+
+ signature {
+ algo = "sha256,rsa2048";
+ key-name-hint = "keys/rsa2048";
+ };
+ };
+ };
+
+ configurations {
+ default = "@conf-uboot-DEFAULT-SEQ";
+ @conf-uboot-SEQ {
+ description = "uboot config";
+ fdt = "fdt-SEQ";
+ fit,firmware = "u-boot";
+ fit,loadables;
+
+ hash {
+ algo = "sha256";
+ };
+
+ signature {
+ algo = "sha256,rsa2048";
+ key-name-hint = "keys/rsa2048";
+ sign-images = "firmware", "loadables", "fdt";
+ };
+ };
+ };
+ };
+ };
+};
diff --git a/tools/binman/test/348_key_name_hint_dir_spl_pubkey_dtb.dts b/tools/binman/test/348_key_name_hint_dir_spl_pubkey_dtb.dts
new file mode 100644
index 00000000000..85ebd58b6c0
--- /dev/null
+++ b/tools/binman/test/348_key_name_hint_dir_spl_pubkey_dtb.dts
@@ -0,0 +1,16 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ u-boot-spl-pubkey-dtb {
+ algo = "sha384,rsa4096";
+ required = "conf";
+ key-name-hint = "keys/key";
+ };
+ };
+};
diff --git a/tools/binman/test/348_renesas_rcar4_sa0.dts b/tools/binman/test/348_renesas_rcar4_sa0.dts
new file mode 100644
index 00000000000..4a8717520f2
--- /dev/null
+++ b/tools/binman/test/348_renesas_rcar4_sa0.dts
@@ -0,0 +1,15 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ renesas-rcar4-sa0 {
+ filename = "sa0.bin";
+ renesas,loader-address = <0x10>;
+ };
+ };
+};
diff --git a/tools/binman/test/349_renesas_rcar4_sa0_size.dts b/tools/binman/test/349_renesas_rcar4_sa0_size.dts
new file mode 100644
index 00000000000..eaf4507260b
--- /dev/null
+++ b/tools/binman/test/349_renesas_rcar4_sa0_size.dts
@@ -0,0 +1,20 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ renesas-rcar4-sa0 {
+ filename = "sa0.bin";
+ renesas,loader-address = <0x10>;
+
+ fill { /* Oversize fill to cover size check */
+ size = <0x140000>;
+ fill-byte = [ff];
+ };
+ };
+ };
+};
diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py
index 4bea0a02b78..9516e25e215 100644
--- a/tools/buildman/builder.py
+++ b/tools/buildman/builder.py
@@ -265,7 +265,7 @@ class Builder:
reproducible_builds=False, force_build=False,
force_build_failures=False, force_reconfig=False,
in_tree=False, force_config_on_failure=False, make_func=None,
- dtc_skip=False):
+ dtc_skip=False, build_target=None):
"""Create a new Builder object
Args:
@@ -315,6 +315,7 @@ class Builder:
retrying a failed build
make_func (function): Function to call to run 'make'
dtc_skip (bool): True to skip building dtc and use the system one
+ build_target (str): Build target to use (None to use the default)
"""
self.toolchains = toolchains
self.base_dir = base_dir
@@ -363,6 +364,7 @@ class Builder:
raise ValueError('Cannot find dtc')
else:
self.dtc = None
+ self.build_target = build_target
if not self.squash_config_y:
self.config_filenames += EXTRA_CONFIG_FILENAMES
@@ -629,10 +631,13 @@ class Builder:
Args:
commit_upto: Commit number to use (0..self.count-1)
target: Target name
+
+ Return:
+ str: Output directory to use, or '' if there is none
"""
output_dir = self.get_output_dir(commit_upto)
if self.work_in_output:
- return output_dir
+ return output_dir or ''
return os.path.join(output_dir, target)
def get_done_file(self, commit_upto, target):
@@ -1681,7 +1686,7 @@ class Builder:
"""
thread_dir = self.get_thread_dir(thread_num)
builderthread.mkdir(thread_dir)
- git_dir = os.path.join(thread_dir, '.git')
+ git_dir = os.path.join(thread_dir, '.git') if thread_dir else None
# Create a worktree or a git repo clone for this thread if it
# doesn't already exist
diff --git a/tools/buildman/builderthread.py b/tools/buildman/builderthread.py
index b8578d5b97b..371708c8a98 100644
--- a/tools/buildman/builderthread.py
+++ b/tools/buildman/builderthread.py
@@ -31,12 +31,14 @@ def mkdir(dirname, parents=False):
"""Make a directory if it doesn't already exist.
Args:
- dirname (str): Directory to create
+ dirname (str): Directory to create, or None to do nothing
parents (bool): True to also make parent directories
Raises:
OSError: File already exists
"""
+ if not dirname or os.path.exists(dirname):
+ return
try:
if parents:
os.makedirs(dirname)
@@ -45,8 +47,8 @@ def mkdir(dirname, parents=False):
except OSError as err:
if err.errno == errno.EEXIST:
if os.path.realpath('.') == os.path.realpath(dirname):
- print(f"Cannot create the current working directory '{dirname}'!")
- sys.exit(1)
+ raise ValueError(
+ f"Cannot create the current working directory '{dirname}'!")
else:
raise
@@ -55,7 +57,7 @@ def _remove_old_outputs(out_dir):
"""Remove any old output-target files
Args:
- out_dir (str): Output directory for the build
+ out_dir (str): Output directory for the build, or None for current dir
Since we use a build directory that was previously used by another
board, it may have produced an SPL image. If we don't remove it (i.e.
@@ -63,7 +65,7 @@ def _remove_old_outputs(out_dir):
output of this build, even if it does not produce SPL images.
"""
for elf in BASE_ELF_FILENAMES:
- fname = os.path.join(out_dir, elf)
+ fname = os.path.join(out_dir or '', elf)
if os.path.exists(fname):
os.remove(fname)
@@ -191,9 +193,11 @@ class BuilderThread(threading.Thread):
Args:
brd (Board): Board to create arguments for
- out_dir (str): Path to output directory containing the files
+ out_dir (str): Path to output directory containing the files, or
+ None to not use a separate output directory
out_rel_dir (str): Output directory relative to the current dir
- work_dir (str): Directory to which the source will be checked out
+ work_dir (str): Directory to which the source will be checked out,
+ or None to use the current directory
commit_upto (int): Commit number to build (0...n-1)
Returns:
@@ -204,22 +208,22 @@ class BuilderThread(threading.Thread):
"""
args = []
cwd = work_dir
- src_dir = os.path.realpath(work_dir)
- if not self.builder.in_tree:
- if commit_upto is None:
- # In this case we are building in the original source directory
- # (i.e. the current directory where buildman is invoked. The
- # output directory is set to this thread's selected work
- # directory.
- #
- # Symlinks can confuse U-Boot's Makefile since we may use '..'
- # in our path, so remove them.
+ src_dir = os.path.realpath(work_dir) if work_dir else os.getcwd()
+ if commit_upto is None:
+ # In this case we are building in the original source directory
+ # (i.e. the current directory where buildman is invoked). The
+ # output directory is set to this thread's selected work
+ # directory.
+ #
+ # Symlinks can confuse U-Boot's Makefile since we may use '..'
+ # in our path, so remove them.
+ if out_dir:
real_dir = os.path.realpath(out_dir)
args.append(f'O={real_dir}')
- cwd = None
- src_dir = os.getcwd()
- else:
- args.append(f'O={out_rel_dir}')
+ cwd = None
+ src_dir = os.getcwd()
+ elif out_rel_dir:
+ args.append(f'O={out_rel_dir}')
if self.builder.verbose_build:
args.append('V=1')
else:
@@ -285,6 +289,8 @@ class BuilderThread(threading.Thread):
"""
if config_only:
args.append('cfg')
+ elif self.builder.build_target:
+ args.append(self.builder.build_target)
result = self.make(commit, brd, 'build', cwd, *args, env=env)
cmd_list.append([self.builder.gnu_make] + args)
if (result.return_code == 2 and
@@ -395,7 +401,8 @@ class BuilderThread(threading.Thread):
config_only (bool): Only configure the source, do not build it
adjust_cfg (list of str): See the cfgutil module and run_commit()
commit (Commit): Commit only being built
- out_dir (str): Output directory for the build
+ out_dir (str): Output directory for the build, or None to use
+ the current directory
out_rel_dir (str): Output directory relative to the current dir
result (CommandResult): Previous result
@@ -407,7 +414,8 @@ class BuilderThread(threading.Thread):
"""
# Set up the environment and command line
env = self.builder.make_environment(self.toolchain)
- mkdir(out_dir)
+ if out_dir and not os.path.exists(out_dir):
+ mkdir(out_dir)
args, cwd, src_dir = self._build_args(brd, out_dir, out_rel_dir,
work_dir, commit_upto)
@@ -417,7 +425,7 @@ class BuilderThread(threading.Thread):
_remove_old_outputs(out_dir)
# If we need to reconfigure, do that now
- cfg_file = os.path.join(out_dir, '.config')
+ cfg_file = os.path.join(out_dir or '', '.config')
cmd_list = []
if do_config or adjust_cfg:
result = self._reconfigure(
@@ -625,7 +633,7 @@ class BuilderThread(threading.Thread):
# Extract the environment from U-Boot and dump it out
cmd = [f'{self.toolchain.cross}objcopy', '-O', 'binary',
'-j', '.rodata.default_environment',
- 'env/built-in.o', 'uboot.env']
+ 'env/built-in.a', 'uboot.env']
command.run_one(*cmd, capture=True, capture_stderr=True,
cwd=result.out_dir, raise_on_error=False, env=env)
if not work_in_output:
diff --git a/tools/buildman/buildman.rst b/tools/buildman/buildman.rst
index 07ecc5c110c..8c45a841024 100644
--- a/tools/buildman/buildman.rst
+++ b/tools/buildman/buildman.rst
@@ -1329,6 +1329,18 @@ sometimes useful to have buildman wait until the others have finished. Use the
--process-limit option for this: --process-limit 1 will allow only one buildman
to process jobs at a time.
+To build a particular target, rather than the default U-Boot target, use the
+`--target` option. This is unlikely to be useful unless you are building a
+single board.
+
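+For example, to build only the devicetree for one board (here sandbox),
+something like::
+
+    buildman --target u-boot.dtb sandbox
+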
+Buildman normally builds out-of-tree, meaning that the source directory is not
+disturbed by the build. Use `-i` to do an in-tree build instead. Note that this
+does not affect the source directory, since buildman creates a separate git
+'worktree' for each board. This means that it is possible to do an in-tree
+build of an entire branch, or even a 'current source' build for multiple boards.
+As a special case, you can use `-wi` to do an in-tree build in the current
+directory.
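+
+For example, something like this builds a single board in the current
+source directory::
+
+    buildman -wi sandbox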
+
Build summary
-------------
diff --git a/tools/buildman/cmdline.py b/tools/buildman/cmdline.py
index 7573e5bdfe8..9236d6187cf 100644
--- a/tools/buildman/cmdline.py
+++ b/tools/buildman/cmdline.py
@@ -22,6 +22,7 @@ def add_upto_m(parser):
This is split out to avoid having too many statements in one function
"""
+ # Available short-option letters: JqzZ
parser.add_argument('-a', '--adjust-cfg', type=str, action='append',
help='Adjust the Kconfig settings in .config before building')
parser.add_argument('-A', '--print-prefix', action='store_true',
@@ -153,6 +154,8 @@ def add_after_m(parser):
parser.add_argument('-T', '--threads', type=int,
default=None,
help='Number of builder threads to use (0=single-thread)')
+ parser.add_argument('--target', type=str,
+ default=None, help='Build target to use')
parser.add_argument('-u', '--show_unknown', action='store_true',
default=False, help='Show boards with unknown build result')
parser.add_argument('-U', '--show-environment', action='store_true',
diff --git a/tools/buildman/control.py b/tools/buildman/control.py
index 5109b1cd5ce..4dedd333551 100644
--- a/tools/buildman/control.py
+++ b/tools/buildman/control.py
@@ -390,7 +390,7 @@ def get_boards_obj(output_dir, regen_board_list, maintainer_check, full_check,
read it in.
Args:
- output_dir (str): Output directory to use
+ output_dir (str): Output directory to use, or None to use current dir
regen_board_list (bool): True to just regenerate the board list
maintainer_check (bool): True to just run a maintainer check
full_check (bool): True to just run a full check of Kconfig and
@@ -414,9 +414,9 @@ def get_boards_obj(output_dir, regen_board_list, maintainer_check, full_check,
return 2
return 0
- if not os.path.exists(output_dir):
+ if output_dir and not os.path.exists(output_dir):
os.makedirs(output_dir)
- board_file = os.path.join(output_dir, 'boards.cfg')
+ board_file = os.path.join(output_dir or '', 'boards.cfg')
if regen_board_list and regen_board_list != '-':
board_file = regen_board_list
@@ -501,7 +501,7 @@ def adjust_args(args, series, selected):
def setup_output_dir(output_dir, work_in_output, branch, no_subdirs, col,
- clean_dir):
+ in_tree, clean_dir):
"""Set up the output directory
Args:
@@ -509,6 +509,7 @@ def setup_output_dir(output_dir, work_in_output, branch, no_subdirs, col,
work_in_output (bool): True to work in the output directory
branch (str): Name of branch to build, or None if none
no_subdirs (bool): True to put the output in the top-level output dir
+ in_tree (bool): True if doing an in-tree build
clean_dir: Used for tests only, indicates that the existing output_dir
should be removed before starting the build
@@ -516,9 +517,11 @@ def setup_output_dir(output_dir, work_in_output, branch, no_subdirs, col,
str: Updated output directory pathname
"""
if not output_dir:
- if work_in_output:
- sys.exit(col.build(col.RED, '-w requires that you specify -o'))
output_dir = '..'
+ if work_in_output:
+ if not in_tree:
+ sys.exit(col.build(col.RED, '-w requires that you specify -o'))
+ output_dir = None
if branch and not no_subdirs:
# As a special case allow the board directory to be placed in the
# output directory itself rather than any subdirectory.
@@ -751,7 +754,7 @@ def do_buildman(args, toolchains=None, make_func=None, brds=None,
output_dir = setup_output_dir(
args.output_dir, args.work_in_output, args.branch,
- args.no_subdirs, col, clean_dir)
+ args.no_subdirs, col, args.in_tree, clean_dir)
# Work out what subset of the boards we are building
if not brds:
@@ -785,6 +788,9 @@ def do_buildman(args, toolchains=None, make_func=None, brds=None,
args.verbose)
return 0
+ if args.config_only and args.target:
+ raise ValueError('Cannot use --config-only with --target')
+
# Create a new builder with the selected args
builder = Builder(toolchains, output_dir, git_dir,
args.threads, args.jobs, checkout=True,
@@ -810,7 +816,7 @@ def do_buildman(args, toolchains=None, make_func=None, brds=None,
force_build_failures = args.force_build_failures,
force_reconfig = args.force_reconfig, in_tree = args.in_tree,
force_config_on_failure=not args.quick, make_func=make_func,
- dtc_skip=args.dtc_skip)
+ dtc_skip=args.dtc_skip, build_target=args.target)
TEST_BUILDER = builder
diff --git a/tools/buildman/func_test.py b/tools/buildman/func_test.py
index b45eb95a1e6..51c6855420e 100644
--- a/tools/buildman/func_test.py
+++ b/tools/buildman/func_test.py
@@ -670,7 +670,7 @@ Some images are invalid'''
def testThreadExceptions(self):
"""Test that exceptions in threads are reported"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
self.assertEqual(102, self._RunControl('-o', self._output_dir,
test_thread_exceptions=True))
self.assertIn(
@@ -808,7 +808,7 @@ Some images are invalid'''
# CONFIG_LOCALVERSION_AUTO is not set
''', cfg_data)
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
lines, cfg_data = self.check_command('-r', '-a', 'LOCALVERSION')
self.assertIn(b'SOURCE_DATE_EPOCH=0', lines[0])
@@ -1032,14 +1032,14 @@ endif
outfile = os.path.join(self._output_dir, 'test-boards.cfg')
if os.path.exists(outfile):
os.remove(outfile)
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
result = self._RunControl('-R', outfile, brds=None,
get_builder=False)
self.assertTrue(os.path.exists(outfile))
def test_print_prefix(self):
"""Test that we can print the toolchain prefix"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
result = self._RunControl('-A', 'board0')
self.assertEqual('arm-\n', stdout.getvalue())
self.assertEqual('', stderr.getvalue())
@@ -1083,7 +1083,7 @@ endif
def test_print_arch(self):
"""Test that we can print the board architecture"""
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
result = self._RunControl('--print-arch', 'board0')
self.assertEqual('arm\n', stdout.getvalue())
self.assertEqual('', stderr.getvalue())
@@ -1152,3 +1152,13 @@ CONFIG_SOC="fred"
'board': 'ARM Board 0',
'config': 'config0',
'target': 'board0'}, []), res)
+
+ def testTarget(self):
+ """Test that the --target flag works"""
+ lines = self.check_command('--target', 'u-boot.dtb')[0]
+
+ # It should not affect the defconfig line
+ self.assertNotIn(b'u-boot.dtb', lines[0])
+
+ # It should appear at the end of the build line
+ self.assertEqual(b'u-boot.dtb', lines[1].split()[-1])
diff --git a/tools/buildman/main.py b/tools/buildman/main.py
index 72571b226d9..77b9bebed27 100755
--- a/tools/buildman/main.py
+++ b/tools/buildman/main.py
@@ -49,7 +49,7 @@ def run_tests(skip_net_tests, debug, verbose, args):
# Run the entry tests first, since these need to be the first to import the
# 'entry' module.
result = test_util.run_test_suites(
- 'buildman', debug, verbose, False, args.threads, test_name, [],
+ 'buildman', debug, verbose, False, False, args.threads, test_name, [],
[test.TestBuild, func_test.TestFunctional, 'buildman.toolchain'])
return (0 if result.wasSuccessful() else 1)
diff --git a/tools/buildman/test.py b/tools/buildman/test.py
index e31e6c72e1a..7ee9496ffb3 100644
--- a/tools/buildman/test.py
+++ b/tools/buildman/test.py
@@ -610,7 +610,7 @@ class TestBuild(unittest.TestCase):
def testToolchainDownload(self):
"""Test that we can download toolchains"""
if use_network:
- with test_util.capture_sys_output() as (stdout, stderr):
+ with terminal.capture() as (stdout, stderr):
url = self.toolchains.LocateArchUrl('arm')
self.assertRegex(url, 'https://www.kernel.org/pub/tools/'
'crosstool/files/bin/x86_64/.*/'
@@ -1054,7 +1054,7 @@ class TestBuild(unittest.TestCase):
self.assertEqual([f'{home}/mypath'], toolchains.paths)
# Check scanning
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
toolchains.Scan(verbose=True, raise_on_error=False)
lines = iter(stdout.getvalue().splitlines() + ['##done'])
self.assertEqual('Scanning for tool chains', next(lines))
@@ -1071,7 +1071,7 @@ class TestBuild(unittest.TestCase):
self.assertEqual('##done', next(lines))
# Check adding a toolchain
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
toolchains.Add('~/aarch64-linux-gcc', test=True, verbose=True)
lines = iter(stdout.getvalue().splitlines() + ['##done'])
self.assertEqual('Tool chain test: BAD', next(lines))
diff --git a/tools/dtoc/main.py b/tools/dtoc/main.py
index 6c91450410e..59b98b0fa9f 100755
--- a/tools/dtoc/main.py
+++ b/tools/dtoc/main.py
@@ -58,8 +58,9 @@ def run_tests(processes, args):
test_dtoc.setup()
result = test_util.run_test_suites(
- toolname='dtoc', debug=True, verbosity=1, test_preserve_dirs=False,
- processes=processes, test_name=test_name, toolpath=[],
+ toolname='dtoc', debug=True, verbosity=1, no_capture=False,
+ test_preserve_dirs=False, processes=processes, test_name=test_name,
+ toolpath=[],
class_and_module_list=[test_dtoc.TestDtoc,test_src_scan.TestSrcScan])
return (0 if result.wasSuccessful() else 1)
diff --git a/tools/dtoc/test_dtoc.py b/tools/dtoc/test_dtoc.py
index c4a0889aebe..1a85ebcf81a 100755
--- a/tools/dtoc/test_dtoc.py
+++ b/tools/dtoc/test_dtoc.py
@@ -26,6 +26,7 @@ from dtoc.dtb_platdata import get_value
from dtoc.dtb_platdata import tab_to
from dtoc.src_scan import conv_name_to_c
from dtoc.src_scan import get_compat_name
+from u_boot_pylib import terminal
from u_boot_pylib import test_util
from u_boot_pylib import tools
@@ -879,7 +880,7 @@ U_BOOT_DRVINFO(gpios_at_0) = {
"""Test output from a device tree file with an invalid driver"""
dtb_file = get_dtb_file('dtoc_test_invalid_driver.dts')
output = tools.get_output_filename('output')
- with test_util.capture_sys_output() as _:
+ with terminal.capture() as _:
dtb_platdata.run_steps(
['struct'], dtb_file, False, output, [], None, False,
scan=copy_scan())
@@ -890,7 +891,7 @@ struct dtd_invalid {
};
''', data)
- with test_util.capture_sys_output() as _:
+ with terminal.capture() as _:
dtb_platdata.run_steps(
['platdata'], dtb_file, False, output, [], None, False,
scan=copy_scan())
@@ -1522,7 +1523,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_stdout(self):
"""Test output to stdout"""
dtb_file = get_dtb_file('dtoc_test_simple.dts')
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
self.run_test(['struct'], dtb_file, None)
self._check_strings(self.struct_text, stdout.getvalue())
@@ -1744,7 +1745,7 @@ U_BOOT_DRVINFO(spl_test2) = {
"""Test alias for a uclass that doesn't exist"""
dtb_file = get_dtb_file('dtoc_test_alias_bad_uc.dts')
output = tools.get_output_filename('output')
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
plat = self.run_test(['struct'], dtb_file, output)
self.assertEqual("Could not find uclass for alias 'other1'",
stdout.getvalue().strip())
@@ -1821,7 +1822,7 @@ U_BOOT_DRVINFO(spl_test2) = {
del scan._structs['dm_test_uc_priv']
# Now generate the uclasses, which should provide a warning
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
plat.generate_uclasses()
self.assertEqual(
'Warning: Cannot find header file for struct dm_test_uc_priv',
diff --git a/tools/dtoc/test_fdt.py b/tools/dtoc/test_fdt.py
index 0b01518f3a5..a0bed4e18bb 100755
--- a/tools/dtoc/test_fdt.py
+++ b/tools/dtoc/test_fdt.py
@@ -969,7 +969,7 @@ def run_tests(names, processes):
"""
test_name = names[0] if names else None
result = test_util.run_test_suites(
- 'test_fdt', False, False, False, processes, test_name, None,
+ 'test_fdt', False, False, False, False, processes, test_name, None,
[TestFdt, TestNode, TestProp, TestFdtUtil])
return (0 if result.wasSuccessful() else 1)
diff --git a/tools/dtoc/test_src_scan.py b/tools/dtoc/test_src_scan.py
index 64b740841ca..385efedc851 100644
--- a/tools/dtoc/test_src_scan.py
+++ b/tools/dtoc/test_src_scan.py
@@ -15,6 +15,7 @@ import unittest
from unittest import mock
from dtoc import src_scan
+from u_boot_pylib import terminal
from u_boot_pylib import test_util
from u_boot_pylib import tools
@@ -80,7 +81,7 @@ class TestSrcScan(unittest.TestCase):
fout.write(b'\x81')
scan = src_scan.Scanner(None, [driver_fn])
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
scan.scan_drivers()
self.assertRegex(stdout.getvalue(),
r"Skipping file '.*' due to unicode error\s*")
@@ -170,7 +171,7 @@ class TestSrcScan(unittest.TestCase):
node.parent = FakeNode()
scan = src_scan.Scanner(None, None)
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
name, aliases = scan.get_normalized_compat_name(node)
self.assertEqual('rockchip_rk3288_grf', name)
self.assertEqual([], aliases)
@@ -189,7 +190,7 @@ class TestSrcScan(unittest.TestCase):
scan._driver_aliases['rockchip_rk3288_srf'] = 'rockchip_rk3288_grf'
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
name, aliases = scan.get_normalized_compat_name(node)
self.assertEqual('', stdout.getvalue().strip())
self.assertEqual('rockchip_rk3288_grf', name)
@@ -197,7 +198,7 @@ class TestSrcScan(unittest.TestCase):
self.assertEqual(EXPECT_WARN, scan._warnings)
prop.value = 'rockchip,rk3288-srf'
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
name, aliases = scan.get_normalized_compat_name(node)
self.assertEqual('', stdout.getvalue().strip())
self.assertEqual('rockchip_rk3288_grf', name)
@@ -379,7 +380,7 @@ struct another_struct {
tools.write_file(output, b'struct this is a test \x81 of bad unicode')
scan = src_scan.Scanner(None, None)
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
scan.scan_header(output)
self.assertIn('due to unicode error', stdout.getvalue())
@@ -456,7 +457,7 @@ U_BOOT_DRIVER(%s) = {
self.assertTrue(drv2.warn_dups)
# We should see a warning
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
scan.mark_used([node])
self.assertEqual(
"Warning: Duplicate driver name 'nvidia_tegra114_i2c' (orig=file2.c, dups=file1.c)",
@@ -477,7 +478,7 @@ U_BOOT_DRIVER(%s) = {
self.assertFalse(drv1.warn_dups)
# We should not see a warning
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
scan.mark_used([node])
self.assertEqual('', stdout.getvalue().strip())
@@ -539,7 +540,7 @@ U_BOOT_DRIVER(i2c_tegra) = {
# get_normalized_compat_name() uses this to check for root node
tnode.parent = FakeNode()
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
scan.get_normalized_compat_name(node)
scan.get_normalized_compat_name(tnode)
self.assertEqual('', stdout.getvalue().strip())
@@ -547,14 +548,14 @@ U_BOOT_DRIVER(i2c_tegra) = {
self.assertEqual(2, len(scan._missing_drivers))
self.assertEqual({'rockchip_rk3288_grf', 'nvidia_tegra114_i2c'},
scan._missing_drivers)
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
scan.show_warnings()
self.assertIn('rockchip_rk3288_grf', stdout.getvalue())
# This should show just the rockchip warning, since the tegra driver
# is not in self._missing_drivers
scan._missing_drivers.remove('nvidia_tegra114_i2c')
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
scan.show_warnings()
self.assertIn('rockchip_rk3288_grf', stdout.getvalue())
self.assertNotIn('tegra_i2c_ids', stdout.getvalue())
@@ -563,7 +564,7 @@ U_BOOT_DRIVER(i2c_tegra) = {
# used, the warning related to that driver will be shown
drv = scan._drivers['i2c_tegra']
drv.used = True
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
scan.show_warnings()
self.assertIn('rockchip_rk3288_grf', stdout.getvalue())
self.assertIn('tegra_i2c_ids', stdout.getvalue())
@@ -572,7 +573,7 @@ U_BOOT_DRIVER(i2c_tegra) = {
scan._warnings['i2c_tegra'].update(
scan._warnings['nvidia_tegra114_i2c'])
del scan._warnings['nvidia_tegra114_i2c']
- with test_util.capture_sys_output() as (stdout, _):
+ with terminal.capture() as (stdout, _):
scan.show_warnings()
self.assertEqual('''i2c_tegra: WARNING: the driver nvidia_tegra114_i2c was not found in the driver list
: file.c: Warning: unexpected suffix ' + 1' on .of_match line for compat 'tegra_i2c_ids'
diff --git a/tools/envcrc.c b/tools/envcrc.c
index 09051800364..7f680fb8a95 100644
--- a/tools/envcrc.c
+++ b/tools/envcrc.c
@@ -40,7 +40,7 @@
# endif
#endif /* CONFIG_ENV_IS_IN_FLASH */
-#ifdef CONFIG_SYS_REDUNDAND_ENVIRONMENT
+#ifdef CONFIG_ENV_REDUNDANT
# define ENV_HEADER_SIZE (sizeof(uint32_t) + 1)
#else
# define ENV_HEADER_SIZE (sizeof(uint32_t))
diff --git a/tools/fit_image.c b/tools/fit_image.c
index caed8d5f901..8717dc9a3b1 100644
--- a/tools/fit_image.c
+++ b/tools/fit_image.c
@@ -24,6 +24,65 @@
static struct legacy_img_hdr header;
+static int fit_estimate_hash_sig_size(struct image_tool_params *params, const char *fname)
+{
+ bool signing = IMAGE_ENABLE_SIGN && (params->keydir || params->keyfile);
+ struct stat sbuf;
+ void *fdt;
+ int fd;
+ int estimate = 0;
+ int depth, noffset;
+ const char *name;
+
+ fd = mmap_fdt(params->cmdname, fname, 0, &fdt, &sbuf, false, true);
+ if (fd < 0)
+ return -EIO;
+
+ /*
+ * Walk the FIT image, looking for nodes named hash* and
+ * signature*. Since the interesting nodes are subnodes of an
+ * image or configuration node, we are only interested in
+ * those at depth exactly 3.
+ *
+ * The estimate for a hash node is based on a sha512 digest
+ * being 64 bytes, with another 64 bytes added to account for
+ * fdt structure overhead (the tags and the name of the
+ * "value" property).
+ *
+ * The estimate for a signature node is based on an rsa4096
+ * signature being 512 bytes, with another 512 bytes to
+ * account for fdt overhead and the various other properties
+ * (hashed-nodes etc.) that will also be filled in.
+ *
+ * One could try to be more precise in the estimates by
+ * looking at the "algo" property and, in the case of
+ * configuration signatures, the sign-images property. Also,
+ * when signing an already created FIT image, the hash nodes
+ * already have properly sized value properties, so one could
+ * also take pre-existence of "value" properties in hash nodes
+ * into account. But this rather simple approach should work
+ * well enough in practice.
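+ *
+ * As a worked example of these estimates: a FIT with two
+ * images and one configuration, each carrying one hash node
+ * and one signature node, would be estimated at 3 * 128 +
+ * 3 * 1024 = 3456 bytes when signing, or 3 * 128 = 384
+ * bytes when not.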
+ */
+ for (depth = 0, noffset = fdt_next_node(fdt, 0, &depth);
+ noffset >= 0 && depth > 0;
+ noffset = fdt_next_node(fdt, noffset, &depth)) {
+ if (depth != 3)
+ continue;
+
+ name = fdt_get_name(fdt, noffset, NULL);
+ if (!strncmp(name, FIT_HASH_NODENAME, strlen(FIT_HASH_NODENAME)))
+ estimate += 128;
+
+ if (signing && !strncmp(name, FIT_SIG_NODENAME, strlen(FIT_SIG_NODENAME)))
+ estimate += 1024;
+ }
+
+ munmap(fdt, sbuf.st_size);
+ close(fd);
+
+ return estimate;
+}
+
static int fit_add_file_data(struct image_tool_params *params, size_t size_inc,
const char *tmpfile)
{
@@ -627,6 +686,7 @@ static int fit_import_data(struct image_tool_params *params, const char *fname)
struct stat sbuf;
int ret;
int images;
+ int confs;
int node;
fd = mmap_fdt(params->cmdname, fname, 0, &old_fdt, &sbuf, false, false);
@@ -695,6 +755,43 @@ static int fit_import_data(struct image_tool_params *params, const char *fname)
}
}
+ confs = fdt_path_offset(fdt, FIT_CONFS_PATH);
+ static const char * const props[] = { FIT_KERNEL_PROP,
+ FIT_RAMDISK_PROP,
+ FIT_FDT_PROP,
+ FIT_LOADABLE_PROP,
+ FIT_FPGA_PROP,
+ FIT_FIRMWARE_PROP,
+ FIT_SCRIPT_PROP};
+
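+	/*
+	 * Check that every image referenced from a configuration node
+	 * (kernel, ramdisk, fdt, loadables, fpga, firmware, script)
+	 * exists under /images, so that a dangling reference fails at
+	 * build time rather than at boot.
+	 */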
+ fdt_for_each_subnode(node, fdt, confs) {
+ const char *conf_name = fdt_get_name(fdt, node, NULL);
+
+ for (int i = 0; i < ARRAY_SIZE(props); i++) {
+ int count = fdt_stringlist_count(fdt, node, props[i]);
+
+ if (count < 0)
+ continue;
+
+ for (int j = 0; j < count; j++) {
+ const char *img_name =
+ fdt_stringlist_get(fdt, node, props[i], j, NULL);
+ if (!img_name || !*img_name)
+ continue;
+
+ int img = fdt_subnode_offset(fdt, images, img_name);
+
+ if (img < 0) {
+ fprintf(stderr,
+ "Error: configuration '%s' references undefined image '%s' in property '%s'\n",
+ conf_name, img_name, props[i]);
+ ret = FDT_ERR_NOTFOUND;
+ goto err_munmap;
+ }
+ }
+ }
+ }
+
munmap(old_fdt, sbuf.st_size);
/* Close the old fd so we can re-use it. */
@@ -750,7 +847,7 @@ static int fit_handle_file(struct image_tool_params *params)
char bakfile[MKIMAGE_MAX_TMPFILE_LEN + 4] = {0};
char cmd[MKIMAGE_MAX_DTC_CMDLINE_LEN];
size_t size_inc;
- int ret;
+ int ret = EXIT_FAILURE;
/* Flattened Image Tree (FIT) format handling */
debug ("FIT format handling\n");
@@ -806,16 +903,16 @@ static int fit_handle_file(struct image_tool_params *params)
rename(tmpfile, bakfile);
/*
- * Set hashes for images in the blob. Unfortunately we may need more
- * space in either FDT, so keep trying until we succeed.
- *
- * Note: this is pretty inefficient for signing, since we must
- * calculate the signature every time. It would be better to calculate
- * all the data and then store it in a separate step. However, this
- * would be considerably more complex to implement. Generally a few
- * steps of this loop is enough to sign with several keys.
+ * Set hashes for images in the blob and compute signatures.
+ * We attempt to estimate the expected extra size but, in case
+ * that is not sufficient, keep retrying, adding 1K at a time,
+ * up to a reasonable upper bound of 64K total, until we
+ * succeed.
*/
- for (size_inc = 0; size_inc < 64 * 1024; size_inc += 1024) {
+ ret = fit_estimate_hash_sig_size(params, bakfile);
+ if (ret < 0)
+ goto err_system;
+ size_inc = ret;
+ do {
if (copyfile(bakfile, tmpfile) < 0) {
printf("Can't copy %s to %s\n", bakfile, tmpfile);
ret = -EIO;
@@ -824,7 +921,8 @@ static int fit_handle_file(struct image_tool_params *params)
ret = fit_add_file_data(params, size_inc, tmpfile);
if (!ret || ret != -ENOSPC)
break;
- }
+ size_inc += 1024;
+ } while (size_inc < 64 * 1024);
if (ret) {
fprintf(stderr, "%s Can't add hashes to FIT blob: %d\n",
@@ -854,7 +952,7 @@ static int fit_handle_file(struct image_tool_params *params)
err_system:
unlink(tmpfile);
unlink(bakfile);
- return -1;
+ return ret;
}
/**
diff --git a/tools/ifdtool.c b/tools/ifdtool.c
index b70570361f4..9fd7a709214 100644
--- a/tools/ifdtool.c
+++ b/tools/ifdtool.c
@@ -499,8 +499,10 @@ static int write_image(char *filename, char *image, int size)
S_IWUSR | S_IRGRP | S_IROTH);
if (new_fd < 0)
return perror_fname("Could not open file '%s'", filename);
- if (write(new_fd, image, size) != size)
+ if (write(new_fd, image, size) != size) {
+ close(new_fd);
return perror_fname("Could not write file '%s'", filename);
+ }
close(new_fd);
return 0;
@@ -604,8 +606,10 @@ int open_for_read(const char *fname, int *sizep)
if (fd == -1)
return perror_fname("Could not open file '%s'", fname);
- if (fstat(fd, &buf) == -1)
+ if (fstat(fd, &buf) == -1) {
+ close(fd);
return perror_fname("Could not stat file '%s'", fname);
+ }
*sizep = buf.st_size;
debug("File %s is %d bytes\n", fname, *sizep);
diff --git a/tools/mkimage.c b/tools/mkimage.c
index 2954626a283..361711c53b2 100644
--- a/tools/mkimage.c
+++ b/tools/mkimage.c
@@ -519,8 +519,13 @@ int main(int argc, char **argv)
*/
retval = tparams->fflag_handle(&params);
- if (retval != EXIT_SUCCESS)
+ if (retval != EXIT_SUCCESS) {
+ if (retval == FDT_ERR_NOTFOUND) {
+ /* The error has already been printed; skip the usage message */
+ exit(EXIT_FAILURE);
+ }
usage("Bad parameters for FIT image type");
+ }
}
if (params.lflag || params.fflag) {
diff --git a/tools/patman/__init__.py b/tools/patman/__init__.py
index 6de0e9fba10..0cca6f42435 100644
--- a/tools/patman/__init__.py
+++ b/tools/patman/__init__.py
@@ -1,5 +1,8 @@
# SPDX-License-Identifier: GPL-2.0+
-__all__ = ['checkpatch', 'commit', 'control', 'func_test', 'get_maintainer',
- '__main__', 'patchstream', 'project', 'series',
- 'settings', 'setup', 'status', 'test_checkpatch', 'test_settings']
+__all__ = [
+ 'checkpatch', 'cmdline', 'commit', 'control', 'cser_helper', 'cseries',
+ 'database', 'func_test', 'get_maintainer', '__main__', 'patchstream',
+ 'patchwork', 'project', 'send', 'series', 'settings', 'setup', 'status',
+ 'test_checkpatch', 'test_common', 'test_cseries', 'test_settings'
+]
diff --git a/tools/patman/__main__.py b/tools/patman/__main__.py
index 36f1c08507c..edfb1b5927c 100755
--- a/tools/patman/__main__.py
+++ b/tools/patman/__main__.py
@@ -6,15 +6,8 @@
"""See README for more information"""
-try:
- from importlib import resources
-except ImportError:
- # for Python 3.6
- import importlib_resources as resources
import os
-import re
import sys
-import traceback
# Allow 'from patman import xxx to work'
# pylint: disable=C0413
@@ -22,11 +15,10 @@ our_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(our_path, '..'))
# Our modules
+from u_boot_pylib import test_util
+from u_boot_pylib import tout
from patman import cmdline
from patman import control
-from u_boot_pylib import terminal
-from u_boot_pylib import test_util
-from u_boot_pylib import tools
def run_patman():
@@ -40,58 +32,27 @@ def run_patman():
if not args.debug:
sys.tracebacklimit = 0
- # Run our meagre tests
+ tout.init(tout.INFO if args.verbose else tout.WARNING)
+
+ # Run our reasonably good tests
if args.cmd == 'test':
# pylint: disable=C0415
from patman import func_test
from patman import test_checkpatch
+ from patman import test_cseries
+ to_run = args.testname if args.testname not in [None, 'test'] else None
result = test_util.run_test_suites(
- 'patman', False, False, False, None, None, None,
- [test_checkpatch.TestPatch, func_test.TestFunctional,
- 'settings'])
-
+ 'patman', False, args.verbose, args.no_capture,
+ args.test_preserve_dirs, None, to_run, None,
+ [test_checkpatch.TestPatch, func_test.TestFunctional, 'settings',
+ test_cseries.TestCseries])
sys.exit(0 if result.wasSuccessful() else 1)
# Process commits, produce patches files, check them, email them
- elif args.cmd == 'send':
- # Called from git with a patch filename as argument
- # Printout a list of additional CC recipients for this patch
- if args.cc_cmd:
- re_line = re.compile(r'(\S*) (.*)')
- with open(args.cc_cmd, 'r', encoding='utf-8') as inf:
- for line in inf.readlines():
- match = re_line.match(line)
- if match and match.group(1) == args.patchfiles[0]:
- for cca in match.group(2).split('\0'):
- cca = cca.strip()
- if cca:
- print(cca)
-
- elif args.full_help:
- with resources.path('patman', 'README.rst') as readme:
- tools.print_full_help(str(readme))
- else:
- # If we are not processing tags, no need to warning about bad ones
- if not args.process_tags:
- args.ignore_bad_tags = True
- control.send(args)
-
- # Check status of patches in patchwork
- elif args.cmd == 'status':
- ret_code = 0
- try:
- control.patchwork_status(args.branch, args.count, args.start, args.end,
- args.dest_branch, args.force,
- args.show_comments, args.patchwork_url)
- except Exception as exc:
- terminal.tprint(f'patman: {type(exc).__name__}: {exc}',
- colour=terminal.Color.RED)
- if args.debug:
- print()
- traceback.print_exc()
- ret_code = 1
- sys.exit(ret_code)
+ else:
+ exit_code = control.do_patman(args)
+ sys.exit(exit_code)
if __name__ == "__main__":
diff --git a/tools/patman/checkpatch.py b/tools/patman/checkpatch.py
index 2975881705c..f9204a907ef 100644
--- a/tools/patman/checkpatch.py
+++ b/tools/patman/checkpatch.py
@@ -22,7 +22,7 @@ RE_NOTE = re.compile(r'NOTE: (.*)')
def find_check_patch():
- top_level = gitutil.get_top_level()
+ top_level = gitutil.get_top_level() or ''
try_list = [
os.getcwd(),
os.path.join(os.getcwd(), '..', '..'),
@@ -187,7 +187,8 @@ def check_patch_parse(checkpatch_output, verbose=False):
return result
-def check_patch(fname, verbose=False, show_types=False, use_tree=False):
+def check_patch(fname, verbose=False, show_types=False, use_tree=False,
+ cwd=None):
"""Run checkpatch.pl on a file and parse the results.
Args:
@@ -196,6 +197,7 @@ def check_patch(fname, verbose=False, show_types=False, use_tree=False):
parsed
show_types: Tell checkpatch to show the type (number) of each message
use_tree (bool): If False we'll pass '--no-tree' to checkpatch.
+ cwd (str): Path to use for patch files (None to use current dir)
Returns:
namedtuple containing:
@@ -217,7 +219,9 @@ def check_patch(fname, verbose=False, show_types=False, use_tree=False):
args.append('--no-tree')
if show_types:
args.append('--show-types')
- output = command.output(*args, fname, raise_on_error=False)
+ output = command.output(
+ *args, os.path.join(cwd or '', fname), raise_on_error=False,
+ capture_stderr=not use_tree)
return check_patch_parse(output, verbose)
@@ -240,7 +244,7 @@ def get_warning_msg(col, msg_type, fname, line, msg):
line_str = '' if line is None else '%d' % line
return '%s:%s: %s: %s\n' % (fname, line_str, msg_type, msg)
-def check_patches(verbose, args, use_tree):
+def check_patches(verbose, args, use_tree, cwd):
'''Run the checkpatch.pl script on each patch'''
error_count, warning_count, check_count = 0, 0, 0
col = terminal.Color()
@@ -248,7 +252,8 @@ def check_patches(verbose, args, use_tree):
with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor:
futures = []
for fname in args:
- f = executor.submit(check_patch, fname, verbose, use_tree=use_tree)
+ f = executor.submit(check_patch, fname, verbose, use_tree=use_tree,
+ cwd=cwd)
futures.append(f)
for fname, f in zip(args, futures):
diff --git a/tools/patman/cmdline.py b/tools/patman/cmdline.py
index 562bc823f60..924f0ad4e42 100644
--- a/tools/patman/cmdline.py
+++ b/tools/patman/cmdline.py
@@ -13,101 +13,443 @@ import os
import pathlib
import sys
-from patman import project
from u_boot_pylib import gitutil
+from patman import project
from patman import settings
PATMAN_DIR = pathlib.Path(__file__).parent
HAS_TESTS = os.path.exists(PATMAN_DIR / "func_test.py")
-def parse_args():
- """Parse command line arguments from sys.argv[]
+# Aliases for subcommands
+ALIASES = {
+ 'series': ['s', 'ser'],
+ 'status': ['st'],
+ 'patchwork': ['pw'],
+ 'upstream': ['us'],
+
+ # Series aliases
+ 'archive': ['ar'],
+ 'autolink': ['au'],
+ 'gather': ['g'],
+ 'open': ['o'],
+ 'progress': ['p', 'pr', 'prog'],
+ 'rm-version': ['rmv'],
+ 'unarchive': ['unar'],
+ }
+
+
+class ErrorCatchingArgumentParser(argparse.ArgumentParser):
+ def __init__(self, **kwargs):
+ self.exit_state = None
+ self.catch_error = False
+ super().__init__(**kwargs)
+
+ def error(self, message):
+ if self.catch_error:
+ self.message = message
+ else:
+ super().error(message)
+
+ def exit(self, status=0, message=None):
+ if self.catch_error:
+ self.exit_state = True
+ else:
+ super().exit(status, message)
+
+
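+# A sketch of the intended test usage for ErrorCatchingArgumentParser
+# (assumed, not part of this patch): with catch_error set, a parse failure
+# records the message instead of exiting the process:
+#
+#     parser = ErrorCatchingArgumentParser()
+#     parser.catch_error = True
+#     args = parser.parse_args(['--bogus'])
+#     print(parser.message)   # 'unrecognized arguments: --bogus'
+
+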
+def add_send_args(par):
+ """Add arguments for the 'send' command
+
+ Arguments:
+ par (ArgumentParser): Parser to add to
+ """
+ par.add_argument(
+ '-c', '--count', dest='count', type=int, default=-1,
+ help='Automatically create patches from top n commits')
+ par.add_argument(
+ '-e', '--end', type=int, default=0,
+ help='Commits to skip at end of patch list')
+ par.add_argument(
+ '-i', '--ignore-errors', action='store_true',
+ dest='ignore_errors', default=False,
+ help='Send patches email even if patch errors are found')
+ par.add_argument(
+ '-l', '--limit-cc', dest='limit', type=int, default=None,
+ help='Limit the cc list to LIMIT entries [default: %(default)s]')
+ par.add_argument(
+ '-m', '--no-maintainers', action='store_false',
+ dest='add_maintainers', default=True,
+ help="Don't cc the file maintainers automatically")
+ default_arg = None
+ top_level = gitutil.get_top_level()
+ if top_level:
+ default_arg = os.path.join(top_level, 'scripts',
+ 'get_maintainer.pl') + ' --norolestats'
+ par.add_argument(
+ '--get-maintainer-script', dest='get_maintainer_script', type=str,
+ action='store',
+ default=default_arg,
+ help='File name of the get_maintainer.pl (or compatible) script.')
+ par.add_argument(
+ '-r', '--in-reply-to', type=str, action='store',
+ help="Message ID that this series is in reply to")
+ par.add_argument(
+ '-s', '--start', dest='start', type=int, default=0,
+ help='Commit to start creating patches from (0 = HEAD)')
+ par.add_argument(
+ '-t', '--ignore-bad-tags', action='store_true', default=False,
+ help='Ignore bad tags / aliases (default=warn)')
+ par.add_argument(
+ '--no-binary', action='store_true', dest='ignore_binary',
+ default=False,
+ help="Do not output contents of changes in binary files")
+ par.add_argument(
+ '--no-check', action='store_false', dest='check_patch', default=True,
+ help="Don't check for patch compliance")
+ par.add_argument(
+ '--tree', dest='check_patch_use_tree', default=False,
+ action='store_true',
+ help=("Set `tree` to True. If `tree` is False then we'll pass "
+ "'--no-tree' to checkpatch (default: tree=%(default)s)"))
+ par.add_argument(
+ '--no-tree', dest='check_patch_use_tree', action='store_false',
+ help="Set `tree` to False")
+ par.add_argument(
+ '--no-tags', action='store_false', dest='process_tags', default=True,
+ help="Don't process subject tags as aliases")
+ par.add_argument(
+ '--no-signoff', action='store_false', dest='add_signoff',
+ default=True, help="Don't add Signed-off-by to patches")
+ par.add_argument(
+ '--smtp-server', type=str,
+ help="Specify the SMTP server to 'git send-email'")
+ par.add_argument(
+ '--keep-change-id', action='store_true',
+ help='Preserve Change-Id tags in patches to send.')
+
+
+def _add_show_comments(parser):
+ parser.add_argument('-c', '--show-comments', action='store_true',
+ help='Show comments from each patch')
+
+
+def _add_show_cover_comments(parser):
+ parser.add_argument('-C', '--show-cover-comments', action='store_true',
+ help='Show comments from the cover letter')
+
+
+def add_patchwork_subparser(subparsers):
+ """Add the 'patchwork' subparser
+
+ Args:
+ subparsers (argparse action): Subparser parent
+
+ Return:
+ ArgumentParser: patchwork subparser
+ """
+ patchwork = subparsers.add_parser(
+ 'patchwork', aliases=ALIASES['patchwork'],
+ help='Manage patchwork connection')
+ patchwork.defaults_cmds = [
+ ['set-project', 'U-Boot'],
+ ]
+ patchwork_subparsers = patchwork.add_subparsers(dest='subcmd')
+ patchwork_subparsers.add_parser('get-project')
+ uset = patchwork_subparsers.add_parser('set-project')
+ uset.add_argument(
+ 'project_name', help="Patchwork project name, e.g. 'U-Boot'")
+ return patchwork
+
+
+def add_series_subparser(subparsers):
+ """Add the 'series' subparser
+
+ Args:
+ subparsers (argparse action): Subparser parent
+
+ Return:
+ ArgumentParser: series subparser
+ """
+ def _add_allow_unmarked(parser):
+ parser.add_argument('-M', '--allow-unmarked', action='store_true',
+ default=False,
+ help="Don't require commits to be marked")
+
+ def _add_mark(parser):
+ parser.add_argument(
+ '-m', '--mark', action='store_true',
+ help='Mark unmarked commits with a Change-Id field')
+
+ def _add_update(parser):
+ parser.add_argument('-u', '--update', action='store_true',
+ help='Update the branch commit')
+
+ def _add_wait(parser, default_s):
+ """Add a -w option to a parser
+
+ Args:
+ parser (ArgumentParser): Parser to adjust
+ default_s (int): Default value to use, in seconds
+ """
+ parser.add_argument(
+ '-w', '--autolink-wait', type=int, default=default_s,
+ help='Seconds to wait for patchwork to get a sent series')
+
+ def _upstream_add(parser):
+ parser.add_argument('-U', '--upstream', help='Commit to end before')
+
+ def _add_gather(parser):
+ parser.add_argument(
+ '-G', '--no-gather-tags', dest='gather_tags', default=True,
+ action='store_false',
+ help="Don't gather review/test tags / update local series")
+
+ series = subparsers.add_parser('series', aliases=ALIASES['series'],
+ help='Manage series of patches')
+ series.defaults_cmds = [
+ ['set-link', 'fred'],
+ ]
+ series.add_argument(
+ '-n', '--dry-run', action='store_true', dest='dry_run', default=False,
+ help="Do a dry run (create but don't email patches)")
+ series.add_argument('-s', '--series', help='Name of series')
+ series.add_argument('-V', '--version', type=int,
+ help='Version number to link')
+ series_subparsers = series.add_subparsers(dest='subcmd')
+
+    # This causes problems at present, perhaps due to the 'defaults'
+    # handling in settings
+ # series_subparsers.required = True
+
+ add = series_subparsers.add_parser('add')
+ add.add_argument('-D', '--desc',
+ help='Series description / cover-letter title')
+ add.add_argument(
+ '-f', '--force-version', action='store_true',
+ help='Change the Series-version on a series to match its branch')
+ _add_mark(add)
+ _add_allow_unmarked(add)
+ _upstream_add(add)
+
+ series_subparsers.add_parser('archive', aliases=ALIASES['archive'])
+
+ auto = series_subparsers.add_parser('autolink',
+ aliases=ALIASES['autolink'])
+ _add_update(auto)
+ _add_wait(auto, 0)
+
+ aall = series_subparsers.add_parser('autolink-all')
+ aall.add_argument('-a', '--link-all-versions', action='store_true',
+ help='Link all series versions, not just the latest')
+ aall.add_argument('-r', '--replace-existing', action='store_true',
+ help='Replace existing links')
+ _add_update(aall)
+
+ series_subparsers.add_parser('dec')
+
+ gat = series_subparsers.add_parser('gather', aliases=ALIASES['gather'])
+ _add_gather(gat)
+ _add_show_comments(gat)
+ _add_show_cover_comments(gat)
+
+ sall = series_subparsers.add_parser('gather-all')
+ sall.add_argument(
+ '-a', '--gather-all-versions', action='store_true',
+ help='Gather tags from all series versions, not just the latest')
+ _add_gather(sall)
+ _add_show_comments(sall)
+ _add_show_cover_comments(sall)
+
+ series_subparsers.add_parser('get-link')
+ series_subparsers.add_parser('inc')
+ series_subparsers.add_parser('ls')
+
+ mar = series_subparsers.add_parser('mark')
+ mar.add_argument('-m', '--allow-marked', action='store_true',
+ default=False,
+ help="Don't require commits to be unmarked")
+
+ series_subparsers.add_parser('open', aliases=ALIASES['open'])
+ pat = series_subparsers.add_parser(
+ 'patches', epilog='Show a list of patches and optional details')
+ pat.add_argument('-t', '--commit', action='store_true',
+ help='Show the commit and diffstat')
+ pat.add_argument('-p', '--patch', action='store_true',
+ help='Show the patch body')
+
+ prog = series_subparsers.add_parser('progress',
+ aliases=ALIASES['progress'])
+ prog.add_argument('-a', '--show-all-versions', action='store_true',
+ help='Show all series versions, not just the latest')
+ prog.add_argument('-l', '--list-patches', action='store_true',
+ help='List patch subject and status')
+
+ ren = series_subparsers.add_parser('rename')
+ ren.add_argument('-N', '--new-name', help='New name for the series')
+
+ series_subparsers.add_parser('rm')
+ series_subparsers.add_parser('rm-version', aliases=ALIASES['rm-version'])
+
+ scan = series_subparsers.add_parser('scan')
+ _add_mark(scan)
+ _add_allow_unmarked(scan)
+ _upstream_add(scan)
+
+ ssend = series_subparsers.add_parser('send')
+ add_send_args(ssend)
+ ssend.add_argument(
+ '--no-autolink', action='store_false', default=True, dest='autolink',
+        help="Don't monitor patchwork after sending to autolink the series")
+ _add_wait(ssend, 120)
+
+ setl = series_subparsers.add_parser('set-link')
+ _add_update(setl)
+
+ setl.add_argument(
+ 'link', help='Link to use, i.e. patchwork series number (e.g. 452329)')
+ stat = series_subparsers.add_parser('status', aliases=ALIASES['status'])
+ _add_show_comments(stat)
+ _add_show_cover_comments(stat)
+
+ series_subparsers.add_parser('summary')
+
+ series_subparsers.add_parser('unarchive', aliases=ALIASES['unarchive'])
+
+ unm = series_subparsers.add_parser('unmark')
+ _add_allow_unmarked(unm)
+
+ ver = series_subparsers.add_parser(
+ 'version-change', help='Change a version to a different version')
+ ver.add_argument('--new-version', type=int,
+                     help='New version number to change this one to')
+
+ return series
+
+
+def add_send_subparser(subparsers):
+ """Add the 'send' subparser
+
+ Args:
+ subparsers (argparse action): Subparser parent
+
+ Return:
+ ArgumentParser: send subparser
+ """
+ send = subparsers.add_parser(
+ 'send', help='Format, check and email patches (default command)')
+ send.add_argument(
+ '-b', '--branch', type=str,
+ help="Branch to process (by default, the current branch)")
+ send.add_argument(
+ '-n', '--dry-run', action='store_true', dest='dry_run',
+ default=False, help="Do a dry run (create but don't email patches)")
+ send.add_argument(
+ '--cc-cmd', dest='cc_cmd', type=str, action='store',
+ default=None, help='Output cc list for patch file (used by git)')
+ add_send_args(send)
+ send.add_argument('patchfiles', nargs='*')
+ return send
+
+
+def add_status_subparser(subparsers):
+ """Add the 'status' subparser
+
+ Args:
+ subparsers (argparse action): Subparser parent
+
+ Return:
+ ArgumentParser: status subparser
+ """
+ status = subparsers.add_parser('status', aliases=ALIASES['status'],
+ help='Check status of patches in patchwork')
+ _add_show_comments(status)
+ status.add_argument(
+ '-d', '--dest-branch', type=str,
+ help='Name of branch to create with collected responses')
+ status.add_argument('-f', '--force', action='store_true',
+ help='Force overwriting an existing branch')
+ status.add_argument('-T', '--single-thread', action='store_true',
+ help='Disable multithreading when reading patchwork')
+ return status
+
+
+def add_upstream_subparser(subparsers):
+ """Add the 'status' subparser
+
+ Args:
+ subparsers (argparse action): Subparser parent
+
+ Return:
+        ArgumentParser: upstream subparser
+ """
+ upstream = subparsers.add_parser('upstream', aliases=ALIASES['upstream'],
+ help='Manage upstream destinations')
+ upstream.defaults_cmds = [
+ ['add', 'us', 'http://fred'],
+ ['delete', 'us'],
+ ]
+ upstream_subparsers = upstream.add_subparsers(dest='subcmd')
+ uadd = upstream_subparsers.add_parser('add')
+ uadd.add_argument('remote_name',
+ help="Git remote name used for this upstream, e.g. 'us'")
+ uadd.add_argument(
+ 'url', help='URL to use for this upstream, e.g. '
+ "'https://gitlab.denx.de/u-boot/u-boot.git'")
+ udel = upstream_subparsers.add_parser('delete')
+ udel.add_argument(
+ 'remote_name',
+ help="Git remote name used for this upstream, e.g. 'us'")
+ upstream_subparsers.add_parser('list')
+ udef = upstream_subparsers.add_parser('default')
+ udef.add_argument('-u', '--unset', action='store_true',
+ help='Unset the default upstream')
+ udef.add_argument('remote_name', nargs='?',
+ help="Git remote name used for this upstream, e.g. 'us'")
+ return upstream
+
+
+def setup_parser():
+ """Set up command-line parser
Returns:
- tuple containing:
- options: command line options
- args: command lin arguments
+        dict of ArgumentParser: parsers created, keyed by 'main', 'series',
+        'patchwork' and 'upstream'
"""
epilog = '''Create patches from commits in a branch, check them and email
them as specified by tags you place in the commits. Use -n to do a dry
run first.'''
- parser = argparse.ArgumentParser(epilog=epilog)
- parser.add_argument('-b', '--branch', type=str,
- help="Branch to process (by default, the current branch)")
- parser.add_argument('-c', '--count', dest='count', type=int,
- default=-1, help='Automatically create patches from top n commits')
- parser.add_argument('-e', '--end', type=int, default=0,
- help='Commits to skip at end of patch list')
- parser.add_argument('-D', '--debug', action='store_true',
+ parser = ErrorCatchingArgumentParser(epilog=epilog)
+ parser.add_argument(
+ '-D', '--debug', action='store_true',
help='Enabling debugging (provides a full traceback on error)')
+ parser.add_argument(
+ '-N', '--no-capture', action='store_true',
+ help='Disable capturing of console output in tests')
parser.add_argument('-p', '--project', default=project.detect_project(),
help="Project name; affects default option values and "
"aliases [default: %(default)s]")
parser.add_argument('-P', '--patchwork-url',
default='https://patchwork.ozlabs.org',
help='URL of patchwork server [default: %(default)s]')
- parser.add_argument('-s', '--start', dest='start', type=int,
- default=0, help='Commit to start creating patches from (0 = HEAD)')
+ parser.add_argument(
+ '-T', '--thread', action='store_true', dest='thread',
+ default=False, help='Create patches as a single thread')
parser.add_argument(
'-v', '--verbose', action='store_true', dest='verbose', default=False,
help='Verbose output of errors and warnings')
parser.add_argument(
+ '-X', '--test-preserve-dirs', action='store_true',
+ help='Preserve and display test-created directories')
+ parser.add_argument(
'-H', '--full-help', action='store_true', dest='full_help',
default=False, help='Display the README file')
subparsers = parser.add_subparsers(dest='cmd')
- send = subparsers.add_parser(
- 'send', help='Format, check and email patches (default command)')
- send.add_argument('-i', '--ignore-errors', action='store_true',
- dest='ignore_errors', default=False,
- help='Send patches email even if patch errors are found')
- send.add_argument('-l', '--limit-cc', dest='limit', type=int, default=None,
- help='Limit the cc list to LIMIT entries [default: %(default)s]')
- send.add_argument('-m', '--no-maintainers', action='store_false',
- dest='add_maintainers', default=True,
- help="Don't cc the file maintainers automatically")
- send.add_argument(
- '--get-maintainer-script', dest='get_maintainer_script', type=str,
- action='store',
- default=os.path.join(gitutil.get_top_level(), 'scripts',
- 'get_maintainer.pl') + ' --norolestats',
- help='File name of the get_maintainer.pl (or compatible) script.')
- send.add_argument('-n', '--dry-run', action='store_true', dest='dry_run',
- default=False, help="Do a dry run (create but don't email patches)")
- send.add_argument('-r', '--in-reply-to', type=str, action='store',
- help="Message ID that this series is in reply to")
- send.add_argument('-t', '--ignore-bad-tags', action='store_true',
- default=False,
- help='Ignore bad tags / aliases (default=warn)')
- send.add_argument('-T', '--thread', action='store_true', dest='thread',
- default=False, help='Create patches as a single thread')
- send.add_argument('--cc-cmd', dest='cc_cmd', type=str, action='store',
- default=None, help='Output cc list for patch file (used by git)')
- send.add_argument('--no-binary', action='store_true', dest='ignore_binary',
- default=False,
- help="Do not output contents of changes in binary files")
- send.add_argument('--no-check', action='store_false', dest='check_patch',
- default=True,
- help="Don't check for patch compliance")
- send.add_argument(
- '--tree', dest='check_patch_use_tree', default=False,
- action='store_true',
- help=("Set `tree` to True. If `tree` is False then we'll pass "
- "'--no-tree' to checkpatch (default: tree=%(default)s)"))
- send.add_argument('--no-tree', dest='check_patch_use_tree',
- action='store_false', help="Set `tree` to False")
- send.add_argument(
- '--no-tags', action='store_false', dest='process_tags', default=True,
- help="Don't process subject tags as aliases")
- send.add_argument('--no-signoff', action='store_false', dest='add_signoff',
- default=True, help="Don't add Signed-off-by to patches")
- send.add_argument('--smtp-server', type=str,
- help="Specify the SMTP server to 'git send-email'")
- send.add_argument('--keep-change-id', action='store_true',
- help='Preserve Change-Id tags in patches to send.')
-
- send.add_argument('patchfiles', nargs='*')
+ add_send_subparser(subparsers)
+ patchwork = add_patchwork_subparser(subparsers)
+ series = add_series_subparser(subparsers)
+ add_status_subparser(subparsers)
+ upstream = add_upstream_subparser(subparsers)
# Only add the 'test' action if the test data files are available.
if HAS_TESTS:
@@ -115,33 +457,60 @@ def parse_args():
test_parser.add_argument('testname', type=str, default=None, nargs='?',
help="Specify the test to run")
- status = subparsers.add_parser('status',
- help='Check status of patches in patchwork')
- status.add_argument('-C', '--show-comments', action='store_true',
- help='Show comments from each patch')
- status.add_argument(
- '-d', '--dest-branch', type=str,
- help='Name of branch to create with collected responses')
- status.add_argument('-f', '--force', action='store_true',
- help='Force overwriting an existing branch')
+ parsers = {
+ 'main': parser,
+ 'series': series,
+ 'patchwork': patchwork,
+ 'upstream': upstream,
+ }
+ return parsers
+
+
+def parse_args(argv=None, config_fname=None, parsers=None):
+ """Parse command line arguments from sys.argv[]
+
+ Args:
+ argv (str or None): Arguments to process, or None to use sys.argv[1:]
+ config_fname (str): Config file to read, or None for default, or False
+            for an empty config
+        parsers (dict of ArgumentParser or None): Parsers created by
+            setup_parser(), or None to create them
+
+    Returns:
+        argparse.Namespace: Parsed arguments
+ """
+ if not parsers:
+ parsers = setup_parser()
+ parser = parsers['main']
# Parse options twice: first to get the project and second to handle
# defaults properly (which depends on project)
# Use parse_known_args() in case 'cmd' is omitted
- argv = sys.argv[1:]
+ if not argv:
+ argv = sys.argv[1:]
+
args, rest = parser.parse_known_args(argv)
if hasattr(args, 'project'):
- settings.Setup(parser, args.project)
+ settings.Setup(parser, args.project, argv, config_fname)
args, rest = parser.parse_known_args(argv)
# If we have a command, it is safe to parse all arguments
if args.cmd:
args = parser.parse_args(argv)
- else:
+ elif not args.full_help:
# No command, so insert it after the known arguments and before the ones
# that presumably relate to the 'send' subcommand
nargs = len(rest)
argv = argv[:-nargs] + ['send'] + rest
args = parser.parse_args(argv)
+ # Resolve aliases
+ for full, aliases in ALIASES.items():
+ if args.cmd in aliases:
+ args.cmd = full
+ if 'subcmd' in args and args.subcmd in aliases:
+ args.subcmd = full
+ if args.cmd in ['series', 'upstream', 'patchwork'] and not args.subcmd:
+ parser.parse_args([args.cmd, '--help'])
+
return args
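
Taken together, setup_parser() and parse_args() mean the alias table resolves
short command forms before control.do_patman() ever sees them. A sketch
(assumes a configured checkout; config_fname=False selects an empty config,
per the docstring):

    parsers = setup_parser()
    args = parse_args(['ser', 'ls'], config_fname=False, parsers=parsers)
    assert args.cmd == 'series' and args.subcmd == 'ls'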
diff --git a/tools/patman/control.py b/tools/patman/control.py
index b8a45912058..3e09b16e87b 100644
--- a/tools/patman/control.py
+++ b/tools/patman/control.py
@@ -8,186 +8,47 @@ This module provides various functions called by the main program to implement
the features of patman.
"""
-import os
-import sys
+import re
+import traceback
+
+try:
+ from importlib import resources
+except ImportError:
+ # for Python 3.6
+ import importlib_resources as resources
-from patman import checkpatch
-from patman import patchstream
from u_boot_pylib import gitutil
from u_boot_pylib import terminal
+from u_boot_pylib import tools
+from u_boot_pylib import tout
+from patman import cseries
+from patman import cser_helper
+from patman import patchstream
+from patman.patchwork import Patchwork
+from patman import send
+from patman import settings
def setup():
"""Do required setup before doing anything"""
gitutil.setup()
+ alias_fname = gitutil.get_alias_file()
+ if alias_fname:
+ settings.ReadGitAliases(alias_fname)
-def prepare_patches(col, branch, count, start, end, ignore_binary, signoff,
- keep_change_id=False):
- """Figure out what patches to generate, then generate them
-
- The patch files are written to the current directory, e.g. 0001_xxx.patch
- 0002_yyy.patch
-
- Args:
- col (terminal.Color): Colour output object
- branch (str): Branch to create patches from (None = current)
- count (int): Number of patches to produce, or -1 to produce patches for
- the current branch back to the upstream commit
- start (int): Start partch to use (0=first / top of branch)
- end (int): End patch to use (0=last one in series, 1=one before that,
- etc.)
- ignore_binary (bool): Don't generate patches for binary files
- keep_change_id (bool): Preserve the Change-Id tag.
-
- Returns:
- Tuple:
- Series object for this series (set of patches)
- Filename of the cover letter as a string (None if none)
- patch_files: List of patch filenames, each a string, e.g.
- ['0001_xxx.patch', '0002_yyy.patch']
- """
- if count == -1:
- # Work out how many patches to send if we can
- count = (gitutil.count_commits_to_branch(branch) - start)
-
- if not count:
- str = 'No commits found to process - please use -c flag, or run:\n' \
- ' git branch --set-upstream-to remote/branch'
- sys.exit(col.build(col.RED, str))
-
- # Read the metadata from the commits
- to_do = count - end
- series = patchstream.get_metadata(branch, start, to_do)
- cover_fname, patch_files = gitutil.create_patches(
- branch, start, to_do, ignore_binary, series, signoff)
-
- # Fix up the patch files to our liking, and insert the cover letter
- patchstream.fix_patches(series, patch_files, keep_change_id,
- insert_base_commit=not cover_fname)
- if cover_fname and series.get('cover'):
- patchstream.insert_cover_letter(cover_fname, series, to_do)
- return series, cover_fname, patch_files
-
-
-def check_patches(series, patch_files, run_checkpatch, verbose, use_tree):
- """Run some checks on a set of patches
-
- This santiy-checks the patman tags like Series-version and runs the patches
- through checkpatch
-
- Args:
- series (Series): Series object for this series (set of patches)
- patch_files (list): List of patch filenames, each a string, e.g.
- ['0001_xxx.patch', '0002_yyy.patch']
- run_checkpatch (bool): True to run checkpatch.pl
- verbose (bool): True to print out every line of the checkpatch output as
- it is parsed
- use_tree (bool): If False we'll pass '--no-tree' to checkpatch.
-
- Returns:
- bool: True if the patches had no errors, False if they did
- """
- # Do a few checks on the series
- series.DoChecks()
-
- # Check the patches
- if run_checkpatch:
- ok = checkpatch.check_patches(verbose, patch_files, use_tree)
- else:
- ok = True
- return ok
-
-
-def email_patches(col, series, cover_fname, patch_files, process_tags, its_a_go,
- ignore_bad_tags, add_maintainers, get_maintainer_script, limit,
- dry_run, in_reply_to, thread, smtp_server):
- """Email patches to the recipients
-
- This emails out the patches and cover letter using 'git send-email'. Each
- patch is copied to recipients identified by the patch tag and output from
- the get_maintainer.pl script. The cover letter is copied to all recipients
- of any patch.
-
- To make this work a CC file is created holding the recipients for each patch
- and the cover letter. See the main program 'cc_cmd' for this logic.
-
- Args:
- col (terminal.Color): Colour output object
- series (Series): Series object for this series (set of patches)
- cover_fname (str): Filename of the cover letter as a string (None if
- none)
- patch_files (list): List of patch filenames, each a string, e.g.
- ['0001_xxx.patch', '0002_yyy.patch']
- process_tags (bool): True to process subject tags in each patch, e.g.
- for 'dm: spi: Add SPI support' this would be 'dm' and 'spi'. The
- tags are looked up in the configured sendemail.aliasesfile and also
- in ~/.patman (see README)
- its_a_go (bool): True if we are going to actually send the patches,
- False if the patches have errors and will not be sent unless
- @ignore_errors
- ignore_bad_tags (bool): True to just print a warning for unknown tags,
- False to halt with an error
- add_maintainers (bool): Run the get_maintainer.pl script for each patch
- get_maintainer_script (str): The script used to retrieve which
- maintainers to cc
- limit (int): Limit on the number of people that can be cc'd on a single
- patch or the cover letter (None if no limit)
- dry_run (bool): Don't actually email the patches, just print out what
- would be sent
- in_reply_to (str): If not None we'll pass this to git as --in-reply-to.
- Should be a message ID that this is in reply to.
- thread (bool): True to add --thread to git send-email (make all patches
- reply to cover-letter or first patch in series)
- smtp_server (str): SMTP server to use to send patches (None for default)
- """
- cc_file = series.MakeCcFile(process_tags, cover_fname, not ignore_bad_tags,
- add_maintainers, limit, get_maintainer_script)
-
- # Email the patches out (giving the user time to check / cancel)
- cmd = ''
- if its_a_go:
- cmd = gitutil.email_patches(
- series, cover_fname, patch_files, dry_run, not ignore_bad_tags,
- cc_file, in_reply_to=in_reply_to, thread=thread,
- smtp_server=smtp_server)
- else:
- print(col.build(col.RED, "Not sending emails due to errors/warnings"))
-
- # For a dry run, just show our actions as a sanity check
- if dry_run:
- series.ShowActions(patch_files, cmd, process_tags)
- if not its_a_go:
- print(col.build(col.RED, "Email would not be sent"))
-
- os.remove(cc_file)
-
-def send(args):
+def do_send(args):
"""Create, check and send patches by email
Args:
args (argparse.Namespace): Arguments to patman
"""
setup()
- col = terminal.Color()
- series, cover_fname, patch_files = prepare_patches(
- col, args.branch, args.count, args.start, args.end,
- args.ignore_binary, args.add_signoff,
- keep_change_id=args.keep_change_id)
- ok = check_patches(series, patch_files, args.check_patch,
- args.verbose, args.check_patch_use_tree)
-
- ok = ok and gitutil.check_suppress_cc_config()
-
- its_a_go = ok or args.ignore_errors
- email_patches(
- col, series, cover_fname, patch_files, args.process_tags,
- its_a_go, args.ignore_bad_tags, args.add_maintainers,
- args.get_maintainer_script, args.limit, args.dry_run,
- args.in_reply_to, args.thread, args.smtp_server)
+ send.send(args)
+
def patchwork_status(branch, count, start, end, dest_branch, force,
- show_comments, url):
+ show_comments, url, single_thread=False):
"""Check the status of patches in patchwork
This finds the series in patchwork using the Series-link tag, checks for new
@@ -212,9 +73,11 @@ def patchwork_status(branch, count, start, end, dest_branch, force,
Raises:
ValueError: if the branch has no Series-link value
"""
+ if not branch:
+ branch = gitutil.get_branch()
if count == -1:
# Work out how many patches to send if we can
- count = (gitutil.count_commits_to_branch(branch) - start)
+ count = gitutil.count_commits_to_branch(branch) - start
series = patchstream.get_metadata(branch, start, count - end)
warnings = 0
@@ -231,17 +94,240 @@ def patchwork_status(branch, count, start, end, dest_branch, force,
if not links:
raise ValueError("Branch has no Series-links value")
- # Find the link without a version number (we don't support versions yet)
- found = [link for link in links.split() if not ':' in link]
- if not found:
- raise ValueError('Series-links has no current version (without :)')
+ _, version = cser_helper.split_name_version(branch)
+ link = series.get_link_for_version(version, links)
+ if not link:
+        raise ValueError(f'Series-links has no link for v{version}')
+ tout.debug(f"Link '{link}")
# Allow the series to override the URL
if 'patchwork_url' in series:
url = series.patchwork_url
+ pwork = Patchwork(url, single_thread=single_thread)
# Import this here to avoid failing on other commands if the dependencies
# are not present
from patman import status
- status.check_patchwork_status(series, found[0], branch, dest_branch, force,
- show_comments, url)
+ status.check_and_show_status(series, link, branch, dest_branch, force,
+ show_comments, False, pwork)
+
+
+def do_series(args, test_db=None, pwork=None, cser=None):
+ """Process a series subcommand
+
+ Args:
+ args (Namespace): Arguments to process
+ test_db (str or None): Directory containing the test database, None to
+ use the normal one
+ pwork (Patchwork): Patchwork object to use, None to create one if
+ needed
+ cser (Cseries): Cseries object to use, None to create one
+ """
+ if not cser:
+ cser = cseries.Cseries(test_db)
+ needs_patchwork = [
+ 'autolink', 'autolink-all', 'open', 'send', 'status', 'gather',
+ 'gather-all'
+ ]
+ try:
+ cser.open_database()
+ if args.subcmd in needs_patchwork:
+ if not pwork:
+ pwork = Patchwork(args.patchwork_url)
+ proj = cser.project_get()
+ if not proj:
+ raise ValueError(
+ "Please set project ID with 'patman patchwork set-project'")
+ _, proj_id, link_name = cser.project_get()
+ pwork.project_set(proj_id, link_name)
+ elif pwork and pwork is not True:
+ raise ValueError(
+ f"Internal error: command '{args.subcmd}' should not have patchwork")
+ if args.subcmd == 'add':
+ cser.add(args.series, args.desc, mark=args.mark,
+ allow_unmarked=args.allow_unmarked, end=args.upstream,
+ dry_run=args.dry_run)
+ elif args.subcmd == 'archive':
+ cser.archive(args.series)
+ elif args.subcmd == 'autolink':
+ cser.link_auto(pwork, args.series, args.version, args.update,
+ args.autolink_wait)
+ elif args.subcmd == 'autolink-all':
+ cser.link_auto_all(pwork, update_commit=args.update,
+ link_all_versions=args.link_all_versions,
+ replace_existing=args.replace_existing,
+ dry_run=args.dry_run, show_summary=True)
+ elif args.subcmd == 'dec':
+ cser.decrement(args.series, args.dry_run)
+ elif args.subcmd == 'gather':
+ cser.gather(pwork, args.series, args.version, args.show_comments,
+ args.show_cover_comments, args.gather_tags,
+ dry_run=args.dry_run)
+ elif args.subcmd == 'gather-all':
+ cser.gather_all(
+ pwork, args.show_comments, args.show_cover_comments,
+ args.gather_all_versions, args.gather_tags, args.dry_run)
+ elif args.subcmd == 'get-link':
+ link = cser.link_get(args.series, args.version)
+ print(link)
+ elif args.subcmd == 'inc':
+ cser.increment(args.series, args.dry_run)
+ elif args.subcmd == 'ls':
+ cser.series_list()
+ elif args.subcmd == 'open':
+ cser.open(pwork, args.series, args.version)
+ elif args.subcmd == 'mark':
+ cser.mark(args.series, args.allow_marked, dry_run=args.dry_run)
+ elif args.subcmd == 'patches':
+ cser.list_patches(args.series, args.version, args.commit,
+ args.patch)
+ elif args.subcmd == 'progress':
+ cser.progress(args.series, args.show_all_versions,
+ args.list_patches)
+ elif args.subcmd == 'rm':
+ cser.remove(args.series, dry_run=args.dry_run)
+ elif args.subcmd == 'rm-version':
+ cser.version_remove(args.series, args.version, dry_run=args.dry_run)
+ elif args.subcmd == 'rename':
+ cser.rename(args.series, args.new_name, dry_run=args.dry_run)
+ elif args.subcmd == 'scan':
+ cser.scan(args.series, mark=args.mark,
+ allow_unmarked=args.allow_unmarked, end=args.upstream,
+ dry_run=args.dry_run)
+ elif args.subcmd == 'send':
+ cser.send(pwork, args.series, args.autolink, args.autolink_wait,
+ args)
+ elif args.subcmd == 'set-link':
+ cser.link_set(args.series, args.version, args.link, args.update)
+ elif args.subcmd == 'status':
+ cser.status(pwork, args.series, args.version, args.show_comments,
+ args.show_cover_comments)
+ elif args.subcmd == 'summary':
+ cser.summary(args.series)
+ elif args.subcmd == 'unarchive':
+ cser.unarchive(args.series)
+ elif args.subcmd == 'unmark':
+ cser.unmark(args.series, args.allow_unmarked, dry_run=args.dry_run)
+ elif args.subcmd == 'version-change':
+ cser.version_change(args.series, args.version, args.new_version,
+ dry_run=args.dry_run)
+ else:
+ raise ValueError(f"Unknown series subcommand '{args.subcmd}'")
+ finally:
+ cser.close_database()
+
+
+def upstream(args, test_db=None):
+ """Process an 'upstream' subcommand
+
+ Args:
+ args (Namespace): Arguments to process
+ test_db (str or None): Directory containing the test database, None to
+ use the normal one
+ """
+ cser = cseries.Cseries(test_db)
+ try:
+ cser.open_database()
+ if args.subcmd == 'add':
+ cser.upstream_add(args.remote_name, args.url)
+ elif args.subcmd == 'default':
+ if args.unset:
+ cser.upstream_set_default(None)
+ elif args.remote_name:
+ cser.upstream_set_default(args.remote_name)
+ else:
+ result = cser.upstream_get_default()
+ print(result if result else 'unset')
+ elif args.subcmd == 'delete':
+ cser.upstream_delete(args.remote_name)
+ elif args.subcmd == 'list':
+ cser.upstream_list()
+ else:
+ raise ValueError(f"Unknown upstream subcommand '{args.subcmd}'")
+ finally:
+ cser.close_database()
+
+
+def patchwork(args, test_db=None, pwork=None):
+ """Process a 'patchwork' subcommand
+
+    Args:
+ args (Namespace): Arguments to process
+ test_db (str or None): Directory containing the test database, None to
+ use the normal one
+ pwork (Patchwork): Patchwork object to use
+ """
+ cser = cseries.Cseries(test_db)
+ try:
+ cser.open_database()
+ if args.subcmd == 'set-project':
+ if not pwork:
+ pwork = Patchwork(args.patchwork_url)
+ cser.project_set(pwork, args.project_name)
+ elif args.subcmd == 'get-project':
+ info = cser.project_get()
+ if not info:
+ raise ValueError("Project has not been set; use 'patman patchwork set-project'")
+ name, pwid, link_name = info
+ print(f"Project '{name}' patchwork-ID {pwid} link-name {link_name}")
+ else:
+ raise ValueError(f"Unknown patchwork subcommand '{args.subcmd}'")
+ finally:
+ cser.close_database()
+
+
+def do_patman(args, test_db=None, pwork=None, cser=None):
+ """Process a patman command
+
+ Args:
+ args (Namespace): Arguments to process
+ test_db (str or None): Directory containing the test database, None to
+ use the normal one
+ pwork (Patchwork): Patchwork object to use, or None to create one
+ cser (Cseries): Cseries object to use when executing the command,
+ or None to create one
+ """
+ if args.full_help:
+ with resources.path('patman', 'README.rst') as readme:
+ tools.print_full_help(str(readme))
+ return 0
+ if args.cmd == 'send':
+ # Called from git with a patch filename as argument
+ # Printout a list of additional CC recipients for this patch
+ if args.cc_cmd:
+ re_line = re.compile(r'(\S*) (.*)')
+ with open(args.cc_cmd, 'r', encoding='utf-8') as inf:
+ for line in inf.readlines():
+ match = re_line.match(line)
+ if match and match.group(1) == args.patchfiles[0]:
+ for cca in match.group(2).split('\0'):
+ cca = cca.strip()
+ if cca:
+ print(cca)
+ else:
+ # If we are not processing tags, no need to warning about bad ones
+ if not args.process_tags:
+ args.ignore_bad_tags = True
+ do_send(args)
+ return 0
+
+ ret_code = 0
+ try:
+ # Check status of patches in patchwork
+ if args.cmd == 'status':
+ patchwork_status(args.branch, args.count, args.start, args.end,
+ args.dest_branch, args.force, args.show_comments,
+ args.patchwork_url)
+ elif args.cmd == 'series':
+ do_series(args, test_db, pwork, cser)
+ elif args.cmd == 'upstream':
+ upstream(args, test_db)
+ elif args.cmd == 'patchwork':
+ patchwork(args, test_db, pwork)
+ except Exception as exc:
+ terminal.tprint(f'patman: {type(exc).__name__}: {exc}',
+ colour=terminal.Color.RED)
+ if args.debug:
+ print()
+ traceback.print_exc()
+ ret_code = 1
+ return ret_code
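
do_patman() is now the single dispatch point: --full-help and 'send' are
handled inline, while the database-backed commands go through their helpers,
with any exception converted to a red message and exit code 1. A hypothetical
smoke test (the Namespace fields and test_db path are assumptions):

    from argparse import Namespace

    from patman import control

    args = Namespace(cmd='upstream', subcmd='list', full_help=False,
                     debug=False)
    exit_code = control.do_patman(args, test_db='/tmp/patman-test')
    assert exit_code in (0, 1)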
diff --git a/tools/patman/cser_helper.py b/tools/patman/cser_helper.py
new file mode 100644
index 00000000000..2841fcd9c20
--- /dev/null
+++ b/tools/patman/cser_helper.py
@@ -0,0 +1,1524 @@
+# SPDX-License-Identifier: GPL-2.0+
+#
+# Copyright 2025 Simon Glass <sjg@chromium.org>
+#
+"""Helper functions for handling the 'series' subcommand
+"""
+
+import asyncio
+from collections import OrderedDict, defaultdict, namedtuple
+from datetime import datetime
+import hashlib
+import os
+import re
+import sys
+import time
+from types import SimpleNamespace
+
+import aiohttp
+import pygit2
+from pygit2.enums import CheckoutStrategy
+
+from u_boot_pylib import gitutil
+from u_boot_pylib import terminal
+from u_boot_pylib import tout
+
+from patman import patchstream
+from patman.database import Database, Pcommit, SerVer
+from patman import patchwork
+from patman.series import Series
+from patman import status
+
+
+# Tag to use for Change IDs
+CHANGE_ID_TAG = 'Change-Id'
+
+# Length of hash to display
+HASH_LEN = 10
+
+# Shorter version of some states, to save horizontal space
+SHORTEN_STATE = {
+ 'handled-elsewhere': 'elsewhere',
+ 'awaiting-upstream': 'awaiting',
+ 'not-applicable': 'n/a',
+ 'changes-requested': 'changes',
+}
+
+# Summary info returned from Cseries.link_auto_all()
+AUTOLINK = namedtuple('autolink', 'name,version,link,desc,result')
+
+
+def oid(oid_val):
+ """Convert a hash string into a shortened hash
+
+ The number of hex digits git uses for showing hashes depends on the size of
+ the repo. For the purposes of showing hashes to the user in lists, we use a
+ fixed value for now
+
+ Args:
+        oid_val (str or pygit2.Oid): Hash value to shorten
+
+ Return:
+ str: Shortened hash
+ """
+ return str(oid_val)[:HASH_LEN]
+
+
+def split_name_version(in_name):
+ """Split a branch name into its series name and its version
+
+ For example:
+        'series' returns ('series', None)
+        'series3' returns ('series', 3)
+
+    Args:
+ in_name (str): Name to parse
+
+ Return:
+ tuple:
+ str: series name
+ int: series version, or None if there is none in in_name
+ """
+ m_ver = re.match(r'([^0-9]*)(\d*)', in_name)
+ version = None
+ if m_ver:
+ name = m_ver.group(1)
+ if m_ver.group(2):
+ version = int(m_ver.group(2))
+ else:
+ name = in_name
+ return name, version
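+
+
+# Illustrative checks of split_name_version() (assumed, matching the regex
+# above); a bare number yields an empty name, which
+# _parse_series_and_version() rejects later:
+#
+#     assert split_name_version('fix') == ('fix', None)
+#     assert split_name_version('fix3') == ('fix', 3)
+#     assert split_name_version('123') == ('', 123)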
+
+
+class CseriesHelper:
+ """Helper functions for Cseries
+
+ This class handles database read/write as well as operations in a git
+ directory to update series information.
+ """
+ def __init__(self, topdir=None, colour=terminal.COLOR_IF_TERMINAL):
+ """Set up a new CseriesHelper
+
+ Args:
+ topdir (str): Top-level directory of the repo
+ colour (terminal.enum): Whether to enable ANSI colour or not
+
+ Properties:
+ gitdir (str): Git directory (typically topdir + '/.git')
+ db (Database): Database handler
+ col (terminal.Colour): Colour object
+ _fake_time (float): Holds the current fake time for tests, in
+ seconds
+ _fake_sleep (func): Function provided by a test; called to fake a
+ 'time.sleep()' call and take whatever action it wants to take.
+            The only argument is the (float) time to sleep for; it returns
+ nothing
+ loop (asyncio event loop): Loop used for Patchwork operations
+ """
+ self.topdir = topdir
+ self.gitdir = None
+ self.db = None
+ self.col = terminal.Color(colour)
+ self._fake_time = None
+ self._fake_sleep = None
+ self.fake_now = None
+ self.loop = asyncio.get_event_loop()
+
+ def open_database(self):
+ """Open the database ready for use"""
+ if not self.topdir:
+ self.topdir = gitutil.get_top_level()
+ if not self.topdir:
+ raise ValueError('No git repo detected in current directory')
+ self.gitdir = os.path.join(self.topdir, '.git')
+ fname = f'{self.topdir}/.patman.db'
+
+ # For the first instance, start it up with the expected schema
+ self.db, is_new = Database.get_instance(fname)
+ if is_new:
+ self.db.start()
+ else:
+ # If a previous test has already checked the schema, just open it
+ self.db.open_it()
+
+ def close_database(self):
+ """Close the database"""
+ if self.db:
+ self.db.close()
+
+ def commit(self):
+ """Commit changes to the database"""
+ self.db.commit()
+
+ def rollback(self):
+ """Roll back changes to the database"""
+ self.db.rollback()
+
+ def set_fake_time(self, fake_sleep):
+ """Setup the fake timer
+
+ Args:
+ fake_sleep (func(float)): Function to call to fake a sleep
+ """
+ self._fake_time = 0
+ self._fake_sleep = fake_sleep
+
+ def inc_fake_time(self, inc_s):
+ """Increment the fake time
+
+ Args:
+ inc_s (float): Amount to increment the fake time by
+ """
+ self._fake_time += inc_s
+
+ def get_time(self):
+ """Get the current time, fake or real
+
+ This function should always be used to read the time so that faking the
+ time works correctly in tests.
+
+ Return:
+ float: Fake time, if time is being faked, else real time
+ """
+ if self._fake_time is not None:
+ return self._fake_time
+ return time.monotonic()
+
+ def sleep(self, time_s):
+ """Sleep for a while
+
+ This function should always be used to sleep so that faking the time
+ works correctly in tests.
+
+ Args:
+ time_s (float): Amount of seconds to sleep for
+ """
+ print(f'Sleeping for {time_s} seconds')
+ if self._fake_time is not None:
+ self._fake_sleep(time_s)
+ else:
+ time.sleep(time_s)
+
+ def get_now(self):
+ """Get the time now
+
+ This function should always be used to read the datetime, so that
+ faking the time works correctly in tests
+
+ Return:
+ DateTime object
+ """
+ if self.fake_now:
+ return self.fake_now
+ return datetime.now()
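+
+    # A sketch of how tests can drive the fake clock (assumed usage):
+    #
+    #     helper = CseriesHelper()
+    #     helper.set_fake_time(helper.inc_fake_time)  # sleep() advances it
+    #     start = helper.get_time()   # 0.0 once faking is enabled
+    #     helper.sleep(30)
+    #     assert helper.get_time() - start == 30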
+
+ def get_ser_ver_list(self):
+ """Get a list of patchwork entries from the database
+
+ Return:
+ list of SER_VER
+ """
+ return self.db.ser_ver_get_list()
+
+ def get_ser_ver_dict(self):
+ """Get a dict of patchwork entries from the database
+
+        Return: dict containing all records:
+ key (int): ser_ver id
+ value (SER_VER): Information about one ser_ver record
+ """
+ svlist = self.get_ser_ver_list()
+ svdict = {}
+ for sver in svlist:
+ svdict[sver.idnum] = sver
+ return svdict
+
+ def get_upstream_dict(self):
+ """Get a list of upstream entries from the database
+
+ Return:
+ OrderedDict:
+ key (str): upstream name
+ value (str): url
+ """
+ return self.db.upstream_get_dict()
+
+ def get_pcommit_dict(self, find_svid=None):
+ """Get a dict of pcommits entries from the database
+
+ Args:
+ find_svid (int): If not None, finds the records associated with a
+ particular series and version
+
+ Return:
+ OrderedDict:
+ key (int): record ID if find_svid is None, else seq
+ value (PCOMMIT): record data
+ """
+ pcdict = OrderedDict()
+ for rec in self.db.pcommit_get_list(find_svid):
+ if find_svid is not None:
+ pcdict[rec.seq] = rec
+ else:
+ pcdict[rec.idnum] = rec
+ return pcdict
+
+ def _get_series_info(self, idnum):
+ """Get information for a series from the database
+
+ Args:
+ idnum (int): Series ID to look up
+
+ Return: tuple:
+ str: Series name
+ str: Series description
+
+ Raises:
+ ValueError: Series is not found
+ """
+ return self.db.series_get_info(idnum)
+
+ def prep_series(self, name, end=None):
+ """Prepare to work with a series
+
+ Args:
+ name (str): Branch name with version appended, e.g. 'fix2'
+ end (str or None): Commit to end at, e.g. 'my_branch~16'. Only
+ commits up to that are processed. None to process commits up to
+ the upstream branch
+
+ Return: tuple:
+ str: Series name, e.g. 'fix'
+ Series: Collected series information, including name
+ int: Version number, e.g. 2
+ str: Message to show
+ """
+ ser, version = self._parse_series_and_version(name, None)
+ if not name:
+ name = self._get_branch_name(ser.name, version)
+
+ # First check we have a branch with this name
+ if not gitutil.check_branch(name, git_dir=self.gitdir):
+ raise ValueError(f"No branch named '{name}'")
+
+ count = gitutil.count_commits_to_branch(name, self.gitdir, end)
+ if not count:
+ raise ValueError('Cannot detect branch automatically: '
+ 'Perhaps use -U <upstream-commit> ?')
+
+ series = patchstream.get_metadata(name, 0, count, git_dir=self.gitdir)
+ self._copy_db_fields_to(series, ser)
+ msg = None
+ if end:
+ repo = pygit2.init_repository(self.gitdir)
+ target = repo.revparse_single(end)
+ first_line = target.message.splitlines()[0]
+ msg = f'Ending before {oid(target.id)} {first_line}'
+
+ return name, series, version, msg
+
+ def _copy_db_fields_to(self, series, in_series):
+ """Copy over fields used by Cseries from one series to another
+
+        This copies desc, idnum and name
+
+ Args:
+ series (Series): Series to copy to
+ in_series (Series): Series to copy from
+ """
+ series.desc = in_series.desc
+ series.idnum = in_series.idnum
+ series.name = in_series.name
+
+ def _handle_mark(self, branch_name, in_series, version, mark,
+ allow_unmarked, force_version, dry_run):
+ """Handle marking a series, checking for unmarked commits, etc.
+
+ Args:
+ branch_name (str): Name of branch to sync, or None for current one
+ in_series (Series): Series object
+ version (int): branch version, e.g. 2 for 'mychange2'
+ mark (bool): True to mark each commit with a change ID
+            allow_unmarked (bool): True to not require each commit to be marked
+ force_version (bool): True if ignore a Series-version tag that
+ doesn't match its branch name
+ dry_run (bool): True to do a dry run
+
+ Returns:
+ Series: New series object, if the series was marked;
+ copy_db_fields_to() is used to copy fields over
+
+ Raises:
+ ValueError: Series being unmarked when it should be marked, etc.
+ """
+ series = in_series
+ if 'version' in series and int(series.version) != version:
+ msg = (f"Series name '{branch_name}' suggests version {version} "
+ f"but Series-version tag indicates {series.version}")
+ if not force_version:
+ raise ValueError(msg + ' (see --force-version)')
+
+ tout.warning(msg)
+ tout.warning(f'Updating Series-version tag to version {version}')
+ self.update_series(branch_name, series, int(series.version),
+ new_name=None, dry_run=dry_run,
+ add_vers=version)
+
+ # Collect the commits again, as the hashes have changed
+ series = patchstream.get_metadata(branch_name, 0,
+ len(series.commits),
+ git_dir=self.gitdir)
+ self._copy_db_fields_to(series, in_series)
+
+ if mark:
+ add_oid = self._mark_series(branch_name, series, dry_run=dry_run)
+
+ # Collect the commits again, as the hashes have changed
+ series = patchstream.get_metadata(add_oid, 0, len(series.commits),
+ git_dir=self.gitdir)
+ self._copy_db_fields_to(series, in_series)
+
+ bad_count = 0
+ for commit in series.commits:
+ if not commit.change_id:
+ bad_count += 1
+ if bad_count and not allow_unmarked:
+ raise ValueError(
+ f'{bad_count} commit(s) are unmarked; please use -m or -M')
+
+ return series
+
+ def _add_series_commits(self, series, svid):
+ """Add a commits from a series into the database
+
+ Args:
+ series (Series): Series containing commits to add
+ svid (int): ser_ver-table ID to use for each commit
+ """
+ to_add = [Pcommit(None, seq, commit.subject, None, commit.change_id,
+ None, None, None)
+ for seq, commit in enumerate(series.commits)]
+
+ self.db.pcommit_add_list(svid, to_add)
+
+ def get_series_by_name(self, name, include_archived=False):
+ """Get a Series object from the database by name
+
+ Args:
+ name (str): Name of series to get
+            include_archived (bool): True to also search archived series
+
+ Return:
+ Series: Object containing series info, or None if none
+ """
+ idnum = self.db.series_find_by_name(name, include_archived)
+ if not idnum:
+ return None
+ name, desc = self.db.series_get_info(idnum)
+
+ return Series.from_fields(idnum, name, desc)
+
+ def _get_branch_name(self, name, version):
+ """Get the branch name for a particular version
+
+ Args:
+ name (str): Base name of branch
+ version (int): Version number to use
+ """
+ return name + (f'{version}' if version > 1 else '')
+
+ def _ensure_version(self, ser, version):
+ """Ensure that a version exists in a series
+
+ Args:
+ ser (Series): Series information, with idnum and name used here
+ version (int): Version to check
+
+ Returns:
+ list of int: List of versions
+ """
+ versions = self._get_version_list(ser.idnum)
+ if version not in versions:
+ raise ValueError(
+ f"Series '{ser.name}' does not have a version {version}")
+ return versions
+
+ def _set_link(self, ser_id, name, version, link, update_commit,
+ dry_run=False):
+ """Add / update a series-links link for a series
+
+ Args:
+ ser_id (int): Series ID number
+ name (str): Series name (used to find the branch)
+ version (int): Version number (used to update the database)
+ link (str): Patchwork link-string for the series
+ update_commit (bool): True to update the current commit with the
+ link
+ dry_run (bool): True to do a dry run
+
+ Return:
+            bool: True if the database was updated, False if the ser_id or
+                version was not found
+ """
+ if update_commit:
+ branch_name = self._get_branch_name(name, version)
+ _, ser, max_vers, _ = self.prep_series(branch_name)
+ self.update_series(branch_name, ser, max_vers, add_vers=version,
+ dry_run=dry_run, add_link=link)
+ if link is None:
+ link = ''
+ updated = 1 if self.db.ser_ver_set_link(ser_id, version, link) else 0
+ if dry_run:
+ self.rollback()
+ else:
+ self.commit()
+
+ return updated
+
+ def _get_autolink_dict(self, sdict, link_all_versions):
+ """Get a dict of ser_vers to fetch, along with their patchwork links
+
+ Note that this returns items that already have links, as well as those
+ without links
+
+ Args:
+ sdict:
+ key: series ID
+ value: Series with idnum, name and desc filled out
+ link_all_versions (bool): True to sync all versions of a series,
+ False to sync only the latest version
+
+        Return:
+            dict:
+                key (int): svid
+                value (tuple):
+                    int: series ID
+                    str: series name
+                    int: series version
+                    str: patchwork link for the series, or None if none
+                    Series: collected series information
+ """
+ svdict = self.get_ser_ver_dict()
+ to_fetch = {}
+
+ if link_all_versions:
+ for svinfo in self.get_ser_ver_list():
+ ser = sdict[svinfo.series_id]
+
+ pwc = self.get_pcommit_dict(svinfo.idnum)
+ count = len(pwc)
+ branch = self._join_name_version(ser.name, svinfo.version)
+ series = patchstream.get_metadata(branch, 0, count,
+ git_dir=self.gitdir)
+ self._copy_db_fields_to(series, ser)
+
+ to_fetch[svinfo.idnum] = (svinfo.series_id, series.name,
+ svinfo.version, svinfo.link, series)
+ else:
+ # Find the maximum version for each series
+ max_vers = self._series_all_max_versions()
+
+ # Get a list of links to fetch
+ for svid, ser_id, version in max_vers:
+ svinfo = svdict[svid]
+ ser = sdict[ser_id]
+
+ pwc = self.get_pcommit_dict(svid)
+ count = len(pwc)
+ branch = self._join_name_version(ser.name, version)
+ series = patchstream.get_metadata(branch, 0, count,
+ git_dir=self.gitdir)
+ self._copy_db_fields_to(series, ser)
+
+ to_fetch[svid] = (ser_id, series.name, version, svinfo.link,
+ series)
+ return to_fetch
+
+ def _get_version_list(self, idnum):
+ """Get a list of the versions available for a series
+
+ Args:
+ idnum (int): ID of series to look up
+
+ Return:
+            list of int: List of versions
+ """
+ if idnum is None:
+ raise ValueError('Unknown series idnum')
+ return self.db.series_get_version_list(idnum)
+
+ def _join_name_version(self, in_name, version):
+ """Convert a series name plus a version into a branch name
+
+ For example:
+ ('series', 1) returns 'series'
+ ('series', 3) returns 'series3'
+
+ Args:
+ in_name (str): Series name
+ version (int): Version number
+
+ Return:
+ str: associated branch name
+ """
+ if version == 1:
+ return in_name
+ return f'{in_name}{version}'
+
+ def _parse_series(self, name, include_archived=False):
+ """Parse the name of a series, or detect it from the current branch
+
+ Args:
+ name (str or None): name of series
+            include_archived (bool): True to include archived series in the
+                search
+
+ Return:
+ Series: New object with the name set; idnum is also set if the
+ series exists in the database
+ """
+ if not name:
+ name = gitutil.get_branch(self.gitdir)
+ name, _ = split_name_version(name)
+ ser = self.get_series_by_name(name, include_archived)
+ if not ser:
+ ser = Series()
+ ser.name = name
+ return ser
+
+ def _parse_series_and_version(self, in_name, in_version):
+ """Parse name and version of a series, or detect from current branch
+
+ Figures out the name from in_name, or if that is None, from the current
+ branch.
+
+ Uses the version in_version, or if that is None, uses the int at the
+ end of the name (e.g. 'series' is version 1, 'series4' is version 4)
+
+ Args:
+ in_name (str or None): name of series
+ in_version (str or None): version of series
+
+ Return:
+ tuple:
+ Series: New object with the name set; idnum is also set if the
+ series exists in the database
+ int: Series version-number detected from the name
+ (e.g. 'fred' is version 1, 'fred2' is version 2)
+ """
+ name = in_name
+ if not name:
+ name = gitutil.get_branch(self.gitdir)
+ if not name:
+ raise ValueError('No branch detected: please use -s <series>')
+ name, version = split_name_version(name)
+ if not name:
+ raise ValueError(f"Series name '{in_name}' cannot be a number, "
+ f"use '<name><version>'")
+ if in_version:
+ if version and version != in_version:
+ tout.warning(
+ f"Version mismatch: -V has {in_version} but branch name "
+ f'indicates {version}')
+ version = in_version
+ if not version:
+ version = 1
+ if version > 99:
+ raise ValueError(f"Version {version} exceeds 99")
+ ser = self.get_series_by_name(name)
+ if not ser:
+ ser = Series()
+ ser.name = name
+ return ser, version
+
+ def _series_get_version_stats(self, idnum, vers):
+ """Get the stats for a series
+
+ Args:
+ idnum (int): ID number of series to process
+ vers (int): Version number to process
+
+ Return:
+ tuple:
+ str: Status string, '<accepted>/<count>'
+ OrderedDict:
+                key (int): seq
+ value (PCOMMIT): record data
+ """
+ svid, link = self._get_series_svid_link(idnum, vers)
+ pwc = self.get_pcommit_dict(svid)
+ count = len(pwc.values())
+ if link:
+ accepted = 0
+ for pcm in pwc.values():
+ accepted += pcm.state == 'accepted'
+ else:
+ accepted = '-'
+ return f'{accepted}/{count}', pwc
+
+ def get_series_svid(self, series_id, version):
+ """Get the patchwork ID of a series version
+
+ Args:
+ series_id (int): id of the series to look up
+ version (int): version number to look up
+
+ Return:
+            int: ser_ver record ID (svid)
+
+ Raises:
+ ValueError: No matching series found
+ """
+ return self._get_series_svid_link(series_id, version)[0]
+
+ def _get_series_svid_link(self, series_id, version):
+ """Get the patchwork ID of a series version
+
+ Args:
+ series_id (int): series ID to look up
+ version (int): version number to look up
+
+ Return:
+ tuple:
+ int: record id
+ str: link
+ """
+ recs = self.get_ser_ver(series_id, version)
+ return recs.idnum, recs.link
+
+ def get_ser_ver(self, series_id, version):
+ """Get the patchwork details for a series version
+
+ Args:
+ series_id (int): series ID to look up
+ version (int): version number to look up
+
+ Return:
+ SER_VER: Requested information
+
+ Raises:
+ ValueError: There is no matching idnum/version
+ """
+ return self.db.ser_ver_get_for_series(series_id, version)
+
+ def _prepare_process(self, name, count, new_name=None, quiet=False):
+ """Get ready to process all commits in a branch
+
+ Args:
+ name (str): Name of the branch to process
+ count (int): Number of commits
+ new_name (str or None): New name, if a new branch is to be created
+ quiet (bool): True to avoid output (used for testing)
+
+        Return: tuple:
+            pygit2.repo: Repo to use
+            pygit2.Reference: HEAD, now detached at the upstream commit
+            pygit2.Branch: Original branch, for later use
+            str: (Possibly new) name of branch to process
+            pygit2.Commit: Upstream commit, onto which commits should be
+                added
+            list of Commit: commits to process, in order
+            pygit2.Reference: Original head before processing started, or
+                None if it was already on the branch being processed
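+
+        The expected call sequence (see _filter_commits() for an example) is
+        _prepare_process(), then _pick_commit() / _finish_commit() for each
+        commit, then _finish_process()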
+ """
+ upstream_guess = gitutil.get_upstream(self.gitdir, name)[0]
+
+ tout.debug(f"_process_series name '{name}' new_name '{new_name}' "
+ f"upstream_guess '{upstream_guess}'")
+ dirty = gitutil.check_dirty(self.gitdir, self.topdir)
+ if dirty:
+ raise ValueError(
+ f"Modified files exist: use 'git status' to check: "
+ f'{dirty[:5]}')
+ repo = pygit2.init_repository(self.gitdir)
+
+ commit = None
+ upstream_name = None
+ if upstream_guess:
+ try:
+ upstream = repo.lookup_reference(upstream_guess)
+ upstream_name = upstream.name
+ commit = upstream.peel(pygit2.enums.ObjectType.COMMIT)
+ except KeyError:
+ pass
+ except pygit2.repository.InvalidSpecError as exc:
+ print(f"Error '{exc}'")
+ if not upstream_name:
+ upstream_name = f'{name}~{count}'
+ commit = repo.revparse_single(upstream_name)
+
+ branch = repo.lookup_branch(name)
+ if not quiet:
+ tout.info(
+ f'Checking out upstream commit {upstream_name}: '
+ f'{oid(commit.oid)}')
+
+        old_head = repo.head
+        if old_head.shorthand == name:
+            old_head = None
+
+ if new_name:
+ name = new_name
+ repo.set_head(commit.oid)
+
+ commits = []
+ cmt = repo.get(branch.target)
+ for _ in range(count):
+ commits.append(cmt)
+ cmt = cmt.parents[0]
+
+ return (repo, repo.head, branch, name, commit, list(reversed(commits)),
+ old_head)
+
+ def _pick_commit(self, repo, cmt):
+ """Apply a commit to the source tree, without committing it
+
+ _prepare_process() must be called before starting to pick commits
+
+ This function must be called before _finish_commit()
+
+        Note that this uses a cherry-pick, creating a new tree_id each
+        time, so it can make source-code changes
+
+ Args:
+ repo (pygit2.repo): Repo to use
+ cmt (Commit): Commit to apply
+
+ Return: tuple:
+ tree_id (pygit2.oid): Oid of index with source-changes applied
+ commit (pygit2.oid): Old commit being cherry-picked
+ """
+ tout.detail(f"- adding {oid(cmt.hash)} {cmt}")
+ repo.cherrypick(cmt.hash)
+ if repo.index.conflicts:
+ raise ValueError('Conflicts detected')
+
+ tree_id = repo.index.write_tree()
+ cherry = repo.get(cmt.hash)
+ tout.detail(f"cherry {oid(cherry.oid)}")
+ return tree_id, cherry
+
+ def _finish_commit(self, repo, tree_id, commit, cur, msg=None):
+ """Complete a commit
+
+ This must be called after _pick_commit().
+
+ Args:
+ repo (pygit2.repo): Repo to use
+ tree_id (pygit2.oid): Oid of index with source-changes applied; if
+ None then the existing commit.tree_id is used
+ commit (pygit2.oid): Old commit being cherry-picked
+ cur (pygit2.reference): Reference to parent to use for the commit
+ msg (str): Commit subject and message; None to use commit.message
+ """
+ if msg is None:
+ msg = commit.message
+ if not tree_id:
+ tree_id = commit.tree_id
+ repo.create_commit('HEAD', commit.author, commit.committer,
+ msg, tree_id, [cur.target])
+ return repo.head
+
+ def _finish_process(self, repo, branch, name, cur, old_head, new_name=None,
+ switch=False, dry_run=False, quiet=False):
+ """Finish processing commits
+
+        Args:
+            repo (pygit2.repo): Repo to use
+            branch (pygit2.branch): Branch returned by _prepare_process()
+            name (str): Name of the branch to process
+            cur (pygit2.Reference): Reference to the final commit, used as
+                the new target for the branch
+            old_head (pygit2.Reference): Original head before processing
+                started, or None if it was on the branch being processed
+            new_name (str or None): New name, if a new branch is being created
+            switch (bool): True to switch to the new branch after processing;
+                otherwise HEAD remains at the original branch, as amended
+            dry_run (bool): True to do a dry run, restoring the original tree
+                afterwards
+            quiet (bool): True to avoid output (used for testing)
+
+ Return:
+ pygit2.reference: Final commit after everything is completed
+ """
+ repo.state_cleanup()
+
+ # Update the branch
+ target = repo.revparse_single('HEAD')
+ if not quiet:
+ tout.info(f'Updating branch {name} from {oid(branch.target)} to '
+ f'{str(target.oid)[:HASH_LEN]}')
+ if dry_run:
+ if new_name:
+ repo.head.set_target(branch.target)
+ else:
+ branch_oid = branch.peel(pygit2.enums.ObjectType.COMMIT).oid
+ repo.head.set_target(branch_oid)
+ repo.head.set_target(branch.target)
+ repo.set_head(branch.name)
+ else:
+ if new_name:
+ new_branch = repo.branches.create(new_name, target)
+ if branch.upstream:
+ new_branch.upstream = branch.upstream
+ branch = new_branch
+ else:
+ branch.set_target(cur.target)
+ repo.set_head(branch.name)
+ if old_head:
+ if not switch:
+ repo.set_head(old_head.name)
+ return target
+
+ def make_change_id(self, commit):
+ """Make a Change ID for a commit
+
+        This is similar to the gerrit commit-msg hook, which does roughly:
+            { git var GIT_COMMITTER_IDENT; echo "$refhash"; cat "$MSG"; } |
+                git hash-object --stdin
+
+ Args:
+ commit (pygit2.commit): Commit to process
+
+ Return:
+ Change ID in hex format
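+
+        The hash covers the committer identity (name, email, time and
+        offset), the tree ID and the commit message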
+ """
+ sig = commit.committer
+ val = hashlib.sha1()
+ to_hash = f'{sig.name} <{sig.email}> {sig.time} {sig.offset}'
+ val.update(to_hash.encode('utf-8'))
+ val.update(str(commit.tree_id).encode('utf-8'))
+ val.update(commit.message.encode('utf-8'))
+ return val.hexdigest()
+
+ def _filter_commits(self, name, series, seq_to_drop):
+ """Filter commits to drop one
+
+ This function rebases the current branch, dropping a single commit,
+ thus changing the resulting code in the tree.
+
+ Args:
+ name (str): Name of the branch to process
+ series (Series): Series object
+ seq_to_drop (int): Commit sequence to drop; commits are numbered
+ from 0, which is the one after the upstream branch, to
+ count - 1
+ """
+ count = len(series.commits)
+ (repo, cur, branch, name, commit, _, _) = self._prepare_process(
+ name, count, quiet=True)
+ repo.checkout_tree(commit, strategy=CheckoutStrategy.FORCE |
+ CheckoutStrategy.RECREATE_MISSING)
+ repo.set_head(commit.oid)
+ for seq, cmt in enumerate(series.commits):
+ if seq != seq_to_drop:
+ tree_id, cherry = self._pick_commit(repo, cmt)
+ cur = self._finish_commit(repo, tree_id, cherry, cur)
+ self._finish_process(repo, branch, name, cur, None, quiet=True)
+
+ def process_series(self, name, series, new_name=None, switch=False,
+ dry_run=False):
+ """Rewrite a series commit messages, leaving code alone
+
+ This uses a 'vals' namespace to pass things to the controlling
+ function.
+
+ Each time _process_series() yields, it sets up:
+ commit (Commit): The pygit2 commit that is being processed
+ msg (str): Commit message, which can be modified
+ info (str): Initially empty; the controlling function can add a
+ short message here which will be shown to the user
+ final (bool): True if this is the last commit to apply
+            seq (int): Current sequence number in the commits to apply
+                (0..n-1)
+
+        It also sets git HEAD to the commit before the one currently being
+        processed
+
+ The function can change msg and info, e.g. to add or remove tags from
+ the commit.
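+
+        A minimal usage sketch (modelled on _mark_series(); the tag added
+        here is illustrative):
+
+            for vals in self.process_series(name, series):
+                vals.msg += '\nReviewed-by: Fred <fred@example.com>'
+                vals.info = 'added tag'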
+
+ Args:
+ name (str): Name of the branch to process
+ series (Series): Series object
+ new_name (str or None): New name, if a new branch is to be created
+ switch (bool): True to switch to the new branch after processing;
+ otherwise HEAD remains at the original branch, as amended
+ dry_run (bool): True to do a dry run, restoring the original tree
+ afterwards
+
+ Return:
+ pygit.oid: oid of the new branch
+ """
+ count = len(series.commits)
+ repo, cur, branch, name, _, commits, old_head = self._prepare_process(
+ name, count, new_name)
+ vals = SimpleNamespace()
+ vals.final = False
+ tout.info(f"Processing {count} commits from branch '{name}'")
+
+ # Record the message lines
+ lines = []
+ for seq, cmt in enumerate(series.commits):
+ commit = commits[seq]
+ vals.commit = commit
+ vals.msg = commit.message
+ vals.info = ''
+ vals.final = seq == len(series.commits) - 1
+ vals.seq = seq
+ yield vals
+
+ cur = self._finish_commit(repo, None, commit, cur, vals.msg)
+ lines.append([vals.info.strip(),
+ f'{oid(cmt.hash)} as {oid(cur.target)} {cmt}'])
+
+ max_len = max(len(info) for info, rest in lines) + 1
+ for info, rest in lines:
+ if info:
+ info += ':'
+ tout.info(f'- {info.ljust(max_len)} {rest}')
+ target = self._finish_process(repo, branch, name, cur, old_head,
+ new_name, switch, dry_run)
+ vals.oid = target.oid
+
+ def _mark_series(self, name, series, dry_run=False):
+ """Mark a series with Change-Id tags
+
+ Args:
+ name (str): Name of the series to mark
+ series (Series): Series object
+ dry_run (bool): True to do a dry run, restoring the original tree
+ afterwards
+
+ Return:
+ pygit.oid: oid of the new branch
+ """
+ vals = None
+ for vals in self.process_series(name, series, dry_run=dry_run):
+ if CHANGE_ID_TAG not in vals.msg:
+ change_id = self.make_change_id(vals.commit)
+ vals.msg = vals.msg + f'\n{CHANGE_ID_TAG}: {change_id}'
+ tout.detail(" - adding mark")
+ vals.info = 'marked'
+ else:
+ vals.info = 'has mark'
+
+ return vals.oid
+
+ def update_series(self, branch_name, series, max_vers, new_name=None,
+ dry_run=False, add_vers=None, add_link=None,
+ add_rtags=None, switch=False):
+ """Rewrite a series to update the Series-version/Series-links lines
+
+ This updates the series in git; it does not update the database
+
+ Args:
+ branch_name (str): Name of the branch to process
+ series (Series): Series object
+ max_vers (int): Version number of the series being updated
+ new_name (str or None): New name, if a new branch is to be created
+ dry_run (bool): True to do a dry run, restoring the original tree
+ afterwards
+ add_vers (int or None): Version number to add to the series, if any
+ add_link (str or None): Link to add to the series, if any
+ add_rtags (list of dict): List of review tags to add, one item for
+ each commit, each a dict:
+ key: Response tag (e.g. 'Reviewed-by')
+ value: Set of people who gave that response, each a name/email
+ string
+ switch (bool): True to switch to the new branch after processing;
+ otherwise HEAD remains at the original branch, as amended
+
+ Return:
+ pygit.oid: oid of the new branch
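+
+        For example, add_vers=3 produces a 'Series-version: 3' tag-line; an
+        add_link for v3 appears in the 'Series-links:' line as '3:<link>'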
+ """
+ def _do_version():
+ if add_vers:
+ if add_vers == 1:
+ vals.info += f'rm v{add_vers} '
+ else:
+ vals.info += f'add v{add_vers} '
+ out.append(f'Series-version: {add_vers}')
+
+ def _do_links(new_links):
+ if add_link:
+ if 'add' not in vals.info:
+ vals.info += 'add '
+ vals.info += f"links '{new_links}' "
+ else:
+ vals.info += f"upd links '{new_links}' "
+ out.append(f'Series-links: {new_links}')
+
+ added_version = False
+ added_link = False
+ for vals in self.process_series(branch_name, series, new_name, switch,
+ dry_run):
+ out = []
+ for line in vals.msg.splitlines():
+ m_ver = re.match('Series-version:(.*)', line)
+ m_links = re.match('Series-links:(.*)', line)
+ if m_ver and add_vers:
+ if ('version' in series and
+ int(series.version) != max_vers):
+ tout.warning(
+ f'Branch {branch_name}: Series-version tag '
+ f'{series.version} does not match expected '
+ f'version {max_vers}')
+ _do_version()
+ added_version = True
+ elif m_links:
+ links = series.get_links(m_links.group(1), max_vers)
+ if add_link:
+ links[max_vers] = add_link
+ _do_links(series.build_links(links))
+ added_link = True
+ else:
+ out.append(line)
+ if vals.final:
+ if not added_version and add_vers and add_vers > 1:
+ _do_version()
+ if not added_link and add_link:
+ _do_links(f'{max_vers}:{add_link}')
+
+ vals.msg = '\n'.join(out) + '\n'
+ if add_rtags and add_rtags[vals.seq]:
+ lines = []
+ for tag, people in add_rtags[vals.seq].items():
+ for who in people:
+ lines.append(f'{tag}: {who}')
+ vals.msg = patchstream.insert_tags(vals.msg.rstrip(),
+ sorted(lines))
+ vals.info += (f'added {len(lines)} '
+ f"tag{'' if len(lines) == 1 else 's'}")
+
+ def _build_col(self, state, prefix='', base_str=None):
+ """Build a patch-state string with colour
+
+ Args:
+ state (str): State to colourise (also indicates the colour to use)
+ prefix (str): Prefix string to also colourise
+ base_str (str or None): String to show instead of state, or None to
+ show state
+
+ Return:
+ str: String with ANSI colour characters
+ """
+ bright = True
+ if state == 'accepted':
+ col = self.col.GREEN
+ elif state == 'awaiting-upstream':
+ bright = False
+ col = self.col.GREEN
+ elif state in ['changes-requested']:
+ col = self.col.CYAN
+ elif state in ['rejected', 'deferred', 'not-applicable', 'superseded',
+ 'handled-elsewhere']:
+ col = self.col.RED
+ elif not state:
+ state = 'unknown'
+ col = self.col.MAGENTA
+ else:
+ # under-review, rfc, needs-review-ack
+ col = self.col.WHITE
+ out = base_str or SHORTEN_STATE.get(state, state)
+ pad = ' ' * (10 - len(out))
+ col_state = self.col.build(col, prefix + out, bright)
+ return col_state, pad
+
+ def _get_patches(self, series, version):
+ """Get a Series object containing the patches in a series
+
+ Args:
+ series (str): Name of series to use, or None to use current branch
+ version (int): Version number, or None to detect from name
+
+ Return: tuple:
+ str: Name of branch, e.g. 'mary2'
+ Series: Series object containing the commits and idnum, desc, name
+ int: Version number of series, e.g. 2
+ OrderedDict:
+                key (int): seq
+ value (PCOMMIT): record data
+ str: series name (for this version)
+ str: patchwork link
+ str: cover_id
+ int: cover_num_comments
+ """
+ ser, version = self._parse_series_and_version(series, version)
+ if not ser.idnum:
+ raise ValueError(f"Unknown series '{series}'")
+ self._ensure_version(ser, version)
+ svinfo = self.get_ser_ver(ser.idnum, version)
+ pwc = self.get_pcommit_dict(svinfo.idnum)
+
+ count = len(pwc)
+ branch = self._join_name_version(ser.name, version)
+ series = patchstream.get_metadata(branch, 0, count,
+ git_dir=self.gitdir)
+ self._copy_db_fields_to(series, ser)
+
+ return (branch, series, version, pwc, svinfo.name, svinfo.link,
+ svinfo.cover_id, svinfo.cover_num_comments)
+
+ def _list_patches(self, branch, pwc, series, desc, cover_id, num_comments,
+ show_commit, show_patch, list_patches, state_totals):
+ """List patches along with optional status info
+
+ Args:
+            branch (str): Name of the branch being shown
+ pwc (dict): pcommit records:
+ key (int): seq
+ value (PCOMMIT): Record from database
+ series (Series): Series to show, or None to just use the database
+ desc (str): Series title
+ cover_id (int): Cover-letter ID
+ num_comments (int): The number of comments on the cover letter
+ show_commit (bool): True to show the commit and diffstate
+ show_patch (bool): True to show the patch
+ list_patches (bool): True to list all patches for each series,
+ False to just show the series summary on a single line
+ state_totals (dict): Holds totals for each state across all patches
+ key (str): state name
+ value (int): Number of patches in that state
+
+ Return:
+ bool: True if OK, False if any commit subjects don't match their
+ patchwork subjects
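+
+        An example patch line (values are illustrative):
+
+              0 accepted    2 12345   0123456789 mmc: Add a new feature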
+ """
+ lines = []
+ states = defaultdict(int)
+ count = len(pwc)
+ ok = True
+ for seq, item in enumerate(pwc.values()):
+ if series:
+ cmt = series.commits[seq]
+ if cmt.subject != item.subject:
+ ok = False
+
+ col_state, pad = self._build_col(item.state)
+ patch_id = item.patch_id if item.patch_id else ''
+ if item.num_comments:
+ comments = str(item.num_comments)
+ elif item.num_comments is None:
+ comments = '-'
+ else:
+ comments = ''
+
+ if show_commit or show_patch:
+ subject = self.col.build(self.col.BLACK, item.subject,
+ bright=False, back=self.col.YELLOW)
+ else:
+ subject = item.subject
+
+ line = (f'{seq:3} {col_state}{pad} {comments.rjust(3)} '
+ f'{patch_id:7} {oid(cmt.hash)} {subject}')
+ lines.append(line)
+ states[item.state] += 1
+ out = ''
+ for state, freq in states.items():
+ out += ' ' + self._build_col(state, f'{freq}:')[0]
+ state_totals[state] += freq
+ name = ''
+ if not list_patches:
+ name = desc or series.desc
+ name = self.col.build(self.col.YELLOW, name[:41].ljust(41))
+ if not ok:
+ out = '*' + out[1:]
+ print(f"{branch:16} {name} {len(pwc):5} {out}")
+ return ok
+ print(f"Branch '{branch}' (total {len(pwc)}):{out}{name}")
+
+ print(self.col.build(
+ self.col.MAGENTA,
+ f"Seq State Com PatchId {'Commit'.ljust(HASH_LEN)} Subject"))
+
+ comments = '' if num_comments is None else str(num_comments)
+ if desc or comments or cover_id:
+ cov = 'Cov' if cover_id else ''
+ print(self.col.build(
+ self.col.WHITE,
+ f"{cov:14} {comments.rjust(3)} {cover_id or '':7} "
+ f'{desc or series.desc}',
+ bright=False))
+ for seq in range(count):
+ line = lines[seq]
+ print(line)
+ if show_commit or show_patch:
+ print()
+ cmt = series.commits[seq] if series else ''
+ msg = gitutil.show_commit(
+ cmt.hash, show_commit, True, show_patch,
+ colour=self.col.enabled(), git_dir=self.gitdir)
+ sys.stdout.write(msg)
+ if seq != count - 1:
+ print()
+ print()
+
+ return ok
+
+ def _find_matched_commit(self, commits, pcm):
+ """Find a commit in a list of possible matches
+
+ Args:
+ commits (dict of Commit): Possible matches
+ key (int): sequence number of patch (from 0)
+ value (Commit): Commit object
+ pcm (PCOMMIT): Patch to check
+
+ Return:
+ int: Sequence number of matching commit, or None if not found
+ """
+ for seq, cmt in commits.items():
+ tout.debug(f"- match subject: '{cmt.subject}'")
+ if pcm.subject == cmt.subject:
+ return seq
+ return None
+
+ def _find_matched_patch(self, patches, cmt):
+ """Find a patch in a list of possible matches
+
+ Args:
+            patches (dict): Possible matches:
+ key (int): sequence number of patch
+ value (PCOMMIT): patch
+ cmt (Commit): Commit to check
+
+ Return:
+ int: Sequence number of matching patch, or None if not found
+ """
+ for seq, pcm in patches.items():
+ tout.debug(f"- match subject: '{pcm.subject}'")
+ if cmt.subject == pcm.subject:
+ return seq
+ return None
+
+ def _sync_one(self, svid, series_name, version, show_comments,
+ show_cover_comments, gather_tags, cover, patches, dry_run):
+ """Sync one series to the database
+
+        Args:
+            svid (int): Ser/ver ID
+            series_name (str): Name of the series
+            version (int): Version number of the series
+            show_comments (bool): True to show comments on each patch
+            show_cover_comments (bool): True to show comments on the cover
+                letter
+            gather_tags (bool): True to gather review/test tags and add them
+                to the local branch
+            cover (dict or None): Cover letter from patchwork, with keys:
+                id (int): Cover-letter ID in patchwork
+                num_comments (int): Number of comments
+                name (str): Cover-letter name
+            patches (list of Patch): Patches in the series
+            dry_run (bool): True to do a dry run
+
+        Return: tuple:
+            int: Number of patch records updated
+            int: 1 if a cover letter was present, else 0
+        """
+ pwc = self.get_pcommit_dict(svid)
+ if gather_tags:
+ count = len(pwc)
+ branch = self._join_name_version(series_name, version)
+ series = patchstream.get_metadata(branch, 0, count,
+ git_dir=self.gitdir)
+
+ _, new_rtag_list = status.do_show_status(
+ series, cover, patches, show_comments, show_cover_comments,
+ self.col, warnings_on_stderr=False)
+ self.update_series(branch, series, version, None, dry_run,
+ add_rtags=new_rtag_list)
+
+ updated = 0
+ for seq, item in enumerate(pwc.values()):
+ if seq >= len(patches):
+ continue
+ patch = patches[seq]
+ if patch.id:
+ if self.db.pcommit_update(
+ Pcommit(item.idnum, seq, None, None, None, patch.state,
+ patch.id, len(patch.comments))):
+ updated += 1
+ if cover:
+ info = SerVer(svid, None, None, None, cover.id,
+ cover.num_comments, cover.name, None)
+ else:
+ info = SerVer(svid, None, None, None, None, None, patches[0].name,
+ None)
+ self.db.ser_ver_set_info(info)
+
+ return updated, 1 if cover else 0
+
+ async def _gather(self, pwork, link, show_cover_comments):
+ """Sync the series status from patchwork
+
+        Creates a new client session and fetches the series state
+
+ Args:
+ pwork (Patchwork): Patchwork object to use
+ link (str): Patchwork link for the series
+ show_cover_comments (bool): True to show the comments on the cover
+ letter
+
+ Return: tuple:
+ COVER object, or None if none or not read_cover_comments
+ list of PATCH objects
+ """
+ async with aiohttp.ClientSession() as client:
+ return await pwork.series_get_state(client, link, True,
+ show_cover_comments)
+
+ def _get_fetch_dict(self, sync_all_versions):
+ """Get a dict of ser_vers to fetch, along with their patchwork links
+
+ Args:
+ sync_all_versions (bool): True to sync all versions of a series,
+ False to sync only the latest version
+
+ Return: tuple:
+ dict: things to fetch
+ key (int): svid
+                value (STATE_REQ): request describing what to fetch
+ int: number of series which are missing a link
+ """
+ missing = 0
+ svdict = self.get_ser_ver_dict()
+ sdict = self.db.series_get_dict_by_id()
+ to_fetch = {}
+
+ if sync_all_versions:
+ for svinfo in self.get_ser_ver_list():
+ ser_ver = svdict[svinfo.idnum]
+ if svinfo.link:
+ to_fetch[svinfo.idnum] = patchwork.STATE_REQ(
+ svinfo.link, svinfo.series_id,
+ sdict[svinfo.series_id].name, svinfo.version, False,
+ False)
+ else:
+ missing += 1
+ else:
+ # Find the maximum version for each series
+ max_vers = self._series_all_max_versions()
+
+ # Get a list of links to fetch
+ for svid, series_id, version in max_vers:
+ ser_ver = svdict[svid]
+ if series_id not in sdict:
+ # skip archived item
+ continue
+ if ser_ver.link:
+ to_fetch[svid] = patchwork.STATE_REQ(
+ ser_ver.link, series_id, sdict[series_id].name,
+ version, False, False)
+ else:
+ missing += 1
+
+ # order by series name, version
+ ordered = OrderedDict()
+ for svid in sorted(
+ to_fetch,
+ key=lambda k: (to_fetch[k].series_name, to_fetch[k].version)):
+ sync = to_fetch[svid]
+ ordered[svid] = sync
+
+ return ordered, missing
+
+ async def _sync_all(self, client, pwork, to_fetch):
+ """Sync all series status from patchwork
+
+        Args:
+            client (aiohttp.ClientSession): Session to use
+            pwork (Patchwork): Patchwork object to use
+            to_fetch (dict): Series versions to fetch:
+                key (int): svid
+                value (STATE_REQ): request describing what to fetch
+
+        Return: tuple:
+            list of tuple: (COVER, list of PATCH) for each series fetched
+            int: number of patchwork requests made
+ """
+ with pwork.collect_stats() as stats:
+ tasks = [pwork.series_get_state(client, sync.link, True, True)
+ for sync in to_fetch.values() if sync.link]
+ result = await asyncio.gather(*tasks)
+ return result, stats.request_count
+
+ async def _do_series_sync_all(self, pwork, to_fetch):
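+        """Create a client session and sync all series states from patchwork"""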
+ async with aiohttp.ClientSession() as client:
+ return await self._sync_all(client, pwork, to_fetch)
+
+ def _progress_one(self, ser, show_all_versions, list_patches,
+ state_totals):
+ """Show progress information for all versions in a series
+
+ Args:
+ ser (Series): Series to use
+ show_all_versions (bool): True to show all versions of a series,
+ False to show only the final version
+ list_patches (bool): True to list all patches for each series,
+ False to just show the series summary on a single line
+ state_totals (dict): Holds totals for each state across all patches
+ key (str): state name
+ value (int): Number of patches in that state
+
+ Return: tuple
+ int: Number of series shown
+ int: Number of patches shown
+            int: Number of versions which need a 'scan'
+ """
+ max_vers = self._series_max_version(ser.idnum)
+ name, desc = self._get_series_info(ser.idnum)
+ coloured = self.col.build(self.col.BLACK, desc, bright=False,
+ back=self.col.YELLOW)
+ versions = self._get_version_list(ser.idnum)
+ vstr = list(map(str, versions))
+
+ if list_patches:
+ print(f"{name}: {coloured} (versions: {' '.join(vstr)})")
+ add_blank_line = False
+ total_series = 0
+ total_patches = 0
+ need_scan = 0
+ for ver in versions:
+ if not show_all_versions and ver != max_vers:
+ continue
+ if add_blank_line:
+ print()
+ _, pwc = self._series_get_version_stats(ser.idnum, ver)
+ count = len(pwc)
+ branch = self._join_name_version(ser.name, ver)
+ series = patchstream.get_metadata(branch, 0, count,
+ git_dir=self.gitdir)
+ svinfo = self.get_ser_ver(ser.idnum, ver)
+ self._copy_db_fields_to(series, ser)
+
+ ok = self._list_patches(
+ branch, pwc, series, svinfo.name, svinfo.cover_id,
+ svinfo.cover_num_comments, False, False, list_patches,
+ state_totals)
+ if not ok:
+ need_scan += 1
+ add_blank_line = list_patches
+ total_series += 1
+ total_patches += count
+ return total_series, total_patches, need_scan
+
+ def _summary_one(self, ser):
+ """Show summary information for the latest version in a series
+
+ Args:
+            ser (Series): Series to show
+ """
+ max_vers = self._series_max_version(ser.idnum)
+ name, desc = self._get_series_info(ser.idnum)
+ stats, pwc = self._series_get_version_stats(ser.idnum, max_vers)
+ states = {x.state for x in pwc.values()}
+ state = 'accepted'
+ for val in ['awaiting-upstream', 'changes-requested', 'rejected',
+ 'deferred', 'not-applicable', 'superseded',
+ 'handled-elsewhere']:
+ if val in states:
+ state = val
+ state_str, pad = self._build_col(state, base_str=name)
+ print(f"{state_str}{pad} {stats.rjust(6)} {desc}")
+
+ def _series_max_version(self, idnum):
+ """Find the latest version of a series
+
+ Args:
+ idnum (int): Series ID to look up
+
+ Return:
+ int: maximum version
+ """
+ return self.db.series_get_max_version(idnum)
+
+ def _series_all_max_versions(self):
+ """Find the latest version of all series
+
+ Return: list of:
+ int: ser_ver ID
+ int: series ID
+ int: Maximum version
+ """
+ return self.db.series_get_all_max_versions()
diff --git a/tools/patman/cseries.py b/tools/patman/cseries.py
new file mode 100644
index 00000000000..bcbc4963cea
--- /dev/null
+++ b/tools/patman/cseries.py
@@ -0,0 +1,1165 @@
+# SPDX-License-Identifier: GPL-2.0+
+#
+# Copyright 2025 Google LLC
+#
+"""Handles the 'series' subcommand
+"""
+
+import asyncio
+from collections import OrderedDict, defaultdict
+
+import pygit2
+
+from u_boot_pylib import cros_subprocess
+from u_boot_pylib import gitutil
+from u_boot_pylib import terminal
+from u_boot_pylib import tout
+
+from patman import patchstream
+from patman import cser_helper
+from patman.cser_helper import AUTOLINK, oid
+from patman import send
+from patman import status
+
+
+class Cseries(cser_helper.CseriesHelper):
+ """Database with information about series
+
+ This class handles database read/write as well as operations in a git
+ directory to update series information.
+ """
+ def __init__(self, topdir=None, colour=terminal.COLOR_IF_TERMINAL):
+ """Set up a new Cseries
+
+ Args:
+ topdir (str): Top-level directory of the repo
+ colour (terminal.enum): Whether to enable ANSI colour or not
+ """
+ super().__init__(topdir, colour)
+
+ def add(self, branch_name, desc=None, mark=False, allow_unmarked=False,
+ end=None, force_version=False, dry_run=False):
+ """Add a series (or new version of a series) to the database
+
+ Args:
+ branch_name (str): Name of branch to sync, or None for current one
+ desc (str): Description to use, or None to use the series subject
+            mark (bool): True to mark each commit with a change ID
+            allow_unmarked (bool): True to not require each commit to be
+                marked
+            end (str): Add only commits up to but excluding this one, or
+                None to add all commits on the branch
+            force_version (bool): True to ignore a Series-version tag that
+ doesn't match its branch name
+ dry_run (bool): True to do a dry run
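+
+        A typical invocation, assuming the command form used elsewhere in
+        this file, is 'patman series -s <branch> add'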
+ """
+ name, ser, version, msg = self.prep_series(branch_name, end)
+ tout.info(f"Adding series '{ser.name}' v{version}: mark {mark} "
+ f'allow_unmarked {allow_unmarked}')
+ if msg:
+ tout.info(msg)
+ if desc is None:
+ if not ser.cover:
+ raise ValueError(f"Branch '{name}' has no cover letter - "
+ 'please provide description')
+ desc = ser['cover'][0]
+
+ ser = self._handle_mark(name, ser, version, mark, allow_unmarked,
+ force_version, dry_run)
+ link = ser.get_link_for_version(version)
+
+ msg = 'Added'
+ added = False
+ series_id = self.db.series_find_by_name(ser.name)
+ if not series_id:
+ series_id = self.db.series_add(ser.name, desc)
+ added = True
+ msg += f" series '{ser.name}'"
+
+ if version not in self._get_version_list(series_id):
+ svid = self.db.ser_ver_add(series_id, version, link)
+ msg += f" v{version}"
+ if not added:
+ msg += f" to existing series '{ser.name}'"
+ added = True
+
+ self._add_series_commits(ser, svid)
+ count = len(ser.commits)
+ msg += f" ({count} commit{'s' if count > 1 else ''})"
+ if not added:
+ tout.info(f"Series '{ser.name}' v{version} already exists")
+ msg = None
+ elif not dry_run:
+ self.commit()
+ else:
+ self.rollback()
+ series_id = None
+ ser.desc = desc
+ ser.idnum = series_id
+
+ if msg:
+ tout.info(msg)
+ if dry_run:
+ tout.info('Dry run completed')
+
+ def decrement(self, series, dry_run=False):
+ """Decrement a series to the previous version and delete the branch
+
+ Args:
+ series (str): Name of series to use, or None to use current branch
+ dry_run (bool): True to do a dry run
+ """
+ ser = self._parse_series(series)
+ if not ser.idnum:
+ raise ValueError(f"Series '{ser.name}' not found in database")
+
+ max_vers = self._series_max_version(ser.idnum)
+ if max_vers < 2:
+ raise ValueError(f"Series '{ser.name}' only has one version")
+
+ tout.info(f"Removing series '{ser.name}' v{max_vers}")
+
+ new_max = max_vers - 1
+
+ repo = pygit2.init_repository(self.gitdir)
+ if not dry_run:
+ name = self._get_branch_name(ser.name, new_max)
+ branch = repo.lookup_branch(name)
+ try:
+ repo.checkout(branch)
+ except pygit2.errors.GitError:
+ tout.warning(f"Failed to checkout branch {name}")
+ raise
+
+ del_name = f'{ser.name}{max_vers}'
+ del_branch = repo.lookup_branch(del_name)
+ branch_oid = del_branch.peel(pygit2.enums.ObjectType.COMMIT).oid
+ del_branch.delete()
+ print(f"Deleted branch '{del_name}' {oid(branch_oid)}")
+
+ self.db.ser_ver_remove(ser.idnum, max_vers)
+ if not dry_run:
+ self.commit()
+ else:
+ self.rollback()
+
+ def increment(self, series_name, dry_run=False):
+ """Increment a series to the next version and create a new branch
+
+ Args:
+ series_name (str): Name of series to use, or None to use current
+ branch
+ dry_run (bool): True to do a dry run
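+
+        For example, if series 'fred' is at v2 (branch 'fred2'), this
+        creates branch 'fred3' and records v3 in the database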
+ """
+ ser = self._parse_series(series_name)
+ if not ser.idnum:
+ raise ValueError(f"Series '{ser.name}' not found in database")
+
+ max_vers = self._series_max_version(ser.idnum)
+
+ branch_name = self._get_branch_name(ser.name, max_vers)
+ on_branch = gitutil.get_branch(self.gitdir) == branch_name
+ svid = self.get_series_svid(ser.idnum, max_vers)
+ pwc = self.get_pcommit_dict(svid)
+ count = len(pwc.values())
+ series = patchstream.get_metadata(branch_name, 0, count,
+ git_dir=self.gitdir)
+ tout.info(f"Increment '{ser.name}' v{max_vers}: {count} patches")
+
+ # Create a new branch
+ vers = max_vers + 1
+ new_name = self._join_name_version(ser.name, vers)
+
+ self.update_series(branch_name, series, max_vers, new_name, dry_run,
+ add_vers=vers, switch=on_branch)
+
+ old_svid = self.get_series_svid(ser.idnum, max_vers)
+ pcd = self.get_pcommit_dict(old_svid)
+
+ svid = self.db.ser_ver_add(ser.idnum, vers)
+ self.db.pcommit_add_list(svid, pcd.values())
+ if not dry_run:
+ self.commit()
+ else:
+ self.rollback()
+
+ tout.info(f'Added new branch {new_name}')
+ if dry_run:
+ tout.info('Dry run completed')
+
+ def link_set(self, series_name, version, link, update_commit):
+ """Add / update a series-links link for a series
+
+ Args:
+ series_name (str): Name of series to use, or None to use current
+ branch
+ version (int): Version number, or None to detect from name
+ link (str): Patchwork link-string for the series
+ update_commit (bool): True to update the current commit with the
+ link
+ """
+ ser, version = self._parse_series_and_version(series_name, version)
+ self._ensure_version(ser, version)
+
+ self._set_link(ser.idnum, ser.name, version, link, update_commit)
+ self.commit()
+ tout.info(f"Setting link for series '{ser.name}' v{version} to {link}")
+
+ def link_get(self, series, version):
+ """Get the patchwork link for a version of a series
+
+ Args:
+ series (str): Name of series to use, or None to use current branch
+ version (int): Version number or None for current
+
+ Return:
+ str: Patchwork link as a string, e.g. '12325'
+ """
+ ser, version = self._parse_series_and_version(series, version)
+ self._ensure_version(ser, version)
+ return self.db.ser_ver_get_link(ser.idnum, version)
+
+ def link_search(self, pwork, series, version):
+ """Search patch for the link for a series
+
+ Returns either the single match, or None, in which case the second part
+ of the tuple is filled in
+
+ Args:
+ pwork (Patchwork): Patchwork object to use
+ series (str): Series name to search for, or None for current series
+ that is checked out
+ version (int): Version to search for, or None for current version
+ detected from branch name
+
+ Returns:
+ tuple:
+ int: ID of the series found, or None
+ list of possible matches, or None, each a dict:
+ 'id': series ID
+ 'name': series name
+ str: series name
+ int: series version
+ str: series description
+ """
+ _, ser, version, _, _, _, _, _ = self._get_patches(series, version)
+
+ if not ser.desc:
+ raise ValueError(f"Series '{ser.name}' has an empty description")
+
+ pws, options = self.loop.run_until_complete(pwork.find_series(
+ ser, version))
+ return pws, options, ser.name, version, ser.desc
+
+ def link_auto(self, pwork, series, version, update_commit, wait_s=0):
+ """Automatically find a series link by looking in patchwork
+
+ Args:
+ pwork (Patchwork): Patchwork object to use
+ series (str): Series name to search for, or None for current series
+ that is checked out
+ version (int): Version to search for, or None for current version
+ detected from branch name
+ update_commit (bool): True to update the current commit with the
+ link
+ wait_s (int): Number of seconds to wait for the autolink to succeed
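+
+        The search is retried every five seconds until it succeeds or
+        wait_s expires, so e.g. wait_s=120 allows up to two minutes for
+        patchwork to pick up the series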
+ """
+ start = self.get_time()
+ stop = start + wait_s
+ sleep_time = 5
+ while True:
+ pws, options, name, version, desc = self.link_search(
+ pwork, series, version)
+ if pws:
+ if wait_s:
+ tout.info('Link completed after '
+ f'{self.get_time() - start} seconds')
+ break
+
+ print(f"Possible matches for '{name}' v{version} desc '{desc}':")
+ print(' Link Version Description')
+ for opt in options:
+ print(f"{opt['id']:6} {opt['version']:7} {opt['name']}")
+ if not wait_s or self.get_time() > stop:
+ delay = f' after {wait_s} seconds' if wait_s else ''
+ raise ValueError(f"Cannot find series '{desc}{delay}'")
+
+ self.sleep(sleep_time)
+
+ self.link_set(name, version, pws, update_commit)
+
+ def link_auto_all(self, pwork, update_commit, link_all_versions,
+ replace_existing, dry_run, show_summary=True):
+ """Automatically find a series link by looking in patchwork
+
+ Args:
+ pwork (Patchwork): Patchwork object to use
+ update_commit (bool): True to update the current commit with the
+ link
+ link_all_versions (bool): True to sync all versions of a series,
+ False to sync only the latest version
+ replace_existing (bool): True to sync a series even if it already
+ has a link
+ dry_run (bool): True to do a dry run
+ show_summary (bool): True to show a summary of how things went
+
+ Return:
+ OrderedDict of summary info:
+ key (int): ser_ver ID
+ value (AUTOLINK): result of autolinking on this ser_ver
+ """
+ sdict = self.db.series_get_dict_by_id()
+ all_ser_vers = self._get_autolink_dict(sdict, link_all_versions)
+
+ # Get rid of things without a description
+ valid = {}
+ state = {}
+ no_desc = 0
+ not_found = 0
+ updated = 0
+ failed = 0
+ already = 0
+ for svid, (ser_id, name, version, link, desc) in all_ser_vers.items():
+ if link and not replace_existing:
+ state[svid] = f'already:{link}'
+ already += 1
+ elif desc:
+ valid[svid] = ser_id, version, link, desc
+ else:
+ no_desc += 1
+ state[svid] = 'missing description'
+
+ results, requests = self.loop.run_until_complete(
+ pwork.find_series_list(valid))
+
+ for svid, ser_id, link, _ in results:
+ if link:
+ version = all_ser_vers[svid][2]
+ if self._set_link(ser_id, sdict[ser_id].name, version,
+ link, update_commit, dry_run=dry_run):
+ updated += 1
+ state[svid] = f'linked:{link}'
+ else:
+ failed += 1
+ state[svid] = 'failed'
+ else:
+ not_found += 1
+ state[svid] = 'not found'
+
+ # Create a summary sorted by name and version
+ summary = OrderedDict()
+        for svid in sorted(all_ser_vers, key=lambda k: all_ser_vers[k][1:3]):
+ _, name, version, link, ser = all_ser_vers[svid]
+ summary[svid] = AUTOLINK(name, version, link, ser.desc,
+ state[svid])
+
+ if show_summary:
+ msg = f'{updated} series linked'
+ if already:
+ msg += f', {already} already linked'
+ if not_found:
+ msg += f', {not_found} not found'
+ if no_desc:
+ msg += f', {no_desc} missing description'
+ if failed:
+            msg += f', {failed} update(s) failed'
+ tout.info(msg + f' ({requests} requests)')
+
+ tout.info('')
+ tout.info(f"{'Name':15} Version {'Description':40} Result")
+ border = f"{'-' * 15} ------- {'-' * 40} {'-' * 15}"
+ tout.info(border)
+ for name, version, link, desc, state in summary.values():
+ bright = True
+ if state.startswith('already'):
+ col = self.col.GREEN
+ bright = False
+ elif state.startswith('linked'):
+ col = self.col.MAGENTA
+ else:
+ col = self.col.RED
+ col_state = self.col.build(col, state, bright)
+ tout.info(f"{name:16.16} {version:7} {desc or '':40.40} "
+ f'{col_state}')
+ tout.info(border)
+ if dry_run:
+ tout.info('Dry run completed')
+
+ return summary
+
+ def series_list(self):
+ """List all series
+
+ Lines all series along with their description, number of patches
+ accepted and the available versions
+ """
+ sdict = self.db.series_get_dict()
+ print(f"{'Name':15} {'Description':40} Accepted Versions")
+ border = f"{'-' * 15} {'-' * 40} -------- {'-' * 15}"
+ print(border)
+ for name in sorted(sdict):
+ ser = sdict[name]
+ versions = self._get_version_list(ser.idnum)
+ stat = self._series_get_version_stats(
+ ser.idnum, self._series_max_version(ser.idnum))[0]
+
+ vlist = ' '.join([str(ver) for ver in sorted(versions)])
+
+ print(f'{name:16.16} {ser.desc:41.41} {stat.rjust(8)} {vlist}')
+ print(border)
+
+ def list_patches(self, series, version, show_commit=False,
+ show_patch=False):
+ """List patches in a series
+
+ Args:
+ series (str): Name of series to use, or None to use current branch
+ version (int): Version number, or None to detect from name
+ show_commit (bool): True to show the commit and diffstate
+ show_patch (bool): True to show the patch
+ """
+ branch, series, version, pwc, name, _, cover_id, num_comments = (
+ self._get_patches(series, version))
+ with terminal.pager():
+ state_totals = defaultdict(int)
+ self._list_patches(branch, pwc, series, name, cover_id,
+ num_comments, show_commit, show_patch, True,
+ state_totals)
+
+ def mark(self, in_name, allow_marked=False, dry_run=False):
+ """Add Change-Id tags to a series
+
+ Args:
+            in_name (str): Name of the series to mark
+ allow_marked (bool): Allow commits to be (already) marked
+ dry_run (bool): True to do a dry run, restoring the original tree
+ afterwards
+
+ Return:
+ pygit.oid: oid of the new branch
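+
+        Each marked commit gains a tag-line such as the following, assuming
+        CHANGE_ID_TAG is 'Change-Id' (the hash shown is illustrative):
+
+            Change-Id: c09c34b16b734faa49e17a0067213710b64b3d77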
+ """
+ name, ser, _, _ = self.prep_series(in_name)
+ tout.info(f"Marking series '{name}': allow_marked {allow_marked}")
+
+ if not allow_marked:
+ bad = []
+ for cmt in ser.commits:
+ if cmt.change_id:
+ bad.append(cmt)
+ if bad:
+ print(f'{len(bad)} commit(s) already have marks')
+ for cmt in bad:
+ print(f' - {oid(cmt.hash)} {cmt.subject}')
+ raise ValueError(
+ f'Marked commits {len(bad)}/{len(ser.commits)}')
+ new_oid = self._mark_series(in_name, ser, dry_run=dry_run)
+
+ if dry_run:
+ tout.info('Dry run completed')
+ return new_oid
+
+ def unmark(self, name, allow_unmarked=False, dry_run=False):
+ """Remove Change-Id tags from a series
+
+ Args:
+ name (str): Name of the series to unmark
+ allow_unmarked (bool): Allow commits to be (already) unmarked
+ dry_run (bool): True to do a dry run, restoring the original tree
+ afterwards
+
+ Return:
+ pygit.oid: oid of the new branch
+ """
+ name, ser, _, _ = self.prep_series(name)
+ tout.info(
+ f"Unmarking series '{name}': allow_unmarked {allow_unmarked}")
+
+ if not allow_unmarked:
+ bad = []
+ for cmt in ser.commits:
+ if not cmt.change_id:
+ bad.append(cmt)
+ if bad:
+ print(f'{len(bad)} commit(s) are missing marks')
+ for cmt in bad:
+ print(f' - {oid(cmt.hash)} {cmt.subject}')
+ raise ValueError(
+ f'Unmarked commits {len(bad)}/{len(ser.commits)}')
+ vals = None
+ for vals in self.process_series(name, ser, dry_run=dry_run):
+ if cser_helper.CHANGE_ID_TAG in vals.msg:
+ lines = vals.msg.splitlines()
+ updated = [line for line in lines
+ if not line.startswith(cser_helper.CHANGE_ID_TAG)]
+ vals.msg = '\n'.join(updated)
+
+ tout.detail(" - removing mark")
+ vals.info = 'unmarked'
+ else:
+ vals.info = 'no mark'
+
+ if dry_run:
+ tout.info('Dry run completed')
+ return vals.oid
+
+ def open(self, pwork, name, version):
+ """Open the patchwork page for a series
+
+ Args:
+ pwork (Patchwork): Patchwork object to use
+ name (str): Name of series to open
+            version (int): Version number to open
+ """
+ ser, version = self._parse_series_and_version(name, version)
+ link = self.link_get(ser.name, version)
+ pwork.url = 'https://patchwork.ozlabs.org'
+ url = self.loop.run_until_complete(pwork.get_series_url(link))
+ print(f'Opening {url}')
+
+ # With Firefox, GTK produces lots of warnings, so suppress them
+ # Gtk-Message: 06:48:20.692: Failed to load module "xapp-gtk3-module"
+ # Gtk-Message: 06:48:20.692: Not loading module "atk-bridge": The
+ # functionality is provided by GTK natively. Please try to not load it.
+ # Gtk-Message: 06:48:20.692: Failed to load module "appmenu-gtk-module"
+ # Gtk-Message: 06:48:20.692: Failed to load module "appmenu-gtk-module"
+ # [262145, Main Thread] WARNING: GTK+ module /snap/firefox/5987/
+ # gnome-platform/usr/lib/gtk-2.0/modules/libcanberra-gtk-module.so
+ # cannot be loaded.
+ # GTK+ 2.x symbols detected. Using GTK+ 2.x and GTK+ 3 in the same
+ # process # is not supported.: 'glib warning', file /build/firefox/
+ # parts/firefox/build/toolkit/xre/nsSigHandlers.cpp:201
+ #
+ # (firefox_firefox:262145): Gtk-WARNING **: 06:48:20.728: GTK+ module
+ # /snap/firefox/5987/gnome-platform/usr/lib/gtk-2.0/modules/
+ # libcanberra-gtk-module.so cannot be loaded.
+ # GTK+ 2.x symbols detected. Using GTK+ 2.x and GTK+ 3 in the same
+ # process is not supported.
+ # Gtk-Message: 06:48:20.728: Failed to load module
+ # "canberra-gtk-module"
+ # [262145, Main Thread] WARNING: GTK+ module /snap/firefox/5987/
+ # gnome-platform/usr/lib/gtk-2.0/modules/libcanberra-gtk-module.so
+ # cannot be loaded.
+ # GTK+ 2.x symbols detected. Using GTK+ 2.x and GTK+ 3 in the same
+ # process is not supported.: 'glib warning', file /build/firefox/
+ # parts/firefox/build/toolkit/xre/nsSigHandlers.cpp:201
+ #
+ # (firefox_firefox:262145): Gtk-WARNING **: 06:48:20.729: GTK+ module
+ # /snap/firefox/5987/gnome-platform/usr/lib/gtk-2.0/modules/
+ # libcanberra-gtk-module.so cannot be loaded.
+ # GTK+ 2.x symbols detected. Using GTK+ 2.x and GTK+ 3 in the same
+ # process is not supported.
+ # Gtk-Message: 06:48:20.729: Failed to load module
+ # "canberra-gtk-module"
+ # ATTENTION: default value of option mesa_glthread overridden by
+ # environment.
+ cros_subprocess.Popen(['xdg-open', url])
+
+ def progress(self, series, show_all_versions, list_patches):
+ """Show progress information for all versions in a series
+
+ Args:
+ series (str): Name of series to use, or None to show progress for
+ all series
+ show_all_versions (bool): True to show all versions of a series,
+ False to show only the final version
+ list_patches (bool): True to list all patches for each series,
+ False to just show the series summary on a single line
+ """
+ with terminal.pager():
+ state_totals = defaultdict(int)
+ if series is not None:
+ _, _, need_scan = self._progress_one(
+ self._parse_series(series), show_all_versions,
+ list_patches, state_totals)
+ if need_scan:
+ tout.warning(
+ 'Inconsistent commit-subject: Please use '
+ "'patman series -s <branch> scan' to resolve this")
+ return
+
+ total_patches = 0
+ total_series = 0
+ sdict = self.db.series_get_dict()
+ border = None
+ total_need_scan = 0
+ if not list_patches:
+ print(self.col.build(
+ self.col.MAGENTA,
+ f"{'Name':16} {'Description':41} Count {'Status'}"))
+ border = f"{'-' * 15} {'-' * 40} ----- {'-' * 15}"
+ print(border)
+ for name in sorted(sdict):
+ ser = sdict[name]
+ num_series, num_patches, need_scan = self._progress_one(
+ ser, show_all_versions, list_patches, state_totals)
+ total_need_scan += need_scan
+ if list_patches:
+ print()
+ total_series += num_series
+ total_patches += num_patches
+ if not list_patches:
+ print(border)
+ total = f'{total_series} series'
+ out = ''
+ for state, freq in state_totals.items():
+ out += ' ' + self._build_col(state, f'{freq}:')[0]
+ if total_need_scan:
+ out = '*' + out[1:]
+
+ print(f"{total:15} {'':40} {total_patches:5} {out}")
+ if total_need_scan:
+ tout.info(
+ f'Series marked * ({total_need_scan}) have commit '
+ 'subjects which mismatch their patches and need to be '
+ 'scanned')
+
+ def project_set(self, pwork, name, quiet=False):
+ """Set the name of the project
+
+ Args:
+ pwork (Patchwork): Patchwork object to use
+ name (str): Name of the project to use in patchwork
+ quiet (bool): True to skip writing the message
+ """
+ res = self.loop.run_until_complete(pwork.get_projects())
+ proj_id = None
+ link_name = None
+ for proj in res:
+ if proj['name'] == name:
+ proj_id = proj['id']
+ link_name = proj['link_name']
+ if not proj_id:
+ raise ValueError(f"Unknown project name '{name}'")
+ self.db.settings_update(name, proj_id, link_name)
+ self.commit()
+ if not quiet:
+ tout.info(f"Project '{name}' patchwork-ID {proj_id} "
+ f'link-name {link_name}')
+
+ def project_get(self):
+ """Get the details of the project
+
+ Returns:
+ tuple or None if there are no settings:
+ name (str): Project name, e.g. 'U-Boot'
+                proj_id (int): Patchwork project ID for this project
+ link_name (str): Patchwork's link-name for the project
+ """
+ return self.db.settings_get()
+
+ def remove(self, name, dry_run=False):
+ """Remove a series from the database
+
+ Args:
+ name (str): Name of series to remove, or None to use current one
+ dry_run (bool): True to do a dry run
+ """
+ ser = self._parse_series(name)
+ name = ser.name
+ if not ser.idnum:
+ raise ValueError(f"No such series '{name}'")
+
+ self.db.ser_ver_remove(ser.idnum, None)
+ if not dry_run:
+ self.commit()
+ else:
+ self.rollback()
+
+ tout.info(f"Removed series '{name}'")
+ if dry_run:
+ tout.info('Dry run completed')
+
+ def rename(self, series, name, dry_run=False):
+ """Rename a series
+
+ Renames a series and changes the name of any branches which match
+ versions present in the database
+
+ Args:
+ series (str): Name of series to use, or None to use current branch
+ name (str): new name to use (must not include version number)
+ dry_run (bool): True to do a dry run
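+
+        For example, renaming series 'fred' (versions 1 and 3) to 'mary'
+        renames branch 'fred' to 'mary' and 'fred3' to 'mary3'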
+ """
+ old_ser, _ = self._parse_series_and_version(series, None)
+ if not old_ser.idnum:
+ raise ValueError(f"Series '{old_ser.name}' not found in database")
+ if old_ser.name != series:
+ raise ValueError(f"Invalid series name '{series}': "
+ 'did you use the branch name?')
+ chk, _ = cser_helper.split_name_version(name)
+ if chk != name:
+ raise ValueError(
+ f"Invalid series name '{name}': did you use the branch name?")
+ if chk == old_ser.name:
+ raise ValueError(
+ f"Cannot rename series '{old_ser.name}' to itself")
+ if self.get_series_by_name(name):
+ raise ValueError(f"Cannot rename: series '{name}' already exists")
+
+ versions = self._get_version_list(old_ser.idnum)
+ missing = []
+ exists = []
+ todo = {}
+ for ver in versions:
+ ok = True
+ old_branch = self._get_branch_name(old_ser.name, ver)
+ if not gitutil.check_branch(old_branch, self.gitdir):
+ missing.append(old_branch)
+ ok = False
+
+ branch = self._get_branch_name(name, ver)
+ if gitutil.check_branch(branch, self.gitdir):
+ exists.append(branch)
+ ok = False
+
+ if ok:
+ todo[ver] = [old_branch, branch]
+
+ if missing or exists:
+ msg = 'Cannot rename'
+ if missing:
+ msg += f": branches missing: {', '.join(missing)}"
+ if exists:
+ msg += f": branches exist: {', '.join(exists)}"
+ raise ValueError(msg)
+
+ for old_branch, branch in todo.values():
+ tout.info(f"Renaming branch '{old_branch}' to '{branch}'")
+ if not dry_run:
+ gitutil.rename_branch(old_branch, branch, self.gitdir)
+
+ # Change the series name; nothing needs to change in ser_ver
+ self.db.series_set_name(old_ser.idnum, name)
+
+ if not dry_run:
+ self.commit()
+ else:
+ self.rollback()
+
+ tout.info(f"Renamed series '{series}' to '{name}'")
+ if dry_run:
+ tout.info('Dry run completed')
+
+ def scan(self, branch_name, mark=False, allow_unmarked=False, end=None,
+ dry_run=False):
+ """Scan a branch and make updates to the database if it has changed
+
+ Args:
+ branch_name (str): Name of branch to sync, or None for current one
+            mark (bool): True to mark each commit with a change ID
+            allow_unmarked (bool): True to not require each commit to be
+                marked
+            end (str): Scan only commits up to but excluding this one, or
+                None to scan all commits on the branch
+ dry_run (bool): True to do a dry run
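+
+        Output is one line per commit, for example (subjects illustrative):
+
+            +   2 mmc: Add a new feature
+            -   3 mmc: Drop the old workaround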
+ """
+ def _show_item(oper, seq, subject):
+ col = None
+ if oper == '+':
+ col = self.col.GREEN
+ elif oper == '-':
+ col = self.col.RED
+ out = self.col.build(col, subject) if col else subject
+ tout.info(f'{oper} {seq:3} {out}')
+
+ name, ser, version, msg = self.prep_series(branch_name, end)
+ svid = self.get_ser_ver(ser.idnum, version).idnum
+ pcdict = self.get_pcommit_dict(svid)
+
+ tout.info(
+ f"Syncing series '{name}' v{version}: mark {mark} "
+ f'allow_unmarked {allow_unmarked}')
+ if msg:
+ tout.info(msg)
+
+ ser = self._handle_mark(name, ser, version, mark, allow_unmarked,
+ False, dry_run)
+
+ # First check for new patches that are not in the database
+ to_add = dict(enumerate(ser.commits))
+ for pcm in pcdict.values():
+ tout.debug(f'pcm {pcm.subject}')
+ i = self._find_matched_commit(to_add, pcm)
+ if i is not None:
+ del to_add[i]
+
+ # Now check for patches in the database that are not in the branch
+ to_remove = dict(enumerate(pcdict.values()))
+ for cmt in ser.commits:
+ tout.debug(f'cmt {cmt.subject}')
+ i = self._find_matched_patch(to_remove, cmt)
+ if i is not None:
+ del to_remove[i]
+
+ for seq, cmt in enumerate(ser.commits):
+ if seq in to_remove:
+ _show_item('-', seq, to_remove[seq].subject)
+ del to_remove[seq]
+ if seq in to_add:
+ _show_item('+', seq, to_add[seq].subject)
+ del to_add[seq]
+ else:
+ _show_item(' ', seq, cmt.subject)
+ seq = len(ser.commits)
+        for cmt in to_add.values():
+            _show_item('+', seq, cmt.subject)
+            seq += 1
+        for seq, pcm in to_remove.items():
+            _show_item('-', seq, pcm.subject)
+
+ self.db.pcommit_delete(svid)
+ self._add_series_commits(ser, svid)
+ if not dry_run:
+ self.commit()
+ else:
+ self.rollback()
+ tout.info('Dry run completed')
+
+ def send(self, pwork, name, autolink, autolink_wait, args):
+ """Send out a series
+
+ Args:
+ pwork (Patchwork): Patchwork object to use
+ name (str): Series name to search for, or None for current series
+ that is checked out
+            autolink (bool): True to auto-link the series after sending
+            autolink_wait (int): Number of seconds to wait for the autolink
+                to succeed
+            args (argparse.Namespace): 'send' arguments provided
+ """
+ ser, version = self._parse_series_and_version(name, None)
+ if not ser.idnum:
+ raise ValueError(f"Series '{ser.name}' not found in database")
+
+ args.branch = self._get_branch_name(ser.name, version)
+ likely_sent = send.send(args, git_dir=self.gitdir, cwd=self.topdir)
+
+ if likely_sent and autolink:
+ print(f'Autolinking with Patchwork ({autolink_wait} seconds)')
+ self.link_auto(pwork, name, version, True, wait_s=autolink_wait)
+
+ def archive(self, series):
+ """Archive a series
+
+ Args:
+ series (str): Name of series to use, or None to use current branch
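+
+        Each branch in the series is tagged '<branch>-<date>' (e.g.
+        'fred2-01jan25', using date format '%d%b%y' in lower case) and the
+        branch is then deleted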
+ """
+ ser = self._parse_series(series, include_archived=True)
+ if not ser.idnum:
+ raise ValueError(f"Series '{ser.name}' not found in database")
+
+ svlist = self.db.ser_ver_get_for_series(ser.idnum)
+
+ # Figure out the tags we will create
+ tag_info = {}
+ now = self.get_now()
+ now_str = now.strftime('%d%b%y').lower()
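+        # e.g. a branch archived on 2 Jan 2025 gets a tag ending '-02jan25'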
+ for svi in svlist:
+ name = self._get_branch_name(ser.name, svi.version)
+ if not gitutil.check_branch(name, git_dir=self.gitdir):
+ raise ValueError(f"No branch named '{name}'")
+ tag_info[svi.version] = [svi.idnum, name, f'{name}-{now_str}']
+
+ # Create the tags
+ repo = pygit2.init_repository(self.gitdir)
+        for idnum, name, tag_name in tag_info.values():
+ commit = repo.revparse_single(name)
+ repo.create_tag(tag_name, commit.hex,
+ pygit2.enums.ObjectType.COMMIT,
+ commit.author, commit.message)
+
+ # Update the database
+ for idnum, name, tag_name in tag_info.values():
+ self.db.ser_ver_set_archive_tag(idnum, tag_name)
+
+ # Delete the branches
+ for idnum, name, tag_name in tag_info.values():
+ # Detach HEAD from the branch if pointing to this branch
+ commit = repo.revparse_single(name)
+ if repo.head.target == commit.oid:
+ repo.set_head(commit.oid)
+
+ repo.branches.delete(name)
+
+ self.db.series_set_archived(ser.idnum, True)
+ self.commit()
+
+ def unarchive(self, series):
+ """Unarchive a series
+
+ Args:
+ series (str): Name of series to use, or None to use current branch
+ """
+ ser = self._parse_series(series, include_archived=True)
+ if not ser.idnum:
+ raise ValueError(f"Series '{ser.name}' not found in database")
+ self.db.series_set_archived(ser.idnum, False)
+
+ svlist = self.db.ser_ver_get_for_series(ser.idnum)
+
+ # Collect the tags
+ repo = pygit2.init_repository(self.gitdir)
+ tag_info = {}
+ for svi in svlist:
+ name = self._get_branch_name(ser.name, svi.version)
+ target = repo.revparse_single(svi.archive_tag)
+ tag_info[svi.idnum] = name, svi.archive_tag, target
+
+ # Make sure the branches don't exist
+ for name, tag_name, tag in tag_info.values():
+ if name in repo.branches:
+ raise ValueError(
+ f"Cannot restore branch '{name}': already exists")
+
+ # Recreate the branches
+ for name, tag_name, tag in tag_info.values():
+ target = repo.get(tag.target)
+ repo.branches.create(name, target)
+
+ # Delete the tags
+ for name, tag_name, tag in tag_info.values():
+ repo.references.delete(f'refs/tags/{tag_name}')
+
+ # Update the database
+ for idnum, (name, tag_name, tag) in tag_info.items():
+ self.db.ser_ver_set_archive_tag(idnum, None)
+
+ self.commit()
+
+ def status(self, pwork, series, version, show_comments,
+ show_cover_comments=False):
+ """Show the series status from patchwork
+
+ Args:
+ pwork (Patchwork): Patchwork object to use
+ series (str): Name of series to use, or None to use current branch
+ version (int): Version number, or None to detect from name
+ show_comments (bool): Show all comments on each patch
+ show_cover_comments (bool): Show all comments on the cover letter
+ """
+ branch, series, version, _, _, link, _, _ = self._get_patches(
+ series, version)
+ if not link:
+ raise ValueError(
+ f"Series '{series.name}' v{version} has no patchwork link: "
+ f"Try 'patman series -s {branch} autolink'")
+ status.check_and_show_status(
+ series, link, branch, None, False, show_comments,
+ show_cover_comments, pwork, self.gitdir)
+
+ def summary(self, series):
+ """Show summary information for all series
+
+ Args:
+ series (str): Name of series to use
+ """
+ print(f"{'Name':17} Status Description")
+ print(f"{'-' * 17} {'-' * 6} {'-' * 30}")
+ if series is not None:
+ self._summary_one(self._parse_series(series))
+ return
+
+ sdict = self.db.series_get_dict()
+ for ser in sdict.values():
+ self._summary_one(ser)
+
+ def gather(self, pwork, series, version, show_comments,
+ show_cover_comments, gather_tags, dry_run=False):
+ """Gather any new tags from Patchwork, optionally showing comments
+
+ Args:
+ pwork (Patchwork): Patchwork object to use
+ series (str): Name of series to use, or None to use current branch
+ version (int): Version number, or None to detect from name
+ show_comments (bool): True to show the comments on each patch
+ show_cover_comments (bool): True to show the comments on the cover
+ letter
+ gather_tags (bool): True to gather review/test tags
+ dry_run (bool): True to do a dry run (database is not updated)
+ """
+ ser, version = self._parse_series_and_version(series, version)
+ self._ensure_version(ser, version)
+ svid, link = self._get_series_svid_link(ser.idnum, version)
+ if not link:
+ raise ValueError(
+ "No patchwork link is available: use 'patman series autolink'")
+ tout.info(
+ f"Updating series '{ser.name}' version {version} "
+ f"from link '{link}'")
+
+ loop = asyncio.get_event_loop()
+ with pwork.collect_stats() as stats:
+ cover, patches = loop.run_until_complete(self._gather(
+ pwork, link, show_cover_comments))
+
+ with terminal.pager():
+ updated, updated_cover = self._sync_one(
+ svid, ser.name, version, show_comments, show_cover_comments,
+ gather_tags, cover, patches, dry_run)
+ tout.info(f"{updated} patch{'es' if updated != 1 else ''}"
+ f"{' and cover letter' if updated_cover else ''} "
+ f'updated ({stats.request_count} requests)')
+
+ if not dry_run:
+ self.commit()
+ else:
+ self.rollback()
+ tout.info('Dry run completed')
+
+ def gather_all(self, pwork, show_comments, show_cover_comments,
+ sync_all_versions, gather_tags, dry_run=False):
+ to_fetch, missing = self._get_fetch_dict(sync_all_versions)
+
+ loop = asyncio.get_event_loop()
+ result, requests = loop.run_until_complete(self._do_series_sync_all(
+ pwork, to_fetch))
+
+ with terminal.pager():
+ tot_updated = 0
+ tot_cover = 0
+ add_newline = False
+ for (svid, sync), (cover, patches) in zip(to_fetch.items(),
+ result):
+ if add_newline:
+ tout.info('')
+ tout.info(f"Syncing '{sync.series_name}' v{sync.version}")
+ updated, updated_cover = self._sync_one(
+ svid, sync.series_name, sync.version, show_comments,
+ show_cover_comments, gather_tags, cover, patches, dry_run)
+ tot_updated += updated
+ tot_cover += updated_cover
+ add_newline = gather_tags
+
+ tout.info('')
+ tout.info(
+ f"{tot_updated} patch{'es' if tot_updated != 1 else ''} and "
+ f"{tot_cover} cover letter{'s' if tot_cover != 1 else ''} "
+ f'updated, {missing} missing '
+ f"link{'s' if missing != 1 else ''} ({requests} requests)")
+ if not dry_run:
+ self.commit()
+ else:
+ self.rollback()
+ tout.info('Dry run completed')
+
+ def upstream_add(self, name, url):
+ """Add a new upstream tree
+
+ Args:
+ name (str): Name of the tree
+ url (str): URL for the tree
+ """
+ self.db.upstream_add(name, url)
+ self.commit()
+
+ def upstream_list(self):
+ """List the upstream repos
+
+ Shows a list of the repos, obtained from the database
+ """
+ udict = self.get_upstream_dict()
+
+ for name, items in udict.items():
+ url, is_default = items
+ default = 'default' if is_default else ''
+ print(f'{name:15.15} {default:8} {url}')
+
+ def upstream_set_default(self, name):
+ """Set the default upstream target
+
+ Args:
+ name (str): Name of the upstream remote to set as default, or None
+ for none
+ """
+ self.db.upstream_set_default(name)
+ self.commit()
+
+ def upstream_get_default(self):
+ """Get the default upstream target
+
+ Return:
+ str: Name of the upstream remote to set as default, or None if none
+ """
+ return self.db.upstream_get_default()
+
+ def upstream_delete(self, name):
+ """Delete an upstream target
+
+ Args:
+ name (str): Name of the upstream remote to delete
+ """
+ self.db.upstream_delete(name)
+ self.commit()
+
+ def version_remove(self, name, version, dry_run=False):
+ """Remove a version of a series from the database
+
+ Args:
+ name (str): Name of series to remove, or None to use current one
+ version (int): Version number to remove
+ dry_run (bool): True to do a dry run
+ """
+ ser, version = self._parse_series_and_version(name, version)
+ name = ser.name
+
+ versions = self._ensure_version(ser, version)
+
+ if versions == [version]:
+ raise ValueError(
+ f"Series '{ser.name}' only has one version: remove the series")
+
+ self.db.ser_ver_remove(ser.idnum, version)
+ if not dry_run:
+ self.commit()
+ else:
+ self.rollback()
+
+ tout.info(f"Removed version {version} from series '{name}'")
+ if dry_run:
+ tout.info('Dry run completed')
+
+ def version_change(self, name, version, new_version, dry_run=False):
+ """Change a version of a series to be a different version
+
+ Args:
+ name (str): Name of series to remove, or None to use current one
+ version (int): Version number to change
+ new_version (int): New version
+ dry_run (bool): True to do a dry run
+ """
+ ser, version = self._parse_series_and_version(name, version)
+ name = ser.name
+
+ versions = self._ensure_version(ser, version)
+ vstr = list(map(str, versions))
+ if version not in versions:
+ raise ValueError(
+ f"Series '{ser.name}' does not have v{version}: "
+ f"{' '.join(vstr)}")
+
+ if not new_version:
+ raise ValueError('Please provide a new version number')
+
+ if new_version in versions:
+ raise ValueError(
+ f"Series '{ser.name}' already has a v{new_version}: "
+ f"{' '.join(vstr)}")
+
+ new_name = self._join_name_version(ser.name, new_version)
+
+ svid = self.get_series_svid(ser.idnum, version)
+ pwc = self.get_pcommit_dict(svid)
+ count = len(pwc.values())
+ series = patchstream.get_metadata(name, 0, count, git_dir=self.gitdir)
+
+ self.update_series(name, series, version, new_name, dry_run,
+ add_vers=new_version, switch=True)
+ self.db.ser_ver_set_version(svid, new_version)
+
+ if not dry_run:
+ self.commit()
+ else:
+ self.rollback()
+
+ tout.info(f"Changed version {version} in series '{ser.name}' "
+ f"to {new_version} named '{new_name}'")
+ if dry_run:
+ tout.info('Dry run completed')
diff --git a/tools/patman/database.py b/tools/patman/database.py
new file mode 100644
index 00000000000..9c25b04a720
--- /dev/null
+++ b/tools/patman/database.py
@@ -0,0 +1,823 @@
+# SPDX-License-Identifier: GPL-2.0+
+#
+# Copyright 2025 Simon Glass <sjg@chromium.org>
+#
+"""Handles the patman database
+
+This uses sqlite3 with a local file.
+
+To adjust the schema, increment LATEST, create a migrate_to_v<x>() function
+and write some code in migrate_to() to call it.
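+
+For example, a hypothetical v5 migration that adds a column might look
+something like this (an illustrative sketch only):
+
+    def _migrate_to_v5(self):
+        '''Add a notes column to each series'''
+        self.cur.execute('ALTER TABLE series ADD COLUMN notes')
+
+with a matching 'elif version == 5: self._migrate_to_v5()' case in
+migrate_to().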
+"""
+
+from collections import namedtuple, OrderedDict
+import os
+import sqlite3
+
+from u_boot_pylib import tools
+from u_boot_pylib import tout
+from patman.series import Series
+
+# Schema version (version 0 means there is no database yet)
+LATEST = 4
+
+# Information about a series/version record
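+# idnum (int): record ID
+# series_id (int): ID of the series record in the series table
+# version (int): Version number of the series
+# link (str): Patchwork link for this series version
+# cover_id (int): Patchwork ID of the cover letter
+# cover_num_comments (int): Number of comments on the cover letter
+# name (str): Name of the series
+# archive_tag (str): Tag created when this series version was archived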
+SerVer = namedtuple(
+ 'SER_VER',
+ 'idnum,series_id,version,link,cover_id,cover_num_comments,name,'
+ 'archive_tag')
+
+# Record from the pcommit table:
+# idnum (int): record ID
+# seq (int): Patch sequence in series (0 is first)
+# subject (str): patch subject
+# svid (int): ID of series/version record in ser_ver table
+# change_id (str): Change-ID value
+# state (str): Current status in patchwork
+# patch_id (int): Patchwork's patch ID for this patch
+# num_comments (int): Number of comments attached to the commit
+Pcommit = namedtuple(
+ 'PCOMMIT',
+ 'idnum,seq,subject,svid,change_id,state,patch_id,num_comments')
+
+
+class Database:
+ """Database of information used by patman"""
+
+ # dict of databases:
+ # key: filename
+ # value: Database object
+ instances = {}
+
+ def __init__(self, db_path):
+ """Set up a new database object
+
+ Args:
+ db_path (str): Path to the database
+ """
+ if db_path in Database.instances:
+ # Two connections to the database can cause:
+ # sqlite3.OperationalError: database is locked
+ raise ValueError(f"There is already a database for '{db_path}'")
+ self.con = None
+ self.cur = None
+ self.db_path = db_path
+ self.is_open = False
+ Database.instances[db_path] = self
+
+ @staticmethod
+ def get_instance(db_path):
+ """Get the database instance for a path
+
+        This is provided to ensure that different callers can obtain the
+ same database object when accessing the same database file.
+
+ Args:
+ db_path (str): Path to the database
+
+        Return:
+            tuple:
+                Database: Database instance, created if necessary
+                bool: True if the instance was newly created, else False
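+
+        Typical use (illustrative):
+            db, created = Database.get_instance(path)
+            db.start()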
+ """
+ db = Database.instances.get(db_path)
+ if db:
+ return db, False
+ return Database(db_path), True
+
+ def start(self):
+ """Open the database read for use, migrate to latest schema"""
+ self.open_it()
+ self.migrate_to(LATEST)
+
+ def open_it(self):
+ """Open the database, creating it if necessary"""
+ if self.is_open:
+ raise ValueError('Already open')
+ if not os.path.exists(self.db_path):
+ tout.warning(f'Creating new database {self.db_path}')
+ self.con = sqlite3.connect(self.db_path)
+ self.cur = self.con.cursor()
+ self.is_open = True
+
+ def close(self):
+ """Close the database"""
+ if not self.is_open:
+ raise ValueError('Already closed')
+ self.con.close()
+ self.cur = None
+ self.con = None
+ self.is_open = False
+
+ def create_v1(self):
+ """Create a database with the v1 schema"""
+ self.cur.execute(
+ 'CREATE TABLE series (id INTEGER PRIMARY KEY AUTOINCREMENT,'
+ 'name UNIQUE, desc, archived BIT)')
+
+ # Provides a series_id/version pair, which is used to refer to a
+ # particular series version sent to patchwork. This stores the link
+ # to patchwork
+ self.cur.execute(
+ 'CREATE TABLE ser_ver (id INTEGER PRIMARY KEY AUTOINCREMENT,'
+ 'series_id INTEGER, version INTEGER, link,'
+ 'FOREIGN KEY (series_id) REFERENCES series (id))')
+
+ self.cur.execute(
+ 'CREATE TABLE upstream (name UNIQUE, url, is_default BIT)')
+
+ # change_id is the Change-Id
+ # patch_id is the ID of the patch on the patchwork server
+ self.cur.execute(
+ 'CREATE TABLE pcommit (id INTEGER PRIMARY KEY AUTOINCREMENT,'
+ 'svid INTEGER, seq INTEGER, subject, patch_id INTEGER, '
+ 'change_id, state, num_comments INTEGER, '
+ 'FOREIGN KEY (svid) REFERENCES ser_ver (id))')
+
+ self.cur.execute(
+ 'CREATE TABLE settings (name UNIQUE, proj_id INT, link_name)')
+
+ def _migrate_to_v2(self):
+ """Add a schema_version table"""
+ self.cur.execute('CREATE TABLE schema_version (version INTEGER)')
+
+ def _migrate_to_v3(self):
+ """Store the number of cover-letter comments in the schema"""
+ self.cur.execute('ALTER TABLE ser_ver ADD COLUMN cover_id')
+ self.cur.execute('ALTER TABLE ser_ver ADD COLUMN cover_num_comments '
+ 'INTEGER')
+ self.cur.execute('ALTER TABLE ser_ver ADD COLUMN name')
+
+ def _migrate_to_v4(self):
+ """Add an archive tag for each ser_ver"""
+ self.cur.execute('ALTER TABLE ser_ver ADD COLUMN archive_tag')
+
+ def migrate_to(self, dest_version):
+ """Migrate the database to the selected version
+
+ Args:
+ dest_version (int): Version to migrate to
+ """
+ while True:
+ version = self.get_schema_version()
+ if version == dest_version:
+ break
+
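+            # Take a backup of the database file before migrating it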
+ self.close()
+ tools.write_file(f'{self.db_path}old.v{version}',
+ tools.read_file(self.db_path))
+
+ version += 1
+ tout.info(f'Update database to v{version}')
+ self.open_it()
+ if version == 1:
+ self.create_v1()
+ elif version == 2:
+ self._migrate_to_v2()
+ elif version == 3:
+ self._migrate_to_v3()
+ elif version == 4:
+ self._migrate_to_v4()
+
+ # Save the new version if we have a schema_version table
+ if version > 1:
+ self.cur.execute('DELETE FROM schema_version')
+ self.cur.execute(
+ 'INSERT INTO schema_version (version) VALUES (?)',
+ (version,))
+ self.commit()
+
+ def get_schema_version(self):
+ """Get the version of the database's schema
+
+ Return:
+            int: Database version; 0 means there is no database yet; anything
+                less than LATEST means the schema is out of date and must be
+                updated
+ """
+ # If there is no database at all, assume v0
+        try:
+ self.cur.execute('SELECT name FROM series')
+ except sqlite3.OperationalError:
+ return 0
+
+ # If there is no schema, assume v1
+ try:
+ self.cur.execute('SELECT version FROM schema_version')
+ version = self.cur.fetchone()[0]
+ except sqlite3.OperationalError:
+ return 1
+ return version
+
+ def execute(self, query, parameters=()):
+ """Execute a database query
+
+ Args:
+ query (str): Query string
+ parameters (list of values): Parameters to pass
+
+        Return:
+            sqlite3.Cursor: Cursor for the query, from which results can
+                be fetched
+ """
+ return self.cur.execute(query, parameters)
+
+ def commit(self):
+ """Commit changes to the database"""
+ self.con.commit()
+
+ def rollback(self):
+ """Roll back changes to the database"""
+ self.con.rollback()
+
+ def lastrowid(self):
+ """Get the last row-ID reported by the database
+
+ Return:
+ int: Value for lastrowid
+ """
+ return self.cur.lastrowid
+
+ def rowcount(self):
+ """Get the row-count reported by the database
+
+ Return:
+ int: Value for rowcount
+ """
+ return self.cur.rowcount
+
+ def _get_series_list(self, include_archived):
+ """Get a list of Series objects from the database
+
+ Args:
+            include_archived (bool): True to include archived series
+
+ Return:
+ list of Series
+ """
+ res = self.execute(
+ 'SELECT id, name, desc FROM series ' +
+ ('WHERE archived = 0' if not include_archived else ''))
+ return [Series.from_fields(idnum=idnum, name=name, desc=desc)
+ for idnum, name, desc in res.fetchall()]
+
+ # series functions
+
+ def series_get_dict_by_id(self, include_archived=False):
+ """Get a dict of Series objects from the database
+
+ Args:
+            include_archived (bool): True to include archived series
+
+ Return:
+ OrderedDict:
+ key: series ID
+ value: Series with idnum, name and desc filled out
+ """
+ sdict = OrderedDict()
+ for ser in self._get_series_list(include_archived):
+ sdict[ser.idnum] = ser
+ return sdict
+
+ def series_find_by_name(self, name, include_archived=False):
+ """Find a series and return its details
+
+ Args:
+ name (str): Name to search for
+            include_archived (bool): True to include archived series
+
+        Returns:
+            int: ID num of the series, or None if not found
+ """
+        res = self.execute(
+            'SELECT id FROM series WHERE name = ?' +
+            (' AND archived = 0' if not include_archived else ''), (name,))
+ recs = res.fetchall()
+
+ # This shouldn't happen
+ assert len(recs) <= 1, 'Expected one match, but multiple found'
+
+ if len(recs) != 1:
+ return None
+ return recs[0][0]
+
+ def series_get_info(self, idnum):
+ """Get information for a series from the database
+
+ Args:
+ idnum (int): Series ID to look up
+
+ Return: tuple:
+ str: Series name
+ str: Series description
+
+ Raises:
+ ValueError: Series is not found
+ """
+ res = self.execute('SELECT name, desc FROM series WHERE id = ?',
+ (idnum,))
+ recs = res.fetchall()
+ if len(recs) != 1:
+ raise ValueError(f'No series found (id {idnum} len {len(recs)})')
+ return recs[0]
+
+ def series_get_dict(self, include_archived=False):
+ """Get a dict of Series objects from the database
+
+ Args:
+            include_archived (bool): True to include archived series
+
+ Return:
+ OrderedDict:
+ key: series name
+ value: Series with idnum, name and desc filled out
+ """
+ sdict = OrderedDict()
+ for ser in self._get_series_list(include_archived):
+ sdict[ser.name] = ser
+ return sdict
+
+ def series_get_version_list(self, series_idnum):
+ """Get a list of the versions available for a series
+
+ Args:
+ series_idnum (int): ID of series to look up
+
+ Return:
+            list of int: Versions found, which may be empty if the series is
+                in the process of being added
+ """
+ res = self.execute('SELECT version FROM ser_ver WHERE series_id = ?',
+ (series_idnum,))
+ return [x[0] for x in res.fetchall()]
+
+ def series_get_max_version(self, series_idnum):
+ """Get the highest version number available for a series
+
+ Args:
+ series_idnum (int): ID of series to look up
+
+ Return:
+ int: Maximum version number
+ """
+ res = self.execute(
+ 'SELECT MAX(version) FROM ser_ver WHERE series_id = ?',
+ (series_idnum,))
+ return res.fetchall()[0][0]
+
+ def series_get_all_max_versions(self):
+ """Find the latest version of all series
+
+ Return: list of:
+ int: ser_ver ID
+ int: series ID
+ int: Maximum version
+ """
+ res = self.execute(
+ 'SELECT id, series_id, MAX(version) FROM ser_ver '
+ 'GROUP BY series_id')
+ return res.fetchall()
+
+ def series_add(self, name, desc):
+ """Add a new series record
+
+ The new record is set to not archived
+
+ Args:
+ name (str): Series name
+ desc (str): Series description
+
+ Return:
+ int: ID num of the new series record
+ """
+        self.execute(
+            'INSERT INTO series (name, desc, archived) VALUES (?, ?, 0)',
+            (name, desc))
+ return self.lastrowid()
+
+ def series_remove(self, idnum):
+ """Remove a series from the database
+
+ The series must exist
+
+ Args:
+ idnum (int): ID num of series to remove
+ """
+ self.execute('DELETE FROM series WHERE id = ?', (idnum,))
+ assert self.rowcount() == 1
+
+ def series_remove_by_name(self, name):
+ """Remove a series from the database
+
+ Args:
+ name (str): Name of series to remove
+
+ Raises:
+ ValueError: Series does not exist (database is rolled back)
+ """
+ self.execute('DELETE FROM series WHERE name = ?', (name,))
+ if self.rowcount() != 1:
+ self.rollback()
+ raise ValueError(f"No such series '{name}'")
+
+ def series_set_archived(self, series_idnum, archived):
+ """Update archive flag for a series
+
+ Args:
+ series_idnum (int): ID num of the series
+ archived (bool): Whether to mark the series as archived or
+ unarchived
+ """
+ self.execute(
+ 'UPDATE series SET archived = ? WHERE id = ?',
+ (archived, series_idnum))
+
+ def series_set_name(self, series_idnum, name):
+ """Update name for a series
+
+ Args:
+ series_idnum (int): ID num of the series
+ name (str): new name to use
+ """
+ self.execute(
+ 'UPDATE series SET name = ? WHERE id = ?', (name, series_idnum))
+
+ # ser_ver functions
+
+ def ser_ver_get_link(self, series_idnum, version):
+ """Get the link for a series version
+
+ Args:
+ series_idnum (int): ID num of the series
+ version (int): Version number to search for
+
+ Return:
+ str: Patchwork link as a string, e.g. '12325', or None if none
+
+ Raises:
+ ValueError: Multiple matches are found
+ """
+        res = self.execute(
+            'SELECT link FROM ser_ver WHERE series_id = ? AND version = ?',
+            (series_idnum, version))
+ recs = res.fetchall()
+ if not recs:
+ return None
+ if len(recs) > 1:
+ raise ValueError('Expected one match, but multiple matches found')
+ return recs[0][0]
+
+ def ser_ver_set_link(self, series_idnum, version, link):
+ """Set the link for a series version
+
+ Args:
+ series_idnum (int): ID num of the series
+ version (int): Version number to search for
+ link (str): Patchwork link for the ser_ver
+
+ Return:
+ bool: True if the record was found and updated, else False
+ """
+ if link is None:
+ link = ''
+ self.execute(
+ 'UPDATE ser_ver SET link = ? WHERE series_id = ? AND version = ?',
+ (str(link), series_idnum, version))
+ return self.rowcount() != 0
+
+ def ser_ver_set_info(self, info):
+ """Set the info for a series version
+
+ Args:
+ info (SER_VER): Info to set. Only two options are supported:
+ 1: svid,cover_id,cover_num_comments,name
+ 2: svid,name
+
+ Return:
+ bool: True if the record was found and updated, else False
+ """
+ assert info.idnum is not None
+ if info.cover_id:
+ assert info.series_id is None
+ self.execute(
+ 'UPDATE ser_ver SET cover_id = ?, cover_num_comments = ?, '
+ 'name = ? WHERE id = ?',
+ (info.cover_id, info.cover_num_comments, info.name,
+ info.idnum))
+ else:
+ assert not info.cover_id
+ assert not info.cover_num_comments
+ assert not info.series_id
+ assert not info.version
+ assert not info.link
+ self.execute('UPDATE ser_ver SET name = ? WHERE id = ?',
+ (info.name, info.idnum))
+
+ return self.rowcount() != 0
+
+ def ser_ver_set_version(self, svid, version):
+ """Sets the version for a ser_ver record
+
+ Args:
+ svid (int): Record ID to update
+ version (int): Version number to add
+
+ Raises:
+ ValueError: svid was not found
+ """
+ self.execute(
+ 'UPDATE ser_ver SET version = ? WHERE id = ?', (version, svid))
+ if self.rowcount() != 1:
+ raise ValueError(f'No ser_ver updated (svid {svid})')
+
+ def ser_ver_set_archive_tag(self, svid, tag):
+ """Sets the archive tag for a ser_ver record
+
+ Args:
+ svid (int): Record ID to update
+            tag (str): Archive tag to set
+
+ Raises:
+ ValueError: svid was not found
+ """
+ self.execute(
+ 'UPDATE ser_ver SET archive_tag = ? WHERE id = ?', (tag, svid))
+ if self.rowcount() != 1:
+ raise ValueError(f'No ser_ver updated (svid {svid})')
+
+ def ser_ver_add(self, series_idnum, version, link=None):
+ """Add a new ser_ver record
+
+ Args:
+ series_idnum (int): ID num of the series which is getting a new
+ version
+ version (int): Version number to add
+ link (str): Patchwork link, or None if not known
+
+ Return:
+ int: ID num of the new ser_ver record
+ """
+ self.execute(
+ 'INSERT INTO ser_ver (series_id, version, link) VALUES (?, ?, ?)',
+ (series_idnum, version, link))
+ return self.lastrowid()
+
+ def ser_ver_get_for_series(self, series_idnum, version=None):
+ """Get a list of ser_ver records for a given series ID
+
+ Args:
+ series_idnum (int): ID num of the series to search
+ version (int): Version number to search for, or None for all
+
+        Return:
+            SER_VER: Requested record if version is given, else
+            list of SER_VER: All records for the series
+
+ Raises:
+ ValueError: There is no matching idnum/version
+ """
+ base = ('SELECT id, series_id, version, link, cover_id, '
+ 'cover_num_comments, name, archive_tag FROM ser_ver '
+ 'WHERE series_id = ?')
+ if version:
+ res = self.execute(base + ' AND version = ?',
+ (series_idnum, version))
+ else:
+ res = self.execute(base, (series_idnum,))
+ recs = res.fetchall()
+ if not recs:
+ raise ValueError(
+ f'No matching series for id {series_idnum} version {version}')
+ if version:
+ return SerVer(*recs[0])
+ return [SerVer(*x) for x in recs]
+
+ def ser_ver_get_ids_for_series(self, series_idnum, version=None):
+ """Get a list of ser_ver records for a given series ID
+
+ Args:
+ series_idnum (int): ID num of the series to search
+ version (int): Version number to search for, or None for all
+
+ Return:
+ list of int: List of svids for the matching records
+ """
+ if version:
+ res = self.execute(
+ 'SELECT id FROM ser_ver WHERE series_id = ? AND version = ?',
+ (series_idnum, version))
+ else:
+ res = self.execute(
+ 'SELECT id FROM ser_ver WHERE series_id = ?', (series_idnum,))
+        return [rec[0] for rec in res.fetchall()]
+
+ def ser_ver_get_list(self):
+ """Get a list of patchwork entries from the database
+
+ Return:
+ list of SER_VER
+ """
+ res = self.execute(
+ 'SELECT id, series_id, version, link, cover_id, '
+ 'cover_num_comments, name, archive_tag FROM ser_ver')
+ items = res.fetchall()
+ return [SerVer(*x) for x in items]
+
+ def ser_ver_remove(self, series_idnum, version=None, remove_pcommits=True,
+ remove_series=True):
+ """Delete a ser_ver record
+
+ Removes the record which has the given series ID num and version
+
+ Args:
+ series_idnum (int): ID num of the series
+ version (int): Version number, or None to remove all versions
+ remove_pcommits (bool): True to remove associated pcommits too
+            remove_series (bool): True to remove the series if version is None
+ """
+ if remove_pcommits:
+ # Figure out svids to delete
+ svids = self.ser_ver_get_ids_for_series(series_idnum, version)
+
+ self.pcommit_delete_list(svids)
+
+ if version:
+ self.execute(
+ 'DELETE FROM ser_ver WHERE series_id = ? AND version = ?',
+ (series_idnum, version))
+ else:
+ self.execute(
+ 'DELETE FROM ser_ver WHERE series_id = ?',
+ (series_idnum,))
+ if not version and remove_series:
+ self.series_remove(series_idnum)
+
+ # pcommit functions
+
+ def pcommit_get_list(self, find_svid=None):
+ """Get a dict of pcommits entries from the database
+
+ Args:
+ find_svid (int): If not None, finds the records associated with a
+ particular series and version; otherwise returns all records
+
+ Return:
+ list of PCOMMIT: pcommit records
+ """
+ query = ('SELECT id, seq, subject, svid, change_id, state, patch_id, '
+ 'num_comments FROM pcommit')
+ if find_svid is not None:
+ query += f' WHERE svid = {find_svid}'
+ res = self.execute(query)
+ return [Pcommit(*rec) for rec in res.fetchall()]
+
+ def pcommit_add_list(self, svid, pcommits):
+ """Add records to the pcommit table
+
+ Args:
+ svid (int): ser_ver ID num
+            pcommits (list of PCOMMIT): Only seq, subject and change_id are
+                used; svid comes from the argument passed in and the others
+                are assumed to be obtained from patchwork later
+ """
+ for pcm in pcommits:
+ self.execute(
+ 'INSERT INTO pcommit (svid, seq, subject, change_id) VALUES '
+ '(?, ?, ?, ?)', (svid, pcm.seq, pcm.subject, pcm.change_id))
+
+ def pcommit_delete(self, svid):
+ """Delete pcommit records for a given ser_ver ID
+
+        Args:
+ svid (int): ser_ver ID num of records to delete
+ """
+ self.execute('DELETE FROM pcommit WHERE svid = ?', (svid,))
+
+    def pcommit_delete_list(self, svid_list):
+        """Delete pcommit records for a given set of ser_ver IDs
+
+        Args:
+            svid_list (list of int): ser_ver ID nums of records to delete
+        """
+        # Use one placeholder per ID so each value is bound individually
+        qmarks = ', '.join('?' * len(svid_list))
+        self.execute(f'DELETE FROM pcommit WHERE svid IN ({qmarks})',
+                     svid_list)
+
+ def pcommit_update(self, pcm):
+ """Update a pcommit record
+
+ Args:
+ pcm (PCOMMIT): Information to write; only the idnum, state,
+ patch_id and num_comments are used
+
+ Return:
+ True if the data was written
+ """
+ self.execute(
+ 'UPDATE pcommit SET '
+ 'patch_id = ?, state = ?, num_comments = ? WHERE id = ?',
+ (pcm.patch_id, pcm.state, pcm.num_comments, pcm.idnum))
+ return self.rowcount() > 0
+
+ # upstream functions
+
+ def upstream_add(self, name, url):
+ """Add a new upstream record
+
+ Args:
+ name (str): Name of the tree
+ url (str): URL for the tree
+
+ Raises:
+ ValueError if the name already exists in the database
+ """
+ try:
+ self.execute(
+ 'INSERT INTO upstream (name, url) VALUES (?, ?)', (name, url))
+        except sqlite3.IntegrityError as exc:
+            if 'UNIQUE constraint failed: upstream.name' in str(exc):
+                raise ValueError(f"Upstream '{name}' already exists") from exc
+            raise
+
+ def upstream_set_default(self, name):
+ """Mark (only) the given upstream as the default
+
+ Args:
+ name (str): Name of the upstream remote to set as default, or None
+
+        Raises:
+            ValueError: No upstream with that name exists (database is
+                rolled back)
+ """
+ self.execute("UPDATE upstream SET is_default = 0")
+ if name is not None:
+ self.execute(
+ 'UPDATE upstream SET is_default = 1 WHERE name = ?', (name,))
+ if self.rowcount() != 1:
+ self.rollback()
+ raise ValueError(f"No such upstream '{name}'")
+
+ def upstream_get_default(self):
+ """Get the name of the default upstream
+
+ Return:
+ str: Default-upstream name, or None if there is no default
+ """
+ res = self.execute(
+ "SELECT name FROM upstream WHERE is_default = 1")
+ recs = res.fetchall()
+ if len(recs) != 1:
+ return None
+ return recs[0][0]
+
+ def upstream_delete(self, name):
+ """Delete an upstream target
+
+ Args:
+ name (str): Name of the upstream remote to delete
+
+ Raises:
+ ValueError: Upstream does not exist (database is rolled back)
+ """
+ self.execute(f"DELETE FROM upstream WHERE name = '{name}'")
+ if self.rowcount() != 1:
+ self.rollback()
+ raise ValueError(f"No such upstream '{name}'")
+
+ def upstream_get_dict(self):
+ """Get a list of upstream entries from the database
+
+ Return:
+ OrderedDict:
+ key (str): upstream name
+                value (tuple): url (str), is_default (bool)
+ """
+ res = self.execute('SELECT name, url, is_default FROM upstream')
+ udict = OrderedDict()
+ for name, url, is_default in res.fetchall():
+ udict[name] = url, is_default
+ return udict
+
+ # settings functions
+
+ def settings_update(self, name, proj_id, link_name):
+ """Set the patchwork settings of the project
+
+ Args:
+ name (str): Name of the project to use in patchwork
+ proj_id (int): Project ID for the project
+ link_name (str): Link name for the project
+ """
+ self.execute('DELETE FROM settings')
+ self.execute(
+ 'INSERT INTO settings (name, proj_id, link_name) '
+ 'VALUES (?, ?, ?)', (name, proj_id, link_name))
+
+ def settings_get(self):
+ """Get the patchwork settings of the project
+
+ Returns:
+ tuple or None if there are no settings:
+ name (str): Project name, e.g. 'U-Boot'
+                proj_id (int): Patchwork project ID for this project
+ link_name (str): Patchwork's link-name for the project
+ """
+ res = self.execute("SELECT name, proj_id, link_name FROM settings")
+ recs = res.fetchall()
+ if len(recs) != 1:
+ return None
+ return recs[0]
diff --git a/tools/patman/func_test.py b/tools/patman/func_test.py
index 720746e21f5..d029181765c 100644
--- a/tools/patman/func_test.py
+++ b/tools/patman/func_test.py
@@ -6,29 +6,31 @@
"""Functional tests for checking that patman behaves correctly"""
+import asyncio
import contextlib
import os
import pathlib
import re
import shutil
import sys
-import tempfile
import unittest
+import pygit2
+
+from u_boot_pylib import command
+from u_boot_pylib import gitutil
+from u_boot_pylib import terminal
+from u_boot_pylib import tools
from patman.commit import Commit
from patman import control
from patman import patchstream
from patman.patchstream import PatchStream
+from patman import patchwork
+from patman import send
from patman.series import Series
-from patman import settings
-from u_boot_pylib import gitutil
-from u_boot_pylib import terminal
-from u_boot_pylib import tools
-from u_boot_pylib.test_util import capture_sys_output
-
-import pygit2
from patman import status
+from patman.test_common import TestCommon
PATMAN_DIR = pathlib.Path(__file__).parent
TEST_DATA_DIR = PATMAN_DIR / 'test/'
@@ -45,10 +47,8 @@ def directory_excursion(directory):
os.chdir(current)
-class TestFunctional(unittest.TestCase):
+class TestFunctional(unittest.TestCase, TestCommon):
"""Functional tests for checking that patman behaves correctly"""
- leb = (b'Lord Edmund Blackadd\xc3\xabr <weasel@blackadder.org>'.
- decode('utf-8'))
fred = 'Fred Bloggs <f.bloggs@napier.net>'
joe = 'Joe Bloggs <joe@napierwallies.co.nz>'
mary = 'Mary Bloggs <mary@napierwallies.co.nz>'
@@ -56,13 +56,13 @@ class TestFunctional(unittest.TestCase):
patches = None
def setUp(self):
- self.tmpdir = tempfile.mkdtemp(prefix='patman.')
- self.gitdir = os.path.join(self.tmpdir, 'git')
+ TestCommon.setUp(self)
self.repo = None
+ self._patman_pathname = sys.argv[0]
+ self._patman_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
def tearDown(self):
- shutil.rmtree(self.tmpdir)
- terminal.set_print_test_mode(False)
+ TestCommon.tearDown(self)
@staticmethod
def _get_path(fname):
@@ -153,7 +153,7 @@ class TestFunctional(unittest.TestCase):
Commit-changes: 2
- Changes only for this commit
-' Cover-changes: 4
+ Cover-changes: 4
- Some notes for the cover letter
Cover-letter:
@@ -199,14 +199,15 @@ class TestFunctional(unittest.TestCase):
"""
process_tags = True
ignore_bad_tags = False
- stefan = b'Stefan Br\xc3\xbcns <stefan.bruens@rwth-aachen.de>'.decode('utf-8')
+ stefan = (b'Stefan Br\xc3\xbcns <stefan.bruens@rwth-aachen.de>'
+ .decode('utf-8'))
rick = 'Richard III <richard@palace.gov>'
mel = b'Lord M\xc3\xablchett <clergy@palace.gov>'.decode('utf-8')
add_maintainers = [stefan, rick]
dry_run = True
in_reply_to = mel
count = 2
- settings.alias = {
+ alias = {
'fdt': ['simon'],
'u-boot': ['u-boot@lists.denx.de'],
'simon': [self.leb],
@@ -221,58 +222,58 @@ class TestFunctional(unittest.TestCase):
cover_fname, args = self._create_patches_for_test(series)
get_maintainer_script = str(pathlib.Path(__file__).parent.parent.parent
/ 'get_maintainer.pl') + ' --norolestats'
- with capture_sys_output() as out:
+ with terminal.capture() as out:
patchstream.fix_patches(series, args)
if cover_fname and series.get('cover'):
patchstream.insert_cover_letter(cover_fname, series, count)
series.DoChecks()
cc_file = series.MakeCcFile(process_tags, cover_fname,
not ignore_bad_tags, add_maintainers,
- None, get_maintainer_script)
+ None, get_maintainer_script, alias)
cmd = gitutil.email_patches(
series, cover_fname, args, dry_run, not ignore_bad_tags,
- cc_file, in_reply_to=in_reply_to, thread=None)
- series.ShowActions(args, cmd, process_tags)
- cc_lines = open(cc_file, encoding='utf-8').read().splitlines()
+ cc_file, alias, in_reply_to=in_reply_to, thread=None)
+ series.ShowActions(args, cmd, process_tags, alias)
+ cc_lines = tools.read_file(cc_file, binary=False).splitlines()
os.remove(cc_file)
- lines = iter(out[0].getvalue().splitlines())
+ itr = iter(out[0].getvalue().splitlines())
self.assertEqual('Cleaned %s patches' % len(series.commits),
- next(lines))
- self.assertEqual('Change log missing for v2', next(lines))
- self.assertEqual('Change log missing for v3', next(lines))
- self.assertEqual('Change log for unknown version v4', next(lines))
- self.assertEqual("Alias 'pci' not found", next(lines))
- while next(lines) != 'Cc processing complete':
+ next(itr))
+ self.assertEqual('Change log missing for v2', next(itr))
+ self.assertEqual('Change log missing for v3', next(itr))
+ self.assertEqual('Change log for unknown version v4', next(itr))
+ self.assertEqual("Alias 'pci' not found", next(itr))
+ while next(itr) != 'Cc processing complete':
pass
- self.assertIn('Dry run', next(lines))
- self.assertEqual('', next(lines))
- self.assertIn('Send a total of %d patches' % count, next(lines))
- prev = next(lines)
- for i, commit in enumerate(series.commits):
+ self.assertIn('Dry run', next(itr))
+ self.assertEqual('', next(itr))
+ self.assertIn('Send a total of %d patches' % count, next(itr))
+ prev = next(itr)
+ for i in range(len(series.commits)):
self.assertEqual(' %s' % args[i], prev)
while True:
- prev = next(lines)
+ prev = next(itr)
if 'Cc:' not in prev:
break
self.assertEqual('To: u-boot@lists.denx.de', prev)
- self.assertEqual('Cc: %s' % stefan, next(lines))
- self.assertEqual('Version: 3', next(lines))
- self.assertEqual('Prefix:\t RFC', next(lines))
- self.assertEqual('Postfix:\t some-branch', next(lines))
- self.assertEqual('Cover: 4 lines', next(lines))
- self.assertEqual(' Cc: %s' % self.fred, next(lines))
- self.assertEqual(' Cc: %s' % self.joe, next(lines))
+ self.assertEqual('Cc: %s' % stefan, next(itr))
+ self.assertEqual('Version: 3', next(itr))
+ self.assertEqual('Prefix:\t RFC', next(itr))
+ self.assertEqual('Postfix:\t some-branch', next(itr))
+ self.assertEqual('Cover: 4 lines', next(itr))
+ self.assertEqual(' Cc: %s' % self.fred, next(itr))
+ self.assertEqual(' Cc: %s' % self.joe, next(itr))
self.assertEqual(' Cc: %s' % self.leb,
- next(lines))
- self.assertEqual(' Cc: %s' % mel, next(lines))
- self.assertEqual(' Cc: %s' % rick, next(lines))
+ next(itr))
+ self.assertEqual(' Cc: %s' % mel, next(itr))
+ self.assertEqual(' Cc: %s' % rick, next(itr))
expected = ('Git command: git send-email --annotate '
- '--in-reply-to="%s" --to "u-boot@lists.denx.de" '
+ '--in-reply-to="%s" --to u-boot@lists.denx.de '
'--cc "%s" --cc-cmd "%s send --cc-cmd %s" %s %s'
% (in_reply_to, stefan, sys.argv[0], cc_file, cover_fname,
' '.join(args)))
- self.assertEqual(expected, next(lines))
+ self.assertEqual(expected, next(itr))
self.assertEqual(('%s %s\0%s' % (args[0], rick, stefan)), cc_lines[0])
self.assertEqual(
@@ -313,14 +314,14 @@ Simon Glass (2):
base-commit: 1a44532
branch: mybranch
'''
- lines = open(cover_fname, encoding='utf-8').read().splitlines()
+ lines = tools.read_file(cover_fname, binary=False).splitlines()
self.assertEqual(
'Subject: [RFC PATCH some-branch v3 0/2] test: A test patch series',
lines[3])
self.assertEqual(expected.splitlines(), lines[7:])
for i, fname in enumerate(args):
- lines = open(fname, encoding='utf-8').read().splitlines()
+ lines = tools.read_file(fname, binary=False).splitlines()
subject = [line for line in lines if line.startswith('Subject')]
self.assertEqual('Subject: [RFC %d/%d]' % (i + 1, count),
subject[0][:18])
@@ -360,14 +361,15 @@ Changes in v2:
def test_base_commit(self):
"""Test adding a base commit with no cover letter"""
orig_text = self._get_text('test01.txt')
- pos = orig_text.index('commit 5ab48490f03051875ab13d288a4bf32b507d76fd')
+ pos = orig_text.index(
+ 'commit 5ab48490f03051875ab13d288a4bf32b507d76fd')
text = orig_text[:pos]
series = patchstream.get_metadata_for_test(text)
series.base_commit = Commit('1a44532')
series.branch = 'mybranch'
cover_fname, args = self._create_patches_for_test(series)
self.assertFalse(cover_fname)
- with capture_sys_output() as out:
+ with terminal.capture() as out:
patchstream.fix_patches(series, args, insert_base_commit=True)
self.assertEqual('Cleaned 1 patch\n', out[0].getvalue())
lines = tools.read_file(args[0], binary=False).splitlines()
@@ -382,139 +384,6 @@ Changes in v2:
self.assertEqual('base-commit: 1a44532', lines[pos + 3])
self.assertEqual('branch: mybranch', lines[pos + 4])
- def make_commit_with_file(self, subject, body, fname, text):
- """Create a file and add it to the git repo with a new commit
-
- Args:
- subject (str): Subject for the commit
- body (str): Body text of the commit
- fname (str): Filename of file to create
- text (str): Text to put into the file
- """
- path = os.path.join(self.gitdir, fname)
- tools.write_file(path, text, binary=False)
- index = self.repo.index
- index.add(fname)
- # pylint doesn't seem to find this
- # pylint: disable=E1101
- author = pygit2.Signature('Test user', 'test@email.com')
- committer = author
- tree = index.write_tree()
- message = subject + '\n' + body
- self.repo.create_commit('HEAD', author, committer, message, tree,
- [self.repo.head.target])
-
- def make_git_tree(self):
- """Make a simple git tree suitable for testing
-
- It has three branches:
- 'base' has two commits: PCI, main
- 'first' has base as upstream and two more commits: I2C, SPI
- 'second' has base as upstream and three more: video, serial, bootm
-
- Returns:
- pygit2.Repository: repository
- """
- repo = pygit2.init_repository(self.gitdir)
- self.repo = repo
- new_tree = repo.TreeBuilder().write()
-
- # pylint doesn't seem to find this
- # pylint: disable=E1101
- author = pygit2.Signature('Test user', 'test@email.com')
- committer = author
- _ = repo.create_commit('HEAD', author, committer, 'Created master',
- new_tree, [])
-
- self.make_commit_with_file('Initial commit', '''
-Add a README
-
-''', 'README', '''This is the README file
-describing this project
-in very little detail''')
-
- self.make_commit_with_file('pci: PCI implementation', '''
-Here is a basic PCI implementation
-
-''', 'pci.c', '''This is a file
-it has some contents
-and some more things''')
- self.make_commit_with_file('main: Main program', '''
-Hello here is the second commit.
-''', 'main.c', '''This is the main file
-there is very little here
-but we can always add more later
-if we want to
-
-Series-to: u-boot
-Series-cc: Barry Crump <bcrump@whataroa.nz>
-''')
- base_target = repo.revparse_single('HEAD')
- self.make_commit_with_file('i2c: I2C things', '''
-This has some stuff to do with I2C
-''', 'i2c.c', '''And this is the file contents
-with some I2C-related things in it''')
- self.make_commit_with_file('spi: SPI fixes', '''
-SPI needs some fixes
-and here they are
-
-Signed-off-by: %s
-
-Series-to: u-boot
-Commit-notes:
-title of the series
-This is the cover letter for the series
-with various details
-END
-''' % self.leb, 'spi.c', '''Some fixes for SPI in this
-file to make SPI work
-better than before''')
- first_target = repo.revparse_single('HEAD')
-
- target = repo.revparse_single('HEAD~2')
- # pylint doesn't seem to find this
- # pylint: disable=E1101
- repo.reset(target.oid, pygit2.GIT_CHECKOUT_FORCE)
- self.make_commit_with_file('video: Some video improvements', '''
-Fix up the video so that
-it looks more purple. Purple is
-a very nice colour.
-''', 'video.c', '''More purple here
-Purple and purple
-Even more purple
-Could not be any more purple''')
- self.make_commit_with_file('serial: Add a serial driver', '''
-Here is the serial driver
-for my chip.
-
-Cover-letter:
-Series for my board
-This series implements support
-for my glorious board.
-END
-Series-links: 183237
-''', 'serial.c', '''The code for the
-serial driver is here''')
- self.make_commit_with_file('bootm: Make it boot', '''
-This makes my board boot
-with a fix to the bootm
-command
-''', 'bootm.c', '''Fix up the bootm
-command to make the code as
-complicated as possible''')
- second_target = repo.revparse_single('HEAD')
-
- repo.branches.local.create('first', first_target)
- repo.config.set_multivar('branch.first.remote', '', '.')
- repo.config.set_multivar('branch.first.merge', '', 'refs/heads/base')
-
- repo.branches.local.create('second', second_target)
- repo.config.set_multivar('branch.second.remote', '', '.')
- repo.config.set_multivar('branch.second.merge', '', 'refs/heads/base')
-
- repo.branches.local.create('base', base_target)
- return repo
-
def test_branch(self):
"""Test creating patches from a branch"""
repo = self.make_git_tree()
@@ -525,13 +394,13 @@ complicated as possible''')
control.setup()
orig_dir = os.getcwd()
try:
- os.chdir(self.gitdir)
+ os.chdir(self.tmpdir)
# Check that it can detect the current branch
self.assertEqual(2, gitutil.count_commits_to_branch(None))
col = terminal.Color()
- with capture_sys_output() as _:
- _, cover_fname, patch_files = control.prepare_patches(
+ with terminal.capture() as _:
+ _, cover_fname, patch_files = send.prepare_patches(
col, branch=None, count=-1, start=0, end=0,
ignore_binary=False, signoff=True)
self.assertIsNone(cover_fname)
@@ -539,8 +408,8 @@ complicated as possible''')
# Check that it can detect a different branch
self.assertEqual(3, gitutil.count_commits_to_branch('second'))
- with capture_sys_output() as _:
- series, cover_fname, patch_files = control.prepare_patches(
+ with terminal.capture() as _:
+ _, cover_fname, patch_files = send.prepare_patches(
col, branch='second', count=-1, start=0, end=0,
ignore_binary=False, signoff=True)
self.assertIsNotNone(cover_fname)
@@ -558,8 +427,8 @@ complicated as possible''')
self.assertNotIn(b'base-commit:', tools.read_file(fname))
# Check that it can skip patches at the end
- with capture_sys_output() as _:
- _, cover_fname, patch_files = control.prepare_patches(
+ with terminal.capture() as _:
+ _, cover_fname, patch_files = send.prepare_patches(
col, branch='second', count=-1, start=0, end=1,
ignore_binary=False, signoff=True)
self.assertIsNotNone(cover_fname)
@@ -577,7 +446,7 @@ complicated as possible''')
def test_custom_get_maintainer_script(self):
"""Validate that a custom get_maintainer script gets used."""
self.make_git_tree()
- with directory_excursion(self.gitdir):
+ with directory_excursion(self.tmpdir):
# Setup git.
os.environ['GIT_CONFIG_GLOBAL'] = '/dev/null'
os.environ['GIT_CONFIG_SYSTEM'] = '/dev/null'
@@ -585,22 +454,21 @@ complicated as possible''')
tools.run('git', 'config', 'user.email', 'dumdum@dummy.com')
tools.run('git', 'branch', 'upstream')
tools.run('git', 'branch', '--set-upstream-to=upstream')
- tools.run('git', 'add', '.')
- tools.run('git', 'commit', '-m', 'new commit')
# Setup patman configuration.
- with open('.patman', 'w', buffering=1) as f:
- f.write('[settings]\n'
- 'get_maintainer_script: dummy-script.sh\n'
- 'check_patch: False\n'
- 'add_maintainers: True\n')
- with open('dummy-script.sh', 'w', buffering=1) as f:
- f.write('#!/usr/bin/env python\n'
- 'print("hello@there.com")\n')
+ tools.write_file('.patman', '[settings]\n'
+ 'get_maintainer_script: dummy-script.sh\n'
+ 'check_patch: False\n'
+ 'add_maintainers: True\n', binary=False)
+ tools.write_file('dummy-script.sh',
+ '#!/usr/bin/env python3\n'
+ 'print("hello@there.com")\n', binary=False)
os.chmod('dummy-script.sh', 0x555)
+ tools.run('git', 'add', '.')
+ tools.run('git', 'commit', '-m', 'new commit')
# Finally, do the test
- with capture_sys_output():
+ with terminal.capture():
output = tools.run(PATMAN_DIR / 'patman', '--dry-run')
# Assert the email address is part of the dry-run
# output.
@@ -627,7 +495,7 @@ Tested-by: %s
Serie-version: 2
'''
with self.assertRaises(ValueError) as exc:
- pstrm = PatchStream.process_text(text)
+ PatchStream.process_text(text)
self.assertEqual("Line 3: Invalid tag = 'Serie-version: 2'",
str(exc.exception))
@@ -705,9 +573,9 @@ index c072e54..942244f 100644
--- a/lib/fdtdec.c
+++ b/lib/fdtdec.c
@@ -1200,7 +1200,8 @@ int fdtdec_setup_mem_size_base(void)
- }
+ \t}
- gd->ram_size = (phys_size_t)(res.end - res.start + 1);
+ \tgd->ram_size = (phys_size_t)(res.end - res.start + 1);
- debug("%s: Initial DRAM size %llx\n", __func__, (u64)gd->ram_size);
+ debug("%s: Initial DRAM size %llx\n", __func__,
+ (unsigned long long)gd->ram_size);
@@ -743,15 +611,49 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
finally:
os.chdir(orig_dir)
+ def run_patman(self, *args):
+ """Run patman using the provided arguments
+
+ This runs the patman executable from scratch, as opposed to calling
+ the control.do_patman() function.
+
+ Args:
+ args (list of str): Arguments to pass (excluding argv[0])
+
+ Return:
+ CommandResult: Result of execution
+ """
+ all_args = [self._patman_pathname] + list(args)
+ return command.run_one(*all_args, capture=True, capture_stderr=True)
+
+ def test_full_help(self):
+ """Test getting full help"""
+ command.TEST_RESULT = None
+ result = self.run_patman('-H')
+ help_file = os.path.join(self._patman_dir, 'README.rst')
+ # Remove possible extraneous strings
+ extra = '::::::::::::::\n' + help_file + '\n::::::::::::::\n'
+ gothelp = result.stdout.replace(extra, '')
+ self.assertEqual(len(gothelp), os.path.getsize(help_file))
+ self.assertEqual(0, len(result.stderr))
+ self.assertEqual(0, result.return_code)
+
+ def test_help(self):
+ """Test getting help with commands and arguments"""
+ command.TEST_RESULT = None
+ result = self.run_patman('-h')
+ self.assertTrue(len(result.stdout) > 1000)
+ self.assertEqual(0, len(result.stderr))
+ self.assertEqual(0, result.return_code)
+
@staticmethod
- def _fake_patchwork(url, subpath):
+ def _fake_patchwork(subpath):
"""Fake Patchwork server for the function below
This handles accessing a series, providing a list consisting of a
single patch
Args:
- url (str): URL of patchwork server
subpath (str): URL subpath to use
"""
re_series = re.match(r'series/(\d*)/$', subpath)
@@ -764,20 +666,20 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
def test_status_mismatch(self):
"""Test Patchwork patches not matching the series"""
- series = Series()
-
- with capture_sys_output() as (_, err):
- status.collect_patches(series, 1234, None, self._fake_patchwork)
+ pwork = patchwork.Patchwork.for_testing(self._fake_patchwork)
+ with terminal.capture() as (_, err):
+ loop = asyncio.get_event_loop()
+ _, patches = loop.run_until_complete(status.check_status(1234,
+ pwork))
+ status.check_patch_count(0, len(patches))
self.assertIn('Warning: Patchwork reports 1 patches, series has 0',
err.getvalue())
def test_status_read_patch(self):
"""Test handling a single patch in Patchwork"""
- series = Series()
- series.commits = [Commit('abcd')]
-
- patches = status.collect_patches(series, 1234, None,
- self._fake_patchwork)
+ pwork = patchwork.Patchwork.for_testing(self._fake_patchwork)
+ loop = asyncio.get_event_loop()
+ _, patches = loop.run_until_complete(status.check_status(1234, pwork))
self.assertEqual(1, len(patches))
patch = patches[0]
self.assertEqual('1', patch.id)
@@ -785,7 +687,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
def test_parse_subject(self):
"""Test parsing of the patch subject"""
- patch = status.Patch('1')
+ patch = patchwork.Patch('1')
# Simple patch not in a series
patch.parse_subject('Testing')
@@ -813,6 +715,14 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
self.assertEqual(None, patch.prefix)
self.assertEqual(None, patch.version)
+ # With PATCH prefix
+ patch.parse_subject('[PATCH,2/5] Testing')
+ self.assertEqual('Testing', patch.subject)
+ self.assertEqual(2, patch.seq)
+ self.assertEqual(5, patch.count)
+ self.assertEqual('PATCH', patch.prefix)
+ self.assertEqual(None, patch.version)
+
# RFC patch
patch.parse_subject('[RFC,3/7] Testing')
self.assertEqual('Testing', patch.subject)
@@ -854,11 +764,11 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
commit3 = Commit('3456')
commit3.subject = 'Subject 2'
- patch1 = status.Patch('1')
+ patch1 = patchwork.Patch('1')
patch1.subject = 'Subject 1'
- patch2 = status.Patch('2')
+ patch2 = patchwork.Patch('2')
patch2.subject = 'Subject 2'
- patch3 = status.Patch('3')
+ patch3 = patchwork.Patch('3')
patch3.subject = 'Subject 2'
series = Series()
@@ -920,14 +830,13 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
"Cannot find commit for patch 3 ('Subject 2')"],
warnings)
- def _fake_patchwork2(self, url, subpath):
+ def _fake_patchwork2(self, subpath):
"""Fake Patchwork server for the function below
This handles accessing series, patches and comments, providing the data
in self.patches to the caller
Args:
- url (str): URL of patchwork server
subpath (str): URL subpath to use
"""
re_series = re.match(r'series/(\d*)/$', subpath)
@@ -954,7 +863,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
commit2 = Commit('ef12')
commit2.subject = 'Subject 2'
- patch1 = status.Patch('1')
+ patch1 = patchwork.Patch('1')
patch1.parse_subject('[1/2] Subject 1')
patch1.name = patch1.raw_subject
patch1.content = 'This is my patch content'
@@ -962,7 +871,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
patch1.comments = [comment1a]
- patch2 = status.Patch('2')
+ patch2 = patchwork.Patch('2')
patch2.parse_subject('[2/2] Subject 2')
patch2.name = patch2.raw_subject
patch2.content = 'Some other patch content'
@@ -978,37 +887,33 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
# things behaves as expected
self.commits = [commit1, commit2]
self.patches = [patch1, patch2]
- count = 2
- new_rtag_list = [None] * count
- review_list = [None, None]
# Check that the tags are picked up on the first patch
- status.find_new_responses(new_rtag_list, review_list, 0, commit1,
- patch1, None, self._fake_patchwork2)
- self.assertEqual(new_rtag_list[0], {'Reviewed-by': {self.joe}})
+ new_rtags, _ = status.process_reviews(patch1.content, patch1.comments,
+ commit1.rtags)
+ self.assertEqual(new_rtags, {'Reviewed-by': {self.joe}})
# Now the second patch
- status.find_new_responses(new_rtag_list, review_list, 1, commit2,
- patch2, None, self._fake_patchwork2)
- self.assertEqual(new_rtag_list[1], {
+ new_rtags, _ = status.process_reviews(patch2.content, patch2.comments,
+ commit2.rtags)
+ self.assertEqual(new_rtags, {
'Reviewed-by': {self.mary, self.fred},
'Tested-by': {self.leb}})
# Now add some tags to the commit, which means they should not appear as
# 'new' tags when scanning comments
- new_rtag_list = [None] * count
commit1.rtags = {'Reviewed-by': {self.joe}}
- status.find_new_responses(new_rtag_list, review_list, 0, commit1,
- patch1, None, self._fake_patchwork2)
- self.assertEqual(new_rtag_list[0], {})
+ new_rtags, _ = status.process_reviews(patch1.content, patch1.comments,
+ commit1.rtags)
+ self.assertEqual(new_rtags, {})
# For the second commit, add Ed and Fred, so only Mary should be left
commit2.rtags = {
'Tested-by': {self.leb},
'Reviewed-by': {self.fred}}
- status.find_new_responses(new_rtag_list, review_list, 1, commit2,
- patch2, None, self._fake_patchwork2)
- self.assertEqual(new_rtag_list[1], {'Reviewed-by': {self.mary}})
+ new_rtags, _ = status.process_reviews(patch2.content, patch2.comments,
+ commit2.rtags)
+ self.assertEqual(new_rtags, {'Reviewed-by': {self.mary}})
# Check that the output patches expectations:
# 1 Subject 1
@@ -1022,50 +927,50 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
series = Series()
series.commits = [commit1, commit2]
terminal.set_print_test_mode()
- status.check_patchwork_status(series, '1234', None, None, False, False,
- None, self._fake_patchwork2)
- lines = iter(terminal.get_print_test_lines())
+ pwork = patchwork.Patchwork.for_testing(self._fake_patchwork2)
+ status.check_and_show_status(series, '1234', None, None, False, False,
+ False, pwork)
+ itr = iter(terminal.get_print_test_lines())
col = terminal.Color()
- self.assertEqual(terminal.PrintLine(' 1 Subject 1', col.BLUE),
- next(lines))
+ self.assertEqual(terminal.PrintLine(' 1 Subject 1', col.YELLOW),
+ next(itr))
self.assertEqual(
terminal.PrintLine(' Reviewed-by: ', col.GREEN, newline=False,
bright=False),
- next(lines))
+ next(itr))
self.assertEqual(terminal.PrintLine(self.joe, col.WHITE, bright=False),
- next(lines))
+ next(itr))
- self.assertEqual(terminal.PrintLine(' 2 Subject 2', col.BLUE),
- next(lines))
+ self.assertEqual(terminal.PrintLine(' 2 Subject 2', col.YELLOW),
+ next(itr))
self.assertEqual(
terminal.PrintLine(' Reviewed-by: ', col.GREEN, newline=False,
bright=False),
- next(lines))
- self.assertEqual(terminal.PrintLine(self.fred, col.WHITE, bright=False),
- next(lines))
+ next(itr))
+ self.assertEqual(terminal.PrintLine(self.fred, col.WHITE,
+ bright=False), next(itr))
self.assertEqual(
terminal.PrintLine(' Tested-by: ', col.GREEN, newline=False,
bright=False),
- next(lines))
+ next(itr))
self.assertEqual(terminal.PrintLine(self.leb, col.WHITE, bright=False),
- next(lines))
+ next(itr))
self.assertEqual(
terminal.PrintLine(' + Reviewed-by: ', col.GREEN, newline=False),
- next(lines))
+ next(itr))
self.assertEqual(terminal.PrintLine(self.mary, col.WHITE),
- next(lines))
+ next(itr))
self.assertEqual(terminal.PrintLine(
'1 new response available in patchwork (use -d to write them to a new branch)',
- None), next(lines))
+ None), next(itr))
- def _fake_patchwork3(self, url, subpath):
+ def _fake_patchwork3(self, subpath):
"""Fake Patchwork server for the function below
This handles accessing series, patches and comments, providing the data
in self.patches to the caller
Args:
- url (str): URL of patchwork server
subpath (str): URL subpath to use
"""
re_series = re.match(r'series/(\d*)/$', subpath)
@@ -1091,14 +996,14 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
branch = 'first'
dest_branch = 'first2'
count = 2
- gitdir = os.path.join(self.gitdir, '.git')
+ gitdir = self.gitdir
# Set up the test git tree. We use branch 'first' which has two commits
# in it
series = patchstream.get_metadata_for_list(branch, gitdir, count)
self.assertEqual(2, len(series.commits))
- patch1 = status.Patch('1')
+ patch1 = patchwork.Patch('1')
patch1.parse_subject('[1/2] %s' % series.commits[0].subject)
patch1.name = patch1.raw_subject
patch1.content = 'This is my patch content'
@@ -1106,7 +1011,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
patch1.comments = [comment1a]
- patch2 = status.Patch('2')
+ patch2 = patchwork.Patch('2')
patch2.parse_subject('[2/2] %s' % series.commits[1].subject)
patch2.name = patch2.raw_subject
patch2.content = 'Some other patch content'
@@ -1136,9 +1041,10 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
# <unittest.result.TestResult run=8 errors=0 failures=0>
terminal.set_print_test_mode()
- status.check_patchwork_status(series, '1234', branch, dest_branch,
- False, False, None, self._fake_patchwork3,
- repo)
+ pwork = patchwork.Patchwork.for_testing(self._fake_patchwork3)
+ status.check_and_show_status(
+ series, '1234', branch, dest_branch, False, False, False, pwork,
+ repo)
lines = terminal.get_print_test_lines()
self.assertEqual(12, len(lines))
self.assertEqual(
@@ -1159,18 +1065,18 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
# Now check the actual test of the first commit message. We expect to
# see the new tags immediately below the old ones.
stdout = patchstream.get_list(dest_branch, count=count, git_dir=gitdir)
- lines = iter([line.strip() for line in stdout.splitlines()
- if '-by:' in line])
+ itr = iter([line.strip() for line in stdout.splitlines()
+ if '-by:' in line])
# First patch should have the review tag
- self.assertEqual('Reviewed-by: %s' % self.joe, next(lines))
+ self.assertEqual('Reviewed-by: %s' % self.joe, next(itr))
# Second patch should have the sign-off then the tested-by and two
# reviewed-by tags
- self.assertEqual('Signed-off-by: %s' % self.leb, next(lines))
- self.assertEqual('Reviewed-by: %s' % self.fred, next(lines))
- self.assertEqual('Reviewed-by: %s' % self.mary, next(lines))
- self.assertEqual('Tested-by: %s' % self.leb, next(lines))
+ self.assertEqual('Signed-off-by: %s' % self.leb, next(itr))
+ self.assertEqual('Reviewed-by: %s' % self.fred, next(itr))
+ self.assertEqual('Reviewed-by: %s' % self.mary, next(itr))
+ self.assertEqual('Tested-by: %s' % self.leb, next(itr))
def test_parse_snippets(self):
"""Test parsing of review snippets"""
@@ -1246,8 +1152,9 @@ line8
'And another comment'],
['> File: file.c',
'> Line: 153 / 143: def check_patch(fname, show_types=False):',
- '> and more code', '> +Addition here', '> +Another addition here',
- '> codey', '> more codey', 'and another thing in same file'],
+ '> and more code', '> +Addition here',
+ '> +Another addition here', '> codey', '> more codey',
+ 'and another thing in same file'],
['> File: file.c', '> Line: 253 / 243',
'> with no function context', 'one more thing'],
['> File: tools/patman/main.py', '> +line of code',
@@ -1269,7 +1176,7 @@ line8
commit2 = Commit('ef12')
commit2.subject = 'Subject 2'
- patch1 = status.Patch('1')
+ patch1 = patchwork.Patch('1')
patch1.parse_subject('[1/2] Subject 1')
patch1.name = patch1.raw_subject
patch1.content = 'This is my patch content'
@@ -1290,7 +1197,7 @@ Reviewed-by: %s
patch1.comments = [comment1a]
- patch2 = status.Patch('2')
+ patch2 = patchwork.Patch('2')
patch2.parse_subject('[2/2] Subject 2')
patch2.name = patch2.raw_subject
patch2.content = 'Some other patch content'
@@ -1338,77 +1245,80 @@ Reviewed-by: %s
series = Series()
series.commits = [commit1, commit2]
terminal.set_print_test_mode()
- status.check_patchwork_status(series, '1234', None, None, False, True,
- None, self._fake_patchwork2)
- lines = iter(terminal.get_print_test_lines())
+ pwork = patchwork.Patchwork.for_testing(self._fake_patchwork2)
+ status.check_and_show_status(
+ series, '1234', None, None, False, True, False, pwork)
+ itr = iter(terminal.get_print_test_lines())
col = terminal.Color()
- self.assertEqual(terminal.PrintLine(' 1 Subject 1', col.BLUE),
- next(lines))
+ self.assertEqual(terminal.PrintLine(' 1 Subject 1', col.YELLOW),
+ next(itr))
self.assertEqual(
terminal.PrintLine(' + Reviewed-by: ', col.GREEN, newline=False),
- next(lines))
- self.assertEqual(terminal.PrintLine(self.joe, col.WHITE), next(lines))
+ next(itr))
+ self.assertEqual(terminal.PrintLine(self.joe, col.WHITE), next(itr))
self.assertEqual(terminal.PrintLine('Review: %s' % self.joe, col.RED),
- next(lines))
- self.assertEqual(terminal.PrintLine(' Hi Fred,', None), next(lines))
- self.assertEqual(terminal.PrintLine('', None), next(lines))
+ next(itr))
+ self.assertEqual(terminal.PrintLine(' Hi Fred,', None), next(itr))
+ self.assertEqual(terminal.PrintLine('', None), next(itr))
self.assertEqual(terminal.PrintLine(' > File: file.c', col.MAGENTA),
- next(lines))
+ next(itr))
self.assertEqual(terminal.PrintLine(' > Some code', col.MAGENTA),
- next(lines))
- self.assertEqual(terminal.PrintLine(' > and more code', col.MAGENTA),
- next(lines))
+ next(itr))
+ self.assertEqual(terminal.PrintLine(' > and more code',
+ col.MAGENTA),
+ next(itr))
self.assertEqual(terminal.PrintLine(
- ' Here is my comment above the above...', None), next(lines))
- self.assertEqual(terminal.PrintLine('', None), next(lines))
+ ' Here is my comment above the above...', None), next(itr))
+ self.assertEqual(terminal.PrintLine('', None), next(itr))
- self.assertEqual(terminal.PrintLine(' 2 Subject 2', col.BLUE),
- next(lines))
+ self.assertEqual(terminal.PrintLine(' 2 Subject 2', col.YELLOW),
+ next(itr))
self.assertEqual(
terminal.PrintLine(' + Reviewed-by: ', col.GREEN, newline=False),
- next(lines))
+ next(itr))
self.assertEqual(terminal.PrintLine(self.fred, col.WHITE),
- next(lines))
+ next(itr))
self.assertEqual(
terminal.PrintLine(' + Reviewed-by: ', col.GREEN, newline=False),
- next(lines))
+ next(itr))
self.assertEqual(terminal.PrintLine(self.mary, col.WHITE),
- next(lines))
+ next(itr))
self.assertEqual(
terminal.PrintLine(' + Tested-by: ', col.GREEN, newline=False),
- next(lines))
+ next(itr))
self.assertEqual(terminal.PrintLine(self.leb, col.WHITE),
- next(lines))
+ next(itr))
self.assertEqual(terminal.PrintLine('Review: %s' % self.fred, col.RED),
- next(lines))
- self.assertEqual(terminal.PrintLine(' Hi Fred,', None), next(lines))
- self.assertEqual(terminal.PrintLine('', None), next(lines))
+ next(itr))
+ self.assertEqual(terminal.PrintLine(' Hi Fred,', None), next(itr))
+ self.assertEqual(terminal.PrintLine('', None), next(itr))
self.assertEqual(terminal.PrintLine(
- ' > File: tools/patman/commit.py', col.MAGENTA), next(lines))
+ ' > File: tools/patman/commit.py', col.MAGENTA), next(itr))
self.assertEqual(terminal.PrintLine(
- ' > Line: 41 / 41: class Commit:', col.MAGENTA), next(lines))
+ ' > Line: 41 / 41: class Commit:', col.MAGENTA), next(itr))
self.assertEqual(terminal.PrintLine(
- ' > + return self.subject', col.MAGENTA), next(lines))
+ ' > + return self.subject', col.MAGENTA), next(itr))
self.assertEqual(terminal.PrintLine(
- ' > +', col.MAGENTA), next(lines))
+ ' > +', col.MAGENTA), next(itr))
self.assertEqual(
- terminal.PrintLine(' > def add_change(self, version, info):',
- col.MAGENTA),
- next(lines))
+ terminal.PrintLine(
+ ' > def add_change(self, version, info):',
+ col.MAGENTA),
+ next(itr))
self.assertEqual(terminal.PrintLine(
' > """Add a new change line to the change list for a version.',
- col.MAGENTA), next(lines))
+ col.MAGENTA), next(itr))
self.assertEqual(terminal.PrintLine(
- ' >', col.MAGENTA), next(lines))
+ ' >', col.MAGENTA), next(itr))
self.assertEqual(terminal.PrintLine(
- ' A comment', None), next(lines))
- self.assertEqual(terminal.PrintLine('', None), next(lines))
+ ' A comment', None), next(itr))
+ self.assertEqual(terminal.PrintLine('', None), next(itr))
self.assertEqual(terminal.PrintLine(
'4 new responses available in patchwork (use -d to write them to a new branch)',
- None), next(lines))
+ None), next(itr))
def test_insert_tags(self):
"""Test inserting of review tags"""
diff --git a/tools/patman/get_maintainer.py b/tools/patman/get_maintainer.py
index 200ee96551d..1c8fa726573 100644
--- a/tools/patman/get_maintainer.py
+++ b/tools/patman/get_maintainer.py
@@ -21,7 +21,7 @@ def find_get_maintainer(script_file_name):
if get_maintainer:
return get_maintainer
- git_relative_script = os.path.join(gitutil.get_top_level(),
+ git_relative_script = os.path.join(gitutil.get_top_level() or '',
script_file_name)
if os.path.exists(git_relative_script):
return git_relative_script
@@ -46,11 +46,14 @@ def get_maintainer(script_file_name, fname, verbose=False):
"""
# Expand `script_file_name` into a file name and its arguments, if
# any.
- cmd_args = shlex.split(script_file_name)
- file_name = cmd_args[0]
- arguments = cmd_args[1:]
+ get_maintainer = None
+ arguments = None
+ if script_file_name:
+ cmd_args = shlex.split(script_file_name)
+ file_name = cmd_args[0]
+ arguments = cmd_args[1:]
- get_maintainer = find_get_maintainer(file_name)
+ get_maintainer = find_get_maintainer(file_name)
if not get_maintainer:
if verbose:
print("WARNING: Couldn't find get_maintainer.pl")
diff --git a/tools/patman/patchstream.py b/tools/patman/patchstream.py
index 7a695c37c27..45040877f8c 100644
--- a/tools/patman/patchstream.py
+++ b/tools/patman/patchstream.py
@@ -109,6 +109,8 @@ class PatchStream:
self.recent_unquoted = queue.Queue()
self.was_quoted = None
self.insert_base_commit = insert_base_commit
+ self.lines = [] # All lines in a commit message
+ self.msg = None # Full commit message including subject
@staticmethod
def process_text(text, is_comment=False):
@@ -190,11 +192,22 @@ class PatchStream:
"""
self.commit.add_rtag(rtag_type, who)
- def _close_commit(self):
- """Save the current commit into our commit list, and reset our state"""
+ def _close_commit(self, skip_last_line):
+ """Save the current commit into our commit list, and reset our state
+
+ Args:
+ skip_last_line (bool): True to omit the final line of self.lines
+ when building the commit message. This is normally the blank
+ line between two commits, except at the end of the log, where
+ there is no blank line
+ """
if self.commit and self.is_log:
+ # Skip the blank line before the subject
+ lines = self.lines[:-1] if skip_last_line else self.lines
+ self.commit.msg = '\n'.join(lines[1:]) + '\n'
self.series.AddCommit(self.commit)
self.commit = None
+ self.lines = []
# If 'END' is missing in a 'Cover-letter' section, and that section
# happens to show up at the very end of the commit message, this is
# the chance for us to fix it up.
@@ -345,6 +358,8 @@ class PatchStream:
self.state += 1
elif commit_match:
self.state = STATE_MSG_HEADER
+ if self.state != STATE_MSG_HEADER:
+ self.lines.append(line)
# If a tag is detected, or a new commit starts
if series_tag_match or commit_tag_match or change_id_match or \
@@ -499,7 +514,7 @@ class PatchStream:
# Detect the start of a new commit
elif commit_match:
- self._close_commit()
+ self._close_commit(True)
self.commit = commit.Commit(commit_match.group(1))
# Detect tags in the commit message
@@ -579,7 +594,7 @@ class PatchStream:
"""Close out processing of this patch stream"""
self._finalise_snippet()
self._finalise_change()
- self._close_commit()
+ self._close_commit(False)
if self.lines_after_test:
self._add_warn('Found %d lines after TEST=' % self.lines_after_test)
@@ -754,7 +769,7 @@ def get_metadata_for_list(commit_range, git_dir=None, count=None,
pst.finalise()
return series
-def get_metadata(branch, start, count):
+def get_metadata(branch, start, count, git_dir=None):
"""Reads out patch series metadata from the commits
This does a 'git log' on the relevant commits and pulls out the tags we
@@ -769,8 +784,9 @@ def get_metadata(branch, start, count):
Series: Object containing information about the commits.
"""
top = f"{branch if branch else 'HEAD'}~{start}"
- series = get_metadata_for_list(top, None, count)
- series.base_commit = commit.Commit(gitutil.get_hash(f'{top}~{count}'))
+ series = get_metadata_for_list(top, git_dir, count)
+ series.base_commit = commit.Commit(
+ gitutil.get_hash(f'{top}~{count}', git_dir))
series.branch = branch or gitutil.get_branch()
series.top = top
return series
@@ -792,7 +808,7 @@ def get_metadata_for_test(text):
return series
def fix_patch(backup_dir, fname, series, cmt, keep_change_id=False,
- insert_base_commit=False):
+ insert_base_commit=False, cwd=None):
"""Fix up a patch file, by adding/removing as required.
We remove our tags from the patch file, insert changes lists, etc.
@@ -807,10 +823,12 @@ def fix_patch(backup_dir, fname, series, cmt, keep_change_id=False,
cmt (Commit): Commit object for this patch file
keep_change_id (bool): Keep the Change-Id tag.
insert_base_commit (bool): True to add the base commit to the end
+ cwd (str): Directory containing filename, or None for current
Return:
list: A list of errors, each str, or [] if all ok.
"""
+ fname = os.path.join(cwd or '', fname)
handle, tmpname = tempfile.mkstemp()
outfd = os.fdopen(handle, 'w', encoding='utf-8')
infd = open(fname, 'r', encoding='utf-8')
@@ -827,7 +845,8 @@ def fix_patch(backup_dir, fname, series, cmt, keep_change_id=False,
shutil.move(tmpname, fname)
return cmt.warn
-def fix_patches(series, fnames, keep_change_id=False, insert_base_commit=False):
+def fix_patches(series, fnames, keep_change_id=False, insert_base_commit=False,
+ cwd=None):
"""Fix up a list of patches identified by filenames
The patch files are processed in place, and overwritten.
@@ -837,6 +856,7 @@ def fix_patches(series, fnames, keep_change_id=False, insert_base_commit=False):
fnames (:type: list of str): List of patch files to process
keep_change_id (bool): Keep the Change-Id tag.
insert_base_commit (bool): True to add the base commit to the end
+ cwd (str): Directory containing the patch files, or None for current
"""
# Current workflow creates patches, so we shouldn't need a backup
backup_dir = None #tempfile.mkdtemp('clean-patch')
@@ -847,7 +867,7 @@ def fix_patches(series, fnames, keep_change_id=False, insert_base_commit=False):
cmt.count = count
result = fix_patch(backup_dir, fname, series, cmt,
keep_change_id=keep_change_id,
- insert_base_commit=insert_base_commit)
+ insert_base_commit=insert_base_commit, cwd=cwd)
if result:
print('%d warning%s for %s:' %
(len(result), 's' if len(result) > 1 else '', fname))
@@ -857,14 +877,16 @@ def fix_patches(series, fnames, keep_change_id=False, insert_base_commit=False):
count += 1
print('Cleaned %d patch%s' % (count, 'es' if count > 1 else ''))
-def insert_cover_letter(fname, series, count):
+def insert_cover_letter(fname, series, count, cwd=None):
"""Inserts a cover letter with the required info into patch 0
Args:
fname (str): Input / output filename of the cover letter file
series (Series): Series object
count (int): Number of patches in the series
+ cwd (str): Directory containing filename, or None for current
"""
+ fname = os.path.join(cwd or '', fname)
fil = open(fname, 'r')
lines = fil.readlines()
fil.close()
diff --git a/tools/patman/patchwork.py b/tools/patman/patchwork.py
new file mode 100644
index 00000000000..d485648e467
--- /dev/null
+++ b/tools/patman/patchwork.py
@@ -0,0 +1,852 @@
+# SPDX-License-Identifier: GPL-2.0+
+#
+# Copyright 2025 Simon Glass <sjg@chromium.org>
+#
+"""Provides a basic API for the patchwork server
+"""
+
+import asyncio
+import re
+
+import aiohttp
+from collections import namedtuple
+
+from u_boot_pylib import terminal
+
+# Information passed to series_get_states()
+# link (str): Patchwork link for series
+# series_id (int): Series ID in database
+# series_name (str): Series name
+# version (int): Version number of series
+# show_comments (bool): True to show comments
+# show_cover_comments (bool): True to show cover-letter comments
+STATE_REQ = namedtuple(
+ 'state_req',
+ 'link,series_id,series_name,version,show_comments,show_cover_comments')
+
+# Responses from series_get_states()
+# int: ser_ver ID number
+# COVER: Cover-letter info
+# list of Patch: Information on each patch in the series
+# list of dict: patches, see get_series()['patches']
+STATE_RESP = namedtuple('state_resp', 'svid,cover,patches,patch_list')
+
+# Information about a cover-letter on patchwork
+# id (int): Patchwork ID of cover letter
+# num_comments (int): Number of comments
+# name (str): Series name
+# comments (list of dict): Comments
+COVER = namedtuple('cover', 'id,num_comments,name,comments')
+
+# Number of retries
+RETRIES = 3
+
+# Max concurrent requests
+MAX_CONCURRENT = 50
+
+# Patches which are part of a multi-patch series are shown with a prefix like
+# [prefix, version, sequence], for example '[RFC, v2, 3/5]'. All but the last
+# part is optional. This decodes the string into groups. For single patches
+# the [] part is not present:
+# Groups: (ignore, ignore, ignore, prefix, version, sequence, subject)
+RE_PATCH = re.compile(r'(\[(((.*),)?(.*),)?(.*)\]\s)?(.*)$')
+
+# This decodes the sequence string into a patch number and patch count
+RE_SEQ = re.compile(r'(\d+)/(\d+)')
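+# For example (illustrative): '[RFC,v2,3/5] video: Fix sync' decodes to
+# prefix 'RFC', version 'v2', sequence '3/5' and subject 'video: Fix sync';
+# RE_SEQ then splits '3/5' into patch 3 of a series of 5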
+
+
+class Patch(dict):
+ """Models a patch in patchwork
+
+ This class records information obtained from patchwork
+
+ Some of this information comes from the 'Patch' column:
+
+ [RFC,v2,1/3] dm: Driver and uclass changes for tiny-dm
+
+ This shows the prefix, version, seq, count and subject.
+
+ The other properties come from other columns in the display.
+
+ Properties:
+ pid (str): ID of the patch (typically an integer)
+ seq (int): Sequence number within series (1=first) parsed from sequence
+ string
+ count (int): Number of patches in series, parsed from sequence string
+ raw_subject (str): Entire subject line, e.g.
+ "[1/2,v2] efi_loader: Sort header file ordering"
+ prefix (str): Prefix string or None (e.g. 'RFC')
+ version (str): Version string or None (e.g. 'v2')
+        subject (str): Patch subject with [..] part removed (same as commit
+            subject)
+        data (dict or None): Patch data
+ """
+ def __init__(self, pid, state=None, data=None, comments=None,
+ series_data=None):
+ super().__init__()
+        self.id = pid  # Use 'id' to match what the REST API provides
+ self.seq = None
+ self.count = None
+ self.prefix = None
+ self.version = None
+ self.raw_subject = None
+ self.subject = None
+ self.state = state
+ self.data = data
+ self.comments = comments
+ self.series_data = series_data
+ self.name = None
+
+ # These make us more like a dictionary
+ def __setattr__(self, name, value):
+ self[name] = value
+
+ def __getattr__(self, name):
+ return self[name]
+
+ def __hash__(self):
+ return hash(frozenset(self.items()))
+
+ def __str__(self):
+ return self.raw_subject
+
+ def parse_subject(self, raw_subject):
+ """Parse the subject of a patch into its component parts
+
+ See RE_PATCH for details. The parsed info is placed into seq, count,
+ prefix, version, subject
+
+ Args:
+ raw_subject (str): Subject string to parse
+
+ Raises:
+ ValueError: the subject cannot be parsed
+ """
+ self.raw_subject = raw_subject.strip()
+ mat = RE_PATCH.search(raw_subject.strip())
+ if not mat:
+ raise ValueError(f"Cannot parse subject '{raw_subject}'")
+ self.prefix, self.version, seq_info, self.subject = mat.groups()[3:]
+ mat_seq = RE_SEQ.match(seq_info) if seq_info else False
+ if mat_seq is None:
+ self.version = seq_info
+ seq_info = None
+ if self.version and not self.version.startswith('v'):
+ self.prefix = self.version
+ self.version = None
+ if seq_info:
+ if mat_seq:
+ self.seq = int(mat_seq.group(1))
+ self.count = int(mat_seq.group(2))
+ else:
+ self.seq = 1
+ self.count = 1
+
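+# Example use of Patch.parse_subject() (illustrative only, not part of the
+# module):
+#
+#     patch = Patch('1')
+#     patch.parse_subject('[RFC,v2,3/5] video: Fix sync')
+#     # now patch.prefix == 'RFC', patch.version == 'v2', patch.seq == 3,
+#     # patch.count == 5 and patch.subject == 'video: Fix sync'
+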
+
+class Review:
+ """Represents a single review email collected in Patchwork
+
+ Patches can attract multiple reviews. Each consists of an author/date and
+ a variable number of 'snippets', which are groups of quoted and unquoted
+ text.
+ """
+ def __init__(self, meta, snippets):
+ """Create new Review object
+
+ Args:
+ meta (str): Text containing review author and date
+            snippets (list): List of snippets in the review, each a list of
+                text lines
+ lines
+ """
+ self.meta = ' : '.join([line for line in meta.splitlines() if line])
+ self.snippets = snippets
+
+
+class Patchwork:
+ """Class to handle communication with patchwork
+ """
+ def __init__(self, url, show_progress=True, single_thread=False):
+ """Set up a new patchwork handler
+
+ Args:
+ url (str): URL of patchwork server, e.g.
+                'https://patchwork.ozlabs.org'
+            show_progress (bool): True to show progress output while
+                fetching from the server
+            single_thread (bool): True to issue only one request at a time
+        """
+ self.url = url
+ self.fake_request = None
+ self.proj_id = None
+ self.link_name = None
+ self._show_progress = show_progress
+ self.semaphore = asyncio.Semaphore(
+ 1 if single_thread else MAX_CONCURRENT)
+ self.request_count = 0
+
+ async def _request(self, client, subpath):
+ """Call the patchwork API and return the result as JSON
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ subpath (str): URL subpath to use
+
+ Returns:
+ dict: Json result
+
+ Raises:
+ ValueError: the URL could not be read
+ """
+ # print('subpath', subpath)
+ self.request_count += 1
+ if self.fake_request:
+ return self.fake_request(subpath)
+
+ full_url = f'{self.url}/api/1.2/{subpath}'
+ async with self.semaphore:
+ # print('full_url', full_url)
+ for i in range(RETRIES + 1):
+ try:
+ async with client.get(full_url) as response:
+ if response.status != 200:
+ raise ValueError(
+ f"Could not read URL '{full_url}'")
+ result = await response.json()
+ # print('- done', full_url)
+ return result
+ except aiohttp.client_exceptions.ServerDisconnectedError:
+ if i == RETRIES:
+ raise
+
+ @staticmethod
+ def for_testing(func):
+ """Get an instance to use for testing
+
+ Args:
+ func (function): Function to call to handle requests. The function
+ is passed a URL and is expected to return a dict with the
+ resulting data
+
+ Returns:
+ Patchwork: testing instance
+ """
+ pwork = Patchwork(None, show_progress=False)
+ pwork.fake_request = func
+ return pwork
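+
+    # A minimal fake-request function might look like this (illustrative
+    # only; the subpaths mirror those used by _request()):
+    #
+    #     def fake_request(subpath):
+    #         if subpath == 'projects/':
+    #             return [{'id': 6, 'name': 'U-Boot', 'link_name': 'uboot'}]
+    #         raise ValueError(f'unexpected {subpath}')
+    #
+    #     pwork = Patchwork.for_testing(fake_request)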
+
+ class _Stats:
+ def __init__(self, parent):
+ self.parent = parent
+ self.request_count = 0
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.request_count = self.parent.request_count
+
+ def collect_stats(self):
+ """Context manager to count requests across a range of patchwork calls
+
+ Usage:
+ pwork = Patchwork(...)
+            with pwork.collect_stats() as counter:
+                pwork.something()
+            print(f'{counter.request_count} requests')
+ """
+ self.request_count = 0
+ return self._Stats(self)
+
+ async def get_projects(self):
+ """Get a list of projects on the server
+
+ Returns:
+ list of dict, one for each project
+ 'name' (str): Project name, e.g. 'U-Boot'
+ 'id' (int): Project ID, e.g. 9
+ 'link_name' (str): Project's link-name, e.g. 'uboot'
+ """
+ async with aiohttp.ClientSession() as client:
+ return await self._request(client, 'projects/')
+
+ async def _query_series(self, client, desc):
+ """Query series by name
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ desc: String to search for
+            desc (str): String to search for
+ Return:
+ list of series matches, each a dict, see get_series()
+ """
+ query = desc.replace(' ', '+')
+ return await self._request(
+ client, f'series/?project={self.proj_id}&q={query}')
+
+ async def _find_series(self, client, svid, ser_id, version, ser):
+ """Find a series on the server
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ svid (int): ser_ver ID
+ ser_id (int): series ID
+ version (int): Version number to search for
+ ser (Series): Contains description (cover-letter title)
+
+ Returns:
+ tuple:
+ int: ser_ver ID (as passed in)
+ int: series ID (as passed in)
+ str: Series link, or None if not found
+ list of dict, or None if found
+ each dict is the server result from a possible series
+ """
+ desc = ser.desc
+ name_found = []
+
+ # Do a series query on the description
+ res = await self._query_series(client, desc)
+ for pws in res:
+ if pws['name'] == desc:
+ if int(pws['version']) == version:
+ return svid, ser_id, pws['id'], None
+ name_found.append(pws)
+
+ # When there is no cover letter, patchwork uses the first patch as the
+ # series name
+ cmt = ser.commits[0]
+
+ res = await self._query_series(client, cmt.subject)
+ for pws in res:
+ patch = Patch(0)
+ patch.parse_subject(pws['name'])
+ if patch.subject == cmt.subject:
+ if int(pws['version']) == version:
+ return svid, ser_id, pws['id'], None
+ name_found.append(pws)
+
+ return svid, ser_id, None, name_found or res
+
+ async def find_series(self, ser, version):
+ """Find a series based on its description and version
+
+ Args:
+ ser (Series): Contains description (cover-letter title)
+ version (int): Version number
+
+        Return:
+            tuple:
+                str: Series link, or None if not found
+                list of dict, or None if found
+                    each dict is the server result from a possible series
+ """
+ async with aiohttp.ClientSession() as client:
+ # We don't know the svid and it isn't needed, so use -1
+ _, _, link, options = await self._find_series(client, -1, -1,
+ version, ser)
+ return link, options
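+
+    # Illustrative use from synchronous code (names are examples only):
+    #
+    #     link, options = asyncio.run(pwork.find_series(ser, 2))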
+
+ async def find_series_list(self, to_find):
+ """Find the link for each series in a list
+
+ Args:
+ to_find (dict of svids to sync):
+ key (int): ser_ver ID
+ value (tuple):
+ int: Series ID
+ int: Series version
+ str: Series link
+ str: Series description
+
+ Return: tuple:
+ list of tuple, one for each item in to_find:
+                int: ser_ver ID
+ int: series ID
+ int: Series version
+ str: Series link, or None if not found
+ list of dict, or None if found
+ each dict is the server result from a possible series
+ int: number of server requests done
+ """
+ self.request_count = 0
+ async with aiohttp.ClientSession() as client:
+ tasks = [asyncio.create_task(
+ self._find_series(client, svid, ser_id, version, desc))
+ for svid, (ser_id, version, link, desc) in to_find.items()]
+ results = await asyncio.gather(*tasks)
+
+ return results, self.request_count
+
+ def project_set(self, project_id, link_name):
+ """Set the project ID
+
+ The patchwork server has multiple projects. This allows the ID and
+ link_name of the relevant project to be selected
+
+ This function is used for testing
+
+ Args:
+ project_id (int): Project ID to use, e.g. 6
+ link_name (str): Name to use for project URL links, e.g. 'uboot'
+ """
+ self.proj_id = project_id
+ self.link_name = link_name
+
+ async def get_series(self, client, link):
+ """Read information about a series
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ link (str): Patchwork series ID
+
+ Returns: dict containing patchwork's series information
+ id (int): series ID unique across patchwork instance, e.g. 3
+ url (str): Full URL, e.g.
+ 'https://patchwork.ozlabs.org/api/1.2/series/3/'
+ web_url (str): Full URL, e.g.
+                'https://patchwork.ozlabs.org/project/uboot/list/?series=3'
+ project (dict): project information (id, url, name, link_name,
+                list_id, list_email, etc.)
+ name (str): Series name, e.g. '[U-Boot] moveconfig: fix error'
+ date (str): Date, e.g. '2017-08-27T08:00:51'
+ submitter (dict): id, url, name, email, e.g.:
+ "id": 6125,
+ "url": "https://patchwork.ozlabs.org/api/1.2/people/6125/",
+ "name": "Chris Packham",
+ "email": "judge.packham@gmail.com"
+ version (int): Version number
+ total (int): Total number of patches based on subject
+ received_total (int): Total patches received by patchwork
+ received_all (bool): True if all patches were received
+ mbox (str): URL of mailbox, e.g.
+ 'https://patchwork.ozlabs.org/series/3/mbox/'
+ cover_letter (dict) or None, e.g.:
+ "id": 806215,
+ "url": "https://patchwork.ozlabs.org/api/1.2/covers/806215/",
+ "web_url": "https://patchwork.ozlabs.org/project/uboot/cover/
+ 20170827094411.8583-1-judge.packham@gmail.com/",
+ "msgid": "<20170827094411.8583-1-judge.packham@gmail.com>",
+ "list_archive_url": null,
+ "date": "2017-08-27T09:44:07",
+ "name": "[U-Boot,v2,0/4] usb: net: Migrate USB Ethernet",
+ "mbox": "https://patchwork.ozlabs.org/project/uboot/cover/
+ 20170827094411.8583-1-judge.packham@gmail.com/mbox/"
+ patches (list of dict), each e.g.:
+ "id": 806202,
+ "url": "https://patchwork.ozlabs.org/api/1.2/patches/806202/",
+ "web_url": "https://patchwork.ozlabs.org/project/uboot/patch/
+ 20170827080051.816-1-judge.packham@gmail.com/",
+ "msgid": "<20170827080051.816-1-judge.packham@gmail.com>",
+ "list_archive_url": null,
+ "date": "2017-08-27T08:00:51",
+ "name": "[U-Boot] moveconfig: fix error message do_autoconf()",
+ "mbox": "https://patchwork.ozlabs.org/project/uboot/patch/
+ 20170827080051.816-1-judge.packham@gmail.com/mbox/"
+ """
+ return await self._request(client, f'series/{link}/')
+
+ async def get_patch(self, client, patch_id):
+ """Read information about a patch
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ patch_id (str): Patchwork patch ID
+
+ Returns: dict containing patchwork's patch information
+ "id": 185,
+ "url": "https://patchwork.ozlabs.org/api/1.2/patches/185/",
+ "web_url": "https://patchwork.ozlabs.org/project/cbe-oss-dev/patch/
+ 200809050416.27831.adetsch@br.ibm.com/",
+ project (dict): project information (id, url, name, link_name,
+            list_id, list_email, etc.)
+ "msgid": "<200809050416.27831.adetsch@br.ibm.com>",
+ "list_archive_url": null,
+ "date": "2008-09-05T07:16:27",
+ "name": "powerpc/spufs: Fix possible scheduling of a context",
+ "commit_ref": "b2e601d14deb2083e2a537b47869ab3895d23a28",
+ "pull_url": null,
+ "state": "accepted",
+ "archived": false,
+ "hash": "bc1c0b80d7cff66c0d1e5f3f8f4d10eb36176f0d",
+ "submitter": {
+ "id": 93,
+ "url": "https://patchwork.ozlabs.org/api/1.2/people/93/",
+ "name": "Andre Detsch",
+ "email": "adetsch@br.ibm.com"
+ },
+ "delegate": {
+ "id": 1,
+ "url": "https://patchwork.ozlabs.org/api/1.2/users/1/",
+ "username": "jk",
+ "first_name": "Jeremy",
+ "last_name": "Kerr",
+ "email": "jk@ozlabs.org"
+ },
+ "mbox": "https://patchwork.ozlabs.org/project/cbe-oss-dev/patch/
+ 200809050416.27831.adetsch@br.ibm.com/mbox/",
+ "series": [],
+ "comments": "https://patchwork.ozlabs.org/api/patches/185/
+ comments/",
+ "check": "pending",
+ "checks": "https://patchwork.ozlabs.org/api/patches/185/checks/",
+ "tags": {},
+ "related": [],
+ "headers": {...}
+ "content": "We currently have a race when scheduling a context
+ after we have found a runnable context in spusched_tick, the
+ context may have been scheduled by spu_activate().
+
+ This may result in a panic if we try to unschedule a context
+ been freed in the meantime.
+
+ This change exits spu_schedule() if the context has already
+ scheduled, so we don't end up scheduling it twice.
+
+ Signed-off-by: Andre Detsch <adetsch@br.ibm.com>",
+ "diff": '''Index: spufs/arch/powerpc/platforms/cell/spufs/sched.c
+ =======================================================
+ --- spufs.orig/arch/powerpc/platforms/cell/spufs/sched.c
+ +++ spufs/arch/powerpc/platforms/cell/spufs/sched.c
+ @@ -727,7 +727,8 @@ static void spu_schedule(struct spu *spu
+ \t/* not a candidate for interruptible because it's called
+ \t from the scheduler thread or from spu_deactivate */
+ \tmutex_lock(&ctx->state_mutex);
+ -\t__spu_schedule(spu, ctx);
+ +\tif (ctx->state == SPU_STATE_SAVED)
+ +\t\t__spu_schedule(spu, ctx);
+ \tspu_release(ctx);
+ }
+ '''
+ "prefixes": ["3/3", ...]
+ """
+ return await self._request(client, f'patches/{patch_id}/')
+
+ async def _get_patch_comments(self, client, patch_id):
+ """Read comments about a patch
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ patch_id (str): Patchwork patch ID
+
+ Returns: list of dict: list of comments:
+            id (int): comment ID unique across patchwork instance, e.g. 3331924
+ web_url (str): Full URL, e.g.
+ 'https://patchwork.ozlabs.org/comment/3331924/'
+ msgid (str): Message ID, e.g.
+ '<d2526c98-8198-4b8b-ab10-20bda0151da1@gmx.de>'
+ list_archive_url: (unknown?)
+ date (str): Date, e.g. '2024-06-20T13:38:03'
+ subject (str): email subject, e.g. 'Re: [PATCH 3/5] buildman:
+ Support building within a Python venv'
+ submitter (dict): id, url, name, email, e.g.:
+ "id": 61270,
+ "url": "https://patchwork.ozlabs.org/api/people/61270/",
+ "name": "Heinrich Schuchardt",
+ "email": "xypron.glpk@gmx.de"
+ content (str): Content of email, e.g. 'On 20.06.24 15:19,
+ Simon Glass wrote:
+ >...'
+ headers: dict: email headers, see get_cover() for an example
+ """
+ return await self._request(client, f'patches/{patch_id}/comments/')
+
+ async def get_cover(self, client, cover_id):
+ """Read information about a cover letter
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ cover_id (int): Patchwork cover-letter ID
+
+ Returns: dict containing patchwork's cover-letter information:
+            id (int): cover-letter ID unique across patchwork instance, e.g. 3
+            url (str): Full URL, e.g. 'https://patchwork.ozlabs.org/api/1.2/covers/2054866/'
+            web_url (str): Full URL, e.g. 'https://patchwork.ozlabs.org/project/uboot/cover/20250304130947.109799-1-sjg@chromium.org/'
+            project (dict): project information (id, url, name, link_name,
+                list_id, list_email, etc.)
+            msgid (str): Message ID, e.g. '<20250304130947.109799-1-sjg@chromium.org>'
+ list_archive_url (?)
+ date (str): Date, e.g. '2017-08-27T08:00:51'
+ name (str): Series name, e.g. '[U-Boot] moveconfig: fix error'
+ submitter (dict): id, url, name, email, e.g.:
+ "id": 6170,
+ "url": "https://patchwork.ozlabs.org/api/1.2/people/6170/",
+ "name": "Simon Glass",
+ "email": "sjg@chromium.org"
+ mbox (str): URL to mailbox, e.g. 'https://patchwork.ozlabs.org/project/uboot/cover/20250304130947.109799-1-sjg@chromium.org/mbox/'
+ series (list of dict) each e.g.:
+ "id": 446956,
+ "url": "https://patchwork.ozlabs.org/api/1.2/series/446956/",
+ "web_url": "https://patchwork.ozlabs.org/project/uboot/list/?series=446956",
+ "date": "2025-03-04T13:09:37",
+ "name": "binman: Check code-coverage requirements",
+ "version": 1,
+ "mbox": "https://patchwork.ozlabs.org/series/446956/mbox/"
+ comments: Web URL to comments: 'https://patchwork.ozlabs.org/api/covers/2054866/comments/'
+ headers: dict: e.g.:
+ "Return-Path": "<u-boot-bounces@lists.denx.de>",
+ "X-Original-To": "incoming@patchwork.ozlabs.org",
+ "Delivered-To": "patchwork-incoming@legolas.ozlabs.org",
+ "Authentication-Results": [
+ "legolas.ozlabs.org;
+\tdkim=pass (1024-bit key;
+ unprotected) header.d=chromium.org header.i=@chromium.org header.a=rsa-sha256
+ header.s=google header.b=dG8yqtoK;
+\tdkim-atps=neutral",
+ "legolas.ozlabs.org;
+ spf=pass (sender SPF authorized) smtp.mailfrom=lists.denx.de
+ (client-ip=85.214.62.61; helo=phobos.denx.de;
+ envelope-from=u-boot-bounces@lists.denx.de; receiver=patchwork.ozlabs.org)",
+ "phobos.denx.de;
+ dmarc=pass (p=none dis=none) header.from=chromium.org",
+ "phobos.denx.de;
+ spf=pass smtp.mailfrom=u-boot-bounces@lists.denx.de",
+ "phobos.denx.de;
+\tdkim=pass (1024-bit key;
+ unprotected) header.d=chromium.org header.i=@chromium.org
+ header.b=\"dG8yqtoK\";
+\tdkim-atps=neutral",
+ "phobos.denx.de;
+ dmarc=pass (p=none dis=none) header.from=chromium.org",
+ "phobos.denx.de;
+ spf=pass smtp.mailfrom=sjg@chromium.org"
+ ],
+ "Received": [
+ "from phobos.denx.de (phobos.denx.de [85.214.62.61])
+\t(using TLSv1.3 with cipher TLS_AES_256_GCM_SHA384 (256/256 bits)
+\t key-exchange X25519 server-signature ECDSA (secp384r1))
+\t(No client certificate requested)
+\tby legolas.ozlabs.org (Postfix) with ESMTPS id 4Z6bd50jLhz1yD0
+\tfor <incoming@patchwork.ozlabs.org>; Wed, 5 Mar 2025 00:10:00 +1100 (AEDT)",
+ "from h2850616.stratoserver.net (localhost [IPv6:::1])
+\tby phobos.denx.de (Postfix) with ESMTP id 434E88144A;
+\tTue, 4 Mar 2025 14:09:58 +0100 (CET)",
+ "by phobos.denx.de (Postfix, from userid 109)
+ id 8CBF98144A; Tue, 4 Mar 2025 14:09:57 +0100 (CET)",
+ "from mail-io1-xd2e.google.com (mail-io1-xd2e.google.com
+ [IPv6:2607:f8b0:4864:20::d2e])
+ (using TLSv1.3 with cipher TLS_AES_128_GCM_SHA256 (128/128 bits))
+ (No client certificate requested)
+ by phobos.denx.de (Postfix) with ESMTPS id 48AE281426
+ for <u-boot@lists.denx.de>; Tue, 4 Mar 2025 14:09:55 +0100 (CET)",
+ "by mail-io1-xd2e.google.com with SMTP id
+ ca18e2360f4ac-85ae33109f6so128326139f.2
+ for <u-boot@lists.denx.de>; Tue, 04 Mar 2025 05:09:55 -0800 (PST)",
+ "from chromium.org (c-73-203-119-151.hsd1.co.comcast.net.
+ [73.203.119.151]) by smtp.gmail.com with ESMTPSA id
+ ca18e2360f4ac-858753cd304sm287383839f.33.2025.03.04.05.09.49
+ (version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256);
+ Tue, 04 Mar 2025 05:09:50 -0800 (PST)"
+ ],
+ "X-Spam-Checker-Version": "SpamAssassin 3.4.2 (2018-09-13) on phobos.denx.de",
+ "X-Spam-Level": "",
+ "X-Spam-Status": "No, score=-2.1 required=5.0 tests=BAYES_00,DKIMWL_WL_HIGH,
+ DKIM_SIGNED,DKIM_VALID,DKIM_VALID_AU,DKIM_VALID_EF,
+ RCVD_IN_DNSWL_BLOCKED,SPF_HELO_NONE,SPF_PASS autolearn=ham
+ autolearn_force=no version=3.4.2",
+ "DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed;
+ d=chromium.org; s=google; t=1741093792; x=1741698592; darn=lists.denx.de;
+ h=content-transfer-encoding:mime-version:message-id:date:subject:cc
+ :to:from:from:to:cc:subject:date:message-id:reply-to;
+ bh=B2zsLws430/BEZfatNjeaNnrcxmYUstVjp1pSXgNQjc=;
+ b=dG8yqtoKpSy15RHagnPcppzR8KbFCRXa2OBwXfwGoyN6M15tOJsUu2tpCdBFYiL5Mk
+ hQz5iDLV8p0Bs+fP4XtNEx7KeYfTZhiqcRFvdCLwYtGray/IHtOZaNoHLajrstic/OgE
+ 01ymu6gOEboU32eQ8uC8pdCYQ4UCkfKJwmiiU=",
+ "X-Google-DKIM-Signature": "v=1; a=rsa-sha256; c=relaxed/relaxed;
+ d=1e100.net; s=20230601; t=1741093792; x=1741698592;
+ h=content-transfer-encoding:mime-version:message-id:date:subject:cc
+ :to:from:x-gm-message-state:from:to:cc:subject:date:message-id
+ :reply-to;
+ bh=B2zsLws430/BEZfatNjeaNnrcxmYUstVjp1pSXgNQjc=;
+ b=eihzJf4i9gin9usvz4hnAvvbLV9/yB7hGPpwwW/amgnPUyWCeQstgvGL7WDLYYnukH
+ 161p4mt7+cCj7Hao/jSPvVZeuKiBNPkS4YCuP3QjXfdk2ziQ9IjloVmGarWZUOlYJ5iQ
+ dZnxypUkuFfLcEDSwUmRO1dvLi3nH8PDlae3yT2H87LeHaxhXWdzHxQdPc86rkYyCqCr
+ qBC2CTS31jqSuiaI+7qB3glvbJbSEXkunz0iDewTJDvZfmuloxTipWUjRJ1mg9UJcZt5
+ 9xIuTq1n9aYf1RcQlrEOQhdBAQ0/IJgvmZtzPZi9L+ppBva1ER/xm06nMA7GEUtyGwun
+ c6pA==",
+ "X-Gm-Message-State": "AOJu0Yybx3b1+yClf/IfIbQd9u8sxzK9ixPP2HimXF/dGZfSiS7Cb+O5
+ WrAkvtp7m3KPM/Mpv0sSZ5qrfTnKnb3WZyv6Oe5Q1iUjAftGNwbSxob5eJ/0y3cgrTdzE4sIWPE
+ =",
+ "X-Gm-Gg": "ASbGncu5gtgpXEPGrpbTRJulqFrFj1YPAAmKk4MiXA8/3J1A+25F0Uug2KeFUrZEjkG
+ KMdPg/C7e2emIvfM+Jl+mKv0ITBvhbyNCyY1q2U1s1cayZF05coZ9ewzGxXJGiEqLMG69uBmmIi
+ rBEvCnkXS+HVZobDQMtOsezpc+Ju8JRA7+y1R0WIlutl1mQARct6p0zTkuZp75QyB6dm/d0KYgd
+ iux/t/f0HC2CxstQlTlJYzKL6UJgkB5/UorY1lW/0NDRS6P1iemPQ7I3EPLJO8tM5ZrpJE7qgNP
+ xy0jXbUv44c48qJ1VszfY5USB8fRG7nwUYxNu6N1PXv9xWbl+z2xL68qNYUrFlHsB8ILTXAyzyr
+ Cdj+Sxg==",
+ "X-Google-Smtp-Source": "
+ AGHT+IFeVk5D4YEfJgPxOfg3ikO6Q7IhaDzABGkAPI6HA0ubK85OPhUHK08gV7enBQ8OdoE/ttqEjw==",
+ "X-Received": "by 2002:a05:6602:640f:b0:855:63c8:abb5 with SMTP id
+ ca18e2360f4ac-85881fdba3amr1839428939f.13.1741093792636;
+ Tue, 04 Mar 2025 05:09:52 -0800 (PST)",
+ "From": "Simon Glass <sjg@chromium.org>",
+ "To": "U-Boot Mailing List <u-boot@lists.denx.de>",
+ "Cc": "Simon Glass <sjg@chromium.org>, Alexander Kochetkov <al.kochet@gmail.com>,
+ Alper Nebi Yasak <alpernebiyasak@gmail.com>,
+ Brandon Maier <brandon.maier@collins.com>,
+ Jerome Forissier <jerome.forissier@linaro.org>,
+ Jiaxun Yang <jiaxun.yang@flygoat.com>,
+ Neha Malcom Francis <n-francis@ti.com>,
+ Patrick Rudolph <patrick.rudolph@9elements.com>,
+ Paul HENRYS <paul.henrys_ext@softathome.com>, Peng Fan <peng.fan@nxp.com>,
+ Philippe Reynes <philippe.reynes@softathome.com>,
+ Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>,
+ Tom Rini <trini@konsulko.com>",
+ "Subject": "[PATCH 0/7] binman: Check code-coverage requirements",
+ "Date": "Tue, 4 Mar 2025 06:09:37 -0700",
+ "Message-ID": "<20250304130947.109799-1-sjg@chromium.org>",
+ "X-Mailer": "git-send-email 2.43.0",
+ "MIME-Version": "1.0",
+ "Content-Transfer-Encoding": "8bit",
+ "X-BeenThere": "u-boot@lists.denx.de",
+ "X-Mailman-Version": "2.1.39",
+ "Precedence": "list",
+ "List-Id": "U-Boot discussion <u-boot.lists.denx.de>",
+ "List-Unsubscribe": "<https://lists.denx.de/options/u-boot>,
+ <mailto:u-boot-request@lists.denx.de?subject=unsubscribe>",
+ "List-Archive": "<https://lists.denx.de/pipermail/u-boot/>",
+ "List-Post": "<mailto:u-boot@lists.denx.de>",
+ "List-Help": "<mailto:u-boot-request@lists.denx.de?subject=help>",
+ "List-Subscribe": "<https://lists.denx.de/listinfo/u-boot>,
+ <mailto:u-boot-request@lists.denx.de?subject=subscribe>",
+ "Errors-To": "u-boot-bounces@lists.denx.de",
+ "Sender": "\"U-Boot\" <u-boot-bounces@lists.denx.de>",
+ "X-Virus-Scanned": "clamav-milter 0.103.8 at phobos.denx.de",
+ "X-Virus-Status": "Clean"
+ content (str): Email content, e.g. 'This series adds a cover-coverage check to CI for Binman. The iMX8 tests
+are still not completed,...'
+ """
+ async with aiohttp.ClientSession() as client:
+ return await self._request(client, f'covers/{cover_id}/')
+
+ async def get_cover_comments(self, client, cover_id):
+ """Read comments about a cover letter
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ cover_id (str): Patchwork cover-letter ID
+
+ Returns: list of dict: list of comments, each:
+            id (int): comment ID unique across patchwork instance, e.g. 3472068
+            web_url (str): Full URL, e.g. 'https://patchwork.ozlabs.org/comment/3472068/'
+            url (str): Full URL, e.g. 'https://patchwork.ozlabs.org/api/1.2/covers/2054866/'
+            list_archive_url: (unknown?)
+            project (dict): project information (id, url, name, link_name,
+                list_id, list_email, etc.)
+ date (str): Date, e.g. '2025-03-04T13:16:15'
+ subject (str): 'Re: [PATCH 0/7] binman: Check code-coverage requirements'
+ submitter (dict): id, url, name, email, e.g.:
+ "id": 6170,
+ "url": "https://patchwork.ozlabs.org/api/people/6170/",
+ "name": "Simon Glass",
+ "email": "sjg@chromium.org"
+ content (str): Email content, e.g. 'Hi,
+
+On Tue, 4 Mar 2025 at 06:09, Simon Glass <sjg@chromium.org> wrote:
+>
+> This '...
+ headers: dict: email headers, see get_cover() for an example
+ """
+ return await self._request(client, f'covers/{cover_id}/comments/')
+
+ async def get_series_url(self, link):
+ """Get the URL for a series
+
+ Args:
+ link (str): Patchwork series ID
+
+ Returns:
+ str: URL for the series page
+ """
+ return f'{self.url}/project/{self.link_name}/list/?series={link}&state=*&archive=both'
+
+ async def _get_patch_status(self, client, patch_id):
+ """Get the patch status
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ patch_id (int): Patch ID to look up in patchwork
+
+ Return:
+            Patch: Patch information
+
+ Requests:
+ 1 for patch, 1 for patch comments
+ """
+ data = await self.get_patch(client, patch_id)
+ state = data['state']
+ comment_data = await self._get_patch_comments(client, patch_id)
+
+ return Patch(patch_id, state, data, comment_data)
+
+ async def get_series_cover(self, client, data):
+ """Get the cover information (including comments)
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ data (dict): Return value from self.get_series()
+
+ Returns:
+ COVER object, or None if no cover letter
+ """
+ # Patchwork should always provide this, but use get() so that we don't
+ # have to provide it in our fake patchwork _fake_patchwork_cser()
+ cover = data.get('cover_letter')
+ cover_id = None
+ if cover:
+ cover_id = cover['id']
+ info = await self.get_cover_comments(client, cover_id)
+ cover = COVER(cover_id, len(info), cover['name'], info)
+ return cover
+
+ async def series_get_state(self, client, link, read_comments,
+ read_cover_comments):
+ """Sync the series information against patchwork, to find patch status
+
+ Args:
+ client (aiohttp.ClientSession): Session to use
+ link (str): Patchwork series ID
+ read_comments (bool): True to read the comments on the patches
+ read_cover_comments (bool): True to read the comments on the cover
+ letter
+
+ Return: tuple:
+ COVER object, or None if none or not read_cover_comments
+            list of Patch objects
+ """
+ data = await self.get_series(client, link)
+ patch_list = list(data['patches'])
+
+ count = len(patch_list)
+ patches = []
+ if read_comments:
+ # Returns a list of Patch objects
+ tasks = [self._get_patch_status(client, patch_list[i]['id'])
+ for i in range(count)]
+
+ patch_status = await asyncio.gather(*tasks)
+ for patch_data, status in zip(patch_list, patch_status):
+ status.series_data = patch_data
+ patches.append(status)
+ else:
+ for i in range(count):
+ info = patch_list[i]
+ pat = Patch(info['id'], series_data=info)
+ pat.raw_subject = info['name']
+ patches.append(pat)
+ if self._show_progress:
+ terminal.print_clear()
+
+ if read_cover_comments:
+ cover = await self.get_series_cover(client, data)
+ else:
+ cover = None
+
+ return cover, patches
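+
+# Illustrative driver for series_get_state() (not part of this module;
+# 'pwork' is a Patchwork instance and 'link' a patchwork series ID):
+#
+#     async def example(pwork, link):
+#         async with aiohttp.ClientSession() as client:
+#             return await pwork.series_get_state(client, link, True, True)
+#
+#     cover, patches = asyncio.run(example(pwork, '1234'))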
diff --git a/tools/patman/patman.rst b/tools/patman/patman.rst
index 63b95a6b161..549e203c254 100644
--- a/tools/patman/patman.rst
+++ b/tools/patman/patman.rst
@@ -16,12 +16,13 @@ This tool is a Python script which:
- Inserts a cover letter with change lists
- Runs the patches through checkpatch.pl and its own checks
- Optionally emails them out to selected people
+- Links the series automatically to Patchwork once sent
It also has some Patchwork features:
-- shows review tags from Patchwork so you can update your local patches
-- pulls these down into a new branch on request
-- lists comments received on a series
+- Manage local series and their status on Patchwork
+- Show review tags from Patchwork and allow them to be gathered into commits
+- List comments received on a series
It is intended to automate patch creation and make it a less
error-prone process. It is useful for U-Boot and Linux work so far,
@@ -659,6 +660,282 @@ so to send them:
and it will create and send the version 2 series.
+Series Management
+-----------------
+
+Sometimes you might have several series in flight at the same time. Each of
+these receives comments and you want to create a new version of each series with
+those comments addressed.
+
+Patman provides a few subcommands which are helpful for managing series.
+
+Series and branches
+~~~~~~~~~~~~~~~~~~~
+
+'patman series' works with the concept of a series. It maintains a local
+database (.patman.db in your top-level git tree) and uses that to keep track of
+series and patches.
+
+Each series goes through multiple versions. Patman requires that the first
+version of your series is in a branch without a numeric suffix. Branch names
+like 'serial' and 'video' are OK, but 'part3' is not. This is because Patman
+uses the number at the end of the branch name to indicate the version.
+
+If your series name is 'video', then you can have a 'video' branch for version
+1 of the series, 'video2' for version 2 and 'video3' for version 3. All three
+branches are for the same series. Patman keeps track of these different
+versions. It handles the branch naming automatically, but you need to be aware
+of what it is doing.
+
+You will have an easier time if the branch names you use with 'patman series'
+are short, no more than 15 characters. This is the amount of columnar space in
+listings. You can add a longer description as the series description. If you
+are used to having very descriptive branch names, remember that patman lets you
+add metadata into commits, which is automatically removed before sending.
+
+This documentation uses the term 'series' to mean all the versions of a series
+and 'series/version' to mean a particular version of a series.
+
+Updating commits
+~~~~~~~~~~~~~~~~
+
+Since Patman provides quite a bit of automation, it updates your commits in
+some cases, effectively doing a rebase of a branch in order to change the tags
+in the commits. It never makes code changes.
+
+In extremis you can use 'git reflog' to revert something that Patman did.
+
+
+Series subcommands
+~~~~~~~~~~~~~~~~~~
+
+Note that 'patman series ...' can be abbreviated as 'patman s' or 'patman ser'.
+
+Here is a short overview of the available subcommands (an example session
+follows the list):
+
+ add
+ Add a new series. Use this on an existing branch to tell Patman about it.
+
+ archive (ar)
+ Archive a series when you have finished upstreaming it. Archived series
+ are not shown by most commands. This creates a dated tag for each
+ version of the series, pointing to the series branch, then deletes the
+ branches. It puts the tag names in the database so that it can
+ 'unarchive' to restore things how they were.
+        'unarchive' to restore things to how they were.
+ unarchive (unar)
+ Unarchive a series when you decide you need to do something more with
+ it. The branches are restored and tags deleted.
+
+ autolink (au)
+ Search patchwork for the series link for your series, so Patman can
+ track the status
+
+ autolink-all
+ Same but for all series
+
+ inc
+ Increase the series number, effectively creating a new branch with the
+ next highest version number. The new branch is created based on the
+ existing branch. So if you use 'patman series inc' on branch 'video2'
+ it will create branch 'video3' and add v3 into its database
+
+ dec
+ Decrease the series number, thus deleting the current branch and
+        removing that version from the database. If you use this command on
+        branch 'video3' Patman will delete version 3 and branch 'video3'.
+
+ get-link
+ Shows the Patchwork link for a series/version
+
+ ls
+ Lists the series in the database
+
+ mark
+ Mark a series with 'Change-Id' tags so that Patman can track patches
+ even when the subject changes. Unmarked patches just use the subject to
+        decide which is which.
+
+ unmark
+ Remove 'Change-Id' tags from a series.
+
+ open (o)
+ Open a series in Patchwork using your web browser
+
+ patches
+ Show the patches in a particular series/version
+
+ progress (p)
+ Show upstream progress for your series, or for all series
+
+ rm
+ Remove a series entirely, including all versions
+
+ rm-version (rmv)
+ Remove a particular version of a series. This is similar to 'dec'
+ except that any version can be removed, not just the latest one.
+
+ scan
+ Scan the local branch and update the database with the set of patches
+ in that branch. This throws away the old patches.
+
+ send
+ Send a series out as patches. This is similar to 'patman send' except
+ that it can send any series, not just the current branch. It also
+ waits a little for patchwork to see the cover letter, so it can find
+ out the patchwork link for the series.
+
+ set-link
+        Set the Patchwork link for a series/version manually.
+
+ status (st)
+ Run 'patman status' on a series. This is similar to 'patman status'
+ except that it can get status on any series, not just the current
+ branch
+
+ summary
+ Shows a quick summary of series with their status and description.
+
+ sync
+        Sync the status of a series with Patchwork, so that
+ 'patman series progress' can show the right information.
+
+ sync-all
+ Sync the status of all series.
+
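+For example, a hypothetical session using some of these subcommands::
+
+    patman series add -M     # register the current branch as a new series
+    patman s inc             # create the next-version branch (e.g. video2)
+    patman s progress        # show upstream progress
+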
+
+Patman series workflow
+~~~~~~~~~~~~~~~~~~~~~~
+
+Here is a run-through of how to incorporate 'patman series' into your workflow.
+
+Firstly, set up your project::
+
+ patman patchwork set-project U-Boot
+
+This just tells Patman to look on the Patchwork server for a project of that
+name. Internally Patman stores the ID and URL 'link-name' for the project, so it
+can access it.
+
+If you need to use a different patchwork server, use the `--patchwork-url`
+option or put the URL in your Patman-settings file.
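+
+For example, a hypothetical invocation (adjust the URL for your server)::
+
+    patman --patchwork-url https://patchwork.ozlabs.org series autolink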
+
+Now create a branch. For our example we are going to send out a series related
+to video so the branch will be called 'video'. The upstream remote is called
+'us'::
+
+ git checkout -b video us/master
+
+We now have a branch and so we can do some commits::
+
+ <edit files>
+ git add ...
+ <edit files>
+ git add -u
+ git commit ...
+ git commit ...
+
+We now have a few commits in our 'video' branch. Let's tell patman about it::
+
+ patman series add
+
+Like most commands, this assumes the current branch if no series is given
+(use `patman series -s video add` to specify one). Since the branch is called
+'video', patman knows that it is version one of the video series.
+
+You'll likely get a warning that there is no cover letter. Let's add some tags
+to the top commit::
+
+ Series-to: u-boot
+ Series-cc: ...
+ Cover-letter:
+ video: Improve syncing performance with cyclic
+
+Trying again::
+
+ patman series add
+
+You'll likely get a warning that the commits are unmarked. You can either let
+patman add Change-Id values itself with the `-m` flag, or tell it not to worry
+about it with `-M`. You must choose one or the other. Let's leave the commits
+unmarked::
+
+ patman series add -M
+
+Congratulations, you've now got a patman database!
+
+Now let's send out the series.
+
+To send it::
+
+ patman series send
+
+You should see 'git send-email' start up and you can confirm the sending of
+each email.
+
+After that, patman waits a bit to see if it can find your new series appearing
+on Patchwork. With a bit of luck this will only take 20 seconds or so. Then your
+series is linked.
+
+To gather tags (Reviewed-by ...) for your series from patchwork::
+
+ patman series gather
+
+Now you can check your progress::
+
+ patman series progress
+
+Later on you get some comments, or perhaps you just decide to make a change on
+your own. You have several options.
+
+The first option is that you can just create a new branch::
+
+ git checkout -b video2 video
+
+then you can add this 'v2' series to Patman with::
+
+ patman series add
+
+The second option is to get patman to create the new 'video2' branch in one
+step::
+
+    patman series inc
+
+The third option is to collect some tags using the 'patman status' command and
+put them in a new branch::
+
+ patman status -d video2
+
+One day the fourth option will be to ask patman to collect tags as part of the
+'patman series inc' command.
+
+Again, you do your edits, perhaps adding/removing patches, rebasing on -master
+and so on. Then, send your v2::
+
+ patman series send
+
+Let's say the patches are accepted. You can use::
+
+    patman series gather
+    patman series progress
+
+to check, or::
+
+ patman series status -cC
+
+to see comments. You can now archive the series::
+
+ patman series archive
+
+At this point you have the basics. Some of the subcommands have useful
+options, so be sure to check out the help.
+
+Here is a sample 'progress' view:
+
+.. image:: pics/patman.jpg
+ :width: 800
+ :alt: Patman showing the progress view
+
General points
--------------
diff --git a/tools/patman/project.py b/tools/patman/project.py
index d6143a67066..e633401e9d6 100644
--- a/tools/patman/project.py
+++ b/tools/patman/project.py
@@ -18,7 +18,8 @@ def detect_project():
"""
top_level = gitutil.get_top_level()
- if os.path.exists(os.path.join(top_level, "include", "u-boot")):
+ if (not top_level or
+ os.path.exists(os.path.join(top_level, "include", "u-boot"))):
return "u-boot"
elif os.path.exists(os.path.join(top_level, "kernel")):
return "linux"
diff --git a/tools/patman/pyproject.toml b/tools/patman/pyproject.toml
index fcefcf66960..06e169cdf48 100644
--- a/tools/patman/pyproject.toml
+++ b/tools/patman/pyproject.toml
@@ -8,7 +8,7 @@ version = "0.0.6"
authors = [
{ name="Simon Glass", email="sjg@chromium.org" },
]
-dependencies = ["u_boot_pylib >= 0.0.6"]
+dependencies = ["u_boot_pylib >= 0.0.6", "aiohttp >= 3.9.1" ]
description = "Patman patch manager"
readme = "README.rst"
requires-python = ">=3.7"
diff --git a/tools/patman/requirements.txt b/tools/patman/requirements.txt
index e8cbc6cf0c3..ce9a3854527 100644
--- a/tools/patman/requirements.txt
+++ b/tools/patman/requirements.txt
@@ -1,5 +1,6 @@
+aiohttp==3.9.1
ConfigParser==7.1.0
importlib_resources==6.5.2
-pygit2==1.13.3
+pygit2==1.14.1
Requests==2.32.3
setuptools==75.8.0
diff --git a/tools/patman/send.py b/tools/patman/send.py
new file mode 100644
index 00000000000..08a916aff1a
--- /dev/null
+++ b/tools/patman/send.py
@@ -0,0 +1,197 @@
+# SPDX-License-Identifier: GPL-2.0+
+#
+# Copyright 2025 Google LLC
+#
+"""Handles the 'send' subcommand
+"""
+
+import os
+import sys
+
+from patman import checkpatch
+from patman import patchstream
+from patman import settings
+from u_boot_pylib import gitutil
+from u_boot_pylib import terminal
+
+
+def check_patches(series, patch_files, run_checkpatch, verbose, use_tree, cwd):
+ """Run some checks on a set of patches
+
+ This sanity-checks the patman tags like Series-version and runs the patches
+ through checkpatch
+
+ Args:
+ series (Series): Series object for this series (set of patches)
+ patch_files (list): List of patch filenames, each a string, e.g.
+ ['0001_xxx.patch', '0002_yyy.patch']
+ run_checkpatch (bool): True to run checkpatch.pl
+ verbose (bool): True to print out every line of the checkpatch output as
+ it is parsed
+ use_tree (bool): If False we'll pass '--no-tree' to checkpatch.
+ cwd (str): Path to use for patch files (None to use current dir)
+
+ Returns:
+ bool: True if the patches had no errors, False if they did
+ """
+ # Do a few checks on the series
+ series.DoChecks()
+
+ # Check the patches
+ if run_checkpatch:
+ ok = checkpatch.check_patches(verbose, patch_files, use_tree, cwd)
+ else:
+ ok = True
+ return ok
+
+
+def email_patches(col, series, cover_fname, patch_files, process_tags, its_a_go,
+ ignore_bad_tags, add_maintainers, get_maintainer_script, limit,
+ dry_run, in_reply_to, thread, smtp_server, cwd=None):
+ """Email patches to the recipients
+
+ This emails out the patches and cover letter using 'git send-email'. Each
+ patch is copied to recipients identified by the patch tag and output from
+ the get_maintainer.pl script. The cover letter is copied to all recipients
+ of any patch.
+
+ To make this work a CC file is created holding the recipients for each patch
+ and the cover letter. See the main program 'cc_cmd' for this logic.
+
+ Args:
+ col (terminal.Color): Colour output object
+ series (Series): Series object for this series (set of patches)
+ cover_fname (str): Filename of the cover letter as a string (None if
+ none)
+ patch_files (list): List of patch filenames, each a string, e.g.
+ ['0001_xxx.patch', '0002_yyy.patch']
+ process_tags (bool): True to process subject tags in each patch, e.g.
+ for 'dm: spi: Add SPI support' this would be 'dm' and 'spi'. The
+ tags are looked up in the configured sendemail.aliasesfile and also
+ in ~/.patman (see README)
+ its_a_go (bool): True if we are going to actually send the patches,
+ False if the patches have errors and will not be sent unless
+ @ignore_errors
+ ignore_bad_tags (bool): True to just print a warning for unknown tags,
+ False to halt with an error
+ add_maintainers (bool): Run the get_maintainer.pl script for each patch
+ get_maintainer_script (str): The script used to retrieve which
+ maintainers to cc
+ limit (int): Limit on the number of people that can be cc'd on a single
+ patch or the cover letter (None if no limit)
+ dry_run (bool): Don't actually email the patches, just print out what
+ would be sent
+ in_reply_to (str): If not None we'll pass this to git as --in-reply-to.
+ Should be a message ID that this is in reply to.
+ thread (bool): True to add --thread to git send-email (make all patches
+ reply to cover-letter or first patch in series)
+ smtp_server (str): SMTP server to use to send patches (None for default)
+ cwd (str): Path to use for patch files (None to use current dir)
+
+ Return:
+ Git command that was/would be run
+ """
+ cc_file = series.MakeCcFile(process_tags, cover_fname, not ignore_bad_tags,
+ add_maintainers, limit, get_maintainer_script,
+ settings.alias, cwd)
+
+ # Email the patches out (giving the user time to check / cancel)
+ cmd = ''
+ if its_a_go:
+ cmd = gitutil.email_patches(
+ series, cover_fname, patch_files, dry_run, not ignore_bad_tags,
+ cc_file, alias=settings.alias, in_reply_to=in_reply_to,
+ thread=thread, smtp_server=smtp_server, cwd=cwd)
+ else:
+ print(col.build(col.RED, "Not sending emails due to errors/warnings"))
+
+ # For a dry run, just show our actions as a sanity check
+ if dry_run:
+ series.ShowActions(patch_files, cmd, process_tags, settings.alias)
+ if not its_a_go:
+ print(col.build(col.RED, "Email would not be sent"))
+
+ os.remove(cc_file)
+ return cmd
+
+
+def prepare_patches(col, branch, count, start, end, ignore_binary, signoff,
+ keep_change_id=False, git_dir=None, cwd=None):
+ """Figure out what patches to generate, then generate them
+
+ The patch files are written to the current directory, e.g. 0001_xxx.patch
+ 0002_yyy.patch
+
+ Args:
+ col (terminal.Color): Colour output object
+ branch (str): Branch to create patches from (None = current)
+ count (int): Number of patches to produce, or -1 to produce patches for
+ the current branch back to the upstream commit
+ start (int): Start patch to use (0=first / top of branch)
+ end (int): End patch to use (0=last one in series, 1=one before that,
+ etc.)
+ ignore_binary (bool): Don't generate patches for binary files
+ signoff (bool): True to add Signed-off-by lines to the patches
+ keep_change_id (bool): Preserve the Change-Id tag.
+ git_dir (str): Path to git repository (None to use default)
+ cwd (str): Path to use for git operations (None to use current dir)
+
+ Returns:
+ Tuple:
+ Series object for this series (set of patches)
+ Filename of the cover letter as a string (None if none)
+ patch_files: List of patch filenames, each a string, e.g.
+ ['0001_xxx.patch', '0002_yyy.patch']
+ """
+ if count == -1:
+ # Work out how many patches to send if we can
+ count = (gitutil.count_commits_to_branch(branch, git_dir=git_dir) -
+ start)
+
+ if not count:
+ msg = 'No commits found to process - please use -c flag, or run:\n' \
+ ' git branch --set-upstream-to remote/branch'
+ sys.exit(col.build(col.RED, msg))
+
+ # Read the metadata from the commits
+ to_do = count - end
+ series = patchstream.get_metadata(branch, start, to_do, git_dir)
+ cover_fname, patch_files = gitutil.create_patches(
+ branch, start, to_do, ignore_binary, series, signoff, git_dir=git_dir,
+ cwd=cwd)
+
+ # Fix up the patch files to our liking, and insert the cover letter
+ patchstream.fix_patches(series, patch_files, keep_change_id,
+ insert_base_commit=not cover_fname, cwd=cwd)
+ if cover_fname and series.get('cover'):
+ patchstream.insert_cover_letter(cover_fname, series, to_do, cwd=cwd)
+ return series, cover_fname, patch_files
+
+
+def send(args, git_dir=None, cwd=None):
+ """Create, check and send patches by email
+
+ Args:
+ args (argparse.Namespace): Arguments to patman
+ git_dir (str): Path to git repository (None to use default)
+ cwd (str): Path to use for git operations
+
+ Return:
+ bool: True if the patches were likely sent, else False
+ """
+ col = terminal.Color()
+ series, cover_fname, patch_files = prepare_patches(
+ col, args.branch, args.count, args.start, args.end,
+ args.ignore_binary, args.add_signoff,
+ keep_change_id=args.keep_change_id, git_dir=git_dir, cwd=cwd)
+ ok = check_patches(series, patch_files, args.check_patch,
+ args.verbose, args.check_patch_use_tree, cwd)
+
+ ok = ok and gitutil.check_suppress_cc_config()
+
+ its_a_go = ok or args.ignore_errors
+ cmd = email_patches(
+ col, series, cover_fname, patch_files, args.process_tags,
+ its_a_go, args.ignore_bad_tags, args.add_maintainers,
+ args.get_maintainer_script, args.limit, args.dry_run,
+ args.in_reply_to, args.thread, args.smtp_server, cwd=cwd)
+
+ return cmd and its_a_go and not args.dry_run
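+
+# A minimal usage sketch (illustrative, not part of the module): parse the
+# arguments with patman's cmdline module, then hand them to send():
+#
+#     from patman import cmdline, send
+#     args = cmdline.parse_args(['send'], config_fname=False)
+#     send.send(args)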
diff --git a/tools/patman/series.py b/tools/patman/series.py
index b73e9c58de4..ad61bbfa399 100644
--- a/tools/patman/series.py
+++ b/tools/patman/series.py
@@ -25,13 +25,27 @@ class Series(dict):
"""Holds information about a patch series, including all tags.
Vars:
- cc: List of aliases/emails to Cc all patches to
- commits: List of Commit objects, one for each patch
- cover: List of lines in the cover letter
- notes: List of lines in the notes
- changes: (dict) List of changes for each version, The key is
- the integer version number
- allow_overwrite: Allow tags to overwrite an existing tag
+ cc (list of str): Aliases/emails to Cc all patches to
+ to (list of str): Aliases/emails to send patches to
+ commits (list of Commit): Commit objects, one for each patch
+ cover (list of str): Lines in the cover letter
+ notes (list of str): Lines in the notes
+ changes: (dict) List of changes for each version:
+ key (int): version number
+ value: tuple:
+ commit (Commit): Commit this relates to, or None if related to a
+ cover letter
+ info (str): change lines for this version (separated by \n)
+ allow_overwrite (bool): Allow tags to overwrite an existing tag
+ base_commit (Commit): Commit object at the base of this series
+ branch (str): Branch name of this series
+ desc (str): Description of the series (cover-letter title)
+ idnum (int or None): Database rowid
+ name (str): Series name, typically the branch name without any numeric
+ suffix
+ _generated_cc (dict): written in MakeCcFile()
+ key: name of patch file
+ value: list of email addresses
"""
def __init__(self):
self.cc = []
@@ -44,10 +58,9 @@ class Series(dict):
self.allow_overwrite = False
self.base_commit = None
self.branch = None
-
- # Written in MakeCcFile()
- # key: name of patch file
- # value: list of email addresses
+ self.desc = ''
+ self.idnum = None
+ self.name = None
self._generated_cc = {}
# These make us more like a dictionary
@@ -57,6 +70,14 @@ class Series(dict):
def __getattr__(self, name):
return self[name]
+ @staticmethod
+ def from_fields(idnum, name, desc):
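+ """Create a Series object from basic fields
+
+ Args:
+ idnum (int): Database rowid
+ name (str): Series name
+ desc (str): Description of the series (cover-letter title)
+
+ Return:
+ Series: new object with idnum, name and desc set
+ """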
+ ser = Series()
+ ser.idnum = idnum
+ ser.name = name
+ ser.desc = desc
+ return ser
+
def AddTag(self, commit, line, name, value):
"""Add a new Series-xxx tag along with its value.
@@ -102,16 +123,19 @@ class Series(dict):
commit.check_tags()
self.commits.append(commit)
- def ShowActions(self, args, cmd, process_tags):
+ def ShowActions(self, args, cmd, process_tags, alias):
"""Show what actions we will/would perform
Args:
args: List of patch files we created
cmd: The git command we would have run
process_tags: Process tags as if they were aliases
+ alias (dict): Alias dictionary
+ key: alias
+ value: list of aliases or email addresses
"""
- to_set = set(gitutil.build_email_list(self.to));
- cc_set = set(gitutil.build_email_list(self.cc));
+ to_set = set(gitutil.build_email_list(self.to, alias))
+ cc_set = set(gitutil.build_email_list(self.cc, alias))
col = terminal.Color()
print('Dry run, so not doing much. But I would do this:')
@@ -140,7 +164,8 @@ class Series(dict):
print('Postfix:\t ', self.get('postfix'))
if self.cover:
print('Cover: %d lines' % len(self.cover))
- cover_cc = gitutil.build_email_list(self.get('cover_cc', ''))
+ cover_cc = gitutil.build_email_list(self.get('cover_cc', ''),
+ alias)
all_ccs = itertools.chain(cover_cc, *self._generated_cc.values())
for email in sorted(set(all_ccs) - to_set - cc_set):
print(' Cc: ', email)
@@ -241,7 +266,7 @@ class Series(dict):
def GetCcForCommit(self, commit, process_tags, warn_on_error,
add_maintainers, limit, get_maintainer_script,
- all_skips):
+ all_skips, alias, cwd):
"""Get the email CCs to use with a particular commit
Uses subject tags and get_maintainers.pl script to find people to cc
@@ -261,21 +286,25 @@ class Series(dict):
all_skips (set of str): Updated to include the set of bouncing email
addresses that were dropped from the output. This is essentially
a return value from this function.
+ alias (dict): Alias dictionary
+ key: alias
+ value: list of aliases or email addresses
+ cwd (str): Path to use for patch filenames (None to use current dir)
Returns:
list of str: List of email addresses to cc
"""
cc = []
if process_tags:
- cc += gitutil.build_email_list(commit.tags,
+ cc += gitutil.build_email_list(commit.tags, alias,
warn_on_error=warn_on_error)
- cc += gitutil.build_email_list(commit.cc_list,
+ cc += gitutil.build_email_list(commit.cc_list, alias,
warn_on_error=warn_on_error)
if type(add_maintainers) == type(cc):
cc += add_maintainers
elif add_maintainers:
- cc += get_maintainer.get_maintainer(get_maintainer_script,
- commit.patch)
+ fname = os.path.join(cwd or '', commit.patch)
+ cc += get_maintainer.get_maintainer(get_maintainer_script, fname)
all_skips |= set(cc) & set(settings.bounces)
cc = list(set(cc) - set(settings.bounces))
if limit is not None:
@@ -283,7 +312,8 @@ class Series(dict):
return cc
def MakeCcFile(self, process_tags, cover_fname, warn_on_error,
- add_maintainers, limit, get_maintainer_script):
+ add_maintainers, limit, get_maintainer_script, alias,
+ cwd=None):
"""Make a cc file for us to use for per-commit Cc automation
Also stores in self._generated_cc to make ShowActions() faster.
@@ -299,6 +329,10 @@ class Series(dict):
limit (int): Limit the length of the Cc list (None if no limit)
get_maintainer_script (str): The file name of the get_maintainer.pl
script (or compatible).
+ alias (dict): Alias dictionary
+ key: alias
+ value: list of aliases or email addresses
+ cwd (str): Path to use for patch filenames (None to use current dir)
Return:
Filename of temp file created
"""
@@ -313,7 +347,8 @@ class Series(dict):
commit.seq = i
commit.future = executor.submit(
self.GetCcForCommit, commit, process_tags, warn_on_error,
- add_maintainers, limit, get_maintainer_script, all_skips)
+ add_maintainers, limit, get_maintainer_script, all_skips,
+ alias, cwd)
# Show progress any commits that are taking forever
lastlen = 0
@@ -344,7 +379,8 @@ class Series(dict):
print(col.build(col.YELLOW, f'Skipping "{x}"'))
if cover_fname:
- cover_cc = gitutil.build_email_list(self.get('cover_cc', ''))
+ cover_cc = gitutil.build_email_list(
+ self.get('cover_cc', ''), alias)
cover_cc = list(set(cover_cc + all_ccs))
if limit is not None:
cover_cc = cover_cc[:limit]
@@ -360,8 +396,10 @@ class Series(dict):
This will later appear in the change log.
Args:
- version: version number to add change list to
- info: change line for this version
+ version (int): version number to add change list to
+ commit (Commit): Commit this relates to, or None if related to a
+ cover letter
+ info (str): change lines for this version (separated by \n)
"""
if not self.changes.get(version):
self.changes[version] = []
@@ -392,3 +430,58 @@ class Series(dict):
if self.get('postfix'):
postfix = ' %s' % self['postfix']
return '%s%sPATCH%s%s' % (git_prefix, prefix, postfix, version)
+
+ def get_links(self, links_str=None, cur_version=None):
+ """Look up the patchwork links for each version
+
+ Args:
+ links_str (str): Links string to parse, or None to use self.links
+ cur_version (int): Default version to assume for un-versioned links,
+ or None to use self.version
+
+ Return:
+ dict:
+ key (int): Version number
+ value (str): Link string
+ """
+ if links_str is None:
+ links_str = self.links if 'links' in self else ''
+ if cur_version is None:
+ cur_version = int(self.version) if 'version' in self else 1
+ assert isinstance(cur_version, int)
+ links = {}
+ for item in links_str.split():
+ if ':' in item:
+ version, link = item.split(':')
+ links[int(version)] = link
+ else:
+ links[cur_version] = item
+ return links
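+
+ # Example (hypothetical values): with version 3 checked out,
+ # get_links('2:4433 1:2872 9999') returns {2: '4433', 1: '2872', 3: '9999'},
+ # since the unversioned item is assigned to the current version.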
+
+ def build_links(self, links):
+ """Build a string containing the links
+
+ Args:
+ links (dict):
+ key (int): Version number
+ value (str): Link string
+
+ Return:
+ str: Link string, e.g. '2:4433 1:2872'
+ """
+ out = ''
+ for vers in sorted(links.keys(), reverse=True):
+ out += f' {vers}:{links[vers]}'
+ return out[1:]
+
+ def get_link_for_version(self, find_vers, links_str=None):
+ """Look up the patchwork link for a particular version
+
+ Args:
+ find_vers (int): Version to find
+ links_str (str): Links string to parse, or None to use self.links
+
+ Return:
+ str: Series-links entry for that version, or None if not found
+ """
+ return self.get_links(links_str).get(find_vers)
diff --git a/tools/patman/settings.py b/tools/patman/settings.py
index d66b22be1df..17229e0d823 100644
--- a/tools/patman/settings.py
+++ b/tools/patman/settings.py
@@ -9,8 +9,10 @@ except Exception:
import ConfigParser
import argparse
+from io import StringIO
import os
import re
+import sys
from u_boot_pylib import gitutil
@@ -226,7 +228,7 @@ nxp = Zhikang Zhang <zhikang.zhang@nxp.com>
f.close()
-def _UpdateDefaults(main_parser, config):
+def _UpdateDefaults(main_parser, config, argv):
"""Update the given OptionParser defaults based on config.
We'll walk through all of the settings from all parsers.
@@ -242,6 +244,7 @@ def _UpdateDefaults(main_parser, config):
updated.
config: An instance of _ProjectConfigParser that we will query
for settings.
+ argv (list of str or None): Arguments to parse
"""
# Find all the parsers and subparsers
parsers = [main_parser]
@@ -252,10 +255,45 @@ def _UpdateDefaults(main_parser, config):
# Collect the defaults from each parser
defaults = {}
parser_defaults = []
+ argv = list(argv)
+ orig_argv = argv
+
+ bad = False
+ full_parser_list = []
for parser in parsers:
- pdefs = parser.parse_known_args()[0]
- parser_defaults.append(pdefs)
- defaults.update(vars(pdefs))
+ argv_list = [orig_argv]
+ special_cases = []
+ if hasattr(parser, 'defaults_cmds'):
+ special_cases = parser.defaults_cmds
+ for action in parser._actions:
+ if action.choices:
+ argv_list = []
+ for choice in action.choices:
+ argv = None
+ for case in special_cases:
+ if case[0] == choice:
+ argv = case
+ argv_list.append(argv or [choice])
+
+ for argv in argv_list:
+ parser.message = None
+ old_val = parser.catch_error
+ try:
+ parser.catch_error = True
+ pdefs = parser.parse_known_args(argv)[0]
+ finally:
+ parser.catch_error = old_val
+
+ # if parser.message:
+ # print('bad', argv, parser.message)
+ # bad = True
+
+ parser_defaults.append(pdefs)
+ defaults.update(vars(pdefs))
+ full_parser_list.append(parser)
+ if bad:
+ print('Internal parsing error')
+ sys.exit(1)
# Go through the settings and collect defaults
for name, val in config.items('settings'):
@@ -270,12 +308,18 @@ def _UpdateDefaults(main_parser, config):
defaults[name] = val
else:
print("WARNING: Unknown setting %s" % name)
+ if 'cmd' in defaults:
+ del defaults['cmd']
+ if 'subcmd' in defaults:
+ del defaults['subcmd']
# Set all the defaults and manually propagate them to subparsers
main_parser.set_defaults(**defaults)
- for parser, pdefs in zip(parsers, parser_defaults):
+ assert len(full_parser_list) == len(parser_defaults)
+ for parser, pdefs in zip(full_parser_list, parser_defaults):
parser.set_defaults(**{k: v for k, v in defaults.items()
if k in pdefs})
+ return defaults
def _ReadAliasFile(fname):
@@ -334,7 +378,7 @@ def GetItems(config, section):
return []
-def Setup(parser, project_name, config_fname=None):
+def Setup(parser, project_name, argv, config_fname=None):
"""Set up the settings module by reading config files.
Unless `config_fname` is specified, a `.patman` config file local
@@ -347,8 +391,9 @@ def Setup(parser, project_name, config_fname=None):
parser: The parser to update.
project_name: Name of project that we're working on; we'll look
for sections named "project_section" as well.
- config_fname: Config filename to read. An error is raised if it
- does not exist.
+ config_fname: Config filename to read, or None for default, or False
+ for an empty config. An error is raised if it does not exist.
+ argv (list of str or None): Arguments to parse, or None for default
"""
# First read the git alias file if available
_ReadAliasFile('doc/git-mailrc')
@@ -357,12 +402,16 @@ def Setup(parser, project_name, config_fname=None):
if config_fname and not os.path.exists(config_fname):
raise Exception(f'provided {config_fname} does not exist')
- if not config_fname:
+ if config_fname is None:
config_fname = '%s/.patman' % os.getenv('HOME')
- has_config = os.path.exists(config_fname)
+ git_local_config_fname = os.path.join(gitutil.get_top_level() or '',
+ '.patman')
- git_local_config_fname = os.path.join(gitutil.get_top_level(), '.patman')
- has_git_local_config = os.path.exists(git_local_config_fname)
+ has_config = False
+ has_git_local_config = False
+ if config_fname is not False:
+ has_config = os.path.exists(config_fname)
+ has_git_local_config = os.path.exists(git_local_config_fname)
# Read the git local config last, so that its values override
# those of the global config, if any.
@@ -371,7 +420,7 @@ def Setup(parser, project_name, config_fname=None):
if has_git_local_config:
config.read(git_local_config_fname)
- if not (has_config or has_git_local_config):
+ if config_fname is not False and not (has_config or has_git_local_config):
print("No config file found.\nCreating ~/.patman...\n")
CreatePatmanConfigFile(config_fname)
@@ -382,7 +431,7 @@ def Setup(parser, project_name, config_fname=None):
for name, value in GetItems(config, 'bounces'):
bounces.add(value)
- _UpdateDefaults(parser, config)
+ return _UpdateDefaults(parser, config, argv)
# These are the aliases we understand, indexed by alias. Each member is a list.
diff --git a/tools/patman/status.py b/tools/patman/status.py
index 5fb436e08ff..967fef3ad6e 100644
--- a/tools/patman/status.py
+++ b/tools/patman/status.py
@@ -8,141 +8,64 @@ Allows creation of a new branch based on the old but with the review tags
collected from patchwork.
"""
-import collections
+import asyncio
+from collections import defaultdict
import concurrent.futures
from itertools import repeat
-import re
+import aiohttp
import pygit2
-import requests
-from patman import patchstream
-from patman.patchstream import PatchStream
from u_boot_pylib import terminal
from u_boot_pylib import tout
+from patman import patchstream
+from patman import patchwork
-# Patches which are part of a multi-patch series are shown with a prefix like
-# [prefix, version, sequence], for example '[RFC, v2, 3/5]'. All but the last
-# part is optional. This decodes the string into groups. For single patches
-# the [] part is not present:
-# Groups: (ignore, ignore, ignore, prefix, version, sequence, subject)
-RE_PATCH = re.compile(r'(\[(((.*),)?(.*),)?(.*)\]\s)?(.*)$')
-
-# This decodes the sequence string into a patch number and patch count
-RE_SEQ = re.compile(r'(\d+)/(\d+)')
-def to_int(vals):
- """Convert a list of strings into integers, using 0 if not an integer
+def process_reviews(content, comment_data, base_rtags):
+ """Process and return review data
Args:
- vals (list): List of strings
-
- Returns:
- list: List of integers, one for each input string
- """
- out = [int(val) if val.isdigit() else 0 for val in vals]
- return out
-
-
-class Patch(dict):
- """Models a patch in patchwork
-
- This class records information obtained from patchwork
-
- Some of this information comes from the 'Patch' column:
-
- [RFC,v2,1/3] dm: Driver and uclass changes for tiny-dm
-
- This shows the prefix, version, seq, count and subject.
-
- The other properties come from other columns in the display.
+ content (str): Content text of the patch itself - see pwork.get_patch()
+ comment_data (list of dict): Comments for the patch - see
+ pwork._get_patch_comments()
+ base_rtags (dict): base review tags (before any comments)
+ key: Response tag (e.g. 'Reviewed-by')
+ value: Set of people who gave that response, each a name/email
+ string
- Properties:
- pid (str): ID of the patch (typically an integer)
- seq (int): Sequence number within series (1=first) parsed from sequence
- string
- count (int): Number of patches in series, parsed from sequence string
- raw_subject (str): Entire subject line, e.g.
- "[1/2,v2] efi_loader: Sort header file ordering"
- prefix (str): Prefix string or None (e.g. 'RFC')
- version (str): Version string or None (e.g. 'v2')
- raw_subject (str): Raw patch subject
- subject (str): Patch subject with [..] part removed (same as commit
- subject)
+ Return: tuple:
+ dict: new review tags (noticed since the base_rtags)
+ key: Response tag (e.g. 'Reviewed-by')
+ value: Set of people who gave that response, each a name/email
+ string
+ list of patchwork.Review: reviews received on the patch
"""
- def __init__(self, pid):
- super().__init__()
- self.id = pid # Use 'id' to match what the Rest API provides
- self.seq = None
- self.count = None
- self.prefix = None
- self.version = None
- self.raw_subject = None
- self.subject = None
-
- # These make us more like a dictionary
- def __setattr__(self, name, value):
- self[name] = value
-
- def __getattr__(self, name):
- return self[name]
-
- def __hash__(self):
- return hash(frozenset(self.items()))
-
- def __str__(self):
- return self.raw_subject
-
- def parse_subject(self, raw_subject):
- """Parse the subject of a patch into its component parts
-
- See RE_PATCH for details. The parsed info is placed into seq, count,
- prefix, version, subject
-
- Args:
- raw_subject (str): Subject string to parse
-
- Raises:
- ValueError: the subject cannot be parsed
- """
- self.raw_subject = raw_subject.strip()
- mat = RE_PATCH.search(raw_subject.strip())
- if not mat:
- raise ValueError("Cannot parse subject '%s'" % raw_subject)
- self.prefix, self.version, seq_info, self.subject = mat.groups()[3:]
- mat_seq = RE_SEQ.match(seq_info) if seq_info else False
- if mat_seq is None:
- self.version = seq_info
- seq_info = None
- if self.version and not self.version.startswith('v'):
- self.prefix = self.version
- self.version = None
- if seq_info:
- if mat_seq:
- self.seq = int(mat_seq.group(1))
- self.count = int(mat_seq.group(2))
- else:
- self.seq = 1
- self.count = 1
-
+ pstrm = patchstream.PatchStream.process_text(content, True)
+ rtags = defaultdict(set)
+ for response, people in pstrm.commit.rtags.items():
+ rtags[response].update(people)
-class Review:
- """Represents a single review email collected in Patchwork
+ reviews = []
+ for comment in comment_data:
+ pstrm = patchstream.PatchStream.process_text(comment['content'], True)
+ if pstrm.snippets:
+ submitter = comment['submitter']
+ person = f"{submitter['name']} <{submitter['email']}>"
+ reviews.append(patchwork.Review(person, pstrm.snippets))
+ for response, people in pstrm.commit.rtags.items():
+ rtags[response].update(people)
- Patches can attract multiple reviews. Each consists of an author/date and
- a variable number of 'snippets', which are groups of quoted and unquoted
- text.
- """
- def __init__(self, meta, snippets):
- """Create new Review object
+ # Find the tags that are not in the commit
+ new_rtags = defaultdict(set)
+ for tag, people in rtags.items():
+ for who in people:
+ is_new = (tag not in base_rtags or
+ who not in base_rtags[tag])
+ if is_new:
+ new_rtags[tag].add(who)
+ return new_rtags, reviews
- Args:
- meta (str): Text containing review author and date
- snippets (list): List of snippets in th review, each a list of text
- lines
- """
- self.meta = ' : '.join([line for line in meta.splitlines() if line])
- self.snippets = snippets
def compare_with_series(series, patches):
"""Compare a list of patches with a series it came from
@@ -151,7 +74,7 @@ def compare_with_series(series, patches):
Args:
series (Series): Series to compare against
- patches (:type: list of Patch): list of Patch objects to compare with
+ patches (list of Patch): list of Patch objects to compare with
Returns:
tuple
@@ -179,7 +102,6 @@ def compare_with_series(series, patches):
warnings.append("Cannot find patch for commit %d ('%s')" %
(seq + 1, cmt.subject))
-
# Check the names match
commit_for_patch = {}
all_commits = set(series.commits)
@@ -198,132 +120,12 @@ def compare_with_series(series, patches):
return patch_for_commit, commit_for_patch, warnings
-def call_rest_api(url, subpath):
- """Call the patchwork API and return the result as JSON
-
- Args:
- url (str): URL of patchwork server, e.g. 'https://patchwork.ozlabs.org'
- subpath (str): URL subpath to use
-
- Returns:
- dict: Json result
-
- Raises:
- ValueError: the URL could not be read
- """
- full_url = '%s/api/1.2/%s' % (url, subpath)
- response = requests.get(full_url)
- if response.status_code != 200:
- raise ValueError("Could not read URL '%s'" % full_url)
- return response.json()
-
-def collect_patches(series, series_id, url, rest_api=call_rest_api):
- """Collect patch information about a series from patchwork
-
- Uses the Patchwork REST API to collect information provided by patchwork
- about the status of each patch.
-
- Args:
- series (Series): Series object corresponding to the local branch
- containing the series
- series_id (str): Patch series ID number
- url (str): URL of patchwork server, e.g. 'https://patchwork.ozlabs.org'
- rest_api (function): API function to call to access Patchwork, for
- testing
-
- Returns:
- list: List of patches sorted by sequence number, each a Patch object
-
- Raises:
- ValueError: if the URL could not be read or the web page does not follow
- the expected structure
- """
- data = rest_api(url, 'series/%s/' % series_id)
-
- # Get all the rows, which are patches
- patch_dict = data['patches']
- count = len(patch_dict)
- num_commits = len(series.commits)
- if count != num_commits:
- tout.warning('Warning: Patchwork reports %d patches, series has %d' %
- (count, num_commits))
-
- patches = []
-
- # Work through each row (patch) one at a time, collecting the information
- warn_count = 0
- for pw_patch in patch_dict:
- patch = Patch(pw_patch['id'])
- patch.parse_subject(pw_patch['name'])
- patches.append(patch)
- if warn_count > 1:
- tout.warning(' (total of %d warnings)' % warn_count)
- # Sort patches by patch number
- patches = sorted(patches, key=lambda x: x.seq)
- return patches
-
-def find_new_responses(new_rtag_list, review_list, seq, cmt, patch, url,
- rest_api=call_rest_api):
- """Find new rtags collected by patchwork that we don't know about
-
- This is designed to be run in parallel, once for each commit/patch
-
- Args:
- new_rtag_list (list): New rtags are written to new_rtag_list[seq]
- list, each a dict:
- key: Response tag (e.g. 'Reviewed-by')
- value: Set of people who gave that response, each a name/email
- string
- review_list (list): New reviews are written to review_list[seq]
- list, each a
- List of reviews for the patch, each a Review
- seq (int): Position in new_rtag_list to update
- cmt (Commit): Commit object for this commit
- patch (Patch): Corresponding Patch object for this patch
- url (str): URL of patchwork server, e.g. 'https://patchwork.ozlabs.org'
- rest_api (function): API function to call to access Patchwork, for
- testing
- """
- if not patch:
- return
-
- # Get the content for the patch email itself as well as all comments
- data = rest_api(url, 'patches/%s/' % patch.id)
- pstrm = PatchStream.process_text(data['content'], True)
-
- rtags = collections.defaultdict(set)
- for response, people in pstrm.commit.rtags.items():
- rtags[response].update(people)
-
- data = rest_api(url, 'patches/%s/comments/' % patch.id)
-
- reviews = []
- for comment in data:
- pstrm = PatchStream.process_text(comment['content'], True)
- if pstrm.snippets:
- submitter = comment['submitter']
- person = '%s <%s>' % (submitter['name'], submitter['email'])
- reviews.append(Review(person, pstrm.snippets))
- for response, people in pstrm.commit.rtags.items():
- rtags[response].update(people)
-
- # Find the tags that are not in the commit
- new_rtags = collections.defaultdict(set)
- base_rtags = cmt.rtags
- for tag, people in rtags.items():
- for who in people:
- is_new = (tag not in base_rtags or
- who not in base_rtags[tag])
- if is_new:
- new_rtags[tag].add(who)
- new_rtag_list[seq] = new_rtags
- review_list[seq] = reviews
-
-def show_responses(rtags, indent, is_new):
+def show_responses(col, rtags, indent, is_new):
"""Show rtags collected
Args:
+ col (terminal.Colour): Colour object to use
rtags (dict): review tags to show
key: Response tag (e.g. 'Reviewed-by')
value: Set of people who gave that response, each a name/email string
@@ -333,14 +135,14 @@ def show_responses(rtags, indent, is_new):
Returns:
int: Number of review tags displayed
"""
- col = terminal.Color()
count = 0
for tag in sorted(rtags.keys()):
people = rtags[tag]
for who in sorted(people):
terminal.tprint(indent + '%s %s: ' % ('+' if is_new else ' ', tag),
- newline=False, colour=col.GREEN, bright=is_new)
- terminal.tprint(who, colour=col.WHITE, bright=is_new)
+ newline=False, colour=col.GREEN, bright=is_new,
+ col=col)
+ terminal.tprint(who, colour=col.WHITE, bright=is_new, col=col)
count += 1
return count
@@ -409,9 +211,21 @@ def create_branch(series, new_rtag_list, branch, dest_branch, overwrite,
[parent.target])
return num_added
-def check_patchwork_status(series, series_id, branch, dest_branch, force,
- show_comments, url, rest_api=call_rest_api,
- test_repo=None):
+
+def check_patch_count(num_commits, num_patches):
+ """Check the number of commits and patches agree
+
+ Args:
+ num_commits (int): Number of commits
+ num_patches (int): Number of patches
+ """
+ if num_patches != num_commits:
+ tout.warning(f'Warning: Patchwork reports {num_patches} patches, '
+ f'series has {num_commits}')
+
+
+def do_show_status(series, cover, patches, show_comments, show_cover_comments,
+ col, warnings_on_stderr=True):
"""Check the status of a series on Patchwork
This finds review tags and comments for a series in Patchwork, displaying
@@ -419,36 +233,67 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force,
Args:
series (Series): Series object for the existing branch
- series_id (str): Patch series ID number
- branch (str): Existing branch to update, or None
- dest_branch (str): Name of new branch to create, or None
- force (bool): True to force overwriting dest_branch if it exists
+ cover (COVER): Cover letter info, or None if none
+ patches (list of Patch): Patches sorted by sequence number
show_comments (bool): True to show the comments on each patch
- url (str): URL of patchwork server, e.g. 'https://patchwork.ozlabs.org'
- rest_api (function): API function to call to access Patchwork, for
- testing
- test_repo (pygit2.Repository): Repo to use (use None unless testing)
+ show_cover_comments (bool): True to show the comments on the cover
+ letter
+ col (terminal.Colour): Colour object
+
+ Return: tuple:
+ int: Number of new review tags to add
+ list: List of review tags to add, one item for each commit, each a
+ dict:
+ key: Response tag (e.g. 'Reviewed-by')
+ value: Set of people who gave that response, each a name/email
+ string
"""
- patches = collect_patches(series, series_id, url, rest_api)
- col = terminal.Color()
+ compare = []
+ for pw_patch in patches:
+ patch = patchwork.Patch(pw_patch.id)
+ patch.parse_subject(pw_patch.series_data['name'])
+ compare.append(patch)
+
count = len(series.commits)
new_rtag_list = [None] * count
review_list = [None] * count
- patch_for_commit, _, warnings = compare_with_series(series, patches)
- for warn in warnings:
- tout.warning(warn)
-
- patch_list = [patch_for_commit.get(c) for c in range(len(series.commits))]
-
- with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor:
- futures = executor.map(
- find_new_responses, repeat(new_rtag_list), repeat(review_list),
- range(count), series.commits, patch_list, repeat(url),
- repeat(rest_api))
- for fresponse in futures:
- if fresponse:
- raise fresponse.exception()
+ with terminal.pager():
+ patch_for_commit, _, warnings = compare_with_series(series, compare)
+ for warn in warnings:
+ tout.do_output(tout.WARNING if warnings_on_stderr else tout.INFO,
+ warn)
+
+ for seq, pw_patch in enumerate(patches):
+ compare[seq].patch = pw_patch
+
+ for i in range(count):
+ pat = patch_for_commit.get(i)
+ if pat:
+ patch_data = pat.patch.data
+ comment_data = pat.patch.comments
+ new_rtag_list[i], review_list[i] = process_reviews(
+ patch_data['content'], comment_data,
+ series.commits[i].rtags)
+ num_to_add = _do_show_status(
+ series, cover, patch_for_commit, show_comments,
+ show_cover_comments, new_rtag_list, review_list, col)
+
+ return num_to_add, new_rtag_list
+
+
+def _do_show_status(series, cover, patch_for_commit, show_comments,
+ show_cover_comments, new_rtag_list, review_list, col):
+ if cover and show_cover_comments:
+ terminal.tprint(f'Cov {cover.name}', colour=col.BLACK, col=col,
+ bright=False, back=col.YELLOW)
+ for seq, comment in enumerate(cover.comments):
+ submitter = comment['submitter']
+ person = '%s <%s>' % (submitter['name'], submitter['email'])
+ terminal.tprint(f"From: {person}: {comment['date']}",
+ colour=col.RED, col=col)
+ print(comment['content'])
+ print()
num_to_add = 0
for seq, cmt in enumerate(series.commits):
@@ -456,32 +301,105 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force,
if not patch:
continue
terminal.tprint('%3d %s' % (patch.seq, patch.subject[:50]),
- colour=col.BLUE)
+ colour=col.YELLOW, col=col)
cmt = series.commits[seq]
base_rtags = cmt.rtags
new_rtags = new_rtag_list[seq]
indent = ' ' * 2
- show_responses(base_rtags, indent, False)
- num_to_add += show_responses(new_rtags, indent, True)
+ show_responses(col, base_rtags, indent, False)
+ num_to_add += show_responses(col, new_rtags, indent, True)
if show_comments:
for review in review_list[seq]:
- terminal.tprint('Review: %s' % review.meta, colour=col.RED)
+ terminal.tprint('Review: %s' % review.meta, colour=col.RED,
+ col=col)
for snippet in review.snippets:
for line in snippet:
quoted = line.startswith('>')
- terminal.tprint(' %s' % line,
- colour=col.MAGENTA if quoted else None)
+ terminal.tprint(
+ f' {line}',
+ colour=col.MAGENTA if quoted else None, col=col)
terminal.tprint()
+ return num_to_add
- terminal.tprint("%d new response%s available in patchwork%s" %
- (num_to_add, 's' if num_to_add != 1 else '',
- '' if dest_branch
- else ' (use -d to write them to a new branch)'))
+
+def show_status(series, branch, dest_branch, force, cover, patches,
+ show_comments, show_cover_comments, test_repo=None):
+ """Check the status of a series on Patchwork
+
+ This finds review tags and comments for a series in Patchwork, displaying
+ them to show what is new compared to the local series.
+
+ Args:
+ series (Series): Series object for the existing branch
+ branch (str): Existing branch to update, or None
+ dest_branch (str): Name of new branch to create, or None
+ force (bool): True to force overwriting dest_branch if it exists
+ cover (COVER): Cover letter info, or None if none
+ patches (list of Patch): Patches sorted by sequence number
+ show_comments (bool): True to show the comments on each patch
+ show_cover_comments (bool): True to show the comments on the cover letter
+ test_repo (pygit2.Repository): Repo to use (use None unless testing)
+ """
+ col = terminal.Color()
+ check_patch_count(len(series.commits), len(patches))
+ num_to_add, new_rtag_list = do_show_status(
+ series, cover, patches, show_comments, show_cover_comments, col)
+
+ if not dest_branch and num_to_add:
+ msg = ' (use -d to write them to a new branch)'
+ else:
+ msg = ''
+ terminal.tprint(
+ f"{num_to_add} new response{'s' if num_to_add != 1 else ''} "
+ f'available in patchwork{msg}')
if dest_branch:
num_added = create_branch(series, new_rtag_list, branch,
dest_branch, force, test_repo)
terminal.tprint(
- "%d response%s added from patchwork into new branch '%s'" %
- (num_added, 's' if num_added != 1 else '', dest_branch))
+ f"{num_added} response{'s' if num_added != 1 else ''} added "
+ f"from patchwork into new branch '{dest_branch}'")
+
+
+async def check_status(link, pwork, read_comments=False,
+ read_cover_comments=False):
+ """Set up an HTTP session and get the required state
+
+ Args:
+ link (str): Patch series ID number
+ pwork (Patchwork): Patchwork object to use for reading
+ read_comments (bool): True to read comments and state for each patch
+ read_cover_comments (bool): True to read comments on the cover letter
+
+ Return: tuple:
+ COVER object, or None if none or not read_cover_comments
+ list of PATCH objects
+ """
+ async with aiohttp.ClientSession() as client:
+ return await pwork.series_get_state(client, link, read_comments,
+ read_cover_comments)
+
+
+def check_and_show_status(series, link, branch, dest_branch, force,
+ show_comments, show_cover_comments, pwork,
+ test_repo=None):
+ """Read the series status from patchwork and show it to the user
+
+ Args:
+ series (Series): Series object for the existing branch
+ link (str): Patch series ID number
+ branch (str): Existing branch to update, or None
+ dest_branch (str): Name of new branch to create, or None
+ force (bool): True to force overwriting dest_branch if it exists
+ show_comments (bool): True to show the comments on each patch
+ show_cover_comments (bool): True to show the comments on the letter
+ pwork (Patchwork): Patchwork object to use for reading
+ test_repo (pygit2.Repository): Repo to use (use None unless testing)
+ """
+ loop = asyncio.get_event_loop()
+ cover, patches = loop.run_until_complete(check_status(
+ link, pwork, True, show_cover_comments))
+
+ show_status(series, branch, dest_branch, force, cover, patches,
+ show_comments, show_cover_comments, test_repo=test_repo)
diff --git a/tools/patman/test_checkpatch.py b/tools/patman/test_checkpatch.py
index 3bf16febbf6..4e8d163184e 100644
--- a/tools/patman/test_checkpatch.py
+++ b/tools/patman/test_checkpatch.py
@@ -137,7 +137,7 @@ Signed-off-by: Simon Glass <sjg@chromium.org>
class TestPatch(unittest.TestCase):
"""Test the u_boot_line() function in checkpatch.pl"""
- def test_basic(self):
+ def test_filter(self):
"""Test basic filter operation"""
data='''
diff --git a/tools/patman/test_common.py b/tools/patman/test_common.py
new file mode 100644
index 00000000000..7da995dda22
--- /dev/null
+++ b/tools/patman/test_common.py
@@ -0,0 +1,254 @@
+# SPDX-License-Identifier: GPL-2.0+
+#
+# Copyright 2025 Simon Glass <sjg@chromium.org>
+#
+"""Functional tests for checking that patman behaves correctly"""
+
+import os
+import shutil
+import tempfile
+
+import pygit2
+
+from u_boot_pylib import gitutil
+from u_boot_pylib import terminal
+from u_boot_pylib import tools
+from u_boot_pylib import tout
+
+
+class TestCommon:
+ """Contains common test functions"""
+ leb = (b'Lord Edmund Blackadd\xc3\xabr <weasel@blackadder.org>'.
+ decode('utf-8'))
+
+ # Fake patchwork project ID for U-Boot
+ PROJ_ID = 6
+ PROJ_LINK_NAME = 'uboot'
+ SERIES_ID_FIRST_V3 = 31
+ SERIES_ID_SECOND_V1 = 456
+ SERIES_ID_SECOND_V2 = 457
+ TITLE_SECOND = 'Series for my board'
+
+ verbosity = False
+ preserve_outdirs = False
+
+ @classmethod
+ def setup_test_args(cls, preserve_indir=False, preserve_outdirs=False,
+ toolpath=None, verbosity=None, no_capture=False):
+ """Accept arguments controlling test execution
+
+ Args:
+ preserve_indir (bool): not used by patman
+ preserve_outdirs (bool): Preserve the output directories used by
+ tests. Each test has its own, so this is normally only useful
+ when running a single test.
+ toolpath (str): not used by patman
+ verbosity (int): verbosity to use (0 means tout.INFO, 1 means
+ tout.DEBUG)
+ no_capture (bool): True to output all captured text after capturing
+ completes
+ """
+ del preserve_indir
+ cls.preserve_outdirs = preserve_outdirs
+ cls.toolpath = toolpath
+ cls.verbosity = verbosity
+ cls.no_capture = no_capture
+
+ def __init__(self):
+ super().__init__()
+ self.repo = None
+ self.tmpdir = None
+ self.gitdir = None
+
+ def setUp(self):
+ """Set up the test temporary dir and git dir"""
+ self.tmpdir = tempfile.mkdtemp(prefix='patman.')
+ self.gitdir = os.path.join(self.tmpdir, '.git')
+ tout.init(tout.DEBUG if self.verbosity else tout.INFO,
+ allow_colour=False)
+
+ def tearDown(self):
+ """Delete the temporary dir"""
+ if self.preserve_outdirs:
+ print(f'Output dir: {self.tmpdir}')
+ else:
+ shutil.rmtree(self.tmpdir)
+ terminal.set_print_test_mode(False)
+
+ def make_commit_with_file(self, subject, body, fname, text):
+ """Create a file and add it to the git repo with a new commit
+
+ Args:
+ subject (str): Subject for the commit
+ body (str): Body text of the commit
+ fname (str): Filename of file to create
+ text (str): Text to put into the file
+ """
+ path = os.path.join(self.tmpdir, fname)
+ tools.write_file(path, text, binary=False)
+ index = self.repo.index
+ index.add(fname)
+ # pylint doesn't seem to find this
+ # pylint: disable=E1101
+ author = pygit2.Signature('Test user', 'test@email.com')
+ committer = author
+ tree = index.write_tree()
+ message = subject + '\n' + body
+ self.repo.create_commit('HEAD', author, committer, message, tree,
+ [self.repo.head.target])
+
+ def make_git_tree(self):
+ """Make a simple git tree suitable for testing
+
+ It has four branches:
+ 'base' has two commits: PCI, main
+ 'first' has base as upstream and two more commits: I2C, SPI
+ 'second' has base as upstream and three more: video, serial, bootm
+ 'third4' has second as upstream and four more: usb, main, test, lib
+
+ Returns:
+ pygit2.Repository: repository
+ """
+ os.environ['GIT_CONFIG_GLOBAL'] = '/dev/null'
+ os.environ['GIT_CONFIG_SYSTEM'] = '/dev/null'
+
+ repo = pygit2.init_repository(self.gitdir)
+ self.repo = repo
+ new_tree = repo.TreeBuilder().write()
+
+ common = ['git', f'--git-dir={self.gitdir}', 'config']
+ tools.run(*(common + ['user.name', 'Dummy']), cwd=self.gitdir)
+ tools.run(*(common + ['user.email', 'dumdum@dummy.com']),
+ cwd=self.gitdir)
+
+ # pylint doesn't seem to find this
+ # pylint: disable=E1101
+ author = pygit2.Signature('Test user', 'test@email.com')
+ committer = author
+ _ = repo.create_commit('HEAD', author, committer, 'Created master',
+ new_tree, [])
+
+ self.make_commit_with_file('Initial commit', '''
+Add a README
+
+''', 'README', '''This is the README file
+describing this project
+in very little detail''')
+
+ self.make_commit_with_file('pci: PCI implementation', '''
+Here is a basic PCI implementation
+
+''', 'pci.c', '''This is a file
+it has some contents
+and some more things''')
+ self.make_commit_with_file('main: Main program', '''
+Hello here is the second commit.
+''', 'main.c', '''This is the main file
+there is very little here
+but we can always add more later
+if we want to
+
+Series-to: u-boot
+Series-cc: Barry Crump <bcrump@whataroa.nz>
+''')
+ base_target = repo.revparse_single('HEAD')
+ self.make_commit_with_file('i2c: I2C things', '''
+This has some stuff to do with I2C
+''', 'i2c.c', '''And this is the file contents
+with some I2C-related things in it''')
+ self.make_commit_with_file('spi: SPI fixes', f'''
+SPI needs some fixes
+and here they are
+
+Signed-off-by: {self.leb}
+
+Series-to: u-boot
+Commit-notes:
+title of the series
+This is the cover letter for the series
+with various details
+END
+''', 'spi.c', '''Some fixes for SPI in this
+file to make SPI work
+better than before''')
+ first_target = repo.revparse_single('HEAD')
+
+ target = repo.revparse_single('HEAD~2')
+ # pylint doesn't seem to find this
+ # pylint: disable=E1101
+ repo.reset(target.oid, pygit2.enums.ResetMode.HARD)
+ self.make_commit_with_file('video: Some video improvements', '''
+Fix up the video so that
+it looks more purple. Purple is
+a very nice colour.
+''', 'video.c', '''More purple here
+Purple and purple
+Even more purple
+Could not be any more purple''')
+ self.make_commit_with_file('serial: Add a serial driver', f'''
+Here is the serial driver
+for my chip.
+
+Cover-letter:
+{self.TITLE_SECOND}
+This series implements support
+for my glorious board.
+END
+Series-to: u-boot
+Series-links: {self.SERIES_ID_SECOND_V1}
+''', 'serial.c', '''The code for the
+serial driver is here''')
+ self.make_commit_with_file('bootm: Make it boot', '''
+This makes my board boot
+with a fix to the bootm
+command
+''', 'bootm.c', '''Fix up the bootm
+command to make the code as
+complicated as possible''')
+ second_target = repo.revparse_single('HEAD')
+
+ self.make_commit_with_file('usb: Try out the new DMA feature', '''
+This is just a fix that
+ensures that DMA is enabled
+''', 'usb-uclass.c', '''Here is the USB
+implementation and as you can see it
+it very nice''')
+ self.make_commit_with_file('main: Change to the main program', '''
+Here we adjust the main
+program just a little bit
+''', 'main.c', '''This is the text of the main program''')
+ self.make_commit_with_file('test: Check that everything works', '''
+This checks that all the
+various things we've been
+adding actually work.
+''', 'test.c', '''Here is the test code and it seems OK''')
+ self.make_commit_with_file('lib: Sort out the extra library', '''
+The extra library is currently
+broken. Fix it so that we can
+use it in various place.
+''', 'lib.c', '''Some library code is here
+and a little more''')
+ third_target = repo.revparse_single('HEAD')
+
+ repo.branches.local.create('first', first_target)
+ repo.config.set_multivar('branch.first.remote', '', '.')
+ repo.config.set_multivar('branch.first.merge', '', 'refs/heads/base')
+
+ repo.branches.local.create('second', second_target)
+ repo.config.set_multivar('branch.second.remote', '', '.')
+ repo.config.set_multivar('branch.second.merge', '', 'refs/heads/base')
+
+ repo.branches.local.create('base', base_target)
+
+ repo.branches.local.create('third4', third_target)
+ repo.config.set_multivar('branch.third4.remote', '', '.')
+ repo.config.set_multivar('branch.third4.merge', '',
+ 'refs/heads/second')
+
+ target = repo.lookup_reference('refs/heads/first')
+ repo.checkout(target, strategy=pygit2.GIT_CHECKOUT_FORCE)
+ target = repo.revparse_single('HEAD')
+ repo.reset(target.oid, pygit2.enums.ResetMode.HARD)
+
+ self.assertFalse(gitutil.check_dirty(self.gitdir, self.tmpdir))
+ return repo
diff --git a/tools/patman/test_cseries.py b/tools/patman/test_cseries.py
new file mode 100644
index 00000000000..e58f2f68333
--- /dev/null
+++ b/tools/patman/test_cseries.py
@@ -0,0 +1,3684 @@
+# SPDX-License-Identifier: GPL-2.0+
+
+# Copyright 2025 Simon Glass <sjg@chromium.org>
+#
+"""Functional tests for checking that patman behaves correctly"""
+
+import asyncio
+from datetime import datetime
+import os
+import re
+import unittest
+from unittest import mock
+
+import pygit2
+
+from u_boot_pylib import cros_subprocess
+from u_boot_pylib import gitutil
+from u_boot_pylib import terminal
+from u_boot_pylib import tools
+from patman import cmdline
+from patman import control
+from patman import cser_helper
+from patman import cseries
+from patman.database import Pcommit
+from patman import database
+from patman import patchstream
+from patman.patchwork import Patchwork
+from patman.test_common import TestCommon
+
+HASH_RE = r'[0-9a-f]+'
+#pylint: disable=protected-access
+
+class Namespace:
+ """Simple namespace for use instead of argparse in tests"""
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+
+class TestCseries(unittest.TestCase, TestCommon):
+ """Test cases for the Cseries class
+
+ In some cases there are tests for both direct Cseries calls and for
+ accessing the feature via the cmdline. It is possible to do this with mocks
+ but it is a bit painful to catch all cases that way. The approach here is
+ to create a check_...() function which yields back to the test routines to
+ make the call or run the command. The check_...() function typically yields
+ a Cseries while it is working and False when it is done, allowing the test
+ to check that everything is finished.
+
+ Some subcommands don't have command tests, where these would be duplicative.
+ Some tests avoid using the check_...() function and just write the test out
+ twice, where using a coroutine would be too confusing.
+
+ Note the -N flag which sort-of disables capturing of output, although in
+ fact it is still captured, just output at the end. When debugging the code
+ you may need to temporarily comment out the 'with terminal.capture()'
+ parts.
+ """
+ def setUp(self):
+ TestCommon.setUp(self)
+ self.autolink_extra = None
+ self.loop = asyncio.get_event_loop()
+ self.cser = None
+
+ def tearDown(self):
+ TestCommon.tearDown(self)
+
+ class _Stage:
+ def __init__(self, name):
+ self.name = name
+
+ def __enter__(self):
+ if not terminal.USE_CAPTURE:
+ print(f"--- starting '{self.name}'")
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if not terminal.USE_CAPTURE:
+ print(f"--- finished '{self.name}'\n")
+
+ def stage(self, name):
+ """Context manager to count requests across a range of patchwork calls
+
+ Args:
+ name (str): Stage name
+
+ Return:
+ _Stage: context object
+
+ Usage:
+ with self.stage('name'):
+ ...do things
+
+ Note that the output only appears if the -N flag is used
+ """
+ return self._Stage(name)
+
+ def assert_finished(self, itr):
+ """Assert that an iterator is finished
+
+ Args:
+ itr (iter): Iterator to check
+ """
+ self.assertFalse(list(itr))
+
+ def test_database_setup(self):
+ """Check setting up of the series database"""
+ cser = cseries.Cseries(self.tmpdir)
+ with terminal.capture() as (_, err):
+ cser.open_database()
+ self.assertEqual(f'Creating new database {self.tmpdir}/.patman.db',
+ err.getvalue().strip())
+ res = cser.db.execute("SELECT name FROM series")
+ self.assertTrue(res)
+ cser.close_database()
+
+ def get_database(self):
+ """Open the database and silence the warning output
+
+ Return:
+ Cseries: Resulting Cseries object
+ """
+ cser = cseries.Cseries(self.tmpdir, terminal.COLOR_NEVER)
+ with terminal.capture() as _:
+ cser.open_database()
+ self.cser = cser
+ return cser
+
+ def get_cser(self):
+ """Set up a git tree and database
+
+ Return:
+ Cseries: object
+ """
+ self.make_git_tree()
+ return self.get_database()
+
+ def db_close(self):
+ """Close the database if open"""
+ if self.cser and self.cser.db.cur:
+ self.cser.close_database()
+ return True
+ return False
+
+ def db_open(self):
+ """Open the database if closed"""
+ if self.cser and not self.cser.db.cur:
+ self.cser.open_database()
+
+ def run_args(self, *argv, expect_ret=0, pwork=None, cser=None):
+ """Run patman with the given arguments
+
+ Args:
+ argv (list of str): List of arguments, excluding 'patman'
+ expect_ret (int): Expected return code, used to check errors
+ pwork (Patchwork): Patchwork object to use when executing the
+ command, or None to create one
+ cser (Cseries): Cseries object to use when executing the command,
+ or None to create one
+ """
+ was_open = self.db_close()
+ args = cmdline.parse_args(['-D'] + list(argv), config_fname=False)
+ exit_code = control.do_patman(args, self.tmpdir, pwork, cser)
+ self.assertEqual(expect_ret, exit_code)
+ if was_open:
+ self.db_open()
+
+ def test_series_add(self):
+ """Test adding a new cseries"""
+ cser = self.get_cser()
+ self.assertFalse(cser.db.series_get_dict())
+
+ with terminal.capture() as (out, _):
+ cser.add('first', 'my description', allow_unmarked=True)
+ lines = out.getvalue().strip().splitlines()
+ self.assertEqual(
+ "Adding series 'first' v1: mark False allow_unmarked True",
+ lines[0])
+ self.assertEqual("Added series 'first' v1 (2 commits)", lines[1])
+ self.assertEqual(2, len(lines))
+
+ slist = cser.db.series_get_dict()
+ self.assertEqual(1, len(slist))
+ self.assertEqual('first', slist['first'].name)
+ self.assertEqual('my description', slist['first'].desc)
+
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(1, len(svlist))
+ self.assertEqual(1, svlist[0].idnum)
+ self.assertEqual(1, svlist[0].series_id)
+ self.assertEqual(1, svlist[0].version)
+
+ pclist = cser.get_pcommit_dict()
+ self.assertEqual(2, len(pclist))
+ self.assertIn(1, pclist)
+ self.assertEqual(
+ Pcommit(1, 0, 'i2c: I2C things', 1, None, None, None, None),
+ pclist[1])
+ self.assertEqual(
+ Pcommit(2, 1, 'spi: SPI fixes', 1, None, None, None, None),
+ pclist[2])
+
+ def test_series_not_checked_out(self):
+ """Test adding a new cseries when a different one is checked out"""
+ cser = self.get_cser()
+ self.assertFalse(cser.db.series_get_dict())
+
+ with terminal.capture() as (out, _):
+ cser.add('second', allow_unmarked=True)
+ lines = out.getvalue().strip().splitlines()
+ self.assertEqual(
+ "Adding series 'second' v1: mark False allow_unmarked True",
+ lines[0])
+ self.assertEqual("Added series 'second' v1 (3 commits)", lines[1])
+ self.assertEqual(2, len(lines))
+
+ def test_series_add_manual(self):
+ """Test adding a new cseries with a version number"""
+ cser = self.get_cser()
+ self.assertFalse(cser.db.series_get_dict())
+
+ repo = pygit2.init_repository(self.gitdir)
+ first_target = repo.revparse_single('first')
+ repo.branches.local.create('first2', first_target)
+ repo.config.set_multivar('branch.first2.remote', '', '.')
+ repo.config.set_multivar('branch.first2.merge', '', 'refs/heads/base')
+
+ with terminal.capture() as (out, _):
+ cser.add('first2', 'description', allow_unmarked=True)
+ lines = out.getvalue().splitlines()
+ self.assertEqual(
+ "Adding series 'first' v2: mark False allow_unmarked True",
+ lines[0])
+ self.assertEqual("Added series 'first' v2 (2 commits)", lines[1])
+ self.assertEqual(2, len(lines))
+
+ slist = cser.db.series_get_dict()
+ self.assertEqual(1, len(slist))
+ self.assertEqual('first', slist['first'].name)
+
+ # We should have just one entry, with version 2
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(1, len(svlist))
+ self.assertEqual(1, svlist[0].idnum)
+ self.assertEqual(1, svlist[0].series_id)
+ self.assertEqual(2, svlist[0].version)
+
+ def add_first2(self, checkout):
+ """Add a new first2 branch, a copy of first"""
+ repo = pygit2.init_repository(self.gitdir)
+ first_target = repo.revparse_single('first')
+ repo.branches.local.create('first2', first_target)
+ repo.config.set_multivar('branch.first2.remote', '', '.')
+ repo.config.set_multivar('branch.first2.merge', '', 'refs/heads/base')
+
+ if checkout:
+ target = repo.lookup_reference('refs/heads/first2')
+ repo.checkout(target, strategy=pygit2.enums.CheckoutStrategy.FORCE)
+
+ def test_series_add_different(self):
+ """Test adding a different version of a series from that checked out"""
+ cser = self.get_cser()
+
+ self.add_first2(True)
+
+ # Add first2 initially
+ with terminal.capture() as (out, _):
+ cser.add(None, 'description', allow_unmarked=True)
+ lines = out.getvalue().splitlines()
+ self.assertEqual(
+ "Adding series 'first' v2: mark False allow_unmarked True",
+ lines[0])
+ self.assertEqual("Added series 'first' v2 (2 commits)", lines[1])
+ self.assertEqual(2, len(lines))
+
+ # Now add first: it should be added as a new version
+ with terminal.capture() as (out, _):
+ cser.add('first', 'description', allow_unmarked=True)
+ lines = out.getvalue().splitlines()
+ self.assertEqual(
+ "Adding series 'first' v1: mark False allow_unmarked True",
+ lines[0])
+ self.assertEqual(
+ "Added v1 to existing series 'first' (2 commits)", lines[1])
+ self.assertEqual(2, len(lines))
+
+ slist = cser.db.series_get_dict()
+ self.assertEqual(1, len(slist))
+ self.assertEqual('first', slist['first'].name)
+
+ # We should have two entries, one of each version
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(2, len(svlist))
+ self.assertEqual(1, svlist[0].idnum)
+ self.assertEqual(1, svlist[0].series_id)
+ self.assertEqual(2, svlist[0].version)
+
+ self.assertEqual(2, svlist[1].idnum)
+ self.assertEqual(1, svlist[1].series_id)
+ self.assertEqual(1, svlist[1].version)
+
+ def test_series_add_dup(self):
+ """Test adding a series twice"""
+ cser = self.get_cser()
+ with terminal.capture() as (out, _):
+ cser.add(None, 'description', allow_unmarked=True)
+
+ with terminal.capture() as (out, _):
+ cser.add(None, 'description', allow_unmarked=True)
+ self.assertIn("Series 'first' v1 already exists",
+ out.getvalue().strip())
+
+ self.add_first2(False)
+
+ with terminal.capture() as (out, _):
+ cser.add('first2', 'description', allow_unmarked=True)
+ lines = out.getvalue().splitlines()
+ self.assertEqual(
+ "Added v2 to existing series 'first' (2 commits)", lines[1])
+
+ def test_series_add_dup_reverse(self):
+ """Test adding a series twice, v2 then v1"""
+ cser = self.get_cser()
+ self.add_first2(True)
+ with terminal.capture() as (out, _):
+ cser.add(None, 'description', allow_unmarked=True)
+ self.assertIn("Added series 'first' v2", out.getvalue().strip())
+
+ with terminal.capture() as (out, _):
+ cser.add('first', 'description', allow_unmarked=True)
+ self.assertIn("Added v1 to existing series 'first'",
+ out.getvalue().strip())
+
+ def test_series_add_dup_reverse_cmdline(self):
+ """Test adding a series twice, v2 then v1"""
+ self.get_cser()
+ self.add_first2(True)
+ with terminal.capture() as (out, _):
+ self.run_args('series', 'add', '-M', '-D', 'description',
+ pwork=True)
+ self.assertIn("Added series 'first' v2 (2 commits)",
+ out.getvalue().strip())
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'first', 'add', '-M',
+ '-D', 'description', pwork=True)
+ self.assertIn("Added v1 to existing series 'first'",
+ out.getvalue().strip())
+
+ def test_series_add_skip_version(self):
+ """Test adding a series which is v4 but has no earlier version"""
+ cser = self.get_cser()
+ with terminal.capture() as (out, _):
+ cser.add('third4', 'The glorious third series', mark=False,
+ allow_unmarked=True)
+ lines = out.getvalue().splitlines()
+ self.assertEqual(
+ "Adding series 'third' v4: mark False allow_unmarked True",
+ lines[0])
+ self.assertEqual("Added series 'third' v4 (4 commits)", lines[1])
+ self.assertEqual(2, len(lines))
+
+ sdict = cser.db.series_get_dict()
+ self.assertIn('third', sdict)
+ chk = sdict['third']
+ self.assertEqual('third', chk['name'])
+ self.assertEqual('The glorious third series', chk['desc'])
+
+ svid = cser.get_series_svid(chk['idnum'], 4)
+ self.assertEqual(4, len(cser.get_pcommit_dict(svid)))
+
+ # Remove the series and add it again with just two commits
+ with terminal.capture():
+ cser.remove('third4')
+
+ with terminal.capture() as (out, _):
+ cser.add('third4', 'The glorious third series', mark=False,
+ allow_unmarked=True, end='third4~2')
+ lines = out.getvalue().splitlines()
+ self.assertEqual(
+ "Adding series 'third' v4: mark False allow_unmarked True",
+ lines[0])
+ self.assertRegex(
+ lines[1],
+ 'Ending before .* main: Change to the main program')
+ self.assertEqual("Added series 'third' v4 (2 commits)", lines[2])
+
+ sdict = cser.db.series_get_dict()
+ self.assertIn('third', sdict)
+ chk = sdict['third']
+ self.assertEqual('third', chk['name'])
+ self.assertEqual('The glorious third series', chk['desc'])
+
+ svid = cser.get_series_svid(chk['idnum'], 4)
+ self.assertEqual(2, len(cser.get_pcommit_dict(svid)))
+
+ def test_series_add_wrong_version(self):
+ """Test adding a series with an incorrect branch name or version
+
+ This updates branch 'first' to have version 2, then tries to add it.
+ """
+ cser = self.get_cser()
+ self.assertFalse(cser.db.series_get_dict())
+
+ with terminal.capture():
+ _, ser, max_vers, _ = cser.prep_series('first')
+ cser.update_series('first', ser, max_vers, None, False,
+ add_vers=2)
+
+ with self.assertRaises(ValueError) as exc:
+ with terminal.capture():
+ cser.add('first', 'my description', allow_unmarked=True)
+ self.assertEqual(
+ "Series name 'first' suggests version 1 but Series-version tag "
+ 'indicates 2 (see --force-version)', str(exc.exception))
+
+ # Now try again with --force-version which should force version 1
+ with terminal.capture() as (out, _):
+ cser.add('first', 'my description', allow_unmarked=True,
+ force_version=True)
+ itr = iter(out.getvalue().splitlines())
+ self.assertEqual(
+ "Adding series 'first' v1: mark False allow_unmarked True",
+ next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual(
+ "Processing 2 commits from branch 'first'", next(itr))
+ self.assertRegex(next(itr),
+ f'- {HASH_RE} as {HASH_RE} i2c: I2C things')
+ self.assertRegex(next(itr),
+ f'- rm v1: {HASH_RE} as {HASH_RE} spi: SPI fixes')
+ self.assertRegex(next(itr),
+ f'Updating branch first from {HASH_RE} to {HASH_RE}')
+ self.assertEqual("Added series 'first' v1 (2 commits)", next(itr))
+ self.assert_finished(itr)
+
+ # Since this is v1 the Series-version tag should have been removed
+ series = patchstream.get_metadata('first', 0, 2, git_dir=self.gitdir)
+ self.assertNotIn('version', series)
+
+ def _fake_patchwork_cser(self, subpath):
+ """Fake Patchwork server for the function below
+
+ This handles accessing various things used by the tests below. It has
+ hard-coded data, apart from self.autolink_extra which can be adjusted
+ by the test.
+
+ Args:
+ subpath (str): URL subpath to use
+
+ Return:
+ dict or list: fake response for the given subpath
+ """
+ # Get a list of projects
+ if subpath == 'projects/':
+ return [
+ {'id': self.PROJ_ID, 'name': 'U-Boot',
+ 'link_name': self.PROJ_LINK_NAME},
+ {'id': 9, 'name': 'other', 'link_name': 'other'}
+ ]
+
+ # Search for series by their cover-letter name
+ re_search = re.match(r'series/\?project=(\d+)&q=.*$', subpath)
+ if re_search:
+ result = [
+ {'id': 56, 'name': 'contains first name', 'version': 1},
+ {'id': 43, 'name': 'has first in it', 'version': 1},
+ {'id': 1234, 'name': 'first series', 'version': 1},
+ {'id': self.SERIES_ID_SECOND_V1, 'name': self.TITLE_SECOND,
+ 'version': 1},
+ {'id': self.SERIES_ID_SECOND_V2, 'name': self.TITLE_SECOND,
+ 'version': 2},
+ {'id': 12345, 'name': 'i2c: I2C things', 'version': 1},
+ ]
+ if self.autolink_extra:
+ result += [self.autolink_extra]
+ return result
+
+ # Read information about a series, given its link (patchwork series ID)
+ m_series = re.match(r'series/(\d+)/$', subpath)
+ series_id = int(m_series.group(1)) if m_series else None
+ if series_id:
+ if series_id == self.SERIES_ID_SECOND_V1:
+ # series 'second'
+ return {
+ 'patches': [
+ {'id': '10',
+ 'name': '[PATCH,1/3] video: Some video improvements',
+ 'content': ''},
+ {'id': '11',
+ 'name': '[PATCH,2/3] serial: Add a serial driver',
+ 'content': ''},
+ {'id': '12', 'name': '[PATCH,3/3] bootm: Make it boot',
+ 'content': ''},
+ ],
+ 'cover_letter': {
+ 'id': 39,
+ 'name': 'The name of the cover letter',
+ }
+ }
+ if series_id == self.SERIES_ID_SECOND_V2:
+ # series 'second2'
+ return {
+ 'patches': [
+ {'id': '110',
+ 'name':
+ '[PATCH,v2,1/3] video: Some video improvements',
+ 'content': ''},
+ {'id': '111',
+ 'name': '[PATCH,v2,2/3] serial: Add a serial driver',
+ 'content': ''},
+ {'id': '112',
+ 'name': '[PATCH,v2,3/3] bootm: Make it boot',
+ 'content': ''},
+ ],
+ 'cover_letter': {
+ 'id': 139,
+ 'name': 'The name of the cover letter',
+ }
+ }
+ if series_id == self.SERIES_ID_FIRST_V3:
+ # series 'first3'
+ return {
+ 'patches': [
+ {'id': 20, 'name': '[PATCH,v3,1/2] i2c: I2C things',
+ 'content': ''},
+ {'id': 21, 'name': '[PATCH,v3,2/2] spi: SPI fixes',
+ 'content': ''},
+ ],
+ 'cover_letter': {
+ 'id': 29,
+ 'name': 'Cover letter for first',
+ }
+ }
+ if series_id == 123:
+ return {
+ 'patches': [
+ {'id': 20, 'name': '[PATCH,1/2] i2c: I2C things',
+ 'content': ''},
+ {'id': 21, 'name': '[PATCH,2/2] spi: SPI fixes',
+ 'content': ''},
+ ],
+ }
+ if series_id == 1234:
+ return {
+ 'patches': [
+ {'id': 20, 'name': '[PATCH,v2,1/2] i2c: I2C things',
+ 'content': ''},
+ {'id': 21, 'name': '[PATCH,v2,2/2] spi: SPI fixes',
+ 'content': ''},
+ ],
+ }
+ raise ValueError(f'Fake Patchwork unknown series_id: {series_id}')
+
+ # Read patch status
+ m_pat = re.search(r'patches/(\d+)/$', subpath)
+ patch_id = int(m_pat.group(1)) if m_pat else None
+ if patch_id:
+ if patch_id in [10, 110]:
+ return {'state': 'accepted',
+ 'content':
+ 'Reviewed-by: Fred Bloggs <fred@bloggs.com>'}
+ if patch_id in [11, 111]:
+ return {'state': 'changes-requested', 'content': ''}
+ if patch_id in [12, 112]:
+ return {'state': 'rejected',
+ 'content': "I don't like this at all, sorry"}
+ if patch_id == 20:
+ return {'state': 'awaiting-upstream', 'content': ''}
+ if patch_id == 21:
+ return {'state': 'not-applicable', 'content': ''}
+ raise ValueError(f'Fake Patchwork unknown patch_id: {patch_id}')
+
+ # Read comments from a patch
+ m_comm = re.search(r'patches/(\d+)/comments/', subpath)
+ patch_id = int(m_comm.group(1)) if m_comm else None
+ if patch_id:
+ if patch_id in [10, 110]:
+ return [
+ {'id': 1, 'content': ''},
+ {'id': 2,
+ 'content':
+ '''On some date Mary Smith <msmith@wibble.com> wrote:
+> This was my original patch
+> which is being quoted
+
+I like the approach here and I would love to see more of it.
+
+Reviewed-by: Fred Bloggs <fred@bloggs.com>
+''',
+ 'submitter': {
+ 'name': 'Fred Bloggs',
+ 'email': 'fred@bloggs.com',
+ }
+ },
+ ]
+ if patch_id in [11, 111]:
+ return []
+ if patch_id in [12, 112]:
+ return [
+ {'id': 4, 'content': ''},
+ {'id': 5, 'content': ''},
+ {'id': 6, 'content': ''},
+ ]
+ if patch_id == 20:
+ return [
+ {'id': 7, 'content':
+ '''On some date Alex Miller <alex@country.org> wrote:
+
+> Sometimes we need to create a patch.
+> This is one of those times
+
+Tested-by: Mary Smith <msmith@wibble.com> # yak
+'''},
+ {'id': 8, 'content': ''},
+ ]
+ if patch_id == 21:
+ return []
+ raise ValueError(
+ f'Fake Patchwork does not understand patch_id {patch_id}: '
+ f'{subpath}')
+
+ # Read comments from a cover letter
+ m_cover_id = re.search(r'covers/(\d+)/comments/', subpath)
+ cover_id = int(m_cover_id.group(1)) if m_cover_id else None
+ if cover_id:
+ if cover_id in [39, 139]:
+ return [
+ {'content': 'some comment',
+ 'submitter': {
+ 'name': 'A user',
+ 'email': 'user@user.com',
+ },
+ 'date': 'Sun 13 Apr 14:06:02 MDT 2025',
+ },
+ {'content': 'another comment',
+ 'submitter': {
+ 'name': 'Ghenkis Khan',
+ 'email': 'gk@eurasia.gov',
+ },
+ 'date': 'Sun 13 Apr 13:06:02 MDT 2025',
+ },
+ ]
+ if cover_id == 29:
+ return []
+
+ raise ValueError(f'Fake Patchwork unknown cover_id: {cover_id}')
+
+ raise ValueError(f'Fake Patchwork does not understand: {subpath}')
+
+ def setup_second(self, do_sync=True):
+ """Set up the 'second' series synced with the fake patchwork
+
+ Args:
+ do_sync (bool): True to sync the series
+
+ Return: tuple:
+ Cseries: New Cseries object
+ pwork: Patchwork object
+ """
+ with self.stage('setup second'):
+ cser = self.get_cser()
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME)
+
+ with terminal.capture() as (out, _):
+ cser.add('first', '', allow_unmarked=True)
+ cser.add('second', allow_unmarked=True)
+
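+ # Branch 'second' already carries a Series-links tag pointing to 456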
+ series = patchstream.get_metadata_for_list('second', self.gitdir,
+ 3)
+ self.assertEqual('456', series.links)
+
+ with terminal.capture() as (out, _):
+ cser.increment('second')
+
+ series = patchstream.get_metadata_for_list('second', self.gitdir,
+ 3)
+ self.assertEqual('456', series.links)
+
+ series = patchstream.get_metadata_for_list('second2', self.gitdir,
+ 3)
+ self.assertEqual('1:456', series.links)
+
+ if do_sync:
+ with terminal.capture() as (out, _):
+ cser.link_auto(pwork, 'second', 2, True)
+ with terminal.capture() as (out, _):
+ cser.gather(pwork, 'second', 2, False, True, False)
+ lines = out.getvalue().splitlines()
+ self.assertEqual(
+ "Updating series 'second' version 2 from link '457'",
+ lines[0])
+ self.assertEqual(
+ '3 patches and cover letter updated (8 requests)',
+ lines[1])
+ self.assertEqual(2, len(lines))
+
+ return cser, pwork
+
+ def test_series_add_no_cover(self):
+ """Test patchwork when adding a series which has no cover letter"""
+ cser = self.get_cser()
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME)
+
+ with terminal.capture() as (out, _):
+ cser.add('first', 'my name for this', mark=False,
+ allow_unmarked=True)
+ self.assertIn("Added series 'first' v1 (2 commits)", out.getvalue())
+
+ with terminal.capture() as (out, _):
+ cser.link_auto(pwork, 'first', 1, True)
+ self.assertIn("Setting link for series 'first' v1 to 12345",
+ out.getvalue())
+
+ def test_series_list(self):
+ """Test listing cseries"""
+ self.setup_second()
+
+ self.db_close()
+ args = Namespace(subcmd='ls')
+ with terminal.capture() as (out, _):
+ control.do_series(args, test_db=self.tmpdir, pwork=True)
+ lines = out.getvalue().splitlines()
+ self.assertEqual(5, len(lines))
+ self.assertEqual(
+ 'Name Description '
+ 'Accepted Versions', lines[0])
+ self.assertTrue(lines[1].startswith('--'))
+ self.assertEqual(
+ 'first '
+ ' -/2 1', lines[2])
+ self.assertEqual(
+ 'second Series for my board '
+ ' 1/3 1 2', lines[3])
+ self.assertTrue(lines[4].startswith('--'))
+
+ def test_do_series_add(self):
+ """Add a new cseries"""
+ self.make_git_tree()
+ args = Namespace(subcmd='add', desc='my-description', series='first',
+ mark=False, allow_unmarked=True, upstream=None,
+ dry_run=False)
+ with terminal.capture() as (out, _):
+ control.do_series(args, test_db=self.tmpdir, pwork=True)
+
+ cser = self.get_database()
+ slist = cser.db.series_get_dict()
+ self.assertEqual(1, len(slist))
+ ser = slist.get('first')
+ self.assertTrue(ser)
+ self.assertEqual('first', ser.name)
+ self.assertEqual('my-description', ser.desc)
+
+ self.db_close()
+ args.subcmd = 'ls'
+ with terminal.capture() as (out, _):
+ control.do_series(args, test_db=self.tmpdir, pwork=True)
+ lines = out.getvalue().splitlines()
+ self.assertEqual(4, len(lines))
+ self.assertTrue(lines[1].startswith('--'))
+ self.assertEqual(
+ 'first my-description '
+ '-/2 1', lines[2])
+
+ def test_do_series_add_cmdline(self):
+ """Add a new cseries using the cmdline"""
+ self.make_git_tree()
+ with terminal.capture():
+ self.run_args('series', '-s', 'first', 'add', '-M',
+ '-D', 'my-description', pwork=True)
+
+ cser = self.get_database()
+ slist = cser.db.series_get_dict()
+ self.assertEqual(1, len(slist))
+ ser = slist.get('first')
+ self.assertTrue(ser)
+ self.assertEqual('first', ser.name)
+ self.assertEqual('my-description', ser.desc)
+
+ def test_do_series_add_auto(self):
+ """Add a new cseries without any arguments"""
+ self.make_git_tree()
+
+ # Use the 'second' branch, which has a cover letter
+ gitutil.checkout('second', self.gitdir, work_tree=self.tmpdir,
+ force=True)
+ args = Namespace(subcmd='add', series=None, mark=False,
+ allow_unmarked=True, upstream=None, dry_run=False,
+ desc=None)
+ with terminal.capture():
+ control.do_series(args, test_db=self.tmpdir, pwork=True)
+
+ cser = self.get_database()
+ slist = cser.db.series_get_dict()
+ self.assertEqual(1, len(slist))
+ ser = slist.get('second')
+ self.assertTrue(ser)
+ self.assertEqual('second', ser.name)
+ self.assertEqual('Series for my board', ser.desc)
+ cser.close_database()
+
+ def _check_inc(self, out):
+ """Check output from an 'increment' operation
+
+ Args:
+ out (StringIO): Text to check
+
+ Return:
+ Iterator: remaining output lines, for further checks
+ """
+ itr = iter(out.getvalue().splitlines())
+
+ self.assertEqual("Increment 'first' v1: 2 patches", next(itr))
+ self.assertRegex(next(itr), 'Checking out upstream commit .*')
+ self.assertEqual("Processing 2 commits from branch 'first2'",
+ next(itr))
+ self.assertRegex(next(itr),
+ f'- {HASH_RE} as {HASH_RE} i2c: I2C things')
+ self.assertRegex(next(itr),
+ f'- add v2: {HASH_RE} as {HASH_RE} spi: SPI fixes')
+ self.assertRegex(
+ next(itr), f'Updating branch first2 from {HASH_RE} to {HASH_RE}')
+ self.assertEqual('Added new branch first2', next(itr))
+ return itr
+
+ def test_series_link(self):
+ """Test adding a patchwork link to a cseries"""
+ cser = self.get_cser()
+
+ repo = pygit2.init_repository(self.gitdir)
+ first = repo.lookup_branch('first').peel(
+ pygit2.enums.ObjectType.COMMIT).oid
+ base = repo.lookup_branch('base').peel(
+ pygit2.enums.ObjectType.COMMIT).oid
+
+ gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir,
+ force=True)
+
+ with terminal.capture() as (out, _):
+ cser.add('first', '', allow_unmarked=True)
+
+ with self.assertRaises(ValueError) as exc:
+ cser.link_set('first', 2, '1234', True)
+ self.assertEqual("Series 'first' does not have a version 2",
+ str(exc.exception))
+
+ self.assertEqual('first', gitutil.get_branch(self.gitdir))
+ with terminal.capture() as (out, _):
+ cser.increment('first')
+ self.assertTrue(repo.lookup_branch('first2'))
+
+ with terminal.capture() as (out, _):
+ cser.link_set('first', 2, '2345', True)
+
+ lines = out.getvalue().splitlines()
+ self.assertEqual(6, len(lines))
+ self.assertRegex(
+ lines[0], 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual("Processing 2 commits from branch 'first2'",
+ lines[1])
+ self.assertRegex(
+ lines[2],
+ f'- {HASH_RE} as {HASH_RE} i2c: I2C things')
+ self.assertRegex(
+ lines[3],
+ f"- add v2 links '2:2345': {HASH_RE} as {HASH_RE} spi: SPI fixes")
+ self.assertRegex(
+ lines[4], f'Updating branch first2 from {HASH_RE} to {HASH_RE}')
+ self.assertEqual("Setting link for series 'first' v2 to 2345",
+ lines[5])
+
+ self.assertEqual('2345', cser.link_get('first', 2))
+
+ series = patchstream.get_metadata_for_list('first2', self.gitdir, 2)
+ self.assertEqual('2:2345', series.links)
+
+ self.assertEqual('first2', gitutil.get_branch(self.gitdir))
+
+ # Check the original series was left alone
+ self.assertEqual(
+ first, repo.lookup_branch('first').peel(
+ pygit2.enums.ObjectType.COMMIT).oid)
+ count = 2
+ series1 = patchstream.get_metadata_for_list('first', self.gitdir,
+ count)
+ self.assertNotIn('links', series1)
+ self.assertNotIn('version', series1)
+
+ # Check that base is left alone
+ self.assertEqual(
+ base, repo.lookup_branch('base').peel(
+ pygit2.enums.ObjectType.COMMIT).oid)
+ series1 = patchstream.get_metadata_for_list('base', self.gitdir, count)
+ self.assertNotIn('links', series1)
+ self.assertNotIn('version', series1)
+
+ # Check out second and try to update first
+ gitutil.checkout('second', self.gitdir, work_tree=self.tmpdir,
+ force=True)
+ with terminal.capture():
+ cser.link_set('first', 1, '16', True)
+
+ # Overwrite the link
+ with terminal.capture():
+ cser.link_set('first', 1, '17', True)
+
+ series2 = patchstream.get_metadata_for_list('first', self.gitdir,
+ count)
+ self.assertEqual('1:17', series2.links)
+
+ def test_series_link_cmdline(self):
+ """Test adding a patchwork link to a cseries using the cmdline"""
+ cser = self.get_cser()
+
+ gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir,
+ force=True)
+
+ with terminal.capture() as (out, _):
+ cser.add('first', '', allow_unmarked=True)
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'first', '-V', '4', 'set-link', '-u',
+ '1234', expect_ret=1, pwork=True)
+ self.assertIn("Series 'first' does not have a version 4",
+ out.getvalue())
+
+ with self.assertRaises(ValueError) as exc:
+ cser.link_get('first', 4)
+ self.assertEqual("Series 'first' does not have a version 4",
+ str(exc.exception))
+
+ with terminal.capture() as (out, _):
+ cser.increment('first')
+
+ with self.assertRaises(ValueError) as exc:
+ cser.link_get('first', 4)
+ self.assertEqual("Series 'first' does not have a version 4",
+ str(exc.exception))
+
+ with terminal.capture() as (out, _):
+ cser.increment('first')
+ cser.increment('first')
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'first', '-V', '4', 'set-link', '-u',
+ '1234', pwork=True)
+ lines = out.getvalue().splitlines()
+ self.assertRegex(
+ lines[-3],
+ f"- add v4 links '4:1234': {HASH_RE} as {HASH_RE} spi: SPI fixes")
+ self.assertEqual("Setting link for series 'first' v4 to 1234",
+ lines[-1])
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'first', '-V', '4', 'get-link',
+ pwork=True)
+ self.assertIn('1234', out.getvalue())
+
+ series = patchstream.get_metadata_for_list('first4', self.gitdir, 1)
+ self.assertEqual('4:1234', series.links)
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'first', '-V', '5', 'get-link',
+ expect_ret=1, pwork=True)
+
+ self.assertIn("Series 'first' does not have a version 5",
+ out.getvalue())
+
+ # Checkout 'first' and try to get the link from 'first4'
+ gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir,
+ force=True)
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'first4', 'get-link', pwork=True)
+ self.assertIn('1234', out.getvalue())
+
+ # This should get the link for 'first'
+ with terminal.capture() as (out, _):
+ self.run_args('series', 'get-link', pwork=True)
+ self.assertIn('None', out.getvalue())
+
+ # Checkout 'first4' again; this should get the link for 'first4'
+ gitutil.checkout('first4', self.gitdir, work_tree=self.tmpdir,
+ force=True)
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', 'get-link', pwork=True)
+ self.assertIn('1234', out.getvalue())
+
+ def test_series_link_auto_version(self):
+ """Test finding the patchwork link for a cseries automatically"""
+ cser = self.get_cser()
+
+ with terminal.capture() as (out, _):
+ cser.add('second', allow_unmarked=True)
+
+ # Make sure that the link is there
+ count = 3
+ series = patchstream.get_metadata('second', 0, count,
+ git_dir=self.gitdir)
+ self.assertEqual(f'{self.SERIES_ID_SECOND_V1}', series.links)
+
+ # Set link with detected version
+ with terminal.capture() as (out, _):
+ cser.link_set('second', None, f'{self.SERIES_ID_SECOND_V1}', True)
+ self.assertEqual(
+ "Setting link for series 'second' v1 to 456",
+ out.getvalue().splitlines()[-1])
+
+ # Make sure that the link was set
+ series = patchstream.get_metadata('second', 0, count,
+ git_dir=self.gitdir)
+ self.assertEqual(f'1:{self.SERIES_ID_SECOND_V1}', series.links)
+
+ with terminal.capture():
+ cser.increment('second')
+
+ # Make sure that the new series gets the same link
+ series = patchstream.get_metadata('second2', 0, 3,
+ git_dir=self.gitdir)
+
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME)
+ self.assertFalse(cser.project_get())
+ cser.project_set(pwork, 'U-Boot', quiet=True)
+
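+ # link_search() returns (link, candidates, name, version, description)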
+ self.assertEqual(
+ (self.SERIES_ID_SECOND_V1, None, 'second', 1,
+ 'Series for my board'),
+ cser.link_search(pwork, 'second', 1))
+
+ with terminal.capture():
+ cser.increment('second')
+
+ self.assertEqual((457, None, 'second', 2, 'Series for my board'),
+ cser.link_search(pwork, 'second', 2))
+
+ def test_series_link_auto_name(self):
+ """Test finding the patchwork link for a cseries with auto name"""
+ cser = self.get_cser()
+
+ with terminal.capture() as (out, _):
+ cser.add('first', '', allow_unmarked=True)
+
+ # Set link with detected name
+ with self.assertRaises(ValueError) as exc:
+ cser.link_set(None, 2, '2345', True)
+ self.assertEqual(
+ "Series 'first' does not have a version 2", str(exc.exception))
+
+ with terminal.capture():
+ cser.increment('first')
+
+ with terminal.capture() as (out, _):
+ cser.link_set(None, 2, '2345', True)
+ self.assertEqual(
+ "Setting link for series 'first' v2 to 2345",
+ out.getvalue().splitlines()[-1])
+
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(2, len(svlist))
+ self.assertEqual(1, svlist[0].idnum)
+ self.assertEqual(1, svlist[0].series_id)
+ self.assertEqual(1, svlist[0].version)
+ self.assertIsNone(svlist[0].link)
+
+ self.assertEqual(2, svlist[1].idnum)
+ self.assertEqual(1, svlist[1].series_id)
+ self.assertEqual(2, svlist[1].version)
+ self.assertEqual('2345', svlist[1].link)
+
+ def test_series_link_auto_name_version(self):
+ """Find patchwork link for a cseries with auto name + version"""
+ cser = self.get_cser()
+
+ with terminal.capture() as (out, _):
+ cser.add('first', '', allow_unmarked=True)
+
+ # Set link with detected name and version
+ with terminal.capture() as (out, _):
+ cser.link_set(None, None, '1234', True)
+ self.assertEqual(
+ "Setting link for series 'first' v1 to 1234",
+ out.getvalue().splitlines()[-1])
+
+ with terminal.capture():
+ cser.increment('first')
+
+ with terminal.capture() as (out, _):
+ cser.link_set(None, None, '2345', True)
+ self.assertEqual(
+ "Setting link for series 'first' v2 to 2345",
+ out.getvalue().splitlines()[-1])
+
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(2, len(svlist))
+ self.assertEqual(1, svlist[0].idnum)
+ self.assertEqual(1, svlist[0].series_id)
+ self.assertEqual(1, svlist[0].version)
+ self.assertEqual('1234', svlist[0].link)
+
+ self.assertEqual(2, svlist[1].idnum)
+ self.assertEqual(1, svlist[1].series_id)
+ self.assertEqual(2, svlist[1].version)
+ self.assertEqual('2345', svlist[1].link)
+
+ def test_series_link_missing(self):
+ """Test finding patchwork link for a cseries but it is missing"""
+ cser = self.get_cser()
+
+ with terminal.capture():
+ cser.add('second', allow_unmarked=True)
+
+ with terminal.capture():
+ cser.increment('second')
+ cser.increment('second')
+
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME)
+ self.assertFalse(cser.project_get())
+ cser.project_set(pwork, 'U-Boot', quiet=True)
+
+ self.assertEqual(
+ (self.SERIES_ID_SECOND_V1, None, 'second', 1,
+ 'Series for my board'),
+ cser.link_search(pwork, 'second', 1))
+ self.assertEqual((457, None, 'second', 2, 'Series for my board'),
+ cser.link_search(pwork, 'second', 2))
+ res = cser.link_search(pwork, 'second', 3)
+ self.assertEqual(
+ (None,
+ [{'id': self.SERIES_ID_SECOND_V1, 'name': 'Series for my board',
+ 'version': 1},
+ {'id': 457, 'name': 'Series for my board', 'version': 2}],
+ 'second', 3, 'Series for my board'),
+ res)
+
+ def check_series_autolink(self):
+ """Common code for autolink tests"""
+ cser = self.get_cser()
+
+ with self.stage('setup'):
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME)
+ self.assertFalse(cser.project_get())
+ cser.project_set(pwork, 'U-Boot', quiet=True)
+
+ with terminal.capture():
+ cser.add('first', '', allow_unmarked=True)
+ cser.add('second', allow_unmarked=True)
+
+ with self.stage('autolink unset'):
+ with terminal.capture() as (out, _):
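+ # Control returns to the caller here, which runs the autolink
+ # while output is still being captured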
+ yield cser, pwork
+ self.assertEqual(
+ "Setting link for series 'second' v1 to "
+ f'{self.SERIES_ID_SECOND_V1}',
+ out.getvalue().splitlines()[-1])
+
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(2, len(svlist))
+ self.assertEqual(1, svlist[0].idnum)
+ self.assertEqual(1, svlist[0].series_id)
+ self.assertEqual(1, svlist[0].version)
+ self.assertEqual(2, svlist[1].idnum)
+ self.assertEqual(2, svlist[1].series_id)
+ self.assertEqual(1, svlist[1].version)
+ self.assertEqual(str(self.SERIES_ID_SECOND_V1), svlist[1].link)
+ yield None
+
+ def test_series_autolink(self):
+ """Test linking a cseries to its patchwork series by description"""
+ cor = self.check_series_autolink()
+ cser, pwork = next(cor)
+
+ with self.assertRaises(ValueError) as exc:
+ cser.link_auto(pwork, 'first', None, True)
+ self.assertIn("Series 'first' has an empty description",
+ str(exc.exception))
+
+ # autolink unset
+ cser.link_auto(pwork, 'second', None, True)
+
+ self.assertFalse(next(cor))
+ cor.close()
+
+ def test_series_autolink_cmdline(self):
+ """Test linking to patchwork series by description on cmdline"""
+ cor = self.check_series_autolink()
+ _, pwork = next(cor)
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'first', 'autolink', expect_ret=1,
+ pwork=pwork)
+ self.assertEqual(
+ "patman: ValueError: Series 'first' has an empty description",
+ out.getvalue().strip())
+
+ # autolink unset
+ self.run_args('series', '-s', 'second', 'autolink', '-u', pwork=pwork)
+
+ self.assertFalse(next(cor))
+ cor.close()
+
+ def _autolink_setup(self):
+ """Set things up for autolink tests
+
+ Return: tuple:
+ Cseries object
+ Patchwork object
+ """
+ cser = self.get_cser()
+
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME)
+ self.assertFalse(cser.project_get())
+ cser.project_set(pwork, 'U-Boot', quiet=True)
+
+ with terminal.capture():
+ cser.add('first', 'first series', allow_unmarked=True)
+ cser.add('second', allow_unmarked=True)
+ cser.increment('first')
+ return cser, pwork
+
+ def test_series_link_auto_all(self):
+ """Test linking all cseries to their patchwork series by description"""
+ cser, pwork = self._autolink_setup()
+ with terminal.capture() as (out, _):
+ summary = cser.link_auto_all(pwork, update_commit=True,
+ link_all_versions=True,
+ replace_existing=False, dry_run=True,
+ show_summary=False)
+ self.assertEqual(3, len(summary))
+ items = iter(summary.values())
+ linked = next(items)
+ self.assertEqual(
+ ('first', 1, None, 'first series', 'linked:1234'), linked)
+ self.assertEqual(
+ ('first', 2, None, 'first series', 'not found'), next(items))
+ self.assertEqual(
+ ('second', 1, f'{self.SERIES_ID_SECOND_V1}', 'Series for my board',
+ f'already:{self.SERIES_ID_SECOND_V1}'),
+ next(items))
+ self.assertEqual('Dry run completed', out.getvalue().splitlines()[-1])
+
+ # A second dry run should do exactly the same thing
+ with terminal.capture() as (out2, _):
+ summary2 = cser.link_auto_all(pwork, update_commit=True,
+ link_all_versions=True,
+ replace_existing=False, dry_run=True,
+ show_summary=False)
+ self.assertEqual(out.getvalue(), out2.getvalue())
+ self.assertEqual(summary, summary2)
+
+ # Now do it for real
+ with terminal.capture():
+ summary = cser.link_auto_all(pwork, update_commit=True,
+ link_all_versions=True,
+ replace_existing=False, dry_run=False,
+ show_summary=False)
+
+ # Check the link was updated
+ pdict = cser.get_ser_ver_dict()
+ svid = list(summary)[0]
+ self.assertEqual('1234', pdict[svid].link)
+
+ series = patchstream.get_metadata_for_list('first', self.gitdir, 2)
+ self.assertEqual('1:1234', series.links)
+
+ def test_series_autolink_latest(self):
+ """Test linking the lastest versions"""
+ cser, pwork = self._autolink_setup()
+ with terminal.capture():
+ summary = cser.link_auto_all(pwork, update_commit=True,
+ link_all_versions=False,
+ replace_existing=False, dry_run=False,
+ show_summary=False)
+ self.assertEqual(2, len(summary))
+ items = iter(summary.values())
+ self.assertEqual(
+ ('first', 2, None, 'first series', 'not found'), next(items))
+ self.assertEqual(
+ ('second', 1, f'{self.SERIES_ID_SECOND_V1}', 'Series for my board',
+ f'already:{self.SERIES_ID_SECOND_V1}'),
+ next(items))
+
+ def test_series_autolink_no_update(self):
+ """Test linking the lastest versions without updating commits"""
+ cser, pwork = self._autolink_setup()
+ with terminal.capture():
+ cser.link_auto_all(pwork, update_commit=False,
+ link_all_versions=True, replace_existing=False,
+ dry_run=False,
+ show_summary=False)
+
+ series = patchstream.get_metadata_for_list('first', self.gitdir, 2)
+ self.assertNotIn('links', series)
+
+ def test_series_autolink_replace(self):
+ """Test linking the lastest versions without updating commits"""
+ cser, pwork = self._autolink_setup()
+ with terminal.capture():
+ summary = cser.link_auto_all(pwork, update_commit=True,
+ link_all_versions=True,
+ replace_existing=True, dry_run=False,
+ show_summary=False)
+ self.assertEqual(3, len(summary))
+ items = iter(summary.values())
+ linked = next(items)
+ self.assertEqual(
+ ('first', 1, None, 'first series', 'linked:1234'), linked)
+ self.assertEqual(
+ ('first', 2, None, 'first series', 'not found'), next(items))
+ self.assertEqual(
+ ('second', 1, f'{self.SERIES_ID_SECOND_V1}', 'Series for my board',
+ f'linked:{self.SERIES_ID_SECOND_V1}'),
+ next(items))
+
+ def test_series_autolink_extra(self):
+ """Test command-line operation
+
+ This just uses mocks for now since we can rely on the direct tests for
+ the actual operation.
+ """
+ _, pwork = self._autolink_setup()
+ with (mock.patch.object(cseries.Cseries, 'link_auto_all',
+ return_value=None) as method):
+ self.run_args('series', 'autolink-all', pwork=True)
+ method.assert_called_once_with(True, update_commit=False,
+ link_all_versions=False,
+ replace_existing=False, dry_run=False,
+ show_summary=True)
+
+ with (mock.patch.object(cseries.Cseries, 'link_auto_all',
+ return_value=None) as method):
+ self.run_args('series', 'autolink-all', '-a', pwork=True)
+ method.assert_called_once_with(True, update_commit=False,
+ link_all_versions=True,
+ replace_existing=False, dry_run=False,
+ show_summary=True)
+
+ with (mock.patch.object(cseries.Cseries, 'link_auto_all',
+ return_value=None) as method):
+ self.run_args('series', 'autolink-all', '-a', '-r', pwork=True)
+ method.assert_called_once_with(True, update_commit=False,
+ link_all_versions=True,
+ replace_existing=True, dry_run=False,
+ show_summary=True)
+
+ with (mock.patch.object(cseries.Cseries, 'link_auto_all',
+ return_value=None) as method):
+ self.run_args('series', '-n', 'autolink-all', '-r', pwork=True)
+ method.assert_called_once_with(True, update_commit=False,
+ link_all_versions=False,
+ replace_existing=True, dry_run=True,
+ show_summary=True)
+
+ with (mock.patch.object(cseries.Cseries, 'link_auto_all',
+ return_value=None) as method):
+ self.run_args('series', 'autolink-all', '-u', pwork=True)
+ method.assert_called_once_with(True, update_commit=True,
+ link_all_versions=False,
+ replace_existing=False, dry_run=False,
+ show_summary=True)
+
+ # Now do a real one to check the patchwork handling and output
+ with terminal.capture() as (out, _):
+ self.run_args('series', 'autolink-all', '-a', pwork=pwork)
+ itr = iter(out.getvalue().splitlines())
+ self.assertEqual(
+ '1 series linked, 1 already linked, 1 not found (3 requests)',
+ next(itr))
+ self.assertEqual('', next(itr))
+ self.assertEqual(
+ 'Name Version Description '
+ ' Result', next(itr))
+ self.assertTrue(next(itr).startswith('--'))
+ self.assertEqual(
+ 'first 1 first series '
+ ' linked:1234', next(itr))
+ self.assertEqual(
+ 'first 2 first series '
+ ' not found', next(itr))
+ self.assertEqual(
+ 'second 1 Series for my board '
+ f' already:{self.SERIES_ID_SECOND_V1}',
+ next(itr))
+ self.assertTrue(next(itr).startswith('--'))
+ self.assert_finished(itr)
+
+ def check_series_archive(self):
+ """Coroutine to run the archive test"""
+ cser = self.get_cser()
+ with self.stage('setup'):
+ with terminal.capture():
+ cser.add('first', '', allow_unmarked=True)
+
+ # Check the series is visible in the list
+ slist = cser.db.series_get_dict()
+ self.assertEqual(1, len(slist))
+ self.assertEqual('first', slist['first'].name)
+
+ # Add a second branch
+ with terminal.capture():
+ cser.increment('first')
+
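+ # Pin 'now' so the archive-tag names generated below are predictable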
+ cser.fake_now = datetime(24, 9, 14)
+ repo = pygit2.init_repository(self.gitdir)
+ with self.stage('archive'):
+ expected_commit1 = repo.revparse_single('first')
+ expected_commit2 = repo.revparse_single('first2')
+ expected_tag1 = 'first-14sep24'
+ expected_tag2 = 'first2-14sep24'
+
+ # Archive it and make sure it is invisible
+ yield cser
+ slist = cser.db.series_get_dict()
+ self.assertFalse(slist)
+
+ # ...unless we include archived items
+ slist = cser.db.series_get_dict(include_archived=True)
+ self.assertEqual(1, len(slist))
+ first = slist['first']
+ self.assertEqual('first', first.name)
+
+ # Make sure the branches have been tagged
+ svlist = cser.db.ser_ver_get_for_series(first.idnum)
+ self.assertEqual(expected_tag1, svlist[0].archive_tag)
+ self.assertEqual(expected_tag2, svlist[1].archive_tag)
+
+ # Check that the tags were created and point to old branch commits
+ target1 = repo.revparse_single(expected_tag1)
+ self.assertEqual(expected_commit1, target1.get_object())
+ target2 = repo.revparse_single(expected_tag2)
+ self.assertEqual(expected_commit2, target2.get_object())
+
+ # The branches should be deleted
+ self.assertNotIn('first', repo.branches)
+ self.assertNotIn('first2', repo.branches)
+
+ with self.stage('unarchive'):
+ # or we unarchive it
+ yield cser
+ slist = cser.db.series_get_dict()
+ self.assertEqual(1, len(slist))
+
+ # Make sure the branches have been restored
+ branch1 = repo.branches['first']
+ branch2 = repo.branches['first2']
+ self.assertEqual(expected_commit1.oid, branch1.target)
+ self.assertEqual(expected_commit2.oid, branch2.target)
+
+ # Make sure the tags were deleted
+ with self.assertRaises(KeyError):
+ repo.revparse_single(expected_tag1)
+ with self.assertRaises(KeyError):
+ repo.revparse_single(expected_tag2)
+
+ # Make sure the tag information has been removed
+ svlist = cser.db.ser_ver_get_for_series(first.idnum)
+ self.assertFalse(svlist[0].archive_tag)
+ self.assertFalse(svlist[1].archive_tag)
+
+ yield False
+
+ def test_series_archive(self):
+ """Test marking a series as archived"""
+ cor = self.check_series_archive()
+ cser = next(cor)
+
+ # Archive it and make sure it is invisible
+ cser.archive('first')
+ cser = next(cor)
+ cser.unarchive('first')
+ self.assertFalse(next(cor))
+ cor.close()
+
+ def test_series_archive_cmdline(self):
+ """Test marking a series as archived with cmdline"""
+ cor = self.check_series_archive()
+ cser = next(cor)
+
+ # Archive it and make sure it is invisible
+ self.run_args('series', '-s', 'first', 'archive', pwork=True,
+ cser=cser)
+ next(cor)
+ self.run_args('series', '-s', 'first', 'unarchive', pwork=True,
+ cser=cser)
+ self.assertFalse(next(cor))
+ cor.close()
+
+ def check_series_inc(self):
+ """Coroutine to run the increment test"""
+ cser = self.get_cser()
+
+ with self.stage('setup'):
+ gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir,
+ force=True)
+ with terminal.capture() as (out, _):
+ cser.add('first', '', allow_unmarked=True)
+
+ with self.stage('increment'):
+ with terminal.capture() as (out, _):
+ yield cser
+ self._check_inc(out)
+
+ slist = cser.db.series_get_dict()
+ self.assertEqual(1, len(slist))
+
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(2, len(svlist))
+ self.assertEqual(1, svlist[0].idnum)
+ self.assertEqual(1, svlist[0].series_id)
+ self.assertEqual(1, svlist[0].version)
+
+ self.assertEqual(2, svlist[1].idnum)
+ self.assertEqual(1, svlist[1].series_id)
+ self.assertEqual(2, svlist[1].version)
+
+ series = patchstream.get_metadata_for_list('first2', self.gitdir,
+ 1)
+ self.assertEqual('2', series.version)
+
+ series = patchstream.get_metadata_for_list('first', self.gitdir, 1)
+ self.assertNotIn('version', series)
+
+ self.assertEqual('first2', gitutil.get_branch(self.gitdir))
+ yield None
+
+ def test_series_inc(self):
+ """Test incrementing the version"""
+ cor = self.check_series_inc()
+ cser = next(cor)
+
+ cser.increment('first')
+ self.assertFalse(next(cor))
+
+ cor.close()
+
+ def test_series_inc_cmdline(self):
+ """Test incrementing the version with cmdline"""
+ cor = self.check_series_inc()
+ next(cor)
+
+ self.run_args('series', '-s', 'first', 'inc', pwork=True)
+ self.assertFalse(next(cor))
+ cor.close()
+
+ def test_series_inc_no_upstream(self):
+ """Increment a series which has no upstream branch"""
+ cser = self.get_cser()
+
+ gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir,
+ force=True)
+ with terminal.capture():
+ cser.add('first', '', allow_unmarked=True)
+
+ repo = pygit2.init_repository(self.gitdir)
+ upstream = repo.lookup_branch('base')
+ upstream.delete()
+ with terminal.capture():
+ cser.increment('first')
+
+ slist = cser.db.series_get_dict()
+ self.assertEqual(1, len(slist))
+
+ def test_series_inc_dryrun(self):
+ """Test incrementing the version with cmdline"""
+ cser = self.get_cser()
+
+ gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir,
+ force=True)
+ with terminal.capture() as (out, _):
+ cser.add('first', '', allow_unmarked=True)
+
+ with terminal.capture() as (out, _):
+ cser.increment('first', dry_run=True)
+ itr = self._check_inc(out)
+ self.assertEqual('Dry run completed', next(itr))
+
+ # Make sure that nothing was added
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(1, len(svlist))
+ self.assertEqual(1, svlist[0].idnum)
+ self.assertEqual(1, svlist[0].series_id)
+ self.assertEqual(1, svlist[0].version)
+
+ # We should still be on the same branch
+ self.assertEqual('first', gitutil.get_branch(self.gitdir))
+
+ def test_series_dec(self):
+ """Test decrementing the version"""
+ cser = self.get_cser()
+
+ gitutil.checkout('first', self.gitdir, work_tree=self.tmpdir,
+ force=True)
+ with terminal.capture() as (out, _):
+ cser.add('first', '', allow_unmarked=True)
+
+ pclist = cser.get_pcommit_dict()
+ self.assertEqual(2, len(pclist))
+
+ # Try decrementing when there is only one version
+ with self.assertRaises(ValueError) as exc:
+ cser.decrement('first')
+ self.assertEqual("Series 'first' only has one version",
+ str(exc.exception))
+
+ # Add a version; now there should be two
+ with terminal.capture() as (out, _):
+ cser.increment('first')
+ svdict = cser.get_ser_ver_dict()
+ self.assertEqual(2, len(svdict))
+
+ pclist = cser.get_pcommit_dict()
+ self.assertEqual(4, len(pclist))
+
+ # Remove version two, using dry run (i.e. no effect)
+ with terminal.capture() as (out, _):
+ cser.decrement('first', dry_run=True)
+ svdict = cser.get_ser_ver_dict()
+ self.assertEqual(2, len(svdict))
+
+ repo = pygit2.init_repository(self.gitdir)
+ branch = repo.lookup_branch('first2')
+ self.assertTrue(branch)
+ branch_oid = branch.peel(pygit2.enums.ObjectType.COMMIT).oid
+
+ pclist = cser.get_pcommit_dict()
+ self.assertEqual(4, len(pclist))
+
+ # Now remove version two for real
+ with terminal.capture() as (out, _):
+ cser.decrement('first')
+ lines = out.getvalue().splitlines()
+ self.assertEqual(2, len(lines))
+ self.assertEqual("Removing series 'first' v2", lines[0])
+ self.assertEqual(
+ f"Deleted branch 'first2' {str(branch_oid)[:10]}", lines[1])
+
+ svdict = cser.get_ser_ver_dict()
+ self.assertEqual(1, len(svdict))
+
+ pclist = cser.get_pcommit_dict()
+ self.assertEqual(2, len(pclist))
+
+ branch = repo.lookup_branch('first2')
+ self.assertFalse(branch)
+
+ # Removing the only version should not be allowed
+ with self.assertRaises(ValueError) as exc:
+ cser.decrement('first', dry_run=True)
+ self.assertEqual("Series 'first' only has one version",
+ str(exc.exception))
+
+ def test_upstream_add(self):
+ """Test adding an upsream"""
+ cser = self.get_cser()
+
+ cser.upstream_add('us', 'https://one')
+ ulist = cser.get_upstream_dict()
+ self.assertEqual(1, len(ulist))
+ self.assertEqual(('https://one', None), ulist['us'])
+
+ cser.upstream_add('ci', 'git@two')
+ ulist = cser.get_upstream_dict()
+ self.assertEqual(2, len(ulist))
+ self.assertEqual(('https://one', None), ulist['us'])
+ self.assertEqual(('git@two', None), ulist['ci'])
+
+ # Try to add a duplicate
+ with self.assertRaises(ValueError) as exc:
+ cser.upstream_add('ci', 'git@three')
+ self.assertEqual("Upstream 'ci' already exists", str(exc.exception))
+
+ with terminal.capture() as (out, _):
+ cser.upstream_list()
+ lines = out.getvalue().splitlines()
+ self.assertEqual(2, len(lines))
+ self.assertEqual('us https://one', lines[0])
+ self.assertEqual('ci git@two', lines[1])
+
+ def test_upstream_add_cmdline(self):
+ """Test adding an upsream with cmdline"""
+ with terminal.capture():
+ self.run_args('upstream', 'add', 'us', 'https://one')
+
+ with terminal.capture() as (out, _):
+ self.run_args('upstream', 'list')
+ lines = out.getvalue().splitlines()
+ self.assertEqual(1, len(lines))
+ self.assertEqual('us https://one', lines[0])
+
+ def test_upstream_default(self):
+ """Operation of the default upstream"""
+ cser = self.get_cser()
+
+ with self.assertRaises(ValueError) as exc:
+ cser.upstream_set_default('us')
+ self.assertEqual("No such upstream 'us'", str(exc.exception))
+
+ cser.upstream_add('us', 'https://one')
+ cser.upstream_add('ci', 'git@two')
+
+ self.assertIsNone(cser.upstream_get_default())
+
+ cser.upstream_set_default('us')
+ self.assertEqual('us', cser.upstream_get_default())
+
+ cser.upstream_set_default('us')
+
+ cser.upstream_set_default('ci')
+ self.assertEqual('ci', cser.upstream_get_default())
+
+ with terminal.capture() as (out, _):
+ cser.upstream_list()
+ lines = out.getvalue().splitlines()
+ self.assertEqual(2, len(lines))
+ self.assertEqual('us https://one', lines[0])
+ self.assertEqual('ci default git@two', lines[1])
+
+ cser.upstream_set_default(None)
+ self.assertIsNone(cser.upstream_get_default())
+
+ def test_upstream_default_cmdline(self):
+ """Operation of the default upstream on cmdline"""
+ with terminal.capture() as (out, _):
+ self.run_args('upstream', 'default', 'us', expect_ret=1)
+ self.assertEqual("patman: ValueError: No such upstream 'us'",
+ out.getvalue().strip().splitlines()[-1])
+
+ self.run_args('upstream', 'add', 'us', 'https://one')
+ self.run_args('upstream', 'add', 'ci', 'git@two')
+
+ with terminal.capture() as (out, _):
+ self.run_args('upstream', 'default')
+ self.assertEqual('unset', out.getvalue().strip())
+
+ self.run_args('upstream', 'default', 'us')
+ with terminal.capture() as (out, _):
+ self.run_args('upstream', 'default')
+ self.assertEqual('us', out.getvalue().strip())
+
+ self.run_args('upstream', 'default', 'ci')
+ with terminal.capture() as (out, _):
+ self.run_args('upstream', 'default')
+ self.assertEqual('ci', out.getvalue().strip())
+
+ with terminal.capture() as (out, _):
+ self.run_args('upstream', 'default', '--unset')
+ self.assertFalse(out.getvalue().strip())
+
+ with terminal.capture() as (out, _):
+ self.run_args('upstream', 'default')
+ self.assertEqual('unset', out.getvalue().strip())
+
+ def test_upstream_delete(self):
+ """Test operation of the default upstream"""
+ cser = self.get_cser()
+
+ with self.assertRaises(ValueError) as exc:
+ cser.upstream_delete('us')
+ self.assertEqual("No such upstream 'us'", str(exc.exception))
+
+ cser.upstream_add('us', 'https://one')
+ cser.upstream_add('ci', 'git@two')
+
+ cser.upstream_set_default('us')
+ cser.upstream_delete('us')
+ self.assertIsNone(cser.upstream_get_default())
+
+ cser.upstream_delete('ci')
+ ulist = cser.get_upstream_dict()
+ self.assertFalse(ulist)
+
+ def test_upstream_delete_cmdline(self):
+ """Test deleting an upstream"""
+ with terminal.capture() as (out, _):
+ self.run_args('upstream', 'delete', 'us', expect_ret=1)
+ self.assertEqual("patman: ValueError: No such upstream 'us'",
+ out.getvalue().strip().splitlines()[-1])
+
+ self.run_args('us', 'add', 'us', 'https://one')
+ self.run_args('us', 'add', 'ci', 'git@two')
+
+ self.run_args('upstream', 'default', 'us')
+ self.run_args('upstream', 'delete', 'us')
+ with terminal.capture() as (out, _):
+ self.run_args('upstream', 'default', 'us', expect_ret=1)
+ self.assertEqual("patman: ValueError: No such upstream 'us'",
+ out.getvalue().strip())
+
+ self.run_args('upstream', 'delete', 'ci')
+ with terminal.capture() as (out, _):
+ self.run_args('upstream', 'list')
+ self.assertFalse(out.getvalue().strip())
+
+ def test_series_add_mark(self):
+ """Test marking a cseries with Change-Id fields"""
+ cser = self.get_cser()
+
+ with terminal.capture():
+ cser.add('first', '', mark=True)
+
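+ # The database should now hold a Change-Id for each of the two commits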
+ pcdict = cser.get_pcommit_dict()
+
+ series = patchstream.get_metadata('first', 0, 2, git_dir=self.gitdir)
+ self.assertEqual(2, len(series.commits))
+ self.assertIn(1, pcdict)
+ self.assertEqual(1, pcdict[1].idnum)
+ self.assertEqual('i2c: I2C things', pcdict[1].subject)
+ self.assertEqual(1, pcdict[1].svid)
+ self.assertEqual(series.commits[0].change_id, pcdict[1].change_id)
+
+ self.assertIn(2, pcdict)
+ self.assertEqual(2, pcdict[2].idnum)
+ self.assertEqual('spi: SPI fixes', pcdict[2].subject)
+ self.assertEqual(1, pcdict[2].svid)
+ self.assertEqual(series.commits[1].change_id, pcdict[2].change_id)
+
+ def test_series_add_mark_fail(self):
+ """Test marking a cseries when the tree is dirty"""
+ cser = self.get_cser()
+
+ tools.write_file(os.path.join(self.tmpdir, 'fname'), b'123')
+ with terminal.capture():
+ cser.add('first', '', mark=True)
+
+ tools.write_file(os.path.join(self.tmpdir, 'i2c.c'), b'123')
+ with self.assertRaises(ValueError) as exc:
+ with terminal.capture():
+ cser.add('first', '', mark=True)
+ self.assertEqual(
+ "Modified files exist: use 'git status' to check: [' M i2c.c']",
+ str(exc.exception))
+
+ def test_series_add_mark_dry_run(self):
+ """Test marking a cseries with Change-Id fields"""
+ cser = self.get_cser()
+
+ with terminal.capture() as (out, _):
+ cser.add('first', '', mark=True, dry_run=True)
+ itr = iter(out.getvalue().splitlines())
+ self.assertEqual(
+ "Adding series 'first' v1: mark True allow_unmarked False",
+ next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual("Processing 2 commits from branch 'first'",
+ next(itr))
+ self.assertRegex(
+ next(itr), f'- marked: {HASH_RE} as {HASH_RE} i2c: I2C things')
+ self.assertRegex(
+ next(itr), f'- marked: {HASH_RE} as {HASH_RE} spi: SPI fixes')
+ self.assertRegex(
+ next(itr), f'Updating branch first from {HASH_RE} to {HASH_RE}')
+ self.assertEqual("Added series 'first' v1 (2 commits)",
+ next(itr))
+ self.assertEqual('Dry run completed', next(itr))
+
+ # Doing another dry run should produce the same result
+ with terminal.capture() as (out2, _):
+ cser.add('first', '', mark=True, dry_run=True)
+ self.assertEqual(out.getvalue(), out2.getvalue())
+
+ tools.write_file(os.path.join(self.tmpdir, 'i2c.c'), b'123')
+ with terminal.capture() as (out, _):
+ with self.assertRaises(ValueError) as exc:
+ cser.add('first', '', mark=True, dry_run=True)
+ self.assertEqual(
+ "Modified files exist: use 'git status' to check: [' M i2c.c']",
+ str(exc.exception))
+
+ pcdict = cser.get_pcommit_dict()
+ self.assertFalse(pcdict)
+
+ def test_series_add_mark_cmdline(self):
+ """Test marking a cseries with Change-Id fields using the cmdline"""
+ cser = self.get_cser()
+
+ with terminal.capture():
+ self.run_args('series', '-s', 'first', 'add', '-m',
+ '-D', 'my-description', pwork=True)
+
+ pcdict = cser.get_pcommit_dict()
+ self.assertTrue(pcdict[1].change_id)
+ self.assertTrue(pcdict[2].change_id)
+
+ def test_series_add_unmarked_cmdline(self):
+ """Test adding an unmarked cseries using the command line"""
+ cser = self.get_cser()
+
+ with terminal.capture():
+ self.run_args('series', '-s', 'first', 'add', '-M',
+ '-D', 'my-description', pwork=True)
+
+ pcdict = cser.get_pcommit_dict()
+ self.assertFalse(pcdict[1].change_id)
+ self.assertFalse(pcdict[2].change_id)
+
+ def test_series_add_unmarked_bad_cmdline(self):
+ """Test failure to add an unmarked cseries using a bad command line"""
+ self.get_cser()
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'first', 'add',
+ '-D', 'my-description', expect_ret=1, pwork=True)
+ last_line = out.getvalue().splitlines()[-2]
+ self.assertEqual(
+ 'patman: ValueError: 2 commit(s) are unmarked; '
+ 'please use -m or -M', last_line)
+
+ def check_series_unmark(self):
+ """Checker for unmarking tests"""
+ cser = self.get_cser()
+ with self.stage('unmarked commits'):
+ yield cser
+
+ with self.stage('mark commits'):
+ with terminal.capture() as (out, _):
+ yield cser
+
+ with self.stage('unmark: dry run'):
+ with terminal.capture() as (out, _):
+ yield cser
+
+ itr = iter(out.getvalue().splitlines())
+ self.assertEqual(
+ "Unmarking series 'first': allow_unmarked False",
+ next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual("Processing 2 commits from branch 'first'",
+ next(itr))
+ self.assertRegex(
+ next(itr),
+ f'- unmarked: {HASH_RE} as {HASH_RE} i2c: I2C things')
+ self.assertRegex(
+ next(itr),
+ f'- unmarked: {HASH_RE} as {HASH_RE} spi: SPI fixes')
+ self.assertRegex(
+ next(itr), f'Updating branch first from {HASH_RE} to {HASH_RE}')
+ self.assertEqual('Dry run completed', next(itr))
+
+ with self.stage('unmark'):
+ with terminal.capture() as (out, _):
+ yield cser
+ self.assertIn('- unmarked', out.getvalue())
+
+ with self.stage('unmark: allow unmarked'):
+ with terminal.capture() as (out, _):
+ yield cser
+ self.assertIn('- no mark', out.getvalue())
+
+ yield None
+
+ def test_series_unmark(self):
+ """Test unmarking a cseries, i.e. removing Change-Id fields"""
+ cor = self.check_series_unmark()
+ cser = next(cor)
+
+ # check the allow_unmarked flag
+ with terminal.capture():
+ with self.assertRaises(ValueError) as exc:
+ cser.unmark('first', dry_run=True)
+ self.assertEqual('Unmarked commits 2/2', str(exc.exception))
+
+ # mark commits
+ cser = next(cor)
+ cser.add('first', '', mark=True)
+
+ # unmark: dry run
+ cser = next(cor)
+ cser.unmark('first', dry_run=True)
+
+ # unmark
+ cser = next(cor)
+ cser.unmark('first')
+
+ # unmark: allow unmarked
+ cser = next(cor)
+ cser.unmark('first', allow_unmarked=True)
+
+ self.assertFalse(next(cor))
+
+ def test_series_unmark_cmdline(self):
+ """Test the unmark command"""
+ cor = self.check_series_unmark()
+ next(cor)
+
+ # check the allow_unmarked flag
+ with terminal.capture() as (out, _):
+ self.run_args('series', 'unmark', expect_ret=1, pwork=True)
+ self.assertIn('Unmarked commits 2/2', out.getvalue())
+
+ # mark commits
+ next(cor)
+ self.run_args('series', '-s', 'first', 'add', '-D', '', '--mark',
+ pwork=True)
+
+ # unmark: dry run
+ next(cor)
+ self.run_args('series', '-s', 'first', '-n', 'unmark', pwork=True)
+
+ # unmark
+ next(cor)
+ self.run_args('series', '-s', 'first', 'unmark', pwork=True)
+
+ # unmark: allow unmarked
+ next(cor)
+ self.run_args('series', '-s', 'first', 'unmark', '--allow-unmarked',
+ pwork=True)
+
+ self.assertFalse(next(cor))
+
+ def test_series_unmark_middle(self):
+ """Test unmarking with Change-Id fields not last in the commit"""
+ cser = self.get_cser()
+ with terminal.capture():
+ cser.add('first', '', allow_unmarked=True)
+
+ # Add some change IDs in the middle of the commit message
+ with terminal.capture():
+ name, ser, _, _ = cser.prep_series('first')
+ old_msgs = []
+ for vals in cser.process_series(name, ser):
+ old_msgs.append(vals.msg)
+ lines = vals.msg.splitlines()
+ change_id = cser.make_change_id(vals.commit)
+ extra = [f'{cser_helper.CHANGE_ID_TAG}: {change_id}']
+ vals.msg = '\n'.join(lines[:2] + extra + lines[2:]) + '\n'
+
+ with terminal.capture():
+ cser.unmark('first')
+
+ # We should get back the original commit message
+ series = patchstream.get_metadata('first', 0, 2, git_dir=self.gitdir)
+ self.assertEqual(old_msgs[0], series.commits[0].msg)
+ self.assertEqual(old_msgs[1], series.commits[1].msg)
+
+ def check_series_mark(self):
+ """Checker for marking tests"""
+ cser = self.get_cser()
+ yield cser
+
+ # Start with a dry run, which should do nothing
+ with self.stage('dry run'):
+ with terminal.capture():
+ yield cser
+
+ series = patchstream.get_metadata_for_list('first', self.gitdir, 2)
+ self.assertEqual(2, len(series.commits))
+ self.assertFalse(series.commits[0].change_id)
+ self.assertFalse(series.commits[1].change_id)
+
+ # Now do a real run
+ with self.stage('real run'):
+ with terminal.capture():
+ yield cser
+
+ series = patchstream.get_metadata_for_list('first', self.gitdir, 2)
+ self.assertEqual(2, len(series.commits))
+ self.assertTrue(series.commits[0].change_id)
+ self.assertTrue(series.commits[1].change_id)
+
+ # Try to mark again, which should fail
+ with self.stage('mark twice'):
+ with terminal.capture():
+ with self.assertRaises(ValueError) as exc:
+ cser.mark('first', dry_run=False)
+ self.assertEqual('Marked commits 2/2', str(exc.exception))
+
+ # Use the --allow-marked flag to make it succeed
+ with self.stage('mark twice with --allow-marked'):
+ with terminal.capture():
+ yield cser
+ self.assertEqual('Marked commits 2/2', str(exc.exception))
+
+ series2 = patchstream.get_metadata_for_list('first', self.gitdir,
+ 2)
+ self.assertEqual(2, len(series2.commits))
+ self.assertEqual(series.commits[0].change_id,
+ series2.commits[0].change_id)
+ self.assertEqual(series.commits[1].change_id,
+ series2.commits[1].change_id)
+
+ yield None
+
+ def test_series_mark(self):
+ """Test marking a cseries, i.e. adding Change-Id fields"""
+ cor = self.check_series_mark()
+ cser = next(cor)
+
+ # Start with a dry run, which should do nothing
+ cser = next(cor)
+ cser.mark('first', dry_run=True)
+
+ # Now do a real run
+ cser = next(cor)
+ cser.mark('first', dry_run=False)
+
+ # Try to mark again, which should fail
+ with terminal.capture():
+ with self.assertRaises(ValueError) as exc:
+ cser.mark('first', dry_run=False)
+ self.assertEqual('Marked commits 2/2', str(exc.exception))
+
+ # Use the --allow-marked flag to make it succeed
+ cser = next(cor)
+ cser.mark('first', allow_marked=True, dry_run=False)
+
+ self.assertFalse(next(cor))
+
+ def test_series_mark_cmdline(self):
+ """Test marking a cseries, i.e. adding Change-Id fields"""
+ cor = self.check_series_mark()
+ next(cor)
+
+ # Start with a dry run, which should do nothing
+ next(cor)
+ self.run_args('series', '-n', '-s', 'first', 'mark', pwork=True)
+
+ # Now do a real run
+ next(cor)
+ self.run_args('series', '-s', 'first', 'mark', pwork=True)
+
+ # Try to mark again, which should fail
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'first', 'mark', expect_ret=1,
+ pwork=True)
+ self.assertIn('Marked commits 2/2', out.getvalue())
+
+ # Use the --allow-marked flag to make it succeed
+ next(cor)
+ self.run_args('series', '-s', 'first', 'mark', '--allow-marked',
+ pwork=True)
+ self.assertFalse(next(cor))
+
+ def test_series_remove(self):
+ """Test removing a series"""
+ cser = self.get_cser()
+
+ with self.stage('remove non-existent series'):
+ with self.assertRaises(ValueError) as exc:
+ cser.remove('first')
+ self.assertEqual("No such series 'first'", str(exc.exception))
+
+ with self.stage('add'):
+ with terminal.capture() as (out, _):
+ cser.add('first', '', mark=True)
+ self.assertTrue(cser.db.series_get_dict())
+ pclist = cser.get_pcommit_dict()
+ self.assertEqual(2, len(pclist))
+
+ with self.stage('remove'):
+ with terminal.capture() as (out, _):
+ cser.remove('first')
+ self.assertEqual("Removed series 'first'", out.getvalue().strip())
+ self.assertFalse(cser.db.series_get_dict())
+
+ pclist = cser.get_pcommit_dict()
+ self.assertFalse(len(pclist))
+
+ def test_series_remove_cmdline(self):
+ """Test removing a series using the command line"""
+ cser = self.get_cser()
+
+ with self.stage('remove non-existent series'):
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'first', 'rm', expect_ret=1,
+ pwork=True)
+ self.assertEqual("patman: ValueError: No such series 'first'",
+ out.getvalue().strip())
+
+ with self.stage('add'):
+ with terminal.capture() as (out, _):
+ cser.add('first', '', mark=True)
+ self.assertTrue(cser.db.series_get_dict())
+
+ with self.stage('remove'):
+ with terminal.capture() as (out, _):
+ cser.remove('first')
+ self.assertEqual("Removed series 'first'", out.getvalue().strip())
+ self.assertFalse(cser.db.series_get_dict())
+
+ def check_series_remove_multiple(self):
+ """Check for removing a series with more than one version"""
+ cser = self.get_cser()
+
+ with self.stage('setup'):
+ self.add_first2(True)
+
+ with terminal.capture() as (out, _):
+ cser.add(None, '', mark=True)
+ cser.add('first', '', mark=True)
+ self.assertTrue(cser.db.series_get_dict())
+ pclist = cser.get_pcommit_dict()
+ self.assertEqual(4, len(pclist))
+
+ # Do a dry-run removal
+ with self.stage('dry run'):
+ with terminal.capture() as (out, _):
+ yield cser
+ self.assertEqual("Removed version 1 from series 'first'\n"
+ 'Dry run completed', out.getvalue().strip())
+ self.assertEqual({'first'}, cser.db.series_get_dict().keys())
+
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(2, len(svlist))
+ self.assertEqual(1, svlist[0].idnum)
+ self.assertEqual(1, svlist[0].series_id)
+ self.assertEqual(2, svlist[0].version)
+
+ self.assertEqual(2, svlist[1].idnum)
+ self.assertEqual(1, svlist[1].series_id)
+ self.assertEqual(1, svlist[1].version)
+
+ # Now remove for real
+ with self.stage('real'):
+ with terminal.capture() as (out, _):
+ yield cser
+ self.assertEqual("Removed version 1 from series 'first'",
+ out.getvalue().strip())
+ self.assertEqual({'first'}, cser.db.series_get_dict().keys())
+ plist = cser.get_ser_ver_list()
+ self.assertEqual(1, len(plist))
+ pclist = cser.get_pcommit_dict()
+ self.assertEqual(2, len(pclist))
+
+ with self.stage('remove only version'):
+ yield cser
+ self.assertEqual({'first'}, cser.db.series_get_dict().keys())
+
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(1, len(svlist))
+ self.assertEqual(1, svlist[0].idnum)
+ self.assertEqual(1, svlist[0].series_id)
+ self.assertEqual(2, svlist[0].version)
+
+ with self.stage('remove series (dry run)'):
+ with terminal.capture() as (out, _):
+ yield cser
+ self.assertEqual("Removed series 'first'\nDry run completed",
+ out.getvalue().strip())
+ self.assertTrue(cser.db.series_get_dict())
+ self.assertTrue(cser.get_ser_ver_list())
+
+ with self.stage('remove series'):
+ with terminal.capture() as (out, _):
+ yield cser
+ self.assertEqual("Removed series 'first'", out.getvalue().strip())
+ self.assertFalse(cser.db.series_get_dict())
+ self.assertFalse(cser.get_ser_ver_list())
+
+ yield None
+
+ def test_series_remove_multiple(self):
+ """Test removing a series with more than one version"""
+ cor = self.check_series_remove_multiple()
+ cser = next(cor)
+
+ # Do a dry-run removal
+ cser.version_remove('first', 1, dry_run=True)
+ cser = next(cor)
+
+ # Now remove for real
+ cser.version_remove('first', 1)
+ cser = next(cor)
+
+ # Remove only version
+ with self.assertRaises(ValueError) as exc:
+ cser.version_remove('first', 2, dry_run=True)
+ self.assertEqual(
+ "Series 'first' only has one version: remove the series",
+ str(exc.exception))
+ cser = next(cor)
+
+ # Remove series (dry run)
+ cser.remove('first', dry_run=True)
+ cser = next(cor)
+
+ # Remove series (real)
+ cser.remove('first')
+
+ self.assertFalse(next(cor))
+ cor.close()
+
+ def test_series_remove_multiple_cmdline(self):
+ """Test removing a series with more than one version on cmdline"""
+ cor = self.check_series_remove_multiple()
+ next(cor)
+
+ # Do a dry-run removal
+ self.run_args('series', '-n', '-s', 'first', '-V', '1', 'rm-version',
+ pwork=True)
+ next(cor)
+
+ # Now remove for real
+ self.run_args('series', '-s', 'first', '-V', '1', 'rm-version',
+ pwork=True)
+ next(cor)
+
+ # Remove only version
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-n', '-s', 'first', '-V', '2',
+ 'rm-version', expect_ret=1, pwork=True)
+ self.assertIn(
+ "Series 'first' only has one version: remove the series",
+ out.getvalue().strip())
+ next(cor)
+
+ # Remove series (dry run)
+ self.run_args('series', '-n', '-s', 'first', 'rm', pwork=True)
+ next(cor)
+
+ # Remove series (real)
+ self.run_args('series', '-s', 'first', 'rm', pwork=True)
+
+ self.assertFalse(next(cor))
+ cor.close()
+
+ def test_patchwork_set_project(self):
+ """Test setting the project ID"""
+ cser = self.get_cser()
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ with terminal.capture() as (out, _):
+ cser.project_set(pwork, 'U-Boot')
+ self.assertEqual(
+ f"Project 'U-Boot' patchwork-ID {self.PROJ_ID} link-name uboot",
+ out.getvalue().strip())
+
+ def test_patchwork_project_get(self):
+ """Test setting the project ID"""
+ cser = self.get_cser()
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ self.assertFalse(cser.project_get())
+ with terminal.capture() as (out, _):
+ cser.project_set(pwork, 'U-Boot')
+ self.assertEqual(
+ f"Project 'U-Boot' patchwork-ID {self.PROJ_ID} link-name uboot",
+ out.getvalue().strip())
+
+ name, pwid, link_name = cser.project_get()
+ self.assertEqual('U-Boot', name)
+ self.assertEqual(self.PROJ_ID, pwid)
+ self.assertEqual('uboot', link_name)
+
+ def test_patchwork_project_get_cmdline(self):
+ """Test setting the project ID"""
+ cser = self.get_cser()
+
+ self.assertFalse(cser.project_get())
+
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ with terminal.capture() as (out, _):
+ self.run_args('-P', 'https://url', 'patchwork', 'set-project',
+ 'U-Boot', pwork=pwork)
+ self.assertEqual(
+ f"Project 'U-Boot' patchwork-ID {self.PROJ_ID} link-name uboot",
+ out.getvalue().strip())
+
+ name, pwid, link_name = cser.project_get()
+ self.assertEqual('U-Boot', name)
+ self.assertEqual(self.PROJ_ID, pwid)
+ self.assertEqual('uboot', link_name)
+
+ with terminal.capture() as (out, _):
+ self.run_args('-P', 'https://url', 'patchwork', 'get-project')
+ self.assertEqual(
+ f"Project 'U-Boot' patchwork-ID {self.PROJ_ID} link-name uboot",
+ out.getvalue().strip())
+
+ def check_series_list_patches(self):
+ """Test listing the patches for a series"""
+ cser = self.get_cser()
+
+ with self.stage('setup'):
+ with terminal.capture() as (out, _):
+ cser.add(None, '', allow_unmarked=True)
+ cser.add('second', allow_unmarked=True)
+ target = self.repo.lookup_reference('refs/heads/second')
+ self.repo.checkout(
+ target, strategy=pygit2.enums.CheckoutStrategy.FORCE)
+ cser.increment('second')
+
+ with self.stage('list first'):
+ with terminal.capture() as (out, _):
+ yield cser
+ itr = iter(out.getvalue().splitlines())
+ self.assertEqual("Branch 'first' (total 2): 2:unknown", next(itr))
+ self.assertIn('PatchId', next(itr))
+ self.assertRegex(next(itr), r' 0 .* i2c: I2C things')
+ self.assertRegex(next(itr), r' 1 .* spi: SPI fixes')
+
+ with self.stage('list second2'):
+ with terminal.capture() as (out, _):
+ yield cser
+ itr = iter(out.getvalue().splitlines())
+ self.assertEqual(
+ "Branch 'second2' (total 3): 3:unknown", next(itr))
+ self.assertIn('PatchId', next(itr))
+ self.assertRegex(
+ next(itr), ' 0 .* video: Some video improvements')
+ self.assertRegex(next(itr), ' 1 .* serial: Add a serial driver')
+ self.assertRegex(next(itr), ' 2 .* bootm: Make it boot')
+
+ yield None
+
+ def test_series_list_patches(self):
+ """Test listing the patches for a series"""
+ cor = self.check_series_list_patches()
+ cser = next(cor)
+
+ # list first
+ cser.list_patches('first', 1)
+ cser = next(cor)
+
+ # list second2
+ cser.list_patches('second2', 2)
+ self.assertFalse(next(cor))
+ cor.close()
+
+ def test_series_list_patches_cmdline(self):
+ """Test listing the patches for a series using the cmdline"""
+ cor = self.check_series_list_patches()
+ next(cor)
+
+ # list first
+ self.run_args('series', '-s', 'first', 'patches', pwork=True)
+ next(cor)
+
+ # list second2
+ self.run_args('series', '-s', 'second', '-V', '2', 'patches',
+ pwork=True)
+ self.assertFalse(next(cor))
+ cor.close()
+
+ def test_series_list_patches_detail(self):
+ """Test listing the patches for a series"""
+ cser = self.get_cser()
+ with terminal.capture():
+ cser.add(None, '', allow_unmarked=True)
+ cser.add('second', allow_unmarked=True)
+ target = self.repo.lookup_reference('refs/heads/second')
+ self.repo.checkout(
+ target, strategy=pygit2.enums.CheckoutStrategy.FORCE)
+ cser.increment('second')
+
+ with terminal.capture() as (out, _):
+ cser.list_patches('first', 1, show_commit=True)
+ expect = r'''Branch 'first' (total 2): 2:unknown
+Seq State Com PatchId Commit Subject
+ 0 unknown - .* i2c: I2C things
+
+commit .*
+Author: Test user <test@email.com>
+Date: .*
+
+ i2c: I2C things
+
+ This has some stuff to do with I2C
+
+ i2c.c | 2 ++
+ 1 file changed, 2 insertions(+)
+
+
+ 1 unknown - .* spi: SPI fixes
+
+commit .*
+Author: Test user <test@email.com>
+Date: .*
+
+ spi: SPI fixes
+
+ SPI needs some fixes
+ and here they are
+
+ Signed-off-by: Lord Edmund Blackaddër <weasel@blackadder.org>
+
+ Series-to: u-boot
+ Commit-notes:
+ title of the series
+ This is the cover letter for the series
+ with various details
+ END
+
+ spi.c | 3 +++
+ 1 file changed, 3 insertions(+)
+'''
+ itr = iter(out.getvalue().splitlines())
+ for seq, eline in enumerate(expect.splitlines()):
+ line = next(itr).rstrip()
+ if '*' in eline:
+ self.assertRegex(line, eline, f'line {seq + 1}')
+ else:
+ self.assertEqual(eline, line, f'line {seq + 1}')
+
+ # Show just the patch; this should exclude the commit message
+ with terminal.capture() as (out, _):
+ cser.list_patches('first', 1, show_patch=True)
+ chk = out.getvalue()
+ self.assertIn('SPI fixes', chk) # subject
+ self.assertNotIn('SPI needs some fixes', chk) # commit body
+ self.assertIn('make SPI work', chk) # patch body
+
+ # Show both
+ with terminal.capture() as (out, _):
+ cser.list_patches('first', 1, show_commit=True, show_patch=True)
+ chk = out.getvalue()
+ self.assertIn('SPI fixes', chk) # subject
+ self.assertIn('SPI needs some fixes', chk) # commit body
+ self.assertIn('make SPI work', chk) # patch body
+
+ def check_series_gather(self):
+ """Checker for gathering tags for a series"""
+ cser = self.get_cser()
+ with self.stage('setup'):
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ self.assertFalse(cser.project_get())
+ cser.project_set(pwork, 'U-Boot', quiet=True)
+
+ with terminal.capture() as (out, _):
+ cser.add('second', 'description', allow_unmarked=True)
+
+ ser = cser.get_series_by_name('second')
+ pwid = cser.get_series_svid(ser.idnum, 1)
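+ # ser_ver ID for this (series, version); the pcommit records
+ # checked below are keyed off it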
+
+ # First do a dry run
+ with self.stage('gather: dry run'):
+ with terminal.capture() as (out, _):
+ yield cser, pwork
+ lines = out.getvalue().splitlines()
+ self.assertEqual(
+ f"Updating series 'second' version 1 from link "
+ f"'{self.SERIES_ID_SECOND_V1}'",
+ lines[0])
+ self.assertEqual('3 patches updated (7 requests)', lines[1])
+ self.assertEqual('Dry run completed', lines[2])
+ self.assertEqual(3, len(lines))
+
+ pwc = cser.get_pcommit_dict(pwid)
+ self.assertIsNone(pwc[0].state)
+ self.assertIsNone(pwc[1].state)
+ self.assertIsNone(pwc[2].state)
+
+ # Now try it again, gathering tags
+ with self.stage('gather tags: dry run'):
+ with terminal.capture() as (out, _):
+ yield cser, pwork
+ lines = out.getvalue().splitlines()
+ itr = iter(lines)
+ self.assertEqual(
+ f"Updating series 'second' version 1 from link "
+ f"'{self.SERIES_ID_SECOND_V1}'",
+ next(itr))
+ self.assertEqual(' 1 video: Some video improvements', next(itr))
+ self.assertEqual(' + Reviewed-by: Fred Bloggs <fred@bloggs.com>',
+ next(itr))
+ self.assertEqual(' 2 serial: Add a serial driver', next(itr))
+ self.assertEqual(' 3 bootm: Make it boot', next(itr))
+
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual("Processing 3 commits from branch 'second'",
+ next(itr))
+ self.assertRegex(
+ next(itr),
+ f'- added 1 tag: {HASH_RE} as {HASH_RE} '
+ 'video: Some video improvements')
+ self.assertRegex(
+ next(itr),
+ f"- upd links '1:456': {HASH_RE} as {HASH_RE} "
+ 'serial: Add a serial driver')
+ self.assertRegex(
+ next(itr),
+ f'- {HASH_RE} as {HASH_RE} '
+ 'bootm: Make it boot')
+ self.assertRegex(
+ next(itr),
+ f'Updating branch second from {HASH_RE} to {HASH_RE}')
+ self.assertEqual('3 patches updated (7 requests)', next(itr))
+ self.assertEqual('Dry run completed', next(itr))
+ self.assert_finished(itr)
+
+ # Make sure that no tags were added to the branch
+ series = patchstream.get_metadata_for_list('second', self.gitdir,
+ 3)
+ for cmt in series.commits:
+ self.assertFalse(cmt.rtags,
+ f'Commit {cmt.subject} rtags {cmt.rtags}')
+
+ # Now do it for real
+ with self.stage('gather: real'):
+ with terminal.capture() as (out, _):
+ yield cser, pwork
+ lines2 = out.getvalue().splitlines()
+ self.assertEqual(lines2, lines[:-1])
+
+ # Make sure that the tags were added to the branch
+ series = patchstream.get_metadata_for_list('second', self.gitdir,
+ 3)
+ self.assertEqual(
+ {'Reviewed-by': {'Fred Bloggs <fred@bloggs.com>'}},
+ series.commits[0].rtags)
+ self.assertFalse(series.commits[1].rtags)
+ self.assertFalse(series.commits[2].rtags)
+
+ # Make sure the status was updated
+ pwc = cser.get_pcommit_dict(pwid)
+ self.assertEqual('accepted', pwc[0].state)
+ self.assertEqual('changes-requested', pwc[1].state)
+ self.assertEqual('rejected', pwc[2].state)
+
+ yield None
+
+ def test_series_gather(self):
+ """Test gathering tags for a series"""
+ cor = self.check_series_gather()
+ cser, pwork = next(cor)
+
+ # gather without tags (dry run)
+ cser.gather(pwork, 'second', None, False, False, False, dry_run=True)
+ cser, pwork = next(cor)
+
+ # gather tags (dry run)
+ cser.gather(pwork, 'second', None, False, False, True, dry_run=True)
+ cser, pwork = next(cor)
+
+ # gather tags (for real)
+ cser.gather(pwork, 'second', None, False, False, True)
+
+ self.assertFalse(next(cor))
+
+ def test_series_gather_cmdline(self):
+ """Test gathering tags for a series with cmdline"""
+ cor = self.check_series_gather()
+ _, pwork = next(cor)
+
+ # gather without tags (dry run)
+ self.run_args(
+ 'series', '-n', '-s', 'second', 'gather', '-G', pwork=pwork)
+
+ # gather tags (dry run)
+ _, pwork = next(cor)
+ self.run_args('series', '-n', '-s', 'second', 'gather', pwork=pwork)
+
+ # gather tags (for real)
+ _, pwork = next(cor)
+ self.run_args('series', '-s', 'second', 'gather', pwork=pwork)
+
+ self.assertFalse(next(cor))
+
+ def check_series_gather_all(self):
+ """Gather all series at once"""
+ with self.stage('setup'):
+ cser, pwork = self.setup_second(False)
+
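+ # Create three versions of 'first' with a link for each, then
+ # auto-link 'second' v2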
+ with terminal.capture():
+ cser.add('first', 'description', allow_unmarked=True)
+ cser.increment('first')
+ cser.increment('first')
+ cser.link_set('first', 1, '123', True)
+ cser.link_set('first', 2, '1234', True)
+ cser.link_set('first', 3, f'{self.SERIES_ID_FIRST_V3}', True)
+ cser.link_auto(pwork, 'second', 2, True)
+
+ with self.stage('no options'):
+ with terminal.capture() as (out, _):
+ yield cser, pwork
+ self.assertEqual(
+ "Syncing 'first' v3\n"
+ "Syncing 'second' v2\n"
+ '\n'
+ '5 patches and 2 cover letters updated, 0 missing links '
+ '(14 requests)\n'
+ 'Dry run completed',
+ out.getvalue().strip())
+
+ with self.stage('gather'):
+ with terminal.capture() as (out, _):
+ yield cser, pwork
+ lines = out.getvalue().splitlines()
+ itr = iter(lines)
+ self.assertEqual("Syncing 'first' v3", next(itr))
+ self.assertEqual(' 1 i2c: I2C things', next(itr))
+ self.assertEqual(
+ ' + Tested-by: Mary Smith <msmith@wibble.com> # yak',
+ next(itr))
+ self.assertEqual(' 2 spi: SPI fixes', next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual(
+ "Processing 2 commits from branch 'first3'", next(itr))
+ self.assertRegex(
+ next(itr),
+ f'- added 1 tag: {HASH_RE} as {HASH_RE} i2c: I2C things')
+ self.assertRegex(
+ next(itr),
+ f"- upd links '3:31': {HASH_RE} as {HASH_RE} spi: SPI fixes")
+ self.assertRegex(
+ next(itr),
+ f'Updating branch first3 from {HASH_RE} to {HASH_RE}')
+ self.assertEqual('', next(itr))
+
+ self.assertEqual("Syncing 'second' v2", next(itr))
+ self.assertEqual(' 1 video: Some video improvements', next(itr))
+ self.assertEqual(
+ ' + Reviewed-by: Fred Bloggs <fred@bloggs.com>', next(itr))
+ self.assertEqual(' 2 serial: Add a serial driver', next(itr))
+ self.assertEqual(' 3 bootm: Make it boot', next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual(
+ "Processing 3 commits from branch 'second2'", next(itr))
+ self.assertRegex(
+ next(itr),
+ f'- added 1 tag: {HASH_RE} as {HASH_RE} '
+ 'video: Some video improvements')
+ self.assertRegex(
+ next(itr),
+ f"- upd links '2:457 1:456': {HASH_RE} as {HASH_RE} "
+ 'serial: Add a serial driver')
+ self.assertRegex(
+ next(itr),
+ f'- {HASH_RE} as {HASH_RE} '
+ 'bootm: Make it boot')
+ self.assertRegex(
+ next(itr),
+ f'Updating branch second2 from {HASH_RE} to {HASH_RE}')
+ self.assertEqual('', next(itr))
+ self.assertEqual(
+ '5 patches and 2 cover letters updated, 0 missing links '
+ '(14 requests)',
+ next(itr))
+ self.assertEqual('Dry run completed', next(itr))
+ self.assert_finished(itr)
+
+ with self.stage('gather, patch comments, !dry_run'):
+ with terminal.capture() as (out, _):
+ yield cser, pwork
+ lines = out.getvalue().splitlines()
+ itr = iter(lines)
+ self.assertEqual("Syncing 'first' v1", next(itr))
+ self.assertEqual(' 1 i2c: I2C things', next(itr))
+ self.assertEqual(
+ ' + Tested-by: Mary Smith <msmith@wibble.com> # yak',
+ next(itr))
+ self.assertEqual(' 2 spi: SPI fixes', next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual(
+ "Processing 2 commits from branch 'first'", next(itr))
+ self.assertRegex(
+ next(itr),
+ f'- added 1 tag: {HASH_RE} as {HASH_RE} i2c: I2C things')
+ self.assertRegex(
+ next(itr),
+ f"- upd links '1:123': {HASH_RE} as {HASH_RE} spi: SPI fixes")
+ self.assertRegex(
+ next(itr),
+ f'Updating branch first from {HASH_RE} to {HASH_RE}')
+ self.assertEqual('', next(itr))
+
+ self.assertEqual("Syncing 'first' v2", next(itr))
+ self.assertEqual(' 1 i2c: I2C things', next(itr))
+ self.assertEqual(
+ ' + Tested-by: Mary Smith <msmith@wibble.com> # yak',
+ next(itr))
+ self.assertEqual(' 2 spi: SPI fixes', next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual(
+ "Processing 2 commits from branch 'first2'", next(itr))
+ self.assertRegex(
+ next(itr),
+ f'- added 1 tag: {HASH_RE} as {HASH_RE} '
+ 'i2c: I2C things')
+ self.assertRegex(
+ next(itr),
+ f"- upd links '2:1234': {HASH_RE} as {HASH_RE} spi: SPI fixes")
+ self.assertRegex(
+ next(itr),
+ f'Updating branch first2 from {HASH_RE} to {HASH_RE}')
+ self.assertEqual('', next(itr))
+ self.assertEqual("Syncing 'first' v3", next(itr))
+ self.assertEqual(' 1 i2c: I2C things', next(itr))
+ self.assertEqual(
+ ' + Tested-by: Mary Smith <msmith@wibble.com> # yak',
+ next(itr))
+ self.assertEqual(' 2 spi: SPI fixes', next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual(
+ "Processing 2 commits from branch 'first3'", next(itr))
+ self.assertRegex(
+ next(itr),
+ f'- added 1 tag: {HASH_RE} as {HASH_RE} i2c: I2C things')
+ self.assertRegex(
+ next(itr),
+ f"- upd links '3:31': {HASH_RE} as {HASH_RE} spi: SPI fixes")
+ self.assertRegex(
+ next(itr),
+ f'Updating branch first3 from {HASH_RE} to {HASH_RE}')
+ self.assertEqual('', next(itr))
+
+ self.assertEqual("Syncing 'second' v1", next(itr))
+ self.assertEqual(' 1 video: Some video improvements', next(itr))
+ self.assertEqual(
+ ' + Reviewed-by: Fred Bloggs <fred@bloggs.com>', next(itr))
+ self.assertEqual(
+ 'Review: Fred Bloggs <fred@bloggs.com>', next(itr))
+ self.assertEqual(' > This was my original patch', next(itr))
+ self.assertEqual(' > which is being quoted', next(itr))
+ self.assertEqual(
+ ' I like the approach here and I would love to see more '
+ 'of it.', next(itr))
+ self.assertEqual('', next(itr))
+ self.assertEqual(' 2 serial: Add a serial driver', next(itr))
+ self.assertEqual(' 3 bootm: Make it boot', next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual(
+ "Processing 3 commits from branch 'second'", next(itr))
+ self.assertRegex(
+ next(itr),
+ f'- added 1 tag: {HASH_RE} as {HASH_RE} '
+ 'video: Some video improvements')
+ self.assertRegex(
+ next(itr),
+ f"- upd links '1:456': {HASH_RE} as {HASH_RE} "
+ 'serial: Add a serial driver')
+ self.assertRegex(
+ next(itr),
+ f'- {HASH_RE} as {HASH_RE} '
+ 'bootm: Make it boot')
+ self.assertRegex(
+ next(itr),
+ f'Updating branch second from {HASH_RE} to {HASH_RE}')
+ self.assertEqual('', next(itr))
+
+ self.assertEqual("Syncing 'second' v2", next(itr))
+ self.assertEqual(' 1 video: Some video improvements', next(itr))
+ self.assertEqual(
+ ' + Reviewed-by: Fred Bloggs <fred@bloggs.com>', next(itr))
+ self.assertEqual(
+ 'Review: Fred Bloggs <fred@bloggs.com>', next(itr))
+ self.assertEqual(' > This was my original patch', next(itr))
+ self.assertEqual(' > which is being quoted', next(itr))
+ self.assertEqual(
+ ' I like the approach here and I would love to see more '
+ 'of it.', next(itr))
+ self.assertEqual('', next(itr))
+ self.assertEqual(' 2 serial: Add a serial driver', next(itr))
+ self.assertEqual(' 3 bootm: Make it boot', next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual(
+ "Processing 3 commits from branch 'second2'", next(itr))
+ self.assertRegex(
+ next(itr),
+ f'- added 1 tag: {HASH_RE} as {HASH_RE} '
+ 'video: Some video improvements')
+ self.assertRegex(
+ next(itr),
+ f"- upd links '2:457 1:456': {HASH_RE} as {HASH_RE} "
+ 'serial: Add a serial driver')
+ self.assertRegex(
+ next(itr),
+ f'- {HASH_RE} as {HASH_RE} '
+ 'bootm: Make it boot')
+ self.assertRegex(
+ next(itr),
+ f'Updating branch second2 from {HASH_RE} to {HASH_RE}')
+ self.assertEqual('', next(itr))
+ self.assertEqual(
+ '12 patches and 3 cover letters updated, 0 missing links '
+ '(32 requests)', next(itr))
+ self.assert_finished(itr)
+
+ yield None
+
+ def test_series_gather_all(self):
+ """Gather all series at once"""
+ cor = self.check_series_gather_all()
+ cser, pwork = next(cor)
+
+ # no options
+ cser.gather_all(pwork, False, True, False, False, dry_run=True)
+ cser, pwork = next(cor)
+
+ # gather
+ cser.gather_all(pwork, False, False, False, True, dry_run=True)
+ cser, pwork = next(cor)
+
+ # gather, patch comments, !dry_run
+ cser.gather_all(pwork, True, False, True, True)
+
+ self.assertFalse(next(cor))
+
+ def test_series_gather_all_cmdline(self):
+ """Sync all series at once using cmdline"""
+ cor = self.check_series_gather_all()
+ _, pwork = next(cor)
+
+ # no options
+ self.run_args('series', '-n', '-s', 'second', 'gather-all', '-G',
+ pwork=pwork)
+ _, pwork = next(cor)
+
+ # gather
+ self.run_args('series', '-n', '-s', 'second', 'gather-all',
+ pwork=pwork)
+ _, pwork = next(cor)
+
+ # gather, patch comments, !dry_run
+ self.run_args('series', '-s', 'second', 'gather-all', '-a', '-c',
+ pwork=pwork)
+
+ self.assertFalse(next(cor))
+
+ def _check_second(self, itr, show_all):
+ """Check output from a 'progress' command
+
+ Args:
+ itr (Iterator): Contains the output lines to check
+ show_all (bool): True if all versions are being shown, not just
+ latest
+ """
+ self.assertEqual('second: Series for my board (versions: 1 2)',
+ next(itr))
+ if show_all:
+ self.assertEqual("Branch 'second' (total 3): 3:unknown",
+ next(itr))
+ self.assertIn('PatchId', next(itr))
+ self.assertRegex(
+ next(itr),
+ ' 0 unknown - .* video: Some video improvements')
+ self.assertRegex(
+ next(itr),
+ ' 1 unknown - .* serial: Add a serial driver')
+ self.assertRegex(
+ next(itr),
+ ' 2 unknown - .* bootm: Make it boot')
+ self.assertEqual('', next(itr))
+ self.assertEqual(
+ "Branch 'second2' (total 3): 1:accepted 1:changes 1:rejected",
+ next(itr))
+ self.assertIn('PatchId', next(itr))
+ self.assertEqual(
+ 'Cov 2 139 '
+ 'The name of the cover letter', next(itr))
+ self.assertRegex(
+ next(itr),
+ ' 0 accepted 2 110 .* video: Some video improvements')
+ self.assertRegex(
+ next(itr),
+ ' 1 changes 111 .* serial: Add a serial driver')
+ self.assertRegex(
+ next(itr),
+ ' 2 rejected 3 112 .* bootm: Make it boot')
+
+ def test_series_progress(self):
+ """Test showing progress for a cseries"""
+ self.setup_second()
+ self.db_close()
+
+ with self.stage('latest versions'):
+ args = Namespace(subcmd='progress', series='second',
+ show_all_versions=False, list_patches=True)
+ with terminal.capture() as (out, _):
+ control.do_series(args, test_db=self.tmpdir, pwork=True)
+ lines = iter(out.getvalue().splitlines())
+ self._check_second(lines, False)
+
+ with self.stage('all versions'):
+ args.show_all_versions = True
+ with terminal.capture() as (out, _):
+ control.do_series(args, test_db=self.tmpdir, pwork=True)
+ lines = iter(out.getvalue().splitlines())
+ self._check_second(lines, True)
+
+ def _check_first(self, itr):
+ """Check output from the progress command
+
+ Args:
+ itr (Iterator): Contains the output lines to check
+ """
+ self.assertEqual('first: (versions: 1)', next(itr))
+ self.assertEqual("Branch 'first' (total 2): 2:unknown", next(itr))
+ self.assertIn('PatchId', next(itr))
+ self.assertRegex(
+ next(itr),
+ ' 0 unknown - .* i2c: I2C things')
+ self.assertRegex(
+ next(itr),
+ ' 1 unknown - .* spi: SPI fixes')
+ self.assertEqual('', next(itr))
+
+ def test_series_progress_all(self):
+ """Test showing progress for all cseries"""
+ self.setup_second()
+ self.db_close()
+
+ with self.stage('progress with patches'):
+ args = Namespace(subcmd='progress', series=None,
+ show_all_versions=False, list_patches=True)
+ with terminal.capture() as (out, _):
+ control.do_series(args, test_db=self.tmpdir, pwork=True)
+ lines = iter(out.getvalue().splitlines())
+ self._check_first(lines)
+ self._check_second(lines, False)
+
+ with self.stage('all versions'):
+ args.show_all_versions = True
+ with terminal.capture() as (out, _):
+ control.do_series(args, test_db=self.tmpdir, pwork=True)
+ lines = iter(out.getvalue().splitlines())
+ self._check_first(lines)
+ self._check_second(lines, True)
+
+ def test_series_progress_no_patches(self):
+ """Test showing progress for all cseries without patches"""
+ self.setup_second()
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', 'progress', pwork=True)
+ itr = iter(out.getvalue().splitlines())
+ self.assertEqual(
+ 'Name Description '
+ 'Count Status', next(itr))
+ self.assertTrue(next(itr).startswith('--'))
+ self.assertEqual(
+ 'first '
+ ' 2 2:unknown', next(itr))
+ self.assertEqual(
+ 'second2 The name of the cover letter '
+ ' 3 1:accepted 1:changes 1:rejected', next(itr))
+ self.assertTrue(next(itr).startswith('--'))
+ self.assertEqual(
+ ['2', 'series', '5', '2:unknown', '1:accepted', '1:changes',
+ '1:rejected'],
+ next(itr).split())
+ self.assert_finished(itr)
+
+ def test_series_progress_all_no_patches(self):
+ """Test showing progress for all cseries versions without patches"""
+ self.setup_second()
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', 'progress', '--show-all-versions',
+ pwork=True)
+ itr = iter(out.getvalue().splitlines())
+ self.assertEqual(
+ 'Name Description '
+ 'Count Status', next(itr))
+ self.assertTrue(next(itr).startswith('--'))
+ self.assertEqual(
+ 'first '
+ ' 2 2:unknown', next(itr))
+ self.assertEqual(
+ 'second Series for my board '
+ ' 3 3:unknown', next(itr))
+ self.assertEqual(
+ 'second2 The name of the cover letter '
+ ' 3 1:accepted 1:changes 1:rejected', next(itr))
+ self.assertTrue(next(itr).startswith('--'))
+ self.assertEqual(
+ ['3', 'series', '8', '5:unknown', '1:accepted', '1:changes',
+ '1:rejected'],
+ next(itr).split())
+ self.assert_finished(itr)
+
+ def test_series_summary(self):
+ """Test showing a summary of series status"""
+ self.setup_second()
+
+ self.db_close()
+ args = Namespace(subcmd='summary', series=None)
+ with terminal.capture() as (out, _):
+ control.do_series(args, test_db=self.tmpdir, pwork=True)
+ lines = out.getvalue().splitlines()
+ self.assertEqual(
+ 'Name Status Description',
+ lines[0])
+ self.assertEqual(
+ '----------------- ------ ------------------------------',
+ lines[1])
+ self.assertEqual('first -/2 ', lines[2])
+ self.assertEqual('second 1/3 Series for my board', lines[3])
+
+ def test_series_open(self):
+ """Test opening a series in a web browser"""
+ cser = self.get_cser()
+ pwork = Patchwork.for_testing(self._fake_patchwork_cser)
+ self.assertFalse(cser.project_get())
+ pwork.project_set(self.PROJ_ID, self.PROJ_LINK_NAME)
+
+ with terminal.capture():
+ cser.add('second', allow_unmarked=True)
+ cser.increment('second')
+ cser.link_auto(pwork, 'second', 2, True)
+ cser.gather(pwork, 'second', 2, False, False, False)
+
+ with mock.patch.object(cros_subprocess.Popen, '__init__',
+ return_value=None) as method:
+ with terminal.capture() as (out, _):
+ cser.open(pwork, 'second2', 2)
+
+ url = ('https://patchwork.ozlabs.org/project/uboot/list/?series=457'
+ '&state=*&archive=both')
+ method.assert_called_once_with(['xdg-open', url])
+ self.assertEqual(f'Opening {url}', out.getvalue().strip())
+
+ def test_name_version(self):
+ """Test handling of series names and versions"""
+ cser = self.get_cser()
+ repo = self.repo
+
+ self.assertEqual(('fred', None),
+ cser_helper.split_name_version('fred'))
+ self.assertEqual(('mary', 2), cser_helper.split_name_version('mary2'))
+
+ ser, version = cser._parse_series_and_version(None, None)
+ self.assertEqual('first', ser.name)
+ self.assertEqual(1, version)
+
+ ser, version = cser._parse_series_and_version('first', None)
+ self.assertEqual('first', ser.name)
+ self.assertEqual(1, version)
+
+ ser, version = cser._parse_series_and_version('first', 2)
+ self.assertEqual('first', ser.name)
+ self.assertEqual(2, version)
+
+ with self.assertRaises(ValueError) as exc:
+ cser._parse_series_and_version('123', 2)
+ self.assertEqual(
+ "Series name '123' cannot be a number, use '<name><version>'",
+ str(exc.exception))
+
+ with self.assertRaises(ValueError) as exc:
+ cser._parse_series_and_version('first', 100)
+ self.assertEqual("Version 100 exceeds 99", str(exc.exception))
+
+ with terminal.capture() as (_, err):
+ cser._parse_series_and_version('mary3', 4)
+ self.assertIn('Version mismatch: -V has 4 but branch name indicates 3',
+ err.getvalue())
+
+ ser, version = cser._parse_series_and_version('mary', 4)
+ self.assertEqual('mary', ser.name)
+ self.assertEqual(4, version)
+
+ # Move off the branch and check for a sensible error
+ commit = repo.revparse_single('first~')
+ repo.checkout_tree(commit)
+ repo.set_head(commit.oid)
+
+ with self.assertRaises(ValueError) as exc:
+ cser._parse_series_and_version(None, None)
+ self.assertEqual('No branch detected: please use -s <series>',
+ str(exc.exception))
+
+ def test_name_version_extra(self):
+ """More tests for some corner cases"""
+ cser, _ = self.setup_second()
+ target = self.repo.lookup_reference('refs/heads/second2')
+ self.repo.checkout(
+ target, strategy=pygit2.enums.CheckoutStrategy.FORCE)
+
+ ser, version = cser._parse_series_and_version(None, None)
+ self.assertEqual('second', ser.name)
+ self.assertEqual(2, version)
+
+ ser, version = cser._parse_series_and_version('second2', None)
+ self.assertEqual('second', ser.name)
+ self.assertEqual(2, version)
+
+ def test_migrate(self):
+ """Test migration to later schema versions"""
+ db = database.Database(f'{self.tmpdir}/.patman.db')
+ with terminal.capture() as (out, err):
+ db.open_it()
+ self.assertEqual(
+ f'Creating new database {self.tmpdir}/.patman.db',
+ err.getvalue().strip())
+
+ self.assertEqual(0, db.get_schema_version())
+
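+ # Step the schema forward one version at a time; each migration
+ # should back up the old database file and report the update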
+ for version in range(1, database.LATEST + 1):
+ with terminal.capture() as (out, _):
+ db.migrate_to(version)
+ self.assertTrue(os.path.exists(
+ f'{self.tmpdir}/.patman.dbold.v{version - 1}'))
+ self.assertEqual(f'Update database to v{version}',
+ out.getvalue().strip())
+ self.assertEqual(version, db.get_schema_version())
+ self.assertEqual(4, database.LATEST)
+
+ def test_series_scan(self):
+ """Test scanning a series for updates"""
+ cser, _ = self.setup_second()
+ target = self.repo.lookup_reference('refs/heads/second2')
+ self.repo.checkout(
+ target, strategy=pygit2.enums.CheckoutStrategy.FORCE)
+
+ # Add a new commit
+ self.repo = pygit2.init_repository(self.gitdir)
+ self.make_commit_with_file(
+ 'wip: Try out a new thing', 'Just checking', 'wibble.c',
+ '''changes to wibble''')
+ target = self.repo.revparse_single('HEAD')
+ self.repo.reset(target.oid, pygit2.enums.ResetMode.HARD)
+
+ name, ser, version, _ = cser.prep_series(None)
+
+ # We now have 4 commits numbered 0 (second~3) to 3 (the one we just
+ # added). Drop commit 1 (the 'serial' one) from the branch
+ cser._filter_commits(name, ser, 1)
+ svid = cser.get_ser_ver(ser.idnum, version).idnum
+ old_pcdict = cser.get_pcommit_dict(svid).values()
+
+ expect = '''Syncing series 'second2' v2: mark False allow_unmarked True
+ 0 video: Some video improvements
+- 1 serial: Add a serial driver
+ 1 bootm: Make it boot
++ 2 Just checking
+'''
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-n', 'scan', '-M', pwork=True)
+ self.assertEqual(expect + 'Dry run completed\n', out.getvalue())
+
+ new_pcdict = cser.get_pcommit_dict(svid).values()
+ self.assertEqual(list(old_pcdict), list(new_pcdict))
+
+ with terminal.capture() as (out, _):
+ self.run_args('series', 'scan', '-M', pwork=True)
+ self.assertEqual(expect, out.getvalue())
+
+ new_pcdict = cser.get_pcommit_dict(svid).values()
+ self.assertEqual(len(old_pcdict), len(new_pcdict))
+ chk = list(new_pcdict)
+ self.assertNotEqual(list(old_pcdict), list(new_pcdict))
+ self.assertEqual('video: Some video improvements', chk[0].subject)
+ self.assertEqual('bootm: Make it boot', chk[1].subject)
+ self.assertEqual('Just checking', chk[2].subject)
+
+ def test_series_send(self):
+ """Test sending a series"""
+ cser, pwork = self.setup_second()
+
+ # Create a third version
+ with terminal.capture():
+ cser.increment('second')
+ series = patchstream.get_metadata_for_list('second3', self.gitdir, 3)
+ self.assertEqual('2:457 1:456', series.links)
+ self.assertEqual('3', series.version)
+
+ with terminal.capture() as (out, err):
+ self.run_args('series', '-n', '-s', 'second3', 'send',
+ '--no-autolink', pwork=pwork)
+ self.assertIn('Send a total of 3 patches with a cover letter',
+ out.getvalue())
+ self.assertIn(
+ 'video.c:1: warning: Missing or malformed SPDX-License-Identifier '
+ 'tag in line 1', err.getvalue())
+ self.assertIn(
+ '<patch>:19: warning: added, moved or deleted file(s), does '
+ 'MAINTAINERS need updating?', err.getvalue())
+ self.assertIn('bootm.c:1: check: Avoid CamelCase: <Fix>',
+ err.getvalue())
+ self.assertIn(
+ 'Cc: Anatolij Gustschin <agust@denx.de>', out.getvalue())
+
+ self.assertTrue(os.path.exists(os.path.join(
+ self.tmpdir, '0001-video-Some-video-improvements.patch')))
+ self.assertTrue(os.path.exists(os.path.join(
+ self.tmpdir, '0002-serial-Add-a-serial-driver.patch')))
+ self.assertTrue(os.path.exists(os.path.join(
+ self.tmpdir, '0003-bootm-Make-it-boot.patch')))
+
+ def test_series_send_and_link(self):
+ """Test sending a series and then adding its link to the database"""
+ def h_sleep(time_s):
+ if cser.get_time() > 25:
+ self.autolink_extra = {'id': 500,
+ 'name': 'Series for my board',
+ 'version': 3}
+ cser.inc_fake_time(time_s)
+
+ cser, pwork = self.setup_second()
+
+ # Create a third version
+ with terminal.capture():
+ cser.increment('second')
+ series = patchstream.get_metadata_for_list('second3', self.gitdir, 3)
+ self.assertEqual('2:457 1:456', series.links)
+ self.assertEqual('3', series.version)
+
+ with terminal.capture():
+ self.run_args('series', '-n', 'send', pwork=pwork)
+
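+ # Install the fake-sleep handler so that link_auto() can poll
+ # patchwork without real delays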
+ cser.set_fake_time(h_sleep)
+ with terminal.capture() as (out, _):
+ cser.link_auto(pwork, 'second3', 3, True, 50)
+ itr = iter(out.getvalue().splitlines())
+ for i in range(7):
+ self.assertEqual(
+ "Possible matches for 'second' v3 desc 'Series for my board':",
+ next(itr), f'failed at i={i}')
+ self.assertEqual(' Link Version Description', next(itr))
+ self.assertEqual(' 456 1 Series for my board', next(itr))
+ self.assertEqual(' 457 2 Series for my board', next(itr))
+ self.assertEqual('Sleeping for 5 seconds', next(itr))
+ self.assertEqual('Link completed after 35 seconds', next(itr))
+ self.assertRegex(
+ next(itr), 'Checking out upstream commit refs/heads/base: .*')
+ self.assertEqual(
+ "Processing 3 commits from branch 'second3'", next(itr))
+ self.assertRegex(
+ next(itr),
+ f'- {HASH_RE} as {HASH_RE} '
+ 'video: Some video improvements')
+ self.assertRegex(
+ next(itr),
+ f"- add links '3:500 2:457 1:456': {HASH_RE} as {HASH_RE} "
+ 'serial: Add a serial driver')
+ self.assertRegex(
+ next(itr),
+ f'- add v3: {HASH_RE} as {HASH_RE} '
+ 'bootm: Make it boot')
+ self.assertRegex(
+ next(itr),
+ f'Updating branch second3 from {HASH_RE} to {HASH_RE}')
+ self.assertEqual(
+ "Setting link for series 'second' v3 to 500", next(itr))
+
+ def _check_status(self, out, has_comments, has_cover_comments):
+ """Check output from the status command
+
+ Args:
+ out (StringIO): Captured output to check
+ has_comments (bool): True if patch comments are expected
+ has_cover_comments (bool): True if cover-letter comments are expected
+ """
+ itr = iter(out.getvalue().splitlines())
+ if has_cover_comments:
+ self.assertEqual('Cov The name of the cover letter', next(itr))
+ self.assertEqual(
+ 'From: A user <user@user.com>: Sun 13 Apr 14:06:02 MDT 2025',
+ next(itr))
+ self.assertEqual('some comment', next(itr))
+ self.assertEqual('', next(itr))
+
+ self.assertEqual(
+ 'From: Ghenkis Khan <gk@eurasia.gov>: Sun 13 Apr 13:06:02 '
+ 'MDT 2025',
+ next(itr))
+ self.assertEqual('another comment', next(itr))
+ self.assertEqual('', next(itr))
+
+ self.assertEqual(' 1 video: Some video improvements', next(itr))
+ self.assertEqual(' + Reviewed-by: Fred Bloggs <fred@bloggs.com>',
+ next(itr))
+ if has_comments:
+ self.assertEqual(
+ 'Review: Fred Bloggs <fred@bloggs.com>', next(itr))
+ self.assertEqual(' > This was my original patch', next(itr))
+ self.assertEqual(' > which is being quoted', next(itr))
+ self.assertEqual(
+ ' I like the approach here and I would love to see more '
+ 'of it.', next(itr))
+ self.assertEqual('', next(itr))
+
+ self.assertEqual(' 2 serial: Add a serial driver', next(itr))
+ self.assertEqual(' 3 bootm: Make it boot', next(itr))
+ self.assertEqual(
+ '1 new response available in patchwork (use -d to write them to '
+ 'a new branch)', next(itr))
+
+ def test_series_status(self):
+ """Test getting the status of a series, including comments"""
+ cser, pwork = self.setup_second()
+
+ # Use single threading for easy debugging, but the multithreaded
+ # version should produce the same output
+ with self.stage('status second2: single-threaded'):
+ with terminal.capture() as (out, _):
+ cser.status(pwork, 'second', 2, False)
+ self._check_status(out, False, False)
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+
+ with self.stage('status second2 (normal)'):
+ with terminal.capture() as (out2, _):
+ cser.status(pwork, 'second', 2, False)
+ self.assertEqual(out.getvalue(), out2.getvalue())
+ self._check_status(out, False, False)
+
+ with self.stage('with comments'):
+ with terminal.capture() as (out, _):
+ cser.status(pwork, 'second', 2, show_comments=True)
+ self._check_status(out, True, False)
+
+ with self.stage('with comments and cover comments'):
+ with terminal.capture() as (out, _):
+ cser.status(pwork, 'second', 2, show_comments=True,
+ show_cover_comments=True)
+ self._check_status(out, True, True)
+
+ def test_series_status_cmdline(self):
+ """Test getting the status of a series, including comments"""
+ cser, pwork = self.setup_second()
+
+ with self.stage('status second2'):
+ with terminal.capture() as (out, _):
+ self.run_args('series', '-s', 'second', '-V', '2', 'status',
+ pwork=pwork)
+ self._check_status(out, False, False)
+
+ with self.stage('with comments'):
+ with terminal.capture() as (out, _):
+ cser.status(pwork, 'second', 2, show_comments=True)
+ self._check_status(out, True, False)
+
+ with self.stage('with comments and cover comments'):
+ with terminal.capture() as (out, _):
+ cser.status(pwork, 'second', 2, show_comments=True,
+ show_cover_comments=True)
+ self._check_status(out, True, True)
+
+ def test_series_no_subcmd(self):
+ """Test handling of things without a subcommand"""
+ parsers = cmdline.setup_parser()
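+ # With catch_error set, a missing subcommand prints the usage
+ # message instead of raising, so it can be captured and checked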
+ parsers['series'].catch_error = True
+ with terminal.capture() as (out, _):
+ cmdline.parse_args(['series'], parsers=parsers)
+ self.assertIn('usage: patman series', out.getvalue())
+
+ parsers['patchwork'].catch_error = True
+ with terminal.capture() as (out, _):
+ cmdline.parse_args(['patchwork'], parsers=parsers)
+ self.assertIn('usage: patman patchwork', out.getvalue())
+
+ parsers['upstream'].catch_error = True
+ with terminal.capture() as (out, _):
+ cmdline.parse_args(['upstream'], parsers=parsers)
+ self.assertIn('usage: patman upstream', out.getvalue())
+
+ def check_series_rename(self):
+ """Check renaming a series"""
+ cser = self.get_cser()
+ with self.stage('setup'):
+ with terminal.capture() as (out, _):
+ cser.add('first', 'my name', allow_unmarked=True)
+
+ # Remember the old series
+ old = cser.get_series_by_name('first')
+
+ self.assertEqual('first', gitutil.get_branch(self.gitdir))
+ with terminal.capture() as (out, _):
+ cser.increment('first')
+ self.assertEqual('first2', gitutil.get_branch(self.gitdir))
+
+ with terminal.capture() as (out, _):
+ cser.increment('first')
+ self.assertEqual('first3', gitutil.get_branch(self.gitdir))
+
+ # Do the dry run
+ with self.stage('rename - dry run'):
+ with terminal.capture() as (out, _):
+ yield cser
+ lines = out.getvalue().splitlines()
+ itr = iter(lines)
+ self.assertEqual("Renaming branch 'first' to 'newname'", next(itr))
+ self.assertEqual(
+ "Renaming branch 'first2' to 'newname2'", next(itr))
+ self.assertEqual(
+ "Renaming branch 'first3' to 'newname3'", next(itr))
+ self.assertEqual("Renamed series 'first' to 'newname'", next(itr))
+ self.assertEqual("Dry run completed", next(itr))
+ self.assert_finished(itr)
+
+ # Check nothing changed
+ self.assertEqual('first3', gitutil.get_branch(self.gitdir))
+ sdict = cser.db.series_get_dict()
+ self.assertIn('first', sdict)
+
+ # Now do it for real
+ with self.stage('rename - real'):
+ with terminal.capture() as (out2, _):
+ yield cser
+ lines2 = out2.getvalue().splitlines()
+ self.assertEqual(lines[:-1], lines2)
+
+ self.assertEqual('newname3', gitutil.get_branch(self.gitdir))
+
+ # Check the series ID did not change
+ ser = cser.get_series_by_name('newname')
+ self.assertEqual(old.idnum, ser.idnum)
+
+ yield None
+
+ def test_series_rename(self):
+ """Test renaming of a series"""
+ cor = self.check_series_rename()
+ cser = next(cor)
+
+ # Rename (dry run)
+ cser.rename('first', 'newname', dry_run=True)
+ cser = next(cor)
+
+ # Rename (real)
+ cser.rename('first', 'newname')
+ self.assertFalse(next(cor))
+
+ def test_series_rename_cmdline(self):
+ """Test renaming of a series with the cmdline"""
+ cor = self.check_series_rename()
+ next(cor)
+
+ # Rename (dry run)
+ self.run_args('series', '-n', '-s', 'first', 'rename', '-N', 'newname',
+ pwork=True)
+ next(cor)
+
+ # Rename (real)
+ self.run_args('series', '-s', 'first', 'rename', '-N', 'newname',
+ pwork=True)
+
+ self.assertFalse(next(cor))
+
+ def test_series_rename_bad(self):
+ """Test renaming when it is not allowed"""
+ cser = self.get_cser()
+ with terminal.capture():
+ cser.add('first', 'my name', allow_unmarked=True)
+ cser.increment('first')
+ cser.increment('first')
+
+ with self.assertRaises(ValueError) as exc:
+ cser.rename('first', 'first')
+ self.assertEqual("Cannot rename series 'first' to itself",
+ str(exc.exception))
+
+ with self.assertRaises(ValueError) as exc:
+ cser.rename('first2', 'newname')
+ self.assertEqual(
+ "Invalid series name 'first2': did you use the branch name?",
+ str(exc.exception))
+
+ with self.assertRaises(ValueError) as exc:
+ cser.rename('first', 'newname2')
+ self.assertEqual(
+ "Invalid series name 'newname2': did you use the branch name?",
+ str(exc.exception))
+
+ with self.assertRaises(ValueError) as exc:
+ cser.rename('first', 'second')
+ self.assertEqual("Cannot rename: branches exist: second",
+ str(exc.exception))
+
+ with terminal.capture():
+ cser.add('second', 'another name', allow_unmarked=True)
+ cser.increment('second')
+
+ with self.assertRaises(ValueError) as exc:
+ cser.rename('first', 'second')
+ self.assertEqual("Cannot rename: series 'second' already exists",
+ str(exc.exception))
+
+ # Rename second2 so that it gets in the way of the rename
+ gitutil.rename_branch('second2', 'newname2', self.gitdir)
+ with self.assertRaises(ValueError) as exc:
+ cser.rename('first', 'newname')
+ self.assertEqual("Cannot rename: branches exist: newname2",
+ str(exc.exception))
+
+ # Rename first3 and make sure it stops the rename
+ gitutil.rename_branch('first3', 'tempbranch', self.gitdir)
+ with self.assertRaises(ValueError) as exc:
+ cser.rename('first', 'newname')
+ self.assertEqual(
+ "Cannot rename: branches missing: first3: branches exist: "
+ 'newname2', str(exc.exception))
+
+ def test_version_change(self):
+ """Test changing a version of a series to a different version number"""
+ cser = self.get_cser()
+
+ with self.stage('setup'):
+ with terminal.capture():
+ cser.add('first', 'my description', allow_unmarked=True)
+
+ with self.stage('non-existent version'):
+ # Check changing a non-existent version
+ with self.assertRaises(ValueError) as exc:
+ cser.version_change('first', 2, 3, dry_run=True)
+ self.assertEqual("Series 'first' does not have a version 2",
+ str(exc.exception))
+
+ with self.stage('new version missing'):
+ with self.assertRaises(ValueError) as exc:
+ cser.version_change('first', None, None, dry_run=True)
+ self.assertEqual("Please provide a new version number",
+ str(exc.exception))
+
+ # Change v1 to v3 (dry run)
+ with self.stage('v1 -> 3 dry run'):
+ with terminal.capture():
+ self.assertTrue(gitutil.check_branch('first', self.gitdir))
+ cser.version_change('first', 1, 3, dry_run=True)
+ self.assertTrue(gitutil.check_branch('first', self.gitdir))
+ self.assertFalse(gitutil.check_branch('first3', self.gitdir))
+
+ # Check that nothing actually happened
+ series = patchstream.get_metadata('first', 0, 2,
+ git_dir=self.gitdir)
+ self.assertNotIn('version', series)
+
+ svlist = cser.get_ser_ver_list()
+ self.assertEqual(1, len(svlist))
+ item = svlist[0]
+ self.assertEqual(1, item.version)
+
+ with self.stage('increment twice'):
+ # Increment so that we get first3
+ with terminal.capture():
+ cser.increment('first')
+ cser.increment('first')
+
+ with self.stage('existing version'):
+ # Check changing to an existing version
+ with self.assertRaises(ValueError) as exc:
+ cser.version_change('first', 1, 3, dry_run=True)
+ self.assertEqual("Series 'first' already has a v3: 1 2 3",
+ str(exc.exception))
+
+ # Change v1 to v4 (for real)
+ with self.stage('v1 -> 4'):
+ with terminal.capture():
+ self.assertTrue(gitutil.check_branch('first', self.gitdir))
+ cser.version_change('first', 1, 4)
+ self.assertTrue(gitutil.check_branch('first', self.gitdir))
+ self.assertTrue(gitutil.check_branch('first4', self.gitdir))
+
+ series = patchstream.get_metadata('first4', 0, 2,
+ git_dir=self.gitdir)
+ self.assertIn('version', series)
+ self.assertEqual('4', series.version)
+
+ svdict = cser.get_ser_ver_dict()
+ self.assertEqual(3, len(svdict))
+ item = svdict[item.idnum]
+ self.assertEqual(4, item.version)
+
+ with self.stage('increment'):
+ # Now try to increment first again
+ with terminal.capture():
+ cser.increment('first')
+
+ ser = cser.get_series_by_name('first')
+ self.assertIn(5, cser._get_version_list(ser.idnum))
+
+ def test_version_change_cmdline(self):
+ """Check changing a version on the cmdline"""
+ self.get_cser()
+ with (mock.patch.object(cseries.Cseries, 'version_change',
+ return_value=None) as method):
+ self.run_args('series', '-s', 'first', 'version-change',
+ pwork=True)
+ method.assert_called_once_with('first', None, None, dry_run=False)
+
+ with (mock.patch.object(cseries.Cseries, 'version_change',
+ return_value=None) as method):
+ self.run_args('series', '-s', 'first', 'version-change',
+ '--new-version', '3', pwork=True)
+ method.assert_called_once_with('first', None, 3, dry_run=False)
diff --git a/tools/patman/test_settings.py b/tools/patman/test_settings.py
index 06b7cbc3ab6..c117836de31 100644
--- a/tools/patman/test_settings.py
+++ b/tools/patman/test_settings.py
@@ -49,7 +49,7 @@ def test_git_local_config():
dest='check_patch', default=True)
# Test "global" config is used.
- settings.Setup(parser, 'unknown', global_config.name)
+ settings.Setup(parser, 'unknown', None, global_config.name)
args, _ = parser.parse_known_args([])
assert args.project == 'u-boot'
send_args, _ = send.parse_known_args([])
diff --git a/tools/rmboard.py b/tools/rmboard.py
index 594fd89b8d7..21d68c57261 100755
--- a/tools/rmboard.py
+++ b/tools/rmboard.py
@@ -112,8 +112,7 @@ def rm_board(board):
rm_kconfig_include(fname)
# Remove unwanted files
- cmd = ['git', 'rm', '-r'] + real
- stdout = command.output(*cmd, capture=True)
+ stdout = command.output('git', 'rm', '-r', *real)
## Change the messages as needed
msg = '''arm: Remove %s board
@@ -130,7 +129,8 @@ Remove it.
# Check if the board is mentioned anywhere else. The user will need to deal
# with this
- print(command.output('git', 'grep', '-il', board, raise_on_error=False))
+ cmd = ['git', 'grep', '-il', board]
+ print(command.output(*cmd, raise_on_error=False))
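+ # Show the command used, so the user can repeat or adjust it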
print(' '.join(cmd))
for board in sys.argv[1:]:
diff --git a/tools/stm32image.c b/tools/stm32image.c
index 5c6991f35de..2a31d37f9cf 100644
--- a/tools/stm32image.c
+++ b/tools/stm32image.c
@@ -8,58 +8,74 @@
/* magic ='S' 'T' 'M' 0x32 */
#define HEADER_MAGIC be32_to_cpu(0x53544D32)
-#define VER_MAJOR_IDX 2
-#define VER_MINOR_IDX 1
-#define VER_VARIANT_IDX 0
+#define VER_MAJOR 2
+#define VER_MINOR 1
+#define VER_VARIANT 0
#define HEADER_VERSION_V1 0x1
+#define HEADER_VERSION_V2 0x2
/* default option : bit0 => no signature */
#define HEADER_DEFAULT_OPTION (cpu_to_le32(0x00000001))
/* default binary type for U-Boot */
#define HEADER_TYPE_UBOOT (cpu_to_le32(0x00000000))
+#define PADDING_HEADER_MAGIC (cpu_to_le32(0xFFFF5453))
+#define PADDING_HEADER_FLAG (1ULL << 31)
+#define PADDING_HEADER_LENGTH 0x180
-struct stm32_header {
+struct stm32_header_v1 {
uint32_t magic_number;
- uint32_t image_signature[64 / 4];
+ uint8_t image_signature[64];
uint32_t image_checksum;
- uint8_t header_version[4];
+ uint8_t header_version[4];
uint32_t image_length;
uint32_t image_entry_point;
uint32_t reserved1;
uint32_t load_address;
uint32_t reserved2;
uint32_t version_number;
+ /* V1.0 specific content */
uint32_t option_flags;
uint32_t ecdsa_algorithm;
- uint32_t ecdsa_public_key[64 / 4];
- uint32_t padding[83 / 4];
- uint32_t binary_type;
+ uint8_t ecdsa_public_key[64];
+ uint8_t padding[83];
+ uint8_t binary_type;
};
-static struct stm32_header stm32image_header;
+struct stm32_header_v2 {
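+ /* these fields must match struct stm32_header_v1, up to version_number */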
+ uint32_t magic_number;
+ uint8_t image_signature[64];
+ uint32_t image_checksum;
+ uint8_t header_version[4];
+ uint32_t image_length;
+ uint32_t image_entry_point;
+ uint32_t reserved1;
+ uint32_t load_address;
+ uint32_t reserved2;
+ uint32_t version_number;
+ /* V2.0 specific content */
+ uint32_t extension_flags;
+ uint32_t extension_headers_length;
+ uint32_t binary_type;
+ uint8_t padding[16];
+ uint32_t extension_header_type;
+ uint32_t extension_header_length;
+ uint8_t extension_padding[376];
+};
-static void stm32image_default_header(struct stm32_header *ptr)
-{
- if (!ptr)
- return;
-
- ptr->magic_number = HEADER_MAGIC;
- ptr->header_version[VER_MAJOR_IDX] = HEADER_VERSION_V1;
- ptr->option_flags = HEADER_DEFAULT_OPTION;
- ptr->ecdsa_algorithm = cpu_to_le32(1);
- ptr->binary_type = HEADER_TYPE_UBOOT;
-}
+static struct stm32_header_v1 stm32image_header_v1;
+static struct stm32_header_v2 stm32image_header_v2;
-static uint32_t stm32image_checksum(void *start, uint32_t len)
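+/*
+ * Compute the checksum as a byte-wise sum of everything after the header,
+ * or 0 if the image is too small to hold the header
+ */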
+static uint32_t stm32image_checksum(void *start, uint32_t len,
+ uint32_t header_size)
{
uint32_t csum = 0;
- uint32_t hdr_len = sizeof(struct stm32_header);
uint8_t *p;
- if (len < hdr_len)
+ if (len < header_size)
return 0;
- p = start + hdr_len;
- len -= hdr_len;
+ p = (unsigned char *)start + header_size;
+ len -= header_size;
while (len > 0) {
csum += *p;
@@ -70,24 +86,53 @@ static uint32_t stm32image_checksum(void *start, uint32_t len)
return csum;
}
-static int stm32image_check_image_types(uint8_t type)
+static int stm32image_check_image_types_v1(uint8_t type)
{
if (type == IH_TYPE_STM32IMAGE)
return EXIT_SUCCESS;
return EXIT_FAILURE;
}
-static int stm32image_verify_header(unsigned char *ptr, int image_size,
- struct image_tool_params *params)
+static int stm32image_check_image_types_v2(uint8_t type)
+{
+ if (type == IH_TYPE_STM32IMAGE_V2)
+ return EXIT_SUCCESS;
+ return EXIT_FAILURE;
+}
+
+static int stm32image_verify_header_v1(unsigned char *ptr, int image_size,
+ struct image_tool_params *params)
+{
+ struct stm32_header_v1 *stm32hdr = (struct stm32_header_v1 *)ptr;
+ int i;
+
+ if (image_size < sizeof(struct stm32_header_v1))
+ return -1;
+ if (stm32hdr->magic_number != HEADER_MAGIC)
+ return -1;
+ if (stm32hdr->header_version[VER_MAJOR] != HEADER_VERSION_V1)
+ return -1;
+ if (stm32hdr->reserved1 || stm32hdr->reserved2)
+ return -1;
+ for (i = 0; i < sizeof(stm32hdr->padding); i++) {
+ if (stm32hdr->padding[i] != 0)
+ return -1;
+ }
+
+ return 0;
+}
+
+static int stm32image_verify_header_v2(unsigned char *ptr, int image_size,
+ struct image_tool_params *params)
{
- struct stm32_header *stm32hdr = (struct stm32_header *)ptr;
+ struct stm32_header_v2 *stm32hdr = (struct stm32_header_v2 *)ptr;
int i;
- if (image_size < sizeof(struct stm32_header))
+ if (image_size < sizeof(struct stm32_header_v2))
return -1;
if (stm32hdr->magic_number != HEADER_MAGIC)
return -1;
- if (stm32hdr->header_version[VER_MAJOR_IDX] != HEADER_VERSION_V1)
+ if (stm32hdr->header_version[VER_MAJOR] != HEADER_VERSION_V2)
return -1;
if (stm32hdr->reserved1 || stm32hdr->reserved2)
return -1;
@@ -101,38 +146,85 @@ static int stm32image_verify_header(unsigned char *ptr, int image_size,
static void stm32image_print_header(const void *ptr, struct image_tool_params *params)
{
- struct stm32_header *stm32hdr = (struct stm32_header *)ptr;
+ struct stm32_header_v1 *stm32hdr_v1 = (struct stm32_header_v1 *)ptr;
+ struct stm32_header_v2 *stm32hdr_v2 = (struct stm32_header_v2 *)ptr;
printf("Image Type : STMicroelectronics STM32 V%d.%d\n",
- stm32hdr->header_version[VER_MAJOR_IDX],
- stm32hdr->header_version[VER_MINOR_IDX]);
+ stm32hdr_v1->header_version[VER_MAJOR],
+ stm32hdr_v1->header_version[VER_MINOR]);
printf("Image Size : %lu bytes\n",
- (unsigned long)le32_to_cpu(stm32hdr->image_length));
+ (unsigned long)le32_to_cpu(stm32hdr_v1->image_length));
printf("Image Load : 0x%08x\n",
- le32_to_cpu(stm32hdr->load_address));
+ le32_to_cpu(stm32hdr_v1->load_address));
printf("Entry Point : 0x%08x\n",
- le32_to_cpu(stm32hdr->image_entry_point));
+ le32_to_cpu(stm32hdr_v1->image_entry_point));
printf("Checksum : 0x%08x\n",
- le32_to_cpu(stm32hdr->image_checksum));
- printf("Option : 0x%08x\n",
- le32_to_cpu(stm32hdr->option_flags));
- printf("BinaryType : 0x%08x\n",
- le32_to_cpu(stm32hdr->binary_type));
+ le32_to_cpu(stm32hdr_v1->image_checksum));
+ switch (stm32hdr_v1->header_version[VER_MAJOR]) {
+ case HEADER_VERSION_V1:
+ printf("Option : 0x%08x\n",
+ le32_to_cpu(stm32hdr_v1->option_flags));
+ printf("BinaryType : 0x%08x\n",
+ le32_to_cpu(stm32hdr_v1->binary_type));
+ break;
+
+ case HEADER_VERSION_V2:
+ printf("Extension : 0x%08x\n",
+ le32_to_cpu(stm32hdr_v2->extension_flags));
+ break;
+
+ default:
+ printf("Incorrect header version\n");
+ }
}
-static void stm32image_set_header(void *ptr, struct stat *sbuf, int ifd,
- struct image_tool_params *params)
+static void stm32image_set_header_v1(void *ptr, struct stat *sbuf, int ifd,
+ struct image_tool_params *params)
{
- struct stm32_header *stm32hdr = (struct stm32_header *)ptr;
+ struct stm32_header_v1 *stm32hdr = (struct stm32_header_v1 *)ptr;
- stm32image_default_header(stm32hdr);
+ stm32hdr->magic_number = HEADER_MAGIC;
+ stm32hdr->version_number = cpu_to_le32(0);
+
+ stm32hdr->header_version[VER_MAJOR] = HEADER_VERSION_V1;
+ stm32hdr->option_flags = HEADER_DEFAULT_OPTION;
+ stm32hdr->ecdsa_algorithm = cpu_to_le32(1);
+ stm32hdr->binary_type = HEADER_TYPE_UBOOT;
stm32hdr->load_address = cpu_to_le32(params->addr);
stm32hdr->image_entry_point = cpu_to_le32(params->ep);
stm32hdr->image_length = cpu_to_le32((uint32_t)sbuf->st_size -
- sizeof(struct stm32_header));
+ sizeof(*stm32hdr));
stm32hdr->image_checksum =
- cpu_to_le32(stm32image_checksum(ptr, sbuf->st_size));
+ cpu_to_le32(stm32image_checksum(ptr, sbuf->st_size,
+ sizeof(*stm32hdr)));
+}
+
+static void stm32image_set_header_v2(void *ptr, struct stat *sbuf, int ifd,
+ struct image_tool_params *params)
+{
+ struct stm32_header_v2 *stm32hdr = (struct stm32_header_v2 *)ptr;
+
+ stm32hdr->magic_number = HEADER_MAGIC;
+ stm32hdr->version_number = cpu_to_le32(0);
+
+ stm32hdr->header_version[VER_MAJOR] = HEADER_VERSION_V2;
+ stm32hdr->extension_flags =
+ cpu_to_le32(PADDING_HEADER_FLAG);
+ stm32hdr->extension_headers_length =
+ cpu_to_le32(PADDING_HEADER_LENGTH);
+ stm32hdr->extension_header_type =
+ cpu_to_le32(PADDING_HEADER_MAGIC);
+ stm32hdr->extension_header_length =
+ cpu_to_le32(PADDING_HEADER_LENGTH);
+
+ stm32hdr->load_address = cpu_to_le32(params->addr);
+ stm32hdr->image_entry_point = cpu_to_le32(params->ep);
+ stm32hdr->image_length = cpu_to_le32((uint32_t)sbuf->st_size -
+ sizeof(*stm32hdr));
+ stm32hdr->image_checksum =
+ cpu_to_le32(stm32image_checksum(ptr, sbuf->st_size,
+ sizeof(*stm32hdr)));
}
/*
@@ -141,14 +233,29 @@ static void stm32image_set_header(void *ptr, struct stat *sbuf, int ifd,
U_BOOT_IMAGE_TYPE(
stm32image,
"STMicroelectronics STM32MP Image support",
- sizeof(struct stm32_header),
- (void *)&stm32image_header,
+ sizeof(struct stm32_header_v1),
+ (void *)&stm32image_header_v1,
+ NULL,
+ stm32image_verify_header_v1,
+ stm32image_print_header,
+ stm32image_set_header_v1,
+ NULL,
+ stm32image_check_image_types_v1,
+ NULL,
+ NULL
+);
+
+U_BOOT_IMAGE_TYPE(
+ stm32imagev2,
+ "STMicroelectronics STM32MP Image V2.0 support",
+ sizeof(struct stm32_header_v2),
+ (void *)&stm32image_header_v2,
NULL,
- stm32image_verify_header,
+ stm32image_verify_header_v2,
stm32image_print_header,
- stm32image_set_header,
+ stm32image_set_header_v2,
NULL,
- stm32image_check_image_types,
+ stm32image_check_image_types_v2,
NULL,
NULL
);
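The v2 header above works out to 512 bytes (the v1 header to 256). As a cross-check, here is the same on-disk layout expressed with Python's struct module; field names are copied from the C definition and this sketch is illustrative only, not part of mkimage:

    import struct

    # struct stm32_header_v2, little-endian on-disk layout
    STM32_HDR_V2 = struct.Struct(
        '<I'    # magic_number ('STM' 0x32)
        '64s'   # image_signature
        'I'     # image_checksum
        '4s'    # header_version (major version at byte index VER_MAJOR == 2)
        'I'     # image_length
        'I'     # image_entry_point
        'I'     # reserved1
        'I'     # load_address
        'I'     # reserved2
        'I'     # version_number
        'I'     # extension_flags
        'I'     # extension_headers_length
        'I'     # binary_type
        '16s'   # padding
        'I'     # extension_header_type
        'I'     # extension_header_length
        '376s'  # extension_padding
    )
    assert STM32_HDR_V2.size == 512
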
diff --git a/tools/termios_linux.h b/tools/termios_linux.h
index 0806a91180a..0e5a5c475b5 100644
--- a/tools/termios_linux.h
+++ b/tools/termios_linux.h
@@ -32,13 +32,13 @@
#include <asm/ioctls.h>
#include <asm/termbits.h>
-#if defined(BOTHER) && defined(TCGETS2)
+#if defined(BOTHER) && defined(TCGETS2) && !defined(__powerpc64__)
#define termios termios2
#endif
static inline int tcgetattr(int fd, struct termios *t)
{
-#if defined(BOTHER) && defined(TCGETS2)
+#if defined(BOTHER) && defined(TCGETS2) && !defined(__powerpc64__)
return ioctl(fd, TCGETS2, t);
#else
return ioctl(fd, TCGETS, t);
@@ -50,7 +50,7 @@ static inline int tcsetattr(int fd, int a, const struct termios *t)
int cmd;
switch (a) {
-#if defined(BOTHER) && defined(TCGETS2)
+#if defined(BOTHER) && defined(TCGETS2) && !defined(__powerpc64__)
case TCSANOW:
cmd = TCSETS2;
break;
diff --git a/tools/u_boot_pylib/__main__.py b/tools/u_boot_pylib/__main__.py
index c0762bca733..d86b9d7dce0 100755
--- a/tools/u_boot_pylib/__main__.py
+++ b/tools/u_boot_pylib/__main__.py
@@ -16,7 +16,7 @@ if __name__ == "__main__":
from u_boot_pylib import test_util
result = test_util.run_test_suites(
- 'u_boot_pylib', False, False, False, None, None, None,
+ 'u_boot_pylib', False, False, False, False, None, None, None,
['terminal'])
sys.exit(0 if result.wasSuccessful() else 1)
diff --git a/tools/u_boot_pylib/command.py b/tools/u_boot_pylib/command.py
index 0e247355ef6..cb7ebf49ce5 100644
--- a/tools/u_boot_pylib/command.py
+++ b/tools/u_boot_pylib/command.py
@@ -203,7 +203,7 @@ def run_one(*cmd, **kwargs):
return run_pipe([cmd], **kwargs)
-def run_list(cmd):
+def run_list(cmd, **kwargs):
"""Run a command and return its output
Args:
@@ -211,8 +211,9 @@ def run_list(cmd):
+ **kwargs (dict of args): Extra arguments to pass through to run_pipe()
Returns:
str: output of command
"""
- return run_pipe([cmd], capture=True).stdout
+ return run_pipe([cmd], capture=True, **kwargs).stdout
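Forwarding **kwargs lets callers pass any run_pipe() option through run_list(); create_patches() below uses this to supply a working directory. An assumed usage sketch:

    from u_boot_pylib import command

    # Run a command in a specific directory and collect its output
    out = command.run_list(['git', 'status', '--porcelain'], cwd='/tmp')
    print(out.splitlines())
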
def stop_all():
diff --git a/tools/u_boot_pylib/gitutil.py b/tools/u_boot_pylib/gitutil.py
index 0376bece3e6..34b4dbb4839 100644
--- a/tools/u_boot_pylib/gitutil.py
+++ b/tools/u_boot_pylib/gitutil.py
@@ -2,10 +2,11 @@
# Copyright (c) 2011 The Chromium OS Authors.
#
+"""Basic utilities for running the git command-line tool from Python"""
+
import os
import sys
-from patman import settings
from u_boot_pylib import command
from u_boot_pylib import terminal
@@ -14,7 +15,7 @@ USE_NO_DECORATE = True
def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False,
- count=None):
+ count=None, decorate=False):
"""Create a command to perform a 'git log'
Args:
@@ -23,6 +24,8 @@ def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False,
oneline (bool): True to use --oneline, else False
reverse (bool): True to reverse the log (--reverse)
count (int or None): Number of commits to list, or None for no limit
+ decorate (bool): True to use --decorate
+
Return:
List containing command and arguments to run
"""
@@ -32,8 +35,10 @@ def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False,
cmd += ['--no-pager', 'log', '--no-color']
if oneline:
cmd.append('--oneline')
- if USE_NO_DECORATE:
+ if USE_NO_DECORATE and not decorate:
cmd.append('--no-decorate')
+ if decorate:
+ cmd.append('--decorate')
if reverse:
cmd.append('--reverse')
if count is not None:
@@ -48,7 +53,7 @@ def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False,
return cmd
-def count_commits_to_branch(branch):
+def count_commits_to_branch(branch, git_dir=None, end=None):
"""Returns number of commits between HEAD and the tracking branch.
This looks back to the tracking branch and works out the number of commits
@@ -56,16 +61,22 @@ def count_commits_to_branch(branch):
Args:
branch (str or None): Branch to count from (None for current branch)
+ git_dir (str): Path to git repository (None to use default)
+ end (str): End commit to stop before
Return:
Number of patches that exist on top of the branch
"""
- if branch:
- us, _ = get_upstream('.git', branch)
+ if end:
+ rev_range = f'{end}..{branch}'
+ elif branch:
+ us, msg = get_upstream(git_dir or '.git', branch)
+ if not us:
+ raise ValueError(msg)
rev_range = f'{us}..{branch}'
else:
rev_range = '@{upstream}..'
- cmd = log_cmd(rev_range, oneline=True)
+ cmd = log_cmd(rev_range, git_dir=git_dir, oneline=True)
result = command.run_one(*cmd, capture=True, capture_stderr=True,
oneline=True, raise_on_error=False)
if result.return_code:
@@ -85,9 +96,11 @@ def name_revision(commit_hash):
Name of revision, if any, else None
"""
stdout = command.output_one_line('git', 'name-rev', commit_hash)
+ if not stdout:
+ return None
# We expect a commit, a space, then a revision name
- name = stdout.split(' ')[1].strip()
+ name = stdout.split()[1].strip()
return name
@@ -107,18 +120,21 @@ def guess_upstream(git_dir, branch):
Name of upstream branch (e.g. 'upstream/master') or None if none
Warning/error message, or None if none
"""
- cmd = log_cmd(branch, git_dir=git_dir, oneline=True, count=100)
+ cmd = log_cmd(branch, git_dir=git_dir, oneline=True, count=100,
+ decorate=True)
result = command.run_one(*cmd, capture=True, capture_stderr=True,
raise_on_error=False)
if result.return_code:
return None, f"Branch '{branch}' not found"
for line in result.stdout.splitlines()[1:]:
- commit_hash = line.split(' ')[0]
- name = name_revision(commit_hash)
- if '~' not in name and '^' not in name:
- if name.startswith('remotes/'):
- name = name[8:]
- return name, f"Guessing upstream as '{name}'"
+ parts = line.split(maxsplit=1)
+ if len(parts) >= 2 and parts[1].startswith('('):
+ commit_hash = parts[0]
+ name = name_revision(commit_hash)
+ if name and '~' not in name and '^' not in name:
+ if name.startswith('remotes/'):
+ name = name[8:]
+ return name, f"Guessing upstream as '{name}'"
return None, f"Cannot find a suitable upstream for branch '{branch}'"
@@ -322,7 +338,8 @@ def prune_worktrees(git_dir):
raise OSError(f'git worktree prune: {result.stderr}')
-def create_patches(branch, start, count, ignore_binary, series, signoff=True):
+def create_patches(branch, start, count, ignore_binary, series, signoff=True,
+ git_dir=None, cwd=None):
"""Create a series of patches from the top of the current branch.
The patch files are written to the current directory using
@@ -335,11 +352,16 @@ def create_patches(branch, start, count, ignore_binary, series, signoff=True):
ignore_binary (bool): Don't generate patches for binary files
series (Series): Series object for this series (set of patches)
signoff (bool): True to add signoff lines automatically
+ git_dir (str): Path to git repository (None to use default)
+ cwd (str): Path to use for git operations
Return:
Filename of cover letter (None if none)
List of filenames of patch files
"""
- cmd = ['git', 'format-patch', '-M']
+ cmd = ['git']
+ if git_dir:
+ cmd += ['--git-dir', git_dir]
+ cmd += ['format-patch', '-M']
if signoff:
cmd.append('--signoff')
if ignore_binary:
@@ -352,7 +374,7 @@ def create_patches(branch, start, count, ignore_binary, series, signoff=True):
brname = branch or 'HEAD'
cmd += [f'{brname}~{start + count}..{brname}~{start}']
- stdout = command.run_list(cmd)
+ stdout = command.run_list(cmd, cwd=cwd)
files = stdout.splitlines()
# We have an extra file if there is a cover letter
@@ -361,7 +383,7 @@ def create_patches(branch, start, count, ignore_binary, series, signoff=True):
return None, files
-def build_email_list(in_list, tag=None, alias=None, warn_on_error=True):
+def build_email_list(in_list, alias, tag=None, warn_on_error=True):
"""Build a list of email addresses based on an input list.
Takes a list of email addresses and aliases, and turns this into a list
@@ -373,10 +395,10 @@ def build_email_list(in_list, tag=None, alias=None, warn_on_error=True):
Args:
in_list (list of str): List of aliases/email addresses
- tag (str): Text to put before each address
alias (dict): Alias dictionary:
key: alias
value: list of aliases or email addresses
+ tag (str): Text to put before each address
warn_on_error (bool): True to raise an error when an alias fails to
match, False to just print a message.
@@ -389,15 +411,14 @@ def build_email_list(in_list, tag=None, alias=None, warn_on_error=True):
>>> alias['mary'] = ['Mary Poppins <m.poppins@cloud.net>']
>>> alias['boys'] = ['fred', ' john']
>>> alias['all'] = ['fred ', 'john', ' mary ']
- >>> build_email_list(['john', 'mary'], None, alias)
+ >>> build_email_list(['john', 'mary'], alias, None)
['j.bloggs@napier.co.nz', 'Mary Poppins <m.poppins@cloud.net>']
- >>> build_email_list(['john', 'mary'], '--to', alias)
+ >>> build_email_list(['john', 'mary'], alias, '--to')
['--to "j.bloggs@napier.co.nz"', \
'--to "Mary Poppins <m.poppins@cloud.net>"']
- >>> build_email_list(['john', 'mary'], 'Cc', alias)
+ >>> build_email_list(['john', 'mary'], alias, 'Cc')
['Cc j.bloggs@napier.co.nz', 'Cc Mary Poppins <m.poppins@cloud.net>']
"""
- quote = '"' if tag and tag[0] == '-' else ''
raw = []
for item in in_list:
raw += lookup_email(item, alias, warn_on_error=warn_on_error)
@@ -406,7 +427,7 @@ def build_email_list(in_list, tag=None, alias=None, warn_on_error=True):
if item not in result:
result.append(item)
if tag:
- return [f'{tag} {quote}{email}{quote}' for email in result]
+ return [x for email in result for x in (tag, email)]
return result
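With a tag set, the result is now a flat argv-style list of alternating tags and addresses instead of pre-quoted strings, which can be passed straight to subprocess without shell quoting. For example:

    emails = ['j.bloggs@napier.co.nz', 'Mary Poppins <m.poppins@cloud.net>']
    tag = '--to'

    argv = [x for email in emails for x in (tag, email)]
    # ['--to', 'j.bloggs@napier.co.nz',
    #  '--to', 'Mary Poppins <m.poppins@cloud.net>']
    print(argv)
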
@@ -437,8 +458,8 @@ def check_suppress_cc_config():
def email_patches(series, cover_fname, args, dry_run, warn_on_error, cc_fname,
- self_only=False, alias=None, in_reply_to=None, thread=False,
- smtp_server=None):
+ alias, self_only=False, in_reply_to=None, thread=False,
+ smtp_server=None, cwd=None):
"""Email a patch series.
Args:
@@ -449,15 +470,16 @@ def email_patches(series, cover_fname, args, dry_run, warn_on_error, cc_fname,
warn_on_error (bool): True to print a warning when an alias fails to
match, False to ignore it.
cc_fname (str): Filename of Cc file for per-commit Cc
- self_only (bool): True to just email to yourself as a test
- alias (dict or None): Alias dictionary: (None to use settings default)
+ alias (dict): Alias dictionary:
key: alias
value: list of aliases or email addresses
+ self_only (bool): True to just email to yourself as a test
in_reply_to (str or None): If set we'll pass this to git as
--in-reply-to - should be a message ID that this is in reply to.
thread (bool): True to add --thread to git send-email (make
all patches reply to cover-letter or first patch in series)
smtp_server (str or None): SMTP server to use to send patches
+ cwd (str): Path to use for patch files (None to use current dir)
Returns:
Git command that was/would be run
@@ -498,11 +520,10 @@ send --cc-cmd cc-fname" cover p1 p2'
# Restore argv[0] since we clobbered it.
>>> sys.argv[0] = _old_argv0
"""
- to = build_email_list(series.get('to'), '--to', alias, warn_on_error)
+ to = build_email_list(series.get('to'), alias, '--to', warn_on_error)
if not to:
- git_config_to = command.output('git', 'config', 'sendemail.to',
- raise_on_error=False)
- if not git_config_to:
+ if not command.output('git', 'config', 'sendemail.to',
+ raise_on_error=False):
print("No recipient.\n"
"Please add something like this to a commit\n"
"Series-to: Fred Bloggs <f.blogs@napier.co.nz>\n"
@@ -510,10 +531,10 @@ send --cc-cmd cc-fname" cover p1 p2'
"git config sendemail.to u-boot@lists.denx.de")
return None
cc = build_email_list(list(set(series.get('cc')) - set(series.get('to'))),
- '--cc', alias, warn_on_error)
+ alias, '--cc', warn_on_error)
if self_only:
- to = build_email_list([os.getenv('USER')], '--to',
- alias, warn_on_error)
+ to = build_email_list([os.getenv('USER')], '--to', alias,
+ warn_on_error)
cc = []
cmd = ['git', 'send-email', '--annotate']
if smtp_server:
@@ -525,24 +546,24 @@ send --cc-cmd cc-fname" cover p1 p2'
cmd += to
cmd += cc
- cmd += ['--cc-cmd', f'"{sys.argv[0]} send --cc-cmd {cc_fname}"']
+ cmd += ['--cc-cmd', f'{sys.argv[0]} send --cc-cmd {cc_fname}']
if cover_fname:
cmd.append(cover_fname)
cmd += args
- cmdstr = ' '.join(cmd)
if not dry_run:
- os.system(cmdstr)
- return cmdstr
+ command.run(*cmd, capture=False, capture_stderr=False, cwd=cwd)
+ return ' '.join([f'"{x}"' if ' ' in x and '"' not in x else x
+ for x in cmd])
-def lookup_email(lookup_name, alias=None, warn_on_error=True, level=0):
+def lookup_email(lookup_name, alias, warn_on_error=True, level=0):
"""If an email address is an alias, look it up and return the full name
TODO: Why not just use git's own alias feature?
Args:
lookup_name (str): Alias or email address to look up
- alias (dict or None): Alias dictionary: (None to use settings default)
+ alias (dict): Alias dictionary
key: alias
value: list of aliases or email addresses
warn_on_error (bool): True to print a warning when an alias fails to
@@ -589,8 +610,6 @@ def lookup_email(lookup_name, alias=None, warn_on_error=True, level=0):
Recursive email alias at 'mary'
['j.bloggs@napier.co.nz', 'm.poppins@cloud.net']
"""
- if not alias:
- alias = settings.alias
lookup_name = lookup_name.strip()
if '@' in lookup_name: # Perhaps a real email address
return [lookup_name]
@@ -625,7 +644,7 @@ def get_top_level():
"""Return name of top-level directory for this git repo.
Returns:
- str: Full path to git top-level directory
+ str: Full path to git top-level directory, or None if not found
This test makes sure that we are running tests in the right subdir
@@ -633,7 +652,12 @@ def get_top_level():
os.path.join(get_top_level(), 'tools', 'patman')
True
"""
- return command.output_one_line('git', 'rev-parse', '--show-toplevel')
+ result = command.run_one(
+ 'git', 'rev-parse', '--show-toplevel', oneline=True, capture=True,
+ capture_stderr=True, raise_on_error=False)
+ if result.return_code:
+ return None
+ return result.stdout.strip()
def get_alias_file():
@@ -651,7 +675,7 @@ def get_alias_file():
if os.path.isabs(fname):
return fname
- return os.path.join(get_top_level(), fname)
+ return os.path.join(get_top_level() or '', fname)
def get_default_user_name():
@@ -693,25 +717,26 @@ def setup():
# Check for a git alias file also
global USE_NO_DECORATE
- alias_fname = get_alias_file()
- if alias_fname:
- settings.ReadGitAliases(alias_fname)
cmd = log_cmd(None, count=0)
USE_NO_DECORATE = (command.run_one(*cmd, raise_on_error=False)
.return_code == 0)
-def get_hash(spec):
+def get_hash(spec, git_dir=None):
"""Get the hash of a commit
Args:
spec (str): Git commit to show, e.g. 'my-branch~12'
+ git_dir (str): Path to git repository (None to use default)
Returns:
str: Hash of commit
"""
- return command.output_one_line('git', 'show', '-s', '--pretty=format:%H',
- spec)
+ cmd = ['git']
+ if git_dir:
+ cmd += ['--git-dir', git_dir]
+ cmd += ['show', '-s', '--pretty=format:%H', spec]
+ return command.output_one_line(*cmd)
def get_head():
@@ -723,18 +748,138 @@ def get_head():
return get_hash('HEAD')
-def get_branch():
+def get_branch(git_dir=None):
"""Get the branch we are currently on
Return:
str: branch name, or None if none
+ git_dir (str): Path to git repository (None to use default)
"""
- out = command.output_one_line('git', 'rev-parse', '--abbrev-ref', 'HEAD')
+ cmd = ['git']
+ if git_dir:
+ cmd += ['--git-dir', git_dir]
+ cmd += ['rev-parse', '--abbrev-ref', 'HEAD']
+ out = command.output_one_line(*cmd, raise_on_error=False)
if out == 'HEAD':
return None
return out
+def check_dirty(git_dir=None, work_tree=None):
+ """Check if the tree is dirty
+
+ Args:
+ git_dir (str): Path to git repository (None to use default)
+ work_tree (str): Git worktree to use, or None if none
+
+ Return:
+ list of str: Dirty filenames and their status codes
+ """
+ cmd = ['git']
+ if git_dir:
+ cmd += ['--git-dir', git_dir]
+ if work_tree:
+ cmd += ['--work-tree', work_tree]
+ cmd += ['status', '--porcelain', '--untracked-files=no']
+ return command.output(*cmd).splitlines()
+
+
+def check_branch(name, git_dir=None):
+ """Check if a branch exists
+
+ Args:
+ name (str): Name of the branch to check
+ git_dir (str): Path to git repository (None to use default)
+
+ Return:
+ bool: True if the branch exists, else False
+ """
+ cmd = ['git']
+ if git_dir:
+ cmd += ['--git-dir', git_dir]
+ cmd += ['branch', '--list', name]
+
+ # This produces ' <name>' or '* <name>'
+ out = command.output(*cmd).rstrip()
+ return out[2:] == name
+
+
+def rename_branch(old_name, name, git_dir=None):
+ """Check if a branch exists
+
+ Args:
+ old_name (str): Name of the branch to rename
+ name (str): New name for the branch
+ git_dir (str): Path to git repository (None to use default)
+
+ Return:
+ str: Output from command
+ """
+ cmd = ['git']
+ if git_dir:
+ cmd += ['--git-dir', git_dir]
+ cmd += ['branch', '--move', old_name, name]
+
+ # 'git branch --move' prints nothing on success
+ return command.output(*cmd).rstrip()
+
+
+def get_commit_message(commit, git_dir=None):
+ """Gets the commit message for a commit
+
+ Args:
+ commit (str): commit to check
+ git_dir (str): Path to git repository (None to use default)
+
+ Return:
+ list of str: Lines from the commit message
+ """
+ cmd = ['git']
+ if git_dir:
+ cmd += ['--git-dir', git_dir]
+ cmd += ['show', '--quiet', commit]
+
+ out = command.output(*cmd)
+ # the header is followed by a blank line
+ lines = out.splitlines()
+ empty = lines.index('')
+ msg = lines[empty + 1:]
+ unindented = [line[4:] for line in msg]
+
+ return unindented
+
+
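'git show --quiet' prints the commit header, a blank line, then the message body indented by four spaces; the helper locates the first blank line and strips that indent. A self-contained illustration with invented output:

    out = ('commit 1a2b3c4\n'
           'Author: Jane <jane@example.com>\n'
           '\n'
           '    subject line\n'
           '\n'
           '    body text')

    lines = out.splitlines()
    msg = lines[lines.index('') + 1:]
    print([line[4:] for line in msg])   # ['subject line', '', 'body text']
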
+def show_commit(commit, msg=True, diffstat=False, patch=False, colour=True,
+ git_dir=None):
+ """Runs 'git show' and returns the output
+
+ Args:
+ commit (str): commit to check
+ msg (bool): Show the commit message
+ diffstat (bool): True to include the diffstat
+ patch (bool): True to include the patch
+ colour (bool): True to force use of colour
+ git_dir (str): Path to git repository (None to use default)
+
+ Return:
+ str: Output from the 'git show' command
+ """
+ cmd = ['git']
+ if git_dir:
+ cmd += ['--git-dir', git_dir]
+ cmd += ['show']
+ if colour:
+ cmd.append('--color')
+ if not msg:
+ cmd.append('--oneline')
+ if diffstat:
+ cmd.append('--stat')
+ else:
+ cmd.append('--quiet')
+ if patch:
+ cmd.append('--patch')
+ cmd.append(commit)
+
+ return command.output(*cmd)
+
+
if __name__ == "__main__":
import doctest
diff --git a/tools/u_boot_pylib/terminal.py b/tools/u_boot_pylib/terminal.py
index 2cd5a54ab52..69c183e85e5 100644
--- a/tools/u_boot_pylib/terminal.py
+++ b/tools/u_boot_pylib/terminal.py
@@ -7,9 +7,12 @@
This module handles terminal interaction including ANSI color codes.
"""
+from contextlib import contextmanager
+from io import StringIO
import os
import re
import shutil
+import subprocess
import sys
# Selection of when we want our output to be colored
@@ -26,6 +29,13 @@ last_print_len = None
# stackoverflow.com/questions/14693701/how-can-i-remove-the-ansi-escape-sequences-from-a-string-in-python
ansi_escape = re.compile(r'\x1b(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
+# True if we are capturing console output
+CAPTURING = False
+
+# Set this to False to disable output-capturing globally
+USE_CAPTURE = True
+
+
class PrintLine:
"""A line of text output
@@ -130,7 +140,8 @@ def trim_ascii_len(text, size):
return out
-def tprint(text='', newline=True, colour=None, limit_to_line=False, bright=True):
+def tprint(text='', newline=True, colour=None, limit_to_line=False,
+ bright=True, back=None, col=None):
"""Handle a line of output to the terminal.
In test mode this is recorded in a list. Otherwise it is output to the
@@ -146,9 +157,10 @@ def tprint(text='', newline=True, colour=None, limit_to_line=False, bright=True)
if print_test_mode:
print_test_list.append(PrintLine(text, colour, newline, bright))
else:
- if colour:
- col = Color()
- text = col.build(colour, text, bright=bright)
+ if colour is not None:
+ if not col:
+ col = Color()
+ text = col.build(colour, text, bright=bright, back=back)
if newline:
print(text)
last_print_len = None
@@ -200,14 +212,23 @@ def echo_print_test_lines():
if line.newline:
print()
+def have_terminal():
+ """Check if we have an interactive terminal or not
+
+ Returns:
+ bool: True if an interactive terminal is attached
+ """
+ return os.isatty(sys.stdout.fileno())
+
-class Color(object):
+class Color():
"""Conditionally wraps text in ANSI color escape sequences."""
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
BOLD = -1
- BRIGHT_START = '\033[1;%dm'
- NORMAL_START = '\033[22;%dm'
+ BRIGHT_START = '\033[1;%d%sm'
+ NORMAL_START = '\033[22;%d%sm'
BOLD_START = '\033[1m'
+ BACK_EXTRA = ';%d'
RESET = '\033[0m'
def __init__(self, colored=COLOR_IF_TERMINAL):
@@ -224,7 +245,14 @@ class Color(object):
except:
self._enabled = False
- def start(self, color, bright=True):
+ def enabled(self):
+ """Check if colour is enabled
+
+ Return: True if enabled, else False
+ """
+ return self._enabled
+
+ def start(self, color, bright=True, back=None):
"""Returns a start color code.
Args:
@@ -235,8 +263,11 @@ class Color(object):
color, otherwise returns empty string
"""
if self._enabled:
+ if color == self.BOLD:
+ return self.BOLD_START
base = self.BRIGHT_START if bright else self.NORMAL_START
- return base % (color + 30)
+ extra = self.BACK_EXTRA % (back + 40) if back is not None else ''
+ return base % (color + 30, extra)
return ''
def stop(self):
@@ -250,7 +281,7 @@ class Color(object):
return self.RESET
return ''
- def build(self, color, text, bright=True):
+ def build(self, color, text, bright=True, back=None):
"""Returns text with conditionally added color escape sequences.
Keyword arguments:
@@ -265,9 +296,51 @@ class Color(object):
"""
if not self._enabled:
return text
- if color == self.BOLD:
- start = self.BOLD_START
- else:
- base = self.BRIGHT_START if bright else self.NORMAL_START
- start = base % (color + 30)
- return start + text + self.RESET
+ return self.start(color, bright, back) + text + self.RESET
+
+
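The new back parameter appends a background code (40 + colour) to the escape sequence. A usage sketch, assuming a colour-capable terminal and the module's COLOR_ALWAYS selection constant:

    from u_boot_pylib import terminal

    col = terminal.Color(terminal.COLOR_ALWAYS)
    # Bright red text on a yellow background: '\x1b[1;31;43merror\x1b[0m'
    print(col.build(col.RED, 'error', back=col.YELLOW))
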
+# Use this to suppress stdout/stderr output:
+# with terminal.capture() as (stdout, stderr)
+# ...do something...
+@contextmanager
+def capture():
+ """Capture sys.stdout and sys.stderr into StringIO buffers"""
+ global CAPTURING
+
+ capture_out, capture_err = StringIO(), StringIO()
+ old_out, old_err = sys.stdout, sys.stderr
+ try:
+ CAPTURING = True
+ sys.stdout, sys.stderr = capture_out, capture_err
+ yield capture_out, capture_err
+ finally:
+ sys.stdout, sys.stderr = old_out, old_err
+ CAPTURING = False
+ if not USE_CAPTURE:
+ sys.stdout.write(capture_out.getvalue())
+ sys.stderr.write(capture_err.getvalue())
+
+
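Usage of the new context manager; output is swallowed unless USE_CAPTURE has been set to False (for example via the new -N test option):

    from u_boot_pylib import terminal

    with terminal.capture() as (stdout, stderr):
        print('quiet')
    assert stdout.getvalue() == 'quiet\n'
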
+@contextmanager
+def pager():
+ """Simple pager for outputting lots of text
+
+ Usage:
+ with terminal.pager():
+ print(...)
+ """
+ proc = None
+ old_stdout = None
+ try:
+ less = os.getenv('PAGER')
+ if not CAPTURING and less != 'none' and have_terminal():
+ if not less:
+ less = 'less -R --quit-if-one-screen'
+ proc = subprocess.Popen(less, stdin=subprocess.PIPE, text=True,
+ shell=True)
+ old_stdout = sys.stdout
+ sys.stdout = proc.stdin
+ yield
+ finally:
+ if proc:
+ sys.stdout = old_stdout
+ proc.communicate()
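The pager honours $PAGER, falls back to 'less -R --quit-if-one-screen', and is skipped while capturing, when no terminal is attached, or when PAGER is set to 'none'. A usage sketch with stand-in output lines:

    import os
    from u_boot_pylib import terminal

    os.environ['PAGER'] = 'none'   # disable paging for scripted runs
    with terminal.pager():
        for line in ['first', 'second']:   # stand-in for real report text
            print(line)
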
diff --git a/tools/u_boot_pylib/test_util.py b/tools/u_boot_pylib/test_util.py
index 637403f8715..d258a1935c9 100644
--- a/tools/u_boot_pylib/test_util.py
+++ b/tools/u_boot_pylib/test_util.py
@@ -3,7 +3,6 @@
# Copyright (c) 2016 Google, Inc
#
-from contextlib import contextmanager
import doctest
import glob
import multiprocessing
@@ -13,8 +12,7 @@ import sys
import unittest
from u_boot_pylib import command
-
-from io import StringIO
+from u_boot_pylib import terminal
use_concurrent = True
try:
@@ -113,20 +111,6 @@ def run_test_coverage(prog, filter_fname, exclude_list, build_dir,
raise ValueError('Test coverage failure')
-# Use this to suppress stdout/stderr output:
-# with capture_sys_output() as (stdout, stderr)
-# ...do something...
-@contextmanager
-def capture_sys_output():
- capture_out, capture_err = StringIO(), StringIO()
- old_out, old_err = sys.stdout, sys.stderr
- try:
- sys.stdout, sys.stderr = capture_out, capture_err
- yield capture_out, capture_err
- finally:
- sys.stdout, sys.stderr = old_out, old_err
-
-
class FullTextTestResult(unittest.TextTestResult):
"""A test result class that can print extended text results to a stream
@@ -172,8 +156,8 @@ class FullTextTestResult(unittest.TextTestResult):
super().addSkip(test, reason)
-def run_test_suites(toolname, debug, verbosity, test_preserve_dirs, processes,
- test_name, toolpath, class_and_module_list):
+def run_test_suites(toolname, debug, verbosity, no_capture, test_preserve_dirs,
+ processes, test_name, toolpath, class_and_module_list):
"""Run a series of test suites and collect the results
Args:
@@ -196,6 +180,9 @@ def run_test_suites(toolname, debug, verbosity, test_preserve_dirs, processes,
sys.argv.append('-D')
if verbosity:
sys.argv.append('-v%d' % verbosity)
+ if no_capture:
+ sys.argv.append('-N')
+ terminal.USE_CAPTURE = False
if toolpath:
for path in toolpath:
sys.argv += ['--toolpath', path]
@@ -208,7 +195,7 @@ def run_test_suites(toolname, debug, verbosity, test_preserve_dirs, processes,
resultclass=FullTextTestResult,
)
- if use_concurrent and processes != 1:
+ if use_concurrent and processes != 1 and not test_name:
suite = ConcurrentTestSuite(suite,
fork_for_tests(processes or multiprocessing.cpu_count()))
@@ -224,7 +211,7 @@ def run_test_suites(toolname, debug, verbosity, test_preserve_dirs, processes,
setup_test_args = getattr(module, 'setup_test_args')
setup_test_args(preserve_indir=test_preserve_dirs,
preserve_outdirs=test_preserve_dirs and test_name is not None,
- toolpath=toolpath, verbosity=verbosity)
+ toolpath=toolpath, verbosity=verbosity, no_capture=no_capture)
if test_name:
# Since Python v3.5 If an ImportError or AttributeError occurs
# while traversing a name then a synthetic test that raises that
diff --git a/tools/u_boot_pylib/tout.py b/tools/u_boot_pylib/tout.py
index 6bd2806f88f..ca72108d6bc 100644
--- a/tools/u_boot_pylib/tout.py
+++ b/tools/u_boot_pylib/tout.py
@@ -9,7 +9,7 @@ import sys
from u_boot_pylib import terminal
# Output verbosity levels that we support
-ERROR, WARNING, NOTICE, INFO, DETAIL, DEBUG = range(6)
+FATAL, ERROR, WARNING, NOTICE, INFO, DETAIL, DEBUG = range(7)
in_progress = False
@@ -42,12 +42,12 @@ def user_is_present():
Returns:
True if it thinks the user is there, and False otherwise
"""
- return stdout_is_tty and verbose > 0
+ return stdout_is_tty and verbose > ERROR
def clear_progress():
"""Clear any active progress message on the terminal."""
global in_progress
- if verbose > 0 and stdout_is_tty and in_progress:
+ if verbose > ERROR and stdout_is_tty and in_progress:
_stdout.write('\r%s\r' % (" " * len (_progress)))
_stdout.flush()
in_progress = False
@@ -60,7 +60,7 @@ def progress(msg, warning=False, trailer='...'):
warning: True if this is a warning."""
global in_progress
clear_progress()
- if verbose > 0:
+ if verbose > ERROR:
_progress = msg + trailer
if stdout_is_tty:
col = _color.YELLOW if warning else _color.GREEN
@@ -87,6 +87,8 @@ def _output(level, msg, color=None):
print(msg, file=sys.stderr)
else:
print(msg)
+ if level == FATAL:
+ sys.exit(1)
def do_output(level, msg):
"""Output a message to the terminal.
@@ -98,6 +100,14 @@ def do_output(level, msg):
"""
_output(level, msg)
+def fatal(msg):
+ """Display an error message and exit
+
+ Args:
+ msg; Message to display.
+ """
+ _output(FATAL, msg, _color.RED)
+
def error(msg):
"""Display an error message
@@ -153,20 +163,21 @@ def user_output(msg):
Args:
msg; Message to display.
"""
- _output(0, msg)
+ _output(ERROR, msg)
-def init(_verbose=WARNING, stdout=sys.stdout):
+def init(_verbose=WARNING, stdout=sys.stdout, allow_colour=True):
"""Initialize a new output object.
Args:
- verbose: Verbosity level (0-4).
+ verbose: Verbosity level (0-6).
stdout: File to use for stdout.
+ allow_colour: True to allow coloured output; False to force plain text
"""
global verbose, _progress, _color, _stdout, stdout_is_tty
verbose = _verbose
_progress = '' # Our last progress message
- _color = terminal.Color()
+ _color = terminal.Color(terminal.COLOR_IF_TERMINAL if allow_colour
+ else terminal.COLOR_NEVER)
_stdout = stdout
# TODO(sjg): Move this into Chromite libraries when we have them
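
The new FATAL level sits below ERROR in the numbering, so fatal messages are always shown, and fatal() exits the process after printing in red. A usage sketch, assuming the module's existing warning() helper:

    from u_boot_pylib import tout

    tout.init(tout.NOTICE)
    tout.warning('non-fatal problem')   # shown at NOTICE verbosity and above
    tout.fatal('cannot continue')       # printed in red, then sys.exit(1)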