Diffstat (limited to 'tools')
62 files changed, 997 insertions, 325 deletions
diff --git a/tools/.gitignore b/tools/.gitignore index 0108c567309..6a5c613f772 100644 --- a/tools/.gitignore +++ b/tools/.gitignore @@ -29,6 +29,7 @@ /mxsboot /ncb /prelink-riscv +/preload_check_sign /printinitialenv /proftool /relocate-rela diff --git a/tools/Kconfig b/tools/Kconfig index 01ff0fcf748..8e272ee99a8 100644 --- a/tools/Kconfig +++ b/tools/Kconfig @@ -9,6 +9,11 @@ config MKIMAGE_DTC_PATH some cases the system dtc may not support all required features and the path to a different version should be given here. +config TOOLS_IMAGE_PRE_LOAD + def_bool y + help + Enable pre-load signature support in the tools builds. + config TOOLS_CRC16 def_bool y help diff --git a/tools/Makefile b/tools/Makefile index 237fa900a24..d0e4d2d16c3 100644 --- a/tools/Makefile +++ b/tools/Makefile @@ -66,6 +66,7 @@ mkenvimage-objs := mkenvimage.o os_support.o generated/lib/crc32.o hostprogs-y += dumpimage mkimage hostprogs-$(CONFIG_TOOLS_LIBCRYPTO) += fit_info fit_check_sign hostprogs-$(CONFIG_TOOLS_LIBCRYPTO) += fdt_add_pubkey +hostprogs-$(CONFIG_TOOLS_LIBCRYPTO) += preload_check_sign ifneq ($(CONFIG_CMD_BOOTEFI_SELFTEST)$(CONFIG_FWU_MDATA_GPT_BLK),) hostprogs-y += file2include @@ -75,6 +76,9 @@ FIT_OBJS-y := fit_common.o fit_image.o image-host.o generated/boot/image-fit.o FIT_SIG_OBJS-$(CONFIG_TOOLS_LIBCRYPTO) := image-sig-host.o generated/boot/image-fit-sig.o FIT_CIPHER_OBJS-$(CONFIG_TOOLS_LIBCRYPTO) := generated/boot/image-cipher.o +HOSTCFLAGS_image-host.o += \ + $(shell pkg-config --cflags libssl libcrypto 2> /dev/null || echo "") + # The following files are synced with upstream DTC. # Use synced versions from scripts/dtc/libfdt/. LIBFDT_OBJS := $(addprefix libfdt/, fdt.o fdt_ro.o fdt_wip.o fdt_sw.o fdt_rw.o \ @@ -83,12 +87,18 @@ LIBFDT_OBJS := $(addprefix libfdt/, fdt.o fdt_ro.o fdt_wip.o fdt_sw.o fdt_rw.o \ RSA_OBJS-$(CONFIG_TOOLS_LIBCRYPTO) := $(addprefix generated/lib/rsa/, \ rsa-sign.o rsa-verify.o \ rsa-mod-exp.o) +HOSTCFLAGS_rsa-sign.o += \ + $(shell pkg-config --cflags libssl libcrypto 2> /dev/null || echo "") ECDSA_OBJS-$(CONFIG_TOOLS_LIBCRYPTO) := $(addprefix generated/lib/ecdsa/, ecdsa-libcrypto.o) +HOSTCFLAGS_ecdsa-libcrypto.o += \ + $(shell pkg-config --cflags libssl libcrypto 2> /dev/null || echo "") AES_OBJS-$(CONFIG_TOOLS_LIBCRYPTO) := $(addprefix generated/lib/aes/, \ aes-encrypt.o aes-decrypt.o) +PRELOAD_OBJS-$(CONFIG_TOOLS_LIBCRYPTO) := generated/boot/image-pre-load.o + # Cryptographic helpers and image types that depend on openssl/libcrypto LIBCRYPTO_OBJS-$(CONFIG_TOOLS_LIBCRYPTO) := \ generated/lib/fdt-libcrypto.o \ @@ -158,6 +168,9 @@ fit_info-objs := $(dumpimage-mkimage-objs) fit_info.o fit_check_sign-objs := $(dumpimage-mkimage-objs) fit_check_sign.o fdt_add_pubkey-objs := $(dumpimage-mkimage-objs) fdt_add_pubkey.o file2include-objs := file2include.o +preload_check_sign-objs := $(dumpimage-mkimage-objs) $(PRELOAD_OBJS-y) preload_check_sign.o +HOSTCFLAGS_preload_check_sign.o += \ + $(shell pkg-config --cflags libssl libcrypto 2> /dev/null || echo "") ifneq ($(CONFIG_MX23)$(CONFIG_MX28)$(CONFIG_TOOLS_LIBCRYPTO),) # Add CFG_MXS into host CFLAGS, so we can check whether or not register @@ -195,6 +208,9 @@ HOSTLDLIBS_dumpimage := $(HOSTLDLIBS_mkimage) HOSTLDLIBS_fit_info := $(HOSTLDLIBS_mkimage) HOSTLDLIBS_fit_check_sign := $(HOSTLDLIBS_mkimage) HOSTLDLIBS_fdt_add_pubkey := $(HOSTLDLIBS_mkimage) +HOSTLDLIBS_preload_check_sign := $(HOSTLDLIBS_mkimage) +HOSTLDLIBS_preload_check_sign += \ + $(shell pkg-config --libs libssl libcrypto 2> /dev/null || echo "-lssl -lcrypto") 
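The HOSTCFLAGS_*/HOSTLDLIBS_* additions above locate OpenSSL through pkg-config and fall back to plain -lssl -lcrypto when pkg-config is absent or does not know the libraries. A rough Python sketch of that probe-with-fallback idiom (the helper name is illustrative, not part of the patch):

import shutil
import subprocess

def openssl_libs():
    # Mimic: $(shell pkg-config --libs libssl libcrypto 2> /dev/null || echo "-lssl -lcrypto")
    if shutil.which('pkg-config'):
        try:
            out = subprocess.run(['pkg-config', '--libs', 'libssl', 'libcrypto'],
                                 capture_output=True, text=True, check=True)
            return out.stdout.split()
        except subprocess.CalledProcessError:
            pass
    return ['-lssl', '-lcrypto']

print(' '.join(openssl_libs()))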
hostprogs-$(CONFIG_EXYNOS5250) += mkexynosspl hostprogs-$(CONFIG_EXYNOS5420) += mkexynosspl diff --git a/tools/binman/bintool.py b/tools/binman/bintool.py index 3c4ad1adbb9..81872db377f 100644 --- a/tools/binman/bintool.py +++ b/tools/binman/bintool.py @@ -328,7 +328,8 @@ class Bintool: return result.stdout @classmethod - def build_from_git(cls, git_repo, make_targets, bintool_path, flags=None): + def build_from_git(cls, git_repo, make_targets, bintool_path, + flags=None, git_branch=None, make_path=None): """Build a bintool from a git repo This clones the repo in a temporary directory, builds it with 'make', @@ -341,6 +342,9 @@ class Bintool: bintool_path (str): Relative path of the tool in the repo, after build is complete flags (list of str): Flags or variables to pass to make, or None + git_branch (str): Branch of git repo, or None to use the default + make_path (str): Relative path inside git repo containing the + Makefile, or None Returns: tuple: @@ -350,10 +354,17 @@ class Bintool: """ tmpdir = tempfile.mkdtemp(prefix='binmanf.') print(f"- clone git repo '{git_repo}' to '{tmpdir}'") - tools.run('git', 'clone', '--depth', '1', git_repo, tmpdir) + if git_branch: + tools.run('git', 'clone', '--depth', '1', '--branch', git_branch, + git_repo, tmpdir) + else: + tools.run('git', 'clone', '--depth', '1', git_repo, tmpdir) for target in make_targets: print(f"- build target '{target}'") - cmd = ['make', '-C', tmpdir, '-j', f'{multiprocessing.cpu_count()}', + makedir = tmpdir + if make_path: + makedir = os.path.join(tmpdir, make_path) + cmd = ['make', '-C', makedir, '-j', f'{multiprocessing.cpu_count()}', target] if flags: cmd += flags diff --git a/tools/binman/bintool_test.py b/tools/binman/bintool_test.py index f9b16d4c73b..949d6f4c8a9 100644 --- a/tools/binman/bintool_test.py +++ b/tools/binman/bintool_test.py @@ -303,6 +303,7 @@ class TestBintool(unittest.TestCase): # See Bintool.build_from_git() tmpdir = cmd[2] self.fname = os.path.join(tmpdir, 'pathname') + os.makedirs(os.path.dirname(tmpdir), exist_ok=True) tools.write_file(self.fname, b'hello') expected = b'this is a test' diff --git a/tools/binman/bintools.rst b/tools/binman/bintools.rst index cd05ad8cb26..9f6cab544a5 100644 --- a/tools/binman/bintools.rst +++ b/tools/binman/bintools.rst @@ -52,6 +52,14 @@ Bintool: cst: Image generation for U-Boot This bintool supports running `cst` with some basic parameters as needed by binman. +cst (imx code signing tool) is used for sigining bootloader binaries for +various i.MX SoCs. + +See `Code Signing Tool Users Guide`_ for more information. + +.. _`Code Signing Tool Users Guide`: + https://community.nxp.com/pwmxy87654/attachments/pwmxy87654/imx-processors/202591/1/CST_UG.pdf + Bintool: fdt_add_pubkey: Add public key to control dtb (spl or u-boot proper) diff --git a/tools/binman/btool/cst.py b/tools/binman/btool/cst.py index 30e78bdbbd9..8a3981adc89 100644 --- a/tools/binman/btool/cst.py +++ b/tools/binman/btool/cst.py @@ -12,6 +12,14 @@ class Bintoolcst(bintool.Bintool): This bintool supports running `cst` with some basic parameters as needed by binman. + + cst (imx code signing tool) is used for sigining bootloader binaries for + various i.MX SoCs. + + See `Code Signing Tool Users Guide`_ for more information. + + .. 
_`Code Signing Tool Users Guide`: + https://community.nxp.com/pwmxy87654/attachments/pwmxy87654/imx-processors/202591/1/CST_UG.pdf """ def __init__(self, name): super().__init__(name, 'Sign NXP i.MX image') @@ -29,20 +37,17 @@ class Bintoolcst(bintool.Bintool): return self.run_cmd(*args) def fetch(self, method): - """Fetch handler for cst - - This installs cst using the apt utility. - - Args: - method (FETCH_...): Method to use - - Returns: - True if the file was fetched and now installed, None if a method - other than FETCH_BIN was requested - - Raises: - Valuerror: Fetching could not be completed - """ - if method != bintool.FETCH_BIN: + """Build cst from git""" + if method != bintool.FETCH_BUILD: return None - return self.apt_install('imx-code-signing-tool') + + from platform import architecture + arch = 'linux64' if architecture()[0] == '64bit' else 'linux32' + result = self.build_from_git( + 'https://gitlab.apertis.org/pkg/imx-code-signing-tool', + ['all'], + f'code/obj.{arch}/cst', + flags=[f'OSTYPE={arch}', 'ENCRYPTION=yes'], + git_branch='debian/unstable', + make_path=f'code/obj.{arch}/') + return result diff --git a/tools/binman/control.py b/tools/binman/control.py index e73c598298c..81f61e3e152 100644 --- a/tools/binman/control.py +++ b/tools/binman/control.py @@ -522,9 +522,13 @@ def _ProcessTemplates(parent): def _RemoveTemplates(parent): """Remove any templates in the binman description """ + del_nodes = [] for node in parent.subnodes: if node.name.startswith('template'): - node.Delete() + del_nodes.append(node) + + for node in del_nodes: + node.Delete() def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded, indir): """Prepare the images to be processed and select the device tree diff --git a/tools/binman/elf.py b/tools/binman/elf.py index c75f4478813..6ac960e0419 100644 --- a/tools/binman/elf.py +++ b/tools/binman/elf.py @@ -28,7 +28,7 @@ except: # pragma: no cover # BSYM in little endian, keep in sync with include/binman_sym.h BINMAN_SYM_MAGIC_VALUE = 0x4d595342 -# Information about an EFL symbol: +# Information about an ELF symbol: # section (str): Name of the section containing this symbol # address (int): Address of the symbol (its value) # size (int): Size of the symbol in bytes diff --git a/tools/binman/etype/fdtmap.py b/tools/binman/etype/fdtmap.py index f1f6217940f..2259404180c 100644 --- a/tools/binman/etype/fdtmap.py +++ b/tools/binman/etype/fdtmap.py @@ -106,6 +106,9 @@ class Entry_fdtmap(Entry): Returns: FDT map binary data """ + fsw = libfdt.FdtSw() + fsw.finish_reservemap() + def _AddNode(node): """Add a node to the FDT map""" for pname, prop in node.props.items(): @@ -134,8 +137,6 @@ class Entry_fdtmap(Entry): # Build a new tree with all nodes and properties starting from that # node - fsw = libfdt.FdtSw() - fsw.finish_reservemap() with fsw.add_node(''): fsw.property_string('image-node', node.name) _AddNode(node) diff --git a/tools/binman/etype/image_header.py b/tools/binman/etype/image_header.py index 24011884958..2114df8159f 100644 --- a/tools/binman/etype/image_header.py +++ b/tools/binman/etype/image_header.py @@ -62,6 +62,7 @@ class Entry_image_header(Entry): def _GetHeader(self): image_pos = self.GetSiblingImagePos('fdtmap') + offset = None if image_pos == False: self.Raise("'image_header' section must have an 'fdtmap' sibling") elif image_pos is None: diff --git a/tools/binman/etype/pre_load.py b/tools/binman/etype/pre_load.py index 2e4c72359ff..00f1a896767 100644 --- a/tools/binman/etype/pre_load.py +++ 
b/tools/binman/etype/pre_load.py @@ -112,6 +112,8 @@ class Entry_pre_load(Entry_collection): # Compute the signature if padding_name is None: padding_name = "pkcs-1.5" + padding = None + padding_args = None if padding_name == "pss": salt_len = key.size_in_bytes() - hash_image.digest_size - 2 padding = pss diff --git a/tools/binman/etype/ti_board_config.py b/tools/binman/etype/ti_board_config.py index c10d66edcb1..7c6773ac7bc 100644 --- a/tools/binman/etype/ti_board_config.py +++ b/tools/binman/etype/ti_board_config.py @@ -119,12 +119,14 @@ class Entry_ti_board_config(Entry_section): array of bytes representing value """ size = 0 + br = bytearray() if (data_type == '#/definitions/u8'): size = 1 elif (data_type == '#/definitions/u16'): size = 2 else: size = 4 + br = None if type(val) == int: br = val.to_bytes(size, byteorder='little') return br diff --git a/tools/binman/etype/x509_cert.py b/tools/binman/etype/x509_cert.py index 29630d1b86c..25e6808b7f9 100644 --- a/tools/binman/etype/x509_cert.py +++ b/tools/binman/etype/x509_cert.py @@ -84,6 +84,7 @@ class Entry_x509_cert(Entry_collection): input_fname = tools.get_output_filename('input.%s' % uniq) config_fname = tools.get_output_filename('config.%s' % uniq) tools.write_file(input_fname, input_data) + stdout = None if type == 'generic': stdout = self.openssl.x509_cert( cert_fname=output_fname, diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py index 733169b99f6..948fcc02259 100644 --- a/tools/binman/ftest.py +++ b/tools/binman/ftest.py @@ -303,7 +303,7 @@ class TestFunctional(unittest.TestCase): def setUp(self): # Enable this to turn on debugging output # tout.init(tout.DEBUG) - command.test_result = None + command.TEST_RESULT = None def tearDown(self): """Remove the temporary output directory""" @@ -345,8 +345,9 @@ class TestFunctional(unittest.TestCase): Arguments to pass, as a list of strings kwargs: Arguments to pass to Command.RunPipe() """ - result = command.run_pipe([[self._binman_pathname] + list(args)], - capture=True, capture_stderr=True, raise_on_error=False) + all_args = [self._binman_pathname] + list(args) + result = command.run_one(*all_args, capture=True, capture_stderr=True, + raise_on_error=False) if result.return_code and kwargs.get('raise_on_error', True): raise Exception("Error running '%s': %s" % (' '.join(args), result.stdout + result.stderr)) @@ -762,6 +763,16 @@ class TestFunctional(unittest.TestCase): return False return True + def _CheckPreload(self, image, key, algo="sha256,rsa2048", + padding="pkcs-1.5"): + try: + tools.run('preload_check_sign', '-k', key, '-a', algo, '-p', + padding, '-f', image) + except: + self.fail('Expected image signed with a pre-load') + return False + return True + def testRun(self): """Test a basic run with valid args""" result = self._RunBinman('-h') @@ -780,11 +791,11 @@ class TestFunctional(unittest.TestCase): def testFullHelpInternal(self): """Test that the full help is displayed with -H""" try: - command.test_result = command.CommandResult() + command.TEST_RESULT = command.CommandResult() result = self._DoBinman('-H') help_file = os.path.join(self._binman_dir, 'README.rst') finally: - command.test_result = None + command.TEST_RESULT = None def testHelp(self): """Test that the basic help is displayed with -h""" @@ -1872,7 +1883,7 @@ class TestFunctional(unittest.TestCase): def testGbb(self): """Test for the Chromium OS Google Binary Block""" - command.test_result = self._HandleGbbCommand + command.TEST_RESULT = self._HandleGbbCommand entry_args = { 'keydir': 'devkeys', 
'bmpblk': 'bmpblk.bin', @@ -1941,7 +1952,7 @@ class TestFunctional(unittest.TestCase): def testVblock(self): """Test for the Chromium OS Verified Boot Block""" self._hash_data = False - command.test_result = self._HandleVblockCommand + command.TEST_RESULT = self._HandleVblockCommand entry_args = { 'keydir': 'devkeys', } @@ -1974,7 +1985,7 @@ class TestFunctional(unittest.TestCase): def testVblockContent(self): """Test that the vblock signs the right data""" self._hash_data = True - command.test_result = self._HandleVblockCommand + command.TEST_RESULT = self._HandleVblockCommand entry_args = { 'keydir': 'devkeys', } @@ -5498,7 +5509,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap def testFitSubentryUsesBintool(self): """Test that binman FIT subentries can use bintools""" - command.test_result = self._HandleGbbCommand + command.TEST_RESULT = self._HandleGbbCommand entry_args = { 'keydir': 'devkeys', 'bmpblk': 'bmpblk.bin', @@ -5783,9 +5794,14 @@ fdt fdtmap Extract the devicetree blob from the fdtmap data = self._DoReadFileDtb( '230_pre_load.dts', entry_args=entry_args, extra_indirs=[os.path.join(self._binman_dir, 'test')])[0] + + image_fname = tools.get_output_filename('image.bin') + is_signed = self._CheckPreload(image_fname, self.TestFile("dev.key")) + self.assertEqual(PRE_LOAD_MAGIC, data[:len(PRE_LOAD_MAGIC)]) self.assertEqual(PRE_LOAD_VERSION, data[4:4 + len(PRE_LOAD_VERSION)]) self.assertEqual(PRE_LOAD_HDR_SIZE, data[8:8 + len(PRE_LOAD_HDR_SIZE)]) + self.assertEqual(is_signed, True) def testPreLoadNoKey(self): """Test an image with a pre-load heade0r with missing key""" @@ -6383,6 +6399,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap ename, prop = entry_m.group(1), entry_m.group(3) entry, entry_name, prop_name = image.LookupEntry(entries, name, msg) + expect_val = None if prop_name == 'offset': expect_val = entry.offset elif prop_name == 'image_pos': @@ -7973,5 +7990,12 @@ fdt fdtmap Extract the devicetree blob from the fdtmap """Test an image with an FIT with multiple FDT images using NAME""" self.CheckFitFdt('345_fit_fdt_name.dts', use_seq_num=False) + def testRemoveTemplate(self): + """Test whether template is removed""" + TestFunctional._MakeInputFile('my-blob.bin', b'blob') + TestFunctional._MakeInputFile('my-blob2.bin', b'other') + self._DoTestFile('346_remove_template.dts', + force_missing_bintools='openssl',) + if __name__ == "__main__": unittest.main() diff --git a/tools/binman/requirements.txt b/tools/binman/requirements.txt new file mode 100644 index 00000000000..f068ef75a30 --- /dev/null +++ b/tools/binman/requirements.txt @@ -0,0 +1,5 @@ +importlib_resources==6.5.2 +jsonschema==4.23.0 +pycryptodomex==3.21.0 +pyelftools==0.31 +yamllint==1.35.1 diff --git a/tools/binman/setup.py b/tools/binman/setup.py index 9a9206eb044..bec078a3d9b 100644 --- a/tools/binman/setup.py +++ b/tools/binman/setup.py @@ -1,6 +1,6 @@ # SPDX-License-Identifier: GPL-2.0+ -from distutils.core import setup +from setuptools import setup setup(name='binman', version='1.0', license='GPL-2.0+', diff --git a/tools/binman/state.py b/tools/binman/state.py index 45bae40c525..6772d3678fe 100644 --- a/tools/binman/state.py +++ b/tools/binman/state.py @@ -406,10 +406,13 @@ def CheckSetHashValue(node, get_data_func): hash_node = node.FindNode('hash') if hash_node: algo = hash_node.props.get('algo').value + data = None if algo == 'sha256': m = hashlib.sha256() m.update(get_data_func()) data = m.digest() + if data is None: + raise ValueError(f"Node '{node.path}': Unknown hash algorithm 
'{algo}'") for n in GetUpdateNodes(hash_node): n.SetData('value', data) diff --git a/tools/binman/test/346_remove_template.dts b/tools/binman/test/346_remove_template.dts new file mode 100644 index 00000000000..e05229f3ebc --- /dev/null +++ b/tools/binman/test/346_remove_template.dts @@ -0,0 +1,49 @@ +// SPDX-License-Identifier: GPL-2.0+ + +/dts-v1/; +/ { + binman: binman { + multiple-images; + + template_1: template-1 { + section { + phandle1: my-blob.bin { + filename = "my-blob.bin"; + type = "blob-ext"; + }; + }; + }; + template_2: template-2 { + section { + ti-secure { + content = <&phandle2>; + keyfile = "key.pem"; + }; + phandle2: my-blob.bin { + filename = "my-blob.bin"; + type = "blob-ext"; + }; + }; + }; + template_3: template-3 { + section { + phandle3: my-blob.bin { + filename = "my-blob.bin"; + type = "blob-ext"; + }; + }; + }; + + file1 { + insert-template = <&template_1>; + }; + + file2 { + insert-template = <&template_2>; + }; + + file3 { + insert-template = <&template_3>; + }; + }; +}; diff --git a/tools/buildman/boards.py b/tools/buildman/boards.py index e7aa0d85a58..2fe43c3fc89 100644 --- a/tools/buildman/boards.py +++ b/tools/buildman/boards.py @@ -251,9 +251,9 @@ class KconfigScanner: '-undef', '-x', 'assembler-with-cpp', defconfig] - result = command.run_pipe([cmd], capture=True, capture_stderr=True) + stdout = command.output(*cmd, capture_stderr=True) temp = tempfile.NamedTemporaryFile(prefix='buildman-') - tools.write_file(temp.name, result.stdout, False) + tools.write_file(temp.name, stdout, False) fname = temp.name tout.info(f'Processing #include to produce {defconfig}') else: diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py index cbf1345281b..4bea0a02b78 100644 --- a/tools/buildman/builder.py +++ b/tools/buildman/builder.py @@ -19,8 +19,8 @@ import time from buildman import builderthread from buildman import toolchain -from patman import gitutil from u_boot_pylib import command +from u_boot_pylib import gitutil from u_boot_pylib import terminal from u_boot_pylib import tools from u_boot_pylib.terminal import tprint @@ -510,7 +510,7 @@ class Builder: stage: Stage that we are at (mrproper, config, oldconfig, build) cwd: Directory where make should be run args: Arguments to pass to make - kwargs: Arguments to pass to command.run_pipe() + kwargs: Arguments to pass to command.run_one() """ def check_output(stream, data): @@ -531,11 +531,12 @@ class Builder: return False self._restarting_config = False - self._terminated = False + self._terminated = False cmd = [self.gnu_make] + list(args) - result = command.run_pipe([cmd], capture=True, capture_stderr=True, - cwd=cwd, raise_on_error=False, infile='/dev/null', - output_func=check_output, **kwargs) + result = command.run_one(*cmd, capture=True, capture_stderr=True, + cwd=cwd, raise_on_error=False, + infile='/dev/null', output_func=check_output, + **kwargs) if self._terminated: # Try to be helpful @@ -1095,14 +1096,13 @@ class Builder: diff = result[name] if name.startswith('_'): continue - if diff != 0: - color = self.col.RED if diff > 0 else self.col.GREEN + colour = self.col.RED if diff > 0 else self.col.GREEN msg = ' %s %+d' % (name, diff) if not printed_target: tprint('%10s %-15s:' % ('', result['_target']), newline=False) printed_target = True - tprint(msg, colour=color, newline=False) + tprint(msg, colour=colour, newline=False) if printed_target: tprint() if show_bloat: @@ -1353,6 +1353,7 @@ class Builder: for line in lines: if not line: continue + col = None if line[0] == '+': col = 
self.col.GREEN elif line[0] == '-': diff --git a/tools/buildman/builderthread.py b/tools/buildman/builderthread.py index 29e6cf32af1..b8578d5b97b 100644 --- a/tools/buildman/builderthread.py +++ b/tools/buildman/builderthread.py @@ -17,8 +17,8 @@ import sys import threading from buildman import cfgutil -from patman import gitutil from u_boot_pylib import command +from u_boot_pylib import gitutil from u_boot_pylib import tools RETURN_CODE_RETRY = -1 @@ -179,13 +179,12 @@ class BuilderThread(threading.Thread): cwd (str): Working directory to set, or None to leave it alone *args (list of str): Arguments to pass to 'make' **kwargs (dict): A list of keyword arguments to pass to - command.run_pipe() + command.run_one() Returns: CommandResult object """ - return self.builder.do_make(commit, brd, stage, cwd, *args, - **kwargs) + return self.builder.do_make(commit, brd, stage, cwd, *args, **kwargs) def _build_args(self, brd, out_dir, out_rel_dir, work_dir, commit_upto): """Set up arguments to the args list based on the settings @@ -588,9 +587,10 @@ class BuilderThread(threading.Thread): lines = [] for fname in BASE_ELF_FILENAMES: cmd = [f'{self.toolchain.cross}nm', '--size-sort', fname] - nm_result = command.run_pipe([cmd], capture=True, - capture_stderr=True, cwd=result.out_dir, - raise_on_error=False, env=env) + nm_result = command.run_one(*cmd, capture=True, + capture_stderr=True, + cwd=result.out_dir, + raise_on_error=False, env=env) if nm_result.stdout: nm_fname = self.builder.get_func_sizes_file( result.commit_upto, result.brd.target, fname) @@ -598,9 +598,10 @@ class BuilderThread(threading.Thread): print(nm_result.stdout, end=' ', file=outf) cmd = [f'{self.toolchain.cross}objdump', '-h', fname] - dump_result = command.run_pipe([cmd], capture=True, - capture_stderr=True, cwd=result.out_dir, - raise_on_error=False, env=env) + dump_result = command.run_one(*cmd, capture=True, + capture_stderr=True, + cwd=result.out_dir, + raise_on_error=False, env=env) rodata_size = '' if dump_result.stdout: objdump = self.builder.get_objdump_file(result.commit_upto, @@ -613,9 +614,10 @@ class BuilderThread(threading.Thread): rodata_size = fields[2] cmd = [f'{self.toolchain.cross}size', fname] - size_result = command.run_pipe([cmd], capture=True, - capture_stderr=True, cwd=result.out_dir, - raise_on_error=False, env=env) + size_result = command.run_one(*cmd, capture=True, + capture_stderr=True, + cwd=result.out_dir, + raise_on_error=False, env=env) if size_result.stdout: lines.append(size_result.stdout.splitlines()[1] + ' ' + rodata_size) @@ -624,9 +626,8 @@ class BuilderThread(threading.Thread): cmd = [f'{self.toolchain.cross}objcopy', '-O', 'binary', '-j', '.rodata.default_environment', 'env/built-in.o', 'uboot.env'] - command.run_pipe([cmd], capture=True, - capture_stderr=True, cwd=result.out_dir, - raise_on_error=False, env=env) + command.run_one(*cmd, capture=True, capture_stderr=True, + cwd=result.out_dir, raise_on_error=False, env=env) if not work_in_output: copy_files(result.out_dir, build_dir, '', ['uboot.env']) diff --git a/tools/buildman/control.py b/tools/buildman/control.py index 55d4d770c5c..5109b1cd5ce 100644 --- a/tools/buildman/control.py +++ b/tools/buildman/control.py @@ -20,9 +20,9 @@ from buildman import bsettings from buildman import cfgutil from buildman import toolchain from buildman.builder import Builder -from patman import gitutil from patman import patchstream from u_boot_pylib import command +from u_boot_pylib import gitutil from u_boot_pylib import terminal from u_boot_pylib 
import tools from u_boot_pylib.terminal import print_clear, tprint diff --git a/tools/buildman/func_test.py b/tools/buildman/func_test.py index 4e12c671a3d..b45eb95a1e6 100644 --- a/tools/buildman/func_test.py +++ b/tools/buildman/func_test.py @@ -18,8 +18,8 @@ from buildman import bsettings from buildman import cmdline from buildman import control from buildman import toolchain -from patman import gitutil from u_boot_pylib import command +from u_boot_pylib import gitutil from u_boot_pylib import terminal from u_boot_pylib import test_util from u_boot_pylib import tools @@ -187,7 +187,7 @@ class TestFunctional(unittest.TestCase): self._git_dir = os.path.join(self._base_dir, 'src') self._buildman_pathname = sys.argv[0] self._buildman_dir = os.path.dirname(os.path.realpath(sys.argv[0])) - command.test_result = self._HandleCommand + command.TEST_RESULT = self._HandleCommand bsettings.setup(None) bsettings.add_file(settings_data) self.setupToolchains() @@ -232,8 +232,8 @@ class TestFunctional(unittest.TestCase): self._toolchains.Add('gcc', test=False) def _RunBuildman(self, *args): - return command.run_pipe([[self._buildman_pathname] + list(args)], - capture=True, capture_stderr=True) + all_args = [self._buildman_pathname] + list(args) + return command.run_one(*all_args, capture=True, capture_stderr=True) def _RunControl(self, *args, brds=False, clean_dir=False, test_thread_exceptions=False, get_builder=True): @@ -266,7 +266,7 @@ class TestFunctional(unittest.TestCase): return result def testFullHelp(self): - command.test_result = None + command.TEST_RESULT = None result = self._RunBuildman('-H') help_file = os.path.join(self._buildman_dir, 'README.rst') # Remove possible extraneous strings @@ -277,7 +277,7 @@ class TestFunctional(unittest.TestCase): self.assertEqual(0, result.return_code) def testHelp(self): - command.test_result = None + command.TEST_RESULT = None result = self._RunBuildman('-h') help_file = os.path.join(self._buildman_dir, 'README.rst') self.assertTrue(len(result.stdout) > 1000) @@ -286,13 +286,13 @@ class TestFunctional(unittest.TestCase): def testGitSetup(self): """Test gitutils.Setup(), from outside the module itself""" - command.test_result = command.CommandResult(return_code=1) + command.TEST_RESULT = command.CommandResult(return_code=1) gitutil.setup() - self.assertEqual(gitutil.use_no_decorate, False) + self.assertEqual(gitutil.USE_NO_DECORATE, False) - command.test_result = command.CommandResult(return_code=0) + command.TEST_RESULT = command.CommandResult(return_code=0) gitutil.setup() - self.assertEqual(gitutil.use_no_decorate, True) + self.assertEqual(gitutil.USE_NO_DECORATE, True) def _HandleCommandGitLog(self, args): if args[-1] == '--': @@ -445,7 +445,7 @@ class TestFunctional(unittest.TestCase): stage: Stage that we are at (mrproper, config, build) cwd: Directory where make should be run args: Arguments to pass to make - kwargs: Arguments to pass to command.run_pipe() + kwargs: Arguments to pass to command.run_one() """ self._make_calls += 1 out_dir = '' diff --git a/tools/buildman/main.py b/tools/buildman/main.py index a948f36d9c0..72571b226d9 100755 --- a/tools/buildman/main.py +++ b/tools/buildman/main.py @@ -50,8 +50,7 @@ def run_tests(skip_net_tests, debug, verbose, args): # 'entry' module. 
result = test_util.run_test_suites( 'buildman', debug, verbose, False, args.threads, test_name, [], - [test.TestBuild, func_test.TestFunctional, - 'buildman.toolchain', 'patman.gitutil']) + [test.TestBuild, func_test.TestFunctional, 'buildman.toolchain']) return (0 if result.wasSuccessful() else 1) diff --git a/tools/buildman/requirements.txt b/tools/buildman/requirements.txt index 052d0ed5c6f..d48650cd1e5 100644 --- a/tools/buildman/requirements.txt +++ b/tools/buildman/requirements.txt @@ -1,5 +1,2 @@ -coverage==6.2 -jsonschema==4.17.3 -pycryptodome==3.20 -pyyaml==6.0 -yamllint==1.26.3 +filelock==3.0.12 +importlib_resources==6.5.2 diff --git a/tools/buildman/test.py b/tools/buildman/test.py index 385a34e5254..c5feb74a105 100644 --- a/tools/buildman/test.py +++ b/tools/buildman/test.py @@ -836,6 +836,7 @@ class TestBuild(unittest.TestCase): tmpdir = self.base_dir with (patch('time.time', side_effect=self.get_time), + patch('time.perf_counter', side_effect=self.get_time), patch('time.monotonic', side_effect=self.get_time), patch('time.sleep', side_effect=self.inc_time), patch('os.kill', side_effect=self.kill)): diff --git a/tools/buildman/toolchain.py b/tools/buildman/toolchain.py index 958f36f9f61..5d051e005da 100644 --- a/tools/buildman/toolchain.py +++ b/tools/buildman/toolchain.py @@ -100,7 +100,7 @@ class Toolchain: else: self.priority = priority if test: - result = command.run_pipe([cmd], capture=True, env=env, + result = command.run_one(*cmd, capture=True, env=env, raise_on_error=False) self.ok = result.return_code == 0 if verbose: diff --git a/tools/docker/Dockerfile b/tools/docker/Dockerfile index d2848ab85f3..a0fd174ff60 100644 --- a/tools/docker/Dockerfile +++ b/tools/docker/Dockerfile @@ -2,7 +2,7 @@ # This Dockerfile is used to build an image containing basic stuff to be used # to build U-Boot and run our test suites. 
-FROM ubuntu:jammy-20240911.1 +FROM ubuntu:jammy-20250404 LABEL org.opencontainers.image.authors="Tom Rini <trini@konsulko.com>" LABEL org.opencontainers.image.description=" This image is for building U-Boot inside a container" @@ -23,7 +23,7 @@ ENV ARCHS="aarch64 arc i386 m68k mips microblaze nios2 powerpc riscv64 riscv32 s ENV MIRROR=https://mirrors.edge.kernel.org/pub/tools/crosstool/files/bin # Toolchain version -ENV TCVER=13.2.0 +ENV TCVER=14.2.0 RUN echo "Building on $BUILDPLATFORM, for target $TARGETPLATFORM" @@ -32,7 +32,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \ apt-get update && apt-get install -y gnupg2 wget xz-utils RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - -RUN echo deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-17 main | tee /etc/apt/sources.list.d/llvm.list +RUN echo deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-18 main | tee /etc/apt/sources.list.d/llvm.list # Create a list of URLs to process, then pass them into a 'while read' loop RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then HOSTARCH=x86_64; else HOSTARCH=arm64; fi; ( \ @@ -64,8 +64,9 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ binutils-dev \ bison \ build-essential \ + byacc \ cgpt \ - clang-17 \ + clang-18 \ coreutils \ cpio \ curl \ @@ -74,8 +75,10 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ e2fsprogs \ efitools \ erofs-utils \ + exfatprogs \ expect \ fakeroot \ + fdisk \ flex \ gawk \ gdisk \ @@ -91,7 +94,6 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ libconfuse-dev \ libgit2-dev \ libjson-glib-dev \ - libguestfs-tools \ libgnutls28-dev \ libgnutls30 \ liblz4-tool \ @@ -105,7 +107,6 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ libtool \ libudev-dev \ libusb-1.0-0-dev \ - linux-image-generic \ lzma-alone \ lzop \ mount \ @@ -122,7 +123,6 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ python3 \ python3-dev \ python3-pip \ - python3-pyelftools \ python3-sphinx \ python3-virtualenv \ rpm2cpio \ @@ -143,9 +143,6 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ xxd \ zip -# Make kernels readable for libguestfs tools to work correctly -RUN chmod +r /boot/vmlinu* - # Build GRUB UEFI targets for ARM & RISC-V, 32-bit and 64-bit RUN git clone git://git.savannah.gnu.org/grub.git /tmp/grub && \ cd /tmp/grub && \ @@ -182,7 +179,7 @@ RUN git clone git://git.savannah.gnu.org/grub.git /tmp/grub && \ search search_fs_file search_fs_uuid search_label serial sleep test \ true && \ make clean && \ - ./configure --target=riscv64 --with-platform=efi \ + grub_cv_cc_mcmodel=no ./configure --target=riscv64 --with-platform=efi \ CC=gcc \ TARGET_CC=/opt/gcc-${TCVER}-nolibc/riscv64-linux/bin/riscv64-linux-gcc \ TARGET_OBJCOPY=/opt/gcc-${TCVER}-nolibc/riscv64-linux/bin/riscv64-linux-objcopy \ @@ -235,13 +232,16 @@ RUN git clone https://gitlab.com/qemu-project/qemu.git /tmp/qemu && \ # Build fiptool RUN git clone https://git.trustedfirmware.org/TF-A/trusted-firmware-a.git /tmp/tf-a && \ cd /tmp/tf-a/ && \ - git checkout v2.10.0 && \ + git checkout v2.12.0 && \ cd tools/fiptool && \ make -j$(nproc) && \ mkdir -p /usr/local/bin && \ cp fiptool /usr/local/bin && \ rm -rf /tmp/tf-a +# Download the Arm Architecture FVP platform. This file is double compressed. 
+RUN wget -O - https://developer.arm.com/-/cdn-downloads/permalink/FVPs-Architecture/FM-11.28/FVP_Base_RevC-2xAEMvA_11.28_23_Linux64.tgz | gunzip -dc | tar -C /opt -x + # Build genimage (required by some targets to generate disk images) RUN wget -O - https://github.com/pengutronix/genimage/releases/download/v14/genimage-14.tar.xz | tar -C /tmp -xJ && \ cd /tmp/genimage-14 && \ @@ -297,7 +297,8 @@ RUN wget -O - https://coreboot.org/releases/coreboot-24.08.tar.xz | tar -C /tmp make olddefconfig && \ make -j $(nproc) && \ sudo mkdir /opt/coreboot && \ - sudo cp build/coreboot.rom build/cbfstool /opt/coreboot/ + sudo cp build/coreboot.rom build/cbfstool /opt/coreboot/ && \ + rm -rf /tmp/coreboot-24.08 # Create our user/group RUN echo uboot ALL=NOPASSWD: ALL > /etc/sudoers.d/uboot @@ -308,12 +309,18 @@ USER uboot:uboot # COPY / ADD directives don't work as we need them to. RUN wget -O /tmp/pytest-requirements.txt https://source.denx.de/u-boot/u-boot/-/raw/master/test/py/requirements.txt RUN wget -O /tmp/sphinx-requirements.txt https://source.denx.de/u-boot/u-boot/-/raw/master/doc/sphinx/requirements.txt +RUN wget -O /tmp/binman-requirements.txt https://source.denx.de/u-boot/u-boot/-/raw/master/tools/binman/requirements.txt RUN wget -O /tmp/buildman-requirements.txt https://source.denx.de/u-boot/u-boot/-/raw/master/tools/buildman/requirements.txt +RUN wget -O /tmp/patman-requirements.txt https://source.denx.de/u-boot/u-boot/-/raw/master/tools/patman/requirements.txt +RUN wget -O /tmp/u_boot_pylib-requirements.txt https://source.denx.de/u-boot/u-boot/-/raw/master/tools/u_boot_pylib/requirements.txt RUN virtualenv -p /usr/bin/python3 /tmp/venv && \ . /tmp/venv/bin/activate && \ pip install -r /tmp/pytest-requirements.txt \ -r /tmp/sphinx-requirements.txt \ - -r /tmp/buildman-requirements.txt && \ + -r /tmp/binman-requirements.txt \ + -r /tmp/buildman-requirements.txt \ + -r /tmp/patman-requirements.txt \ + -r /tmp/u_boot_pylib-requirements.txt && \ deactivate && \ rm -rf /tmp/venv /tmp/*-requirements.txt diff --git a/tools/dtoc/setup.py b/tools/dtoc/setup.py index 5e092fe0872..ae9ad043b01 100644 --- a/tools/dtoc/setup.py +++ b/tools/dtoc/setup.py @@ -1,6 +1,6 @@ # SPDX-License-Identifier: GPL-2.0+ -from distutils.core import setup +from setuptools import setup setup(name='dtoc', version='1.0', license='GPL-2.0+', diff --git a/tools/image-host.c b/tools/image-host.c index e6de34fa059..a9b86902763 100644 --- a/tools/image-host.c +++ b/tools/image-host.c @@ -19,6 +19,11 @@ #include <openssl/evp.h> #endif +#if CONFIG_IS_ENABLED(IMAGE_PRE_LOAD) +#include <openssl/rsa.h> +#include <openssl/err.h> +#endif + /** * fit_set_hash_value - set hash value in requested has node * @fit: pointer to the FIT format image header @@ -729,11 +734,20 @@ static int strlist_add(struct strlist *list, const char *str) { char *dup; + if (!list || !str) + return -1; + dup = strdup(str); + if(!dup) + return -1; + list->strings = realloc(list->strings, (list->count + 1) * sizeof(char *)); - if (!list || !str) + if (!list->strings) { + free(dup); return -1; + } + list->strings[list->count++] = dup; return 0; @@ -1401,3 +1415,139 @@ int fit_check_sign(const void *fit, const void *key, return ret; } #endif + +#if CONFIG_IS_ENABLED(IMAGE_PRE_LOAD) +/** + * rsa_verify_openssl() - Verify a signature against some data with openssl API + * + * Verify a RSA PKCS1.5/PSS signature against an expected hash. 
+ * + * @info: Specifies the key and algorithms + * @region: Pointer to the input data + * @region_count: Number of region + * @sig: Signature + * @sig_len: Number of bytes in the signature + * Return: 0 if verified, -ve on error + */ +int rsa_verify_openssl(struct image_sign_info *info, + const struct image_region region[], int region_count, + uint8_t *sig, uint sig_len) +{ + EVP_PKEY *pkey = NULL; + EVP_PKEY_CTX *ckey = NULL; + EVP_MD_CTX *ctx = NULL; + int pad; + int size; + int i; + int ret = 0; + + if (!info) { + fprintf(stderr, "No info provided\n"); + ret = -EINVAL; + goto out; + } + + if (!info->key) { + fprintf(stderr, "No key provided\n"); + ret = -EINVAL; + goto out; + } + + if (!info->checksum) { + fprintf(stderr, "No checksum information\n"); + ret = -EINVAL; + goto out; + } + + if (!info->padding) { + fprintf(stderr, "No padding information\n"); + ret = -EINVAL; + goto out; + } + + if (region_count < 1) { + fprintf(stderr, "Invalid value for region_count: %d\n", region_count); + ret = -EINVAL; + goto out; + } + + pkey = (EVP_PKEY *)info->key; + + ckey = EVP_PKEY_CTX_new(pkey, NULL); + if (!ckey) { + ret = -ENOMEM; + fprintf(stderr, "EVK key context setup failed: %s\n", + ERR_error_string(ERR_get_error(), NULL)); + goto out; + } + + size = EVP_PKEY_size(pkey); + if (size > sig_len) { + fprintf(stderr, "Invalid signature size (%d bytes)\n", + size); + ret = -EINVAL; + goto out; + } + + ctx = EVP_MD_CTX_new(); + if (!ctx) { + ret = -ENOMEM; + fprintf(stderr, "EVP context creation failed: %s\n", + ERR_error_string(ERR_get_error(), NULL)); + goto out; + } + EVP_MD_CTX_init(ctx); + + if (EVP_DigestVerifyInit(ctx, &ckey, + EVP_get_digestbyname(info->checksum->name), + NULL, pkey) <= 0) { + ret = -EINVAL; + fprintf(stderr, "Verifier setup failed: %s\n", + ERR_error_string(ERR_get_error(), NULL)); + goto out; + } + + if (!strcmp(info->padding->name, "pkcs-1.5")) { + pad = RSA_PKCS1_PADDING; + } else if (!strcmp(info->padding->name, "pss")) { + pad = RSA_PKCS1_PSS_PADDING; + } else { + ret = -ENOMSG; + fprintf(stderr, "Unsupported padding: %s\n", + info->padding->name); + goto out; + } + + if (EVP_PKEY_CTX_set_rsa_padding(ckey, pad) <= 0) { + ret = -EINVAL; + fprintf(stderr, "padding setup has failed: %s\n", + ERR_error_string(ERR_get_error(), NULL)); + goto out; + } + + for (i=0 ; i < region_count ; ++i) { + if (EVP_DigestVerifyUpdate(ctx, region[i].data, + region[i].size) <= 0) { + ret = -EINVAL; + fprintf(stderr, "Hashing data failed: %s\n", + ERR_error_string(ERR_get_error(), NULL)); + goto out; + } + } + + if (EVP_DigestVerifyFinal(ctx, sig, sig_len) <= 0) { + ret = -EINVAL; + fprintf(stderr, "Verifying digest failed: %s\n", + ERR_error_string(ERR_get_error(), NULL)); + goto out; + } +out: + if (ctx) + EVP_MD_CTX_free(ctx); + + if (ret) + fprintf(stderr, "Failed to verify signature\n"); + + return ret; +} +#endif diff --git a/tools/imx8image.c b/tools/imx8image.c index 15510d3e712..0135b190951 100644 --- a/tools/imx8image.c +++ b/tools/imx8image.c @@ -290,6 +290,7 @@ static uint32_t parse_cfg_file(image_t *param_stack, char *name) } } + free(line); fclose(fd); return 0; } diff --git a/tools/imx8mimage.c b/tools/imx8mimage.c index d60d293e649..0f24ba75c0f 100644 --- a/tools/imx8mimage.c +++ b/tools/imx8mimage.c @@ -206,6 +206,7 @@ static uint32_t parse_cfg_file(char *name) } } + free(line); fclose(fd); return 0; } diff --git a/tools/imximage.c b/tools/imximage.c index 467d9f27d2a..55231caf8f3 100644 --- a/tools/imximage.c +++ b/tools/imximage.c @@ -783,6 +783,7 @@ static uint32_t 
parse_cfg_file(struct imx_header *imxhdr, char *name) } (*set_dcd_rst)(imxhdr, dcd_len, name, lineno); + free(line); fclose(fd); /* Exit if there is no BOOT_FROM field specifying the flash_offset */ diff --git a/tools/kwbimage.c b/tools/kwbimage.c index d1cbced28fc..3dcf5ba66b9 100644 --- a/tools/kwbimage.c +++ b/tools/kwbimage.c @@ -1653,6 +1653,12 @@ static int image_create_config_parse_oneline(char *line, char *unknown_msg = "Ignoring unknown line '%s'\n"; keyword = strtok_r(line, delimiters, &saveptr); + + if (!keyword) { + fprintf(stderr, "Parameter missing in line '%s'\n", line); + return -1; + } + keyword_id = recognize_keyword(keyword); if (!keyword_id) { diff --git a/tools/microcode-tool.py b/tools/microcode-tool.py index 24c02c4fca1..b726794751a 100755 --- a/tools/microcode-tool.py +++ b/tools/microcode-tool.py @@ -279,6 +279,9 @@ def MicrocodeTool(): if (not not options.mcfile) != (not not options.mcfile): parser.error("You must specify either header files or a microcode file, not both") + date = None + microcodes = None + license_text = None if options.headerfile: date, license_text, microcodes = ParseHeaderFiles(options.headerfile) elif options.mcfile: diff --git a/tools/mkimage.h b/tools/mkimage.h index 15741f250fd..5d6bcc9301a 100644 --- a/tools/mkimage.h +++ b/tools/mkimage.h @@ -37,6 +37,10 @@ static inline void *map_sysmem(ulong paddr, unsigned long len) return (void *)(uintptr_t)paddr; } +static inline void unmap_sysmem(const void *vaddr) +{ +} + static inline ulong map_to_sysmem(const void *ptr) { return (ulong)(uintptr_t)ptr; diff --git a/tools/patman/__init__.py b/tools/patman/__init__.py index 08eeffdf6d2..6de0e9fba10 100644 --- a/tools/patman/__init__.py +++ b/tools/patman/__init__.py @@ -1,5 +1,5 @@ # SPDX-License-Identifier: GPL-2.0+ __all__ = ['checkpatch', 'commit', 'control', 'func_test', 'get_maintainer', - 'gitutil', '__main__', 'patchstream', 'project', 'series', - 'settings','setup', 'status', 'test_checkpatch', 'test_settings'] + '__main__', 'patchstream', 'project', 'series', + 'settings', 'setup', 'status', 'test_checkpatch', 'test_settings'] diff --git a/tools/patman/__main__.py b/tools/patman/__main__.py index f645b38b647..36f1c08507c 100755 --- a/tools/patman/__main__.py +++ b/tools/patman/__main__.py @@ -49,7 +49,7 @@ def run_patman(): result = test_util.run_test_suites( 'patman', False, False, False, None, None, None, [test_checkpatch.TestPatch, func_test.TestFunctional, - 'gitutil', 'settings']) + 'settings']) sys.exit(0 if result.wasSuccessful() else 1) diff --git a/tools/patman/checkpatch.py b/tools/patman/checkpatch.py index e03cac115e4..2975881705c 100644 --- a/tools/patman/checkpatch.py +++ b/tools/patman/checkpatch.py @@ -8,8 +8,8 @@ import os import re import sys -from patman import gitutil from u_boot_pylib import command +from u_boot_pylib import gitutil from u_boot_pylib import terminal EMACS_PREFIX = r'(?:[0-9]{4}.*\.patch:[0-9]+: )?' 
diff --git a/tools/patman/cmdline.py b/tools/patman/cmdline.py index d6496c0cb78..562bc823f60 100644 --- a/tools/patman/cmdline.py +++ b/tools/patman/cmdline.py @@ -13,8 +13,8 @@ import os import pathlib import sys -from patman import gitutil from patman import project +from u_boot_pylib import gitutil from patman import settings PATMAN_DIR = pathlib.Path(__file__).parent diff --git a/tools/patman/control.py b/tools/patman/control.py index b292da9dc27..b8a45912058 100644 --- a/tools/patman/control.py +++ b/tools/patman/control.py @@ -12,8 +12,8 @@ import os import sys from patman import checkpatch -from patman import gitutil from patman import patchstream +from u_boot_pylib import gitutil from u_boot_pylib import terminal @@ -63,7 +63,8 @@ def prepare_patches(col, branch, count, start, end, ignore_binary, signoff, branch, start, to_do, ignore_binary, series, signoff) # Fix up the patch files to our liking, and insert the cover letter - patchstream.fix_patches(series, patch_files, keep_change_id) + patchstream.fix_patches(series, patch_files, keep_change_id, + insert_base_commit=not cover_fname) if cover_fname and series.get('cover'): patchstream.insert_cover_letter(cover_fname, series, to_do) return series, cover_fname, patch_files diff --git a/tools/patman/func_test.py b/tools/patman/func_test.py index af6c025a441..720746e21f5 100644 --- a/tools/patman/func_test.py +++ b/tools/patman/func_test.py @@ -18,11 +18,11 @@ import unittest from patman.commit import Commit from patman import control -from patman import gitutil from patman import patchstream from patman.patchstream import PatchStream from patman.series import Series from patman import settings +from u_boot_pylib import gitutil from u_boot_pylib import terminal from u_boot_pylib import tools from u_boot_pylib.test_util import capture_sys_output @@ -216,6 +216,8 @@ class TestFunctional(unittest.TestCase): text = self._get_text('test01.txt') series = patchstream.get_metadata_for_test(text) + series.base_commit = Commit('1a44532') + series.branch = 'mybranch' cover_fname, args = self._create_patches_for_test(series) get_maintainer_script = str(pathlib.Path(__file__).parent.parent.parent / 'get_maintainer.pl') + ' --norolestats' @@ -308,6 +310,8 @@ Simon Glass (2): --\x20 2.7.4 +base-commit: 1a44532 +branch: mybranch ''' lines = open(cover_fname, encoding='utf-8').read().splitlines() self.assertEqual( @@ -353,6 +357,31 @@ Changes in v2: expected = expected.splitlines() self.assertEqual(expected, lines[start:(start+len(expected))]) + def test_base_commit(self): + """Test adding a base commit with no cover letter""" + orig_text = self._get_text('test01.txt') + pos = orig_text.index('commit 5ab48490f03051875ab13d288a4bf32b507d76fd') + text = orig_text[:pos] + series = patchstream.get_metadata_for_test(text) + series.base_commit = Commit('1a44532') + series.branch = 'mybranch' + cover_fname, args = self._create_patches_for_test(series) + self.assertFalse(cover_fname) + with capture_sys_output() as out: + patchstream.fix_patches(series, args, insert_base_commit=True) + self.assertEqual('Cleaned 1 patch\n', out[0].getvalue()) + lines = tools.read_file(args[0], binary=False).splitlines() + pos = lines.index('-- ') + + # We expect these lines at the end: + # -- (with trailing space) + # 2.7.4 + # (empty) + # base-commit: xxx + # branch: xxx + self.assertEqual('base-commit: 1a44532', lines[pos + 3]) + self.assertEqual('branch: mybranch', lines[pos + 4]) + def make_commit_with_file(self, subject, body, fname, text): """Create a file and add it to 
the git repo with a new commit @@ -511,12 +540,23 @@ complicated as possible''') # Check that it can detect a different branch self.assertEqual(3, gitutil.count_commits_to_branch('second')) with capture_sys_output() as _: - _, cover_fname, patch_files = control.prepare_patches( + series, cover_fname, patch_files = control.prepare_patches( col, branch='second', count=-1, start=0, end=0, ignore_binary=False, signoff=True) self.assertIsNotNone(cover_fname) self.assertEqual(3, len(patch_files)) + cover = tools.read_file(cover_fname, binary=False) + lines = cover.splitlines()[-2:] + base = repo.lookup_reference('refs/heads/base').target + self.assertEqual(f'base-commit: {base}', lines[0]) + self.assertEqual('branch: second', lines[1]) + + # Make sure that the base-commit is not present when it is in the + # cover letter + for fname in patch_files: + self.assertNotIn(b'base-commit:', tools.read_file(fname)) + # Check that it can skip patches at the end with capture_sys_output() as _: _, cover_fname, patch_files = control.prepare_patches( @@ -524,6 +564,13 @@ complicated as possible''') ignore_binary=False, signoff=True) self.assertIsNotNone(cover_fname) self.assertEqual(2, len(patch_files)) + + cover = tools.read_file(cover_fname, binary=False) + lines = cover.splitlines()[-2:] + base2 = repo.lookup_reference('refs/heads/second') + ref = base2.peel(pygit2.GIT_OBJ_COMMIT).parents[0].parents[0].id + self.assertEqual(f'base-commit: {ref}', lines[0]) + self.assertEqual('branch: second', lines[1]) finally: os.chdir(orig_dir) diff --git a/tools/patman/get_maintainer.py b/tools/patman/get_maintainer.py index 8df3d124bac..200ee96551d 100644 --- a/tools/patman/get_maintainer.py +++ b/tools/patman/get_maintainer.py @@ -7,8 +7,8 @@ import os import shlex import shutil -from patman import gitutil from u_boot_pylib import command +from u_boot_pylib import gitutil def find_get_maintainer(script_file_name): diff --git a/tools/patman/patchstream.py b/tools/patman/patchstream.py index 4955f6aaab9..7a695c37c27 100644 --- a/tools/patman/patchstream.py +++ b/tools/patman/patchstream.py @@ -15,9 +15,9 @@ import shutil import tempfile from patman import commit -from patman import gitutil from patman.series import Series from u_boot_pylib import command +from u_boot_pylib import gitutil # Tags that we detect and remove RE_REMOVE = re.compile(r'^BUG=|^TEST=|^BRANCH=|^Review URL:' @@ -76,8 +76,13 @@ class PatchStream: are interested in. We can also process a patch file in order to remove unwanted tags or inject additional ones. These correspond to the two phases of processing. 
+ + Args: + keep_change_id (bool): Keep the Change-Id tag + insert_base_commit (bool): True to add the base commit to the end """ - def __init__(self, series, is_log=False, keep_change_id=False): + def __init__(self, series, is_log=False, keep_change_id=False, + insert_base_commit=False): self.skip_blank = False # True to skip a single blank line self.found_test = False # Found a TEST= line self.lines_after_test = 0 # Number of lines found after TEST= @@ -103,6 +108,7 @@ class PatchStream: self.recent_quoted = collections.deque([], 5) self.recent_unquoted = queue.Queue() self.was_quoted = None + self.insert_base_commit = insert_base_commit @staticmethod def process_text(text, is_comment=False): @@ -658,6 +664,13 @@ class PatchStream: outfd.write(line + '\n') self.blank_count = 0 self.finalise() + if self.insert_base_commit: + if self.series.base_commit: + print(f'base-commit: {self.series.base_commit.hash}', + file=outfd) + if self.series.branch: + print(f'branch: {self.series.branch}', file=outfd) + def insert_tags(msg, tags_to_emit): """Add extra tags to a commit message @@ -711,7 +724,7 @@ def get_list(commit_range, git_dir=None, count=None): """ params = gitutil.log_cmd(commit_range, reverse=True, count=count, git_dir=git_dir) - return command.run_pipe([params], capture=True).stdout + return command.run_one(*params, capture=True).stdout def get_metadata_for_list(commit_range, git_dir=None, count=None, series=None, allow_overwrite=False): @@ -755,8 +768,12 @@ def get_metadata(branch, start, count): Returns: Series: Object containing information about the commits. """ - return get_metadata_for_list( - '%s~%d' % (branch if branch else 'HEAD', start), None, count) + top = f"{branch if branch else 'HEAD'}~{start}" + series = get_metadata_for_list(top, None, count) + series.base_commit = commit.Commit(gitutil.get_hash(f'{top}~{count}')) + series.branch = branch or gitutil.get_branch() + series.top = top + return series def get_metadata_for_test(text): """Process metadata from a file containing a git log. Used for tests @@ -774,7 +791,8 @@ def get_metadata_for_test(text): pst.finalise() return series -def fix_patch(backup_dir, fname, series, cmt, keep_change_id=False): +def fix_patch(backup_dir, fname, series, cmt, keep_change_id=False, + insert_base_commit=False): """Fix up a patch file, by adding/removing as required. We remove our tags from the patch file, insert changes lists, etc. @@ -788,6 +806,7 @@ def fix_patch(backup_dir, fname, series, cmt, keep_change_id=False): series (Series): Series information about this patch set cmt (Commit): Commit object for this patch file keep_change_id (bool): Keep the Change-Id tag. + insert_base_commit (bool): True to add the base commit to the end Return: list: A list of errors, each str, or [] if all ok. 
@@ -795,7 +814,8 @@ def fix_patch(backup_dir, fname, series, cmt, keep_change_id=False): handle, tmpname = tempfile.mkstemp() outfd = os.fdopen(handle, 'w', encoding='utf-8') infd = open(fname, 'r', encoding='utf-8') - pst = PatchStream(series, keep_change_id=keep_change_id) + pst = PatchStream(series, keep_change_id=keep_change_id, + insert_base_commit=insert_base_commit) pst.commit = cmt pst.process_stream(infd, outfd) infd.close() @@ -807,7 +827,7 @@ def fix_patch(backup_dir, fname, series, cmt, keep_change_id=False): shutil.move(tmpname, fname) return cmt.warn -def fix_patches(series, fnames, keep_change_id=False): +def fix_patches(series, fnames, keep_change_id=False, insert_base_commit=False): """Fix up a list of patches identified by filenames The patch files are processed in place, and overwritten. @@ -816,6 +836,7 @@ def fix_patches(series, fnames, keep_change_id=False): series (Series): The Series object fnames (:type: list of str): List of patch files to process keep_change_id (bool): Keep the Change-Id tag. + insert_base_commit (bool): True to add the base commit to the end """ # Current workflow creates patches, so we shouldn't need a backup backup_dir = None #tempfile.mkdtemp('clean-patch') @@ -825,7 +846,8 @@ def fix_patches(series, fnames, keep_change_id=False): cmt.patch = fname cmt.count = count result = fix_patch(backup_dir, fname, series, cmt, - keep_change_id=keep_change_id) + keep_change_id=keep_change_id, + insert_base_commit=insert_base_commit) if result: print('%d warning%s for %s:' % (len(result), 's' if len(result) > 1 else '', fname)) @@ -868,4 +890,11 @@ def insert_cover_letter(fname, series, count): out = series.MakeChangeLog(None) line += '\n' + '\n'.join(out) fil.write(line) + + # Insert the base commit and branch + if series.base_commit: + print(f'base-commit: {series.base_commit.hash}', file=fil) + if series.branch: + print(f'branch: {series.branch}', file=fil) + fil.close() diff --git a/tools/patman/project.py b/tools/patman/project.py index 4459042b5d4..d6143a67066 100644 --- a/tools/patman/project.py +++ b/tools/patman/project.py @@ -4,7 +4,7 @@ import os.path -from patman import gitutil +from u_boot_pylib import gitutil def detect_project(): """Autodetect the name of the current project. diff --git a/tools/patman/requirements.txt b/tools/patman/requirements.txt new file mode 100644 index 00000000000..e8cbc6cf0c3 --- /dev/null +++ b/tools/patman/requirements.txt @@ -0,0 +1,5 @@ +ConfigParser==7.1.0 +importlib_resources==6.5.2 +pygit2==1.13.3 +Requests==2.32.3 +setuptools==75.8.0 diff --git a/tools/patman/series.py b/tools/patman/series.py index 6866e1dbd08..b73e9c58de4 100644 --- a/tools/patman/series.py +++ b/tools/patman/series.py @@ -12,8 +12,8 @@ import sys import time from patman import get_maintainer -from patman import gitutil from patman import settings +from u_boot_pylib import gitutil from u_boot_pylib import terminal from u_boot_pylib import tools @@ -42,6 +42,8 @@ class Series(dict): self.notes = [] self.changes = {} self.allow_overwrite = False + self.base_commit = None + self.branch = None # Written in MakeCcFile() # key: name of patch file diff --git a/tools/patman/settings.py b/tools/patman/settings.py index 68c93e313b3..d66b22be1df 100644 --- a/tools/patman/settings.py +++ b/tools/patman/settings.py @@ -12,7 +12,7 @@ import argparse import os import re -from patman import gitutil +from u_boot_pylib import gitutil """Default settings per-project. 
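With a cover letter, the new base-commit/branch trailers are written at the end of the cover letter; without one, PatchStream appends them to each patch after the '-- ' signature separator. A minimal usage sketch, assuming the patman APIs shown in this series (the patch filenames are hypothetical):

from patman import patchstream

# get_metadata() now also records series.base_commit and series.branch
series = patchstream.get_metadata('mybranch', 0, 3)
patch_files = ['0001-first.patch', '0002-second.patch', '0003-third.patch']
patchstream.fix_patches(series, patch_files, insert_base_commit=True)

# Each processed patch then ends with lines of the form:
#   base-commit: <sha1 the series is based on>
#   branch: mybranch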
diff --git a/tools/patman/test_checkpatch.py b/tools/patman/test_checkpatch.py index db7860f551d..3bf16febbf6 100644 --- a/tools/patman/test_checkpatch.py +++ b/tools/patman/test_checkpatch.py @@ -11,10 +11,10 @@ import tempfile import unittest from patman import checkpatch -from patman import gitutil from patman import patchstream from patman import series from patman import commit +from u_boot_pylib import gitutil class Line: @@ -530,4 +530,3 @@ index 0000000..2234c87 if __name__ == "__main__": unittest.main() - gitutil.RunTests() diff --git a/tools/preload_check_sign.c b/tools/preload_check_sign.c new file mode 100644 index 00000000000..ebead459273 --- /dev/null +++ b/tools/preload_check_sign.c @@ -0,0 +1,160 @@ +// SPDX-License-Identifier: GPL-2.0+ +/* + * Check a file including a preload header including a signature + * + * Copyright (c) 2025 Paul HENRYS <paul.henrys_ext@softathome.com> + * + * Binman makes it possible to generate a preload header signing part or the + * complete file. The tool preload_check_sign allows to verify and authenticate + * a file starting with a preload header. + */ +#include <stdio.h> +#include <unistd.h> +#include <openssl/pem.h> +#include <openssl/evp.h> +#include <openssl/err.h> +#include <image.h> + +extern void image_pre_load_sig_set_info(struct image_sig_info *info); +extern int image_pre_load_sig(ulong addr); + +static void usage(char *cmdname) +{ + fprintf(stderr, "Usage: %s -f file -k PEM key file\n" + " -f ==> set file which should be checked\n" + " -k ==> PEM key file\n" + " -a ==> algo (default: sha256,rsa2048)\n" + " -p ==> padding (default: pkcs-1.5)\n" + " -h ==> help\n", + cmdname); + exit(EXIT_FAILURE); +} + +int main(int argc, char **argv) +{ + int ret = 0; + char cmdname[256]; + char *file = NULL; + char *keyfile = NULL; + int c; + FILE *fp = NULL; + FILE *fp_key = NULL; + size_t bytes; + long filesize; + void *buffer = NULL; + EVP_PKEY *pkey = NULL; + char *algo = "sha256,rsa2048"; + char *padding = "pkcs-1.5"; + struct image_sig_info info = {0}; + + strncpy(cmdname, *argv, sizeof(cmdname) - 1); + cmdname[sizeof(cmdname) - 1] = '\0'; + while ((c = getopt(argc, argv, "f:k:a:p:h")) != -1) + switch (c) { + case 'f': + file = optarg; + break; + case 'k': + keyfile = optarg; + break; + case 'a': + algo = optarg; + break; + case 'p': + padding = optarg; + break; + default: + usage(cmdname); + break; + } + + if (!file) { + fprintf(stderr, "%s: Missing file\n", *argv); + usage(*argv); + } + + if (!keyfile) { + fprintf(stderr, "%s: Missing key file\n", *argv); + usage(*argv); + } + + fp = fopen(file, "r"); + if (!fp) { + fprintf(stderr, "Error opening file: %s\n", file); + ret = EXIT_FAILURE; + goto out; + } + + fseek(fp, 0, SEEK_END); + filesize = ftell(fp); + rewind(fp); + + buffer = malloc(filesize); + if (!buffer) { + fprintf(stderr, "Memory allocation failed"); + ret = EXIT_FAILURE; + goto out; + } + + bytes = fread(buffer, 1, filesize, fp); + if (bytes != filesize) { + fprintf(stderr, "Error reading file\n"); + ret = EXIT_FAILURE; + goto out; + } + + fp_key = fopen(keyfile, "r"); + if (!fp_key) { + fprintf(stderr, "Error opening file: %s\n", keyfile); + ret = EXIT_FAILURE; + goto out; + } + + /* Attempt to read the private key */ + pkey = PEM_read_PrivateKey(fp_key, NULL, NULL, NULL); + if (!pkey) { + /* If private key reading fails, try reading as a public key */ + fseek(fp_key, 0, SEEK_SET); + pkey = PEM_read_PUBKEY(fp_key, NULL, NULL, NULL); + } + if (!pkey) { + fprintf(stderr, "Unable to retrieve the public key: %s\n", + 
ERR_error_string(ERR_get_error(), NULL)); + ret = EXIT_FAILURE; + goto out; + } + + info.algo_name = algo; + info.padding_name = padding; + info.key = (uint8_t *)pkey; + info.mandatory = 1; + info.sig_size = EVP_PKEY_size(pkey); + if (info.sig_size < 0) { + fprintf(stderr, "Fail to retrieve the signature size: %s\n", + ERR_error_string(ERR_get_error(), NULL)); + ret = EXIT_FAILURE; + goto out; + } + + /* Compute signature information */ + info.sig_info.name = info.algo_name; + info.sig_info.padding = image_get_padding_algo(info.padding_name); + info.sig_info.checksum = image_get_checksum_algo(info.sig_info.name); + info.sig_info.crypto = image_get_crypto_algo(info.sig_info.name); + info.sig_info.key = info.key; + info.sig_info.keylen = info.key_len; + + /* Check the signature */ + image_pre_load_sig_set_info(&info); + ret = image_pre_load_sig((ulong)buffer); +out: + if (fp) + fclose(fp); + if (fp_key) + fclose(fp_key); + if (info.key) + EVP_PKEY_free(pkey); + free(buffer); + + exit(ret); +} diff --git a/tools/proftool.c b/tools/proftool.c index af2cdb6d584..c7b427f3078 100644 --- a/tools/proftool.c +++ b/tools/proftool.c @@ -676,6 +676,7 @@ static int read_trace_config(FILE *fin) if (!tok) { error("Invalid trace config data on line %d\n", linenum); + free(line); return -1; } if (0 == strcmp(tok, "include-func")) { @@ -685,6 +686,7 @@ static int read_trace_config(FILE *fin) } else { error("Unknown command in trace config data line %d\n", linenum); + free(line); return -1; } @@ -692,6 +694,7 @@ static int read_trace_config(FILE *fin) if (!tok) { error("Missing pattern in trace config data line %d\n", linenum); + free(line); return -1; } diff --git a/tools/qconfig.py b/tools/qconfig.py index 259adbe1bc9..c96a305a48f 100755 --- a/tools/qconfig.py +++ b/tools/qconfig.py @@ -1453,7 +1453,7 @@ def do_scan_source(path, do_update): print('\nCONFIG options used as Proper in Makefiles but without a non-xPL_ variant:') not_found = check_not_found(all_uses, MODE_PROPER) show_uses(not_found) - proper_not_found |= {not_found.keys()} + proper_not_found |= not_found.keys() # Scan the source code all_uses, _ = scan_src_files(src_list) @@ -1471,7 +1471,7 @@ def do_scan_source(path, do_update): print('\nCONFIG options used as Proper in source but without a non-xPL_ variant:') not_found = check_not_found(all_uses, MODE_PROPER) show_uses(not_found) - proper_not_found |= {not_found.keys()} + proper_not_found |= not_found.keys() print('\nCONFIG options used as SPL but without an xPL_ variant:') for item in sorted(spl_not_found): diff --git a/tools/rmboard.py b/tools/rmboard.py index 0c56b149e0f..594fd89b8d7 100755 --- a/tools/rmboard.py +++ b/tools/rmboard.py @@ -43,18 +43,16 @@ def rm_kconfig_include(path): Args: path: Path to search for and remove """ - cmd = ['git', 'grep', path] - stdout = command.run_pipe([cmd], capture=True, raise_on_error=False).stdout + stdout = command.output('git', 'grep', path, raise_on_error=False) if not stdout: return fname = stdout.split(':')[0] print("Fixing up '%s' to remove reference to '%s'" % (fname, path)) - cmd = ['sed', '-i', '\|%s|d' % path, fname] - stdout = command.run_pipe([cmd], capture=True).stdout + stdout = command.run_one('sed', '-i', rf'\|{path}|d', fname, + capture=True).stdout - cmd = ['git', 'add', fname] - stdout = command.run_pipe([cmd], capture=True).stdout + stdout = command.output('git', 'add', fname) def rm_board(board): """Create a commit which removes a single board @@ -68,8 +66,7 @@ def rm_board(board): """ # Find all MAINTAINERS and Kconfig files 
which mention the board - cmd = ['git', 'grep', '-l', board] - stdout = command.run_pipe([cmd], capture=True).stdout + stdout = command.output('git', 'grep', '-l', board) maintain = [] kconfig = [] for line in stdout.splitlines(): @@ -109,16 +106,14 @@ def rm_board(board): # Search for Kconfig files in the resulting list. Remove any 'source' lines # which reference Kconfig files we want to remove for path in real: - cmd = ['find', path] - stdout = (command.run_pipe([cmd], capture=True, raise_on_error=False). - stdout) + stdout = command.output('find', path, raise_on_error=False) for fname in stdout.splitlines(): if fname.endswith('Kconfig'): rm_kconfig_include(fname) # Remove unwanted files cmd = ['git', 'rm', '-r'] + real - stdout = command.run_pipe([cmd], capture=True).stdout + stdout = command.output(*cmd, capture=True) ## Change the messages as needed msg = '''arm: Remove %s board @@ -131,13 +126,11 @@ Remove it. msg += 'Patch-cc: %s\n' % name # Create the commit - cmd = ['git', 'commit', '-s', '-m', msg] - stdout = command.run_pipe([cmd], capture=True).stdout + stdout = command.output('git', 'commit', '-s', '-m', msg) # Check if the board is mentioned anywhere else. The user will need to deal # with this - cmd = ['git', 'grep', '-il', board] - print(command.run_pipe([cmd], capture=True, raise_on_error=False).stdout) + print(command.output('git', 'grep', '-il', board, raise_on_error=False)) print(' '.join(cmd)) for board in sys.argv[1:]: diff --git a/tools/sfspl.c b/tools/sfspl.c index c76420ce21b..516e96e8dd9 100644 --- a/tools/sfspl.c +++ b/tools/sfspl.c @@ -70,11 +70,14 @@ static int sfspl_verify_header(unsigned char *buf, int size, printf("Truncated file\n"); return EXIT_FAILURE; } + if ((size_t)size > hdr_size + file_size) + printf("File too long, expected %u bytes\n", + hdr_size + file_size); if (hdr->version != DEFAULT_VERSION) { printf("Unknown file format version\n"); return EXIT_FAILURE; } - crc_check = crc32(0, &buf[hdr_size], size - hdr_size); + crc_check = crc32(0, &buf[hdr_size], file_size); if (crc_check != crc) { printf("Incorrect CRC32\n"); return EXIT_FAILURE; diff --git a/tools/u_boot_pylib/__init__.py b/tools/u_boot_pylib/__init__.py index 63c88e85ec0..807a62e0743 100644 --- a/tools/u_boot_pylib/__init__.py +++ b/tools/u_boot_pylib/__init__.py @@ -1,4 +1,4 @@ # SPDX-License-Identifier: GPL-2.0+ -__all__ = ['command', 'cros_subprocess','terminal', 'test_util', 'tools', - 'tout'] +__all__ = ['command', 'cros_subprocess', 'gitutil', 'terminal', 'test_util', + 'tools', 'tout'] diff --git a/tools/u_boot_pylib/__main__.py b/tools/u_boot_pylib/__main__.py index 8f98d7bd9f8..c0762bca733 100755 --- a/tools/u_boot_pylib/__main__.py +++ b/tools/u_boot_pylib/__main__.py @@ -13,7 +13,6 @@ if __name__ == "__main__": sys.path.append(os.path.join(our_path, '..')) # Run tests - from u_boot_pylib import terminal from u_boot_pylib import test_util result = test_util.run_test_suites( diff --git a/tools/u_boot_pylib/command.py b/tools/u_boot_pylib/command.py index bbe95d86122..0e247355ef6 100644 --- a/tools/u_boot_pylib/command.py +++ b/tools/u_boot_pylib/command.py @@ -1,21 +1,44 @@ # SPDX-License-Identifier: GPL-2.0+ -# Copyright (c) 2011 The Chromium OS Authors. -# +""" +Shell command ease-ups for Python -import os +Copyright (c) 2011 The Chromium OS Authors. +""" + +import subprocess from u_boot_pylib import cros_subprocess -"""Shell command ease-ups for Python.""" +# This permits interception of RunPipe for test purposes. 
If it is set to +# a function, then that function is called with the pipe list being +# executed. Otherwise, it is assumed to be a CommandResult object, and is +# returned as the result for every run_pipe() call. +# When this value is None, commands are executed as normal. +TEST_RESULT = None + + +class CommandExc(Exception): + """Reports an exception to the caller""" + def __init__(self, msg, result): + """Set up a new exception object + + Args: + result (CommandResult): Execution result so far + """ + super().__init__(msg) + self.result = result + class CommandResult: """A class which captures the result of executing a command. Members: - stdout: stdout obtained from command, as a string - stderr: stderr obtained from command, as a string - return_code: Return code from command - exception: Exception received, or None if all ok + stdout (bytes): stdout obtained from command, as a string + stderr (bytes): stderr obtained from command, as a string + combined (bytes): stdout and stderr interleaved + return_code (int): Return code from command + exception (Exception): Exception received, or None if all ok + output (str or None): Returns output as a single line if requested """ def __init__(self, stdout='', stderr='', combined='', return_code=0, exception=None): @@ -24,8 +47,16 @@ class CommandResult: self.combined = combined self.return_code = return_code self.exception = exception + self.output = None def to_output(self, binary): + """Converts binary output to its final form + + Args: + binary (bool): True to report binary output, False to use strings + Returns: + self + """ if not binary: self.stdout = self.stdout.decode('utf-8') self.stderr = self.stderr.decode('utf-8') @@ -33,49 +64,47 @@ class CommandResult: return self -# This permits interception of RunPipe for test purposes. If it is set to -# a function, then that function is called with the pipe list being -# executed. Otherwise, it is assumed to be a CommandResult object, and is -# returned as the result for every run_pipe() call. -# When this value is None, commands are executed as normal. -test_result = None - -def run_pipe(pipe_list, infile=None, outfile=None, - capture=False, capture_stderr=False, oneline=False, - raise_on_error=True, cwd=None, binary=False, - output_func=None, **kwargs): +def run_pipe(pipe_list, infile=None, outfile=None, capture=False, + capture_stderr=False, oneline=False, raise_on_error=True, cwd=None, + binary=False, output_func=None, **kwargs): """ Perform a command pipeline, with optional input/output filenames. Args: - pipe_list: List of command lines to execute. Each command line is - piped into the next, and is itself a list of strings. For + pipe_list (list of list): List of command lines to execute. Each command + line is piped into the next, and is itself a list of strings. For example [ ['ls', '.git'] ['wc'] ] will pipe the output of 'ls .git' into 'wc'. 
- infile: File to provide stdin to the pipeline - outfile: File to store stdout - capture: True to capture output - capture_stderr: True to capture stderr - oneline: True to strip newline chars from output - output_func: Output function to call with each output fragment - (if it returns True the function terminates) - kwargs: Additional keyword arguments to cros_subprocess.Popen() + infile (str): File to provide stdin to the pipeline + outfile (str): File to store stdout + capture (bool): True to capture output + capture_stderr (bool): True to capture stderr + oneline (bool): True to strip newline chars from output + raise_on_error (bool): True to raise on an error, False to return it in + the CommandResult + cwd (str or None): Directory to run the command in + binary (bool): True to report binary output, False to use strings + output_func (function): Output function to call with each output + fragment (if it returns True the function terminates) + **kwargs: Additional keyword arguments to cros_subprocess.Popen() Returns: CommandResult object + Raises: + CommandExc if an exception happens """ - if test_result: - if hasattr(test_result, '__call__'): + if TEST_RESULT: + if hasattr(TEST_RESULT, '__call__'): # pylint: disable=E1102 - result = test_result(pipe_list=pipe_list) + result = TEST_RESULT(pipe_list=pipe_list) if result: return result else: - return test_result + return TEST_RESULT # No result: fall through to normal processing result = CommandResult(b'', b'', b'') last_pipe = None pipeline = list(pipe_list) - user_pipestr = '|'.join([' '.join(pipe) for pipe in pipe_list]) + user_pipestr = '|'.join([' '.join(pipe) for pipe in pipe_list]) kwargs['stdout'] = None kwargs['stderr'] = None while pipeline: @@ -96,7 +125,8 @@ def run_pipe(pipe_list, infile=None, outfile=None, except Exception as err: result.exception = err if raise_on_error: - raise Exception("Error running '%s': %s" % (user_pipestr, str)) + raise CommandExc(f"Error running '{user_pipestr}': {err}", + result) from err result.return_code = 255 return result.to_output(binary) @@ -107,31 +137,84 @@ def run_pipe(pipe_list, infile=None, outfile=None, result.output = result.stdout.rstrip(b'\r\n') result.return_code = last_pipe.wait() if raise_on_error and result.return_code: - raise Exception("Error running '%s'" % user_pipestr) + raise CommandExc(f"Error running '{user_pipestr}'", result) return result.to_output(binary) + def output(*cmd, **kwargs): + """Run a command and return its output + + Args: + *cmd (list of str): Command to run + **kwargs (dict of args): Extra arguments to pass in + + Returns: + str: command output + """ kwargs['raise_on_error'] = kwargs.get('raise_on_error', True) return run_pipe([cmd], capture=True, **kwargs).stdout + def output_one_line(*cmd, **kwargs): """Run a command and output it as a single-line string - The command us expected to produce a single line of output + The command is expected to produce a single line of output + + Args: + *cmd (list of str): Command to run + **kwargs (dict of args): Extra arguments to pass in Returns: - String containing output of command + str: output of command with all newlines removed """ raise_on_error = kwargs.pop('raise_on_error', True) result = run_pipe([cmd], capture=True, oneline=True, - raise_on_error=raise_on_error, **kwargs).stdout.strip() + raise_on_error=raise_on_error, **kwargs).stdout.strip() return result + def run(*cmd, **kwargs): + """Run a command + + Note that you must add 'capture' to kwargs to obtain non-empty output + + Args: + *cmd (list of 
str): Command to run + **kwargs (dict of args): Extra arguments to pass in + + Returns: + str: output of command + """ return run_pipe([cmd], **kwargs).stdout + +def run_one(*cmd, **kwargs): + """Run a single command + + Note that you must add 'capture' to kwargs to obtain non-empty output + + Args: + *cmd (list of str): Command to run + **kwargs (dict of args): Extra arguments to pass in + + Returns: + CommandResult: output of command + """ + return run_pipe([cmd], **kwargs) + + def run_list(cmd): + """Run a command and return its output + + Args: + cmd (list of str): Command to run + + Returns: + str: output of command + """ return run_pipe([cmd], capture=True).stdout + def stop_all(): + """Stop all subprocesses initiated with cros_subprocess""" cros_subprocess.stay_alive = False diff --git a/tools/patman/gitutil.py b/tools/u_boot_pylib/gitutil.py index 10ea5ff39f5..0376bece3e6 100644 --- a/tools/patman/gitutil.py +++ b/tools/u_boot_pylib/gitutil.py @@ -10,7 +10,7 @@ from u_boot_pylib import command from u_boot_pylib import terminal # True to use --no-decorate - we check this in setup() -use_no_decorate = True +USE_NO_DECORATE = True def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False, @@ -18,11 +18,11 @@ def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False, """Create a command to perform a 'git log' Args: - commit_range: Range expression to use for log, None for none - git_dir: Path to git repository (None to use default) - oneline: True to use --oneline, else False - reverse: True to reverse the log (--reverse) - count: Number of commits to list, or None for no limit + commit_range (str): Range expression to use for log, None for none + git_dir (str): Path to git repository (None to use default) + oneline (bool): True to use --oneline, else False + reverse (bool): True to reverse the log (--reverse) + count (int or None): Number of commits to list, or None for no limit Return: List containing command and arguments to run """ @@ -32,12 +32,12 @@ def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False, cmd += ['--no-pager', 'log', '--no-color'] if oneline: cmd.append('--oneline') - if use_no_decorate: + if USE_NO_DECORATE: cmd.append('--no-decorate') if reverse: cmd.append('--reverse') if count is not None: - cmd.append('-n%d' % count) + cmd.append(f'-n{count}') if commit_range: cmd.append(commit_range) @@ -55,22 +55,22 @@ def count_commits_to_branch(branch): since then. Args: - branch: Branch to count from (None for current branch) + branch (str or None): Branch to count from (None for current branch) Return: Number of patches that exist on top of the branch """ if branch: - us, msg = get_upstream('.git', branch) - rev_range = '%s..%s' % (us, branch) + us, _ = get_upstream('.git', branch) + rev_range = f'{us}..{branch}' else: rev_range = '@{upstream}..' 
- pipe = [log_cmd(rev_range, oneline=True)] - result = command.run_pipe(pipe, capture=True, capture_stderr=True, - oneline=True, raise_on_error=False) + cmd = log_cmd(rev_range, oneline=True) + result = command.run_one(*cmd, capture=True, capture_stderr=True, + oneline=True, raise_on_error=False) if result.return_code: - raise ValueError('Failed to determine upstream: %s' % - result.stderr.strip()) + raise ValueError( + f'Failed to determine upstream: {result.stderr.strip()}') patch_count = len(result.stdout.splitlines()) return patch_count @@ -79,13 +79,12 @@ def name_revision(commit_hash): """Gets the revision name for a commit Args: - commit_hash: Commit hash to look up + commit_hash (str): Commit hash to look up Return: Name of revision, if any, else None """ - pipe = ['git', 'name-rev', commit_hash] - stdout = command.run_pipe([pipe], capture=True, oneline=True).stdout + stdout = command.output_one_line('git', 'name-rev', commit_hash) # We expect a commit, a space, then a revision name name = stdout.split(' ')[1].strip() @@ -100,35 +99,35 @@ def guess_upstream(git_dir, branch): 'git name-rev' returns a plain branch name, with no ! or ^ modifiers. Args: - git_dir: Git directory containing repo - branch: Name of branch + git_dir (str): Git directory containing repo + branch (str): Name of branch Returns: Tuple: Name of upstream branch (e.g. 'upstream/master') or None if none Warning/error message, or None if none """ - pipe = [log_cmd(branch, git_dir=git_dir, oneline=True, count=100)] - result = command.run_pipe(pipe, capture=True, capture_stderr=True, - raise_on_error=False) + cmd = log_cmd(branch, git_dir=git_dir, oneline=True, count=100) + result = command.run_one(*cmd, capture=True, capture_stderr=True, + raise_on_error=False) if result.return_code: - return None, "Branch '%s' not found" % branch + return None, f"Branch '{branch}' not found" for line in result.stdout.splitlines()[1:]: commit_hash = line.split(' ')[0] name = name_revision(commit_hash) if '~' not in name and '^' not in name: if name.startswith('remotes/'): name = name[8:] - return name, "Guessing upstream as '%s'" % name - return None, "Cannot find a suitable upstream for branch '%s'" % branch + return name, f"Guessing upstream as '{name}'" + return None, f"Cannot find a suitable upstream for branch '{branch}'" def get_upstream(git_dir, branch): """Returns the name of the upstream for a branch Args: - git_dir: Git directory containing repo - branch: Name of branch + git_dir (str): Git directory containing repo + branch (str): Name of branch Returns: Tuple: @@ -137,31 +136,30 @@ def get_upstream(git_dir, branch): """ try: remote = command.output_one_line('git', '--git-dir', git_dir, 'config', - 'branch.%s.remote' % branch) + f'branch.{branch}.remote') merge = command.output_one_line('git', '--git-dir', git_dir, 'config', - 'branch.%s.merge' % branch) - except Exception: + f'branch.{branch}.merge') + except command.CommandExc: upstream, msg = guess_upstream(git_dir, branch) return upstream, msg if remote == '.': return merge, None - elif remote and merge: + if remote and merge: # Drop the initial refs/heads from merge leaf = merge.split('/', maxsplit=2)[2:] - return '%s/%s' % (remote, '/'.join(leaf)), None - else: - raise ValueError("Cannot determine upstream branch for branch " - "'%s' remote='%s', merge='%s'" - % (branch, remote, merge)) + return f'{remote}/{"/".join(leaf)}', None + raise ValueError("Cannot determine upstream branch for branch " + f"'{branch}' remote='{remote}', merge='{merge}'") def 
get_range_in_branch(git_dir, branch, include_upstream=False): """Returns an expression for the commits in the given branch. Args: - git_dir: Directory containing git repo - branch: Name of branch + git_dir (str): Directory containing git repo + branch (str): Name of branch + include_upstream (bool): Include the upstream commit as well Return: Expression in the form 'upstream..branch' which can be used to access the commits. If the branch does not exist, returns None. @@ -169,7 +167,7 @@ def get_range_in_branch(git_dir, branch, include_upstream=False): upstream, msg = get_upstream(git_dir, branch) if not upstream: return None, msg - rstr = '%s%s..%s' % (upstream, '~' if include_upstream else '', branch) + rstr = f"{upstream}{'~' if include_upstream else ''}..{branch}" return rstr, msg @@ -177,17 +175,17 @@ def count_commits_in_range(git_dir, range_expr): """Returns the number of commits in the given range. Args: - git_dir: Directory containing git repo - range_expr: Range to check + git_dir (str): Directory containing git repo + range_expr (str): Range to check Return: Number of patches that exist in the supplied range or None if none were found """ - pipe = [log_cmd(range_expr, git_dir=git_dir, oneline=True)] - result = command.run_pipe(pipe, capture=True, capture_stderr=True, - raise_on_error=False) + cmd = log_cmd(range_expr, git_dir=git_dir, oneline=True) + result = command.run_one(*cmd, capture=True, capture_stderr=True, + raise_on_error=False) if result.return_code: - return None, "Range '%s' not found or is invalid" % range_expr + return None, f"Range '{range_expr}' not found or is invalid" patch_count = len(result.stdout.splitlines()) return patch_count, None @@ -196,8 +194,9 @@ def count_commits_in_branch(git_dir, branch, include_upstream=False): """Returns the number of commits in the given branch. Args: - git_dir: Directory containing git repo - branch: Name of branch + git_dir (str): Directory containing git repo + branch (str): Name of branch + include_upstream (bool): Include the upstream commit as well Return: Number of patches that exist on top of the branch, or None if the branch does not exist. @@ -212,7 +211,7 @@ def count_commits(commit_range): """Returns the number of commits in the given range. Args: - commit_range: Range of commits to count (e.g. 'HEAD..base') + commit_range (str): Range of commits to count (e.g. 'HEAD..base') Return: Number of patches that exist on top of the branch """ @@ -227,7 +226,10 @@ def checkout(commit_hash, git_dir=None, work_tree=None, force=False): """Checkout the selected commit for this build Args: - commit_hash: Commit hash to check out + commit_hash (str): Commit hash to check out + git_dir (str): Directory containing git repo, or None for current dir + work_tree (str): Git worktree to use, or None if none + force (bool): True to force the checkout (git checkout -f) """ pipe = ['git'] if git_dir: @@ -241,51 +243,52 @@ def checkout(commit_hash, git_dir=None, work_tree=None, force=False): result = command.run_pipe([pipe], capture=True, raise_on_error=False, capture_stderr=True) if result.return_code != 0: - raise OSError('git checkout (%s): %s' % (pipe, result.stderr)) + raise OSError(f'git checkout ({pipe}): {result.stderr}') -def clone(git_dir, output_dir): - """Checkout the selected commit for this build +def clone(repo, output_dir): + """Clone a repo Args: - commit_hash: Commit hash to check out + repo (str): Repo to clone (e.g. 
web address) + output_dir (str): Directory to close into """ - pipe = ['git', 'clone', git_dir, '.'] - result = command.run_pipe([pipe], capture=True, cwd=output_dir, - capture_stderr=True) + result = command.run_one('git', 'clone', repo, '.', capture=True, + cwd=output_dir, capture_stderr=True) if result.return_code != 0: - raise OSError('git clone: %s' % result.stderr) + raise OSError(f'git clone: {result.stderr}') def fetch(git_dir=None, work_tree=None): """Fetch from the origin repo Args: - commit_hash: Commit hash to check out + git_dir (str): Directory containing git repo, or None for current dir + work_tree (str or None): Git worktree to use, or None if none """ - pipe = ['git'] + cmd = ['git'] if git_dir: - pipe.extend(['--git-dir', git_dir]) + cmd.extend(['--git-dir', git_dir]) if work_tree: - pipe.extend(['--work-tree', work_tree]) - pipe.append('fetch') - result = command.run_pipe([pipe], capture=True, capture_stderr=True) + cmd.extend(['--work-tree', work_tree]) + cmd.append('fetch') + result = command.run_one(*cmd, capture=True, capture_stderr=True) if result.return_code != 0: - raise OSError('git fetch: %s' % result.stderr) + raise OSError(f'git fetch: {result.stderr}') def check_worktree_is_available(git_dir): """Check if git-worktree functionality is available Args: - git_dir: The repository to test in + git_dir (str): The repository to test in Returns: True if git-worktree commands will work, False otherwise. """ - pipe = ['git', '--git-dir', git_dir, 'worktree', 'list'] - result = command.run_pipe([pipe], capture=True, capture_stderr=True, - raise_on_error=False) + result = command.run_one('git', '--git-dir', git_dir, 'worktree', 'list', + capture=True, capture_stderr=True, + raise_on_error=False) return result.return_code == 0 @@ -293,30 +296,30 @@ def add_worktree(git_dir, output_dir, commit_hash=None): """Create and checkout a new git worktree for this build Args: - git_dir: The repository to checkout the worktree from - output_dir: Path for the new worktree - commit_hash: Commit hash to checkout + git_dir (str): The repository to checkout the worktree from + output_dir (str): Path for the new worktree + commit_hash (str): Commit hash to checkout """ # We need to pass --detach to avoid creating a new branch - pipe = ['git', '--git-dir', git_dir, 'worktree', 'add', '.', '--detach'] + cmd = ['git', '--git-dir', git_dir, 'worktree', 'add', '.', '--detach'] if commit_hash: - pipe.append(commit_hash) - result = command.run_pipe([pipe], capture=True, cwd=output_dir, - capture_stderr=True) + cmd.append(commit_hash) + result = command.run_one(*cmd, capture=True, cwd=output_dir, + capture_stderr=True) if result.return_code != 0: - raise OSError('git worktree add: %s' % result.stderr) + raise OSError(f'git worktree add: {result.stderr}') def prune_worktrees(git_dir): """Remove administrative files for deleted worktrees Args: - git_dir: The repository whose deleted worktrees should be pruned + git_dir (str): The repository whose deleted worktrees should be pruned """ - pipe = ['git', '--git-dir', git_dir, 'worktree', 'prune'] - result = command.run_pipe([pipe], capture=True, capture_stderr=True) + result = command.run_one('git', '--git-dir', git_dir, 'worktree', 'prune', + capture=True, capture_stderr=True) if result.return_code != 0: - raise OSError('git worktree prune: %s' % result.stderr) + raise OSError(f'git worktree prune: {result.stderr}') def create_patches(branch, start, count, ignore_binary, series, signoff=True): @@ -326,11 +329,12 @@ def create_patches(branch, start, 
count, ignore_binary, series, signoff=True): git format-patch. Args: - branch: Branch to create patches from (None for current branch) - start: Commit to start from: 0=HEAD, 1=next one, etc. - count: number of commits to include - ignore_binary: Don't generate patches for binary files - series: Series object for this series (set of patches) + branch (str): Branch to create patches from (None for current branch) + start (int): Commit to start from: 0=HEAD, 1=next one, etc. + count (int): number of commits to include + ignore_binary (bool): Don't generate patches for binary files + series (Series): Series object for this series (set of patches) + signoff (bool): True to add signoff lines automatically Return: Filename of cover letter (None if none) List of filenames of patch files @@ -344,9 +348,9 @@ def create_patches(branch, start, count, ignore_binary, series, signoff=True): cmd.append('--cover-letter') prefix = series.GetPatchPrefix() if prefix: - cmd += ['--subject-prefix=%s' % prefix] + cmd += [f'--subject-prefix={prefix}'] brname = branch or 'HEAD' - cmd += ['%s~%d..%s~%d' % (brname, start + count, brname, start)] + cmd += [f'{brname}~{start + count}..{brname}~{start}'] stdout = command.run_list(cmd) files = stdout.splitlines() @@ -354,8 +358,7 @@ def create_patches(branch, start, count, ignore_binary, series, signoff=True): # We have an extra file if there is a cover letter if series.get('cover'): return files[0], files[1:] - else: - return None, files + return None, files def build_email_list(in_list, tag=None, alias=None, warn_on_error=True): @@ -369,11 +372,13 @@ def build_email_list(in_list, tag=None, alias=None, warn_on_error=True): command line parameter) then the email address is quoted. Args: - in_list: List of aliases/email addresses - tag: Text to put before each address - alias: Alias dictionary - warn_on_error: True to raise an error when an alias fails to match, - False to just print a message. + in_list (list of str): List of aliases/email addresses + tag (str): Text to put before each address + alias (dict): Alias dictionary: + key: alias + value: list of aliases or email addresses + warn_on_error (bool): True to raise an error when an alias fails to + match, False to just print a message. Returns: List of email addresses @@ -401,7 +406,7 @@ def build_email_list(in_list, tag=None, alias=None, warn_on_error=True): if item not in result: result.append(item) if tag: - return ['%s %s%s%s' % (tag, quote, email, quote) for email in result] + return [f'{tag} {quote}{email}{quote}' for email in result] return result @@ -409,24 +414,23 @@ def check_suppress_cc_config(): """Check if sendemail.suppresscc is configured correctly. Returns: - True if the option is configured correctly, False otherwise. + bool: True if the option is configured correctly, False otherwise. """ suppresscc = command.output_one_line( 'git', 'config', 'sendemail.suppresscc', raise_on_error=False) # Other settings should be fine. 
- if suppresscc == 'all' or suppresscc == 'cccmd': + if suppresscc in ('all', 'cccmd'): col = terminal.Color() - print((col.build(col.RED, "error") + - ": git config sendemail.suppresscc set to %s\n" - % (suppresscc)) + - " patman needs --cc-cmd to be run to set the cc list.\n" + - " Please run:\n" + - " git config --unset sendemail.suppresscc\n" + - " Or read the man page:\n" + - " git send-email --help\n" + - " and set an option that runs --cc-cmd\n") + print(col.build(col.RED, 'error') + + f': git config sendemail.suppresscc set to {suppresscc}\n' + + ' patman needs --cc-cmd to be run to set the cc list.\n' + + ' Please run:\n' + + ' git config --unset sendemail.suppresscc\n' + + ' Or read the man page:\n' + + ' git send-email --help\n' + + ' and set an option that runs --cc-cmd\n') return False return True @@ -434,24 +438,26 @@ def check_suppress_cc_config(): def email_patches(series, cover_fname, args, dry_run, warn_on_error, cc_fname, self_only=False, alias=None, in_reply_to=None, thread=False, - smtp_server=None, get_maintainer_script=None): + smtp_server=None): """Email a patch series. Args: - series: Series object containing destination info - cover_fname: filename of cover letter - args: list of filenames of patch files - dry_run: Just return the command that would be run - warn_on_error: True to print a warning when an alias fails to match, - False to ignore it. - cc_fname: Filename of Cc file for per-commit Cc - self_only: True to just email to yourself as a test - in_reply_to: If set we'll pass this to git as --in-reply-to. - Should be a message ID that this is in reply to. - thread: True to add --thread to git send-email (make + series (Series): Series object containing destination info + cover_fname (str or None): filename of cover letter + args (list of str): list of filenames of patch files + dry_run (bool): Just return the command that would be run + warn_on_error (bool): True to print a warning when an alias fails to + match, False to ignore it. + cc_fname (str): Filename of Cc file for per-commit Cc + self_only (bool): True to just email to yourself as a test + alias (dict or None): Alias dictionary: (None to use settings default) + key: alias + value: list of aliases or email addresses + in_reply_to (str or None): If set we'll pass this to git as + --in-reply-to - should be a message ID that this is in reply to. 
+ thread (bool): True to add --thread to git send-email (make all patches reply to cover-letter or first patch in series) - smtp_server: SMTP server to use to send patches - get_maintainer_script: File name of script to get maintainers emails + smtp_server (str or None): SMTP server to use to send patches Returns: Git command that was/would be run @@ -502,7 +508,7 @@ send --cc-cmd cc-fname" cover p1 p2' "Series-to: Fred Bloggs <f.blogs@napier.co.nz>\n" "Or do something like this\n" "git config sendemail.to u-boot@lists.denx.de") - return + return None cc = build_email_list(list(set(series.get('cc')) - set(series.get('to'))), '--cc', alias, warn_on_error) if self_only: @@ -511,15 +517,15 @@ send --cc-cmd cc-fname" cover p1 p2' cc = [] cmd = ['git', 'send-email', '--annotate'] if smtp_server: - cmd.append('--smtp-server=%s' % smtp_server) + cmd.append(f'--smtp-server={smtp_server}') if in_reply_to: - cmd.append('--in-reply-to="%s"' % in_reply_to) + cmd.append(f'--in-reply-to="{in_reply_to}"') if thread: cmd.append('--thread') cmd += to cmd += cc - cmd += ['--cc-cmd', '"%s send --cc-cmd %s"' % (sys.argv[0], cc_fname)] + cmd += ['--cc-cmd', f'"{sys.argv[0]} send --cc-cmd {cc_fname}"'] if cover_fname: cmd.append(cover_fname) cmd += args @@ -535,10 +541,13 @@ def lookup_email(lookup_name, alias=None, warn_on_error=True, level=0): TODO: Why not just use git's own alias feature? Args: - lookup_name: Alias or email address to look up - alias: Dictionary containing aliases (None to use settings default) - warn_on_error: True to print a warning when an alias fails to match, - False to ignore it. + lookup_name (str): Alias or email address to look up + alias (dict or None): Alias dictionary: (None to use settings default) + key: alias + value: list of aliases or email addresses + warn_on_error (bool): True to print a warning when an alias fails to + match, False to ignore it. + level (int): Depth of alias stack, used to detect recusion/loops Returns: tuple: @@ -591,16 +600,15 @@ def lookup_email(lookup_name, alias=None, warn_on_error=True, level=0): out_list = [] if level > 10: - msg = "Recursive email alias at '%s'" % lookup_name + msg = f"Recursive email alias at '{lookup_name}'" if warn_on_error: raise OSError(msg) - else: - print(col.build(col.RED, msg)) - return out_list + print(col.build(col.RED, msg)) + return out_list if lookup_name: if lookup_name not in alias: - msg = "Alias '%s' not found" % lookup_name + msg = f"Alias '{lookup_name}' not found" if warn_on_error: print(col.build(col.RED, msg)) return out_list @@ -617,7 +625,7 @@ def get_top_level(): """Return name of top-level directory for this git repo. Returns: - Full path to git top-level directory + str: Full path to git top-level directory This test makes sure that we are running tests in the right subdir @@ -632,7 +640,7 @@ def get_alias_file(): """Gets the name of the git alias file. 
Returns: - Filename of git alias file, or None if none + str: Filename of git alias file, or None if none """ fname = command.output_one_line('git', 'config', 'sendemail.aliasesfile', raise_on_error=False) @@ -652,7 +660,8 @@ def get_default_user_name(): Returns: User name found in .gitconfig file, or None if none """ - uname = command.output_one_line('git', 'config', '--global', '--includes', 'user.name') + uname = command.output_one_line('git', 'config', '--global', '--includes', + 'user.name') return uname @@ -662,7 +671,8 @@ def get_default_user_email(): Returns: User's email found in .gitconfig file, or None if none """ - uemail = command.output_one_line('git', 'config', '--global', '--includes', 'user.email') + uemail = command.output_one_line('git', 'config', '--global', '--includes', + 'user.email') return uemail @@ -679,25 +689,50 @@ def get_default_subject_prefix(): def setup(): - """Set up git utils, by reading the alias files.""" + """setup() - Set up git utils, by reading the alias files.""" # Check for a git alias file also - global use_no_decorate + global USE_NO_DECORATE alias_fname = get_alias_file() if alias_fname: settings.ReadGitAliases(alias_fname) cmd = log_cmd(None, count=0) - use_no_decorate = (command.run_pipe([cmd], raise_on_error=False) + USE_NO_DECORATE = (command.run_one(*cmd, raise_on_error=False) .return_code == 0) +def get_hash(spec): + """Get the hash of a commit + + Args: + spec (str): Git commit to show, e.g. 'my-branch~12' + + Returns: + str: Hash of commit + """ + return command.output_one_line('git', 'show', '-s', '--pretty=format:%H', + spec) + + def get_head(): """Get the hash of the current HEAD Returns: Hash of HEAD """ - return command.output_one_line('git', 'show', '-s', '--pretty=format:%H') + return get_hash('HEAD') + + +def get_branch(): + """Get the branch we are currently on + + Return: + str: branch name, or None if none + """ + out = command.output_one_line('git', 'rev-parse', '--abbrev-ref', 'HEAD') + if out == 'HEAD': + return None + return out if __name__ == "__main__": diff --git a/tools/u_boot_pylib/requirements.txt b/tools/u_boot_pylib/requirements.txt new file mode 100644 index 00000000000..1087e6f2857 --- /dev/null +++ b/tools/u_boot_pylib/requirements.txt @@ -0,0 +1 @@ +concurrencytest==0.1.2 diff --git a/tools/u_boot_pylib/tools.py b/tools/u_boot_pylib/tools.py index 0499a75526f..1afd289eadd 100644 --- a/tools/u_boot_pylib/tools.py +++ b/tools/u_boot_pylib/tools.py @@ -376,7 +376,7 @@ def run_result(name, *args, **kwargs): args = tuple(extra_args) + args name = os.path.expanduser(name) # Expand paths containing ~ all_args = (name,) + args - result = command.run_pipe([all_args], capture=True, capture_stderr=True, + result = command.run_one(*all_args, capture=True, capture_stderr=True, env=env, raise_on_error=False, binary=binary) if result.return_code: if raise_on_error: diff --git a/tools/ublimage.c b/tools/ublimage.c index 8f9b58c7983..a1bd807bfa0 100644 --- a/tools/ublimage.c +++ b/tools/ublimage.c @@ -178,6 +178,7 @@ static uint32_t parse_cfg_file(struct ubl_header *ublhdr, char *name) lineno, fld, &dcd_len); } } + free(line); fclose(fd); return dcd_len; |
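Several of the changes above convert single-command call sites from command.run_pipe() to the new command.run_one() and command.output() helpers, with failures now reported through CommandExc. Below is a hedged usage sketch of that calling convention, assuming the tools directory is on the Python path so that u_boot_pylib is importable, and using 'ls' and 'false' purely as illustrative commands.

    from u_boot_pylib import command

    # Old style: a single command still had to be wrapped in a one-element pipeline
    result = command.run_pipe([['ls', '.']], capture=True, raise_on_error=False)
    print(result.return_code, result.stdout)

    # New style: run_one() takes the command as *args and returns a CommandResult
    result = command.run_one('ls', '.', capture=True, raise_on_error=False)
    print(result.return_code, result.stdout)

    # output() is the shortcut when only the captured stdout string is needed
    print(command.output('ls', '.', raise_on_error=False))

    # Errors are now raised as CommandExc, which carries the partial CommandResult
    try:
        command.run_one('false', capture=True)
    except command.CommandExc as exc:
        print('command failed with code', exc.result.return_code)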