Diffstat (limited to 'tools/moveconfig.py')
-rwxr-xr-x | tools/moveconfig.py | 797
1 file changed, 467 insertions(+), 330 deletions(-)
diff --git a/tools/moveconfig.py b/tools/moveconfig.py index 373b395fda4..35fe6710d70 100755 --- a/tools/moveconfig.py +++ b/tools/moveconfig.py @@ -10,15 +10,17 @@ Move config options from headers to defconfig files. See doc/develop/moveconfig.rst for documentation. """ +from argparse import ArgumentParser import asteval import collections +from contextlib import ExitStack import copy import difflib +import doctest import filecmp import fnmatch import glob import multiprocessing -import optparse import os import queue import re @@ -28,6 +30,7 @@ import sys import tempfile import threading import time +import unittest from buildman import bsettings from buildman import kconfiglib @@ -69,44 +72,36 @@ CONFIG_DATABASE = 'moveconfig.db' CONFIG_LEN = len('CONFIG_') SIZES = { - "SZ_1": 0x00000001, "SZ_2": 0x00000002, - "SZ_4": 0x00000004, "SZ_8": 0x00000008, - "SZ_16": 0x00000010, "SZ_32": 0x00000020, - "SZ_64": 0x00000040, "SZ_128": 0x00000080, - "SZ_256": 0x00000100, "SZ_512": 0x00000200, - "SZ_1K": 0x00000400, "SZ_2K": 0x00000800, - "SZ_4K": 0x00001000, "SZ_8K": 0x00002000, - "SZ_16K": 0x00004000, "SZ_32K": 0x00008000, - "SZ_64K": 0x00010000, "SZ_128K": 0x00020000, - "SZ_256K": 0x00040000, "SZ_512K": 0x00080000, - "SZ_1M": 0x00100000, "SZ_2M": 0x00200000, - "SZ_4M": 0x00400000, "SZ_8M": 0x00800000, - "SZ_16M": 0x01000000, "SZ_32M": 0x02000000, - "SZ_64M": 0x04000000, "SZ_128M": 0x08000000, - "SZ_256M": 0x10000000, "SZ_512M": 0x20000000, - "SZ_1G": 0x40000000, "SZ_2G": 0x80000000, - "SZ_4G": 0x100000000 + 'SZ_1': 0x00000001, 'SZ_2': 0x00000002, + 'SZ_4': 0x00000004, 'SZ_8': 0x00000008, + 'SZ_16': 0x00000010, 'SZ_32': 0x00000020, + 'SZ_64': 0x00000040, 'SZ_128': 0x00000080, + 'SZ_256': 0x00000100, 'SZ_512': 0x00000200, + 'SZ_1K': 0x00000400, 'SZ_2K': 0x00000800, + 'SZ_4K': 0x00001000, 'SZ_8K': 0x00002000, + 'SZ_16K': 0x00004000, 'SZ_32K': 0x00008000, + 'SZ_64K': 0x00010000, 'SZ_128K': 0x00020000, + 'SZ_256K': 0x00040000, 'SZ_512K': 0x00080000, + 'SZ_1M': 0x00100000, 'SZ_2M': 0x00200000, + 'SZ_4M': 0x00400000, 'SZ_8M': 0x00800000, + 'SZ_16M': 0x01000000, 'SZ_32M': 0x02000000, + 'SZ_64M': 0x04000000, 'SZ_128M': 0x08000000, + 'SZ_256M': 0x10000000, 'SZ_512M': 0x20000000, + 'SZ_1G': 0x40000000, 'SZ_2G': 0x80000000, + 'SZ_4G': 0x100000000 } ### helper functions ### -def get_devnull(): - """Get the file object of '/dev/null' device.""" - try: - devnull = subprocess.DEVNULL # py3k - except AttributeError: - devnull = open(os.devnull, 'wb') - return devnull - def check_top_directory(): """Exit if we are not at the top of source directory.""" - for f in ('README', 'Licenses'): - if not os.path.exists(f): + for fname in 'README', 'Licenses': + if not os.path.exists(fname): sys.exit('Please run at the top of source directory.') def check_clean_directory(): """Exit if the source tree is not clean.""" - for f in ('.config', 'include/config'): - if os.path.exists(f): + for fname in '.config', 'include/config': + if os.path.exists(fname): sys.exit("source tree is not clean, please run 'make mrproper'") def get_make_cmd(): @@ -116,22 +111,22 @@ def get_make_cmd(): necessarily "make". (for example, "gmake" on FreeBSD). Returns the most appropriate command name on your system. 
""" - process = subprocess.Popen([SHOW_GNU_MAKE], stdout=subprocess.PIPE) - ret = process.communicate() - if process.returncode: - sys.exit('GNU Make not found') + with subprocess.Popen([SHOW_GNU_MAKE], stdout=subprocess.PIPE) as proc: + ret = proc.communicate() + if proc.returncode: + sys.exit('GNU Make not found') return ret[0].rstrip() def get_matched_defconfig(line): """Get the defconfig files that match a pattern Args: - line: Path or filename to match, e.g. 'configs/snow_defconfig' or + line (str): Path or filename to match, e.g. 'configs/snow_defconfig' or 'k2*_defconfig'. If no directory is provided, 'configs/' is prepended Returns: - a list of matching defconfig files + list of str: a list of matching defconfig files """ dirname = os.path.dirname(line) if dirname: @@ -144,38 +139,43 @@ def get_matched_defconfigs(defconfigs_file): """Get all the defconfig files that match the patterns in a file. Args: - defconfigs_file: File containing a list of defconfigs to process, or - '-' to read the list from stdin + defconfigs_file (str): File containing a list of defconfigs to process, + or '-' to read the list from stdin Returns: - A list of paths to defconfig files, with no duplicates + list of str: A list of paths to defconfig files, with no duplicates """ defconfigs = [] - if defconfigs_file == '-': - fd = sys.stdin - defconfigs_file = 'stdin' - else: - fd = open(defconfigs_file) - for i, line in enumerate(fd): - line = line.strip() - if not line: - continue # skip blank lines silently - if ' ' in line: - line = line.split(' ')[0] # handle 'git log' input - matched = get_matched_defconfig(line) - if not matched: - print("warning: %s:%d: no defconfig matched '%s'" % \ - (defconfigs_file, i + 1, line), file=sys.stderr) - - defconfigs += matched + with ExitStack() as stack: + if defconfigs_file == '-': + inf = sys.stdin + defconfigs_file = 'stdin' + else: + inf = stack.enter_context(open(defconfigs_file, encoding='utf-8')) + for i, line in enumerate(inf): + line = line.strip() + if not line: + continue # skip blank lines silently + if ' ' in line: + line = line.split(' ')[0] # handle 'git log' input + matched = get_matched_defconfig(line) + if not matched: + print(f"warning: {defconfigs_file}:{i + 1}: no defconfig matched '{line}'", + file=sys.stderr) + + defconfigs += matched # use set() to drop multiple matching - return [ defconfig[len('configs') + 1:] for defconfig in set(defconfigs) ] + return [defconfig[len('configs') + 1:] for defconfig in set(defconfigs)] def get_all_defconfigs(): - """Get all the defconfig files under the configs/ directory.""" + """Get all the defconfig files under the configs/ directory. + + Returns: + list of str: List of paths to defconfig files + """ defconfigs = [] - for (dirpath, dirnames, filenames) in os.walk('configs'): + for (dirpath, _, filenames) in os.walk('configs'): dirpath = dirpath[len('configs') + 1:] for filename in fnmatch.filter(filenames, '*_defconfig'): defconfigs.append(os.path.join(dirpath, filename)) @@ -189,20 +189,18 @@ def color_text(color_enabled, color, string): # Otherwise, additional whitespace or line-feed might be printed. return '\n'.join([ '\033[' + color + 'm' + s + '\033[0m' if s else '' for s in string.split('\n') ]) - else: - return string + return string -def show_diff(a, b, file_path, color_enabled): +def show_diff(alines, blines, file_path, color_enabled): """Show unidified diff. 
- Arguments: - a: A list of lines (before) - b: A list of lines (after) - file_path: Path to the file - color_enabled: Display the diff in color + Args: + alines (list of str): A list of lines (before) + blines (list of str): A list of lines (after) + file_path (str): Path to the file + color_enabled (bool): Display the diff in color """ - - diff = difflib.unified_diff(a, b, + diff = difflib.unified_diff(alines, blines, fromfile=os.path.join('a', file_path), tofile=os.path.join('b', file_path)) @@ -214,21 +212,23 @@ def show_diff(a, b, file_path, color_enabled): else: print(line, end=' ') -def extend_matched_lines(lines, matched, pre_patterns, post_patterns, extend_pre, - extend_post): +def extend_matched_lines(lines, matched, pre_patterns, post_patterns, + extend_pre, extend_post): """Extend matched lines if desired patterns are found before/after already matched lines. - Arguments: - lines: A list of lines handled. - matched: A list of line numbers that have been already matched. - (will be updated by this function) - pre_patterns: A list of regular expression that should be matched as - preamble. - post_patterns: A list of regular expression that should be matched as - postamble. - extend_pre: Add the line number of matched preamble to the matched list. - extend_post: Add the line number of matched postamble to the matched list. + Args: + lines (list of str): list of lines handled. + matched (list of int): list of line numbers that have been already + matched (will be updated by this function) + pre_patterns (list of re.Pattern): list of regular expression that should + be matched as preamble + post_patterns (list of re.Pattern): list of regular expression that should + be matched as postamble + extend_pre (bool): Add the line number of matched preamble to the matched + list + extend_post (bool): Add the line number of matched postamble to the + matched list """ extended_matched = [] @@ -243,15 +243,15 @@ def extend_matched_lines(lines, matched, pre_patterns, post_patterns, extend_pre if j >= len(lines): break - for p in pre_patterns: - if p.search(lines[i - 1]): + for pat in pre_patterns: + if pat.search(lines[i - 1]): break else: # not matched continue - for p in post_patterns: - if p.search(lines[j]): + for pat in post_patterns: + if pat.search(lines[j]): break else: # not matched @@ -265,13 +265,21 @@ def extend_matched_lines(lines, matched, pre_patterns, post_patterns, extend_pre matched += extended_matched matched.sort() -def confirm(options, prompt): - if not options.yes: +def confirm(args, prompt): + """Ask the user to confirm something + + Args: + args (Namespace ): program arguments + + Returns: + bool: True to confirm, False to cancel/stop + """ + if not args.yes: while True: - choice = input('{} [y/n]: '.format(prompt)) + choice = input(f'{prompt} [y/n]: ') choice = choice.lower() print(choice) - if choice == 'y' or choice == 'n': + if choice in ('y', 'n'): break if choice == 'n': @@ -279,47 +287,84 @@ def confirm(options, prompt): return True -def cleanup_empty_blocks(header_path, options): - """Clean up empty conditional blocks +def write_file(fname, data): + """Write data to a file - Arguments: - header_path: path to the cleaned file. - options: option flags. 
+ Args: + fname (str): Filename to write to + data (list of str): Lines to write (with or without trailing newline); + or str to write """ - pattern = re.compile(r'^\s*#\s*if.*$\n^\s*#\s*endif.*$\n*', flags=re.M) - with open(header_path) as f: + with open(fname, 'w', encoding='utf-8') as out: + if isinstance(data, list): + for line in data: + print(line.rstrip('\n'), file=out) + else: + out.write(data) + +def read_file(fname, as_lines=True, skip_unicode=False): + """Read a file and return the contents + + Args: + fname (str): Filename to read from + as_lines: Return file contents as a list of lines + skip_unicode (bool): True to report unicode errors and continue + + Returns: + iter of str: List of ;ines from the file with newline removed; str if + as_lines is False with newlines intact; or None if a unicode error + occurred + + Raises: + UnicodeDecodeError: Unicode error occurred when reading + """ + with open(fname, encoding='utf-8') as inf: try: - data = f.read() + if as_lines: + return [line.rstrip('\n') for line in inf.readlines()] + else: + return inf.read() except UnicodeDecodeError as e: - print("Failed on file %s': %s" % (header_path, e)) - return + if not skip_unicode: + raises + print("Failed on file %s': %s" % (fname, e)) + return None + +def cleanup_empty_blocks(header_path, args): + """Clean up empty conditional blocks + + Args: + header_path (str): path to the cleaned file. + args (Namespace): program arguments + """ + pattern = re.compile(r'^\s*#\s*if.*$\n^\s*#\s*endif.*$\n*', flags=re.M) + data = read_file(header_path, as_lines=False, skip_unicode=True) + if data is None: + return new_data = pattern.sub('\n', data) show_diff(data.splitlines(True), new_data.splitlines(True), header_path, - options.color) + args.color) - if options.dry_run: + if args.dry_run: return - with open(header_path, 'w') as f: - f.write(new_data) + if new_data != data: + write_file(header_path, new_data) -def cleanup_one_header(header_path, patterns, options): +def cleanup_one_header(header_path, patterns, args): """Clean regex-matched lines away from a file. - Arguments: + Args: header_path: path to the cleaned file. patterns: list of regex patterns. Any lines matching to these patterns are deleted. - options: option flags. + args (Namespace): program arguments """ - with open(header_path) as f: - try: - lines = f.readlines() - except UnicodeDecodeError as e: - print("Failed on file %s': %s" % (header_path, e)) - return + lines = read_file(header_path, skip_unicode=True) + if lines is None: + return matched = [] for i, line in enumerate(lines): @@ -360,23 +405,21 @@ def cleanup_one_header(header_path, patterns, options): for i in reversed(matched): tolines.pop(i) - show_diff(lines, tolines, header_path, options.color) + show_diff(lines, tolines, header_path, args.color) - if options.dry_run: + if args.dry_run: return - with open(header_path, 'w') as f: - for line in tolines: - f.write(line) + write_file(header_path, tolines) -def cleanup_headers(configs, options): +def cleanup_headers(configs, args): """Delete config defines from board headers. - Arguments: + Args: configs: A list of CONFIGs to remove. - options: option flags. 
+ args (Namespace): program arguments """ - if not confirm(options, 'Clean up headers?'): + if not confirm(args, 'Clean up headers?'): return patterns = [] @@ -395,23 +438,22 @@ def cleanup_headers(configs, options): # This file contains UTF-16 data and no CONFIG symbols if header_path == 'include/video_font_data.h': continue - cleanup_one_header(header_path, patterns, options) - cleanup_empty_blocks(header_path, options) + cleanup_one_header(header_path, patterns, args) + cleanup_empty_blocks(header_path, args) -def cleanup_one_extra_option(defconfig_path, configs, options): +def cleanup_one_extra_option(defconfig_path, configs, args): """Delete config defines in CONFIG_SYS_EXTRA_OPTIONS in one defconfig file. - Arguments: + Args: defconfig_path: path to the cleaned defconfig file. configs: A list of CONFIGs to remove. - options: option flags. + args (Namespace): program arguments """ start = 'CONFIG_SYS_EXTRA_OPTIONS="' end = '"\n' - with open(defconfig_path) as f: - lines = f.readlines() + lines = read_file(defconfig_path) for i, line in enumerate(lines): if line.startswith(start) and line.endswith(end): @@ -438,23 +480,21 @@ def cleanup_one_extra_option(defconfig_path, configs, options): else: tolines.pop(i) - show_diff(lines, tolines, defconfig_path, options.color) + show_diff(lines, tolines, defconfig_path, args.color) - if options.dry_run: + if args.dry_run: return - with open(defconfig_path, 'w') as f: - for line in tolines: - f.write(line) + write_file(defconfig_path, tolines) -def cleanup_extra_options(configs, options): +def cleanup_extra_options(configs, args): """Delete config defines in CONFIG_SYS_EXTRA_OPTIONS in defconfig files. - Arguments: + Args: configs: A list of CONFIGs to remove. - options: option flags. + args (Namespace): program arguments """ - if not confirm(options, 'Clean up CONFIG_SYS_EXTRA_OPTIONS?'): + if not confirm(args, 'Clean up CONFIG_SYS_EXTRA_OPTIONS?'): return configs = [ config[len('CONFIG_'):] for config in configs ] @@ -463,25 +503,23 @@ def cleanup_extra_options(configs, options): for defconfig in defconfigs: cleanup_one_extra_option(os.path.join('configs', defconfig), configs, - options) + args) -def cleanup_whitelist(configs, options): +def cleanup_whitelist(configs, args): """Delete config whitelist entries - Arguments: + Args: configs: A list of CONFIGs to remove. - options: option flags. + args (Namespace): program arguments """ - if not confirm(options, 'Clean up whitelist entries?'): + if not confirm(args, 'Clean up whitelist entries?'): return - with open(os.path.join('scripts', 'config_whitelist.txt')) as f: - lines = f.readlines() + lines = read_file(os.path.join('scripts', 'config_whitelist.txt')) lines = [x for x in lines if x.strip() not in configs] - with open(os.path.join('scripts', 'config_whitelist.txt'), 'w') as f: - f.write(''.join(lines)) + write_file(os.path.join('scripts', 'config_whitelist.txt'), lines) def find_matching(patterns, line): for pat in patterns: @@ -489,22 +527,21 @@ def find_matching(patterns, line): return True return False -def cleanup_readme(configs, options): +def cleanup_readme(configs, args): """Delete config description in README - Arguments: + Args: configs: A list of CONFIGs to remove. - options: option flags. 
+ args (Namespace): program arguments """ - if not confirm(options, 'Clean up README?'): + if not confirm(args, 'Clean up README?'): return patterns = [] for config in configs: patterns.append(re.compile(r'^\s+%s' % config)) - with open('README') as f: - lines = f.readlines() + lines = read_file('README') found = False newlines = [] @@ -520,8 +557,7 @@ def cleanup_readme(configs, options): if not found: newlines.append(line) - with open('README', 'w') as f: - f.write(''.join(newlines)) + write_file('README', newlines) def try_expand(line): """If value looks like an expression, try expanding it @@ -534,12 +570,12 @@ def try_expand(line): aeval = asteval.Interpreter( usersyms=SIZES, minimal=True ) cfg, val = re.split("=", line) val= val.strip('\"') - if re.search("[*+-/]|<<|SZ_+|\(([^\)]+)\)", val): + if re.search(r'[*+-/]|<<|SZ_+|\(([^\)]+)\)', val): newval = hex(aeval(val)) - print("\tExpanded expression %s to %s" % (val, newval)) + print('\tExpanded expression %s to %s' % (val, newval)) return cfg+'='+newval except: - print("\tFailed to expand expression in %s" % line) + print('\tFailed to expand expression in %s' % line) return line @@ -552,7 +588,7 @@ class Progress: def __init__(self, total): """Create a new progress indicator. - Arguments: + Args: total: A number of defconfig files to process. """ self.current = 0 @@ -588,16 +624,16 @@ class KconfigParser: re_arch = re.compile(r'CONFIG_SYS_ARCH="(.*)"') re_cpu = re.compile(r'CONFIG_SYS_CPU="(.*)"') - def __init__(self, configs, options, build_dir): + def __init__(self, configs, args, build_dir): """Create a new parser. - Arguments: + Args: configs: A list of CONFIGs to move. - options: option flags. + args (Namespace): program arguments build_dir: Build directory. """ self.configs = configs - self.options = options + self.args = args self.dotconfig = os.path.join(build_dir, '.config') self.autoconf = os.path.join(build_dir, 'include', 'autoconf.mk') self.spl_autoconf = os.path.join(build_dir, 'spl', 'include', @@ -613,7 +649,7 @@ class KconfigParser: """ arch = '' cpu = '' - for line in open(self.dotconfig): + for line in read_file(self.dotconfig): m = self.re_arch.match(line) if m: arch = m.group(1) @@ -638,7 +674,7 @@ class KconfigParser: defconfig, .config, and include/autoconf.mk in order to decide which action should be taken for this defconfig. - Arguments: + Args: config: CONFIG name to parse. dotconfig_lines: lines from the .config file. autoconf_lines: lines from the include/autoconf.mk file. @@ -688,7 +724,7 @@ class KconfigParser: searching the target options. Move the config option(s) to the .config as needed. - Arguments: + Args: defconfig: defconfig name. Returns: @@ -702,7 +738,7 @@ class KconfigParser: suspicious = False rm_files = [self.config_autoconf, self.autoconf] - if self.options.spl: + if self.args.spl: if os.path.exists(self.spl_autoconf): autoconf_path = self.spl_autoconf rm_files.append(self.spl_autoconf) @@ -710,16 +746,14 @@ class KconfigParser: for f in rm_files: os.remove(f) return (updated, suspicious, - color_text(self.options.color, COLOR_BROWN, + color_text(self.args.color, COLOR_BROWN, "SPL is not enabled. 
Skipped.") + '\n') else: autoconf_path = self.autoconf - with open(self.dotconfig) as f: - dotconfig_lines = f.readlines() + dotconfig_lines = read_file(self.dotconfig) - with open(autoconf_path) as f: - autoconf_lines = f.readlines() + autoconf_lines = read_file(autoconf_path) for config in self.configs: result = self.parse_one_config(config, dotconfig_lines, @@ -733,10 +767,10 @@ class KconfigParser: actlog = "Move '%s'" % value log_color = COLOR_LIGHT_GREEN elif action == ACTION_NO_ENTRY: - actlog = "%s is not defined in Kconfig. Do nothing." % value + actlog = '%s is not defined in Kconfig. Do nothing.' % value log_color = COLOR_LIGHT_BLUE elif action == ACTION_NO_ENTRY_WARN: - actlog = "%s is not defined in Kconfig (suspicious). Do nothing." % value + actlog = '%s is not defined in Kconfig (suspicious). Do nothing.' % value log_color = COLOR_YELLOW suspicious = True elif action == ACTION_NO_CHANGE: @@ -744,17 +778,17 @@ class KconfigParser: % value log_color = COLOR_LIGHT_PURPLE elif action == ACTION_SPL_NOT_EXIST: - actlog = "SPL is not enabled for this defconfig. Skip." + actlog = 'SPL is not enabled for this defconfig. Skip.' log_color = COLOR_PURPLE else: - sys.exit("Internal Error. This should not happen.") + sys.exit('Internal Error. This should not happen.') - log += color_text(self.options.color, log_color, actlog) + '\n' + log += color_text(self.args.color, log_color, actlog) + '\n' - with open(self.dotconfig, 'a') as f: + with open(self.dotconfig, 'a', encoding='utf-8') as out: for (action, value) in results: if action == ACTION_MOVE: - f.write(value + '\n') + out.write(value + '\n') updated = True self.results = results @@ -773,14 +807,13 @@ class KconfigParser: log = '' - with open(self.defconfig) as f: - defconfig_lines = f.readlines() + defconfig_lines = read_file(self.defconfig) for (action, value) in self.results: if action != ACTION_MOVE: continue if not value + '\n' in defconfig_lines: - log += color_text(self.options.color, COLOR_YELLOW, + log += color_text(self.args.color, COLOR_YELLOW, "'%s' was removed by savedefconfig.\n" % value) @@ -823,14 +856,14 @@ class Slot: for faster processing. """ - def __init__(self, toolchains, configs, options, progress, devnull, + def __init__(self, toolchains, configs, args, progress, devnull, make_cmd, reference_src_dir, db_queue): """Create a new process slot. - Arguments: + Args: toolchains: Toolchains object containing toolchains. configs: A list of CONFIGs to move. - options: option flags. + args: Program arguments progress: A progress indicator. devnull: A file object of '/dev/null'. make_cmd: command name of GNU Make. @@ -839,14 +872,14 @@ class Slot: db_queue: output queue to write config info for the database """ self.toolchains = toolchains - self.options = options + self.args = args self.progress = progress self.build_dir = tempfile.mkdtemp() self.devnull = devnull self.make_cmd = (make_cmd, 'O=' + self.build_dir) self.reference_src_dir = reference_src_dir self.db_queue = db_queue - self.parser = KconfigParser(configs, options, self.build_dir) + self.parser = KconfigParser(configs, args, self.build_dir) self.state = STATE_IDLE self.failed_boards = set() self.suspicious_boards = set() @@ -873,7 +906,7 @@ class Slot: given defconfig and add it to the slot. Just returns False if the slot is occupied (i.e. the current subprocess is still running). - Arguments: + Args: defconfig: defconfig name. 
Returns: @@ -921,24 +954,24 @@ class Slot: if self.current_src_dir: self.current_src_dir = None self.do_defconfig() - elif self.options.build_db: + elif self.args.build_db: self.do_build_db() else: self.do_savedefconfig() elif self.state == STATE_SAVEDEFCONFIG: self.update_defconfig() else: - sys.exit("Internal Error. This should not happen.") + sys.exit('Internal Error. This should not happen.') return True if self.state == STATE_IDLE else False def handle_error(self): """Handle error cases.""" - self.log += color_text(self.options.color, COLOR_LIGHT_RED, - "Failed to process.\n") - if self.options.verbose: - self.log += color_text(self.options.color, COLOR_LIGHT_CYAN, + self.log += color_text(self.args.color, COLOR_LIGHT_RED, + 'Failed to process.\n') + if self.args.verbose: + self.log += color_text(self.args.color, COLOR_LIGHT_CYAN, self.ps.stderr.read().decode()) self.finish(False) @@ -959,7 +992,7 @@ class Slot: try: toolchain = self.toolchains.Select(arch) except ValueError: - self.log += color_text(self.options.color, COLOR_YELLOW, + self.log += color_text(self.args.color, COLOR_YELLOW, "Tool chain for '%s' is missing. Do nothing.\n" % arch) self.finish(False) return @@ -976,11 +1009,10 @@ class Slot: def do_build_db(self): """Add the board to the database""" configs = {} - with open(os.path.join(self.build_dir, AUTO_CONF_PATH)) as fd: - for line in fd.readlines(): - if line.startswith('CONFIG'): - config, value = line.split('=', 1) - configs[config] = value.rstrip() + for line in read_file(os.path.join(self.build_dir, AUTO_CONF_PATH)): + if line.startswith('CONFIG'): + config, value = line.split('=', 1) + configs[config] = value.rstrip() self.db_queue.put([self.defconfig, configs]) self.finish(True) @@ -992,14 +1024,14 @@ class Slot: self.suspicious_boards.add(self.defconfig) self.log += log - if not self.options.force_sync and not updated: + if not self.args.force_sync and not updated: self.finish(True) return if updated: - self.log += color_text(self.options.color, COLOR_LIGHT_GREEN, - "Syncing by savedefconfig...\n") + self.log += color_text(self.args.color, COLOR_LIGHT_GREEN, + 'Syncing by savedefconfig...\n') else: - self.log += "Syncing by savedefconfig (forced by option)...\n" + self.log += 'Syncing by savedefconfig (forced by option)...\n' cmd = list(self.make_cmd) cmd.append('savedefconfig') @@ -1019,17 +1051,17 @@ class Slot: updated = not filecmp.cmp(orig_defconfig, new_defconfig) if updated: - self.log += color_text(self.options.color, COLOR_LIGHT_BLUE, - "defconfig was updated.\n") + self.log += color_text(self.args.color, COLOR_LIGHT_BLUE, + 'defconfig was updated.\n') - if not self.options.dry_run and updated: + if not self.args.dry_run and updated: shutil.move(new_defconfig, orig_defconfig) self.finish(True) def finish(self, success): """Display log along with progress and go to the idle state. - Arguments: + Args: success: Should be True when the defconfig was processed successfully, or False when it fails. """ @@ -1042,8 +1074,8 @@ class Slot: print(log, file=(sys.stdout if success else sys.stderr)) if not success: - if self.options.exit_on_error: - sys.exit("Exit on error.") + if self.args.exit_on_error: + sys.exit('Exit on error.') # If --exit-on-error flag is not set, skip this board and continue. # Record the failed board. 
self.failed_boards.add(self.defconfig) @@ -1066,32 +1098,32 @@ class Slots: """Controller of the array of subprocess slots.""" - def __init__(self, toolchains, configs, options, progress, + def __init__(self, toolchains, configs, args, progress, reference_src_dir, db_queue): """Create a new slots controller. - Arguments: + Args: toolchains: Toolchains object containing toolchains. configs: A list of CONFIGs to move. - options: option flags. + args: Program arguments progress: A progress indicator. reference_src_dir: Determine the true starting config state from this source tree. db_queue: output queue to write config info for the database """ - self.options = options + self.args = args self.slots = [] - devnull = get_devnull() + devnull = subprocess.DEVNULL make_cmd = get_make_cmd() - for i in range(options.jobs): - self.slots.append(Slot(toolchains, configs, options, progress, + for i in range(args.jobs): + self.slots.append(Slot(toolchains, configs, args, progress, devnull, make_cmd, reference_src_dir, db_queue)) def add(self, defconfig): """Add a new subprocess if a vacant slot is found. - Arguments: + Args: defconfig: defconfig name to be put into. Returns: @@ -1135,14 +1167,13 @@ class Slots: if boards: boards = '\n'.join(boards) + '\n' - msg = "The following boards were not processed due to error:\n" + msg = 'The following boards were not processed due to error:\n' msg += boards - msg += "(the list has been saved in %s)\n" % output_file - print(color_text(self.options.color, COLOR_LIGHT_RED, + msg += '(the list has been saved in %s)\n' % output_file + print(color_text(self.args.color, COLOR_LIGHT_RED, msg), file=sys.stderr) - with open(output_file, 'w') as f: - f.write(boards) + write_file(output_file, boards) def show_suspicious_boards(self): """Display all boards (defconfigs) with possible misconversion.""" @@ -1154,15 +1185,14 @@ class Slots: if boards: boards = '\n'.join(boards) + '\n' - msg = "The following boards might have been converted incorrectly.\n" - msg += "It is highly recommended to check them manually:\n" + msg = 'The following boards might have been converted incorrectly.\n' + msg += 'It is highly recommended to check them manually:\n' msg += boards - msg += "(the list has been saved in %s)\n" % output_file - print(color_text(self.options.color, COLOR_YELLOW, + msg += '(the list has been saved in %s)\n' % output_file + print(color_text(self.args.color, COLOR_YELLOW, msg), file=sys.stderr) - with open(output_file, 'w') as f: - f.write(boards) + write_file(output_file, boards) class ReferenceSource: @@ -1171,11 +1201,11 @@ class ReferenceSource: def __init__(self, commit): """Create a reference source directory based on a specified commit. - Arguments: + Args: commit: commit to git-clone """ self.src_dir = tempfile.mkdtemp() - print("Cloning git repo to a separate work directory...") + print('Cloning git repo to a separate work directory...') subprocess.check_output(['git', 'clone', os.getcwd(), '.'], cwd=self.src_dir) print("Checkout '%s' to build the original autoconf.mk." % \ @@ -1198,38 +1228,38 @@ class ReferenceSource: return self.src_dir -def move_config(toolchains, configs, options, db_queue): +def move_config(toolchains, configs, args, db_queue): """Move config options to defconfig files. - Arguments: + Args: configs: A list of CONFIGs to move. - options: option flags + args: Program arguments """ if len(configs) == 0: - if options.force_sync: + if args.force_sync: print('No CONFIG is specified. 
You are probably syncing defconfigs.', end=' ') - elif options.build_db: + elif args.build_db: print('Building %s database' % CONFIG_DATABASE) else: print('Neither CONFIG nor --force-sync is specified. Nothing will happen.', end=' ') else: print('Move ' + ', '.join(configs), end=' ') - print('(jobs: %d)\n' % options.jobs) + print('(jobs: %d)\n' % args.jobs) - if options.git_ref: - reference_src = ReferenceSource(options.git_ref) + if args.git_ref: + reference_src = ReferenceSource(args.git_ref) reference_src_dir = reference_src.get_dir() else: reference_src_dir = None - if options.defconfigs: - defconfigs = get_matched_defconfigs(options.defconfigs) + if args.defconfigs: + defconfigs = get_matched_defconfigs(args.defconfigs) else: defconfigs = get_all_defconfigs() progress = Progress(len(defconfigs)) - slots = Slots(toolchains, configs, options, progress, reference_src_dir, - db_queue) + slots = Slots(toolchains, configs, args, progress, reference_src_dir, + db_queue) # Main loop to process defconfig files: # Add a new subprocess into a vacant slot. @@ -1263,7 +1293,7 @@ def find_kconfig_rules(kconf, config, imply_config): sym = kconf.syms.get(imply_config) if sym: for sel, cond in (sym.selects + sym.implies): - if sel == config: + if sel.name == config: return sym return None @@ -1291,13 +1321,13 @@ def check_imply_rule(kconf, config, imply_config): nodes = sym.nodes if len(nodes) != 1: return '%d locations' % len(nodes) - fname, linenum = nodes[0].filename, nodes[0].linern + node = nodes[0] + fname, linenum = node.filename, node.linenr cwd = os.getcwd() if cwd and fname.startswith(cwd): fname = fname[len(cwd) + 1:] file_line = ' at %s:%d' % (fname, linenum) - with open(fname) as fd: - data = fd.read().splitlines() + data = read_file(fname) if data[linenum - 1] != 'config %s' % imply_config: return None, 0, 'bad sym format %s%s' % (data[linenum], file_line) return fname, linenum, 'adding%s' % file_line @@ -1314,14 +1344,13 @@ def add_imply_rule(config, fname, linenum): Message indicating the result """ file_line = ' at %s:%d' % (fname, linenum) - data = open(fname).read().splitlines() + data = read_file(fname) linenum -= 1 for offset, line in enumerate(data[linenum:]): if line.strip().startswith('help') or not line: data.insert(linenum + offset, '\timply %s' % config) - with open(fname, 'w') as fd: - fd.write('\n'.join(data) + '\n') + write_file(fname, data) return 'added%s' % file_line return 'could not insert%s' @@ -1336,7 +1365,54 @@ IMPLY_FLAGS = { 'non-arch-board': [ IMPLY_NON_ARCH_BOARD, 'Allow Kconfig options outside arch/ and /board/ to imply'], -}; +} + + +def read_database(): + """Read in the config database + + Returns: + tuple: + set of all config options seen (each a str) + set of all defconfigs seen (each a str) + dict of configs for each defconfig: + key: defconfig name, e.g. 
"MPC8548CDS_legacy_defconfig" + value: dict: + key: CONFIG option + value: Value of option + dict of defconfigs for each config: + key: CONFIG option + value: set of boards using that option + + """ + configs = {} + + # key is defconfig name, value is dict of (CONFIG_xxx, value) + config_db = {} + + # Set of all config options we have seen + all_configs = set() + + # Set of all defconfigs we have seen + all_defconfigs = set() + + defconfig_db = collections.defaultdict(set) + for line in read_file(CONFIG_DATABASE): + line = line.rstrip() + if not line: # Separator between defconfigs + config_db[defconfig] = configs + all_defconfigs.add(defconfig) + configs = {} + elif line[0] == ' ': # CONFIG line + config, value = line.strip().split('=', 1) + configs[config] = value + defconfig_db[config].add(defconfig) + all_configs.add(config) + else: # New defconfig + defconfig = line + + return all_configs, all_defconfigs, config_db, defconfig_db + def do_imply_config(config_list, add_imply, imply_flags, skip_added, check_kconfig=True, find_superset=False): @@ -1382,39 +1458,11 @@ def do_imply_config(config_list, add_imply, imply_flags, skip_added, """ kconf = KconfigScanner().conf if check_kconfig else None if add_imply and add_imply != 'all': - add_imply = add_imply.split() - - # key is defconfig name, value is dict of (CONFIG_xxx, value) - config_db = {} + add_imply = add_imply.split(',') - # Holds a dict containing the set of defconfigs that contain each config - # key is config, value is set of defconfigs using that config - defconfig_db = collections.defaultdict(set) - - # Set of all config options we have seen - all_configs = set() - - # Set of all defconfigs we have seen - all_defconfigs = set() + all_configs, all_defconfigs, config_db, defconfig_db = read_database() - # Read in the database - configs = {} - with open(CONFIG_DATABASE) as fd: - for line in fd.readlines(): - line = line.rstrip() - if not line: # Separator between defconfigs - config_db[defconfig] = configs - all_defconfigs.add(defconfig) - configs = {} - elif line[0] == ' ': # CONFIG line - config, value = line.strip().split('=', 1) - configs[config] = value - defconfig_db[config].add(defconfig) - all_configs.add(config) - else: # New defconfig - defconfig = line - - # Work through each target config option in tern, independently + # Work through each target config option in turn, independently for config in config_list: defconfigs = defconfig_db.get(config) if not defconfigs: @@ -1436,10 +1484,10 @@ def do_imply_config(config_list, add_imply, imply_flags, skip_added, for imply_config in rest_configs: if 'ERRATUM' in imply_config: continue - if not (imply_flags & IMPLY_CMD): + if not imply_flags & IMPLY_CMD: if 'CONFIG_CMD' in imply_config: continue - if not (imply_flags & IMPLY_TARGET): + if not imply_flags & IMPLY_TARGET: if 'CONFIG_TARGET' in imply_config: continue @@ -1522,7 +1570,7 @@ def do_imply_config(config_list, add_imply, imply_flags, skip_added, in_arch_board = not sym or (fname.startswith('arch') or fname.startswith('board')) if (not in_arch_board and - not (imply_flags & IMPLY_NON_ARCH_BOARD)): + not imply_flags & IMPLY_NON_ARCH_BOARD): continue if add_imply and (add_imply == 'all' or @@ -1546,76 +1594,162 @@ def do_imply_config(config_list, add_imply, imply_flags, skip_added, add_imply_rule(config[CONFIG_LEN:], fname, linenum) +def do_find_config(config_list): + """Find boards with a given combination of CONFIGs + + Params: + config_list: List of CONFIG options to check (each a string consisting + of a config 
option, with or without a CONFIG_ prefix. If an option + is preceded by a tilde (~) then it must be false, otherwise it must + be true) + """ + all_configs, all_defconfigs, config_db, defconfig_db = read_database() + + # Get the whitelist + adhoc_configs = set(read_file('scripts/config_whitelist.txt')) + + # Start with all defconfigs + out = all_defconfigs + + # Work through each config in turn + adhoc = [] + for item in config_list: + # Get the real config name and whether we want this config or not + cfg = item + want = True + if cfg[0] == '~': + want = False + cfg = cfg[1:] + + if cfg in adhoc_configs: + adhoc.append(cfg) + continue + + # Search everything that is still in the running. If it has a config + # that we want, or doesn't have one that we don't, add it into the + # running for the next stage + in_list = out + out = set() + for defc in in_list: + has_cfg = cfg in config_db[defc] + if has_cfg == want: + out.add(defc) + if adhoc: + print(f"Error: Not in Kconfig: %s" % ' '.join(adhoc)) + else: + print(f'{len(out)} matches') + print(' '.join(out)) + + +def prefix_config(cfg): + """Prefix a config with CONFIG_ if needed + + This handles ~ operator, which indicates that the CONFIG should be disabled + + >>> prefix_config('FRED') + 'CONFIG_FRED' + >>> prefix_config('CONFIG_FRED') + 'CONFIG_FRED' + >>> prefix_config('~FRED') + '~CONFIG_FRED' + >>> prefix_config('~CONFIG_FRED') + '~CONFIG_FRED' + >>> prefix_config('A123') + 'CONFIG_A123' + """ + op = '' + if cfg[0] == '~': + op = cfg[0] + cfg = cfg[1:] + if not cfg.startswith('CONFIG_'): + cfg = 'CONFIG_' + cfg + return op + cfg + + def main(): try: cpu_count = multiprocessing.cpu_count() except NotImplementedError: cpu_count = 1 - parser = optparse.OptionParser() - # Add options here - parser.add_option('-a', '--add-imply', type='string', default='', + epilog = '''Move config options from headers to defconfig files. 
See +doc/develop/moveconfig.rst for documentation.''' + + parser = ArgumentParser(epilog=epilog) + # Add arguments here + parser.add_argument('-a', '--add-imply', type=str, default='', help='comma-separated list of CONFIG options to add ' "an 'imply' statement to for the CONFIG in -i") - parser.add_option('-A', '--skip-added', action='store_true', default=False, + parser.add_argument('-A', '--skip-added', action='store_true', default=False, help="don't show options which are already marked as " 'implying others') - parser.add_option('-b', '--build-db', action='store_true', default=False, + parser.add_argument('-b', '--build-db', action='store_true', default=False, help='build a CONFIG database') - parser.add_option('-c', '--color', action='store_true', default=False, + parser.add_argument('-c', '--color', action='store_true', default=False, help='display the log in color') - parser.add_option('-C', '--commit', action='store_true', default=False, + parser.add_argument('-C', '--commit', action='store_true', default=False, help='Create a git commit for the operation') - parser.add_option('-d', '--defconfigs', type='string', + parser.add_argument('-d', '--defconfigs', type=str, help='a file containing a list of defconfigs to move, ' "one per line (for example 'snow_defconfig') " "or '-' to read from stdin") - parser.add_option('-i', '--imply', action='store_true', default=False, - help='find options which imply others') - parser.add_option('-I', '--imply-flags', type='string', default='', - help="control the -i option ('help' for help") - parser.add_option('-n', '--dry-run', action='store_true', default=False, - help='perform a trial run (show log with no changes)') - parser.add_option('-e', '--exit-on-error', action='store_true', + parser.add_argument('-e', '--exit-on-error', action='store_true', default=False, help='exit immediately on any error') - parser.add_option('-s', '--force-sync', action='store_true', default=False, - help='force sync by savedefconfig') - parser.add_option('-S', '--spl', action='store_true', default=False, - help='parse config options defined for SPL build') - parser.add_option('-H', '--headers-only', dest='cleanup_headers_only', + parser.add_argument('-f', '--find', action='store_true', default=False, + help='Find boards with a given config combination') + parser.add_argument('-H', '--headers-only', dest='cleanup_headers_only', action='store_true', default=False, help='only cleanup the headers') - parser.add_option('-j', '--jobs', type='int', default=cpu_count, + parser.add_argument('-i', '--imply', action='store_true', default=False, + help='find options which imply others') + parser.add_argument('-I', '--imply-flags', type=str, default='', + help="control the -i option ('help' for help") + parser.add_argument('-j', '--jobs', type=int, default=cpu_count, help='the number of jobs to run simultaneously') - parser.add_option('-r', '--git-ref', type='string', + parser.add_argument('-n', '--dry-run', action='store_true', default=False, + help='perform a trial run (show log with no changes)') + parser.add_argument('-r', '--git-ref', type=str, help='the git ref to clone for building the autoconf.mk') - parser.add_option('-y', '--yes', action='store_true', default=False, + parser.add_argument('-s', '--force-sync', action='store_true', default=False, + help='force sync by savedefconfig') + parser.add_argument('-S', '--spl', action='store_true', default=False, + help='parse config options defined for SPL build') + parser.add_argument('-t', '--test', action='store_true', 
default=False, + help='run unit tests') + parser.add_argument('-y', '--yes', action='store_true', default=False, help="respond 'yes' to any prompts") - parser.add_option('-v', '--verbose', action='store_true', default=False, + parser.add_argument('-v', '--verbose', action='store_true', default=False, help='show any build errors as boards are built') - parser.usage += ' CONFIG ...' + parser.add_argument('configs', nargs='*') - (options, configs) = parser.parse_args() + args = parser.parse_args() + configs = args.configs - if len(configs) == 0 and not any((options.force_sync, options.build_db, - options.imply)): + if args.test: + sys.argv = [sys.argv[0]] + fail, count = doctest.testmod() + if fail: + return 1 + unittest.main() + + if not any((len(configs), args.force_sync, args.build_db, args.imply, + args.find)): parser.print_usage() sys.exit(1) # prefix the option name with CONFIG_ if missing - configs = [ config if config.startswith('CONFIG_') else 'CONFIG_' + config - for config in configs ] + configs = [prefix_config(cfg) for cfg in configs] check_top_directory() - if options.imply: + if args.imply: imply_flags = 0 - if options.imply_flags == 'all': + if args.imply_flags == 'all': imply_flags = -1 - elif options.imply_flags: - for flag in options.imply_flags.split(','): + elif args.imply_flags: + for flag in args.imply_flags.split(','): bad = flag not in IMPLY_FLAGS if bad: print("Invalid flag '%s'" % flag) @@ -1627,8 +1761,11 @@ def main(): sys.exit(1) imply_flags |= IMPLY_FLAGS[flag][0] - do_imply_config(configs, options.add_imply, imply_flags, - options.skip_added) + do_imply_config(configs, args.add_imply, imply_flags, args.skip_added) + return + + if args.find: + do_find_config(configs) return config_db = {} @@ -1637,22 +1774,22 @@ def main(): t.setDaemon(True) t.start() - if not options.cleanup_headers_only: + if not args.cleanup_headers_only: check_clean_directory() bsettings.Setup('') toolchains = toolchain.Toolchains() toolchains.GetSettings() toolchains.Scan(verbose=False) - move_config(toolchains, configs, options, db_queue) + move_config(toolchains, configs, args, db_queue) db_queue.join() if configs: - cleanup_headers(configs, options) - cleanup_extra_options(configs, options) - cleanup_whitelist(configs, options) - cleanup_readme(configs, options) + cleanup_headers(configs, args) + cleanup_extra_options(configs, args) + cleanup_whitelist(configs, args) + cleanup_readme(configs, args) - if options.commit: + if args.commit: subprocess.call(['git', 'add', '-u']) if configs: msg = 'Convert %s %sto Kconfig' % (configs[0], @@ -1664,8 +1801,8 @@ def main(): msg += '\n\nRsync all defconfig files using moveconfig.py' subprocess.call(['git', 'commit', '-s', '-m', msg]) - if options.build_db: - with open(CONFIG_DATABASE, 'w') as fd: + if args.build_db: + with open(CONFIG_DATABASE, 'w', encoding='utf-8') as fd: for defconfig, configs in config_db.items(): fd.write('%s\n' % defconfig) for config in sorted(configs.keys()): @@ -1673,4 +1810,4 @@ def main(): fd.write('\n') if __name__ == '__main__': - main() + sys.exit(main()) |
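
Note on the database format: the new read_database() helper expects the record layout that --build-db writes into moveconfig.db, namely a bare defconfig name, indented CONFIG=value lines, and a blank separator line. The following is a minimal standalone sketch of that parse loop under that assumption; the sample contents are invented for illustration and are not taken from a real board.

    import collections
    import io

    # Invented sample in the moveconfig.db layout described above.
    SAMPLE = (
        'snow_defconfig\n'
        ' CONFIG_BOOTDELAY=2\n'
        ' CONFIG_SYS_LOAD_ADDR=0x43e00000\n'
        '\n'
    )

    config_db = {}                               # defconfig -> {CONFIG: value}
    defconfig_db = collections.defaultdict(set)  # CONFIG -> set of defconfigs
    configs = {}
    for line in io.StringIO(SAMPLE):
        line = line.rstrip()
        if not line:                  # blank line ends the current defconfig
            config_db[defconfig] = configs
            configs = {}
        elif line[0] == ' ':          # indented line is a CONFIG=value pair
            config, value = line.strip().split('=', 1)
            configs[config] = value
            defconfig_db[config].add(defconfig)
        else:                         # anything else starts a new defconfig
            defconfig = line

    print(config_db['snow_defconfig']['CONFIG_BOOTDELAY'])   # prints: 2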
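
Note on the new -f/--find path: do_find_config() starts from the full set of defconfigs and narrows it one CONFIG at a time, where a leading '~' means the option must be absent. A small self-contained sketch of that set filtering, using made-up board data rather than the real database:

    # Hypothetical sample data; the real script builds this from moveconfig.db.
    config_db = {
        'snow_defconfig': {'CONFIG_CMD_FOO': 'y', 'CONFIG_BAR': '0x100'},
        'peach_pi_defconfig': {'CONFIG_BAR': '0x200'},
    }

    def find_boards(config_db, config_list):
        """Keep only defconfigs that match every requested option."""
        out = set(config_db)
        for item in config_list:
            want = not item.startswith('~')   # '~CONFIG_X' means X must be unset
            cfg = item.lstrip('~')
            out = {defc for defc in out if (cfg in config_db[defc]) == want}
        return out

    print(find_boards(config_db, ['CONFIG_CMD_FOO']))    # {'snow_defconfig'}
    print(find_boards(config_db, ['~CONFIG_CMD_FOO']))   # {'peach_pi_defconfig'}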
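
Note on the new -t/--test option: it runs the module's doctests (such as those added to prefix_config()) through doctest.testmod() before handing over to unittest. A self-contained sketch of that pattern, with prefix_config reproduced from the diff; the exit-code handling here is an assumption for the standalone example:

    import doctest
    import sys

    def prefix_config(cfg):
        """Prefix a config with CONFIG_ if needed, preserving a leading '~'.

        >>> prefix_config('FRED')
        'CONFIG_FRED'
        >>> prefix_config('~FRED')
        '~CONFIG_FRED'
        """
        op = ''
        if cfg[0] == '~':
            op = cfg[0]
            cfg = cfg[1:]
        if not cfg.startswith('CONFIG_'):
            cfg = 'CONFIG_' + cfg
        return op + cfg

    if __name__ == '__main__':
        failures, _ = doctest.testmod()
        sys.exit(1 if failures else 0)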