Diffstat (limited to 'tools/net')

 -rw-r--r--  tools/net/sunrpc/xdrgen/templates/C/typedef/decoder/fixed_length_opaque.j2 |   2
 -rw-r--r--  tools/net/ynl/Makefile.deps |   1
 -rw-r--r--  tools/net/ynl/lib/ynl-priv.h |  14
 -rw-r--r--  tools/net/ynl/lib/ynl.c |   6
 -rwxr-xr-x  tools/net/ynl/pyynl/cli.py |   2
 -rwxr-xr-x  tools/net/ynl/pyynl/ethtool.py |  17
 -rw-r--r--  tools/net/ynl/pyynl/lib/__init__.py |   4
 -rw-r--r--  tools/net/ynl/pyynl/lib/doc_generator.py | 402
 -rw-r--r--  tools/net/ynl/pyynl/lib/nlspec.py |   2
 -rw-r--r--  tools/net/ynl/pyynl/lib/ynl.py |  96
 -rwxr-xr-x  tools/net/ynl/pyynl/ynl_gen_c.py | 198
 -rwxr-xr-x  tools/net/ynl/pyynl/ynl_gen_rst.py | 384

12 files changed, 638 insertions, 490 deletions
diff --git a/tools/net/sunrpc/xdrgen/templates/C/typedef/decoder/fixed_length_opaque.j2 b/tools/net/sunrpc/xdrgen/templates/C/typedef/decoder/fixed_length_opaque.j2 index 8b4ff08c49e5..bdc7bd24ffb1 100644 --- a/tools/net/sunrpc/xdrgen/templates/C/typedef/decoder/fixed_length_opaque.j2 +++ b/tools/net/sunrpc/xdrgen/templates/C/typedef/decoder/fixed_length_opaque.j2 @@ -13,5 +13,5 @@ xdrgen_decode_{{ name }}(struct xdr_stream *xdr, {{ classifier }}{{ name }} *ptr {% if annotate %} /* (fixed-length opaque) */ {% endif %} - return xdr_stream_decode_opaque_fixed(xdr, ptr, {{ size }}) >= 0; + return xdr_stream_decode_opaque_fixed(xdr, ptr, {{ size }}) == 0; }; diff --git a/tools/net/ynl/Makefile.deps b/tools/net/ynl/Makefile.deps index 90686e241157..865fd2e8519e 100644 --- a/tools/net/ynl/Makefile.deps +++ b/tools/net/ynl/Makefile.deps @@ -31,6 +31,7 @@ CFLAGS_ovpn:=$(call get_hdr_inc,_LINUX_OVPN_H,ovpn.h) CFLAGS_ovs_datapath:=$(call get_hdr_inc,__LINUX_OPENVSWITCH_H,openvswitch.h) CFLAGS_ovs_flow:=$(call get_hdr_inc,__LINUX_OPENVSWITCH_H,openvswitch.h) CFLAGS_ovs_vport:=$(call get_hdr_inc,__LINUX_OPENVSWITCH_H,openvswitch.h) +CFLAGS_psp:=$(call get_hdr_inc,_LINUX_PSP_H,psp.h) CFLAGS_rt-addr:=$(call get_hdr_inc,__LINUX_RTNETLINK_H,rtnetlink.h) \ $(call get_hdr_inc,__LINUX_IF_ADDR_H,if_addr.h) CFLAGS_rt-link:=$(call get_hdr_inc,__LINUX_RTNETLINK_H,rtnetlink.h) \ diff --git a/tools/net/ynl/lib/ynl-priv.h b/tools/net/ynl/lib/ynl-priv.h index 824777d7e05e..ced7dce44efb 100644 --- a/tools/net/ynl/lib/ynl-priv.h +++ b/tools/net/ynl/lib/ynl-priv.h @@ -106,7 +106,6 @@ ynl_gemsg_start_req(struct ynl_sock *ys, __u32 id, __u8 cmd, __u8 version); struct nlmsghdr * ynl_gemsg_start_dump(struct ynl_sock *ys, __u32 id, __u8 cmd, __u8 version); -int ynl_attr_validate(struct ynl_parse_arg *yarg, const struct nlattr *attr); int ynl_submsg_failed(struct ynl_parse_arg *yarg, const char *field_name, const char *sel_name); @@ -314,7 +313,7 @@ ynl_attr_put_str(struct nlmsghdr *nlh, unsigned int attr_type, const char *str) struct nlattr *attr; size_t len; - len = strlen(str); + len = strlen(str) + 1; if (__ynl_attr_put_overflow(nlh, len)) return; @@ -322,7 +321,7 @@ ynl_attr_put_str(struct nlmsghdr *nlh, unsigned int attr_type, const char *str) attr->nla_type = attr_type; strcpy((char *)ynl_attr_data(attr), str); - attr->nla_len = NLA_HDRLEN + NLA_ALIGN(len); + attr->nla_len = NLA_HDRLEN + len; nlh->nlmsg_len += NLMSG_ALIGN(attr->nla_len); } @@ -467,4 +466,13 @@ ynl_attr_put_sint(struct nlmsghdr *nlh, __u16 type, __s64 data) else ynl_attr_put_s64(nlh, type, data); } + +int __ynl_attr_validate(struct ynl_parse_arg *yarg, const struct nlattr *attr, + unsigned int type); + +static inline int ynl_attr_validate(struct ynl_parse_arg *yarg, + const struct nlattr *attr) +{ + return __ynl_attr_validate(yarg, attr, ynl_attr_type(attr)); +} #endif diff --git a/tools/net/ynl/lib/ynl.c b/tools/net/ynl/lib/ynl.c index 2a169c3c0797..2bcd781111d7 100644 --- a/tools/net/ynl/lib/ynl.c +++ b/tools/net/ynl/lib/ynl.c @@ -360,15 +360,15 @@ static int ynl_cb_done(const struct nlmsghdr *nlh, struct ynl_parse_arg *yarg) /* Attribute validation */ -int ynl_attr_validate(struct ynl_parse_arg *yarg, const struct nlattr *attr) +int __ynl_attr_validate(struct ynl_parse_arg *yarg, const struct nlattr *attr, + unsigned int type) { const struct ynl_policy_attr *policy; - unsigned int type, len; unsigned char *data; + unsigned int len; data = ynl_attr_data(attr); len = ynl_attr_data_len(attr); - type = ynl_attr_type(attr); if (type > 
yarg->rsp_policy->max_attr) { yerr(yarg->ys, YNL_ERROR_INTERNAL, "Internal error, validating unknown attribute"); diff --git a/tools/net/ynl/pyynl/cli.py b/tools/net/ynl/pyynl/cli.py index 33ccc5c1843b..8c192e900bd3 100755 --- a/tools/net/ynl/pyynl/cli.py +++ b/tools/net/ynl/pyynl/cli.py @@ -113,6 +113,8 @@ def main(): spec = f"{spec_dir()}/{args.family}.yaml" if args.schema is None and spec.startswith(sys_schema_dir): args.schema = '' # disable schema validation when installed + if args.process_unknown is None: + args.process_unknown = True else: spec = args.spec if not os.path.isfile(spec): diff --git a/tools/net/ynl/pyynl/ethtool.py b/tools/net/ynl/pyynl/ethtool.py index cab6b576c876..fd0f6b8d54d1 100755 --- a/tools/net/ynl/pyynl/ethtool.py +++ b/tools/net/ynl/pyynl/ethtool.py @@ -2,7 +2,6 @@ # SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause import argparse -import json import pathlib import pprint import sys @@ -45,13 +44,16 @@ def print_field(reply, *desc): Pretty-print a set of fields from the reply. desc specifies the fields and the optional type (bool/yn). """ + if not reply: + return + if len(desc) == 0: return print_field(reply, *zip(reply.keys(), reply.keys())) for spec in desc: try: field, name, tp = spec - except: + except ValueError: field, name = spec tp = 'int' @@ -156,7 +158,6 @@ def main(): global args args = parser.parse_args() - script_abs_dir = os.path.dirname(os.path.abspath(sys.argv[0])) spec = os.path.join(spec_dir(), 'ethtool.yaml') schema = os.path.join(schema_dir(), 'genetlink-legacy.yaml') @@ -255,14 +256,14 @@ def main(): reply = dumpit(ynl, args, 'channels-get') print(f'Channel parameters for {args.device}:') - print(f'Pre-set maximums:') + print('Pre-set maximums:') print_field(reply, ('rx-max', 'RX'), ('tx-max', 'TX'), ('other-max', 'Other'), ('combined-max', 'Combined')) - print(f'Current hardware settings:') + print('Current hardware settings:') print_field(reply, ('rx-count', 'RX'), ('tx-count', 'TX'), @@ -276,14 +277,14 @@ def main(): print(f'Ring parameters for {args.device}:') - print(f'Pre-set maximums:') + print('Pre-set maximums:') print_field(reply, ('rx-max', 'RX'), ('rx-mini-max', 'RX Mini'), ('rx-jumbo-max', 'RX Jumbo'), ('tx-max', 'TX')) - print(f'Current hardware settings:') + print('Current hardware settings:') print_field(reply, ('rx', 'RX'), ('rx-mini', 'RX Mini'), @@ -298,7 +299,7 @@ def main(): return if args.statistics: - print(f'NIC statistics:') + print('NIC statistics:') # TODO: pass id? 
strset = dumpit(ynl, args, 'strset-get') diff --git a/tools/net/ynl/pyynl/lib/__init__.py b/tools/net/ynl/pyynl/lib/__init__.py index 71518b9842ee..ec9ea00071be 100644 --- a/tools/net/ynl/pyynl/lib/__init__.py +++ b/tools/net/ynl/pyynl/lib/__init__.py @@ -4,6 +4,8 @@ from .nlspec import SpecAttr, SpecAttrSet, SpecEnumEntry, SpecEnumSet, \ SpecFamily, SpecOperation, SpecSubMessage, SpecSubMessageFormat from .ynl import YnlFamily, Netlink, NlError +from .doc_generator import YnlDocGenerator + __all__ = ["SpecAttr", "SpecAttrSet", "SpecEnumEntry", "SpecEnumSet", "SpecFamily", "SpecOperation", "SpecSubMessage", "SpecSubMessageFormat", - "YnlFamily", "Netlink", "NlError"] + "YnlFamily", "Netlink", "NlError", "YnlDocGenerator"] diff --git a/tools/net/ynl/pyynl/lib/doc_generator.py b/tools/net/ynl/pyynl/lib/doc_generator.py new file mode 100644 index 000000000000..3a16b8eb01ca --- /dev/null +++ b/tools/net/ynl/pyynl/lib/doc_generator.py @@ -0,0 +1,402 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: GPL-2.0 +# -*- coding: utf-8; mode: python -*- + +""" + Class to auto generate the documentation for Netlink specifications. + + :copyright: Copyright (C) 2023 Breno Leitao <leitao@debian.org> + :license: GPL Version 2, June 1991 see linux/COPYING for details. + + This class performs extensive parsing to the Linux kernel's netlink YAML + spec files, in an effort to avoid needing to heavily mark up the original + YAML file. + + This code is split in two classes: + 1) RST formatters: Use to convert a string to a RST output + 2) YAML Netlink (YNL) doc generator: Generate docs from YAML data +""" + +from typing import Any, Dict, List +import yaml + +LINE_STR = '__lineno__' + +class NumberedSafeLoader(yaml.SafeLoader): # pylint: disable=R0901 + """Override the SafeLoader class to add line number to parsed data""" + + def construct_mapping(self, node, *args, **kwargs): + mapping = super().construct_mapping(node, *args, **kwargs) + mapping[LINE_STR] = node.start_mark.line + + return mapping + +class RstFormatters: + """RST Formatters""" + + SPACE_PER_LEVEL = 4 + + @staticmethod + def headroom(level: int) -> str: + """Return space to format""" + return " " * (level * RstFormatters.SPACE_PER_LEVEL) + + @staticmethod + def bold(text: str) -> str: + """Format bold text""" + return f"**{text}**" + + @staticmethod + def inline(text: str) -> str: + """Format inline text""" + return f"``{text}``" + + @staticmethod + def sanitize(text: str) -> str: + """Remove newlines and multiple spaces""" + # This is useful for some fields that are spread across multiple lines + return str(text).replace("\n", " ").strip() + + def rst_fields(self, key: str, value: str, level: int = 0) -> str: + """Return a RST formatted field""" + return self.headroom(level) + f":{key}: {value}" + + def rst_definition(self, key: str, value: Any, level: int = 0) -> str: + """Format a single rst definition""" + return self.headroom(level) + key + "\n" + self.headroom(level + 1) + str(value) + + def rst_paragraph(self, paragraph: str, level: int = 0) -> str: + """Return a formatted paragraph""" + return self.headroom(level) + paragraph + + def rst_bullet(self, item: str, level: int = 0) -> str: + """Return a formatted a bullet""" + return self.headroom(level) + f"- {item}" + + @staticmethod + def rst_subsection(title: str) -> str: + """Add a sub-section to the document""" + return f"{title}\n" + "-" * len(title) + + @staticmethod + def rst_subsubsection(title: str) -> str: + """Add a sub-sub-section to the document""" + return f"{title}\n" + "~" 
* len(title) + + @staticmethod + def rst_section(namespace: str, prefix: str, title: str) -> str: + """Add a section to the document""" + return f".. _{namespace}-{prefix}-{title}:\n\n{title}\n" + "=" * len(title) + + @staticmethod + def rst_subtitle(title: str) -> str: + """Add a subtitle to the document""" + return "\n" + "-" * len(title) + f"\n{title}\n" + "-" * len(title) + "\n\n" + + @staticmethod + def rst_title(title: str) -> str: + """Add a title to the document""" + return "=" * len(title) + f"\n{title}\n" + "=" * len(title) + "\n\n" + + def rst_list_inline(self, list_: List[str], level: int = 0) -> str: + """Format a list using inlines""" + return self.headroom(level) + "[" + ", ".join(self.inline(i) for i in list_) + "]" + + @staticmethod + def rst_ref(namespace: str, prefix: str, name: str) -> str: + """Add a hyperlink to the document""" + mappings = {'enum': 'definition', + 'fixed-header': 'definition', + 'nested-attributes': 'attribute-set', + 'struct': 'definition'} + if prefix in mappings: + prefix = mappings[prefix] + return f":ref:`{namespace}-{prefix}-{name}`" + + def rst_header(self) -> str: + """The headers for all the auto generated RST files""" + lines = [] + + lines.append(self.rst_paragraph(".. SPDX-License-Identifier: GPL-2.0")) + lines.append(self.rst_paragraph(".. NOTE: This document was auto-generated.\n\n")) + + return "\n".join(lines) + + @staticmethod + def rst_toctree(maxdepth: int = 2) -> str: + """Generate a toctree RST primitive""" + lines = [] + + lines.append(".. toctree::") + lines.append(f" :maxdepth: {maxdepth}\n\n") + + return "\n".join(lines) + + @staticmethod + def rst_label(title: str) -> str: + """Return a formatted label""" + return f".. _{title}:\n\n" + + @staticmethod + def rst_lineno(lineno: int) -> str: + """Return a lineno comment""" + return f".. 
LINENO {lineno}\n" + +class YnlDocGenerator: + """YAML Netlink specs Parser""" + + fmt = RstFormatters() + + def parse_mcast_group(self, mcast_group: List[Dict[str, Any]]) -> str: + """Parse 'multicast' group list and return a formatted string""" + lines = [] + for group in mcast_group: + lines.append(self.fmt.rst_bullet(group["name"])) + + return "\n".join(lines) + + def parse_do(self, do_dict: Dict[str, Any], level: int = 0) -> str: + """Parse 'do' section and return a formatted string""" + lines = [] + if LINE_STR in do_dict: + lines.append(self.fmt.rst_lineno(do_dict[LINE_STR])) + + for key in do_dict.keys(): + if key == LINE_STR: + continue + lines.append(self.fmt.rst_paragraph(self.fmt.bold(key), level + 1)) + if key in ['request', 'reply']: + lines.append(self.parse_do_attributes(do_dict[key], level + 1) + "\n") + else: + lines.append(self.fmt.headroom(level + 2) + do_dict[key] + "\n") + + return "\n".join(lines) + + def parse_do_attributes(self, attrs: Dict[str, Any], level: int = 0) -> str: + """Parse 'attributes' section""" + if "attributes" not in attrs: + return "" + lines = [self.fmt.rst_fields("attributes", + self.fmt.rst_list_inline(attrs["attributes"]), + level + 1)] + + return "\n".join(lines) + + def parse_operations(self, operations: List[Dict[str, Any]], namespace: str) -> str: + """Parse operations block""" + preprocessed = ["name", "doc", "title", "do", "dump", "flags"] + linkable = ["fixed-header", "attribute-set"] + lines = [] + + for operation in operations: + if LINE_STR in operation: + lines.append(self.fmt.rst_lineno(operation[LINE_STR])) + + lines.append(self.fmt.rst_section(namespace, 'operation', + operation["name"])) + lines.append(self.fmt.rst_paragraph(operation["doc"]) + "\n") + + for key in operation.keys(): + if key == LINE_STR: + continue + + if key in preprocessed: + # Skip the special fields + continue + value = operation[key] + if key in linkable: + value = self.fmt.rst_ref(namespace, key, value) + lines.append(self.fmt.rst_fields(key, value, 0)) + if 'flags' in operation: + lines.append(self.fmt.rst_fields('flags', + self.fmt.rst_list_inline(operation['flags']))) + + if "do" in operation: + lines.append(self.fmt.rst_paragraph(":do:", 0)) + lines.append(self.parse_do(operation["do"], 0)) + if "dump" in operation: + lines.append(self.fmt.rst_paragraph(":dump:", 0)) + lines.append(self.parse_do(operation["dump"], 0)) + + # New line after fields + lines.append("\n") + + return "\n".join(lines) + + def parse_entries(self, entries: List[Dict[str, Any]], level: int) -> str: + """Parse a list of entries""" + ignored = ["pad"] + lines = [] + for entry in entries: + if isinstance(entry, dict): + # entries could be a list or a dictionary + field_name = entry.get("name", "") + if field_name in ignored: + continue + type_ = entry.get("type") + if type_: + field_name += f" ({self.fmt.inline(type_)})" + lines.append( + self.fmt.rst_fields(field_name, + self.fmt.sanitize(entry.get("doc", "")), + level) + ) + elif isinstance(entry, list): + lines.append(self.fmt.rst_list_inline(entry, level)) + else: + lines.append(self.fmt.rst_bullet(self.fmt.inline(self.fmt.sanitize(entry)), + level)) + + lines.append("\n") + return "\n".join(lines) + + def parse_definitions(self, defs: Dict[str, Any], namespace: str) -> str: + """Parse definitions section""" + preprocessed = ["name", "entries", "members"] + ignored = ["render-max"] # This is not printed + lines = [] + + for definition in defs: + if LINE_STR in definition: + lines.append(self.fmt.rst_lineno(definition[LINE_STR])) 
+ + lines.append(self.fmt.rst_section(namespace, 'definition', definition["name"])) + for k in definition.keys(): + if k == LINE_STR: + continue + if k in preprocessed + ignored: + continue + lines.append(self.fmt.rst_fields(k, self.fmt.sanitize(definition[k]), 0)) + + # Field list needs to finish with a new line + lines.append("\n") + if "entries" in definition: + lines.append(self.fmt.rst_paragraph(":entries:", 0)) + lines.append(self.parse_entries(definition["entries"], 1)) + if "members" in definition: + lines.append(self.fmt.rst_paragraph(":members:", 0)) + lines.append(self.parse_entries(definition["members"], 1)) + + return "\n".join(lines) + + def parse_attr_sets(self, entries: List[Dict[str, Any]], namespace: str) -> str: + """Parse attribute from attribute-set""" + preprocessed = ["name", "type"] + linkable = ["enum", "nested-attributes", "struct", "sub-message"] + ignored = ["checks"] + lines = [] + + for entry in entries: + lines.append(self.fmt.rst_section(namespace, 'attribute-set', + entry["name"])) + + if "doc" in entry: + lines.append(self.fmt.rst_paragraph(entry["doc"], 0) + "\n") + + for attr in entry["attributes"]: + if LINE_STR in attr: + lines.append(self.fmt.rst_lineno(attr[LINE_STR])) + + type_ = attr.get("type") + attr_line = attr["name"] + if type_: + # Add the attribute type in the same line + attr_line += f" ({self.fmt.inline(type_)})" + + lines.append(self.fmt.rst_subsubsection(attr_line)) + + for k in attr.keys(): + if k == LINE_STR: + continue + if k in preprocessed + ignored: + continue + if k in linkable: + value = self.fmt.rst_ref(namespace, k, attr[k]) + else: + value = self.fmt.sanitize(attr[k]) + lines.append(self.fmt.rst_fields(k, value, 0)) + lines.append("\n") + + return "\n".join(lines) + + def parse_sub_messages(self, entries: List[Dict[str, Any]], namespace: str) -> str: + """Parse sub-message definitions""" + lines = [] + + for entry in entries: + lines.append(self.fmt.rst_section(namespace, 'sub-message', + entry["name"])) + for fmt in entry["formats"]: + value = fmt["value"] + + lines.append(self.fmt.rst_bullet(self.fmt.bold(value))) + for attr in ['fixed-header', 'attribute-set']: + if attr in fmt: + lines.append(self.fmt.rst_fields(attr, + self.fmt.rst_ref(namespace, + attr, + fmt[attr]), + 1)) + lines.append("\n") + + return "\n".join(lines) + + def parse_yaml(self, obj: Dict[str, Any]) -> str: + """Format the whole YAML into a RST string""" + lines = [] + + # Main header + lineno = obj.get('__lineno__', 0) + lines.append(self.fmt.rst_lineno(lineno)) + + family = obj['name'] + + lines.append(self.fmt.rst_header()) + lines.append(self.fmt.rst_label("netlink-" + family)) + + title = f"Family ``{family}`` netlink specification" + lines.append(self.fmt.rst_title(title)) + lines.append(self.fmt.rst_paragraph(".. 
contents:: :depth: 3\n")) + + if "doc" in obj: + lines.append(self.fmt.rst_subtitle("Summary")) + lines.append(self.fmt.rst_paragraph(obj["doc"], 0)) + + # Operations + if "operations" in obj: + lines.append(self.fmt.rst_subtitle("Operations")) + lines.append(self.parse_operations(obj["operations"]["list"], + family)) + + # Multicast groups + if "mcast-groups" in obj: + lines.append(self.fmt.rst_subtitle("Multicast groups")) + lines.append(self.parse_mcast_group(obj["mcast-groups"]["list"])) + + # Definitions + if "definitions" in obj: + lines.append(self.fmt.rst_subtitle("Definitions")) + lines.append(self.parse_definitions(obj["definitions"], family)) + + # Attributes set + if "attribute-sets" in obj: + lines.append(self.fmt.rst_subtitle("Attribute sets")) + lines.append(self.parse_attr_sets(obj["attribute-sets"], family)) + + # Sub-messages + if "sub-messages" in obj: + lines.append(self.fmt.rst_subtitle("Sub-messages")) + lines.append(self.parse_sub_messages(obj["sub-messages"], family)) + + return "\n".join(lines) + + # Main functions + # ============== + + def parse_yaml_file(self, filename: str) -> str: + """Transform the YAML specified by filename into an RST-formatted string""" + with open(filename, "r", encoding="utf-8") as spec_file: + numbered_yaml = yaml.load(spec_file, Loader=NumberedSafeLoader) + content = self.parse_yaml(numbered_yaml) + + return content diff --git a/tools/net/ynl/pyynl/lib/nlspec.py b/tools/net/ynl/pyynl/lib/nlspec.py index 314ec8007496..85c17fe01e35 100644 --- a/tools/net/ynl/pyynl/lib/nlspec.py +++ b/tools/net/ynl/pyynl/lib/nlspec.py @@ -501,7 +501,7 @@ class SpecFamily(SpecElement): return SpecStruct(self, elem) def new_sub_message(self, elem): - return SpecSubMessage(self, elem); + return SpecSubMessage(self, elem) def new_operation(self, elem, req_val, rsp_val): return SpecOperation(self, elem, req_val, rsp_val) diff --git a/tools/net/ynl/pyynl/lib/ynl.py b/tools/net/ynl/pyynl/lib/ynl.py index 55b59f6c79b8..62383c70ebb9 100644 --- a/tools/net/ynl/pyynl/lib/ynl.py +++ b/tools/net/ynl/pyynl/lib/ynl.py @@ -9,7 +9,6 @@ import socket import struct from struct import Struct import sys -import yaml import ipaddress import uuid import queue @@ -231,14 +230,7 @@ class NlMsg: self.extack['unknown'].append(extack) if attr_space: - # We don't have the ability to parse nests yet, so only do global - if 'miss-type' in self.extack and 'miss-nest' not in self.extack: - miss_type = self.extack['miss-type'] - if miss_type in attr_space.attrs_by_val: - spec = attr_space.attrs_by_val[miss_type] - self.extack['miss-type'] = spec['name'] - if 'doc' in spec: - self.extack['miss-type-doc'] = spec['doc'] + self.annotate_extack(attr_space) def _decode_policy(self, raw): policy = {} @@ -264,6 +256,18 @@ class NlMsg: policy['mask'] = attr.as_scalar('u64') return policy + def annotate_extack(self, attr_space): + """ Make extack more human friendly with attribute information """ + + # We don't have the ability to parse nests yet, so only do global + if 'miss-type' in self.extack and 'miss-nest' not in self.extack: + miss_type = self.extack['miss-type'] + if miss_type in attr_space.attrs_by_val: + spec = attr_space.attrs_by_val[miss_type] + self.extack['miss-type'] = spec['name'] + if 'doc' in spec: + self.extack['miss-type-doc'] = spec['doc'] + def cmd(self): return self.nl_type @@ -277,12 +281,12 @@ class NlMsg: class NlMsgs: - def __init__(self, data, attr_space=None): + def __init__(self, data): self.msgs = [] offset = 0 while offset < len(data): - msg = NlMsg(data, offset, 
attr_space=attr_space) + msg = NlMsg(data, offset) offset += msg.nl_len self.msgs.append(msg) @@ -557,11 +561,13 @@ class YnlFamily(SpecFamily): if attr["type"] == 'nest': nl_type |= Netlink.NLA_F_NESTED - attr_payload = b'' sub_space = attr['nested-attributes'] - sub_attrs = SpaceAttrs(self.attr_sets[sub_space], value, search_attrs) - for subname, subvalue in value.items(): - attr_payload += self._add_attr(sub_space, subname, subvalue, sub_attrs) + attr_payload = self._add_nest_attrs(value, sub_space, search_attrs) + elif attr['type'] == 'indexed-array' and attr['sub-type'] == 'nest': + nl_type |= Netlink.NLA_F_NESTED + sub_space = attr['nested-attributes'] + attr_payload = self._encode_indexed_array(value, sub_space, + search_attrs) elif attr["type"] == 'flag': if not value: # If value is absent or false then skip attribute creation. @@ -570,7 +576,9 @@ class YnlFamily(SpecFamily): elif attr["type"] == 'string': attr_payload = str(value).encode('ascii') + b'\x00' elif attr["type"] == 'binary': - if isinstance(value, bytes): + if value is None: + attr_payload = b'' + elif isinstance(value, bytes): attr_payload = value elif isinstance(value, str): if attr.display_hint: @@ -579,6 +587,9 @@ class YnlFamily(SpecFamily): attr_payload = bytes.fromhex(value) elif isinstance(value, dict) and attr.struct_name: attr_payload = self._encode_struct(attr.struct_name, value) + elif isinstance(value, list) and attr.sub_type in NlAttr.type_formats: + format = NlAttr.get_format(attr.sub_type) + attr_payload = b''.join([format.pack(x) for x in value]) else: raise Exception(f'Unknown type for binary attribute, value: {value}') elif attr['type'] in NlAttr.type_formats or attr.is_auto_scalar: @@ -610,9 +621,38 @@ class YnlFamily(SpecFamily): else: raise Exception(f'Unknown type at {space} {name} {value} {attr["type"]}') + return self._add_attr_raw(nl_type, attr_payload) + + def _add_attr_raw(self, nl_type, attr_payload): pad = b'\x00' * ((4 - len(attr_payload) % 4) % 4) return struct.pack('HH', len(attr_payload) + 4, nl_type) + attr_payload + pad + def _add_nest_attrs(self, value, sub_space, search_attrs): + sub_attrs = SpaceAttrs(self.attr_sets[sub_space], value, search_attrs) + attr_payload = b'' + for subname, subvalue in value.items(): + attr_payload += self._add_attr(sub_space, subname, subvalue, + sub_attrs) + return attr_payload + + def _encode_indexed_array(self, vals, sub_space, search_attrs): + attr_payload = b'' + for i, val in enumerate(vals): + idx = i | Netlink.NLA_F_NESTED + val_payload = self._add_nest_attrs(val, sub_space, search_attrs) + attr_payload += self._add_attr_raw(idx, val_payload) + return attr_payload + + def _get_enum_or_unknown(self, enum, raw): + try: + name = enum.entries_by_val[raw].name + except KeyError as error: + if self.process_unknown: + name = f"Unknown({raw})" + else: + raise error + return name + def _decode_enum(self, raw, attr_spec): enum = self.consts[attr_spec['enum']] if enum.type == 'flags' or attr_spec.get('enum-as-flags', False): @@ -620,11 +660,11 @@ class YnlFamily(SpecFamily): value = set() while raw: if raw & 1: - value.add(enum.entries_by_val[i].name) + value.add(self._get_enum_or_unknown(enum, i)) raw >>= 1 i += 1 else: - value = enum.entries_by_val[raw].name + value = self._get_enum_or_unknown(enum, raw) return value def _decode_binary(self, attr, attr_spec): @@ -686,7 +726,7 @@ class YnlFamily(SpecFamily): return attr.as_bin() def _rsp_add(self, rsp, name, is_multi, decoded): - if is_multi == None: + if is_multi is None: if name in rsp and 
type(rsp[name]) is not list: rsp[name] = [rsp[name]] is_multi = True @@ -719,14 +759,14 @@ class YnlFamily(SpecFamily): decoded = {} offset = 0 if msg_format.fixed_header: - decoded.update(self._decode_struct(attr.raw, msg_format.fixed_header)); + decoded.update(self._decode_struct(attr.raw, msg_format.fixed_header)) offset = self._struct_size(msg_format.fixed_header) if msg_format.attr_set: if msg_format.attr_set in self.attr_sets: subdict = self._decode(NlAttrs(attr.raw, offset), msg_format.attr_set) decoded.update(subdict) else: - raise Exception(f"Unknown attribute-set '{attr_space}' when decoding '{attr_spec.name}'") + raise Exception(f"Unknown attribute-set '{msg_format.attr_set}' when decoding '{attr_spec.name}'") return decoded def _decode(self, attrs, space, outer_attrs = None): @@ -757,6 +797,8 @@ class YnlFamily(SpecFamily): decoded = True elif attr_spec.is_auto_scalar: decoded = attr.as_auto_scalar(attr_spec['type'], attr_spec.byte_order) + if 'enum' in attr_spec: + decoded = self._decode_enum(decoded, attr_spec) elif attr_spec["type"] in NlAttr.type_formats: decoded = attr.as_scalar(attr_spec['type'], attr_spec.byte_order) if 'enum' in attr_spec: @@ -914,7 +956,7 @@ class YnlFamily(SpecFamily): formatted = hex(raw) else: formatted = bytes.hex(raw, ' ') - elif display_hint in [ 'ipv4', 'ipv6' ]: + elif display_hint in [ 'ipv4', 'ipv6', 'ipv4-or-v6' ]: formatted = format(ipaddress.ip_address(raw)) elif display_hint == 'uuid': formatted = str(uuid.UUID(bytes=raw)) @@ -923,12 +965,17 @@ class YnlFamily(SpecFamily): return formatted def _from_string(self, string, attr_spec): - if attr_spec.display_hint in ['ipv4', 'ipv6']: + if attr_spec.display_hint in ['ipv4', 'ipv6', 'ipv4-or-v6']: ip = ipaddress.ip_address(string) if attr_spec['type'] == 'binary': raw = ip.packed else: raw = int(ip) + elif attr_spec.display_hint == 'hex': + if attr_spec['type'] == 'binary': + raw = bytes.fromhex(string) + else: + raw = int(string, 16) else: raise Exception(f"Display hint '{attr_spec.display_hint}' not implemented" f" when parsing '{attr_spec['name']}'") @@ -1034,12 +1081,13 @@ class YnlFamily(SpecFamily): op_rsp = [] while not done: reply = self.sock.recv(self._recv_size) - nms = NlMsgs(reply, attr_space=op.attr_set) + nms = NlMsgs(reply) self._recv_dbg_print(reply, nms) for nl_msg in nms: if nl_msg.nl_seq in reqs_by_seq: (op, vals, req_msg, req_flags) = reqs_by_seq[nl_msg.nl_seq] if nl_msg.extack: + nl_msg.annotate_extack(op.attr_set) self._decode_extack(req_msg, op, nl_msg.extack, vals) else: op = None diff --git a/tools/net/ynl/pyynl/ynl_gen_c.py b/tools/net/ynl/pyynl/ynl_gen_c.py index 76032e01c2e7..aadeb3abcad8 100755 --- a/tools/net/ynl/pyynl/ynl_gen_c.py +++ b/tools/net/ynl/pyynl/ynl_gen_c.py @@ -2,7 +2,6 @@ # SPDX-License-Identifier: ((GPL-2.0 WITH Linux-syscall-note) OR BSD-3-Clause) import argparse -import collections import filecmp import pathlib import os @@ -14,7 +13,7 @@ import yaml sys.path.append(pathlib.Path(__file__).resolve().parent.as_posix()) from lib import SpecFamily, SpecAttrSet, SpecAttr, SpecOperation, SpecEnumSet, SpecEnumEntry -from lib import SpecSubMessage, SpecSubMessageFormat +from lib import SpecSubMessage def c_upper(name): @@ -243,7 +242,7 @@ class Type(SpecAttr): raise Exception(f"Attr get not implemented for class type {self.type}") def attr_get(self, ri, var, first): - lines, init_lines, local_vars = self._attr_get(ri, var) + lines, init_lines, _ = self._attr_get(ri, var) if type(lines) is str: lines = [lines] if type(init_lines) is str: @@ -251,10 +250,6 
@@ class Type(SpecAttr): kw = 'if' if first else 'else if' ri.cw.block_start(line=f"{kw} (type == {self.enum_name})") - if local_vars: - for local in local_vars: - ri.cw.p(local) - ri.cw.nl() if not self.is_multi_val(): ri.cw.p("if (ynl_attr_validate(yarg, attr))") @@ -275,9 +270,8 @@ class Type(SpecAttr): def _setter_lines(self, ri, member, presence): raise Exception(f"Setter not implemented for class type {self.type}") - def setter(self, ri, space, direction, deref=False, ref=None): + def setter(self, ri, space, direction, deref=False, ref=None, var="req"): ref = (ref if ref else []) + [self.c_name] - var = "req" member = f"{var}->{'.'.join(ref)}" local_vars = [] @@ -332,7 +326,7 @@ class TypeUnused(Type): def attr_get(self, ri, var, first): pass - def setter(self, ri, space, direction, deref=False, ref=None): + def setter(self, ri, space, direction, deref=False, ref=None, var=None): pass @@ -355,7 +349,7 @@ class TypePad(Type): def attr_policy(self, cw): pass - def setter(self, ri, space, direction, deref=False, ref=None): + def setter(self, ri, space, direction, deref=False, ref=None, var=None): pass @@ -399,7 +393,7 @@ class TypeScalar(Type): if 'enum' in self.attr: enum = self.family.consts[self.attr['enum']] low, high = enum.value_range() - if low == None and high == None: + if low is None and high is None: self.checks['sparse'] = True else: if 'min' not in self.checks: @@ -486,7 +480,7 @@ class TypeString(Type): ri.cw.p(f"char *{self.c_name};") def _attr_typol(self): - typol = f'.type = YNL_PT_NUL_STR, ' + typol = '.type = YNL_PT_NUL_STR, ' if self.is_selector: typol += '.is_selector = 1, ' return typol @@ -540,7 +534,7 @@ class TypeBinary(Type): ri.cw.p(f"void *{self.c_name};") def _attr_typol(self): - return f'.type = YNL_PT_BINARY,' + return '.type = YNL_PT_BINARY,' def _attr_policy(self, policy): if len(self.checks) == 0: @@ -557,7 +551,7 @@ class TypeBinary(Type): elif 'exact-len' in self.checks: mem = 'NLA_POLICY_EXACT_LEN(' + self.get_limit_str('exact-len') + ')' elif 'min-len' in self.checks: - mem = '{ .len = ' + self.get_limit_str('min-len') + ', }' + mem = 'NLA_POLICY_MIN_LEN(' + self.get_limit_str('min-len') + ')' elif 'max-len' in self.checks: mem = 'NLA_POLICY_MAX_LEN(' + self.get_limit_str('max-len') + ')' @@ -637,10 +631,10 @@ class TypeBitfield32(Type): return "struct nla_bitfield32" def _attr_typol(self): - return f'.type = YNL_PT_BITFIELD32, ' + return '.type = YNL_PT_BITFIELD32, ' def _attr_policy(self, policy): - if not 'enum' in self.attr: + if 'enum' not in self.attr: raise Exception('Enum required for bitfield32 attr') enum = self.family.consts[self.attr['enum']] mask = enum.get_mask(as_flags=True) @@ -695,13 +689,14 @@ class TypeNest(Type): f"parg.data = &{var}->{self.c_name};"] return get_lines, init_lines, None - def setter(self, ri, space, direction, deref=False, ref=None): + def setter(self, ri, space, direction, deref=False, ref=None, var="req"): ref = (ref if ref else []) + [self.c_name] for _, attr in ri.family.pure_nested_structs[self.nested_attrs].member_list(): if attr.is_recursive(): continue - attr.setter(ri, self.nested_attrs, direction, deref=deref, ref=ref) + attr.setter(ri, self.nested_attrs, direction, deref=deref, ref=ref, + var=var) class TypeMultiAttr(Type): @@ -725,7 +720,11 @@ class TypeMultiAttr(Type): return 'struct ynl_string *' elif self.attr['type'] in scalars: scalar_pfx = '__' if ri.ku_space == 'user' else '' - return scalar_pfx + self.attr['type'] + if self.is_auto_scalar: + name = self.type[0] + '64' + else: + name = 
self.attr['type'] + return scalar_pfx + name else: raise Exception(f"Sub-type {self.attr['type']} not supported yet") @@ -792,7 +791,7 @@ class TypeMultiAttr(Type): f"{presence} = n_{self.c_name};"] -class TypeArrayNest(Type): +class TypeIndexedArray(Type): def is_multi_val(self): return True @@ -816,21 +815,28 @@ class TypeArrayNest(Type): f'unsigned int n_{self.c_name}'] return super().arg_member(ri) + def _attr_policy(self, policy): + if self.attr['sub-type'] == 'nest': + return f'NLA_POLICY_NESTED_ARRAY({self.nested_render_name}_nl_policy)' + return super()._attr_policy(policy) + def _attr_typol(self): if self.attr['sub-type'] in scalars: return f'.type = YNL_PT_U{c_upper(self.sub_type[1:])}, ' elif self.attr['sub-type'] == 'binary' and 'exact-len' in self.checks: return f'.type = YNL_PT_BINARY, .len = {self.checks["exact-len"]}, ' - else: + elif self.attr['sub-type'] == 'nest': return f'.type = YNL_PT_NEST, .nest = &{self.nested_render_name}_nest, ' + else: + raise Exception(f"Typol for IndexedArray sub-type {self.attr['sub-type']} not supported, yet") def _attr_get(self, ri, var): local_vars = ['const struct nlattr *attr2;'] get_lines = [f'attr_{self.c_name} = attr;', 'ynl_attr_for_each_nested(attr2, attr) {', - '\tif (ynl_attr_validate(yarg, attr2))', + '\tif (__ynl_attr_validate(yarg, attr2, type))', '\t\treturn YNL_PARSE_CB_ERROR;', - f'\t{var}->_count.{self.c_name}++;', + f'\tn_{self.c_name}++;', '}'] return get_lines, None, local_vars @@ -848,13 +854,25 @@ class TypeArrayNest(Type): ri.cw.p(f'for (i = 0; i < {var}->_count.{self.c_name}; i++)') ri.cw.p(f"{self.nested_render_name}_put(nlh, i, &{var}->{self.c_name}[i]);") else: - raise Exception(f"Put for ArrayNest sub-type {self.attr['sub-type']} not supported, yet") + raise Exception(f"Put for IndexedArray sub-type {self.attr['sub-type']} not supported, yet") ri.cw.p('ynl_attr_nest_end(nlh, array);') def _setter_lines(self, ri, member, presence): return [f"{member} = {self.c_name};", f"{presence} = n_{self.c_name};"] + def free_needs_iter(self): + return self.sub_type == 'nest' + + def _free_lines(self, ri, var, ref): + lines = [] + if self.sub_type == 'nest': + lines += [ + f"for (i = 0; i < {var}->{ref}_count.{self.c_name}; i++)", + f'{self.nested_render_name}_free(&{var}->{ref}{self.c_name}[i]);', + ] + lines += f"free({var}->{ref}{self.c_name});", + return lines class TypeNestTypeValue(Type): def _complex_member_type(self, ri): @@ -909,7 +927,7 @@ class TypeSubMessage(TypeNest): else: sel_var = f"{var}->{sel}" get_lines = [f'if (!{sel_var})', - f'return ynl_submsg_failed(yarg, "%s", "%s");' % + 'return ynl_submsg_failed(yarg, "%s", "%s");' % (self.name, self['selector']), f"if ({self.nested_render_name}_parse(&parg, {sel_var}, attr))", "return YNL_PARSE_CB_ERROR;"] @@ -1125,7 +1143,7 @@ class AttrSet(SpecAttrSet): t = TypeNest(self.family, self, elem, value) elif elem['type'] == 'indexed-array' and 'sub-type' in elem: if elem["sub-type"] in ['binary', 'nest', 'u32']: - t = TypeArrayNest(self.family, self, elem, value) + t = TypeIndexedArray(self.family, self, elem, value) else: raise Exception(f'new_attr: unsupported sub-type {elem["sub-type"]}') elif elem['type'] == 'nest-type-value': @@ -1563,7 +1581,7 @@ class RenderInfo: if family.is_classic(): self.fixed_hdr_len = f"sizeof(struct {c_lower(fixed_hdr)})" else: - raise Exception(f"Per-op fixed header not supported, yet") + raise Exception("Per-op fixed header not supported, yet") # 'do' and 'dump' response parsing is identical @@ -1879,7 +1897,9 @@ def rdir(direction): def 
op_prefix(ri, direction, deref=False): suffix = f"_{ri.type_name}" - if not ri.op_mode or ri.op_mode == 'do': + if not ri.op_mode: + pass + elif ri.op_mode == 'do': suffix += f"{direction_to_suffix[direction]}" else: if direction == 'request': @@ -2032,6 +2052,20 @@ def put_enum_to_str(family, cw, enum): _put_enum_to_str_helper(cw, enum.render_name, map_name, 'value', enum=enum) +def put_local_vars(struct): + local_vars = [] + has_array = False + has_count = False + for _, arg in struct.member_list(): + has_array |= arg.type == 'indexed-array' + has_count |= arg.presence_type() == 'count' + if has_array: + local_vars.append('struct nlattr *array;') + if has_count: + local_vars.append('unsigned int i;') + return local_vars + + def put_req_nested_prototype(ri, struct, suffix=';'): func_args = ['struct nlmsghdr *nlh', 'unsigned int attr_type', @@ -2054,15 +2088,7 @@ def put_req_nested(ri, struct): init_lines.append(f"hdr = ynl_nlmsg_put_extra_header(nlh, {struct_sz});") init_lines.append(f"memcpy(hdr, &obj->_hdr, {struct_sz});") - has_anest = False - has_count = False - for _, arg in struct.member_list(): - has_anest |= arg.type == 'indexed-array' - has_count |= arg.presence_type() == 'count' - if has_anest: - local_vars.append('struct nlattr *array;') - if has_count: - local_vars.append('unsigned int i;') + local_vars += put_local_vars(struct) put_req_nested_prototype(ri, struct, suffix='') ri.cw.block_start() @@ -2097,35 +2123,43 @@ def _multi_parse(ri, struct, init_lines, local_vars): if ri.family.is_classic(): iter_line = f"ynl_attr_for_each(attr, nlh, sizeof({struct.fixed_header}))" else: - raise Exception(f"Per-op fixed header not supported, yet") + raise Exception("Per-op fixed header not supported, yet") - array_nests = set() + indexed_arrays = set() multi_attrs = set() needs_parg = False + var_set = set() for arg, aspec in struct.member_list(): if aspec['type'] == 'indexed-array' and 'sub-type' in aspec: if aspec["sub-type"] in {'binary', 'nest'}: - local_vars.append(f'const struct nlattr *attr_{aspec.c_name};') - array_nests.add(arg) + local_vars.append(f'const struct nlattr *attr_{aspec.c_name} = NULL;') + indexed_arrays.add(arg) elif aspec['sub-type'] in scalars: - local_vars.append(f'const struct nlattr *attr_{aspec.c_name};') - array_nests.add(arg) + local_vars.append(f'const struct nlattr *attr_{aspec.c_name} = NULL;') + indexed_arrays.add(arg) else: raise Exception(f'Not supported sub-type {aspec["sub-type"]}') if 'multi-attr' in aspec: multi_attrs.add(arg) needs_parg |= 'nested-attributes' in aspec needs_parg |= 'sub-message' in aspec - if array_nests or multi_attrs: + + try: + _, _, l_vars = aspec._attr_get(ri, '') + var_set |= set(l_vars) if l_vars else set() + except Exception: + pass # _attr_get() not implemented by simple types, ignore + local_vars += list(var_set) + if indexed_arrays or multi_attrs: local_vars.append('int i;') if needs_parg: local_vars.append('struct ynl_parse_arg parg;') init_lines.append('parg.ys = yarg->ys;') - all_multi = array_nests | multi_attrs + all_multi = indexed_arrays | multi_attrs - for anest in sorted(all_multi): - local_vars.append(f"unsigned int n_{struct[anest].c_name} = 0;") + for arg in sorted(all_multi): + local_vars.append(f"unsigned int n_{struct[arg].c_name} = 0;") ri.cw.block_start() ri.cw.write_func_lvar(local_vars) @@ -2145,8 +2179,8 @@ def _multi_parse(ri, struct, init_lines, local_vars): else: ri.cw.p('hdr = ynl_nlmsg_data_offset(nlh, sizeof(struct genlmsghdr));') ri.cw.p(f"memcpy(&dst->_hdr, hdr, 
sizeof({struct.fixed_header}));") - for anest in sorted(all_multi): - aspec = struct[anest] + for arg in sorted(all_multi): + aspec = struct[arg] ri.cw.p(f"if (dst->{aspec.c_name})") ri.cw.p(f'return ynl_error_parse(yarg, "attribute already present ({struct.attr_set.name}.{aspec.name})");') @@ -2164,8 +2198,8 @@ def _multi_parse(ri, struct, init_lines, local_vars): ri.cw.block_end() ri.cw.nl() - for anest in sorted(array_nests): - aspec = struct[anest] + for arg in sorted(indexed_arrays): + aspec = struct[arg] ri.cw.block_start(line=f"if (n_{aspec.c_name})") ri.cw.p(f"dst->{aspec.c_name} = calloc(n_{aspec.c_name}, sizeof(*dst->{aspec.c_name}));") @@ -2190,8 +2224,8 @@ def _multi_parse(ri, struct, init_lines, local_vars): ri.cw.block_end() ri.cw.nl() - for anest in sorted(multi_attrs): - aspec = struct[anest] + for arg in sorted(multi_attrs): + aspec = struct[arg] ri.cw.block_start(line=f"if (n_{aspec.c_name})") ri.cw.p(f"dst->{aspec.c_name} = calloc(n_{aspec.c_name}, sizeof(*dst->{aspec.c_name}));") ri.cw.p(f"dst->_count.{aspec.c_name} = n_{aspec.c_name};") @@ -2346,10 +2380,7 @@ def print_req(ri): local_vars += ['size_t hdr_len;', 'void *hdr;'] - for _, attr in ri.struct["request"].member_list(): - if attr.presence_type() == 'count': - local_vars += ['unsigned int i;'] - break + local_vars += put_local_vars(ri.struct['request']) print_prototype(ri, direction, terminate=False) ri.cw.block_start() @@ -2416,6 +2447,9 @@ def print_dump(ri): local_vars += ['size_t hdr_len;', 'void *hdr;'] + if 'request' in ri.op[ri.op_mode]: + local_vars += put_local_vars(ri.struct['request']) + ri.cw.write_func_lvar(local_vars) ri.cw.p('yds.yarg.ys = ys;') @@ -2470,11 +2504,22 @@ def free_arg_name(direction): return 'obj' -def print_alloc_wrapper(ri, direction): +def print_alloc_wrapper(ri, direction, struct=None): name = op_prefix(ri, direction) - ri.cw.write_func_prot(f'static inline struct {name} *', f"{name}_alloc", [f"void"]) + struct_name = name + if ri.type_name_conflict: + struct_name += '_' + + args = ["void"] + cnt = "1" + if struct and struct.in_multi_val: + args = ["unsigned int n"] + cnt = "n" + + ri.cw.write_func_prot(f'static inline struct {struct_name} *', + f"{name}_alloc", args) ri.cw.block_start() - ri.cw.p(f'return calloc(1, sizeof(struct {name}));') + ri.cw.p(f'return calloc({cnt}, sizeof(struct {struct_name}));') ri.cw.block_end() @@ -2489,7 +2534,7 @@ def print_free_prototype(ri, direction, suffix=';'): def print_nlflags_set(ri, direction): name = op_prefix(ri, direction) - ri.cw.write_func_prot(f'static inline void', f"{name}_set_nlflags", + ri.cw.write_func_prot('static inline void', f"{name}_set_nlflags", [f"struct {name} *req", "__u16 nl_flags"]) ri.cw.block_start() ri.cw.p('req->_nlmsg_flags = nl_flags;') @@ -2520,7 +2565,7 @@ def _print_type(ri, direction, struct): line = attr.presence_member(ri.ku_space, type_filter) if line: if not meta_started: - ri.cw.block_start(line=f"struct") + ri.cw.block_start(line="struct") meta_started = True ri.cw.p(line) if meta_started: @@ -2544,6 +2589,19 @@ def print_type(ri, direction): def print_type_full(ri, struct): _print_type(ri, "", struct) + if struct.request and struct.in_multi_val: + print_alloc_wrapper(ri, "", struct) + ri.cw.nl() + free_rsp_nested_prototype(ri) + ri.cw.nl() + + # Name conflicts are too hard to deal with with the current code base, + # they are very rare so don't bother printing setters in that case. 
+ if ri.ku_space == 'user' and not ri.type_name_conflict: + for _, attr in struct.member_list(): + attr.setter(ri, ri.attr_set, "", var="obj") + ri.cw.nl() + def print_type_helpers(ri, direction, deref=False): print_free_prototype(ri, direction) @@ -2671,7 +2729,7 @@ def print_dump_type_free(ri): ri.cw.nl() _free_type_members(ri, 'rsp', ri.struct['reply'], ref='obj.') - ri.cw.p(f'free(rsp);') + ri.cw.p('free(rsp);') ri.cw.block_end() ri.cw.block_end() ri.cw.nl() @@ -2682,7 +2740,7 @@ def print_ntf_type_free(ri): ri.cw.block_start() _free_type_members_iter(ri, ri.struct['reply']) _free_type_members(ri, 'rsp', ri.struct['reply'], ref='obj.') - ri.cw.p(f'free(rsp);') + ri.cw.p('free(rsp);') ri.cw.block_end() ri.cw.nl() @@ -2777,8 +2835,6 @@ def print_kernel_policy_sparse_enum_validates(family, cw): cw.p('/* Sparse enums validation callbacks */') first = False - sign = '' if attr.type[0] == 'u' else '_signed' - suffix = 'ULL' if attr.type[0] == 'u' else 'LL' cw.write_func_prot('static int', f'{c_lower(attr.enum_name)}_validate', ['const struct nlattr *attr', 'struct netlink_ext_ack *extack']) cw.block_start() @@ -3185,8 +3241,9 @@ def render_uapi(family, cw): cw.block_end(line=';') cw.nl() elif const['type'] == 'const': + name_pfx = const.get('name-prefix', f"{family.ident_name}-") defines.append([c_upper(family.get('c-define-name', - f"{family.ident_name}-{const['name']}")), + f"{name_pfx}{const['name']}")), const['value']]) if defines: @@ -3298,7 +3355,7 @@ def render_user_family(family, cw, prototype): cw.block_start(f'{symbol} = ') cw.p(f'.name\t\t= "{family.c_name}",') if family.is_classic(): - cw.p(f'.is_classic\t= true,') + cw.p('.is_classic\t= true,') cw.p(f'.classic_id\t= {family.get("protonum")},') if family.is_classic(): if family.fixed_header: @@ -3515,9 +3572,6 @@ def main(): for attr_set, struct in parsed.pure_nested_structs.items(): ri = RenderInfo(cw, parsed, args.mode, "", "", attr_set) print_type_full(ri, struct) - if struct.request and struct.in_multi_val: - free_rsp_nested_prototype(ri) - cw.nl() for op_name, op in parsed.ops.items(): cw.p(f"/* ============== {op.enum_name} ============== */") diff --git a/tools/net/ynl/pyynl/ynl_gen_rst.py b/tools/net/ynl/pyynl/ynl_gen_rst.py index 0cb6348e28d3..90ae19aac89d 100755 --- a/tools/net/ynl/pyynl/ynl_gen_rst.py +++ b/tools/net/ynl/pyynl/ynl_gen_rst.py @@ -10,353 +10,17 @@ This script performs extensive parsing to the Linux kernel's netlink YAML spec files, in an effort to avoid needing to heavily mark up the original - YAML file. - - This code is split in three big parts: - 1) RST formatters: Use to convert a string to a RST output - 2) Parser helpers: Functions to parse the YAML data structure - 3) Main function and small helpers + YAML file. It uses the library code from scripts/lib. 
""" -from typing import Any, Dict, List import os.path +import pathlib import sys import argparse import logging -import yaml - - -SPACE_PER_LEVEL = 4 - - -# RST Formatters -# ============== -def headroom(level: int) -> str: - """Return space to format""" - return " " * (level * SPACE_PER_LEVEL) - - -def bold(text: str) -> str: - """Format bold text""" - return f"**{text}**" - - -def inline(text: str) -> str: - """Format inline text""" - return f"``{text}``" - - -def sanitize(text: str) -> str: - """Remove newlines and multiple spaces""" - # This is useful for some fields that are spread across multiple lines - return str(text).replace("\n", " ").strip() - - -def rst_fields(key: str, value: str, level: int = 0) -> str: - """Return a RST formatted field""" - return headroom(level) + f":{key}: {value}" - - -def rst_definition(key: str, value: Any, level: int = 0) -> str: - """Format a single rst definition""" - return headroom(level) + key + "\n" + headroom(level + 1) + str(value) - - -def rst_paragraph(paragraph: str, level: int = 0) -> str: - """Return a formatted paragraph""" - return headroom(level) + paragraph - - -def rst_bullet(item: str, level: int = 0) -> str: - """Return a formatted a bullet""" - return headroom(level) + f"- {item}" - - -def rst_subsection(title: str) -> str: - """Add a sub-section to the document""" - return f"{title}\n" + "-" * len(title) - - -def rst_subsubsection(title: str) -> str: - """Add a sub-sub-section to the document""" - return f"{title}\n" + "~" * len(title) - - -def rst_section(namespace: str, prefix: str, title: str) -> str: - """Add a section to the document""" - return f".. _{namespace}-{prefix}-{title}:\n\n{title}\n" + "=" * len(title) - - -def rst_subtitle(title: str) -> str: - """Add a subtitle to the document""" - return "\n" + "-" * len(title) + f"\n{title}\n" + "-" * len(title) + "\n\n" - - -def rst_title(title: str) -> str: - """Add a title to the document""" - return "=" * len(title) + f"\n{title}\n" + "=" * len(title) + "\n\n" - - -def rst_list_inline(list_: List[str], level: int = 0) -> str: - """Format a list using inlines""" - return headroom(level) + "[" + ", ".join(inline(i) for i in list_) + "]" - - -def rst_ref(namespace: str, prefix: str, name: str) -> str: - """Add a hyperlink to the document""" - mappings = {'enum': 'definition', - 'fixed-header': 'definition', - 'nested-attributes': 'attribute-set', - 'struct': 'definition'} - if prefix in mappings: - prefix = mappings[prefix] - return f":ref:`{namespace}-{prefix}-{name}`" - - -def rst_header() -> str: - """The headers for all the auto generated RST files""" - lines = [] - - lines.append(rst_paragraph(".. SPDX-License-Identifier: GPL-2.0")) - lines.append(rst_paragraph(".. NOTE: This document was auto-generated.\n\n")) - - return "\n".join(lines) - - -def rst_toctree(maxdepth: int = 2) -> str: - """Generate a toctree RST primitive""" - lines = [] - - lines.append(".. toctree::") - lines.append(f" :maxdepth: {maxdepth}\n\n") - - return "\n".join(lines) - - -def rst_label(title: str) -> str: - """Return a formatted label""" - return f".. 
_{title}:\n\n" - - -# Parsers -# ======= - - -def parse_mcast_group(mcast_group: List[Dict[str, Any]]) -> str: - """Parse 'multicast' group list and return a formatted string""" - lines = [] - for group in mcast_group: - lines.append(rst_bullet(group["name"])) - - return "\n".join(lines) - - -def parse_do(do_dict: Dict[str, Any], level: int = 0) -> str: - """Parse 'do' section and return a formatted string""" - lines = [] - for key in do_dict.keys(): - lines.append(rst_paragraph(bold(key), level + 1)) - if key in ['request', 'reply']: - lines.append(parse_do_attributes(do_dict[key], level + 1) + "\n") - else: - lines.append(headroom(level + 2) + do_dict[key] + "\n") - - return "\n".join(lines) - - -def parse_do_attributes(attrs: Dict[str, Any], level: int = 0) -> str: - """Parse 'attributes' section""" - if "attributes" not in attrs: - return "" - lines = [rst_fields("attributes", rst_list_inline(attrs["attributes"]), level + 1)] - - return "\n".join(lines) - - -def parse_operations(operations: List[Dict[str, Any]], namespace: str) -> str: - """Parse operations block""" - preprocessed = ["name", "doc", "title", "do", "dump", "flags"] - linkable = ["fixed-header", "attribute-set"] - lines = [] - - for operation in operations: - lines.append(rst_section(namespace, 'operation', operation["name"])) - lines.append(rst_paragraph(operation["doc"]) + "\n") - - for key in operation.keys(): - if key in preprocessed: - # Skip the special fields - continue - value = operation[key] - if key in linkable: - value = rst_ref(namespace, key, value) - lines.append(rst_fields(key, value, 0)) - if 'flags' in operation: - lines.append(rst_fields('flags', rst_list_inline(operation['flags']))) - - if "do" in operation: - lines.append(rst_paragraph(":do:", 0)) - lines.append(parse_do(operation["do"], 0)) - if "dump" in operation: - lines.append(rst_paragraph(":dump:", 0)) - lines.append(parse_do(operation["dump"], 0)) - - # New line after fields - lines.append("\n") - - return "\n".join(lines) - - -def parse_entries(entries: List[Dict[str, Any]], level: int) -> str: - """Parse a list of entries""" - ignored = ["pad"] - lines = [] - for entry in entries: - if isinstance(entry, dict): - # entries could be a list or a dictionary - field_name = entry.get("name", "") - if field_name in ignored: - continue - type_ = entry.get("type") - if type_: - field_name += f" ({inline(type_)})" - lines.append( - rst_fields(field_name, sanitize(entry.get("doc", "")), level) - ) - elif isinstance(entry, list): - lines.append(rst_list_inline(entry, level)) - else: - lines.append(rst_bullet(inline(sanitize(entry)), level)) - - lines.append("\n") - return "\n".join(lines) - - -def parse_definitions(defs: Dict[str, Any], namespace: str) -> str: - """Parse definitions section""" - preprocessed = ["name", "entries", "members"] - ignored = ["render-max"] # This is not printed - lines = [] - - for definition in defs: - lines.append(rst_section(namespace, 'definition', definition["name"])) - for k in definition.keys(): - if k in preprocessed + ignored: - continue - lines.append(rst_fields(k, sanitize(definition[k]), 0)) - - # Field list needs to finish with a new line - lines.append("\n") - if "entries" in definition: - lines.append(rst_paragraph(":entries:", 0)) - lines.append(parse_entries(definition["entries"], 1)) - if "members" in definition: - lines.append(rst_paragraph(":members:", 0)) - lines.append(parse_entries(definition["members"], 1)) - - return "\n".join(lines) - - -def parse_attr_sets(entries: List[Dict[str, Any]], namespace: 
str) -> str: - """Parse attribute from attribute-set""" - preprocessed = ["name", "type"] - linkable = ["enum", "nested-attributes", "struct", "sub-message"] - ignored = ["checks"] - lines = [] - - for entry in entries: - lines.append(rst_section(namespace, 'attribute-set', entry["name"])) - for attr in entry["attributes"]: - type_ = attr.get("type") - attr_line = attr["name"] - if type_: - # Add the attribute type in the same line - attr_line += f" ({inline(type_)})" - - lines.append(rst_subsubsection(attr_line)) - - for k in attr.keys(): - if k in preprocessed + ignored: - continue - if k in linkable: - value = rst_ref(namespace, k, attr[k]) - else: - value = sanitize(attr[k]) - lines.append(rst_fields(k, value, 0)) - lines.append("\n") - - return "\n".join(lines) - - -def parse_sub_messages(entries: List[Dict[str, Any]], namespace: str) -> str: - """Parse sub-message definitions""" - lines = [] - - for entry in entries: - lines.append(rst_section(namespace, 'sub-message', entry["name"])) - for fmt in entry["formats"]: - value = fmt["value"] - - lines.append(rst_bullet(bold(value))) - for attr in ['fixed-header', 'attribute-set']: - if attr in fmt: - lines.append(rst_fields(attr, - rst_ref(namespace, attr, fmt[attr]), - 1)) - lines.append("\n") - - return "\n".join(lines) - - -def parse_yaml(obj: Dict[str, Any]) -> str: - """Format the whole YAML into a RST string""" - lines = [] - - # Main header - - lines.append(rst_header()) - - family = obj['name'] - - title = f"Family ``{family}`` netlink specification" - lines.append(rst_title(title)) - lines.append(rst_paragraph(".. contents:: :depth: 3\n")) - - if "doc" in obj: - lines.append(rst_subtitle("Summary")) - lines.append(rst_paragraph(obj["doc"], 0)) - - # Operations - if "operations" in obj: - lines.append(rst_subtitle("Operations")) - lines.append(parse_operations(obj["operations"]["list"], family)) - - # Multicast groups - if "mcast-groups" in obj: - lines.append(rst_subtitle("Multicast groups")) - lines.append(parse_mcast_group(obj["mcast-groups"]["list"])) - - # Definitions - if "definitions" in obj: - lines.append(rst_subtitle("Definitions")) - lines.append(parse_definitions(obj["definitions"], family)) - - # Attributes set - if "attribute-sets" in obj: - lines.append(rst_subtitle("Attribute sets")) - lines.append(parse_attr_sets(obj["attribute-sets"], family)) - - # Sub-messages - if "sub-messages" in obj: - lines.append(rst_subtitle("Sub-messages")) - lines.append(parse_sub_messages(obj["sub-messages"], family)) - - return "\n".join(lines) - - -# Main functions -# ============== +sys.path.append(pathlib.Path(__file__).resolve().parent.as_posix()) +from lib import YnlDocGenerator # pylint: disable=C0413 def parse_arguments() -> argparse.Namespace: """Parse arguments from user""" @@ -367,9 +31,6 @@ def parse_arguments() -> argparse.Namespace: # Index and input are mutually exclusive group = parser.add_mutually_exclusive_group() - group.add_argument( - "-x", "--index", action="store_true", help="Generate the index page" - ) group.add_argument("-i", "--input", help="YAML file name") args = parser.parse_args() @@ -391,15 +52,6 @@ def parse_arguments() -> argparse.Namespace: return args -def parse_yaml_file(filename: str) -> str: - """Transform the YAML specified by filename into an RST-formatted string""" - with open(filename, "r", encoding="utf-8") as spec_file: - yaml_data = yaml.safe_load(spec_file) - content = parse_yaml(yaml_data) - - return content - - def write_to_rstfile(content: str, filename: str) -> None: """Write the 
generated content into an RST file""" logging.debug("Saving RST file to %s", filename) @@ -408,35 +60,17 @@ def write_to_rstfile(content: str, filename: str) -> None: rst_file.write(content) -def generate_main_index_rst(output: str) -> None: - """Generate the `networking_spec/index` content and write to the file""" - lines = [] - - lines.append(rst_header()) - lines.append(rst_label("specs")) - lines.append(rst_title("Netlink Family Specifications")) - lines.append(rst_toctree(1)) - - index_dir = os.path.dirname(output) - logging.debug("Looking for .rst files in %s", index_dir) - for filename in sorted(os.listdir(index_dir)): - if not filename.endswith(".rst") or filename == "index.rst": - continue - lines.append(f" {filename.replace('.rst', '')}\n") - - logging.debug("Writing an index file at %s", output) - write_to_rstfile("".join(lines), output) - - def main() -> None: """Main function that reads the YAML files and generates the RST files""" args = parse_arguments() + parser = YnlDocGenerator() + if args.input: logging.debug("Parsing %s", args.input) try: - content = parse_yaml_file(os.path.join(args.input)) + content = parser.parse_yaml_file(os.path.join(args.input)) except Exception as exception: logging.warning("Failed to parse %s.", args.input) logging.warning(exception) @@ -444,10 +78,6 @@ def main() -> None: write_to_rstfile(content, args.output) - if args.index: - # Generate the index RST file - generate_main_index_rst(args.output) - if __name__ == "__main__": main() |
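
For context, the ynl_gen_rst.py rewrite above delegates all RST generation to the new YnlDocGenerator class exported from tools/net/ynl/pyynl/lib, so other tooling can reuse it. Below is a minimal sketch of driving the class directly, based only on the interface visible in this diff; the spec path is just an example and the snippet assumes it sits in the pyynl directory (mirroring the sys.path setup ynl_gen_rst.py itself uses):

#!/usr/bin/env python3
# Minimal sketch: turn one netlink YAML spec into RST with YnlDocGenerator.
import pathlib
import sys

# Same import arrangement as ynl_gen_rst.py (assumes this file lives in pyynl/).
sys.path.append(pathlib.Path(__file__).resolve().parent.as_posix())
from lib import YnlDocGenerator

def spec_to_rst(spec_path: str, rst_path: str) -> None:
    generator = YnlDocGenerator()
    # parse_yaml_file() loads the spec with NumberedSafeLoader, so the output
    # carries ".. LINENO <n>" comments pointing back at the source YAML lines.
    content = generator.parse_yaml_file(spec_path)
    with open(rst_path, "w", encoding="utf-8") as rst_file:
        rst_file.write(content)

if __name__ == "__main__":
    # Example paths; in-tree specs live under Documentation/netlink/specs/.
    spec_to_rst("Documentation/netlink/specs/netdev.yaml", "netdev.rst")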

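Similarly, the ynl.py hunk that adds _encode_indexed_array() teaches the Python library to encode indexed-array attributes whose sub-type is nest: each element of the supplied list becomes its own nest, with the element index (plus NLA_F_NESTED) used as the attribute type. A hedged sketch of what that looks like from a caller, where the family, operation, and attribute names are hypothetical and only illustrate the expected value shape (a list of dicts):

# Hypothetical spec "example.yaml" with an indexed-array of nests ("queues").
from lib import YnlFamily  # same import style as pyynl/cli.py

ynl = YnlFamily("example.yaml")
ynl.do("queues-set", {
    "dev-index": 1,
    # Each dict is encoded as one nested attribute; its nla_type is the index.
    "queues": [
        {"id": 0, "size": 1024},
        {"id": 1, "size": 2048},
    ],
})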