| #!/usr/bin/env python3 |
| # SPDX-FileCopyrightText: Copyright The Zephyr Project Contributors |
| # SPDX-License-Identifier: Apache-2.0 |
| |
| """ |
| Doxygen Coverage Diff Tool |
| |
| Compares Doxygen documentation coverage between two coverxygen JSON reports (reference and |
| comparison branches) and identifies newly introduced undocumented API symbols. |
| |
| Outputs GitHub Actions annotations for undocumented new symbols and fails if any are found. |
| |
| Usage: |
| python3 doxygen_coverage_diff.py --reference base.json --comparison pr.json |
| |
| The JSON files should be generated by coverxygen with --format json-v3. |
| """ |
| |
| import argparse |
| import json |
| import sys |
| from collections import Counter |
| from dataclasses import dataclass |
| from pathlib import Path |
| |
| |
@dataclass(frozen=True)
class Symbol:
    """A documentable API symbol extracted from a coverxygen report.

    Frozen (immutable and hashable) so instances can be used in sets/Counters
    and compared by value when diffing reference vs. comparison reports.

    Attributes:
        file: Path to the file containing the symbol (possibly prefix-stripped).
        line: Line number where the symbol is defined (0 if unknown).
        name: Name of the symbol.
        kind: Kind of the symbol (function, variable, etc.).
        documented: Whether the symbol has Doxygen documentation.
    """

    file: str
    line: int
    name: str
    kind: str
    documented: bool
| |
| |
def extract_symbols(data: dict, strip_prefix: str | None = None) -> list[Symbol]:
    """Extract all symbols from coverxygen json-v3 format as a flat list.

    Args:
        data: Parsed JSON data from coverxygen (in "json-v3" format).
        strip_prefix: Optional path prefix to remove from file paths.

    Returns:
        List of Symbol instances representing all documentable symbols.
    """
    symbols: list[Symbol] = []
    for path, sym_list in data.get("files", {}).items():
        # Skip malformed entries (non-list values) before any path manipulation.
        if not isinstance(sym_list, list):
            continue
        if strip_prefix and path.startswith(strip_prefix):
            # Drop the prefix and any leading "/" it leaves behind.
            path = path.removeprefix(strip_prefix).lstrip("/")
        symbols.extend(
            Symbol(
                path,
                s.get("line", 0),
                s.get("symbol", ""),
                s.get("kind", ""),
                s.get("documented", False),
            )
            for s in sym_list
        )
    return symbols
| |
| |
def find_undocumented_new(ref: list[Symbol], comp: list[Symbol]) -> list[Symbol]:
    """Find symbols that are undocumented in comparison but weren't in reference.

    A multiset (Counter) of (name, kind) pairs is used so that several
    identically-named symbols are matched one-for-one between the two reports.

    Args:
        ref: List of symbols from the reference (base) branch.
        comp: List of symbols from the comparison (PR) branch.

    Returns:
        List of newly undocumented symbols, sorted by file and line number.
    """
    # Budget of pre-existing undocumented occurrences per (name, kind).
    budget = Counter((sym.name, sym.kind) for sym in ref if not sym.documented)

    newly_undocumented = []
    for sym in comp:
        if sym.documented:
            continue
        ident = (sym.name, sym.kind)
        if budget[ident]:
            # Matches an already-known undocumented occurrence; consume it.
            budget[ident] -= 1
            continue
        newly_undocumented.append(sym)

    return sorted(newly_undocumented, key=lambda sym: (sym.file, sym.line))
| |
| |
def split_by_warn_paths(
    symbols: list[Symbol], warn_paths: list[str]
) -> tuple[list[Symbol], list[Symbol]]:
    """Split symbols into errors and warnings based on path prefixes.

    Symbols whose file path starts with any of the warn_paths prefixes are treated as warnings;
    all others are treated as errors.

    Args:
        symbols: List of undocumented symbols to categorize.
        warn_paths: List of path prefixes where undocumented symbols should be treated as warnings
            instead of errors.

    Returns:
        A tuple of (errors, warnings) where each element is a list of Symbol.
    """
    if not warn_paths:
        return symbols, []

    # str.startswith accepts a tuple of prefixes, so one call tests them all.
    prefixes = tuple(warn_paths)
    errors: list[Symbol] = []
    warnings: list[Symbol] = []
    for sym in symbols:
        bucket = warnings if sym.file.startswith(prefixes) else errors
        bucket.append(sym)
    return errors, warnings
| |
| |
def print_github_annotations(
    symbols: list[Symbol], level: str = "error", max_annotations: int = 50
) -> None:
    """Print GitHub Actions workflow annotations for undocumented symbols.

    Emits annotations in GitHub Actions workflow-command format
    (::error:: / ::warning::), capped at max_annotations entries.

    Args:
        symbols: List of undocumented symbols to annotate.
        level: Annotation level, either "error" or "warning".
        max_annotations: Maximum number of annotations to output.
            Additional symbols are summarized in a single warning.
    """
    for sym in symbols[:max_annotations]:
        message = (
            f"::{level} file={sym.file},line={sym.line},"
            f"title=Missing Doxygen documentation::{sym.kind} '{sym.name}'"
            " is missing Doxygen comments."
        )
        print(message)

    overflow = len(symbols) - max_annotations
    if overflow > 0:
        print(
            f"::warning::... and {overflow} more undocumented symbols "
            f"(showing first {max_annotations})"
        )
| |
| |
def _write_symbol_table(output_file, symbols: list[Symbol]) -> None:
    """Write a markdown table of symbols.

    Args:
        output_file: File object to write to.
        symbols: List of symbols to include in the table.
    """
    # Assemble all rows first and emit them with a single writelines call.
    rows = ["| File | Line | Kind | Symbol |\n|------|------|------|--------|\n"]
    rows.extend(f"| `{sym.file}` | {sym.line} | {sym.kind} | `{sym.name}` |\n" for sym in symbols)
    output_file.writelines(rows)
| |
| |
def write_summary(errors: list[Symbol], warnings: list[Symbol], summary_file: str) -> None:
    """Write a markdown summary table to the given file.

    Creates a formatted markdown section suitable for GitHub Actions job summaries.
    I/O failures are reported as a workflow warning rather than raised.

    Args:
        errors: List of undocumented symbols that are errors.
        warnings: List of undocumented symbols that are warnings.
        summary_file: Path to the file to append the summary to.
    """
    try:
        with open(summary_file, "a", encoding="utf-8") as out:
            out.write("\n## 📚 Doxygen Coverage Check Results\n\n")

            if not (errors or warnings):
                out.write("✅ **All new API symbols are properly documented!**\n")
                return

            # Render each non-empty group with its admonition header.
            sections = (
                (
                    errors,
                    "> [!CAUTION]\n"
                    f"> **{len(errors)} new API symbol(s) with missing documentation.**",
                ),
                (
                    warnings,
                    "> [!WARNING]\n"
                    f"> **{len(warnings)} new API symbol(s) with missing documentation "
                    "(in paths configured as warning-only).**",
                ),
            )
            for group, admonition in sections:
                if not group:
                    continue
                out.write(admonition)
                out.write("\n\n")
                _write_symbol_table(out, group)
                out.write("\n")

            out.write(
                "\n### How to fix\n\n"
                "Add Doxygen documentation comments to the listed symbols (or properly hide them "
                "from the public API documentation if they are internal).\n\n"
                "See the project's [Doxygen style guidelines]"
                "(https://docs.zephyrproject.org/latest/contribute/style/doxygen.html).\n"
            )
    except OSError as e:
        print(f"::warning::Failed to write summary file '{summary_file}': {e}")
| |
| |
def main() -> int:
    """Main entry point for the Doxygen coverage diff tool.

    Parses command-line arguments, loads coverage JSON files, compares them to find newly
    undocumented symbols, and outputs GitHub Actions annotations.

    Returns:
        0 if no undocumented symbols (errors) are found, 1 otherwise.
    """
    parser = argparse.ArgumentParser(
        description="Compare Doxygen coverage between base and PR branches",
        allow_abbrev=False,
    )
    parser.add_argument("--reference", required=True, help="Path to reference coverage JSON")
    parser.add_argument("--comparison", required=True, help="Path to comparison coverage JSON")
    parser.add_argument(
        "--summary-file", help="Path to write markdown summary (e.g., $GITHUB_STEP_SUMMARY)"
    )
    parser.add_argument(
        "--strip-reference-prefix", help="Path prefix to strip from reference file paths"
    )
    parser.add_argument(
        "--strip-comparison-prefix", help="Path prefix to strip from comparison file paths"
    )
    parser.add_argument(
        "--warn-paths",
        nargs="*",
        default=[],
        help="Path prefixes where undocumented symbols are treated as warnings",
    )
    args = parser.parse_args()

    # Validate that both input files exist before attempting to parse them.
    for label, candidate in (("Reference", args.reference), ("Comparison", args.comparison)):
        if not Path(candidate).is_file():
            print(f"::error::{label} coverage file not found: {candidate}")
            return 1

    # Load both reports; a malformed file is a hard error.
    try:
        ref_data = json.loads(Path(args.reference).read_text(encoding="utf-8"))
        comp_data = json.loads(Path(args.comparison).read_text(encoding="utf-8"))
    except json.JSONDecodeError as exc:
        print(f"::error::Failed to parse coverage JSON: {exc}")
        return 1

    ref_symbols = extract_symbols(ref_data, args.strip_reference_prefix)
    comp_symbols = extract_symbols(comp_data, args.strip_comparison_prefix)

    print(f"Reference: {len(ref_symbols)} symbols")
    print(f"Comparison: {len(comp_symbols)} symbols")

    undocumented_new = find_undocumented_new(ref_symbols, comp_symbols)
    print(f"New undocumented symbols: {len(undocumented_new)}")

    errors, warnings = split_by_warn_paths(undocumented_new, args.warn_paths)
    print(f"  Errors: {len(errors)}, Warnings: {len(warnings)}")

    if errors:
        print_github_annotations(errors, level="error")
    if warnings:
        print_github_annotations(warnings, level="warning")

    if args.summary_file:
        write_summary(errors, warnings, args.summary_file)

    # Errors fail the check; warnings alone do not.
    if errors:
        print(f"\nFound {len(errors)} new API symbol(s) without documentation (errors).")
        if warnings:
            print(f"Additionally, {len(warnings)} symbol(s) in warning-only paths.")
        return 1

    if warnings:
        print(f"\nFound {len(warnings)} new API symbol(s) without documentation (warnings only).")

    print("\nAll new API symbols are properly documented (or in warning-only paths).")
    return 0
| |
| |
if __name__ == "__main__":
    # Propagate the tool's status (0 = all documented, 1 = failures) as the process exit code.
    sys.exit(main())