From fad117bbeb630977e36a7934ef9299518e3b2ae6 Mon Sep 17 00:00:00 2001 From: Mateusz Leonowicz Date: Tue, 20 Aug 2024 13:32:19 +0200 Subject: [PATCH 1/7] tools: Inline SiteSpawner package Internal-tag: [#63639] Signed-off-by: Mateusz Leonowicz --- tools/SiteSpawner/.gitignore | 54 ++ tools/SiteSpawner/LICENSE | 202 ++++++ tools/SiteSpawner/README.md | 85 +++ tools/SiteSpawner/pyproject.toml | 55 ++ tools/SiteSpawner/src/sitespawner/__init__.py | 295 ++++++++ tools/SiteSpawner/src/sitespawner/common.py | 99 +++ .../src/sitespawner/convert_data.py | 50 ++ .../src/sitespawner/gen_coverage_report.py | 235 +++++++ tools/SiteSpawner/src/sitespawner/generate.py | 109 +++ tools/SiteSpawner/src/sitespawner/genhtml.py | 358 ++++++++++ .../src/sitespawner/update_style.py | 33 + .../src/sitespawner/update_webpage.py | 106 +++ .../assets/chips-alliance-logo-mono.svg | 22 + tools/SiteSpawner/styles/cov.css | 642 ++++++++++++++++++ tools/SiteSpawner/styles/main.css | 20 + .../coverage_report/coverage_report.html | 75 ++ .../template/coverage_report/main_table.html | 68 ++ .../template/coverage_report/src_view.html | 75 ++ .../coverage_report/summary_table.html | 21 + tools/SiteSpawner/template/redirect.html | 11 + tools/SiteSpawner/template/webpage/conf.py | 88 +++ .../template/webpage/coverage_dashboard.md | 24 + tools/SiteSpawner/template/webpage/dev.md | 7 + tools/SiteSpawner/template/webpage/index.md | 8 + tools/SiteSpawner/template/webpage/main.md | 4 + 25 files changed, 2746 insertions(+) create mode 100644 tools/SiteSpawner/.gitignore create mode 100644 tools/SiteSpawner/LICENSE create mode 100644 tools/SiteSpawner/README.md create mode 100644 tools/SiteSpawner/pyproject.toml create mode 100644 tools/SiteSpawner/src/sitespawner/__init__.py create mode 100644 tools/SiteSpawner/src/sitespawner/common.py create mode 100644 tools/SiteSpawner/src/sitespawner/convert_data.py create mode 100644 tools/SiteSpawner/src/sitespawner/gen_coverage_report.py create mode 100755 tools/SiteSpawner/src/sitespawner/generate.py create mode 100644 tools/SiteSpawner/src/sitespawner/genhtml.py create mode 100644 tools/SiteSpawner/src/sitespawner/update_style.py create mode 100644 tools/SiteSpawner/src/sitespawner/update_webpage.py create mode 100644 tools/SiteSpawner/styles/assets/chips-alliance-logo-mono.svg create mode 100644 tools/SiteSpawner/styles/cov.css create mode 100644 tools/SiteSpawner/styles/main.css create mode 100644 tools/SiteSpawner/template/coverage_report/coverage_report.html create mode 100644 tools/SiteSpawner/template/coverage_report/main_table.html create mode 100644 tools/SiteSpawner/template/coverage_report/src_view.html create mode 100644 tools/SiteSpawner/template/coverage_report/summary_table.html create mode 100644 tools/SiteSpawner/template/redirect.html create mode 100644 tools/SiteSpawner/template/webpage/conf.py create mode 100644 tools/SiteSpawner/template/webpage/coverage_dashboard.md create mode 100644 tools/SiteSpawner/template/webpage/dev.md create mode 100644 tools/SiteSpawner/template/webpage/index.md create mode 100644 tools/SiteSpawner/template/webpage/main.md diff --git a/tools/SiteSpawner/.gitignore b/tools/SiteSpawner/.gitignore new file mode 100644 index 00000000000..e9e1e9b790e --- /dev/null +++ b/tools/SiteSpawner/.gitignore @@ -0,0 +1,54 @@ +# Temporary and binary files +*~ +*.py[cod] +*.so +*.cfg +!.isort.cfg +!setup.cfg +*.orig +*.log +*.pot +__pycache__/* +.cache/* +.*.swp +*/.ipynb_checkpoints/* +.DS_Store + +# Project files +.ropeproject +.project +.pydevproject 
+.settings +.idea +.vscode +tags + +# Package files +*.egg +*.eggs/ +.installed.cfg +*.egg-info + +# Unittest and coverage +htmlcov/* +.coverage +.coverage.* +.tox +junit*.xml +coverage.xml +.pytest_cache/ + +# Build and docs folder/files +build/* +dist/* +sdist/* +docs/api/* +docs/_rst/* +docs/_build/* +cover/* +MANIFEST + +# Per-project virtualenvs +.venv*/ +.conda*/ +.python-version diff --git a/tools/SiteSpawner/LICENSE b/tools/SiteSpawner/LICENSE new file mode 100644 index 00000000000..7a4a3ea2424 --- /dev/null +++ b/tools/SiteSpawner/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
\ No newline at end of file
diff --git a/tools/SiteSpawner/README.md b/tools/SiteSpawner/README.md
new file mode 100644
index 00000000000..68aecc605e1
--- /dev/null
+++ b/tools/SiteSpawner/README.md
@@ -0,0 +1,85 @@
+# SiteSpawner (SIS)
+
+## Installation
+
+```
+pip3 install .
+```
+
+## Usage
+
+All subcommands and accepted arguments can be printed with:
+
+```
+sis --help
+```
+
+The tool consists of 3 individual subcommands and an aggregate of them.
+
+### Coverage data conversion
+
+Conversion of `*.dat` coverage data into `*.info` files is executed via:
+
+```
+sis convert
+```
+
+The tool allows passing a path to the directory containing `*.dat` files via the `--dat-dir` option. If not specified, the current working directory will be considered.
+
+Similarly, it allows specifying an output directory for the produced `*.info` files via `--info-dir`. If not specified, the `*.info` files will be stored where their `*.dat` counterparts are.
+
+### Coverage dashboard generation
+
+The coverage dashboard can be generated from `*.info` files with:
+
+```
+sis reports
+```
+
+### Webpage assembly (update)
+
+Collects the coverage dashboard (and optionally the documentation) and merges it into the existing collection of pages:
+
+```
+sis webpage --loc-github-ref-name <ref_name> \
+    --loc-github-event-name <event_name> --pr-number <pr_number>
+```
+
+The command expects a reference name, an event name and, where applicable, a PR number.
+These parameters determine the location of the generated pages on the website.
+
+E.g. if `ref` is `main`, the coverage dashboard and documentation will be placed under `BASE_URL/main/...`.
+
+Similarly, if pages were generated within a merge request number `<pr_number>`, the pages will be located under `BASE_URL/dev/<pr_number>/...`
+
+## Package layout
+
+
+* [pyproject.toml](pyproject.toml) Project setup, configuration, dependencies
+* [src](src)
+  * [sitespawner](src/sitespawner)
+    * [common.py](src/sitespawner/common.py) Shared definitions
+    * [convert_data.py](src/sitespawner/convert_data.py) `*.dat` -> `*.info` coverage files conversion
+    * [gen_coverage_report.py](src/sitespawner/gen_coverage_report.py) Prepares sources & invokes `genhtml.py` in the `reports` stage
+    * [generate.py](src/sitespawner/generate.py) Executed in the `webpage` stage, invokes `sphinx-build` with rendered `webpage` templates
+    * [genhtml.py](src/sitespawner/genhtml.py) Generates the HTML coverage report based on coverage summaries (provided by `gen_coverage_report.py`)
+    * [\_\_init\_\_.py](src/sitespawner/__init__.py) Parsers & argument processing
+    * [update_style.py](src/sitespawner/update_style.py) Overwrites documentation theme styles & copies assets to the final webpage directory
+    * [update_webpage.py](src/sitespawner/update_webpage.py) Gathers artifacts from the current execution & joins them with the existing webpage (e.g. appends a new PR onto the PR list)
+* [styles](styles) Custom CSS files & assets
+  * [assets](styles/assets) Page assets (e.g. logos)
+  * [cov.css](styles/cov.css) Styles used by the coverage dashboard
+  * [main.css](styles/main.css) Styles that override the documentation theme
+* [template](template) Jinja2 templates for coverage reports / webpage
+  * [coverage_report](template/coverage_report) HTML templates for the coverage dashboard
+    * [coverage_report.html](template/coverage_report/coverage_report.html) Main coverage dashboard view
+    * [main_table.html](template/coverage_report/main_table.html) Main table of the coverage dashboard, a list of source files and their coverage statistics
+    * [src_view.html](template/coverage_report/src_view.html) Source file view
+    * [summary_table.html](template/coverage_report/summary_table.html) Coverage summary table template placed in the top right corner of the coverage dashboard
+  * [redirect.html](template/redirect.html) HTML template used to create the main `index.html` file for the webpage
+  * [webpage](template/webpage) Final webpage templates
+    * [conf.py](template/webpage/conf.py) Sphinx configuration file
+    * [coverage_dashboard.md](template/webpage/coverage_dashboard.md) View of all coverage dashboards
+    * [dev.md](template/webpage/dev.md) Developer view (list of open PRs and branches other than the main branch)
+    * [index.md](template/webpage/index.md) Page with references to available views (currently main & dev)
+    * [main.md](template/webpage/main.md) View of the main branch
diff --git a/tools/SiteSpawner/pyproject.toml b/tools/SiteSpawner/pyproject.toml
new file mode 100644
index 00000000000..005c80fcb86
--- /dev/null
+++ b/tools/SiteSpawner/pyproject.toml
@@ -0,0 +1,55 @@
+[build-system]
+requires = ["setuptools >= 65", "wheel >= 0.38"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "SiteSpawner"
+authors = [
+    { name = "Antmicro" }
+]
+
+description = "Project website generator, embedding RTL coverage reports."
+requires-python = ">=3.8" +version = "0.0.1" +license = {file = "LICENSE"} +classifiers = [ + "Programming Language :: Python :: 3", + "Operating System :: OS Independent", +] +dependencies = [ + "antmicro-sphinx-utils @ git+https://github.com/antmicro/antmicro-sphinx-utils.git", + "beautifulsoup4", + "coloredlogs", + "gitpython", + "jinja2", + "myst-parser", + "sphinx < 8.0.0", + "sphinx_tabs", + "sphinx-immaterial", + "sphinxcontrib-mermaid", + "termcolor", +] + +[tool.setuptools.packages.find] +where = ["src", "."] +include = ["template", "styles", "sitespawner"] + +[tool.setuptools.package-data] +"template" = ["coverage_report/*", "webpage/*", "*.html"] +"styles" = ["*", "assets/*"] + + +[project.scripts] +sis = 'sitespawner.__init__:main' + +[project.optional-dependencies] +dev = [ + "black", +] + +[tool.black] +line-length = 100 + +[tool.isort] +profile = "black" +multi_line_output = 3 diff --git a/tools/SiteSpawner/src/sitespawner/__init__.py b/tools/SiteSpawner/src/sitespawner/__init__.py new file mode 100644 index 00000000000..d198008ff6f --- /dev/null +++ b/tools/SiteSpawner/src/sitespawner/__init__.py @@ -0,0 +1,295 @@ +import argparse +import logging +from importlib.metadata import PackageNotFoundError, version +from pathlib import Path + +from .common import root_dir, get_logger, set_loglevel +from .convert_data import convert_data +from .gen_coverage_report import main as gen +from .update_webpage import update_webpage + +try: + dist_name = "SiteSpawner" + __version__ = version(dist_name) +except PackageNotFoundError: + __version__ = "unknown" +finally: + del version, PackageNotFoundError + +logger = get_logger(__name__) + + +def create_subparser(subparsers, name, description, help, args_list, handler): + """ + Helper function to create a subparser with given arguments and handler. + + Parameters + ---------- + subparsers : _SubParsersAction + Subparsers of parent's parser where the new subparser will be added. + name : str + Name for the subparser. + description : str + Description for the subparser. + help : str + The help text for the subparser. + args_list: list + A list of dictionaries, each containing argument settings. + handler : Callable[Namespace] + Function to be executed for this subparser. + """ + parser = subparsers.add_parser(name, help=help, description=description) + for arg in args_list: + parser.add_argument(arg["name"], **arg["options"]) + parser.set_defaults(handler=handler) + + +def convert_handler(args): + convert_data(args) + + +def reports_handler(args): + if Path(root_dir).absolute == Path(args.report_dir).absolute: + raise ValueError(f"Existing webpage root and output paths mustn't be the same: {args.root}") + gen(args) + + +def webpage_handler(args): + update_webpage( + args.loc_github_ref_name, args.loc_github_event_name, args.pr_number, args.page_url + ) + + +def all_handler(args): + # Convert Data + logger.info("Step 1/3: Convert data") + convert_handler(args) + + # Generate reports + logger.info("Step 2/3: Generate coverage reports") + reports_handler(args) + + # Generate final pages, update styles for webpage, copy static elements + logger.info("Step 3/3: Update / Create webpage") + webpage_handler(args) + + +def setup_parser(): + parser = argparse.ArgumentParser( + description="Generating coverage reports with Verilator's *.dat files." 
+ ) + parser.add_argument( + "--version", + action="version", + version=f"SiteSpawner {__version__}", + ) + parser.add_argument( + "-v", + "--verbose", + dest="loglevel", + help="set loglevel to INFO", + action="store_const", + const=logging.INFO, + ) + parser.add_argument( + "-d", + "--debug", + dest="loglevel", + help="set loglevel to DEBUG", + action="store_const", + const=logging.DEBUG, + ) + + dat_dir = { + "name": "--dat-dir", + "options": { + "metavar": "dat_dir", + "type": str, + "help": "Path to directory containing *.dat files", + "required": True, + }, + } + info_dir = { + "name": "--info-dir", + "options": { + "metavar": "info_dir", + "type": str, + "help": ( + "Path to directory where *.info files will be stored.\n" + "If not specified, *.info will be stored where its *.dat counterpart is." + ), + }, + } + + subparsers = parser.add_subparsers(dest="cmd") + convert_args = [dat_dir, info_dir] + create_subparser( + subparsers=subparsers, + name="convert", + description="Convert Coverage Data", + help="Convert Verilator's *.dat coverage files into *.info files", + args_list=convert_args, + handler=convert_handler, + ) + + logo_src = { + "name": "--logo-src", + "options": { + "metavar": "logo_src", + "type": str, + "default": "_static/white.svg", + "help": "Path to logo to be attached with the report, relative to index.html file in the destination dir.", + }, + } + logo_href = { + "name": "--logo-href", + "options": { + "metavar": "logo_href", + "default": "index.html", + "type": str, + "help": "URL to be associated with the logo.", + }, + } + report_dir = { + "name": "--report-dir", + "options": { + "metavar": "report_dir", + "default": "report", + "type": str, + "help": "Coverage dashboard directory", + }, + } + src_pattern = { + "name": "--src-pattern", + "options": { + "metavar": "src_pattern", + "default": "*", + "type": str, + "help": "Pattern used for designs' source file extraction.", + }, + } + src_remove_pattern = { + "name": "--src-remove-pattern", + "options": { + "metavar": "src_remove_pattern", + "action": "append", + "default": None, + "type": str, + "help": "Pattern used for removing designs' source files from coverage report generation.", + }, + } + src_path = { + "name": "src_path", + "options": { + "metavar": "src_path", + "default": None, + "type": str, + "help": ( + "Path to design's source code. " + "Last segment of path will be displayed in the report. " + "If not specified, the source code path will be " + "the longest common path of reported source files." + ), + }, + } + info_report_dir = { + "name": "--info-report-dir", + "options": { + "metavar": "info_report_dir", + "type": str, + "help": ( + "Path to directory with *.info coverage files.\n" + "If not specified will recursively search from current directory." 
+ ), + }, + } + reports_args = [ + logo_src, + logo_href, + report_dir, + src_pattern, + src_path, + info_report_dir, + src_remove_pattern, + ] + create_subparser( + subparsers=subparsers, + name="reports", + description="Generate Coverage Reports", + help=("Gathers *.info files and generates the collective HTML coverage dashboard."), + args_list=reports_args, + handler=reports_handler, + ) + + ref_name = { + "name": "--loc-github-ref-name", + "options": { + "type": str, + "metavar": "loc_github_ref_name", + "help": "GitHub ref name, use ${{ github.ref }}", + "required": True, + }, + } + event_name = { + "name": "--loc-github-event-name", + "options": { + "type": str, + "metavar": "loc_github_event_name", + "help": "GitHub event name, use ${{ github.event_name }}", + "required": True, + }, + } + pr_number = { + "name": "--pr-number", + "options": { + "type": int, + "metavar": "pr_number", + "help": "Number of the PR, e.g., 42", + "required": True, + }, + } + page_url = { + "name": "--page-url", + "options": { + "type": str, + "metavar": "page_url", + "help": "Base URL of the website. Otherwise, will apply relative reference for redirect.", + }, + } + webpage_args = [ref_name, event_name, pr_number, page_url] + create_subparser( + subparsers=subparsers, + name="webpage", + description="Update / assemble webpage with coverage reports", + help="Update webpage based on GitHub refs and events.", + args_list=webpage_args, + handler=webpage_handler, + ) + + create_subparser( + subparsers=subparsers, + name="all", + description="Execute all steps consecutively.", + help="Perform data conversion, coverage dashboard generation and assemble the webpage.", + args_list=convert_args + reports_args + webpage_args, + handler=all_handler, + ) + + return parser + + +def main(): + parser = setup_parser() + args = parser.parse_args() + + if args.loglevel: + set_loglevel(args.loglevel) + + if args.cmd: + args.handler(args) + else: + parser.print_help() + + +if __name__ == "__main__": + main() diff --git a/tools/SiteSpawner/src/sitespawner/common.py b/tools/SiteSpawner/src/sitespawner/common.py new file mode 100644 index 00000000000..04e73060199 --- /dev/null +++ b/tools/SiteSpawner/src/sitespawner/common.py @@ -0,0 +1,99 @@ +import logging +from functools import wraps +from pathlib import Path +import sys + +from termcolor import colored +from typing import Any + + +class CustomFormatter(logging.Formatter): + grey = "\x1b[37m" + blue = "\x1b[1;36m" + yellow = "\x1b[33;20m" + red = "\x1b[31;20m" + bold_red = "\x1b[31;1m" + reset = "\x1b[0m" + logformat = "[%(asctime)s] %(levelname)s:%(name)s:%(message)s" + + FORMATS = { + logging.DEBUG: grey + logformat + reset, + logging.INFO: blue + logformat + reset, + logging.WARNING: yellow + logformat + reset, + logging.ERROR: red + logformat + reset, + logging.CRITICAL: bold_red + logformat + reset, + } + + def format(self, record): + log_fmt = self.FORMATS.get(record.levelno) + formatter = logging.Formatter(log_fmt, "%Y-%m-%d %H:%M:%S") + return formatter.format(record) + + +# Resolve paths to package resources +root_dir = Path(__file__).parent.parent +template_dir = root_dir / "template" +coverage_dashboard_template_dir = template_dir / "coverage_report" +webpage_template_dir = template_dir / "webpage" +styles_dir = root_dir / "styles" + + +def get_logger(name: str) -> logging.Logger: + """Returns a logger with the specified name. 
+ + It also makes sure that the root logger + is initialised with a proper handler.""" + logger = logging.getLogger(name) + + # Make sure that the root logger is initialised + setup_root_logger(stream=sys.stdout) + return logger + + +def setup_root_logger(stream: Any): + """Setups the root logger by setting its verbosity and adding + a handler with custom formatting to it.""" + # Obtaining the root logger + logger = logging.getLogger() + + # If custom handler does exist, it means it was already initialised + if not logger.hasHandlers(): + ch = logging.StreamHandler(stream=stream) + ch.setFormatter(CustomFormatter()) + logger.addHandler(ch) + + +def set_loglevel(loglevel: int): + """Set the log level of the root logger.""" + logger = logging.getLogger() + logger.setLevel(loglevel) + + +def args_on_debug_logger(logger): + def _args_on_debug_logger(func): + @wraps(func) + def wrapper(*args, **kwargs): + logger.debug(f"{func.__name__}: Args: {args} Kwargs: {kwargs}") + return func(*args, **kwargs) + + return wrapper + + return _args_on_debug_logger + + +def main_func_log(logger, step_name): + def _main_func_log(func): + @wraps(func) + def wrapper(*args, **kwargs): + logger.info(step_name) + try: + res = func(*args, **kwargs) + logger.info(f'{func.__name__} {colored("SUCCESS", "green")}') + return res + except Exception as e: + logger.error(f'{func.__name__} {colored("FAILED", "red")}') + raise e + + return wrapper + + return _main_func_log diff --git a/tools/SiteSpawner/src/sitespawner/convert_data.py b/tools/SiteSpawner/src/sitespawner/convert_data.py new file mode 100644 index 00000000000..e40ae02d0e2 --- /dev/null +++ b/tools/SiteSpawner/src/sitespawner/convert_data.py @@ -0,0 +1,50 @@ +import subprocess +from pathlib import Path + +from .common import args_on_debug_logger, main_func_log, get_logger + +logger = get_logger(__name__) + + +@args_on_debug_logger(logger=logger) +def convert_coverage_data(dat_dir, out_dir, dat_pattern="coverage*.dat"): + """Converts *.dat coverage data files into *.info files.""" + dat_dir = Path(dat_dir) + + # Find all coverage*.dat files + files = list(Path.glob(dat_dir, f"**/{dat_pattern}")) + + if not files: + logger.error("No 'coverage*.dat' files were found.") + logger.error(f"Searched directory: {dat_dir.absolute}") + logger.error(f"{__name__} ended with errors") + raise Exception("No 'coverage*.dat' data files were found.") + + for dat_file in files: + info_filename = dat_file.name.replace(".dat", ".info") + info_path = (dat_file.parent if not out_dir else Path(out_dir)) / info_filename + + try: + subprocess.run( + [ + "verilator_coverage", + "--write-info", + info_path, + dat_file, + ], + check=True, + ) + logger.debug(f"Conversion: {dat_file} -> {info_path} SUCCEEDED") + except subprocess.CalledProcessError: + raise Exception(f"Failed to convert {dat_file}") + + +@main_func_log(logger, "Convert Coverage Data: *.dat -> *.info") +def convert_data(args): + dat_dir = args.dat_dir + out_dir = args.info_dir + + if out_dir: + Path(out_dir).mkdir(parents=True, exist_ok=True) + + convert_coverage_data(dat_dir, out_dir) diff --git a/tools/SiteSpawner/src/sitespawner/gen_coverage_report.py b/tools/SiteSpawner/src/sitespawner/gen_coverage_report.py new file mode 100644 index 00000000000..4f39537e8e3 --- /dev/null +++ b/tools/SiteSpawner/src/sitespawner/gen_coverage_report.py @@ -0,0 +1,235 @@ +from io import TextIOWrapper +import logging +import subprocess +from pathlib import Path +from shutil import copy2 +from typing import List, Optional + +from .common import 
args_on_debug_logger, main_func_log, styles_dir, get_logger +from .genhtml import genhtml, get_common_src_path, parse_infos + +logger = get_logger(__name__) + + +def obtain_stdout(filename) -> TextIOWrapper | int: + """Checks whether the logger is in debug mode. If it is + then returns a file descriptor to the file with the given filename. + Otherwise, returns subprocess.DEVNULL.""" + if logger.getEffectiveLevel() <= logging.DEBUG: + return open(filename, "w+") + return subprocess.DEVNULL + + +def lcov_merge(input_files: List[str], output_file: str): + """Invokes lcov tool to add `input_file` into the tracefile. + `output_file` becomes then an aggregate of *.info files.""" + lcov_command = ["lcov", "-o", output_file] + + for input_file in input_files: + lcov_command += ["-a", input_file] + + subprocess.run( + lcov_command, + stdout=obtain_stdout(f"{output_file}_merge.log"), + ) + + +@args_on_debug_logger(logger=logger) +def lcov_genhtml( + info_files, + path_prefix, + lcov_report_dir="lcov_report", + log_output_path="lcov_genhtml.log", +): + """Invokes lcov's genhtml tool to generate source file views for the coverage report.""" + command = ["genhtml", "--output-dir", lcov_report_dir, *info_files] + + if not path_prefix: + subprocess.run( + command, + stdout=obtain_stdout(log_output_path), + ) + else: + command += ["--prefix", str(path_prefix)] + subprocess.run( + command, + stdout=obtain_stdout(log_output_path), + ) + + +@args_on_debug_logger(logger=logger) +def generate_coverage_reports( + output_dir, + src_path, + src_pattern="*", + src_remove_pattern=None, + logo_src=None, + logo_href=None, + info_report_dir=None, + info_pattern="coverage*.info", +): + """Iterates over available *.info files, merges them & generates summaries + for each coverage type with the use of lcov. 
+ Calls `genhtml` to generate coverage dashboards for individual tests as + well as for the all tests combined.""" + curr_dir = Path.cwd() + if not info_report_dir: + info_report_dir = curr_dir + + if not Path(src_path).exists(): + raise ValueError(f"Sources path doesn't exist {src_path}") + + # Extract coverage info files + info_files = list(Path(info_report_dir).glob(f"**/{info_pattern}")) + processed_info = False + + for info_file in info_files: + logger.debug(f"Preprocessing {info_file}") + lcov_extract_command = ["lcov", "--extract", info_file, src_pattern, "-o", info_file] + + data, _ = parse_infos([str(info_file)]) + if len(data.keys()) == 0: + logger.warning(f"No data found in .info file: {info_file}") + continue + + processed_info = True + path_prefix = get_common_src_path(data.keys()) + resolved_src_path = Path(src_path).resolve() + + # Align paths to end in the same directory: + parts = path_prefix.parts + src_dir_parts = resolved_src_path.name + for i in reversed(range(len(parts))): + if parts[i] == src_dir_parts: + break + path_prefix = path_prefix.parent + + resolved_path_prefix = Path(path_prefix).resolve() + logger.debug(f"Deduced source path prefix: {path_prefix}") + if resolved_src_path != resolved_path_prefix: + logger.debug(f"Substituting prefix: {path_prefix} -> {resolved_src_path}") + lcov_extract_command += [ + "--substitute", + f"s|{path_prefix}|{resolved_src_path}|", + ] + + subprocess.run( + lcov_extract_command, + stdout=obtain_stdout(f"{info_file}_extraction.log"), + ) + if src_remove_pattern is not None: + subprocess.run( + ["lcov", "--remove", info_file, *src_remove_pattern, "-o", info_file], + stdout=obtain_stdout(f"{info_file}_remove.log"), + ) + + if not processed_info: + logger.error("No valid 'coverage*.info' data files were found.") + logger.error(f"Searched directory: {info_report_dir} and all subdirectories.") + raise Exception("No valid 'coverage*.info' data files were found.") + + # Run LCOV's genhtml to gather source-file pages + branch_merged = Path("./merged_branch.info") + toggle_merged = Path("./merged_toggle.info") + lcov_genhtml_output_merged_log = Path("./lcov_genhtml_merged.out") + + # Find and classify coverage files + branch_files, toggle_files = {}, {} + files = Path(info_report_dir).glob("**/coverage_*.info") + file_names = set() + + for file in files: + if file.name.endswith("_branch.info"): + file_names.add(file.name.removesuffix("_branch.info")) + branch_files[file.name.removesuffix("_branch.info")] = file + elif file.name.endswith("_toggle.info"): + file_names.add(file.name.removesuffix("_toggle.info")) + toggle_files[file.name.removesuffix("_toggle.info")] = file + + # Generate reports for each coverage file set + for name_body in file_names: + input_files = [] + if name_body in toggle_files: + input_files.append(str(toggle_files[name_body])) + if name_body in branch_files: + input_files.append(str(branch_files[name_body])) + test_name = name_body.removeprefix("coverage_") + + test_output_dir = Path(output_dir) / f"all_{test_name}" + (test_output_dir / "_static").mkdir(parents=True, exist_ok=True) + + info_files = Path(info_report_dir).glob(f"**/*{test_name}*.info") + lcov_html_dir = curr_dir / "lcov_report" + + lcov_genhtml_output_name = Path(f"./lcov_genhtml_{test_name}.log") + lcov_genhtml(info_files, src_path, lcov_html_dir, lcov_genhtml_output_name) + genhtml( + input_files=input_files, + output_dir=test_output_dir, + test_name=test_name, + logo_src=logo_src, + logo_href=logo_href, + html_src_dir=lcov_html_dir, + ) + + 
copy2(styles_dir / "main.css", test_output_dir) + copy2(styles_dir / "cov.css", test_output_dir) + copy2( + styles_dir / "assets" / "chips-alliance-logo-mono.svg", + test_output_dir / "_static" / "white.svg", + ) + + # Merge branch files + merged_input_files = [] + + if branch_files: + lcov_merge(branch_files.values(), branch_merged) + merged_input_files.append(str(branch_merged)) + + # Merge toggle files + if toggle_files: + lcov_merge(toggle_files.values(), toggle_merged) + merged_input_files.append(str(toggle_merged)) + + # Generate final combined report + final_output_dir = Path(output_dir) / "all" + (final_output_dir / "_static").mkdir(parents=True, exist_ok=True) + + lcov_genhtml( + merged_input_files, + src_path, + lcov_html_dir, + lcov_genhtml_output_merged_log, + ) + genhtml( + input_files=merged_input_files, + output_dir=final_output_dir, + test_name="all", + logo_src=logo_src, + logo_href=logo_href, + html_src_dir=lcov_html_dir, + ) + + copy2(styles_dir / "main.css", final_output_dir) + copy2(styles_dir / "cov.css", final_output_dir) + copy2( + styles_dir / "assets" / "chips-alliance-logo-mono.svg", + final_output_dir / "_static" / "white.svg", + ) + + +@main_func_log(logger, "Generate Coverage Reports") +def main(args): + # Set output directory and create it if it doesn't exist + report_dir = Path(args.report_dir) + report_dir.mkdir(parents=True, exist_ok=True) + + generate_coverage_reports( + output_dir=report_dir, + src_pattern=args.src_pattern, + src_remove_pattern=args.src_remove_pattern, + src_path=args.src_path, + logo_src=args.logo_src, + logo_href=args.logo_href, + info_report_dir=args.info_report_dir, + ) diff --git a/tools/SiteSpawner/src/sitespawner/generate.py b/tools/SiteSpawner/src/sitespawner/generate.py new file mode 100755 index 00000000000..438f59ccef6 --- /dev/null +++ b/tools/SiteSpawner/src/sitespawner/generate.py @@ -0,0 +1,109 @@ +from pathlib import Path +from shutil import copy + +import jinja2 + +from .common import args_on_debug_logger, get_logger + +logger = get_logger(__name__) + + +def render_template(src, dst, **kwargs): + """ + Renders a jinja2 template file to another file + """ + with open(src, "r") as fr, open(dst, "w") as fw: + tpl = jinja2.Template(fr.read()) + fw.write(tpl.render(**kwargs)) + + +@args_on_debug_logger(logger) +def make_coverage_report_index(branch, root, output, templates): + """Prepares coverage report index page.""" + # Coverage types individual dashboards accumulate + # Coverage dashboard displays coverage types side-by-side + # on singular page; all files are prefixed with 'all'. 
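+    # The combined dashboard is generated under 'all'; per-test dashboards
+    # are generated under 'all_<test_name>' (see gen_coverage_report.py).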
+ cov_dashboards = ["all"] + path = Path(root) / "coverage_dashboard" + + # Collect summary reports + summary = {k: k if (path / k).is_dir() else None for k in cov_dashboards} + + # Collect individual test reports + individual = {k: dict() for k in cov_dashboards} + for key in cov_dashboards: + pfx = f"{key}_" + + if not path.exists(): + logger.warning(f"Not found {path}...") + logger.warning("Skipping") + continue + + for file in sorted(path.iterdir()): + if not file.is_dir(): + continue + + fname = file.name + if not fname.startswith(pfx): + continue + + # Extract test name + test_name = fname.removeprefix(pfx) + + # Append the report + individual[key][test_name] = fname + + # Render the template + params = { + "ref": branch + "_coverage_dashboard", + "summary": summary, + "individual": individual, + } + + output.mkdir(parents=True, exist_ok=True) + render_template( + templates / "coverage_dashboard.md", + output / "coverage_dashboard.md", + **params, + ) + + +@args_on_debug_logger(logger) +def make_dev_index(branches, output, templates): + """Prepares the branch/pr index page.""" + params = {"branches": branches} + render_template(templates / "dev.md", output / "dev.md", **params) + + +def generate(template, root, output): + """Processes webpage *.md templates.""" + template = Path(template) + root = Path(root) + output = Path(output) + + # Reports for the main branch + make_coverage_report_index("main", root / "main", output / "main", template) + + # Reports for development branches / pull requests + branches = [] + + path = root / "dev" + + if path.is_dir(): + for filepath in path.iterdir(): + if not filepath.is_dir(): + continue + + fname = filepath.name + branches.append(fname) + make_coverage_report_index( + fname, root / "dev" / fname, output / "dev" / fname, template + ) + + # Prepare the branch/pr index page + make_dev_index(branches, output, template) + + # Copy other files/pages + files = ["conf.py", "main.md", "index.md"] + for file in files: + copy(template / file, output / file) diff --git a/tools/SiteSpawner/src/sitespawner/genhtml.py b/tools/SiteSpawner/src/sitespawner/genhtml.py new file mode 100644 index 00000000000..4be90dd7732 --- /dev/null +++ b/tools/SiteSpawner/src/sitespawner/genhtml.py @@ -0,0 +1,358 @@ +import datetime +from collections import OrderedDict, defaultdict +from copy import deepcopy +from os.path import commonpath +from pathlib import Path +from typing import List + +from bs4 import BeautifulSoup as BS +from jinja2 import Environment, FileSystemLoader + +from .common import ( + args_on_debug_logger, + coverage_dashboard_template_dir, + main_func_log, + get_logger, +) + +logger = get_logger(__name__) + + +def get_color(value: float, total_points: int, min_value: float = 0, max_value: float = 100): + """Given coverage level, provides the color for the visual coverage bar.""" + frac = value / total_points * 100 if total_points != 0 else 0 + + midpoint = (max_value - min_value) / 2 + if int(total_points) == 0: # No coverage points + r, g, b = (169, 169, 169) # Background color of the component + elif frac <= midpoint: + r, g, b = (255, int(255 * frac / midpoint), 0) + else: + r, g, b = (int(255 * (max_value - frac) / midpoint), 255, 0) + return "#%s%s%s;" % tuple([hex(c)[2:].rjust(2, "0") for c in (r, g, b)]) + + +# Summary parsing # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + + +def get_common_src_path(paths): + """Longest common path of given `paths`.""" + return Path(commonpath([Path(x).resolve() for x in paths if 
Path(x).is_absolute()])) + + +def parse_infos(input_files: List[str]): + for file in input_files: + if not Path(file).is_file(): + raise FileNotFoundError(f"Input file '{file}' does not exist.") + + data = defaultdict(defaultdict) + code_root_path = None + + for i in input_files: + lines_found_sum = 0 + lines_hit_sum = 0 + module_name, _ = i.split("_")[-1].split(".") + + with open(i, "r") as f: + file_path = None + lines_found = None + lines_hit = None + for line in f: + if line.startswith("SF:"): + file_path = line[3:].strip() + elif line.startswith("LF:"): + lines_found = int(line[3:]) + lines_found_sum += lines_found + elif line.startswith("LH:"): + lines_hit = int(line[3:]) + lines_hit_sum += lines_hit + elif line.startswith("end_of_record"): + data[file_path][module_name] = [lines_hit, lines_found] + file_path = None + lines_found = None + lines_hit = None + + data["Total:"][module_name] = [lines_hit_sum, lines_found_sum] + return data, code_root_path + + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# HTML components generation # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + + +def generate_table(data, template_env, links=False): + """Generates coverage table component.""" + cov_types = sorted(list(list(data.items())[0][1].keys()), reverse=True) + num_tests = len(cov_types) + raw_widths = [40, 20, 20] + widths_arr = [str(i / num_tests) + "%" for i in raw_widths] + + name_w = 20 + cov_container_size = (100 - name_w) / num_tests + hit_w = cov_container_size / 4 + rate_w = cov_container_size - hit_w + + template_env.globals["get_color"] = get_color + main_table_html = template_env.get_template("main_table.html") + # Only pass actual coverage data, leave out the summary + # The hit rates also have to be sorted in the same way cov_types are + cov_data = { + k: dict(sorted(v.items(), reverse=True)) for (k, v) in data.items() if k != "Total:" + } + return main_table_html.render( + cov_types=cov_types, + width_cov_desc=sum(raw_widths) / num_tests, + name_w=name_w, + rate_w=rate_w, + hit_w=hit_w, + data=cov_data, + widths_arr=widths_arr, + links=links, + ) + + +def generate_summary(data: list, key: str, template_env: Environment, new_row=False): + """Generates coverage summary table component that then is included next to the info header.""" + summary_html = template_env.get_template("summary_table.html") + + frac = data[0] / data[1] * 100 if data[1] != 0 else 0 + full_cov_color = get_color(data[0], data[1]) + return summary_html.render( + new_row=new_row, + cov_type_token=key, + color_token=full_cov_color, + hitrate_token="{:.1f}%".format(frac), + hit_token=str(data[0]), + total_token=str(data[1]), + ) + + +def render_page( + data, + root_name, + path_segments, + out_dir, + test_name, + logo_src, + logo_href, + template_env, + links=True, +): + """Combines the final report page.""" + report_html = template_env.get_template("coverage_report.html") + + output = report_html.render( + header_token="Full", + logo_src=logo_src, + logo_href=logo_href, + fulltable_token=generate_table(data, template_env, links), + root_name=root_name, + path_segments=path_segments, + testname_token=test_name, + time_token=datetime.datetime.now().strftime("%d-%m-%Y"), + **{ + f"{test}_summary_token": generate_summary(data["Total:"][test], test, template_env) + for test in data["Total:"].keys() + }, + ) + + with open(out_dir, "w") as f: + print(output, file=f) + + +def sub_src_view( + data, + file, + test_name, + root_name, + 
path_segments, + src_prefix, + out_dir, + html_src_dir, + logo_src, + logo_href, + template_env, +): + """Generate page for the source file based on the view generated by lcov's genhtml.""" + file = Path(file).resolve() + html_name = f"{file.name}.gcov.html" + + if file.is_absolute(): + cmn_path = commonpath([file, src_prefix]) + inner_path = file.relative_to(cmn_path).parent + else: + inner_path = file.parent + + main_table = None + + src_html_path = Path(html_src_dir) / Path(inner_path) / html_name + + if not src_html_path.exists(): + logger.warning(f"Not found: {src_html_path}") + return + + with open(src_html_path, "r") as src: + soup = BS(src, features="html.parser") + elem = soup.findAll("table") + main_table = elem[len(elem) - 2] + + report_html = template_env.get_template("src_view.html") + + logger.debug(f"Generate summary for file {file.name}") + + output = report_html.render( + header_token="Full", + logo_src=logo_src, + logo_href=logo_href, + root_name=root_name, + path_segments=path_segments, + src_file_table=main_table, + testname_token=test_name, + time_token=datetime.datetime.now().strftime("%d-%m-%Y"), + **{ + f"{test}_summary_token": generate_summary(data[test], test, template_env) + for test in data.keys() + }, + ) + + with open(out_dir, "w") as f: + print(output, file=f) + + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Data normalization dependent on the view # # # # # # # # # # # # # # # # # # # # # # # # # # # # + + +def generate_dir_dict(data, dir): + """Process coverage data grouped by directory/module.""" + gdict = defaultdict(lambda: defaultdict(list)) + for file, cov_data in dict(data).items(): + if file == "Total:": + gdict[file] = deepcopy(cov_data) + continue + + base = Path(file).resolve().parent.relative_to(dir) + + for key, d in cov_data.items(): + gdict[str(base)][key].append(d) + + return OrderedDict(sorted(gdict.items())) + + +def generate_file_dict(data, base: Path, code_root_path: Path): + """Process coverage data grouped by file.""" + gdict = defaultdict(lambda: defaultdict(list)) + + for file, cov_data in dict(data).items(): + if file == "Total:": + continue + if Path(file).resolve().parent.relative_to(code_root_path) == base: + for key, data in cov_data.items(): + gdict[Path(file).name][key] = data + + if len(gdict["Total:"][key]) == 0: + gdict["Total:"][key] = [0, 0] + + gdict["Total:"][key][0] += data[0] + gdict["Total:"][key][1] += data[1] + + return gdict + + +def unify_dict(data): + """Adds 0-entries for tests with non reported coverage.""" + # Figure out the list of all tests: + tests = set([k for cov_data in data.values() for k in cov_data.keys()]) + + # Add missing ones to the dict: + for cov_data in data.values(): + for test in tests: + if test not in cov_data: + cov_data[test] = [0, 0] + return data + + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Root genhtml function # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + + +@main_func_log(logger, "Generate HTML Coverage Report") +@args_on_debug_logger(logger) +def genhtml(input_files, output_dir, test_name, html_src_dir, logo_src=None, logo_href=None): + """Generates coverage dashboard from *.info files.""" + + if not Path(output_dir).is_dir(): + raise FileNotFoundError(f"Output directory '{output_dir}' does not exist.") + + data, code_root_path = parse_infos(input_files) + + # The LCOV must be ran with '--list-full-path' so that the paths to 
sources + # are not 'simplified' with '...'. + code_root_path = get_common_src_path(data.keys()).parent + + data = unify_dict(data) + tld = generate_dir_dict(data, code_root_path) + + template_env = Environment(loader=FileSystemLoader(coverage_dashboard_template_dir)) + + for file in list(data.keys()): + if file == "Total:": + continue + + file_path = Path(file).resolve() + segments = str(file_path.relative_to(code_root_path)).split("/") + + sub_src_view( + data=data[file], + file=file, + test_name=test_name, + root_name="caliptra-rtl", + path_segments=segments, + src_prefix=code_root_path, + out_dir=f"{output_dir}/index_{Path(file).name}.html", + html_src_dir=html_src_dir, + logo_src=logo_src, + logo_href=logo_href, + template_env=template_env, + ) + + for key in list(tld.keys()): + if key == "Total:": + continue + subdata = generate_file_dict(data, Path(key), code_root_path) + render_page( + data=subdata, + root_name="caliptra-rtl", + path_segments=key.split("/"), + out_dir=f"{output_dir}/index_{key.replace('/','_')}.html", + test_name=test_name, + logo_src=logo_src, + logo_href=logo_href, + template_env=template_env, + ) + + for file, cov_data in tld.items(): + if file == "Total:": + continue + for test_type, dat in cov_data.items(): + hit, total = 0, 0 + for measurement in dat: + hit += measurement[0] + total += measurement[1] + if total > 0: + cov_data[test_type] = [hit, total] + else: + cov_data[test_type] = [0, 0] + render_page( + data=tld, + root_name="caliptra-rtl", + path_segments=["src"], + out_dir=f"{output_dir}/index.html", + test_name=test_name, + logo_src=logo_src, + logo_href=logo_href, + template_env=template_env, + links=True, + ) + + return code_root_path diff --git a/tools/SiteSpawner/src/sitespawner/update_style.py b/tools/SiteSpawner/src/sitespawner/update_style.py new file mode 100644 index 00000000000..fde60aa8c0f --- /dev/null +++ b/tools/SiteSpawner/src/sitespawner/update_style.py @@ -0,0 +1,33 @@ +from pathlib import Path +from shutil import copy + +from .common import args_on_debug_logger, main_func_log, styles_dir, get_logger + +logger = get_logger(__name__) + + +def copy_files(source, search_pattern, build_dir): + files = list(build_dir.rglob(search_pattern)) + for file in files: + logger.debug(f"Copy {source} to {file}") + copy(source, file) + + +@main_func_log(logger, "Update webpage styles") +@args_on_debug_logger(logger) +def update_style(build_dir): + """Replaces styles for sphinx build and injects assets into the `build_dir`.""" + + build_dir = Path(build_dir) + copy( + styles_dir / "main.css", + build_dir / "html" / "_static", + ) + + copy( + styles_dir / "assets" / "chips-alliance-logo-mono.svg", + build_dir / "html" / "_static" / "white.svg", + ) + + chips_cov_css = styles_dir / "cov.css" + copy_files(chips_cov_css, chips_cov_css.name, build_dir) diff --git a/tools/SiteSpawner/src/sitespawner/update_webpage.py b/tools/SiteSpawner/src/sitespawner/update_webpage.py new file mode 100644 index 00000000000..c30d82f358b --- /dev/null +++ b/tools/SiteSpawner/src/sitespawner/update_webpage.py @@ -0,0 +1,106 @@ +import os +import subprocess +from pathlib import Path +from shutil import copy2, copytree, rmtree +from jinja2 import Environment, FileSystemLoader + +from .common import ( + args_on_debug_logger, + main_func_log, + webpage_template_dir, + template_dir, + get_logger, +) +from .generate import generate +from .update_style import update_style + +logger = get_logger(__name__) + + +@args_on_debug_logger(logger) +def replace_dir(src_dir, dst_dir): + 
"""Removes the destination directory, creates an empty destination directory, + and copies contents of source directory to destination directory.""" + src_path = Path(src_dir) + dst_path = Path(dst_dir) + + if not src_path.is_dir(): + return logger.warning("Source directory not present!") + + # Replace existing pages with new ones + if dst_path.exists(): + rmtree(dst_path) + dst_path.mkdir(parents=True, exist_ok=True) + + # Copy items to the new directory + for item in src_path.iterdir(): + copytree(item, dst_path / item.name, dirs_exist_ok=True) + + +@main_func_log(logger, "Update webpage") +@args_on_debug_logger(logger) +def update_webpage(loc_github_ref_name, loc_github_event_name, pr_number, page_url=None): + """Updates the public part of the gh-pages based on git refs, github events, and PR numbers.""" + # Determine the directory based on the GitHub ref and event + if loc_github_ref_name == "main": + directory = "main" + elif loc_github_event_name == "pull_request": + directory = f"dev/{pr_number}" + elif loc_github_event_name == "push": + directory = f'dev/{loc_github_ref_name.replace("/", "_")}' + else: + logger.error(f"Invalid event type: {loc_github_event_name} on ref: {loc_github_ref_name}") + raise ValueError("Unknown deployment type") + + md_source_dir = Path("source") + legacy_page_dir = Path("public.old") + new_page_dir = Path("public.new") + + replace_dir("coverage_dashboard", legacy_page_dir / "html" / directory / "coverage_dashboard") + + if md_source_dir.exists(): + rmtree(md_source_dir) + else: + md_source_dir.mkdir(parents=True) + + logger.info("Syncing directories...") + + for root, dirs, files in os.walk(legacy_page_dir): + root = Path(root) + relative_path = root.relative_to(legacy_page_dir) + dst_dir = new_page_dir / relative_path + # Create directories in destination + for dir_name in dirs: + (dst_dir / dir_name).mkdir(parents=True, exist_ok=True) + # Copy files to destination + for fname in files: + src_file = root / fname + dst_file = dst_dir / fname + copy2(src_file, dst_file) + + generate(webpage_template_dir, str(legacy_page_dir / "html"), str(md_source_dir)) + + SPHINXBUILD = os.getenv("SPHINXBUILD", "sphinx-build") + SPHINXOPTS = os.getenv("SPHINXOPTS") + + logger.info("Building the HTML documentation using Sphinx...") + + cmd = [SPHINXBUILD, "-M", "html", str(md_source_dir), str(new_page_dir)] + + if SPHINXOPTS: + cmd.append(SPHINXOPTS) + + subprocess.run(cmd, cwd=legacy_page_dir.parent, check=True) + + update_style(new_page_dir) + + if not page_url: + page_url = "." 
+ else: + page_url = page_url.rstrip("//") + + env = Environment(loader=FileSystemLoader(template_dir)) + redirect = env.get_template("redirect.html").render(page_url=page_url) + + with open(new_page_dir / "index.html", "w") as f: + print(redirect, file=f) diff --git a/tools/SiteSpawner/styles/assets/chips-alliance-logo-mono.svg b/tools/SiteSpawner/styles/assets/chips-alliance-logo-mono.svg new file mode 100644 index 00000000000..90fa7a27697 --- /dev/null +++ b/tools/SiteSpawner/styles/assets/chips-alliance-logo-mono.svg @@ -0,0 +1,22 @@ + + + diff --git a/tools/SiteSpawner/styles/cov.css b/tools/SiteSpawner/styles/cov.css new file mode 100644 index 00000000000..60dcbb498ab --- /dev/null +++ b/tools/SiteSpawner/styles/cov.css @@ -0,0 +1,642 @@ +/* All views: initial background and text color */ +@import url('https://fonts.googleapis.com/css2?family=Roboto:wght@400;700&display=swap'); + +body { + color: #E9EBFA; + background-color: #0E1116; + padding: 0; + margin: 0; + font-family: 'Roboto', sans-serif; + box-sizing: border-box; + font-size: 16px; +} + +/* All views: standard link format*/ +a:link { + color: #00D0C9; + text-decoration: underline; + font-family: 'Roboto', sans-serif; +} + +/* All views: standard link - visited format */ +a:visited { + color: #E9EBFA; + text-decoration: underline; +} + +/* All views: standard link - activated format */ +a:active { + color: #00D0C9; + color: #E9EBFA; + text-decoration: underline; +} + +center { + padding: 95px; +} + +th { + border: 1px solid; +} + +td { + color: #E9EBFA; + align-items: center; +} + +/* Enable border on all centered tables aside from the info table */ +body>center>table:not(.info-table) td:not(.coverBarOutline) { + border: 1px solid #31363C; +} + +body>center>table:not(.info-table) tr:not(.covDescHeader):hover { + background-color: #8B8D8E; +} + +table { + border-collapse: collapse; + width: 100%; +} + +/* Background for the title navbar */ +table.title-header-shadow { + border: 0; + background-color: #25292E; + color: #DFE1F1; + padding-bottom: 10px; + padding: 95px 0; +} + +/* Cell containing the table with logo & title */ +td.title-container { + width: auto; + max-width: 61rem; + display: flex; + padding: 0 .2rem; + align-items: center; + margin-left: 95px; + margin-right: 95px; +} + +/* Table with logo & title */ +table.title-header { + color: #DFE1F1; + text-align: center; + align-items: left; +} + +/* Logo on the top navbar */ +td.title-logo { + padding: .4rem; + width: 10%; +} + +/* Title on the top navbar */ +td.title { + font-size: 20px; + font-weight: bold; + text-align: left; + padding: 1.2rem; +} + +/* Info table (Test metadata) */ +table.info-table { + border: 1px; + align-items: center; + padding: 0 95px; + font-size: 20px; + border-color: #DFE1F1; +} + +tr.info-table-view { + font-size: 35px; +} + +td.headerInfo { + text-align: right; + padding-right: 6px; + + font-weight: bold; + white-space: nowrap; +} + +td.headerInfoValue { + text-align: left; + color: #00D0C9; + + font-weight: bold; + white-space: nowrap; +} + +td.headerCovSummary { + color: #DFE1F1; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-size: 20px; + font-weight: 500; + white-space: nowrap; +} + +td.headerCovSummary.rowLeft { + text-align: right; +} + +td.headerCovSummary.colTop { + text-align: center; +} + +td.headerCovSummaryEntry { + border-radius: 6px; + text-align: right; + color: #DFE1F1; + text-align: center; + background-color: #31363C; + font-weight: bold; + white-space: nowrap; + padding-left: 12px; + 
padding-right: 4px; + font-size: 16px; + border-color: #DFE1F1; +} + +tr.covDescHeader { + color: #DFE1F1; + text-align: center; + font-size: 20px; + white-space: nowrap; +} + +td.headerCovDesc { + padding-left: 6px; + padding-right: 6px; + padding-bottom: 0px; + font-weight: 500; +} + +td.headerCovSubDesc { + padding-left: 12px; + padding-right: 4px; + border-color: #DFE1F1; + font-weight: 200; +} + +/* All views: color of horizontal ruler */ +td.ruler>img { + height: 1px; + width: 100%; + background-color: rgba(255, 255, 255, 0.3); + aspect-ratio: 1 / 1; +} + +/* Cleanup required */ + +/* Source code view/table entry background: format for lines classified as "Uncovered New Code (+ => 0): +Newly added code is not tested" */ +td.tlaUNC { + text-align: right; + background-color: #FF6230; +} + +td.tlaBgUNC { + background-color: #FF6230; +} + +/* Source code view/table entry background: format for lines classified as "Uncovered New Code (+ => 0): +Newly added code is not tested" */ +span.tlaUNC { + text-align: left; + background-color: #FF6230; +} + +span.tlaBgUNC { + background-color: #FF6230; +} + +a.tlaBgUNC { + background-color: #FF6230; + color: #000000; +} + +td.headerCovTableHeadUNC { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #FF6230; +} + +/* Source code view/table entry background: format for lines classified as "Lost Baseline Coverage (1 => 0): +Unchanged code is no longer tested" */ +td.tlaLBC { + text-align: right; + background-color: #FF6230; +} + +td.tlaBgLBC { + background-color: #FF6230; +} + +/* Source code view/table entry background: format for lines classified as "Lost Baseline Coverage (1 => 0): +Unchanged code is no longer tested" */ +span.tlaLBC { + text-align: left; + background-color: #FF6230; +} + +span.tlaBgLBC { + background-color: #FF6230; +} + +a.tlaBgLBC { + background-color: #FF6230; + color: #000000; +} + +td.headerCovTableHeadLBC { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #FF6230; +} + +/* Source code view/table entry background: format for lines classified as "Uncovered Included Code (# => 0): +Previously unused code is untested" */ +td.tlaUIC { + text-align: right; + background-color: #FF6230; +} + +td.tlaBgUIC { + background-color: #FF6230; +} + +/* Source code view/table entry background: format for lines classified as "Uncovered Included Code (# => 0): +Previously unused code is untested" */ +span.tlaUIC { + text-align: left; + background-color: #FF6230; +} + +span.tlaBgUIC { + background-color: #FF6230; +} + +a.tlaBgUIC { + background-color: #FF6230; + color: #000000; +} + +td.headerCovTableHeadUIC { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #FF6230; +} + +/* Source code view/table entry background: format for lines classified as "Uncovered Baseline Code (0 => 0): +Unchanged code was untested before, is untested now" */ +td.tlaUBC { + text-align: right; + background-color: #FF6230; +} + +td.tlaBgUBC { + background-color: #FF6230; +} + +/* Source code view/table entry background: format for lines classified as "Uncovered Baseline Code (0 => 0): +Unchanged code was untested before, is untested now" */ +span.tlaUBC { + text-align: left; + background-color: #FF6230; +} + 
+span.tlaBgUBC { + background-color: #FF6230; +} + +a.tlaBgUBC { + background-color: #FF6230; + color: #000000; +} + +td.headerCovTableHeadUBC { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #FF6230; +} + +/* Source code view/table entry background: format for lines classified as "Gained Baseline Coverage (0 => 1): +Unchanged code is tested now" */ +td.tlaGBC { + text-align: right; + background-color: #8B8D8E; +} + +td.tlaBgGBC { + background-color: #8B8D8E; +} + +/* Source code view/table entry background: format for lines classified as "Gained Baseline Coverage (0 => 1): +Unchanged code is tested now" */ +span.tlaGBC { + text-align: left; + background-color: #8B8D8E; +} + +span.tlaBgGBC { + background-color: #8B8D8E; +} + +a.tlaBgGBC { + background-color: #8B8D8E; + color: #000000; +} + +td.headerCovTableHeadGBC { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #8B8D8E; +} + +/* Source code view/table entry background: format for lines classified as "Gained Included Coverage (# => 1): +Previously unused code is tested now" */ +td.tlaGIC { + text-align: right; + background-color: #8B8D8E; +} + +td.tlaBgGIC { + background-color: #8B8D8E; +} + +/* Source code view/table entry background: format for lines classified as "Gained Included Coverage (# => 1): +Previously unused code is tested now" */ +span.tlaGIC { + text-align: left; + background-color: #8B8D8E; +} + +span.tlaBgGIC { + background-color: #8B8D8E; +} + +a.tlaBgGIC { + background-color: #8B8D8E; + color: #000000; +} + +td.headerCovTableHeadGIC { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #8B8D8E; +} + +/* Source code view/table entry background: format for lines classified as "Gained New Coverage (+ => 1): +Newly added code is tested" */ +td.tlaGNC { + text-align: right; + background-color: #8B8D8E; +} + +td.tlaBgGNC { + background-color: #8B8D8E; +} + +/* Source code view/table entry background: format for lines classified as "Gained New Coverage (+ => 1): +Newly added code is tested" */ +span.tlaGNC { + text-align: left; + background-color: #8B8D8E; +} + +span.tlaBgGNC { + background-color: #8B8D8E; +} + +a.tlaBgGNC { + background-color: #8B8D8E; + color: #000000; +} + +td.headerCovTableHeadGNC { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #8B8D8E; +} + +/* Source code view/table entry background: format for lines classified as "Covered Baseline Code (1 => 1): +Unchanged code was tested before and is still tested" */ +td.tlaCBC { + text-align: right; + background-color: #8B8D8E; +} + +td.tlaBgCBC { + background-color: #8B8D8E; +} + +/* Source code view/table entry background: format for lines classified as "Covered Baseline Code (1 => 1): +Unchanged code was tested before and is still tested" */ +span.tlaCBC { + text-align: left; + background-color: #8B8D8E; +} + +span.tlaBgCBC { + background-color: #8B8D8E; +} + +a.tlaBgCBC { + background-color: #8B8D8E; + color: #000000; +} + +td.headerCovTableHeadCBC { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: 
nowrap; + background-color: #8B8D8E; +} + +/* Source code view/table entry background: format for lines classified as "Excluded Uncovered Baseline (0 => #): +Previously untested code is unused now" */ +td.tlaEUB { + text-align: right; + background-color: #FFFFFF; +} + +td.tlaBgEUB { + background-color: #FFFFFF; +} + +/* Source code view/table entry background: format for lines classified as "Excluded Uncovered Baseline (0 => #): +Previously untested code is unused now" */ +span.tlaEUB { + text-align: left; + background-color: #FFFFFF; +} + +span.tlaBgEUB { + background-color: #FFFFFF; +} + +a.tlaBgEUB { + background-color: #FFFFFF; + color: #000000; +} + +td.headerCovTableHeadEUB { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #FFFFFF; +} + +/* Source code view/table entry background: format for lines classified as "Excluded Covered Baseline (1 => #): +Previously tested code is unused now" */ +td.tlaECB { + text-align: right; + background-color: #FFFFFF; +} + +td.tlaBgECB { + background-color: #FFFFFF; +} + +/* Source code view/table entry background: format for lines classified as "Excluded Covered Baseline (1 => #): +Previously tested code is unused now" */ +span.tlaECB { + text-align: left; + background-color: #FFFFFF; +} + +span.tlaBgECB { + background-color: #FFFFFF; +} + +a.tlaBgECB { + background-color: #FFFFFF; + color: #000000; +} + +td.headerCovTableHeadECB { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #FFFFFF; +} + +/* Source code view/table entry background: format for lines classified as "Deleted Uncovered Baseline (0 => -): +Previously untested code has been deleted" */ +td.tlaDUB { + text-align: right; + background-color: #FFFFFF; +} + +td.tlaBgDUB { + background-color: #FFFFFF; +} + +/* Source code view/table entry background: format for lines classified as "Deleted Uncovered Baseline (0 => -): +Previously untested code has been deleted" */ +span.tlaDUB { + text-align: left; + background-color: #FFFFFF; +} + +span.tlaBgDUB { + background-color: #FFFFFF; +} + +a.tlaBgDUB { + background-color: #FFFFFF; + color: #000000; +} + +td.headerCovTableHeadDUB { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #FFFFFF; +} + +/* Source code view/table entry background: format for lines classified as "Deleted Covered Baseline (1 => -): +Previously tested code has been deleted" */ +td.tlaDCB { + text-align: right; + background-color: #FFFFFF; +} + +td.tlaBgDCB { + background-color: #FFFFFF; +} + +/* Source code view/table entry background: format for lines classified as "Deleted Covered Baseline (1 => -): +Previously tested code has been deleted" */ +span.tlaDCB { + text-align: left; + background-color: #FFFFFF; +} + +span.tlaBgDCB { + background-color: #FFFFFF; +} + +a.tlaBgDCB { + background-color: #FFFFFF; + color: #000000; +} + +td.headerCovTableHeadDCB { + text-align: center; + padding-right: 6px; + padding-left: 6px; + padding-bottom: 0px; + font-family: 'Roboto', sans-serif; + white-space: nowrap; + background-color: #FFFFFF; +} + +/* Source code view: format for date/owner bin that is not hit */ +span.missBins { + background-color: #ff0000 + /* red */ +} \ No newline at end of file diff --git 
a/tools/SiteSpawner/styles/main.css b/tools/SiteSpawner/styles/main.css new file mode 100644 index 00000000000..cd211b1927d --- /dev/null +++ b/tools/SiteSpawner/styles/main.css @@ -0,0 +1,20 @@ +[data-md-color-scheme="slate"] { + --md-hue: 218; + --md-default-bg-color: hsla(var(--md-hue), 22%, 7%, 1); +} + +[data-md-color-primary="teal"] { + --md-primary-fg-color: #25292e; +} + +[data-md-color-scheme="slate"][data-md-color-primary="teal"] { + --md-typeset-a-color: #00d0c9; +} + +.md-social { + display: none; +} + +.md-header__option { + display: none; +} \ No newline at end of file diff --git a/tools/SiteSpawner/template/coverage_report/coverage_report.html b/tools/SiteSpawner/template/coverage_report/coverage_report.html new file mode 100644 index 00000000000..246f1e803c1 --- /dev/null +++ b/tools/SiteSpawner/template/coverage_report/coverage_report.html @@ -0,0 +1,75 @@ + + + + + + + {{ header_token }} + coverage report + + + + + + + + +
+ + + +
+ Caliptra RTL + {{ header_token }} + coverage report +
+
+
+ + + + + + + + + + + + + + + {{ toggle_summary_token }} + + + + + + {{ branch_summary_token }} + + {{ functional_summary_token }} +
Current view: + {{ root_name }}{% if path_segments %}—{{ path_segments | join('—') }}{% endif %} + CoverageHitTotal
Test Date: + {{ time_token }} +
Test: + {{ testname_token }} +
+
+
+ {{ fulltable_token }} +
+ + + \ No newline at end of file diff --git a/tools/SiteSpawner/template/coverage_report/main_table.html b/tools/SiteSpawner/template/coverage_report/main_table.html new file mode 100644 index 00000000000..8cee414b605 --- /dev/null +++ b/tools/SiteSpawner/template/coverage_report/main_table.html @@ -0,0 +1,68 @@ + + + + + {% for key in cov_types %} + + {% endfor %} + + + + + {% for key in cov_types %} + + + {% endfor %} + + + + {% for file, cov_data in data.items() %} + + + + {% for key, numbers in cov_data.items() %} + {% set cov_color = get_color(numbers[0], numbers[1]) %} + {% if numbers[1] != 0 %} + {% set frac = (numbers[0]|float) / (numbers[1]|float) * 100 %} + {% else %} + {% set frac = 0 %} + {% endif %} + + + + + + + {% endfor %} + + {% endfor %} +
{{ key|capitalize }}
SourceRateHit / Total
+ {% if links %} + + {{ file }} + + {% else %} + {{ file }} + {% endif %} + +
+ {% if frac > 5 %} + {% set w = frac %} + {% else %} + {% set w = 5 %} + {% endif %} + +
  +
+
+
+ {% if numbers[0] == 0 %} + {{ "——" }} + {% else %} + {{ '%0.1f' % frac }}% + {% endif %} + + {{ numbers[0] }} + / + {{ numbers[1] }} +
\ No newline at end of file diff --git a/tools/SiteSpawner/template/coverage_report/src_view.html b/tools/SiteSpawner/template/coverage_report/src_view.html new file mode 100644 index 00000000000..fe29e51e423 --- /dev/null +++ b/tools/SiteSpawner/template/coverage_report/src_view.html @@ -0,0 +1,75 @@ + + + + + + + {{ header_token }} + coverage report + + + + + + + + +
+ + + +
+ Caliptra RTL + {{ header_token }} + coverage report +
+
+
+ + + + + + + + + + + + + + + {{ toggle_summary_token }} + + + + + + {{ branch_summary_token }} + + {{ functional_summary_token }} +
Current view: + {{ root_name }}{% if path_segments %}—{{ path_segments | join('—') }}{% endif + %} + CoverageHitTotal
Test Date: + {{ time_token }} +
Test: + {{ testname_token }} +
+
+ + {{ src_file_table }} + + + \ No newline at end of file diff --git a/tools/SiteSpawner/template/coverage_report/summary_table.html b/tools/SiteSpawner/template/coverage_report/summary_table.html new file mode 100644 index 00000000000..e4f5152e4c1 --- /dev/null +++ b/tools/SiteSpawner/template/coverage_report/summary_table.html @@ -0,0 +1,21 @@ +{% if new_row %} + + + + +{% endif %} + + {{ cov_type_token|capitalize }} + + + {{ hitrate_token }} + + + {{ hit_token }} + + + {{ total_token }} + +{% if new_row %} + +{% endif %} \ No newline at end of file diff --git a/tools/SiteSpawner/template/redirect.html b/tools/SiteSpawner/template/redirect.html new file mode 100644 index 00000000000..3939ec300b1 --- /dev/null +++ b/tools/SiteSpawner/template/redirect.html @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/tools/SiteSpawner/template/webpage/conf.py b/tools/SiteSpawner/template/webpage/conf.py new file mode 100644 index 00000000000..d755c9d06ab --- /dev/null +++ b/tools/SiteSpawner/template/webpage/conf.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. +# +# Updated documentation of the configuration options is available at +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +from datetime import datetime + +from antmicro_sphinx_utils.defaults import ( + extensions as default_extensions, + myst_enable_extensions as default_myst_enable_extensions, + antmicro_html, +) + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ----------------------------------------------------- + +# General information about the project. +project = "Caliptra RTL" +basic_filename = "caliptra-rtl-coverage-reports" +authors = "CHIPS Alliance" +copyright = f"{authors}, {datetime.now().year}" + +# The short X.Y version. +version = "" +# The full version, including alpha/beta/rc tags. 
+release = "" + +# Temporary; Remove once the clash between myst-parser and immaterial is fixed +sphinx_immaterial_override_builtin_admonitions = False + +numfig = True + +# If you need to add extensions just add to those lists +extensions = default_extensions +myst_enable_extensions = default_myst_enable_extensions + +myst_substitutions = {"project": project} + +myst_url_schemes = { + "http": None, + "https": None, + "external": "{{path}}", +} + +today_fmt = "%Y-%m-%d" + +todo_include_todos = False + +# -- Options for HTML output --------------------------------------------------- + +html_theme = "sphinx_immaterial" + +html_last_updated_fmt = today_fmt + +html_show_sphinx = False + +(html_logo, html_theme_options, html_context) = antmicro_html() + + +html_theme_options["palette"][0].update( + { + "scheme": "slate", + "primary": "teal", + "accent": "white", + } +) + +# # Disable toggle theme button +# html_theme_options = { +# "palette": [] +# } + +html_title = project + + +def setup(app): + app.add_css_file("main.css") diff --git a/tools/SiteSpawner/template/webpage/coverage_dashboard.md b/tools/SiteSpawner/template/webpage/coverage_dashboard.md new file mode 100644 index 00000000000..769ff4525aa --- /dev/null +++ b/tools/SiteSpawner/template/webpage/coverage_dashboard.md @@ -0,0 +1,24 @@ +({{ ref }})= +# Coverage dashboard + +## Summary reports (all tests) + +{%- for coverage in summary %} +{%- if summary[coverage] %} + * [{{ coverage }} coverage](external:coverage_dashboard/{{ summary[coverage] }}/index.html) +{%- else %} + * {{ coverage }} coverage (no data) +{%- endif %} +{%- endfor %} + +## Individual test reports + +{%- for coverage in individual %} +{%- if individual[coverage] %} +{%- for test in individual[coverage] %} + * [{{ test }}](external:coverage_dashboard/{{ individual[coverage][test] }}/index.html) +{%- endfor %} +{%- else %} +no data +{%- endif %} +{%- endfor %} diff --git a/tools/SiteSpawner/template/webpage/dev.md b/tools/SiteSpawner/template/webpage/dev.md new file mode 100644 index 00000000000..9ae49bf93bd --- /dev/null +++ b/tools/SiteSpawner/template/webpage/dev.md @@ -0,0 +1,7 @@ +# Active pull requests + +{%- for branch in branches %} + * {{ branch }} + * [Coverage]({{ branch }}_coverage_dashboard) + +{%- endfor %} diff --git a/tools/SiteSpawner/template/webpage/index.md b/tools/SiteSpawner/template/webpage/index.md new file mode 100644 index 00000000000..800fc84abad --- /dev/null +++ b/tools/SiteSpawner/template/webpage/index.md @@ -0,0 +1,8 @@ +# {{ project }} + +```{toctree} +:maxdepth: 2 + +main +dev +``` diff --git a/tools/SiteSpawner/template/webpage/main.md b/tools/SiteSpawner/template/webpage/main.md new file mode 100644 index 00000000000..3697a640c7e --- /dev/null +++ b/tools/SiteSpawner/template/webpage/main.md @@ -0,0 +1,4 @@ +# Main branch + + * [Coverage](main_coverage_dashboard) + \ No newline at end of file From 6f0cd269299ca5bf1d890e250fc40e0b62823ede Mon Sep 17 00:00:00 2001 From: Mateusz Leonowicz Date: Tue, 20 Aug 2024 14:41:32 +0200 Subject: [PATCH 2/7] .github: Update workflows to use SIS package Internal-tag: [#63639] Signed-off-by: Mateusz Leonowicz --- .github/workflows/gh-pages-pr-remove.yml | 10 +++++++++- .github/workflows/publish-webpage.yml | 12 ++++++++++-- .github/workflows/report-coverage.yml | 12 ++++++++++-- .github/workflows/test-openocd.yml | 11 +++++++++-- .github/workflows/test-regression.yml | 11 +++++++++-- .github/workflows/test-riscof.yml | 11 +++++++++-- .github/workflows/test-riscv-dv.yml | 11 +++++++++-- 
.github/workflows/test-uarch.yml | 11 +++++++++-- .github/workflows/test-verification.yml | 11 +++++++++-- 9 files changed, 83 insertions(+), 17 deletions(-) diff --git a/.github/workflows/gh-pages-pr-remove.yml b/.github/workflows/gh-pages-pr-remove.yml index 104de169e3b..e6bbe3874fc 100644 --- a/.github/workflows/gh-pages-pr-remove.yml +++ b/.github/workflows/gh-pages-pr-remove.yml @@ -21,6 +21,14 @@ jobs: - name: Setup repository uses: actions/checkout@v3 + - name: install SiteSpawner package + run: | + python3 -m venv .venv + .venv/bin/python3 -m pip install tools/SiteSpawner + source .venv/bin/activate + echo "PATH=$PATH" >> $GITHUB_ENV + python3 -m pip install tools/SiteSpawner + - name: Download deployment uses: actions/checkout@v3 with: @@ -45,7 +53,7 @@ jobs: run: | rm -rf ${{ env.ROOT_DIR }}/html/dev/${{ steps.PR.outputs.number }} rm -rf ${{ env.ROOT_DIR }}/doctrees/dev/${{ steps.PR.outputs.number }} - .github/scripts/update_webpage.sh ${{ github.ref_name }} ${{ github.event_name }} ${{ steps.PR.outputs.number }} + sis -d webpage --loc-github-ref-name ${{ github.ref_name }} --loc-github-event-name ${{ github.event_name }} --pr-number ${{ steps.PR.outputs.number }} - name: Add redirect index page run: | diff --git a/.github/workflows/publish-webpage.yml b/.github/workflows/publish-webpage.yml index d469a4ff84b..5b5530be91a 100644 --- a/.github/workflows/publish-webpage.yml +++ b/.github/workflows/publish-webpage.yml @@ -18,6 +18,14 @@ jobs: - name: Setup python uses: actions/setup-python@v4 + - name: install SiteSpawner package + run: | + python3 -m venv .venv + .venv/bin/python3 -m pip install tools/SiteSpawner + source .venv/bin/activate + echo "PATH=$PATH" >> $GITHUB_ENV + python3 -m pip install tools/SiteSpawner + - name: Print metadata run: | run_information="Repository: ${{ github.repository }} Commit SHA:${{ github.sha }} Workflow: ${{ github.workflow }} Run:${{ github.run_id }}" @@ -62,12 +70,12 @@ jobs: - name: Update webpage if: github.event_name != 'pull_request' run: | - .github/scripts/update_webpage.sh ${{ github.ref_name }} ${{ github.event_name }} 0 + sis -d webpage --loc-github-ref-name ${{ github.ref_name }} --loc-github-event-name ${{ github.event_name }} --pr-number 0 - name: Update webpage PR if: github.event_name == 'pull_request' run: | - .github/scripts/update_webpage.sh ${{ github.ref_name }} ${{ github.event_name }} ${{ github.event.number }} + sis -d webpage --loc-github-ref-name ${{ github.ref_name }} --loc-github-event-name ${{ github.event_name }} --pr-number ${{ github.event.number }} - name: Add redirect index page run: | diff --git a/.github/workflows/report-coverage.yml b/.github/workflows/report-coverage.yml index e030b515000..40a17b25f52 100644 --- a/.github/workflows/report-coverage.yml +++ b/.github/workflows/report-coverage.yml @@ -22,6 +22,14 @@ jobs: - name: Setup repository uses: actions/checkout@v3 + - name: install SiteSpawner package + run: | + python3 -m venv .venv + .venv/bin/python3 -m pip install tools/SiteSpawner + source .venv/bin/activate + echo "PATH=$PATH" >> $GITHUB_ENV + python3 -m pip install tools/SiteSpawner + # This step is needed to have the same VeeR codebase as used in tests - name: Configure VeeR run: | @@ -32,7 +40,7 @@ jobs: run: | git clone https://github.com/linux-test-project/lcov pushd lcov - git checkout v1.16 + git checkout v2.1 echo "LCOV_PATH=`realpath bin`" >> "$GITHUB_ENV" popd @@ -75,7 +83,7 @@ jobs: - name: Generate reports run: | export PATH=${{ env.LCOV_PATH }}:${PATH} - bash 
.github/scripts/gen_coverage_reports.sh + sis -d reports . --report-dir report --src-pattern \*design\* - name: Pack artifacts if: always() diff --git a/.github/workflows/test-openocd.yml b/.github/workflows/test-openocd.yml index 0f5ea30517d..6b976189ade 100644 --- a/.github/workflows/test-openocd.yml +++ b/.github/workflows/test-openocd.yml @@ -71,6 +71,14 @@ jobs: with: submodules: recursive + - name: install SiteSpawner package + run: | + python3 -m venv .venv + .venv/bin/python3 -m pip install tools/SiteSpawner + source .venv/bin/activate + echo "PATH=$PATH" >> $GITHUB_ENV + python3 -m pip install tools/SiteSpawner + - name: Build verilated simulation run: | export PATH=/opt/verilator/bin:/opt/openocd/bin:$PATH @@ -87,8 +95,7 @@ jobs: run: | export PATH=/opt/verilator/bin:$PATH export RV_ROOT=$(pwd) - .github/scripts/convert_coverage_data.sh ${RV_ROOT}/run - echo "convert_coverage_data.sh exited with RET_CODE = "$? + sis -d convert --dat-dir ${RV_ROOT}/run mkdir -p results mv ${RV_ROOT}/run/coverage.info \ results/coverage_openocd_${{ matrix.bus }}_${{ matrix.coverage }}.info diff --git a/.github/workflows/test-regression.yml b/.github/workflows/test-regression.yml index af957a9d9fa..f481aa1881d 100644 --- a/.github/workflows/test-regression.yml +++ b/.github/workflows/test-regression.yml @@ -74,6 +74,14 @@ jobs: with: submodules: recursive + - name: install SiteSpawner package + run: | + python3 -m venv .venv + .venv/bin/python3 -m pip install tools/SiteSpawner + source .venv/bin/activate + echo "PATH=$PATH" >> $GITHUB_ENV + python3 -m pip install tools/SiteSpawner + - name: Setup environment run: | echo "/opt/verilator/bin" >> $GITHUB_PATH @@ -92,8 +100,7 @@ jobs: - name: Prepare coverage data run: | - .github/scripts/convert_coverage_data.sh ${TEST_PATH}/ - echo "convert_coverage_data.sh exited with RET_CODE = "$? + sis -d convert --dat-dir ${TEST_PATH}/ mkdir -p results mv ${TEST_PATH}/coverage.info \ results/coverage_${{ matrix.bus }}_${{ matrix.test }}_${{ matrix.coverage }}.info diff --git a/.github/workflows/test-riscof.yml b/.github/workflows/test-riscof.yml index 8a4df8e24e3..7a104aab644 100644 --- a/.github/workflows/test-riscof.yml +++ b/.github/workflows/test-riscof.yml @@ -96,6 +96,14 @@ jobs: with: submodules: recursive + - name: install SiteSpawner package + run: | + python3 -m venv .venv + .venv/bin/python3 -m pip install tools/SiteSpawner + source .venv/bin/activate + echo "PATH=$PATH" >> $GITHUB_ENV + python3 -m pip install tools/SiteSpawner + - name: Install RISCOF run: | pip3 install git+https://github.com/riscv/riscof@a25e315 @@ -146,8 +154,7 @@ jobs: - name: Prepare coverage data run: | export PATH=/opt/verilator/bin:$PATH - .github/scripts/convert_coverage_data.sh riscof/coverage/ - echo "convert_coverage_data.sh exited with RET_CODE = "$? 
+ sis -d convert --dat-dir riscof/coverage/ mv riscof/coverage/coverage.info \ riscof/coverage/coverage_riscof_${{matrix.priv}}_${{ matrix.coverage }}.info diff --git a/.github/workflows/test-riscv-dv.yml b/.github/workflows/test-riscv-dv.yml index 8794baa649b..7c2b1d60d65 100644 --- a/.github/workflows/test-riscv-dv.yml +++ b/.github/workflows/test-riscv-dv.yml @@ -322,6 +322,14 @@ jobs: with: submodules: recursive + - name: install SiteSpawner package + run: | + python3 -m venv .venv + .venv/bin/python3 -m pip install tools/SiteSpawner + source .venv/bin/activate + echo "PATH=$PATH" >> $GITHUB_ENV + python3 -m pip install tools/SiteSpawner + - name: Install Python deps run: | pip install -r third_party/riscv-dv/requirements.txt @@ -382,8 +390,7 @@ jobs: - name: Prepare coverage data run: | - .github/scripts/convert_coverage_data.sh ${RV_ROOT}/tools/riscv-dv/work/ - echo "convert_coverage_data.sh exited with RET_CODE = "$? + sis -d convert --dat-dir ${RV_ROOT}/tools/riscv-dv/work/ mkdir -p results mv ${RV_ROOT}/tools/riscv-dv/work/coverage.info \ results/coverage_riscv-dv_${{matrix.priv}}_${{ matrix.test }}_${{ matrix.coverage }}.info diff --git a/.github/workflows/test-uarch.yml b/.github/workflows/test-uarch.yml index 485b20cc224..040c1db3f99 100644 --- a/.github/workflows/test-uarch.yml +++ b/.github/workflows/test-uarch.yml @@ -63,6 +63,14 @@ jobs: with: submodules: recursive + - name: install SiteSpawner package + run: | + python3 -m venv .venv + .venv/bin/python3 -m pip install tools/SiteSpawner + source .venv/bin/activate + echo "PATH=$PATH" >> $GITHUB_ENV + python3 -m pip install tools/SiteSpawner + - name: Setup Cache Metadata id: cache_metadata run: | @@ -146,8 +154,7 @@ jobs: - name: Prepare coverage data run: | export PATH=/opt/verilator/bin:$PATH - .github/scripts/convert_coverage_data.sh ${TEST_PATH}/${TEST_NAME}/ - echo "convert_coverage_data.sh exited with RET_CODE = "$? + sis -d convert --dat-dir ${TEST_PATH}/${TEST_NAME}/ mkdir -p results mv ${TEST_PATH}/${TEST_NAME}/*.info results/ diff --git a/.github/workflows/test-verification.yml b/.github/workflows/test-verification.yml index 63d8de3a879..39c334a7fa7 100644 --- a/.github/workflows/test-verification.yml +++ b/.github/workflows/test-verification.yml @@ -24,6 +24,14 @@ jobs: with: submodules: recursive + - name: install SiteSpawner package + run: | + python3 -m venv .venv + .venv/bin/python3 -m pip install tools/SiteSpawner + source .venv/bin/activate + echo "PATH=$PATH" >> $GITHUB_ENV + python3 -m pip install tools/SiteSpawner + - name: Setup Cache Metadata id: cache_metadata run: | @@ -114,8 +122,7 @@ jobs: - name: Prepare coverage data run: | export PATH=/opt/verilator/bin:$PATH - .github/scripts/convert_coverage_data.sh ${TEST_PATH}/coverage.dat - echo "convert_coverage_data.sh exited with RET_CODE = "$? 
+ sis -d convert --dat-dir ${TEST_PATH} mkdir -p results mv ${TEST_PATH}/coverage.info \ results/coverage_${{ matrix.test }}_${{ matrix.coverage }}.info From c75626c6c954045514fe181cda500a45e77b5bfc Mon Sep 17 00:00:00 2001 From: Mateusz Leonowicz Date: Tue, 20 Aug 2024 14:41:42 +0200 Subject: [PATCH 3/7] .github: Remove obsolete coverage related scripts Internal-tag: [#63639] Signed-off-by: Mateusz Leonowicz --- .github/scripts/convert_coverage_data.sh | 44 ----------- .github/scripts/gen_coverage_reports.sh | 70 ----------------- .github/scripts/update_webpage.sh | 95 ------------------------ 3 files changed, 209 deletions(-) delete mode 100755 .github/scripts/convert_coverage_data.sh delete mode 100755 .github/scripts/gen_coverage_reports.sh delete mode 100755 .github/scripts/update_webpage.sh diff --git a/.github/scripts/convert_coverage_data.sh b/.github/scripts/convert_coverage_data.sh deleted file mode 100755 index bcedde0cd9e..00000000000 --- a/.github/scripts/convert_coverage_data.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash - -SELF_DIR="$(dirname $(readlink -f ${BASH_SOURCE[0]}))" -. ${SELF_DIR}/common.inc.sh - -convert_coverage_data(){ - # This function uses verilator_coverage module to convert a coverage .dat - # file(s) into an .info file(s) for further processing. - # Args: - # DAT_DIR: path to dir containing coverage.dat file(s) - DAT_DIR="${1:-results_verification}" - echo -e "${COLOR_WHITE}======= Parse arguments =======${COLOR_CLEAR}" - echo -e "${COLOR_WHITE}DAT_DIR = ${DAT_DIR}" - echo -e "${COLOR_WHITE}===============================${COLOR_CLEAR}" - - # Function body - FILES=`find ${DAT_DIR} -name "coverage*.dat"` - if [ -z "$FILES" ]; then - echo -e "${COLOR_RED}ERROR: No coverage data files were found${COLOR_CLEAR}" - echo -e "${COLOR_RED}ERROR: Searched directory: `realpath ${DAT_DIR}`${COLOR_CLEAR}" - echo -e "${COLOR_RED}ERROR: convert_coverage_data ended with errors${COLOR_CLEAR}" - exit -1 - else - for dat_file in ${FILES}; do - info_file=`basename -s .dat ${dat_file}`.info - info_realpath=`realpath \`dirname ${dat_file}\`` - info_file=${info_realpath}/${info_file} - verilator_coverage --write-info ${info_file} ${dat_file} - echo -e "${COLOR_WHITE}Conversion: ${dat_file} -> ${info_file} ${COLOR_GREEN}SUCCEEDED${COLOR_CLEAR}" - done - fi -} - -# Example usage -# DAT_DIR="results_verification" -# -# convert_coverage_data $DAT_DIR - -echo -e "${COLOR_WHITE}========== convert_coverage_data ==============${COLOR_CLEAR}" - -convert_coverage_data "$@" - -echo -e "${COLOR_WHITE}convert_coverage_data ${COLOR_GREEN}SUCCEEDED${COLOR_CLEAR}" -echo -e "${COLOR_WHITE}========== convert_coverage_data ==============${COLOR_CLEAR}" diff --git a/.github/scripts/gen_coverage_reports.sh b/.github/scripts/gen_coverage_reports.sh deleted file mode 100755 index a5d107115e2..00000000000 --- a/.github/scripts/gen_coverage_reports.sh +++ /dev/null @@ -1,70 +0,0 @@ -#!/bin/bash - -#--------------# -# LCOV -#--------------# -generate_coverage_reports(){ - # This function creates... - # Args - # OUTPUT_DIR - directory, where index.html will be placed - # GIT_SHA - git revision - check_args_count $# 2 - OUTPUT_DIR=$1 - GENHTML_OPTS=$2 - echo -e "${COLOR_WHITE}========== generate_coverage_reports =========${COLOR_CLEAR}" - echo -e "${COLOR_WHITE}OUTPUT_DIR = ${OUTPUT_DIR}${COLOR_CLEAR}" - echo -e "${COLOR_WHITE}GENHTML_OPTS = ${GENHTML_OPTS}${COLOR_CLEAR}" - - for info_file in `find . 
-name '*.info'`; do - lcov --extract ${info_file} \*design\* -o ${info_file} - done - - for COVERAGE in branch toggle all functional; do - DIR=${OUTPUT_DIR}/${COVERAGE} - # Summary - mkdir -p ${DIR} - FILES=`find . -name "coverage_*_${COVERAGE}.info" -printf "%P\n"` - - if [ -z "$FILES" ]; then - echo -e "${COLOR_WHITE}There are no files for coverage ${COVERAGE} ${COLOR_YELLOW}WARNING${COLOR_CLEAR}" - else - # genhtml -o ${DIR} -t "all" ${GENHTML_OPTS} ${FILES} - genhtml -o ${DIR} -t "all" --header-title "RTL ${COVERAGE} coverage report" ${GENHTML_OPTS} ${FILES} - find ${DIR}/ -name "*.html" -exec sed -i "s/Line Coverage/${COVERAGE^} Coverage/g" {} + - - # Individual per-test - for FILE in ${FILES}; do - TEST=${FILE/coverage_/} - TEST=${TEST/_${COVERAGE}.info/} - - mkdir -p ${DIR}_${TEST} - # genhtml -o ${DIR}_${TEST} -t ${TEST} ${GENHTML_OPTS} ${FILE} - genhtml -o ${DIR}_${TEST} -t ${TEST} --header-title "RTL ${COVERAGE} coverage report" ${GENHTML_OPTS} ${FILE} - find ${DIR}_${TEST}/ -name "*.html" -exec sed -i "s/Line Coverage/${COVERAGE^} Coverage/g" {} + - done - fi - done -} - -#--------------# -# Script -#--------------# -SELF_DIR="$(dirname $(readlink -f ${BASH_SOURCE[0]}))" -. ${SELF_DIR}/common.inc.sh - -# Get revision -GIT_SHA=`git describe --always` -if [ $? -ne 0 ]; then - GIT_SHA="unknown" -fi -set -e -OUTPUT_DIR=report -mkdir -p ${OUTPUT_DIR} -GENHTML_OPTS="--no-function-coverage --no-source" - -echo -e "${COLOR_WHITE}========== gen_coverage_reports ==============${COLOR_CLEAR}" - -generate_coverage_reports ${OUTPUT_DIR} "${GENHTML_OPTS}" - -echo -e "${COLOR_WHITE}gen_coverage_reports ${COLOR_GREEN}SUCCEEDED${COLOR_CLEAR}" -echo -e "${COLOR_WHITE}========== gen_coverage_reports ==============${COLOR_CLEAR}" diff --git a/.github/scripts/update_webpage.sh b/.github/scripts/update_webpage.sh deleted file mode 100755 index a42c4cd5ec7..00000000000 --- a/.github/scripts/update_webpage.sh +++ /dev/null @@ -1,95 +0,0 @@ -#!/bin/bash - -SELF_DIR="$(dirname $(readlink -f ${BASH_SOURCE[0]}))" -. ${SELF_DIR}/common.inc.sh - -replace_dir(){ - # This function removes the destination dir, creates an empty destination directory, - # copies contents of source dir to destination dir. - # Args: - # SRC_DIR - valid path to source directory - # DST_DIR - valid path to destination directory - check_args_count $# 2 - SRC_DIR=$1 - DST_DIR=$2 - echo -e "${COLOR_WHITE}=========== replace_dir args ===========${COLOR_CLEAR}" - echo -e "${COLOR_WHITE}SRC_DIR = ${SRC_DIR}${COLOR_CLEAR}" - echo -e "${COLOR_WHITE}DST_DIR = ${DST_DIR}${COLOR_CLEAR}" - - if [ -d "${SRC_DIR}" ]; then - # Replace existing pages with new ones - rm -rf ${DST_DIR} - mkdir -p ${DST_DIR} - # Copy the new one - cp -arf ${SRC_DIR}/* ${DST_DIR} - else - echo -e "${COLOR_YELLOW}Source directory not present!${COLOR_CLEAR}" - fi -} - -update_webpage(){ - # This function updates the public part of the gh-pages, which contain - # coverage and verification reports. Different destination directory is - # selected based on git refs, github events and pr numbers - # Args: - # LOC_GITHUB_REF_NAME - use ${{ github.ref }} - # LOC_GITHUB_EVENT_NAME - use ${{ github.event_name }} - # PR_NUMBER - number of the PR, e.g. 
81 - check_args_count $# 3 - LOC_GITHUB_REF_NAME=$1 - LOC_GITHUB_EVENT_NAME=$2 - PR_NUMBER=$3 - echo -e "${COLOR_WHITE}========== update_webpage args =========${COLOR_CLEAR}" - echo -e "${COLOR_WHITE}LOC_GITHUB_REF_NAME = ${LOC_GITHUB_REF_NAME}" - echo -e "${COLOR_WHITE}LOC_GITHUB_EVENT_NAME = ${LOC_GITHUB_EVENT_NAME}" - echo -e "${COLOR_WHITE}PR_NUMBER = ${PR_NUMBER}" - - # Function body - # On main, deploy the main page - if [ ${LOC_GITHUB_REF_NAME} = 'main' ]; then - DIR=main - # On a PR, deploy to a PR subdirectory - elif [ ${LOC_GITHUB_EVENT_NAME} = 'pull_request' ]; then - DIR=dev/${PR_NUMBER} - # On a push, deploy to a branch subdirectory - elif [ ${LOC_GITHUB_EVENT_NAME} = 'push' ]; then - # If ref_name contains slash "/", replace it with underscore "_" - DIR=dev/${LOC_GITHUB_REF_NAME//\//_} - # Unknown - else - echo -e "${COLOR_WHITE}Unknown deployment type ${COLOR_RED}FAIL${COLOR_CLEAR}" - exit -1 - fi - PUBLIC_DIR=./public.old - - replace_dir ./coverage_dashboard ${PUBLIC_DIR}/html/${DIR}/coverage_dashboard - replace_dir ./verification_dashboard ${PUBLIC_DIR}/html/${DIR}/verification_dashboard - replace_dir ./docs_rendered ${PUBLIC_DIR}/html/${DIR}/docs_rendered - replace_dir ./riscof_dashboard ${PUBLIC_DIR}/html/${DIR}/verification_dashboard/riscof - - pushd .github/scripts/indexgen - python -m venv venv - source venv/bin/activate - pip install -r requirements.txt - popd - - - make -C .github/scripts/indexgen all ROOTDIR=`realpath ./public.old` BUILDDIR=`realpath ./public.new` - - echo -e "${COLOR_WHITE}Makefile exit status:$?${COLOR_CLEAR}" - echo -e "${COLOR_WHITE}================= tree =================${COLOR_CLEAR}" - tree -d -L 3 ./public.new/ - - echo -e "${COLOR_WHITE}Webpage update ${COLOR_GREEN}SUCCEEDED${COLOR_CLEAR}" - echo -e "${COLOR_WHITE}============ update_webpage ============${COLOR_CLEAR}" -} - -# Example usage -# mkdir -p coverage_dashboard verification_dashboard && touch coverage_dashboard/index.html verification_dashboard/index.html -# LOC_GITHUB_REF_NAME=mczyz/test -# LOC_GITHUB_EVENT_NAME=push -# PR_NUMBER=81 -# update_webpage ${LOC_GITHUB_REF_NAME} ${LOC_GITHUB_EVENT_NAME} ${PR_NUMBER} - -check_args_count $# 3 -update_webpage "$@" From 6a815a530f076349866ac7b6baef7f644f0e4469 Mon Sep 17 00:00:00 2001 From: Aleksander Kiryk Date: Thu, 12 Sep 2024 19:27:45 +0200 Subject: [PATCH 4/7] Upload documentation and add links --- .github/workflows/gh-pages-pr-remove.yml | 2 ++ .github/workflows/publish-webpage.yml | 4 ++++ tools/SiteSpawner/template/webpage/dev.md | 2 +- tools/SiteSpawner/template/webpage/main.md | 2 +- 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/gh-pages-pr-remove.yml b/.github/workflows/gh-pages-pr-remove.yml index e6bbe3874fc..f2ede98f8a8 100644 --- a/.github/workflows/gh-pages-pr-remove.yml +++ b/.github/workflows/gh-pages-pr-remove.yml @@ -53,6 +53,8 @@ jobs: run: | rm -rf ${{ env.ROOT_DIR }}/html/dev/${{ steps.PR.outputs.number }} rm -rf ${{ env.ROOT_DIR }}/doctrees/dev/${{ steps.PR.outputs.number }} + mkdir -p public.new/html/dev/${{ steps.PR.outputs.number }}/docs_rendered + cp -arf ./docs_rendered/* public.new/html/dev/${{ steps.PR.outputs.number }}/docs_rendered sis -d webpage --loc-github-ref-name ${{ github.ref_name }} --loc-github-event-name ${{ github.event_name }} --pr-number ${{ steps.PR.outputs.number }} - name: Add redirect index page diff --git a/.github/workflows/publish-webpage.yml b/.github/workflows/publish-webpage.yml index 5b5530be91a..b9dee36d848 100644 --- 
a/.github/workflows/publish-webpage.yml +++ b/.github/workflows/publish-webpage.yml @@ -70,11 +70,15 @@ jobs: - name: Update webpage if: github.event_name != 'pull_request' run: | + mkdir -p public.new/html/main/docs_rendered + cp -arf ./docs_rendered/* public.new/html/main/docs_rendered sis -d webpage --loc-github-ref-name ${{ github.ref_name }} --loc-github-event-name ${{ github.event_name }} --pr-number 0 - name: Update webpage PR if: github.event_name == 'pull_request' run: | + mkdir -p public.new/html/dev/${{ github.event.number }}/docs_rendered + cp -arf ./docs_rendered/* public.new/html/dev/${{ github.event.number }}/docs_rendered sis -d webpage --loc-github-ref-name ${{ github.ref_name }} --loc-github-event-name ${{ github.event_name }} --pr-number ${{ github.event.number }} - name: Add redirect index page diff --git a/tools/SiteSpawner/template/webpage/dev.md b/tools/SiteSpawner/template/webpage/dev.md index 9ae49bf93bd..d5810fb2891 100644 --- a/tools/SiteSpawner/template/webpage/dev.md +++ b/tools/SiteSpawner/template/webpage/dev.md @@ -3,5 +3,5 @@ {%- for branch in branches %} * {{ branch }} * [Coverage]({{ branch }}_coverage_dashboard) - + * [Documentation](external:dev/{{ branch }}/docs_rendered/html/index.html) {%- endfor %} diff --git a/tools/SiteSpawner/template/webpage/main.md b/tools/SiteSpawner/template/webpage/main.md index 3697a640c7e..c63b616ce28 100644 --- a/tools/SiteSpawner/template/webpage/main.md +++ b/tools/SiteSpawner/template/webpage/main.md @@ -1,4 +1,4 @@ # Main branch * [Coverage](main_coverage_dashboard) - \ No newline at end of file + * [Documentation](external:main/docs_rendered/html/index.html) From a01e747c536359b89a584c10892a84ed0479e5be Mon Sep 17 00:00:00 2001 From: Aleksander Kiryk Date: Fri, 13 Sep 2024 13:38:24 +0200 Subject: [PATCH 5/7] Bump SiteSpawner --- tools/SiteSpawner/src/sitespawner/__init__.py | 43 ++++++++++++++++- .../src/sitespawner/gen_coverage_report.py | 10 +++- tools/SiteSpawner/src/sitespawner/generate.py | 21 +++++---- tools/SiteSpawner/src/sitespawner/genhtml.py | 46 +++++++++++-------- .../src/sitespawner/update_webpage.py | 26 +++++++++-- .../coverage_report/coverage_report.html | 2 +- .../template/coverage_report/src_view.html | 2 +- tools/SiteSpawner/template/webpage/conf.py | 9 +++- tools/SiteSpawner/template/webpage/dev.md | 5 +- tools/SiteSpawner/template/webpage/main.md | 7 ++- 10 files changed, 130 insertions(+), 41 deletions(-) diff --git a/tools/SiteSpawner/src/sitespawner/__init__.py b/tools/SiteSpawner/src/sitespawner/__init__.py index d198008ff6f..4c35d47b35c 100644 --- a/tools/SiteSpawner/src/sitespawner/__init__.py +++ b/tools/SiteSpawner/src/sitespawner/__init__.py @@ -56,7 +56,12 @@ def reports_handler(args): def webpage_handler(args): update_webpage( - args.loc_github_ref_name, args.loc_github_event_name, args.pr_number, args.page_url + args.loc_github_ref_name, + args.loc_github_event_name, + args.pr_number, + args.doc_project_name, + args.include_documentation, + args.page_url, ) @@ -203,6 +208,15 @@ def setup_parser(): ), }, } + src_project_name = { + "name": "--src-project-name", + "options": { + "metavar": "src_project_name", + "type": str, + "default": "Project", + "help": ("Name of the project that is displayed in the webpage for sources."), + }, + } reports_args = [ logo_src, logo_href, @@ -211,6 +225,7 @@ def setup_parser(): src_path, info_report_dir, src_remove_pattern, + src_project_name, ] create_subparser( subparsers=subparsers, @@ -256,7 +271,31 @@ def setup_parser(): "help": "Base URL of the 
website. Otherwise, will apply relative reference for redirect.", }, } - webpage_args = [ref_name, event_name, pr_number, page_url] + doc_project_name = { + "name": "--doc-project-name", + "options": { + "metavar": "doc_project_name", + "type": str, + "default": "Project", + "help": ("Name of the project used in documentation."), + }, + } + include_documentation = { + "name": "--include-documentation", + "options": { + "action": "store_true", + "dest": "include_documentation", + "help": "Whethet to include documentation in the built webpage", + }, + } + webpage_args = [ + ref_name, + event_name, + pr_number, + page_url, + doc_project_name, + include_documentation, + ] create_subparser( subparsers=subparsers, name="webpage", diff --git a/tools/SiteSpawner/src/sitespawner/gen_coverage_report.py b/tools/SiteSpawner/src/sitespawner/gen_coverage_report.py index 4f39537e8e3..5fb088a8924 100644 --- a/tools/SiteSpawner/src/sitespawner/gen_coverage_report.py +++ b/tools/SiteSpawner/src/sitespawner/gen_coverage_report.py @@ -50,7 +50,7 @@ def lcov_genhtml( stdout=obtain_stdout(log_output_path), ) else: - command += ["--prefix", str(path_prefix)] + command += ["--prefix", str(Path(path_prefix).resolve())] subprocess.run( command, stdout=obtain_stdout(log_output_path), @@ -66,6 +66,7 @@ def generate_coverage_reports( logo_src=None, logo_href=None, info_report_dir=None, + project_name="Project", info_pattern="coverage*.info", ): """Iterates over available *.info files, merges them & generates summaries @@ -87,7 +88,7 @@ def generate_coverage_reports( logger.debug(f"Preprocessing {info_file}") lcov_extract_command = ["lcov", "--extract", info_file, src_pattern, "-o", info_file] - data, _ = parse_infos([str(info_file)]) + data = parse_infos([str(info_file)]) if len(data.keys()) == 0: logger.warning(f"No data found in .info file: {info_file}") continue @@ -166,6 +167,8 @@ def generate_coverage_reports( genhtml( input_files=input_files, output_dir=test_output_dir, + src_path=src_path, + project_name=project_name, test_name=test_name, logo_src=logo_src, logo_href=logo_href, @@ -204,6 +207,8 @@ def generate_coverage_reports( genhtml( input_files=merged_input_files, output_dir=final_output_dir, + src_path=src_path, + project_name=project_name, test_name="all", logo_src=logo_src, logo_href=logo_href, @@ -232,4 +237,5 @@ def main(args): logo_src=args.logo_src, logo_href=args.logo_href, info_report_dir=args.info_report_dir, + project_name=args.src_project_name, ) diff --git a/tools/SiteSpawner/src/sitespawner/generate.py b/tools/SiteSpawner/src/sitespawner/generate.py index 438f59ccef6..d7e927d4f4a 100755 --- a/tools/SiteSpawner/src/sitespawner/generate.py +++ b/tools/SiteSpawner/src/sitespawner/generate.py @@ -18,7 +18,7 @@ def render_template(src, dst, **kwargs): @args_on_debug_logger(logger) -def make_coverage_report_index(branch, root, output, templates): +def make_coverage_report_index(branch, root, output, templates, include_documentation): """Prepares coverage report index page.""" # Coverage types individual dashboards accumulate # Coverage dashboard displays coverage types side-by-side @@ -69,20 +69,22 @@ def make_coverage_report_index(branch, root, output, templates): @args_on_debug_logger(logger) -def make_dev_index(branches, output, templates): +def make_dev_index(branches, output, templates, include_documentation): """Prepares the branch/pr index page.""" - params = {"branches": branches} + params = {"branches": branches, "include_documentation": include_documentation} render_template(templates / 
"dev.md", output / "dev.md", **params) -def generate(template, root, output): +def generate(template, root, output, include_documentation): """Processes webpage *.md templates.""" template = Path(template) root = Path(root) output = Path(output) # Reports for the main branch - make_coverage_report_index("main", root / "main", output / "main", template) + make_coverage_report_index( + "main", root / "main", output / "main", template, include_documentation + ) # Reports for development branches / pull requests branches = [] @@ -97,13 +99,16 @@ def generate(template, root, output): fname = filepath.name branches.append(fname) make_coverage_report_index( - fname, root / "dev" / fname, output / "dev" / fname, template + fname, root / "dev" / fname, output / "dev" / fname, template, include_documentation ) # Prepare the branch/pr index page - make_dev_index(branches, output, template) + make_dev_index(branches, output, template, include_documentation) + render_template( + template / "main.md", output / "main.md", **{"include_documentation": include_documentation} + ) # Copy other files/pages - files = ["conf.py", "main.md", "index.md"] + files = ["conf.py", "index.md"] for file in files: copy(template / file, output / file) diff --git a/tools/SiteSpawner/src/sitespawner/genhtml.py b/tools/SiteSpawner/src/sitespawner/genhtml.py index 4be90dd7732..97f841bfa59 100644 --- a/tools/SiteSpawner/src/sitespawner/genhtml.py +++ b/tools/SiteSpawner/src/sitespawner/genhtml.py @@ -46,7 +46,6 @@ def parse_infos(input_files: List[str]): raise FileNotFoundError(f"Input file '{file}' does not exist.") data = defaultdict(defaultdict) - code_root_path = None for i in input_files: lines_found_sum = 0 @@ -73,7 +72,7 @@ def parse_infos(input_files: List[str]): lines_hit = None data["Total:"][module_name] = [lines_hit_sum, lines_found_sum] - return data, code_root_path + return data # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # @@ -136,6 +135,7 @@ def render_page( logo_src, logo_href, template_env, + project_name, links=True, ): """Combines the final report page.""" @@ -143,6 +143,7 @@ def render_page( output = report_html.render( header_token="Full", + project_name=project_name, logo_src=logo_src, logo_href=logo_href, fulltable_token=generate_table(data, template_env, links), @@ -171,21 +172,17 @@ def sub_src_view( html_src_dir, logo_src, logo_href, + project_name, template_env, ): """Generate page for the source file based on the view generated by lcov's genhtml.""" file = Path(file).resolve() - html_name = f"{file.name}.gcov.html" - if file.is_absolute(): - cmn_path = commonpath([file, src_prefix]) - inner_path = file.relative_to(cmn_path).parent - else: - inner_path = file.parent + inner_path = file.relative_to(src_prefix).parent + html_name = f"{file.name}.gcov.html" main_table = None - - src_html_path = Path(html_src_dir) / Path(inner_path) / html_name + src_html_path = Path(html_src_dir) / inner_path / html_name if not src_html_path.exists(): logger.warning(f"Not found: {src_html_path}") @@ -202,6 +199,7 @@ def sub_src_view( output = report_html.render( header_token="Full", + project_name=project_name, logo_src=logo_src, logo_href=logo_href, root_name=root_name, @@ -278,17 +276,26 @@ def unify_dict(data): @main_func_log(logger, "Generate HTML Coverage Report") @args_on_debug_logger(logger) -def genhtml(input_files, output_dir, test_name, html_src_dir, logo_src=None, logo_href=None): +def genhtml( + input_files, + src_path, + output_dir, + test_name, + 
html_src_dir, + project_name="Project", + logo_src=None, + logo_href=None, +): """Generates coverage dashboard from *.info files.""" if not Path(output_dir).is_dir(): raise FileNotFoundError(f"Output directory '{output_dir}' does not exist.") - data, code_root_path = parse_infos(input_files) + data = parse_infos(input_files) # The LCOV must be ran with '--list-full-path' so that the paths to sources # are not 'simplified' with '...'. - code_root_path = get_common_src_path(data.keys()).parent + code_root_path = Path(src_path).resolve().parent data = unify_dict(data) tld = generate_dir_dict(data, code_root_path) @@ -306,13 +313,14 @@ def genhtml(input_files, output_dir, test_name, html_src_dir, logo_src=None, log data=data[file], file=file, test_name=test_name, - root_name="caliptra-rtl", + root_name=code_root_path.name, path_segments=segments, - src_prefix=code_root_path, + src_prefix=Path(src_path).resolve(), out_dir=f"{output_dir}/index_{Path(file).name}.html", html_src_dir=html_src_dir, logo_src=logo_src, logo_href=logo_href, + project_name=project_name, template_env=template_env, ) @@ -322,12 +330,13 @@ def genhtml(input_files, output_dir, test_name, html_src_dir, logo_src=None, log subdata = generate_file_dict(data, Path(key), code_root_path) render_page( data=subdata, - root_name="caliptra-rtl", + root_name=code_root_path.name, path_segments=key.split("/"), out_dir=f"{output_dir}/index_{key.replace('/','_')}.html", test_name=test_name, logo_src=logo_src, logo_href=logo_href, + project_name=project_name, template_env=template_env, ) @@ -345,14 +354,13 @@ def genhtml(input_files, output_dir, test_name, html_src_dir, logo_src=None, log cov_data[test_type] = [0, 0] render_page( data=tld, - root_name="caliptra-rtl", + root_name=code_root_path.name, path_segments=["src"], out_dir=f"{output_dir}/index.html", test_name=test_name, logo_src=logo_src, logo_href=logo_href, template_env=template_env, + project_name=project_name, links=True, ) - - return code_root_path diff --git a/tools/SiteSpawner/src/sitespawner/update_webpage.py b/tools/SiteSpawner/src/sitespawner/update_webpage.py index c30d82f358b..fcb81cb975d 100644 --- a/tools/SiteSpawner/src/sitespawner/update_webpage.py +++ b/tools/SiteSpawner/src/sitespawner/update_webpage.py @@ -39,7 +39,14 @@ def replace_dir(src_dir, dst_dir): @main_func_log(logger, "Update webpage") @args_on_debug_logger(logger) -def update_webpage(loc_github_ref_name, loc_github_event_name, pr_number, page_url=None): +def update_webpage( + loc_github_ref_name, + loc_github_event_name, + pr_number, + project_name, + include_documentation, + page_url=None, +): """Updates the public part of the gh-pages based on git refs, github events, and PR numbers.""" # Determine the directory based on the GitHub ref and event if loc_github_ref_name == "main": @@ -78,14 +85,27 @@ def update_webpage(loc_github_ref_name, loc_github_event_name, pr_number, page_u dst_file = dst_dir / fname copy2(src_file, dst_file) - generate(webpage_template_dir, str(legacy_page_dir / "html"), str(md_source_dir)) + generate( + webpage_template_dir, + str(legacy_page_dir / "html"), + str(md_source_dir), + include_documentation=include_documentation, + ) SPHINXBUILD = os.getenv("SPHINXBUILD", "sphinx-build") SPHINXOPTS = os.getenv("SPHINXOPTS") logger.info("Building the HTML documentation using Sphinx...") - cmd = [SPHINXBUILD, "-M", "html", str(md_source_dir), str(new_page_dir)] + cmd = [ + SPHINXBUILD, + "-M", + "html", + str(md_source_dir), + str(new_page_dir), + "-D", + f"project={project_name}", + ] 
     if SPHINXOPTS:
         cmd.append(SPHINXOPTS)
 
diff --git a/tools/SiteSpawner/template/coverage_report/coverage_report.html b/tools/SiteSpawner/template/coverage_report/coverage_report.html
index 246f1e803c1..d103e88f482 100644
--- a/tools/SiteSpawner/template/coverage_report/coverage_report.html
+++ b/tools/SiteSpawner/template/coverage_report/coverage_report.html
@@ -28,7 +28,7 @@
-              Caliptra RTL
+              {{ project_name }}
               {{ header_token }} coverage report
diff --git a/tools/SiteSpawner/template/coverage_report/src_view.html b/tools/SiteSpawner/template/coverage_report/src_view.html
index fe29e51e423..d565fdae9ae 100644
--- a/tools/SiteSpawner/template/coverage_report/src_view.html
+++ b/tools/SiteSpawner/template/coverage_report/src_view.html
@@ -28,7 +28,7 @@
-              Caliptra RTL
+              {{ project_name }}
               {{ header_token }} coverage report
diff --git a/tools/SiteSpawner/template/webpage/conf.py b/tools/SiteSpawner/template/webpage/conf.py
index d755c9d06ab..16acb5369b5 100644
--- a/tools/SiteSpawner/template/webpage/conf.py
+++ b/tools/SiteSpawner/template/webpage/conf.py
@@ -45,7 +45,7 @@
 extensions = default_extensions
 myst_enable_extensions = default_myst_enable_extensions
 
-myst_substitutions = {"project": project}
+myst_substitutions = {}
 
 myst_url_schemes = {
     "http": None,
@@ -80,9 +80,14 @@
 # html_theme_options = {
 #     "palette": []
 # }
-html_title = project
 
 
 def setup(app):
+    project_name = app.config.project
+    project_words = project_name.split()
+
+    app.config.basic_filename = f"{'-'.join(project_words)}-coverage-reports"
+    app.config.html_title = project_name
+    myst_substitutions["project"] = project_name
     app.add_css_file("main.css")
diff --git a/tools/SiteSpawner/template/webpage/dev.md b/tools/SiteSpawner/template/webpage/dev.md
index d5810fb2891..f06443b8487 100644
--- a/tools/SiteSpawner/template/webpage/dev.md
+++ b/tools/SiteSpawner/template/webpage/dev.md
@@ -3,5 +3,8 @@
 {%- for branch in branches %}
 * {{ branch }}
   * [Coverage]({{ branch }}_coverage_dashboard)
-  * [Documentation](external:dev/{{ branch }}/docs_rendered/html/index.html)
+
+  {% if include_documentation %}
+  * [Documentation](external:main/docs_rendered/html/index.html)
+  {% endif %}
 {%- endfor %}
diff --git a/tools/SiteSpawner/template/webpage/main.md b/tools/SiteSpawner/template/webpage/main.md
index c63b616ce28..b2ded03cabd 100644
--- a/tools/SiteSpawner/template/webpage/main.md
+++ b/tools/SiteSpawner/template/webpage/main.md
@@ -1,4 +1,7 @@
 # Main branch
 
-  * [Coverage](main_coverage_dashboard)
-  * [Documentation](external:main/docs_rendered/html/index.html)
+* [Coverage](main_coverage_dashboard)
+
+{% if include_documentation %}
+* [Documentation](external:main/docs_rendered/html/index.html)
+{% endif %}

From 8e86fccc2dae26ddcd59a910b26141bf365b61c7 Mon Sep 17 00:00:00 2001
From: Aleksander Kiryk
Date: Fri, 13 Sep 2024 13:49:03 +0200
Subject: [PATCH 6/7] Pass project name to doc generator

---
 .github/workflows/gh-pages-pr-remove.yml |  6 +++++-
 .github/workflows/publish-webpage.yml    | 12 ++++++++++--
 2 files changed, 15 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/gh-pages-pr-remove.yml b/.github/workflows/gh-pages-pr-remove.yml
index f2ede98f8a8..2323b560d49 100644
--- a/.github/workflows/gh-pages-pr-remove.yml
+++ b/.github/workflows/gh-pages-pr-remove.yml
@@ -55,7 +55,11 @@ jobs:
           rm -rf ${{ env.ROOT_DIR }}/doctrees/dev/${{ steps.PR.outputs.number }}
           mkdir -p public.new/html/dev/${{ steps.PR.outputs.number }}/docs_rendered
           cp -arf ./docs_rendered/* public.new/html/dev/${{ steps.PR.outputs.number }}/docs_rendered
-          sis -d webpage --loc-github-ref-name ${{ github.ref_name }} --loc-github-event-name ${{ github.event_name }} --pr-number ${{ steps.PR.outputs.number }}
+          sis -d webpage \
+            --doc-project-name "Cores VeeR EL2" \
+            --loc-github-ref-name ${{ github.ref_name }} \
+            --loc-github-event-name ${{ github.event_name }} \
+            --pr-number ${{ steps.PR.outputs.number }}
 
       - name: Add redirect index page
         run: |
diff --git a/.github/workflows/publish-webpage.yml b/.github/workflows/publish-webpage.yml
index b9dee36d848..d70c054049a 100644
--- a/.github/workflows/publish-webpage.yml
+++ b/.github/workflows/publish-webpage.yml
@@ -72,14 +72,22 @@ jobs:
         run: |
           mkdir -p public.new/html/main/docs_rendered
           cp -arf ./docs_rendered/* public.new/html/main/docs_rendered
-          sis -d webpage --loc-github-ref-name ${{ github.ref_name }} --loc-github-event-name ${{ github.event_name }} --pr-number 0
+          sis -d webpage \
+            --doc-project-name "Cores VeeR EL2" \
+            --loc-github-ref-name ${{ github.ref_name }} \
+            --loc-github-event-name ${{ github.event_name }} \
+            --pr-number 0
 
       - name: Update webpage PR
         if: github.event_name == 'pull_request'
         run: |
           mkdir -p public.new/html/dev/${{ github.event.number }}/docs_rendered
           cp -arf ./docs_rendered/* public.new/html/dev/${{ github.event.number }}/docs_rendered
-          sis -d webpage --loc-github-ref-name ${{ github.ref_name }} --loc-github-event-name ${{ github.event_name }} --pr-number ${{ github.event.number }}
+          sis -d webpage \
+            --doc-project-name "Cores VeeR EL2" \
+            --loc-github-ref-name ${{ github.ref_name }} \
+            --loc-github-event-name ${{ github.event_name }} \
+            --pr-number ${{ github.event.number }}
 
       - name: Add redirect index page
         run: |

From 067cc0926678a2de1f60a0aff3982b255dd52e5e Mon Sep 17 00:00:00 2001
From: Aleksander Kiryk
Date: Tue, 17 Sep 2024 14:27:31 +0200
Subject: [PATCH 7/7] Include documentation

---
 .github/workflows/gh-pages-pr-remove.yml | 1 +
 .github/workflows/publish-webpage.yml    | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/.github/workflows/gh-pages-pr-remove.yml b/.github/workflows/gh-pages-pr-remove.yml
index 2323b560d49..42cb07dc866 100644
--- a/.github/workflows/gh-pages-pr-remove.yml
+++ b/.github/workflows/gh-pages-pr-remove.yml
@@ -56,6 +56,7 @@ jobs:
           mkdir -p public.new/html/dev/${{ steps.PR.outputs.number }}/docs_rendered
           cp -arf ./docs_rendered/* public.new/html/dev/${{ steps.PR.outputs.number }}/docs_rendered
           sis -d webpage \
+            --include-documentation \
             --doc-project-name "Cores VeeR EL2" \
             --loc-github-ref-name ${{ github.ref_name }} \
             --loc-github-event-name ${{ github.event_name }} \
             --pr-number ${{ steps.PR.outputs.number }}
diff --git a/.github/workflows/publish-webpage.yml b/.github/workflows/publish-webpage.yml
index d70c054049a..9b5d2e51206 100644
--- a/.github/workflows/publish-webpage.yml
+++ b/.github/workflows/publish-webpage.yml
@@ -73,6 +73,7 @@ jobs:
           mkdir -p public.new/html/main/docs_rendered
           cp -arf ./docs_rendered/* public.new/html/main/docs_rendered
           sis -d webpage \
+            --include-documentation \
             --doc-project-name "Cores VeeR EL2" \
             --loc-github-ref-name ${{ github.ref_name }} \
             --loc-github-event-name ${{ github.event_name }} \
             --pr-number 0
@@ -84,6 +85,7 @@ jobs:
           mkdir -p public.new/html/dev/${{ github.event.number }}/docs_rendered
           cp -arf ./docs_rendered/* public.new/html/dev/${{ github.event.number }}/docs_rendered
           sis -d webpage \
+            --include-documentation \
            --doc-project-name "Cores VeeR EL2" \
            --loc-github-ref-name ${{ github.ref_name }} \
            --loc-github-event-name ${{ github.event_name }} \
            --pr-number ${{ github.event.number }}
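
Note on the project-name plumbing introduced by these patches: the workflows pass --doc-project-name "Cores VeeR EL2", update_webpage forwards it to sphinx-build as -D project=..., and the setup() hook added to conf.py derives the HTML title, the MyST "project" substitution, and the coverage-report file prefix from that value. The following standalone Python sketch mirrors that derivation outside Sphinx (plain variables stand in for app.config; only the example input value is taken from the workflows above):

    # Mirrors the derivation performed by the new conf.py setup() hook.
    # "Cores VeeR EL2" is the value the workflows pass via --doc-project-name.
    project_name = "Cores VeeR EL2"
    project_words = project_name.split()

    basic_filename = f"{'-'.join(project_words)}-coverage-reports"
    html_title = project_name
    myst_substitutions = {"project": project_name}

    print(basic_filename)      # Cores-VeeR-EL2-coverage-reports
    print(html_title)          # Cores VeeR EL2
    print(myst_substitutions)  # {'project': 'Cores VeeR EL2'}

Deriving these values at setup() time, rather than hard-coding them in the template, is what lets the same conf.py serve any project name supplied on the sphinx-build command line.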