Counting the number of unique applications, tutorials, ... (#702)
* Counting the number of unique applications, tutorials, ...

* Fix lint

Signed-off-by: Julien Jomier <[email protected]>

* Update README to link to searchable HoloHub component catalog instead of badges

Signed-off-by: Julien Jomier <[email protected]>

---------

Signed-off-by: Julien Jomier <[email protected]>
jjomier authored Feb 24, 2025
1 parent 9f67b0f commit 6e4e377
Showing 2 changed files with 22 additions and 22 deletions.
README.md: 4 changes (1 addition & 3 deletions)
@@ -5,9 +5,7 @@
 ![Metadata](https://img.shields.io/github/actions/workflow/status/nvidia-holoscan/holohub/check_metadata.yml?branch=main&label=Metadata
 )
 
-[![Applications](https://img.shields.io/badge/Applications-91-59A700)](https://github.com/nvidia-holoscan/holohub/tree/main/applications)
-[![Operators](https://img.shields.io/badge/Operators-42-59A700)](https://github.com/nvidia-holoscan/holohub/tree/main/operators)
-[![Tutorials](https://img.shields.io/badge/Tutorials-12-59A700)](https://github.com/nvidia-holoscan/holohub/tree/main/tutorials)
+Visit [https://nvidia-holoscan.github.io/holohub](https://nvidia-holoscan.github.io/holohub) for a searchable catalog of all available components.
 
 This is a central repository for the NVIDIA Holoscan AI sensor processing community to share reference applications, operators, tutorials and benchmarks. We invite users and developers of the Holoscan platform to reuse and contribute to this repository.
 
doc/website/scripts/generate_pages.py: 40 changes (21 additions & 19 deletions)
@@ -74,31 +74,34 @@ def parse_metadata_file(metadata_file: Path, statistics) -> None:
     # Read the file
     # Parse the JSON data
     with open(metadata_file, "r") as metadatafile:
-        print(metadatafile)
         metadata = json.load(metadatafile)
     key = list(metadata.keys())[0]
     dest_dir = str(key) + "s"
 
-    # Extract the application name
+    # Extract the application name, removing cpp/python from the path for counting
     if dest_dir == "applications":
         path = re.sub(r".*/applications/", "", str(metadata_file)).removesuffix("/metadata.json")
-        statistics["applications"] += 1
+        base_path = re.sub(r"/(cpp|python)$", "", path)
+        statistics["unique_applications"].add(base_path)
     elif dest_dir == "workflows":
         path = re.sub(r".*/workflows/", "", str(metadata_file)).removesuffix("/metadata.json")
-        statistics["workflows"] += 1
-    # For operators we put a flat list
+        base_path = re.sub(r"/(cpp|python)$", "", path)
+        statistics["unique_workflows"].add(base_path)
     elif dest_dir == "operators":
         path = str(metadata_file).removesuffix("/metadata.json")
         path = path.split("/")[-1]
-        statistics["operators"] += 1
+        base_path = re.sub(r"/(cpp|python)$", "", path)
+        statistics["unique_operators"].add(base_path)
     elif dest_dir == "tutorials":
         path = str(metadata_file).removesuffix("/metadata.json")
         path = path.split("/")[-1]
-        statistics["tutorials"] += 1
+        base_path = re.sub(r"/(cpp|python)$", "", path)
+        statistics["unique_tutorials"].add(base_path)
     elif dest_dir == "benchmarks":
         path = str(metadata_file).removesuffix("/metadata.json")
         path = path.split("/")[-1]
-        statistics["benchmarks"] += 1
+        base_path = re.sub(r"/(cpp|python)$", "", path)
+        statistics["unique_benchmarks"].add(base_path)
     else:
         logger.error(f"Don't know the output path for: {dest_dir}")
         return
@@ -178,7 +181,6 @@ def parse_metadata_file(metadata_file: Path, statistics) -> None:
         imgmatch = match[1:]
         if imgmatch.startswith("/"):
             imgmatch = imgmatch[1:]
-        logger.info(f"TEST: {match} = {imgmatch}")
         readme_text = readme_text.replace(
             match,
             "https://github.com/nvidia-holoscan/holohub/blob/main/"
@@ -261,11 +263,11 @@ def generate_pages() -> None:
     root = Path(__file__).parent.parent.parent.parent
 
     statistics = {
-        "operators": 0,
-        "tutorials": 0,
-        "applications": 0,
-        "workflows": 0,
-        "benchmarks": 0,
+        "unique_operators": set(),
+        "unique_tutorials": set(),
+        "unique_applications": set(),
+        "unique_workflows": set(),
+        "unique_benchmarks": set(),
     }
 
     logger.info(f"root: {root}")
@@ -279,11 +281,11 @@ def generate_pages() -> None:
     homefile_path = str(Path(__file__).parent.parent) + "/docs/index.md"
     with open(homefile_path, "r") as home_file:
         home_text = home_file.read()
-        home_text = home_text.replace("#operators", str(statistics["operators"]))
-        home_text = home_text.replace("#tutorials", str(statistics["tutorials"]))
-        home_text = home_text.replace("#applications", str(statistics["applications"]))
-        home_text = home_text.replace("#workflows", str(statistics["workflows"]))
-        home_text = home_text.replace("#benchmarks", str(statistics["benchmarks"]))
+        home_text = home_text.replace("#operators", str(len(statistics["unique_operators"])))
+        home_text = home_text.replace("#tutorials", str(len(statistics["unique_tutorials"])))
+        home_text = home_text.replace("#applications", str(len(statistics["unique_applications"])))
+        home_text = home_text.replace("#workflows", str(len(statistics["unique_workflows"])))
+        home_text = home_text.replace("#benchmarks", str(len(statistics["unique_benchmarks"])))
 
     with mkdocs_gen_files.open("index.md", "w") as fd:
         fd.write(home_text)
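
For reference, here is a minimal, self-contained sketch of the counting approach this commit introduces: strip a trailing /cpp or /python segment from each component path, collect the resulting base names in a set, and use the set size when filling the count placeholders in the site's index page. This is not the repository's actual script; the count_unique helper, the example paths, and the placeholder string are illustrative.

```python
import re


def count_unique(metadata_paths):
    """Count applications once per component, ignoring cpp/python language variants."""
    unique_applications = set()
    for metadata_file in metadata_paths:
        # e.g. "holohub/applications/foo/cpp/metadata.json" -> "foo/cpp" -> "foo"
        path = re.sub(r".*/applications/", "", metadata_file).removesuffix("/metadata.json")
        base_path = re.sub(r"/(cpp|python)$", "", path)
        unique_applications.add(base_path)
    return len(unique_applications)


# Hypothetical paths: two language variants of "foo" plus a single-variant "bar".
paths = [
    "holohub/applications/foo/cpp/metadata.json",
    "holohub/applications/foo/python/metadata.json",
    "holohub/applications/bar/metadata.json",
]
count = count_unique(paths)  # 2, not 3

# The website generator fills placeholders such as "#applications" with these counts.
home_text = "Applications: #applications"
print(home_text.replace("#applications", str(count)))  # Applications: 2
```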
