From d867e38d6ef7dc59abe77f3cf3de8c62e217e8d5 Mon Sep 17 00:00:00 2001
From: "Gergely Daroczi (@daroczig)"
Date: Sat, 24 Feb 2024 00:42:39 +0100
Subject: [PATCH] high-level project info for dbdocs.io

---
 .github/workflows/dbdocs.yaml |  2 ++
 project.dbml                  | 11 +++++++++++
 2 files changed, 13 insertions(+)
 create mode 100644 project.dbml

diff --git a/.github/workflows/dbdocs.yaml b/.github/workflows/dbdocs.yaml
index d695805f..b224b2e4 100644
--- a/.github/workflows/dbdocs.yaml
+++ b/.github/workflows/dbdocs.yaml
@@ -21,6 +21,8 @@ jobs:
         run: sc-crawler schema mysql > schema.sql
       - name: Convert SQL schema to DBML
         run: sql2dbml --mysql schema.sql -o schema.dbml
+      - name: Add project description
+        run: cat project.dbml >> schema.dbml
       - name: Update dbdocs project
         env:
           DBDOCS_TOKEN: ${{ secrets.DBDOCS_TOKEN }}
diff --git a/project.dbml b/project.dbml
new file mode 100644
index 00000000..aa583ae8
--- /dev/null
+++ b/project.dbml
@@ -0,0 +1,11 @@
+Project DBML {
+  Note: '''
+    # Spare Cores (SC) Crawler database schemas
+
+    [Spare Cores](https://sparecores.com), a Python-based open-source ecosystem, provides a comprehensive and standardized inventory, along with performance evaluations, of the compute resources available across public cloud and server providers. The project offers different components to interact with the database schemas defined here:
+
+    * Run the SC Crawler to compile your own database: https://github.com/SpareCores/sc-crawler
+    * Use the SC Data package to easily grab a copy of the database: https://github.com/SpareCores/sc-data
+    * Fire up an API using the SC Keeper package on top of the database: https://github.com/SpareCores/sc-keeper
+  '''
+}