diff --git a/.github/workflows/_docker-build.yml b/.github/workflows/_docker-build.yml index f57132259..cf3621274 100644 --- a/.github/workflows/_docker-build.yml +++ b/.github/workflows/_docker-build.yml @@ -57,4 +57,4 @@ jobs: GIT_COMMIT_SHA=${{ github.sha }} GIT_COMMIT_MESSAGE=${{ github.event.head_commit.message }} cache-from: type=gha - cache-to: type=gha,mode=max + cache-to: type=gha,mode=min diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 75c29856a..000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,221 +0,0 @@ -name: Main CI/CD - -on: - pull_request: - branches: [main] - types: [opened, reopened, synchronize] - push: - branches: [main] - workflow_dispatch: - -jobs: - # JOB to run change detection - changes: - runs-on: ubuntu-latest - # Set job outputs to values from filter step - outputs: - main-api: ${{ (github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'reopened')) || steps.filter.outputs.main-api }} - feedback: ${{ (github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'reopened')) || steps.filter.outputs.feedback }} - calendar: ${{ (github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'reopened')) || steps.filter.outputs.calendar }} - data: ${{ (github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'reopened')) || steps.filter.outputs.data }} - webclient: ${{ (github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'reopened')) || steps.filter.outputs.webclient }} - steps: - - uses: actions/checkout@v3 - - uses: dorny/paths-filter@v2 - id: filter - with: - filters: | - main-api: - - '.github/**' - - 'server/main-api/**' - - 'server/Cargo.*' - feedback: - - '.github/**' - - 'server/feedback/**' - - 'server/Cargo.*' - calendar: - - '.github/**' - - 'server/calendar/**' - - 'server/Cargo.*' - data: - - '.github/**' - - 'data/**' - webclient: - - '.github/**' - - 'webclient/**' - cargo-test: - runs-on: ubuntu-latest - steps: - - name: Setup | Checkout - uses: actions/checkout@v3 - - run: git config --global user.email "github-actions[bot]@users.noreply.github.com" - - run: git config --global user.name "github-actions[bot]" - - name: Setup | Rust - uses: ATiltedTree/setup-rust@v1 - with: - rust-version: stable - components: clippy - - name: Build | Lint - run: cargo clippy --manifest-path "server/Cargo.toml" --workspace - - name: Build | Test - run: cargo test --manifest-path "server/Cargo.toml" --workspace - feedback-build: - needs: - - cargo-test - - changes - if: ${{ needs.changes.outputs.feedback == 'true' }} - uses: ./.github/workflows/_docker-build.yml - with: - image_suffix: feedback - context: ./server - dockerfile: feedback/Dockerfile - permissions: - contents: read - packages: write - feedback-deployment: - uses: ./.github/workflows/_restart-argocd.yml - needs: - - feedback-build - with: - deployment: feedback - secrets: - ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} - calendar-build: - needs: - - cargo-test - - changes - if: ${{ needs.changes.outputs.calendar == 'true' }} - uses: ./.github/workflows/_docker-build.yml - with: - image_suffix: calendar - context: ./server - dockerfile: calendar/Dockerfile - permissions: - contents: read - packages: write - calendar-deployment: - uses: ./.github/workflows/_restart-argocd.yml - needs: - - calendar-build - with: - deployment: calendar - secrets: - 
ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} - main-api-build: - uses: ./.github/workflows/_docker-build.yml - needs: - - cargo-test - - changes - if: ${{ needs.changes.outputs.main-api == 'true' }} - with: - image_suffix: main-api - context: ./server - dockerfile: main-api/Dockerfile.server - permissions: - contents: read - packages: write - building-db-init-build: - uses: ./.github/workflows/_docker-build.yml - needs: - - cargo-test - - changes - if: ${{ needs.changes.outputs.main-api == 'true' }} - with: - image_suffix: building-db-init - context: ./server/main-api - dockerfile: Dockerfile.dbinit - permissions: - contents: read - packages: write - mieli-search-init-build: - uses: ./.github/workflows/_docker-build.yml - needs: - - cargo-test - - changes - if: ${{ needs.changes.outputs.main-api == 'true' }} - with: - image_suffix: mieli-search-init - context: ./server/main-api - dockerfile: Dockerfile.msinit - permissions: - contents: read - packages: write - server-deployment: - uses: ./.github/workflows/_restart-argocd.yml - needs: - - main-api-build - - building-db-init-build - - mieli-search-init-build - with: - deployment: server - secrets: - ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} - data-test: - needs: - - changes - if: ${{ needs.changes.outputs.data == 'true' }} - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.9", "3.10", "3.11"] - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r data/requirements.txt - - name: Test with unittest - run: | - python -m unittest discover - data-build: - needs: - - changes - - data-test - uses: ./.github/workflows/_docker-build.yml - with: - image_suffix: data - context: ./data - dockerfile: Dockerfile - permissions: - contents: read - packages: write - data-deployment-1: - uses: ./.github/workflows/_restart-argocd.yml - needs: - - data-build - with: - deployment: data - secrets: - ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} - data-deployment-2: # we need to restart the server, as otherwise it will not serve the new data - uses: ./.github/workflows/_restart-argocd.yml - needs: - - data-deployment-1 - with: - deployment: server - secrets: - ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} - webclient-build: - uses: ./.github/workflows/_docker-build.yml - needs: - - changes - if: ${{ needs.changes.outputs.webclient == 'true' }} - with: - image_suffix: webclient - context: ./webclient - dockerfile: Dockerfile - permissions: - contents: read - packages: write - webclient-deployment: - uses: ./.github/workflows/_restart-argocd.yml - needs: - - webclient-build - with: - deployment: webclient - secrets: - ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} diff --git a/.github/workflows/data-cicd.yml b/.github/workflows/data-cicd.yml new file mode 100644 index 000000000..0aba40c07 --- /dev/null +++ b/.github/workflows/data-cicd.yml @@ -0,0 +1,71 @@ +name: Data CI/CD + +on: + pull_request: + branches: [main] + types: [opened, reopened, synchronize] + push: + branches: [main] + workflow_dispatch: + +jobs: + data-test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11"] + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install 
--upgrade pip + pip install -r data/requirements.txt + - name: Test with unittest + run: | + python -m unittest discover + #type-check: + # runs-on: ubuntu-latest + # steps: + # - uses: actions/checkout@v3 + #- name: Set up Python ${{ matrix.python-version }} + # uses: actions/setup-python@v4 + # with: + # python-version: "3.10" + # - name: Install dependencies + # run: | + # python -m pip install --upgrade pip + # pip install -r data/requirements.txt -r requirements_dev.txt -r server/main-api/test/requirements.txt + # - name: Run mypy + # run: | + # mypy --strict data + data-build: + needs: + - data-test + uses: ./.github/workflows/_docker-build.yml + with: + image_suffix: data + context: ./data + dockerfile: Dockerfile + permissions: + contents: read + packages: write + data-deployment-1: + uses: ./.github/workflows/_restart-argocd.yml + needs: + - data-build + with: + deployment: data + secrets: + ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} + data-deployment-2: # we need to restart the server, as otherwise it will not serve the new data + uses: ./.github/workflows/_restart-argocd.yml + needs: + - data-deployment-1 + with: + deployment: server + secrets: + ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml new file mode 100644 index 000000000..05a86e9db --- /dev/null +++ b/.github/workflows/linting.yml @@ -0,0 +1,35 @@ +name: Linting + +on: + pull_request: + branches: [main] + types: [opened, reopened, synchronize] + push: + branches: [main] + workflow_dispatch: + +jobs: + linting: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + cache: 'npm' + cache-dependency-path: 'webclient/package.json' + - name: Install node dependencies + run: | + npm install -g openapi-format + npm install --prefix webclient + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + cache: 'pip' + cache-dependency-path: 'requirements*.txt' + - name: Install python dependencies + run: | + python -m pip install --upgrade pip + pip install -r data/requirements.txt -r requirements_dev.txt -r server/main-api/test/requirements.txt + - name: Run pre-commit + uses: pre-commit/action@v3.0.0 diff --git a/.github/workflows/server-cicd.yml b/.github/workflows/server-cicd.yml new file mode 100644 index 000000000..e1caee3ee --- /dev/null +++ b/.github/workflows/server-cicd.yml @@ -0,0 +1,136 @@ +name: Server CI/CD + +on: + pull_request: + branches: [main] + types: [opened, reopened, synchronize] + push: + branches: [main] + workflow_dispatch: + +jobs: + tests: + runs-on: ubuntu-latest + steps: + - name: Setup | Checkout + uses: actions/checkout@v3 + - name: Setup | Rust + uses: actions-rust-lang/setup-rust-toolchain@v1.5.0 + with: + toolchain: stable + components: clippy + cache: false + - name: Setup | Rust-Cache + uses: Swatinem/rust-cache@v2 + with: + workspaces: | + server + - name: Setup | Setup Git to run feedback unit-tests + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + - run: cargo test --workspace + working-directory: server + linting: + runs-on: ubuntu-latest + steps: + - name: Setup | Checkout + uses: actions/checkout@v3 + - name: Setup | Rust + uses: actions-rust-lang/setup-rust-toolchain@v1.5.0 + with: + toolchain: stable + cache: false + - name: Setup | Rust-Cache + uses: Swatinem/rust-cache@v2 + with: + workspaces: | + server + - run: cargo clippy --workspace + 
working-directory: server + feedback-build: + needs: + - tests + - linting + uses: ./.github/workflows/_docker-build.yml + with: + image_suffix: feedback + context: ./server + dockerfile: feedback/Dockerfile + permissions: + contents: read + packages: write + feedback-deployment: + uses: ./.github/workflows/_restart-argocd.yml + needs: + - feedback-build + with: + deployment: feedback + secrets: + ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} + calendar-build: + needs: + - tests + - linting + uses: ./.github/workflows/_docker-build.yml + with: + image_suffix: calendar + context: ./server + dockerfile: calendar/Dockerfile + permissions: + contents: read + packages: write + calendar-deployment: + uses: ./.github/workflows/_restart-argocd.yml + needs: + - calendar-build + with: + deployment: calendar + secrets: + ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} + main-api-build: + uses: ./.github/workflows/_docker-build.yml + needs: + - tests + - linting + with: + image_suffix: main-api + context: ./server + dockerfile: main-api/Dockerfile.server + permissions: + contents: read + packages: write + building-db-init-build: + uses: ./.github/workflows/_docker-build.yml + needs: + - tests + - linting + with: + image_suffix: building-db-init + context: ./server/main-api + dockerfile: Dockerfile.dbinit + permissions: + contents: read + packages: write + mieli-search-init-build: + uses: ./.github/workflows/_docker-build.yml + needs: + - tests + - linting + with: + image_suffix: mieli-search-init + context: ./server/main-api + dockerfile: Dockerfile.msinit + permissions: + contents: read + packages: write + server-deployment: + uses: ./.github/workflows/_restart-argocd.yml + needs: + - main-api-build + - building-db-init-build + - mieli-search-init-build + with: + deployment: server + secrets: + ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} diff --git a/.github/workflows/webclient-cicd.yml b/.github/workflows/webclient-cicd.yml new file mode 100644 index 000000000..0f6b6bead --- /dev/null +++ b/.github/workflows/webclient-cicd.yml @@ -0,0 +1,28 @@ +name: Webclient CI/CD + +on: + pull_request: + branches: [main] + types: [opened, reopened, synchronize] + push: + branches: [main] + workflow_dispatch: + +jobs: + webclient-build: + uses: ./.github/workflows/_docker-build.yml + with: + image_suffix: webclient + context: ./webclient + dockerfile: Dockerfile + permissions: + contents: read + packages: write + webclient-deployment: + uses: ./.github/workflows/_restart-argocd.yml + needs: + - webclient-build + with: + deployment: webclient + secrets: + ARGOCD_TOKEN: ${{ secrets.ARGOCD_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 00c858d8a..8d83c46a0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: hooks: - id: cargo-fmt name: cargo fmt - entry: cargo fmt --manifest-path "server/Cargo.toml" --workspace + entry: cargo fmt --all --manifest-path "server/Cargo.toml" language: system pass_filenames: false # only webclient @@ -60,13 +60,6 @@ repos: hooks: - id: python-check-blanket-noqa - id: python-use-type-annotations - - repo: local - hooks: - - id: mypy - name: mypy - entry: mypy --ignore-missing-imports - language: system - types: [python] - repo: https://github.com/PyCQA/flake8 rev: 6.0.0 hooks: diff --git a/.sourcery.yaml b/.sourcery.yaml index a415ae804..da2e9f588 100644 --- a/.sourcery.yaml +++ b/.sourcery.yaml @@ -29,7 +29,7 @@ rule_settings: - refactoring - suggestion - comment - python_version: '3.9' # A string specifying the lowest Python 
version your project supports. Sourcery will not suggest refactorings requiring a higher Python version. + python_version: '3.10' # A string specifying the lowest Python version your project supports. Sourcery will not suggest refactorings requiring a higher Python version. # rules: # A list of custom rules Sourcery will include in its analysis. # - id: no-print-statements diff --git a/data/README.md b/data/README.md index 51e0bc1ec..fc826ac78 100644 --- a/data/README.md +++ b/data/README.md @@ -21,7 +21,7 @@ Please follow the [system dependencys docs](/resources/documentation/Dependencys ### Dependencies -Since data needs some python dependencys, you will need to install them first. +Since data needs some python dependency's, you will need to install them first. We recommend doing this in a virtual environment. From the root of the project, run: diff --git a/data/external/models/common.py b/data/external/models/common.py index 288fc59ad..42b35ea85 100644 --- a/data/external/models/common.py +++ b/data/external/models/common.py @@ -1,3 +1,5 @@ +from pathlib import Path + import pydantic from pydantic.dataclasses import dataclass @@ -10,6 +12,9 @@ ) +RESULTS = Path(__file__).parent.parent / "results" + + @dataclass(config=PydanticConfiguration) class TranslatableStr: # pylint: disable-next=invalid-name diff --git a/data/external/models/nat.py b/data/external/models/nat.py index 9653ad536..7546e9d37 100644 --- a/data/external/models/nat.py +++ b/data/external/models/nat.py @@ -1,4 +1,6 @@ -from external.models.common import PydanticConfiguration, TranslatableStr +import json + +from external.models.common import PydanticConfiguration, RESULTS, TranslatableStr from pydantic.dataclasses import dataclass @@ -10,12 +12,24 @@ class Building: building_short: str | None address: str | None + @classmethod + def load_all(cls) -> list["Building"]: + """Load all nat.Building's""" + with open(RESULTS / "buildings_nat.json", encoding="utf-8") as file: + return [cls(**item) for item in json.load(file)] + @dataclass(config=PydanticConfiguration) class Campus: campus: TranslatableStr campusshort: TranslatableStr + @classmethod + def load_all(cls) -> dict[str, "Campus"]: + """Load all nat.Campus's""" + with open(RESULTS / "campus_nat.json", encoding="utf-8") as file: + return {key: cls(**item) for key, item in json.load(file).items()} + @dataclass(config=PydanticConfiguration) class Coordinate: @@ -73,6 +87,12 @@ class Room: building_code: str org_id: int | None = None + @classmethod + def load_all(cls) -> dict[str, "Room"]: + """Load all nat.Room's""" + with open(RESULTS / "rooms_nat.json", encoding="utf-8") as file: + return {key: cls(**item) for key, item in json.load(file).items()} + @dataclass(config=PydanticConfiguration) class Organisation: @@ -80,3 +100,9 @@ class Organisation: org_name: TranslatableStr org_type: str org_url: str | None + + @classmethod + def load_all(cls) -> dict[str, "Organisation"]: + """Load all nat.Organisation's""" + with open(RESULTS / "orgs_nat.json", encoding="utf-8") as file: + return {key: cls(**item) for key, item in json.load(file).items()} diff --git a/data/external/models/public_transport.py b/data/external/models/public_transport.py index 6e139fc06..acd7b5860 100644 --- a/data/external/models/public_transport.py +++ b/data/external/models/public_transport.py @@ -1,4 +1,6 @@ -from external.models.common import PydanticConfiguration +import json + +from external.models.common import PydanticConfiguration, RESULTS from pydantic.dataclasses import dataclass @@ -17,3 +19,9 
@@ class Station: lat: float lon: float sub_stations: list[SubStation] + + @classmethod + def load_all(cls) -> list["Station"]: + """Load all public_transport.Station's""" + with open(RESULTS / "public_transport.json", encoding="utf-8") as file: + return [cls(**item) for item in json.load(file)] diff --git a/data/external/models/roomfinder.py b/data/external/models/roomfinder.py index 7e1a7a8ca..b0ea5f3de 100644 --- a/data/external/models/roomfinder.py +++ b/data/external/models/roomfinder.py @@ -1,6 +1,7 @@ +import json import typing -from external.models.common import PydanticConfiguration +from external.models.common import PydanticConfiguration, RESULTS from pydantic.dataclasses import dataclass @@ -26,6 +27,12 @@ class Building: utm_zone: int b_room_count: int + @classmethod + def load_all(cls) -> list["Building"]: + """Load all nat.Building's""" + with open(RESULTS / "buildings_roomfinder.json", encoding="utf-8") as file: + return [cls(**item) for item in json.load(file)] + @dataclass(config=PydanticConfiguration) class LatLonBox: @@ -46,6 +53,12 @@ class Map: scale: int latlonbox: LatLonBox | None = None + @classmethod + def load_all(cls) -> list["Map"]: + """Load all nat.Map's""" + with open(RESULTS / "maps_roomfinder.json", encoding="utf-8") as file: + return [cls(**item) for item in json.load(file)] + @dataclass(config=PydanticConfiguration) class RoomMetadata: @@ -76,3 +89,9 @@ class Room: b_id: str b_name: str b_room_count: int = 0 + + @classmethod + def load_all(cls) -> list["Room"]: + """Load all nat.Room's""" + with open(RESULTS / "rooms_roomfinder.json", encoding="utf-8") as file: + return [cls(**item) for item in json.load(file)] diff --git a/data/external/models/test_models.py b/data/external/models/test_models.py index 0ca312023..081652de5 100644 --- a/data/external/models/test_models.py +++ b/data/external/models/test_models.py @@ -1,96 +1,45 @@ -import json import unittest -from pathlib import Path from external.models import nat, public_transport, roomfinder, tumonline -RESULTS = Path(__file__).parent.parent / "results" - -class NAT(unittest.TestCase): - def test_buildings(self): - """Test if the buildings can be loaded as nat.Building objects""" - with open(RESULTS / "buildings_nat.json", encoding="utf-8") as file: - for item in json.load(file): - with self.subTest(item=item): - nat.Building(**item) - - def test_rooms(self): - """Test if the rooms can be loaded as nat.Room objects""" - with open(RESULTS / "rooms_nat.json", encoding="utf-8") as file: - for item in json.load(file).values(): - with self.subTest(item=item): - nat.Room(**item) - - def test_campus(self): - """Test if the campi can be loaded as nat.Campus objects""" - with open(RESULTS / "campus_nat.json", encoding="utf-8") as file: - for item in json.load(file).values(): - with self.subTest(item=item): - nat.Campus(**item) - - def test_org(self): - """Test if the orgs can be loaded as nat.Organisation objects""" - with open(RESULTS / "orgs_nat.json", encoding="utf-8") as file: - for item in json.load(file).values(): - with self.subTest(item=item): - nat.Organisation(**item) - - -class Roomfinder(unittest.TestCase): - def test_maps(self): - """Test if the maps can be loaded as roomfinder.Map objects""" - with open(RESULTS / "maps_roomfinder.json", encoding="utf-8") as file: - for item in json.load(file): - with self.subTest(item=item): - roomfinder.Map(**item) - - def test_rooms(self): - """Test if the rooms can be loaded as roomfinder.Room objects""" - with open(RESULTS / "rooms_roomfinder.json", 
encoding="utf-8") as file: - for item in json.load(file): - with self.subTest(item=item): - roomfinder.Room(**item) - - def test_buildings(self): - """Test if the buildings can be loaded as roomfinder.Building objects""" - with open(RESULTS / "buildings_roomfinder.json", encoding="utf-8") as file: - for item in json.load(file): - with self.subTest(item=item): - roomfinder.Building(**item) - - -class TUMonline(unittest.TestCase): - def test_rooms(self): - """Test if the rooms can be loaded as tumonline.Room objects""" - with open(RESULTS / "rooms_tumonline.json", encoding="utf-8") as file: - for item in json.load(file): - with self.subTest(item=item): - tumonline.Room(**item) - - def test_buildings(self): - """Test if the buildings can be loaded as tumonline.Building objects""" - with open(RESULTS / "buildings_tumonline.json", encoding="utf-8") as file: - for item in json.load(file): - with self.subTest(item=item): - tumonline.Building(**item) - - def test_orgs(self): - """Test if the orgs can be loaded as tumonline.Organisation objects""" +class ModelLoader(unittest.TestCase): + def test_nat(self): + """Test if the nat models can be loaded correctly""" + with self.subTest(nat.Building): + nat.Building.load_all() + with self.subTest(nat.Room): + nat.Room.load_all() + with self.subTest(nat.Campus): + nat.Campus.load_all() + with self.subTest(nat.Organisation): + nat.Organisation.load_all() + + def test_roomfinder(self): + """Test if the roomfinder models can be loaded correctly""" + with self.subTest(roomfinder.Map): + roomfinder.Map.load_all() + with self.subTest(roomfinder.Room): + roomfinder.Room.load_all() + with self.subTest(roomfinder.Building): + roomfinder.Building.load_all() + + def test_tumonline(self): + """Test if the tumonline models can be loaded correctly""" + with self.subTest(tumonline.Room): + tumonline.Room.load_all() + with self.subTest(tumonline.Building): + tumonline.Building.load_all() for lang in ("de", "en"): - with open(RESULTS / f"orgs-{lang}_tumonline.json", encoding="utf-8") as file: - for item in json.load(file).values(): - with self.subTest(item=item, lang=lang): - tumonline.Organisation(**item) - - -class PublicTransport(unittest.TestCase): - def test_stations(self): - """Test if the stations can be loaded as public_transport.Station objects""" - with open(RESULTS / "public_transport.json", encoding="utf-8") as file: - for item in json.load(file): - with self.subTest(item=item): - public_transport.Station(**item) + with self.subTest(tumonline.Organisation, lang=lang): + tumonline.Organisation.load_all_for(lang) + with self.subTest(tumonline.Usage): + tumonline.Usage.load_all() + + def test_public_transport(self): + """Test if the public_transport models can be loaded correctly""" + with self.subTest(public_transport.Station): + public_transport.Station.load_all() if __name__ == "__main__": diff --git a/data/external/models/tumonline.py b/data/external/models/tumonline.py index dea3d6ea6..ae9593c8e 100644 --- a/data/external/models/tumonline.py +++ b/data/external/models/tumonline.py @@ -1,4 +1,6 @@ -from external.models.common import PydanticConfiguration +import json + +from external.models.common import PydanticConfiguration, RESULTS from pydantic.dataclasses import dataclass @@ -39,6 +41,12 @@ class Room: usage: int extended: ExtendedRoomData | None = None + @classmethod + def load_all(cls) -> list["Room"]: + """Load all tumonline.Room's""" + with open(RESULTS / "rooms_tumonline.json", encoding="utf-8") as file: + return [cls(**item) for item in 
json.load(file)] + @dataclass(config=PydanticConfiguration) class Building: @@ -46,6 +54,12 @@ class Building: filter_id: int name: str + @classmethod + def load_all(cls) -> list["Building"]: + """Load all tumonline.Building's""" + with open(RESULTS / "buildings_tumonline.json", encoding="utf-8") as file: + return [cls(**item) for item in json.load(file)] + @dataclass(config=PydanticConfiguration) class Organisation: @@ -54,3 +68,23 @@ class Organisation: code: str name: str path: str + + @classmethod + def load_all_for(cls, lang: str) -> dict[str, "Organisation"]: + """Load all tumonline.Organisation's for a specific language""" + with open(RESULTS / f"orgs-{lang}_tumonline.json", encoding="utf-8") as file: + return {key: cls(**item) for key, item in json.load(file).items()} + + +@dataclass(config=PydanticConfiguration) +class Usage: + # pylint: disable-next=invalid-name + id: int + din_277: str + name: str + + @classmethod + def load_all(cls) -> dict[int, "Usage"]: + """Load all tumonline.Usage's""" + with open(RESULTS / "usages_tumonline.json", encoding="utf-8") as file: + return {item["id"]: cls(**item) for item in json.load(file)} diff --git a/data/external/scrapers/public_transport.py b/data/external/scrapers/public_transport.py index d7db39350..e48f99fd4 100644 --- a/data/external/scrapers/public_transport.py +++ b/data/external/scrapers/public_transport.py @@ -1,54 +1,107 @@ import csv +import logging from zipfile import ZipFile from external.scraping_utils import _download_file, CACHE_PATH, cached_json -MVV_GTFS_URL = "https://www.mvv-muenchen.de/fileadmin/mediapool/02-Fahrplanauskunft/03-Downloads/openData/mvv_gtfs.zip" +MVV_OPENDATA_URL = "https://www.mvv-muenchen.de/fileadmin/mediapool/02-Fahrplanauskunft/03-Downloads/openData" +MVV_GTFS_URL = f"{MVV_OPENDATA_URL}/mvv_gtfs.zip" +MVV_HST_REPORT_URL = f"{MVV_OPENDATA_URL}/MVV_HSTReport2212.csv" # train/tram stations + some bus stations PUBLIC_TRANSPORT_CACHE_PATH = CACHE_PATH / "public_transport" -@cached_json("public_transport.json") -def scrape_stations(): - """Scrape the stations from the MVV GTFS data and return them as a list of dicts""" +def _load_bus_stations(stations: dict) -> None: + """Load the bus stations from the MVV GTFS data and add them to stations dict""" _download_file(MVV_GTFS_URL, PUBLIC_TRANSPORT_CACHE_PATH / "fahrplandaten.zip") with ZipFile(PUBLIC_TRANSPORT_CACHE_PATH / "fahrplandaten.zip") as file_zip: file_zip.extract("stops.txt", PUBLIC_TRANSPORT_CACHE_PATH) - with open(PUBLIC_TRANSPORT_CACHE_PATH / "stops.txt", encoding="utf-8") as file: - lines = csv.DictReader(file, delimiter=",") - stations = {} - repeat_later = [] # when parent station is not already in dict - for line in lines: - if line["location_type"]: - stations.setdefault( - line["stop_id"], - { - "station_id": line["stop_id"], - "name": line["stop_name"], - "lat": float(line["stop_lat"]), - "lon": float(line["stop_lon"]), - "sub_stations": [], - }, - ) - else: - sub_station = { + lines = list(csv.DictReader(file, delimiter=",")) + repeat_later = [] # when parent station is not already in dict + for line in lines: + if line["location_type"]: + stations.setdefault( + line["stop_id"], + { "station_id": line["stop_id"], "name": line["stop_name"], "lat": float(line["stop_lat"]), "lon": float(line["stop_lon"]), - "parent": line["parent_station"], - } - - if parent := stations.get(line["parent_station"]): - parent["sub_stations"].append(sub_station) - else: - repeat_later.append(sub_station) - - for sub in repeat_later: - if parent := 
stations.get(sub["parent"]): - parent["sub_stations"].append(sub) - # remove parent property from sub stations - for station in stations.values(): - for sub in station["sub_stations"]: - del sub["parent"] - return sorted(stations.values(), key=lambda x: x["lat"]) + "sub_stations": [], + }, + ) + else: + sub_station = { + "station_id": line["stop_id"], + "name": line["stop_name"], + "lat": float(line["stop_lat"]), + "lon": float(line["stop_lon"]), + "parent": line["parent_station"], + } + if not sub_station["parent"]: + sub_station["parent"] = ":".join(line["stop_id"].split(":")[:3]) + + if parent := stations.get(line["parent_station"]): + parent["sub_stations"].append(sub_station) + else: + repeat_later.append(sub_station) + + for sub in repeat_later: + if parent := stations.get(sub["parent"]): + parent["sub_stations"].append(sub) + else: + if sub["station_id"]: + logging.warning(f"{sub['name']} with id {sub['station_id']} has no parent in our data") + + +def _load_train_stations(stations: dict) -> None: + """Load the bus stations from the MVV_HST_REPORT data and add them to stations dict""" + _download_file(MVV_HST_REPORT_URL, PUBLIC_TRANSPORT_CACHE_PATH / "train_stations.csv") + with open(PUBLIC_TRANSPORT_CACHE_PATH / "train_stations.csv", encoding="utf-8") as file: + lines = [line for line in csv.DictReader(file, delimiter=";") if line["\ufeffHstNummer"]] + repeat_later = [] # when parent station is not already in dict + for line in lines: + if line["Globale ID"].count(":") == 2: # example: de:09184:460 + stations.setdefault( + line["Globale ID"], + { + "station_id": line["Globale ID"], + "name": line["Name ohne Ort"], + "lat": float(line["WGS84 X"].replace(",", ".")), + "lon": float(line["WGS84 Y"].replace(",", ".")), + "sub_stations": [], + }, + ) + else: + parent_id = ":".join(line["Globale ID"].split(":")[:3]) + sub_station = { + "station_id": line["Globale ID"], + "name": line["Name ohne Ort"], + "lat": float(line["WGS84 X"].replace(",", ".")), + "lon": float(line["WGS84 Y"].replace(",", ".")), + "parent": parent_id, + } + + if parent := stations.get(parent_id): + parent["sub_stations"].append(sub_station) + else: + repeat_later.append(sub_station) + for sub in repeat_later: + if parent := stations.get(sub["parent"]): + parent["sub_stations"].append(sub) + else: + if sub["station_id"]: + logging.warning(f"{sub['name']} with id {sub['station_id']} has no parent in our data") + + +@cached_json("public_transport.json") +def scrape_stations(): + """Scrape the stations from the MVV GTFS data and return them as a list of dicts""" + stations = {} + _load_train_stations(stations) + _load_bus_stations(stations) + # remove parent property from sub stations + for station in stations.values(): + for sub in station["sub_stations"]: + del sub["parent"] + return sorted(stations.values(), key=lambda x: x["lat"]) diff --git a/data/processors/coords.py b/data/processors/coords.py index 586351ead..602b7b57d 100644 --- a/data/processors/coords.py +++ b/data/processors/coords.py @@ -2,6 +2,9 @@ import logging import utm +from utils import distance_via_great_circle + +MAX_DISTANCE_METERS_FROM_PARENT = 200 def assert_buildings_have_coords(data): @@ -119,8 +122,32 @@ def check_coords(input_data): ) +def validate_coords(input_data): + """Check that coordinates are not too far away from their parent""" + for iid, data in input_data.items(): + if data["type"] != "room": + continue + coords = data["coords"] + parent_id = data["parents"][-1] + parent_coords = input_data[parent_id]["coords"] + + 
distance_to_parent = distance_via_great_circle( + coords["lat"], + coords["lon"], + parent_coords["lat"], + parent_coords["lon"], + ) + + if distance_to_parent > MAX_DISTANCE_METERS_FROM_PARENT: + logging.warning( + f"{iid} {coords} is {distance_to_parent}m away from its parent {parent_id} {parent_coords}. " + "Please recheck if the coordinate makes sense", + ) + + def add_and_check_coords(data): """Add coordinates to all entries and check for issues""" assert_buildings_have_coords(data) assign_coordinates(data) check_coords(data) + validate_coords(data) diff --git a/data/processors/export.py b/data/processors/export.py index c298c1f20..15e6c5053 100644 --- a/data/processors/export.py +++ b/data/processors/export.py @@ -1,3 +1,4 @@ +import dataclasses import json from pathlib import Path from typing import Any, Union @@ -126,4 +127,12 @@ def export_for_api(data, path): del export_data[_id]["props"][k] with open(path, "w", encoding="utf-8") as file: - json.dump(export_data, file) + json.dump(export_data, file, cls=EnhancedJSONEncoder) + + +class EnhancedJSONEncoder(json.JSONEncoder): + def default(self, o): + """Enhanced JSONEncoder that can handle dataclasses""" + if dataclasses.is_dataclass(o): + return dataclasses.asdict(o) + return super().default(o) diff --git a/data/processors/nat.py b/data/processors/nat.py index 2e7cd63a0..b3f552b26 100644 --- a/data/processors/nat.py +++ b/data/processors/nat.py @@ -1,9 +1,10 @@ import json import logging from collections import Counter -from dataclasses import dataclass +from dataclasses import asdict, dataclass import yaml +from external.models import nat with open("sources/12_nat_excluded_buildings.yaml", encoding="utf-8") as excluded_buildings_raw: EXCLUDED_BUILDINGS = set(yaml.safe_load(excluded_buildings_raw.read())) @@ -18,17 +19,13 @@ class NATBuilding: b_alias: None | str b_address: None | str - def __init__(self, data: dict): + def __init__(self, data: nat.Building): self.b_id = None # Later set by _infer_internal_id() - self.b_code = data["building_code"] # Building id/code used by the NAT roomfinder - self.b_name = data["building_name"] - self.b_tumonline_id = data["building_id"] - self.b_alias = data["building_short"] - self.b_address = data["address"] - - def as_dict(self): - """Return the building data as dict""" - return self.__dict__ + self.b_code = data.building_code # Building id/code used by the NAT roomfinder + self.b_name = data.building_name + self.b_tumonline_id = data.building_id + self.b_alias = data.building_short + self.b_address = data.address def merge_nat_buildings(data): @@ -36,11 +33,10 @@ def merge_nat_buildings(data): Merge the buildings in the NAT Roomfinder with the existing data. This may overwrite existing data, if they have patched some fields. 
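The new validate_coords pass only logs a warning when a room drifts too far from its parent. A minimal usage sketch of what trips it — the helper and the 200 m threshold are the ones introduced above, the room/building coordinates are made up, and the import assumes the script runs from the data/ directory like the other processors:

```python
# Hedged sketch: a room placed ~0.002 deg (~222 m) north of its parent building
# exceeds MAX_DISTANCE_METERS_FROM_PARENT and would be logged as a warning.
from utils import distance_via_great_circle  # lives in data/utils.py after this change

MAX_DISTANCE_METERS_FROM_PARENT = 200

parent_coords = {"lat": 48.14884, "lon": 11.56790}  # hypothetical building
room_coords = {"lat": 48.15084, "lon": 11.56790}    # hypothetical room, slightly north

distance_to_parent = distance_via_great_circle(
    room_coords["lat"],
    room_coords["lon"],
    parent_coords["lat"],
    parent_coords["lon"],
)
print(f"{distance_to_parent:.0f} m")  # ~222 m
assert distance_to_parent > MAX_DISTANCE_METERS_FROM_PARENT  # -> validate_coords would warn
```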
""" - with open("external/results/buildings_nat.json", encoding="utf-8") as file: - buildings = json.load(file) + buildings = nat.Building.load_all() # Sanity-check: Make sure that the buildings in the data are unique - building_ids = [b["building_code"] for b in buildings] + building_ids = [b.building_code for b in buildings] if duplicate_building_ids := {b_id: cnt for b_id, cnt in Counter(building_ids).items() if cnt > 1}: raise ValueError(f"There are duplicate buildings in the data: {duplicate_building_ids}") @@ -75,11 +71,11 @@ def _infer_internal_id(building, data): return building.b_id -def _merge_building(data, building): +def _merge_building(data: dict, building: NATBuilding) -> None: internal_id = _infer_internal_id(building, data) b_data = data[internal_id] - b_data["nat_data"] = building.as_dict() + b_data["nat_data"] = asdict(building) # NAT buildings are merged after TUMonline and the MyTUM Roomfinder. So if the others # weren't used as sources, but the NAT Roomfinder has this building, we know it's from there. @@ -101,7 +97,7 @@ def merge_nat_rooms(_data): """ with open("external/results/rooms_nat.json", encoding="utf-8") as file: - _rooms = json.load(file) + _rooms = json.load(file) # noqa: F841 # TODO: implement the merging of NAT rooms logging.warning("Merging NAT rooms is not yet implemented") diff --git a/data/processors/public_transport.py b/data/processors/public_transport.py index 18b0694c3..70904f6e8 100644 --- a/data/processors/public_transport.py +++ b/data/processors/public_transport.py @@ -1,24 +1,11 @@ -import json from dataclasses import asdict -from math import acos, cos, radians, sin from external.models.public_transport import Station +from utils import distance_via_great_circle MAXDISTANCE = 1000 METERS_PER_LATITUDE_DEGREE = 111210 MAXDEGDIFF_PER_LATITUDE_DEGREE = MAXDISTANCE / METERS_PER_LATITUDE_DEGREE -EARTH_RADIUS_METERS = 6_371_000 - - -def _distance_via_great_circle(lat1: float, lon1: float, lat2: float, lon2: float) -> float: - """ - Calculate the approximate distance in meters betweeen two points using the great circle approach - Basic idea from https://blog.petehouston.com/calculate-distance-of-two-locations-on-earth/ - """ - lat1, lon1, lat2, lon2 = map(radians, [lat1, lon1, lat2, lon2]) - # angular distance using the https://wikipedia.org/wiki/Haversine_formula - angular_distance = acos(sin(lat1) * sin(lat2) + cos(lat1) * cos(lat2) * cos(lon1 - lon2)) - return EARTH_RADIUS_METERS * angular_distance def _filter_by_latitude(lat: float, stations: list[Station]) -> list[Station]: @@ -35,7 +22,7 @@ def nearby_stations(lat: float, lon: float, stations: list[Station]) -> list[dic """returns a list of tuples in form: [distance in meter, station]""" results = [] for station in _filter_by_latitude(lat, stations): - if (distance := _distance_via_great_circle(station.lat, station.lon, lat, lon)) <= MAXDISTANCE: + if (distance := distance_via_great_circle(station.lat, station.lon, lat, lon)) <= MAXDISTANCE: station_dict = {"distance": distance} | asdict(station) results.append(station_dict) return sorted(results, key=lambda x: x["distance"]) @@ -43,8 +30,7 @@ def nearby_stations(lat: float, lon: float, stations: list[Station]) -> list[dic def add_nearby_public_transport(data): """Add the nearby public transport stations to the data""" - with open("external/results/public_transport.json", encoding="utf-8") as file: - stations = [Station(**x) for x in json.load(file)] + stations = Station.load_all() for entry in data.values(): if coords := entry.get("coords", 
None): # noqa: SIM102 diff --git a/data/processors/roomfinder.py b/data/processors/roomfinder.py index f936c0525..b313d919b 100644 --- a/data/processors/roomfinder.py +++ b/data/processors/roomfinder.py @@ -1,4 +1,3 @@ -import json import logging import re @@ -12,14 +11,11 @@ def merge_roomfinder_buildings(data): Merge the buildings in Roomfinder with the existing data. This will not overwrite the existing data, but act directly on the provided data. """ - with open("external/results/buildings_roomfinder.json", encoding="utf-8") as file: - buildings = [roomfinder.Building(**b) for b in json.load(file)] - with open("sources/10_patches-roomfinder-buildings.yaml", encoding="utf-8") as file: patches = yaml.safe_load(file.read()) error = False - for building in buildings: + for building in roomfinder.Building.load_all(): # 'Building' 0000 contains some buildings and places not in TUMonline as rooms. # They might be integrated customly somewhere else, but here we ignore these. if building.b_id == "0000": @@ -81,9 +77,6 @@ def merge_roomfinder_rooms(data): This will not overwrite the existing data, but act directly on the provided data. """ - with open("external/results/rooms_roomfinder.json", encoding="utf-8") as file: - rooms = [roomfinder.Room(**r) for r in json.load(file)] - with open("sources/16_roomfinder-merge-patches.yaml", encoding="utf-8") as file: patches = yaml.safe_load(file.read()) @@ -95,7 +88,7 @@ def merge_roomfinder_rooms(data): if ("type" in _data and _data["type"] == "room" and "tumonline_data" in _data) } - for room in rooms: + for room in roomfinder.Room.load_all(): # Try to find the existing room id (which is based on the SAP Code). # We use the TUMonline arch_name for this, because we don't know the SAP Code here. try: diff --git a/data/processors/tumonline.py b/data/processors/tumonline.py index c30725d4c..c268cf190 100644 --- a/data/processors/tumonline.py +++ b/data/processors/tumonline.py @@ -2,7 +2,9 @@ import logging import string +import pydantic import yaml +from external.models import tumonline from processors.merge import recursively_merge from processors.patch import apply_patches from utils import TranslatableStr as _ @@ -17,13 +19,10 @@ def merge_tumonline_buildings(data): Merge the buildings in TUMonline with the existing data. This will not overwrite the existing data, but act directly on the provided data. """ - with open("external/results/buildings_tumonline.json", encoding="utf-8") as file: - buildings = json.load(file) - error = False - for building in buildings: + for building in tumonline.Building.load_all(): # Normalize the building name (sometimes has more than one space) - b_name = " ".join(building["name"].split()).strip() + b_name = " ".join(building.name.split()).strip() # Extract the building id try: @@ -62,8 +61,8 @@ def merge_tumonline_buildings(data): b_data["tumonline_data"] = { "name": b_name, - "filter_id": building["filter_id"], - "area_id": building["area_id"], + "filter_id": building.filter_id, + "area_id": building.area_id, } b_data.setdefault("props", {}).setdefault("ids", {}).setdefault("b_id", b_id) @@ -73,20 +72,22 @@ def merge_tumonline_buildings(data): return data +@pydantic.dataclasses.dataclass +# pylint: disable=too-few-public-methods +class InactiveOrg: + name: str + + +# pylint: disable=too-many-locals def merge_tumonline_rooms(data): """ Merge the rooms in TUMonline with the existing data. This will not overwrite the existing data, but act directly on the provided data. 
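For context on the constants that processors/public_transport.py keeps after this refactor: one degree of latitude is roughly 111.2 km everywhere on the globe, so a station whose latitude differs by more than MAXDISTANCE / 111210 degrees cannot lie within MAXDISTANCE metres, and the expensive great-circle computation can be skipped for it. A self-contained sketch of that pre-filter follows — the constants are taken from this diff, while the Station stand-in and the body of _filter_by_latitude are assumptions, since the diff only shows its signature:

```python
from dataclasses import dataclass

MAXDISTANCE = 1000                   # metres
METERS_PER_LATITUDE_DEGREE = 111210  # ~111.2 km per degree of latitude
MAXDEGDIFF_PER_LATITUDE_DEGREE = MAXDISTANCE / METERS_PER_LATITUDE_DEGREE  # ~0.009 deg


@dataclass
class Station:  # simplified stand-in for external.models.public_transport.Station
    lat: float
    lon: float


def _filter_by_latitude(lat: float, stations: list[Station]) -> list[Station]:
    """Drop stations that are obviously too far away before computing great-circle distances."""
    return [s for s in stations if abs(s.lat - lat) <= MAXDEGDIFF_PER_LATITUDE_DEGREE]


near, far = Station(48.151, 11.568), Station(48.265, 11.671)
print(_filter_by_latitude(48.149, [near, far]))  # only the nearby station survives
```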
""" rooms = _clean_tumonline_rooms() - with open("external/results/usages_tumonline.json", encoding="utf-8") as file: - usages = json.load(file) - usages_lookup = {usage["id"]: usage for usage in usages} - with open("external/results/orgs-de_tumonline.json", encoding="utf-8") as file_de: - orgs_de = json.load(file_de) - with open("external/results/orgs-en_tumonline.json", encoding="utf-8") as file_en: - orgs_en = json.load(file_en) + orgs_de = tumonline.Organisation.load_all_for("de") + orgs_en = tumonline.Organisation.load_all_for("en") missing_buildings: dict[str, int] = {} for room in rooms: @@ -114,8 +115,8 @@ def merge_tumonline_rooms(data): "operator_id": int(room["op_link"].strip(OPERATOR_WEBNAV_LINK_PREFIX)), "operator_link": room["op_link"], "operator_name": _( - orgs_de.get(operator, {}).get("name", f"Inaktive Organisation ({operator})"), - orgs_en.get(operator, {}).get("name", f"Inactive Organisation ({operator})"), + orgs_de.get(operator, InactiveOrg(name=f"Inaktive Organisation ({operator})").name), + orgs_en.get(operator, InactiveOrg(name=f"Inactive Organisation ({operator})").name), ), "room_link": room["room_link"], "calendar": room["calendar"], @@ -144,11 +145,12 @@ def merge_tumonline_rooms(data): } # Usage + usages_lookup = tumonline.Usage.load_all() if room["usage"] in usages_lookup: tumonline_usage = usages_lookup[room["usage"]] - parts = tumonline_usage["din_277"].split(" - ") + parts = tumonline_usage.din_277.split(" - ") r_data["usage"] = { - "name": _(tumonline_usage["name"]), + "name": _(tumonline_usage.name), "din_277": parts[0], "din_277_desc": parts[1], } @@ -315,7 +317,7 @@ def _infer_arch_name( room["patched"] = True -def _maybe_set_alt_name(arch_name_parts, room): +def _maybe_set_alt_name(arch_name_parts: tuple[str, str], room: dict) -> None: """ deduces the alt_name from the roomname diff --git a/data/sources/01_areas-extended.yaml b/data/sources/01_areas-extended.yaml index 322a88460..8bfaa8c8d 100644 --- a/data/sources/01_areas-extended.yaml +++ b/data/sources/01_areas-extended.yaml @@ -181,6 +181,7 @@ zentralgelaende: "05": { use_as: "04" } "0510": # Verwaltungsbau (Z10) osm: ["relation/6758375"] # smaller part: "way/31095179" + coords: { lat: 48.14884, lon: 11.56790 } "0511": # Elektro/Werkstatt/Lösungsmittel (Z11) osm: ["way/42918170"] #"0512": # Garagen (Z12) diff --git a/data/utils.py b/data/utils.py index a0b9bde20..eafcca3dd 100644 --- a/data/utils.py +++ b/data/utils.py @@ -1,5 +1,6 @@ import logging import os +from math import acos, cos, radians, sin from pathlib import Path from PIL import Image @@ -102,3 +103,19 @@ def setup_logging(level): logging.addLevelName(logging.WARNING, f"\033[1;33m{logging.getLevelName(logging.WARNING)}\033[1;0m") logging.addLevelName(logging.ERROR, f"\033[1;41m{logging.getLevelName(logging.ERROR)}\033[1;0m") logging.addLevelName(logging.CRITICAL, f"\033[1;41m{logging.getLevelName(logging.CRITICAL)}\033[1;0m") + + +EARTH_RADIUS_METERS = 6_371_000 + + +def distance_via_great_circle(lat1: float, lon1: float, lat2: float, lon2: float) -> float: + """ + Calculate the approximate distance in meters betweeen two points using the great circle approach + Basic idea from https://blog.petehouston.com/calculate-distance-of-two-locations-on-earth/ + """ + if lat1 == lat2 and lon1 == lon2: + return 0.0 + lat1, lon1, lat2, lon2 = map(radians, [lat1, lon1, lat2, lon2]) + # angular distance using the https://wikipedia.org/wiki/Haversine_formula + angular_distance = acos(sin(lat1) * sin(lat2) + cos(lat1) * cos(lat2) * cos(lon1 - lon2)) 
+ return EARTH_RADIUS_METERS * angular_distance diff --git a/resources/documentation/Dependencys.md b/resources/documentation/Dependencys.md index 5acec192f..03580e5b6 100644 --- a/resources/documentation/Dependencys.md +++ b/resources/documentation/Dependencys.md @@ -44,10 +44,9 @@ the latest version. #### Python3 The server does have some scripts, which are written in python, and they implicitly depend on a recent version of -python (~3.10). +python (>=3.10). If you don't meet this requirement, head over to the [python website](https://www.python.org/downloads/) and download the latest version. -We also assume that `python --version` outputs something like `Python 3.1X.Y`. #### Rust diff --git a/server/Cargo.lock b/server/Cargo.lock index a5901241a..21cda9e1f 100644 --- a/server/Cargo.lock +++ b/server/Cargo.lock @@ -936,6 +936,12 @@ dependencies = [ "instant", ] +[[package]] +name = "fastrand" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" + [[package]] name = "fdeflate" version = "0.3.0" @@ -1052,7 +1058,7 @@ version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" dependencies = [ - "fastrand", + "fastrand 1.9.0", "futures-core", "futures-io", "memchr", @@ -1457,17 +1463,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi", - "libc", - "windows-sys", -] - [[package]] name = "ipnet" version = "2.8.0" @@ -1481,7 +1476,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ "hermit-abi", - "rustix 0.38.3", + "rustix", "windows-sys", ] @@ -1644,12 +1639,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - [[package]] name = "linux-raw-sys" version = "0.4.3" @@ -2100,9 +2089,9 @@ dependencies = [ [[package]] name = "octocrab" -version = "0.25.1" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0bc095e456c43e3afe5a53cdcf11aae1965663b941f7a5efb49b6ef53ce8529" +checksum = "0943920a77d028b66ebe4407813733ad4d0ebe7b12dafd608ddec8478e5fef0b" dependencies = [ "arc-swap", "async-trait", @@ -2647,20 +2636,6 @@ dependencies = [ "semver", ] -[[package]] -name = "rustix" -version = "0.37.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys", -] - [[package]] name = "rustix" version = "0.38.3" @@ -2670,15 +2645,15 @@ dependencies = [ "bitflags 2.3.3", "errno", "libc", - "linux-raw-sys 0.4.3", + "linux-raw-sys", "windows-sys", ] [[package]] name = "rustls" -version = "0.21.3" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b19faa85ecb5197342b54f987b142fb3e30d0c90da40f80ef4fa9a726e6676ed" +checksum = "79ea77c539259495ce8ca47f53e66ae0330a8819f67e23ac96ca02f50e7b7d36" dependencies = [ 
"log", "ring", @@ -3069,15 +3044,14 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.6.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" +checksum = "5486094ee78b2e5038a6382ed7645bc084dc2ec433426ca4c3cb61e2007b8998" dependencies = [ - "autocfg", "cfg-if", - "fastrand", + "fastrand 2.0.0", "redox_syscall", - "rustix 0.37.23", + "rustix", "windows-sys", ] diff --git a/server/calendar/Cargo.toml b/server/calendar/Cargo.toml index 78fc89f3d..bbd304dfa 100644 --- a/server/calendar/Cargo.toml +++ b/server/calendar/Cargo.toml @@ -41,7 +41,7 @@ prometheus = { version = "0.13.3", features = ["default", "push"] } # scraper rand = "0.8.5" futures = "0.3.28" -rustls = "0.21.3" +rustls = "0.21.5" reqwest = { version = "0.11.18", features = ["rustls", "json"] } minidom = "0.15.2" regex = "1.9.1" diff --git a/server/calendar/Dockerfile b/server/calendar/Dockerfile index 97eeab184..5f1e3b040 100644 --- a/server/calendar/Dockerfile +++ b/server/calendar/Dockerfile @@ -1,5 +1,5 @@ # Compile -FROM rust:1.70-alpine AS compiler +FROM rust:1.71-alpine AS compiler # to ache the build this line inludes all the dependencys all servers need # this is not an issue since we copy the generated binary to a more minimal envornment diff --git a/server/feedback/Cargo.toml b/server/feedback/Cargo.toml index 226a04cea..cf093caec 100644 --- a/server/feedback/Cargo.toml +++ b/server/feedback/Cargo.toml @@ -26,7 +26,7 @@ serde_json.workspace = true rand = "0.8.5" regex = "1.9.1" -octocrab = "0.25.1" +octocrab = "0.28.0" # web jsonwebtoken = "8.3.0" @@ -34,7 +34,7 @@ chrono= { version = "0.4.26", default-features = false } actix-governor = { version = "0.4.1", features = ["log"] } # proposing feedback -tempfile = "3.6.0" +tempfile = "3.7.0" image = { version = "0.24.6", features = ["default", "webp-encoder"] } imageproc = "0.23.0" base64 = "0.21.2" diff --git a/server/feedback/Dockerfile b/server/feedback/Dockerfile index cf02f9acb..f2ca4235c 100644 --- a/server/feedback/Dockerfile +++ b/server/feedback/Dockerfile @@ -1,5 +1,5 @@ # Compile -FROM rust:1.70-alpine AS compiler +FROM rust:1.71-alpine AS compiler # to ache the build this line inludes all the dependencys all servers need # this is not an issue since we copy the generated binary to a more minimal envornment diff --git a/server/feedback/src/proposed_edits/discription.rs b/server/feedback/src/proposed_edits/discription.rs index 032346f30..bb90bca41 100644 --- a/server/feedback/src/proposed_edits/discription.rs +++ b/server/feedback/src/proposed_edits/discription.rs @@ -75,8 +75,7 @@ mod test_discription { #[test] fn test_apply_set() { let mut description = Description::default(); - let mut set = HashMap::new(); - set.insert("key".to_string(), TestEdit::default()); + let set = HashMap::from([("key".to_string(), TestEdit)]); description.appply_set("category", set, Path::new("")); assert_eq!(description.title, "1 category edits"); assert_eq!(description.body, "The following category edits were made:\n| entry | edit | \n| --- | --- | \n| [`key`](https://nav.tum.de/view/key) | applied_value |\n"); diff --git a/server/feedback/src/tokens.rs b/server/feedback/src/tokens.rs index 59e1224cd..2f0459a0e 100644 --- a/server/feedback/src/tokens.rs +++ b/server/feedback/src/tokens.rs @@ -9,7 +9,7 @@ pub struct RecordedTokens(Mutex>); pub struct TokenRecord { kid: u64, - next_reset: usize, + next_reset: i64, } fn able_to_process_feedback() -> bool { @@ 
-19,20 +19,20 @@ fn able_to_process_feedback() -> bool { // Additionally, there is a short delay until a token can be used. // Clients need to wait that time if (for some reason) the user submitted // faster than limited here. -const TOKEN_MIN_AGE: usize = 5; -const TOKEN_MAX_AGE: usize = 3600 * 12; // 12h +const TOKEN_MIN_AGE: i64 = 5; +const TOKEN_MAX_AGE: i64 = 3600 * 12; // 12h #[derive(Debug, Serialize, Deserialize)] pub struct Claims { - exp: usize, // Required (validate_exp defaults to true in validation). Expiration time (as UTC timestamp) - iat: usize, // Optional. Issued at (as UTC timestamp) - nbf: usize, // Optional. Not Before (as UTC timestamp) - kid: u64, // Optional. Key ID + exp: i64, // Required (validate_exp defaults to true in validation). Expiration time (as UTC timestamp) + iat: i64, // Optional. Issued at (as UTC timestamp) + nbf: i64, // Optional. Not Before (as UTC timestamp) + kid: u64, // Optional. Key ID } impl Claims { pub fn new() -> Self { - let now = chrono::Utc::now().timestamp() as usize; + let now = chrono::Utc::now().timestamp(); Self { exp: now + TOKEN_MAX_AGE, iat: now, @@ -78,7 +78,7 @@ impl RecordedTokens { // - neither synced across multiple feedback instances, nor // - persisted between reboots - let now = chrono::Utc::now().timestamp() as usize; + let now = chrono::Utc::now().timestamp(); let mut tokens = self.0.lock().await; // remove outdated tokens (no longer relevant for rate limit) tokens.retain(|t| t.next_reset > now); @@ -100,7 +100,7 @@ impl RecordedTokens { #[derive(Debug, Serialize, Deserialize)] struct Token { - created_at: usize, // unix timestamp + created_at: i64, // unix timestamp token: String, } @@ -120,7 +120,7 @@ pub async fn get_token() -> HttpResponse { match token { Ok(token) => { - let created_at = chrono::Utc::now().timestamp() as usize; + let created_at = chrono::Utc::now().timestamp(); HttpResponse::Created().json(Token { created_at, token }) } Err(e) => { diff --git a/server/main-api/Cargo.toml b/server/main-api/Cargo.toml index 070963f2d..1041cb9a8 100644 --- a/server/main-api/Cargo.toml +++ b/server/main-api/Cargo.toml @@ -38,7 +38,7 @@ meilisearch-sdk = "0.24.1" logos="0.13.0" # maps -rustls = "0.21.3" +rustls = "0.21.5" reqwest = { version= "0.11.18", features = ["rustls"] } image = "0.24.6" imageproc = "0.23.0" diff --git a/server/main-api/Dockerfile.server b/server/main-api/Dockerfile.server index 893730f82..24957cc90 100644 --- a/server/main-api/Dockerfile.server +++ b/server/main-api/Dockerfile.server @@ -1,5 +1,5 @@ # Compile -FROM rust:1.70-alpine AS compiler +FROM rust:1.71-alpine AS compiler # to ache the build this line inludes all the dependencys all servers need # this is not an issue since we copy the generated binary to a more minimal envornment diff --git a/server/main-api/src/entries/get.rs b/server/main-api/src/entries/get.rs index 99cf0af97..3c843996e 100644 --- a/server/main-api/src/entries/get.rs +++ b/server/main-api/src/entries/get.rs @@ -10,7 +10,7 @@ pub async fn get_handler( web::Query(args): web::Query, ) -> HttpResponse { let conn = &mut utils::establish_connection(); - let (probable_id, redirect_url) = match get_alias_and_redirect(conn, params.into_inner()) { + let (probable_id, redirect_url) = match get_alias_and_redirect(conn, ¶ms.into_inner()) { Some(alias_and_redirect) => alias_and_redirect, None => return HttpResponse::NotFound().body("Not found"), }; @@ -53,10 +53,10 @@ pub async fn get_handler( } } -fn get_alias_and_redirect(conn: &mut SqliteConnection, query: String) -> 
Option<(String, String)> { - use crate::schema::aliases::dsl::*; +fn get_alias_and_redirect(conn: &mut SqliteConnection, query: &str) -> Option<(String, String)> { + use crate::schema::aliases::dsl::{alias, aliases, key, type_, visible_id}; let result = aliases - .filter(alias.eq(&query).or(key.eq(&query))) + .filter(alias.eq(query).or(key.eq(query))) .select((key, visible_id, type_)) .distinct() .load::(conn); @@ -85,14 +85,11 @@ fn get_alias_and_redirect(conn: &mut SqliteConnection, query: String) -> Option< fn extract_redirect_exact_match(type_: &str, key: &str) -> String { match type_ { - "root" => "".to_string(), + "root" => String::new(), "campus" => format!("/campus/{key}"), - "site" => format!("/site/{key}"), - "area" => format!("/site/{key}"), // Currently also "site", maybe "group"? TODO - "building" => format!("/building/{key}"), - "joined_building" => format!("/building/{key}"), - "room" => format!("/room/{key}"), - "virtual_room" => format!("/room/{key}"), + "site" | "area" => format!("/site/{key}"), + "building" | "joined_building" => format!("/building/{key}"), + "room" | "virtual_room" => format!("/room/{key}"), "poi" => format!("/poi/{key}"), _ => format!("/view/{key}"), // can be triggered if we add a type but don't add it here } diff --git a/server/main-api/src/maps/fetch_tile.rs b/server/main-api/src/maps/fetch_tile.rs index be2c3c57a..b08501139 100644 --- a/server/main-api/src/maps/fetch_tile.rs +++ b/server/main-api/src/maps/fetch_tile.rs @@ -66,21 +66,23 @@ impl FetchTileTask { pub async fn fulfill(self) -> Option<((u32, u32), image::DynamicImage)> { // gets the image fro the server. using a disk-cached image if possible let filename = format!("{}_{}_{}@2x.png", self.z, self.x, self.y); - let file = std::env::temp_dir().join("tiles").join(filename); - let tile = match tokio::fs::read(&file).await { + let file_path = std::env::temp_dir().join("tiles").join(filename); + let tile = match tokio::fs::read(&file_path).await { Ok(content) => web::Bytes::from(content), Err(_) => { - let mut tile = self.download_map_image(&file).await; + let mut tile = self.download_map_image(&file_path).await; for i in 1..3 { if tile.is_err() { - warn!("Error while downloading {file:?} {i} times. Retrying"); - tile = self.download_map_image(&file).await; + warn!("Error while downloading {file_path:?} {i} times. Retrying"); + tile = self.download_map_image(&file_path).await; } } match tile { Ok(t) => t, Err(e) => { - error!("could not fulfill {file:?} 3 times. Giving up. Last error {e:?}"); + error!( + "could not fulfill {file_path:?} 3 times. Giving up. Last error {e:?}" + ); return None; } } @@ -90,7 +92,7 @@ impl FetchTileTask { match image::load_from_memory(&tile) { Ok(img) => Some((self.index, img)), Err(e) => { - error!("Error while parsing image: {e:#?} for {file:?}"); + error!("Error while parsing image: {e:#?} for {file_path:?}"); None } } @@ -111,18 +113,17 @@ impl FetchTileTask { let url = self.get_tileserver_url(); let res = reqwest::get(&url).await?.bytes().await?; - match res.len() { - response_size @ 0..=500 => Err(io::Error::new( + if let response_size @ 0..=500 = res.len() { + return Err(io::Error::new( ErrorKind::Other, format!("Got a short Response from {url}. . Response ({response_size}B): {res:?}"), ) - .into()), - _ => { - if let Err(e) = tokio::fs::write(file, &res).await { - warn!("failed to write {url} to {file:?} because {e:?}. 
Files wont be cached"); - }; - Ok(res) - } + .into()); } + + if let Err(e) = tokio::fs::write(file, &res).await { + warn!("failed to write {url} to {file:?} because {e:?}. Files wont be cached"); + }; + Ok(res) } } diff --git a/server/main-api/src/maps/mod.rs b/server/main-api/src/maps/mod.rs index 3053dd2bc..d58b724b1 100644 --- a/server/main-api/src/maps/mod.rs +++ b/server/main-api/src/maps/mod.rs @@ -30,15 +30,12 @@ pub fn configure(cfg: &mut web::ServiceConfig) { fn get_localised_data(id: &str, should_use_english: bool) -> Result { let conn = &mut utils::establish_connection(); - let result = match should_use_english { - true => { - use crate::schema::en::dsl; - dsl::en.filter(dsl::key.eq(&id)).load::(conn) - } - false => { - use crate::schema::de::dsl; - dsl::de.filter(dsl::key.eq(&id)).load::(conn) - } + let result = if should_use_english { + use crate::schema::en::dsl; + dsl::en.filter(dsl::key.eq(&id)).load::(conn) + } else { + use crate::schema::de::dsl; + dsl::de.filter(dsl::key.eq(&id)).load::(conn) }; match result { @@ -77,10 +74,10 @@ async fn construct_image_from_data(_id: &str, data: DBRoomEntry) -> Option Vec { +fn wrap_image_in_response(img: &image::RgbaImage) -> Vec { let mut w = Cursor::new(Vec::new()); img.write_to(&mut w, image::ImageOutputFormat::Png).unwrap(); w.into_inner() diff --git a/server/main-api/src/maps/overlay_map.rs b/server/main-api/src/maps/overlay_map.rs index c1304d5c7..9a5d3dbca 100644 --- a/server/main-api/src/maps/overlay_map.rs +++ b/server/main-api/src/maps/overlay_map.rs @@ -108,9 +108,9 @@ mod overlay_tests { #[test] fn test_lat_lon_z_to_xyz() { - let (x, y, _) = lat_lon_z_to_xyz(52.520008, 13.404954, 17); - assert_eq!(x, 70416.59); - assert_eq!(y, 42985.734); + let (x, y, _) = lat_lon_z_to_xyz(52.520_008, 13.404_954, 17); + assert_eq!(x, 70416.59_f32); + assert_eq!(y, 42985.734_f32); } #[test] diff --git a/server/main-api/src/search/search_executor/formatter.rs b/server/main-api/src/search/search_executor/formatter.rs index dfaff78de..edf7f33f7 100644 --- a/server/main-api/src/search/search_executor/formatter.rs +++ b/server/main-api/src/search/search_executor/formatter.rs @@ -52,7 +52,6 @@ impl RoomVisitor { split_arch_id.1, )) } - TextToken::SplittableText(_) => None, // If it doesn't match some precise room format, but the search is clearly // matching the arch name and not the main name, then we highlight this arch name. 
@@ -78,7 +77,9 @@ impl RoomVisitor {
                     parsed_aid.1,
                 ))
             }
-            TextToken::Text(_) => None,
+
+            // not relevant enough for room highlighting
+            TextToken::Text(_) | TextToken::SplittableText(_) => None,
         }
     }
 
diff --git a/server/main-api/src/search/search_executor/parser.rs b/server/main-api/src/search/search_executor/parser.rs
index 0ae8804c6..7cb2e4398 100644
--- a/server/main-api/src/search/search_executor/parser.rs
+++ b/server/main-api/src/search/search_executor/parser.rs
@@ -17,15 +17,15 @@ impl Filter {
             let parents: Vec<&str> = self.parents.iter().map(|s| s.as_str()).collect();
             filters.push(format!(
                 "((parent_keywords IN {parents:?}) OR (parent_building_names IN {parents:?}) OR (campus IN {parents:?}))"
-            ))
+            ));
         }
         if !self.types.is_empty() {
             let types: Vec<&str> = self.types.iter().map(|s| s.as_str()).collect();
-            filters.push(format!("(type IN {types:?})"))
+            filters.push(format!("(type IN {types:?})"));
         }
         if !self.usages.is_empty() {
             let usages: Vec<&str> = self.usages.iter().map(|s| s.as_str()).collect();
-            filters.push(format!("(usage IN {usages:?})"))
+            filters.push(format!("(usage IN {usages:?})"));
         }
         filters.join(" AND ")
     }
diff --git a/server/main-api/src/search/search_executor/query.rs b/server/main-api/src/search/search_executor/query.rs
index af094810e..356281c46 100644
--- a/server/main-api/src/search/search_executor/query.rs
+++ b/server/main-api/src/search/search_executor/query.rs
@@ -31,10 +31,7 @@ struct GeoEntryFilters {
 impl GeoEntryFilters {
     fn from(filters: &Filter) -> Self {
         let ms_filter = filters.as_meilisearch_filters();
-        let separator = match ms_filter.is_empty() {
-            true => "",
-            false => " AND ",
-        };
+        let separator = if ms_filter.is_empty() { " " } else { " AND " };
         Self {
             default: ms_filter.clone(),
             buildings: format!("facet = \"building\"{separator}{ms_filter}"),
diff --git a/webclient/nginx.conf b/webclient/nginx.conf
index eb9931c87..3532b2eed 100644
--- a/webclient/nginx.conf
+++ b/webclient/nginx.conf
@@ -57,7 +57,7 @@ http {
 
         # metadata
         location = /robots.txt { access_log off; }
-        location = /googlebef9161f1176c5e0.html { access_log off; root /app/.well-known/; } # google search console
+        location = /googlebef9161f1176c5e0.html { access_log off; return 200 'google-site-verification: googlebef9161f1176c5e0.html'; } # google search console
         location = /sitemap-webclient.xml { access_log off; }
         location = /favicon.ico { access_log off; }
         location = /navigatum-card.png { access_log off; }
diff --git a/webclient/package.json b/webclient/package.json
index 69f2cfdbd..f108daf80 100644
--- a/webclient/package.json
+++ b/webclient/package.json
@@ -33,7 +33,7 @@
     "@sentry/vite-plugin": "^2.4.0",
     "@types/swagger-ui-dist": "^3.30.1",
     "@vitejs/plugin-vue": "^4.2.3",
-    "@vue/eslint-config-prettier": "^7.1.0",
+    "@vue/eslint-config-prettier": "^8.0.0",
     "@vue/eslint-config-typescript": "^11.0.3",
     "@vue/tsconfig": "^0.4.0",
     "@yankeeinlondon/link-builder": "^1.2.1",
diff --git a/webclient/public/.well-known/googlebef9161f1176c5e0.html b/webclient/public/.well-known/googlebef9161f1176c5e0.html
deleted file mode 100644
index a3120f1ee..000000000
--- a/webclient/public/.well-known/googlebef9161f1176c5e0.html
+++ /dev/null
@@ -1 +0,0 @@
-google-site-verification: googlebef9161f1176c5e0.html
diff --git a/webclient/vite.config.ts b/webclient/vite.config.ts
index 302edf7d6..2f1bc5b8c 100644
--- a/webclient/vite.config.ts
+++ b/webclient/vite.config.ts
@@ -13,7 +13,10 @@ import { sentryVitePlugin } from "@sentry/vite-plugin";
 // https://vitejs.dev/config/
 let commit_message = process.env.GIT_COMMIT_MESSAGE || "development";
-commit_message = commit_message.replaceAll(/[^a-z0-9 ]+/gi, "_").replaceAll(/_$/g, "").trim();
+commit_message = commit_message
+  .replaceAll(/[^a-z0-9 ]+/gi, "_")
+  .replaceAll(/_$/g, "")
+  .trim();
 
 export default defineConfig({
   envDir: path.resolve(__dirname, "./env"),
   appType: "spa",
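
For reference, a minimal TypeScript sketch of what the replaceAll chain in vite.config.ts above produces; the sample commit message and variable names are illustrative only, not taken from the repository:

    // collapse every run of characters outside [a-z0-9 ] (case-insensitive) into a single "_",
    // then drop one trailing "_" and strip surrounding whitespace
    const raw = "fix: maps/overlay (#123)"; // hypothetical example input
    const sanitized = raw
      .replaceAll(/[^a-z0-9 ]+/gi, "_")
      .replaceAll(/_$/g, "")
      .trim();
    console.log(sanitized); // fix_ maps_overlay _123

The chained form only changes formatting; the resulting string is unchanged.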