diff --git a/LICENSE b/LICENSE index 9d94da10..7eec235d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2023 Timo Thurow +Copyright (c) 2023 Hochschule Osnabrück, LMIS AG, THGA, BO-I-T Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/README.md b/README.md index 19637948..389ea0e4 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,15 @@ -# AW40-hub-docker - +# AW40-HUB + +

+ OpenAPI + Python 3.11 + FastAPI + License: MIT +

+ +## Description +This is the prototype implementation of the AW4.0 HUB architecture and part of the [Car Repair 4.0](https://www.autowerkstatt40.org/en/) research project. The purpose of the HUB is to enable car workshops to use AI-driven diagnostics, persist acquired data from cars in a database, and participate alongside other car workshops and AI model providers in a [Gaia-X](https://gaia-x.eu/) compatible Dataspace to sell data and acquire new AI models. +The name AW40 is a shortened version of the German project title "Autowerkstatt 4.0". ## Requirements - Docker v25.0 or later (run `docker --version`) @@ -10,7 +20,7 @@ If you just need to update buildx, see [this section](#updating-docker-buildx-builder). ## Overview -Prototype implementation of the AW40 HUB Architecture on Docker\ +This is the prototype implementation of the AW4.0 HUB Architecture.\ Currently included services: | Service (see [docker-compose.yml](docker-compose.yml)) | Description | @@ -30,8 +40,8 @@ Currently included services: ## Usage -### Start the developement HUB -**WARNING: DO NOT RUN THE DEVELOPEMENT HUB ON PUBLIC SERVER**\ +### Start the development HUB +**WARNING: DO NOT RUN THE DEVELOPMENT HUB ON A PUBLIC SERVER**\ To start the HUB in developer mode use:\ ```docker compose --env-file=dev.env --profile full up -d``` diff --git a/api/Dockerfile b/api/Dockerfile index e617becd..2cd73770 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -13,6 +13,9 @@ RUN groupadd -r api && \ # use api users home directory as workdir WORKDIR /home/api +# create directory to store asset data and chown to api user +RUN mkdir /home/api/asset-data && chown api:api /home/api/asset-data + # install minimal requirements COPY ./requirements.txt /home/api/requirements.txt RUN pip install --upgrade pip && \ diff --git a/api/api.env b/api/api.env index 32b40bc2..24bc3e26 100644 --- a/api/api.env +++ b/api/api.env @@ -1,5 +1,6 @@ API_ALLOW_ORIGINS=${API_ALLOW_ORIGINS:?error} API_KEY_DIAGNOSTICS=${API_KEY_DIAGNOSTICS:?err} +API_KEY_ASSETS=${API_KEY_ASSETS:?err} MONGO_HOST=mongo MONGO_USERNAME=${MONGO_API_USERNAME:-mongo-api-user} MONGO_PASSWORD=${MONGO_API_PASSWORD:?error} diff --git a/api/api/data_management/__init__.py b/api/api/data_management/__init__.py index 87985854..8ffea393 100644 --- a/api/api/data_management/__init__.py +++ b/api/api/data_management/__init__.py @@ -1,4 +1,11 @@ __all__ = [ + "NewAsset", + "Asset", + "AssetDefinition", + "AssetMetaData", + "NewPublication", + "Publication", + "AssetDataStatus", "NewCase", "Case", "CaseUpdate", @@ -31,6 +38,10 @@ "BaseSignalStore" ] +from .assets import ( + NewAsset, AssetDefinition, Asset, AssetMetaData, Publication, + NewPublication, AssetDataStatus +) from .case import NewCase, Case, CaseUpdate from .customer import Customer, CustomerBase, CustomerUpdate from .diagnosis import ( diff --git a/api/api/data_management/assets.py b/api/api/data_management/assets.py new file mode 100644 index 00000000..13093f58 --- /dev/null +++ b/api/api/data_management/assets.py @@ -0,0 +1,182 @@ +import json +import os +from datetime import datetime, UTC +from enum import Enum +from typing import Optional, Annotated, ClassVar, Literal +from zipfile import ZipFile + +from beanie import Document, before_event, Delete +from pydantic import BaseModel, StringConstraints, Field + +from .case import Case + + +class AssetDataStatus(str, Enum): + defined = "defined" + processing = "processing" + ready = "ready" + + +class AssetDefinition(BaseModel): + """ + Defines filter conditions that cases 
have to match to be included in an + asset. + """ + vin: Optional[ + Annotated[str, StringConstraints(min_length=3, max_length=9)] + ] = Field( + default=None, + description="Partial VIN used to filter cases for inclusion in the " + "asset." + ) + obd_data_dtc: Optional[ + Annotated[str, StringConstraints(min_length=5, max_length=5)] + ] = Field( + default=None, + description="DTC that has to be present in a case's OBD datasets for " + "inclusion in the asset." + ) + timeseries_data_component: Optional[str] = Field( + default=None, + description="Timeseries data component that has to be present in a " + "case's timeseries datasets for inclusion in the asset." + ) + + +class PublicationNetwork(str, Enum): + pontusxdev = "PONTUSXDEV" + pontusxtest = "PONTUSXTEST" + + +class PublicationBase(BaseModel): + network: PublicationNetwork = Field( + description="Network that an asset is available in via this " + "publication", + default=PublicationNetwork.pontusxdev + ) + license: str = "CUSTOM" + price: float = 1.0 + + +class NewPublication(PublicationBase): + """Schema for new asset publications.""" + nautilus_private_key: str = Field( + description="Key for dataspace authentication." + ) + + +class Publication(PublicationBase): + """Publication information for an asset.""" + did: str = Field( + description="Id of this publication within its network." + ) + asset_url: str = Field( + description="URL to access asset data from the network." + ) + asset_key: str = Field( + description="Publication specific key to access data via `asset_url`.", + exclude=True + ) + + +class AssetMetaData(BaseModel): + name: str + definition: AssetDefinition + description: str + timestamp: datetime = Field(default_factory=lambda: datetime.now(UTC)) + type: Literal["dataset"] = "dataset" + author: str + + +class Asset(AssetMetaData, Document): + """DB schema and interface for assets.""" + + class Settings: + name = "assets" + + data_status: AssetDataStatus = AssetDataStatus.defined + publication: Optional[Publication] = None + + asset_data_dir_path: ClassVar[str] = "asset-data" + + @staticmethod + def _publication_case_json(case: Case) -> str: + """Convert a Case into a publication ready json string.""" + # Keep WMI+VDS from VIN and mask VIS. See + # https://de.wikipedia.org/wiki/Fahrzeug-Identifizierungsnummer#Aufbau + case.vehicle_vin = case.vehicle_vin[:9] + 8*"*" + # Exclude fields only relevant for internal data management from case + exclude = { + field: True for field in [ + "customer_id", "workshop_id", "diagnosis_id", + "timeseries_data_added", "obd_data_added", "symptoms_added", + "status" + ] + } + # Exclude fields only relevant for internal data management from + # submodels + for data_submodel in ["timeseries_data", "obd_data", "symptoms"]: + exclude[data_submodel] = {"__all__": {"data_id"}} + + case_json = case.model_dump_json(exclude=exclude, indent=1) + return case_json + + @property + def data_file_name(self): + """Zip file name of the asset's dataset.""" + return f"{str(self.id)}.zip" + + @property + def data_file_path(self): + """Path to this asset's dataset.""" + return os.path.join( + self.asset_data_dir_path, self.data_file_name + ) + + async def process_definition(self): + """ + Process the definition of an Asset to prepare the defined data for + publication in a dataspace. 
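+
+        The resulting archive layout (as implemented below) contains one
+        "cases/<case_id>.json" file per matching case and one
+        "signals/<signal_id>.json" file per referenced timeseries signal.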
+ """ + self.data_status = AssetDataStatus.processing + await self.save() + # Find all cases matching the definition + cases = await Case.find_in_hub( + vin=self.definition.vin, + obd_data_dtc=self.definition.obd_data_dtc, + timeseries_data_component=self.definition.timeseries_data_component + ) + # Create a new zip archive for this asset + with ZipFile(self.data_file_path, "x") as archive: + archive.mkdir("cases") + archive.mkdir("signals") + for case in cases: + case_id = str(case.id) + case_json = self._publication_case_json(case) + archive.writestr( + f"cases/{case_id}.json", data=case_json + ) + for tsd in case.timeseries_data: + signal_id = str(tsd.signal_id) + signal = await tsd.get_signal() + archive.writestr( + f"signals/{signal_id}.json", data=json.dumps(signal) + ) + + self.data_status = AssetDataStatus.ready + await self.save() + + @before_event(Delete) + def _delete_asset_data(self): + """Remove associated data when asset is deleted.""" + # If there is an archive file associated with this asset, delete it. + if os.path.exists(self.data_file_path): + os.remove(self.data_file_path) + + +class NewAsset(BaseModel): + """Schema for new asset added via the api.""" + name: str + definition: Optional[AssetDefinition] = AssetDefinition() + description: str + author: str diff --git a/api/api/data_management/case.py b/api/api/data_management/case.py index deaee1c7..4509bb76 100644 --- a/api/api/data_management/case.py +++ b/api/api/data_management/case.py @@ -126,19 +126,55 @@ async def find_in_hub( cls, customer_id: Optional[str] = None, vin: Optional[str] = None, - workshop_id: Optional[str] = None + workshop_id: Optional[str] = None, + obd_data_dtc: Optional[str] = None, + timeseries_data_component: Optional[str] = None ) -> List[Self]: """ - Get list of all cases filtered by customer_id, vehicle_vin and - workshop_id. + Get list of all cases with optional filtering by customer_id, + vehicle_vin, workshop_id or obd_data dtc. + + Parameters + ---------- + customer_id + Customer Id to search for. Only cases associated with the specified + customer are returned + vin + (Partial) VIN to search for. The specified parameter value is + matched against the beginning of the stored vins. + This allows partial vin specification e.g. to search for cases with + vehicles by a specific manufacturer. + workshop_id + Workshop Id to search for. Only cases from the specified workshop + are returned. + obd_data_dtc + DTC to search for. Only cases with at least one occurrence of the + specified dtc in any of the OBD datasets are returned. + timeseries_data_component + Timeseries data component to search for. Only cases that contain at + least one timeseries dataset for the specified component are + returned. + + Returns + ------- + List of cases matching the specified search criteria. 
""" filter = {} if customer_id is not None: filter["customer_id"] = PydanticObjectId(customer_id) if vin is not None: - filter["vehicle_vin"] = vin + # VIN is matched against beginning of stored vins + filter["vehicle_vin"] = {"$regex": f"^{vin}"} if workshop_id is not None: filter["workshop_id"] = workshop_id + if obd_data_dtc is not None: + # Only return cases that contain the specified dtc in any + # of the obd datasets + filter["obd_data.dtcs"] = obd_data_dtc + if timeseries_data_component is not None: + # Only return cases that contain a timeseries dataset with the + # specified component + filter["timeseries_data.component"] = timeseries_data_component cases = await cls.find(filter).to_list() return cases diff --git a/api/api/dataspace_management/__init__.py b/api/api/dataspace_management/__init__.py new file mode 100644 index 00000000..25e7e08c --- /dev/null +++ b/api/api/dataspace_management/__init__.py @@ -0,0 +1,5 @@ +__all__ = [ + "Nautilus" +] + +from .nautilus import Nautilus diff --git a/api/api/dataspace_management/nautilus.py b/api/api/dataspace_management/nautilus.py new file mode 100644 index 00000000..a43fc7b2 --- /dev/null +++ b/api/api/dataspace_management/nautilus.py @@ -0,0 +1,105 @@ +from typing import Optional, Tuple + +import httpx + +from ..data_management import Asset, Publication + + +class Nautilus: + _url: Optional[str] = None + _timeout: Optional[int] = None # Timeout for external requests to nautilus + _api_key_assets: Optional[str] = None + + def __init__(self): + if not self._url: + raise AttributeError("No Nautilus connection configured.") + + @classmethod + def configure(cls, url: str, timeout: int, api_key_assets: str): + """Configure the nautilus connection details.""" + cls._url = url + cls._timeout = timeout + cls._api_key_assets = api_key_assets + + @property + def _publication_url(self): + return "/".join([self._url, "publish"]) + + @property + def _revocation_url(self): + return "/".join([self._url, "revoke"]) + + async def _post_request( + self, + url: str, + headers: dict, + json_payload: Optional[dict] = None + ) -> Tuple[Optional[httpx.Response], str]: + """ + Helper method to perform a POST request with standard error handling. + """ + try: + response = await httpx.AsyncClient().post( + url, json=json_payload, headers=headers, timeout=self._timeout + ) + response.raise_for_status() + return response, "success" + except httpx.TimeoutException: + return None, "Connection timeout." + except httpx.HTTPStatusError as e: + return None, e.response.text + + async def publish_access_dataset( + self, asset: Asset, nautilus_private_key: str + ) -> Tuple[Optional[str], str]: + """ + Publish an asset to Nautilus. + """ + # Set up request payload + payload = { + "service_descr": { + "url": asset.publication.asset_url, + "api_key": self._api_key_assets, + "data_key": asset.publication.asset_key + }, + "asset_descr": { + **asset.model_dump( + include={"name", "type", "description", "author"} + ), + "license": asset.publication.license, + "price": { + "value": asset.publication.price, + "currency": "FIXED_EUROE" + } + } + } + # Attempt publication + response, info = await self._post_request( + url="/".join( + [self._publication_url, asset.publication.network] + ), + headers={"priv_key": nautilus_private_key}, + json_payload=payload + ) + # Publication failed. No did is returned. + if response is None: + return None, info + + # Publication successful. Did is returned. 
+ did = response.json().get("assetdid") + return did, info + + async def revoke_publication( + self, publication: Publication, nautilus_private_key: str + ) -> Tuple[bool, str]: + """Revoke a published asset in Nautilus.""" + url = "/".join( + [self._revocation_url, publication.network, publication.did] + ) + response, info = await self._post_request( + url=url, headers={"priv_key": nautilus_private_key} + ) + if response is None: + return False, info + + return True, "success" diff --git a/api/api/main.py b/api/api/main.py index e99ceb33..1454fee5 100644 --- a/api/api/main.py +++ b/api/api/main.py @@ -6,14 +6,15 @@ from .data_management import ( Case, Vehicle, Customer, Workshop, TimeseriesMetaData, Diagnosis, - AttachmentBucket + AttachmentBucket, Asset ) from .data_management.timeseries_data import GridFSSignalStore +from .dataspace_management import Nautilus from .diagnostics_management import DiagnosticTaskManager, KnowledgeGraph from .settings import settings from .security.keycloak import Keycloak from .v1 import api_v1 -from .routers import diagnostics +from .routers import diagnostics, assets app = FastAPI() app.add_middleware( @@ -42,7 +43,7 @@ async def init_mongo(): await init_beanie( client[settings.mongo_db], document_models=[ - Case, Vehicle, Customer, Workshop, Diagnosis + Case, Vehicle, Customer, Workshop, Diagnosis, Asset ] ) @@ -83,3 +84,13 @@ def init_keycloak(): @app.on_event("startup") def set_api_keys(): diagnostics.api_key_auth.valid_key = settings.api_key_diagnostics + assets.api_key_auth.valid_key = settings.api_key_assets + + +@app.on_event("startup") +def init_nautilus(): + Nautilus.configure( + url=settings.nautilus_url, + timeout=settings.nautilus_timeout, + api_key_assets=settings.api_key_assets + ) diff --git a/api/api/routers/assets.py b/api/api/routers/assets.py new file mode 100644 index 00000000..a8df8d60 --- /dev/null +++ b/api/api/routers/assets.py @@ -0,0 +1,256 @@ +import secrets +from typing import List + +from bson import ObjectId +from bson.errors import InvalidId +from fastapi import ( + APIRouter, BackgroundTasks, Depends, HTTPException, Request, Body +) +from fastapi.responses import FileResponse, JSONResponse +from fastapi.security import APIKeyHeader + +from ..data_management import ( + NewAsset, Asset, Publication, AssetDataStatus, NewPublication +) +from ..dataspace_management import Nautilus +from ..security.token_auth import authorized_assets_access +from ..security.api_key_auth import APIKeyAuth + +api_key_auth = APIKeyAuth() + +tags_metadata = [ + { + "name": "Dataspace Assets", + "description": "Proprietary dataspace asset management." + }, + { + "name": "Public Dataspace Resources", + "description": "Access to resources shared within the dataspace." + } +] + +management_router = APIRouter( + tags=["Dataspace Assets"], + prefix="/dataspace/manage", + dependencies=[Depends(authorized_assets_access)] +) + +public_router = APIRouter( + tags=["Public Dataspace Resources"], + prefix="/dataspace/public" +) + + +@management_router.get("/assets", status_code=200, response_model=List[Asset]) +async def list_assets( +): + """Retrieve list of assets.""" + return await Asset.find().to_list() + + +@management_router.post("/assets", status_code=201, response_model=Asset) +async def add_asset( + asset: NewAsset, background_tasks: BackgroundTasks +): + """ + Add a new asset. + + Afterwards, data will be processed and packaged for publication in the + background. 
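+
+    Example request body (illustrative values only):
+
+        {
+            "name": "Example asset",
+            "definition": {"vin": "W0L", "obd_data_dtc": "P0001"},
+            "description": "Cases with DTC P0001 for a VIN prefix.",
+            "author": "workshop-42"
+        }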
+ """ + _asset = await Asset(**asset.model_dump()).create() + background_tasks.add_task(_asset.process_definition) + return _asset + + +async def asset_by_id(asset_id: str) -> Asset: + """ + Reusable dependency to handle retrieval of assets by ID. 404 HTTP + exception is raised in case of invalid id. + """ + # Invalid ID format causes 404 + try: + asset_oid = ObjectId(asset_id) + except InvalidId: + raise HTTPException( + status_code=404, detail="Invalid format for asset_id." + ) + # Non-existing ID causes 404 + asset = await Asset.get(asset_oid) + if asset is None: + raise HTTPException( + status_code=404, + detail=f"No asset with id '{asset_id}' found." + ) + + return asset + + +@management_router.get( + "/assets/{asset_id}", status_code=200, response_model=Asset +) +async def get_asset( + asset: Asset = Depends(asset_by_id) +): + """Get an Asset by ID.""" + return asset + + +@management_router.delete( + "/assets/{asset_id}", status_code=200, response_model=None +) +async def delete_asset( + asset: Asset = Depends(asset_by_id), + nautilus: Nautilus = Depends(Nautilus), + nautilus_private_key: str = Body(embed=True) +): + """Delete an Asset and revoke any publications.""" + if asset.publication is not None: + revocation_successful, info = await nautilus.revoke_publication( + publication=asset.publication, + nautilus_private_key=nautilus_private_key + ) + if not revocation_successful: + raise HTTPException( + status_code=500, + detail=f"Failed communication with nautilus: {info}" + ) + await asset.delete() + return None + + +@management_router.get( + "/assets/{asset_id}/data", status_code=200, response_class=FileResponse +) +async def get_asset_dataset( + asset: Asset = Depends(asset_by_id), +): + """Download the dataset of an Asset.""" + if asset.data_status != AssetDataStatus.ready: + raise HTTPException( + status_code=400, + detail="Preparation of asset data hasn't finished, yet." + ) + return FileResponse( + path=asset.data_file_path, filename=asset.data_file_name + ) + + +@management_router.post( + "/assets/{asset_id}/publication", + status_code=201, + response_model=Publication +) +async def publish_asset( + new_publication: NewPublication, + request: Request, + asset: Asset = Depends(asset_by_id), + nautilus: Nautilus = Depends(Nautilus) +): + """Publish the asset in the dataspace.""" + if asset.data_status != AssetDataStatus.ready: + raise HTTPException( + status_code=400, + detail=f"Asset cannot be published until data_status is " + f"{AssetDataStatus.ready.value}." + ) + # If asset is already published, respond with publication information and + # 200 instead of 201 to indicate that no new resource was created. + if asset.publication is not None: + return JSONResponse( + content=asset.publication.model_dump(), status_code=200 + ) + + # New publication + # The full URL for data access depends on deployment and mounting prefixes. + # Hence, split requested URL by management router prefix, keep the first + # part and append the url path of the get_published_dataset endpoint to + # make sure that the asset_url points to the appropriate location for the + # current environment. 
+ asset_url = "".join( + [ + str(request.url).split(management_router.prefix)[0], + public_router.url_path_for( + "get_published_dataset", asset_id=asset.id + ) + ] + ) + + # Might need to fix asset_url scheme if running behind reverse proxy + x_forwarded_proto = request.headers.get("x-forwarded-proto", None) + if x_forwarded_proto is not None: + if x_forwarded_proto != asset_url[:len(x_forwarded_proto)]: + asset_url = f"{x_forwarded_proto}://{asset_url.split('://')[1]}" + + # Generate a new asset key + asset_key = secrets.token_urlsafe(32) + + # Setup Publication with undetermined did + publication = Publication( + did="undetermined", + asset_key=asset_key, + asset_url=asset_url, + **new_publication.model_dump() + ) + asset.publication = publication + await asset.save() + + # Use nautilus to trigger the publication + did, info = await nautilus.publish_access_dataset( + asset=asset, + nautilus_private_key=new_publication.nautilus_private_key + ) + if did is None: + asset.publication = None + await asset.save() + raise HTTPException( + status_code=500, + detail=f"Failed communication with nautilus: {info}" + ) + + # Store the publication did + asset.publication.did = did + await asset.save() + return publication + + +@public_router.head( + "/assets/{asset_id}/data", + status_code=200, + response_class=FileResponse +) +async def get_published_dataset_head( + asset: Asset = Depends(asset_by_id) +): + return FileResponse( + path=asset.data_file_path, filename=f"{asset.name}.zip" + ) + + +@public_router.get( + "/assets/{asset_id}/data", + status_code=200, + response_class=FileResponse, + dependencies=[Depends(api_key_auth)] +) +async def get_published_dataset( + asset_key: str = Depends(APIKeyHeader(name="data_key")), + asset: Asset = Depends(asset_by_id) +): + """Public download link for asset data.""" + publication = asset.publication + if publication is None: + raise HTTPException( + status_code=404, + detail=f"No published asset with ID '{asset.id}' found." 
+ ) + asset_key_valid = secrets.compare_digest(publication.asset_key, asset_key) + if not asset_key_valid: + raise HTTPException( + status_code=401, + detail="Could not validate asset key.", + headers={"WWW-Authenticate": "asset_key"}, + ) + return FileResponse( + path=asset.data_file_path, filename=f"{asset.name}.zip" + ) diff --git a/api/api/routers/customers.py b/api/api/routers/customers.py index 7ddb902e..474b7fdc 100644 --- a/api/api/routers/customers.py +++ b/api/api/routers/customers.py @@ -23,7 +23,7 @@ ) -@router.get("/", status_code=200, response_model=List[Customer]) +@router.get("", status_code=200, response_model=List[Customer]) async def list_customers( response: Response, request: Request, @@ -71,7 +71,7 @@ async def list_customers( return customers -@router.post("/", status_code=201, response_model=Customer) +@router.post("", status_code=201, response_model=Customer) async def add_customer(customer: CustomerBase): """Add a new customer.""" customer = await Customer(**customer.model_dump()).create() diff --git a/api/api/routers/shared.py b/api/api/routers/shared.py index c8eb46e2..eb69033f 100644 --- a/api/api/routers/shared.py +++ b/api/api/routers/shared.py @@ -22,7 +22,6 @@ } ] - router = APIRouter( tags=["Shared"], dependencies=[Depends(authorized_shared_access)] @@ -33,14 +32,24 @@ async def list_cases( customer_id: Optional[str] = None, vin: Optional[str] = None, - workshop_id: Optional[str] = None + workshop_id: Optional[str] = None, + obd_data_dtc: Optional[str] = None, + timeseries_data_component: Optional[str] = None ) -> List[Case]: """ List all cases in Hub. Query params can be used to filter by `customer_id`, - `vin` and `workshop_id`. + (partial) `vin`, `workshop_id`, `obd_data_dtc` or + `timeseries_data_component`. + + The specified `vin` is matched against the beginning of the stored vehicle + vins. """ cases = await Case.find_in_hub( - customer_id=customer_id, vin=vin, workshop_id=workshop_id + customer_id=customer_id, + vin=vin, + workshop_id=workshop_id, + obd_data_dtc=obd_data_dtc, + timeseries_data_component=timeseries_data_component ) return cases @@ -88,6 +97,7 @@ class DatasetById: Parameterized dependency to fetch a dataset by id or raise 404 if the data_id is not existent. """ + def __init__( self, data_type: Literal["timeseries_data", "obd_data", "symptom"] ): diff --git a/api/api/routers/workshop.py b/api/api/routers/workshop.py index acf6c639..c32d632c 100644 --- a/api/api/routers/workshop.py +++ b/api/api/routers/workshop.py @@ -66,10 +66,24 @@ async def list_cases( workshop_id: str, customer_id: Optional[str] = None, - vin: Optional[str] = None + vin: Optional[str] = None, + obd_data_dtc: Optional[str] = None, + timeseries_data_component: Optional[str] = None ) -> List[Case]: + """ + List all cases in Hub. Query params can be used to filter by `customer_id`, + (partial) `vin`, `workshop_id`, `obd_data_dtc` or + `timeseries_data_component`. + + The specified `vin` is matched against the beginning of the stored vehicle + vins. 
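+
+    Example (illustrative values): `?vin=W0L&obd_data_dtc=P0001` returns the
+    workshop's cases whose VIN starts with "W0L" and that contain the DTC
+    "P0001" in at least one OBD dataset.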
+ """ cases = await Case.find_in_hub( - customer_id=customer_id, vin=vin, workshop_id=workshop_id + customer_id=customer_id, + vin=vin, + workshop_id=workshop_id, + obd_data_dtc=obd_data_dtc, + timeseries_data_component=timeseries_data_component ) return cases diff --git a/api/api/security/token_auth.py b/api/api/security/token_auth.py index 5d7ba3f5..f5d14db7 100644 --- a/api/api/security/token_auth.py +++ b/api/api/security/token_auth.py @@ -12,6 +12,8 @@ REQUIRED_SHARED_ROLE = "shared" # required role for customer data management REQUIRED_CUSTOMERS_ROLE = "customers" +# required role for asset data management +REQUIRED_ASSETS_ROLE = "assets" failed_auth_exception = HTTPException( @@ -104,3 +106,14 @@ async def authorized_customers_access( """ if REQUIRED_CUSTOMERS_ROLE not in token_data.roles: raise failed_auth_exception + + +async def authorized_assets_access( + token_data: TokenData = Depends(verify_token) +): + """ + Authorize access to asset data management if the user is assigned the + respective role. + """ + if REQUIRED_ASSETS_ROLE not in token_data.roles: + raise failed_auth_exception diff --git a/api/api/settings.py b/api/api/settings.py index 05d67d48..7cae615a 100644 --- a/api/api/settings.py +++ b/api/api/settings.py @@ -19,8 +19,13 @@ class Settings(BaseSettings): keycloak_url: str = "http://keycloak:8080" keycloak_workshop_realm: str = "werkstatt-hub" + nautilus_url: str = "http://nautilus:3000/nautilus" + nautilus_timeout: int = 120 + api_key_diagnostics: str + api_key_assets: str + exclude_diagnostics_router: bool = False @property diff --git a/api/api/v1.py b/api/api/v1.py index 5f6bc3d5..61123b38 100644 --- a/api/api/v1.py +++ b/api/api/v1.py @@ -1,10 +1,12 @@ +import logging + from fastapi import FastAPI + from .routers import ( health, shared, workshop, diagnostics, knowledge, - customers + customers, assets ) from .settings import settings -import logging class EndpointLogFilter(logging.Filter): @@ -37,6 +39,9 @@ def filter(self, record: logging.LogRecord) -> bool: api_v1.include_router(knowledge.router, prefix="/knowledge") api_v1.include_router(workshop.router) api_v1.include_router(customers.router, prefix="/customers") +# Prefixes for the assets routers are handled in the module +api_v1.include_router(assets.management_router) +api_v1.include_router(assets.public_router) if not settings.exclude_diagnostics_router: api_v1.include_router(diagnostics.router, prefix="/diagnostics") else: diff --git a/api/tests/conftest.py b/api/tests/conftest.py index f5403edd..26a4b8e3 100644 --- a/api/tests/conftest.py +++ b/api/tests/conftest.py @@ -9,7 +9,8 @@ Vehicle, Customer, Workshop, - Diagnosis + Diagnosis, + Asset ) from beanie import init_beanie from bson import ObjectId @@ -47,7 +48,7 @@ async def initialized_beanie_context(motor_db): context manager to handle test setup and teardown. 
""" models = [ - Case, Vehicle, Customer, Workshop, Diagnosis + Case, Vehicle, Customer, Workshop, Diagnosis, Asset ] class InitializedBeanieContext: @@ -317,3 +318,8 @@ def another_rsa_public_key_pem() -> bytes: """Get a public key that does not match keys from any other fixture.""" _, public_key_pem = _create_rsa_key_pair() return public_key_pem + + +@pytest.fixture(autouse=True) +def set_asset_data_dir_path_to_temporary_test_dir(tmp_path): + Asset.asset_data_dir_path = tmp_path diff --git a/api/tests/data_management/__init__.py b/api/tests/data_management/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/api/tests/data_management/test_assets.py b/api/tests/data_management/test_assets.py new file mode 100644 index 00000000..bfef146e --- /dev/null +++ b/api/tests/data_management/test_assets.py @@ -0,0 +1,262 @@ +import json +import os +from typing import List +from zipfile import ZipFile + +import pytest +from api.data_management import ( + Asset, AssetDefinition, AssetDataStatus, Case, NewOBDData, + NewTimeseriesData, TimeseriesMetaData, NewSymptom +) +from pydantic import ValidationError + +from .test_timeseries_data import MockSignalStore + + +@pytest.fixture +def vin(): + """ + Real world VIN from + https://de.wikipedia.org/wiki/Fahrzeug-Identifizierungsnummer + """ + return "W0L000051T2123456" + + +@pytest.fixture(autouse=True) +def set_timeseries_data_signal_store_to_mock(): + TimeseriesMetaData.signal_store = MockSignalStore() + + +class TestAssetDefinition: + + def test_default(self): + # All attributes are optional + AssetDefinition() + + @pytest.mark.parametrize("vin_len", [1, 2, *range(10, 18)]) + def test_vin_length_restriction_not_met(self, vin_len, vin): + with pytest.raises(ValidationError): + AssetDefinition(vin=vin[:vin_len]) + + @pytest.mark.parametrize("vin_len", range(3, 10)) + def test_vin_length_restriction_met(self, vin_len, vin): + AssetDefinition(vin=vin[:vin_len]) + + @pytest.mark.parametrize("dtc", ["P", "P0", "P00", "P000", "P00000"]) + def test_invalid_dtc(self, dtc): + with pytest.raises(ValidationError): + AssetDefinition(obd_data_dtc=dtc) + + def test_valid_dtc(self): + AssetDefinition(obd_data_dtc="P4242") + + +class TestAsset: + + def _check_archive_case_data(self, case: Case, archive_case_data: dict): + """ + Validates contents of a single case stored in an archive. + """ + assert archive_case_data["id"] == str(case.id) + # Confirm that the vin is correctly masked + assert ( + archive_case_data["vehicle_vin"] == + case.vehicle_vin[:9] + 8 * "*" + ) + + # Confirm that fields only relevant to internal data management + # are removed from top-level and submodels + for field in [ + "customer_id", "workshop_id", "diagnosis_id", + "timeseries_data_added", "obd_data_added", "symptoms_added" + ]: + assert field not in archive_case_data + for submodel in ["timeseries_data", "obd_data", "symptoms"]: + for submodel_entry in archive_case_data[submodel]: + assert "data_id" not in submodel_entry + + def _check_archive(self, archive: ZipFile, expected_cases: List[Case]): + """ + Validates the structure and contents of archives generated with the + process_definition_method. 
+ """ + # Get a list of all members (files and directories) in the archive + archive_members = archive.namelist() + + # Ensure the presence of the presence of the expected "cases/" and + # "signals/" directories + assert "cases/" in archive_members + assert "signals/" in archive_members + + # Track the number of signal files in the archive + signals_in_archive = 0 + + for case in expected_cases: + # Ensure the expected case path exists in the archive + archive_case_path = f"cases/{str(case.id)}.json" + assert archive_case_path in archive_members + + # Load and validate case data stored in the archive + archive_case_data = json.loads(archive.read(archive_case_path)) + self._check_archive_case_data(case, archive_case_data) + + for tsd in archive_case_data["timeseries_data"]: + signals_in_archive += 1 + # Ensure the expected signal path exists in the archive + archive_signal_path = f"signals/{tsd['signal_id']}.json" + assert archive_signal_path in archive_members + # Ensure the signal file is valid JSON + assert json.loads(archive.read(archive_signal_path)) + + # Confirm that there is nothing but the expected members (2 directories + # + cases + signals) in the archive + assert ( + len(archive_members) == 2 + + len(expected_cases) + + signals_in_archive + ) + + @pytest.mark.parametrize( + "definition,expected_cases_idx", + [ + (AssetDefinition(), [0, 1, 2]), + (AssetDefinition(vin="W0L"), [0, 1]), + (AssetDefinition(vin="W0L1"), [0]), + (AssetDefinition(obd_data_dtc="P0001"), [0, 2]), + (AssetDefinition(timeseries_data_component="CompA"), [1, 2]), + (AssetDefinition(vin="W0L", obd_data_dtc="P0001"), [0]), + ( + AssetDefinition( + vin="W0L", timeseries_data_component="CompA" + ), + [1] + ), + ( + AssetDefinition( + vin="W0L", + timeseries_data_component="CompB", + obd_data_dtc="P0001" + ), + # No case matches the definition. Hence, archive is + # expected to be empty. 
+ [] + ) + ] + ) + @pytest.mark.asyncio + async def test_process_definition( + self, + definition, + expected_cases_idx, + initialized_beanie_context + ): + async with initialized_beanie_context: + # Three cases with different VINs are stored in the db + cases = [] + cases.append( + Case(vehicle_vin="W0L111111T1111111", workshop_id="a") + ) + cases.append( + Case(vehicle_vin="W0L222222T2222222", workshop_id="b") + ) + cases.append( + Case(vehicle_vin="1111111T150000L0W", workshop_id="c") + ) + for case in cases: + await case.create() + + # Add an OBD Dataset to each case + await cases[0].add_obd_data(NewOBDData(dtcs=["P0001"])) + await cases[1].add_obd_data(NewOBDData(dtcs=["Q0002"])) + await cases[2].add_obd_data(NewOBDData(dtcs=["P0001"])) + + # Add timeseries data to subset of cases + await cases[1].add_timeseries_data( + NewTimeseriesData( + signal=[.0, .1], + sampling_rate=1, + duration=2, + component="CompA", + label="unknown" + ) + ) + await cases[1].add_timeseries_data( + NewTimeseriesData( + signal=[.0, .1, .2], + sampling_rate=1, + duration=3, + component="CompB", + label="unknown" + ) + ) + await cases[2].add_timeseries_data( + NewTimeseriesData( + signal=[.0, .1, .2, .3], + sampling_rate=2, + duration=2, + component="CompA", + label="unknown" + ) + ) + + # Add a symptom to one case + await cases[0].add_symptom( + NewSymptom(component="CompC", label="defect") + ) + + # Create an asset with the parametrized definition + asset = await Asset( + name="Test Asset", + description="This is an test asset.", + definition=definition, + author="test author" + ).create() + + # Process the definition + await asset.process_definition() + # Assert up-to-date data_status in db + await asset.sync() + assert asset.data_status == AssetDataStatus.ready + + # Check the zip archive generated for the parametrized definition + with ZipFile(asset.data_file_path, "r") as archive: + self._check_archive( + archive=archive, + expected_cases=[cases[i] for i in expected_cases_idx] + ) + + @pytest.mark.asyncio + async def test__delete_asset_data(self, initialized_beanie_context): + """ + Confirm automatic deletion of asset data archive upon asset deletion. + """ + async with initialized_beanie_context: + asset = await Asset( + name="Test Asset", + description="This is an test asset.", + definition=AssetDefinition(), + author="Test author" + ).create() + # Test existence of archive file after processing the definition + await asset.process_definition() + assert os.path.exists(asset.data_file_path) + # Test non-existence of archive file after deleting asset from db + await asset.delete() + assert not os.path.exists(asset.data_file_path) + + @pytest.mark.asyncio + async def test__delete_asset_data_without_file( + self, initialized_beanie_context + ): + """ + Confirm that asset deletion passes without an existing archive file. 
+ """ + async with initialized_beanie_context: + asset = await Asset( + name="Test Asset", + description="This is an test asset.", + definition=AssetDefinition(), + author="Test author" + ).create() + # Delete before an archive file was created + await asset.delete() diff --git a/api/tests/data_management/test_case.py b/api/tests/data_management/test_case.py index dbc085cd..1454e199 100644 --- a/api/tests/data_management/test_case.py +++ b/api/tests/data_management/test_case.py @@ -15,6 +15,7 @@ SymptomUpdate, SymptomLabel ) +from bson import ObjectId from pydantic import ValidationError @@ -34,9 +35,9 @@ def case_with_diagnostic_data(new_case, timeseries_data): new_case["timeseries_data"] = timeseries_data new_case["obd_data"] = {"dtcs": ["P0001"]} new_case["symptoms"] = { - "component": "battery", - "label": "defect" - } + "component": "battery", + "label": "defect" + } return new_case @@ -160,6 +161,147 @@ async def test_find_in_hub( assert case_2_result[0].vehicle_vin == case_2_vin assert case_3_result[0].workshop_id == case_3_workshop_id + @pytest.mark.parametrize( + "query_vin,expected_vins", + [ + ("AB", ["ABC", "ABCD"]), + ("ABC", ["ABC", "ABCD"]), + ("ABCD", ["ABCD"]), + ("BC", []) + ] + ) + @pytest.mark.asyncio + async def test_find_in_hub_with_partial_vin( + self, initialized_beanie_context, query_vin, expected_vins + ): + async with initialized_beanie_context: + workshop_id = "test-workshop" + await Case(vehicle_vin="ABC", workshop_id=workshop_id).create() + await Case(vehicle_vin="ABCD", workshop_id=workshop_id).create() + await Case(vehicle_vin="ZABC", workshop_id=workshop_id).create() + + retrieved_cases = await Case.find_in_hub(vin=query_vin) + retrieved_vins = sorted([_.vehicle_vin for _ in retrieved_cases]) + assert retrieved_vins == expected_vins + + @pytest.mark.parametrize( + "query_dtc,expected_cases", + [ + ("P0001", [0, 1]), + ("Q0002", [1]), + ("Z0001", []) + ] + ) + @pytest.mark.asyncio + async def test_find_in_hub_by_obd_data_dtc( + self, initialized_beanie_context, query_dtc, expected_cases + ): + async with initialized_beanie_context: + workshop_id = "test-workshop" + vin = "test-vin" + # Three cases are put into the db + cases = [] + for _ in range(3): + case = await Case( + vehicle_vin=vin, workshop_id=workshop_id + ).create() + cases.append(case) + # OBD data with dtcs is added to two of the three cases + await cases[0].add_obd_data( + NewOBDData(dtcs=["P0001"]) + ) + await cases[1].add_obd_data( + NewOBDData(dtcs=["P0001", "Q0002"]) + ) + + retrieved_cases = await Case.find_in_hub(obd_data_dtc=query_dtc) + retrieved_case_ids = sorted([_.id for _ in retrieved_cases]) + expected_case_ids = sorted([cases[i].id for i in expected_cases]) + assert retrieved_case_ids == expected_case_ids + + @pytest.mark.parametrize("query_dtc", ["P0001", "Q0001", "Z0002"]) + @pytest.mark.asyncio + async def test_find_in_hub_by_obd_data_dtc_with_multiple_datasets( + self, initialized_beanie_context, query_dtc + ): + """Test non-standard situation with multiple obd datasets in a case.""" + async with initialized_beanie_context: + case = await Case(workshop_id="42", vehicle_vin="42").create() + await case.add_obd_data( + NewOBDData(dtcs=["P0001", "Z0002"]) + ) + await case.add_obd_data( + NewOBDData(dtcs=["Q0001", "Z0002"]) + ) + await case.add_obd_data( + NewOBDData(dtcs=[]) + ) + retrieved_cases = await Case.find_in_hub(obd_data_dtc=query_dtc) + assert [_.id for _ in retrieved_cases] == [case.id] + + @pytest.mark.parametrize( + "query_component,expected_cases", + [ + ("Comp-A", 
[0, 1]), + ("Comp-B", [1]), + ("Comp-C", []) + ] + ) + @pytest.mark.asyncio + async def test_find_in_hub_by_timeseries_data_component( + self, + monkeypatch, + initialized_beanie_context, + query_component, + expected_cases + ): + # Patch to_timeseries_data to avoid signal store configuration + async def mock_to_timeseries_data(self): + # Just exchange signal for signal_id without storing the signal + meta_data = self.model_dump(exclude={"signal"}) + meta_data["signal_id"] = ObjectId() + return TimeseriesData(**meta_data) + + monkeypatch.setattr( + NewTimeseriesData, "to_timeseries_data", mock_to_timeseries_data + ) + + async with initialized_beanie_context: + workshop_id = "test-workshop" + vin = "test-vin" + # Three cases are put into the db + cases = [] + for _ in range(3): + case = await Case( + vehicle_vin=vin, workshop_id=workshop_id + ).create() + cases.append(case) + + common_kwargs = { + "signal": [.42], + "sampling_rate": 1, + "duration": 1, + "label": "unknown" + } + # One dataset is added to first case + await cases[0].add_timeseries_data( + NewTimeseriesData(component="Comp-A", **common_kwargs) + ) + # Two datasets are added to second case + await cases[1].add_timeseries_data( + NewTimeseriesData(component="Comp-B", **common_kwargs) + ) + await cases[1].add_timeseries_data( + NewTimeseriesData(component="Comp-A", **common_kwargs) + ) + + retrieved_cases = await Case.find_in_hub( + timeseries_data_component=query_component + ) + retrieved_case_ids = sorted([_.id for _ in retrieved_cases]) + expected_case_ids = sorted([cases[i].id for i in expected_cases]) + assert retrieved_case_ids == expected_case_ids + @pytest.mark.asyncio async def test_data_counter_are_correctly_initilialized( self, new_case, initialized_beanie_context @@ -192,6 +334,7 @@ class MockNewTimeseriesData(NewTimeseriesData): A mock for NewTimeseriesData that does not interact with a signal store when executing to_timeseries_data. 
""" + async def to_timeseries_data(self): signal_id = test_signal_id meta_data = self.model_dump(exclude={"signal"}) @@ -264,10 +407,12 @@ async def test_add_symptom( case.symptoms_added = previous_adds await case.add_symptom( - NewSymptom(**{ - "component": "battery", - "label": SymptomLabel("defect") - }) + NewSymptom( + **{ + "component": "battery", + "label": SymptomLabel("defect") + } + ) ) # refetch case and assert existence of single symptom diff --git a/api/tests/routers/test_assets.py b/api/tests/routers/test_assets.py new file mode 100644 index 00000000..ec77ef7b --- /dev/null +++ b/api/tests/routers/test_assets.py @@ -0,0 +1,928 @@ +import os +from datetime import datetime, timedelta, UTC +from unittest.mock import AsyncMock +from zipfile import ZipFile + +import httpx +import pytest +from api.data_management import ( + Asset, AssetDefinition, Publication +) +from api.dataspace_management import nautilus +from api.routers import assets +from api.routers.assets import Nautilus +from api.security.keycloak import Keycloak +from bson import ObjectId +from fastapi import FastAPI +from fastapi.testclient import TestClient +from jose import jws + + +@pytest.fixture +def jwt_payload(): + return { + "iat": datetime.now(UTC).timestamp(), + "exp": (datetime.now(UTC) + timedelta(60)).timestamp(), + "preferred_username": "some-user-with-assets-access", + "realm_access": {"roles": ["assets"]} + } + + +@pytest.fixture +def signed_jwt(jwt_payload, rsa_private_key_pem: bytes): + """Create a JWT signed with private RSA key.""" + return jws.sign(jwt_payload, rsa_private_key_pem, algorithm="RS256") + + +@pytest.fixture +def app(motor_db): + app = FastAPI() + app.include_router(assets.management_router) + app.include_router(assets.public_router) + assets.api_key_auth.valid_key = "assets-key-dev" + yield app + + +@pytest.fixture +def unauthenticated_client(app): + """Unauthenticated client, e.g. no bearer token in header.""" + yield TestClient(app) + + +@pytest.fixture +def authenticated_client( + unauthenticated_client, rsa_public_key_pem, signed_jwt +): + """Turn unauthenticated client into authenticated client.""" + + # Client gets auth header with valid bearer token + client = unauthenticated_client + client.headers.update({"Authorization": f"Bearer {signed_jwt}"}) + + # Make app use public key from fixture for token validation + app = client.app + app.dependency_overrides[ + Keycloak.get_public_key_for_workshop_realm + ] = lambda: rsa_public_key_pem.decode() + + return client + + +@pytest.fixture +def base_url(): + return "http://testserver" + + +@pytest.fixture +def authenticated_async_client( + app, rsa_public_key_pem, signed_jwt, base_url +): + """ + Authenticated async client for tests that require mongodb access via + beanie. Note that for this module, this is the client authorized to + manage assets via /dataspace/manage/... + """ + + # Client with valid auth header + client = httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), + base_url=base_url, + headers={"Authorization": f"Bearer {signed_jwt}"} + ) + + # Make app use public key from fixture for token validation + app.dependency_overrides[ + Keycloak.get_public_key_for_workshop_realm + ] = lambda: rsa_public_key_pem.decode() + + return client + + +@pytest.fixture +def n_assets_in_data_context(): + return 2 + + +@pytest.fixture +def asset_ids_in_data_context(n_assets_in_data_context): + """Valid asset_id, e.g. 
needs to work with PydanticObjectId""" + return [str(ObjectId()) for _ in range(n_assets_in_data_context)] + + +@pytest.fixture +def data_context( + motor_db, asset_ids_in_data_context +): + """ + Seed db with test data. + + Usage: `async with initialized_beanie_context, data_context: ...` + """ + + class DataContext: + async def __aenter__(self): + # Seed the db with a few assets + for i, a_id in enumerate(asset_ids_in_data_context): + await Asset( + id=a_id, + name=f"A{i}", + description=f"This is asset {i}.", + definition=AssetDefinition(), + author="Test author" + ).create() + + async def __aexit__(self, exc_type, exc, tb): + pass + + return DataContext() + + +@pytest.mark.asyncio +async def test_list_assets_in_empty_db( + authenticated_async_client, initialized_beanie_context +): + async with initialized_beanie_context: + response = await authenticated_async_client.get( + "/dataspace/manage/assets" + ) + assert response.status_code == 200 + assert response.json() == [] + + +@pytest.mark.asyncio +async def test_list_assets( + authenticated_async_client, + initialized_beanie_context, + data_context, + n_assets_in_data_context +): + async with initialized_beanie_context, data_context: + # Request without any additional params + response = await authenticated_async_client.get( + "/dataspace/manage/assets" + ) + # Validate response status code and data + assert response.status_code == 200 + assert len(response.json()) == n_assets_in_data_context + + +@pytest.mark.asyncio +async def test_add_asset( + authenticated_async_client, + initialized_beanie_context +): + name = "New Asset" + description = "A new asset added via the api." + async with initialized_beanie_context: + response = await authenticated_async_client.post( + "/dataspace/manage/assets", + json={ + "name": name, + "description": description, + "definition": {}, + "author": "Test author" + } + ) + assert response.status_code == 201 + # Confirm asset data in response + response_data = response.json() + assert response_data["name"] == name + assert response_data["description"] == description + assert response_data["data_status"] == "defined" + # Confirm storage in db + asset_db = await Asset.get(response_data["_id"]) + assert asset_db + assert asset_db.name == name + assert asset_db.description == description + # Confirm processing of the asset + assert asset_db.data_status == "ready" + assert os.path.exists(asset_db.data_file_path) + + +@pytest.mark.asyncio +async def test_get_asset( + authenticated_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context +): + asset_id = asset_ids_in_data_context[0] + async with initialized_beanie_context, data_context: + response = await authenticated_async_client.get( + f"/dataspace/manage/assets/{asset_id}" + ) + assert response.status_code == 200 + assert response.json()["_id"] == asset_id + + +@pytest.fixture +def patch_nautilus_to_fail_revocation( + authenticated_async_client, monkeypatch +): + """ + Patch Nautilus to enforce failure of any attempt to revoke a publication + in the dataspace. 
+ """ + # Configure url to avoid failure of Nautilus constructor + Nautilus.configure( + url="http://nothing-here", + timeout=None, + api_key_assets=None + ) + + def _raise(*args, **kwargs): + raise Exception("Simulated failure during asset revocation") + + monkeypatch.setattr(Nautilus, "revoke_publication", _raise) + yield + # Clean up + Nautilus.configure(url=None, timeout=None, api_key_assets=None) + + +@pytest.mark.asyncio +async def test_delete_asset( + authenticated_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context, + patch_nautilus_to_fail_revocation # ... as there is no publication +): + asset_id = asset_ids_in_data_context[0] + async with initialized_beanie_context, data_context: + response = await authenticated_async_client.request( + "DELETE", + f"/dataspace/manage/assets/{asset_id}", + json={"nautilus_private_key": "42"} + ) + assert response.status_code == 200 + assert response.json() is None + # Confirm deletion in db + asset_db = await Asset.get(asset_id) + assert asset_db is None + + +@pytest.fixture +def patch_nautilus_to_avoid_external_revocation_request( + authenticated_async_client, monkeypatch +): + """ + Patch Nautilus to avoid external request for asset revocation + """ + # Configure url to avoid failure of Nautilus constructor + Nautilus.configure( + url="http://nothing-here", + timeout=None, + api_key_assets=None + ) + + # Create mock for the httpx.AsyncClient.post method + mock_post = AsyncMock(spec=httpx.AsyncClient.post) + mock_post.return_value = httpx.Response( + status_code=200, + request=httpx.Request("POST", "http://nothing-here") + ) + + # Patch httpx.AsyncClient.post in the nautilus module to avoid external + # request + monkeypatch.setattr( + nautilus.httpx.AsyncClient, + "post", + mock_post + ) + + yield + + # Clean up + Nautilus.configure(url=None, timeout=None, api_key_assets=None) + + +@pytest.mark.asyncio +async def test_delete_asset_with_publication( + authenticated_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context, + patch_nautilus_to_avoid_external_revocation_request +): + asset_id = asset_ids_in_data_context[0] + async with initialized_beanie_context, data_context: + # Get one of the assets in the data_context, process it's definition + # and add a publication + asset = await Asset.get(asset_id) + await asset.process_definition() + asset.publication = Publication( + did="some-did", + asset_key="some-key", + asset_url="http://some-url" + ) + await asset.save() + # Delete it + response = await authenticated_async_client.request( + "DELETE", + f"/dataspace/manage/assets/{asset_id}", + json={"nautilus_private_key": "42"} + ) + assert response.status_code == 200 + assert response.json() is None + # Confirm deletion in db + asset_db = await Asset.get(asset_id) + assert asset_db is None + + +@pytest.mark.asyncio +async def test_get_asset_dataset_not_ready( + authenticated_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context +): + asset_id = asset_ids_in_data_context[0] + async with initialized_beanie_context, data_context: + # Attempt to retrieve asset data before the asset definition was + # processed + response = await authenticated_async_client.get( + f"/dataspace/manage/assets/{asset_id}/data" + ) + assert response.status_code == 400 + + +@pytest.mark.asyncio +async def test_get_asset_dataset( + authenticated_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context, + tmp_path +): + asset_id = 
asset_ids_in_data_context[0] + async with initialized_beanie_context, data_context: + # Process asset definition + asset = await Asset.get(asset_id) + await asset.process_definition() + # Attempt to retrieve asset data after successful processing + response = await authenticated_async_client.get( + f"/dataspace/manage/assets/{asset_id}/data" + ) + assert response.status_code == 200 + # Download the archive and validate structure + download_path = tmp_path / "download.zip" + with open(download_path, "wb") as file: + file.write(response.content) + with ZipFile(download_path, "r") as archive: + # Get a list of all members (files and directories) in the archive + archive_members = archive.namelist() + # Ensure the presence of the presence of the expected "cases/" and + # "signals/" directories + assert "cases/" in archive_members + assert "signals/" in archive_members + + +@pytest.fixture +def patch_nautilus_to_fail_publication( + authenticated_async_client, monkeypatch +): + """ + Patch Nautilus to enforce failure of any attempt to publish to the + dataspace. + """ + # Configure url to avoid failure of Nautilus constructor + Nautilus.configure( + url="http://nothing-here", + timeout=None, + api_key_assets=None + ) + + def _raise(): + raise Exception("Simulated failure during dataset publication") + + monkeypatch.setattr(Nautilus, "publish_access_dataset", _raise) + yield + # Clean up + Nautilus.configure(url=None, timeout=None, api_key_assets=None) + + +@pytest.mark.asyncio +async def test_publish_asset_not_ready( + authenticated_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context, + patch_nautilus_to_fail_publication +): + asset_id = asset_ids_in_data_context[0] + async with initialized_beanie_context, data_context: + # Attempt to publish asset before the asset definition was + # processed + response = await authenticated_async_client.post( + f"/dataspace/manage/assets/{asset_id}/publication", + json={"nautilus_private_key": "42"} + ) + assert response.status_code == 400 + + +@pytest.mark.asyncio +async def test_publish_asset_already_published( + authenticated_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context, + patch_nautilus_to_fail_publication +): + asset_id = asset_ids_in_data_context[0] + async with initialized_beanie_context, data_context: + # Get one of the assets in the data_context, process it's definition + # and add a publication + asset = await Asset.get(asset_id) + await asset.process_definition() + asset.publication = Publication( + did="some-did", + asset_key="some-key", + asset_url="http://some-url" + ) + await asset.save() + # Attempt to publish the asset that already has a publication + response = await authenticated_async_client.post( + f"/dataspace/manage/assets/{asset_id}/publication", + json={"nautilus_private_key": "42"} + ) + # Response should indicate success but without creation of a new resource + # via 200 status code. + assert response.status_code == 200 + # Client should receive information about the existing publication + assert response.json() == asset.publication.model_dump() + + +@pytest.fixture +def patch_nautilus_to_avoid_external_request( + authenticated_async_client, monkeypatch +): + """ + Patch Nautilus to just return a publication without first attempting any + external http requests. 
+ """ + # Configure url to avoid failure of Nautilus constructor + Nautilus.configure( + url="http://nothing-here", + timeout=None, + api_key_assets=None + ) + + # Create mock of httpx.AsyncClient + class MockAsyncClient: + async def post(self, url, headers, timeout, json): + return httpx.Response( + status_code=201, + request=httpx.Request("post", url), + json={"assetdid": "newdid"} + ) + + # Patch httpx.AsyncClient.post in the nautilus module to avoid external + # request + monkeypatch.setattr( + nautilus.httpx, "AsyncClient", MockAsyncClient + ) + yield + # Clean up + Nautilus.configure(url=None, timeout=None, api_key_assets=None) + + +@pytest.mark.asyncio +async def test_publish_asset( + authenticated_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context, + patch_nautilus_to_avoid_external_request +): + asset_id = asset_ids_in_data_context[0] + async with initialized_beanie_context, data_context: + # Process asset definition to allow publication + asset = await Asset.get(asset_id) + await asset.process_definition() + # Attempt to publish to dataspace + response = await authenticated_async_client.post( + f"/dataspace/manage/assets/{asset_id}/publication", + json={"nautilus_private_key": "42"} + ) + # Status code should indicate creation of new resource + assert response.status_code == 201 + # The asset db object should contain a publication including asset_key + await asset.sync() + assert asset.publication.asset_key + # Response data should include all publication information except the + # asset key + assert response.json() == asset.publication.model_dump( + exclude={"asset_key"} + ) + + +@pytest.fixture +def patch_nautilus_to_timeout_communication( + authenticated_async_client, monkeypatch +): + """ + Patch Nautilus such that external publication request times out. + """ + # Configure url to avoid failure of Nautilus constructor + Nautilus.configure( + url="http://nothing-here", + timeout=None, + api_key_assets=None + ) + + # Patch httpx.AsyncClient.post in the nautilus module to timeout + class MockAsyncClient: + async def post(self, url, headers, timeout, json): + raise httpx.TimeoutException( + "Simulated timeout during dataset publication" + ) + + monkeypatch.setattr( + nautilus.httpx, "AsyncClient", MockAsyncClient + ) + yield + # Clean up + Nautilus.configure(url=None, timeout=None, api_key_assets=None) + + +@pytest.mark.asyncio +async def test_publish_asset_with_communication_timeout( + authenticated_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context, + patch_nautilus_to_timeout_communication +): + asset_id = asset_ids_in_data_context[0] + async with initialized_beanie_context, data_context: + # Process asset definition to allow publication + asset = await Asset.get(asset_id) + await asset.process_definition() + # Attempt to publish to dataspace + response = await authenticated_async_client.post( + f"/dataspace/manage/assets/{asset_id}/publication", + json={"nautilus_private_key": "42"} + ) + # Http exception should indicate failed communication + assert response.status_code == 500 + assert response.json()["detail"] == ("Failed communication with " + "nautilus: Connection timeout.") + + +@pytest.fixture(params=[400, 401, 500, 501]) +def patch_nautilus_to_fail_http_communication( + authenticated_async_client, monkeypatch, request +): + """ + Patch Nautilus such that external publication request fails with + non-success http status code. 
+ """ + # Configure url to avoid failure of Nautilus constructor + Nautilus.configure( + url="http://nothing-here", + timeout=None, + api_key_assets=None + ) + + # Patch httpx.AsyncClient.post in the nautilus module to avoid external + # request and to respond with non-success http code + class MockAsyncClient: + async def post(self, url, headers, timeout, json): + return httpx.Response( + status_code=request.param, + text="Failed.", + request=httpx.Request("post", url) + ) + + monkeypatch.setattr( + nautilus.httpx, "AsyncClient", MockAsyncClient + ) + yield + # Clean up + Nautilus.configure(url=None, timeout=None, api_key_assets=None) + + +@pytest.mark.asyncio +async def test_publish_asset_with_failed_http_communication( + authenticated_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context, + patch_nautilus_to_fail_http_communication +): + asset_id = asset_ids_in_data_context[0] + async with initialized_beanie_context, data_context: + # Process asset definition to allow publication + asset = await Asset.get(asset_id) + await asset.process_definition() + # Attempt to publish to dataspace + response = await authenticated_async_client.post( + f"/dataspace/manage/assets/{asset_id}/publication", + json={"nautilus_private_key": "42"} + ) + # Http exception should indicate failed communication + assert response.status_code == 500 + assert response.json()["detail"] == ("Failed communication with " + "nautilus: Failed.") + + +@pytest.mark.parametrize( + "method,endpoint", + [ + ("get", ""), + ("delete", ""), + ("get", "/data"), + ("post", "/publication") + ] +) +@pytest.mark.asyncio +async def test_asset_not_found( + method, + endpoint, + authenticated_async_client, + initialized_beanie_context +): + # Fresh ID and no data initialization + asset_id = str(ObjectId()) + async with initialized_beanie_context: + response = await authenticated_async_client.request( + method=method, url=f"/dataspace/manage/assets/{asset_id}{endpoint}" + ) + assert response.status_code == 404 + assert response.json() == { + "detail": f"No asset with id '{asset_id}' found." + } + + +@pytest.mark.parametrize( + "route", assets.management_router.routes, ids=lambda r: r.name +) +def test_missing_bearer_token(route, unauthenticated_client): + """Endpoints should not be accessible without a bearer token.""" + assert len(route.methods) == 1, "Test assumes one method per route." + method = next(iter(route.methods)) + response = unauthenticated_client.request(method=method, url=route.path) + assert response.status_code == 403 + assert response.json() == {"detail": "Not authenticated"} + + +@pytest.fixture +def jwt_payload_with_unauthorized_role(jwt_payload): + jwt_payload["realm_access"]["roles"] = ["workshop", "not assets"] + return jwt_payload + + +@pytest.fixture +def signed_jwt_with_unauthorized_role( + jwt_payload_with_unauthorized_role, rsa_private_key_pem: bytes +): + return jws.sign( + jwt_payload_with_unauthorized_role, + rsa_private_key_pem, algorithm="RS256" + ) + + +@pytest.mark.parametrize( + "route", assets.management_router.routes, ids=lambda r: r.name +) +def test_unauthorized_user( + route, authenticated_client, signed_jwt_with_unauthorized_role +): + """ + Endpoints should not be accessible, if the user role encoded in the + token does not indicate assets access. + """ + assert len(route.methods) == 1, "Test assumes one method per route." 
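+    # The parametrization above iterates over assets.management_router.routes,
+    # so newly added endpoints are covered automatically without hard-coding
+    # their paths or HTTP methods here.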
+ method = next(iter(route.methods)) + authenticated_client.headers.update( + {"Authorization": f"Bearer {signed_jwt_with_unauthorized_role}"} + ) + response = authenticated_client.request(method=method, url=route.path) + assert response.status_code == 401 + assert response.json() == {"detail": "Could not validate token."} + + +@pytest.mark.parametrize( + "route", assets.management_router.routes, ids=lambda r: r.name +) +def test_invalid_jwt_signature( + route, authenticated_client, another_rsa_public_key_pem +): + """ + Endpoints should not be accessible, if the public key retrieved from + keycloak does not match the private key used to sign a JWT. + """ + assert len(route.methods) == 1, "Test assumes one method per route." + method = next(iter(route.methods)) + # The token signature of the authenticated client will not match the public + # key anymore + authenticated_client.app.dependency_overrides[ + Keycloak.get_public_key_for_workshop_realm + ] = lambda: another_rsa_public_key_pem.decode() + response = authenticated_client.request(method=method, url=route.path) + assert response.status_code == 401 + assert response.json() == {"detail": "Could not validate token."} + + +@pytest.fixture +def expired_jwt_payload(): + return { + "iat": (datetime.now(UTC) - timedelta(60)).timestamp(), + "exp": (datetime.now(UTC) - timedelta(1)).timestamp(), + "preferred_username": "user", + "realm_access": {"roles": ["assets"]} + } + + +@pytest.fixture +def expired_jwt(expired_jwt_payload, rsa_private_key_pem: bytes): + """Create an expired JWT signed with private RSA key.""" + return jws.sign( + expired_jwt_payload, rsa_private_key_pem, algorithm="RS256" + ) + + +@pytest.mark.parametrize( + "route", assets.management_router.routes, ids=lambda r: r.name +) +def test_expired_jwt(route, authenticated_client, expired_jwt): + """ + Endpoints should not be accessible, if the bearer token is expired. + """ + assert len(route.methods) == 1, "Test assumes one method per route." + method = next(iter(route.methods)) + # The token offered by the authenticated client is expired + response = authenticated_client.request( + method=method, + url=route.path, + headers={"Authorization": f"Bearer {expired_jwt}"} + ) + assert response.status_code == 401 + assert response.json() == {"detail": "Could not validate token."} + + +@pytest.fixture +def public_async_client(app, base_url): + """ + Client to access the public dataspace router. Does not have a Bearer + token issued by keycloak. + """ + client = httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), + base_url=base_url + ) + return client + + +@pytest.mark.asyncio +async def test_get_published_dataset( + authenticated_async_client, + public_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context, + patch_nautilus_to_avoid_external_request, + tmp_path +): + asset_id = asset_ids_in_data_context[0] + async with (initialized_beanie_context, data_context): + # Get one of the assets in the data_context, process it's definition + # and have the client authenticated for management publish it + asset = await Asset.get(asset_id) + await asset.process_definition() + await authenticated_async_client.post( + f"/dataspace/manage/assets/{asset_id}/publication", + json={"nautilus_private_key": "42"} + ) + # As part of the publishing process, an asset_url and asset_key were + # created. Fetch those from the db, as the public client will need + # them to access the asset data archive. 
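+        # (Asset.sync() is expected to re-read the document from the database,
+        # making the asset_url and asset_key written by the publication
+        # endpoint visible on this local instance.)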
+ await asset.sync() + asset_url = asset.publication.asset_url + asset_key = asset.publication.asset_key + # Confirm expectation about the constructed URL + assert asset_url == (f"{str(public_async_client.base_url)}/dataspace/" + f"public/assets/{asset_id}/data") + + # Now do the actual testing: Can the public client access the asset + # data archive using the automatically created url and key? + response = await public_async_client.get( + asset_url, + headers={"data_key": asset_key, "x-api-key": "assets-key-dev"} + ) + assert response.status_code == 200 + # Download the archive and validate structure + download_path = tmp_path / "download.zip" + with open(download_path, "wb") as file: + file.write(response.content) + with ZipFile(download_path, "r") as archive: + # Get a list of all members (files and directories) in the archive + archive_members = archive.namelist() + # Ensure the presence of the presence of the expected "cases/" and + # "signals/" directories + assert "cases/" in archive_members + assert "signals/" in archive_members + + +@pytest.mark.asyncio +async def test_get_published_dataset_invalid_asset_key( + authenticated_async_client, + public_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context, + patch_nautilus_to_avoid_external_request +): + asset_id = asset_ids_in_data_context[0] + async with (initialized_beanie_context, data_context): + # Get one of the assets in the data_context, process it's definition + # and have the client authenticated for management publish it + asset = await Asset.get(asset_id) + await asset.process_definition() + await authenticated_async_client.post( + f"/dataspace/manage/assets/{asset_id}/publication", + json={"nautilus_private_key": "42"} + ) + # As part of the publishing process, an asset_url and asset_key were + # created. Only fetch the url here. + await asset.sync() + asset_url = asset.publication.asset_url + # Confirm expectation about the constructed URL + assert asset_url == (f"{str(public_async_client.base_url)}/dataspace/" + f"public/assets/{asset_id}/data") + + # Now do the actual testing: The asset url is valid, but the public + # client can not access the dataset with an invalid asset_key + response = await public_async_client.get( + asset_url, + headers={ + "data_key": "this-sure-is-not-the-right-key", + "x-api-key": "assets-key-dev"} + ) + assert response.status_code == 401 + assert response.json() == {"detail": "Could not validate asset key."} + + +@pytest.mark.asyncio +async def test_get_published_dataset_asset_not_published( + public_async_client, + initialized_beanie_context, + data_context, + asset_ids_in_data_context +): + # Id of an unpublished asset in the data context + asset_id = asset_ids_in_data_context[0] + async with (initialized_beanie_context, data_context): + # If the asset would be published, this would be the url for data + # retrieval by the public client. + asset_url = f"/dataspace/public/assets/{asset_id}/data" + # Public client tries to fetch the data (maybe the asset was published + # in the past) + response = await public_async_client.get( + asset_url, + headers={"data_key": "some-key", "x-api-key": "assets-key-dev"} + ) + assert response.status_code == 404 + assert response.json() == { + "detail": f"No published asset with ID '{asset_id}' found." 
+ } + + +@pytest.mark.asyncio +async def test_get_published_dataset_asset_invalid_asset_id( + public_async_client, + initialized_beanie_context +): + async with (initialized_beanie_context): + # Public client attempts to fetch an asset that does not exist at all + # (Note: No data context here and new asset id) + asset_id = str(ObjectId()) + asset_url = f"/dataspace/public/assets/{asset_id}/data" + response = await public_async_client.get( + asset_url, + headers={"data_key": "some-key", "x-api-key": "assets-key-dev"} + ) + assert response.status_code == 404 + assert response.json() == { + "detail": f"No asset with id '{asset_id}' found." + } + + +def test_get_published_dataset_no_asset_key(unauthenticated_client): + any_asset_id = str(ObjectId()) + response = unauthenticated_client.get( + f"/dataspace/public/assets/{any_asset_id}/data", + headers={"x-api-key": "assets-key-dev"} + ) + assert response.status_code == 403 + assert response.json() == {"detail": "Not authenticated"} + + +def test_get_published_dataset_no_asset_api_key(unauthenticated_client): + any_asset_id = str(ObjectId()) + response = unauthenticated_client.get( + f"/dataspace/public/assets/{any_asset_id}/data", + headers={"x-api-key": "assets-key-dev"} + ) + assert response.status_code == 403 + assert response.json() == {"detail": "Not authenticated"} diff --git a/api/tests/routers/test_customers.py b/api/tests/routers/test_customers.py index b4f9930a..63637503 100644 --- a/api/tests/routers/test_customers.py +++ b/api/tests/routers/test_customers.py @@ -32,7 +32,7 @@ def signed_jwt(jwt_payload, rsa_private_key_pem: bytes): @pytest.fixture def app(motor_db): app = FastAPI() - app.include_router(customers.router) + app.include_router(customers.router, prefix="/customers") yield app @@ -151,7 +151,7 @@ async def test_list_customers_in_empty_db( authenticated_async_client, initialized_beanie_context ): async with initialized_beanie_context: - response = await authenticated_async_client.get("/") + response = await authenticated_async_client.get("/customers") assert response.status_code == 200 assert response.json() == [] assert response.headers["link"] == "", \ @@ -167,7 +167,7 @@ async def test_list_customers( ): async with initialized_beanie_context, data_context: # Request without any additional params - response = await authenticated_async_client.get("/") + response = await authenticated_async_client.get("/customers") # Validate response status code and data assert response.status_code == 200 assert len(response.json()) == n_customers_in_data_context @@ -210,7 +210,7 @@ async def test_list_customers_pagination( await Customer(first_name="B", last_name="A").create() await Customer(first_name="A", last_name="A").create() response = await authenticated_async_client.get( - f"/?page_size={page_size}&page={page}" + f"/customers?page_size={page_size}&page={page}" ) assert response.status_code == 200 response_data = response.json() @@ -230,7 +230,7 @@ async def test_list_customers_pagination_valid_page_size_limits( ): async with initialized_beanie_context, data_context: response = await authenticated_async_client.get( - f"/?page_size={page_size}&page=0" + f"/customers?page_size={page_size}&page=0" ) assert response.status_code == 200 assert len(response.json()) == min(page_size, n_customers_in_data_context) @@ -245,7 +245,7 @@ async def test_list_customers_pagination_invalid_page_size_limits( ): async with initialized_beanie_context: response = await authenticated_async_client.get( - f"/?page_size={page_size}&page=0" + 
f"/customers?page_size={page_size}&page=0" ) assert response.status_code == 422, \ "Expected response to indicate unprocessable content." @@ -263,7 +263,7 @@ async def test_list_customers_out_of_range_page( out_of_range_page = max_page_index + 1 async with initialized_beanie_context, data_context: response = await authenticated_async_client.get( - f"/?page_size={page_size}&page={out_of_range_page}" + f"/customers?page_size={page_size}&page={out_of_range_page}" ) assert response.status_code == 400 assert response.json()["detail"] == \ @@ -286,7 +286,7 @@ async def test_list_customers_pagination_links( c_1 = await Customer(first_name="A", last_name="A").create() # Test retrieval of all docs using the link header for navigation retrieved_docs = [] - next_page = f"/?page_size={page_size}&page=0" + next_page = f"/customers?page_size={page_size}&page=0" while next_page: response = await authenticated_async_client.get(next_page) assert response.status_code == 200 @@ -310,7 +310,7 @@ async def test_add_customer( last_name = "some-last-name" async with initialized_beanie_context: response = await authenticated_async_client.post( - "/", + "/customers", json={"first_name": first_name, "last_name": last_name} ) assert response.status_code == 201 @@ -334,7 +334,9 @@ async def test_get_customer( ): customer_id = customer_ids_in_data_context[0] async with initialized_beanie_context, data_context: - response = await authenticated_async_client.get(f"/{customer_id}") + response = await authenticated_async_client.get( + f"/customers/{customer_id}" + ) assert response.status_code == 200 assert response.json()["_id"] == customer_id @@ -350,7 +352,7 @@ async def test_update_customer( update = {"first_name": "NewFirstName", "city": "NewCity"} async with initialized_beanie_context, data_context: response = await authenticated_async_client.patch( - f"/{customer_id}", json=update + f"/customers/{customer_id}", json=update ) assert response.status_code == 200 # Confirm customer data in response @@ -373,7 +375,9 @@ async def test_delete_customer( ): customer_id = customer_ids_in_data_context[0] async with initialized_beanie_context, data_context: - response = await authenticated_async_client.delete(f"/{customer_id}") + response = await authenticated_async_client.delete( + f"/customers/{customer_id}" + ) assert response.status_code == 200 assert response.json() is None # Confirm deletion in db @@ -392,7 +396,7 @@ async def test_customer_not_found( customer_id = str(ObjectId()) async with initialized_beanie_context: response = await authenticated_async_client.request( - method=method, url=f"/{customer_id}" + method=method, url=f"/customers/{customer_id}" ) assert response.status_code == 404 assert response.json()["detail"] == \ @@ -406,7 +410,9 @@ def test_missing_bearer_token(route, unauthenticated_client): """Endpoints should not be accessible without a bearer token.""" assert len(route.methods) == 1, "Test assumes one method per route." 
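+    # The customers router is now mounted with prefix="/customers", so
+    # route.path no longer carries the prefix and the tests prepend it
+    # explicitly when building request URLs.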
method = next(iter(route.methods)) - response = unauthenticated_client.request(method=method, url=route.path) + response = unauthenticated_client.request( + method=method, url=f"/customers{route.path}" + ) assert response.status_code == 403 assert response.json() == {"detail": "Not authenticated"} @@ -442,7 +448,9 @@ def test_unauthorized_user( authenticated_client.headers.update( {"Authorization": f"Bearer {signed_jwt_with_unauthorized_role}"} ) - response = authenticated_client.request(method=method, url=route.path) + response = authenticated_client.request( + method=method, url=f"/customers{route.path}" + ) assert response.status_code == 401 assert response.json() == {"detail": "Could not validate token."} @@ -464,7 +472,9 @@ def test_invalid_jwt_signature( authenticated_client.app.dependency_overrides[ Keycloak.get_public_key_for_workshop_realm ] = lambda: another_rsa_public_key_pem.decode() - response = authenticated_client.request(method=method, url=route.path) + response = authenticated_client.request( + method=method, url=f"/customers{route.path}" + ) assert response.status_code == 401 assert response.json() == {"detail": "Could not validate token."} @@ -499,7 +509,7 @@ def test_expired_jwt(route, authenticated_client, expired_jwt): # The token offered by the authenticated client is expired response = authenticated_client.request( method=method, - url=route.path, + url=f"/customers{route.path}", headers={"Authorization": f"Bearer {expired_jwt}"} ) assert response.status_code == 401 diff --git a/api/tests/routers/test_shared.py b/api/tests/routers/test_shared.py index 27554aaf..932150f7 100644 --- a/api/tests/routers/test_shared.py +++ b/api/tests/routers/test_shared.py @@ -124,9 +124,14 @@ def case_data(case_id, customer_id, vin, workshop_id): @pytest.fixture -def timeseries_data(): +def timeseries_data_component(): + return "battery" + + +@pytest.fixture +def timeseries_data(timeseries_data_component): return { - "component": "battery", + "component": timeseries_data_component, "label": "norm", "sampling_rate": 1, "duration": 3, @@ -136,9 +141,14 @@ def timeseries_data(): @pytest.fixture -def obd_data(): +def obd_data_dtc(): + return "X4242" + + +@pytest.fixture +def obd_data(obd_data_dtc): return { - "dtcs": ["P0001", "U0001"] + "dtcs": ["P0001", "U0001", obd_data_dtc] } @@ -235,7 +245,13 @@ async def test_list_cases( assert response_data[0]["_id"] == case_id -@pytest.mark.parametrize("query_param", ["customer_id", "vin", "workshop_id"]) +@pytest.mark.parametrize( + "query_param", + [ + "customer_id", "vin", "workshop_id", "obd_data_dtc", + "timeseries_data_component" + ] +) @pytest.mark.asyncio async def test_list_cases_with_single_filter( authenticated_async_client, initialized_beanie_context, data_context, @@ -256,12 +272,15 @@ async def test_list_cases_with_single_filter( @pytest.mark.asyncio async def test_list_cases_with_multiple_filters( authenticated_async_client, initialized_beanie_context, data_context, - case_id, customer_id, vin, workshop_id + case_id, customer_id, vin, workshop_id, obd_data_dtc, + timeseries_data_component ): """Test filtering by multiple query params.""" query_string = f"?customer_id={customer_id}&" \ f"vin={vin}&" \ - f"workshop_id={workshop_id}" + f"workshop_id={workshop_id}&" \ + f"obd_data_dtc={obd_data_dtc}&" \ + f"timeseries_data_component={timeseries_data_component}" url = f"/cases{query_string}" async with initialized_beanie_context, data_context: response = await authenticated_async_client.get(url) @@ -271,7 +290,11 @@ async def 
test_list_cases_with_multiple_filters( assert response_data[0]["_id"] == case_id -@pytest.mark.parametrize("query_param", ["customer_id", "vin", "workshop_id"]) +@pytest.mark.parametrize( + "query_param", + ["customer_id", "vin", "workshop_id", "obd_data_dtc", + "timeseries_data_component"] +) @pytest.mark.asyncio async def test_list_cases_with_unmatched_filters( authenticated_async_client, initialized_beanie_context, data_context, diff --git a/api/tests/routers/test_workshop.py b/api/tests/routers/test_workshop.py index 25470d6d..3e51de73 100644 --- a/api/tests/routers/test_workshop.py +++ b/api/tests/routers/test_workshop.py @@ -154,8 +154,13 @@ def authenticated_client( @mock.patch("api.routers.workshop.Case.find_in_hub", autospec=True) def test_list_cases(find_in_hub, authenticated_client, workshop_id): - - async def mock_find_in_hub(customer_id, workshop_id, vin): + async def mock_find_in_hub( + customer_id, + workshop_id, + vin, + obd_data_dtc, + timeseries_data_component + ): return [] # patch Case.find_in_hub to use mock_find_in_hub @@ -168,7 +173,11 @@ async def mock_find_in_hub(customer_id, workshop_id, vin): assert response.status_code == 200 assert response.json() == [] find_in_hub.assert_called_once_with( - customer_id=None, vin=None, workshop_id=workshop_id + customer_id=None, + vin=None, + workshop_id=workshop_id, + obd_data_dtc=None, + timeseries_data_component=None ) @@ -176,8 +185,13 @@ async def mock_find_in_hub(customer_id, workshop_id, vin): def test_list_cases_with_filters( find_in_hub, authenticated_client, workshop_id ): - - async def mock_find_in_hub(customer_id, workshop_id, vin): + async def mock_find_in_hub( + customer_id, + workshop_id, + vin, + obd_data_dtc, + timeseries_data_component + ): return [] # patch Case.find_in_hub to use mock_find_in_hub @@ -186,16 +200,27 @@ async def mock_find_in_hub(customer_id, workshop_id, vin): # request with filter params customer_id = "test customer" vin = "test vin" + obd_data_dtc = "P0001" + timeseries_data_component = "Test-Comp" response = authenticated_client.get( f"/{workshop_id}/cases", - params={"customer_id": customer_id, "vin": vin} + params={ + "customer_id": customer_id, + "vin": vin, + "obd_data_dtc": obd_data_dtc, + "timeseries_data_component": timeseries_data_component + } ) # confirm expected response and usage of db interface assert response.status_code == 200 assert response.json() == [] find_in_hub.assert_called_once_with( - customer_id=customer_id, vin=vin, workshop_id=workshop_id + customer_id=customer_id, + vin=vin, + workshop_id=workshop_id, + obd_data_dtc=obd_data_dtc, + timeseries_data_component=timeseries_data_component ) @@ -233,7 +258,6 @@ def test_add_case_with_invalid_customer_id( @mock.patch("api.routers.workshop.Case.get", autospec=True) @pytest.mark.asyncio async def test_case_from_workshop(get, case_id, workshop_id): - async def mock_get(*args): """ Always returns a case with case_id and workshop_id predefined in test @@ -270,7 +294,6 @@ async def mock_get(*args): @mock.patch("api.routers.workshop.Case.get", autospec=True) @pytest.mark.asyncio async def test_case_from_workshop_wrong_workshop(get, case_id, workshop_id): - async def mock_get(*args): """ Always returns a case with case_id as predifined in test scope above diff --git a/demo-ui/demo_ui/main.py b/demo-ui/demo_ui/main.py index 2ef7d99b..8c6b412f 100644 --- a/demo-ui/demo_ui/main.py +++ b/demo-ui/demo_ui/main.py @@ -22,7 +22,7 @@ def filter(self, record: logging.LogRecord) -> bool: return record.getMessage().find(self.prefix) == -1 
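+# Note on the root_path changes below: FastAPI's url_path_for() returns paths
+# without the external mount prefix, so when the demo UI is served behind a
+# reverse proxy with a path prefix configured, redirects must re-apply
+# app.root_path manually.  Minimal sketch of the pattern, assuming a prefix
+# of "/demo-ui":
+#
+#     app = FastAPI(root_path="/demo-ui")
+#     redirect_url = app.root_path + app.url_path_for("cases", workshop_id="ws1")
+#     # -> "/demo-ui/..." (the exact path depends on the route definition)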
-app = FastAPI() +app = FastAPI(root_path=settings.root_path) app.add_middleware( SessionMiddleware, secret_key=settings.session_secret, max_age=None ) @@ -232,7 +232,9 @@ def login_post( ) request.session["access_token"] = access_token request.session["refresh_token"] = refresh_token - redirect_url = app.url_path_for("cases", workshop_id=workshop_id) + redirect_url = app.root_path + app.url_path_for( + "cases", workshop_id=workshop_id + ) return redirect_url @@ -287,7 +289,7 @@ async def new_case_post( # remove empty fields form = {k: v for k, v in form.items() if v} case = await post_to_api(ressource_url, access_token, json=dict(form)) - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "case", workshop_id=case["workshop_id"], case_id=case["_id"] ) return redirect_url @@ -330,7 +332,7 @@ def case_delete_get( ressource_url: str = Depends(get_case_url) ): delete_via_api(ressource_url, access_token) - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "cases", workshop_id=request.path_params["workshop_id"] ) return redirect_url @@ -373,7 +375,7 @@ async def new_obd_data_post( ) new_data_id = case["obd_data"][-1]["data_id"] - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "obd_data", workshop_id=case["workshop_id"], case_id=case["_id"], @@ -413,7 +415,7 @@ def obd_data_delete_get( ressource_url: str = Depends(get_obd_data_url) ): delete_via_api(ressource_url, access_token) - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "case", workshop_id=request.path_params["workshop_id"], case_id=request.path_params["case_id"] @@ -465,7 +467,7 @@ async def new_timeseries_data_upload_omniview( ) new_data_id = case["timeseries_data"][-1]["data_id"] - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "timeseries_data", workshop_id=case["workshop_id"], case_id=case["_id"], @@ -497,7 +499,7 @@ async def new_timeseries_data_upload_picoscope( ) new_data_id = case["timeseries_data"][-1]["data_id"] - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "timeseries_data", workshop_id=case["workshop_id"], case_id=case["_id"], @@ -543,7 +545,7 @@ def timeseries_data_delete_get( ressource_url: str = Depends(get_timeseries_data_url) ): delete_via_api(ressource_url, access_token) - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "case", workshop_id=request.path_params["workshop_id"], case_id=request.path_params["case_id"] @@ -582,7 +584,7 @@ async def new_symptom_post( ): form = await request.form() case = await post_to_api(ressource_url, access_token, json=dict(form)) - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "case", workshop_id=case["workshop_id"], case_id=case["_id"] ) return redirect_url @@ -599,7 +601,7 @@ def symptom_delete_get( ressource_url: str = Depends(get_symptoms_url) ): delete_via_api(ressource_url, access_token) - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "case", workshop_id=request.path_params["workshop_id"], case_id=request.path_params["case_id"] @@ -618,7 +620,7 @@ async def start_diagnosis( ressource_url: str = Depends(get_diagnosis_url) ): await post_to_api(ressource_url, access_token=access_token) - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "case", workshop_id=request.path_params["workshop_id"], case_id=request.path_params["case_id"] 
@@ -678,7 +680,7 @@ def diagnosis_delete_get( ressource_url: str = Depends(get_diagnosis_url) ): delete_via_api(ressource_url, access_token) - redirect_url = app.url_path_for( + redirect_url = app.root_path + app.url_path_for( "case", workshop_id=request.path_params["workshop_id"], case_id=request.path_params["case_id"] @@ -686,9 +688,10 @@ def diagnosis_delete_get( return redirect_url -@app.get("/ui/logout", response_class=RedirectResponse) +@app.get("/ui/logout", response_class=RedirectResponse, status_code=303) def logout(request: Request): request.session.pop("access_token", None) request.session.pop("refresh_token", None) flash_message(request, "Sie wurden erfolgreich ausgeloggt.") - return RedirectResponse("/ui", status_code=303) + redirect_url = app.root_path + app.url_path_for("login_get") + return redirect_url diff --git a/demo-ui/demo_ui/settings.py b/demo-ui/demo_ui/settings.py index 965f4ef0..462676f8 100644 --- a/demo-ui/demo_ui/settings.py +++ b/demo-ui/demo_ui/settings.py @@ -16,5 +16,7 @@ class Settings(BaseSettings): timezone: str = "Europe/Berlin" + root_path: str = "" + settings = Settings() diff --git a/demo-ui/demo_ui/templates/case.html b/demo-ui/demo_ui/templates/case.html index 7dfab55e..737a00c7 100644 --- a/demo-ui/demo_ui/templates/case.html +++ b/demo-ui/demo_ui/templates/case.html @@ -1,13 +1,13 @@ {% extends "base.html" %} {% block sidebar %} -Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) -Fälle +Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) +Fälle Ausgewählt: {{ case["_id"] }} {% endblock %} {% block content %} -

Fall {{ case["_id"] }} Zurück zur Übersicht

+

Fall {{ case["_id"] }} Zurück zur Übersicht

Details

diff --git a/demo-ui/demo_ui/templates/cases.html b/demo-ui/demo_ui/templates/cases.html index 27080676..776bd8b5 100644 --- a/demo-ui/demo_ui/templates/cases.html +++ b/demo-ui/demo_ui/templates/cases.html @@ -1,12 +1,12 @@ {% extends "base.html" %} {% block sidebar %} - Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) + Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) Fälle {% endblock %} {% block content %} -

Alle Fälle Neuen Fall anlegen Abmelden

+

Alle Fälle Neuen Fall anlegen Abmelden

diff --git a/demo-ui/demo_ui/templates/diagnosis_report.html b/demo-ui/demo_ui/templates/diagnosis_report.html index d1dc62b6..deb1083a 100644 --- a/demo-ui/demo_ui/templates/diagnosis_report.html +++ b/demo-ui/demo_ui/templates/diagnosis_report.html @@ -1,16 +1,16 @@ {% extends "base.html" %} {% block sidebar %} -Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) -Fälle -Ausgewählt: {{ case_id }} +Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) +Fälle +Ausgewählt: {{ case_id }}Diagnose {% endblock %} {% block content %}

Diagnose Report - Zurück zum Fall + Zurück zum Fall


Fall: {{ case_id }}

diff --git a/demo-ui/demo_ui/templates/http_exception.html b/demo-ui/demo_ui/templates/http_exception.html index 4a9864f7..349e7aab 100644 --- a/demo-ui/demo_ui/templates/http_exception.html +++ b/demo-ui/demo_ui/templates/http_exception.html @@ -1,8 +1,8 @@ {% extends "base.html" %} {% block sidebar %} -Anmeldung Werkstatt -Fälle +Anmeldung Werkstatt +FälleFehler {% endblock %} diff --git a/demo-ui/demo_ui/templates/new_case.html b/demo-ui/demo_ui/templates/new_case.html index 151037d7..1f6ad1ca 100644 --- a/demo-ui/demo_ui/templates/new_case.html +++ b/demo-ui/demo_ui/templates/new_case.html @@ -1,14 +1,14 @@ {% extends "base.html" %} {% block sidebar %} -Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) -Fälle +Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) +FälleNeu anlegen {% endblock %} {% block content %} -

Neuen Fall anlegen Abbrechen

+

Neuen Fall anlegen Abbrechen


@@ -16,7 +16,7 @@

Neuen Fall anlegen Kunde
- +


diff --git a/demo-ui/demo_ui/templates/new_obd_data.html b/demo-ui/demo_ui/templates/new_obd_data.html index 69dbdf1c..7aac5fed 100644 --- a/demo-ui/demo_ui/templates/new_obd_data.html +++ b/demo-ui/demo_ui/templates/new_obd_data.html @@ -1,13 +1,13 @@ {% extends "base.html" %} {% block sidebar %} -
Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) -Fälle -Ausgewählt: {{ request.path_params["case_id"] }} +Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) +Fälle +Ausgewählt: {{ request.path_params["case_id"] }} OBD: Neuer Datensatz {% endblock %} {% block content %} -

Neuen OBD Datensatz anlegen Abbrechen

+

Neuen OBD Datensatz anlegen Abbrechen

Option 1: Fehlercodes manuell bereitstellen

diff --git a/demo-ui/demo_ui/templates/new_symptom.html b/demo-ui/demo_ui/templates/new_symptom.html index 002a21e1..fa5715a8 100644 --- a/demo-ui/demo_ui/templates/new_symptom.html +++ b/demo-ui/demo_ui/templates/new_symptom.html @@ -1,13 +1,13 @@ {% extends "base.html" %} {% block sidebar %} -Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) -Fälle -Ausgewählt: {{ request.path_params["case_id"] }} +Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) +Fälle +Ausgewählt: {{ request.path_params["case_id"] }} Symptome: Neuer Datensatz {% endblock %} {% block content %} -

Neues Symptom hinzufügen Abbrechen

+

Neues Symptom hinzufügen Abbrechen


diff --git a/demo-ui/demo_ui/templates/new_timeseries_data.html b/demo-ui/demo_ui/templates/new_timeseries_data.html index ca1a893e..d7c10e1d 100644 --- a/demo-ui/demo_ui/templates/new_timeseries_data.html +++ b/demo-ui/demo_ui/templates/new_timeseries_data.html @@ -1,13 +1,13 @@ {% extends "base.html" %} {% block sidebar %} -Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) -Fälle -Ausgewählt: {{ request.path_params["case_id"] }} +Werkstatt {{ request.path_params["workshop_id"] }} (Abmelden) +Fälle +Ausgewählt: {{ request.path_params["case_id"] }} OBD: Neuer Datensatz {% endblock %} {% block content %} -

Neues Oszillogramm hochladen Abbrechen

+

Neues Oszillogramm hochladen Abbrechen

ID
diff --git a/dev.env b/dev.env index 46ca056c..2450e970 100644 --- a/dev.env +++ b/dev.env @@ -3,11 +3,24 @@ DEFAULT_RESTART_POLICY=no ROOT_EMAIL=test@test.com BASE_ADDRESS=werkstatthub.docker.localhost + FRONTEND_ADDRESS=${BASE_ADDRESS} +#FRONTEND_PATH=/frontend + DOCS_ADDRESS=docs.${BASE_ADDRESS} +#DOCS_PATH=/docs + KEYCLOAK_ADDRESS=keycloak.${BASE_ADDRESS} +#KEYCLOAK_PATH=/keycloak + API_ADDRESS=api.${BASE_ADDRESS} +#API_PATH=/fastapi + TRAEFIK_ADDRESS= traefik.${BASE_ADDRESS} +#TRAEFIK_PATH=`/api`, `/dashboard` + +# dashboard: 8m9d8e3816w2efpSReCKuf82w0D9sXpL +TRAEFIK_BASIC_AUTH_USERS='dashboard:$apr1$UOHqlCsx$xM.l6x2abKQnS49CcxBsu.' PROXY_DEFAULT_ENTRYPOINTS=web #websecure PROXY_DEFAULT_SCHEME=http @@ -53,6 +66,7 @@ KEYCLOAK_FRONTED_REDIRECT_URIS=" API_ALLOW_ORIGINS=http://localhost:4200,http://localhost:4300,${PROXY_DEFAULT_SCHEME}://${FRONTEND_ADDRESS} API_KEY_DIAGNOSTICS=diagnostics-key-dev +API_KEY_ASSETS=assets-key-dev API_LOG_LEVEL=info REDIS_PASSWORD="redispw" diff --git a/docker-compose.yml b/docker-compose.yml index 8ca74969..da429d78 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,19 +12,36 @@ services: - full - rollout1 command: + # Basic Configuration + - "--log.level=ERROR" + - "--api=true" + - "--providers.docker=true" + - "--providers.docker.network=hubnet" + - "--providers.docker.exposedByDefault=false" + + # EntryPoints + - "--entrypoints.web.address=:80" + #- "--entrypoints.web.http.redirections.entryPoint.to=websecure" + + - "--entrypoints.websecure.address=:443" + - "--entrypoints.websecure.http.tls=true" + # Let's Encrypt - - '--certificatesresolvers.letsencrypt.acme.email=${ROOT_EMAIL:?error}' - - '--certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web' - - '--certificatesresolvers.letsencrypt.acme.httpchallenge=true' - - '--certificatesresolvers.letsencrypt.acme.storage=/certs/acme.json' + - "--certificatesResolvers.letsencrypt.acme.email=${ROOT_EMAIL:?error}" + - "--certificatesResolvers.letsencrypt.acme.httpChallenge.entryPoint=web" + - "--certificatesResolvers.letsencrypt.acme.httpChallenge=true" + - "--certificatesResolvers.letsencrypt.acme.storage=/certs/acme.json" + # Global Certificate Resolver - - '--entrypoints.websecure.http.tls.certresolver=${PROXY_DEFAULT_CERTIFICATE_RESOLVER:-}' + - "--entrypoints.websecure.http.tls.certresolver=${PROXY_DEFAULT_CERTIFICATE_RESOLVER:-}" labels: - traefik.enable=true - traefik.docker.network=hubnet - - traefik.http.routers.traefik.rule=Host(`${TRAEFIK_ADDRESS:?error}`) + - traefik.http.routers.traefik.rule=Host(`${TRAEFIK_ADDRESS:?error}`) && PathPrefix(`/api`, `/dashboard`) - traefik.http.routers.traefik.entrypoints=${PROXY_DEFAULT_ENTRYPOINTS:?error} + - traefik.http.middlewares.traefik-auth.basicauth.users=${TRAEFIK_BASIC_AUTH_USERS:?error} + - traefik.http.routers.traefik.middlewares=traefik-auth@docker - traefik.http.routers.traefik.service=api@internal networks: hubnet: @@ -36,9 +53,9 @@ services: ports: - 80:80 - 443:443 + - 9000:9000 volumes: - /var/run/docker.sock:/var/run/docker.sock:ro - - ./proxy/proxy-config.yml:/etc/traefik/traefik.yml:ro - proxy-certs:/certs mongo: @@ -77,8 +94,10 @@ services: - traefik.enable=true - traefik.docker.network=hubnet - - traefik.http.routers.keycloak.rule=Host(`${KEYCLOAK_ADDRESS:?error}`) + - traefik.http.routers.keycloak.rule=Host(`${KEYCLOAK_ADDRESS:?error}`) && PathPrefix(`${KEYCLOAK_PATH:-/}`) - traefik.http.routers.keycloak.entrypoints=${PROXY_DEFAULT_ENTRYPOINTS:?error} + - 
traefik.http.middlewares.keycloak-remove-prefix.stripprefix.prefixes=${KEYCLOAK_PATH:-/} + - traefik.http.routers.keycloak.middlewares=keycloak-remove-prefix@docker - traefik.http.routers.keycloak.service=keycloak - traefik.http.services.keycloak.loadbalancer.server.port=8080 networks: @@ -128,7 +147,7 @@ services: context: ./api <<: *default_restart_policy # DEVELOPMENT: run with reload and mount api package code - command: uvicorn --reload api.main:app + command: uvicorn --reload api.main:app --root-path ${API_PATH:-""} profiles: - full - rollout1 @@ -148,8 +167,10 @@ services: - traefik.enable=true - traefik.docker.network=hubnet - - traefik.http.routers.api.rule=Host(`${API_ADDRESS:?error}`) + - traefik.http.routers.api.rule=Host(`${API_ADDRESS:?error}`) && PathPrefix(`${API_PATH:-/}`) - traefik.http.routers.api.entrypoints=${PROXY_DEFAULT_ENTRYPOINTS:?error} + - traefik.http.middlewares.api-remove-prefix.stripprefix.prefixes=${API_PATH:-/} + - traefik.http.routers.api.middlewares=api-remove-prefix@docker - traefik.http.routers.api.service=api - traefik.http.services.api.loadbalancer.server.port=8000 networks: @@ -160,14 +181,16 @@ services: - 127.0.0.1:8000:8000 # For pytest volumes: - ./api/api/:/home/api/api/ + - asset-data:/home/api/asset-data frontend: build: context: ./frontend args: - - API_ADDRESS=${API_ADDRESS:?error} - - FRONTEND_ADDRESS=${FRONTEND_ADDRESS:?error} - - KEYCLOAK_ADDRESS=${KEYCLOAK_ADDRESS:?error} + - API_ADDRESS=${API_ADDRESS:?error}${API_PATH:-} + - FRONTEND_ADDRESS=${FRONTEND_ADDRESS:?error}${FRONTEND_PATH:-} + - FRONTEND_PATH=${FRONTEND_PATH:-} + - KEYCLOAK_ADDRESS=${KEYCLOAK_ADDRESS:?error}${KEYCLOAK_PATH:-} - KEYCLOAK_FRONTEND_CLIENT=${KEYCLOAK_FRONTEND_CLIENT:?error} - KEYCLOAK_REALM=${KEYCLOAK_REALM:?error} - FRONTEND_LOG_LEVEL=${FRONTEND_LOG_LEVEL:?error} @@ -184,8 +207,10 @@ services: - traefik.enable=true - traefik.docker.network=hubnet - - traefik.http.routers.frontend.rule=Host(`${FRONTEND_ADDRESS:?error}`) + - traefik.http.routers.frontend.rule=Host(`${FRONTEND_ADDRESS:?error}`) && PathPrefix(`${FRONTEND_PATH:-/}`) - traefik.http.routers.frontend.entrypoints=${PROXY_DEFAULT_ENTRYPOINTS:?error} + - traefik.http.middlewares.frontend-remove-prefix.stripprefix.prefixes=${FRONTEND_PATH:-/} + - traefik.http.routers.frontend.middlewares=frontend-remove-prefix@docker - traefik.http.routers.frontend.service=frontend - traefik.http.services.frontend.loadbalancer.server.port=80 networks: @@ -204,8 +229,10 @@ services: - traefik.enable=true - traefik.docker.network=hubnet - - traefik.http.routers.docs.rule=Host(`${DOCS_ADDRESS:?error}`) + - traefik.http.routers.docs.rule=Host(`${DOCS_ADDRESS:?error}`) && PathPrefix(`${DOCS_PATH:-/}`) - traefik.http.routers.docs.entrypoints=${PROXY_DEFAULT_ENTRYPOINTS:?error} + - traefik.http.middlewares.docs-remove-prefix.stripprefix.prefixes=${DOCS_PATH:-/} + - traefik.http.routers.docs.middlewares=docs-remove-prefix@docker - traefik.http.routers.docs.service=docs - traefik.http.services.docs.loadbalancer.server.port=80 networks: @@ -253,7 +280,7 @@ services: - full - rollout1 ports: - - 3030:3030 + - 127.0.0.1:3030:3030 hostname: "knowledge-graph" networks: - hubintranet @@ -285,3 +312,4 @@ volumes: mongo-data: keycloak-db-data: proxy-certs: + asset-data: diff --git a/frontend/Dockerfile b/frontend/Dockerfile index b89061d0..4383b2bc 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -27,6 +27,7 @@ RUN flutter pub get ARG API_ADDRESS ARG FRONTEND_ADDRESS +ARG FRONTEND_PATH ARG FRONTEND_BASIC_AUTH_KEY ARG 
KEYCLOAK_ADDRESS ARG KEYCLOAK_FRONTEND_CLIENT @@ -36,7 +37,7 @@ ARG FRONTEND_REDIRECT_URI_MOBILE ARG PROXY_DEFAULT_SCHEME RUN flutter pub run build_runner build --delete-conflicting-outputs --release -RUN flutter build web +RUN flutter build web --base-href $FRONTEND_PATH/ FROM nginx:alpine AS serve diff --git a/frontend/app/assets/localization/de-DE.yaml b/frontend/app/assets/localization/de-DE.yaml index 59527646..c0c6f0e4 100644 --- a/frontend/app/assets/localization/de-DE.yaml +++ b/frontend/app/assets/localization/de-DE.yaml @@ -1,14 +1,19 @@ general: + apply: "Anwenden" cancel: "Abbrechen" case: "Fall" city: "Stadt" close: "Schließen" + component: "Komponente" + confirm: "Bestätigen" + create: "Erstellen" customer: "Kunde" customerId: "Kunden ID" dataType: "Datentyp" date: "Datum" datasets: "Datensätze" delete: "Löschen" + details: "Details" edit: "Bearbeiten" email: "E-Mail" filter: "Filtern" @@ -32,6 +37,7 @@ general: noWorkshopId: "Keine @:general.workshop ID" notFound: "nicht gefunden" notSpecified: "keine Angabe" + obd: "OBD" obligatoryField: "Pflichtfeld" occasion: "Anlass" phone: "Telefon" @@ -46,6 +52,7 @@ general: state: "Zustand" status: "Status" street: "Straße" + symptom: "Symptom" then: "Dann" transition: "Übergang" tsn: "TSN" @@ -56,6 +63,7 @@ general: workshop: "Werkstatt" workshopId: "Werkstatt ID" yes: "Ja" + unknownDateTime: "Unbekannte Zeit" diagnoses: title: "Diagnosen" actions: @@ -70,7 +78,7 @@ diagnoses: details: headline: "Detailansicht" deleteDiagnosisSuccessMessage: "Die Diagnose wurde erfolgreich gelöscht." - deleteDiagnosisErrorMessage: "Der Diagnose konnte nicht gelöscht werden." + deleteDiagnosisErrorMessage: "Die Diagnose konnte nicht gelöscht werden." startDiagnosisSuccessMessage: "Die Diagnose wurde erfolgreich gestartet." startDiagnosisFailureMessage: "Die Diagnose konnte nicht erfolgreich gestartet werden." dialog: @@ -94,9 +102,10 @@ cases: title: "Fälle" actions: addCase: "Neuen Fall anlegen" - updateCase: "Fall bearbeiten" - sortCases: "@:general.sort" + createAsset: "Asset erstellen" filterCases: "@:general.filter" + sortCases: "@:general.sort" + updateCase: "Fall bearbeiten" occasions: notSpecified: "@:general.notSpecified" service: "@:general.service" @@ -104,9 +113,13 @@ cases: status: open: "Offen" closed: "Geschlossen" - filterDialog: - title: "Fälle filtern" - toggleShared: "Fälle aus allen Werkstätten anzeigen" + createAssetDialog: + title: "Asset erstellen" + name: "Name" + description: "Beschreibung" + author: "Autor" + successMessage: "Das Asset wurde erfolgreich erstellt." + errorMessage: "Das Asset konnte nicht erstellt werden." addCaseDialog: customerTooltip: "Fall einem Kunden zuordnen" createNewCustomerTooltip: "Neuen Kunden erstellen" @@ -116,6 +129,15 @@ cases: confirmDialog: title: "Kunde hinzufügen" description: "Möchten Sie Kunde {customer} zum Fall hinzufügen?" + filterDialog: + error: "Fehler-Code" + noFilterInput: "Bitte mindestens einen Filter eingeben" + resetFilterCriteria: "Filter zurücksetzen" + title: "Fälle filtern" + toggleShared: "Fälle aus allen Werkstätten anzeigen" + tooltip: "Bitte auswählen" + vin: "Die ersten sechs Stellen der VIN" + vinLengthInvalid: "Muss maximal sechs Zeichen lang sein" details: headline: "Detailansicht" dialog: @@ -124,6 +146,9 @@ cases: showDiagnosis: "Diagnose anzeigen" deleteCaseSuccessMessage: "Der Fall wurde erfolgreich gelöscht." deleteCaseErrorMessage: "Der Fall konnte nicht gelöscht werden." + deleteDataSuccessMessage: "Der Datensatz wurde erfolgreich gelöscht." 
+ deleteDataUnknownDataTypeMessage: "Der Datensatz konnte nicht gelöscht werden. Fehler: Unbekannter Datensatz-Typ" + deleteDataErrorMessage: "Der Datensatz konnte nicht gelöscht werden." occasion: unknown: "Unbekannt" service_routine: "@:general.service" @@ -150,6 +175,33 @@ vehicles: updateVehicle: "Fahrzeug bearbeiten" details: headline: "Detailansicht" +assets: + title: "Assets" + headlines: + description: "Beschreibung" + filter: "Filter" + name: "Name" + timeOfGeneration: "Zeitpunkt der Generation" + confirmation: + title: Asset Upload + description: "Möchten Sie das Asset wirklich anbieten?" + remove: + title: "Asset entfernen" + description: "Möchten Sie das Asset wirklich entfernen?" + details: + deleteAssetSuccessMessage: "Das Asset wurde erfolgreich gelöscht." + deleteAssetErrorMessage: "Das Asset konnte nicht gelöscht werden." + publishAssetSuccessMessage: "Das Asset wurde erfolgreich angeboten." + publishAssetErrorMessage: "Das Asset konnte nicht angeboten werden." + upload: + title: "Asset-Upload" + price: "Preis" + license: "Lizenz" + privateKey: "Private Key" + offer: "Anbieten" + retract: "Entfernen" + invalidPriceFormat: "Ungültiges Preis-Format" + noAssets: "Keine Assets" training: title: "Schulung" pageNotFound: @@ -168,8 +220,8 @@ forms: invalidNumber: "Ungültige Zahlen in Feldern" obd: label: "Diagnose-Fehlercodes" - hint: "Geben Sie Codes getrennt durch Kommas ein" - suffix: "z.B. P0001, U0001, U0002" + hint: "Eingabe getrennt durch Kommas" + helper: "z.B. P0001, U0001, U0002 (je 5 Zeichen)" omniview: component: label: "Komponente" @@ -185,6 +237,7 @@ forms: labelA: "Komponente A" labelB: "Komponente B" labelC: "Komponente C" + labelD: "Komponente D" label: "Label" hint: "Komponente eingeben" timeseries: diff --git a/frontend/app/assets/localization/en-GB.yaml b/frontend/app/assets/localization/en-GB.yaml index a7255063..c8c63a01 100644 --- a/frontend/app/assets/localization/en-GB.yaml +++ b/frontend/app/assets/localization/en-GB.yaml @@ -1,14 +1,19 @@ general: + apply: "Apply" cancel: "Cancel" case: "Case" city: "City" close: "Close" + component: "Component" + confirm: "Confirm" + create: "Create" customer: "Customer" - customerId: "Customer id" + customerId: "Customer ID" dataType: "Data Type" date: "Date" datasets: "Datasets" delete: "Delete" + details: "Details" edit: "Edit" email: "Email" filter: "Filter" @@ -23,16 +28,17 @@ general: logout: "Logout" milage: "Milage" no: "No" - noCustomerId: "No customer id" + noCustomerId: "No customer ID" noData: "No data available" noDate: "No Date" noDiagnoses: "There is currently no information about ongoing diagnoses." noVehicles: "There is currently no information about the vehicles." - noVehicleVin: "No vehicle vin" - noWorkshopId: "No workshop id" + noVehicleVin: "No vehicle VIN" + noWorkshopId: "No workshop ID" notFound: "Not found" notSpecified: "Not specified" number: "No." 
+ obd: "OBD" obligatoryField: "Obligatory" occasion: "Occasion" phone: "Phone" @@ -47,16 +53,18 @@ general: state: "State" status: "Status" street: "Street" + symptom: "Symptom" then: "Then" transition: "Transition" tsn: "TSN" unnamed: "Unnamed" vehicle: "Vehicle" - vehicleVin: "Vehicle vin" + vehicleVin: "Vehicle VIN" vin: "VIN" workshop: "Workshop" - workshopId: "Workshop id" + workshopId: "Workshop ID" yes: "Yes" + unknownDateTime: "Unknown Time" diagnoses: title: "Diagnosis" actions: @@ -95,9 +103,10 @@ cases: title: "Cases" actions: addCase: "Add case" - updateCase: "Update case" - sortCases: "@:general.sort" + createAsset: "Create asset" filterCases: "@:general.filter" + sortCases: "@:general.sort" + updateCase: "Update case" occasions: notSpecified: "@:general.notSpecified" service: "@:general.service" @@ -105,9 +114,13 @@ cases: status: open: "Open" closed: "Closed" - filterDialog: - title: "Filter cases" - toggleShared: "Show cases from all workshops" + createAssetDialog: + title: "Create asset" + name: "Name" + description: "Description" + author: "Author" + successMessage: "The asset was successfully created" + errorMessage: "The asset could not be created." addCaseDialog: customerTooltip: "Assign case to a customer" createNewCustomerTooltip: "Create new customer" @@ -117,6 +130,15 @@ cases: confirmDialog: title: "Add customer" description: "Do you want to add customer {customer} to the case?" + filterDialog: + error: "Error code" + noFilterInput: "Please enter at least one filter" + resetFilterCriteria: "Reset filter" + title: "Filter cases" + toggleShared: "Show cases from all workshops" + tooltip: "Please select" + vin: "First six numbers of vehicle vin" + vinLengthInvalid: "Must be a maximum of six characters long" details: headline: "Details" dialog: @@ -125,11 +147,14 @@ cases: showDiagnosis: "Show diagnosis" deleteCaseSuccessMessage: "The case was successfully deleted." deleteCaseErrorMessage: "The case could not be deleted." + deleteDataSuccessMessage: "The dataset was successfully deleted." + deleteDataUnknownDataTypeMessage: "The dataset cannot be deleted. Error: Unknown data record type" + deleteDataErrorMessage: "The dataset cannot be deleted." occasion: unknown: "Unknown" service_routine: "@:general.service" problem_defect: "@:general.repair" - startDiagnosis: "Start diagnosis" + startDiagnosis: "Start Diagnosis" status: open: "Open" closed: "Closed" @@ -138,7 +163,7 @@ cases: symptoms: "Symptoms" timeseriesData: "Timeseries Data" uploadData: - label: "Upload data" + label: "Upload Data" customers: title: "Customers" headlines: @@ -151,6 +176,33 @@ vehicles: updateVehicle: "Update vehicle" details: headline: "Details" +assets: + title: "Assets" + headlines: + description: "Description" + filter: "Filter" + name: "Name" + timeOfGeneration: "Time of Generation" + confirmation: + title: Asset Upload + description: "Do you really would like to offer the asset?" + remove: + title: "Remove Asset" + description: "Do you really would like to remove the asset?" + details: + deleteAssetSuccessMessage: "The asset was successfully deleted." + deleteAssetErrorMessage: "The asset could not be deleted." + publishAssetSuccessMessage: "The asset was successfully offered." + publishAssetErrorMessage: "The asset could not be offered." 
+ upload: + title: "Asset Upload" + price: "Price" + license: "License" + privateKey: "Private Key" + offer: "Offer" + retract: "Retract" + invalidPriceFormat: "Invalid price format" + noAssets: "No Assets" training: title: "Training" pageNotFound: @@ -170,7 +222,7 @@ forms: obd: label: "Diagnostic Trouble Codes" hint: "Enter codes, separated by commas" - suffix: "e.g., P0001, U0001, U0002" + helper: "e.g., P0001, U0001, U0002 (for each 5 characters)" omniview: component: label: "Component" @@ -186,6 +238,7 @@ forms: labelA: "Component A" labelB: "Component B" labelC: "Component C" + labelD: "Component D" label: "Label" hint: "Enter a Component" timeseries: diff --git a/frontend/app/lib/aw_hub_app.dart b/frontend/app/lib/aw_hub_app.dart index 4830c4dc..91a7be81 100644 --- a/frontend/app/lib/aw_hub_app.dart +++ b/frontend/app/lib/aw_hub_app.dart @@ -1,9 +1,11 @@ import "package:aw40_hub_frontend/configs/localization_config.dart"; import "package:aw40_hub_frontend/main.dart"; +import "package:aw40_hub_frontend/providers/asset_provider.dart"; import "package:aw40_hub_frontend/providers/auth_provider.dart"; import "package:aw40_hub_frontend/providers/case_provider.dart"; import "package:aw40_hub_frontend/providers/customer_provider.dart"; import "package:aw40_hub_frontend/providers/diagnosis_provider.dart"; +import "package:aw40_hub_frontend/providers/knowledge_provider.dart"; import "package:aw40_hub_frontend/providers/theme_provider.dart"; import "package:aw40_hub_frontend/providers/vehicle_provider.dart"; import "package:aw40_hub_frontend/routing/router.dart"; @@ -58,15 +60,15 @@ class AWHubApp extends StatelessWidget { ), ChangeNotifierProxyProvider( create: (_) => DiagnosisProvider(httpService), - update: (_, authProvider, caseProvider) => + update: (_, authProvider, diagnosisProvider) => // ignore: discarded_futures - caseProvider!..fetchAndSetAuthToken(authProvider), + diagnosisProvider!..fetchAndSetAuthToken(authProvider), ), ChangeNotifierProxyProvider( create: (_) => CustomerProvider(httpService), - update: (_, authProvider, caseProvider) => + update: (_, authProvider, customerProvider) => // ignore: discarded_futures - caseProvider!..fetchAndSetAuthToken(authProvider), + customerProvider!..fetchAndSetAuthToken(authProvider), ), ChangeNotifierProxyProvider( create: (_) => VehicleProvider(httpService), @@ -74,6 +76,18 @@ class AWHubApp extends StatelessWidget { // ignore: discarded_futures vehicleProvider!..fetchAndSetAuthToken(authProvider), ), + ChangeNotifierProxyProvider( + create: (_) => AssetProvider(httpService), + update: (_, authProvider, assetProvider) => + // ignore: discarded_futures + assetProvider!..fetchAndSetAuthToken(authProvider), + ), + ChangeNotifierProxyProvider( + create: (_) => KnowledgeProvider(httpService), + update: (_, authProvider, knowledgeProvider) => + // ignore: discarded_futures + knowledgeProvider!..fetchAndSetAuthToken(authProvider), + ), ChangeNotifierProvider( create: (_) => ThemeProvider(), ), diff --git a/frontend/app/lib/data_sources/assets_data_table_source.dart b/frontend/app/lib/data_sources/assets_data_table_source.dart new file mode 100644 index 00000000..dcafef5d --- /dev/null +++ b/frontend/app/lib/data_sources/assets_data_table_source.dart @@ -0,0 +1,53 @@ +import "package:aw40_hub_frontend/models/asset_model.dart"; +import "package:aw40_hub_frontend/utils/extensions.dart"; +import "package:easy_localization/easy_localization.dart"; +import "package:flutter/material.dart"; + +class AssetsDataTableSource extends DataTableSource { + 
AssetsDataTableSource({ + required this.themeData, + required this.selectedAssetIndexNotifier, + required this.assetModels, + required this.onPressedRow, + }); + List assetModels; + final void Function(int) onPressedRow; + final ThemeData themeData; + final ValueNotifier selectedAssetIndexNotifier; + + @override + DataRow? getRow(int index) { + final assetModel = assetModels[index]; + + final String? formattedDateTime = + assetModel.timestamp?.toGermanDateString(); + + return DataRow( + onSelectChanged: (_) => onPressedRow(index), + selected: selectedAssetIndexNotifier.value == index, + color: MaterialStateProperty.resolveWith( + (Set states) { + if (states.contains(MaterialState.selected)) { + return themeData.colorScheme.primary.withOpacity(0.08); + } + return null; // Use the default value. + }), + cells: [ + DataCell(Text(assetModel.name)), + DataCell( + Text(assetModel.definition.toJsonWithoutNullValues().toString()), + ), + DataCell(Text(formattedDateTime ?? tr("general.unknownDateTime"))), + ], + ); + } + + @override + bool get isRowCountApproximate => false; + + @override + int get rowCount => assetModels.length; + + @override + int get selectedRowCount => 0; +} diff --git a/frontend/app/lib/data_sources/cases_data_table_source.dart b/frontend/app/lib/data_sources/cases_data_table_source.dart index 33a74bc7..e24156e5 100644 --- a/frontend/app/lib/data_sources/cases_data_table_source.dart +++ b/frontend/app/lib/data_sources/cases_data_table_source.dart @@ -7,14 +7,14 @@ import "package:flutter/material.dart"; class CasesDataTableSource extends DataTableSource { CasesDataTableSource({ required this.themeData, - required this.currentIndexNotifier, + required this.selectedCaseIndexNotifier, required this.caseModels, required this.onPressedRow, }); List caseModels; final void Function(int) onPressedRow; final ThemeData themeData; - final ValueNotifier currentIndexNotifier; + final ValueNotifier selectedCaseIndexNotifier; final Map caseStatusIcons = { CaseStatus.open: Icons.cached, CaseStatus.closed: Icons.done, @@ -37,7 +37,7 @@ class CasesDataTableSource extends DataTableSource { final caseModel = caseModels[index]; return DataRow( onSelectChanged: (_) => onPressedRow(index), - selected: currentIndexNotifier.value == index, + selected: selectedCaseIndexNotifier.value == index, color: MaterialStateProperty.resolveWith( (Set states) { if (states.contains(MaterialState.selected)) { diff --git a/frontend/app/lib/dialogs/create_asset_dialog.dart b/frontend/app/lib/dialogs/create_asset_dialog.dart new file mode 100644 index 00000000..fe954caf --- /dev/null +++ b/frontend/app/lib/dialogs/create_asset_dialog.dart @@ -0,0 +1,193 @@ +import "dart:async"; + +import "package:aw40_hub_frontend/dtos/asset_definition_dto.dart"; +import "package:aw40_hub_frontend/dtos/new_asset_dto.dart"; +import "package:aw40_hub_frontend/providers/asset_provider.dart"; +import "package:aw40_hub_frontend/providers/case_provider.dart"; +import "package:easy_localization/easy_localization.dart"; +import "package:flutter/material.dart"; +import "package:provider/provider.dart"; +import "package:routemaster/routemaster.dart"; + +class CreateAssetDialog extends StatelessWidget { + CreateAssetDialog({super.key}); + + final TextEditingController _nameController = TextEditingController(); + final TextEditingController _descriptionController = TextEditingController(); + final TextEditingController _authorController = TextEditingController(); + + late final AssetProvider _assetProvider; + late final CaseProvider _caseProvider; 
+  final _formKey = GlobalKey<FormState>();
+
+  @override
+  Widget build(BuildContext context) {
+    final theme = Theme.of(context);
+    _assetProvider = Provider.of<AssetProvider>(context, listen: false);
+    _caseProvider = Provider.of<CaseProvider>(context, listen: false);
+
+    return AlertDialog(
+      title: Text(tr("cases.createAssetDialog.title")),
+      content: CreateAssetDialogContent(
+        nameController: _nameController,
+        descriptionController: _descriptionController,
+        authorController: _authorController,
+        formKey: _formKey,
+      ),
+      actions: [
+        TextButton(
+          onPressed: () async => _onCancel(context),
+          child: Text(
+            tr("general.cancel"),
+            style: theme.textTheme.labelLarge?.copyWith(
+              color: theme.colorScheme.error,
+            ),
+          ),
+        ),
+        TextButton(
+          onPressed: () async => _createAsset(context, _formKey),
+          child: Text(tr("general.create")),
+        ),
+      ],
+    );
+  }
+
+  Future<void> _onCancel(BuildContext context) async {
+    await Routemaster.of(context).pop();
+  }
+
+  Future<void> _createAsset(
+    BuildContext context,
+    final GlobalKey<FormState> formKey,
+  ) async {
+    final messengerState = ScaffoldMessenger.of(context);
+    final FormState? currentFormKeyState = _formKey.currentState;
+
+    if (currentFormKeyState != null && currentFormKeyState.validate()) {
+      currentFormKeyState.save();
+
+      final name = _nameController.text;
+      final description = _descriptionController.text;
+      final author = _authorController.text;
+
+      final filterCriteria = _caseProvider.filterCriteria;
+      final definition = AssetDefinitionDto(
+        filterCriteria?.vin,
+        filterCriteria?.obdDataDtc,
+        filterCriteria?.timeseriesDataComponent,
+      );
+
+      final newAsset = NewAssetDto(
+        name,
+        definition,
+        description,
+        author,
+      );
+
+      final result = await _assetProvider.createAsset(newAsset);
+      final String snackBarText = result != null
+          ? tr("cases.createAssetDialog.successMessage")
+          : tr("cases.createAssetDialog.errorMessage");
+      messengerState.showSnackBar(SnackBar(content: Text(snackBarText)));
+
+      // ignore: use_build_context_synchronously
+      await Routemaster.of(context).pop();
+    }
+  }
+}
+
+class CreateAssetDialogContent extends StatefulWidget {
+  const CreateAssetDialogContent({
+    required this.nameController,
+    required this.descriptionController,
+    required this.authorController,
+    required this.formKey,
+    super.key,
+  });
+
+  final TextEditingController nameController;
+  final TextEditingController descriptionController;
+  final TextEditingController authorController;
+  final GlobalKey<FormState> formKey;
+
+  @override
+  State<CreateAssetDialogContent> createState() =>
+      _CreateAssetDialogContentState();
+}
+
+class _CreateAssetDialogContentState extends State<CreateAssetDialogContent> {
+  @override
+  Widget build(BuildContext context) {
+    return Form(
+      key: widget.formKey,
+      child: SizedBox(
+        height: 250,
+        width: 350,
+        child: Column(
+          children: [
+            SizedBox(
+              width: 320,
+              height: 66,
+              child: TextFormField(
+                autovalidateMode: AutovalidateMode.onUserInteraction,
+                decoration: InputDecoration(
+                  labelText: tr("cases.createAssetDialog.name"),
+                  border: const OutlineInputBorder(),
+                  errorStyle: const TextStyle(height: 0.1),
+                ),
+                controller: widget.nameController,
+                validator: (value) {
+                  if (value == null || value.isEmpty) {
+                    return tr("general.obligatoryField");
+                  }
+                  return null;
+                },
+              ),
+            ),
+            Padding(
+              padding: const EdgeInsets.only(top: 16, bottom: 16),
+              child: SizedBox(
+                width: 320,
+                height: 66,
+                child: TextFormField(
+                  autovalidateMode: AutovalidateMode.onUserInteraction,
+                  decoration: InputDecoration(
+                    labelText: tr("cases.createAssetDialog.description"),
+                    border: const OutlineInputBorder(),
+                    errorStyle: const
TextStyle(height: 0.1), + ), + controller: widget.descriptionController, + validator: (value) { + if (value == null || value.isEmpty) { + return tr("general.obligatoryField"); + } + return null; + }, + ), + ), + ), + SizedBox( + width: 320, + height: 66, + child: TextFormField( + autovalidateMode: AutovalidateMode.onUserInteraction, + decoration: InputDecoration( + labelText: tr("cases.createAssetDialog.author"), + border: const OutlineInputBorder(), + errorStyle: const TextStyle(height: 0.1), + ), + controller: widget.authorController, + validator: (value) { + if (value == null || value.isEmpty) { + return tr("general.obligatoryField"); + } + return null; + }, + ), + ), + ], + ), + ), + ); + } +} diff --git a/frontend/app/lib/dialogs/filter_cases_dialog.dart b/frontend/app/lib/dialogs/filter_cases_dialog.dart index b054d6a1..de2ccf4e 100644 --- a/frontend/app/lib/dialogs/filter_cases_dialog.dart +++ b/frontend/app/lib/dialogs/filter_cases_dialog.dart @@ -1,60 +1,227 @@ +import "dart:async"; + +import "package:aw40_hub_frontend/exceptions/app_exception.dart"; import "package:aw40_hub_frontend/providers/case_provider.dart"; +import "package:aw40_hub_frontend/providers/knowledge_provider.dart"; +import "package:aw40_hub_frontend/text_input_formatters/upper_case_text_input_formatter.dart"; +import "package:aw40_hub_frontend/utils/enums.dart"; +import "package:aw40_hub_frontend/utils/filter_criteria.dart"; import "package:easy_localization/easy_localization.dart"; import "package:flutter/material.dart"; import "package:provider/provider.dart"; import "package:routemaster/routemaster.dart"; class FilterCasesDialog extends StatelessWidget { - const FilterCasesDialog({super.key}); + FilterCasesDialog({super.key}); + + final TextEditingController _obdDataDtcController = TextEditingController(); + final TextEditingController _vinController = TextEditingController(); + final TextEditingController _timeseriesDataComponentController = + TextEditingController(); + + late final CaseProvider _caseProvider; @override Widget build(BuildContext context) { + final theme = Theme.of(context); + _caseProvider = Provider.of(context, listen: false); + + final FilterCriteria? currentFilterCriteria = _caseProvider.filterCriteria; + _obdDataDtcController.text = currentFilterCriteria?.obdDataDtc ?? ""; + _vinController.text = currentFilterCriteria?.vin ?? ""; + _timeseriesDataComponentController.text = + currentFilterCriteria?.timeseriesDataComponent ?? 
""; + return AlertDialog( title: Text(tr("cases.filterDialog.title")), - content: const FilterCasesDialogContent(), + content: FilterCasesDialogContent( + obdDataDtcController: _obdDataDtcController, + vinController: _vinController, + timeseriesDataComponentController: _timeseriesDataComponentController, + ), actions: [ TextButton( - child: Text(tr("general.close")), onPressed: () async => _onCancel(context), + child: Text( + tr("general.cancel"), + style: theme.textTheme.labelLarge?.copyWith( + color: theme.colorScheme.error, + ), + ), + ), + TextButton( + onPressed: () async => _resetFilterCriteria(context), + child: Text( + tr("cases.filterDialog.resetFilterCriteria"), + style: theme.textTheme.labelLarge?.copyWith( + color: theme.colorScheme.error, + ), + ), + ), + TextButton( + onPressed: () async => _applyFilterForCases(context), + child: Text(tr("general.apply")), ), ], ); } + Future _resetFilterCriteria(BuildContext context) async { + _caseProvider.resetFilterCriteria(); + await Routemaster.of(context).pop(); + } + Future _onCancel(BuildContext context) async { await Routemaster.of(context).pop(); } + + bool _containsAnyFilterInput() { + return _obdDataDtcController.text.isNotEmpty || + _vinController.text.isNotEmpty || + _timeseriesDataComponentController.text.isNotEmpty; + } + + Future _applyFilterForCases(BuildContext context) async { + _caseProvider.resetSelectedcaseIndexNotifier(); + + if (!_containsAnyFilterInput()) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar(content: Text(tr("cases.filterDialog.noFilterInput"))), + ); + return; + } + + final obdDataDtc = _obdDataDtcController.text; + final vin = _vinController.text; + final timeseriesDataComponent = _timeseriesDataComponentController.text; + final filterCriteria = FilterCriteria( + obdDataDtc: obdDataDtc.isEmpty ? null : obdDataDtc, + vin: vin.isEmpty ? null : vin, + timeseriesDataComponent: + timeseriesDataComponent.isEmpty ? null : timeseriesDataComponent, + ); + + _caseProvider.setFilterCriteria(filterCriteria); + + // ignore: use_build_context_synchronously + await Routemaster.of(context).pop(); + } } class FilterCasesDialogContent extends StatefulWidget { const FilterCasesDialogContent({ + required this.obdDataDtcController, + required this.vinController, + required this.timeseriesDataComponentController, super.key, }); + final TextEditingController obdDataDtcController; + final TextEditingController vinController; + final TextEditingController timeseriesDataComponentController; + @override State createState() => _FilterCasesDialogContentState(); } class _FilterCasesDialogContentState extends State { - bool _switchState = true; @override Widget build(BuildContext context) { - _switchState = Provider.of(context).showSharedCases; - return Row( - children: [ - Text(tr("cases.filterDialog.toggleShared")), - Switch( - value: _switchState, - onChanged: (v) async { - setState(() { - _switchState = v; - }); - await Provider.of(context, listen: false) - .toggleShowSharedCases(); - }, - ), - ], + final knowledgeProvider = + Provider.of(context, listen: false); + + return FutureBuilder( + // ignore: discarded_futures + future: knowledgeProvider.getVehicleComponents(), + builder: (BuildContext context, AsyncSnapshot> snapshot) { + if (snapshot.connectionState != ConnectionState.done || + !snapshot.hasData) { + return const SizedBox( + height: 250, + width: 350, + child: Center(child: CircularProgressIndicator()), + ); + } + final List? 
vehicleComponents = snapshot.data; + if (vehicleComponents == null) { + throw AppException( + exceptionType: ExceptionType.notFound, + exceptionMessage: "Received no vehicle components data.", + ); + } + return SizedBox( + height: 250, + width: 350, + child: Column( + children: [ + SizedBox( + width: 320, + height: 66, + child: TextFormField( + autovalidateMode: AutovalidateMode.onUserInteraction, + inputFormatters: [UpperCaseTextInputFormatter()], + decoration: InputDecoration( + labelText: tr("cases.filterDialog.error"), + border: const OutlineInputBorder(), + errorStyle: const TextStyle(height: 0.1), + ), + controller: widget.obdDataDtcController, + ), + ), + Padding( + padding: const EdgeInsets.only(top: 16, bottom: 16), + child: SizedBox( + width: 320, + height: 66, + child: TextFormField( + autovalidateMode: AutovalidateMode.onUserInteraction, + inputFormatters: [UpperCaseTextInputFormatter()], + decoration: InputDecoration( + labelText: tr("cases.filterDialog.vin"), + border: const OutlineInputBorder(), + errorStyle: const TextStyle(height: 0.1), + ), + controller: widget.vinController, + validator: (String? value) { + if ((value?.length ?? 0) > 6) { + return tr("cases.filterDialog.vinLengthInvalid"); + } + if (value != null && value.contains(RegExp("[IOQ]"))) { + return tr( + "cases.addCaseDialog.vinCharactersInvalid", + ); + } + + return null; + }, + ), + ), + ), + Tooltip( + message: tr("cases.filterDialog.tooltip"), + child: DropdownMenu( + controller: widget.timeseriesDataComponentController, + label: Text(tr("general.component")), + hintText: tr("forms.optional"), + enableFilter: true, + width: 320, + menuStyle: const MenuStyle(alignment: Alignment.bottomLeft), + dropdownMenuEntries: + vehicleComponents.map>( + (String vehicleComponent) { + return DropdownMenuEntry( + value: vehicleComponent, + label: vehicleComponent, + ); + }, + ).toList(), + ), + ), + ], + ), + ); + }, ); } } diff --git a/frontend/app/lib/dialogs/offer_assets_dialog.dart b/frontend/app/lib/dialogs/offer_assets_dialog.dart new file mode 100644 index 00000000..5a8f3c8d --- /dev/null +++ b/frontend/app/lib/dialogs/offer_assets_dialog.dart @@ -0,0 +1,207 @@ +import "dart:async"; + +import "package:aw40_hub_frontend/dtos/new_publication_dto.dart"; +import "package:aw40_hub_frontend/exceptions/app_exception.dart"; +import "package:aw40_hub_frontend/forms/offer_assets_form.dart"; +import "package:aw40_hub_frontend/models/new_publication_model.dart"; +import "package:aw40_hub_frontend/providers/asset_provider.dart"; +import "package:aw40_hub_frontend/services/ui_service.dart"; +import "package:aw40_hub_frontend/utils/enums.dart"; +import "package:easy_localization/easy_localization.dart"; +import "package:flutter/material.dart"; +import "package:logging/logging.dart"; +import "package:provider/provider.dart"; +import "package:routemaster/routemaster.dart"; + +class OfferAssetsDialog extends StatefulWidget { + const OfferAssetsDialog({ + required this.assetModelId, + super.key, + }); + + final String assetModelId; + + @override + State createState() => _OfferAssetsDialogState(); +} + +class _OfferAssetsDialogState extends State { + // ignore: unused_field + final Logger _logger = Logger("offer_assets_dialog"); + final _formKey = GlobalKey(); + final TextEditingController _priceController = TextEditingController(); + final TextEditingController _licenseController = TextEditingController(); + final TextEditingController _privateKeyController = TextEditingController(); + + late AssetProvider _assetProvider; + + final 
title = tr("assets.upload.title");
+
+  @override
+  Widget build(BuildContext context) {
+    final theme = Theme.of(context);
+    _assetProvider = Provider.of<AssetProvider>(context, listen: false);
+
+    return AlertDialog(
+      title: Text(title),
+      content: OfferAssetsDialogForm(
+        formKey: _formKey,
+        priceController: _priceController,
+        licenseController: _licenseController,
+        privateKeyController: _privateKeyController,
+      ),
+      actions: [
+        TextButton(
+          onPressed: () async => _onCancel(context),
+          child: Text(
+            tr("general.cancel"),
+            style: theme.textTheme.labelLarge?.copyWith(
+              color: theme.colorScheme.error,
+            ),
+          ),
+        ),
+        TextButton(
+          onPressed: () async {
+            final FormState? currentFormKeyState = _formKey.currentState;
+            if (currentFormKeyState != null && currentFormKeyState.validate()) {
+              currentFormKeyState.save();
+
+              final double? price =
+                  double.tryParse(_priceController.text.replaceAll(",", "."));
+              if (price == null) {
+                throw AppException(
+                  exceptionType: ExceptionType.unexpectedNullValue,
+                  exceptionMessage: "Price was null or invalid.",
+                );
+              }
+
+              final String licenseType = _licenseController.text;
+              if (licenseType.isEmpty) {
+                throw AppException(
+                  exceptionType: ExceptionType.unexpectedNullValue,
+                  exceptionMessage: "License type was not selected.",
+                );
+              }
+
+              final String privateKeyType = _privateKeyController.text;
+              if (privateKeyType.isEmpty) {
+                throw AppException(
+                  exceptionType: ExceptionType.unexpectedNullValue,
+                  exceptionMessage: "Private key was empty.",
+                );
+              }
+
+              final bool confirmation =
+                  await _showConfirmOfferDialog(context) ?? false;
+
+              if (confirmation) {
+                await _publishAsset(price, licenseType, privateKeyType);
+                // ignore: use_build_context_synchronously
+                unawaited(Routemaster.of(context).pop());
+              }
+            }
+          },
+          child: Text(tr("assets.upload.offer")),
+        ),
+      ],
+    );
+  }
+
+  Future<void> _publishAsset(
+    double price,
+    String licenseType,
+    String privateKeyType,
+  ) async {
+    final ScaffoldMessengerState scaffoldMessengerState =
+        ScaffoldMessenger.of(context);
+    final String assetId = widget.assetModelId;
+    final NewPublicationDto newPublicationDto = NewPublicationDto(
+      // TODO is this hard coded value ok?
+      "PONTUSXDEV",
+      licenseType,
+      price,
+      privateKeyType,
+    );
+    final NewPublicationModel? result =
+        await _assetProvider.publishAsset(assetId, newPublicationDto);
+    final String message = result != null
+        ? 
tr("assets.details.publishAssetSuccessMessage") + : tr("assets.details.publishAssetErrorMessage"); + UIService.showMessage(message, scaffoldMessengerState); + } + + static Future _showConfirmOfferDialog(BuildContext context) { + final theme = Theme.of(context); + return showDialog( + context: context, + builder: (BuildContext context) { + return AlertDialog( + title: Text(tr("assets.confirmation.title")), + content: Text(tr("assets.confirmation.description")), + actions: [ + TextButton( + onPressed: () => Navigator.pop(context, false), + child: Text( + tr("general.cancel"), + style: theme.textTheme.labelLarge?.copyWith( + color: theme.colorScheme.error, + ), + ), + ), + TextButton( + onPressed: () => Navigator.pop(context, true), + child: Text( + tr("assets.upload.offer"), + ), + ), + ], + ); + }, + ); + } + + Future _onCancel(BuildContext context) async { + await Routemaster.of(context).pop(); + } +} + +// ignore: must_be_immutable +class OfferAssetsDialogForm extends StatefulWidget { + const OfferAssetsDialogForm({ + required this.formKey, + required this.priceController, + required this.licenseController, + required this.privateKeyController, + super.key, + }); + + final GlobalKey formKey; + final TextEditingController priceController; + final TextEditingController licenseController; + final TextEditingController privateKeyController; + + @override + State createState() => _AddCaseDialogFormState(); +} + +class _AddCaseDialogFormState extends State { + @override + Widget build(BuildContext context) { + return Form( + key: widget.formKey, + child: SizedBox( + width: 400, + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + OfferAssetsForm( + priceController: widget.priceController, + licenseController: widget.licenseController, + privateKeyController: widget.privateKeyController, + ), + ], + ), + ), + ); + } +} diff --git a/frontend/app/lib/dtos/asset_definition_dto.dart b/frontend/app/lib/dtos/asset_definition_dto.dart new file mode 100644 index 00000000..aff4458e --- /dev/null +++ b/frontend/app/lib/dtos/asset_definition_dto.dart @@ -0,0 +1,32 @@ +import "package:aw40_hub_frontend/models/asset_definition_model.dart"; +import "package:json_annotation/json_annotation.dart"; + +part "asset_definition_dto.g.dart"; + +@JsonSerializable() +class AssetDefinitionDto { + AssetDefinitionDto( + this.vin, + this.obdDataDtc, + this.timeseriesDataComponent, + ); + + factory AssetDefinitionDto.fromJson(Map json) => + _$AssetDefinitionDtoFromJson(json); + + Map toJson() => _$AssetDefinitionDtoToJson(this); + + AssetDefinitionModel toModel() { + return AssetDefinitionModel( + vin: vin, + obdDataDtc: obdDataDtc, + timeseriesDataComponent: timeseriesDataComponent, + ); + } + + String? vin; + @JsonKey(name: "obd_data_dtc") + String? obdDataDtc; + @JsonKey(name: "timeseries_data_component") + String? 
timeseriesDataComponent; +} diff --git a/frontend/app/lib/dtos/asset_dto.dart b/frontend/app/lib/dtos/asset_dto.dart new file mode 100644 index 00000000..9650f3c5 --- /dev/null +++ b/frontend/app/lib/dtos/asset_dto.dart @@ -0,0 +1,52 @@ +import "package:aw40_hub_frontend/dtos/asset_definition_dto.dart"; +import "package:aw40_hub_frontend/dtos/publication_dto.dart"; +import "package:aw40_hub_frontend/models/asset_model.dart"; +import "package:json_annotation/json_annotation.dart"; + +part "asset_dto.g.dart"; + +@JsonSerializable() +class AssetDto { + AssetDto( + this.id, + this.name, + this.definition, + this.description, + this.timestamp, + this.type, + this.author, + this.dataStatus, + this.publication, + ); + + factory AssetDto.fromJson(Map json) { + return _$AssetDtoFromJson(json); + } + Map toJson() => _$AssetDtoToJson(this); + + AssetModel toModel() { + return AssetModel( + id: id, + name: name, + definition: definition.toModel(), + description: description, + timestamp: timestamp, + type: type, + author: author, + dataStatus: dataStatus, + publication: publication?.toModel(), + ); + } + + @JsonKey(name: "_id") + String? id; + String name; + AssetDefinitionDto definition; + + String description; + DateTime? timestamp; + String? type; + String author; + String? dataStatus; + PublicationDto? publication; +} diff --git a/frontend/app/lib/dtos/assets_update_dto.dart b/frontend/app/lib/dtos/assets_update_dto.dart new file mode 100644 index 00000000..298305a9 --- /dev/null +++ b/frontend/app/lib/dtos/assets_update_dto.dart @@ -0,0 +1,19 @@ +import "package:json_annotation/json_annotation.dart"; + +part "assets_update_dto.g.dart"; + +@JsonSerializable() +class AssetsUpdateDto { + AssetsUpdateDto( + this.tsn, + this.yearBuild, + ); + + factory AssetsUpdateDto.fromJson(Map json) => + _$AssetsUpdateDtoFromJson(json); + Map toJson() => _$AssetsUpdateDtoToJson(this); + + String? tsn; + @JsonKey(name: "year_build") + int? yearBuild; +} diff --git a/frontend/app/lib/dtos/customer_update_dto.dart b/frontend/app/lib/dtos/customer_update_dto.dart index 1441374e..075f6397 100644 --- a/frontend/app/lib/dtos/customer_update_dto.dart +++ b/frontend/app/lib/dtos/customer_update_dto.dart @@ -20,11 +20,14 @@ class CustomerUpdateDto { _$CustomerUpdateDtoFromJson(json); Map toJson() => _$CustomerUpdateDtoToJson(this); + @JsonKey(name: "first_name") String firstname; + @JsonKey(name: "last_name") String lastname; String? email; String? phone; String? street; + @JsonKey(name: "house_number") String? housenumber; String? postcode; String? 
city;
diff --git a/frontend/app/lib/dtos/nautilus_private_key_dto.dart b/frontend/app/lib/dtos/nautilus_private_key_dto.dart
new file mode 100644
index 00000000..98a13ad1
--- /dev/null
+++ b/frontend/app/lib/dtos/nautilus_private_key_dto.dart
@@ -0,0 +1,17 @@
+import "package:json_annotation/json_annotation.dart";
+
+part "nautilus_private_key_dto.g.dart";
+
+@JsonSerializable()
+class NautilusPrivateKeyDto {
+  NautilusPrivateKeyDto(
+    this.nautilusPrivateKey,
+  );
+
+  factory NautilusPrivateKeyDto.fromJson(Map<String, dynamic> json) =>
+      _$NautilusPrivateKeyDtoFromJson(json);
+  Map<String, dynamic> toJson() => _$NautilusPrivateKeyDtoToJson(this);
+
+  @JsonKey(name: "nautilus_private_key")
+  String nautilusPrivateKey;
+}
diff --git a/frontend/app/lib/dtos/new_asset_dto.dart b/frontend/app/lib/dtos/new_asset_dto.dart
new file mode 100644
index 00000000..e5c56888
--- /dev/null
+++ b/frontend/app/lib/dtos/new_asset_dto.dart
@@ -0,0 +1,24 @@
+import "package:aw40_hub_frontend/dtos/asset_definition_dto.dart";
+import "package:json_annotation/json_annotation.dart";
+
+part "new_asset_dto.g.dart";
+
+/// DTO for the POST /dataspace/manage/assets endpoint.
+@JsonSerializable()
+class NewAssetDto {
+  NewAssetDto(
+    this.name,
+    this.definition,
+    this.description,
+    this.author,
+  );
+
+  factory NewAssetDto.fromJson(Map<String, dynamic> json) =>
+      _$NewAssetDtoFromJson(json);
+  Map<String, dynamic> toJson() => _$NewAssetDtoToJson(this);
+
+  String name;
+  AssetDefinitionDto? definition;
+  String description;
+  String author;
+}
diff --git a/frontend/app/lib/dtos/new_publication_dto.dart b/frontend/app/lib/dtos/new_publication_dto.dart
new file mode 100644
index 00000000..6a232f78
--- /dev/null
+++ b/frontend/app/lib/dtos/new_publication_dto.dart
@@ -0,0 +1,34 @@
+import "package:aw40_hub_frontend/models/new_publication_model.dart";
+import "package:json_annotation/json_annotation.dart";
+
+part "new_publication_dto.g.dart";
+
+@JsonSerializable()
+class NewPublicationDto {
+  NewPublicationDto(
+    this.network,
+    this.license,
+    this.price,
+    this.privateKey,
+  );
+
+  factory NewPublicationDto.fromJson(Map<String, dynamic> json) =>
+      _$NewPublicationDtoFromJson(json);
+
+  Map<String, dynamic> toJson() => _$NewPublicationDtoToJson(this);
+
+  NewPublicationModel toModel() {
+    return NewPublicationModel(
+      license: license,
+      network: network,
+      price: price,
+      privateKey: privateKey,
+    );
+  }
+
+  String network;
+  String license;
+  double? price;
+  @JsonKey(name: "nautilus_private_key")
+  String privateKey;
+}
diff --git a/frontend/app/lib/dtos/publication_dto.dart b/frontend/app/lib/dtos/publication_dto.dart
new file mode 100644
index 00000000..860404f0
--- /dev/null
+++ b/frontend/app/lib/dtos/publication_dto.dart
@@ -0,0 +1,37 @@
+import "package:aw40_hub_frontend/models/publication_model.dart";
+import "package:json_annotation/json_annotation.dart";
+
+part "publication_dto.g.dart";
+
+@JsonSerializable()
+class PublicationDto {
+  PublicationDto(
+    this.network,
+    this.license,
+    this.price,
+    this.did,
+    this.assetUrl,
+  );
+
+  factory PublicationDto.fromJson(Map<String, dynamic> json) =>
+      _$PublicationDtoFromJson(json);
+
+  Map<String, dynamic> toJson() => _$PublicationDtoToJson(this);
+
+  PublicationModel toModel() {
+    return PublicationModel(
+      assetUrl: assetUrl,
+      did: did,
+      license: license,
+      network: network,
+      price: price,
+    );
+  }
+
+  String network;
+  String license;
+  double? 
price; + String did; + @JsonKey(name: "asset_url") + String assetUrl; +} diff --git a/frontend/app/lib/forms/offer_assets_form.dart b/frontend/app/lib/forms/offer_assets_form.dart new file mode 100644 index 00000000..1959eb50 --- /dev/null +++ b/frontend/app/lib/forms/offer_assets_form.dart @@ -0,0 +1,87 @@ +import "package:easy_localization/easy_localization.dart"; +import "package:flutter/material.dart"; + +class OfferAssetsForm extends StatelessWidget { + const OfferAssetsForm({ + required this.priceController, + required this.licenseController, + required this.privateKeyController, + super.key, + }); + + final TextEditingController priceController; + final TextEditingController licenseController; + final TextEditingController privateKeyController; + + @override + Widget build(BuildContext context) { + return Column( + children: [ + const SizedBox(height: 16), + SizedBox( + width: 192, + child: TextFormField( + controller: priceController, + autovalidateMode: AutovalidateMode.onUserInteraction, + decoration: InputDecoration( + labelText: tr("assets.upload.price"), + suffixText: "€", + border: const OutlineInputBorder(), + errorStyle: const TextStyle(height: 0.1), + ), + keyboardType: const TextInputType.numberWithOptions(decimal: true), + validator: (value) { + if (value == null || value.isEmpty) { + return tr("general.obligatoryField"); + } + // Validate decimal format (2 decimal places) + const pricePattern = r"^\d+([.,]\d{1,2})?$"; + final regExp = RegExp(pricePattern); + if (!regExp.hasMatch(value)) { + return tr("assets.upload.invalidPriceFormat"); + } + return null; + }, + ), + ), + const SizedBox(height: 16), + SizedBox( + width: 192, + child: TextFormField( + controller: licenseController, + decoration: InputDecoration( + labelText: tr("assets.upload.license"), + border: const OutlineInputBorder(), + errorStyle: const TextStyle(height: 0.1), + ), + validator: (value) { + if (value == null || value.isEmpty) { + return tr("general.obligatoryField"); + } + return null; + }, + ), + ), + const SizedBox(height: 16), + SizedBox( + width: 192, + child: TextFormField( + controller: privateKeyController, + decoration: InputDecoration( + labelText: tr("assets.upload.privateKey"), + border: const OutlineInputBorder(), + errorStyle: const TextStyle(height: 0.1), + ), + obscureText: true, + validator: (value) { + if (value == null || value.isEmpty) { + return tr("general.obligatoryField"); + } + return null; + }, + ), + ), + ], + ); + } +} diff --git a/frontend/app/lib/forms/upload_obd_form.dart b/frontend/app/lib/forms/upload_obd_form.dart index b4c119ab..5aaf3fad 100644 --- a/frontend/app/lib/forms/upload_obd_form.dart +++ b/frontend/app/lib/forms/upload_obd_form.dart @@ -30,7 +30,7 @@ class _UploadObdFormState extends State { decoration: InputDecoration( labelText: tr("forms.obd.label"), hintText: tr("forms.obd.hint"), - suffixText: tr("forms.obd.suffix"), + helperText: tr("forms.obd.helper"), border: const OutlineInputBorder(), ), ), @@ -54,7 +54,8 @@ class _UploadObdFormState extends State { Future _onSubmit() async { final provider = Provider.of(context, listen: false); final messengerState = ScaffoldMessenger.of(context); - final List codes = _controller.text.split("\n"); + final List codes = + _controller.text.split(",").map((code) => code.trim()).toList(); final dto = NewOBDDataDto(null, codes); final bool result = await provider.uploadObdData(widget.caseId, dto); final String snackBarText = result diff --git a/frontend/app/lib/forms/upload_picoscope_form.dart 
b/frontend/app/lib/forms/upload_picoscope_form.dart index d6f57f41..57177844 100644 --- a/frontend/app/lib/forms/upload_picoscope_form.dart +++ b/frontend/app/lib/forms/upload_picoscope_form.dart @@ -23,12 +23,15 @@ class _UploadPicoscopeFormState extends State { final TextEditingController _componentAController = TextEditingController(); final TextEditingController _componentBController = TextEditingController(); final TextEditingController _componentCController = TextEditingController(); + final TextEditingController _componentDController = TextEditingController(); final TextEditingController _labelAController = TextEditingController(); final TextEditingController _labelBController = TextEditingController(); final TextEditingController _labelCController = TextEditingController(); + final TextEditingController _labelDController = TextEditingController(); PicoscopeLabel? selectedLabelA; PicoscopeLabel? selectedLabelB; PicoscopeLabel? selectedLabelC; + PicoscopeLabel? selectedLabelD; final _formKey = GlobalKey(); @override @@ -164,6 +167,46 @@ class _UploadPicoscopeFormState extends State { ), ], ), + const SizedBox(height: 16), + Row( + children: [ + Expanded( + flex: 2, + child: TextFormField( + autovalidateMode: AutovalidateMode.onUserInteraction, + controller: _componentDController, + decoration: InputDecoration( + labelText: tr("forms.picoscope.component.labelD"), + hintText: tr("forms.picoscope.component.hint"), + border: const OutlineInputBorder(), + suffixText: tr("forms.optional"), + ), + ), + ), + const SizedBox(width: 16), + Expanded( + child: DropdownMenu( + controller: _labelDController, + label: Text(tr("forms.picoscope.component.label")), + hintText: tr("forms.optional"), + onSelected: (PicoscopeLabel? picoscopeLabel) { + setState(() { + selectedLabelD = picoscopeLabel; + }); + }, + dropdownMenuEntries: PicoscopeLabel.values + .map>( + (PicoscopeLabel timeseriesDataLabel) { + return DropdownMenuEntry( + value: timeseriesDataLabel, + label: timeseriesDataLabel.name, + ); + }, + ).toList(), + ), + ), + ], + ), ], ), onSubmit: _onSubmit, @@ -192,13 +235,15 @@ class _UploadPicoscopeFormState extends State { final String componentA = _componentAController.text; final String componentB = _componentBController.text; final String componentC = _componentCController.text; - //if (_labelAController.text.isEmpty) return; + final String componentD = _componentDController.text; final PicoscopeLabel? labelA = EnumToString.fromString(PicoscopeLabel.values, _labelAController.text); final PicoscopeLabel? labelB = EnumToString.fromString(PicoscopeLabel.values, _labelBController.text); final PicoscopeLabel? labelC = EnumToString.fromString(PicoscopeLabel.values, _labelCController.text); + final PicoscopeLabel? labelD = + EnumToString.fromString(PicoscopeLabel.values, _labelDController.text); final bool result = await provider.uploadPicoscopeData( widget.caseId, @@ -207,9 +252,11 @@ class _UploadPicoscopeFormState extends State { componentA, componentB, componentC, + componentD, labelA, labelB, labelC, + labelD, ); final String snackBarText = result diff --git a/frontend/app/lib/models/asset_definition_model.dart b/frontend/app/lib/models/asset_definition_model.dart new file mode 100644 index 00000000..6d7422ca --- /dev/null +++ b/frontend/app/lib/models/asset_definition_model.dart @@ -0,0 +1,23 @@ +class AssetDefinitionModel { + AssetDefinitionModel({ + required this.vin, + required this.obdDataDtc, + required this.timeseriesDataComponent, + }); + + String? vin; + String? obdDataDtc; + String? 
timeseriesDataComponent; + + Map toJsonWithoutNullValues() { + final Map data = {}; + + if (vin != null) data["vin"] = vin; + if (obdDataDtc != null) data["obdDataDtc"] = obdDataDtc; + if (timeseriesDataComponent != null) { + data["timeseriesDataComponent"] = timeseriesDataComponent; + } + + return data; + } +} diff --git a/frontend/app/lib/models/asset_model.dart b/frontend/app/lib/models/asset_model.dart new file mode 100644 index 00000000..fdf3ab0f --- /dev/null +++ b/frontend/app/lib/models/asset_model.dart @@ -0,0 +1,27 @@ +import "package:aw40_hub_frontend/models/asset_definition_model.dart"; +import "package:aw40_hub_frontend/models/publication_model.dart"; + +class AssetModel { + AssetModel({ + required this.id, + required this.name, + required this.definition, + required this.description, + required this.timestamp, + required this.type, + required this.author, + required this.dataStatus, + required this.publication, + }); + + String? id; // TODO make non-nullable? + String name; + AssetDefinitionModel definition; + + String description; + DateTime? timestamp; + String? type; + String author; + String? dataStatus; + PublicationModel? publication; +} diff --git a/frontend/app/lib/models/data_model.dart b/frontend/app/lib/models/data_model.dart new file mode 100644 index 00000000..8fec9890 --- /dev/null +++ b/frontend/app/lib/models/data_model.dart @@ -0,0 +1,9 @@ +abstract class DataModel { + DataModel({ + required this.timestamp, + required this.dataId, + }); + + DateTime? timestamp; + int? dataId; +} diff --git a/frontend/app/lib/models/new_publication_model.dart b/frontend/app/lib/models/new_publication_model.dart new file mode 100644 index 00000000..2b8302b1 --- /dev/null +++ b/frontend/app/lib/models/new_publication_model.dart @@ -0,0 +1,13 @@ +class NewPublicationModel { + NewPublicationModel({ + required this.network, + required this.license, + required this.price, + required this.privateKey, + }); + + String network; + String license; + double? price; + String privateKey; +} diff --git a/frontend/app/lib/models/obd_data_model.dart b/frontend/app/lib/models/obd_data_model.dart index 178b030e..afd421c2 100644 --- a/frontend/app/lib/models/obd_data_model.dart +++ b/frontend/app/lib/models/obd_data_model.dart @@ -1,13 +1,13 @@ -class ObdDataModel { +import "package:aw40_hub_frontend/models/data_model.dart"; + +class ObdDataModel extends DataModel { ObdDataModel({ - required this.timestamp, + required super.timestamp, required this.obdSpecs, required this.dtcs, - required this.dataId, + required super.dataId, }); - DateTime? timestamp; - dynamic obdSpecs; - List dtcs; - int? dataId; + final dynamic obdSpecs; + final List dtcs; } diff --git a/frontend/app/lib/models/publication_model.dart b/frontend/app/lib/models/publication_model.dart new file mode 100644 index 00000000..b66c6daa --- /dev/null +++ b/frontend/app/lib/models/publication_model.dart @@ -0,0 +1,15 @@ +class PublicationModel { + PublicationModel({ + required this.network, + required this.license, + required this.price, + required this.did, + required this.assetUrl, + }); + + String network; + String license; + double? 
price; + String did; + String assetUrl; +} diff --git a/frontend/app/lib/models/symptom_model.dart b/frontend/app/lib/models/symptom_model.dart index 2316b762..826681a4 100644 --- a/frontend/app/lib/models/symptom_model.dart +++ b/frontend/app/lib/models/symptom_model.dart @@ -1,15 +1,14 @@ +import "package:aw40_hub_frontend/models/data_model.dart"; import "package:aw40_hub_frontend/utils/enums.dart"; -class SymptomModel { +class SymptomModel extends DataModel { SymptomModel({ - required this.timestamp, + required super.timestamp, required this.component, required this.label, - required this.dataId, + required super.dataId, }); - DateTime? timestamp; - String component; - SymptomLabel label; - int? dataId; + final String component; + final SymptomLabel label; } diff --git a/frontend/app/lib/models/timeseries_data_model.dart b/frontend/app/lib/models/timeseries_data_model.dart index 445486dd..0202049f 100644 --- a/frontend/app/lib/models/timeseries_data_model.dart +++ b/frontend/app/lib/models/timeseries_data_model.dart @@ -1,25 +1,24 @@ +import "package:aw40_hub_frontend/models/data_model.dart"; import "package:aw40_hub_frontend/utils/enums.dart"; -class TimeseriesDataModel { +class TimeseriesDataModel extends DataModel { TimeseriesDataModel({ - required this.timestamp, + required super.timestamp, required this.component, required this.label, required this.samplingRate, required this.duration, required this.type, required this.deviceSpecs, - required this.dataId, + required super.dataId, required this.signalId, }); - DateTime? timestamp; - String component; - TimeseriesDataLabel label; - int samplingRate; - int duration; - TimeseriesType? type; - dynamic deviceSpecs; - int? dataId; - String signalId; + final String component; + final TimeseriesDataLabel label; + final int samplingRate; + final int duration; + final TimeseriesType? type; + final dynamic deviceSpecs; + final String signalId; } diff --git a/frontend/app/lib/providers/asset_provider.dart b/frontend/app/lib/providers/asset_provider.dart new file mode 100644 index 00000000..67b95fd7 --- /dev/null +++ b/frontend/app/lib/providers/asset_provider.dart @@ -0,0 +1,137 @@ +import "dart:convert"; + +import "package:aw40_hub_frontend/dtos/asset_dto.dart"; +import "package:aw40_hub_frontend/dtos/nautilus_private_key_dto.dart"; +import "package:aw40_hub_frontend/dtos/new_asset_dto.dart"; +import "package:aw40_hub_frontend/dtos/new_publication_dto.dart"; +import "package:aw40_hub_frontend/exceptions/app_exception.dart"; +import "package:aw40_hub_frontend/models/asset_model.dart"; +import "package:aw40_hub_frontend/models/new_publication_model.dart"; +import "package:aw40_hub_frontend/providers/auth_provider.dart"; +import "package:aw40_hub_frontend/services/helper_service.dart"; +import "package:aw40_hub_frontend/services/http_service.dart"; +import "package:aw40_hub_frontend/utils/enums.dart"; +import "package:flutter/foundation.dart"; +import "package:http/http.dart"; +import "package:logging/logging.dart"; + +class AssetProvider with ChangeNotifier { + AssetProvider(this._httpService); + + final HttpService _httpService; + + final Logger _logger = Logger("asset_provider"); + + String? _authToken; + + Future> getAssets() async { + final String authToken = _getAuthToken(); + final Response response = await _httpService.getAssets( + authToken, + ); + if (response.statusCode != 200) { + _logger.warning( + "Could not get assets. 
" + "${response.statusCode}: ${response.reasonPhrase}", + ); + return []; + } + final json = jsonDecode(response.body); + if (json is! List) { + _logger.warning("Could not decode json response to List."); + return []; + } + return json.map((e) => AssetDto.fromJson(e).toModel()).toList(); + } + + Future createAsset(NewAssetDto newAssetDto) async { + final String authToken = _getAuthToken(); + final Map newAssetJson = newAssetDto.toJson(); + final Response response = + await _httpService.createAsset(authToken, newAssetJson); + final bool verifyStatusCode = HelperService.verifyStatusCode( + response.statusCode, + 201, + "Could not create asset. ", + response, + _logger, + ); + if (!verifyStatusCode) return null; + notifyListeners(); + return _decodeAssetModelFromResponseBody(response); + } + + Future publishAsset( + String assetId, + NewPublicationDto newPublicationDto, + ) async { + final String authToken = _getAuthToken(); + final Map newPublicationJson = newPublicationDto.toJson(); + final Response response = + await _httpService.publishAsset(authToken, assetId, newPublicationJson); + final bool verifyStatusCode = HelperService.verifyStatusCode( + response.statusCode, + 201, + "Could not publish asset. ", + response, + _logger, + ); + if (!verifyStatusCode) return null; + notifyListeners(); + return _decodeNewPublicationModelFromResponseBody(response); + } + + Future deleteAsset( + String assetId, + NautilusPrivateKeyDto nautilusPrivateKeyDto, + ) async { + final String authToken = _getAuthToken(); + final Response response = await _httpService.deleteAsset( + authToken, + assetId, + nautilusPrivateKeyDto.toJson(), + ); + final bool verifyStatusCode = HelperService.verifyStatusCode( + response.statusCode, + 200, + "Could not delete asset. ", + response, + _logger, + ); + if (!verifyStatusCode) return false; + notifyListeners(); + return true; + } + + NewPublicationModel _decodeNewPublicationModelFromResponseBody( + Response response, + ) { + final Map body = jsonDecode(response.body); + final NewPublicationDto receivedNewPublication = NewPublicationDto.fromJson( + body, + ); + return receivedNewPublication.toModel(); + } + + AssetModel _decodeAssetModelFromResponseBody(Response response) { + final Map body = jsonDecode(response.body); + final AssetDto assetDto = AssetDto.fromJson(body); + return assetDto.toModel(); + } + + Future fetchAndSetAuthToken(AuthProvider authProvider) async { + _authToken = await authProvider.getAuthToken(); + notifyListeners(); + } + + String _getAuthToken() { + final String? 
authToken = _authToken; + if (authToken == null) { + throw AppException( + exceptionMessage: "Called AssetsProvider without auth token.", + exceptionType: ExceptionType.unexpectedNullValue, + ); + } + return authToken; + } +} diff --git a/frontend/app/lib/providers/case_provider.dart b/frontend/app/lib/providers/case_provider.dart index 6903021c..0dc449d5 100644 --- a/frontend/app/lib/providers/case_provider.dart +++ b/frontend/app/lib/providers/case_provider.dart @@ -10,6 +10,7 @@ import "package:aw40_hub_frontend/providers/auth_provider.dart"; import "package:aw40_hub_frontend/services/helper_service.dart"; import "package:aw40_hub_frontend/services/http_service.dart"; import "package:aw40_hub_frontend/utils/enums.dart"; +import "package:aw40_hub_frontend/utils/filter_criteria.dart"; import "package:flutter/material.dart"; import "package:http/http.dart"; import "package:logging/logging.dart"; @@ -17,15 +18,42 @@ import "package:logging/logging.dart"; class CaseProvider with ChangeNotifier { CaseProvider(this._httpService); final HttpService _httpService; - final Logger _logger = Logger("case_provider"); + + String? _authToken; late String workshopId; + bool notifiedListenersAfterGettingEmptyCurrentCases = false; + bool _showSharedCases = true; bool get showSharedCases => _showSharedCases; - String? _authToken; + + ValueNotifier selectedCaseIndexNotifier = ValueNotifier(null); + + FilterCriteria? _filterCriteria; + FilterCriteria? get filterCriteria => _filterCriteria; + + void setFilterCriteria(FilterCriteria criteria) { + _filterCriteria = criteria; + notifyListeners(); + } + + void resetFilterCriteria() { + _filterCriteria = null; + notifiedListenersAfterGettingEmptyCurrentCases = false; + notifyListeners(); + } + + void resetSelectedcaseIndexNotifier() { + selectedCaseIndexNotifier.value = null; + } + + bool isFilterActive() { + return filterCriteria != null; + } Future toggleShowSharedCases() async { _showSharedCases = !_showSharedCases; + notifiedListenersAfterGettingEmptyCurrentCases = false; await getCurrentCases(); notifyListeners(); } @@ -35,9 +63,16 @@ class CaseProvider with ChangeNotifier { // * Return value currently not used. final Response response; if (_showSharedCases) { - response = await _httpService.getSharedCases(authToken); + response = await _httpService.getSharedCases( + authToken, + filterCriteria: filterCriteria, + ); } else { - response = await _httpService.getCases(authToken, workshopId); + response = await _httpService.getCases( + authToken, + workshopId, + filterCriteria: filterCriteria, + ); } final bool verifyStatusCode = HelperService.verifyStatusCode( response.statusCode, @@ -47,8 +82,18 @@ class CaseProvider with ChangeNotifier { response, _logger, ); - if (!verifyStatusCode) return []; - return _jsonBodyToCaseModelList(response.body); + late List result; + if (!verifyStatusCode) { + result = []; + } else { + result = _jsonBodyToCaseModelList(response.body); + } + + if (result.isEmpty) { + notifiedListenersAfterGettingEmptyCurrentCases = true; + notifyListeners(); + } + return result; } Future> getCasesByVehicleVin(String vehicleVin) async { @@ -148,13 +193,6 @@ class CaseProvider with ChangeNotifier { _logger.warning("Unimplemented: sortCases()"); } - Future filterCases() async { - // Klasse FilterCriteria mit Feld für jedes Filterkriterium. - // Aktuelle Filter werden durch Zustand einer FilterCriteria Instanz - // definiert. 
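+  // Filtering is now driven by a FilterCriteria instance: setFilterCriteria()
+  // and resetFilterCriteria() above store the active criteria, and
+  // getCurrentCases() forwards them to HttpService as query parameters.
+  // FilterCriteria itself lives in utils/filter_criteria.dart and is not part
+  // of this hunk; judging from how it is constructed and read in this patch,
+  // it is presumably a plain value holder along these lines (illustrative
+  // sketch only):
+  //
+  //   class FilterCriteria {
+  //     FilterCriteria({this.vin, this.obdDataDtc, this.timeseriesDataComponent});
+  //     final String? vin;
+  //     final String? obdDataDtc;
+  //     final String? timeseriesDataComponent;
+  //   }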
- _logger.warning("Unimplemented: filterCases()"); - } - Future uploadObdData(String caseId, NewOBDDataDto obdDataDto) async { final String authToken = _getAuthToken(); final Map obdDataJson = obdDataDto.toJson(); @@ -172,6 +210,7 @@ class CaseProvider with ChangeNotifier { _logger, ); if (!verifyStatusCode) return false; + notifyListeners(); return true; } @@ -188,13 +227,16 @@ class CaseProvider with ChangeNotifier { vcdsData, filename, ); - return HelperService.verifyStatusCode( + final bool verifyStatusCode = HelperService.verifyStatusCode( response.statusCode, 201, "Could not upload vcds data. ", response, _logger, ); + if (!verifyStatusCode) return false; + notifyListeners(); + return true; } Future uploadTimeseriesData( @@ -235,9 +277,11 @@ class CaseProvider with ChangeNotifier { String? componentA, String? componentB, String? componentC, + String? componentD, PicoscopeLabel? labelA, PicoscopeLabel? labelB, PicoscopeLabel? labelC, + PicoscopeLabel? labelD, ) async { final String authToken = _getAuthToken(); final Response response = await _httpService.uploadPicoscopeData( @@ -249,9 +293,11 @@ class CaseProvider with ChangeNotifier { componentA: componentA, componentB: componentB, componentC: componentC, + componentD: componentD, labelA: labelA, labelB: labelB, labelC: labelC, + labelD: labelD, ); final bool verifyStatusCode = HelperService.verifyStatusCode( response.statusCode, @@ -321,6 +367,78 @@ class CaseProvider with ChangeNotifier { return true; } + Future deleteObdData( + int? dataId, + String workshopId, + String caseId, + ) async { + final String authToken = _getAuthToken(); + final Response response = await _httpService.deleteObdData( + authToken, + dataId, + workshopId, + caseId, + ); + final bool verifyStatusCode = HelperService.verifyStatusCode( + response.statusCode, + 200, + "Could not delete OBD data", + response, + _logger, + ); + if (!verifyStatusCode) return false; + notifyListeners(); + return true; + } + + Future deleteTimeseriesData( + int? dataId, + String workshopId, + String caseId, + ) async { + final String authToken = _getAuthToken(); + final Response response = await _httpService.deleteTimeseriesData( + authToken, + dataId, + workshopId, + caseId, + ); + final bool verifyStatusCode = HelperService.verifyStatusCode( + response.statusCode, + 200, + "Could not delete timeseries data", + response, + _logger, + ); + if (!verifyStatusCode) return false; + notifyListeners(); + return true; + } + + Future deleteSymptomData( + int? 
dataId, + String workshopId, + String caseId, + ) async { + final String authToken = _getAuthToken(); + final Response response = await _httpService.deleteSymptomData( + authToken, + dataId, + workshopId, + caseId, + ); + final bool verifyStatusCode = HelperService.verifyStatusCode( + response.statusCode, + 200, + "Could not delete symptom data", + response, + _logger, + ); + if (!verifyStatusCode) return false; + notifyListeners(); + return true; + } + Future fetchAndSetAuthToken(AuthProvider authProvider) async { _authToken = await authProvider.getAuthToken(); notifyListeners(); diff --git a/frontend/app/lib/providers/knowledge_provider.dart b/frontend/app/lib/providers/knowledge_provider.dart new file mode 100644 index 00000000..25263b3a --- /dev/null +++ b/frontend/app/lib/providers/knowledge_provider.dart @@ -0,0 +1,55 @@ +import "dart:convert"; + +import "package:aw40_hub_frontend/exceptions/app_exception.dart"; +import "package:aw40_hub_frontend/providers/auth_provider.dart"; +import "package:aw40_hub_frontend/services/http_service.dart"; +import "package:aw40_hub_frontend/utils/enums.dart"; +import "package:flutter/foundation.dart"; +import "package:http/http.dart"; +import "package:logging/logging.dart"; + +class KnowledgeProvider with ChangeNotifier { + KnowledgeProvider(this._httpService); + + final HttpService _httpService; + + final Logger _logger = Logger("knowledge_provider"); + + String? _authToken; + + Future fetchAndSetAuthToken(AuthProvider authProvider) async { + _authToken = await authProvider.getAuthToken(); + notifyListeners(); + } + + String _getAuthToken() { + final String? authToken = _authToken; + if (authToken == null) { + throw AppException( + exceptionMessage: "Called KnowledgeProvider without auth token.", + exceptionType: ExceptionType.unexpectedNullValue, + ); + } + return authToken; + } + + Future> getVehicleComponents() async { + final String authToken = _getAuthToken(); + final Response response = await _httpService.getVehicleComponents( + authToken, + ); + if (response.statusCode != 200) { + _logger.warning( + "Could not get vehicle components. " + "${response.statusCode}: ${response.reasonPhrase}", + ); + return []; + } + final json = jsonDecode(response.body); + if (json is! 
List) { + _logger.warning("Could not decode json response to List."); + return []; + } + return json.map((e) => e.toString()).toList(); + } +} diff --git a/frontend/app/lib/routing/router.dart b/frontend/app/lib/routing/router.dart index 721bfb32..9693f699 100644 --- a/frontend/app/lib/routing/router.dart +++ b/frontend/app/lib/routing/router.dart @@ -1,5 +1,6 @@ import "package:aw40_hub_frontend/providers/auth_provider.dart"; import "package:aw40_hub_frontend/scaffolds/scaffold_wrapper.dart"; +import "package:aw40_hub_frontend/screens/assets_screen.dart"; import "package:aw40_hub_frontend/screens/cases_screen.dart"; import "package:aw40_hub_frontend/screens/customers_screen.dart"; import "package:aw40_hub_frontend/screens/diagnoses_screen.dart"; @@ -109,6 +110,14 @@ Map _basicRoutes = { ), ); }, + kRouteAssets: (RouteData info) { + return const MaterialPage( + child: ScaffoldWrapper( + currentIndex: 4, + child: AssetsScreen(), + ), + ); + }, }; Map _mechanicsRoutes = {}; diff --git a/frontend/app/lib/scaffolds/scaffold_wrapper.dart b/frontend/app/lib/scaffolds/scaffold_wrapper.dart index 8c71a165..92dd06b4 100644 --- a/frontend/app/lib/scaffolds/scaffold_wrapper.dart +++ b/frontend/app/lib/scaffolds/scaffold_wrapper.dart @@ -1,10 +1,13 @@ import "dart:async"; import "package:aw40_hub_frontend/dialogs/add_case_dialog.dart"; +import "package:aw40_hub_frontend/dialogs/create_asset_dialog.dart"; import "package:aw40_hub_frontend/dialogs/filter_cases_dialog.dart"; +import "package:aw40_hub_frontend/dtos/new_asset_dto.dart"; import "package:aw40_hub_frontend/dtos/new_case_dto.dart"; import "package:aw40_hub_frontend/models/logged_in_user_model.dart"; import "package:aw40_hub_frontend/models/navigation_item_model.dart"; +import "package:aw40_hub_frontend/providers/asset_provider.dart"; import "package:aw40_hub_frontend/providers/auth_provider.dart"; import "package:aw40_hub_frontend/providers/case_provider.dart"; import "package:aw40_hub_frontend/scaffolds/desktop_scaffold.dart"; @@ -33,6 +36,40 @@ class _ScaffoldWrapperState extends State { // ignore: unused_field final Logger _logger = Logger("scaffold_wrapper"); int currentIndex = 0; + bool _switchState = true; + bool _isFilterActive = false; + + @override + Widget build(BuildContext context) { + final caseProvider = Provider.of(context); + _switchState = caseProvider.showSharedCases; + _isFilterActive = caseProvider.isFilterActive(); + + final LoggedInUserModel loggedInUserModel = + Provider.of(context).loggedInUser; + if (widget.currentIndex != null) currentIndex = widget.currentIndex!; + + final List navigationItemModels = + _getMenuItemModels(); + + return DesktopScaffold( + navItems: navigationItemModels, + currentIndex: currentIndex, + onNavItemTap: onItemTap, + loggedInUserModel: loggedInUserModel, + child: widget.child, + ); + } + + Future _showCreateAssetDialog() async { + final NewAssetDto? newCase = await showDialog( + context: context, + builder: (BuildContext context) { + return CreateAssetDialog(); + }, + ); + return newCase; + } Future _showAddCaseDialog() async { final NewCaseDto? 
newCase = await showDialog( @@ -52,7 +89,7 @@ class _ScaffoldWrapperState extends State { Future _showFilterCasesDialog() async { await showDialog( context: context, - builder: (BuildContext context) => const FilterCasesDialog(), + builder: (BuildContext context) => FilterCasesDialog(), ); } @@ -83,31 +120,88 @@ class _ScaffoldWrapperState extends State { List _getMenuItemModels() { final caseProvider = Provider.of(context, listen: false); + final assetProvider = Provider.of(context, listen: false); final List navigationItemModels = [ NavigationMenuItemModel( title: tr("cases.title"), icon: const Icon(Icons.cases_sharp), destination: kRouteCases, actions: [ - IconButton( - onPressed: () async { - final NewCaseDto? newCase = await _showAddCaseDialog(); - if (newCase == null) return; - await caseProvider.addCase(newCase); - }, - icon: const Icon(Icons.add), - tooltip: tr("cases.actions.addCase"), + Padding( + padding: const EdgeInsets.only(right: 8), + child: Transform.scale( + scale: 0.75, + child: Tooltip( + message: tr("cases.filterDialog.toggleShared"), + child: Switch( + value: _switchState, + onChanged: (v) async { + setState(() { + _switchState = v; + }); + await Provider.of( + context, + listen: false, + ).toggleShowSharedCases(); + }, + ), + ), + ), ), - IconButton( - onPressed: () {}, - icon: const Icon(Icons.sort), - tooltip: tr("cases.actions.sortCases"), + Padding( + padding: const EdgeInsets.only(right: 8), + child: IconButton( + onPressed: _isFilterActive && + !caseProvider + .notifiedListenersAfterGettingEmptyCurrentCases + ? () async { + final NewAssetDto? newAsset = + await _showCreateAssetDialog(); + if (newAsset == null) return; + await assetProvider.createAsset(newAsset); + } + : null, + icon: const Icon(Icons.create_new_folder), + tooltip: tr("cases.actions.createAsset"), + ), ), - IconButton( - onPressed: () async => _showFilterCasesDialog(), - icon: const Icon(Icons.filter_list), - tooltip: tr("cases.actions.filterCases"), + Padding( + padding: const EdgeInsets.only(right: 8), + child: IconButton( + onPressed: () async { + final NewCaseDto? newCase = await _showAddCaseDialog(); + if (newCase == null) return; + await caseProvider.addCase(newCase); + }, + icon: const Icon(Icons.add_circle), + tooltip: tr("cases.actions.addCase"), + ), + ), + Padding( + padding: const EdgeInsets.only(right: 8), + child: IconButton( + onPressed: () {}, + icon: const Icon(Icons.sort), + tooltip: tr("cases.actions.sortCases"), + ), ), + Padding( + padding: const EdgeInsets.only(right: 8), + child: DecoratedBox( + decoration: BoxDecoration( + color: _isFilterActive + ? Colors.blue.withOpacity(0.2) + : Colors.transparent, + shape: BoxShape.circle, + ), + child: IconButton( + onPressed: () async => _showFilterCasesDialog(), + icon: const Icon(Icons.filter_list), + color: _isFilterActive ? 
Colors.blue : null, + tooltip: tr("cases.actions.filterCases"), + ), + ), + ) ], ), NavigationMenuItemModel( @@ -137,6 +231,11 @@ class _ScaffoldWrapperState extends State { icon: const Icon(Icons.car_repair), destination: kRouteVecicles, ), + NavigationMenuItemModel( + title: tr("assets.title"), + icon: const Icon(Icons.storage), + destination: kRouteAssets, + ), NavigationMenuItemModel( title: tr("training.title"), icon: const Icon(Icons.school), @@ -149,22 +248,4 @@ class _ScaffoldWrapperState extends State { ]; return navigationItemModels; } - - @override - Widget build(BuildContext context) { - final LoggedInUserModel loggedInUserModel = - Provider.of(context).loggedInUser; - if (widget.currentIndex != null) currentIndex = widget.currentIndex!; - - final List navigationItemModels = - _getMenuItemModels(); - - return DesktopScaffold( - navItems: navigationItemModels, - currentIndex: currentIndex, - onNavItemTap: onItemTap, - loggedInUserModel: loggedInUserModel, - child: widget.child, - ); - } } diff --git a/frontend/app/lib/screens/assets_screen.dart b/frontend/app/lib/screens/assets_screen.dart new file mode 100644 index 00000000..19e8ff58 --- /dev/null +++ b/frontend/app/lib/screens/assets_screen.dart @@ -0,0 +1,9 @@ +import "package:aw40_hub_frontend/views/assets_view.dart"; +import "package:flutter/material.dart"; + +class AssetsScreen extends StatelessWidget { + const AssetsScreen({super.key}); + + @override + Widget build(BuildContext context) => const AssetsView(); +} diff --git a/frontend/app/lib/screens/login_screen.dart b/frontend/app/lib/screens/login_screen.dart index a3cdc939..b9fcf76a 100644 --- a/frontend/app/lib/screens/login_screen.dart +++ b/frontend/app/lib/screens/login_screen.dart @@ -6,6 +6,7 @@ import "package:aw40_hub_frontend/providers/auth_provider.dart"; import "package:aw40_hub_frontend/providers/case_provider.dart"; import "package:aw40_hub_frontend/providers/customer_provider.dart"; import "package:aw40_hub_frontend/providers/diagnosis_provider.dart"; +import "package:aw40_hub_frontend/providers/knowledge_provider.dart"; import "package:aw40_hub_frontend/providers/vehicle_provider.dart"; import "package:aw40_hub_frontend/services/auth_service.dart"; import "package:aw40_hub_frontend/services/config_service.dart"; @@ -118,6 +119,14 @@ class _LoginScreenState extends State { final CustomerProvider customerProvider = Provider.of(context, listen: false); + //final AssetProvider assetProvider = + //Provider.of(context, listen: false); + + // apparently this has an effect despite the variable not being used here + // ignore: unused_local_variable + final KnowledgeProvider knowledgeProvider = + Provider.of(context, listen: false); + await authProvider.tryLoginWithStoredRefreshToken(); if (!authProvider.isLoggedIn()) { diff --git a/frontend/app/lib/services/http_service.dart b/frontend/app/lib/services/http_service.dart index 582bc229..add616b2 100644 --- a/frontend/app/lib/services/http_service.dart +++ b/frontend/app/lib/services/http_service.dart @@ -3,6 +3,7 @@ import "dart:convert"; import "package:aw40_hub_frontend/dtos/new_symptom_dto.dart"; import "package:aw40_hub_frontend/services/config_service.dart"; import "package:aw40_hub_frontend/utils/enums.dart"; +import "package:aw40_hub_frontend/utils/filter_criteria.dart"; import "package:aw40_hub_frontend/utils/token_refreshing_http_client_interceptor.dart"; import "package:collection/collection.dart"; import "package:enum_to_string/enum_to_string.dart"; @@ -38,16 +39,43 @@ class HttpService { return 
_client.get(Uri.parse("$backendUrl/health/ping")); } - Future getSharedCases(String token) { + Future getSharedCases( + String token, { + FilterCriteria? filterCriteria, + }) { + final uri = Uri.parse("$backendUrl/shared/cases").replace( + queryParameters: { + if (filterCriteria?.vin != null) "vin": filterCriteria?.vin, + if (filterCriteria?.obdDataDtc != null) + "obd_data_dtc": filterCriteria?.obdDataDtc, + if (filterCriteria?.timeseriesDataComponent != null) + "timeseries_data_component": filterCriteria?.timeseriesDataComponent, + }, + ); + return _client.get( - Uri.parse("$backendUrl/shared/cases"), + uri, headers: getAuthHeaderWith(token), ); } - Future getCases(String token, String workshopId) { + Future getCases( + String token, + String workshopId, { + FilterCriteria? filterCriteria, + }) { + final uri = Uri.parse("$backendUrl/$workshopId/cases").replace( + queryParameters: { + if (filterCriteria?.vin != null) "vin": filterCriteria?.vin, + if (filterCriteria?.obdDataDtc != null) + "obd_data_dtc": filterCriteria?.obdDataDtc, + if (filterCriteria?.timeseriesDataComponent != null) + "timeseries_data_component": filterCriteria?.timeseriesDataComponent, + }, + ); + return _client.get( - Uri.parse("$backendUrl/$workshopId/cases"), + uri, headers: getAuthHeaderWith(token), ); } @@ -109,6 +137,42 @@ class HttpService { ); } + Future deleteObdData( + String token, + int? dataId, + String workshopId, + String caseId, + ) { + return _client.delete( + Uri.parse("$backendUrl/$workshopId/cases/$caseId/obd_data/$dataId"), + headers: getAuthHeaderWith(token), + ); + } + + Future deleteTimeseriesData( + String token, + int? dataId, + String workshopId, + String caseId, + ) { + return _client.delete( + Uri.parse("$backendUrl/$workshopId/cases/$caseId/timeseries/$dataId"), + headers: getAuthHeaderWith(token), + ); + } + + Future deleteSymptomData( + String token, + int? 
dataId, + String workshopId, + String caseId, + ) { + return _client.delete( + Uri.parse("$backendUrl/$workshopId/cases/$caseId/symptoms/$dataId"), + headers: getAuthHeaderWith(token), + ); + } + Future getDiagnoses( String token, String workshopId, @@ -426,4 +490,63 @@ class HttpService { body: jsonEncode(requestBody), ); } + + Future getAssets( + String token, + ) { + return _client.get( + Uri.parse("$backendUrl/dataspace/manage/assets"), + headers: getAuthHeaderWith(token), + ); + } + + Future createAsset( + String token, + Map requestBody, + ) { + return _client.post( + Uri.parse("$backendUrl/dataspace/manage/assets"), + headers: getAuthHeaderWith(token, { + "Content-Type": "application/json; charset=UTF-8", + }), + body: jsonEncode(requestBody), + ); + } + + Future publishAsset( + String token, + String assetId, + Map requestBody, + ) { + return _client.post( + Uri.parse("$backendUrl/dataspace/manage/assets/$assetId/publication"), + headers: getAuthHeaderWith(token, { + "Content-Type": "application/json; charset=UTF-8", + }), + body: jsonEncode(requestBody), + ); + } + + Future deleteAsset( + String token, + String assetId, + Map requestBody, + ) { + return _client.delete( + Uri.parse("$backendUrl/dataspace/manage/assets/$assetId"), + headers: getAuthHeaderWith(token, { + "Content-Type": "application/json; charset=UTF-8", + }), + body: jsonEncode(requestBody), + ); + } + + Future getVehicleComponents( + String token, + ) { + return _client.get( + Uri.parse("$backendUrl/knowledge/components"), + headers: getAuthHeaderWith(token), + ); + } } diff --git a/frontend/app/lib/services/mock_http_service.dart b/frontend/app/lib/services/mock_http_service.dart index b40898ce..3a897b5f 100644 --- a/frontend/app/lib/services/mock_http_service.dart +++ b/frontend/app/lib/services/mock_http_service.dart @@ -17,6 +17,7 @@ import "package:aw40_hub_frontend/dtos/vehicle_dto.dart"; import "package:aw40_hub_frontend/services/helper_service.dart"; import "package:aw40_hub_frontend/services/http_service.dart"; import "package:aw40_hub_frontend/utils/enums.dart"; +import "package:aw40_hub_frontend/utils/filter_criteria.dart"; import "package:http/http.dart" show Response; import "package:logging/logging.dart"; @@ -873,7 +874,11 @@ class MockHttpService implements HttpService { } @override - Future getCases(String token, String workshopId) { + Future getCases( + String token, + String workshopId, { + FilterCriteria? filterCriteria, + }) { _demoCaseDto.workshopId = workshopId; for (final c in _caseDtos) { c.workshopId = workshopId; @@ -944,7 +949,10 @@ class MockHttpService implements HttpService { } @override - Future getSharedCases(String token) { + Future getSharedCases( + String token, { + FilterCriteria? filterCriteria, + }) { final List caseDtos = _caseDtos + _sharedCaseDtos; return Future.delayed( Duration(milliseconds: delay), @@ -1431,4 +1439,77 @@ class MockHttpService implements HttpService { // TODO: implement addCustomer throw UnimplementedError(); } + + @override + Future deleteTimeseriesData( + String token, + int? dataId, + String workshopId, + String caseId, + ) { + // TODO: implement deleteTimeseriesData + throw UnimplementedError(); + } + + @override + Future deleteObdData( + String token, + int? dataId, + String workshopId, + String caseId, + ) { + // TODO: implement deleteObdData + throw UnimplementedError(); + } + + @override + Future deleteSymptomData( + String token, + int? 
dataId, + String workshopId, + String caseId, + ) { + // TODO: implement deleteSymptomData + throw UnimplementedError(); + } + + @override + Future getAssets( + String token, + ) { + // TODO: implement getAssets + throw UnimplementedError(); + } + + @override + Future getVehicleComponents(String token) { + // TODO: implement getVehicleComponents + throw UnimplementedError(); + } + + @override + Future createAsset(String token, Map requestBody) { + // TODO: implement createAsset + throw UnimplementedError(); + } + + @override + Future publishAsset( + String token, + String assetId, + Map requestBody, + ) { + // TODO: implement publishAsset + throw UnimplementedError(); + } + + @override + Future deleteAsset( + String token, + String assetId, + Map? requestBody, + ) { + // TODO: implement deleteAsset + throw UnimplementedError(); + } } diff --git a/frontend/app/lib/services/ui_service.dart b/frontend/app/lib/services/ui_service.dart new file mode 100644 index 00000000..bddeb3f7 --- /dev/null +++ b/frontend/app/lib/services/ui_service.dart @@ -0,0 +1,10 @@ +import "package:flutter/material.dart"; + +class UIService { + static void showMessage(String text, ScaffoldMessengerState state) { + final SnackBar snackBar = SnackBar( + content: Center(child: Text(text)), + ); + state.showSnackBar(snackBar); + } +} diff --git a/frontend/app/lib/utils/constants.dart b/frontend/app/lib/utils/constants.dart index 223b7252..3b5cd20a 100644 --- a/frontend/app/lib/utils/constants.dart +++ b/frontend/app/lib/utils/constants.dart @@ -6,6 +6,7 @@ const String kRouteCases = "/cases"; const String kRouteVecicles = "/vehicles"; const String kRouteCustomers = "/customers"; const String kRouteDiagnosis = "/diagnoses/:diagnosisId"; +const String kRouteAssets = "/assets"; // External links const String kExternalLinkMoodle = "https://moodle.aw4null.de"; diff --git a/frontend/app/lib/utils/filter_criteria.dart b/frontend/app/lib/utils/filter_criteria.dart new file mode 100644 index 00000000..14e6edf1 --- /dev/null +++ b/frontend/app/lib/utils/filter_criteria.dart @@ -0,0 +1,11 @@ +class FilterCriteria { + FilterCriteria({ + this.vin, + this.obdDataDtc, + this.timeseriesDataComponent, + }); + + final String? vin; + final String? obdDataDtc; + final String? 
timeseriesDataComponent; +} diff --git a/frontend/app/lib/views/assets_detail_view.dart b/frontend/app/lib/views/assets_detail_view.dart new file mode 100644 index 00000000..32735592 --- /dev/null +++ b/frontend/app/lib/views/assets_detail_view.dart @@ -0,0 +1,220 @@ +import "package:aw40_hub_frontend/dialogs/offer_assets_dialog.dart"; +import "package:aw40_hub_frontend/dtos/asset_dto.dart"; +import "package:aw40_hub_frontend/dtos/nautilus_private_key_dto.dart"; +import "package:aw40_hub_frontend/models/asset_model.dart"; +import "package:aw40_hub_frontend/providers/asset_provider.dart"; +import "package:aw40_hub_frontend/services/ui_service.dart"; +import "package:aw40_hub_frontend/utils/extensions.dart"; +import "package:easy_localization/easy_localization.dart"; +import "package:flutter/material.dart"; +import "package:provider/provider.dart"; + +class AssetsDetailView extends StatelessWidget { + const AssetsDetailView({ + required this.assetModel, + required this.onClose, + super.key, + }); + + final AssetModel assetModel; + final void Function() onClose; + + @override + Widget build(BuildContext context) { + return DesktopAssetsDetailView( + assetModel: assetModel, + onClose: onClose, + onDelete: () {}, + ); + } +} + +class DesktopAssetsDetailView extends StatefulWidget { + const DesktopAssetsDetailView({ + required this.assetModel, + required this.onClose, + required this.onDelete, + super.key, + }); + + final AssetModel assetModel; + final void Function() onClose; + final void Function() onDelete; + + @override + State createState() => + _DesktopAssetsDetailViewState(); +} + +class _DesktopAssetsDetailViewState extends State { + @override + Widget build(BuildContext context) { + final ThemeData theme = Theme.of(context); + final ColorScheme colorScheme = theme.colorScheme; + final TextTheme textTheme = theme.textTheme; + final assetProvider = Provider.of(context, listen: false); + + final List attributesCase = [ + tr("assets.headlines.timeOfGeneration"), + tr("assets.headlines.name"), + tr("assets.headlines.filter") + ]; + + final String? formattedDateTime = + widget.assetModel.timestamp?.toGermanDateString(); + + final List valuesCase = [ + formattedDateTime ?? 
tr("general.unknownDateTime"), + widget.assetModel.name, + widget.assetModel.definition.toJsonWithoutNullValues().toString(), + ]; + + return SizedBox.expand( + child: SingleChildScrollView( + child: Card( + child: Padding( + padding: const EdgeInsets.all(16), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + children: [ + IconButton( + icon: const Icon(Icons.keyboard_double_arrow_right), + iconSize: 28, + onPressed: widget.onClose, + style: IconButton.styleFrom( + foregroundColor: colorScheme.primary, + ), + ), + Text( + tr("general.details"), + style: textTheme.displaySmall, + ), + const Spacer(), + IconButton( + icon: const Icon(Icons.delete_forever), + iconSize: 28, + style: IconButton.styleFrom( + foregroundColor: colorScheme.error, + ), + onPressed: () async => _onDeleteButtonPress( + context, + widget.assetModel.id!, + assetProvider, + ), + ), + ], + ), + const SizedBox(height: 16), + Table( + columnWidths: const {0: IntrinsicColumnWidth()}, + children: List.generate( + attributesCase.length, + (i) => TableRow( + children: [ + const SizedBox(height: 32), + Text(attributesCase[i]), + Text(valuesCase[i]), + ], + ), + ), + ), + const SizedBox(height: 16), + Row( + mainAxisAlignment: MainAxisAlignment.end, + children: [ + FilledButton.icon( + icon: const Icon(Icons.drive_folder_upload_outlined), + label: Text(tr("assets.upload.offer")), + onPressed: () async { + await _showOfferAssetsDialog(); + }, + ), + ], + ), + ], + ), + ), + ), + ), + ); + } + + Future _onDeleteButtonPress( + BuildContext context, + String diagnosisModelCaseId, + AssetProvider assetProvider, + ) async { + await _showConfirmRemoveDialog(context).then((String? privateKey) async { + final ScaffoldMessengerState scaffoldMessengerState = + ScaffoldMessenger.of(context); + if (privateKey == null) return; + + final bool deletionResult = await assetProvider.deleteAsset( + diagnosisModelCaseId, + NautilusPrivateKeyDto(privateKey), + ); + final String message = deletionResult + ? 
tr("assets.details.deleteAssetSuccessMessage") + : tr("assets.details.deleteAssetErrorMessage"); + UIService.showMessage(message, scaffoldMessengerState); + }); + } + + Future _showOfferAssetsDialog() async { + return showDialog( + context: context, + builder: (BuildContext context) { + return OfferAssetsDialog(assetModelId: widget.assetModel.id!); + }, + ); + } + + Future _showConfirmRemoveDialog(BuildContext context) async { + final TextEditingController privateKeyController = TextEditingController(); + + return showDialog( + context: context, + builder: (BuildContext context) { + final theme = Theme.of(context); + return AlertDialog( + title: Text(tr("assets.remove.title")), + content: Column( + mainAxisSize: MainAxisSize.min, + children: [ + Text(tr("assets.remove.description")), + const SizedBox(height: 16), + TextFormField( + controller: privateKeyController, + decoration: InputDecoration( + labelText: tr("assets.upload.privateKey"), + border: const OutlineInputBorder(), + ), + obscureText: true, + ), + ], + ), + actions: [ + TextButton( + onPressed: () => Navigator.pop(context, null), + child: Text( + tr("general.cancel"), + style: theme.textTheme.labelLarge?.copyWith( + color: theme.colorScheme.error, + ), + ), + ), + TextButton( + onPressed: () => Navigator.pop( + context, + privateKeyController.text, + ), + child: Text(tr("general.confirm")), + ), + ], + ); + }, + ); + } +} diff --git a/frontend/app/lib/views/assets_view.dart b/frontend/app/lib/views/assets_view.dart new file mode 100644 index 00000000..571db790 --- /dev/null +++ b/frontend/app/lib/views/assets_view.dart @@ -0,0 +1,140 @@ +import "package:aw40_hub_frontend/data_sources/assets_data_table_source.dart"; +import "package:aw40_hub_frontend/exceptions/app_exception.dart"; +import "package:aw40_hub_frontend/models/asset_model.dart"; +import "package:aw40_hub_frontend/providers/asset_provider.dart"; +import "package:aw40_hub_frontend/utils/enums.dart"; +import "package:aw40_hub_frontend/views/assets_detail_view.dart"; +import "package:easy_localization/easy_localization.dart"; +import "package:flutter/material.dart"; +import "package:logging/logging.dart"; +import "package:provider/provider.dart"; + +class AssetsView extends StatefulWidget { + const AssetsView({ + super.key, + }); + + @override + State createState() => _AssetsView(); +} + +class _AssetsView extends State { + final Logger _logger = Logger("AssetsViewLogger"); + ValueNotifier selectedAssetIndexNotifier = ValueNotifier(null); + + @override + void dispose() { + selectedAssetIndexNotifier.dispose(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + final assetProvider = Provider.of(context); + + return FutureBuilder( + // ignore: discarded_futures + future: assetProvider.getAssets(), + builder: + (BuildContext context, AsyncSnapshot> snapshot) { + if (snapshot.connectionState != ConnectionState.done || + !snapshot.hasData) { + _logger.shout(snapshot.error); + _logger.shout(snapshot.data); + return const Center(child: CircularProgressIndicator()); + } + final List? 
assetModels = snapshot.data; + if (assetModels == null) { + throw AppException( + exceptionType: ExceptionType.notFound, + exceptionMessage: "Received no assets.", + ); + } + + return AssetsTable( + assetModels: assetModels, + selectedAssetIndexNotifier: selectedAssetIndexNotifier, + ); + }, + ); + } +} + +class AssetsTable extends StatefulWidget { + const AssetsTable({ + required this.assetModels, + required this.selectedAssetIndexNotifier, + super.key, + }); + + final List assetModels; + final ValueNotifier selectedAssetIndexNotifier; + + @override + State createState() => AssetsTableState(); +} + +class AssetsTableState extends State { + ValueNotifier selectedAssetIndexNotifier = ValueNotifier(null); + + @override + void dispose() { + selectedAssetIndexNotifier.dispose(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + if (widget.assetModels.isEmpty) { + return Center( + child: Text( + tr("assets.noAssets"), + style: Theme.of(context).textTheme.displaySmall, + ), + ); + } + + return Row( + children: [ + Expanded( + flex: 3, + child: SingleChildScrollView( + child: PaginatedDataTable( + source: AssetsDataTableSource( + themeData: Theme.of(context), + selectedAssetIndexNotifier: selectedAssetIndexNotifier, + assetModels: widget.assetModels, + onPressedRow: (int i) => + setState(() => selectedAssetIndexNotifier.value = i), + ), + showCheckboxColumn: false, + rowsPerPage: 50, + columns: [ + DataColumn(label: Text(tr("assets.headlines.name"))), + DataColumn(label: Text(tr("assets.headlines.filter"))), + DataColumn( + label: Text(tr("assets.headlines.timeOfGeneration")), + ), + ], + ), + ), + ), + + // Show detail view if a assets is selected. + ValueListenableBuilder( + valueListenable: selectedAssetIndexNotifier, + builder: (context, value, child) { + if (value == null) return const SizedBox.shrink(); + return Expanded( + flex: 2, + child: AssetsDetailView( + assetModel: widget.assetModels[value], + onClose: () => selectedAssetIndexNotifier.value = null, + ), + ); + }, + ) + ], + ); + } +} diff --git a/frontend/app/lib/views/case_detail_view.dart b/frontend/app/lib/views/case_detail_view.dart index 5ba0720c..5b3cfd38 100644 --- a/frontend/app/lib/views/case_detail_view.dart +++ b/frontend/app/lib/views/case_detail_view.dart @@ -4,6 +4,7 @@ import "package:aw40_hub_frontend/components/dataset_upload_case_view.dart"; import "package:aw40_hub_frontend/dialogs/update_case_dialog.dart"; import "package:aw40_hub_frontend/dtos/case_update_dto.dart"; import "package:aw40_hub_frontend/models/case_model.dart"; +import "package:aw40_hub_frontend/models/data_model.dart"; import "package:aw40_hub_frontend/models/diagnosis_model.dart"; import "package:aw40_hub_frontend/models/logged_in_user_model.dart"; import "package:aw40_hub_frontend/models/obd_data_model.dart"; @@ -12,6 +13,8 @@ import "package:aw40_hub_frontend/models/timeseries_data_model.dart"; import "package:aw40_hub_frontend/providers/auth_provider.dart"; import "package:aw40_hub_frontend/providers/case_provider.dart"; import "package:aw40_hub_frontend/providers/diagnosis_provider.dart"; +import "package:aw40_hub_frontend/services/ui_service.dart"; +import "package:aw40_hub_frontend/utils/enums.dart"; import "package:aw40_hub_frontend/utils/extensions.dart"; import "package:easy_localization/easy_localization.dart"; import "package:flutter/material.dart"; @@ -38,6 +41,8 @@ class CaseDetailView extends StatelessWidget { Provider.of(context, listen: false).loggedInUser, caseModel.id, ), + onDeleteData: (int? 
dataId, DatasetType datasetType) async => + _onDeleteDataPress(context, dataId, datasetType), ); } @@ -86,17 +91,59 @@ class CaseDetailView extends StatelessWidget { final String message = result ? tr("cases.details.deleteCaseSuccessMessage") : tr("cases.details.deleteCaseErrorMessage"); - _showMessage(message, scaffoldMessengerState); + UIService.showMessage(message, scaffoldMessengerState); }); onClose(); } - static void _showMessage(String text, ScaffoldMessengerState state) { - final SnackBar snackBar = SnackBar( - content: Center(child: Text(text)), - ); - state.showSnackBar(snackBar); + Future _onDeleteDataPress( + BuildContext context, + int? dataId, + DatasetType datasetType, + ) async { + final caseProvider = Provider.of(context, listen: false); + final ScaffoldMessengerState scaffoldMessengerState = + ScaffoldMessenger.of(context); + + final bool? dialogResult = await _showConfirmDeleteDialog(context); + if (dialogResult == null || !dialogResult) return; + + bool result = false; + switch (datasetType) { + case DatasetType.timeseries: + result = await caseProvider.deleteTimeseriesData( + dataId, + caseModel.workshopId, + caseModel.id, + ); + break; + case DatasetType.obd: + result = await caseProvider.deleteObdData( + dataId, + caseModel.workshopId, + caseModel.id, + ); + break; + case DatasetType.symptom: + result = await caseProvider.deleteSymptomData( + dataId, + caseModel.workshopId, + caseModel.id, + ); + break; + case DatasetType.unknown: + UIService.showMessage( + tr("cases.details.deleteDataUnknownDataTypeMessage"), + scaffoldMessengerState, + ); + return; + } + + final String message = result + ? tr("cases.details.deleteDataSuccessMessage") + : tr("cases.details.deleteDataErrorMessage"); + UIService.showMessage(message, scaffoldMessengerState); } } @@ -105,12 +152,15 @@ class DesktopCaseDetailView extends StatefulWidget { required this.caseModel, required this.onClose, required this.onDelete, + required this.onDeleteData, super.key, }); final CaseModel caseModel; final void Function() onClose; final void Function() onDelete; + final Future Function(int? 
dataId, DatasetType datasetType) + onDeleteData; @override State createState() => _DesktopCaseDetailViewState(); @@ -166,7 +216,6 @@ class _DesktopCaseDetailViewState extends State { foregroundColor: colorScheme.primary, ), ), - // const SizedBox(width: 16), Text( tr("cases.details.headline"), style: textTheme.displaySmall, @@ -267,7 +316,10 @@ class _DesktopCaseDetailViewState extends State { "diagnoses.details.startDiagnosisFailureMessage", ); } - _showMessage(message, scaffoldMessengerState); + UIService.showMessage( + message, + scaffoldMessengerState, + ); } else { routemaster.push( "/diagnoses/${widget.caseModel.diagnosisId}", @@ -295,21 +347,25 @@ class _DesktopCaseDetailViewState extends State { Text(tr("general.noData")) else Table( - columnWidths: const {0: IntrinsicColumnWidth()}, + columnWidths: const { + 0: FlexColumnWidth(2), + 1: FlexColumnWidth(5), + 2: FlexColumnWidth(3), + 3: FlexColumnWidth(), + }, children: [ TableRow( children: [ - const SizedBox(height: 32), Text(tr("general.id")), Text(tr("general.date")), Text(tr("general.dataType")), + const Text(""), ], ), ...[ - ...widget.caseModel.timeseriesData - .map(buildTimeseriesDataRow), - ...widget.caseModel.obdData.map(buildObdDataRow), - ...widget.caseModel.symptoms.map(buildSymptomsDataRow), + ...widget.caseModel.timeseriesData.map(buildDataRow), + ...widget.caseModel.obdData.map(buildDataRow), + ...widget.caseModel.symptoms.map(buildDataRow), ], ], ), @@ -326,36 +382,69 @@ class _DesktopCaseDetailViewState extends State { widget.caseModel.timeseriesData.isEmpty && widget.caseModel.obdData.isEmpty; - TableRow buildTimeseriesDataRow(TimeseriesDataModel timeseriesDataModel) { - return TableRow( - children: [ - const SizedBox(height: 32), - Text(timeseriesDataModel.dataId.toString()), - Text(timeseriesDataModel.timestamp?.toGermanDateTimeString() ?? ""), - Text(timeseriesDataModel.type?.name.capitalize() ?? ""), - ], - ); - } + TableRow buildDataRow(DataModel model) { + Text textWidget = const Text(""); + switch (model.runtimeType) { + case ObdDataModel: + textWidget = Text(tr("general.obd")); + break; + case TimeseriesDataModel: + final timeseriesDataModel = model as TimeseriesDataModel; + textWidget = Text(timeseriesDataModel.type?.name.capitalize() ?? ""); + break; + case SymptomModel: + textWidget = Text(tr("general.symptom")); + break; + } - TableRow buildObdDataRow(ObdDataModel obdDataModel) { + final ThemeData theme = Theme.of(context); + final ColorScheme colorScheme = theme.colorScheme; return TableRow( children: [ - const SizedBox(height: 32), - Text(obdDataModel.dataId.toString()), - Text(obdDataModel.timestamp?.toGermanDateTimeString() ?? ""), - const Text("Obd"), + SizedBox( + height: 32, + child: Align( + alignment: Alignment.bottomLeft, + child: Text(model.dataId.toString()), + ), + ), + SizedBox( + height: 32, + child: Align( + alignment: Alignment.bottomLeft, + child: Text(model.timestamp?.toGermanDateTimeString() ?? ""), + ), + ), + SizedBox( + height: 32, + child: Align( + alignment: Alignment.bottomLeft, + child: textWidget, + ), + ), + deleteButton( + colorScheme, + model.dataId, + DatasetType.obd, + ), ], ); } - TableRow buildSymptomsDataRow(SymptomModel symptomModel) { - return TableRow( - children: [ - const SizedBox(height: 32), - Text(symptomModel.dataId.toString()), - Text(symptomModel.timestamp?.toGermanDateTimeString() ?? ""), - const Text("Symptom"), - ], + IconButton deleteButton( + ColorScheme colorScheme, + int? 
dataId, + DatasetType datasetType, + ) { + return IconButton( + icon: const Icon(Icons.delete_forever), + iconSize: 28, + style: IconButton.styleFrom( + foregroundColor: colorScheme.error, + ), + onPressed: () async { + await widget.onDeleteData(dataId, datasetType); + }, ); } @@ -367,11 +456,4 @@ class _DesktopCaseDetailViewState extends State { }, ); } - - static void _showMessage(String text, ScaffoldMessengerState state) { - final SnackBar snackBar = SnackBar( - content: Center(child: Text(text)), - ); - state.showSnackBar(snackBar); - } } diff --git a/frontend/app/lib/views/cases_view.dart b/frontend/app/lib/views/cases_view.dart index f46f096d..ddf50240 100644 --- a/frontend/app/lib/views/cases_view.dart +++ b/frontend/app/lib/views/cases_view.dart @@ -6,6 +6,7 @@ import "package:aw40_hub_frontend/utils/enums.dart"; import "package:aw40_hub_frontend/views/case_detail_view.dart"; import "package:easy_localization/easy_localization.dart"; import "package:flutter/material.dart"; +import "package:logging/logging.dart"; import "package:provider/provider.dart"; class CasesView extends StatefulWidget { @@ -18,23 +19,38 @@ class CasesView extends StatefulWidget { } class _CasesViewState extends State { - ValueNotifier currentCaseIndexNotifier = ValueNotifier(null); + final Logger _logger = Logger("CasesView"); @override void dispose() { - currentCaseIndexNotifier.dispose(); super.dispose(); } @override Widget build(BuildContext context) { + _logger.info("build _CasesViewState"); final caseProvider = Provider.of(context); + + if (caseProvider.notifiedListenersAfterGettingEmptyCurrentCases) { + _logger.info("notifiedListenersAfterGettingEmptyCurrentCases = true"); + caseProvider.notifiedListenersAfterGettingEmptyCurrentCases = false; + return buildCasesTable([], caseProvider); + } + return FutureBuilder( // ignore: discarded_futures future: caseProvider.getCurrentCases(), builder: (BuildContext context, AsyncSnapshot> snapshot) { + _logger.info( + // ignore: lines_longer_than_80_chars + "FutureBuilder called - ConnectionState: ${snapshot.connectionState}, " + "Has Data: ${snapshot.hasData}, " + "Error: ${snapshot.error}, " + "Data: ${snapshot.data}", + ); if (snapshot.connectionState != ConnectionState.done || !snapshot.hasData) { + _logger.info("Returning: Center with CircularProgressIndicator"); return const Center(child: CircularProgressIndicator()); } final List? caseModels = snapshot.data; @@ -44,46 +60,52 @@ class _CasesViewState extends State { exceptionMessage: "Received no case data.", ); } - return Row( - children: [ - Expanded( - flex: 3, - child: CasesTable( - caseIndexNotifier: currentCaseIndexNotifier, - caseModel: caseModels, - ), - ), - - // Show detail view if a case is selected. - ValueListenableBuilder( - valueListenable: currentCaseIndexNotifier, - builder: (context, value, child) { - if (value == null) return const SizedBox.shrink(); - return Expanded( - flex: 2, - child: CaseDetailView( - caseModel: caseModels[value], - onClose: () => currentCaseIndexNotifier.value = null, - ), - ); - }, - ) - ], - ); + return buildCasesTable(caseModels, caseProvider); }, ); } + + Row buildCasesTable(List caseModels, CaseProvider caseProvider) { + _logger.info("called buildCasesTable with data $caseModels"); + return Row( + children: [ + Expanded( + flex: 3, + child: CasesTable( + selectedCaseIndexNotifier: caseProvider.selectedCaseIndexNotifier, + caseModel: caseModels, + ), + ), + + // Show detail view if a case is selected. 
+ ValueListenableBuilder( + valueListenable: caseProvider.selectedCaseIndexNotifier, + builder: (context, value, child) { + if (value == null) return const SizedBox.shrink(); + return Expanded( + flex: 2, + child: CaseDetailView( + caseModel: caseModels[value], + onClose: () => + caseProvider.selectedCaseIndexNotifier.value = null, + ), + ); + }, + ) + ], + ); + } } class CasesTable extends StatelessWidget { const CasesTable({ required this.caseModel, - required this.caseIndexNotifier, + required this.selectedCaseIndexNotifier, super.key, }); final List caseModel; - final ValueNotifier caseIndexNotifier; + final ValueNotifier selectedCaseIndexNotifier; @override Widget build(BuildContext context) { @@ -91,10 +113,10 @@ class CasesTable extends StatelessWidget { child: PaginatedDataTable( source: CasesDataTableSource( themeData: Theme.of(context), - currentIndexNotifier: caseIndexNotifier, + selectedCaseIndexNotifier: selectedCaseIndexNotifier, caseModels: caseModel, onPressedRow: (int i) { - caseIndexNotifier.value = i; + selectedCaseIndexNotifier.value = i; }, ), showCheckboxColumn: false, diff --git a/frontend/app/lib/views/customer_view.dart b/frontend/app/lib/views/customer_view.dart index 8e37d31b..e81a9aad 100644 --- a/frontend/app/lib/views/customer_view.dart +++ b/frontend/app/lib/views/customer_view.dart @@ -6,7 +6,6 @@ import "package:aw40_hub_frontend/utils/enums.dart"; import "package:aw40_hub_frontend/views/customer_detail_view.dart"; import "package:easy_localization/easy_localization.dart"; import "package:flutter/material.dart"; -import "package:logging/logging.dart"; import "package:provider/provider.dart"; class CustomerView extends StatefulWidget { @@ -20,7 +19,6 @@ class CustomerView extends StatefulWidget { class _CustomerViewState extends State { final currentCustomerIndexNotifier = ValueNotifier(null); - Logger customViewLogger = Logger("CustomerViewLogger"); @override void dispose() { diff --git a/frontend/app/lib/views/diagnosis_detail_view.dart b/frontend/app/lib/views/diagnosis_detail_view.dart index fc2bb5aa..ee25debd 100644 --- a/frontend/app/lib/views/diagnosis_detail_view.dart +++ b/frontend/app/lib/views/diagnosis_detail_view.dart @@ -7,6 +7,7 @@ import "package:aw40_hub_frontend/models/diagnosis_model.dart"; import "package:aw40_hub_frontend/models/state_machine_log_entry_model.dart"; import "package:aw40_hub_frontend/providers/diagnosis_provider.dart"; import "package:aw40_hub_frontend/services/helper_service.dart"; +import "package:aw40_hub_frontend/services/ui_service.dart"; import "package:aw40_hub_frontend/utils/enums.dart"; import "package:aw40_hub_frontend/utils/extensions.dart"; import "package:collection/collection.dart"; @@ -220,17 +221,10 @@ class _DiagnosisDetailView extends State { final String message = deletionResult ? tr("diagnoses.details.deleteDiagnosisSuccessMessage") : tr("diagnoses.details.deleteDiagnosisErrorMessage"); - _showMessage(message, scaffoldMessengerState); + UIService.showMessage(message, scaffoldMessengerState); }); } - static void _showMessage(String text, ScaffoldMessengerState state) { - final SnackBar snackBar = SnackBar( - content: Center(child: Text(text)), - ); - state.showSnackBar(snackBar); - } - String? 
_getFaultPathFromStateMachineLog( List stateMachineLog, ) { diff --git a/frontend/app/lib/views/vehicle_detail_view.dart b/frontend/app/lib/views/vehicle_detail_view.dart index eca5f74a..143ac637 100644 --- a/frontend/app/lib/views/vehicle_detail_view.dart +++ b/frontend/app/lib/views/vehicle_detail_view.dart @@ -50,21 +50,25 @@ class _VehicleDetailView extends State { child: Column( crossAxisAlignment: CrossAxisAlignment.start, children: [ - IconButton( - icon: const Icon(Icons.keyboard_double_arrow_right), - iconSize: 28, - onPressed: widget.onClose, - style: IconButton.styleFrom( - foregroundColor: colorScheme.primary, - ), - ), - // Title bar Row( - mainAxisAlignment: MainAxisAlignment.spaceBetween, children: [ - Text( - tr("vehicles.details.headline"), - style: textTheme.displaySmall, + IconButton( + icon: const Icon(Icons.keyboard_double_arrow_right), + iconSize: 28, + onPressed: widget.onClose, + style: IconButton.styleFrom( + foregroundColor: colorScheme.primary, + ), + ), + // Title bar + Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + Text( + tr("vehicles.details.headline"), + style: textTheme.displaySmall, + ), + ], ), ], ), diff --git a/frontend/app/test/services/http_service_test.dart b/frontend/app/test/services/http_service_test.dart index 4497bf3d..40b0549b 100644 --- a/frontend/app/test/services/http_service_test.dart +++ b/frontend/app/test/services/http_service_test.dart @@ -76,7 +76,7 @@ void main() { expect(request.body, isEmpty, reason: "Request body is not empty"); expect( request.url.toString(), - endsWith("/shared/cases"), + contains("/shared/cases"), reason: "Request URL does not end with /shared/cases", ); return http.Response('{"status": "success"}', 200); @@ -103,7 +103,7 @@ void main() { expect(request.body, isEmpty, reason: "Request body is not empty"); expect( request.url.toString(), - endsWith("/$workshopId/cases"), + contains("/$workshopId/cases"), reason: "Request URL does not end with /{workshopId}/cases", ); return http.Response('{"status": "success"}', 200); diff --git a/keycloak/keycloak-config.sh b/keycloak/keycloak-config.sh index 12b4ea4e..9de03139 100755 --- a/keycloak/keycloak-config.sh +++ b/keycloak/keycloak-config.sh @@ -49,6 +49,12 @@ $kcadm create roles \ -s description="Role for API customers endpoints" +$kcadm create roles \ + -r werkstatt-hub \ + -s name=assets \ + -s description="Role for API assets endpoints" + + # Add groups and set roles $kcadm create groups \ -r werkstatt-hub \ @@ -58,7 +64,8 @@ $kcadm add-roles \ -r werkstatt-hub \ --gname Mechanics \ --rolename workshop \ - --rolename shared + --rolename shared \ + --rolename customers $kcadm create groups \ -r werkstatt-hub \ @@ -68,7 +75,9 @@ $kcadm add-roles \ -r werkstatt-hub \ --gname Analysts \ --rolename workshop \ - --rolename shared + --rolename shared \ + --rolename customers \ + --rolename assets $kcadm create groups \ -r werkstatt-hub \ @@ -158,7 +167,8 @@ then --uusername aw40hub-dev-workshop \ --rolename workshop \ --rolename shared \ - --rolename customers + --rolename customers \ + --rolename assets $kcadm create clients \ -r werkstatt-hub \ diff --git a/keycloak/keycloak.env b/keycloak/keycloak.env index 0a541b99..fbfd2fe2 100644 --- a/keycloak/keycloak.env +++ b/keycloak/keycloak.env @@ -9,7 +9,8 @@ KC_DB_URL_DATABASE=${KEYCLOAK_DB_DATABASE_NAME:-db} KC_DB_USERNAME=${KEYCLOAK_DB_USER:-postgre-user} KC_DB_PASSWORD=${KEYCLOAK_DB_PASSWORD:?error} KC_PROXY=edge -KC_HOSTNAME_URL=${PROXY_DEFAULT_SCHEME:?error}://${KEYCLOAK_ADDRESS:?error} 
+KC_HOSTNAME_URL=${PROXY_DEFAULT_SCHEME:?error}://${KEYCLOAK_ADDRESS:?error}${KEYCLOAK_PATH:-/} +KC_HOSTNAME_ADMIN_URL=${PROXY_DEFAULT_SCHEME:?error}://${KEYCLOAK_ADDRESS:?error}${KEYCLOAK_PATH:-/} KC_HOSTNAME_STRICT=true # Analyst user role diff --git a/nautilus/utils/publish.ts b/nautilus/utils/publish.ts index e235e332..ba7547ac 100644 --- a/nautilus/utils/publish.ts +++ b/nautilus/utils/publish.ts @@ -33,8 +33,8 @@ export async function publishAccessDataset( url: url, method: 'GET', headers: { - API_KEY: api_key, - DATA_KEY: data_key + 'x-api-key': api_key, + data_key: data_key } } const pricingConfig = pricingConfigs[price.currency] diff --git a/proxy/proxy-config.yml b/proxy/proxy-config.yml deleted file mode 100644 index 128ca3a4..00000000 --- a/proxy/proxy-config.yml +++ /dev/null @@ -1,20 +0,0 @@ -providers: - docker: - network: hubnet - exposedByDefault: false -entrypoints: - web: - address: :80 - websecure: - address: :443 - http: - tls: - options: default -tls: - options: - default: - minVersion: VersionTLS12 -log: - level: ERROR -api: {} -
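
The new FilterCriteria object is passed into HttpService.getCases and getSharedCases (presumably via CaseProvider), where each non-null field becomes a query parameter on the cases endpoints. Below is a minimal standalone Dart sketch of that mapping; it mirrors the queryParameters construction in HttpService.getCases, and the base URL, workshop id and filter values are placeholder assumptions, not values taken from the patch.

// Standalone sketch (not part of the patch): shows how the optional
// FilterCriteria fields map onto query parameters, mirroring
// HttpService.getCases. Base URL, workshop id and values are placeholders.
void main() {
  const backendUrl = "http://localhost:8000/v1"; // assumed base URL
  const workshopId = "workshop1"; // assumed workshop id

  // Same optional fields as utils/filter_criteria.dart.
  final String? vin = "WVWZZZ1JZXW000001"; // example filter value
  final String? obdDataDtc = "P0001"; // example filter value
  final String? timeseriesDataComponent = null; // filter not set

  final uri = Uri.parse("$backendUrl/$workshopId/cases").replace(
    queryParameters: {
      if (vin != null) "vin": vin,
      if (obdDataDtc != null) "obd_data_dtc": obdDataDtc,
      if (timeseriesDataComponent != null)
        "timeseries_data_component": timeseriesDataComponent,
    },
  );

  // -> .../workshop1/cases?vin=WVWZZZ1JZXW000001&obd_data_dtc=P0001
  print(uri);
}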
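
HttpService also gains the Dataspace asset-management calls used by the new assets views: creating an asset, publishing it, and deleting it again with the Nautilus private key sent in the request body. The sketch below exercises the same three endpoints with plain package:http; the endpoint paths and headers follow the patch, while the base URL, token and every field name inside the JSON bodies are assumptions rather than the actual DTO schemas.

// Standalone sketch (not part of the patch): the three asset-management
// endpoints added to HttpService, called with plain package:http.
// Base URL, token and all JSON body field names are assumptions.
import "dart:convert";

import "package:http/http.dart" as http;

Future<void> main() async {
  const backendUrl = "http://localhost:8000/v1"; // assumed base URL
  const token = "<access-token>"; // assumed bearer token
  final headers = <String, String>{
    "Authorization": "Bearer $token",
    "Content-Type": "application/json; charset=UTF-8",
  };

  // 1. Create an asset: POST /dataspace/manage/assets
  final created = await http.post(
    Uri.parse("$backendUrl/dataspace/manage/assets"),
    headers: headers,
    body: jsonEncode({"name": "demo asset"}), // body schema assumed
  );
  final assetId =
      jsonDecode(created.body)["_id"] as String; // field name assumed

  // 2. Publish it: POST /dataspace/manage/assets/{assetId}/publication
  await http.post(
    Uri.parse("$backendUrl/dataspace/manage/assets/$assetId/publication"),
    headers: headers,
    body: jsonEncode({"network": "test"}), // body schema assumed
  );

  // 3. Delete it: DELETE /dataspace/manage/assets/{assetId}, sending the
  //    Nautilus private key in the body like the frontend does
  //    (field name assumed).
  await http.delete(
    Uri.parse("$backendUrl/dataspace/manage/assets/$assetId"),
    headers: headers,
    body: jsonEncode({"nautilus_private_key": "<private-key>"}),
  );
}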