diff --git a/.gitignore b/.gitignore
index 09634cee..7368b165 100755
--- a/.gitignore
+++ b/.gitignore
@@ -118,9 +118,9 @@ venv.bak/
 # Rope project settings
 .ropeproject
 
-# poetry
-.poetry/
-poetry.lock
+# uv
+.venv/
+uv.lock
 
 # mkdocs documentation
 /site
diff --git a/Dockerfile b/Dockerfile
index c0769c7d..0d361f7a 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -20,12 +20,6 @@ RUN apt-get install -y curl net-tools iproute2 iputils-ping
 RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 2
 RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1
 
-# Install pip
-RUN curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
-RUN python3.11 get-pip.py
-
-RUN python3.11 -m pip install --upgrade pip setuptools virtualenv
-
 # Install gcc and git
 RUN apt-get update && apt-get install -y build-essential gcc g++ clang git make cmake
@@ -42,12 +36,18 @@ RUN apt-get update
 RUN apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
 
-RUN curl -sSL https://install.python-poetry.org | python3 -
+ADD https://astral.sh/uv/install.sh /uv-installer.sh
 
-ENV PATH="${PATH}:/root/.local/bin"
+RUN sh /uv-installer.sh && rm /uv-installer.sh
 
-ENV POETRY_VIRTUALENVS_CREATE=false
+ENV PATH="/root/.local/bin/:$PATH"
 
 COPY pyproject.toml .
 
-RUN poetry install --only core --no-root
+RUN uv python install 3.11.7
+
+RUN uv python pin 3.11.7
+
+RUN uv sync --group core
+
+ENV PATH=".venv/bin:$PATH"
diff --git a/Makefile b/Makefile
index cc9a2362..e756a2bd 100644
--- a/Makefile
+++ b/Makefile
@@ -1,142 +1,146 @@
-POETRY_HOME := $(CURDIR)/.poetry
-POETRY := $(POETRY_HOME)/bin/poetry
-
-MIN_PYTHON_VERSION := 3.10
-
-PYTHON_VERSIONS := 3.11 3.10
-
-PYTHON := $(shell \
-	for ver in $(PYTHON_VERSIONS); do \
-		if command -v python$$ver >/dev/null 2>&1; then echo python$$ver; exit 0; fi; \
-	done \
-)
-
-ifndef PYTHON
-$(error "Python version $(MIN_PYTHON_VERSION) or higher is required but not found.")
-endif
-
-.PHONY: pre-install
-pre-install:
-	@echo "🐍 Using Python interpreter: $(PYTHON)"
-	@echo "🐍 Checking if Python is installed"
-	@command -v $(PYTHON) >/dev/null 2>&1 || { echo >&2 "$(PYTHON) is not installed. Aborting."; exit 1; }
-	@echo "🐍 Checking Python version"
-	@$(PYTHON) --version | grep -E "Python 3\.(1[0-9]|[2-9][0-9])" >/dev/null 2>&1 || { echo >&2 "Python $(MIN_PYTHON_VERSION) or higher is required. Aborting."; exit 1; }
-	@echo "📦 Checking if Poetry is installed"
-	@if ! command -v poetry >/dev/null 2>&1 || [ ! -d "$(POETRY_HOME)" ]; then \
-		echo "Poetry is not installed or POETRY_HOME does not exist. Installing Poetry."; \
-		curl -sSL https://install.python-poetry.org | POETRY_HOME=$(POETRY_HOME) $(PYTHON) -; \
-	fi
-	@echo "📦 Configuring Poetry"
-	@if [ -z "$$CONDA_PREFIX" ] && [ -z "$$VIRTUAL_ENV" ]; then \
-		echo "Configuring Poetry to create a virtual environment."; \
-		$(POETRY) config virtualenvs.in-project true; \
+UV := uv
+PYTHON_VERSION := 3.11
+UV_INSTALL_SCRIPT := https://astral.sh/uv/install.sh
+
+command_exists = $(shell command -v $(1) >/dev/null 2>&1 && echo true || echo false)
+
+define install_uv
+	@echo "📦 uv is not installed. Installing uv..."
+	@curl -LsSf $(UV_INSTALL_SCRIPT) | sh
+endef
+
+.PHONY: check-uv
+check-uv: ## Check and install uv if necessary
+	@if command -v $(UV) >/dev/null 2>&1; then \
+		echo "📦 uv is already installed."; \
 	else \
-		echo "Configuring Poetry to use the existing environment."; \
-		$(POETRY) config virtualenvs.create false; \
+		echo "📦 uv is not installed. Installing uv..."; \
+		curl -LsSf $(UV_INSTALL_SCRIPT) | sh; \
 	fi
-	@echo "📦 Setting Poetry to use $(PYTHON)"
-	@$(POETRY) env use $(PYTHON) || { echo "Failed to set Python version for Poetry. Aborting."; exit 1; }
+
+.PHONY: install-python
+install-python: check-uv ## Install Python with uv
+	@echo "🐍 Installing Python $(PYTHON_VERSION) with uv"
+	@$(UV) python install $(PYTHON_VERSION)
+	@echo "🔧 Configuring Python $(PYTHON_VERSION) as the default Python version"
+	@$(UV) python pin $(PYTHON_VERSION)
 
 .PHONY: install
-install: pre-install ## Install the poetry environment and install the pre-commit hooks
-	@echo "📦 Installing dependencies with Poetry"
-	@PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring $(POETRY) install --with core
+install: install-python ## Install core dependencies
+	@echo "📦 Installing core dependencies with uv"
+	@$(UV) sync --group core
 	@echo "🔧 Installing pre-commit hooks"
-	@$(POETRY) run pre-commit install
+	@$(UV) run pre-commit install
+	@echo ""
+	@echo "🐳 Building nebula-frontend docker image. Do you want to continue? (y/n)"
+	@read ans && [ $${ans:-N} = y ] || { echo "Build cancelled."; exit 1; }
+	@docker build -t nebula-frontend -f nebula/frontend/Dockerfile .
+	@echo ""
+	@echo "🐳 Building nebula-core docker image. Do you want to continue? (y/n)"
+	@read ans && [ $${ans:-N} = y ] || { echo "Build cancelled."; exit 1; }
+	@docker build -t nebula-core .
+	@echo ""
 	@$(MAKE) shell
 
 .PHONY: full-install
-full-install: pre-install ## Install the poetry environment and install the pre-commit hooks
-	@echo "📦 Installing dependencies with Poetry"
-	@PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring $(POETRY) install --with core,docs,dev
+full-install: install-python ## Install all dependencies (core, docs)
+	@echo "📦 Installing all dependencies with uv"
+	@$(UV) sync --group core --group docs
 	@echo "🔧 Installing pre-commit hooks"
-	@$(POETRY) run pre-commit install
+	@$(UV) run pre-commit install
 	@$(MAKE) shell
 
 .PHONY: shell
-shell: ## Start a shell in the poetry environment
-	@if [ -z "$$CONDA_PREFIX" ] && [ -z "$$VIRTUAL_ENV" ]; then \
-		echo "🐚 Activating virtual environment"; \
-		$(POETRY) shell; \
+shell: ## Start a shell in the uv environment
+	@echo "🐚 Starting a shell in the uv environment"
+	@if [ -n "$$VIRTUAL_ENV" ]; then \
+		echo "🐚 Already in a virtual environment: $$VIRTUAL_ENV"; \
+	elif [ ! -d ".venv" ]; then \
+		echo "❌ .venv directory not found. Running 'make install' to create it..."; \
+		$(MAKE) install; \
 	else \
-		echo "🐚 Conda or virtual environment detected, skipping Poetry shell activation"; \
+		echo "🐚 Run the following command to activate the virtual environment:"; \
+		echo ""; \
+		echo '[Linux/MacOS] \033[1;32msource .venv/bin/activate\033[0m'; \
+		echo '[Windows] \033[1;32m.venv\\Scripts\\activate\033[0m'; \
+		echo ""; \
+		echo "🚀 NEBULA is ready to use!"; \
+		echo "🚀 Created by \033[1;34mEnrique Tomás Martínez Beltrán\033[0m <\033[1;34menriquetomas@um.es\033[0m>"; \
 	fi
 
-.PHONY: sync
-sync: ## Sync the lock file
-	@echo "📦 Syncing the lock file"
-	@PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring $(POETRY) lock
+.PHONY: lock
+lock: ## Update the lock file
+	@echo "🔒 This will update the lock file. Do you want to continue? (y/n)"
+	@read ans && [ $${ans:-N} = y ] || { echo "Lock cancelled."; exit 1; }
+	@echo "🔒 Locking dependencies..."
+	@$(UV) lock
 
 .PHONY: update-libs
-update-libs: ## Update libraries to the latest version
-	@echo "🔧 This will override the version of current libraries. Do you want to continue? (y/n)"
+update-libs: ## Update libraries to the latest version
+	@echo "🔧 This will override the versions of current libraries. Do you want to continue? (y/n)"
 	@read ans && [ $${ans:-N} = y ] || { echo "Update cancelled."; exit 1; }
 	@echo "📦 Updating libraries..."
-	@PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring $(POETRY) update
+	@$(UV) lock --upgrade
+	@$(UV) sync
 
 .PHONY: check
-check: ## Run code quality tools.
+check: ## Run code quality tools
 	@echo "🛠️ Running code quality checks"
-	@echo "🔍 Checking Poetry lock file consistency"
-	@$(POETRY) check --lock
+	@echo "🔍 Checking uv lock file consistency"
+	@$(UV) lock --locked
 	@echo "🚨 Linting code with pre-commit"
-	@$(POETRY) run pre-commit run -a
+	@$(UV) run pre-commit run -a
 
 .PHONY: check-plus
-check-plus: check ## Run additional code quality tools.
-	@echo "🔍 Checking code formatting with black
-	@$(POETRY) run black --check ."
+check-plus: check ## Run additional code quality tools
+	@echo "🔍 Checking code formatting with black"
+	@$(UV) run black --check .
 	@echo "⚙️ Static type checking with mypy"
-	@$(POETRY) run mypy
+	@$(UV) run mypy
 	@echo "🔎 Checking for obsolete dependencies"
-	@$(POETRY) run deptry .
+	@$(UV) run deptry .
 
 .PHONY: build
-build: clean-build ## Build wheel file using poetry
+build: clean-build ## Build the wheel file
 	@echo "🚀 Creating wheel file"
-	@$(POETRY) build
+	@$(UV) build
 
 .PHONY: clean-build
-clean-build: ## clean build artifacts
+clean-build: ## Clean build artifacts
 	@rm -rf dist
 
 .PHONY: publish
-publish: ## publish a release to pypi.
-	@echo "🚀 Publishing: Dry run."
-	@$(POETRY) config pypi-token.pypi $(PYPI_TOKEN)
-	@$(POETRY) publish --dry-run
-	@echo "🚀 Publishing."
-	@$(POETRY) publish
+publish: ## Publish a release to PyPI
+	@echo "🚀 Publishing..."
+	@$(UV) publish --token $(PYPI_TOKEN)
 
 .PHONY: build-and-publish
-build-and-publish: build publish ## Build and publish.
+build-and-publish: build publish ## Build and publish the package
 
 .PHONY: doc-test
-doc-test: ## Test if documentation can be built without warnings or errors
-	@$(POETRY) run mkdocs build -f docs/mkdocs.yml -d _build -s
+doc-test: ## Test if documentation can be built without errors
+	@$(UV) run mkdocs build -f docs/mkdocs.yml -d _build -s
 
 .PHONY: doc-build
-doc-build: ## Build the documentation
-	@$(POETRY) run mkdocs build -f docs/mkdocs.yml -d _build
+doc-build: ## Build the documentation
+	@$(UV) run mkdocs build -f docs/mkdocs.yml -d _build
 
 .PHONY: doc-serve
-doc-serve: ## Build and serve the documentation
-	@$(POETRY) run mkdocs serve -f docs/mkdocs.yml
+doc-serve: ## Serve the documentation locally
+	@$(UV) run mkdocs serve -f docs/mkdocs.yml
 
 .PHONY: format
-format: ## Format code with black and isort
+format: ## Format code with black and isort
 	@echo "🎨 Formatting code"
-	@$(POETRY) run black .
-	@$(POETRY) run isort .
+	@$(UV) run black .
+	@$(UV) run isort .
 
 .PHONY: clean
-clean: clean-build ## Clean up build artifacts and cache files
+clean: clean-build ## Clean up build artifacts and caches
 	@echo "🧹 Cleaning up build artifacts and caches"
 	@rm -rf __pycache__ */__pycache__ .mypy_cache
 
 .PHONY: help
-help:
+help: ## Display available commands
 	@echo "Available commands:"
 	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "💡 \033[36m%-20s\033[0m %s\n", $$1, $$2}'
diff --git a/README.md b/README.md
index 4d5ec13d..8a6f212f 100755
--- a/README.md
+++ b/README.md
@@ -147,7 +147,7 @@ Distributed under the GNU GPLv3 License. See `LICENSE` for more information.
We would like to thank the following projects for their contributions which have helped shape NEBULA: - [PyTorch Lightning](https://github.com/Lightning-AI/pytorch-lightning) for the training loop and model management -- [Tensorboard](https://github.com/tensorflow/tensorboard) and [Aim](https://github.com/aimhubio/aim) for the visualization tools and monitoring capabilities +- [Tensorboard](https://github.com/tensorflow/tensorboard) for the visualization tools and monitoring capabilities - Different datasets ([nebula/core/datasets](https://github.com/CyberDataLab/nebula/tree/main/nebula/core/datasets)) and models ([nebula/core/models](https://github.com/CyberDataLab/nebula/tree/main/nebula/core/models)) for testing and validation purposes - [FastAPI](https://github.com/tiangolo/fastapi) for the RESTful API - [Web3](https://github.com/ethereum/web3.py) for the blockchain integration diff --git a/app/logs/server.log b/app/logs/server.log deleted file mode 100755 index e69de29b..00000000 diff --git a/nebula/controller.py b/nebula/controller.py index 45256ff5..eb0356b4 100755 --- a/nebula/controller.py +++ b/nebula/controller.py @@ -525,7 +525,7 @@ def run_frontend(self): - NEBULA_PRODUCTION={production} - NEBULA_GPU_AVAILABLE={gpu_available} - NEBULA_ADVANCED_ANALYTICS={advanced_analytics} - - SERVER_LOG=/nebula/app/logs/server.log + - NEBULA_SERVER_LOG=/nebula/app/logs/server.log - NEBULA_LOGS_DIR=/nebula/app/logs/ - NEBULA_CONFIG_DIR=/nebula/app/config/ - NEBULA_CERTS_DIR=/nebula/app/certs/ diff --git a/nebula/core/engine.py b/nebula/core/engine.py index 51ea21f6..05a049c1 100755 --- a/nebula/core/engine.py +++ b/nebula/core/engine.py @@ -17,7 +17,6 @@ logging.getLogger("urllib3").setLevel(logging.WARNING) logging.getLogger("fsspec").setLevel(logging.WARNING) logging.getLogger("matplotlib").setLevel(logging.ERROR) -logging.getLogger("aim").setLevel(logging.ERROR) logging.getLogger("plotly").setLevel(logging.ERROR) import pdb diff --git a/nebula/core/training/lightning.py b/nebula/core/training/lightning.py index 39e04785..a03ff083 100755 --- a/nebula/core/training/lightning.py +++ b/nebula/core/training/lightning.py @@ -16,15 +16,10 @@ from lightning.pytorch.loggers import CSVLogger from torch.nn import functional as F +from nebula.config.config import TRAINING_LOGGER from nebula.core.utils.deterministic import enable_deterministic from nebula.core.utils.nebulalogger_tensorboard import NebulaTensorBoardLogger -try: - from nebula.core.utils.nebulalogger import NebulaLogger -except: - pass -from nebula.config.config import TRAINING_LOGGER - logging_training = logging.getLogger(TRAINING_LOGGER) @@ -170,23 +165,6 @@ def create_logger(self): ) # Restore logger configuration nebulalogger.set_logger_config(logger_config) - elif self.config.participant["tracking_args"]["local_tracking"] == "advanced": - nebulalogger = NebulaLogger( - config=self.config, - engine=self, - scenario_start_time=self.config.participant["scenario_args"]["start_time"], - repo=f"{self.config.participant['tracking_args']['log_dir']}", - experiment=self.experiment_name, - run_name=f"participant_{self.idx}", - train_metric_prefix="train_", - test_metric_prefix="test_", - val_metric_prefix="val_", - log_system_params=False, - ) - # nebulalogger_aim = NebulaLogger(config=self.config, engine=self, scenario_start_time=self.config.participant["scenario_args"]["start_time"], repo=f"aim://nebula-frontend:8085", - # experiment=self.experiment_name, run_name=f"participant_{self.idx}", - # train_metric_prefix='train_', 
test_metric_prefix='test_', val_metric_prefix='val_', log_system_params=False) - self.config.participant["tracking_args"]["run_hash"] = nebulalogger.experiment.hash else: nebulalogger = None diff --git a/nebula/core/utils/nebulalogger.py b/nebula/core/utils/nebulalogger.py deleted file mode 100755 index fe05716f..00000000 --- a/nebula/core/utils/nebulalogger.py +++ /dev/null @@ -1,44 +0,0 @@ -import logging -from datetime import datetime -from typing import TYPE_CHECKING - -from aim import Image -from aim.pytorch_lightning import AimLogger - -if TYPE_CHECKING: - from nebula.core.engine import Engine - - -class NebulaLogger(AimLogger): - def __init__(self, config, engine: "Engine", scenario_start_time, *args, **kwargs): - self.config = config - self.engine = engine - self.scenario_start_time = scenario_start_time - self.local_step = 0 - self.global_step = 0 - super().__init__(*args, **kwargs) - - def finalize(self, status: str = "") -> None: - super().finalize(status) - logging.info(f"Finalizing logger: {status}") - - def get_step(self): - return int((datetime.now() - datetime.strptime(self.scenario_start_time, "%d/%m/%Y %H:%M:%S")).total_seconds()) - - def log_data(self, data, step=None): - time_start = datetime.now() - try: - logging.debug(f"Logging data: {data}") - super().log_metrics(data) - except Exception as e: - logging.exception(f"Error logging statistics data [{data}]: {e}") - logging.debug(f"Time taken to log data: {datetime.now() - time_start}") - - def log_figure(self, figure, step=None, name=None): - time_start = datetime.now() - try: - logging.debug(f"Logging figure: {name}") - self.experiment.track(Image(figure), name=name) - except Exception as e: - logging.exception(f"Error logging figure: {e}") - logging.debug(f"Time taken to log figure: {datetime.now() - time_start}") diff --git a/nebula/frontend/Dockerfile b/nebula/frontend/Dockerfile index fe4d3ae3..f064d119 100755 --- a/nebula/frontend/Dockerfile +++ b/nebula/frontend/Dockerfile @@ -20,12 +20,6 @@ RUN apt-get install -y curl net-tools iproute2 iputils-ping RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 2 RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1 -# Install pip -RUN curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py -RUN python3.11 get-pip.py - -RUN python3.11 -m pip install --upgrade pip setuptools virtualenv - RUN apt-get install -y nginx # Install gcc and git @@ -44,15 +38,21 @@ RUN apt-get update RUN apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin -RUN curl -sSL https://install.python-poetry.org | python3 - +ADD https://astral.sh/uv/install.sh /uv-installer.sh -ENV PATH="${PATH}:/root/.local/bin" +RUN sh /uv-installer.sh && rm /uv-installer.sh -ENV POETRY_VIRTUALENVS_CREATE=false +ENV PATH="/root/.local/bin/:$PATH" COPY pyproject.toml . -RUN poetry install --only frontend --no-root +RUN uv python install 3.11.7 + +RUN uv python pin 3.11.7 + +RUN uv sync --group frontend + +ENV PATH="/.venv/bin:$PATH" COPY /nebula/frontend/start_services.sh . 
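Note: both Dockerfiles above share the same uv bootstrap pattern: the installer drops the uv binary under /root/.local/bin, `uv sync --group <name>` resolves the pinned interpreter and materializes a project-local virtual environment in .venv, and prepending that environment's bin directory to PATH stands in for an explicit activation step. A rough local equivalent of the container steps (a sketch only; the group name and paths mirror this diff rather than a canonical recipe):

    curl -LsSf https://astral.sh/uv/install.sh | sh    # same installer the images use
    export PATH="$HOME/.local/bin:$PATH"               # where the installer places uv
    uv python install 3.11.7 && uv python pin 3.11.7   # pin writes .python-version
    uv sync --group core                               # creates ./.venv from pyproject.toml
    export PATH="$PWD/.venv/bin:$PATH"                 # prefer the synced interpreter
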
diff --git a/nebula/frontend/app.py b/nebula/frontend/app.py index 0b35b441..4d64dbb2 100755 --- a/nebula/frontend/app.py +++ b/nebula/frontend/app.py @@ -15,7 +15,43 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__))) sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..")) -logging.basicConfig(level=logging.INFO) + +class Settings: + port: int = os.environ.get("NEBULA_FRONTEND_PORT", 6060) + production: bool = os.environ.get("NEBULA_PRODUCTION", "False") == "True" + gpu_available: bool = os.environ.get("NEBULA_GPU_AVAILABLE", "False") == "True" + advanced_analytics: bool = os.environ.get("NEBULA_ADVANCED_ANALYTICS", "False") == "True" + host_platform: str = os.environ.get("NEBULA_HOST_PLATFORM", "unix") + log_dir: str = os.environ.get("NEBULA_LOGS_DIR") + config_dir: str = os.environ.get("NEBULA_CONFIG_DIR") + cert_dir: str = os.environ.get("NEBULA_CERTS_DIR") + root_host_path: str = os.environ.get("NEBULA_ROOT_HOST") + config_frontend_dir: str = os.environ.get("FEDELLAR_CONFIG_FRONTEND_DIR", "config") + statistics_port: int = os.environ.get("NEBULA_STATISTICS_PORT", 8080) + secret_key: str = os.environ.get("SECRET_KEY", os.urandom(24).hex()) + PERMANENT_SESSION_LIFETIME: datetime.timedelta = datetime.timedelta(minutes=60) + templates_dir: str = "templates" + server_log: str = os.environ.get("NEBULA_SERVER_LOG", "/nebula/app/logs/server.log") + + +settings = Settings() + +logging.basicConfig( + level=logging.INFO, + format="[%(asctime)s] [%(levelname)s] %(message)s", + handlers=[ + logging.StreamHandler(), + logging.FileHandler(settings.server_log, mode="w"), + ], +) + +uvicorn_loggers = ["uvicorn", "uvicorn.error", "uvicorn.access"] +for logger_name in uvicorn_loggers: + logger = logging.getLogger(logger_name) + logger.propagate = False # Prevent duplicate logs + handler = logging.FileHandler(settings.server_log, mode="a") + handler.setFormatter(logging.Formatter("[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s")) + logger.addHandler(handler) from ansi2html import Ansi2HTMLConverter @@ -51,7 +87,6 @@ delete_user_from_db, get_all_scenarios_and_check_completed, get_notes, - get_run_hashes_scenario, get_running_scenario, get_scenario_by_name, get_user_info, @@ -72,28 +107,9 @@ ) from nebula.frontend.utils import Utils - -class Settings: - port: int = os.environ.get("NEBULA_FRONTEND_PORT", 6060) - production: bool = os.environ.get("NEBULA_PRODUCTION", "False") == "True" - gpu_available: bool = os.environ.get("NEBULA_GPU_AVAILABLE", "False") == "True" - advanced_analytics: bool = os.environ.get("NEBULA_ADVANCED_ANALYTICS", "False") == "True" - host_platform: str = os.environ.get("NEBULA_HOST_PLATFORM", "unix") - log_dir: str = os.environ.get("NEBULA_LOGS_DIR") - config_dir: str = os.environ.get("NEBULA_CONFIG_DIR") - cert_dir: str = os.environ.get("NEBULA_CERTS_DIR") - root_host_path: str = os.environ.get("NEBULA_ROOT_HOST") - config_frontend_dir: str = os.environ.get("FEDELLAR_CONFIG_FRONTEND_DIR", "config") - statistics_port: int = os.environ.get("NEBULA_STATISTICS_PORT", 8080) - secret_key: str = os.environ.get("SECRET_KEY", os.urandom(24).hex()) - PERMANENT_SESSION_LIFETIME: datetime.timedelta = datetime.timedelta(minutes=60) - templates_dir: str = "templates" - - -settings = Settings() +logging.info(f"🚀 Starting Nebula Frontend on port {settings.port}") logging.info(f"NEBULA_PRODUCTION: {settings.production}") -logging.info(f"NEBULA_ADVANCED_ANALYTICS: {settings.advanced_analytics}") app = FastAPI() app.add_middleware(SessionMiddleware, 
secret_key=settings.secret_key) @@ -783,24 +799,7 @@ def remove_scenario(scenario_name=None): from nebula.scenarios import ScenarioManagement if settings.advanced_analytics: - from aim.sdk.repo import Repo - - # NEBULALOGGER START - try: - repo = Repo.from_path(f"{settings.log_dir}") - list_tuples_participant_hash = get_run_hashes_scenario(scenario_name) - hashes = [tuple[1] for tuple in list_tuples_participant_hash] - logging.info(f"Removing statistics from {scenario_name}: {hashes}") - success, remaining_runs = repo.delete_runs(hashes) - if success: - logging.info(f"Successfully deleted {len(hashes)} runs.") - else: - logging.info("Something went wrong while deleting runs.") - logging.info(f"Remaining runs: {remaining_runs}") - except Exception as e: - logging.exception(f"Error removing statistics from {scenario_name}: {e}") - pass - # NEBULALOGGER END + logging.info("Advanced analytics enabled") # Remove registered nodes and conditions nodes_registration.pop(scenario_name, None) remove_nodes_by_scenario_name(scenario_name) @@ -857,133 +856,11 @@ async def nebula_remove_scenario(scenario_name: str, request: Request, session: if settings.advanced_analytics: logging.info("Advanced analytics enabled") - - # NEBULALOGGER START - def get_tracking_hash_scenario(scenario_name): - import requests - - url = f"http://127.0.0.1:{settings.statistics_port}/nebula/statistics/api/experiments" - # Get JSON data from the URL - response = requests.get(url) - if response.status_code == 200: - experiments = response.json() - for experiment in experiments: - if experiment["name"] == scenario_name: - return experiment["id"] - - return None - - @app.get("/nebula/dashboard/statistics/", response_class=HTMLResponse) - @app.get("/nebula/dashboard/{scenario_name}/statistics/", response_class=HTMLResponse) - async def nebula_dashboard_statistics(request: Request, scenario_name: str = None): - statistics_url = "/nebula/statistics/" - if scenario_name is not None: - experiment_hash = get_tracking_hash_scenario(scenario_name=scenario_name) - statistics_url += f"experiments/{experiment_hash}/runs" - - return templates.TemplateResponse("statistics.html", {"request": request, "statistics_url": statistics_url}) - - @app.get( - "/nebula/dashboard/{scenario_name}/node/{hash}/metrics", - response_class=HTMLResponse, - ) - async def nebula_dashboard_node_metrics(request: Request, scenario_name: str, hash: str): - statistics_url = f"/nebula/statistics/runs/{hash}/metrics" - return templates.TemplateResponse("statistics.html", {"request": request, "statistics_url": statistics_url}) - - @app.api_route("/nebula/statistics/", methods=["GET", "POST"]) - @app.api_route("/nebula/statistics/{path:path}", methods=["GET", "POST"]) - async def statistics_proxy(request: Request, path: str = None, session: dict = Depends(get_session)): - if "user" in session: - query_string = urlencode(request.query_params) - - url = f"http://127.0.0.1:{settings.statistics_port}/nebula/statistics" - url = f"{url}{('/' + path) if path else '/'}" + (f"?{query_string}" if query_string else "") - - headers = {key: value for key, value in request.headers.items() if key.lower() != "host"} - - response = requests.request( - method=request.method, - url=url, - headers=headers, - data=await request.body(), - cookies=request.cookies, - allow_redirects=False, - ) - - excluded_headers = [ - "content-encoding", - "content-length", - "transfer-encoding", - "connection", - ] - filtered_headers = [ - (name, value) for name, value in response.raw.headers.items() if 
name.lower() not in excluded_headers
-            ]
-
-            return Response(
-                content=response.content,
-                status_code=response.status_code,
-                headers=dict(filtered_headers),
-            )
-        else:
-            raise HTTPException(status_code=401)
-
-    @app.get("/nebula/dashboard/{scenario_name}/download/metrics")
-    async def nebula_dashboard_download_metrics(
-        scenario_name: str, request: Request, session: dict = Depends(get_session)
-    ):
-        from aim.sdk.repo import Repo
-
-        if "user" in session:
-            # Retrieve the scenario's metrics
-            os.makedirs(
-                Utils.check_path(settings.log_dir, os.path.join(scenario_name, "metrics")),
-                exist_ok=True,
-            )
-
-            aim_repo = Repo.from_path("/nebula/nebula/app/logs")
-            query = f"run.experiment == '{scenario_name}'"
-            df = aim_repo.query_metrics(query).dataframe()
-
-            hash_to_participant = {hash: participant for participant, hash in get_run_hashes_scenario(scenario_name)}
-            df["participant"] = df["run.hash"].map(hash_to_participant)
-            df.drop(
-                columns=["run", "run.hash", "metric.context", "epoch"],
-                axis=1,
-                inplace=True,
-            )
-            cols = df.columns.tolist()
-            cols.remove("participant")
-            cols.remove("metric.name")
-            df = df.reindex(columns=["participant", "metric.name"] + cols)
-
-            for name, group in df.groupby("participant"):
-                group.to_csv(
-                    os.path.join(settings.log_dir, scenario_name, "metrics", f"{name}.csv"),
-                    index=True,
-                )
-
-            # Create a zip file with the metrics, send it to the user, and delete it
-            memory_file = io.BytesIO()
-            with zipfile.ZipFile(memory_file, "w", zipfile.ZIP_DEFLATED) as zipf:
-                zipdir(os.path.join(settings.log_dir, scenario_name, "metrics"), zipf)
-
-            memory_file.seek(0)
-
-            return StreamingResponse(
-                memory_file,
-                media_type="application/zip",
-                headers={"Content-Disposition": f"attachment; filename={scenario_name}_metrics.zip"},
-            )
-        else:
-            raise HTTPException(status_code=401)
-
-    # NEBULALOGGER END
     else:
         logging.info("Advanced analytics disabled")
 
     # TENSORBOARD START
+    @app.get("/nebula/dashboard/statistics/", response_class=HTMLResponse)
     @app.get("/nebula/dashboard/{scenario_name}/statistics/", response_class=HTMLResponse)
     async def nebula_dashboard_statistics(request: Request, scenario_name: str = None):
diff --git a/nebula/frontend/start_services.sh b/nebula/frontend/start_services.sh
index a50a016b..595477cc 100755
--- a/nebula/frontend/start_services.sh
+++ b/nebula/frontend/start_services.sh
@@ -32,11 +32,7 @@ if [ "$NEBULA_ADVANCED_ANALYTICS" = "False" ]; then
     echo "Starting Tensorboard analytics"
     tensorboard --host 0.0.0.0 --port 8080 --logdir $NEBULA_LOGS_DIR --window_title "NEBULA Statistics" --reload_interval 30 --max_reload_threads 10 --reload_multifile true &
 else
-    echo "Starting Aim analytics"
-    # --dev flag is used to enable development mode
-    # aim server --repo $NEBULA_LOGS_DIR --port 8085 &
-    aim init --repo $NEBULA_LOGS_DIR
-    aim up --repo $NEBULA_LOGS_DIR --port 8080 --base-path /nebula/statistics &
+    echo "Advanced analytics are enabled"
 fi
 
 tail -f /dev/null
diff --git a/pyproject.toml b/pyproject.toml
index 9004654d..2f0ac0b7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,178 +1,177 @@
-[tool.poetry]
+[project]
 name = "nebula-dfl"
 version = "0.0.1"
 description = "NEBULA: A Platform for Decentralized Federated Learning"
-authors = ["Enrique Tomás Martínez Beltrán <enriquetomas@um.es>"]
-repository = "https://github.com/CyberDataLab/nebula"
-documentation = "https://nebula.enriquetomasmb.com/"
+authors = [{name = "Enrique Tomás Martínez Beltrán", email = "enriquetomas@um.es"}]
+maintainers = [{name = "Enrique Tomás Martínez Beltrán", email = 
"enriquetomas@um.es"}] readme = "README.md" -license = "GPL-3.0" -packages = [ - {include = "nebula"} +keywords = [ + "nebula", "federated learning", "decentralized", "privacy", "security", "blockchain", "docker", "gpu", "nvidia", + "cuda", "pytorch", "communication", "networking", "distributed", "machine learning", "deep learning", "ai", + "artificial intelligence", "data science", "data privacy", "data security", "data protection", "data sharing" +] +license = {file = "LICENSE"} +requires-python = "<4.0,>=3.10" +dependencies = [ + "cryptography==43.0.1", + "docker==7.1.0", + "matplotlib==3.9.2", + "networkx==3.3", + "nvidia-ml-py==12.560.30", + "psutil==6.0.0", + "pycryptodome==3.20.0", + "python-dotenv==1.0.1", + "requests==2.32.3", + "watchdog==5.0.2", + "deptry<1.0.0,>=0.20.0", + "isort<6.0.0,>=5.13.2", + "mypy<2.0.0,>=1.13.0", + "pre-commit<5.0.0,>=4.0.1", + "ruff<1.0.0,>=0.7.2", ] -[tool.poetry.dependencies] -python = ">=3.10,<4.0" -python-dotenv = "1.0.1" -requests = "2.32.3" -docker = "7.1.0" -web3 = "6.20.0" -matplotlib = "3.9.2" -networkx = "3.3" -psutil = "6.0.0" -pycryptodome = "3.20.0" -cryptography = "43.0.1" -nvidia-ml-py = "12.560.30" -watchdog = "5.0.2" - -[tool.poetry.group.dev.dependencies] -deptry = "^0.20.0" -mypy = "^1.13.0" -pre-commit = "^4.0.1" -ruff = "^0.7.2" -isort = "^5.13.2" - -[tool.poetry.group.docs.dependencies] -mkdocs = "^1.6.1" -mkdocs-material = "^9.5.43" -mkdocs-material-extensions = "^1.3.1" -mkdocs-autorefs = "^1.2.0" -mkdocstrings = {extras = ["python"], version = "^0.26.2"} -mkdocs-git-revision-date-localized-plugin = "^1.3.0" -mkdocs-gen-files = "^0.5.0" -mkdocs-literate-nav = "^0.6.1" -mkdocs-section-index = "^0.3.9" - +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Topic :: Software Development :: Libraries :: Python Modules", +] -[tool.poetry.group.core.dependencies] -protobuf = "4.25.3" -qtconsole = "5.6.0" -aiohttp = "3.10.5" -async-timeout = "4.0.3" -tcconfig = "0.29.1" -geopy = "2.4.1" -numpy = "2.1.1" -torch = "2.4.1" -torchvision = "0.19.1" -torchtext = "^0.18.0" -torchdata = "0.8.0" -torchmetrics = "1.4.1" -lightning = "2.4.0" -transformers = "4.44.2" -plotly = "5.24.0" -tensorboard = "2.17.1" -tensorboardx = "2.6.2.2" -pytest = "8.3.3" -python-dotenv = "1.0.1" -pyyaml = "6.0.2" -setuptools = "74.1.2" -matplotlib = "3.9.2" -networkx = "3.3" -requests = "2.32.3" -pillow = "10.4.0" -ansi2html = "1.9.2" -pycryptodome = "3.20.0" -cryptography = "43.0.1" -psutil = "6.0.0" -rich = "13.8.1" -seaborn = "0.13.2" -scikit-learn = "1.5.1" -scikit-image = "0.24.0" -datasets = "2.21.0" -timm = "1.0.9" -nvidia-ml-py = "12.560.30" -web3 = "6.20.0" -tabulate = "0.9.0" -retry = "0.9.2" -docker = "7.1.0" -openpyxl = "3.1.5" -lz4 = "4.3.3" +[project.urls] +Homepage = "https://github.com/CyberDataLab/nebula" +Documentation = "https://nebula.enriquetomasmb.com/" +Repository = "https://github.com/CyberDataLab/nebula" -[tool.poetry.group.frontend.dependencies] -setuptools = "74.1.2" -wheel = "0.44.0" -protobuf = "4.25.3" -tensorboard = "2.17.1" -tensorboardx = "2.6.2.2" -pandas = "2.2.2" -fastapi = {version = "0.114.0", extras = ["all"]} -uvicorn = "0.30.6" -jinja2 = "3.1.4" 
-pytest = "8.3.3" -matplotlib = "3.9.2" -plotly = "5.24.0" -python-dotenv = "1.0.1" -networkx = "3.3" -requests = "2.32.3" -ansi2html = "1.9.2" -gunicorn = "23.0.0" -geopy = "2.4.1" -cryptography = "43.0.1" -pyopenssl = "24.2.1" -pycryptodome = "3.20.0" -pyinstrument = "4.7.3" -cffi = "1.17.1" -web3 = "6.20.0" -aiosqlite = "0.20.0" -docker = "7.1.0" -argon2-cffi = "23.1.0" +[dependency-groups] +docs = [ + "mkdocs<2.0.0,>=1.6.1", + "mkdocs-autorefs<2.0.0,>=1.2.0", + "mkdocs-gen-files<1.0.0,>=0.5.0", + "mkdocs-git-revision-date-localized-plugin<2.0.0,>=1.3.0", + "mkdocs-literate-nav<1.0.0,>=0.6.1", + "mkdocs-material<10.0.0,>=9.5.43", + "mkdocs-material-extensions<2.0.0,>=1.3.1", + "mkdocs-section-index<1.0.0,>=0.3.9", + "mkdocstrings[python]<1.0.0,>=0.26.2", +] +core = [ + "aiohttp==3.10.5", + "ansi2html==1.9.2", + "async-timeout==4.0.3", + "cryptography==43.0.1", + "datasets==2.21.0", + "docker==7.1.0", + "geopy==2.4.1", + "lightning==2.4.0", + "lz4==4.3.3", + "matplotlib==3.9.2", + "networkx==3.3", + "numpy==2.1.1", + "plotly==5.24.0", + "protobuf==4.25.3", + "psutil==6.0.0", + "pycryptodome==3.20.0", + "pyyaml==6.0.2", + "python-dotenv==1.0.1", + "requests==2.32.3", + "retry==0.9.2", + "rich==13.8.1", + "scikit-image==0.24.0", + "scikit-learn==1.5.1", + "seaborn==0.13.2", + "setuptools==74.1.2", + "tabulate==0.9.0", + "tcconfig==0.29.1", + "tensorboard==2.17.1", + "tensorboardx==2.6.2.2", + "timm==1.0.9", + "torch==2.4.1", + "torchmetrics==1.4.1", + "torchvision==0.19.1", + "transformers==4.44.2", + "web3==6.20.0", +] +frontend = [ + "aiosqlite==0.20.0", + "ansi2html==1.9.2", + "argon2-cffi==23.1.0", + "cffi==1.17.1", + "cryptography==43.0.1", + "docker==7.1.0", + "fastapi[all]==0.114.0", + "geopy==2.4.1", + "gunicorn==23.0.0", + "jinja2==3.1.4", + "lz4==4.3.3", + "matplotlib==3.9.2", + "networkx==3.3", + "openpyxl==3.1.5", + "pandas==2.2.2", + "plotly==5.24.0", + "protobuf==4.25.3", + "psutil==6.0.0", + "pycryptodome==3.20.0", + "pyinstrument==4.7.3", + "pyopenssl==24.2.1", + "python-dotenv==1.0.1", + "requests==2.32.3", + "setuptools==74.1.2", + "tensorboard==2.17.1", + "tensorboardx==2.6.2.2", + "uvicorn==0.30.6", + "web3==6.20.0", + "wheel==0.44.0", +] [build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" +requires = ["setuptools >= 61.0"] +build-backend = "setuptools.build_meta" [tool.mypy] files = ["nebula"] -disallow_untyped_defs = "True" -disallow_any_unimported = "True" -no_implicit_optional = "True" -check_untyped_defs = "True" -warn_return_any = "True" -warn_unused_ignores = "True" -show_error_codes = "True" +disallow_untyped_defs = true +disallow_any_unimported = true +no_implicit_optional = true +check_untyped_defs = true +warn_return_any = true +warn_unused_ignores = true +show_error_codes = true [tool.ruff] target-version = "py311" line-length = 120 fix = true lint.select = [ - # flake8-2020 - "YTT", - # flake8-bandit - "S", - # flake8-bugbear - "B", - # flake8-builtins - "A", - # flake8-comprehensions - "C4", - # flake8-debugger - "T10", - # flake8-simplify - "SIM", - # isort - "I", - # mccabe - "C90", - # pycodestyle - "E", "W", - # pyflakes - "F", - # pygrep-hooks - "PGH", - # pyupgrade - "UP", - # ruff - "RUF", - # tryceratops - "TRY", + "YTT", # flake8-2020 + "S", # flake8-bandit + "B", # flake8-bugbear + "A", # flake8-builtins + "C4", # flake8-comprehensions + "T10", # flake8-debugger + "SIM", # flake8-simplify + "I", # isort + "C90", # mccabe + "E", "W", # pycodestyle + "F", # pyflakes + "PGH", # pygrep-hooks + "UP", # 
pyupgrade
+    "RUF",  # ruff
+    "TRY",  # tryceratops
 ]
 lint.ignore = [
-    # LineTooLong
-    "E501",
-    # DoNotAssignLambda
-    "E731",
+    "E501",  # LineTooLong
+    "E731",  # DoNotAssignLambda
 ]
 
 [tool.ruff.format]
 preview = true
+
+[tool.setuptools.packages.find]
+include = ["nebula", "app"]
+where = ["."]
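For reference, the Poetry-to-uv command mapping applied throughout this change (a sketch; the uv flags assume a recent uv release and are worth checking against `uv --help` before scripting around them):

    poetry install --with core      # becomes: uv sync --group core
    poetry lock                     # becomes: uv lock
    poetry update                   # becomes: uv lock --upgrade && uv sync
    poetry run pre-commit run -a    # becomes: uv run pre-commit run -a
    poetry build                    # becomes: uv build
    poetry publish                  # becomes: uv publish --token $PYPI_TOKEN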