From 90cd35381a5f9c3a2c69c28628da4adc2101a4ca Mon Sep 17 00:00:00 2001
From: IlyasMoutawwakil
Date: Fri, 10 May 2024 09:52:49 +0200
Subject: [PATCH] log

---
 py_txi/inference_server.py          | 1 -
 py_txi/text_generation_inference.py | 6 +++---
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/py_txi/inference_server.py b/py_txi/inference_server.py
index c954654..09c42a4 100644
--- a/py_txi/inference_server.py
+++ b/py_txi/inference_server.py
@@ -194,7 +194,6 @@ def close(self) -> None:
             del self.semaphore

         if hasattr(self, "client"):
-            self.client
             del self.client

     def __del__(self) -> None:
diff --git a/py_txi/text_generation_inference.py b/py_txi/text_generation_inference.py
index c05da6a..d34d7ac 100644
--- a/py_txi/text_generation_inference.py
+++ b/py_txi/text_generation_inference.py
@@ -32,13 +32,13 @@ def __post_init__(self) -> None:

         if self.image is None:
             if is_nvidia_system() and self.gpus is not None:
-                LOGGER.info("\t+ Using the latest NVIDIA GPU image for Text-Generation-Inference")
+                LOGGER.info("\t+ Using latest NVIDIA GPU image for Text-Generation-Inference")
                 self.image = "ghcr.io/huggingface/text-generation-inference:latest"
             elif is_rocm_system() and self.devices is not None:
-                LOGGER.info("\t+ Using the latest ROCm AMD GPU image for Text-Generation-Inference")
+                LOGGER.info("\t+ Using latest ROCm AMD GPU image for Text-Generation-Inference")
                 self.image = "ghcr.io/huggingface/text-generation-inference:latest-rocm"
             else:
-                LOGGER.info("\t+ Using the version 1.4 since it's the last image supporting CPU")
+                LOGGER.info("\t+ Using version 1.4 image for Text-Generation-Inference (last image with CPU support)")
                 self.image = "ghcr.io/huggingface/text-generation-inference:1.4"

         if is_rocm_system() and "rocm" not in self.image: