From a290aa4a6ee01f18cc3ed6b2c4cdcca1186910ef Mon Sep 17 00:00:00 2001 From: DocGarbanzo Date: Thu, 14 Mar 2024 21:57:29 +0000 Subject: [PATCH 01/10] Updating to py 3.11 and tf 2.15 / 2.13 --- donkeycar/__init__.py | 6 +++--- donkeycar/parts/pytorch/torch_train.py | 2 +- donkeycar/tests/test_torch.py | 2 +- setup.cfg | 14 +++++++------- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/donkeycar/__init__.py b/donkeycar/__init__.py index 69d83cf52..aae7c9c76 100644 --- a/donkeycar/__init__.py +++ b/donkeycar/__init__.py @@ -3,7 +3,7 @@ from pyfiglet import Figlet import logging -__version__ = '5.1.dev0' +__version__ = '5.1.dev1' logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO').upper()) @@ -13,8 +13,8 @@ print(f.renderText('Donkey Car')) print(f'using donkey v{__version__} ...') -if sys.version_info.major < 3 or sys.version_info.minor < 8: - msg = f'Donkey Requires Python 3.8 or greater. You are using {sys.version}' +if sys.version_info.major < 3 or sys.version_info.minor < 11: + msg = f'Donkey Requires Python 3.11 or greater. You are using {sys.version}' raise ValueError(msg) # The default recursion limits in CPython are too small. diff --git a/donkeycar/parts/pytorch/torch_train.py b/donkeycar/parts/pytorch/torch_train.py index 844b68ade..6a39aa8ba 100644 --- a/donkeycar/parts/pytorch/torch_train.py +++ b/donkeycar/parts/pytorch/torch_train.py @@ -46,7 +46,7 @@ def train(cfg, tub_paths, model_output_path, model_type, checkpoint_path=None): if cfg.PRINT_MODEL_SUMMARY: summarize(model) - trainer = pl.Trainer(gpus=gpus, logger=logger, max_epochs=cfg.MAX_EPOCHS, + trainer = pl.Trainer(logger=logger, max_epochs=cfg.MAX_EPOCHS, default_root_dir=output_dir) data_module = TorchTubDataModule(cfg, tub_paths) diff --git a/donkeycar/tests/test_torch.py b/donkeycar/tests/test_torch.py index 3137e50dd..4c884a7db 100644 --- a/donkeycar/tests/test_torch.py +++ b/donkeycar/tests/test_torch.py @@ -109,7 +109,7 @@ def test_training_pipeline(config: Config, model_type: str, car_dir: str) \ gpus = 0 # Overfit the data - trainer = pl.Trainer(gpus=gpus, overfit_batches=2, max_epochs=30) + trainer = pl.Trainer(overfit_batches=2, max_epochs=30) trainer.fit(model, data_module) final_loss = model.loss_history[-1] assert final_loss < 0.35, \ diff --git a/setup.cfg b/setup.cfg index bbfd61ad0..3948c14cf 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,14 +17,13 @@ classifiers = # Indicate who your project is intended for Intended Audience :: Developers Topic :: Scientific/Engineering :: Artificial Intelligence - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.11 [options] packages = find_namespace: zip_safe = True include_package_data = True -python_requires = >=3.8,<4 +python_requires = >=3.11.0,<3.12 install_requires = numpy pillow @@ -51,7 +50,7 @@ pi = adafruit-circuitpython-ssd1306 adafruit-circuitpython-rplidar RPi.GPIO - tensorflow-aarch64==2.9.3 + tensorflow-aarch64==2.15 opencv-contrib-python nano = @@ -66,7 +65,7 @@ nano = pandas==2.0 pc = - tensorflow==2.9 + tensorflow==2.13 matplotlib kivy==2.1 pandas @@ -74,7 +73,7 @@ pc = albumentations macos = - tensorflow-macos==2.9 + tensorflow-macos==2.13 matplotlib kivy==2.1 pandas @@ -88,7 +87,8 @@ dev = mypy torch = - pytorch + torch + pytorch-lightning torchvision==0.12 torchaudio fastai From 6b96b770b5fff0d9ceb95b95063554417db74fe5 Mon Sep 17 00:00:00 2001 From: DocGarbanzo Date: Thu, 14 Mar 2024 22:35:15 +0000 Subject: [PATCH 02/10] Required changes for updating to py 
3.11 and tf 2.15 / 2.13 --- donkeycar/parts/pytorch/torch_train.py | 4 ++-- donkeycar/tests/test_telemetry.py | 5 ++++- donkeycar/tests/test_torch.py | 2 +- setup.cfg | 9 ++++----- 4 files changed, 11 insertions(+), 9 deletions(-) diff --git a/donkeycar/parts/pytorch/torch_train.py b/donkeycar/parts/pytorch/torch_train.py index 6a39aa8ba..7413b0009 100644 --- a/donkeycar/parts/pytorch/torch_train.py +++ b/donkeycar/parts/pytorch/torch_train.py @@ -46,8 +46,8 @@ def train(cfg, tub_paths, model_output_path, model_type, checkpoint_path=None): if cfg.PRINT_MODEL_SUMMARY: summarize(model) - trainer = pl.Trainer(logger=logger, max_epochs=cfg.MAX_EPOCHS, - default_root_dir=output_dir) + trainer = pl.Trainer(accelerator='cpu', logger=logger, + max_epochs=cfg.MAX_EPOCHS, default_root_dir=output_dir) data_module = TorchTubDataModule(cfg, tub_paths) trainer.fit(model, data_module) diff --git a/donkeycar/tests/test_telemetry.py b/donkeycar/tests/test_telemetry.py index 872cf8620..3159d7fe4 100644 --- a/donkeycar/tests/test_telemetry.py +++ b/donkeycar/tests/test_telemetry.py @@ -3,6 +3,8 @@ import time from unittest import mock from paho.mqtt.client import Client +from paho.mqtt.enums import CallbackAPIVersion + import donkeycar.templates.cfg_complete as cfg from donkeycar.parts.telemetry import MqttTelemetry import pytest @@ -16,7 +18,8 @@ def test_mqtt_telemetry(): cfg.TELEMETRY_MQTT_JSON_ENABLE = True # Create receiver - sub = Client(clean_session=True) + sub = Client(callback_api_version=CallbackAPIVersion.API_VERSION2, + clean_session=True) # def on_message(client, userdata, message): # data = message.payload diff --git a/donkeycar/tests/test_torch.py b/donkeycar/tests/test_torch.py index 4c884a7db..f6bdd2193 100644 --- a/donkeycar/tests/test_torch.py +++ b/donkeycar/tests/test_torch.py @@ -109,7 +109,7 @@ def test_training_pipeline(config: Config, model_type: str, car_dir: str) \ gpus = 0 # Overfit the data - trainer = pl.Trainer(overfit_batches=2, max_epochs=30) + trainer = pl.Trainer(accelerator='cpu', overfit_batches=2, max_epochs=30) trainer.fit(model, data_module) final_loss = model.loss_history[-1] assert final_loss < 0.35, \ diff --git a/setup.cfg b/setup.cfg index 3948c14cf..b6f24ae88 100644 --- a/setup.cfg +++ b/setup.cfg @@ -34,7 +34,6 @@ install_requires = paho-mqtt simple_pid progress - typing_extensions pyfiglet psutil pynmea2 @@ -60,14 +59,14 @@ nano = Jetson.GPIO numpy==1.23 matplotlib==3.7 - kivy==2.1 + kivy plotly pandas==2.0 pc = tensorflow==2.13 matplotlib - kivy==2.1 + kivy pandas plotly albumentations @@ -75,7 +74,7 @@ pc = macos = tensorflow-macos==2.13 matplotlib - kivy==2.1 + kivy pandas plotly albumentations @@ -89,7 +88,7 @@ dev = torch = torch pytorch-lightning - torchvision==0.12 + torchvision torchaudio fastai From 7a3ec4b5ec057100d768fe6730d8d750d1dde1ae Mon Sep 17 00:00:00 2001 From: DocGarbanzo Date: Thu, 21 Mar 2024 21:15:53 +0000 Subject: [PATCH 03/10] Fix python version 3.11 in CI --- .github/workflows/python-package-conda.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml index 6b0311720..fd03481ab 100644 --- a/.github/workflows/python-package-conda.yml +++ b/.github/workflows/python-package-conda.yml @@ -18,10 +18,10 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 - - name: Create python 3.9 conda env + - name: Create python 3.11 conda env uses: conda-incubator/setup-miniconda@v3 with: - python-version: 3.9 + python-version: 3.11 
mamba-version: "*" activate-environment: donkey auto-activate-base: false From 996cad3ba65f60ebd935e94a3697e5ab578a3fc4 Mon Sep 17 00:00:00 2001 From: DocGarbanzo Date: Mon, 25 Mar 2024 22:14:21 +0000 Subject: [PATCH 04/10] Removed deprecated padding in ui and used modern form of package versioning in setup. --- donkeycar/management/ui.kv | 2 +- setup.cfg | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/donkeycar/management/ui.kv b/donkeycar/management/ui.kv index 96d72e9a4..34eeadd32 100644 --- a/donkeycar/management/ui.kv +++ b/donkeycar/management/ui.kv @@ -104,7 +104,7 @@ valign: 'middle' font_size: 14 if platform == 'linux' else 28 size_hint_x: 0.8 - padding_x: 10 + padding: 10, 0 canvas.before: Color: rgba: 0.14, 0.15, 0.22, 1 diff --git a/setup.cfg b/setup.cfg index b6f24ae88..22aaea135 100644 --- a/setup.cfg +++ b/setup.cfg @@ -49,7 +49,7 @@ pi = adafruit-circuitpython-ssd1306 adafruit-circuitpython-rplidar RPi.GPIO - tensorflow-aarch64==2.15 + tensorflow-aarch64==2.15.* opencv-contrib-python nano = @@ -57,14 +57,14 @@ nano = adafruit-circuitpython-ssd1306 adafruit-circuitpython-rplidar Jetson.GPIO - numpy==1.23 - matplotlib==3.7 + numpy==1.23.* + matplotlib==3.7.* kivy plotly - pandas==2.0 + pandas==2.0.* pc = - tensorflow==2.13 + tensorflow==2.15.* matplotlib kivy pandas @@ -72,7 +72,7 @@ pc = albumentations macos = - tensorflow-macos==2.13 + tensorflow-macos==2.15.* matplotlib kivy pandas From 055cac53120ee633d78deaf3e6afa0cdaa151af1 Mon Sep 17 00:00:00 2001 From: DocGarbanzo Date: Tue, 26 Mar 2024 20:52:52 +0000 Subject: [PATCH 05/10] Fix telemetry test after paho upgrade --- donkeycar/parts/telemetry.py | 3 ++- donkeycar/tests/test_telemetry.py | 7 +------ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/donkeycar/parts/telemetry.py b/donkeycar/parts/telemetry.py index f6a32aaca..64d7c40ec 100644 --- a/donkeycar/parts/telemetry.py +++ b/donkeycar/parts/telemetry.py @@ -14,6 +14,7 @@ import numpy as np from logging import StreamHandler from paho.mqtt.client import Client as MQTTClient +from paho.mqtt.enums import CallbackAPIVersion logger = logging.getLogger() @@ -40,7 +41,7 @@ def __init__(self, cfg): self._mqtt_broker = os.environ.get('DONKEY_MQTT_BROKER', cfg.TELEMETRY_MQTT_BROKER_HOST) # 'iot.eclipse.org' self._topic = cfg.TELEMETRY_MQTT_TOPIC_TEMPLATE % self._donkey_name self._use_json_format = cfg.TELEMETRY_MQTT_JSON_ENABLE - self._mqtt_client = MQTTClient() + self._mqtt_client = MQTTClient(callback_api_version=CallbackAPIVersion.VERSION2) self._mqtt_client.connect(self._mqtt_broker, cfg.TELEMETRY_MQTT_BROKER_PORT) self._mqtt_client.loop_start() self._on = True diff --git a/donkeycar/tests/test_telemetry.py b/donkeycar/tests/test_telemetry.py index 3159d7fe4..02a8f6907 100644 --- a/donkeycar/tests/test_telemetry.py +++ b/donkeycar/tests/test_telemetry.py @@ -7,7 +7,6 @@ import donkeycar.templates.cfg_complete as cfg from donkeycar.parts.telemetry import MqttTelemetry -import pytest from random import randint @@ -18,13 +17,9 @@ def test_mqtt_telemetry(): cfg.TELEMETRY_MQTT_JSON_ENABLE = True # Create receiver - sub = Client(callback_api_version=CallbackAPIVersion.API_VERSION2, + sub = Client(callback_api_version=CallbackAPIVersion.VERSION2, clean_session=True) - # def on_message(client, userdata, message): - # data = message.payload - # print(message) - on_message_mock = mock.Mock() sub.on_message = on_message_mock sub.connect(cfg.TELEMETRY_MQTT_BROKER_HOST) From d1f6389ab718c9607e867cc909a3f02c8dc5e29c Mon Sep 17 00:00:00 2001 
From: DocGarbanzo
Date: Fri, 29 Mar 2024 18:58:19 +0000
Subject: [PATCH 06/10] Fix tensorrt interpreter for tf 2.15, which now
 returns a list instead of a dictionary.

---
 donkeycar/parts/interpreter.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/donkeycar/parts/interpreter.py b/donkeycar/parts/interpreter.py
index b4cf1bd5a..651ecef7c 100755
--- a/donkeycar/parts/interpreter.py
+++ b/donkeycar/parts/interpreter.py
@@ -347,12 +347,14 @@ def load(self, model_path: str) -> None:
     def predict_from_dict(self, input_dict):
         for k, v in input_dict.items():
             input_dict[k] = self.expand_and_convert(v)
-        out_dict = self.graph_func(**input_dict)
+        out_list = self.graph_func(**input_dict)
         # Squeeze here because we send a batch of size one, so pick first
         # element. To return the order of outputs as defined in the model we
         # need to iterate through the model's output shapes here
-        outputs = [out_dict[k].numpy().squeeze(axis=0) for k in
-                   self.output_keys]
+        # outputs = [out_dict[k].numpy().squeeze(axis=0) for k in
+        #            self.output_keys]
+        outputs = [k.numpy().squeeze(axis=0) for k in out_list]
+
         # don't return list if output is 1d
         return outputs if len(outputs) > 1 else outputs[0]

From 20851f8d13e3e70b7a1c4b8c4e956296bf5a671c Mon Sep 17 00:00:00 2001
From: DocGarbanzo
Date: Sun, 31 Mar 2024 16:10:18 +0100
Subject: [PATCH 07/10] Peg pytorch to version 2.1 to avoid incompatibilities
 with nvidia libraries and fix logging deprecation warnings caused by using
 the legacy warn() method instead of warning().

---
 donkeycar/management/kivy_ui.py |  2 +-
 donkeycar/parts/actuator.py     | 18 +++++++++---------
 donkeycar/parts/controller.py   |  6 +++---
 donkeycar/parts/kinematics.py   |  6 +++---
 donkeycar/parts/pins.py         |  4 ++--
 donkeycar/parts/serial_port.py  |  8 ++++----
 donkeycar/parts/tachometer.py   |  2 +-
 donkeycar/tests/test_keras.py   |  2 +-
 setup.cfg                       |  2 +-
 9 files changed, 25 insertions(+), 25 deletions(-)

diff --git a/donkeycar/management/kivy_ui.py b/donkeycar/management/kivy_ui.py
index 6842ccaf3..8a42c2245 100644
--- a/donkeycar/management/kivy_ui.py
+++ b/donkeycar/management/kivy_ui.py
@@ -148,7 +148,7 @@ def read_file(self):
             Logger.info(f'Donkeyrc: Donkey file {self.file_path} loaded.')
             return data
         else:
-            Logger.warn(f'Donkeyrc: Donkey file {self.file_path} does not '
+            Logger.warning(f'Donkeyrc: Donkey file {self.file_path} does not '
                         f'exist.')
             return {}

diff --git a/donkeycar/parts/actuator.py b/donkeycar/parts/actuator.py
index fdf761a74..848feebc3 100644
--- a/donkeycar/parts/actuator.py
+++ b/donkeycar/parts/actuator.py
@@ -18,7 +18,7 @@
 try:
     import RPi.GPIO as GPIO
 except ImportError as e:
-    logger.warn(f"RPi.GPIO was not imported. {e}")
+    logger.warning(f"RPi.GPIO was not imported. {e}")
     globals()["GPIO"] = None

 from donkeycar.parts.pins import OutputPin, PwmPin, PinState
@@ -800,10 +800,10 @@ def run(self, throttle:float) -> None:
             where 1 is full forward and -1 is full backwards.
""" if throttle is None: - logger.warn("TwoWheelSteeringThrottle throttle is None") + logger.warning("TwoWheelSteeringThrottle throttle is None") return if throttle > 1 or throttle < -1: - logger.warn( f"TwoWheelSteeringThrottle throttle is {throttle}, but it must be between 1(forward) and -1(reverse)") + logger.warning( f"TwoWheelSteeringThrottle throttle is {throttle}, but it must be between 1(forward) and -1(reverse)") throttle = clamp(throttle, -1, 1) self.speed = throttle @@ -843,16 +843,16 @@ def run(self, throttle:float, steering:float) -> Tuple[float, float]: where 1 is full forward and -1 is full backwards. """ if throttle is None: - logger.warn("TwoWheelSteeringThrottle throttle is None") + logger.warning("TwoWheelSteeringThrottle throttle is None") return if steering is None: - logger.warn("TwoWheelSteeringThrottle steering is None") + logger.warning("TwoWheelSteeringThrottle steering is None") return if throttle > 1 or throttle < -1: - logger.warn( f"TwoWheelSteeringThrottle throttle is {throttle}, but it must be between 1(forward) and -1(reverse)") + logger.warning( f"TwoWheelSteeringThrottle throttle is {throttle}, but it must be between 1(forward) and -1(reverse)") throttle = clamp(throttle, -1, 1) if steering > 1 or steering < -1: - logger.warn( f"TwoWheelSteeringThrottle steering is {steering}, but it must be between 1(right) and -1(left)") + logger.warning( f"TwoWheelSteeringThrottle steering is {steering}, but it must be between 1(right) and -1(left)") steering = clamp(steering, -1, 1) left_motor_speed = throttle @@ -920,10 +920,10 @@ def run(self, throttle:float) -> None: where 1 is full forward and -1 is full backwards. """ if throttle is None: - logger.warn("TwoWheelSteeringThrottle throttle is None") + logger.warning("TwoWheelSteeringThrottle throttle is None") return if throttle > 1 or throttle < -1: - logger.warn( f"TwoWheelSteeringThrottle throttle is {throttle}, but it must be between 1(forward) and -1(reverse)") + logger.warning( f"TwoWheelSteeringThrottle throttle is {throttle}, but it must be between 1(forward) and -1(reverse)") throttle = clamp(throttle, -1, 1) self.speed = throttle diff --git a/donkeycar/parts/controller.py b/donkeycar/parts/controller.py index 3c556525a..ab8ef1680 100644 --- a/donkeycar/parts/controller.py +++ b/donkeycar/parts/controller.py @@ -42,11 +42,11 @@ def init(self): except ModuleNotFoundError: self.num_axes = 0 self.num_buttons = 0 - logger.warn("no support for fnctl module. joystick not enabled.") + logger.warning("no support for fnctl module. 
joystick not enabled.") return False if not os.path.exists(self.dev_fn): - logger.warn(f"{self.dev_fn} is missing") + logger.warning(f"{self.dev_fn} is missing") return False ''' @@ -965,7 +965,7 @@ def emergency_stop(self): ''' initiate a series of steps to try to stop the vehicle as quickly as possible ''' - logger.warn('E-Stop!!!') + logger.warning('E-Stop!!!') self.mode = "user" self.recording = False self.constant_throttle = False diff --git a/donkeycar/parts/kinematics.py b/donkeycar/parts/kinematics.py index 360c740de..7fb9e9ab6 100644 --- a/donkeycar/parts/kinematics.py +++ b/donkeycar/parts/kinematics.py @@ -582,7 +582,7 @@ def run(self, steering) -> float: return 0 if steering > 1 or steering < -1: - logger.warn(f"steering = {steering}, but must be between 1(right) and -1(left)") + logger.warning(f"steering = {steering}, but must be between 1(right) and -1(left)") steering = clamp(steering, -1, 1) @@ -632,14 +632,14 @@ def differential_steering(throttle: float, steering: float, steering_zero: float logger.error("throttle must be a number") return 0, 0 if throttle > 1 or throttle < -1: - logger.warn(f"throttle = {throttle}, but must be between 1(right) and -1(left)") + logger.warning(f"throttle = {throttle}, but must be between 1(right) and -1(left)") throttle = clamp(throttle, -1, 1) if not is_number_type(steering): logger.error("steering must be a number") return 0, 0 if steering > 1 or steering < -1: - logger.warn(f"steering = {steering}, but must be between 1(right) and -1(left)") + logger.warning(f"steering = {steering}, but must be between 1(right) and -1(left)") steering = clamp(steering, -1, 1) left_throttle = throttle diff --git a/donkeycar/parts/pins.py b/donkeycar/parts/pins.py index f9c4091c5..3607ab1c4 100644 --- a/donkeycar/parts/pins.py +++ b/donkeycar/parts/pins.py @@ -400,7 +400,7 @@ def pwm_pin( gpio_pin_pull = [None, GPIO.PUD_OFF, GPIO.PUD_DOWN, GPIO.PUD_UP] gpio_pin_scheme = {PinScheme.BOARD: GPIO.BOARD, PinScheme.BCM: GPIO.BCM} except ImportError: - logger.warn("RPi.GPIO was not imported.") + logger.warning("RPi.GPIO was not imported.") globals()["GPIO"] = None @@ -753,7 +753,7 @@ def duty_cycle(self, duty: float) -> None: pigpio_pin_edge = [None, pigpio.RISING_EDGE, pigpio.FALLING_EDGE, pigpio.EITHER_EDGE] pigpio_pin_pull = [None, pigpio.PUD_OFF, pigpio.PUD_DOWN, pigpio.PUD_UP] except ImportError: - logger.warn("pigpio was not imported.") + logger.warning("pigpio was not imported.") globals()["pigpio"] = None diff --git a/donkeycar/parts/serial_port.py b/donkeycar/parts/serial_port.py index cb9b84bf6..248d78ef4 100644 --- a/donkeycar/parts/serial_port.py +++ b/donkeycar/parts/serial_port.py @@ -92,7 +92,7 @@ def readBytes(self, count:int=0) -> Tuple[bool, bytes]: input = self.ser.read(count) return (waiting, input) except (serial.serialutil.SerialException, TypeError): - logger.warn("failed reading bytes from serial port") + logger.warning("failed reading bytes from serial port") return (False, b'') def read(self, count:int=0) -> Tuple[bool, str]: @@ -135,11 +135,11 @@ def readln(self) -> Tuple[bool, str]: input = buffer.decode(self.charset) return (waiting, input) except (serial.serialutil.SerialException, TypeError): - logger.warn("failed reading line from serial port") + logger.warning("failed reading line from serial port") return (False, "") except UnicodeDecodeError: # the first read often includes mis-framed garbase - logger.warn("failed decoding unicode line from serial port") + logger.warning("failed decoding unicode line from serial port") return 
(False, "")

     def writeBytes(self, value:bytes):
@@ -150,7 +150,7 @@ def writeBytes(self, value:bytes):
         try:
             self.ser.write(value)
         except (serial.serialutil.SerialException, TypeError):
-            logger.warn("Can't write to serial port")
+            logger.warning("Can't write to serial port")

     def write(self, value:str):
         """
diff --git a/donkeycar/parts/tachometer.py b/donkeycar/parts/tachometer.py
index 26d7b02fa..39cc40b9f 100644
--- a/donkeycar/parts/tachometer.py
+++ b/donkeycar/parts/tachometer.py
@@ -310,7 +310,7 @@ def __init__(self, gpio_pin: InputPin, debounce_ns:int=0, debug=False):
         self.debounce_ns:int = debounce_ns
         self.debounce_time:int = 0
         if self.debounce_ns > 0:
-            logger.warn("GpioEncoder debounce_ns will be ignored.")
+            logger.warning("GpioEncoder debounce_ns will be ignored.")
         self.lock = threading.Lock()

     def _cb(self):
diff --git a/donkeycar/tests/test_keras.py b/donkeycar/tests/test_keras.py
index 366bba788..b807a190e 100644
--- a/donkeycar/tests/test_keras.py
+++ b/donkeycar/tests/test_keras.py
@@ -78,7 +78,7 @@ def test_keras_vs_tflite_and_tensorrt(keras_pilot, tmp_dir):
         # lstm cells are not yet supported in tensor RT
         out3 = k_trt.run(*args)
         assert out3 == approx(out1, rel=TOLERANCE, abs=TOLERANCE)
-    print("\n", out1, out2, out3)
+    print('keras:', out1, 'tflite:', out2, 'trt:', out3)



diff --git a/setup.cfg b/setup.cfg
index 22aaea135..095c03697 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -86,7 +86,7 @@ dev =
     mypy

 torch =
-    torch
+    torch=2.1.*
     pytorch-lightning
     torchvision
     torchaudio
     fastai

From 79ba47ba7c9ba550b8e6b0f8e99ef3209fbcd354 Mon Sep 17 00:00:00 2001
From: DocGarbanzo
Date: Sun, 31 Mar 2024 19:39:13 +0100
Subject: [PATCH 08/10] Switch off fastai trying to use cuda as this crashes
 if an Nvidia GPU is found. Correct torch version in setup.

---
 donkeycar/parts/fastai.py | 4 ++--
 setup.cfg                 | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/donkeycar/parts/fastai.py b/donkeycar/parts/fastai.py
index f7b9a8523..97f9c422d 100644
--- a/donkeycar/parts/fastai.py
+++ b/donkeycar/parts/fastai.py
@@ -158,8 +158,8 @@ def train(self,
         model = self.interpreter.model
         dataLoader = DataLoaders.from_dsets(train_data, validation_data,
                                             bs=batch_size, shuffle=False)
-        if torch.cuda.is_available():
-            dataLoader.cuda()
+        # if torch.cuda.is_available():
+        #     dataLoader.cuda()

         #dataLoaderTest = self.dataBlock.dataloaders.test_dl(validation_data, with_labels=True)
         #print(dataLoader.train[0])
         callbacks = [
             EarlyStoppingCallback(monitor='valid_loss',
diff --git a/setup.cfg b/setup.cfg
index 095c03697..76ca5e58b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -86,7 +86,7 @@ dev =
     mypy

 torch =
-    torch=2.1.*
+    torch==2.1.*
     pytorch-lightning
     torchvision
     torchaudio
     fastai

From ab384355ad952891755c6743528e1ea004547c15 Mon Sep 17 00:00:00 2001
From: DocGarbanzo
Date: Sun, 31 Mar 2024 19:42:57 +0100
Subject: [PATCH 09/10] Add comment and remove commented code.
--- donkeycar/parts/fastai.py | 3 +-- donkeycar/parts/interpreter.py | 2 -- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/donkeycar/parts/fastai.py b/donkeycar/parts/fastai.py index 97f9c422d..bd8953543 100644 --- a/donkeycar/parts/fastai.py +++ b/donkeycar/parts/fastai.py @@ -158,11 +158,10 @@ def train(self, model = self.interpreter.model dataLoader = DataLoaders.from_dsets(train_data, validation_data, bs=batch_size, shuffle=False) + # old way of enabling gpu now crashes with torch 2.1.* # if torch.cuda.is_available(): # dataLoader.cuda() - #dataLoaderTest = self.dataBlock.dataloaders.test_dl(validation_data, with_labels=True) - #print(dataLoader.train[0]) callbacks = [ EarlyStoppingCallback(monitor='valid_loss', diff --git a/donkeycar/parts/interpreter.py b/donkeycar/parts/interpreter.py index 651ecef7c..8af44e748 100755 --- a/donkeycar/parts/interpreter.py +++ b/donkeycar/parts/interpreter.py @@ -351,8 +351,6 @@ def predict_from_dict(self, input_dict): # Squeeze here because we send a batch of size one, so pick first # element. To return the order of outputs as defined in the model we # need to iterate through the model's output shapes here - # outputs = [out_dict[k].numpy().squeeze(axis=0) for k in - # self.output_keys] outputs = [k.numpy().squeeze(axis=0) for k in out_list] # don't return list if output is 1d From 442a93fe575674ac9a62201e2607c0b4924a1e25 Mon Sep 17 00:00:00 2001 From: DocGarbanzo Date: Sun, 31 Mar 2024 20:53:04 +0100 Subject: [PATCH 10/10] Bump version --- donkeycar/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/donkeycar/__init__.py b/donkeycar/__init__.py index aae7c9c76..8011dee56 100644 --- a/donkeycar/__init__.py +++ b/donkeycar/__init__.py @@ -3,7 +3,7 @@ from pyfiglet import Figlet import logging -__version__ = '5.1.dev1' +__version__ = '5.1.dev2' logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO').upper())
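
A short usage sketch of the paho-mqtt 2.x client construction that patches 02
and 05 converge on, for reference alongside the diffs above. This is a minimal
standalone example, not code from the series; the broker host and port below
are placeholders:

    from paho.mqtt.client import Client
    from paho.mqtt.enums import CallbackAPIVersion

    # paho-mqtt 2.x requires the callback API version to be passed explicitly;
    # VERSION2 selects the non-deprecated callback signatures that the
    # telemetry part and its test use after these patches.
    client = Client(callback_api_version=CallbackAPIVersion.VERSION2,
                    clean_session=True)
    client.connect('localhost', 1883)  # placeholder broker host and port
    client.loop_start()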