From f46eda20725f7fc0d691258570523c3b5b129bc6 Mon Sep 17 00:00:00 2001
From: Martin Pulec
Date: Mon, 8 Apr 2024 12:07:24 +0200
Subject: [PATCH] vdec/gpujpeg: set the default GJ params (probe)

At least the struct gpujpeg_image_parameters is input/output - the
pixel_format is read from there (to evaluate whether alpha formats
should be presented).

So it is better to initialize it correctly - right now it perhaps
doesn't matter, but this keeps the code future-proof.
---
 src/video_decompress/gpujpeg.c | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/video_decompress/gpujpeg.c b/src/video_decompress/gpujpeg.c
index b6f356c88..33d396eda 100644
--- a/src/video_decompress/gpujpeg.c
+++ b/src/video_decompress/gpujpeg.c
@@ -183,8 +183,11 @@ static int gpujpeg_decompress_reconfigure(void *state, struct video_desc desc,
 static decompress_status
 gpujpeg_probe_internal_codec(unsigned char *buffer, size_t len, struct pixfmt_desc *internal_prop) {
         struct gpujpeg_image_parameters image_params = { 0 };
+        gpujpeg_image_set_default_parameters(&image_params);
 #if GPUJPEG_VERSION_INT >= GPUJPEG_MK_VERSION_INT(0, 20, 0)
-        struct gpujpeg_parameters params = { .verbose = MAX(0, log_level - LOG_LEVEL_INFO) };
+        struct gpujpeg_parameters params;
+        gpujpeg_set_default_parameters(&params);
+        params.verbose = MAX(0, log_level - LOG_LEVEL_INFO);
         if (gpujpeg_decoder_get_image_info(buffer, len, &image_params, &params, NULL) != 0) {
 #else
         if (gpujpeg_decoder_get_image_info(buffer, len, &image_params, NULL, MAX(0, log_level - LOG_LEVEL_INFO)) != 0) {
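
Below is a short illustrative sketch (not part of the patch) of the resulting call pattern
for GPUJPEG >= 0.20.0: both parameter structs are filled with library defaults before
probing, because gpujpeg_image_parameters is read as input as well as written as output.
The GPUJPEG calls are the ones appearing in the hunk above; the header paths and the
fields printed at the end are assumptions based on common GPUJPEG usage and may need
adjusting for the installed version.

    #include <stdio.h>

    #include <libgpujpeg/gpujpeg_common.h>
    #include <libgpujpeg/gpujpeg_decoder.h>

    /* Probe a JPEG held in memory and print basic image info.
     * Returns 0 on success, -1 on failure. */
    static int probe_jpeg(unsigned char *buffer, size_t len)
    {
            struct gpujpeg_image_parameters image_params;
            gpujpeg_image_set_default_parameters(&image_params); /* input/output struct */

            struct gpujpeg_parameters params;
            gpujpeg_set_default_parameters(&params);
            params.verbose = 0; /* quiet probing */

            /* GPUJPEG >= 0.20.0 signature, as used in the hunk above */
            if (gpujpeg_decoder_get_image_info(buffer, len, &image_params,
                                               &params, NULL) != 0) {
                    fprintf(stderr, "cannot probe JPEG\n");
                    return -1;
            }

            printf("%dx%d, pixel format %d\n", image_params.width,
                   image_params.height, (int) image_params.pixel_format);
            return 0;
    }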