diff --git a/.github/workflows/param_check.yml b/.github/workflows/param_check.yml
new file mode 100644
index 0000000000..b3670e2092
--- /dev/null
+++ b/.github/workflows/param_check.yml
@@ -0,0 +1,232 @@
+name: Carbonix Parameter Validation
+on:
+  push:
+    branches:
+      - CxPilot
+      - master
+    paths-ignore:
+      # Remove other vehicles
+      - 'AntennaTracker/**'
+      - 'ArduCopter/**'
+      - 'ArduSub/**'
+      - 'Blimp/**'
+      - 'Rover/**'
+      # Remove unrelated tools
+      - 'Tools/AP_Bootloader/**'
+      - 'Tools/bootloaders/**'
+      - 'Tools/CHDK-Script/**'
+      - 'Tools/CodeStyle/**'
+      - 'Tools/completion/**'
+      - 'Tools/CPUInfo/**'
+      - 'Tools/debug/**'
+      - 'Tools/environment_install/**'
+      - 'Tools/FilterTestTool/**'
+      - 'Tools/Frame_params/**'
+      - 'Tools/geotag/**'
+      - 'Tools/GIT_Test/**'
+      - 'Tools/gittools/**'
+      - 'Tools/Hello/**'
+      - 'Tools/IO_Firmware/**'
+      - 'Tools/Linux_HAL_Essentials/**'
+      - 'Tools/LogAnalyzer/**'
+      - 'Tools/Pozyx/**'
+      - 'Tools/PrintVersion.py'
+      - 'Tools/Replay/**'
+      - 'Tools/ros2/**'
+      - 'Tools/simulink/**'
+      - 'Tools/UDP_Proxy/**'
+      - 'Tools/vagrant/**'
+      - 'Tools/Vicon/**'
+      # Discard Python files from Tools/scripts as they are not used
+      - 'Tools/scripts/**.py'
+      - 'Tools/scripts/build_sizes/**'
+      - 'Tools/scripts/build_tests/**'
+      - 'Tools/scripts/CAN/**'
+      - 'Tools/scripts/signing/**'
+      # Remove other vehicles' autotests
+      - 'Tools/autotest/antennatracker.py'
+      - 'Tools/autotest/arducopter.py'
+      - 'Tools/autotest/arduplane.py'
+      - 'Tools/autotest/ardusub.py'
+      - 'Tools/autotest/balancebot.py'
+      - 'Tools/autotest/helicopter.py'
+      - 'Tools/autotest/location.txt'
+      - 'Tools/autotest/rover.py'
+      - 'Tools/autotest/sailboat.py'
+      - 'Tools/autotest/swarminit.txt'
+      # Remove markdown files as irrelevant
+      - '**.md'
+      # Remove dotfiles at the root directory
+      - './.dir-locals.el'
+      - './.dockerignore'
+      - './.editorconfig'
+      - './.flake8'
+      - './.gitattributes'
+      - './.github'
+      - './.gitignore'
+      - './.pre-commit-config.yaml'
+      - './.pydevproject'
+      - './.valgrind-suppressions'
+      - './.valgrindrc'
+      - 'Dockerfile'
+      - 'Vagrantfile'
+      - 'Makefile'
+      # Skip checks for some directories
+      - '.vscode/**'
+      - '.github/ISSUE_TEMPLATE/**'
+      # Ignore changes to other workflows
+      - '.github/workflows/test_environment.yml'
+      - '.github/workflows/cache_cleanup.yml'
+      - '.github/workflows/cygwin_build.yml'
+      - '.github/workflows/test_ccache.yml'
+      - '.github/workflows/test_linux_sbc.yml'
+      - '.github/workflows/test_scripts.yml'
+      - '.github/workflows/test_sitl_periph.yml'
+      - '.github/workflows/test_sitl_sub.yml'
+      - '.github/workflows/ccache.env'
+      - '.github/workflows/esp32_build.yml'
+      - '.github/workflows/test_chibios.yml'
+      - '.github/workflows/test_replay.yml'
+      - '.github/workflows/test_sitl_blimp.yml'
+      - '.github/workflows/test_sitl_plane.yml'
+      - '.github/workflows/test_sitl_tracker.yml'
+      - '.github/workflows/colcon.yml'
+      - '.github/workflows/test_dds.yml'
+      - '.github/workflows/test_scripting.yml'
+      - '.github/workflows/test_sitl_copter.yml'
+      - '.github/workflows/test_sitl_rover.yml'
+      - '.github/workflows/test_unit_tests.yml'
+
+  pull_request:
+    paths-ignore:
+      # Remove other vehicles
+      - 'AntennaTracker/**'
+      - 'ArduCopter/**'
+      - 'ArduSub/**'
+      - 'Blimp/**'
+      - 'Rover/**'
+      # Remove unrelated tools
+      - 'Tools/AP_Bootloader/**'
+      - 'Tools/bootloaders/**'
+      - 'Tools/CHDK-Script/**'
+      - 'Tools/CodeStyle/**'
+      - 'Tools/completion/**'
+      - 'Tools/CPUInfo/**'
+      - 'Tools/debug/**'
+      - 'Tools/environment_install/**'
+      - 'Tools/FilterTestTool/**'
+      - 'Tools/Frame_params/**'
+      - 'Tools/geotag/**'
+      - 'Tools/GIT_Test/**'
+      - 'Tools/gittools/**'
+      - 'Tools/Hello/**'
+      - 'Tools/IO_Firmware/**'
+      - 'Tools/Linux_HAL_Essentials/**'
+      - 'Tools/LogAnalyzer/**'
+      - 'Tools/Pozyx/**'
+      - 'Tools/PrintVersion.py'
+      - 'Tools/Replay/**'
+      - 'Tools/ros2/**'
+      - 'Tools/simulink/**'
+      - 'Tools/UDP_Proxy/**'
+      - 'Tools/vagrant/**'
+      - 'Tools/Vicon/**'
+      # Discard Python files from Tools/scripts as they are not used
+      - 'Tools/scripts/**.py'
+      - 'Tools/scripts/build_sizes/**'
+      - 'Tools/scripts/build_tests/**'
+      - 'Tools/scripts/CAN/**'
+      - 'Tools/scripts/signing/**'
+      # Remove other vehicles' autotests
+      - 'Tools/autotest/antennatracker.py'
+      - 'Tools/autotest/arducopter.py'
+      - 'Tools/autotest/arduplane.py'
+      - 'Tools/autotest/ardusub.py'
+      - 'Tools/autotest/balancebot.py'
+      - 'Tools/autotest/helicopter.py'
+      - 'Tools/autotest/location.txt'
+      - 'Tools/autotest/rover.py'
+      - 'Tools/autotest/sailboat.py'
+      - 'Tools/autotest/swarminit.txt'
+      # Remove markdown files as irrelevant
+      - '**.md'
+      # Remove dotfiles at the root directory
+      - './.dir-locals.el'
+      - './.dockerignore'
+      - './.editorconfig'
+      - './.flake8'
+      - './.gitattributes'
+      - './.github'
+      - './.gitignore'
+      - './.pre-commit-config.yaml'
+      - './.pydevproject'
+      - './.valgrind-suppressions'
+      - './.valgrindrc'
+      - 'Dockerfile'
+      - 'Vagrantfile'
+      - 'Makefile'
+      # Skip checks for some directories
+      - '.vscode/**'
+      - '.github/ISSUE_TEMPLATE/**'
+      # Ignore changes to other workflows
+      - '.github/workflows/test_environment.yml'
+      - '.github/workflows/cache_cleanup.yml'
+      - '.github/workflows/cygwin_build.yml'
+      - '.github/workflows/test_ccache.yml'
+      - '.github/workflows/test_linux_sbc.yml'
+      - '.github/workflows/test_scripts.yml'
+      - '.github/workflows/test_sitl_periph.yml'
+      - '.github/workflows/test_sitl_sub.yml'
+      - '.github/workflows/ccache.env'
+      - '.github/workflows/esp32_build.yml'
+      - '.github/workflows/test_chibios.yml'
+      - '.github/workflows/test_replay.yml'
+      - '.github/workflows/test_sitl_blimp.yml'
+      - '.github/workflows/test_sitl_plane.yml'
+      - '.github/workflows/test_sitl_tracker.yml'
+      - '.github/workflows/colcon.yml'
+      - '.github/workflows/test_dds.yml'
+      - '.github/workflows/test_scripting.yml'
+      - '.github/workflows/test_sitl_copter.yml'
+      - '.github/workflows/test_sitl_rover.yml'
+      - '.github/workflows/test_unit_tests.yml'
+
+  workflow_dispatch:
+
+concurrency:
+  group: ci-${{github.workflow}}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  run-python-script:
+    runs-on: ubuntu-22.04
+    container: ardupilot/ardupilot-dev-base:v0.1.3
+
+    steps:
+      - name: Checkout Code
+        uses: actions/checkout@v4
+
+      - name: Param Check Unittests
+        run: >
+          python Tools/Carbonix_scripts/param_check_unittests.py
+
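+      # The parameter checks below can be reproduced locally from the
+      # repository root, for example:
+      #   python Tools/Carbonix_scripts/param_check.py --vehicle=Plane \
+      #     libraries/AP_HAL_ChibiOS/hwdef/CubeOrange-Volanti/defaults.parm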
+      - name: Check Periph Parameters
+        run: >
+          python Tools/Carbonix_scripts/param_check.py
+          --vehicle=AP_Periph
+          libraries/AP_HAL_ChibiOS/hwdef/CarbonixCommon/cpn_params/**/*.par*m
+          libraries/AP_HAL_ChibiOS/hwdef/CarbonixCommon/payloads/**/cpn*.par*m
+          libraries/AP_HAL_ChibiOS/hwdef/CarbonixCommon/payloads/**/CPN*.par*m
+
+      - name: Check Plane Parameters
+        run: >
+          python Tools/Carbonix_scripts/param_check.py
+          --vehicle=Plane
+          libraries/AP_HAL_ChibiOS/hwdef/CarbonixCommon/defaults.parm
+          libraries/AP_HAL_ChibiOS/hwdef/CarbonixCommon/payloads/**/Cube*.par*m
+          libraries/AP_HAL_ChibiOS/hwdef/CarbonixCommon/payloads/**/cube*.par*m
+          libraries/AP_HAL_ChibiOS/hwdef/CubeOrange-Volanti/defaults.parm
+          libraries/AP_HAL_ChibiOS/hwdef/CubeOrange-Ottano/defaults.parm
diff --git a/Tools/Carbonix_scripts/param_check.py b/Tools/Carbonix_scripts/param_check.py
new file mode 100755
index 0000000000..ebebd8a3f5
--- /dev/null
+++ b/Tools/Carbonix_scripts/param_check.py
@@ -0,0 +1,417 @@
+#!/usr/bin/env python3
+
+"""
+Parameter File Validation Script
+
+This script validates a set of parameter files against the generated metadata
+for a specific firmware type (e.g. Plane, AP_Periph). The following checks are
+performed on each parameter in the files:
+
+- Ensure that the parameter exists in the metadata.
+- Validate that the parameter's value falls within a specified range, if applicable.
+- Confirm that the parameter's value matches one of the predefined valid values.
+- Check bitmask parameters to ensure all set bits are valid according to the metadata.
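+
+Parameter lines may end with a `# DISABLE_CHECKS: <reason>` comment to suppress
+these checks for that line. A brief reason is required, and the flag is itself
+reported as an error if the parameter would have passed its checks anyway.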
+
+Usage:
+    python param_check.py --vehicle <vehicle_type> <parameter_files>
+
+Where:
+    - `<vehicle_type>` is the type of vehicle for which to generate metadata.
+      Valid values are: Sub, Plane, Blimp, Copter, Tracker, Rover, AP_Periph.
+    - `<parameter_files>` is a list of parameter file paths to validate,
+       supporting wildcard patterns.
+
+Example:
+    python Tools/Carbonix_scripts/param_check.py --vehicle Plane Tools/autotest/default_params/*plane.parm
+
+AP_FLAKE8_CLEAN
+
+"""
+
+import os
+import json
+import re
+import glob
+import subprocess
+from argparse import ArgumentParser
+
+
+def parse_arguments():
+    """Parses command-line arguments, and expands any wildcard patterns."""
+    parser = ArgumentParser(description="Validate parameter files")
+    parser.add_argument("files", nargs='+', help="Parameter files to validate")
+    parser.add_argument(
+        "--vehicle", required=True, help="Vehicle type to generate for"
+    )
+
+    # Add flags for enabling/disabling checks
+    parser.add_argument("--no-missing", action="store_true", help="Disable missing check")
+    parser.add_argument("--no-readonly", action="store_true", help="Disable read-only check")
+    parser.add_argument("--no-bitmask", action="store_true", help="Disable bitmask check")
+    parser.add_argument("--no-range", action="store_true", help="Disable range check")
+    parser.add_argument("--no-values", action="store_true", help="Disable values check")
+
+    args = parser.parse_args()
+
+    # Expand any wildcards in the list of files
+    files = []
+    for pattern in args.files:
+        files += glob.glob(pattern, recursive=True)
+    args.files = files
+
+    return args
+
+
+def main():
+    """Main function for the script.
+
+    Generates the metadata for the specified vehicle, and checks each file
+    against the metadata. It prints a success or failure message for each file
+    and exits with an error code if any file fails validation.
+    """
+    args = parse_arguments()
+
+    if not args.files:
+        print('No files found')
+        exit(1)
+
+    metadata = generate_metadata(args.vehicle)
+
+    # Dictionary to store error messages for each file
+    messages = {}  # {filename: [error messages]}
+
+    # Check each file, and store any error messages
+    for file in args.files:
+        msgs = check_file(file, metadata, args)
+        messages[os.path.relpath(file)] = msgs
+
+    # Print the success/failure for each file
+    for file in messages:
+        if not messages[file]:
+            print(f'{file}: Passed')
+        else:
+            print(f'{file}: Failed')
+            for msg in messages[file]:
+                print(f'  {msg}')
+
+    # Check if any files failed (i.e. have error messages)
+    if any(messages[file] for file in messages):
+        exit(1)
+
+
+def check_file(file, metadata, args=None):
+    """Checks a single parameter file against the metadata.
+
+    Loads the parameters from the specified file and validates each parameter
+    against the provided metadata dictionary. It returns a list of error
+    messages for any invalid parameters found, or an empty list if all
+    parameters are valid.
+
+    Args:
+        file (str): The path to the parameter file to be checked.
+        metadata (dict): A dictionary containing the parameter metadata.
+        args (Namespace): An optional namespace containing flags to skip checks.
+
+    Returns:
+        list: A list of error messages if any parameters are invalid.
+    """
+    params = load_params(file)
+
+    skip_missing = args.no_missing if args else False
+
+    msgs = []
+    for param in params:
+        if param not in metadata:
+            if not skip_missing:
+                msgs.append(f'{param} not found in metadata')
+        else:
+            value, checks_disabled = params[param]
+            # If checks are disabled in a comment, then we enable all checks
+            # regardless of args (by passing None). We want to enforce that
+            # the DISABLE_CHECKS in the comment is necessary.
+            msg = check_param(
+                param,
+                value,
+                metadata[param],
+                args if not checks_disabled else None,
+            )
+            # Flag DISABLE_CHECKS comments that are not actually needed;
+            # otherwise suppress the failure the comment was meant to silence.
+            if checks_disabled:
+                msg = (
+                    f'{param} does not need DISABLE_CHECKS'
+                    if msg is None
+                    else None
+                )
+            if msg is not None:
+                msgs.append(msg)
+
+    return msgs
+
+
+def load_params(file):
+    """Loads a parameter file and returns a dictionary of parameters.
+
+    Reads the specified parameter file, skipping any lines that are comments or
+    contain tags like @include or @delete. It builds a name-value dictionary of
+    the parameters in the file.
+
+    Args:
+        file (str): The path to the parameter file to be loaded.
+
+    Returns:
+        dict: A dictionary where the keys are parameter names and the values
+        are tuples containing the parameter value as a float and a boolean
+        indicating if checks are disabled.
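+
+    Example of accepted line formats (values may be decimal, hex, or binary):
+        PARAM1, 1
+        PARAM2  0x10 @READONLY  # inline comment
+        PARAM3  42.5  # DISABLE_CHECKS: reason why validation is skipped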
+    """
+    with open(file, 'r') as file_object:
+        lines = file_object.readlines()
+
+    params = {}
+    for i, line in enumerate(lines):
+        # Strip whitespace
+        processed_line = line.strip()
+        # Skip any lines that start with @ to ignore includes and deletes
+        if processed_line.startswith('@'):
+            continue
+        # Strip out stuff like the @READONLY tag
+        processed_line = re.sub(r'@\S*', '', processed_line)
+
+        # Handle comments
+        comment = ''
+        if '#' in processed_line:
+            comment_start = processed_line.index('#')
+            comment = processed_line[comment_start + 1:].strip()
+            processed_line = processed_line[:comment_start].strip()
+
+        # Check for DISABLE_CHECKS flag
+        checks_disabled = 'DISABLE_CHECKS' in comment
+
+        # Check that a valid reason is provided if checks are disabled
+        if checks_disabled and len(''.join(comment.split('DISABLE_CHECKS')).strip()) < 5:
+            print(f'Error: an explanation is required for disabled checks: `{line.strip()}`')
+            print(f'File: {file}:{i+1}')
+            exit(1)
+
+        # Strip whitespace again
+        processed_line = processed_line.strip()
+        # Skip any empty lines
+        if not processed_line:
+            continue
+        # Split on , or any whitespace
+        parts = re.split(r'[,\s]+', processed_line, maxsplit=1)
+        try:
+            value = parts[1]
+            if '.' in value:
+                value = float(value)  # Plain decimal value
+            else:
+                # int(x, 0) handles decimal values as well as 0x and 0b prefixes
+                value = float(int(value, 0))
+            params[parts[0]] = (value, checks_disabled)
+        except (IndexError, ValueError):
+            print(f'Error parsing line: `{line.strip()}`')
+            print(f'File: {file}:{i+1}')
+            exit(1)
+
+    return params
+
+
+def generate_metadata(vehicle):
+    """Generates and returns metadata for a specific vehicle.
+
+    Runs an external script to generate the metadata for the specified vehicle
+    in JSON format. It checks if the metadata file was successfully created and
+    updated, loads the metadata, flattens it to remove parameter groups, and
+    returns it as a dictionary. The temporary metadata file is then deleted
+    after processing.
+
+    Args:
+        vehicle (str): The type of vehicle for which to generate metadata.
+
+    Returns:
+        dict: A dictionary with parameter names as keys and metadata for that
+        parameter as values.
+
+    Raises:
+        SystemExit: If the metadata generation fails, the metadata file is not
+        created, or the file was not updated.
+    """
+
+    print(f'Generating metadata for vehicle {vehicle}...', end=' ')
+
+    metadata_script = os.path.join(
+        os.path.dirname(__file__), '../autotest/param_metadata/param_parse.py'
+    )
+    metadata_script = os.path.abspath(metadata_script)
+    metadata_file = 'apm.pdef.json'
+    previous_mtime = os.path.getmtime(metadata_file) if os.path.exists(metadata_file) else 0
+
+    try:
+        subprocess.run(
+            ['python3', metadata_script, f'--vehicle={vehicle}', '--format=json'],
+            check=True,
+            capture_output=True,
+            text=True
+        )
+    except subprocess.CalledProcessError as e:
+        print(f"Error generating metadata: {e.stderr}")
+        exit(1)
+
+    if not os.path.exists(metadata_file):
+        print(f"Error: Metadata file '{metadata_file}' was not created.")
+        exit(1)
+
+    current_mtime = os.path.getmtime(metadata_file)
+    if current_mtime <= previous_mtime:
+        print(f"Error: Metadata file '{metadata_file}' was not updated.")
+        exit(1)
+
+    print('Done')
+
+    with open(metadata_file, 'r') as file:
+        json_file = json.load(file)
+
+    # Delete the metadata file, as we don't need it anymore
+    os.remove(metadata_file)
+
+    # Flatten the json file to remove the parameter groups
+    metadata = {}
+    for group in json_file:
+        metadata.update(json_file[group])
+
+    return metadata
+
+
+def check_param(name, value, metadata, args=None):
+    """Checks a single parameter against its metadata definition.
+
+    Validates the specified parameter. If the metadata contains multiple types
+    of validity fields (e.g. Range and Values), only the first one encountered
+    will be checked in this priority order: ReadOnly, Bitmask, Range, Values.
+
+    Args:
+        name (str): The name of the parameter to be checked.
+        value (float): The value of the parameter as a float.
+        metadata (dict): A dictionary containing metadata for the parameter.
+        args (Namespace): An optional namespace containing flags to skip checks.
+
+    Returns:
+        str: An error message if the parameter is invalid, or None otherwise.
+    """
+    # List of checks with their corresponding skip flags and check functions
+    checks = [
+        (
+            'ReadOnly',
+            args.no_readonly if args else False,
+            lambda: f'{name} is read only' if metadata['ReadOnly'] else None,
+        ),
+        (
+            'Bitmask',
+            args.no_bitmask if args else False,
+            lambda: check_bitmask(name, value, metadata['Bitmask']),
+        ),
+        (
+            'Range',
+            args.no_range if args else False,
+            lambda: check_range(name, value, metadata['Range']),
+        ),
+        (
+            'Values',
+            args.no_values if args else False,
+            lambda: check_values(name, value, metadata['Values']),
+        ),
+    ]
+
+    for key, skip_flag, check_func in checks:
+        if key in metadata:
+            return check_func() if not skip_flag else None
+
+    # If none of the above, it's automatically valid
+    return None
+
+
+def check_bitmask(name, value, metadata):
+    """Validates a parameter against its bitmask metadata.
+
+    Checks if the parameter value is a positive integer and if each bit set in
+    the value corresponds to a valid bit in the metadata. It returns an error
+    message if any invalid bits are set.
+
+    Args:
+        name (str): The name of the parameter being checked.
+        value (float): The value of the parameter as a float.
+        metadata (dict): A dictionary containing bitmask definitions.
+
+    Returns:
+        str: An error message if the parameter is invalid, or None otherwise.
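+
+    Example:
+        With metadata {'0': 'Bit0', '1': 'Bit1'}, a value of 3.0 is valid,
+        while 4.0 is rejected because bit 2 is not defined in the metadata.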
+    """
+    if not float(value).is_integer():
+        return f'{name}: {value:g} is not an integer'
+
+    value = int(float(value))
+
+    # Maximum number of expected bits
+    N = 64
+
+    if value < 0:
+        return f'{name}: {value} is negative'
+
+    if value >= (1 << N):
+        return f'{name}: {value} is larger than {N} bits'
+
+    # Loop through the set bits and confirm all correspond to something
+    for i in range(N):
+        # Break if we've checked the highest set bit
+        if value < (1 << i):
+            break
+        # Check if the bit is set, and if so, if it's described in the metadata
+        if value & (1 << i) and str(i) not in metadata:
+            return f'{name}: bit {i} is not valid'
+
+    return None
+
+
+def check_range(name, value, metadata):
+    """Validates a parameter against its range metadata.
+
+    Checks if the parameter value falls within the defined minimum and maximum
+    range in the metadata. It returns an error message if the value is out of
+    range.
+
+    Args:
+        name (str): The name of the parameter being checked.
+        value (float): The value of the parameter as a float.
+        metadata (dict): A dictionary containing the 'low' and 'high' range.
+
+    Returns:
+        str: An error message if the parameter is invalid, or None otherwise.
+    """
+
+    if value < float(metadata['low']):
+        return f'{name}: {value:g} is below minimum value {metadata["low"]}'
+    if value > float(metadata['high']):
+        return f'{name}: {value:g} is above maximum value {metadata["high"]}'
+
+    return None
+
+
+def check_values(name, value, metadata):
+    """Validates a parameter against its list of valid values.
+
+    Checks if the parameter value is one of the valid values listed in the
+    metadata. It returns an error message if the value is not in the list.
+
+    Args:
+        name (str): The name of the parameter being checked.
+        value (float): The value of the parameter as a float.
+        metadata (dict): A dictionary containing valid values for the parameter.
+
+    Returns:
+        str: An error message if the parameter is invalid, or None otherwise.
+    """
+    if f'{float(value):g}' not in metadata:
+        return f'{name}: {value:g} is not a valid value'
+
+    return None
+
+
+if __name__ == '__main__':
+    main()
diff --git a/Tools/Carbonix_scripts/param_check_unittests.py b/Tools/Carbonix_scripts/param_check_unittests.py
new file mode 100755
index 0000000000..e45fa81a65
--- /dev/null
+++ b/Tools/Carbonix_scripts/param_check_unittests.py
@@ -0,0 +1,269 @@
+#!/usr/bin/env python3
+
+"""
+Parameter File Checker Unit Tests
+
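+These tests cover param_check.py and are run in CI by
+.github/workflows/param_check.yml. They can also be run directly:
+
+    python Tools/Carbonix_scripts/param_check_unittests.py
+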
+AP_FLAKE8_CLEAN
+"""
+
+import os
+import time
+import unittest
+from unittest.mock import MagicMock, patch, mock_open
+from param_check import (
+    load_params,
+    check_param,
+    check_range,
+    check_values,
+    check_bitmask,
+    generate_metadata,
+    check_file,
+    main
+)
+
+
+class TestParamCheck(unittest.TestCase):
+
+    def test_load_params(self):
+        # Mock parameter file content
+        mock_file_content = '''
+        PARAM1, 1
+        PARAM2\t0x10 @READONLY # Comment
+        # Comment
+        @tag
+        @include filename.parm
+        PARAM3   42.5#comment
+        PARAM4, 0b1111 #DISABLE_CHECKS for some reason
+        '''
+
+        with patch('builtins.open', mock_open(read_data=mock_file_content)):
+            params = load_params('fake_file.parm')
+
+        # Test the correct parsing of parameters and DISABLE_CHECKS flag
+        self.assertEqual(params['PARAM1'], (1.0, False))
+        self.assertEqual(params['PARAM2'], (16.0, False))
+        self.assertEqual(params['PARAM3'], (42.5, False))
+        self.assertEqual(params['PARAM4'], (15.0, True))
+
+        # Bad parameter lines that should fail to parse and exit the program
+        bad_lines = [
+            'PARAM4 1 2\n',
+            'PARAM4\n',
+            'P#ARAM4 1\n',
+            'PARAM4, 0x10.0\n',
+        ]
+        for line in bad_lines:
+            content = mock_file_content + line
+            with patch('builtins.open', mock_open(read_data=content)):
+                with self.assertRaises(SystemExit):
+                    load_params('fake_file.parm')
+
+        content = mock_file_content + 'PARAM4, 0x10 #DISABLE_CHECKS: \n'
+        # Test that the program exits when DISABLE_CHECKS does not have a reason
+        with patch('builtins.open', mock_open(read_data=content)):
+            with self.assertRaises(SystemExit):
+                load_params('fake_file.parm')
+
+    def test_check_range(self):
+        # Mock metadata and parameters
+        metadata = {'low': 0.0, 'high': 100.0}
+        self.assertIsNone(check_range('PARAM', 50.0, metadata))
+        self.assertEqual(check_range('PARAM', -1.0, metadata), 'PARAM: -1 is below minimum value 0.0')
+        self.assertEqual(check_range('PARAM', 101.0, metadata), 'PARAM: 101 is above maximum value 100.0')
+
+    def test_check_values(self):
+        # Mock metadata and parameters
+        metadata = {'0': 'Off', '1': 'On'}
+        self.assertIsNone(check_values('PARAM', 0, metadata))
+        self.assertEqual(check_values('PARAM', 2, metadata), 'PARAM: 2 is not a valid value')
+
+    def test_check_bitmask(self):
+        # Mock metadata and parameters
+        metadata = {'0': 'Bit0', '1': 'Bit1'}
+        self.assertIsNone(check_bitmask('PARAM', 3.0, metadata))
+        self.assertEqual(check_bitmask('PARAM', 4.0, metadata), 'PARAM: bit 2 is not valid')
+        self.assertEqual(check_bitmask('PARAM', 1.5, metadata), 'PARAM: 1.5 is not an integer')
+
+    @patch('subprocess.run')
+    @patch('os.remove')
+    @patch('os.path.getmtime')
+    @patch('os.path.exists', return_value=True)
+    @patch('builtins.open', new_callable=mock_open, read_data='{}')
+    @patch('json.load', return_value={'GROUP1': {'PARAM1': {}}, 'GROUP2': {'PARAM2': {}}})
+    def test_generate_metadata(self, mock_json, mock_open, mock_exists, mock_getmtime, mock_remove, mock_subprocess):
+        # When the function calls getmtime, it will return the current time
+        mock_getmtime.side_effect = lambda path: time.time()
+
+        # Call the function
+        metadata = generate_metadata('Plane')
+
+        # Test subprocess was called correctly
+        metadata_script = os.path.join(
+            os.path.dirname(__file__), '../autotest/param_metadata/param_parse.py'
+        )
+        metadata_script = os.path.abspath(metadata_script)
+        mock_subprocess.assert_called_once_with(
+            ['python3', metadata_script, '--vehicle=Plane', '--format=json'],
+            check=True,
+            capture_output=True,
+            text=True
+        )
+
+        # Test that the metadata was loaded and that the json was flattened
+        self.assertEqual(metadata, {'PARAM1': {}, 'PARAM2': {}})
+
+    def test_check_param(self):
+        metadata = {
+            'Description': 'This is a test parameter',
+            'ReadOnly': True,
+            'Bitmask': {'0': 'Bit0', '1': 'Bit1', '8': 'Bit8'},
+            'Range': {'low': '0.0', 'high': '100.0'},
+            'Values': {'0': 'Off', '1': 'On'}
+        }
+        args = type('', (), {})()  # Creating a simple object to simulate args
+        args.no_readonly = False
+        args.no_bitmask = False
+        args.no_range = False
+        args.no_values = False
+
+        # Test ReadOnly
+        self.assertEqual(check_param('PARAM', 0, metadata, args), 'PARAM is read only')
+        args.no_readonly = True
+        self.assertIsNone(check_param('PARAM', 0, metadata, args))
+
+        # Test Bitmask
+        del metadata['ReadOnly']  # Remove ReadOnly to test the next priority
+        # 256 would fail the other checks, but passes the bitmask check
+        self.assertIsNone(check_param('PARAM', 256.0, metadata, args))
+        self.assertEqual(check_param('PARAM', 1.5, metadata, args), 'PARAM: 1.5 is not an integer')
+        self.assertEqual(check_param('PARAM', -1, metadata, args), 'PARAM: -1 is negative')
+        self.assertEqual(
+            check_param('PARAM', 18446744073709551616, metadata, args),
+            'PARAM: 18446744073709551616 is larger than 64 bits'
+        )
+        self.assertEqual(check_param('PARAM', 4, metadata, args), 'PARAM: bit 2 is not valid')
+        args.no_bitmask = True
+        self.assertIsNone(check_param('PARAM', 4, metadata, args))
+
+        # Test Range
+        del metadata['Bitmask']  # Remove Bitmask to test the next priority
+        # 50 would fail the values check, but passes the range check
+        self.assertIsNone(check_param('PARAM', 50, metadata, args))
+        self.assertEqual(check_param('PARAM', 101, metadata, args), 'PARAM: 101 is above maximum value 100.0')
+        self.assertEqual(check_param('PARAM', -1, metadata, args), 'PARAM: -1 is below minimum value 0.0')
+        args.no_range = True
+        self.assertIsNone(check_param('PARAM', -1, metadata, args))
+
+        # Test Values
+        del metadata['Range']  # Remove Range to test the next priority
+        self.assertIsNone(check_param('PARAM', 0, metadata, args))
+        self.assertEqual(check_param('PARAM', 2, metadata, args), 'PARAM: 2 is not a valid value')
+        args.no_values = True
+        self.assertIsNone(check_param('PARAM', 2, metadata, args))
+
+        # Test parameter with no range, bitmask, or value restrictions
+        del metadata['Values']
+        self.assertIsNone(check_param('PARAM', 0, metadata, args))  # Should pass no matter what
+
+    @patch('param_check.check_param')
+    def test_check_file(self, mock_check_param):
+        mock_args = type('', (), {})()  # Creating a simple object to simulate args
+        mock_args.no_missing = False
+
+        # Case 1: All parameters pass their checks
+        mock_file_content = "PARAM1, 10\nPARAM2, 20\n"
+        mock_metadata = {'PARAM1': {}, 'PARAM2': {}}
+        # Mock check_param to return None for all params, indicating they pass validation
+        mock_check_param.side_effect = lambda name, value, metadata, args: None
+        with patch('builtins.open', mock_open(read_data=mock_file_content)):
+            msgs = check_file('fake_file.parm', mock_metadata, mock_args)
+        # Check that no error messages were returned
+        self.assertEqual(msgs, [])
+        # Check that check_param was called for each parameter
+        self.assertEqual(mock_check_param.call_count, 2)
+
+        # Case 2: Missing parameter (PARAM3 not in metadata)
+        mock_file_content += "PARAM3, 30\n"
+        with patch('builtins.open', mock_open(read_data=mock_file_content)):
+            msgs = check_file('fake_file.parm', mock_metadata, mock_args)
+        # Check that a missing parameter error is reported
+        self.assertEqual(msgs, ['PARAM3 not found in metadata'])
+
+        # Case 3: Missing parameter but with no-missing flag
+        mock_args.no_missing = True
+        with patch('builtins.open', mock_open(read_data=mock_file_content)):
+            msgs = check_file('fake_file.parm', mock_metadata, mock_args)
+        # Check that no error messages are returned when no-missing is enabled
+        self.assertEqual(msgs, [])
+
+        # Case 4: Valid parameter with DISABLE_CHECKS flag, and invalid parameter
+        # (should report both errors)
+        mock_file_content = "PARAM1, 50.0 # DISABLE_CHECKS: reason\nPARAM2, 0\n"
+        # Mock check_param so PARAM1 is valid, but not PARAM2
+        mock_check_param.side_effect = lambda name, value, metadata, args: (
+            None if name in ['PARAM1'] else f'{name}: Error'
+        )
+        with patch('builtins.open', mock_open(read_data=mock_file_content)):
+            msgs = check_file('fake_file.parm', mock_metadata, mock_args)
+        self.assertEqual(
+            msgs,
+            [
+                'PARAM1 does not need DISABLE_CHECKS',
+                'PARAM2: Error',
+            ],
+        )
+
+        # Case 5: Invalid parameter but with DISABLE_CHECKS (should pass)
+        mock_file_content = "PARAM1, 150.0\nPARAM2, 200.0 # DISABLE_CHECKS: reason\n"
+        with patch('builtins.open', mock_open(read_data=mock_file_content)):
+            msgs = check_file('fake_file.parm', mock_metadata, mock_args)
+        # Check that no error messages are returned because DISABLE_CHECKS is valid
+        self.assertEqual(msgs, [])
+
+    @patch('param_check.parse_arguments')
+    @patch('param_check.generate_metadata')
+    @patch('param_check.check_file')
+    @patch('builtins.print')
+    def test_main(self, mock_print, mock_check_file, mock_generate_metadata, mock_parse_arguments):
+
+        # Setup mock for parse_arguments
+        mock_args = MagicMock()
+        mock_args.vehicle = 'Plane'
+        mock_args.files = ['file1.parm', 'file2.parm']
+        mock_parse_arguments.return_value = mock_args
+
+        # Setup mock for generate_metadata
+        mock_generate_metadata.return_value = {'PARAM1': {}, 'PARAM2': {}}
+
+        # Setup mock for check_file
+        mock_check_file.side_effect = lambda file, metadata, args: [] if file == 'file1.parm' else ['Error']
+
+        # Call main function
+        with patch('sys.argv', ['param_check.py']):
+            with self.assertRaises(SystemExit):
+                main()
+
+        # Check that generate_metadata was called correctly
+        mock_generate_metadata.assert_called_once_with('Plane')
+
+        # Check that check_file was called for each expanded file
+        self.assertEqual(mock_check_file.call_count, 2)
+        mock_check_file.assert_any_call('file1.parm', {'PARAM1': {}, 'PARAM2': {}}, mock_args)
+        mock_check_file.assert_any_call('file2.parm', {'PARAM1': {}, 'PARAM2': {}}, mock_args)
+
+        # Check that print was called correctly
+        mock_print.assert_any_call('file1.parm: Passed')
+        mock_print.assert_any_call('file2.parm: Failed')
+        mock_print.assert_any_call('  Error')
+
+        # Test that the program exits when no files are provided
+        mock_print.reset_mock()
+        mock_args.files = []
+        mock_parse_arguments.return_value = mock_args
+        with patch('sys.argv', ['param_check.py']):
+            with self.assertRaises(SystemExit):
+                main()
+
+        mock_print.assert_called_once_with('No files found')
+
+
+if __name__ == '__main__':
+    unittest.main()