Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add new post processing filter - rotation filter #13499

Merged
merged 13 commits into from
Nov 18, 2024
Prev Previous commit
Next Next commit
add unit test
  • Loading branch information
noacoohen committed Nov 14, 2024
commit c0dc22ee816ec7036ad9c117550f906a2411e175
133 changes: 133 additions & 0 deletions unit-tests/post-processing/test-rotation-filter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
# License: Apache 2.0. See LICENSE file in root directory.
# Copyright(c) 2024 Intel Corporation. All Rights Reserved.

#temporary fix to prevent the test from running on Win_SH_Py_DDS_CI
#test:donotrun:dds
Nir-Az marked this conversation as resolved.
Show resolved Hide resolved
#test:donotrun:!nightly
Nir-Az marked this conversation as resolved.
Show resolved Hide resolved

from rspy import test, repo
import pyrealsense2 as rs
import numpy as np

# Parameters for frame creation and depth intrinsics
input_res_x = 640  # synthetic depth frame width, in pixels
input_res_y = 480  # synthetic depth frame height, in pixels
focal_length = 600  # fx and fy of the synthetic pinhole depth camera, in pixels
depth_units = 0.001  # depth scale assigned to each software frame
stereo_baseline_mm = 50  # stereo baseline in millimeters (not referenced by the visible test code)
frames = 5 # Number of frames to process


# Function to create depth intrinsics directly
def create_depth_intrinsics():
    """Return rs.intrinsics for the synthetic depth stream.

    Describes an undistorted (all-zero Brown-Conrady coefficients) pinhole
    camera with the principal point at the image center.
    """
    intr = rs.intrinsics()
    intr.width, intr.height = input_res_x, input_res_y
    # Principal point sits exactly at the image center
    intr.ppx, intr.ppy = input_res_x / 2.0, input_res_y / 2.0
    intr.fx = focal_length
    intr.fy = focal_length
    intr.model = rs.distortion.brown_conrady
    intr.coeffs = [0, 0, 0, 0, 0]  # no lens distortion
    return intr


# Function to create a video stream with specified parameters
def create_video_stream(depth_intrinsics):
    """Return an rs.video_stream for a 30 fps z16 depth stream.

    Parameters:
        depth_intrinsics: rs.intrinsics to attach to the stream profile.
    """
    stream = rs.video_stream()
    stream.type = rs.stream.depth
    stream.index = 0
    stream.uid = 0
    stream.width, stream.height = input_res_x, input_res_y
    stream.fps = 30
    stream.bpp = 2  # z16 -> 2 bytes per pixel
    stream.fmt = rs.format.z16
    stream.intrinsics = depth_intrinsics
    return stream


# Function to create a synthetic frame
def create_frame(depth_stream_profile, index):
    """Return a software depth frame whose pixels form a 0..N-1 uint16 ramp.

    The ramp makes every pixel value unique, so a rotation of the image is
    fully distinguishable from the original when validated later.

    Parameters:
        depth_stream_profile: stream profile returned by add_video_stream().
        index: frame counter; also used to derive the timestamp.
    """
    ramp = np.arange(input_res_x * input_res_y, dtype=np.uint16)
    frame = rs.software_video_frame()
    frame.pixels = ramp.reshape((input_res_y, input_res_x)).tobytes()
    frame.bpp = 2
    frame.stride = input_res_x * 2  # bytes per row
    frame.timestamp = index * 33  # ~30 fps spacing
    frame.domain = rs.timestamp_domain.system_time
    frame.frame_number = index
    frame.profile = depth_stream_profile.as_video_stream_profile()
    frame.depth_units = depth_units
    return frame


# Function to validate rotated results based on the angle
# Map a rotation angle (degrees, clockwise-positive) to the `k` argument of
# np.rot90, which rotates counterclockwise for positive k.
_ROT90_K = {90: -1, 180: 2, -90: 1}


def validate_rotation_results(filtered_frame, angle):
    """Check that filtered_frame equals the known input ramp rotated by angle.

    Parameters:
        filtered_frame: depth frame produced by rs.rotation_filter.process().
        angle: rotation in degrees; must be one of 90, 180 or -90.

    Raises:
        ValueError: if angle is not a supported rotation. (Previously an
            unsupported angle left `expected_data` unassigned and the
            comparison failed with a confusing NameError.)
    """
    if angle not in _ROT90_K:
        raise ValueError(f"unsupported rotation angle: {angle}")

    profile = filtered_frame.profile.as_video_stream_profile()
    rotated_height = profile.height()
    rotated_width = profile.width()

    # Reshape the rotated data according to its actual (possibly swapped) dimensions
    rotated_data = np.frombuffer(filtered_frame.get_data(), dtype=np.uint16).reshape(
        (rotated_height, rotated_width))

    # Original data for comparison — the same ramp create_frame() generates
    original_data = np.arange(input_res_x * input_res_y, dtype=np.uint16).reshape(
        (input_res_y, input_res_x))

    expected_data = np.rot90(original_data, k=_ROT90_K[angle])

    # Convert numpy arrays to flat lists so test.check_equal_lists can compare them
    test.check_equal_lists(rotated_data.flatten().tolist(),
                           expected_data.flatten().tolist())
Nir-Az marked this conversation as resolved.
Show resolved Hide resolved


################################################################################################
################################################################################################
with test.closure("Test rotation filter"):
    # Software device with a single synthetic depth sensor
    sw_dev = rs.software_device()
    depth_sensor = sw_dev.add_sensor("Depth")

    # Build intrinsics and register the depth stream profile on the sensor
    depth_stream_profile = depth_sensor.add_video_stream(
        create_video_stream(create_depth_intrinsics()))

    # The syncer collects the frames emitted by the software sensor
    sync = rs.syncer()

    for angle in [90, 180, -90]:  # rotation angles under test
        rotation_filter = rs.rotation_filter()
        rotation_filter.set_option(rs.option.rotation, angle)

        # Stream a handful of frames through the filter for this angle
        depth_sensor.open(depth_stream_profile)
        depth_sensor.start(sync)

        for frame_index in range(frames):
            # Inject a synthetic frame and pull it back through the syncer
            depth_sensor.on_video_frame(create_frame(depth_stream_profile, frame_index))

            fset = sync.wait_for_frames()
            depth = fset.first_or_default(rs.stream.depth)
            filtered_depth = rotation_filter.process(depth)

            # Validate rotated frame results
            validate_rotation_results(filtered_depth, angle)

        # The sensor is reopened fresh for the next angle
        depth_sensor.stop()
        depth_sensor.close()

################################################################################################
test.print_results_and_exit()
3 changes: 3 additions & 0 deletions wrappers/python/pyrs_processing.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -157,6 +157,9 @@ void init_processing(py::module &m) {
decimation_filter.def(py::init<>())
.def(py::init<float>(), "magnitude"_a);

py::class_< rs2::rotation_filter, rs2::filter > rotation_filter(m, "rotation_filter","Performs rotation of frames." );
rotation_filter.def( py::init<>() ).def( py::init< float >(), "value"_a );

py::class_<rs2::temporal_filter, rs2::filter> temporal_filter(m, "temporal_filter", "Temporal filter smooths the image by calculating multiple frames "
"with alpha and delta settings. Alpha defines the weight of current frame, and delta defines the"
"threshold for edge classification and preserving.");
Expand Down
Loading