diff --git a/src/devices/openxrheadset/CMakeLists.txt b/src/devices/openxrheadset/CMakeLists.txt
index a4b9d59..e7f32cc 100644
--- a/src/devices/openxrheadset/CMakeLists.txt
+++ b/src/devices/openxrheadset/CMakeLists.txt
@@ -80,6 +80,7 @@ set(yarp_openxrheadset_driver_SRCS
     PosePublisher.cpp
     CustomPosePublisher.cpp
     FilteredPosePublisher.cpp
+    ExpressionsManager.cpp
 )
 
 set(yarp_openxrheadset_driver_HDRS
@@ -91,6 +92,7 @@ set(yarp_openxrheadset_driver_HDRS
     PosePublisher.h
     CustomPosePublisher.h
     FilteredPosePublisher.h
+    ExpressionsManager.h
 )
 
 set (THRIFTS thrifts/OpenXrHeadsetCommands.thrift)
diff --git a/src/devices/openxrheadset/ExpressionsManager.cpp b/src/devices/openxrheadset/ExpressionsManager.cpp
new file mode 100644
index 0000000..c5c6270
--- /dev/null
+++ b/src/devices/openxrheadset/ExpressionsManager.cpp
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2024 Istituto Italiano di Tecnologia (IIT)
+ * All rights reserved.
+ *
+ * This software may be modified and distributed under the terms of the
+ * BSD-2-Clause license. See the accompanying LICENSE file for details.
+ */
+
+#include <ExpressionsManager.h>
+
+bool ExpressionsManager::configure(const std::string& prefix, bool eyeSupported, bool lipSupported, bool gazeSupported)
+{
+    m_eyeSupported = eyeSupported;
+    m_lipSupported = lipSupported;
+    m_gazeSupported = gazeSupported;
+    m_eyeExpressionsPortName = prefix + "/expressions/eye";
+    m_lipExpressionsPortName = prefix + "/expressions/lip";
+    m_gazePortName = prefix + "/expressions/gaze";
+
+    if (m_eyeSupported)
+    {
+        if (!m_eyeExpressionsPort.open(m_eyeExpressionsPortName))
+        {
+            return false;
+        }
+    }
+
+    if (m_lipSupported)
+    {
+        if (!m_lipExpressionsPort.open(m_lipExpressionsPortName))
+        {
+            return false;
+        }
+    }
+
+    if (m_gazeSupported)
+    {
+        if (!m_gazePort.open(m_gazePortName))
+        {
+            return false;
+        }
+    }
+
+    return true;
+}
+
+void ExpressionsManager::setExpressions(const std::vector<float>& eyeExpressions, const std::vector<float>& lipExpressions)
+{
+    if (m_eyeSupported)
+    {
+        yarp::sig::Vector& eyeExpressionsVector = m_eyeExpressionsPort.prepare();
+        eyeExpressionsVector.resize(eyeExpressions.size());
+        for (size_t i = 0; i < eyeExpressions.size(); ++i)
+        {
+            eyeExpressionsVector[i] = eyeExpressions[i];
+        }
+        m_eyeExpressionsPort.write();
+    }
+
+    if (m_lipSupported)
+    {
+        yarp::sig::Vector& lipExpressionsVector = m_lipExpressionsPort.prepare();
+        lipExpressionsVector.resize(lipExpressions.size());
+        for (size_t i = 0; i < lipExpressions.size(); ++i)
+        {
+            lipExpressionsVector[i] = lipExpressions[i];
+        }
+        m_lipExpressionsPort.write();
+    }
+}
+
+void ExpressionsManager::setGaze(const OpenXrInterface::Pose& headPose, const OpenXrInterface::Pose& gaze)
+{
+    if (!m_gazeSupported || !gaze.positionValid || !headPose.positionValid || !headPose.rotationValid)
+    {
+        return;
+    }
+
+    Eigen::Vector3f gazeDirectionInHead = headPose.rotation.inverse() * gaze.rotation * Eigen::Vector3f::UnitZ();
+
+    yarp::sig::Vector& gazeVector = m_gazePort.prepare();
+    gazeVector.resize(3);
+    gazeVector[0] = gazeDirectionInHead.x();
+    gazeVector[1] = gazeDirectionInHead.y();
+    gazeVector[2] = gazeDirectionInHead.z();
+    m_gazePort.write();
+}
+
+void ExpressionsManager::close()
+{
+    m_eyeExpressionsPort.close();
+    m_lipExpressionsPort.close();
+    m_gazePort.close();
+}
+
+std::string ExpressionsManager::getEyeExpressionsPortName() const
+{
+    return m_eyeExpressionsPortName;
+}
+
+std::string ExpressionsManager::getLipExpressionsPortName() const
+{
+    return m_lipExpressionsPortName;
+}
+
+std::string ExpressionsManager::getGazePortName() const
+{
+    return m_gazePortName;
+}
diff --git a/src/devices/openxrheadset/ExpressionsManager.h b/src/devices/openxrheadset/ExpressionsManager.h
new file mode 100644
index 0000000..5c71929
--- /dev/null
+++ b/src/devices/openxrheadset/ExpressionsManager.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2024 Istituto Italiano di Tecnologia (IIT)
+ * All rights reserved.
+ *
+ * This software may be modified and distributed under the terms of the
+ * BSD-2-Clause license. See the accompanying LICENSE file for details.
+ */
+
+#ifndef YARP_DEV_EXPRESSIONSMANAGER_H
+#define YARP_DEV_EXPRESSIONSMANAGER_H
+
+#include <yarp/os/BufferedPort.h>
+#include <yarp/sig/Vector.h>
+
+#include <string>
+#include <vector>
+#include <OpenXrInterface.h>
+
+class ExpressionsManager
+{
+    yarp::os::BufferedPort<yarp::sig::Vector> m_eyeExpressionsPort;
+    yarp::os::BufferedPort<yarp::sig::Vector> m_lipExpressionsPort;
+    yarp::os::BufferedPort<yarp::sig::Vector> m_gazePort;
+    bool m_eyeSupported{ false };
+    bool m_lipSupported{ false };
+    bool m_gazeSupported{ false };
+    std::string m_eyeExpressionsPortName;
+    std::string m_lipExpressionsPortName;
+    std::string m_gazePortName;
+
+public:
+
+    bool configure(const std::string& prefix, bool eyeSupported, bool lipSupported, bool gazeSupported);
+
+    void setExpressions(const std::vector<float>& eyeExpressions, const std::vector<float>& lipExpressions);
+
+    void setGaze(const OpenXrInterface::Pose& headPose, const OpenXrInterface::Pose& gaze);
+
+    void close();
+
+    std::string getEyeExpressionsPortName() const;
+
+    std::string getLipExpressionsPortName() const;
+
+    std::string getGazePortName() const;
+};
+
+
+
+#endif // YARP_DEV_EXPRESSIONSMANAGER_H
\ No newline at end of file
diff --git a/src/devices/openxrheadset/FilteredPosePublisher.cpp b/src/devices/openxrheadset/FilteredPosePublisher.cpp
index 7cb9d95..3e55efc 100644
--- a/src/devices/openxrheadset/FilteredPosePublisher.cpp
+++ b/src/devices/openxrheadset/FilteredPosePublisher.cpp
@@ -146,6 +146,13 @@ void FilteredPosePublisher::updateInputPose(const OpenXrInterface::NamedPoseVelo
     {
         return;
     }
-
-    PosePublisher::updateInputPose(filterJumps(input));
+    switch (input.filterType)
+    {
+    case PoseFilterType::JUMP_FILTER:
+        PosePublisher::updateInputPose(filterJumps(input));
+        break;
+    default:
+        PosePublisher::updateInputPose(input);
+        break;
+    }
 }
diff --git a/src/devices/openxrheadset/OpenXrHeadset.cpp b/src/devices/openxrheadset/OpenXrHeadset.cpp
index c2798c8..451db79 100644
--- a/src/devices/openxrheadset/OpenXrHeadset.cpp
+++ b/src/devices/openxrheadset/OpenXrHeadset.cpp
@@ -262,6 +262,11 @@ bool yarp::dev::OpenXrHeadset::open(yarp::os::Searchable &cfg)
     m_openXrInterfaceSettings.posesPredictionInMs = cfg.check("vr_poses_prediction_in_ms", yarp::os::Value(0.0)).asFloat64();
     m_openXrInterfaceSettings.hideWindow = (m_useNativeQuadLayers && !cfg.check("hide_window")) || (cfg.check("hide_window") && (cfg.find("hide_window").isNull() || cfg.find("hide_window").asBool()));
     m_openXrInterfaceSettings.renderInPlaySpace = cfg.check("render_in_play_space") && (cfg.find("render_in_play_space").isNull() || cfg.find("render_in_play_space").asBool());
+    bool noGaze = cfg.check("no_gaze") && (cfg.find("no_gaze").isNull() || cfg.find("no_gaze").asBool());
+    m_openXrInterfaceSettings.useGaze = !noGaze;
+
+    bool noExpressions = cfg.check("no_expressions") && (cfg.find("no_expressions").isNull() || cfg.find("no_expressions").asBool());
+    m_openXrInterfaceSettings.useExpressions = !noExpressions;
 
     m_getStickAsAxis = cfg.check("stick_as_axis", yarp::os::Value(false)).asBool();
     m_rootFrame = cfg.check("tf_root_frame", yarp::os::Value("openxr_origin")).asString();
@@ -462,6 +467,12 @@ bool yarp::dev::OpenXrHeadset::threadInit()
         slide.layer.setImage(slide.options.initialSlide);
     }
 
+    // We know if the expressions are supported only after the initialization of the OpenXrInterface
+    m_expressionsManager.configure(m_prefix,
+                                   m_openXrInterface.eyeExpressionsSupported(),
+                                   m_openXrInterface.lipExpressionsSupported(),
+                                   m_openXrInterface.gazeSupported());
+
     this->yarp().attachAsServer(this->m_rpcPort);
     if (!m_rpcPort.open(m_rpcPortName))
     {
@@ -507,6 +518,7 @@ void yarp::dev::OpenXrHeadset::threadRelease()
     m_labels.clear();
     m_slides.clear();
     m_eyesManager.close();
+    m_expressionsManager.close();
 
     m_openXrInterface.close();
 
@@ -618,6 +630,9 @@ void yarp::dev::OpenXrHeadset::run()
         m_posesManager.setTransformFromRawToRootFrame(m_rootFrameRawHRootFrame);
 
         m_posesManager.publishFrames();
+
+        m_expressionsManager.setExpressions(m_openXrInterface.eyeExpressions(), m_openXrInterface.lipExpressions());
+        m_expressionsManager.setGaze(m_openXrInterface.headPose(), m_openXrInterface.gazePose());
     }
     else
     {
@@ -963,6 +978,13 @@ bool yarp::dev::OpenXrHeadset::setInterCameraDistance(const double distance)
     return m_eyesManager.setInterCameraDistance(distance);
 }
 
+double yarp::dev::OpenXrHeadset::getIPD()
+{
+    std::lock_guard<std::mutex> lock(m_mutex);
+
+    return m_openXrInterface.ipd();
+}
+
 std::string yarp::dev::OpenXrHeadset::getLeftImageControlPortName()
 {
     std::lock_guard<std::mutex> lock(m_mutex);
@@ -1060,6 +1082,42 @@ bool yarp::dev::OpenXrHeadset::restartJoypadControlServer()
     return startJoypadControlServer();
 }
 
+bool yarp::dev::OpenXrHeadset::eyeExpressionsEnabled()
+{
+    std::lock_guard<std::mutex> lock(m_mutex);
+    return m_openXrInterface.eyeExpressionsSupported();
+}
+
+std::string yarp::dev::OpenXrHeadset::getEyeExpressionsPortName()
+{
+    std::lock_guard<std::mutex> lock(m_mutex);
+    return m_expressionsManager.getEyeExpressionsPortName();
+}
+
+bool yarp::dev::OpenXrHeadset::lipExpressionsEnabled()
+{
+    std::lock_guard<std::mutex> lock(m_mutex);
+    return m_openXrInterface.lipExpressionsSupported();
+}
+
+std::string yarp::dev::OpenXrHeadset::getLipExpressionsPortName()
+{
+    std::lock_guard<std::mutex> lock(m_mutex);
+    return m_expressionsManager.getLipExpressionsPortName();
+}
+
+bool yarp::dev::OpenXrHeadset::gazeEnabled()
+{
+    std::lock_guard<std::mutex> lock(m_mutex);
+    return m_openXrInterface.gazeSupported();
+}
+
+std::string yarp::dev::OpenXrHeadset::getGazePortName()
+{
+    std::lock_guard<std::mutex> lock(m_mutex);
+    return m_expressionsManager.getGazePortName();
+}
+
 bool yarp::dev::OpenXrHeadset::startJoypadControlServer()
 {
     stopJoypadControlServer();
diff --git a/src/devices/openxrheadset/OpenXrHeadset.h b/src/devices/openxrheadset/OpenXrHeadset.h
index 5379aa4..557d62b 100644
--- a/src/devices/openxrheadset/OpenXrHeadset.h
+++ b/src/devices/openxrheadset/OpenXrHeadset.h
@@ -31,6 +31,7 @@
 #include
 #include
 #include
+#include <ExpressionsManager.h>
 #include
 #include
@@ -86,14 +87,14 @@ class yarp::dev::OpenXrHeadset : public yarp::dev::DeviceDriver,
     //OpenXrHeadsetCommands
     /**
      * Get the current interaction profile for the left hand
-     * It returns a string that can be one between none, khr_simple_controller, oculus_touch_controller or htc_vive_controller
+     * It returns a string that can be one among none, khr_simple_controller, oculus_touch_controller, htc_vive_controller, or htc_vive_focus3_controller
      * @return a string indicating the interaction profile in use.
      */
     virtual std::string getLeftHandInteractionProfile() override;
 
     /**
      * Get the current interaction profile for the right hand
-     * It returns a string that can be one between none, khr_simple_controller, oculus_touch_controller or htc_vive_controller
+     * It returns a string that can be one among none, khr_simple_controller, oculus_touch_controller, htc_vive_controller, or htc_vive_focus3_controller
      * @return a string indicating the interaction profile in use.
      */
     virtual std::string getRightHandInteractionProfile() override;
 
@@ -190,7 +191,7 @@ class yarp::dev::OpenXrHeadset : public yarp::dev::DeviceDriver,
 
     /**
      * Get the current lateral distance between the visualization of the robot cameras.
-     * @return The IPD in meters.
+     * @return The distance in meters.
      */
     virtual double getInterCameraDistance() override;
 
@@ -201,6 +202,12 @@ class yarp::dev::OpenXrHeadset : public yarp::dev::DeviceDriver,
      */
     virtual bool setInterCameraDistance(const double distance) override;
 
+    /**
+     * Get the current IPD (Inter Pupillary Distance) of the VR eyes.
+     * @return The IPD in meters
+     */
+    virtual double getIPD() override;
+
     /**
      * Get the name of the port trough which it is possible to control the left image.
      * @return the name of the port to control the left image.
@@ -270,6 +277,42 @@ class yarp::dev::OpenXrHeadset : public yarp::dev::DeviceDriver,
      */
     virtual bool restartJoypadControlServer() override;
 
+    /**
+     * Check if the eye expressions are enabled
+     * @return True if the eye expressions are enabled, false otherwise
+     */
+    virtual bool eyeExpressionsEnabled() override;
+
+    /**
+     * Get the name of the port through which it is possible to get the eye expressions.
+     * @return the name of the port to get the eye expressions.
+     */
+    virtual std::string getEyeExpressionsPortName() override;
+
+    /**
+     * Check if the lip expressions are enabled
+     * @return True if the lip expressions are enabled, false otherwise
+     */
+    virtual bool lipExpressionsEnabled() override;
+
+    /**
+     * Get the name of the port through which it is possible to get the lip expressions.
+     * @return the name of the port to get the lip expressions.
+     */
+    virtual std::string getLipExpressionsPortName() override;
+
+    /**
+     * Check if the gaze acquisition is enabled
+     * @return True if the gaze acquisition is enabled, false otherwise
+     */
+    virtual bool gazeEnabled() override;
+
+    /**
+     * Get the name of the port through which it is possible to get the gaze position.
+     * @return the name of the port to get the gaze position.
+     */
+    virtual std::string getGazePortName() override;
+
 private:
 
     /**
@@ -323,6 +366,8 @@ class yarp::dev::OpenXrHeadset : public yarp::dev::DeviceDriver,
 
     EyesManager m_eyesManager;
 
+    ExpressionsManager m_expressionsManager;
+
     std::vector m_huds;
     std::vector m_labels;
     std::vector m_slides;
diff --git a/src/devices/openxrheadset/OpenXrInterface.cpp b/src/devices/openxrheadset/OpenXrInterface.cpp
index b6cb366..21b6dca 100644
--- a/src/devices/openxrheadset/OpenXrInterface.cpp
+++ b/src/devices/openxrheadset/OpenXrInterface.cpp
@@ -40,7 +40,11 @@ bool OpenXrInterface::checkExtensions()
     bool opengl_supported = false;
     bool depth_supported = false;
     bool debug_supported = false;
+    bool gaze_supported = false;
+    bool htc_facial_tracking_supported = false;
+
+    std::stringstream supported_extensions;
+    supported_extensions << "Supported extensions: ";
 
         if (strcmp(XR_HTCX_VIVE_TRACKER_INTERACTION_EXTENSION_NAME, ext_props[i].extensionName) == 0) {
             m_pimpl->htc_trackers_supported = true;
         }
+
+        if (strcmp(XR_HTC_VIVE_FOCUS3_CONTROLLER_INTERACTION_EXTENSION_NAME, ext_props[i].extensionName) == 0) {
+            m_pimpl->focus3_supported = true;
+        }
+
+        if (strcmp(XR_HTC_FACIAL_TRACKING_EXTENSION_NAME, ext_props[i].extensionName) == 0) {
+            htc_facial_tracking_supported = true;
+        }
+
+        if (strcmp(XR_EXT_EYE_GAZE_INTERACTION_EXTENSION_NAME, ext_props[i].extensionName) == 0) {
+            gaze_supported = true;
+        }
+
+        supported_extensions << std::endl << " - " << ext_props[i].extensionName;
     }
 
+    yCInfo(OPENXRHEADSET) << supported_extensions.str();
+
     // A graphics extension like OpenGL is required to draw anything in VR
     if (!opengl_supported) {
         yCError(OPENXRHEADSET) << "Runtime does not support OpenGL extension!";
@@ -79,6 +99,20 @@ bool OpenXrInterface::checkExtensions()
         yCWarning(OPENXRHEADSET) << "Runtime does not support the HTC Vive Trackers!";
     }
 
+    if (!m_pimpl->focus3_supported) {
+        yCWarning(OPENXRHEADSET) << "Runtime does not support the HTC Vive Focus 3 controllers!";
+    }
+
+    if (!htc_facial_tracking_supported) {
+        yCWarning(OPENXRHEADSET) << "Runtime does not support the HTC Vive Facial Tracking!";
+        m_pimpl->use_expressions = false;
+    }
+
+    if (!gaze_supported) {
+        yCWarning(OPENXRHEADSET) << "Runtime does not support the eye gaze extension!";
+        m_pimpl->use_gaze = false;
+    }
+
     return true;
 }
 
@@ -118,6 +152,18 @@ bool OpenXrInterface::prepareXrInstance()
     {
         requestedExtensions.push_back(XR_HTCX_VIVE_TRACKER_INTERACTION_EXTENSION_NAME);
     }
+    if (m_pimpl->focus3_supported)
+    {
+        requestedExtensions.push_back(XR_HTC_VIVE_FOCUS3_CONTROLLER_INTERACTION_EXTENSION_NAME);
+    }
+    if (m_pimpl->use_expressions)
+    {
+        requestedExtensions.push_back(XR_HTC_FACIAL_TRACKING_EXTENSION_NAME);
+    }
+    if (m_pimpl->use_gaze)
+    {
+        requestedExtensions.push_back(XR_EXT_EYE_GAZE_INTERACTION_EXTENSION_NAME);
+    }
 
     // Populate the info to create the instance
     XrInstanceCreateInfo instanceCreateInfo
@@ -211,6 +257,24 @@ bool OpenXrInterface::prepareXrInstance()
         return false;
     }
 
+    if (m_pimpl->use_expressions)
+    {
+        result = xrGetInstanceProcAddr(m_pimpl->instance, "xrCreateFacialTrackerHTC",
+                                       (PFN_xrVoidFunction*)&(m_pimpl->pfn_xrCreateFacialTrackerHTC));
+        if (!m_pimpl->checkXrOutput(result, "Failed to get the function to create the HTC facial tracker!"))
+            return false;
+
+        result = xrGetInstanceProcAddr(m_pimpl->instance, "xrDestroyFacialTrackerHTC",
+                                       (PFN_xrVoidFunction*)&(m_pimpl->pfn_xrDestroyFacialTrackerHTC));
+        if (!m_pimpl->checkXrOutput(result, "Failed to get the function to destroy the HTC facial tracker!"))
+            return false;
+
+        result = xrGetInstanceProcAddr(m_pimpl->instance, "xrGetFacialExpressionsHTC",
+                                       (PFN_xrVoidFunction*)&(m_pimpl->pfn_xrGetFacialExpressionsHTC));
+        if (!m_pimpl->checkXrOutput(result, "Failed to get the function to get the HTC facial expressions!"))
+            return false;
+    }
+
     return true;
 }
 
@@ -228,7 +292,7 @@ bool OpenXrInterface::prepareXrSystem()
 
     yCInfo(OPENXRHEADSET) << "Successfully got XrSystem with id" << m_pimpl->system_id << "for HMD form factor";
 
-    printSystemProperties();
+    checkSystemProperties();
 
     uint32_t view_count = 0;
     // We first get the number of view configurations for the STEREO type
@@ -299,12 +363,37 @@ bool OpenXrInterface::prepareXrSystem()
     return true;
 }
 
-void OpenXrInterface::printSystemProperties()
+void OpenXrInterface::checkSystemProperties()
 {
     XrSystemProperties system_props;
     system_props.type = XR_TYPE_SYSTEM_PROPERTIES;
     system_props.next = NULL;
 
+    void** next_chain = &system_props.next;
+
+    XrSystemFacialTrackingPropertiesHTC facial_tracking_props;
+    facial_tracking_props.type = XR_TYPE_SYSTEM_FACIAL_TRACKING_PROPERTIES_HTC;
+    facial_tracking_props.next = NULL;
+    facial_tracking_props.supportEyeFacialTracking = XR_FALSE;
+    facial_tracking_props.supportLipFacialTracking = XR_FALSE;
+
+    if (m_pimpl->use_expressions)
+    {
+        *next_chain = &facial_tracking_props;
+        next_chain = &facial_tracking_props.next;
+    }
+
+    XrSystemEyeGazeInteractionPropertiesEXT eye_gaze_props;
+    eye_gaze_props.type = XR_TYPE_SYSTEM_EYE_GAZE_INTERACTION_PROPERTIES_EXT;
+    eye_gaze_props.next = NULL;
+    eye_gaze_props.supportsEyeGazeInteraction = XR_FALSE;
+
+    if (m_pimpl->use_gaze)
+    {
+        *next_chain = &eye_gaze_props;
+        next_chain = &eye_gaze_props.next;
+    }
+
     XrResult result = xrGetSystemProperties(m_pimpl->instance, m_pimpl->system_id, &system_props);
 
     if (!XR_SUCCEEDED(result))
     {
@@ -321,6 +410,25 @@
            system_props.graphicsProperties.maxSwapchainImageWidth);
     yCInfo(OPENXRHEADSET, "\tOrientation Tracking: %d", system_props.trackingProperties.orientationTracking);
     yCInfo(OPENXRHEADSET, "\tPosition Tracking   : %d", system_props.trackingProperties.positionTracking);
+
+    if (m_pimpl->use_expressions)
+    {
+        yCInfo(OPENXRHEADSET, "Facial tracking properties for system %lu: Eye tracking %d, Lip tracking %d",
+               system_props.systemId, facial_tracking_props.supportEyeFacialTracking, facial_tracking_props.supportLipFacialTracking);
+        m_pimpl->htc_eye_facial_tracking_supported = facial_tracking_props.supportEyeFacialTracking;
+        m_pimpl->htc_lip_facial_tracking_supported = facial_tracking_props.supportLipFacialTracking;
+    }
+
+    if (m_pimpl->use_gaze)
+    {
+        yCInfo(OPENXRHEADSET, "Eye gaze properties for system %lu: Eye gaze %d",
+               system_props.systemId, eye_gaze_props.supportsEyeGazeInteraction);
+        if (!eye_gaze_props.supportsEyeGazeInteraction)
+        {
+            yCWarning(OPENXRHEADSET) << "The runtime does not seem to support eye gaze interaction! Trying to use it anyway.";
+        }
+    }
+
 }
 
 bool OpenXrInterface::prepareGL()
@@ -417,6 +525,44 @@ bool OpenXrInterface::prepareXrSession()
     if (! m_pimpl->checkXrOutput(result, "Failed to create view space!"))
         return false;
 
+    if (m_pimpl->htc_eye_facial_tracking_supported)
+    {
+        XrFacialTrackerCreateInfoHTC facial_tracker_create_info = {
+            .type = XR_TYPE_FACIAL_TRACKER_CREATE_INFO_HTC,
+            .next = NULL,
+            .facialTrackingType = XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC,
+        };
+        result = m_pimpl->pfn_xrCreateFacialTrackerHTC(m_pimpl->session, &facial_tracker_create_info, &m_pimpl->htc_eye_facial_tracker);
+        if (!m_pimpl->checkXrOutput(result, "Failed to create eye facial tracker! It will not be used."))
Avoiding using it.")) + { + m_pimpl->htc_eye_facial_tracking_supported = false; + } + else + { + yCInfo(OPENXRHEADSET) << "Successfully created eye facial tracker!"; + m_pimpl->htc_eye_expressions.resize(XR_FACIAL_EXPRESSION_EYE_COUNT_HTC, 0.0); + } + } + + if (m_pimpl->htc_lip_facial_tracking_supported) + { + XrFacialTrackerCreateInfoHTC facial_tracker_create_info = { + .type = XR_TYPE_FACIAL_TRACKER_CREATE_INFO_HTC, + .next = NULL, + .facialTrackingType = XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC, + }; + result = m_pimpl->pfn_xrCreateFacialTrackerHTC(m_pimpl->session, &facial_tracker_create_info, &m_pimpl->htc_lip_facial_tracker); + if (!m_pimpl->checkXrOutput(result, "Failed to create lip facial tracker! Avoiding using it.")) + { + m_pimpl->htc_lip_facial_tracking_supported = false; + } + else + { + yCInfo(OPENXRHEADSET) << "Successfully created lip facial tracker!"; + m_pimpl->htc_lip_expressions.resize(XR_FACIAL_EXPRESSION_LIP_COUNT_HTC, 0.0); + } + } + return true; } @@ -666,6 +812,51 @@ bool OpenXrInterface::prepareXrActions() right_hand.inputsDeclarations[HTC_VIVE_INTERACTION_PROFILE_TAG] = vive_left_inputs; //the inputs from the left and right hand are the same + if (m_pimpl->focus3_supported) + { + InputActionsDeclaration& focus3_left_inputs = left_hand.inputsDeclarations[HTC_VIVE_FOCUS3_CONTROLLER_INTERACTION_PROFILE_TAG]; + InputActionsDeclaration& focus3_right_inputs = right_hand.inputsDeclarations[HTC_VIVE_FOCUS3_CONTROLLER_INTERACTION_PROFILE_TAG]; + + focus3_left_inputs.poses = + { + {"/input/grip/pose", "grip"} + }; + focus3_right_inputs.poses = focus3_left_inputs.poses; + + + focus3_left_inputs.buttons = + { + //We avoid the menu button because it is used to open SteamVR when using the Streaming Hub + {"/input/x/click", "x"}, + {"/input/y/click", "y"}, + {"/input/trigger/click", "trigger_click"}, + {"/input/squeeze/click", "squeeze_click"}, + {"/input/thumbstick/click", "thumbstick_click"} + }; + focus3_right_inputs.buttons = + { + {"/input/a/click", "a"}, + {"/input/b/click", "b"}, + {"/input/trigger/click", "trigger_click"}, + {"/input/squeeze/click", "squeeze_click"}, + {"/input/thumbstick/click", "thumbstick_click"} + }; + + focus3_left_inputs.axes = + { + {"/input/trigger/value", "trigger"}, + {"/input/squeeze/value", "squeeze"} + }; + focus3_right_inputs.axes = focus3_left_inputs.axes; + + focus3_left_inputs.thumbsticks = + { + {"/input/thumbstick", "thumbstick"} + }; + + focus3_right_inputs.thumbsticks = focus3_left_inputs.thumbsticks; + } + std::vector topLevelPathsDeclaration = {left_hand, //The left hand should always come first in this list right_hand}; //The right hand should always come second in this list @@ -729,6 +920,24 @@ bool OpenXrInterface::prepareXrActions() interactionProfilesPrefixes.push_back({HTC_VIVE_TRACKER_INTERACTION_PROFILE_TAG, "vive_tracker_"}); } + if (m_pimpl->use_gaze) + { + InputActionsDeclaration gazeInputs; + + gazeInputs.poses = + { + {"/input/gaze_ext/pose", "pose", PoseFilterType::NONE} + }; + + TopLevelPathDeclaration gaze; + gaze.stringPath = "/user/eyes_ext"; + gaze.actionNamePrefix = "eyes_"; + gaze.inputsDeclarations[GAZE_INTERACTION_PROFILE_TAG] = gazeInputs; + + topLevelPathsDeclaration.push_back(gaze); //The gaze should be the last one in this list + interactionProfilesPrefixes.push_back({ GAZE_INTERACTION_PROFILE_TAG, "gaze_" }); + } + if (!m_pimpl->fillActionBindings(interactionProfilesPrefixes, topLevelPathsDeclaration)) return false; @@ -927,6 +1136,8 @@ void OpenXrInterface::updateXrSpaces() 
     m_pimpl->mid_views_pose_inverted.orientation = toXr(toEigen(m_pimpl->mid_views_pose.orientation).inverse());
     m_pimpl->mid_views_pose_inverted.position = toXr(toEigen(m_pimpl->mid_views_pose_inverted.orientation) * -toEigen(m_pimpl->mid_views_pose.position));
 
+    m_pimpl->ipd = (toEigen(m_pimpl->views[1].pose.position) - toEigen(m_pimpl->views[0].pose.position)).norm();
+
     for (size_t i = 0; i < m_pimpl->views.size(); ++i)
     {
 #ifdef DEBUG_RENDERING_LOCATION
@@ -993,6 +1204,44 @@ void OpenXrInterface::updateXrActions()
             m_pimpl->checkXrOutput(result, "Failed to update the status of %s!", thumbstick.name.c_str()); //Continue anyway
         }
     }
+
+    if (m_pimpl->htc_eye_facial_tracking_supported)
+    {
+        XrFacialExpressionsHTC expressions = { .type = XR_TYPE_FACIAL_EXPRESSIONS_HTC,
+                                               .next = NULL,
+                                               .isActive = XR_TRUE,
+                                               .sampleTime = m_pimpl->frame_state.predictedDisplayTime,
+                                               .expressionCount = static_cast<uint32_t>(m_pimpl->htc_eye_expressions.size()),
+                                               .expressionWeightings = m_pimpl->htc_eye_expressions.data() };
+        result = m_pimpl->pfn_xrGetFacialExpressionsHTC(m_pimpl->htc_eye_facial_tracker, &expressions);
+        if (!m_pimpl->checkXrOutput(result, "Failed to get the facial expressions of the eye tracker!") || !expressions.isActive)
+        {
+            if (!expressions.isActive)
+            {
+                yCWarningThrottle(OPENXRHEADSET, 5.0, "The eye facial tracker is not active!");
+            }
+            m_pimpl->htc_eye_expressions.assign(m_pimpl->htc_eye_expressions.size(), 0.0);
+        }
+    }
+
+    if (m_pimpl->htc_lip_facial_tracking_supported)
+    {
+        XrFacialExpressionsHTC expressions = { .type = XR_TYPE_FACIAL_EXPRESSIONS_HTC,
+                                               .next = NULL,
+                                               .isActive = XR_TRUE,
+                                               .sampleTime = m_pimpl->frame_state.predictedDisplayTime,
+                                               .expressionCount = static_cast<uint32_t>(m_pimpl->htc_lip_expressions.size()),
+                                               .expressionWeightings = m_pimpl->htc_lip_expressions.data() };
+        result = m_pimpl->pfn_xrGetFacialExpressionsHTC(m_pimpl->htc_lip_facial_tracker, &expressions);
+        if (!m_pimpl->checkXrOutput(result, "Failed to get the facial expressions of the lip tracker!") || !expressions.isActive)
+        {
+            if (!expressions.isActive)
+            {
+                yCWarningThrottle(OPENXRHEADSET, 5.0, "The lip facial tracker is not active!");
+            }
+            m_pimpl->htc_lip_expressions.assign(m_pimpl->htc_lip_expressions.size(), 0.0);
+        }
+    }
 }
 
 bool OpenXrInterface::updateInteractionProfiles()
@@ -1387,6 +1636,8 @@ bool OpenXrInterface::initialize(const OpenXrInterfaceSettings &settings)
     m_pimpl->hideWindow = settings.hideWindow;
     m_pimpl->renderInPlaySpace = settings.renderInPlaySpace;
+    m_pimpl->use_gaze = settings.useGaze;
+    m_pimpl->use_expressions = settings.useExpressions;
 
 #ifdef DEBUG_RENDERING_LOCATION
     m_pimpl->renderInPlaySpace = true;
@@ -1555,6 +1806,11 @@ bool OpenXrInterface::isRunning() const
     return m_pimpl->initialized && !m_pimpl->closing;
 }
 
+float OpenXrInterface::ipd() const
+{
+    return m_pimpl->ipd;
+}
+
 OpenXrInterface::Pose OpenXrInterface::headPose() const
 {
     return XrSpaceLocationToPose(m_pimpl->view_space_location);
@@ -1757,10 +2013,62 @@ bool OpenXrInterface::shouldResetLocalReferenceSpace()
     return shouldReset;
 }
 
+bool OpenXrInterface::eyeExpressionsSupported() const
+{
+    return m_pimpl->htc_eye_facial_tracking_supported;
+}
+
+bool OpenXrInterface::lipExpressionsSupported() const
+{
+    return m_pimpl->htc_lip_facial_tracking_supported;
+}
+
+const std::vector<float>& OpenXrInterface::eyeExpressions() const
+{
+    return m_pimpl->htc_eye_expressions;
+}
+
+const std::vector<float>& OpenXrInterface::lipExpressions() const
+{
+    return m_pimpl->htc_lip_expressions;
+}
+
+bool OpenXrInterface::gazeSupported() const
+{
+    return m_pimpl->use_gaze;
+}
+
+OpenXrInterface::Pose OpenXrInterface::gazePose() const
+{
+    if (!m_pimpl->use_gaze)
+    {
+        return Pose();
+    }
+
+    const std::vector<NamedPoseVelocity>& currentPoses = m_pimpl->top_level_paths.back().currentActions().poses; //eyes are in the last position
+
+    if (currentPoses.size() == 0) //no pose in the current interaction profile
+    {
+        return OpenXrInterface::Pose();
+    }
+
+    return currentPoses.front().pose;
+}
+
 void OpenXrInterface::close()
 {
     m_pimpl->closing = true;
 
+    if (m_pimpl->htc_eye_facial_tracking_supported)
+    {
+        m_pimpl->pfn_xrDestroyFacialTrackerHTC(m_pimpl->htc_eye_facial_tracker);
+        m_pimpl->htc_eye_facial_tracking_supported = false;
+    }
+
+    if (m_pimpl->htc_lip_facial_tracking_supported)
+    {
+        m_pimpl->pfn_xrDestroyFacialTrackerHTC(m_pimpl->htc_lip_facial_tracker);
+        m_pimpl->htc_lip_facial_tracking_supported = false;
+    }
+
     if (m_pimpl->glFrameBufferId != 0) {
         glDeleteFramebuffers(1, &(m_pimpl->glFrameBufferId));
         m_pimpl->glFrameBufferId = 0;
diff --git a/src/devices/openxrheadset/OpenXrInterface.h b/src/devices/openxrheadset/OpenXrInterface.h
index 00f1595..d29d321 100644
--- a/src/devices/openxrheadset/OpenXrInterface.h
+++ b/src/devices/openxrheadset/OpenXrInterface.h
@@ -61,11 +61,19 @@ class IOpenXrQuadLayer
     virtual void setEnabled(bool enabled) = 0;
 };
 
+enum class PoseFilterType
+{
+    NONE,
+    JUMP_FILTER
+};
+
 struct OpenXrInterfaceSettings
 {
     double posesPredictionInMs{0.0};
     bool hideWindow{false};
     bool renderInPlaySpace{false};
+    bool useGaze{ true };
+    bool useExpressions{ true };
 };
 
 class OpenXrInterface
@@ -82,7 +90,7 @@ class OpenXrInterface
 
     bool prepareXrSystem();
 
-    void printSystemProperties();
+    void checkSystemProperties();
 
     bool prepareGL();
 
@@ -141,6 +149,7 @@ class OpenXrInterface
         std::string name;
         Pose pose;
         Velocity velocity;
+        PoseFilterType filterType;
 
         static NamedPoseVelocity Identity(const std::string& name);
     };
@@ -169,6 +178,8 @@ class OpenXrInterface
 
     bool isRunning() const;
 
+    float ipd() const;
+
     Pose headPose() const;
 
     Velocity headVelocity() const;
@@ -197,6 +208,18 @@ class OpenXrInterface
 
     bool shouldResetLocalReferenceSpace();
 
+    bool eyeExpressionsSupported() const;
+
+    bool lipExpressionsSupported() const;
+
+    const std::vector<float>& eyeExpressions() const;
+
+    const std::vector<float>& lipExpressions() const;
+
+    bool gazeSupported() const;
+
+    Pose gazePose() const;
+
     void close();
 };
 
diff --git a/src/devices/openxrheadset/impl/OpenXrInterfaceImpl.cpp b/src/devices/openxrheadset/impl/OpenXrInterfaceImpl.cpp
index 761db55..7e62ff6 100644
--- a/src/devices/openxrheadset/impl/OpenXrInterfaceImpl.cpp
+++ b/src/devices/openxrheadset/impl/OpenXrInterfaceImpl.cpp
@@ -289,6 +289,7 @@ bool OpenXrInterface::Implementation::fillActionBindings(const std::vector
diff --git a/src/devices/openxrheadset/impl/OpenXrInterfaceImpl.h b/src/devices/openxrheadset/impl/OpenXrInterfaceImpl.h
@@ -112,9 +114,14 @@ struct ActionDeclaration
     std::string nameSuffix;
 };
 
+struct PoseActionDeclaration : public ActionDeclaration
+{
+    PoseFilterType filterType{ PoseFilterType::JUMP_FILTER };
+};
+
 struct InputActionsDeclaration
 {
-    std::vector<ActionDeclaration> poses;
+    std::vector<PoseActionDeclaration> poses;
 
     std::vector buttons;
 
@@ -277,6 +284,8 @@ class OpenXrInterface::Implementation
     // position of a frame in the middle of the eyes, oriented as the first eye
     XrPosef mid_views_pose_inverted;
 
+    float ipd = 0.06f;
+
     // List of top level paths to retrieve the state of each action
     std::vector top_level_paths;
 
@@ -292,6 +301,38 @@ class OpenXrInterface::Implementation
     // Map defining which tracker is connected
     std::unordered_map htc_trackers_status;
 
+    // flag to check if the HTC VIVE Focus3 controllers are supported by the runtime.
+    bool focus3_supported = false;
+
+    //flag to check if eye facial tracking is supported by the headset.
+    bool htc_eye_facial_tracking_supported = false;
+
+    //flag to check if lip facial tracking is supported by the headset.
+    bool htc_lip_facial_tracking_supported = false;
+
+    // Pointer to function to create the HTC facial tracker
+    PFN_xrCreateFacialTrackerHTC pfn_xrCreateFacialTrackerHTC = NULL;
+
+    // Pointer to function to destroy the HTC facial tracker
+    PFN_xrDestroyFacialTrackerHTC pfn_xrDestroyFacialTrackerHTC = NULL;
+
+    // Pointer to function to get the facial expressions
+    PFN_xrGetFacialExpressionsHTC pfn_xrGetFacialExpressionsHTC = NULL;
+
+    // Handles for eye and lip tracking
+    XrFacialTrackerHTC htc_eye_facial_tracker = XR_NULL_HANDLE;
+    XrFacialTrackerHTC htc_lip_facial_tracker = XR_NULL_HANDLE;
+
+    // Structs to store the facial expressions
+    std::vector<float> htc_eye_expressions;
+    std::vector<float> htc_lip_expressions;
+
+    // Flag to enable the use of expressions
+    bool use_expressions = true;
+
+    // Flag to enable the use of gaze
+    bool use_gaze = true;
+
     // state of the application
     XrSessionState state = XR_SESSION_STATE_UNKNOWN;
 
diff --git a/src/devices/openxrheadset/thrifts/OpenXrHeadsetCommands.thrift b/src/devices/openxrheadset/thrifts/OpenXrHeadsetCommands.thrift
index 499fe51..a772b8f 100644
--- a/src/devices/openxrheadset/thrifts/OpenXrHeadsetCommands.thrift
+++ b/src/devices/openxrheadset/thrifts/OpenXrHeadsetCommands.thrift
@@ -10,14 +10,14 @@ service OpenXrHeadsetCommands
 {
     /**
      * Get the current interaction profile for the left hand
-     * It returns a string that can be one between none, khr_simple_controller, oculus_touch_controller or htc_vive_controller
+     * It returns a string that can be one among none, khr_simple_controller, oculus_touch_controller, htc_vive_controller, or htc_vive_focus3_controller
      * @return a string indicating the interaction profile in use.
      */
     string getLeftHandInteractionProfile();
 
     /**
      * Get the current interaction profile for the right hand
-     * It returns a string that can be one between none, khr_simple_controller, oculus_touch_controller or htc_vive_controller
+     * It returns a string that can be one among none, khr_simple_controller, oculus_touch_controller, htc_vive_controller, or htc_vive_focus3_controller
      * @return a string indicating the interaction profile in use.
      */
     string getRightHandInteractionProfile();
 
@@ -114,7 +114,7 @@ service OpenXrHeadsetCommands
 
     /**
      * Get the current lateral distance between the visualization of the robot cameras.
-     * @return The IPD in meters.
+     * @return The distance in meters.
      */
     double getInterCameraDistance();
 
@@ -125,6 +125,12 @@ service OpenXrHeadsetCommands
      */
     bool setInterCameraDistance(1:double distance);
 
+    /**
+     * Get the current IPD (Inter Pupillary Distance) of the VR eyes.
+     * @return The IPD in meters
+     */
+    double getIPD();
+
     /**
      * Get the name of the port trough which it is possible to control the left image.
      * @return the name of the port to control the left image.
@@ -180,7 +186,7 @@ service OpenXrHeadsetCommands
      */
     bool setCustomPoseRelativeOrientation(1:string customFrameName, 2:double angle1, 3:double angle2, 4:double angle3);
 
-    /** 
+    /**
     * Reset the transforms all the published tranforms.
     * This will also delete all the transforms currently stored in the transform server,
    * so also the static poses will be published again. This must be used with caution,
@@ -188,9 +194,46 @@ service OpenXrHeadsetCommands
    */
    bool resetTransforms();
 
-    /** 
+    /**
    * Start the joypad control server. The server will restart if already started.
    * @return True if the server is started successfully, false otherwise.
    */
+    bool restartJoypadControlServer();
+
+    /**
+     * Check if the eye expressions are enabled
+     * @return True if the eye expressions are enabled, false otherwise
+     */
+    bool eyeExpressionsEnabled();
+
+    /**
+     * Get the name of the port through which it is possible to get the eye expressions.
+     * @return the name of the port to get the eye expressions.
+     */
+    string getEyeExpressionsPortName();
+
+    /**
+     * Check if the lip expressions are enabled
+     * @return True if the lip expressions are enabled, false otherwise
+     */
+    bool lipExpressionsEnabled();
+
+    /**
+     * Get the name of the port through which it is possible to get the lip expressions.
+     * @return the name of the port to get the lip expressions.
+     */
+    string getLipExpressionsPortName();
+
+    /**
+     * Check if the gaze acquisition is enabled
+     * @return True if the gaze acquisition is enabled, false otherwise
+     */
+    bool gazeEnabled();
+
+    /**
+     * Get the name of the port through which it is possible to get the gaze position.
+     * @return the name of the port to get the gaze position.
+     */
+    string getGazePortName();
 }
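
Usage notes.

The new no_gaze and no_expressions options are parsed the same way as the existing hide_window and render_in_play_space flags: the corresponding feature is disabled when the key is present with a null or true value, so they can be passed as bare flags. Assuming the device is launched through yarpdev and that the plugin is registered as "openxrheadset" (both assumptions, to be checked against the device documentation), a possible invocation disabling both features would be: yarpdev --device openxrheadset --no_gaze --no_expressions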
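
The ExpressionsManager added above has a deliberately small API: configure() opens up to three BufferedPort<yarp::sig::Vector> ports named <prefix>/expressions/eye, /lip, and /gaze, setExpressions() and setGaze() publish the latest data, and close() shuts the ports. A minimal standalone sketch of that lifecycle follows; the "/OpenXrHeadset" prefix and the vector size 14 are placeholder assumptions (in the device the eye vector is sized with XR_FACIAL_EXPRESSION_EYE_COUNT_HTC), and a running YARP name server is assumed:

    #include <ExpressionsManager.h>
    #include <yarp/os/Network.h>

    #include <vector>

    int main()
    {
        yarp::os::Network network; // ports require a reachable YARP name server

        ExpressionsManager expressions;

        // Open only the ports for the capabilities reported by the runtime
        // (in the device this happens in threadInit, after the OpenXrInterface
        // has been initialized and queried for support).
        if (!expressions.configure("/OpenXrHeadset", true /*eye*/, false /*lip*/, true /*gaze*/))
        {
            return 1; // at least one port failed to open
        }

        // Publish one sample of eye weights; 14 is a placeholder size here.
        std::vector<float> eyeWeights(14, 0.0f);
        std::vector<float> lipWeights; // ignored, since lip support is disabled above

        expressions.setExpressions(eyeWeights, lipWeights);

        expressions.close();
        return 0;
    }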
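
On the consumer side everything arrives as plain yarp::sig::Vector messages: three elements for the gaze direction expressed in the head frame, and one weight per expression on the eye and lip ports. A minimal gaze reader, again assuming the "/OpenXrHeadset" prefix (the reader port name "/gazeReader" is also just an example):

    #include <yarp/os/Network.h>
    #include <yarp/os/BufferedPort.h>
    #include <yarp/sig/Vector.h>

    #include <cstdio>

    int main()
    {
        yarp::os::Network network;

        yarp::os::BufferedPort<yarp::sig::Vector> gazeInput;
        if (!gazeInput.open("/gazeReader"))
        {
            return 1;
        }

        // The device writes on <prefix>/expressions/gaze (see ExpressionsManager::configure).
        yarp::os::Network::connect("/OpenXrHeadset/expressions/gaze", "/gazeReader");

        for (int i = 0; i < 100; ++i)
        {
            yarp::sig::Vector* gaze = gazeInput.read(); // blocking read
            if (gaze && gaze->size() == 3)
            {
                // Unit vector pointing where the user looks, in the head frame.
                std::printf("gaze direction: %f %f %f\n", (*gaze)[0], (*gaze)[1], (*gaze)[2]);
            }
        }

        gazeInput.close();
        return 0;
    }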
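
For reference, the vector published by setGaze() is the z-axis of the gaze frame re-expressed in the head frame, i.e. d = R_head^-1 * R_gaze * z. A self-contained check of that formula with Eigen, using example rotations only (not data from any runtime):

    #include <Eigen/Geometry>

    #include <iostream>

    int main()
    {
        // Example poses: head yawed by 90 degrees about the vertical axis,
        // gaze frame aligned with the world frame.
        Eigen::Quaternionf headRotation(Eigen::AngleAxisf(1.5707963f, Eigen::Vector3f::UnitY()));
        Eigen::Quaternionf gazeRotation = Eigen::Quaternionf::Identity();

        // Same expression used in ExpressionsManager::setGaze.
        Eigen::Vector3f gazeDirectionInHead =
            headRotation.inverse() * gazeRotation * Eigen::Vector3f::UnitZ();

        // The world z-axis seen from the rotated head: approximately (-1, 0, 0).
        std::cout << gazeDirectionInHead.transpose() << std::endl;
        return 0;
    }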