From a83986052dfa69ccf3c3e5c365dd9fb29945c943 Mon Sep 17 00:00:00 2001
From: MaxWang-MS <68253937+MaxWang-MS@users.noreply.github.com>
Date: Fri, 24 Jun 2022 16:19:45 -0700
Subject: [PATCH] Add support for "select" voice command with OpenXR (#10661)
* Add support for "select" voice command with OpenXR
* Update MicrosoftOpenXRGGVHand.cs
---
.../OpenXR/Scripts/MicrosoftOpenXRGGVHand.cs | 65 ++++++++++++
.../Scripts/MicrosoftOpenXRGGVHand.cs.meta | 11 ++
.../OpenXR/Scripts/OpenXRDeviceManager.cs | 100 ++++++++++++++++++
3 files changed, 176 insertions(+)
create mode 100644 Assets/MRTK/Providers/OpenXR/Scripts/MicrosoftOpenXRGGVHand.cs
create mode 100644 Assets/MRTK/Providers/OpenXR/Scripts/MicrosoftOpenXRGGVHand.cs.meta
diff --git a/Assets/MRTK/Providers/OpenXR/Scripts/MicrosoftOpenXRGGVHand.cs b/Assets/MRTK/Providers/OpenXR/Scripts/MicrosoftOpenXRGGVHand.cs
new file mode 100644
index 00000000000..2056a5fe4c0
--- /dev/null
+++ b/Assets/MRTK/Providers/OpenXR/Scripts/MicrosoftOpenXRGGVHand.cs
@@ -0,0 +1,65 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+using Microsoft.MixedReality.Toolkit.Input;
+using Microsoft.MixedReality.Toolkit.Utilities;
+using Microsoft.MixedReality.Toolkit.XRSDK.Input;
+using UnityEngine;
+
+namespace Microsoft.MixedReality.Toolkit.XRSDK.OpenXR
+{
+ /// <summary>
+ /// A GGV (Gaze, Gesture, and Voice) hand instance for OpenXR.
+ /// Used only for the purposes of acting on the select keyword detected by HoloLens 2.
+ /// </summary>
+ [MixedRealityController(
+ SupportedControllerType.GGVHand,
+ new[] { Handedness.Left, Handedness.Right, Handedness.None })]
+ internal class MicrosoftOpenXRGGVHand : GenericXRSDKController
+ {
+ public MicrosoftOpenXRGGVHand(
+ TrackingState trackingState,
+ Handedness controllerHandedness,
+ IMixedRealityInputSource inputSource = null,
+ MixedRealityInteractionMapping[] interactions = null)
+ : base(trackingState, controllerHandedness, inputSource, interactions, new SimpleHandDefinition(controllerHandedness))
+ { }
+
+ internal void UpdateVoiceState(bool isPressed)
+ {
+ MixedRealityInteractionMapping interactionMapping = null;
+
+ for (int i = 0; i < Interactions?.Length; i++)
+ {
+ MixedRealityInteractionMapping currentInteractionMapping = Interactions[i];
+
+ if (currentInteractionMapping.AxisType == AxisType.Digital && currentInteractionMapping.InputType == DeviceInputType.Select)
+ {
+ interactionMapping = currentInteractionMapping;
+ break;
+ }
+ }
+
+ if (interactionMapping == null)
+ {
+ return;
+ }
+
+ interactionMapping.BoolData = isPressed;
+
+ // If our value changed raise it.
+ if (interactionMapping.Changed)
+ {
+ // Raise input system event if it's enabled
+ if (interactionMapping.BoolData)
+ {
+ CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction);
+ }
+ else
+ {
+ CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction);
+ }
+ }
+ }
+ }
+}
diff --git a/Assets/MRTK/Providers/OpenXR/Scripts/MicrosoftOpenXRGGVHand.cs.meta b/Assets/MRTK/Providers/OpenXR/Scripts/MicrosoftOpenXRGGVHand.cs.meta
new file mode 100644
index 00000000000..2f2abe1f769
--- /dev/null
+++ b/Assets/MRTK/Providers/OpenXR/Scripts/MicrosoftOpenXRGGVHand.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: f5a8da3ddac5dc245989c9515ccec423
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Assets/MRTK/Providers/OpenXR/Scripts/OpenXRDeviceManager.cs b/Assets/MRTK/Providers/OpenXR/Scripts/OpenXRDeviceManager.cs
index 7866bad4f84..ba8b383b46e 100644
--- a/Assets/MRTK/Providers/OpenXR/Scripts/OpenXRDeviceManager.cs
+++ b/Assets/MRTK/Providers/OpenXR/Scripts/OpenXRDeviceManager.cs
@@ -16,6 +16,7 @@
#if MSFT_OPENXR && WINDOWS_UWP
using Microsoft.MixedReality.OpenXR;
using Microsoft.MixedReality.Toolkit.Windows.Input;
+using Windows.UI.Input.Spatial;
#endif // MSFT_OPENXR && WINDOWS_UWP
namespace Microsoft.MixedReality.Toolkit.XRSDK.OpenXR
@@ -80,6 +81,7 @@ public override void Enable()
#if MSFT_OPENXR && WINDOWS_UWP
CreateGestureRecognizers();
+ SpatialInteractionManager.SourcePressed += SpatialInteractionManager_SourcePressed;
#endif // MSFT_OPENXR && WINDOWS_UWP
base.Enable();
@@ -114,6 +116,26 @@ public override void Update()
base.Update();
CheckForGestures();
+
+ if (shouldSendVoiceEvents)
+ {
+ MicrosoftOpenXRGGVHand controller = GetOrAddVoiceController();
+ if (controller != null)
+ {
+ // RaiseOnInputDown for "select"
+ controller.UpdateVoiceState(true);
+ // RaiseOnInputUp for "select"
+ controller.UpdateVoiceState(false);
+
+ // On WMR, the voice recognizer does not actually register the phrase 'select'
+ // when you add it to the speech commands profile. Therefore, simulate
+ // the "select" voice command running to ensure that we get a select voice command
+ // registered. This is used by FocusProvider to detect when the select pointer is active.
+ Service?.RaiseSpeechCommandRecognized(controller.InputSource, RecognitionConfidenceLevel.High, TimeSpan.MinValue, DateTime.Now, new SpeechCommands("select", KeyCode.Alpha1, MixedRealityInputAction.None));
+ }
+
+ shouldSendVoiceEvents = false;
+ }
}
///
@@ -140,6 +162,14 @@ public override void Disable()
#endif
navigationGestureRecognizer = null;
+ SpatialInteractionManager.SourcePressed -= SpatialInteractionManager_SourcePressed;
+
+ if (voiceController != null)
+ {
+ RemoveControllerFromScene(voiceController);
+ voiceController = null;
+ }
+
base.Disable();
}
#endif // MSFT_OPENXR && WINDOWS_UWP
@@ -529,5 +559,75 @@ private GenericXRSDKController FindMatchingController(GestureHandedness gestureH
#endif // MSFT_OPENXR && WINDOWS_UWP
#endregion Gesture implementation
+
+ #region SpatialInteractionManager event and helpers
+
+#if MSFT_OPENXR && WINDOWS_UWP
+ /// <summary>
+ /// SDK Interaction Source Pressed Event handler. Used only for voice.
+ /// </summary>
+ /// <param name="args">SDK source pressed event arguments</param>
+ private void SpatialInteractionManager_SourcePressed(SpatialInteractionManager sender, SpatialInteractionSourceEventArgs args)
+ {
+ if (args.State.Source.Kind == SpatialInteractionSourceKind.Voice)
+ {
+ shouldSendVoiceEvents = true;
+ }
+ }
+
+ private MicrosoftOpenXRGGVHand voiceController = null;
+ private bool shouldSendVoiceEvents = false;
+
+ private MicrosoftOpenXRGGVHand GetOrAddVoiceController()
+ {
+ if (voiceController != null)
+ {
+ return voiceController;
+ }
+
+ IMixedRealityInputSource inputSource = Service?.RequestNewGenericInputSource("Mixed Reality Voice", sourceType: InputSourceType.Voice);
+ MicrosoftOpenXRGGVHand detectedController = new MicrosoftOpenXRGGVHand(TrackingState.NotTracked, Utilities.Handedness.None, inputSource);
+
+ if (!detectedController.Enabled)
+ {
+ // Controller failed to be setup correctly.
+ // Return null so we don't raise the source detected.
+ return null;
+ }
+
+ for (int i = 0; i < detectedController.InputSource?.Pointers?.Length; i++)
+ {
+ detectedController.InputSource.Pointers[i].Controller = detectedController;
+ }
+
+ Service?.RaiseSourceDetected(detectedController.InputSource, detectedController);
+
+ voiceController = detectedController;
+ return voiceController;
+ }
+
+ private SpatialInteractionManager spatialInteractionManager = null;
+
+ /// <summary>
+ /// Provides access to the current native SpatialInteractionManager.
+ /// </summary>
+ private SpatialInteractionManager SpatialInteractionManager
+ {
+ get
+ {
+ if (spatialInteractionManager == null)
+ {
+ UnityEngine.WSA.Application.InvokeOnUIThread(() =>
+ {
+ spatialInteractionManager = SpatialInteractionManager.GetForCurrentView();
+ }, true);
+ }
+
+ return spatialInteractionManager;
+ }
+ }
+#endif // MSFT_OPENXR && WINDOWS_UWP
+
+ #endregion SpatialInteractionManager event and helpers
}
}