3 Commits

Author       SHA1        Message                     Date
Sean Lu      2bfa2ad4c7  version 2.5.0               2024-12-06 15:44:37 +08:00
Sean Lu      dfdcd0fd7f  version 2.4.2               2024-10-30 16:02:07 +08:00
babelsw_hsu  5ac252bf2e  Changed: The License file.  2024-09-27 17:49:27 +08:00
             Symptom: Plugin
             Root Cause: N/A
             Solution: Changed the License file.
             Project: VIVE OpenXR Unity Plugin
             Note:
977 changed files with 434259 additions and 77762 deletions

LICENSE.txt (new file, +4 lines)
View File

@@ -0,0 +1,4 @@
Copyright © HTC Corporation, LLC and its affiliates. All rights reserved.
Your use of this SDK, sample, or tool is subject to HTC VIVE SDK License Agreement, available at https://developer.vive.com/resources/downloads/licenses-and-agreements/

View File

@@ -1,6 +1,7 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.IO;
using System.Reflection;
using System.Text;
using System.Threading;
using System.Xml;
@@ -12,7 +13,6 @@ using UnityEngine;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
using UnityEngine.XR.OpenXR.Features.Interactions;
using VIVE.OpenXR.FacialTracking;
using VIVE.OpenXR.Hand;
using VIVE.OpenXR.Tracker;
@@ -152,7 +152,7 @@ namespace VIVE.OpenXR.Editor
m_IsEnabled = enabled;
-sb.Clear().Append(LOG_TAG).Append(m_IsEnabled ? "Enable " : "Disable ").Append("Simultaneous Interaction."); DEBUG(sb);
+//sb.Clear().Append(LOG_TAG).Append(m_IsEnabled ? "Enable " : "Disable ").Append("Simultaneous Interaction."); DEBUG(sb);
}
[MenuItem(MENU_NAME, validate = true, priority = 601)]
@@ -255,6 +255,10 @@ namespace VIVE.OpenXR.Editor
bool enableTracker = false;
bool enableEyetracking = false;
bool enableLipexpression = false;
const string kHandTrackingExtension = "XR_EXT_hand_tracking";
const string kFacialTrackingExtension = "XR_HTC_facial_tracking";
const string kHandInteractionHTC = "XR_HTC_hand_interaction";
const string kHandInteractionEXT = "XR_EXT_hand_interaction";
var settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Android);
if (null == settings)
@@ -279,16 +283,34 @@ namespace VIVE.OpenXR.Editor
foreach (var feature in settings.GetFeatures<OpenXRFeature>())
{
-if (feature is ViveHandTracking && feature.enabled)
+if (!feature.enabled) { continue; }
FieldInfo fieldInfoOpenXrExtensionStrings = typeof(OpenXRFeature).GetField(
"openxrExtensionStrings",
BindingFlags.NonPublic | BindingFlags.Instance);
if (fieldInfoOpenXrExtensionStrings != null)
{
var openXrExtensionStringsArray =
((string)fieldInfoOpenXrExtensionStrings.GetValue(feature)).Split(' ');
foreach (string stringItem in openXrExtensionStringsArray)
{
if (string.IsNullOrEmpty(stringItem)) { continue; }
if (stringItem.Equals(kHandTrackingExtension) ||
stringItem.Equals(kHandInteractionHTC) ||
stringItem.Equals(kHandInteractionEXT))
{
enableHandtracking = true;
}
-if (feature is ViveFacialTracking && feature.enabled)
+if (stringItem.Equals(kFacialTrackingExtension))
{
enableEyetracking = true;
enableLipexpression = true;
}
}
}
}
if (enableHandtracking)
{

View File

@@ -83,6 +83,15 @@ namespace VIVE.OpenXR.CompositionLayer.Editor
static GUIContent Label_IsDynamicLayer = new GUIContent("Dynamic Layer", "Specify whether Layer needs to be updated each frame or not.");
SerializedProperty Property_IsDynamicLayer;
static string PropertyName_IsCustomRects = "isCustomRects";
static GUIContent Label_IsCustomRects = new GUIContent("Customize Rects", "Using a single texture as a stereo image");
SerializedProperty Property_IsCustomRects;
static string PropertyName_CustomRects = "customRects";
static GUIContent Label_CustomRects = new GUIContent("Customize Rects Type", "Specify the customize rects type of the left texture.");
SerializedProperty Property_CustomRects;
static string PropertyName_ApplyColorScaleBias = "applyColorScaleBias";
static GUIContent Label_ApplyColorScaleBias = new GUIContent("Apply Color Scale Bias", "Color scale and bias are applied to a layer color during composition, after its conversion to premultiplied alpha representation. LayerColor = LayerColor * colorScale + colorBias");
SerializedProperty Property_ApplyColorScaleBias;
@@ -114,15 +123,18 @@ namespace VIVE.OpenXR.CompositionLayer.Editor
private bool showLayerParams = true, showColorScaleBiasParams = true;
-private bool showExternalSurfaceParams = false;
+//private bool showExternalSurfaceParams = false;
Rect FullRect = new Rect(0, 0, 1, 1);
Rect LeftRightRect = new Rect(0, 0, 0.5f, 1);
Rect TopDownRect = new Rect(0, 0.5f, 1, 0.5f);
public override void OnInspectorGUI()
{
if (Property_LayerType == null) Property_LayerType = serializedObject.FindProperty(PropertyName_LayerType);
if (Property_CompositionDepth == null) Property_CompositionDepth = serializedObject.FindProperty(PropertyName_CompositionDepth);
if (Property_LayerShape == null) Property_LayerShape = serializedObject.FindProperty(PropertyName_LayerShape);
if (Property_LayerVisibility == null) Property_LayerVisibility = serializedObject.FindProperty(PropertyName_LayerVisibility);
if (Property_CustomRects == null) Property_CustomRects = serializedObject.FindProperty(PropertyName_CustomRects);
if (Property_LockMode == null) Property_LockMode = serializedObject.FindProperty(PropertyName_LockMode);
if (Property_QuadWidth == null) Property_QuadWidth = serializedObject.FindProperty(PropertyName_QuadWidth);
if (Property_QuadHeight == null) Property_QuadHeight = serializedObject.FindProperty(PropertyName_QuadHeight);
@@ -134,6 +146,7 @@ namespace VIVE.OpenXR.CompositionLayer.Editor
if (Property_ExternalSurfaceWidth == null) Property_ExternalSurfaceWidth = serializedObject.FindProperty(PropertyName_ExternalSurfaceWidth);
if (Property_ExternalSurfaceHeight == null) Property_ExternalSurfaceHeight = serializedObject.FindProperty(PropertyName_ExternalSurfaceHeight);
if (Property_IsDynamicLayer == null) Property_IsDynamicLayer = serializedObject.FindProperty(PropertyName_IsDynamicLayer);
if (Property_IsCustomRects == null) Property_IsCustomRects = serializedObject.FindProperty(PropertyName_IsCustomRects);
if (Property_ApplyColorScaleBias == null) Property_ApplyColorScaleBias = serializedObject.FindProperty(PropertyName_ApplyColorScaleBias);
if (Property_SolidEffect == null) Property_SolidEffect = serializedObject.FindProperty(PropertyName_SolidEffect);
if (Property_ColorScale == null) Property_ColorScale = serializedObject.FindProperty(PropertyName_ColorScale);
@@ -311,6 +324,12 @@ namespace VIVE.OpenXR.CompositionLayer.Editor
{
if (GUILayout.Button("Show Cylinder Preview"))
{
Rect srcRectLeft = FullRect;
if (targetCompositionLayer.isCustomRects && targetCompositionLayer.customRects == CompositionLayer.CustomRectsType.LeftRight)
srcRectLeft = LeftRightRect;
if (targetCompositionLayer.isCustomRects && targetCompositionLayer.customRects == CompositionLayer.CustomRectsType.TopDown)
srcRectLeft = TopDownRect;
targetCompositionLayer.isPreviewingCylinder = true;
Vector3[] cylinderVertices = CompositionLayer.MeshGenerationHelper.GenerateCylinderVertex(targetCompositionLayer.CylinderAngleOfArc, targetCompositionLayer.CylinderRadius, targetCompositionLayer.CylinderHeight);
//Add components to Game Object
@@ -330,6 +349,8 @@ namespace VIVE.OpenXR.CompositionLayer.Editor
if (targetCompositionLayer.texture != null)
{
cylinderMeshRenderer.sharedMaterial.mainTexture = targetCompositionLayer.texture;
cylinderMeshRenderer.sharedMaterial.mainTextureOffset = srcRectLeft.position;
cylinderMeshRenderer.sharedMaterial.mainTextureScale = srcRectLeft.size;
}
//Generate Mesh
@@ -409,6 +430,12 @@ namespace VIVE.OpenXR.CompositionLayer.Editor
{
if (GUILayout.Button("Show Quad Preview"))
{
Rect srcRectLeft = FullRect;
if (targetCompositionLayer.isCustomRects && targetCompositionLayer.customRects == CompositionLayer.CustomRectsType.LeftRight)
srcRectLeft = LeftRightRect;
if (targetCompositionLayer.isCustomRects && targetCompositionLayer.customRects == CompositionLayer.CustomRectsType.TopDown)
srcRectLeft = TopDownRect;
targetCompositionLayer.isPreviewingQuad = true;
//Generate vertices
Vector3[] quadVertices = CompositionLayer.MeshGenerationHelper.GenerateQuadVertex(targetCompositionLayer.quadWidth, targetCompositionLayer.quadHeight);
@@ -430,6 +457,8 @@ namespace VIVE.OpenXR.CompositionLayer.Editor
if (targetCompositionLayer.texture != null)
{
quadMeshRenderer.sharedMaterial.mainTexture = targetCompositionLayer.texture;
quadMeshRenderer.sharedMaterial.mainTextureOffset = srcRectLeft.position;
quadMeshRenderer.sharedMaterial.mainTextureScale = srcRectLeft.size;
}
//Generate Mesh
quadMeshFilter.mesh = CompositionLayer.MeshGenerationHelper.GenerateQuadMesh(quadVertices);
@@ -440,11 +469,18 @@ namespace VIVE.OpenXR.CompositionLayer.Editor
//Rect UI For textures
Rect labelRect = EditorGUILayout.GetControlRect();
-EditorGUI.LabelField(new Rect(labelRect.x, labelRect.y, labelRect.width / 2, labelRect.height), new GUIContent("Texture", "Texture to be rendered on the layer"));
+EditorGUI.LabelField(new Rect(labelRect.x, labelRect.y, labelRect.width / 2, labelRect.height), new GUIContent("Left Texture", "Texture used for the left eye"));
EditorGUI.LabelField(new Rect(labelRect.x + labelRect.width / 2, labelRect.y, labelRect.width / 2, labelRect.height), new GUIContent("Right Texture", "Texture used for the right eye"));
Rect textureRect = EditorGUILayout.GetControlRect(GUILayout.Height(64));
targetCompositionLayer.texture = (Texture)EditorGUI.ObjectField(new Rect(textureRect.x, textureRect.y, 64, textureRect.height), targetCompositionLayer.texture, typeof(Texture), true);
targetCompositionLayer.textureRight = (Texture)EditorGUI.ObjectField(new Rect(textureRect.x + textureRect.width / 2, textureRect.y, 64, textureRect.height), targetCompositionLayer.textureRight, typeof(Texture), true);
if (null == targetCompositionLayer.textureLeft)
{
targetCompositionLayer.texture = targetCompositionLayer.textureRight;
//myScript.textures[1] = right;
}
EditorGUILayout.PropertyField(Property_LayerVisibility, new GUIContent(Label_LayerVisibility));
serializedObject.ApplyModifiedProperties();
@@ -456,7 +492,7 @@ namespace VIVE.OpenXR.CompositionLayer.Editor
//serializedObject.ApplyModifiedProperties();
//if (targetCompositionLayer.isExternalSurface)
-if (false)
+/*if (false)
{
EditorGUI.indentLevel++;
showExternalSurfaceParams = EditorGUILayout.Foldout(showExternalSurfaceParams, "External Surface Parameters");
@@ -469,8 +505,21 @@ namespace VIVE.OpenXR.CompositionLayer.Editor
serializedObject.ApplyModifiedProperties();
}
EditorGUI.indentLevel--;
}*/
if (targetCompositionLayer.textureLeft == targetCompositionLayer.textureRight || targetCompositionLayer.textureRight == null)
{
EditorGUILayout.PropertyField(Property_IsCustomRects, Label_IsCustomRects);
serializedObject.ApplyModifiedProperties();
}
if (targetCompositionLayer.isCustomRects)
{
EditorGUILayout.PropertyField(Property_CustomRects, new GUIContent(Label_CustomRects));
serializedObject.ApplyModifiedProperties();
}
EditorGUILayout.Space();
EditorGUILayout.PropertyField(Property_ApplyColorScaleBias, Label_ApplyColorScaleBias);
serializedObject.ApplyModifiedProperties();

View File

@@ -0,0 +1,166 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEditor.PackageManager;
using UnityEditor.PackageManager.Requests;
using UnityEngine;
public static class PackageManagerHelper
{
private static bool s_wasPreparing;
private static bool m_wasAdded;
private static bool s_wasRemoved;
private static ListRequest m_listRequest;
private static AddRequest m_addRequest;
private static RemoveRequest m_removeRequest;
private static string s_fallbackIdentifier;
public static bool isPreparingList
{
get
{
if (m_listRequest == null) { return s_wasPreparing = true; }
switch (m_listRequest.Status)
{
case StatusCode.InProgress:
return s_wasPreparing = true;
case StatusCode.Failure:
if (!s_wasPreparing)
{
Debug.LogError("Something went wrong while preparing the package list. error:" + m_listRequest.Error.errorCode + "(" + m_listRequest.Error.message + ")");
}
break;
case StatusCode.Success:
break;
}
return s_wasPreparing = false;
}
}
public static bool isAddingToList
{
get
{
if (m_addRequest == null) { return m_wasAdded = false; }
switch (m_addRequest.Status)
{
case StatusCode.InProgress:
return m_wasAdded = true;
case StatusCode.Failure:
if (!m_wasAdded)
{
AddRequest request = m_addRequest;
m_addRequest = null;
if (string.IsNullOrEmpty(s_fallbackIdentifier))
{
Debug.LogError("Something went wrong while adding the package. error:" + request.Error.errorCode + "(" + request.Error.message + ")");
}
else
{
Debug.Log("Failed to install package: \"" + request.Error.message + "\". Retry with fallback identifier \"" + s_fallbackIdentifier + "\"");
AddToPackageList(s_fallbackIdentifier);
}
s_fallbackIdentifier = null;
}
break;
case StatusCode.Success:
if (!m_wasAdded)
{
m_addRequest = null;
s_fallbackIdentifier = null;
ResetPackageList();
}
break;
}
return m_wasAdded = false;
}
}
public static bool isRemovingFromList
{
get
{
if (m_removeRequest == null) { return s_wasRemoved = false; }
switch (m_removeRequest.Status)
{
case StatusCode.InProgress:
return s_wasRemoved = true;
case StatusCode.Failure:
if (!s_wasRemoved)
{
var request = m_removeRequest;
m_removeRequest = null;
Debug.LogError("Something went wrong while removing the package. error:" + request.Error.errorCode + "(" + request.Error.message + ")"); // use the local copy; m_removeRequest was just cleared above
}
break;
case StatusCode.Success:
if (!s_wasRemoved)
{
m_removeRequest = null;
ResetPackageList();
}
break;
}
return s_wasRemoved = false;
}
}
public static void PreparePackageList()
{
if (m_listRequest != null) { return; }
m_listRequest = Client.List(true, true);
}
public static void ResetPackageList()
{
s_wasPreparing = false;
m_listRequest = null;
}
public static bool IsPackageInList(string name, out UnityEditor.PackageManager.PackageInfo packageInfo)
{
packageInfo = null;
if (m_listRequest == null || m_listRequest.Result == null) return false;
foreach (var package in m_listRequest.Result)
{
if (package.name.Equals(name))
{
packageInfo = package;
return true;
}
}
return false;
}
public static void AddToPackageList(string identifier, string fallbackIdentifier = null)
{
Debug.Assert(m_addRequest == null);
m_addRequest = Client.Add(identifier);
s_fallbackIdentifier = fallbackIdentifier;
}
public static void RemovePackage(string identifier)
{
Debug.Assert(m_removeRequest == null);
m_removeRequest = Client.Remove(identifier);
}
public static PackageCollection GetPackageList()
{
if (m_listRequest == null || m_listRequest.Result == null)
{
return null;
}
return m_listRequest.Result;
}
}
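PackageManagerHelper above is a small polling state machine around UnityEditor.PackageManager.Client: PreparePackageList() starts (or reuses) a list request, the is* properties report progress on each editor update, and AddToPackageList()/RemovePackage() queue changes. A minimal sketch of how an editor window might drive it follows; the window class, menu path, and the com.unity.xr.openxr identifier are illustrative assumptions, not part of this commit.
// Hypothetical illustration of driving PackageManagerHelper from an EditorWindow (not part of the commit).
// The menu path and the package identifier below are examples only.
using UnityEditor;
using UnityEngine;
public class PackageCheckWindow : EditorWindow
{
    [MenuItem("VIVE/Examples/Package Check Sample")]
    static void Open() { GetWindow<PackageCheckWindow>(); }
    void OnGUI()
    {
        // Kick off (or reuse) the list request and wait until it finishes.
        PackageManagerHelper.PreparePackageList();
        if (PackageManagerHelper.isPreparingList || PackageManagerHelper.isAddingToList)
        {
            EditorGUILayout.LabelField("Working on packages...");
            return;
        }
        // Once the list is ready, query it and optionally queue an install.
        if (PackageManagerHelper.IsPackageInList("com.unity.xr.openxr", out var info))
        {
            EditorGUILayout.LabelField("OpenXR Plugin version: " + info.version);
        }
        else if (GUILayout.Button("Install OpenXR Plugin"))
        {
            PackageManagerHelper.AddToPackageList("com.unity.xr.openxr");
        }
    }
}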

View File

@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: a00aae5e5e4790c429467d7a03b4a6de
+guid: 43952515f295bac4385e71851692047d
MonoImporter:
externalObjects: {}
serializedVersion: 2

View File

@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: ba676113e4d2dfc4095675f6fb934d9e
+guid: 5f1198e3724eb5b44a705edc6d6bae06
folderAsset: yes
DefaultImporter:
externalObjects: {}

View File

@@ -0,0 +1,24 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using UnityEngine;
#if UNITY_EDITOR
namespace VIVE.OpenXR.Editor
{
[Serializable]
public class PreferenceAvatarAsset : ScriptableObject
{
public const string AssetPath = "Assets/VIVE/OpenXR/Preferences/PreferenceAvatarAsset.asset";
// VRM constants
public const string kVrm0Package = "UniVRM-0.109.0_7aff.unitypackage";
public const string kVrm0Asset = "Assets/VRM.meta";
public const string kVrm1Package = "VRM-0.109.0_7aff.unitypackage";
public const string kVrm1Asset = "Assets/VRM10.meta";
public bool SupportVrm0 = false;
public bool SupportVrm1 = false;
}
}
#endif

View File

@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: b048c9b388bf8d34e9814b272da7ddb5
+guid: 94f6766384418a0418eb5ebdb371be20
MonoImporter:
externalObjects: {}
serializedVersion: 2

View File

@@ -0,0 +1,296 @@
// Copyright HTC Corporation All Rights Reserved.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System.Text;
using System.IO;
using System.Linq;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.PackageManager;
using UnityEditor.PackageManager.Requests;
using UnityEditor.XR.Management.Metadata;
namespace VIVE.OpenXR.Editor
{
[InitializeOnLoad]
public static class ViveOpenXRPreference
{
#region Log
static StringBuilder m_sb = null;
static StringBuilder sb {
get {
if (m_sb == null) { m_sb = new StringBuilder(); }
return m_sb;
}
}
const string LOG_TAG = "VIVE.OpenXR.Editor.ViveOpenXRPreference";
static void DEBUG(StringBuilder msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
static void ERROR(StringBuilder msg) { Debug.LogErrorFormat("{0} {1}", LOG_TAG, msg); }
#endregion
static ViveOpenXRPreference()
{
EditorApplication.update += OnUpdate;
}
#region Scripting Symbols
internal struct ScriptingDefinedSettings
{
public string[] scriptingDefinedSymbols;
public BuildTargetGroup[] targetGroups;
public ScriptingDefinedSettings(string[] symbols, BuildTargetGroup[] groups)
{
scriptingDefinedSymbols = symbols;
targetGroups = groups;
}
}
const string DEFINE_USE_VRM_0_x = "USE_VRM_0_x";
static readonly ScriptingDefinedSettings m_ScriptDefineSettingVrm0 = new ScriptingDefinedSettings(
new string[] { DEFINE_USE_VRM_0_x, },
new BuildTargetGroup[] { BuildTargetGroup.Android, }
);
static void AddScriptingDefineSymbols(ScriptingDefinedSettings setting)
{
for (int group_index = 0; group_index < setting.targetGroups.Length; group_index++)
{
var group = setting.targetGroups[group_index];
string definesString = PlayerSettings.GetScriptingDefineSymbolsForGroup(group);
List<string> allDefines = definesString.Split(';').ToList();
for (int symbol_index = 0; symbol_index < setting.scriptingDefinedSymbols.Length; symbol_index++)
{
if (!allDefines.Contains(setting.scriptingDefinedSymbols[symbol_index]))
{
sb.Clear().Append("AddDefineSymbols() ").Append(setting.scriptingDefinedSymbols[symbol_index]).Append(" to group ").Append(group); DEBUG(sb);
allDefines.Add(setting.scriptingDefinedSymbols[symbol_index]);
}
else
{
sb.Clear().Append("AddDefineSymbols() ").Append(setting.scriptingDefinedSymbols[symbol_index]).Append(" already existed."); DEBUG(sb);
}
}
PlayerSettings.SetScriptingDefineSymbolsForGroup(
group,
string.Join(";", allDefines.ToArray())
);
}
}
static void RemoveScriptingDefineSymbols(ScriptingDefinedSettings setting)
{
for (int group_index = 0; group_index < setting.targetGroups.Length; group_index++)
{
var group = setting.targetGroups[group_index];
string definesString = PlayerSettings.GetScriptingDefineSymbolsForGroup(group);
List<string> allDefines = definesString.Split(';').ToList();
for (int symbol_index = 0; symbol_index < setting.scriptingDefinedSymbols.Length; symbol_index++)
{
if (allDefines.Contains(setting.scriptingDefinedSymbols[symbol_index]))
{
sb.Clear().Append("RemoveDefineSymbols() ").Append(setting.scriptingDefinedSymbols[symbol_index]).Append(" from group ").Append(group); DEBUG(sb);
allDefines.Remove(setting.scriptingDefinedSymbols[symbol_index]);
}
else
{
sb.Clear().Append("RemoveDefineSymbols() ").Append(setting.scriptingDefinedSymbols[symbol_index]).Append(" does not exist."); DEBUG(sb);
}
}
PlayerSettings.SetScriptingDefineSymbolsForGroup(
group,
string.Join(";", allDefines.ToArray())
);
}
}
static bool HasScriptingDefineSymbols(ScriptingDefinedSettings setting)
{
for (int group_index = 0; group_index < setting.targetGroups.Length; group_index++)
{
var group = setting.targetGroups[group_index];
string definesString = PlayerSettings.GetScriptingDefineSymbolsForGroup(group);
List<string> allDefines = definesString.Split(';').ToList();
for (int symbol_index = 0; symbol_index < setting.scriptingDefinedSymbols.Length; symbol_index++)
{
if (!allDefines.Contains(setting.scriptingDefinedSymbols[symbol_index]))
{
return false;
}
}
}
return true;
}
const string XR_LOADER_OPENXR_NAME = "UnityEngine.XR.OpenXR.OpenXRLoader";
internal static bool ViveOpenXRAndroidAssigned { get { return XRPackageMetadataStore.IsLoaderAssigned(XR_LOADER_OPENXR_NAME, BuildTargetGroup.Android); } }
static PreferenceAvatarAsset m_AssetAvatar = null;
static void CheckPreferenceAssets()
{
if (File.Exists(PreferenceAvatarAsset.AssetPath))
{
m_AssetAvatar = AssetDatabase.LoadAssetAtPath(PreferenceAvatarAsset.AssetPath, typeof(PreferenceAvatarAsset)) as PreferenceAvatarAsset;
}
else
{
string folderPath = PreferenceAvatarAsset.AssetPath.Substring(0, PreferenceAvatarAsset.AssetPath.LastIndexOf('/'));
DirectoryInfo folder = Directory.CreateDirectory(folderPath);
sb.Clear().Append("CheckPreferenceAssets() Creates folder: Assets/").Append(folder.Name); DEBUG(sb);
m_AssetAvatar = ScriptableObject.CreateInstance(typeof(PreferenceAvatarAsset)) as PreferenceAvatarAsset;
m_AssetAvatar.SupportVrm0 = false;
m_AssetAvatar.SupportVrm1 = false;
sb.Clear().Append("CheckPreferenceAssets() Creates the asset: ").Append(PreferenceAvatarAsset.AssetPath); DEBUG(sb);
AssetDatabase.CreateAsset(m_AssetAvatar, PreferenceAvatarAsset.AssetPath);
}
}
static void OnUpdate()
{
if (!ViveOpenXRAndroidAssigned) { return; }
CheckPreferenceAssets();
if (m_AssetAvatar)
{
// Adds the script symbol if VRM0 is imported.
if (File.Exists(PreferenceAvatarAsset.kVrm0Asset))
{
if (!HasScriptingDefineSymbols(m_ScriptDefineSettingVrm0))
{
sb.Clear().Append("OnUpdate() Adds m_ScriptDefineSettingVrm0."); DEBUG(sb);
AddScriptingDefineSymbols(m_ScriptDefineSettingVrm0);
}
m_AssetAvatar.SupportVrm0 = true;
}
else
{
if (HasScriptingDefineSymbols(m_ScriptDefineSettingVrm0))
{
sb.Clear().Append("OnUpdate() Removes m_ScriptDefineSettingVrm0."); DEBUG(sb);
RemoveScriptingDefineSymbols(m_ScriptDefineSettingVrm0);
}
m_AssetAvatar.SupportVrm0 = false;
}
m_AssetAvatar.SupportVrm1 = File.Exists(PreferenceAvatarAsset.kVrm1Asset);
}
}
#endregion
#region Preferences
const string kPreferenceName = "VIVE OpenXR";
private static GUIContent m_Vrm0Option = new GUIContent("VRM 0", "Avatar format.");
private static GUIContent m_Vrm1Option = new GUIContent("VRM 1", "Avatar format.");
internal static void ImportModule(string packagePath, bool interactive = false)
{
string target = Path.Combine("Packages/com.htc.upm.vive.openxr/UnityPackages~", packagePath);
sb.Clear().Append("ImportModule: " + target); DEBUG(sb);
AssetDatabase.ImportPackage(target, interactive);
}
static bool avatarOption = true;
#pragma warning disable 0618
[PreferenceItem(kPreferenceName)]
#pragma warning restore 0618
private static void OnPreferencesGUI()
{
if (EditorApplication.isCompiling)
{
EditorGUILayout.LabelField("Compiling...");
return;
}
if (PackageManagerHelper.isAddingToList)
{
EditorGUILayout.LabelField("Installing packages...");
return;
}
if (PackageManagerHelper.isRemovingFromList)
{
EditorGUILayout.LabelField("Removing packages...");
return;
}
PackageManagerHelper.PreparePackageList();
if (PackageManagerHelper.isPreparingList)
{
EditorGUILayout.LabelField("Checking Packages...");
return;
}
CheckPreferenceAssets();
GUIStyle sectionTitleStyle = new GUIStyle(EditorStyles.label);
sectionTitleStyle.fontSize = 16;
sectionTitleStyle.richText = true;
sectionTitleStyle.fontStyle = FontStyle.Bold;
#region Avatar
GUILayout.BeginHorizontal();
GUILayout.Space(10);
GUILayout.Label("Avatar", sectionTitleStyle);
GUILayout.EndHorizontal();
GUIStyle foldoutStyle = EditorStyles.foldout;
foldoutStyle.fontSize = 14;
foldoutStyle.fontStyle = FontStyle.Normal;
GUILayout.BeginHorizontal();
GUILayout.Space(20);
avatarOption = EditorGUILayout.Foldout(avatarOption, "Supported Format", foldoutStyle);
GUILayout.EndHorizontal();
foldoutStyle.fontSize = 12;
foldoutStyle.fontStyle = FontStyle.Normal;
if (m_AssetAvatar && avatarOption)
{
/// VRM 0
GUILayout.Space(5);
GUILayout.BeginHorizontal();
GUILayout.Space(35);
if (!m_AssetAvatar.SupportVrm0)
{
bool toggled = EditorGUILayout.ToggleLeft(m_Vrm0Option, false, GUILayout.Width(230f));
if (toggled)
{
sb.Clear().Append("OnPreferencesGUI() Adds ").Append(PreferenceAvatarAsset.kVrm0Package); DEBUG(sb);
ImportModule(PreferenceAvatarAsset.kVrm0Package);
}
}
else
{
EditorGUILayout.ToggleLeft(m_Vrm0Option, true, GUILayout.Width(230f));
}
GUILayout.EndHorizontal();
/// VRM 1
GUILayout.Space(5);
GUILayout.BeginHorizontal();
GUILayout.Space(35);
if (!m_AssetAvatar.SupportVrm1)
{
bool toggled = EditorGUILayout.ToggleLeft(m_Vrm1Option, false, GUILayout.Width(230f));
if (toggled)
{
sb.Clear().Append("OnPreferencesGUI() Adds ").Append(PreferenceAvatarAsset.kVrm1Package); DEBUG(sb);
ImportModule(PreferenceAvatarAsset.kVrm1Package);
}
}
else
{
EditorGUILayout.ToggleLeft(m_Vrm1Option, true, GUILayout.Width(230f));
}
GUILayout.EndHorizontal();
}
#endregion
}
#endregion
}
}
#endif

View File

@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: 484659e19359fb740ac4a253c3fa83c6
+guid: 73bdd0b88ffae0e43a3a498347e6dea4
MonoImporter:
externalObjects: {}
serializedVersion: 2

View File

@@ -1,30 +0,0 @@
// Copyright HTC Corporation All Rights Reserved.
#if UNITY_EDITOR
using UnityEditor;
namespace VIVE.OpenXR.Editor
{
[CustomEditor(typeof(VIVEFocus3Feature))]
internal class VIVEFocus3FeatureEditor : UnityEditor.Editor
{
//private SerializedProperty enableHandTracking;
//private SerializedProperty enableTracker;
void OnEnable()
{
//enableHandTracking = serializedObject.FindProperty("enableHandTracking");
//enableTracker = serializedObject.FindProperty("enableTracker");
}
public override void OnInspectorGUI()
{
serializedObject.Update();
//EditorGUILayout.PropertyField(enableHandTracking);
//EditorGUILayout.PropertyField(enableTracker);
serializedObject.ApplyModifiedProperties();
}
}
}
#endif

View File

@@ -0,0 +1,24 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEditor;
using VIVE.OpenXR.Feature;
namespace VIVE.OpenXR.Editor
{
[CustomEditor(typeof(ViveAnchor))]
internal class ViveAnchorEditor : UnityEditor.Editor
{
private SerializedProperty enablePersistedAnchor;
void OnEnable()
{
enablePersistedAnchor = serializedObject.FindProperty("enablePersistedAnchor");
}
public override void OnInspectorGUI()
{
serializedObject.Update();
EditorGUILayout.PropertyField(enablePersistedAnchor);
serializedObject.ApplyModifiedProperties();
}
}
}

View File

@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: 6acba111d20a438439e5d1152010efa5
+guid: 9094698271e2abb4ab295256548772c3
MonoImporter:
externalObjects: {}
serializedVersion: 2

View File

@@ -0,0 +1,44 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEngine;
using UnityEngine.SceneManagement;
#if UNITY_EDITOR
using UnityEditor;
namespace VIVE.OpenXR.Editor
{
public class ViveMenu : UnityEditor.Editor
{
private const string kMenuXR = "VIVE/XR/Convert Main Camera to ViveRig";
[MenuItem(kMenuXR, priority = 101)]
private static void ConvertToViveRig()
{
// 1. Removes default Camera
Camera cam = FindObjectOfType<Camera>();
if (cam != null && cam.transform.parent == null)
{
Debug.Log("ConvertToViveRig() remove " + cam.gameObject.name);
DestroyImmediate(cam.gameObject);
}
// 2. Loads ViveRig
if (GameObject.Find("ViveRig") == null && GameObject.Find("ViveRig(Clone)") == null)
{
GameObject prefab = Resources.Load<GameObject>("Prefabs/ViveRig");
if (prefab != null)
{
Debug.Log("ConvertToViveRig() load " + prefab.name);
GameObject inst = Instantiate(prefab, null);
if (inst != null)
{
inst.name = "ViveRig";
UnityEditor.SceneManagement.EditorSceneManager.MarkSceneDirty(SceneManager.GetActiveScene());
}
}
}
}
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0f78968df8bc5794393fb2016e223a6c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,64 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEditor;
using UnityEngine;
using VIVE.OpenXR.Feature;
namespace VIVE.OpenXR.Editor
{
[CustomEditor(typeof(ViveMockRuntime))]
internal class ViveMockRuntimeEditor : UnityEditor.Editor
{
private SerializedProperty enableFuture;
private SerializedProperty enableAnchor;
void OnEnable()
{
enableFuture = serializedObject.FindProperty("enableFuture");
enableAnchor = serializedObject.FindProperty("enableAnchor");
}
public override void OnInspectorGUI()
{
serializedObject.Update();
// Show a text field for description
EditorGUILayout.HelpBox("VIVE's mock runtime. Used with OpenXR MockRuntime to test unsupported extensions and features on Editor.", MessageType.Info);
if (GUILayout.Button("Install MockRuntime Library")) {
InstallMockRuntimeLibrary();
}
// check if changed
EditorGUI.BeginChangeCheck();
EditorGUILayout.PropertyField(enableFuture);
if (EditorGUI.EndChangeCheck()) {
if (!enableFuture.boolValue) {
enableAnchor.boolValue = false;
}
}
EditorGUI.BeginChangeCheck();
EditorGUILayout.PropertyField(enableAnchor);
if (EditorGUI.EndChangeCheck()) {
if (enableAnchor.boolValue) {
enableFuture.boolValue = true;
}
}
serializedObject.ApplyModifiedProperties();
}
public void InstallMockRuntimeLibrary() {
string sourcePathName = "Packages/com.htc.upm.vive.openxr/MockRuntime~/Win64/ViveMockRuntime.dll";
string destPath = "Assets/Plugins/Win64";
string destPathName = "Assets/Plugins/Win64/ViveMockRuntime.dll";
// check if the folder exists. If not, create it.
if (!System.IO.Directory.Exists(destPath)) {
System.IO.Directory.CreateDirectory(destPath);
}
FileUtil.CopyFileOrDirectory(sourcePathName, destPathName);
AssetDatabase.Refresh();
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: eedb4211aafd2cb4bae86fcc0e948f72
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,135 +0,0 @@
// Copyright HTC Corporation All Rights Reserved.
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.Management.Metadata;
namespace VIVE.OpenXR.Editor
{
[InitializeOnLoad]
public static class CheckIfVIVEEnabled
{
const string LOG_TAG = "VIVE.OpenXR.Editor.CheckIfVIVEEnabled";
static void DEBUG(string msg) { Debug.Log(LOG_TAG + " " + msg); }
const string VERSION_DEFINE_OPENXR = "USE_VIVE_OPENXR_1_0_0";
internal struct ScriptingDefinedSettings
{
public string[] scriptingDefinedSymbols;
public BuildTargetGroup[] targetGroups;
public ScriptingDefinedSettings(string[] symbols, BuildTargetGroup[] groups)
{
scriptingDefinedSymbols = symbols;
targetGroups = groups;
}
}
static readonly ScriptingDefinedSettings m_ScriptDefineSettingOpenXRAndroid = new ScriptingDefinedSettings(
new string[] { VERSION_DEFINE_OPENXR, },
new BuildTargetGroup[] { BuildTargetGroup.Android, }
);
const string XR_LOADER_OPENXR_NAME = "UnityEngine.XR.OpenXR.OpenXRLoader";
internal static bool ViveOpenXRAndroidAssigned { get { return XRPackageMetadataStore.IsLoaderAssigned(XR_LOADER_OPENXR_NAME, BuildTargetGroup.Android); } }
static void AddScriptingDefineSymbols(ScriptingDefinedSettings setting)
{
for (int group_index = 0; group_index < setting.targetGroups.Length; group_index++)
{
var group = setting.targetGroups[group_index];
string definesString = PlayerSettings.GetScriptingDefineSymbolsForGroup(group);
List<string> allDefines = definesString.Split(';').ToList();
for (int symbol_index = 0; symbol_index < setting.scriptingDefinedSymbols.Length; symbol_index++)
{
if (!allDefines.Contains(setting.scriptingDefinedSymbols[symbol_index]))
{
DEBUG("AddDefineSymbols() " + setting.scriptingDefinedSymbols[symbol_index] + " to group " + group);
allDefines.Add(setting.scriptingDefinedSymbols[symbol_index]);
}
else
{
DEBUG("AddDefineSymbols() " + setting.scriptingDefinedSymbols[symbol_index] + " already existed.");
}
}
PlayerSettings.SetScriptingDefineSymbolsForGroup(
group,
string.Join(";", allDefines.ToArray())
);
}
}
static void RemoveScriptingDefineSymbols(ScriptingDefinedSettings setting)
{
for (int group_index = 0; group_index < setting.targetGroups.Length; group_index++)
{
var group = setting.targetGroups[group_index];
string definesString = PlayerSettings.GetScriptingDefineSymbolsForGroup(group);
List<string> allDefines = definesString.Split(';').ToList();
for (int symbol_index = 0; symbol_index < setting.scriptingDefinedSymbols.Length; symbol_index++)
{
if (allDefines.Contains(setting.scriptingDefinedSymbols[symbol_index]))
{
DEBUG("RemoveDefineSymbols() " + setting.scriptingDefinedSymbols[symbol_index] + " from group " + group);
allDefines.Remove(setting.scriptingDefinedSymbols[symbol_index]);
}
else
{
DEBUG("RemoveDefineSymbols() " + setting.scriptingDefinedSymbols[symbol_index] + " already existed.");
}
}
PlayerSettings.SetScriptingDefineSymbolsForGroup(
group,
string.Join(";", allDefines.ToArray())
);
}
}
static bool HasScriptingDefineSymbols(ScriptingDefinedSettings setting)
{
for (int group_index = 0; group_index < setting.targetGroups.Length; group_index++)
{
var group = setting.targetGroups[group_index];
string definesString = PlayerSettings.GetScriptingDefineSymbolsForGroup(group);
List<string> allDefines = definesString.Split(';').ToList();
for (int symbol_index = 0; symbol_index < setting.scriptingDefinedSymbols.Length; symbol_index++)
{
if (!allDefines.Contains(setting.scriptingDefinedSymbols[symbol_index]))
{
return false;
}
}
}
return true;
}
static void CheckScriptingDefineSymbols()
{
// Adds the script symbol if Vive OpenXR Plugin - Android is imported and assigned in XR Plugin-in Management.
if (ViveOpenXRAndroidAssigned)
{
if (!HasScriptingDefineSymbols(m_ScriptDefineSettingOpenXRAndroid))
{
DEBUG("OnUpdate() Adds m_ScriptDefineSettingOpenXRAndroid.");
AddScriptingDefineSymbols(m_ScriptDefineSettingOpenXRAndroid);
}
}
// Removes the script symbol if Vive OpenXR Plugin - Android is uninstalled.
else
{
if (HasScriptingDefineSymbols(m_ScriptDefineSettingOpenXRAndroid))
{
DEBUG("OnUpdate() Removes m_ScriptDefineSettingOpenXRAndroid.");
RemoveScriptingDefineSymbols(m_ScriptDefineSettingOpenXRAndroid);
}
}
}
static void OnUpdate()
{
//CheckScriptingDefineSymbols();
}
static CheckIfVIVEEnabled()
{
EditorApplication.update += OnUpdate;
}
}
}
#endif

View File

@@ -19,14 +19,27 @@ namespace VIVE.OpenXR
"vive.openxr.feature.foveation", "vive.openxr.feature.foveation",
FacialTracking.ViveFacialTracking.featureId,
PlaneDetection.VivePlaneDetection.featureId,
-Anchor.ViveAnchor.featureId,
+VivePathEnumeration.featureId,
Feature.ViveAnchor.featureId,
DisplayRefreshRate.ViveDisplayRefreshRate.featureId,
Passthrough.VivePassthrough.featureId,
FirstPersonObserver.ViveFirstPersonObserver.FeatureId,
SecondaryViewConfiguration.ViveSecondaryViewConfiguration.FeatureId,
UserPresence.ViveUserPresence.featureId,
CompositionLayer.ViveCompositionLayerExtraSettings.featureId,
FrameSynchronization.ViveFrameSynchronization.featureId,
EyeTracker.ViveEyeTracker.featureId,
Feature.ViveMockRuntime.featureId,
Interaction.ViveInteractions.featureId,
},
UiName = "VIVE XR Support",
Description = "Necessary to deploy an VIVE XR compatible app.",
FeatureSetId = "com.htc.vive.openxr.featureset.vivexr",
#if UNITY_ANDROID
DefaultFeatureIds = new string[] { VIVEFocus3Feature.featureId, VIVEFocus3Profile.featureId, },
-SupportedBuildTargets = new BuildTargetGroup[] { BuildTargetGroup.Android }
+#endif
+SupportedBuildTargets = new BuildTargetGroup[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone }
)]
-sealed class VIVEFocus3FeatureSet { }
+sealed class ViveOpenXRFeatureSet { }
}
#endif

View File

@@ -0,0 +1,81 @@
using System;
using System.Reflection;
using UnityEditor;
using UnityEditor.Build.Reporting;
using UnityEditor.XR.OpenXR.Features;
using UnityEngine.XR.OpenXR.Features;
using UnityEngine.XR.OpenXR;
using static VIVE.OpenXR.VIVEFocus3Feature;
namespace VIVE.OpenXR.Editor
{
public class ViveSpectatorCameraProcess : OpenXRFeatureBuildHooks
{
public override int callbackOrder => 1;
public override Type featureType => typeof(VIVEFocus3Feature);
/// <summary>
/// Enable or disable the "First Person Observer" extension according to the Spectator Camera Feature.
/// </summary>
/// <param name="enable">Type True if Spectator Camera Feature is enabled. Otherwise, type False.</param>
private static void SetFirstPersonObserver(in bool enable)
{
var settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Android);
foreach (OpenXRFeature feature in settings.GetFeatures<OpenXRFeature>())
{
FieldInfo fieldInfoOpenXrExtensionStrings = typeof(OpenXRFeature).GetField(
"openxrExtensionStrings",
BindingFlags.NonPublic | BindingFlags.Instance);
if (fieldInfoOpenXrExtensionStrings != null)
{
var openXrExtensionStringsArray =
((string)fieldInfoOpenXrExtensionStrings.GetValue(feature)).Split(' ');
foreach (var stringItem in openXrExtensionStringsArray)
{
if (string.IsNullOrEmpty(stringItem))
{
continue;
}
if (!string.Equals(stringItem, FirstPersonObserver.ViveFirstPersonObserver.OPEN_XR_EXTENSION_STRING))
{
continue;
}
feature.enabled = enable;
return;
}
}
}
}
#region The callbacks during the build process when your OpenXR Extension is enabled.
protected override void OnPreprocessBuildExt(BuildReport report)
{
if (IsViveSpectatorCameraEnabled())
{
SetFirstPersonObserver(true);
UnityEngine.Debug.Log("Enable \"First Person Observer\" extension due to the Spectator Camera Feature.");
}
else
{
SetFirstPersonObserver(false);
UnityEngine.Debug.Log("Disable \"First Person Observer\" extension because Spectator Camera Feature is closed.");
}
}
protected override void OnPostGenerateGradleAndroidProjectExt(string path)
{
}
protected override void OnPostprocessBuildExt(BuildReport report)
{
}
#endregion
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4fcb7e5a984acb64bb9221b9b05c0517
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,54 +0,0 @@
================================================================================
Copyright 2017-2021, HTC Corporation. All rights reserved.
================================================================================
Unless otherwise provided herein or in the folder you download or use, the information in this Work is the exclusive property of HTC.
Please note that this Work includes VIVE SDK native binary which is subject to a sperate license agreement described below. You can find more detailed information about VIVE SDK and its available plugin software packages at VIVE developer resource page (https://developer.vive.com/resources/knowledgebase/wave-sdk/).
*VIVE SDK native binary:
Your use of VIVE SDK native binary will be subject to SDK License Agreement between you and HTC. You can find the text of license agreement at https://developer.vive.com/resources/knowledgebase/sdk-license-agreement-english-version/. Please read it carefully before using this Work.
*VIVE SDK Plugin Package:
Your use of plugin software package will be subject to the license terms contemplated herein. You can use, modify, share and/or reproduce the VIVE SDK Plugin Package in accordance with the Agreement herein.
If you do not agree to the terms of the Agreement, please do not use this Work.
The VIVE SDK native binary contains some third party software which separate license terms may apply. Please refer to the Accompanying License in a separate file named “VIVE SDK Native Binary Accompanying OSS License”.
================================================================================
License Terms for VIVE SDK Plugin Package
The works ("Work") herein refer to the software developed or owned by
HTC Corporation ("HTC") under the terms of the license. Unless otherwise
provided herein or in the folder you download or use, the information in
this Work is the exclusive property of HTC. HTC grants the
legal user the right to use the Work within the scope of the legitimate
development of software. No further right is granted under this license,
including but not limited to, distribution, reproduction and
modification. Any other usage of the Works shall be subject to the
written consent of HTC.
The use of the Work is permitted provided that the following conditions
are met:
* The Work is used in a source code form must retain the above
copyright notice, this list of conditions and the following
disclaimer.
* The Work is used in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distributions.
* Neither HTC nor the names of its contributors may be used to
endorse or promote products derived from this software without
specific prior written permission.
THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER
DEALINGS IN THE WORK.

View File

@@ -0,0 +1,4 @@
Copyright © HTC Corporation, LLC and its affiliates. All rights reserved.
Your use of this SDK, sample, or tool is subject to HTC VIVE SDK License Agreement, available at https://developer.vive.com/resources/downloads/licenses-and-agreements/

View File

@@ -0,0 +1,38 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Runtime.InteropServices;
namespace VIVE.OpenXR.Feature
{
public interface IViveFeatureWrapper
{
public bool OnInstanceCreate(XrInstance xrInstance, IntPtr xrGetInstanceProcAddr);
public void OnInstanceDestroy();
}
public class ViveFeatureWrapperBase<T> where T : ViveFeatureWrapperBase<T>, new()
{
private static readonly Lazy<T> lazyInstance = new Lazy<T>(() => new T());
public static T Instance => lazyInstance.Value;
// Set to true in your feature's OnInstanceCreate.
public bool IsInited { get; protected set; } = false;
public OpenXRHelper.xrGetInstanceProcAddrDelegate xrGetInstanceProcAddr;
/// <summary>
/// Complete xrGetInstanceProcAddr by setting the function pointer received in OnInstanceCreate.
/// </summary>
/// <param name="intPtr">The xrGetInstanceProcAddr function pointer.</param>
public void SetGetInstanceProcAddrPtr(IntPtr intPtr)
{
if (intPtr == null || intPtr == IntPtr.Zero)
throw new Exception("xrGetInstanceProcAddr is null");
xrGetInstanceProcAddr = Marshal.GetDelegateForFunctionPointer<OpenXRHelper.xrGetInstanceProcAddrDelegate>(intPtr);
}
}
}
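For reference, here is a minimal sketch of a wrapper built on the new base class and interface. MySampleWrapper is a hypothetical name, and the xrGetSystemProperties lookup simply mirrors the pattern the reworked CommonWrapper uses later in this commit; it is not an additional API.
// Hypothetical wrapper sketch (not part of the commit), following the CommonWrapper pattern shown below.
using System;
using VIVE.OpenXR;
using VIVE.OpenXR.Feature;
public class MySampleWrapper : ViveFeatureWrapperBase<MySampleWrapper>, IViveFeatureWrapper
{
    OpenXRHelper.xrGetSystemPropertiesDelegate XrGetSystemProperties;
    // Call from your OpenXRFeature.OnInstanceCreate(), passing OpenXRFeature.xrGetInstanceProcAddr.
    public bool OnInstanceCreate(XrInstance xrInstance, IntPtr xrGetInstanceProcAddrPtr)
    {
        if (IsInited) return true;
        if (xrInstance == 0) return false;
        // Resolve xrGetInstanceProcAddr first, then look up the entry points this wrapper needs.
        SetGetInstanceProcAddrPtr(xrGetInstanceProcAddrPtr);
        bool ret = OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance,
            "xrGetSystemProperties", out XrGetSystemProperties);
        IsInited = ret;
        return ret;
    }
    public void OnInstanceDestroy()
    {
        IsInited = false;
        XrGetSystemProperties = null;
    }
}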

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a27dc5505cdb29347aeda46676cedaa8
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,80 @@
using System;
using System.Runtime.InteropServices;
namespace VIVE.OpenXR
{
public static class MemoryTools
{
/// <summary>
/// Convert the enum array to IntPtr. Should call <see cref="ReleaseRawMemory(IntPtr)"/> after use.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="array"></param>
/// <returns></returns>
public static IntPtr ToIntPtr<T>(T[] array) where T : Enum
{
int size = Marshal.SizeOf(typeof(T)) * array.Length;
IntPtr ptr = Marshal.AllocHGlobal(size);
int[] intArray = new int[array.Length];
for (int i = 0; i < array.Length; i++)
intArray[i] = (int)(object)array[i];
Marshal.Copy(intArray, 0, ptr, array.Length);
return ptr;
}
/// <summary>
/// Allocate a raw buffer with the same size as the input array. Should call <see cref="ReleaseRawMemory(IntPtr)"/> after use.
/// </summary>
/// <typeparam name="T">Data type; may be a primitive type or a struct.</typeparam>
/// <param name="refArray">The data array</param>
/// <returns>The memory handle. Should release by <see cref="ReleaseRawMemory(IntPtr)"/></returns>
public static IntPtr MakeRawMemory<T>(T[] refArray)
{
int size = Marshal.SizeOf(typeof(T)) * refArray.Length;
return Marshal.AllocHGlobal(size);
}
/// <summary>
/// Copy the raw memory to the array. You should make sure the array has the same size as the raw memory.
/// </summary>
/// <typeparam name="T">Convert the memory to this type array.</typeparam>
/// <param name="array">The output array.</param>
/// <param name="raw">The data source in raw memory form.</param>
/// <param name="count">Specify the copy count. Count should be less than array length.</param>
public static void CopyFromRawMemory<T>(T[] array, IntPtr raw, int count = 0)
{
int N = array.Length;
if (count > 0 && count < array.Length)
N = count;
int step = Marshal.SizeOf(typeof(T));
for (int i = 0; i < N; i++)
{
array[i] = Marshal.PtrToStructure<T>(IntPtr.Add(raw, i * step));
}
}
/// <summary>
/// Copy the array into the raw memory. Make sure the raw buffer is large enough.
/// </summary>
/// <typeparam name="T">Convert this type array to raw memory.</typeparam>
/// <param name="raw">The output data in raw memory form</param>
/// <param name="array">The data source</param>
public static void CopyToRawMemory<T>(IntPtr raw, T[] array)
{
int step = Marshal.SizeOf(typeof(T));
for (int i = 0; i < array.Length; i++)
{
Marshal.StructureToPtr<T>(array[i], IntPtr.Add(raw, i * step), false);
}
}
/// <summary>
/// Release the raw memory handle which is created by <see cref="MakeRawMemory{T}(T[])"/>
/// </summary>
/// <param name="ptr"></param>
public static void ReleaseRawMemory(IntPtr ptr)
{
Marshal.FreeHGlobal(ptr);
}
}
}
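A short usage sketch for MemoryTools (not part of the commit): it round-trips a struct array through unmanaged memory with MakeRawMemory, CopyToRawMemory, CopyFromRawMemory and ReleaseRawMemory, the same allocate/copy/release pattern the swapchain format enumeration in CommonWrapper relies on. The Point3 struct is only an example type.
// Hypothetical MemoryTools usage (illustration only).
using System;
using VIVE.OpenXR;
public static class MemoryToolsSample
{
    struct Point3 { public float x, y, z; }
    public static Point3[] RoundTrip()
    {
        var source = new Point3[] { new Point3 { x = 1f }, new Point3 { y = 2f } };
        var result = new Point3[source.Length];
        // Allocate an unmanaged buffer sized for the array, copy in, copy back out, then free it.
        IntPtr raw = MemoryTools.MakeRawMemory(source);
        try
        {
            MemoryTools.CopyToRawMemory(raw, source);
            MemoryTools.CopyFromRawMemory(result, raw, source.Length);
        }
        finally
        {
            // MakeRawMemory allocates with AllocHGlobal, so the buffer must always be released.
            MemoryTools.ReleaseRawMemory(raw);
        }
        return result;
    }
}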

View File

@@ -3,6 +3,7 @@
using System;
using System.Runtime.InteropServices;
using UnityEngine;
using UnityEngine.Profiling;
namespace VIVE.OpenXR.Feature
{
@@ -10,23 +11,16 @@ namespace VIVE.OpenXR.Feature
/// To use this wrapper, you need to call CommonWrapper.Instance.OnInstanceCreate() in your feature's OnInstanceCreate(),
/// and call CommonWrapper.Instance.OnInstanceDestroy() in your feature's OnInstanceDestroy().
/// </summary>
-public class CommonWrapper
+public class CommonWrapper : ViveFeatureWrapperBase<CommonWrapper>, IViveFeatureWrapper
{
static CommonWrapper instance = null;
public static CommonWrapper Instance
{
get
{
if (instance == null)
instance = new CommonWrapper();
return instance;
}
}
bool isInited = false;
OpenXRHelper.xrGetInstanceProcAddrDelegate XrGetInstanceProcAddr;
OpenXRHelper.xrGetSystemPropertiesDelegate XrGetSystemProperties;
OpenXRHelper.xrCreateSwapchainDelegate XrCreateSwapchain;
OpenXRHelper.xrDestroySwapchainDelegate XrDestroySwapchain;
OpenXRHelper.xrEnumerateSwapchainFormatsDelegate XrEnumerateSwapchainFormats;
OpenXRHelper.xrEnumerateSwapchainImagesDelegate XrEnumerateSwapchainImages;
OpenXRHelper.xrWaitSwapchainImageDelegate XrWaitSwapchainImage;
OpenXRHelper.xrAcquireSwapchainImageDelegate XrAcquireSwapchainImage;
OpenXRHelper.xrReleaseSwapchainImageDelegate XrReleaseSwapchainImage;
/// <summary>
/// In feature's OnInstanceCreate(), call CommonWrapper.Instance.OnInstanceCreate() for init common APIs.
@@ -35,32 +29,32 @@ namespace VIVE.OpenXR.Feature
/// <param name="xrGetInstanceProcAddr">Pass OpenXRFeature.xrGetInstanceProcAddr in.</param>
/// <returns></returns>
/// <exception cref="Exception">If input data not valid.</exception>
-public bool OnInstanceCreate(XrInstance xrInstance, IntPtr xrGetInstanceProcAddr)
+public bool OnInstanceCreate(XrInstance xrInstance, IntPtr xrGetInstanceProcAddrPtr)
{
-if (isInited) return true;
+if (IsInited) return true;
if (xrInstance == 0)
throw new Exception("CommonWrapper: xrInstance is null");
Debug.Log("CommonWrapper: OnInstanceCreate()");
-/// OpenXRFeature.xrGetInstanceProcAddr
+SetGetInstanceProcAddrPtr(xrGetInstanceProcAddrPtr);
if (xrGetInstanceProcAddr == null || xrGetInstanceProcAddr == IntPtr.Zero)
throw new Exception("CommonWrapper: xrGetInstanceProcAddr is null");
Debug.Log("CommonWrapper: Get function pointer of xrGetInstanceProcAddr.");
XrGetInstanceProcAddr = Marshal.GetDelegateForFunctionPointer(
xrGetInstanceProcAddr,
typeof(OpenXRHelper.xrGetInstanceProcAddrDelegate)) as OpenXRHelper.xrGetInstanceProcAddrDelegate;
bool ret = true;
IntPtr funcPtr = IntPtr.Zero;
-ret &= OpenXRHelper.GetXrFunctionDelegate(XrGetInstanceProcAddr, xrInstance, "xrGetSystemProperties", out XrGetSystemProperties);
+ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrGetSystemProperties", out XrGetSystemProperties);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrCreateSwapchain", out XrCreateSwapchain);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrDestroySwapchain", out XrDestroySwapchain);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrEnumerateSwapchainFormats", out XrEnumerateSwapchainFormats);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrEnumerateSwapchainImages", out XrEnumerateSwapchainImages);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrWaitSwapchainImage", out XrWaitSwapchainImage);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrAcquireSwapchainImage", out XrAcquireSwapchainImage);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrReleaseSwapchainImage", out XrReleaseSwapchainImage);
if (!ret)
throw new Exception("CommonWrapper: Get function pointers failed.");
-isInited = ret;
+IsInited = ret;
return ret;
}
@@ -70,21 +64,20 @@ namespace VIVE.OpenXR.Feature
/// <returns></returns>
public void OnInstanceDestroy()
{
-isInited = false;
+IsInited = false;
XrGetInstanceProcAddr = null;
XrGetSystemProperties = null;
Debug.Log("CommonWrapper: OnInstanceDestroy()");
}
public XrResult GetInstanceProcAddr(XrInstance instance, string name, out IntPtr function)
{
-if (isInited == false || XrGetInstanceProcAddr == null)
+if (IsInited == false || xrGetInstanceProcAddr == null)
{
function = IntPtr.Zero;
return XrResult.XR_ERROR_HANDLE_INVALID;
}
-return XrGetInstanceProcAddr(instance, name, out function);
+return xrGetInstanceProcAddr(instance, name, out function);
}
/// <summary>
@@ -97,7 +90,7 @@ namespace VIVE.OpenXR.Feature
/// <returns></returns>
public XrResult GetSystemProperties(XrInstance instance, XrSystemId systemId, ref XrSystemProperties properties)
{
-if (isInited == false || XrGetSystemProperties == null)
+if (IsInited == false || XrGetSystemProperties == null)
{
return XrResult.XR_ERROR_HANDLE_INVALID;
}
@@ -136,5 +129,115 @@ namespace VIVE.OpenXR.Feature
Marshal.FreeHGlobal(systemProperties.next); Marshal.FreeHGlobal(systemProperties.next);
return ret; return ret;
} }
public XrResult CreateSwapchain(XrSession session, ref XrSwapchainCreateInfo createInfo, out XrSwapchain swapchain)
{
if (IsInited == false || XrCreateSwapchain == null)
{
swapchain = default;
return XrResult.XR_ERROR_HANDLE_INVALID;
}
return XrCreateSwapchain(session, ref createInfo, out swapchain);
}
public XrResult DestroySwapchain(XrSwapchain swapchain)
{
if (IsInited == false || XrDestroySwapchain == null)
{
return XrResult.XR_ERROR_HANDLE_INVALID;
}
return XrDestroySwapchain(swapchain);
}
public XrResult EnumerateSwapchainFormats(XrSession session, uint formatCapacityInput, ref uint formatCountOutput, ref long[] formats)
{
if (IsInited == false || XrEnumerateSwapchainFormats == null)
{
formatCountOutput = 0;
return XrResult.XR_ERROR_HANDLE_INVALID;
}
if (formatCapacityInput != 0 && (formats == null || formats.Length < formatCapacityInput))
return XrResult.XR_ERROR_SIZE_INSUFFICIENT;
if (formatCapacityInput == 0)
{
Debug.Log("CommonWrapper: EnumerateSwapchainFormats(ci=" + formatCapacityInput + ")");
return XrEnumerateSwapchainFormats(session, 0, ref formatCountOutput, IntPtr.Zero);
}
else
{
Debug.Log("CommonWrapper: EnumerateSwapchainFormats(ci=" + formatCapacityInput + ", formats=long[" + formats.Length + "])");
IntPtr formatsPtr = MemoryTools.MakeRawMemory(formats);
var ret = XrEnumerateSwapchainFormats(session, formatCapacityInput, ref formatCountOutput, formatsPtr);
if (ret == XrResult.XR_SUCCESS)
MemoryTools.CopyFromRawMemory(formats, formatsPtr, (int)formatCountOutput);
MemoryTools.ReleaseRawMemory(formatsPtr);
return ret;
}
}
public XrResult EnumerateSwapchainImages(XrSwapchain swapchain, uint imageCapacityInput, ref uint imageCountOutput, IntPtr imagesPtr)
{
if (IsInited == false || XrEnumerateSwapchainImages == null)
{
imageCountOutput = 0;
return XrResult.XR_ERROR_HANDLE_INVALID;
}
return XrEnumerateSwapchainImages(swapchain, imageCapacityInput, ref imageCountOutput, imagesPtr);
}
[DllImport("viveopenxr", EntryPoint = "CwAcquireSwapchainImage")]
public static extern XrResult CwAcquireSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageAcquireInfo acquireInfo, out uint index);
public XrResult AcquireSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageAcquireInfo acquireInfo, out uint index)
{
if (IsInited == false || XrAcquireSwapchainImage == null)
{
index = 0;
return XrResult.XR_ERROR_HANDLE_INVALID;
}
Profiler.BeginSample("ASW: xrAcqScImg");
var res = XrAcquireSwapchainImage(swapchain, ref acquireInfo, out index);
Profiler.EndSample();
return res;
}
[DllImport("viveopenxr", EntryPoint = "CwWaitSwapchainImage")]
public static extern XrResult CwWaitSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageWaitInfo waitInfo);
public XrResult WaitSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageWaitInfo waitInfo)
{
if (IsInited == false || XrWaitSwapchainImage == null)
{
return XrResult.XR_ERROR_HANDLE_INVALID;
}
Profiler.BeginSample("ASW: xrWaitScImg");
var res = XrWaitSwapchainImage(swapchain, ref waitInfo);
Profiler.EndSample();
return res;
}
[DllImport("viveopenxr", EntryPoint = "CwReleaseSwapchainImage")]
public static extern XrResult CwReleaseSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageReleaseInfo releaseInfo);
public XrResult ReleaseSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageReleaseInfo releaseInfo)
{
if (IsInited == false || XrReleaseSwapchainImage == null)
{
return XrResult.XR_ERROR_HANDLE_INVALID;
}
// Profile the native release call with the Unity Profiler.
Profiler.BeginSample("ASW: xrRelScImg");
var res = XrReleaseSwapchainImage(swapchain, ref releaseInfo);
Profiler.EndSample();
return res;
}
} }
} }
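// Illustrative usage sketch (not part of the diff above): how a feature might initialize the
// reworked CommonWrapper and enumerate swapchain formats with the usual OpenXR two-call idiom.
// The class and method names below are assumptions for illustration; OnInstanceCreate is assumed
// to take the raw xrGetInstanceProcAddr pointer, as FutureWrapper and SpaceWrapper now do.
using System;
using UnityEngine;

namespace VIVE.OpenXR.Feature.Samples
{
	static class CommonWrapperUsageSketch
	{
		// Assumed to be called from a feature's OnInstanceCreate with the loader entry point.
		public static bool Init(XrInstance xrInstance, IntPtr xrGetInstanceProcAddrPtr)
		{
			return CommonWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddrPtr);
		}

		public static long[] GetSwapchainFormats(XrSession session)
		{
			// First call with capacity 0 to query how many formats the runtime supports.
			uint count = 0;
			long[] none = null;
			if (CommonWrapper.Instance.EnumerateSwapchainFormats(session, 0, ref count, ref none) != XrResult.XR_SUCCESS || count == 0)
				return Array.Empty<long>();

			// Second call with a buffer large enough for all formats.
			var formats = new long[count];
			if (CommonWrapper.Instance.EnumerateSwapchainFormats(session, count, ref count, ref formats) != XrResult.XR_SUCCESS)
				return Array.Empty<long>();

			Debug.Log("Supported swapchain formats: " + string.Join(",", formats));
			return formats;
		}
	}
}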

View File

@@ -0,0 +1,207 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using UnityEngine;
using UnityEngine.XR.OpenXR;
namespace VIVE.OpenXR.Feature
{
using XrFutureEXT = System.IntPtr;
/// <summary>
/// To use this wrapper,
/// 1. Add the "XR_EXT_Future" extension to the instance's enabled extensions list.
/// 2. Call FutureWrapper.Instance.OnInstanceCreate() in your feature's OnInstanceCreate().
/// 3. Call FutureWrapper.Instance.OnInstanceDestroy() in your feature's OnInstanceDestroy().
///
/// <see cref="VIVE.OpenXR.Toolkits.FutureTask.Poll"/> function helps make async Task.
/// </summary>
public class FutureWrapper : ViveFeatureWrapperBase<FutureWrapper>, IViveFeatureWrapper
{
public enum XrFutureStateEXT
{
None = 0, // Not defined in extension. A default value.
Pending = 1,
Ready = 2,
MAX = 0x7FFFFFFF
}
public struct XrFuturePollInfoEXT {
public XrStructureType type; // XR_TYPE_FUTURE_POLL_INFO_EXT
public IntPtr next;
public XrFutureEXT future;
}
public struct XrFuturePollResultEXT {
public XrStructureType type; // XR_TYPE_FUTURE_POLL_RESULT_EXT
public IntPtr next;
public XrFutureStateEXT state;
}
public struct XrFutureCancelInfoEXT
{
public XrStructureType type; // XR_TYPE_FUTURE_CANCEL_INFO_EXT
public IntPtr next;
public XrFutureEXT future;
}
public struct XrFutureCompletionBaseHeaderEXT
{
public XrStructureType type; // XR_TYPE_FUTURE_COMPLETION_EXT
public IntPtr next;
public XrResult futureResult;
}
public struct XrFutureCompletionEXT
{
public XrStructureType type; // XR_TYPE_FUTURE_COMPLETION_EXT
public IntPtr next;
public XrResult futureResult;
}
public delegate XrResult XrPollFutureEXTDelegate(XrInstance instance, ref XrFuturePollInfoEXT pollInfo, out XrFuturePollResultEXT pollResult);
public delegate XrResult XrCancelFutureEXTDelegate(XrInstance instance, ref XrFutureCancelInfoEXT cancelInfo);
XrPollFutureEXTDelegate XrPollFutureEXT;
XrCancelFutureEXTDelegate XrCancelFutureEXT;
XrInstance xrInstance;
/// <summary>
/// Features should call FutureWrapper.Instance.OnInstanceCreate() in their OnInstanceCreate().
/// </summary>
/// <param name="xrInstance"></param>
/// <param name="xrGetInstanceProcAddrPtr"></param>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public bool OnInstanceCreate(XrInstance xrInstance, IntPtr xrGetInstanceProcAddrPtr)
{
if (IsInited) return true;
if (xrInstance == null)
throw new Exception("FutureWrapper: xrInstance is null");
this.xrInstance = xrInstance;
if (xrGetInstanceProcAddrPtr == null)
throw new Exception("FutureWrapper: xrGetInstanceProcAddr is null");
SetGetInstanceProcAddrPtr(xrGetInstanceProcAddrPtr);
Debug.Log("FutureWrapper: OnInstanceCreate()");
bool hasFuture = OpenXRRuntime.IsExtensionEnabled("XR_EXT_future");
if (!hasFuture)
{
Debug.LogError("FutureWrapper: XR_EXT_future is not enabled. Check your feature's kOpenxrExtensionString.");
return false;
}
bool ret = true;
IntPtr funcPtr = IntPtr.Zero;
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrPollFutureEXT", out XrPollFutureEXT);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrCancelFutureEXT", out XrCancelFutureEXT);
if (!ret)
{
Debug.LogError("FutureWrapper: Failed to get function pointer.");
return false;
}
IsInited = ret;
return ret;
}
public void OnInstanceDestroy()
{
Debug.Log("FutureWrapper: OnInstanceDestroy()");
IsInited = false;
XrPollFutureEXT = null;
XrCancelFutureEXT = null;
xrInstance = 0;
}
/// <summary>
/// Gets the state of a future. If the state is Ready, call the extension's completion function to get the result.
/// </summary>
/// <param name="pollInfo"></param>
/// <param name="pollResult"></param>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public XrResult PollFuture(ref XrFuturePollInfoEXT pollInfo, out XrFuturePollResultEXT pollResult)
{
pollResult = new XrFuturePollResultEXT()
{
type = XrStructureType.XR_TYPE_FUTURE_POLL_RESULT_EXT,
next = IntPtr.Zero,
state = XrFutureStateEXT.None
};
if (!IsInited)
return XrResult.XR_ERROR_HANDLE_INVALID;
return XrPollFutureEXT(xrInstance, ref pollInfo, out pollResult);
}
/// <summary>
/// Gets the state of a future. If the state is Ready, call the extension's completion function to get the result.
/// </summary>
/// <param name="future"></param>
/// <param name="pollResult"></param>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public XrResult PollFuture(XrFutureEXT future, out XrFuturePollResultEXT pollResult)
{
pollResult = new XrFuturePollResultEXT()
{
type = XrStructureType.XR_TYPE_FUTURE_POLL_RESULT_EXT,
next = IntPtr.Zero,
state = XrFutureStateEXT.None
};
if (!IsInited)
return XrResult.XR_ERROR_HANDLE_INVALID;
XrFuturePollInfoEXT pollInfo = new XrFuturePollInfoEXT()
{
type = XrStructureType.XR_TYPE_FUTURE_POLL_INFO_EXT,
next = IntPtr.Zero,
future = future
};
return XrPollFutureEXT(xrInstance, ref pollInfo, out pollResult);
}
/// <summary>
/// This function cancels the future and signals that the async operation is not required.
/// After a future has been cancelled, any function using this future must return XR_ERROR_FUTURE_INVALID_EXT.
/// </summary>
/// <param name="cancelInfo"></param>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public XrResult CancelFuture(ref XrFutureCancelInfoEXT cancelInfo)
{
if (!IsInited)
return XrResult.XR_ERROR_HANDLE_INVALID;
return XrCancelFutureEXT(xrInstance, ref cancelInfo);
}
/// <summary>
/// <see cref="CancelFuture(ref XrFutureCancelInfoEXT)"/>
/// </summary>
/// <param name="future"></param>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public XrResult CancelFuture(XrFutureEXT future)
{
if (!IsInited)
return XrResult.XR_ERROR_HANDLE_INVALID;
XrFutureCancelInfoEXT cancelInfo = new XrFutureCancelInfoEXT()
{
type = XrStructureType.XR_TYPE_FUTURE_CANCEL_INFO_EXT,
next = IntPtr.Zero,
future = future
};
return XrCancelFutureEXT(xrInstance, ref cancelInfo);
}
}
}
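// Illustrative sketch (not part of the diff above): polling an XrFutureEXT handle each frame
// until it becomes Ready. The future handle is assumed to have been returned by some extension
// call; the matching extension-specific completion function (not shown) retrieves the result.
using System;
using System.Collections;

namespace VIVE.OpenXR.Feature.Samples
{
	static class FutureUsageSketch
	{
		// Run as a coroutine: yields once per frame while the future is still Pending.
		public static IEnumerator WaitUntilReady(IntPtr future, Action<bool> onDone)
		{
			while (true)
			{
				var res = FutureWrapper.Instance.PollFuture(future, out FutureWrapper.XrFuturePollResultEXT pollResult);
				if (res != XrResult.XR_SUCCESS)
				{
					onDone?.Invoke(false); // e.g. XR_ERROR_HANDLE_INVALID when the wrapper is not initialized
					yield break;
				}
				if (pollResult.state == FutureWrapper.XrFutureStateEXT.Ready)
				{
					onDone?.Invoke(true); // ready: call the extension's completion function next
					yield break;
				}
				yield return null; // still Pending, try again next frame
			}
		}
	}
}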

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e8522c7af0a4127409a8800e1ddd5985
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,10 +1,5 @@
// Copyright HTC Corporation All Rights Reserved. // Copyright HTC Corporation All Rights Reserved.
// Remove FAKE_DATA if editor or windows is supported.
#if UNITY_EDITOR
#define FAKE_DATA
#endif
using System; using System;
using UnityEngine; using UnityEngine;
@@ -15,21 +10,8 @@ namespace VIVE.OpenXR.Feature
/// To use this wrapper, you need to call CommonWrapper.Instance.OnInstanceCreate() in your feature's OnInstanceCreate(), /// To use this wrapper, you need to call CommonWrapper.Instance.OnInstanceCreate() in your feature's OnInstanceCreate(),
/// and call CommonWrapper.Instance.OnInstanceDestroy() in your feature's OnInstanceDestroy(). /// and call CommonWrapper.Instance.OnInstanceDestroy() in your feature's OnInstanceDestroy().
/// </summary> /// </summary>
public class SpaceWrapper public class SpaceWrapper : ViveFeatureWrapperBase<SpaceWrapper>, IViveFeatureWrapper
{ {
static SpaceWrapper instance = null;
public static SpaceWrapper Instance
{
get
{
if (instance == null)
instance = new SpaceWrapper();
return instance;
}
}
bool isInited = false;
delegate XrResult DelegateXrLocateSpace(XrSpace space, XrSpace baseSpace, XrTime time, ref XrSpaceLocation location); delegate XrResult DelegateXrLocateSpace(XrSpace space, XrSpace baseSpace, XrTime time, ref XrSpaceLocation location);
delegate XrResult DelegateXrDestroySpace(XrSpace space); delegate XrResult DelegateXrDestroySpace(XrSpace space);
@@ -44,31 +26,30 @@ namespace VIVE.OpenXR.Feature
/// <param name="GetAddr"></param> /// <param name="GetAddr"></param>
/// <returns></returns> /// <returns></returns>
/// <exception cref="Exception"></exception> /// <exception cref="Exception"></exception>
public bool OnInstanceCreate(XrInstance xrInstance, OpenXRHelper.xrGetInstanceProcAddrDelegate GetAddr) public bool OnInstanceCreate(XrInstance xrInstance, IntPtr GetAddr)
{ {
if (isInited) return true; if (IsInited) return true;
if (xrInstance == null) if (xrInstance == null)
throw new Exception("ViveSpace: xrInstance is null"); throw new Exception("ViveSpace: xrInstance is null");
if (GetAddr == null) SetGetInstanceProcAddrPtr(GetAddr);
throw new Exception("ViveSpace: xrGetInstanceProcAddr is null");
Debug.Log("ViveSpace: OnInstanceCreate()"); Debug.Log("ViveSpace: OnInstanceCreate()");
bool ret = true; bool ret = true;
IntPtr funcPtr = IntPtr.Zero; IntPtr funcPtr = IntPtr.Zero;
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, xrInstance, "xrCreateReferenceSpace", out XrCreateReferenceSpace); ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrCreateReferenceSpace", out XrCreateReferenceSpace);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, xrInstance, "xrLocateSpace", out XrLocateSpace); ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrLocateSpace", out XrLocateSpace);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, xrInstance, "xrDestroySpace", out XrDestroySpace); ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrDestroySpace", out XrDestroySpace);
isInited = ret; IsInited = ret;
return ret; return ret;
} }
public void OnInstanceDestroy() public void OnInstanceDestroy()
{ {
isInited = false; IsInited = false;
XrCreateReferenceSpace = null; XrCreateReferenceSpace = null;
XrLocateSpace = null; XrLocateSpace = null;
XrDestroySpace = null; XrDestroySpace = null;
@@ -77,8 +58,8 @@ namespace VIVE.OpenXR.Feature
/// <summary> /// <summary>
/// Create a reference space without create info. /// Create a reference space without create info.
/// Example: /// Example:
/// CreateReferenceSpace(session, XrReferenceSpaceType.XR_REFERENCE_SPACE_TYPE_LOCAL, XrPosef.identity, out space); /// CreateReferenceSpace(session, XrReferenceSpaceType.XR_REFERENCE_SPACE_TYPE_LOCAL, XrPosef.Identity, out space);
/// CreateReferenceSpace(session, XrReferenceSpaceType.XR_REFERENCE_SPACE_TYPE_STAGE, XrPosef.identity, out space); /// CreateReferenceSpace(session, XrReferenceSpaceType.XR_REFERENCE_SPACE_TYPE_STAGE, XrPosef.Identity, out space);
/// </summary> /// </summary>
/// <param name="session"></param> /// <param name="session"></param>
/// <param name="referenceSpaceType"></param> /// <param name="referenceSpaceType"></param>
@@ -87,8 +68,9 @@ namespace VIVE.OpenXR.Feature
/// <returns></returns> /// <returns></returns>
public XrResult CreateReferenceSpace(XrSession session, XrReferenceSpaceType referenceSpaceType, XrPosef pose, out XrSpace space) public XrResult CreateReferenceSpace(XrSession session, XrReferenceSpaceType referenceSpaceType, XrPosef pose, out XrSpace space)
{ {
if (!isInited) space = 0;
throw new Exception("ViveSpace: not initialized"); if (!IsInited)
return XrResult.XR_ERROR_HANDLE_INVALID;
var createInfo = new XrReferenceSpaceCreateInfo(); var createInfo = new XrReferenceSpaceCreateInfo();
createInfo.type = XrStructureType.XR_TYPE_REFERENCE_SPACE_CREATE_INFO; createInfo.type = XrStructureType.XR_TYPE_REFERENCE_SPACE_CREATE_INFO;
@@ -107,24 +89,25 @@ namespace VIVE.OpenXR.Feature
/// <returns></returns> /// <returns></returns>
public XrResult CreateReferenceSpace(XrSession session, XrReferenceSpaceCreateInfo createInfo, out XrSpace space) public XrResult CreateReferenceSpace(XrSession session, XrReferenceSpaceCreateInfo createInfo, out XrSpace space)
{ {
if (!isInited) space = 0;
throw new Exception("ViveSpace: not initialized"); if (!IsInited)
return XrResult.XR_ERROR_HANDLE_INVALID;
return XrCreateReferenceSpace(session, ref createInfo, out space); return XrCreateReferenceSpace(session, ref createInfo, out space);
} }
public XrResult LocateSpace(XrSpace space, XrSpace baseSpace, XrTime time, ref XrSpaceLocation location) public XrResult LocateSpace(XrSpace space, XrSpace baseSpace, XrTime time, ref XrSpaceLocation location)
{ {
if (!isInited) if (!IsInited)
throw new Exception("ViveSpace: not initialized"); return XrResult.XR_ERROR_HANDLE_INVALID;
Debug.Log($"LocateSpace(s={space}, bs={baseSpace}, t={time}"); //Debug.Log($"LocateSpace(s={space}, bs={baseSpace}, t={time}");
return XrLocateSpace(space, baseSpace, time, ref location); return XrLocateSpace(space, baseSpace, time, ref location);
} }
public XrResult DestroySpace(XrSpace space) public XrResult DestroySpace(XrSpace space)
{ {
if (!isInited) if (!IsInited)
throw new Exception("ViveSpace: not initialized"); return XrResult.XR_ERROR_HANDLE_INVALID;
Debug.Log($"DestroySpace({space})"); Debug.Log($"DestroySpace({space})");
return XrDestroySpace(space); return XrDestroySpace(space);
} }
@@ -157,19 +140,6 @@ namespace VIVE.OpenXR.Feature
public bool GetRelatedPose(XrSpace baseSpace, XrTime time, out UnityEngine.Pose pose) public bool GetRelatedPose(XrSpace baseSpace, XrTime time, out UnityEngine.Pose pose)
{ {
#if FAKE_DATA
if (Application.isEditor)
{
// make a random Pose
//var pos = new Vector3(UnityEngine.Random.Range(-1f, 1f), UnityEngine.Random.Range(-1f, 1f), UnityEngine.Random.Range(-1f, 1f));
//var rot = new Quaternion(UnityEngine.Random.Range(-1f, 1f), UnityEngine.Random.Range(-1f, 1f), UnityEngine.Random.Range(-1f, 1f), UnityEngine.Random.Range(-1f, 1f));
var pos = Vector3.up;
var rot = Quaternion.identity;
rot.Normalize();
pose = new Pose(pos, rot);
return true;
}
#endif
// If the xrBaseSpace is changed, the pose will be updated. // If the xrBaseSpace is changed, the pose will be updated.
pose = default; pose = default;
XrSpaceLocation location = new XrSpaceLocation(); XrSpaceLocation location = new XrSpaceLocation();
@@ -179,14 +149,14 @@ namespace VIVE.OpenXR.Feature
if (ret != XrResult.XR_SUCCESS) if (ret != XrResult.XR_SUCCESS)
{ {
Debug.Log("Space: LocateSpace ret=" + ret); //Debug.Log("Space: LocateSpace ret=" + ret);
return false; return false;
} }
Debug.Log("Space: baseSpace=" + baseSpace + ", space=" + space + ", time=" + time + ", ret=" + ret); //Debug.Log("Space: baseSpace=" + baseSpace + ", space=" + space + ", time=" + time + ", ret=" + ret);
Debug.Log("Space: location.locationFlags=" + location.locationFlags); //Debug.Log("Space: location.locationFlags=" + location.locationFlags);
Debug.Log("Space: location.pose.position=" + location.pose.position.x + "," + location.pose.position.y + "," + location.pose.position.z); //Debug.Log("Space: location.pose.position=" + location.pose.position.x + "," + location.pose.position.y + "," + location.pose.position.z);
Debug.Log("Space: location.pose.orientation=" + location.pose.orientation.x + "," + location.pose.orientation.y + "," + location.pose.orientation.z + "," + location.pose.orientation.w); //Debug.Log("Space: location.pose.orientation=" + location.pose.orientation.x + "," + location.pose.orientation.y + "," + location.pose.orientation.z + "," + location.pose.orientation.w);
if ((location.locationFlags & XrSpaceLocationFlags.XR_SPACE_LOCATION_POSITION_VALID_BIT) > 0 && if ((location.locationFlags & XrSpaceLocationFlags.XR_SPACE_LOCATION_POSITION_VALID_BIT) > 0 &&
(location.locationFlags & XrSpaceLocationFlags.XR_SPACE_LOCATION_ORIENTATION_VALID_BIT) > 0) (location.locationFlags & XrSpaceLocationFlags.XR_SPACE_LOCATION_ORIENTATION_VALID_BIT) > 0)
{ {
@@ -211,7 +181,7 @@ namespace VIVE.OpenXR.Feature
// Managed resource // Managed resource
} }
// Non-managed resource // Non-managed resource
Debug.Log($"Space: DestroySpace({space})"); //Debug.Log($"Space: DestroySpace({space})");
SpaceWrapper.Instance.DestroySpace(space); SpaceWrapper.Instance.DestroySpace(space);
space = 0; space = 0;
disposed = true; disposed = true;
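// Illustrative sketch (not part of the diff above): creating, locating, and destroying a
// reference space with the reworked SpaceWrapper. The session, base space, and XrTime are
// assumed to be supplied by the caller (e.g. the time from ViveInterceptors.Instance.GetPredictTime()).
// Setting location.type to XR_TYPE_SPACE_LOCATION follows the OpenXR spec and is assumed to be
// available on the plugin's XrSpaceLocation/XrStructureType definitions.
using UnityEngine;

namespace VIVE.OpenXR.Feature.Samples
{
	static class SpaceWrapperUsageSketch
	{
		public static void LocateOnce(XrSession session, XrSpace baseSpace, XrTime time)
		{
			// With the changes above, failures now return XR_ERROR_HANDLE_INVALID instead of throwing.
			if (SpaceWrapper.Instance.CreateReferenceSpace(
					session, XrReferenceSpaceType.XR_REFERENCE_SPACE_TYPE_LOCAL, XrPosef.Identity,
					out XrSpace space) != XrResult.XR_SUCCESS)
				return;

			var location = new XrSpaceLocation { type = XrStructureType.XR_TYPE_SPACE_LOCATION };
			if (SpaceWrapper.Instance.LocateSpace(space, baseSpace, time, ref location) == XrResult.XR_SUCCESS)
				Debug.Log("locationFlags=" + location.locationFlags);

			SpaceWrapper.Instance.DestroySpace(space);
		}
	}
}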

View File

@@ -3,6 +3,8 @@ using System.Runtime.InteropServices;
using System; using System;
using UnityEngine; using UnityEngine;
using AOT; using AOT;
using System.Collections.Generic;
using System.Text;
namespace VIVE.OpenXR namespace VIVE.OpenXR
{ {
@@ -17,12 +19,21 @@ namespace VIVE.OpenXR
/// For example: /// For example:
/// protected override IntPtr HookGetInstanceProcAddr(IntPtr func) /// protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
/// { /// {
/// return HtcInterceptors.Instance.HookGetInstanceProcAddr(func); /// return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
/// } /// }
/// </summary> /// </summary>
partial class ViveInterceptors partial class ViveInterceptors
{ {
public const string TAG = "Interceptors"; public const string TAG = "VIVE.OpenXR.ViveInterceptors";
static StringBuilder m_sb = null;
static StringBuilder sb {
get {
if (m_sb == null) { m_sb = new StringBuilder(); }
return m_sb;
}
}
static void DEBUG(StringBuilder msg) { Debug.LogFormat("{0} {1}", TAG, msg); }
static void ERROR(StringBuilder msg) { Debug.LogErrorFormat("{0} {1}", TAG, msg); }
public static ViveInterceptors instance = null; public static ViveInterceptors instance = null;
public static ViveInterceptors Instance public static ViveInterceptors Instance
@@ -37,11 +48,9 @@ namespace VIVE.OpenXR
public ViveInterceptors() public ViveInterceptors()
{ {
Debug.Log("HtcInterceptors"); Debug.Log("ViveInterceptors");
} }
bool isInited = false;
public delegate XrResult DelegateXrGetInstanceProcAddr(XrInstance instance, string name, out IntPtr function); public delegate XrResult DelegateXrGetInstanceProcAddr(XrInstance instance, string name, out IntPtr function);
private static readonly DelegateXrGetInstanceProcAddr hookXrGetInstanceProcAddrHandle = new DelegateXrGetInstanceProcAddr(XrGetInstanceProcAddrInterceptor); private static readonly DelegateXrGetInstanceProcAddr hookXrGetInstanceProcAddrHandle = new DelegateXrGetInstanceProcAddr(XrGetInstanceProcAddrInterceptor);
private static readonly IntPtr hookGetInstanceProcAddrHandlePtr = Marshal.GetFunctionPointerForDelegate(hookXrGetInstanceProcAddrHandle); private static readonly IntPtr hookGetInstanceProcAddrHandlePtr = Marshal.GetFunctionPointerForDelegate(hookXrGetInstanceProcAddrHandle);
@@ -50,8 +59,15 @@ namespace VIVE.OpenXR
[MonoPInvokeCallback(typeof(DelegateXrGetInstanceProcAddr))] [MonoPInvokeCallback(typeof(DelegateXrGetInstanceProcAddr))]
private static XrResult XrGetInstanceProcAddrInterceptor(XrInstance instance, string name, out IntPtr function) private static XrResult XrGetInstanceProcAddrInterceptor(XrInstance instance, string name, out IntPtr function)
{ {
// Used to check if the original function is already hooked.
if (instance == 0 && name == "ViveInterceptorHooked")
{
function = IntPtr.Zero;
return XrResult.XR_SUCCESS;
}
// Custom interceptors // Custom interceptors
if (name == "xrWaitFrame") if (name == "xrWaitFrame" && requiredFunctions.Contains(name))
{ {
Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted."); Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
var ret = XrGetInstanceProcAddrOriginal(instance, name, out function); var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
@@ -62,22 +78,105 @@ namespace VIVE.OpenXR
} }
return ret; return ret;
} }
if (name == "xrEndFrame" && requiredFunctions.Contains(name))
{
Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
if (ret == XrResult.XR_SUCCESS)
{
XrEndFrameOriginal = Marshal.GetDelegateForFunctionPointer<DelegateXrEndFrame>(function);
function = xrEndFrameInterceptorPtr;
}
return ret;
}
#if PERFORMANCE_TEST
if (name == "xrLocateSpace" && requiredFunctions.Contains(name))
{
Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
if (ret == XrResult.XR_SUCCESS)
{
XrLocateSpaceOriginal = Marshal.GetDelegateForFunctionPointer<DelegateXrLocateSpace>(function);
function = xrLocateSpaceInterceptorPtr;
}
return ret;
}
#endif
if (name == "xrPollEvent" && requiredFunctions.Contains(name))
{
Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
if (ret == XrResult.XR_SUCCESS)
{
xrPollEventOrigin = Marshal.GetDelegateForFunctionPointer < xrPollEventDelegate > (function);
function = xrPollEventPtr;
}
return ret;
}
if (name == "xrBeginSession" && requiredFunctions.Contains(name))
{
Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
if (ret == XrResult.XR_SUCCESS)
{
xrBeginSessionOrigin = Marshal.GetDelegateForFunctionPointer<xrBeginSessionDelegate>(function);
function = xrBeginSessionPtr;
}
return ret;
}
return XrGetInstanceProcAddrOriginal(instance, name, out function); return XrGetInstanceProcAddrOriginal(instance, name, out function);
} }
public IntPtr HookGetInstanceProcAddr(IntPtr func) public IntPtr HookGetInstanceProcAddr(IntPtr func)
{ {
Debug.Log($"{TAG}: registering our own xrGetInstanceProcAddr"); Debug.Log($"{TAG}: HookGetInstanceProcAddr");
if (XrGetInstanceProcAddrOriginal == null) if (XrGetInstanceProcAddrOriginal == null)
{ {
Debug.Log($"{TAG}: registering our own xrGetInstanceProcAddr");
XrGetInstanceProcAddrOriginal = Marshal.GetDelegateForFunctionPointer<DelegateXrGetInstanceProcAddr>(func); XrGetInstanceProcAddrOriginal = Marshal.GetDelegateForFunctionPointer<DelegateXrGetInstanceProcAddr>(func);
isInited = true;
#if UNITY_EDITOR
if (Application.isEditor) {
// This is a trick to check whether this class has already hooked the original function, because sometimes the static XrGetInstanceProcAddrOriginal does not behave as expected.
Debug.Log($"{TAG}: Check for a duplicate hook by querying instance=0 with the \"ViveInterceptorHooked\" name. If a loader error follows, ignore it.");
// E OpenXR-Loader: Error [SPEC | xrGetInstanceProcAddr | VUID-xrGetInstanceProcAddr-instance-parameter] : XR_NULL_HANDLE for instance but query for ViveInterceptorHooked requires a valid instance
// Call XrGetInstanceProcAddrOriginal to check if the original function is already hooked by this class
if (XrGetInstanceProcAddrOriginal(0, "ViveInterceptorHooked", out IntPtr function) == XrResult.XR_SUCCESS)
{
// If it is called successfully, it means the original function is already hooked. So we should return the original function.
Debug.Log($"{TAG}: Already hooked");
return func;
}
}
#endif
return hookGetInstanceProcAddrHandlePtr; return hookGetInstanceProcAddrHandlePtr;
} }
else else
{ {
// Don't return hookGetInstanceProcAddrHandlePtr again.
// If this hook function is called by multiple features, it should only take effect the first time.
// When called by other features, it should return the original function.
return func; return func;
} }
} }
static readonly List<string> requiredFunctions = new List<string>();
/// <summary>
/// Call before <see cref="HookGetInstanceProcAddr" /> to add required functions."/>
/// </summary>
/// <param name="name"></param>
public void AddRequiredFunction(string name)
{
if (requiredFunctions.Contains(name)) return;
Debug.Log($"{TAG}: AddRequiredFunction({name})");
requiredFunctions.Add(name);
} }
} }
}
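// Hook-up sketch (hypothetical feature class, following the usage described in the class summary
// above): required functions must be registered before the hook is installed, and only the first
// feature that installs the hook gets the interceptor pointer; later callers get the original back.
using System;
using UnityEngine.XR.OpenXR.Features;

namespace VIVE.OpenXR.Samples
{
	class MyInterceptingFeature : OpenXRFeature
	{
		protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
		{
			// Register the functions this feature wants intercepted, then install the hook.
			ViveInterceptors.Instance.AddRequiredFunction("xrWaitFrame");
			ViveInterceptors.Instance.AddRequiredFunction("xrPollEvent");
			return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
		}
	}
}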

View File

@@ -0,0 +1,85 @@
// Copyright HTC Corporation All Rights Reserved.
#define DEBUG
using AOT;
using System;
using System.Runtime.InteropServices;
using UnityEngine.Profiling;
using VIVE.OpenXR.FrameSynchronization;
namespace VIVE.OpenXR
{
partial class ViveInterceptors
{
#region xrBeginSession
public delegate XrResult xrBeginSessionDelegate(XrSession session, ref XrSessionBeginInfo beginInfo);
private static xrBeginSessionDelegate xrBeginSessionOrigin = null;
[MonoPInvokeCallback(typeof(xrBeginSessionDelegate))]
private static XrResult xrBeginSessionInterceptor(XrSession session, ref XrSessionBeginInfo beginInfo)
{
Profiler.BeginSample("ViveInterceptors:BeginSession");
XrResult result = XrResult.XR_ERROR_FUNCTION_UNSUPPORTED;
if (xrBeginSessionOrigin != null)
{
if (m_EnableFrameSynchronization)
{
frameSynchronizationSessionBeginInfo.mode = m_FrameSynchronizationMode;
frameSynchronizationSessionBeginInfo.next = beginInfo.next;
beginInfo.next = Marshal.AllocHGlobal(Marshal.SizeOf(frameSynchronizationSessionBeginInfo));
long offset = 0;
if (IntPtr.Size == 4)
offset = beginInfo.next.ToInt32();
else
offset = beginInfo.next.ToInt64();
IntPtr frame_synchronization_session_begin_info_ptr = new IntPtr(offset);
Marshal.StructureToPtr(frameSynchronizationSessionBeginInfo, frame_synchronization_session_begin_info_ptr, false);
#if DEBUG
if (IntPtr.Size == 4)
offset = beginInfo.next.ToInt32();
else
offset = beginInfo.next.ToInt64();
IntPtr fs_begin_info_ptr = new IntPtr(offset);
XrFrameSynchronizationSessionBeginInfoHTC fsBeginInfo = (XrFrameSynchronizationSessionBeginInfoHTC)Marshal.PtrToStructure(fs_begin_info_ptr, typeof(XrFrameSynchronizationSessionBeginInfoHTC));
sb.Clear().Append("xrBeginSessionInterceptor() beginInfo.next = (").Append(fsBeginInfo.type).Append(", ").Append(fsBeginInfo.mode).Append(")"); DEBUG(sb);
#endif
}
result = xrBeginSessionOrigin(session, ref beginInfo);
}
else
{
sb.Clear().Append("xrBeginSessionInterceptor() Not assign xrBeginSession!"); ERROR(sb);
}
Profiler.EndSample();
return result;
}
private static readonly xrBeginSessionDelegate xrBeginSession = new xrBeginSessionDelegate(xrBeginSessionInterceptor);
private static readonly IntPtr xrBeginSessionPtr = Marshal.GetFunctionPointerForDelegate(xrBeginSession);
#endregion
private static XrFrameSynchronizationSessionBeginInfoHTC frameSynchronizationSessionBeginInfo = XrFrameSynchronizationSessionBeginInfoHTC.identity;
private static bool m_EnableFrameSynchronization = false;
private static XrFrameSynchronizationModeHTC m_FrameSynchronizationMode = XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_STABILIZED_HTC;
/// <summary>
/// Activate or deactivate the Frame Synchronization feature.
/// </summary>
/// <param name="active">True for activate</param>
/// <param name="mode">The <see cref="XrFrameSynchronizationModeHTC"/> used for Frame Synchronization.</param>
public void ActivateFrameSynchronization(bool active, XrFrameSynchronizationModeHTC mode)
{
m_EnableFrameSynchronization = active;
m_FrameSynchronizationMode = mode;
sb.Clear().Append("ActivateFrameSynchronization() ").Append(active ? "enable " : "disable ").Append(mode); DEBUG(sb);
}
}
}
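// Illustrative call-order sketch (assumption, not from the diff): xrBeginSession must be
// registered for interception before the hook is installed, and ActivateFrameSynchronization
// must run before the runtime begins the session so the HTC begin-info struct is chained into
// beginInfo.next by the interceptor above.
using VIVE.OpenXR.FrameSynchronization;

namespace VIVE.OpenXR.Samples
{
	static class FrameSynchronizationSketch
	{
		// Assumed to be called early, e.g. from a feature's hook/instance-creation path.
		public static void Enable()
		{
			ViveInterceptors.Instance.AddRequiredFunction("xrBeginSession");
			ViveInterceptors.Instance.ActivateFrameSynchronization(
				true, XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_STABILIZED_HTC);
		}
	}
}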

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8c222b96d7eb4ca4bb6390e07b1967bb
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,129 @@
// Copyright HTC Corporation All Rights Reserved.
using AOT;
using System;
using System.Runtime.InteropServices;
using UnityEngine.Profiling;
using VIVE.OpenXR.DisplayRefreshRate;
using VIVE.OpenXR.Passthrough;
using VIVE.OpenXR.UserPresence;
namespace VIVE.OpenXR
{
partial class ViveInterceptors
{
#region xrPollEvent
public delegate XrResult xrPollEventDelegate(XrInstance instance, ref XrEventDataBuffer eventData);
private static xrPollEventDelegate xrPollEventOrigin = null;
[MonoPInvokeCallback(typeof(xrPollEventDelegate))]
private static XrResult xrPollEventInterceptor(XrInstance instance, ref XrEventDataBuffer eventData)
{
Profiler.BeginSample("ViveInterceptors:WaitFrame");
XrResult result = XrResult.XR_SUCCESS;
if (xrPollEventOrigin != null)
{
result = xrPollEventOrigin(instance, ref eventData);
if (result == XrResult.XR_SUCCESS)
{
sb.Clear().Append("xrPollEventInterceptor() xrPollEvent ").Append(eventData.type); DEBUG(sb);
switch(eventData.type)
{
case XrStructureType.XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_RATE_CHANGED_HTC:
if (XrEventDataPassthroughConfigurationImageRateChangedHTC.Get(eventData, out XrEventDataPassthroughConfigurationImageRateChangedHTC eventDataPassthroughConfigurationImageRate))
{
fromImageRate = eventDataPassthroughConfigurationImageRate.fromImageRate;
toImageRate = eventDataPassthroughConfigurationImageRate.toImageRate;
sb.Clear().Append("xrPollEventInterceptor() XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_RATE_CHANGED_HTC")
.Append(", fromImageRate.srcImageRate: ").Append(fromImageRate.srcImageRate)
.Append(", fromImageRatesrc.dstImageRate: ").Append(fromImageRate.dstImageRate)
.Append(", toImageRate.srcImageRate: ").Append(toImageRate.srcImageRate)
.Append(", toImageRate.dstImageRate: ").Append(toImageRate.dstImageRate);
DEBUG(sb);
VivePassthroughImageRateChanged.Send(fromImageRate.srcImageRate, fromImageRate.dstImageRate, toImageRate.srcImageRate, toImageRate.dstImageRate);
}
break;
case XrStructureType.XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_QUALITY_CHANGED_HTC:
if (XrEventDataPassthroughConfigurationImageQualityChangedHTC.Get(eventData, out XrEventDataPassthroughConfigurationImageQualityChangedHTC eventDataPassthroughConfigurationImageQuality))
{
fromImageQuality = eventDataPassthroughConfigurationImageQuality.fromImageQuality;
toImageQuality = eventDataPassthroughConfigurationImageQuality.toImageQuality;
sb.Clear().Append("xrPollEventInterceptor() XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_QUALITY_CHANGED_HTC")
.Append(", fromImageQuality: ").Append(fromImageQuality.scale)
.Append(", toImageQuality: ").Append(toImageQuality.scale);
DEBUG(sb);
VivePassthroughImageQualityChanged.Send(fromImageQuality.scale, toImageQuality.scale);
}
break;
case XrStructureType.XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB:
if(XrEventDataDisplayRefreshRateChangedFB.Get(eventData, out XrEventDataDisplayRefreshRateChangedFB eventDataDisplayRefreshRate))
{
fromDisplayRefreshRate = eventDataDisplayRefreshRate.fromDisplayRefreshRate;
toDisplayRefreshRate = eventDataDisplayRefreshRate.toDisplayRefreshRate;
sb.Clear().Append("xrPollEventInterceptor() XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB")
.Append(", fromDisplayRefreshRate: ").Append(fromDisplayRefreshRate)
.Append(", toDisplayRefreshRate: ").Append(toDisplayRefreshRate);
DEBUG(sb);
ViveDisplayRefreshRateChanged.Send(fromDisplayRefreshRate, toDisplayRefreshRate);
}
break;
case XrStructureType.XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED:
if (XrEventDataSessionStateChanged.Get(eventData, out XrEventDataSessionStateChanged eventDataSession))
{
switch(eventDataSession.state)
{
case XrSessionState.XR_SESSION_STATE_READY:
isUserPresent = true;
break;
case XrSessionState.XR_SESSION_STATE_STOPPING:
isUserPresent = false;
break;
default:
break;
}
sb.Clear().Append("xrPollEventInterceptor() XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED")
.Append(", session: ").Append(eventDataSession.session)
.Append(", state: ").Append(eventDataSession.state)
.Append(", isUserPresent: ").Append(isUserPresent);
DEBUG(sb);
}
break;
case XrStructureType.XR_TYPE_EVENT_DATA_USER_PRESENCE_CHANGED_EXT:
if (XrEventDataUserPresenceChangedEXT.Get(eventData, out XrEventDataUserPresenceChangedEXT eventDataUserPresence))
{
isUserPresent = eventDataUserPresence.isUserPresent;
sb.Clear().Append("xrPollEventInterceptor() XR_TYPE_EVENT_DATA_USER_PRESENCE_CHANGED_EXT")
.Append(", session: ").Append(eventDataUserPresence.session)
.Append(", isUserPresent: ").Append(isUserPresent);
DEBUG(sb);
}
break;
default:
break;
}
}
//sb.Clear().Append("xrPollEventInterceptor() xrPollEvent result: ").Append(result).Append(", isUserPresent: ").Append(isUserPresent); DEBUG(sb);
}
Profiler.EndSample();
return result;
}
private static readonly xrPollEventDelegate xrPollEvent = new xrPollEventDelegate(xrPollEventInterceptor);
private static readonly IntPtr xrPollEventPtr = Marshal.GetFunctionPointerForDelegate(xrPollEvent);
#endregion
private static bool isUserPresent = true;
public bool IsUserPresent() { return isUserPresent; }
private static float fromDisplayRefreshRate, toDisplayRefreshRate;
public float FromDisplayRefreshRate() { return fromDisplayRefreshRate; }
public float ToDisplayRefreshRate() { return toDisplayRefreshRate; }
private static XrPassthroughConfigurationImageRateHTC fromImageRate, toImageRate;
private static XrPassthroughConfigurationImageQualityHTC fromImageQuality, toImageQuality;
}
}
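// Illustrative sketch: once xrPollEvent is intercepted, user presence and display refresh rate
// changes are cached on the interceptor instance and can be read without parsing
// XrEventDataBuffer directly.
using UnityEngine;

namespace VIVE.OpenXR.Samples
{
	class PresenceWatcherSketch : MonoBehaviour
	{
		void Update()
		{
			if (!ViveInterceptors.Instance.IsUserPresent())
			{
				// e.g. pause the experience while the headset is not worn
				Debug.Log("User not present; refresh rate " +
					ViveInterceptors.Instance.FromDisplayRefreshRate() + " -> " +
					ViveInterceptors.Instance.ToDisplayRefreshRate());
			}
		}
	}
}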

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c2cc5716d3f563f49a47da6c1bd8ccbe
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,89 @@
// Copyright HTC Corporation All Rights Reserved.
using System.Runtime.InteropServices;
using System;
using AOT;
using UnityEngine.Profiling;
namespace VIVE.OpenXR
{
partial class ViveInterceptors
{
public struct XrCompositionLayerBaseHeader
{
public XrStructureType type; // This base structure itself has no associated XrStructureType value.
public System.IntPtr next;
public XrCompositionLayerFlags layerFlags;
public XrSpace space;
}
public struct XrFrameEndInfo
{
public XrStructureType type;
public System.IntPtr next;
public XrTime displayTime;
public XrEnvironmentBlendMode environmentBlendMode;
public uint layerCount;
public IntPtr layers; // XrCompositionLayerBaseHeader IntPtr array
}
public delegate XrResult DelegateXrEndFrame(XrSession session, ref XrFrameEndInfo frameEndInfo);
private static readonly DelegateXrEndFrame xrEndFrameInterceptorHandle = new DelegateXrEndFrame(XrEndFrameInterceptor);
private static readonly IntPtr xrEndFrameInterceptorPtr = Marshal.GetFunctionPointerForDelegate(xrEndFrameInterceptorHandle);
static DelegateXrEndFrame XrEndFrameOriginal = null;
[MonoPInvokeCallback(typeof(DelegateXrEndFrame))]
private static XrResult XrEndFrameInterceptor(XrSession session, ref XrFrameEndInfo frameEndInfo)
{
// instance must not be null
//if (instance == null)
// return XrEndFrameOriginal(session, ref frameEndInfo);
Profiler.BeginSample("VI:EndFrame");
XrResult result = XrResult.XR_SUCCESS;
if (instance.BeforeOriginalEndFrame != null &&
!instance.BeforeOriginalEndFrame(session, ref frameEndInfo, ref result))
{
Profiler.EndSample();
return result;
}
result = XrEndFrameOriginal(session, ref frameEndInfo);
instance.AfterOriginalEndFrame?.Invoke(session, ref frameEndInfo, ref result);
Profiler.EndSample();
return result;
}
/// <summary>
/// If you return false, the original function will not be called.
/// </summary>
/// <param name="session"></param>
/// <param name="frameEndInfo"></param>
/// <param name="result"></param>
/// <returns></returns>
public delegate bool DelegateXrEndFrameInterceptor(XrSession session, ref XrFrameEndInfo frameEndInfo, ref XrResult result);
/// <summary>
/// Use this to intercept the original function. This will be called before the original function.
/// </summary>
public DelegateXrEndFrameInterceptor BeforeOriginalEndFrame;
/// <summary>
/// Use this to intercept the original function. This will be called after the original function.
/// </summary>
public DelegateXrEndFrameInterceptor AfterOriginalEndFrame;
#if PERFORMANCE_TEST
public delegate XrResult DelegateXrLocateSpace(XrSpace space, XrSpace baseSpace, XrTime time, ref XrSpaceLocation location);
private static readonly DelegateXrLocateSpace xrLocateSpaceInterceptorHandle = new DelegateXrLocateSpace(XrLocateSpaceInterceptor);
private static readonly IntPtr xrLocateSpaceInterceptorPtr = Marshal.GetFunctionPointerForDelegate(xrLocateSpaceInterceptorHandle);
static DelegateXrLocateSpace XrLocateSpaceOriginal = null;
[MonoPInvokeCallback(typeof(DelegateXrLocateSpace))]
public static XrResult XrLocateSpaceInterceptor(XrSpace space, XrSpace baseSpace, XrTime time, ref XrSpaceLocation location)
{
Profiler.BeginSample("VI:LocateSpace");
var ret = XrLocateSpaceOriginal(space, baseSpace, time, ref location);
Profiler.EndSample();
return ret;
}
#endif
}
}
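// Illustrative sketch: observing (or vetoing) xrEndFrame through the delegates above. Returning
// false from the "before" callback skips the runtime call and returns 'result' to the caller;
// returning true lets the original xrEndFrame run unchanged. The MonoBehaviour wrapper is an
// assumption for illustration.
using UnityEngine;

namespace VIVE.OpenXR.Samples
{
	class EndFrameObserverSketch : MonoBehaviour
	{
		void OnEnable() { ViveInterceptors.Instance.BeforeOriginalEndFrame += OnBeforeEndFrame; }
		void OnDisable() { ViveInterceptors.Instance.BeforeOriginalEndFrame -= OnBeforeEndFrame; }

		static bool OnBeforeEndFrame(XrSession session, ref ViveInterceptors.XrFrameEndInfo frameEndInfo, ref XrResult result)
		{
			// Observe only (this runs on the rendering path, so avoid engine calls here);
			// return true so the original xrEndFrame still runs.
			return true;
		}
	}
}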

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6bf7cf55d82ac6343b4eda92d1197a66
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -3,6 +3,7 @@ using System.Runtime.InteropServices;
using System; using System;
using UnityEngine; using UnityEngine;
using AOT; using AOT;
using UnityEngine.Profiling;
namespace VIVE.OpenXR namespace VIVE.OpenXR
{ {
@@ -24,6 +25,8 @@ namespace VIVE.OpenXR
public XrBool32 shouldRender; public XrBool32 shouldRender;
} }
bool isWaitFrameIntercepted = false;
public delegate XrResult DelegateXrWaitFrame(XrSession session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState); public delegate XrResult DelegateXrWaitFrame(XrSession session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState);
private static readonly DelegateXrWaitFrame xrWaitFrameInterceptorHandle = new DelegateXrWaitFrame(XrWaitFrameInterceptor); private static readonly DelegateXrWaitFrame xrWaitFrameInterceptorHandle = new DelegateXrWaitFrame(XrWaitFrameInterceptor);
private static readonly IntPtr xrWaitFrameInterceptorPtr = Marshal.GetFunctionPointerForDelegate(xrWaitFrameInterceptorHandle); private static readonly IntPtr xrWaitFrameInterceptorPtr = Marshal.GetFunctionPointerForDelegate(xrWaitFrameInterceptorHandle);
@@ -32,30 +35,74 @@ namespace VIVE.OpenXR
[MonoPInvokeCallback(typeof(DelegateXrWaitFrame))] [MonoPInvokeCallback(typeof(DelegateXrWaitFrame))]
private static XrResult XrWaitFrameInterceptor(XrSession session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState) private static XrResult XrWaitFrameInterceptor(XrSession session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState)
{ {
// instance must not be null
//if (instance == null)
// return XrWaitFrameOriginal(session, ref frameWaitInfo, ref frameState);
Profiler.BeginSample("VI:WaitFrame");
instance.isWaitFrameIntercepted = true;
XrResult result = XrResult.XR_SUCCESS;
if (instance.BeforeOriginalWaitFrame != null &&
!instance.BeforeOriginalWaitFrame(session, ref frameWaitInfo, ref frameState, ref result))
{
Profiler.EndSample();
return result;
}
var ret = XrWaitFrameOriginal(session, ref frameWaitInfo, ref frameState); var ret = XrWaitFrameOriginal(session, ref frameWaitInfo, ref frameState);
instance.AfterOriginalWaitFrame?.Invoke(session, ref frameWaitInfo, ref frameState, ref result);
currentFrameState = frameState; currentFrameState = frameState;
return ret; Profiler.EndSample();
return result;
} }
static XrFrameState currentFrameState = new XrFrameState() { predictedDisplayTime = 0 }; static XrFrameState currentFrameState = new XrFrameState() { predictedDisplayTime = 0 };
/// <summary>
/// Gets xrWaitFrame's result (XrFrameState). The value read during Update() does not match the current frame; use it after onBeforeRender.
/// </summary>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public XrFrameState GetCurrentFrameState() public XrFrameState GetCurrentFrameState()
{ {
if (!isInited) throw new Exception("ViveInterceptors is not inited"); if (!isWaitFrameIntercepted) throw new Exception("ViveInterceptors is not intercepted");
return currentFrameState; return currentFrameState;
} }
/// <summary>
/// xrWaitFrame must be intercepted before calling this function. The value read during Update() does not match the current frame; use it after onBeforeRender.
/// </summary>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public XrTime GetPredictTime() public XrTime GetPredictTime()
{ {
if (!isInited) throw new Exception("ViveInterceptors is not inited"); if (!isWaitFrameIntercepted) throw new Exception("ViveInterceptors is not intercepted");
Debug.Log($"{TAG}: XrWaitFrameInterceptor(predictedDisplayTime={currentFrameState.predictedDisplayTime}"); //Debug.Log($"{TAG}: XrWaitFrameInterceptor(predictedDisplayTime={currentFrameState.predictedDisplayTime}");
if (currentFrameState.predictedDisplayTime == 0) if (currentFrameState.predictedDisplayTime == 0)
return new XrTime((long)(1000000L * (Time.unscaledTimeAsDouble + 0.011f))); return new XrTime((long)(1000000L * (Time.unscaledTimeAsDouble + 0.011f)));
else else
return currentFrameState.predictedDisplayTime; return currentFrameState.predictedDisplayTime;
} }
/// <summary>
/// Delegate for intercepting xrWaitFrame. If the callback returns false, the original function will not be called.
/// </summary>
/// <param name="session"></param>
/// <param name="frameWaitInfo"></param>
/// <param name="frameState"></param>
/// <param name="result"></param>
/// <returns></returns>
public delegate bool DelegateXrWaitFrameInterceptor(XrSession session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState, ref XrResult result);
/// <summary>
/// Use this to intercept the original function. This will be called before the original function.
/// </summary>
public DelegateXrWaitFrameInterceptor BeforeOriginalWaitFrame;
/// <summary>
/// Use this to intercept the original function. This will be called after the original function.
/// </summary>
public DelegateXrWaitFrameInterceptor AfterOriginalWaitFrame;
#endregion XRWaitFrame #endregion XRWaitFrame
} }
} }
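// Illustrative sketch: read the cached frame state late in the frame. As the summaries above
// note, the value available during Update() does not match the current frame, so it is read
// from Application.onBeforeRender here. The MonoBehaviour wrapper is an assumption for illustration.
using UnityEngine;

namespace VIVE.OpenXR.Samples
{
	class PredictTimeReaderSketch : MonoBehaviour
	{
		void OnEnable() { Application.onBeforeRender += ReadPredictTime; }
		void OnDisable() { Application.onBeforeRender -= ReadPredictTime; }

		void ReadPredictTime()
		{
			// Throws if xrWaitFrame has not been intercepted yet (see GetPredictTime above).
			XrTime predicted = ViveInterceptors.Instance.GetPredictTime();
			// Use 'predicted' for time-sensitive queries such as xrLocateSpace.
		}
	}
}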

View File

@@ -0,0 +1,329 @@
using AOT;
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
namespace VIVE.OpenXR.Common.RenderThread
{
#region syncObject
public class Message
{
public bool isFree = true;
}
/// <summary>
/// MessagePool manages a pool of message objects for reuse. Any Message subtype can be stored,
/// but a message obtained from the pool cannot be cast to the expected type if types are mixed.
/// Use only one kind of message per pool; do not mix different message types.
/// </summary>
public class MessagePool
{
// pool member is used to store message objects in a list.
// Note that the size of this list will dynamically adjust as needed but will not automatically shrink.
private readonly List<Message> pool = new List<Message>(2) { };
private int index = 0;
public MessagePool() { }
// Next method calculates the next index value for cycling through message objects in the pool.
private int Next(int value)
{
if (++value >= pool.Count)
value = 0;
return value;
}
// Obtain method retrieves a message object from the pool.
// Set up the message's state after retrieval, and call Release() on the message after use.
public T Obtain<T>() where T : Message, new()
{
int c = pool.Count;
int i = index;
for (int j = 0; j < c; i++, j++)
{
if (i >= c)
i = 0;
if (pool[i].isFree)
{
//Debug.LogError("Obtain idx=" + i);
index = i;
return (T)pool[i];
}
}
index = Next(i);
var newItem = new T()
{
isFree = true
};
pool.Insert(index, newItem);
Debug.Log("RT.MessagePool.Obtain() pool count=" + pool.Count);
return newItem;
}
// Lock marks a message as "in use" so other code cannot reuse it.
// Enqueue() already calls this on the message, so it rarely needs to be called directly.
public static void Lock(Message msg)
{
msg.isFree = false;
}
/// <summary>
/// Release method marks a message as "free" so that other code can reuse it.
/// It can be used on the render thread and does not trigger GC allocations.
/// </summary>
/// <param name="msg"></param>
public static void Release(Message msg)
{
msg.isFree = true;
}
}
/// <summary>
/// PreAllocatedQueue class is a message queue based on MessagePool for preallocating message objects.
/// Its main functionality is to add message objects to the queue and retrieve them from the queue.
/// Messages should be enqueued on the game thread and dequeued on the render thread.
/// Dequeuing does not trigger GC allocations because the queue is preallocated.
/// The 'lock' statement is not used for the list's size changes because locking should be avoided on the render thread.
/// Set queueSize to twice the number of messages passed to the render thread per frame; the list then
/// never changes size at runtime, so no lock is needed to protect it.
/// </summary>
public class PreAllocatedQueue : MessagePool
{
// list member is used to store preallocated message objects in a list.
// Note that the size of this list is set during initialization and does not dynamically adjust.
private List<Message> list = new List<Message>();
private int queueBegin = 0;
private int queueEnd = 0;
/// <summary>
/// queueSize should be twice the number of messages passed to the render thread in one frame.
/// </summary>
/// <param name="queueSize"></param>
public PreAllocatedQueue(int queueSize = 2) : base() {
for (int i = 0; i < queueSize; i++)
{
list.Add(null);
}
}
private int Next(int value)
{
if (++value >= list.Count)
value = 0;
return value;
}
/// <summary>
/// Enqueue method adds a message object to the queue.
/// If the queue is full, the new message is added to the end of the list.
///
/// This function is designed for message objects obtained from the MessagePool.
/// Ensure only one type of message object is used in the queue.
///
/// Enqueue increases the queue size if the queue is full, which may trigger GC.Alloc.
/// This function should be used on the game thread.
/// </summary>
/// <param name="msg"></param>
public void Enqueue(Message msg)
{
Lock(msg);
queueEnd = Next(queueEnd);
// If the queue is full, the message is inserted into the list. This should not happen;
// use a larger queue size to avoid it.
// If the error log below appears, increase the queue size in your design.
if (queueEnd == queueBegin)
{
// Insert and the queueBegin update should be atomic; there is no lock protection here.
list.Insert(queueEnd, msg);
queueBegin++;
Debug.LogError("RT.MessagePool.Enqueue() list count=" + list.Count);
}
else
{
list[queueEnd] = msg;
}
}
/// <summary>
/// Dequeue method retrieves a message object from the queue.
/// This method returns the first message object in the queue and removes it from the queue.
/// This function does not trigger GC allocations and is safe to use on the render thread.
/// After using the message, call Release() on it.
/// </summary>
/// <returns></returns>
public Message Dequeue()
{
// No lock protection here. If the list does not change size, this is safe;
// if the list does change size, it is still safe in most cases.
queueBegin = Next(queueBegin);
return list[queueBegin];
}
}
/// <summary>
/// RenderThreadTask executes a specified task on the rendering thread.
/// You do not need to write a native plugin function to run your task on the rendering thread,
/// nor design your own way to pass data to it.
/// This class also runs in the Unity Editor since Unity 2021; testing there can save time.
///
/// Only create RenderThreadTask instances as static readonly fields. Do not create them dynamically.
///
/// Do not run UnityEngine code on the render thread; it can hang the engine.
/// Exceptions thrown on the render thread are not caught or shown.
/// Print your own error messages to clarify issues.
///
/// The 'lock' statement is not used here because it is unnecessary in this design
/// and would hurt performance; the whole design exists to avoid needing 'lock'.
/// </summary>
public class RenderThreadTask
{
private static IntPtr GetFunctionPointerForDelegate(Delegate del)
{
return System.Runtime.InteropServices.Marshal.GetFunctionPointerForDelegate(del);
}
public delegate void RenderEventDelegate(int e);
private static readonly RenderEventDelegate handle = new RenderEventDelegate(RunSyncObjectInRenderThread);
private static readonly IntPtr handlePtr = GetFunctionPointerForDelegate(handle);
public delegate void Receiver(PreAllocatedQueue dataQueue);
// CommandList is used to store all RenderThreadTask objects.
// Do not create RenderThreadTask objects dynamically; that would make CommandList grow indefinitely.
private static List<RenderThreadTask> CommandList = new List<RenderThreadTask>();
private PreAllocatedQueue queue;
public PreAllocatedQueue Queue { get { return queue; } }
private readonly Receiver receiver;
private readonly int id;
/// <summary>
/// Pass the receiver as the render-thread callback; it will be executed on the render thread.
/// queueSize should be twice the number of messages passed to the render thread in one frame.
/// </summary>
/// <param name="render">The callback in render thread.</param>
/// <param name="queueSize">If issue this event once in a frame, set queueSize as 2.</param>
/// <exception cref="ArgumentNullException"></exception>
public RenderThreadTask(Receiver render, int queueSize = 2)
{
queue = new PreAllocatedQueue(queueSize);
receiver = render;
if (receiver == null)
throw new ArgumentNullException("receiver should not be null");
CommandList.Add(this);
id = CommandList.IndexOf(this);
}
~RenderThreadTask()
{
try { CommandList.RemoveAt(id); } finally { }
}
void IssuePluginEvent(IntPtr callback, int eventID)
{
// Older Unity versions hang after running a script on the render thread.
GL.IssuePluginEvent(callback, eventID);
return;
}
void IssuePluginEvent(CommandBuffer cmdBuf, IntPtr callback, int eventID)
{
cmdBuf.IssuePluginEvent(callback, eventID);
return;
}
/// <summary>
/// IssueEvent method submits this task's receiver, which is set in constructor, to be executed on the rendering thread.
/// </summary>
public void IssueEvent()
{
// Let the render thread run the RunSyncObjectInRenderThread(id)
IssuePluginEvent(handlePtr, id);
}
public void IssueInCommandBuffer(CommandBuffer cmdBuf)
{
// Let the render thread run the RunSyncObjectInRenderThread(id)
IssuePluginEvent(cmdBuf, handlePtr, id);
}
// Called by RunSyncObjectInRenderThread()
private void Receive()
{
receiver(queue);
}
// RunSyncObjectInRenderThread method is a static method used to execute a specified task on the rendering thread.
// This method is invoked by Unity's rendering event mechanism and does not need to be called directly by developers.
[MonoPInvokeCallback(typeof(RenderEventDelegate))]
private static void RunSyncObjectInRenderThread(int id)
{
CommandList[id].Receive();
}
}
#endregion
#region sample
// This sample is for reference only and is not compiled into builds (UNITY_EDITOR only). It runs in the Unity Editor.
#if UNITY_EDITOR
public class ViveRenderThreadTaskSample : MonoBehaviour
{
// Create your own message class.
internal class SampleMessage : Message
{
public int dataPassedToRenderThread;
}
// Use static readonly to create RenderThreadTask. Keep it internal to avoid misuse by other developers.
internal static readonly RenderThreadTask sampleRenderThreadTask1 = new RenderThreadTask(SampleReceiver1);
// Different tasks use different RenderThreadTask instances and different receivers.
internal static readonly RenderThreadTask sampleRenderThreadTask2 = new RenderThreadTask(SampleReceiver2);
private static void SampleReceiver1(PreAllocatedQueue dataQueue)
{
var msg = dataQueue.Dequeue() as SampleMessage;
// No need to null-check msg because the design should guarantee the message type.
// Copy the data to a local variable and release msg early; do not keep the msg instance itself.
var data = msg.dataPassedToRenderThread;
// Make sure to release the msg when finished; otherwise memory keeps growing on every Obtain().
MessagePool.Release(msg);
Debug.Log("Task1, the data passed to render thread: " + data);
}
private static void SampleReceiver2(PreAllocatedQueue dataQueue)
{
var msg = dataQueue.Dequeue() as SampleMessage;
var data = msg.dataPassedToRenderThread;
MessagePool.Release(msg);
Debug.Log("Task2, the data passed to render thread: " + data);
}
// Send a message to the render thread every frame.
private void Update()
{
// Make sure only one kind of message object is used in the queue.
var msg = sampleRenderThreadTask1.Queue.Obtain<SampleMessage>();
msg.dataPassedToRenderThread = 123;
sampleRenderThreadTask1.Queue.Enqueue(msg);
sampleRenderThreadTask1.IssueEvent();
}
// Send a message to the render thread when something is clicked. Allow only one click per frame because the queue size is only two.
public void OnClicked()
{
// Reusing the same message type is OK.
var msg = sampleRenderThreadTask2.Queue.Obtain<SampleMessage>();
msg.dataPassedToRenderThread = 234;
sampleRenderThreadTask2.Queue.Enqueue(msg);
sampleRenderThreadTask2.IssueEvent();
}
}
#endif
#endregion
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 251b4bedf6420fc4e84be778e501343f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -99,6 +99,12 @@ namespace VIVE.OpenXR.CompositionLayer
[SerializeField] [SerializeField]
public bool isExternalSurface = false; public bool isExternalSurface = false;
[SerializeField]
public bool isCustomRects = false;
[SerializeField]
public CustomRectsType customRects = CustomRectsType.TopDown;
[Tooltip("Width of external surface in pixels.")] [Tooltip("Width of external surface in pixels.")]
[SerializeField] [SerializeField]
public uint externalSurfaceWidth = 1280; public uint externalSurfaceWidth = 1280;
@@ -122,9 +128,13 @@ namespace VIVE.OpenXR.CompositionLayer
[SerializeField] [SerializeField]
public bool isProtectedSurface = false; public bool isProtectedSurface = false;
[SerializeField]
public Texture texture = null; public Texture texture = null;
private Texture m_TextureLeft => texture;
public Texture textureLeft { get { return m_TextureLeft; } }
public Texture textureRight = null;
[SerializeField] [SerializeField]
private uint renderPriority = 0; private uint renderPriority = 0;
public uint GetRenderPriority() { return renderPriority; } public uint GetRenderPriority() { return renderPriority; }
@@ -150,7 +160,7 @@ namespace VIVE.OpenXR.CompositionLayer
private MeshRenderer generatedFallbackMeshRenderer = null; private MeshRenderer generatedFallbackMeshRenderer = null;
private MeshFilter generatedFallbackMeshFilter = null; private MeshFilter generatedFallbackMeshFilter = null;
private LayerTextures layerTextures; private LayerTextures[] layerTextures = new LayerTextures[] {null, null};
private Material texture2DBlitMaterial; private Material texture2DBlitMaterial;
private GameObject compositionLayerPlaceholderPrefabGO = null; private GameObject compositionLayerPlaceholderPrefabGO = null;
@@ -165,10 +175,12 @@ namespace VIVE.OpenXR.CompositionLayer
private float previousCylinderArcLength = 1f; private float previousCylinderArcLength = 1f;
private float previousCylinderRadius = 1f; private float previousCylinderRadius = 1f;
private float previousAngleOfArc = 180f; private float previousAngleOfArc = 180f;
private Texture previousTexture = null; private Texture previousTextureLeft = null;
private Texture previousTextureRight = null;
private bool previousIsDynamicLayer = false; private bool previousIsDynamicLayer = false;
private int layerID; //For native private int layerID; //For native
private int layerIDRight; //For native
private bool isHeadLock = false; private bool isHeadLock = false;
private bool InitStatus = false; private bool InitStatus = false;
private bool isInitializationComplete = false; private bool isInitializationComplete = false;
@@ -232,10 +244,27 @@ namespace VIVE.OpenXR.CompositionLayer
if (layerID != 0) if (layerID != 0)
{ {
DEBUG("Init completed, ID: " + layerID); DEBUG("Init completed, ID: " + layerID);
layerTextures = new LayerTextures(imageCount); layerTextures[0] = new LayerTextures(imageCount);
InitStatus = true; InitStatus = true;
} }
if (textureRight != null && textureLeft != textureRight) {
layerIDRight = compositionLayerFeature.CompositionLayer_Init(externalSurfaceWidth, externalSurfaceHeight, graphicsAPI, isDynamicLayer, isProtectedSurface, out imageCount, true);
if (layerIDRight != 0)
{
DEBUG("Init completed, ID right: " + layerIDRight);
layerTextures[1] = new LayerTextures(imageCount);
}
}
else if (isCustomRects) {
layerIDRight = compositionLayerFeature.CompositionLayer_Init(externalSurfaceWidth, externalSurfaceHeight, graphicsAPI, isDynamicLayer, isProtectedSurface, out imageCount, true);
if (layerIDRight != 0)
{
DEBUG("Init completed, ID right: " + layerIDRight);
layerTextures[1] = new LayerTextures(imageCount);
}
}
taskQueue.Release(task); taskQueue.Release(task);
} }
}); });
@@ -243,6 +272,7 @@ namespace VIVE.OpenXR.CompositionLayer
CompositionLayerRenderThreadTask.IssueObtainSwapchainEvent(SetupExternalAndroidSurfaceSyncObjects); CompositionLayerRenderThreadTask.IssueObtainSwapchainEvent(SetupExternalAndroidSurfaceSyncObjects);
texture = new Texture2D((int)externalSurfaceWidth, (int)externalSurfaceHeight, TextureFormat.RGBA32, false, isLinear); texture = new Texture2D((int)externalSurfaceWidth, (int)externalSurfaceHeight, TextureFormat.RGBA32, false, isLinear);
textureRight = new Texture2D((int)externalSurfaceWidth, (int)externalSurfaceHeight, TextureFormat.RGBA32, false, isLinear);
DEBUG("CompositionLayerInit Ext Surf"); DEBUG("CompositionLayerInit Ext Surf");
@@ -250,16 +280,23 @@ namespace VIVE.OpenXR.CompositionLayer
return true; return true;
} }
if (texture == null) if (textureLeft == null)
{ {
ERROR("CompositionLayerInit: Source Texture not found, abort init."); ERROR("CompositionLayerInit: Source Texture not found, abort init.");
return false; return false;
} }
if (textureLeft != null && textureRight == null)
{
DEBUG("CompositionLayerInit: Using Left Texture as Right Texture.");
textureRight = textureLeft;
}
DEBUG("CompositionLayerInit"); DEBUG("CompositionLayerInit");
uint textureWidth = (uint)texture.width; uint textureWidth = (uint)textureLeft.width;
uint textureHeight = (uint)texture.height; uint textureHeight = (uint)textureLeft.height;
DEBUG("Init : textureWidth = " + textureWidth + " textureHeight = " + textureHeight);
CompositionLayerRenderThreadSyncObject ObtainLayerSwapchainSyncObject = new CompositionLayerRenderThreadSyncObject( CompositionLayerRenderThreadSyncObject ObtainLayerSwapchainSyncObject = new CompositionLayerRenderThreadSyncObject(
(taskQueue) => (taskQueue) =>
@@ -293,9 +330,28 @@ namespace VIVE.OpenXR.CompositionLayer
if (layerID != 0) if (layerID != 0)
{ {
DEBUG("Init completed, ID: " + layerID + ", Image Count: " + imageCount); DEBUG("Init completed, ID: " + layerID + ", Image Count: " + imageCount);
layerTextures = new LayerTextures(imageCount); layerTextures[0] = new LayerTextures(imageCount);
InitStatus = true; InitStatus = true;
} }
if (textureRight != null && textureLeft != textureRight) {
layerIDRight = compositionLayerFeature.CompositionLayer_Init(textureWidth, textureHeight, graphicsAPI, isDynamicLayer, isProtectedSurface, out imageCount);
if (layerIDRight != 0)
{
DEBUG("Init completed, ID Right: " + layerIDRight + ", Image Count: " + imageCount);
layerTextures[1] = new LayerTextures(imageCount);
}
}
else if (isCustomRects)
{
layerIDRight = compositionLayerFeature.CompositionLayer_Init(textureWidth, textureHeight, graphicsAPI, isDynamicLayer, isProtectedSurface, out imageCount);
if (layerIDRight != 0)
{
DEBUG("Init completed, ID Right: " + layerIDRight + ", Image Count: " + imageCount);
layerTextures[1] = new LayerTextures(imageCount);
}
}
taskQueue.Release(task); taskQueue.Release(task);
} }
@@ -310,18 +366,20 @@ namespace VIVE.OpenXR.CompositionLayer
previousCylinderArcLength = m_CylinderArcLength; previousCylinderArcLength = m_CylinderArcLength;
previousCylinderRadius = m_CylinderRadius; previousCylinderRadius = m_CylinderRadius;
previousAngleOfArc = m_CylinderAngleOfArc; previousAngleOfArc = m_CylinderAngleOfArc;
previousTexture = texture; previousTextureLeft = textureLeft;
previousTextureRight = textureRight;
previousIsDynamicLayer = isDynamicLayer; previousIsDynamicLayer = isDynamicLayer;
return true; return true;
} }
private bool textureAcquired = false; private bool[] textureAcquired = new bool[] {false, false};
private bool textureAcquiredOnce = false; private bool[] textureAcquiredOnce = new bool[] {false, false};
XrOffset2Di offset = new XrOffset2Di(); XrOffset2Di offset = new XrOffset2Di();
XrExtent2Di extent = new XrExtent2Di(); XrExtent2Di extent = new XrExtent2Di();
XrRect2Di rect = new XrRect2Di(); XrRect2Di rect = new XrRect2Di();
private bool SetLayerTexture()
private bool SetLayerTexture(int eyeid)
{ {
if (!isInitializationComplete || !isSynchronized) return false; if (!isInitializationComplete || !isSynchronized) return false;
@@ -332,10 +390,24 @@ namespace VIVE.OpenXR.CompositionLayer
offset.y = (int)externalSurfaceHeight; offset.y = (int)externalSurfaceHeight;
extent.width = (int)externalSurfaceWidth; extent.width = (int)externalSurfaceWidth;
extent.height = (int)-externalSurfaceHeight; extent.height = (int)-externalSurfaceHeight;
if (isCustomRects && customRects == CustomRectsType.TopDown)
{
extent.height = (int)-externalSurfaceHeight/2;
if (eyeid == 0)
offset.y = (int)(externalSurfaceHeight-externalSurfaceHeight/2);
}
else if (isCustomRects && customRects == CustomRectsType.LeftRight)
{
extent.width = (int)externalSurfaceWidth/2;
if (eyeid != 0)
offset.x = extent.width;
}
rect.offset = offset; rect.offset = offset;
rect.extent = extent; rect.extent = extent;
layerTextures.textureLayout = rect; layerTextures[eyeid].textureLayout = rect;
return true; //No need to process texture queues return true; //No need to process texture queues
} }
@@ -346,63 +418,73 @@ namespace VIVE.OpenXR.CompositionLayer
if (TextureParamsChanged()) if (TextureParamsChanged())
{ {
//Destroy queues //Destroy queues
DEBUG("SetLayerTexture: Texture params changed, need to re-init queues. layerID: " + layerID); DEBUG("SetLayerTexture: Texture params changed, need to re-init queues. layerID: " + ((eyeid ==0) ? layerID : layerIDRight));
DestroyCompositionLayer(); if (layerID != 0)
{
DestroyCompositionLayer(0);
layerID = 0;
}
if (layerIDRight != 0)
{
DestroyCompositionLayer(1);
layerIDRight = 0;
}
reinitializationNeeded = true; reinitializationNeeded = true;
return false; return false;
} }
} }
else else
{ {
ERROR("SetLayerTexture: No texture found. layerID: " + layerID); ERROR("SetLayerTexture: No texture found. layerID: " + ((eyeid ==0) ? layerID : layerIDRight));
return false; return false;
} }
if (isDynamicLayer || (!isDynamicLayer && !textureAcquiredOnce)) if (isDynamicLayer || (!isDynamicLayer && !textureAcquiredOnce[eyeid]))
{ {
//Get available texture id //Get available texture id
compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>(); compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>();
uint currentImageIndex; uint currentImageIndex;
IntPtr newTextureID = compositionLayerFeature.CompositionLayer_GetTexture(layerID, out currentImageIndex); IntPtr newTextureID = compositionLayerFeature.CompositionLayer_GetTexture((eyeid ==0) ? layerID : layerIDRight, out currentImageIndex);
textureAcquired = true; textureAcquired[eyeid] = true;
textureAcquiredOnce = true; textureAcquiredOnce[eyeid] = true;
if (newTextureID == IntPtr.Zero) if (newTextureID == IntPtr.Zero)
{ {
ERROR("SetLayerTexture: Invalid Texture ID"); ERROR("SetLayerTexture: Invalid Texture ID");
if (compositionLayerFeature.CompositionLayer_ReleaseTexture(layerID)) if (compositionLayerFeature.CompositionLayer_ReleaseTexture((eyeid ==0) ? layerID : layerIDRight))
{ {
textureAcquired = false; textureAcquired[eyeid] = false;
} }
return false; return false;
} }
bool textureIDUpdated = false; bool textureIDUpdated = false;
layerTextures.currentAvailableTextureIndex = currentImageIndex; layerTextures[eyeid].currentAvailableTextureIndex = currentImageIndex;
IntPtr currentTextureID = layerTextures.GetCurrentAvailableTextureID(); IntPtr currentTextureID = layerTextures[eyeid].GetCurrentAvailableTextureID();
if (currentTextureID == IntPtr.Zero || currentTextureID != newTextureID) if (currentTextureID == IntPtr.Zero || currentTextureID != newTextureID)
{ {
DEBUG("SetLayerTexture: Update Texture ID. layerID: " + layerID); DEBUG("SetLayerTexture: Update Texture ID. layerID: " + ((eyeid ==0) ? layerID : layerIDRight));
layerTextures.SetCurrentAvailableTextureID(newTextureID); layerTextures[eyeid].SetCurrentAvailableTextureID(newTextureID);
textureIDUpdated = true; textureIDUpdated = true;
} }
if (layerTextures.GetCurrentAvailableTextureID() == IntPtr.Zero) if (layerTextures[eyeid].GetCurrentAvailableTextureID() == IntPtr.Zero)
{ {
ERROR("SetLayerTexture: Failed to get texture."); ERROR("SetLayerTexture: Failed to get texture.");
return false; return false;
} }
// Create external texture // Create external texture
if (layerTextures.GetCurrentAvailableExternalTexture() == null || textureIDUpdated) if (layerTextures[eyeid].GetCurrentAvailableExternalTexture() == null || textureIDUpdated)
{ {
DEBUG("SetLayerTexture: Create External Texture."); DEBUG("SetLayerTexture: Create External Texture.");
layerTextures.SetCurrentAvailableExternalTexture(Texture2D.CreateExternalTexture(texture.width, texture.height, TextureFormat.RGBA32, false, isLinear, layerTextures.GetCurrentAvailableTextureID())); layerTextures[eyeid].SetCurrentAvailableExternalTexture(Texture2D.CreateExternalTexture(texture.width, texture.height, TextureFormat.RGBA32, false, isLinear, layerTextures[eyeid].GetCurrentAvailableTextureID()));
} }
if (layerTextures.externalTextures[layerTextures.currentAvailableTextureIndex] == null) if (layerTextures[eyeid].externalTextures[layerTextures[eyeid].currentAvailableTextureIndex] == null)
{ {
ERROR("SetLayerTexture: Create External Texture Failed."); ERROR("SetLayerTexture: Create External Texture Failed.");
return false; return false;
@@ -411,28 +493,40 @@ namespace VIVE.OpenXR.CompositionLayer
//Set Texture Content //Set Texture Content
bool isContentSet = layerTextures.textureContentSet[layerTextures.currentAvailableTextureIndex]; bool isContentSet = layerTextures[eyeid].textureContentSet[layerTextures[eyeid].currentAvailableTextureIndex];
if (!isDynamicLayer && isContentSet) if (!isDynamicLayer && isContentSet)
{ {
return true; return true;
} }
int currentTextureWidth = layerTextures[eyeid].GetCurrentAvailableExternalTexture().width;
int currentTextureWidth = layerTextures.GetCurrentAvailableExternalTexture().width; int currentTextureHeight = layerTextures[eyeid].GetCurrentAvailableExternalTexture().height;
int currentTextureHeight = layerTextures.GetCurrentAvailableExternalTexture().height;
//Set Texture Layout //Set Texture Layout
offset.x = 0; offset.x = 0;
offset.y = 0; offset.y = 0;
extent.width = (int)currentTextureWidth; extent.width = (int)currentTextureWidth;
extent.height = (int)currentTextureHeight; extent.height = (int)currentTextureHeight;
if (isCustomRects && customRects == CustomRectsType.TopDown)
{
extent.height = (int)currentTextureHeight/2;
if (eyeid == 0)
offset.y = extent.height;
}
else if (isCustomRects && customRects == CustomRectsType.LeftRight)
{
extent.width = (int)currentTextureWidth/2;
if (eyeid != 0)
offset.x = extent.width;
}
rect.offset = offset; rect.offset = offset;
rect.extent = extent; rect.extent = extent;
layerTextures[eyeid].textureLayout = rect;
layerTextures.textureLayout = rect; RenderTexture srcTexture = ((eyeid == 0 || isCustomRects) ? textureLeft : textureRight) as RenderTexture;
//Blit and copy texture
RenderTexture srcTexture = texture as RenderTexture;
int msaaSamples = 1; int msaaSamples = 1;
if (srcTexture != null) if (srcTexture != null)
{ {
@@ -441,6 +535,8 @@ namespace VIVE.OpenXR.CompositionLayer
Material currentBlitMat = texture2DBlitMaterial; Material currentBlitMat = texture2DBlitMaterial;
DEBUG("RenderTextureDescriptor currentTextureWidth = " + currentTextureWidth + " currentTextureHeight = " + currentTextureHeight);
RenderTextureDescriptor rtDescriptor = new RenderTextureDescriptor(currentTextureWidth, currentTextureHeight, RenderTextureFormat.ARGB32, 0); RenderTextureDescriptor rtDescriptor = new RenderTextureDescriptor(currentTextureWidth, currentTextureHeight, RenderTextureFormat.ARGB32, 0);
rtDescriptor.msaaSamples = msaaSamples; rtDescriptor.msaaSamples = msaaSamples;
rtDescriptor.autoGenerateMips = false; rtDescriptor.autoGenerateMips = false;
@@ -454,8 +550,10 @@ namespace VIVE.OpenXR.CompositionLayer
} }
blitTempRT.DiscardContents(); blitTempRT.DiscardContents();
Texture dstTexture = layerTextures.GetCurrentAvailableExternalTexture(); Texture dstTexture = layerTextures[eyeid].GetCurrentAvailableExternalTexture();
Graphics.Blit(texture, blitTempRT, currentBlitMat);
Graphics.Blit((eyeid == 0) ? textureLeft : textureRight, blitTempRT, currentBlitMat);
Graphics.CopyTexture(blitTempRT, 0, 0, dstTexture, 0, 0); Graphics.CopyTexture(blitTempRT, 0, 0, dstTexture, 0, 0);
//DEBUG("Blit and CopyTexture complete."); //DEBUG("Blit and CopyTexture complete.");
@@ -470,12 +568,12 @@ namespace VIVE.OpenXR.CompositionLayer
return false; return false;
} }
layerTextures.textureContentSet[layerTextures.currentAvailableTextureIndex] = true; layerTextures[eyeid].textureContentSet[layerTextures[eyeid].currentAvailableTextureIndex] = true;
bool releaseTextureResult = compositionLayerFeature.CompositionLayer_ReleaseTexture(layerID); bool releaseTextureResult = compositionLayerFeature.CompositionLayer_ReleaseTexture((eyeid == 0) ? layerID : layerIDRight);
if (releaseTextureResult) if (releaseTextureResult)
{ {
textureAcquired = false; textureAcquired[eyeid] = false;
} }
return releaseTextureResult; return releaseTextureResult;
@@ -527,7 +625,7 @@ namespace VIVE.OpenXR.CompositionLayer
bool enabledColorScaleBiasInShader = false; bool enabledColorScaleBiasInShader = false;
XrCompositionLayerColorScaleBiasKHR CompositionLayerParamsColorScaleBias = new XrCompositionLayerColorScaleBiasKHR(); XrCompositionLayerColorScaleBiasKHR CompositionLayerParamsColorScaleBias = new XrCompositionLayerColorScaleBiasKHR();
private void SubmitCompositionLayer() //Call at onBeforeRender private void SubmitCompositionLayer(int eyeid, bool botheye) //Call at onBeforeRender
{ {
if (!isInitializationComplete && !isLayerReadyForSubmit) return; if (!isInitializationComplete && !isLayerReadyForSubmit) return;
compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>(); compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>();
@@ -570,7 +668,7 @@ namespace VIVE.OpenXR.CompositionLayer
CompositionLayerParamsColorScaleBias.colorBias.a = 0.0f; CompositionLayerParamsColorScaleBias.colorBias.a = 0.0f;
} }
compositionLayerColorScaleBias.Submit_CompositionLayerColorBias(CompositionLayerParamsColorScaleBias, layerID); compositionLayerColorScaleBias.Submit_CompositionLayerColorBias(CompositionLayerParamsColorScaleBias, (eyeid == 0) ? layerID : layerIDRight);
} }
else if (enabledColorScaleBiasInShader) //Disable if color scale bias is no longer active else if (enabledColorScaleBiasInShader) //Disable if color scale bias is no longer active
{ {
@@ -583,13 +681,13 @@ namespace VIVE.OpenXR.CompositionLayer
{ {
default: default:
case LayerShape.Quad: case LayerShape.Quad:
compositionLayerFeature.Submit_CompositionLayerQuad(AssignCompositionLayerParamsQuad(), (OpenXR.CompositionLayer.LayerType)layerType, compositionDepth, layerID); compositionLayerFeature.Submit_CompositionLayerQuad(AssignCompositionLayerParamsQuad(eyeid, botheye), (OpenXR.CompositionLayer.LayerType)layerType, compositionDepth, (eyeid == 0) ? layerID : layerIDRight);
break; break;
case LayerShape.Cylinder: case LayerShape.Cylinder:
ViveCompositionLayerCylinder compositionLayerCylinderFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayerCylinder>(); ViveCompositionLayerCylinder compositionLayerCylinderFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayerCylinder>();
if (compositionLayerCylinderFeature != null && compositionLayerCylinderFeature.CylinderExtensionEnabled) if (compositionLayerCylinderFeature != null && compositionLayerCylinderFeature.CylinderExtensionEnabled)
{ {
compositionLayerCylinderFeature.Submit_CompositionLayerCylinder(AssignCompositionLayerParamsCylinder(), (OpenXR.CompositionLayer.LayerType)layerType, compositionDepth, layerID); compositionLayerCylinderFeature.Submit_CompositionLayerCylinder(AssignCompositionLayerParamsCylinder(eyeid, botheye), (OpenXR.CompositionLayer.LayerType)layerType, compositionDepth, (eyeid == 0) ? layerID : layerIDRight);
} }
break; break;
} }
@@ -601,22 +699,22 @@ namespace VIVE.OpenXR.CompositionLayer
public delegate void OnDestroyCompositionLayer(); public delegate void OnDestroyCompositionLayer();
public event OnDestroyCompositionLayer OnDestroyCompositionLayerDelegate = null; public event OnDestroyCompositionLayer OnDestroyCompositionLayerDelegate = null;
private void DestroyCompositionLayer() private void DestroyCompositionLayer(int eyeid)
{ {
if (!isInitializationComplete || layerTextures == null) if (layerTextures[eyeid] == null)
{ {
DEBUG("DestroyCompositionLayer: Layer already destroyed/not initialized."); DEBUG("DestroyCompositionLayer: Layer already destroyed/not initialized.");
return; return;
} }
DEBUG("DestroyCompositionLayer");
compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>(); compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>();
if (textureAcquired) if (textureAcquired[eyeid])
{ {
DEBUG("DestroyCompositionLayer: textureAcquired, releasing."); DEBUG("DestroyCompositionLayer: textureAcquired, releasing.");
textureAcquired = !compositionLayerFeature.CompositionLayer_ReleaseTexture(layerID); textureAcquired[eyeid] = !compositionLayerFeature.CompositionLayer_ReleaseTexture((eyeid == 0) ? layerID : layerIDRight);
} }
CompositionLayerRenderThreadSyncObject DestroyLayerSwapchainSyncObject = new CompositionLayerRenderThreadSyncObject( CompositionLayerRenderThreadSyncObject DestroyLayerSwapchainSyncObject = new CompositionLayerRenderThreadSyncObject(
@@ -631,26 +729,26 @@ namespace VIVE.OpenXR.CompositionLayer
if (!compositionLayerFeature.CompositionLayer_Destroy(task.layerID)) if (!compositionLayerFeature.CompositionLayer_Destroy(task.layerID))
{ {
ERROR("estroyCompositionLayer: CompositionLayer_Destroy failed."); ERROR("DestroyCompositionLayer: CompositionLayer_Destroy failed : " + task.layerID);
} }
taskQueue.Release(task); taskQueue.Release(task);
} }
}); });
CompositionLayerRenderThreadTask.IssueDestroySwapchainEvent(DestroyLayerSwapchainSyncObject, layerID); CompositionLayerRenderThreadTask.IssueDestroySwapchainEvent(DestroyLayerSwapchainSyncObject, (eyeid == 0) ? layerID : layerIDRight);
InitStatus = false; InitStatus = false;
isLayerReadyForSubmit = false; isLayerReadyForSubmit = false;
isInitializationComplete = false; isInitializationComplete = false;
textureAcquiredOnce = false; textureAcquiredOnce[eyeid] = false;
foreach (Texture externalTexture in layerTextures.externalTextures) foreach (Texture externalTexture in layerTextures[eyeid].externalTextures)
{ {
DEBUG("DestroyCompositionLayer: External textures"); DEBUG("DestroyCompositionLayer: External textures");
if (externalTexture != null) Destroy(externalTexture); if (externalTexture != null) Destroy(externalTexture);
} }
layerTextures = null; layerTextures[eyeid] = null;
if (generatedFallbackMeshFilter != null && generatedFallbackMeshFilter.mesh != null) if (generatedFallbackMeshFilter != null && generatedFallbackMeshFilter.mesh != null)
{ {
@@ -696,7 +794,7 @@ namespace VIVE.OpenXR.CompositionLayer
private List<XRInputSubsystem> inputSubsystems = new List<XRInputSubsystem>(); private List<XRInputSubsystem> inputSubsystems = new List<XRInputSubsystem>();
XrCompositionLayerQuad CompositionLayerParamsQuad = new XrCompositionLayerQuad(); XrCompositionLayerQuad CompositionLayerParamsQuad = new XrCompositionLayerQuad();
XrExtent2Df quadSize = new XrExtent2Df(); XrExtent2Df quadSize = new XrExtent2Df();
private XrCompositionLayerQuad AssignCompositionLayerParamsQuad() private XrCompositionLayerQuad AssignCompositionLayerParamsQuad(int eyeid, bool botheye)
{ {
compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>(); compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>();
@@ -722,7 +820,14 @@ namespace VIVE.OpenXR.CompositionLayer
break; break;
} }
CompositionLayerParamsQuad.subImage.imageRect = layerTextures.textureLayout; if (!botheye) {
if (eyeid == 0)
CompositionLayerParamsQuad.eyeVisibility = XrEyeVisibility.XR_EYE_VISIBILITY_LEFT;
else
CompositionLayerParamsQuad.eyeVisibility = XrEyeVisibility.XR_EYE_VISIBILITY_RIGHT;
}
CompositionLayerParamsQuad.subImage.imageRect = layerTextures[eyeid].textureLayout;
CompositionLayerParamsQuad.subImage.imageArrayIndex = 0; CompositionLayerParamsQuad.subImage.imageArrayIndex = 0;
GetCompositionLayerPose(ref CompositionLayerParamsQuad.pose); //Update isHeadLock GetCompositionLayerPose(ref CompositionLayerParamsQuad.pose); //Update isHeadLock
@@ -762,13 +867,14 @@ namespace VIVE.OpenXR.CompositionLayer
quadSize.width = m_QuadWidth; quadSize.width = m_QuadWidth;
quadSize.height = m_QuadHeight; quadSize.height = m_QuadHeight;
CompositionLayerParamsQuad.size = quadSize; CompositionLayerParamsQuad.size = quadSize;
return CompositionLayerParamsQuad; return CompositionLayerParamsQuad;
} }
XrCompositionLayerCylinderKHR CompositionLayerParamsCylinder = new XrCompositionLayerCylinderKHR(); XrCompositionLayerCylinderKHR CompositionLayerParamsCylinder = new XrCompositionLayerCylinderKHR();
private XrCompositionLayerCylinderKHR AssignCompositionLayerParamsCylinder() private XrCompositionLayerCylinderKHR AssignCompositionLayerParamsCylinder(int eyeid, bool botheye)
{ {
compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>(); compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>();
@@ -830,7 +936,14 @@ namespace VIVE.OpenXR.CompositionLayer
break; break;
} }
CompositionLayerParamsCylinder.subImage.imageRect = layerTextures.textureLayout; if (!botheye) {
if (eyeid == 0)
CompositionLayerParamsQuad.eyeVisibility = XrEyeVisibility.XR_EYE_VISIBILITY_LEFT;
else
CompositionLayerParamsQuad.eyeVisibility = XrEyeVisibility.XR_EYE_VISIBILITY_RIGHT;
}
CompositionLayerParamsCylinder.subImage.imageRect = layerTextures[eyeid].textureLayout;
CompositionLayerParamsCylinder.subImage.imageArrayIndex = 0; CompositionLayerParamsCylinder.subImage.imageArrayIndex = 0;
GetCompositionLayerPose(ref CompositionLayerParamsCylinder.pose); GetCompositionLayerPose(ref CompositionLayerParamsCylinder.pose);
CompositionLayerParamsCylinder.radius = m_CylinderRadius; CompositionLayerParamsCylinder.radius = m_CylinderRadius;
@@ -959,9 +1072,21 @@ namespace VIVE.OpenXR.CompositionLayer
} }
public void TerminateLayer() public void TerminateLayer()
{
if (layerID != 0)
{ {
DEBUG("TerminateLayer: layerID: " + layerID); DEBUG("TerminateLayer: layerID: " + layerID);
DestroyCompositionLayer(); DestroyCompositionLayer(0);
layerID = 0;
}
if (layerIDRight != 0)
{
DEBUG("TerminateLayer: layerIDRight: " + layerIDRight);
DestroyCompositionLayer(1);
layerIDRight = 0;
}
if (placeholderGenerated && compositionLayerPlaceholderPrefabGO != null) if (placeholderGenerated && compositionLayerPlaceholderPrefabGO != null)
{ {
@@ -977,9 +1102,10 @@ namespace VIVE.OpenXR.CompositionLayer
public bool TextureParamsChanged() public bool TextureParamsChanged()
{ {
if (previousTexture != texture) if (previousTextureLeft != textureLeft || previousTextureRight != textureRight)
{ {
previousTexture = texture; previousTextureLeft = textureLeft;
previousTextureRight = textureRight;
return true; return true;
} }
@@ -1395,7 +1521,14 @@ namespace VIVE.OpenXR.CompositionLayer
return; return;
} }
if (SetLayerTexture()) bool isBotheye = (textureRight == null || textureLeft == textureRight);
if (isCustomRects)
{
isBotheye = false;
}
if (SetLayerTexture(0))
{ {
isLayerReadyForSubmit = true; isLayerReadyForSubmit = true;
} }
@@ -1405,7 +1538,22 @@ namespace VIVE.OpenXR.CompositionLayer
DEBUG("Composition Layer Lifecycle OnBeforeRender: Layer not ready for submit."); DEBUG("Composition Layer Lifecycle OnBeforeRender: Layer not ready for submit.");
return; return;
} }
SubmitCompositionLayer();
if (!isBotheye) {
if (SetLayerTexture(1))
{
isLayerReadyForSubmit = true;
}
if (!isLayerReadyForSubmit)
{
DEBUG("Composition Layer Lifecycle OnBeforeRender: Layer not ready for submit.");
return;
}
}
SubmitCompositionLayer(0, isBotheye);
if (!isBotheye)
SubmitCompositionLayer(1, isBotheye);
isLayerReadyForSubmit = false; //reset flag after submit isLayerReadyForSubmit = false; //reset flag after submit
} }
@@ -1635,6 +1783,12 @@ namespace VIVE.OpenXR.CompositionLayer
Right = 2, Right = 2,
} }
public enum CustomRectsType
{
LeftRight = 1,
TopDown = 2,
}
#if UNITY_EDITOR #if UNITY_EDITOR
public enum CylinderLayerParamAdjustmentMode public enum CylinderLayerParamAdjustmentMode
{ {
@@ -1950,6 +2104,7 @@ namespace VIVE.OpenXR.CompositionLayer
return radius; return radius;
} }
} }
#endregion #endregion
} }
} }

View File

@@ -11,6 +11,7 @@ using VIVE.OpenXR.CompositionLayer.Passthrough;
namespace VIVE.OpenXR.CompositionLayer.Passthrough namespace VIVE.OpenXR.CompositionLayer.Passthrough
{ {
[Obsolete("This class is deprecated. Please use PassthroughAPI instead.")]
public static class CompositionLayerPassthroughAPI public static class CompositionLayerPassthroughAPI
{ {
const string LOG_TAG = "CompositionLayerPassthroughAPI"; const string LOG_TAG = "CompositionLayerPassthroughAPI";
@@ -79,7 +80,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
new IntPtr(6), //Enter IntPtr(0) for backward compatibility (using createPassthrough to enable the passthrough feature), or enter IntPtr(6) to enable the passthrough feature based on the layer submitted to endframe. new IntPtr(6), //Enter IntPtr(0) for backward compatibility (using createPassthrough to enable the passthrough feature), or enter IntPtr(6) to enable the passthrough feature based on the layer submitted to endframe.
XrPassthroughFormHTC.XR_PASSTHROUGH_FORM_PLANAR_HTC XrPassthroughFormHTC.XR_PASSTHROUGH_FORM_PLANAR_HTC
); );
XrResult res = XR_HTC_passthrough.xrCreatePassthroughHTC(createInfo, out passthrough); XrResult res = passthroughFeature.CreatePassthroughHTC(createInfo, out passthrough);
if(res == XrResult.XR_SUCCESS) if(res == XrResult.XR_SUCCESS)
{ {
ulong passthrough_ulong = passthrough; ulong passthrough_ulong = passthrough;
@@ -192,7 +193,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
new IntPtr(6), //Enter IntPtr(0) for backward compatibility (using createPassthrough to enable the passthrough feature), or enter IntPtr(6) to enable the passthrough feature based on the layer submitted to endframe. new IntPtr(6), //Enter IntPtr(0) for backward compatibility (using createPassthrough to enable the passthrough feature), or enter IntPtr(6) to enable the passthrough feature based on the layer submitted to endframe.
XrPassthroughFormHTC.XR_PASSTHROUGH_FORM_PROJECTED_HTC XrPassthroughFormHTC.XR_PASSTHROUGH_FORM_PROJECTED_HTC
); );
XrResult res = XR_HTC_passthrough.xrCreatePassthroughHTC(createInfo, out passthrough); XrResult res = passthroughFeature.CreatePassthroughHTC(createInfo, out passthrough);
if (res == XrResult.XR_SUCCESS) if (res == XrResult.XR_SUCCESS)
{ {
ulong passthrough_ulong = passthrough; ulong passthrough_ulong = passthrough;
@@ -301,7 +302,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
new IntPtr(6), //Enter IntPtr(0) for backward compatibility (using createPassthrough to enable the passthrough feature), or enter IntPtr(6) to enable the passthrough feature based on the layer submitted to endframe. new IntPtr(6), //Enter IntPtr(0) for backward compatibility (using createPassthrough to enable the passthrough feature), or enter IntPtr(6) to enable the passthrough feature based on the layer submitted to endframe.
XrPassthroughFormHTC.XR_PASSTHROUGH_FORM_PROJECTED_HTC XrPassthroughFormHTC.XR_PASSTHROUGH_FORM_PROJECTED_HTC
); );
XrResult res = XR_HTC_passthrough.xrCreatePassthroughHTC(createInfo, out passthrough); XrResult res = passthroughFeature.CreatePassthroughHTC(createInfo, out passthrough);
if (res == XrResult.XR_SUCCESS) if (res == XrResult.XR_SUCCESS)
{ {
ulong passthrough_ulong = passthrough; ulong passthrough_ulong = passthrough;
@@ -400,7 +401,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
} }
#if UNITY_STANDALONE #if UNITY_STANDALONE
XrPassthroughHTC passthrough = passthrough2Layer[passthroughID].passthrough; XrPassthroughHTC passthrough = passthrough2Layer[passthroughID].passthrough;
XR_HTC_passthrough.xrDestroyPassthroughHTC(passthrough); passthroughFeature.DestroyPassthroughHTC(passthrough);
passthrough2IsUnderLay.Remove(passthroughID); passthrough2IsUnderLay.Remove(passthroughID);
SubmitLayer(); SubmitLayer();
passthrough2Layer.Remove(passthroughID); passthrough2Layer.Remove(passthroughID);

View File

@@ -1,5 +1,5 @@
fileFormatVersion: 2 fileFormatVersion: 2
guid: 4e5bee8db40a5a941a38710195e3219e guid: a6509bdf37b3b364eb80cb0df68435a3
folderAsset: yes folderAsset: yes
DefaultImporter: DefaultImporter:
externalObjects: {} externalObjects: {}

View File

@@ -1,5 +1,5 @@
fileFormatVersion: 2 fileFormatVersion: 2
guid: ca57a546da07d9146aa710d82ec06e64 guid: 5e0cbfbe15682c542acc5675d4503f72
folderAsset: yes folderAsset: yes
DefaultImporter: DefaultImporter:
externalObjects: {} externalObjects: {}

View File

@@ -0,0 +1,46 @@
// Copyright HTC Corporation All Rights Reserved.
#if UNITY_EDITOR
using UnityEditor;
using UnityEngine;
using VIVE.OpenXR.CompositionLayer;
namespace VIVE.OpenXR.Editor.CompositionLayer
{
[CustomEditor(typeof(ViveCompositionLayerExtraSettings))]
internal class ViveCompositionLayerEditorExtraSettings : UnityEditor.Editor
{
//private SerializedProperty SettingsEditorEnableSharpening;
static string PropertyName_SharpeningEnable = "SettingsEditorEnableSharpening";
static GUIContent Label_SharpeningEnable = new GUIContent("Enable Sharpening", "Enable Sharpening.");
SerializedProperty Property_SharpeningEnable;
static string PropertyName_SharpeningLevel = "SettingsEditorSharpeningLevel";
static GUIContent Label_SharpeningLevel = new GUIContent("Sharpening Level", "Select Sharpening Level.");
SerializedProperty Property_SharpeningLevel;
static string PropertyName_SharpeningMode = "SettingsEditorSharpeningMode";
static GUIContent Label_SharpeningMode = new GUIContent("Sharpening Mode", "Select Sharpening Mode.");
SerializedProperty Property_SharpeningMode;
void OnEnable()
{
Property_SharpeningEnable = serializedObject.FindProperty(PropertyName_SharpeningEnable);
Property_SharpeningMode = serializedObject.FindProperty(PropertyName_SharpeningMode);
Property_SharpeningLevel = serializedObject.FindProperty(PropertyName_SharpeningLevel);
}
public override void OnInspectorGUI()
{
serializedObject.Update();
EditorGUILayout.PropertyField(Property_SharpeningEnable, new GUIContent(Label_SharpeningEnable));
EditorGUILayout.PropertyField(Property_SharpeningMode, new GUIContent(Label_SharpeningMode));
EditorGUILayout.PropertyField(Property_SharpeningLevel, new GUIContent(Label_SharpeningLevel));
serializedObject.ApplyModifiedProperties();
}
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a3dfbc6bb6d75454db700d2326157424
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 050772d662d04514ca3bb28fbe82ecd7
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,30 @@
// Copyright HTC Corporation All Rights Reserved.
#if UNITY_EDITOR
using UnityEditor;
using VIVE.OpenXR.FrameSynchronization;
namespace VIVE.OpenXR.Editor.FrameSynchronization
{
[CustomEditor(typeof(ViveFrameSynchronization))]
public class ViveFrameSynchronizationEditor : UnityEditor.Editor
{
SerializedProperty m_SynchronizationMode;
private void OnEnable()
{
m_SynchronizationMode = serializedObject.FindProperty("m_SynchronizationMode");
}
public override void OnInspectorGUI()
{
serializedObject.Update();
EditorGUILayout.PropertyField(m_SynchronizationMode);
serializedObject.ApplyModifiedProperties();
}
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d25b2e9fff2d6724b865e0fbd609da9d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a8bd17374612cce468393aa1acc9fa89
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,184 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEngine;
using VIVE.OpenXR.Interaction;
#if UNITY_EDITOR
using UnityEditor;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
namespace VIVE.OpenXR.Editor.Interaction
{
[CustomEditor(typeof(ViveInteractions))]
public class ViveInteractionsEditor : UnityEditor.Editor
{
SerializedProperty m_ViveHandInteraction, m_ViveWristTracker, m_ViveXRTracker;
#if UNITY_ANDROID
SerializedProperty m_KHRHandInteraction;
#endif
private void OnEnable()
{
m_ViveHandInteraction = serializedObject.FindProperty("m_ViveHandInteraction");
m_ViveWristTracker = serializedObject.FindProperty("m_ViveWristTracker");
m_ViveXRTracker = serializedObject.FindProperty("m_ViveXRTracker");
#if UNITY_ANDROID
m_KHRHandInteraction = serializedObject.FindProperty("m_KHRHandInteraction");
#endif
}
public override void OnInspectorGUI()
{
serializedObject.Update();
#region GUI
GUIStyle boxStyleInfo = new GUIStyle(EditorStyles.helpBox);
boxStyleInfo.fontSize = 12;
boxStyleInfo.wordWrap = true;
GUIStyle boxStyleWarning = new GUIStyle(EditorStyles.helpBox);
boxStyleWarning.fontSize = 12;
boxStyleWarning.fontStyle = FontStyle.Bold;
boxStyleWarning.wordWrap = true;
// ViveHandInteraction
GUILayout.BeginHorizontal();
GUILayout.Space(20);
GUILayout.Label(
"The VIVE Hand Interaction feature enables hand selection and squeezing functions of XR_HTC_hand_interaction extension.\n" +
"Please note that enabling this feature impacts runtime performance.",
boxStyleInfo);
GUILayout.EndHorizontal();
EditorGUILayout.PropertyField(m_ViveHandInteraction);
// ViveWristTracker
GUILayout.Space(20);
GUILayout.BeginHorizontal();
GUILayout.Space(20);
GUILayout.Label(
"The VIVE Wrist Tracker feature enables wrist tracker pose and button functions of XR_HTC_vive_wrist_tracker_interaction extension.\n" +
"Please note that enabling this feature impacts runtime performance.",
boxStyleInfo);
GUILayout.EndHorizontal();
EditorGUILayout.PropertyField(m_ViveWristTracker);
// ViveXrTracker
GUILayout.Space(20);
GUILayout.BeginHorizontal();
GUILayout.Space(20);
GUILayout.Label(
"The VIVE XR Tracker feature enables ultimate tracker pose and button functions.\n" +
"WARNING:\n" +
"Please be aware that enabling this feature significantly affects runtime performance.",
boxStyleWarning);
GUILayout.EndHorizontal();
EditorGUILayout.PropertyField(m_ViveXRTracker);
#if UNITY_ANDROID
// ViveHandInteractionExt
GUILayout.Space(20);
GUILayout.BeginHorizontal();
GUILayout.Space(20);
GUILayout.Label(
"The KHR Hand Interaction feature enables hand functions of XR_EXT_hand_interaction extension.\n" +
"Please note that enabling this feature impacts runtime performance.",
boxStyleInfo);
GUILayout.EndHorizontal();
EditorGUILayout.PropertyField(m_KHRHandInteraction);
#endif
#endregion
ViveInteractions myScript = target as ViveInteractions;
if (myScript.enabled)
{
bool viveHandInteraction = myScript.UseViveHandInteraction();
bool viveWristTracker = myScript.UseViveWristTracker();
bool viveXrTracker = myScript.UseViveXrTracker();
bool khrHandInteraction = myScript.UseKhrHandInteraction();
OpenXRSettings settings = null;
#if UNITY_ANDROID
settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Android);
#elif UNITY_STANDALONE
settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Standalone);
#endif
if (settings != null)
{
bool addPathEnumeration = false;
foreach (var feature in settings.GetFeatures<OpenXRInteractionFeature>())
{
if (feature is Hand.ViveHandInteraction) { feature.enabled = viveHandInteraction; }
if (feature is Tracker.ViveWristTracker) { feature.enabled = viveWristTracker; }
if (feature is Tracker.ViveXRTracker)
{
feature.enabled = viveXrTracker;
addPathEnumeration = viveXrTracker;
}
if (feature is Hand.ViveHandInteractionExt) { feature.enabled = khrHandInteraction; }
}
foreach (var feature in settings.GetFeatures<OpenXRFeature>())
{
if (addPathEnumeration && feature is VivePathEnumeration) { feature.enabled = true; }
}
}
}
serializedObject.ApplyModifiedProperties();
}
}
/*public class ViveInteractionsBuildHook : OpenXRFeatureBuildHooks
{
public override int callbackOrder => 1;
public override Type featureType => typeof(VIVEFocus3Feature);
protected override void OnPostGenerateGradleAndroidProjectExt(string path)
{
}
protected override void OnPostprocessBuildExt(BuildReport report)
{
}
protected override void OnPreprocessBuildExt(BuildReport report)
{
var settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Android);
if (settings != null)
{
foreach (var feature in settings.GetFeatures<OpenXRFeature>())
{
if (feature is ViveInteractions && feature.enabled)
{
bool viveHandInteraction= ((ViveInteractions)feature).UseViveHandInteraction();
bool viveWristTracker = ((ViveInteractions)feature).UseViveWristTracker();
bool viveXrTracker = ((ViveInteractions)feature).UseViveXrTracker();
bool khrHandInteraction = ((ViveInteractions)feature).UseKhrHandInteraction();
Debug.LogFormat($"ViveInteractionsBuildHook() viveHandInteraction: {viveHandInteraction}, viveWristTracker: {viveWristTracker}, viveXrTracker: {viveXrTracker}, khrHandInteraction: {khrHandInteraction}");
EnableInteraction(viveHandInteraction, viveWristTracker, viveXrTracker, khrHandInteraction);
break;
}
}
}
}
private static void EnableInteraction(
bool viveHandInteraction = false,
bool viveWristTracker = false,
bool viveXrTracker = false,
bool khrHandInteraction = false)
{
var settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Android);
if (settings == null) { return; }
foreach (var feature in settings.GetFeatures<OpenXRInteractionFeature>())
{
if (feature is Hand.ViveHandInteraction) { feature.enabled = viveHandInteraction; Debug.LogFormat($"EnableInteraction() ViveHandInteraction: {feature.enabled}"); }
if (feature is Tracker.ViveWristTracker) { feature.enabled = viveWristTracker; Debug.LogFormat($"EnableInteraction() ViveWristTracker: {feature.enabled}"); }
if (feature is Tracker.ViveXRTracker) { feature.enabled = viveXrTracker; Debug.LogFormat($"EnableInteraction() ViveXRTracker: {feature.enabled}"); }
if (feature is Hand.ViveHandInteractionExt) { feature.enabled = khrHandInteraction; Debug.LogFormat($"EnableInteraction() ViveHandInteractionExt: {feature.enabled}"); }
}
}
}*/
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c7e32703a3206194580e534565abcf91
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,44 @@
// Copyright HTC Corporation All Rights Reserved.
#if UNITY_EDITOR
using UnityEditor;
namespace VIVE.OpenXR.Editor
{
[CustomEditor(typeof(VIVERig))]
public class VIVERigEditor : UnityEditor.Editor
{
SerializedProperty m_TrackingOrigin, m_CameraOffset, m_CameraHeight, m_ActionAsset;
private void OnEnable()
{
m_TrackingOrigin = serializedObject.FindProperty("m_TrackingOrigin");
m_CameraOffset = serializedObject.FindProperty("m_CameraOffset");
m_CameraHeight = serializedObject.FindProperty("m_CameraHeight");
m_ActionAsset = serializedObject.FindProperty("m_ActionAsset");
}
public override void OnInspectorGUI()
{
serializedObject.Update();
VIVERig myScript = target as VIVERig;
EditorGUILayout.PropertyField(m_TrackingOrigin);
EditorGUILayout.PropertyField(m_CameraOffset);
EditorGUILayout.HelpBox(
"Set the height of camera when the Tracking Origin is Device.",
MessageType.Info);
EditorGUILayout.PropertyField(m_CameraHeight);
#if ENABLE_INPUT_SYSTEM
EditorGUILayout.PropertyField(m_ActionAsset);
#endif
serializedObject.ApplyModifiedProperties();
if (UnityEngine.GUI.changed)
EditorUtility.SetDirty((VIVERig)target);
}
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4766014dc7f94c8468710cc3fd265f90
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,180 @@
# XR_HTC_anchor XR_HTC_anchor_persistence
## Name String
XR_HTC_anchor XR_HTC_anchor_persistence
## Revision
1
## Overview
This document provides an overview of how to use the AnchorManager to manage anchors in an OpenXR application, specifically using the XR_HTC_anchor and XR_HTC_anchor_persistence extensions.
## Introduction
Anchors in OpenXR allow applications to track specific points in space over time. The XR_HTC_anchor extension provides the basic functionality for creating and managing anchors, while the XR_HTC_anchor_persistence extension allows anchors to be persisted across sessions. The AnchorManager class simplifies the use of these extensions by providing high-level methods for common operations.
## Checking Extension Support
Before using any anchor-related functions, it's important to check if the extensions are supported on the current system.
```csharp
bool isAnchorSupported = AnchorManager.IsSupported();
bool isPersistedAnchorSupported = AnchorManager.IsPersistedAnchorSupported();
```
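If either check fails, the related calls below should be skipped. The guard below is a minimal sketch; the early `return` assumes it runs inside a setup method of your own.
```csharp
if (!AnchorManager.IsSupported()) {
    Debug.LogWarning("XR_HTC_anchor is not supported on this runtime; anchor features are disabled.");
    return;
}
if (!AnchorManager.IsPersistedAnchorSupported()) {
    Debug.LogWarning("XR_HTC_anchor_persistence is not supported; anchors cannot be saved across sessions.");
}
```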
## Creating and Managing Anchors
### Creating an Anchor
To create a new anchor, use the CreateAnchor method. This method requires a Pose representing the anchor's position and orientation relative to the tracking space, and a name for the anchor.
```csharp
Pose anchorPose = new Pose(new Vector3(0, 0, 0), Quaternion.identity);
AnchorManager.Anchor newAnchor = AnchorManager.CreateAnchor(anchorPose, "MyAnchor");
```
### Getting an Anchor's Name
To retrieve the name of an existing anchor, use the GetSpatialAnchorName method.
```csharp
string anchorName;
bool success = AnchorManager.GetSpatialAnchorName(newAnchor, out anchorName);
if (success) {
Debug.Log("Anchor name: " + anchorName);
}
```
### Tracking Space and Pose
To get the current tracking space, use the GetTrackingSpace method. To retrieve the pose of an anchor relative to the current tracking space, use the GetTrackingSpacePose method.
```csharp
XrSpace trackingSpace = AnchorManager.GetTrackingSpace();
Pose anchorPose;
bool poseValid = AnchorManager.GetTrackingSpacePose(newAnchor, out anchorPose);
if (poseValid) {
Debug.Log("Anchor pose: " + anchorPose.position + ", " + anchorPose.rotation);
}
```
## Persisting Anchors
### Creating a Persisted Anchor Collection
To enable anchor persistence, create a persisted anchor collection using the CreatePersistedAnchorCollection method.
```csharp
Task createCollectionTask = AnchorManager.CreatePersistedAnchorCollection();
createCollectionTask.Wait();
```
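Because CreatePersistedAnchorCollection returns a Task, it can also be awaited instead of blocked on with Wait(). The sketch below is illustrative (the method name is not part of the API) and assumes it is called once during startup.
```csharp
private async void PrepareAnchorPersistence() {
    // Awaiting keeps the main thread responsive while the collection is acquired.
    await AnchorManager.CreatePersistedAnchorCollection();
    Debug.Log("Persisted anchor collection is ready.");
}
```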
### Persisting an Anchor
To persist an anchor, use the PersistAnchor method with the anchor and a unique name for the persisted anchor.
```csharp
string persistedAnchorName = "MyPersistedAnchor";
XrResult result = AnchorManager.PersistAnchor(newAnchor, persistedAnchorName);
if (result == XrResult.XR_SUCCESS) {
Debug.Log("Anchor persisted successfully.");
}
```
### Unpersisting an Anchor
To remove a persisted anchor, use the UnpersistAnchor method with the name of the persisted anchor.
```csharp
XrResult result = AnchorManager.UnpersistAnchor(persistedAnchorName);
if (result == XrResult.XR_SUCCESS) {
Debug.Log("Anchor unpersisted successfully.");
}
```
### Enumerating Persisted Anchors
To get a list of all persisted anchors, use the EnumeratePersistedAnchorNames method.
```csharp
string[] persistedAnchorNames;
XrResult result = AnchorManager.EnumeratePersistedAnchorNames(out persistedAnchorNames);
if (result == XrResult.XR_SUCCESS) {
foreach (var name in persistedAnchorNames) {
Debug.Log("Persisted anchor: " + name);
}
}
```
### Creating an Anchor from a Persisted Anchor
To create an anchor from a persisted anchor, use the CreateSpatialAnchorFromPersistedAnchor method.
```csharp
AnchorManager.Anchor trackableAnchor;
XrResult result = AnchorManager.CreateSpatialAnchorFromPersistedAnchor(persistedAnchorName, "NewAnchor", out trackableAnchor);
if (result == XrResult.XR_SUCCESS) {
Debug.Log("Anchor created from persisted anchor.");
}
```
## Exporting and Importing Persisted Anchors
### Exporting a Persisted Anchor
To export a persisted anchor to a buffer, use the ExportPersistedAnchor method.
```csharp
Task<(XrResult, string, byte[])> exportTask = AnchorManager.ExportPersistedAnchor(persistedAnchorName);
exportTask.Wait();
var (exportResult, exportName, buffer) = exportTask.Result;
if (exportResult == XrResult.XR_SUCCESS) {
// Save buffer to a file or use as needed
File.WriteAllBytes("anchor.pa", buffer);
}
```
### Importing a Persisted Anchor
To import a persisted anchor from a buffer, use the ImportPersistedAnchor method.
```csharp
byte[] buffer = File.ReadAllBytes("anchor.pa");
Task<XrResult> importTask = AnchorManager.ImportPersistedAnchor(buffer);
importTask.Wait();
if (importTask.Result == XrResult.XR_SUCCESS) {
Debug.Log("Anchor imported successfully.");
}
```
### Clearing Persisted Anchors
To clear all persisted anchors, use the ClearPersistedAnchors method.
```csharp
XrResult result = AnchorManager.ClearPersistedAnchors();
if (result == XrResult.XR_SUCCESS) {
Debug.Log("All persisted anchors cleared.");
}
```
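The calls above can be combined into a simple save flow. The sketch below creates an anchor, persists it, and exports it to a file; the method name, anchor names, and file path are illustrative only, and it assumes extension support has already been verified and the persisted anchor collection has been created.
```csharp
private async void SaveAnchor(Pose pose) {
    // Create a trackable anchor at the given pose and persist it under the same name.
    AnchorManager.Anchor anchor = AnchorManager.CreateAnchor(pose, "SaveFlowAnchor");
    if (anchor == null) return;
    if (AnchorManager.PersistAnchor(anchor, "SaveFlowAnchor") != XrResult.XR_SUCCESS) return;

    // Export the persisted anchor and write the resulting buffer to persistent storage.
    var (result, name, buffer) = await AnchorManager.ExportPersistedAnchor("SaveFlowAnchor");
    if (result == XrResult.XR_SUCCESS) {
        File.WriteAllBytes(Path.Combine(Application.persistentDataPath, name + ".pa"), buffer);
    }
}
```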
## Conclusion
The AnchorManager class simplifies the management of anchors in OpenXR applications. By using the methods provided, you can easily create, persist, and manage anchors, ensuring that spatial data can be maintained across sessions. This document covers the basic operations; for more advanced usage, refer to the OpenXR specification and the implementation details of the AnchorManager class.

View File

@@ -1,4 +1,4 @@
// Copyright HTC Corporation All Rights Reserved. // Copyright HTC Corporation All Rights Reserved.
// Remove FAKE_DATA if editor or windows is supported. // Remove FAKE_DATA if editor or windows is supported.
#if UNITY_EDITOR #if UNITY_EDITOR
@@ -10,39 +10,56 @@ using System.Runtime.InteropServices;
using UnityEngine; using UnityEngine;
using UnityEngine.XR.OpenXR; using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features; using UnityEngine.XR.OpenXR.Features;
using VIVE.OpenXR.Feature;
#if UNITY_EDITOR #if UNITY_EDITOR
using UnityEditor; using UnityEditor;
using UnityEditor.XR.OpenXR.Features; using UnityEditor.XR.OpenXR.Features;
#endif #endif
namespace VIVE.OpenXR.Anchor namespace VIVE.OpenXR.Feature
{ {
using XrPersistedAnchorCollectionHTC = System.IntPtr;
#if UNITY_EDITOR #if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Anchor", [OpenXRFeature(UiName = "VIVE XR Anchor (Beta)",
Desc = "VIVE's implementaion of the XR_HTC_anchor.", Desc = "VIVE's implementaion of the XR_HTC_anchor.",
Company = "HTC", Company = "HTC",
DocumentationLink = "..\\Documentation", DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = kOpenxrExtensionString, OpenxrExtensionStrings = kOpenxrExtensionString,
Version = "1.0.0", Version = "1.0.0",
BuildTargetGroups = new[] { BuildTargetGroup.Android }, BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
FeatureId = featureId FeatureId = featureId
)] )]
#endif #endif
public class ViveAnchor : OpenXRFeature public class ViveAnchor : OpenXRFeature
{ {
public const string kOpenxrExtensionString = "XR_HTC_anchor"; public const string kOpenxrExtensionString = "XR_HTC_anchor XR_EXT_future XR_HTC_anchor_persistence";
/// <summary> /// <summary>
/// The feature id string. This is used to give the feature a well known id for reference. /// The feature id string. This is used to give the feature a well known id for reference.
/// </summary> /// </summary>
public const string featureId = "vive.wave.openxr.feature.htcanchor"; public const string featureId = "vive.openxr.feature.htcanchor";
/// <summary>
/// Enable or disable the persisted anchor feature. Set it only valid in feature settings.
/// </summary>
public bool enablePersistedAnchor = true;
private XrInstance m_XrInstance = 0; private XrInstance m_XrInstance = 0;
private XrSession session = 0; private XrSession session = 0;
private XrSystemId m_XrSystemId = 0; private XrSystemId m_XrSystemId = 0;
private bool IsInited = false;
private bool IsPAInited = false;
private bool useFakeData = false;
#region struct, enum, const of this extensions #region struct, enum, const of this extensions
/// <summary>
/// An application can inspect whether the system is capable of anchor functionality by
/// chaining an XrSystemAnchorPropertiesHTC structure to the XrSystemProperties when calling
/// xrGetSystemProperties.The runtime must return XR_ERROR_FEATURE_UNSUPPORTED if
/// XrSystemAnchorPropertiesHTC::supportsAnchor was XR_FALSE.
/// supportsAnchor indicates if current system is capable of anchor functionality.
/// </summary>
public struct XrSystemAnchorPropertiesHTC public struct XrSystemAnchorPropertiesHTC
{ {
public XrStructureType type; public XrStructureType type;
@@ -50,11 +67,35 @@ namespace VIVE.OpenXR.Anchor
public XrBool32 supportsAnchor; public XrBool32 supportsAnchor;
} }
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)] /// <summary>
/// name is a null-terminated UTF-8 string whose length is less than or equal to XR_MAX_SPATIAL_ANCHOR_NAME_SIZE_HTC.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrSpatialAnchorNameHTC public struct XrSpatialAnchorNameHTC
{ {
[MarshalAs(UnmanagedType.ByValTStr, SizeConst = 256)] [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)]
public string name; public byte[] name;
public XrSpatialAnchorNameHTC(string anchorName)
{
name = new byte[256];
byte[] utf8Bytes = System.Text.Encoding.UTF8.GetBytes(anchorName);
Array.Copy(utf8Bytes, name, Math.Min(utf8Bytes.Length, 255));
name[255] = 0;
}
public XrSpatialAnchorNameHTC(XrSpatialAnchorNameHTC anchorName)
{
name = new byte[256];
Array.Copy(anchorName.name, name, 256);
name[255] = 0;
}
public override readonly string ToString() {
if (name == null)
return string.Empty;
return System.Text.Encoding.UTF8.GetString(name).TrimEnd('\0');
}
} }
public struct XrSpatialAnchorCreateInfoHTC public struct XrSpatialAnchorCreateInfoHTC
@@ -66,70 +107,191 @@ namespace VIVE.OpenXR.Anchor
public XrSpatialAnchorNameHTC name; public XrSpatialAnchorNameHTC name;
} }
public struct XrPersistedAnchorCollectionAcquireInfoHTC
{
public XrStructureType type;
public System.IntPtr next;
}
public struct XrPersistedAnchorCollectionAcquireCompletionHTC
{
public XrStructureType type;
public System.IntPtr next;
public XrResult futureResult;
public System.IntPtr persistedAnchorCollection;
}
public struct XrSpatialAnchorPersistInfoHTC
{
public XrStructureType type;
public System.IntPtr next;
public XrSpace anchor;
public XrSpatialAnchorNameHTC persistedAnchorName;
}
public struct XrSpatialAnchorFromPersistedAnchorCreateInfoHTC
{
public XrStructureType type;
public System.IntPtr next;
public System.IntPtr persistedAnchorCollection;
public XrSpatialAnchorNameHTC persistedAnchorName;
public XrSpatialAnchorNameHTC spatialAnchorName;
}
public struct XrSpatialAnchorFromPersistedAnchorCreateCompletionHTC
{
public XrStructureType type;
public System.IntPtr next;
public XrResult futureResult;
public XrSpace anchor;
}
public struct XrPersistedAnchorPropertiesGetInfoHTC
{
public XrStructureType type;
public System.IntPtr next;
public uint maxPersistedAnchorCount;
}
#endregion #endregion
#region delegates and delegate instances #region delegates and delegate instances
delegate XrResult DelegateXrCreateSpatialAnchorHTC(XrSession session, ref XrSpatialAnchorCreateInfoHTC createInfo, ref XrSpace anchor); public delegate XrResult DelegateXrCreateSpatialAnchorHTC(XrSession session, ref XrSpatialAnchorCreateInfoHTC createInfo, ref XrSpace anchor);
delegate XrResult DelegateXrGetSpatialAnchorNameHTC(XrSpace anchor, ref XrSpatialAnchorNameHTC name); public delegate XrResult DelegateXrGetSpatialAnchorNameHTC(XrSpace anchor, ref XrSpatialAnchorNameHTC name);
public delegate XrResult DelegateXrAcquirePersistedAnchorCollectionAsyncHTC(XrSession session, ref XrPersistedAnchorCollectionAcquireInfoHTC acquireInfo, out IntPtr future);
public delegate XrResult DelegateXrAcquirePersistedAnchorCollectionCompleteHTC(IntPtr future, out XrPersistedAnchorCollectionAcquireCompletionHTC completion);
public delegate XrResult DelegateXrReleasePersistedAnchorCollectionHTC(IntPtr persistedAnchorCollection);
public delegate XrResult DelegateXrPersistSpatialAnchorAsyncHTC(XrPersistedAnchorCollectionHTC persistedAnchorCollection, ref XrSpatialAnchorPersistInfoHTC persistInfo, out IntPtr future);
public delegate XrResult DelegateXrPersistSpatialAnchorCompleteHTC(IntPtr future, out FutureWrapper.XrFutureCompletionEXT completion);
public delegate XrResult DelegateXrUnpersistSpatialAnchorHTC(IntPtr persistedAnchorCollection, ref XrSpatialAnchorNameHTC persistedAnchorName);
public delegate XrResult DelegateXrEnumeratePersistedAnchorNamesHTC( IntPtr persistedAnchorCollection, uint persistedAnchorNameCapacityInput, ref uint persistedAnchorNameCountOutput, [Out] XrSpatialAnchorNameHTC[] persistedAnchorNames);
public delegate XrResult DelegateXrCreateSpatialAnchorFromPersistedAnchorAsyncHTC(XrSession session, ref XrSpatialAnchorFromPersistedAnchorCreateInfoHTC spatialAnchorCreateInfo, out IntPtr future);
public delegate XrResult DelegateXrCreateSpatialAnchorFromPersistedAnchorCompleteHTC(IntPtr future, out XrSpatialAnchorFromPersistedAnchorCreateCompletionHTC completion);
public delegate XrResult DelegateXrClearPersistedAnchorsHTC(IntPtr persistedAnchorCollection);
public delegate XrResult DelegateXrGetPersistedAnchorPropertiesHTC(IntPtr persistedAnchorCollection, ref XrPersistedAnchorPropertiesGetInfoHTC getInfo);
public delegate XrResult DelegateXrExportPersistedAnchorHTC(IntPtr persistedAnchorCollection, ref XrSpatialAnchorNameHTC persistedAnchorName, uint dataCapacityInput, ref uint dataCountOutput, [Out] byte[] data);
public delegate XrResult DelegateXrImportPersistedAnchorHTC(IntPtr persistedAnchorCollection, uint dataCount, [In] byte[] data);
public delegate XrResult DelegateXrGetPersistedAnchorNameFromBufferHTC(IntPtr persistedAnchorCollection, uint bufferCount, byte[] buffer, ref XrSpatialAnchorNameHTC name);
DelegateXrCreateSpatialAnchorHTC XrCreateSpatialAnchorHTC; DelegateXrCreateSpatialAnchorHTC XrCreateSpatialAnchorHTC;
DelegateXrGetSpatialAnchorNameHTC XrGetSpatialAnchorNameHTC; DelegateXrGetSpatialAnchorNameHTC XrGetSpatialAnchorNameHTC;
DelegateXrAcquirePersistedAnchorCollectionAsyncHTC XrAcquirePersistedAnchorCollectionAsyncHTC;
DelegateXrAcquirePersistedAnchorCollectionCompleteHTC XrAcquirePersistedAnchorCollectionCompleteHTC;
DelegateXrReleasePersistedAnchorCollectionHTC XrReleasePersistedAnchorCollectionHTC;
DelegateXrPersistSpatialAnchorAsyncHTC XrPersistSpatialAnchorAsyncHTC;
DelegateXrPersistSpatialAnchorCompleteHTC XrPersistSpatialAnchorCompleteHTC;
DelegateXrUnpersistSpatialAnchorHTC XrUnpersistSpatialAnchorHTC;
DelegateXrEnumeratePersistedAnchorNamesHTC XrEnumeratePersistedAnchorNamesHTC;
DelegateXrCreateSpatialAnchorFromPersistedAnchorAsyncHTC XrCreateSpatialAnchorFromPersistedAnchorAsyncHTC;
DelegateXrCreateSpatialAnchorFromPersistedAnchorCompleteHTC XrCreateSpatialAnchorFromPersistedAnchorCompleteHTC;
DelegateXrClearPersistedAnchorsHTC XrClearPersistedAnchorsHTC;
DelegateXrGetPersistedAnchorPropertiesHTC XrGetPersistedAnchorPropertiesHTC;
DelegateXrExportPersistedAnchorHTC XrExportPersistedAnchorHTC;
DelegateXrImportPersistedAnchorHTC XrImportPersistedAnchorHTC;
DelegateXrGetPersistedAnchorNameFromBufferHTC XrGetPersistedAnchorNameFromBufferHTC;
#endregion delegates and delegate instances #endregion delegates and delegate instances
#region override functions #region override functions
/// <inheritdoc />
protected override IntPtr HookGetInstanceProcAddr(IntPtr func) protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
{ {
Debug.Log("ViveAnchor HookGetInstanceProcAddr() "); // For LocateSpace, need WaitFrame's predictedDisplayTime.
ViveInterceptors.Instance.AddRequiredFunction("xrWaitFrame");
return ViveInterceptors.Instance.HookGetInstanceProcAddr(func); return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
} }
/// <inheritdoc /> /// <inheritdoc />
protected override bool OnInstanceCreate(ulong xrInstance) protected override bool OnInstanceCreate(ulong xrInstance)
{ {
//Debug.Log("VIVEAnchor OnInstanceCreate() "); #if FAKE_DATA
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString)) Debug.LogError("ViveAnchor OnInstanceCreate() Use FakeData");
useFakeData = true;
#endif
IsInited = false;
bool ret = true;
ret &= CommonWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddr);
ret &= SpaceWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddr);
if (!ret)
{ {
Debug.LogWarning("ViveAnchor OnInstanceCreate() " + kOpenxrExtensionString + " is NOT enabled."); Debug.LogError("ViveAnchor OnInstanceCreate() failed.");
return false;
}
//Debug.Log("VIVEAnchor OnInstanceCreate() ");
if (!OpenXRRuntime.IsExtensionEnabled("XR_HTC_anchor") && !useFakeData)
{
Debug.LogWarning("ViveAnchor OnInstanceCreate() XR_HTC_anchor is NOT enabled.");
return false;
}
IsInited = GetXrFunctionDelegates(xrInstance);
if (!IsInited)
{
Debug.LogError("ViveAnchor OnInstanceCreate() failed to get function delegates.");
return false; return false;
} }
m_XrInstance = xrInstance; m_XrInstance = xrInstance;
//Debug.Log("OnInstanceCreate() " + m_XrInstance);
CommonWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddr);
SpaceWrapper.Instance.OnInstanceCreate(xrInstance, CommonWrapper.Instance.GetInstanceProcAddr);
return GetXrFunctionDelegates(m_XrInstance); bool hasFuture = FutureWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddr);
// No error log here because FutureWrapper prints its own.
#if FAKE_DATA
hasFuture = true;
#endif
IsPAInited = false;
bool hasPersistedAnchor = false;
do
{
if (!hasFuture)
{
Debug.LogWarning("ViveAnchor OnInstanceCreate() XR_HTC_anchor_persistence is NOT enabled because XR_EXT_future is not available.");
hasPersistedAnchor = false;
break;
}
hasPersistedAnchor = enablePersistedAnchor && OpenXRRuntime.IsExtensionEnabled("XR_HTC_anchor_persistence");
#if FAKE_DATA
hasPersistedAnchor = enablePersistedAnchor;
#endif
} while(false);
//Debug.Log("OnInstanceCreate() " + m_XrInstance);
if (hasPersistedAnchor)
IsPAInited = GetXrFunctionDelegatesPersistance(xrInstance);
if (!IsPAInited)
Debug.LogWarning("ViveAnchor OnInstanceCreate() XR_HTC_anchor_persistence is NOT enabled.");
return IsInited;
} }
protected override void OnInstanceDestroy(ulong xrInstance) protected override void OnInstanceDestroy(ulong xrInstance)
{ {
m_XrInstance = 0;
IsInited = false;
IsPAInited = false;
CommonWrapper.Instance.OnInstanceDestroy(); CommonWrapper.Instance.OnInstanceDestroy();
SpaceWrapper.Instance.OnInstanceDestroy(); SpaceWrapper.Instance.OnInstanceDestroy();
FutureWrapper.Instance.OnInstanceDestroy();
Debug.Log("ViveAnchor: OnInstanceDestroy()");
} }
/// <inheritdoc /> /// <inheritdoc />
protected override void OnSessionCreate(ulong xrSession) protected override void OnSessionCreate(ulong xrSession)
{ {
Debug.Log("ViveAnchor OnSessionCreate() "); //Debug.Log("ViveAnchor OnSessionCreate() ");
// here's one way you can grab the session
Debug.Log($"EXT: Got xrSession: {xrSession}");
session = xrSession; session = xrSession;
} }
/// <inheritdoc /> /// <inheritdoc />
protected override void OnSessionBegin(ulong xrSession) protected override void OnSessionDestroy(ulong xrSession)
{ {
Debug.Log("ViveAnchor OnSessionBegin() "); //Debug.Log("ViveAnchor OnSessionDestroy() ");
Debug.Log($"EXT: xrBeginSession: {xrSession}"); session = 0;
}
/// <inheritdoc />
protected override void OnSessionEnd(ulong xrSession)
{
Debug.Log("ViveAnchor OnSessionEnd() ");
Debug.Log($"EXT: about to xrEndSession: {xrSession}");
} }
// XXX Every millisecond the AppSpace switched from one space to another space. I don't know what is going on. // XXX Every millisecond the AppSpace switched from one space to another space. I don't know what is going on.
@@ -144,30 +306,53 @@ namespace VIVE.OpenXR.Anchor
protected override void OnSystemChange(ulong xrSystem) protected override void OnSystemChange(ulong xrSystem)
{ {
m_XrSystemId = xrSystem; m_XrSystemId = xrSystem;
Debug.Log("ViveAnchor OnSystemChange() " + m_XrSystemId); //Debug.Log("ViveAnchor OnSystemChange() " + m_XrSystemId);
} }
#endregion override functions #endregion override functions
private bool GetXrFunctionDelegates(XrInstance xrInstance) private bool GetXrFunctionDelegates(XrInstance inst)
{ {
Debug.Log("ViveAnchor GetXrFunctionDelegates() "); Debug.Log("ViveAnchor GetXrFunctionDelegates() ");
bool ret = true; bool ret = true;
IntPtr funcPtr = IntPtr.Zero;
OpenXRHelper.xrGetInstanceProcAddrDelegate GetAddr = CommonWrapper.Instance.GetInstanceProcAddr; // shorter name OpenXRHelper.xrGetInstanceProcAddrDelegate GetAddr = CommonWrapper.Instance.GetInstanceProcAddr; // shorter name
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, xrInstance, "xrCreateSpatialAnchorHTC", out XrCreateSpatialAnchorHTC); ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrCreateSpatialAnchorHTC", out XrCreateSpatialAnchorHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, xrInstance, "xrGetSpatialAnchorNameHTC", out XrGetSpatialAnchorNameHTC); ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrGetSpatialAnchorNameHTC", out XrGetSpatialAnchorNameHTC);
return ret;
}
private bool GetXrFunctionDelegatesPersistance(XrInstance inst)
{
Debug.Log("ViveAnchor GetXrFunctionDelegatesPersistance() ");
bool ret = true;
OpenXRHelper.xrGetInstanceProcAddrDelegate GetAddr = CommonWrapper.Instance.GetInstanceProcAddr; // shorter name
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrAcquirePersistedAnchorCollectionAsyncHTC", out XrAcquirePersistedAnchorCollectionAsyncHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrAcquirePersistedAnchorCollectionCompleteHTC", out XrAcquirePersistedAnchorCollectionCompleteHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrReleasePersistedAnchorCollectionHTC", out XrReleasePersistedAnchorCollectionHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrPersistSpatialAnchorAsyncHTC", out XrPersistSpatialAnchorAsyncHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrPersistSpatialAnchorCompleteHTC", out XrPersistSpatialAnchorCompleteHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrUnpersistSpatialAnchorHTC", out XrUnpersistSpatialAnchorHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrEnumeratePersistedAnchorNamesHTC", out XrEnumeratePersistedAnchorNamesHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrCreateSpatialAnchorFromPersistedAnchorAsyncHTC", out XrCreateSpatialAnchorFromPersistedAnchorAsyncHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrCreateSpatialAnchorFromPersistedAnchorCompleteHTC", out XrCreateSpatialAnchorFromPersistedAnchorCompleteHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrClearPersistedAnchorsHTC", out XrClearPersistedAnchorsHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrGetPersistedAnchorPropertiesHTC", out XrGetPersistedAnchorPropertiesHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrExportPersistedAnchorHTC", out XrExportPersistedAnchorHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrImportPersistedAnchorHTC", out XrImportPersistedAnchorHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrGetPersistedAnchorNameFromBufferHTC", out XrGetPersistedAnchorNameFromBufferHTC);
return ret; return ret;
} }
#region functions of extension #region functions of extension
/// <summary> /// <summary>
/// Helper function to get this feature' properties. /// Helper function to get this feature's properties.
/// See <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystemProperties">xrGetSystemProperties</see> /// See <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystemProperties">xrGetSystemProperties</see>
/// </summary> /// </summary>
/// <param name="anchorProperties">Output parameter to hold anchor properties.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult GetProperties(out XrSystemAnchorPropertiesHTC anchorProperties) public XrResult GetProperties(out XrSystemAnchorPropertiesHTC anchorProperties)
{ {
anchorProperties = new XrSystemAnchorPropertiesHTC(); anchorProperties = new XrSystemAnchorPropertiesHTC();
@@ -184,34 +369,293 @@ namespace VIVE.OpenXR.Anchor
return CommonWrapper.Instance.GetProperties(m_XrInstance, m_XrSystemId, ref anchorProperties); return CommonWrapper.Instance.GetProperties(m_XrInstance, m_XrSystemId, ref anchorProperties);
} }
/// <summary>
/// The CreateSpatialAnchor function creates a spatial anchor with specified base space and pose in the space.
/// The anchor is represented by an XrSpace and its pose can be tracked via xrLocateSpace.
/// Once the anchor is no longer needed, call xrDestroySpace to erase the anchor.
/// </summary>
/// <param name="createInfo">Information required to create the spatial anchor.</param>
/// <param name="anchor">Output parameter to hold the created anchor.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult CreateSpatialAnchor(XrSpatialAnchorCreateInfoHTC createInfo, out XrSpace anchor) public XrResult CreateSpatialAnchor(XrSpatialAnchorCreateInfoHTC createInfo, out XrSpace anchor)
{ {
anchor = default; anchor = default;
#if FAKE_DATA if (!IsInited)
if (Application.isEditor) return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
return XrResult.XR_SUCCESS; if (session == 0)
#endif return XrResult.XR_ERROR_SESSION_LOST;
var ret = XrCreateSpatialAnchorHTC(session, ref createInfo, ref anchor); var ret = XrCreateSpatialAnchorHTC(session, ref createInfo, ref anchor);
Debug.Log("ViveAnchor CreateSpatialAnchor() r=" + ret + ", a=" + anchor + ", bs=" + createInfo.space + //Debug.Log("ViveAnchor CreateSpatialAnchor() r=" + ret + ", a=" + anchor + ", bs=" + createInfo.space +
", pos=(" + createInfo.poseInSpace.position.x + "," + createInfo.poseInSpace.position.y + "," + createInfo.poseInSpace.position.z + // ", pos=(" + createInfo.poseInSpace.position.x + "," + createInfo.poseInSpace.position.y + "," + createInfo.poseInSpace.position.z +
"), rot=(" + createInfo.poseInSpace.orientation.x + "," + createInfo.poseInSpace.orientation.y + "," + createInfo.poseInSpace.orientation.z + "," + createInfo.poseInSpace.orientation.w + // "), rot=(" + createInfo.poseInSpace.orientation.x + "," + createInfo.poseInSpace.orientation.y + "," + createInfo.poseInSpace.orientation.z + "," + createInfo.poseInSpace.orientation.w +
"), n=" + createInfo.name.name); // "), n=" + createInfo.name.name);
return ret; return ret;
} }
/// <summary>
/// The GetSpatialAnchorName function retrieves the name of the spatial anchor.
/// </summary>
/// <param name="anchor">The XrSpace representing the anchor.</param>
/// <param name="name">Output parameter to hold the name of the anchor.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult GetSpatialAnchorName(XrSpace anchor, out XrSpatialAnchorNameHTC name) public XrResult GetSpatialAnchorName(XrSpace anchor, out XrSpatialAnchorNameHTC name)
{ {
name = default; name = new XrSpatialAnchorNameHTC();
#if FAKE_DATA if (!IsInited)
if (Application.isEditor) return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
{
name.name = "fake anchor";
return XrResult.XR_SUCCESS;
}
#endif
return XrGetSpatialAnchorNameHTC(anchor, ref name); return XrGetSpatialAnchorNameHTC(anchor, ref name);
} }
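// Usage sketch, assuming the enclosing feature class is ViveAnchor and that the fields of
// XrSpatialAnchorCreateInfoHTC used above (space, poseInSpace, name) are plain public fields.
// Set createInfo.type per the XrSpatialAnchorCreateInfoHTC definition; it is not shown in this diff.
static void ExampleCreateAnchor(ViveAnchor feature)
{
var createInfo = new XrSpatialAnchorCreateInfoHTC();
createInfo.space = feature.GetTrackingSpace(); // base space
createInfo.poseInSpace.orientation.w = 1.0f; // identity pose at the space origin
createInfo.name.name = "example_anchor"; // illustrative name
if (feature.CreateSpatialAnchor(createInfo, out XrSpace anchor) == XrResult.XR_SUCCESS &&
feature.GetSpatialAnchorName(anchor, out XrSpatialAnchorNameHTC name) == XrResult.XR_SUCCESS)
{
Debug.Log("Created anchor named " + name.name);
}
}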
/// <summary>
/// If the extension is supported and enabled, return true.
/// </summary>
/// <returns>True if persisted anchor extension is supported, false otherwise.</returns>
public bool IsPersistedAnchorSupported()
{
return IsPAInited;
}
/// <summary>
/// Creates a persisted anchor collection. This collection can be used to persist spatial anchors across sessions.
/// Many persisted anchor APIs need a persisted anchor collection to operate.
/// </summary>
/// <param name="future">Output parameter for the async future handle. When the future is ready, call AcquirePersistedAnchorCollectionComplete to obtain the persisted anchor collection handle.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult AcquirePersistedAnchorCollectionAsync(out IntPtr future)
{
future = IntPtr.Zero;
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
if (session == 0)
return XrResult.XR_ERROR_SESSION_LOST;
XrPersistedAnchorCollectionAcquireInfoHTC acquireInfo = new XrPersistedAnchorCollectionAcquireInfoHTC
{
type = XrStructureType.XR_TYPE_PERSISTED_ANCHOR_COLLECTION_ACQUIRE_INFO_HTC,
next = IntPtr.Zero,
};
return XrAcquirePersistedAnchorCollectionAsyncHTC(session, ref acquireInfo, out future);
}
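/// <summary>
/// Completes the persisted anchor collection acquisition once the future returned by AcquirePersistedAnchorCollectionAsync is ready.
/// </summary>
/// <param name="future">The future returned by AcquirePersistedAnchorCollectionAsync.</param>
/// <param name="completion">Output parameter holding the future result and the acquired persisted anchor collection handle.</param>
/// <returns>XrResult indicating success or failure.</returns>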
public XrResult AcquirePersistedAnchorCollectionComplete(IntPtr future, out XrPersistedAnchorCollectionAcquireCompletionHTC completion)
{
completion = new XrPersistedAnchorCollectionAcquireCompletionHTC();
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
return XrAcquirePersistedAnchorCollectionCompleteHTC(future, out completion);
}
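// Usage sketch of the two-step acquire flow, assuming the enclosing feature class is ViveAnchor.
// Polling the future for readiness goes through XR_EXT_future (wrapped by FutureWrapper); that call is not shown here.
static IntPtr ExampleAcquireCollection(ViveAnchor feature)
{
if (feature.AcquirePersistedAnchorCollectionAsync(out IntPtr future) != XrResult.XR_SUCCESS)
return IntPtr.Zero;
// ... poll the future until it is ready, then:
if (feature.AcquirePersistedAnchorCollectionComplete(future, out var completion) == XrResult.XR_SUCCESS &&
completion.futureResult == XrResult.XR_SUCCESS)
{
return completion.persistedAnchorCollection;
}
return IntPtr.Zero;
}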
/// <summary>
/// Destroys the persisted anchor collection.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to be destroyed.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult ReleasePersistedAnchorCollection(IntPtr persistedAnchorCollection)
{
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
return XrReleasePersistedAnchorCollectionHTC(persistedAnchorCollection);
}
/// <summary>
/// Persists a spatial anchor with the given name. The name should be unique.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="anchor">The spatial anchor to be persisted.</param>
/// <param name="name">The name of the persisted anchor.</param>
/// <param name="future">Output parameter for the async future handle; pass it to PersistSpatialAnchorComplete once the future is ready.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult PersistSpatialAnchorAsync(IntPtr persistedAnchorCollection, XrSpace anchor, XrSpatialAnchorNameHTC name, out IntPtr future)
{
future = IntPtr.Zero;
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
XrSpatialAnchorPersistInfoHTC persistInfo = new XrSpatialAnchorPersistInfoHTC
{
type = XrStructureType.XR_TYPE_SPATIAL_ANCHOR_PERSIST_INFO_HTC,
anchor = anchor,
persistedAnchorName = name
};
return XrPersistSpatialAnchorAsyncHTC(persistedAnchorCollection, ref persistInfo, out future);
}
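/// <summary>
/// Completes the persist operation once the future returned by PersistSpatialAnchorAsync is ready.
/// </summary>
/// <param name="future">The future returned by PersistSpatialAnchorAsync.</param>
/// <param name="completion">Output parameter holding the future result.</param>
/// <returns>XrResult indicating success or failure.</returns>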
public XrResult PersistSpatialAnchorComplete(IntPtr future, out FutureWrapper.XrFutureCompletionEXT completion)
{
completion = new FutureWrapper.XrFutureCompletionEXT() {
type = XrStructureType.XR_TYPE_FUTURE_COMPLETION_EXT,
next = IntPtr.Zero,
futureResult = XrResult.XR_SUCCESS
};
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
return XrPersistSpatialAnchorCompleteHTC(future, out completion);
}
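// Usage sketch of the persist flow, assuming the enclosing feature class is ViveAnchor and that the
// caller polls the future via XR_EXT_future between the two calls.
static void ExamplePersist(ViveAnchor feature, IntPtr collection, XrSpace anchor, XrSpatialAnchorNameHTC name)
{
if (feature.PersistSpatialAnchorAsync(collection, anchor, name, out IntPtr future) != XrResult.XR_SUCCESS)
return;
// ... poll the future until it is ready, then:
if (feature.PersistSpatialAnchorComplete(future, out var completion) == XrResult.XR_SUCCESS &&
completion.futureResult == XrResult.XR_SUCCESS)
{
Debug.Log("Anchor persisted as " + name.name);
}
}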
/// <summary>
/// Unpersists the anchor with the given name.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="name">The name of the anchor to be unpersisted.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult UnpersistSpatialAnchor(IntPtr persistedAnchorCollection, XrSpatialAnchorNameHTC name)
{
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
return XrUnpersistSpatialAnchorHTC(persistedAnchorCollection, ref name);
}
/// <summary>
/// Enumerates all persisted anchor names.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="persistedAnchorNameCapacityInput">The capacity of the input buffer.</param>
/// <param name="persistedAnchorNameCountOutput">Output parameter to hold the count of persisted anchor names.</param>
/// <param name="persistedAnchorNames">Output parameter to hold the names of persisted anchors.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult EnumeratePersistedAnchorNames(IntPtr persistedAnchorCollection, uint persistedAnchorNameCapacityInput,
ref uint persistedAnchorNameCountOutput, ref XrSpatialAnchorNameHTC[] persistedAnchorNames)
{
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
return XrEnumeratePersistedAnchorNamesHTC(persistedAnchorCollection, persistedAnchorNameCapacityInput, ref persistedAnchorNameCountOutput, persistedAnchorNames);
}
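// Usage sketch of the two-call enumeration idiom (query the count, then fetch the names),
// assuming the enclosing feature class is ViveAnchor.
static XrSpatialAnchorNameHTC[] ExampleEnumerateNames(ViveAnchor feature, IntPtr collection)
{
uint count = 0;
var names = new XrSpatialAnchorNameHTC[0];
if (feature.EnumeratePersistedAnchorNames(collection, 0, ref count, ref names) != XrResult.XR_SUCCESS || count == 0)
return new XrSpatialAnchorNameHTC[0];
names = new XrSpatialAnchorNameHTC[count];
feature.EnumeratePersistedAnchorNames(collection, count, ref count, ref names);
return names;
}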
/// <summary>
/// Creates a spatial anchor from a persisted anchor.
/// </summary>
/// <param name="spatialAnchorCreateInfo">Information required to create the spatial anchor from persisted anchor.</param>
/// <param name="anchor">Output parameter to hold the created spatial anchor.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult CreateSpatialAnchorFromPersistedAnchorAsync(XrSpatialAnchorFromPersistedAnchorCreateInfoHTC spatialAnchorCreateInfo, out IntPtr future)
{
future = IntPtr.Zero;
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
if (session == 0)
return XrResult.XR_ERROR_SESSION_LOST;
return XrCreateSpatialAnchorFromPersistedAnchorAsyncHTC(session, ref spatialAnchorCreateInfo, out future);
}
/// <summary>
/// When the future is ready, call this function to get the result.
/// </summary>
/// <param name="future">The future returned by CreateSpatialAnchorFromPersistedAnchorAsync.</param>
/// <param name="completion">Output parameter holding the future result and the created anchor.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult CreateSpatialAnchorFromPersistedAnchorComplete(IntPtr future, out XrSpatialAnchorFromPersistedAnchorCreateCompletionHTC completion)
{
completion = new XrSpatialAnchorFromPersistedAnchorCreateCompletionHTC()
{
type = XrStructureType.XR_TYPE_SPATIAL_ANCHOR_FROM_PERSISTED_ANCHOR_CREATE_COMPLETION_HTC,
next = IntPtr.Zero,
futureResult = XrResult.XR_SUCCESS,
anchor = 0
};
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
return XrCreateSpatialAnchorFromPersistedAnchorCompleteHTC(future, out completion);
}
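// Usage sketch: recreate an XrSpace from a previously persisted anchor, assuming the enclosing
// feature class is ViveAnchor. The create-info structure type constant below is an assumption
// following the naming used elsewhere in this file.
static void ExampleCreateFromPersisted(ViveAnchor feature, IntPtr collection, XrSpatialAnchorNameHTC persistedName)
{
var info = new XrSpatialAnchorFromPersistedAnchorCreateInfoHTC
{
type = XrStructureType.XR_TYPE_SPATIAL_ANCHOR_FROM_PERSISTED_ANCHOR_CREATE_INFO_HTC, // assumed constant name
next = IntPtr.Zero,
persistedAnchorCollection = collection,
persistedAnchorName = persistedName,
spatialAnchorName = persistedName // reuse the persisted name for the new anchor
};
if (feature.CreateSpatialAnchorFromPersistedAnchorAsync(info, out IntPtr future) != XrResult.XR_SUCCESS)
return;
// ... poll the future until it is ready, then:
if (feature.CreateSpatialAnchorFromPersistedAnchorComplete(future, out var completion) == XrResult.XR_SUCCESS &&
completion.futureResult == XrResult.XR_SUCCESS)
{
Debug.Log("Recreated anchor space: " + completion.anchor);
}
}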
/// <summary>
/// Clears all persisted anchors.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult ClearPersistedAnchors(IntPtr persistedAnchorCollection)
{
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
return XrClearPersistedAnchorsHTC(persistedAnchorCollection);
}
/// <summary>
/// Gets the properties of the persisted anchor collection, such as the maximum number of anchors that can be persisted.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="getInfo">Output parameter to hold the properties of the persisted anchor collection.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult GetPersistedAnchorProperties(IntPtr persistedAnchorCollection, out XrPersistedAnchorPropertiesGetInfoHTC getInfo)
{
getInfo = new XrPersistedAnchorPropertiesGetInfoHTC
{
type = XrStructureType.XR_TYPE_PERSISTED_ANCHOR_PROPERTIES_GET_INFO_HTC
};
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
return XrGetPersistedAnchorPropertiesHTC(persistedAnchorCollection, ref getInfo);
}
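// Usage sketch: query how many anchors the runtime can persist, assuming the enclosing feature class is ViveAnchor.
static uint ExampleGetMaxPersistedAnchorCount(ViveAnchor feature, IntPtr collection)
{
if (feature.GetPersistedAnchorProperties(collection, out var getInfo) == XrResult.XR_SUCCESS)
return getInfo.maxPersistedAnchorCount;
return 0;
}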
/// <summary>
/// Exports the persisted anchor to a buffer. The buffer can be used to import the anchor later or save to a file.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="persistedAnchorName">The name of the persisted anchor to be exported.</param>
/// <param name="data">Output parameter to hold the buffer containing the exported anchor.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult ExportPersistedAnchor(IntPtr persistedAnchorCollection, XrSpatialAnchorNameHTC persistedAnchorName, out byte[] data)
{
data = null;
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
uint dataCountOutput = 0;
uint dataCapacityInput = 0;
XrResult ret = XrExportPersistedAnchorHTC(persistedAnchorCollection, ref persistedAnchorName, dataCapacityInput, ref dataCountOutput, null);
if (ret != XrResult.XR_SUCCESS)
{
Debug.LogError("ExportPersistedAnchor failed to get data size. ret=" + ret);
data = null;
return ret;
}
dataCapacityInput = dataCountOutput;
data = new byte[dataCountOutput];
ret = XrExportPersistedAnchorHTC(persistedAnchorCollection, ref persistedAnchorName, dataCapacityInput, ref dataCountOutput, data);
return ret;
}
/// <summary>
/// Imports the persisted anchor from a buffer. The buffer should be created by ExportPersistedAnchor.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="data">The buffer containing the persisted anchor data.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult ImportPersistedAnchor(IntPtr persistedAnchorCollection, byte[] data)
{
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
return XrImportPersistedAnchorHTC(persistedAnchorCollection, (uint)data.Length, data);
}
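// Usage sketch: round-trip a persisted anchor through a byte buffer, for example to save it to disk.
// Assumes the enclosing feature class is ViveAnchor; the file name is purely illustrative.
static void ExampleExportImport(ViveAnchor feature, IntPtr collection, XrSpatialAnchorNameHTC name)
{
if (feature.ExportPersistedAnchor(collection, name, out byte[] data) != XrResult.XR_SUCCESS)
return;
string path = System.IO.Path.Combine(Application.persistentDataPath, "anchor.bin"); // illustrative path
System.IO.File.WriteAllBytes(path, data);
byte[] loaded = System.IO.File.ReadAllBytes(path);
feature.ImportPersistedAnchor(collection, loaded);
}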
/// <summary>
/// Gets the name of the persisted anchor from a buffer. The buffer should be created by ExportPersistedAnchor.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="buffer">The buffer containing the exported anchor data.</param>
/// <param name="name">Output parameter to hold the persisted anchor name stored in the buffer.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult GetPersistedAnchorNameFromBuffer(IntPtr persistedAnchorCollection, byte[] buffer, out XrSpatialAnchorNameHTC name)
{
name = new XrSpatialAnchorNameHTC();
if (!IsPAInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
if (buffer == null)
return XrResult.XR_ERROR_VALIDATION_FAILURE;
return XrGetPersistedAnchorNameFromBufferHTC(persistedAnchorCollection, (uint)buffer.Length, buffer, ref name);
}
#endregion #endregion
#region tools for user #region tools for user
@@ -223,7 +667,7 @@ namespace VIVE.OpenXR.Anchor
public XrSpace GetTrackingSpace() public XrSpace GetTrackingSpace()
{ {
var s = GetCurrentAppSpace(); var s = GetCurrentAppSpace();
Debug.Log("ViveAnchor GetTrackingSpace() s=" + s); //Debug.Log("ViveAnchor GetTrackingSpace() s=" + s);
return s; return s;
} }
#endregion #endregion

View File

@@ -0,0 +1,203 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEditor;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
using UnityEngine;
using System;
using System.Runtime.InteropServices;
#if UNITY_EDITOR
using UnityEditor.XR.OpenXR.Features;
#endif
namespace VIVE.OpenXR.CompositionLayer
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Composition Layer (Extra Settings) (Beta)",
Desc = "Enable this feature to use the Composition Layer Extra Settings.",
Company = "HTC",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = kOpenxrExtensionStrings,
Version = "1.0.0",
BuildTargetGroups = new[] { BuildTargetGroup.Android },
FeatureId = featureId
)]
#endif
public class ViveCompositionLayerExtraSettings : OpenXRFeature
{
const string LOG_TAG = "VIVE.OpenXR.ViveCompositionLayer.ExtraSettings";
static void DEBUG(string msg) { Debug.Log(LOG_TAG + " " + msg); }
static void WARNING(string msg) { Debug.LogWarning(LOG_TAG + " " + msg); }
static void ERROR(string msg) { Debug.LogError(LOG_TAG + " " + msg); }
/// <summary>
/// Whether sharpening is enabled from the Settings Editor.
/// </summary>
public bool SettingsEditorEnableSharpening = false;
/// <summary>
/// Support Sharpening or Not.
/// </summary>
public bool supportSharpening = false;
/// <summary>
/// Settings Editor Sharpening Mode
/// </summary>
public XrSharpeningModeHTC SettingsEditorSharpeningMode = XrSharpeningModeHTC.FAST;
/// <summary>
/// The sharpening level configured in the Settings Editor, in the range [0, 1].
/// </summary>
[Range(0.0f, 1.0f)]
public float SettingsEditorSharpeningLevel = 1.0f;
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string featureId = "vive.openxr.feature.compositionlayer.extrasettings";
/// <summary>
/// OpenXR specification.
/// </summary>
public const string kOpenxrExtensionStrings = "XR_HTC_composition_layer_extra_settings";
#region OpenXR Life Cycle
private bool m_XrInstanceCreated = false;
/// <summary>
/// The XR instance is created or not.
/// </summary>
public bool XrInstanceCreated
{
get { return m_XrInstanceCreated; }
}
private XrInstance m_XrInstance = 0;
protected override bool OnInstanceCreate(ulong xrInstance)
{
foreach (string kOpenxrExtensionString in kOpenxrExtensionStrings.Split(' '))
{
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
{
WARNING("OnInstanceCreate() " + kOpenxrExtensionString + " is NOT enabled.");
}
}
m_XrInstanceCreated = true;
m_XrInstance = xrInstance;
DEBUG("OnInstanceCreate() " + m_XrInstance);
return true;
}
protected override void OnInstanceDestroy(ulong xrInstance)
{
m_XrInstanceCreated = false;
DEBUG("OnInstanceDestroy() " + m_XrInstance);
}
private XrSystemId m_XrSystemId = 0;
protected override void OnSystemChange(ulong xrSystem)
{
m_XrSystemId = xrSystem;
DEBUG("OnSystemChange() " + m_XrSystemId);
}
private bool m_XrSessionCreated = false;
/// <summary>
/// The XR session is created or not.
/// </summary>
public bool XrSessionCreated
{
get { return m_XrSessionCreated; }
}
private XrSession m_XrSession = 0;
protected override void OnSessionCreate(ulong xrSession)
{
m_XrSession = xrSession;
m_XrSessionCreated = true;
DEBUG("OnSessionCreate() " + m_XrSession);
}
private bool m_XrSessionEnding = false;
/// <summary>
/// The XR session is ending or not.
/// </summary>
public bool XrSessionEnding
{
get { return m_XrSessionEnding; }
}
protected override void OnSessionBegin(ulong xrSession)
{
m_XrSessionEnding = false;
DEBUG("OnSessionBegin() " + m_XrSession);
// Enable sharpening if configured in the Settings Editor.
if (OpenXRRuntime.IsExtensionEnabled("XR_HTC_composition_layer_extra_settings"))
{
ViveCompositionLayer_UpdateSystemProperties(m_XrInstance, m_XrSystemId);
supportSharpening = ViveCompositionLayer_IsSupportSharpening();
if (supportSharpening && SettingsEditorEnableSharpening)
{
EnableSharpening(SettingsEditorSharpeningMode, SettingsEditorSharpeningLevel);
}
}
}
protected override void OnSessionEnd(ulong xrSession)
{
m_XrSessionEnding = true;
DEBUG("OnSessionEnd() " + m_XrSession);
}
protected override void OnSessionDestroy(ulong xrSession)
{
m_XrSessionCreated = false;
DEBUG("OnSessionDestroy() " + xrSession);
}
#endregion
#region Wrapper Functions
private const string ExtLib = "viveopenxr";
[DllImportAttribute(ExtLib, EntryPoint = "viveCompositionLayer_UpdateSystemProperties")]
private static extern int VIVEOpenXR_ViveCompositionLayer_UpdateSystemProperties(XrInstance instance, XrSystemId system_id);
private int ViveCompositionLayer_UpdateSystemProperties(XrInstance instance, XrSystemId system_id)
{
return VIVEOpenXR_ViveCompositionLayer_UpdateSystemProperties(instance, system_id);
}
[DllImportAttribute(ExtLib, EntryPoint = "viveCompositionLayer_IsSupportSharpening")]
private static extern bool VIVEOpenXR_ViveCompositionLayer_IsSupportSharpening();
private bool ViveCompositionLayer_IsSupportSharpening()
{
return VIVEOpenXR_ViveCompositionLayer_IsSupportSharpening();
}
[DllImportAttribute(ExtLib, EntryPoint = "viveCompositionLayer_enableSharpening")]
private static extern int VIVEOpenXR_ViveCompositionLayer_enableSharpening(XrSharpeningModeHTC sharpeningMode, float sharpeningLevel);
/// <summary>
/// Enable the sharpening setting applying to the projection layer.
/// </summary>
/// <param name="sharpeningMode">The sharpening mode in <see cref="XrSharpeningModeHTC"/>.</param>
/// <param name="sharpeningLevel">The sharpening level in float [0, 1].</param>
/// <returns>True for success.</returns>
public bool EnableSharpening(XrSharpeningModeHTC sharpeningMode, float sharpeningLevel)
{
return (VIVEOpenXR_ViveCompositionLayer_enableSharpening(sharpeningMode, sharpeningLevel) == 0);
}
[DllImportAttribute(ExtLib, EntryPoint = "viveCompositionLayer_disableSharpening")]
private static extern int VIVEOpenXR_ViveCompositionLayer_DisableSharpening();
/// <summary>
/// Disable the sharpening setting on the projection layer.
/// </summary>
/// <returns>True for success</returns>
public bool DisableSharpening()
{
return (VIVEOpenXR_ViveCompositionLayer_DisableSharpening() == 0);
}
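// Usage sketch: toggle sharpening at runtime after the session has begun, assuming this feature
// is enabled in the project's OpenXR settings.
static void ExampleToggleSharpening(bool on)
{
var feature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayerExtraSettings>();
if (feature == null || !feature.supportSharpening) return;
if (on)
feature.EnableSharpening(XrSharpeningModeHTC.NORMAL, 0.5f);
else
feature.DisableSharpening();
}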
#endregion
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f26de592e4135874baf6e64cc94183be
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,4 +1,4 @@
// Copyright HTC Corporation All Rights Reserved. // Copyright HTC Corporation All Rights Reserved.
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
@@ -67,65 +67,6 @@ namespace VIVE.OpenXR.CompositionLayer
} }
} }
public struct XrCompositionLayerFlags : IEquatable<UInt64>
{
private readonly UInt64 value;
public XrCompositionLayerFlags(UInt64 u)
{
value = u;
}
public static implicit operator UInt64(XrCompositionLayerFlags xrBool)
{
return xrBool.value;
}
public static implicit operator XrCompositionLayerFlags(UInt64 u)
{
return new XrCompositionLayerFlags(u);
}
public bool Equals(XrCompositionLayerFlags other)
{
return value == other.value;
}
public bool Equals(UInt64 other)
{
return value == other;
}
public override bool Equals(object obj)
{
return obj is XrCompositionLayerFlags && Equals((XrCompositionLayerFlags)obj);
}
public override int GetHashCode()
{
return value.GetHashCode();
}
public override string ToString()
{
return value.ToString();
}
public static bool operator ==(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.Equals(b); }
public static bool operator !=(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return !a.Equals(b); }
public static bool operator >=(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value >= b.value; }
public static bool operator <=(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value <= b.value; }
public static bool operator >(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value > b.value; }
public static bool operator <(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value < b.value; }
public static XrCompositionLayerFlags operator +(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value + b.value; }
public static XrCompositionLayerFlags operator -(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value - b.value; }
public static XrCompositionLayerFlags operator *(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value * b.value; }
public static XrCompositionLayerFlags operator /(XrCompositionLayerFlags a, XrCompositionLayerFlags b)
{
if (b.value == 0)
{
throw new DivideByZeroException();
}
return a.value / b.value;
}
}
public struct XrSwapchainCreateFlags : IEquatable<UInt64> public struct XrSwapchainCreateFlags : IEquatable<UInt64>
{ {
@@ -288,6 +229,36 @@ namespace VIVE.OpenXR.CompositionLayer
public XrColor4f colorScale; public XrColor4f colorScale;
public XrColor4f colorBias; public XrColor4f colorBias;
} }
[StructLayout(LayoutKind.Sequential)]
public struct XrCompositionLayerSharpeningSettingHTC
{
public XrStructureType type;
public IntPtr next;
public XrSharpeningModeHTC mode;
public float sharpeningLevel;
}
[StructLayout(LayoutKind.Sequential)]
public struct XrCompositionLayerSuperSamplingSettingHTC
{
public XrStructureType type;
public IntPtr next;
public XrSuperSamplingModeHTC mode;
}
public enum XrSharpeningModeHTC
{
FAST = 0,
NORMAL = 1,
QUALITY = 2,
AUTOMATIC = 3,
}
public enum XrSuperSamplingModeHTC
{
FAST = 0,
NORMAL = 1,
QUALITY = 2,
AUTOMATIC = 3,
}
public enum GraphicsAPI public enum GraphicsAPI
{ {
GLES3 = 1, GLES3 = 1,
@@ -410,29 +381,6 @@ namespace VIVE.OpenXR.CompositionLayer
} }
}; };
/// <summary> /// <summary>
/// The XrCompositionLayerBaseHeader structure is not intended to be directly used, but forms a basis for defining current and future structures containing composition layer information. The XrFrameEndInfo structure contains an array of pointers to these polymorphic header structures.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrCompositionLayerBaseHeader
{
/// <summary>
/// The XrStructureType of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// Next is NULL or a pointer to the next structure in a structure chain, such as XrPassthroughMeshTransformInfoHTC.
/// </summary>
public IntPtr next;
/// <summary>
/// A bitmask of XrCompositionLayerFlagBits describing flags to apply to the layer.
/// </summary>
public XrCompositionLayerFlags layerFlags;
/// <summary>
/// The XrSpace in which the layer will be kept stable over time.
/// </summary>
public XrSpace space;
};
/// <summary>
/// The application can specify the XrPassthroughColorHTC to adjust the alpha value of the passthrough. The range is between 0.0f and 1.0f, 1.0f means opaque. /// The application can specify the XrPassthroughColorHTC to adjust the alpha value of the passthrough. The range is between 0.0f and 1.0f, 1.0f means opaque.
/// </summary> /// </summary>
[StructLayout(LayoutKind.Sequential)] [StructLayout(LayoutKind.Sequential)]

View File

@@ -18,7 +18,7 @@ using UnityEditor.XR.OpenXR.Features;
namespace VIVE.OpenXR.CompositionLayer.Passthrough namespace VIVE.OpenXR.CompositionLayer.Passthrough
{ {
#if UNITY_EDITOR #if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Composition Layer (Passthrough)", [OpenXRFeature(UiName = "VIVE XR Composition Layer (Passthrough) (Deprecated)",
Desc = "Enable this feature to use the HTC Passthrough feature.", Desc = "Enable this feature to use the HTC Passthrough feature.",
Company = "HTC", Company = "HTC",
DocumentationLink = "..\\Documentation", DocumentationLink = "..\\Documentation",
@@ -28,6 +28,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
FeatureId = featureId FeatureId = featureId
)] )]
#endif #endif
[Obsolete("This class is deprecated. Please use VivePassthrough instead.")]
public class ViveCompositionLayerPassthrough : OpenXRFeature public class ViveCompositionLayerPassthrough : OpenXRFeature
{ {
const string LOG_TAG = "VIVE.OpenXR.ViveCompositionLayerPassthrough"; const string LOG_TAG = "VIVE.OpenXR.ViveCompositionLayerPassthrough";

View File

@@ -7,6 +7,7 @@ using System.Runtime.InteropServices;
namespace VIVE.OpenXR.CompositionLayer.Passthrough namespace VIVE.OpenXR.CompositionLayer.Passthrough
{ {
[Obsolete("This enumeration is deprecated. Please use XrStructureType instead.")]
//[StructLayout(LayoutKind.Sequential)] //[StructLayout(LayoutKind.Sequential)]
public enum XrStructureTypeHTC public enum XrStructureTypeHTC
{ {
@@ -16,6 +17,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_HTC = 1000317004, XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_HTC = 1000317004,
} }
[Obsolete("This enumeration is deprecated. Please use VIVE.OpenXR.Passthrough.PassthroughLayerForm instead.")]
public enum PassthroughLayerForm public enum PassthroughLayerForm
{ {
///<summary> Fullscreen Passthrough Form</summary> ///<summary> Fullscreen Passthrough Form</summary>
@@ -24,6 +26,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
Projected = 1 Projected = 1
} }
[Obsolete("This enumeration is deprecated. Please use VIVE.OpenXR.Passthrough.ProjectedPassthroughSpaceType instead.")]
public enum ProjectedPassthroughSpaceType public enum ProjectedPassthroughSpaceType
{ {
///<summary> ///<summary>

View File

@@ -49,6 +49,13 @@ namespace VIVE.OpenXR.DisplayRefreshRate
public const string featureId = "vive.openxr.feature.displayrefreshrate"; public const string featureId = "vive.openxr.feature.displayrefreshrate";
#region OpenXR Life Cycle #region OpenXR Life Cycle
protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
{
ViveInterceptors.Instance.AddRequiredFunction("xrPollEvent");
return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
}
private XrInstance m_XrInstance = 0; private XrInstance m_XrInstance = 0;
/// <summary> /// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateInstance">xrCreateInstance</see> is done. /// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateInstance">xrCreateInstance</see> is done.

View File

@@ -0,0 +1,111 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UnityEngine;
namespace VIVE.OpenXR.DisplayRefreshRate
{
// -------------------- 12.52. XR_FB_display_refresh_rate --------------------
#region New Structures
/// <summary>
/// On platforms which support dynamically adjusting the display refresh rate, application developers may request a specific display refresh rate in order to improve the overall user experience.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrEventDataDisplayRefreshRateChangedFB
{
/// <summary>
/// The <see cref="XrStructureType"/> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <summary>
/// fromDisplayRefreshRate is the previous display refresh rate.
/// </summary>
public float fromDisplayRefreshRate;
/// <summary>
/// toDisplayRefreshRate is the new display refresh rate.
/// </summary>
public float toDisplayRefreshRate;
/// <summary>
/// The XR_FB_display_refresh_rate extension must be enabled prior to using XrEventDataDisplayRefreshRateChangedFB.
/// </summary>
public XrEventDataDisplayRefreshRateChangedFB(XrStructureType in_type, IntPtr in_next, float in_fromDisplayRefreshRate, float in_toDisplayRefreshRate)
{
type = in_type;
next = in_next;
fromDisplayRefreshRate = in_fromDisplayRefreshRate;
toDisplayRefreshRate = in_toDisplayRefreshRate;
}
/// <summary>
/// Retrieves the identity value of XrEventDataDisplayRefreshRateChangedFB.
/// </summary>
public static XrEventDataDisplayRefreshRateChangedFB identity
{
get
{
return new XrEventDataDisplayRefreshRateChangedFB(XrStructureType.XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB, IntPtr.Zero, 0.0f, 0.0f); // default values
}
}
public static bool Get(XrEventDataBuffer eventDataBuffer, out XrEventDataDisplayRefreshRateChangedFB eventDataDisplayRefreshRateChangedFB)
{
eventDataDisplayRefreshRateChangedFB = identity;
if (eventDataBuffer.type == XrStructureType.XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB)
{
eventDataDisplayRefreshRateChangedFB.next = eventDataBuffer.next;
eventDataDisplayRefreshRateChangedFB.fromDisplayRefreshRate = BitConverter.ToSingle(eventDataBuffer.varying, 0);
eventDataDisplayRefreshRateChangedFB.toDisplayRefreshRate = BitConverter.ToSingle(eventDataBuffer.varying, 4);
return true;
}
return false;
}
}
public static class ViveDisplayRefreshRateChanged
{
public delegate void OnDisplayRefreshRateChanged(float fromDisplayRefreshRate, float toDisplayRefreshRate);
public static void Listen(OnDisplayRefreshRateChanged callback)
{
if (!allEventListeners.Contains(callback))
allEventListeners.Add(callback);
}
public static void Remove(OnDisplayRefreshRateChanged callback)
{
if (allEventListeners.Contains(callback))
allEventListeners.Remove(callback);
}
public static void Send(float fromDisplayRefreshRate, float toDisplayRefreshRate)
{
int N = 0;
if (allEventListeners != null)
{
N = allEventListeners.Count;
for (int i = N - 1; i >= 0; i--)
{
OnDisplayRefreshRateChanged single = allEventListeners[i];
try
{
single(fromDisplayRefreshRate, toDisplayRefreshRate);
}
catch (Exception e)
{
Debug.Log("Event : " + e.ToString());
allEventListeners.Remove(single);
Debug.Log("Event : A listener is removed due to exception.");
}
}
}
}
private static List<OnDisplayRefreshRateChanged> allEventListeners = new List<OnDisplayRefreshRateChanged>();
}
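// Usage sketch: a MonoBehaviour (hypothetical name) that subscribes to refresh-rate change events.
public class RefreshRateChangeLogger : MonoBehaviour
{
void OnEnable() { ViveDisplayRefreshRateChanged.Listen(OnChanged); }
void OnDisable() { ViveDisplayRefreshRateChanged.Remove(OnChanged); }
void OnChanged(float from, float to) { Debug.Log("Refresh rate changed: " + from + " -> " + to); }
}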
#endregion
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 146db425ea37c2746ad7c9ae08a5a480
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 65a24dbb45287c244bce088cb4a0a8aa
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,676 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
using UnityEngine;
using System.Runtime.InteropServices;
using System;
using AOT;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
namespace VIVE.OpenXR.EyeTracker
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Eye Tracker (Beta)",
BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
Company = "HTC",
Desc = "Support the eye tracker extension.",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = kOpenxrExtensionString,
Version = "1.0.0",
FeatureId = featureId)]
#endif
public class ViveEyeTracker : OpenXRFeature
{
const string LOG_TAG = "VIVE.OpenXR.Eye.ViveEyeTracker";
void DEBUG(string msg) { Debug.Log(LOG_TAG + " " + msg); }
void WARNING(string msg) { Debug.LogWarning(LOG_TAG + " " + msg); }
void ERROR(string msg) { Debug.LogError(LOG_TAG + " " + msg); }
public const string kOpenxrExtensionString = "XR_HTC_eye_tracker";
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string featureId = "vive.openxr.feature.eye.tracker";
#region OpenXR Life Cycle
private bool m_XrInstanceCreated = false;
private XrInstance m_XrInstance = 0;
private static IntPtr xrGetInstanceProcAddr_prev;
private static IntPtr WaitFrame_prev;
private static XrFrameWaitInfo m_frameWaitInfo;
private static XrFrameState m_frameState;
protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
{
UnityEngine.Debug.Log("EXT: registering our own xrGetInstanceProcAddr");
xrGetInstanceProcAddr_prev = func;
return Marshal.GetFunctionPointerForDelegate(m_intercept_xrWaitFrame_xrGetInstanceProcAddr);
}
[MonoPInvokeCallback(typeof(OpenXRHelper.xrGetInstanceProcAddrDelegate))]
private static XrResult intercept_xrWaitFrame_xrGetInstanceProcAddr(XrInstance instance, string name, out IntPtr function)
{
if (xrGetInstanceProcAddr_prev == IntPtr.Zero) // IntPtr is a value type, so only the zero check is meaningful
{
UnityEngine.Debug.LogError("xrGetInstanceProcAddr_prev is null");
function = IntPtr.Zero;
return XrResult.XR_ERROR_VALIDATION_FAILURE;
}
// Get delegate of old xrGetInstanceProcAddr.
var xrGetProc = Marshal.GetDelegateForFunctionPointer<OpenXRHelper.xrGetInstanceProcAddrDelegate>(xrGetInstanceProcAddr_prev);
XrResult result = xrGetProc(instance, name, out function);
if (name == "xrWaitFrame")
{
WaitFrame_prev = function;
m_intercept_xrWaitFrame = intercepted_xrWaitFrame;
function = Marshal.GetFunctionPointerForDelegate(m_intercept_xrWaitFrame);
UnityEngine.Debug.Log("Getting xrWaitFrame func");
}
return result;
}
[MonoPInvokeCallback(typeof(OpenXRHelper.xrWaitFrameDelegate))]
private static int intercepted_xrWaitFrame(ulong session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState)
{
// Get delegate of prev xrWaitFrame.
var xrWaitFrame = Marshal.GetDelegateForFunctionPointer<OpenXRHelper.xrWaitFrameDelegate>(WaitFrame_prev);
int res = xrWaitFrame(session, ref frameWaitInfo, ref frameState);
m_frameWaitInfo = frameWaitInfo;
m_frameState = frameState;
return res;
}
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateInstance">xrCreateInstance</see> is done.
/// </summary>
/// <param name="xrInstance">The created instance.</param>
/// <returns>True for valid <see cref="XrInstance">XrInstance</see></returns>
protected override bool OnInstanceCreate(ulong xrInstance)
{
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
{
WARNING("OnInstanceCreate() " + kOpenxrExtensionString + " is NOT enabled.");
return false;
}
m_XrInstanceCreated = true;
m_XrInstance = xrInstance;
DEBUG("OnInstanceCreate() " + m_XrInstance);
return GetXrFunctionDelegates(m_XrInstance);
}
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroyInstance">xrDestroyInstance</see> is done.
/// </summary>
/// <param name="xrInstance">The instance to destroy.</param>
protected override void OnInstanceDestroy(ulong xrInstance)
{
m_XrInstanceCreated = false;
m_XrInstance = 0;
DEBUG("OnInstanceDestroy() " + xrInstance);
}
private XrSystemId m_XrSystemId = 0;
/// <summary>
/// Called when the <see cref="XrSystemId">XrSystemId</see> retrieved by <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystem">xrGetSystem</see> is changed.
/// </summary>
/// <param name="xrSystem">The system id.</param>
protected override void OnSystemChange(ulong xrSystem)
{
m_XrSystemId = xrSystem;
DEBUG("OnSystemChange() " + m_XrSystemId);
}
private bool m_XrSessionCreated = false;
private XrSession m_XrSession = 0;
private bool hasEyeTracker = false;
private XrEyeTrackerHTC m_EyeTracker = 0;
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateSession">xrCreateSession</see> is done.
/// </summary>
/// <param name="xrSession">The created session ID.</param>
protected override void OnSessionCreate(ulong xrSession)
{
m_XrSession = xrSession;
m_XrSessionCreated = true;
DEBUG("OnSessionCreate() " + m_XrSession);
if (CreateEyeTracker()) { DEBUG("OnSessionCreate() m_EyeTracker " + m_EyeTracker); }
}
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroySession">xrDestroySession</see> is done.
/// </summary>
/// <param name="xrSession">The session ID to destroy.</param>
protected override void OnSessionDestroy(ulong xrSession)
{
DEBUG("OnSessionDestroy() " + xrSession);
// Eye tracking is bound to the xrSession, so we destroy the tracker when the session is destroyed.
DestroyEyeTracker();
m_XrSession = 0;
m_XrSessionCreated = false;
}
#endregion
#region OpenXR function delegates
private static readonly OpenXRHelper.xrGetInstanceProcAddrDelegate m_intercept_xrWaitFrame_xrGetInstanceProcAddr
= new OpenXRHelper.xrGetInstanceProcAddrDelegate(intercept_xrWaitFrame_xrGetInstanceProcAddr);
private static OpenXRHelper.xrWaitFrameDelegate m_intercept_xrWaitFrame;
/// xrGetInstanceProcAddr
OpenXRHelper.xrGetInstanceProcAddrDelegate XrGetInstanceProcAddr;
/// xrGetSystemProperties
OpenXRHelper.xrGetSystemPropertiesDelegate xrGetSystemProperties;
private XrResult GetSystemProperties(ref XrSystemProperties properties)
{
if (!m_XrSessionCreated)
{
ERROR("GetSystemProperties() XR_ERROR_SESSION_LOST.");
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("GetSystemProperties() XR_ERROR_INSTANCE_LOST.");
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrGetSystemProperties(m_XrInstance, m_XrSystemId, ref properties);
}
/// xrDestroySpace
OpenXRHelper.xrDestroySpaceDelegate xrDestroySpace;
private XrResult DestroySpace(XrSpace space)
{
if (!m_XrSessionCreated)
{
ERROR("DestroySpace() XR_ERROR_SESSION_LOST.");
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("DestroySpace() XR_ERROR_INSTANCE_LOST.");
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrDestroySpace(space);
}
ViveEyeTrackerHelper.xrCreateEyeTrackerHTCDelegate xrCreateEyeTrackerHTC;
private XrResult CreateEyeTrackerHTC(ref XrEyeTrackerCreateInfoHTC createInfo, out XrEyeTrackerHTC eyeTracker)
{
if (!m_XrSessionCreated)
{
ERROR("CreateEyeTrackerHTC() XR_ERROR_SESSION_LOST.");
eyeTracker = 0;
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("CreateEyeTrackerHTC() XR_ERROR_INSTANCE_LOST.");
eyeTracker = 0;
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrCreateEyeTrackerHTC(m_XrSession, ref createInfo, out eyeTracker);
}
ViveEyeTrackerHelper.xrDestroyEyeTrackerHTCDelegate xrDestroyEyeTrackerHTC;
private XrResult DestroyEyeTrackerHTC(XrEyeTrackerHTC eyeTracker)
{
if (!m_XrSessionCreated)
{
ERROR("DestroyEyeTrackerHTC() XR_ERROR_SESSION_LOST.");
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("DestroyEyeTrackerHTC() XR_ERROR_INSTANCE_LOST.");
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrDestroyEyeTrackerHTC(eyeTracker);
}
ViveEyeTrackerHelper.xrGetEyeGazeDataHTCDelegate xrGetEyeGazeDataHTC;
private XrResult GetEyeGazeDataHTC(XrEyeTrackerHTC eyeTracker, ref XrEyeGazeDataInfoHTC gazeInfo, ref XrEyeGazeDataHTC eyeGazes)
{
if (!m_XrSessionCreated)
{
ERROR("GetEyeGazeDataHTC() XR_ERROR_SESSION_LOST.");
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("GetEyeGazeDataHTC() XR_ERROR_INSTANCE_LOST.");
return XrResult.XR_ERROR_INSTANCE_LOST;
}
XrResult res = xrGetEyeGazeDataHTC(eyeTracker, ref gazeInfo, ref eyeGazes);
return res;
}
ViveEyeTrackerHelper.xrGetEyePupilDataHTCDelegate xrGetEyePupilDataHTC;
private XrResult GetEyePupilDataHTC(XrEyeTrackerHTC eyeTracker, ref XrEyePupilDataInfoHTC pupilDataInfo, ref XrEyePupilDataHTC pupilData)
{
if (!m_XrSessionCreated)
{
ERROR("GetEyePupilData() XR_ERROR_SESSION_LOST.");
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("GetEyePupilData() XR_ERROR_INSTANCE_LOST.");
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrGetEyePupilDataHTC(eyeTracker, ref pupilDataInfo, ref pupilData);
}
ViveEyeTrackerHelper.xrGetEyeGeometricDataHTC xrGetEyeGeometricDataHTC;
private XrResult GetEyeGeometricDataHTC(XrEyeTrackerHTC eyeTracker,
ref XrEyeGeometricDataInfoHTC info,
ref XrEyeGeometricDataHTC eyeGeometricData)
{
if (!m_XrSessionCreated)
{
ERROR("GetEyeGeometricData() XR_ERROR_SESSION_LOST.");
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("GetEyeGeometricData() XR_ERROR_INSTANCE_LOST.");
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrGetEyeGeometricDataHTC(eyeTracker, ref info, ref eyeGeometricData);
}
private bool GetXrFunctionDelegates(XrInstance xrInstance)
{
/// xrGetInstanceProcAddr
if (xrGetInstanceProcAddr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrGetInstanceProcAddr.");
XrGetInstanceProcAddr = Marshal.GetDelegateForFunctionPointer(
xrGetInstanceProcAddr,
typeof(OpenXRHelper.xrGetInstanceProcAddrDelegate)) as OpenXRHelper.xrGetInstanceProcAddrDelegate;
}
else
{
ERROR("xrGetInstanceProcAddr is null");
return false;
}
IntPtr funcPtr = IntPtr.Zero;
/// xrGetSystemProperties
if (XrGetInstanceProcAddr(xrInstance, "xrGetSystemProperties", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrGetSystemProperties.");
xrGetSystemProperties = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(OpenXRHelper.xrGetSystemPropertiesDelegate)) as OpenXRHelper.xrGetSystemPropertiesDelegate;
}
}
else
{
ERROR("xrGetSystemProperties");
return false;
}
/// xrDestroySpace
if (XrGetInstanceProcAddr(xrInstance, "xrDestroySpace", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrDestroySpace.");
xrDestroySpace = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(OpenXRHelper.xrDestroySpaceDelegate)) as OpenXRHelper.xrDestroySpaceDelegate;
}
}
else
{
ERROR("xrDestroySpace");
return false;
}
/// xrCreateEyeTrackerHTC
if (XrGetInstanceProcAddr(xrInstance, "xrCreateEyeTrackerHTC", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrCreateEyeTrackerHTC.");
xrCreateEyeTrackerHTC = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(ViveEyeTrackerHelper.xrCreateEyeTrackerHTCDelegate)) as ViveEyeTrackerHelper.xrCreateEyeTrackerHTCDelegate;
}
}
else
{
ERROR("xrCreateEyeTrackerHTC");
return false;
}
/// xrDestroyEyeTrackerHTC
if (XrGetInstanceProcAddr(xrInstance, "xrDestroyEyeTrackerHTC", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrDestroyEyeTrackerHTC.");
xrDestroyEyeTrackerHTC = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(ViveEyeTrackerHelper.xrDestroyEyeTrackerHTCDelegate)) as ViveEyeTrackerHelper.xrDestroyEyeTrackerHTCDelegate;
}
}
else
{
ERROR("xrDestroyEyeTrackerHTC");
return false;
}
/// xrGetEyeGazeDataHTC
if (XrGetInstanceProcAddr(xrInstance, "xrGetEyeGazeDataHTC", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrGetEyeGazeDataHTC.");
xrGetEyeGazeDataHTC = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(ViveEyeTrackerHelper.xrGetEyeGazeDataHTCDelegate)) as ViveEyeTrackerHelper.xrGetEyeGazeDataHTCDelegate;
}
}
else
{
ERROR("xrGetEyeGazeDataHTC");
return false;
}
/// xrGetEyePupilDataHTC
if (XrGetInstanceProcAddr(xrInstance, "xrGetEyePupilDataHTC", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrGetEyePupilDataHTC.");
xrGetEyePupilDataHTC = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(ViveEyeTrackerHelper.xrGetEyePupilDataHTCDelegate)) as ViveEyeTrackerHelper.xrGetEyePupilDataHTCDelegate;
}
}
else
{
ERROR("xrGetEyePupilDataHTC");
return false;
}
/// xrGetEyeGeometricDataHTC
if (XrGetInstanceProcAddr(xrInstance, "xrGetEyeGeometricDataHTC", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrGetEyeGeometricDataHTC.");
xrGetEyeGeometricDataHTC = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(ViveEyeTrackerHelper.xrGetEyeGeometricDataHTC)) as ViveEyeTrackerHelper.xrGetEyeGeometricDataHTC;
}
}
else
{
ERROR("xrGetEyeGeometricDataHTC");
return false;
}
return true;
}
#endregion
XrSystemEyeTrackingPropertiesHTC eyeTrackingSystemProperties;
XrSystemProperties systemProperties;
private bool IsEyeTrackingSupported()
{
if (!m_XrSessionCreated)
{
ERROR("IsEyeTrackingSupported() session is not created.");
return false;
}
eyeTrackingSystemProperties.type = XrStructureType.XR_TYPE_SYSTEM_EYE_TRACKING_PROPERTIES_HTC;
systemProperties.type = XrStructureType.XR_TYPE_SYSTEM_PROPERTIES;
systemProperties.next = Marshal.AllocHGlobal(Marshal.SizeOf(eyeTrackingSystemProperties));
long offset = 0;
if (IntPtr.Size == 4)
offset = systemProperties.next.ToInt32();
else
offset = systemProperties.next.ToInt64();
IntPtr sys_eye_tracking_prop_ptr = new IntPtr(offset);
Marshal.StructureToPtr(eyeTrackingSystemProperties, sys_eye_tracking_prop_ptr, false);
if (GetSystemProperties(ref systemProperties) == XrResult.XR_SUCCESS)
{
if (IntPtr.Size == 4)
offset = systemProperties.next.ToInt32();
else
offset = systemProperties.next.ToInt64();
sys_eye_tracking_prop_ptr = new IntPtr(offset);
eyeTrackingSystemProperties = (XrSystemEyeTrackingPropertiesHTC)Marshal.PtrToStructure(sys_eye_tracking_prop_ptr, typeof(XrSystemEyeTrackingPropertiesHTC));
DEBUG("IsEyeTrackingSupported() XrSystemEyeTrackingPropertiesHTC.supportsEyeTracking: "
+ eyeTrackingSystemProperties.supportsEyeTracking);
return (eyeTrackingSystemProperties.supportsEyeTracking > 0);
}
else
{
ERROR("IsEyeTrackingSupported() GetSystemProperties failed.");
}
return false;
}
/// <summary>
/// An application can create an <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see> handle using CreateEyeTracker.
/// </summary>
/// <param name="createInfo">The <see cref="XrEyeTrackerCreateInfoHTC">XrEyeTrackerCreateInfoHTC</see> used to specify the eye tracker.</param>
/// <param name="eyeTracker">The returned XrEyeTrackerHTC handle.</param>
/// <returns>XR_SUCCESS for success.</returns>
public XrResult CreateEyeTracker(XrEyeTrackerCreateInfoHTC createInfo, out XrEyeTrackerHTC eyeTracker)
{
if (hasEyeTracker)
{
eyeTracker = m_EyeTracker;
DEBUG("CreateEyeTracker() m_EyeTracker: " + eyeTracker + " already created before.");
return XrResult.XR_SUCCESS;
}
if (!IsEyeTrackingSupported())
{
ERROR("CreateEyeTracker() is NOT supported.");
eyeTracker = 0;
return XrResult.XR_ERROR_VALIDATION_FAILURE;
}
var result = CreateEyeTrackerHTC(ref createInfo, out eyeTracker);
DEBUG("CreateEyeTracker() " + result + ", eyeTracker: " + eyeTracker);
if (result == XrResult.XR_SUCCESS)
{
hasEyeTracker = true;
m_EyeTracker = eyeTracker;
DEBUG("CreateEyeTracker() m_EyeTracker " + m_EyeTracker);
}
return result;
}
/// <summary>
/// An application can create an <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see> handle using CreateEyeTracker.
/// </summary>
/// <returns>True for success.</returns>
public bool CreateEyeTracker()
{
XrEyeTrackerCreateInfoHTC createInfo = new XrEyeTrackerCreateInfoHTC(
in_type: XrStructureType.XR_TYPE_EYE_TRACKER_CREATE_INFO_HTC,
in_next: IntPtr.Zero);
var result = CreateEyeTracker(createInfo, out XrEyeTrackerHTC value);
DEBUG("CreateEyeTracker() " + " tracker: " + value);
return result == XrResult.XR_SUCCESS;
}
/// <summary>
/// Releases the eye tracker and the underlying resources when the eye tracking experience is over.
/// </summary>
/// <param name="eyeTracker">An XrEyeTrackerHTC previously created by xrCreateEyeTrackerHTC.</param>
/// <returns>XR_SUCCESS for success.</returns>
public XrResult DestroyEyeTracker(XrEyeTrackerHTC eyeTracker)
{
XrResult result = DestroyEyeTrackerHTC(eyeTracker);
DEBUG("DestroyEyeTracker() " + eyeTracker + ", result: " + result);
return result;
}
/// <summary>
/// Releases the eye tracker and the underlying resources when the eye tracking experience is over.
/// </summary>
/// <returns>True for success.</returns>
public bool DestroyEyeTracker()
{
if (!hasEyeTracker)
{
DEBUG("DestroyEyeTracker() no " + "tracker.");
return true;
}
XrResult ret = XrResult.XR_ERROR_VALIDATION_FAILURE;
ret = DestroyEyeTracker(m_EyeTracker);
hasEyeTracker = false;
m_EyeTracker = 0;
return ret == XrResult.XR_SUCCESS;
}
private XrEyeGazeDataHTC m_gazes = new XrEyeGazeDataHTC();// = new XrEyeGazeDataHTC(XrStructureType.XR_TYPE_EYE_GAZE_DATA_HTC, IntPtr.Zero, 0);
/// <summary>
/// Retrieves an array of <see cref="XrSingleEyeGazeDataHTC">XrSingleEyeGazeDataHTC</see> containing the returned eye gaze directions.
/// </summary>
/// <param name="out_gazes">Output parameter to retrieve an array of <see cref="XrSingleEyeGazeDataHTC">XrSingleEyeGazeDataHTC</see>.</param>
/// <returns>True for success.</returns>
public bool GetEyeGazeData(out XrSingleEyeGazeDataHTC[] out_gazes)
{
m_gazes.type = XrStructureType.XR_TYPE_EYE_GAZE_DATA_HTC;
m_gazes.next = IntPtr.Zero;
m_gazes.time = m_frameState.predictedDisplayTime;
out_gazes = m_gazes.gaze;
XrEyeGazeDataInfoHTC gazeInfo = new XrEyeGazeDataInfoHTC(
in_type: XrStructureType.XR_TYPE_EYE_GAZE_DATA_INFO_HTC,
in_next: IntPtr.Zero,
in_baseSpace: GetCurrentAppSpace(),
in_time: m_frameState.predictedDisplayTime);
if (GetEyeGazeData(m_EyeTracker, gazeInfo, out m_gazes) == XrResult.XR_SUCCESS)
{
out_gazes = m_gazes.gaze;
return true;
}
return false;
}
/// <summary>
/// Retrieves the <see cref="XrEyeGazeDataHTC">XrEyeGazeDataHTC</see> data of a <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see>.
/// </summary>
/// <param name="eyeTracker">An <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see> previously created by <see cref="ViveEyeTrackerHelper.xrCreateEyeTrackerHTCDelegate">xrCreateEyeTrackerHTC</see>.</param>
/// <param name="gazeInfo">The information to get eye gaze.</param>
/// <param name="eyeGazes">Output parameter to retrieve a pointer to <see cref="XrEyeGazeDataHTC">XrEyeGazeDataHTC</see> receiving the returned eye poses.</param>
/// <returns>XR_SUCCESS for success.</returns>
public XrResult GetEyeGazeData(XrEyeTrackerHTC eyeTracker, XrEyeGazeDataInfoHTC gazeInfo, out XrEyeGazeDataHTC eyeGazes)
{
m_gazes.type = XrStructureType.XR_TYPE_EYE_GAZE_DATA_HTC;
m_gazes.next = IntPtr.Zero;
m_gazes.time = m_frameState.predictedDisplayTime;
eyeGazes = m_gazes;
XrResult result = XrResult.XR_ERROR_VALIDATION_FAILURE;
result = GetEyeGazeDataHTC(eyeTracker,ref gazeInfo,ref m_gazes);
if (result == XrResult.XR_SUCCESS) { eyeGazes = m_gazes; }
return result;
}
private XrEyePupilDataHTC m_eyePupilData = new XrEyePupilDataHTC();
/// <summary>
/// Retrieves an array of <see cref="XrSingleEyePupilDataHTC">XrSingleEyePupilDataHTC</see> containing the returned data for user's pupils.
/// </summary>
/// <param name="pupilData">Output parameter to retrieve an array of <see cref="XrSingleEyePupilDataHTC">XrSingleEyePupilDataHTC</see>.</param>
/// <returns>True for success.</returns>
public bool GetEyePupilData(out XrSingleEyePupilDataHTC[] pupilData)
{
m_eyePupilData.type = XrStructureType.XR_TYPE_EYE_PUPIL_DATA_HTC;
m_eyePupilData.next = IntPtr.Zero;
m_eyePupilData.time = m_frameState.predictedDisplayTime;
pupilData = m_eyePupilData.pupilData;
XrEyePupilDataInfoHTC pupilDataInfo = new XrEyePupilDataInfoHTC(
in_type: XrStructureType.XR_TYPE_EYE_PUPIL_DATA_INFO_HTC,
in_next: IntPtr.Zero);
if (GetEyePupilData(m_EyeTracker, pupilDataInfo, out m_eyePupilData) == XrResult.XR_SUCCESS)
{
pupilData = m_eyePupilData.pupilData;
return true;
}
return false;
}
/// <summary>
/// Retrieves the <see cref="XrEyePupilDataHTC">XrEyePupilDataHTC</see> data of a <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see>.
/// </summary>
/// <param name="eyeTracker">An <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see> previously created by <see cref="ViveEyeTrackerHelper.xrCreateEyeTrackerHTCDelegate">xrCreateEyeTrackerHTC</see>.</param>
/// <param name="pupilDataInfo">The information to get pupil data.</param>
/// <param name="pupilData">A pointer to <see cref="XrEyePupilDataHTC">XrEyePupilDataHTC</see> returned by the runtime.</param>
/// <returns>XR_SUCCESS for success.</returns>
public XrResult GetEyePupilData(XrEyeTrackerHTC eyeTracker, XrEyePupilDataInfoHTC pupilDataInfo, out XrEyePupilDataHTC pupilData)
{
m_eyePupilData.type = XrStructureType.XR_TYPE_EYE_PUPIL_DATA_HTC;
m_eyePupilData.next = IntPtr.Zero;
m_eyePupilData.time = m_frameState.predictedDisplayTime;
pupilData = m_eyePupilData;
XrResult result = XrResult.XR_ERROR_VALIDATION_FAILURE;
result = GetEyePupilDataHTC(eyeTracker,ref pupilDataInfo, ref m_eyePupilData);
if (result == XrResult.XR_SUCCESS) { pupilData = m_eyePupilData; }
return result;
}
private XrEyeGeometricDataHTC m_eyeGeometricData = new XrEyeGeometricDataHTC();//XrStructureType.XR_TYPE_EYE_GEOMETRIC_DATA_HTC, IntPtr.Zero, 0);
/// <param name="geometricData">Output parameter to retrieve an array of <see cref="XrSingleEyeGeometricDataHTC">XrSingleEyeGeometricDataHTC</see>.</param>
/// <returns>XR_SUCCESS for success.</returns>
public bool GetEyeGeometricData(out XrSingleEyeGeometricDataHTC[] geometricData)
{
m_eyeGeometricData.type = XrStructureType.XR_TYPE_EYE_GEOMETRIC_DATA_HTC;
m_eyeGeometricData.next = IntPtr.Zero;
m_eyeGeometricData.time = m_frameState.predictedDisplayTime;
geometricData = m_eyeGeometricData.eyeGeometricData;
XrEyeGeometricDataInfoHTC eyeGeometricDataInfo = new XrEyeGeometricDataInfoHTC(
in_type: XrStructureType.XR_TYPE_EYE_GEOMETRIC_DATA_INFO_HTC,
in_next: IntPtr.Zero);
if (GetEyeGeometricData(m_EyeTracker, eyeGeometricDataInfo, out m_eyeGeometricData) == XrResult.XR_SUCCESS)
{
geometricData = m_eyeGeometricData.eyeGeometricData;
return true;
}
return false;
}
/// <param name="eyeTracker">An <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see> previously created by <see cref="ViveEyeTrackerHelper.xrCreateEyeTrackerHTCDelegate">xrCreateEyeTrackerHTC</see>.</param>
/// <param name="eyeGeometricDataInfo">A pointer to <see cref="XrEyeGeometricDataInfoHTC">XrEyeGeometricDataInfoHTC</see> structure.</param>
/// <param name="eyeGeometricData">A pointer to <see cref="XrEyeGeometricDataHTC">XrEyeGeometricDataHTC</see> returned by the runtime.</param>
/// <returns>XR_SUCCESS for success.</returns>
public XrResult GetEyeGeometricData(XrEyeTrackerHTC eyeTracker, XrEyeGeometricDataInfoHTC eyeGeometricDataInfo, out XrEyeGeometricDataHTC eyeGeometricData)
{
m_eyeGeometricData.type = XrStructureType.XR_TYPE_EYE_GEOMETRIC_DATA_HTC;
m_eyeGeometricData.next = IntPtr.Zero;
m_eyeGeometricData.time = m_frameState.predictedDisplayTime;
eyeGeometricData = m_eyeGeometricData;
XrResult result = XrResult.XR_ERROR_VALIDATION_FAILURE;
result = GetEyeGeometricDataHTC(eyeTracker,ref eyeGeometricDataInfo, ref m_eyeGeometricData);
if (result == XrResult.XR_SUCCESS) { eyeGeometricData = m_eyeGeometricData; }
return result;
}
}
}
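For reference, here is a minimal usage sketch of the wrapper methods above. It assumes the enclosing feature class is named ViveEyeTracker and derives from OpenXRFeature so it can be resolved through OpenXRSettings; the class name is an assumption, since the class declaration sits outside this excerpt.

// Minimal usage sketch (class name ViveEyeTracker is assumed, not confirmed by this excerpt).
using UnityEngine;
using UnityEngine.XR.OpenXR;
using VIVE.OpenXR.EyeTracker;

public class EyeGazeSample : MonoBehaviour
{
    ViveEyeTracker feature; // hypothetical name of the wrapper class shown above

    void Start()
    {
        feature = OpenXRSettings.Instance.GetFeature<ViveEyeTracker>();
        if (feature != null) { feature.CreateEyeTracker(); }
    }

    void Update()
    {
        if (feature == null) { return; }
        if (feature.GetEyeGazeData(out XrSingleEyeGazeDataHTC[] gazes))
        {
            var left = gazes[(int)XrEyePositionHTC.XR_EYE_POSITION_LEFT_HTC];
            if (left.isValid > 0) // check validity before using the pose, as the docs above require
            {
                var pose = left.gazePose; // expressed in the base space used by GetEyeGazeData (the current app space)
            }
        }
    }

    void OnDestroy()
    {
        if (feature != null) { feature.DestroyEyeTracker(); }
    }
}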

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 9eca1674b64bae840af3a53d3ae576ec
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,426 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Runtime.InteropServices;
namespace VIVE.OpenXR.EyeTracker
{
/// <summary>
/// The XrEyeTrackerHTC handle represents the resources for eye tracking.
/// </summary>
public struct XrEyeTrackerHTC : IEquatable<UInt64>
{
private readonly UInt64 value;
public XrEyeTrackerHTC(UInt64 u)
{
value = u;
}
public static implicit operator UInt64(XrEyeTrackerHTC equatable)
{
return equatable.value;
}
public static implicit operator XrEyeTrackerHTC(UInt64 u)
{
return new XrEyeTrackerHTC(u);
}
public bool Equals(XrEyeTrackerHTC other)
{
return value == other.value;
}
public bool Equals(UInt64 other)
{
return value == other;
}
public override bool Equals(object obj)
{
return obj is XrEyeTrackerHTC && Equals((XrEyeTrackerHTC)obj);
}
public override int GetHashCode()
{
return value.GetHashCode();
}
public override string ToString()
{
return value.ToString();
}
public static bool operator ==(XrEyeTrackerHTC a, XrEyeTrackerHTC b) { return a.Equals(b); }
public static bool operator !=(XrEyeTrackerHTC a, XrEyeTrackerHTC b) { return !a.Equals(b); }
public static bool operator >=(XrEyeTrackerHTC a, XrEyeTrackerHTC b) { return a.value >= b.value; }
public static bool operator <=(XrEyeTrackerHTC a, XrEyeTrackerHTC b) { return a.value <= b.value; }
public static bool operator >(XrEyeTrackerHTC a, XrEyeTrackerHTC b) { return a.value > b.value; }
public static bool operator <(XrEyeTrackerHTC a, XrEyeTrackerHTC b) { return a.value < b.value; }
public static XrEyeTrackerHTC operator +(XrEyeTrackerHTC a, XrEyeTrackerHTC b) { return a.value + b.value; }
public static XrEyeTrackerHTC operator -(XrEyeTrackerHTC a, XrEyeTrackerHTC b) { return a.value - b.value; }
public static XrEyeTrackerHTC operator *(XrEyeTrackerHTC a, XrEyeTrackerHTC b) { return a.value * b.value; }
public static XrEyeTrackerHTC operator /(XrEyeTrackerHTC a, XrEyeTrackerHTC b)
{
if (b.value == 0)
{
throw new DivideByZeroException();
}
return a.value / b.value;
}
}
/// <summary>
/// The XrEyePositionHTC describes which eye is under tracking for the data retrieved from <see cref="XrEyeGazeDataHTC">XrEyeGazeDataHTC</see>, <see cref="XrEyePupilDataHTC">XrEyePupilDataHTC</see> or <see cref="XrEyeGeometricDataHTC">XrEyeGeometricDataHTC</see>.
/// </summary>
public enum XrEyePositionHTC
{
/// <summary>
/// Specifies the position of the left eye.
/// </summary>
XR_EYE_POSITION_LEFT_HTC = 0,
/// <summary>
/// Specifies the position of the right eye.
/// </summary>
XR_EYE_POSITION_RIGHT_HTC = 1,
XR_EYE_POSITION_COUNT_HTC = 2
};
/// <summary>
/// An application can inspect whether the system is capable of eye tracking input by extending the <see cref="XrSystemProperties">XrSystemProperties</see> with <see cref="XrSystemEyeTrackingPropertiesHTC">XrSystemEyeTrackingPropertiesHTC</see> structure when calling <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystemProperties">xrGetSystemProperties</see>.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrSystemEyeTrackingPropertiesHTC
{
/// <summary>
/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <summary>
/// Indicating if the current system is capable of receiving eye tracking input.
/// </summary>
public XrBool32 supportsEyeTracking;
};
/// <summary>
/// The XrEyeTrackerCreateInfoHTC structure describes the information to create an <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see> handle.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrEyeTrackerCreateInfoHTC
{
/// <summary>
/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <param name="in_type">The <see cref="XrStructureType">XrStructureType</see> of this structure.</param>
/// <param name="in_next">NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.</param>
public XrEyeTrackerCreateInfoHTC(XrStructureType in_type, IntPtr in_next)
{
type = in_type;
next = in_next;
}
};
/// <summary>
/// The XrEyeGazeDataInfoHTC structure describes the information to get eye gaze directions.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrEyeGazeDataInfoHTC
{
/// <summary>
/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <summary>
/// An <see cref="XrSpace">XrSpace</see> within which the returned eye poses will be represented.
/// </summary>
public XrSpace baseSpace;
/// <summary>
/// An <see cref="XrTime">XrTime</see> at which the eye gaze information is requested.
/// </summary>
public XrTime time;
/// <param name="in_type">The <see cref="XrStructureType">XrStructureType</see> of this structure.</param>
/// <param name="in_next">NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.</param>
/// <param name="in_baseSpace">An <see cref="XrSpace">XrSpace</see> within which the returned eye poses will be represented.</param>
/// <param name="in_time">An <see cref="XrTime">XrTime</see> at which the eye gaze information is requested.</param>
public XrEyeGazeDataInfoHTC(XrStructureType in_type, IntPtr in_next, XrSpace in_baseSpace, XrTime in_time)
{
type = in_type;
next = in_next;
baseSpace = in_baseSpace;
time = in_time;
}
};
/// <summary>
/// The XrSingleEyeGazeDataHTC structure describes the validity and direction of an eye gaze observation.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrSingleEyeGazeDataHTC
{
/// <summary>
/// An <see cref="XrBool32">XrBool32</see> indicating if the returned gazePose is valid. Callers should check the validity of pose prior to use.
/// </summary>
public XrBool32 isValid;
/// <summary>
/// An <see cref="XrPosef">XrPosef</see> describing the position and orientation of the user's eye. The pose is represented in the coordinate system provided by <see cref="XrEyeGazeDataInfoHTC">XrEyeGazeDataInfoHTC</see>::<see cref="XrEyeGazeDataInfoHTC.baseSpace">baseSpace</see>.
/// </summary>
public XrPosef gazePose;
/// <param name="in_isValid">An <see cref="XrBool32">XrBool32</see> indicating if the returned gazePose is valid. Callers should check the validity of pose prior to use.</param>
/// <param name="in_gazePose">An <see cref="XrPosef">XrPosef</see> describing the position and orientation of the user's eye. The pose is represented in the coordinate system provided by <see cref="XrEyeGazeDataInfoHTC">XrEyeGazeDataInfoHTC</see>::<see cref="XrEyeGazeDataInfoHTC.baseSpace">baseSpace</see>.</param>
public XrSingleEyeGazeDataHTC(XrBool32 in_isValid, XrPosef in_gazePose)
{
isValid = in_isValid;
gazePose = in_gazePose;
}
};
/// <summary>
/// The XrEyeGazeDataHTC structure returns the state of the eye gaze directions.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrEyeGazeDataHTC
{
/// <summary>
/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <summary>
/// An <see cref="XrTime">XrTime</see> at which the eye gaze information is requested.
/// </summary>
public XrTime time;
/// <summary>
/// An array of <see cref="XrSingleEyeGazeDataHTC">XrSingleEyeGazeDataHTC</see> receiving the returned eye gaze directions.
/// </summary>
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
public XrSingleEyeGazeDataHTC[] gaze;
};
/// <summary>
/// The XrEyePupilDataInfoHTC structure describes the information to get pupil data.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrEyePupilDataInfoHTC
{
/// <summary>
/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <param name="in_type">The <see cref="XrStructureType">XrStructureType</see> of this structure.</param>
/// <param name="in_next">NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.</param>
public XrEyePupilDataInfoHTC(XrStructureType in_type, IntPtr in_next)
{
type = in_type;
next = in_next;
}
};
/// <summary>
/// The XrSingleEyePupilDataHTC structure describes the validity, diameter and position of a pupil observation.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrSingleEyePupilDataHTC
{
/// <summary>
/// An <see cref="XrBool32">XrBool32</see> indicating if the returned pupilDiameter is valid. Callers should check the validity of diameter prior to use.
/// </summary>
public XrBool32 isDiameterValid;
/// <summary>
/// An <see cref="XrBool32">XrBool32</see> indicating if the returned pupilPosition is valid. Callers should check the validity of position prior to use.
/// </summary>
public XrBool32 isPositionValid;
/// <summary>
/// The diameter of pupil in millimeters.
/// </summary>
public float pupilDiameter;
/// <summary>
/// The position of the pupil in the sensor area, where x and y are normalized in [0,1] with +Y up and +X to the right.
/// </summary>
public XrVector2f pupilPosition;
/// <param name="in_isDiameterValid">An <see cref="XrBool32">XrBool32</see> indicating if the returned gazePose is valid. Callers should check the validity of pose prior to use.</param>
/// <param name="in_isPositionValid">An <see cref="XrBool32">XrBool32</see> indicating if the returned pupilPosition is valid. Callers should check the validity of position prior to use.</param>
/// <param name="in_pupilDiameter">The diameter of pupil in millimeters.</param>
/// <param name="in_pupilPosition">The position of pupil in sensor area which x and y are normalized in [0,1]with +Y up and +X to the right.</param>
public XrSingleEyePupilDataHTC(XrBool32 in_isDiameterValid, XrBool32 in_isPositionValid, float in_pupilDiameter, XrVector2f in_pupilPosition)
{
isDiameterValid = in_isDiameterValid;
isPositionValid = in_isPositionValid;
pupilDiameter = in_pupilDiameter;
pupilPosition = in_pupilPosition;
}
};
/// <summary>
/// The XrEyePupilDataHTC structure returns the pupil data.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrEyePupilDataHTC
{
/// <summary>
/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <summary>
/// An <see cref="XrTime">XrTime</see> at which the pupil data was captured.
/// </summary>
public XrTime time;
/// <summary>
/// An array of <see cref="XrSingleEyePupilDataHTC">XrSingleEyePupilDataHTC</see> receiving the returned pupil data.
/// </summary>
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
public XrSingleEyePupilDataHTC[] pupilData;
};
/// <summary>
/// The XrEyeGeometricDataInfoHTC structure describes the information to get geometric related data.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrEyeGeometricDataInfoHTC
{
/// <summary>
/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <param name="in_type">The <see cref="XrStructureType">XrStructureType</see> of this structure.</param>
/// <param name="in_next">NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.</param>
public XrEyeGeometricDataInfoHTC(XrStructureType in_type, IntPtr in_next)
{
type = in_type;
next = in_next;
}
};
/// <summary>
/// The XrSingleEyeGeometricDataHTC structure describes the geometric related data.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrSingleEyeGeometricDataHTC
{
/// <summary>
/// A flag that indicates if the geometric data is valid. Callers should check the validity of the geometric data prior to use.
/// </summary>
public XrBool32 isValid;
/// <summary>
/// A value in range [0,1] representing the openness of the user's eye. When this value is zero, the eye closes normally. When this value is one, the eye opens normally. The higher the value, the more open the eye.
/// </summary>
public float eyeOpenness;
/// <summary>
/// A value in range [0,1] representing how widely the user's eye opens. When this value is zero, the eye opens normally. The higher the value, the wider the eye opens.
/// </summary>
public float eyeWide;
/// <summary>
/// A value in range [0,1] representing how tightly the user's eye closes. When this value is zero, the eye closes normally. The higher the value, the tighter the eye closes.
/// </summary>
public float eyeSqueeze;
/// <param name="in_isValid">A flag that indicates if the geometric data is valid. Callers should check the validity of the geometric data prior to use.</param>
/// <param name="in_eyeOpenness">A value in range [0,1] representing the openness of the user's eye. When this value is zero, the eye closes normally. When this value is one, the eye opens normally. The higher the value, the more open the eye.</param>
/// <param name="in_eyeWide">A value in range [0,1] representing how widely the user's eye opens. When this value is zero, the eye opens normally. The higher the value, the wider the eye opens.</param>
/// <param name="in_eyeSqueeze">A value in range [0,1] representing how tightly the user's eye closes. When this value is zero, the eye closes normally. The higher the value, the tighter the eye closes.</param>
public XrSingleEyeGeometricDataHTC(XrBool32 in_isValid, float in_eyeOpenness, float in_eyeWide, float in_eyeSqueeze)
{
isValid = in_isValid;
eyeOpenness = in_eyeOpenness;
eyeWide = in_eyeWide;
eyeSqueeze = in_eyeSqueeze;
}
};
[StructLayout(LayoutKind.Sequential)]
public struct XrEyeGeometricDataHTC
{
/// <summary>
/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <summary>
/// An <see cref="XrTime">XrTime</see> at which the returned eye data is tracked.
/// </summary>
public XrTime time;
/// <summary>
/// An array of <see cref="XrSingleEyeGeometricDataHTC">XrSingleEyeGeometricDataHTC</see> receiving the returned eye geometric data.
/// </summary>
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
public XrSingleEyeGeometricDataHTC[] eyeGeometricData;
};
public static class ViveEyeTrackerHelper
{
/// <param name="session">An XrSession in which the eye tracker will be active.</param>
/// <param name="createInfo">The <see cref="XrEyeTrackerCreateInfoHTC">XrEyeTrackerCreateInfoHTC</see> used to specify the eye tracker.</param>
/// <param name="eyeTracker">The returned <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see> handle.</param>
/// <returns>XR_SUCCESS for success.</returns>
public delegate XrResult xrCreateEyeTrackerHTCDelegate(
XrSession session,
ref XrEyeTrackerCreateInfoHTC createInfo,
out XrEyeTrackerHTC eyeTracker);
/// <param name="eyeTracker">An XrEyeTrackerHTC previously created by xrCreateEyeTrackerHTC.</param>
/// <returns>XR_SUCCESS for success.</returns>
public delegate XrResult xrDestroyEyeTrackerHTCDelegate(
XrEyeTrackerHTC eyeTracker);
/// <summary>
/// Retrieves the <see cref="XrEyeGazeDataHTC">XrEyeGazeDataHTC</see> data of a <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see>.
/// </summary>
/// <param name="eyeTracker">An <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see> previously created by <see cref="xrCreateEyeTrackerHTCDelegate">xrCreateEyeTrackerHTC</see>.</param>
/// <param name="gazeInfo">The information to get eye gaze.</param>
/// <param name="eyeGazes">A pointer to <see cref="XrEyeGazeDataHTC">XrEyeGazeDataHTC</see> receiving the returned eye poses.</param>
/// <returns>XR_SUCCESS for success.</returns>
public delegate XrResult xrGetEyeGazeDataHTCDelegate(
XrEyeTrackerHTC eyeTracker,
ref XrEyeGazeDataInfoHTC gazeInfo,
ref XrEyeGazeDataHTC eyeGazes);
/// <param name="eyeTracker">An <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see> previously created by <see cref="xrCreateEyeTrackerHTCDelegate">xrCreateEyeTrackerHTC</see>.</param>
/// <param name="pupilDataInfo">The information to get pupil data.</param>
/// <param name="pupilData">A pointer to <see cref="XrEyePupilDataHTC">XrEyePupilDataHTC</see> returned by the runtime.</param>
/// <returns>XR_SUCCESS for success.</returns>
public delegate XrResult xrGetEyePupilDataHTCDelegate(
XrEyeTrackerHTC eyeTracker,
ref XrEyePupilDataInfoHTC pupilDataInfo,
ref XrEyePupilDataHTC pupilData);
/// <param name="eyeTracker">An <see cref="XrEyeTrackerHTC">XrEyeTrackerHTC</see> previously created by <see cref="xrCreateEyeTrackerHTCDelegate">xrCreateEyeTrackerHTC</see>.</param>
/// <param name="info">A pointer to <see cref="XrEyeGeometricDataInfoHTC">XrEyeGeometricDataInfoHTC</see> structure.</param>
/// <param name="eyeGeometricData">A pointer to <see cref="XrEyeGeometricDataHTC">XrEyeGeometricDataHTC</see> returned by the runtime.</param>
/// <returns>XR_SUCCESS for success.</returns>
public delegate XrResult xrGetEyeGeometricDataHTC(
XrEyeTrackerHTC eyeTracker,
ref XrEyeGeometricDataInfoHTC info,
ref XrEyeGeometricDataHTC eyeGeometricData);
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e9a5198e29bcef243bf89dc19b46ce0d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -51,10 +51,10 @@ Through feeding the blend shape values of lip expression to an avatar, its facia
XR_LIP_EXPRESSION_MOUTH_UPPER_OVERTURN_HTC = 9, XR_LIP_EXPRESSION_MOUTH_UPPER_OVERTURN_HTC = 9,
XR_LIP_EXPRESSION_MOUTH_LOWER_OVERTURN_HTC = 10, XR_LIP_EXPRESSION_MOUTH_LOWER_OVERTURN_HTC = 10,
XR_LIP_EXPRESSION_MOUTH_POUT_HTC = 11, XR_LIP_EXPRESSION_MOUTH_POUT_HTC = 11,
XR_LIP_EXPRESSION_MOUTH_SMILE_RIGHT_HTC = 12, XR_LIP_EXPRESSION_MOUTH_RAISER_RIGHT_HTC = 12,
XR_LIP_EXPRESSION_MOUTH_SMILE_LEFT_HTC = 13, XR_LIP_EXPRESSION_MOUTH_RAISER_LEFT_HTC = 13,
XR_LIP_EXPRESSION_MOUTH_SAD_RIGHT_HTC = 14, XR_LIP_EXPRESSION_MOUTH_STRETCHER_RIGHT_HTC = 14,
XR_LIP_EXPRESSION_MOUTH_SAD_LEFT_HTC = 15, XR_LIP_EXPRESSION_MOUTH_STRETCHER_LEFT_HTC = 15,
XR_LIP_EXPRESSION_CHEEK_PUFF_RIGHT_HTC = 16, XR_LIP_EXPRESSION_CHEEK_PUFF_RIGHT_HTC = 16,
XR_LIP_EXPRESSION_CHEEK_PUFF_LEFT_HTC = 17, XR_LIP_EXPRESSION_CHEEK_PUFF_LEFT_HTC = 17,
XR_LIP_EXPRESSION_CHEEK_SUCK_HTC = 18, XR_LIP_EXPRESSION_CHEEK_SUCK_HTC = 18,

View File

@@ -67,9 +67,12 @@ namespace VIVE.OpenXR.FacialTracking
/// </summary> /// </summary>
/// <param name="xrInstance">The instance to destroy.</param> /// <param name="xrInstance">The instance to destroy.</param>
protected override void OnInstanceDestroy(ulong xrInstance) protected override void OnInstanceDestroy(ulong xrInstance)
{
if (m_XrInstance == xrInstance)
{ {
m_XrInstanceCreated = false; m_XrInstanceCreated = false;
m_XrInstance = 0; m_XrInstance = 0;
}
DEBUG("OnInstanceDestroy() " + xrInstance); DEBUG("OnInstanceDestroy() " + xrInstance);
} }

View File

@@ -201,19 +201,19 @@ namespace VIVE.OpenXR.FacialTracking
/// <summary> /// <summary>
/// This blend shape raises the right side of the mouth further with a higher value. /// This blend shape raises the right side of the mouth further with a higher value.
/// </summary> /// </summary>
XR_LIP_EXPRESSION_MOUTH_SMILE_RIGHT_HTC = 12, XR_LIP_EXPRESSION_MOUTH_RAISER_RIGHT_HTC = 12,
/// <summary> /// <summary>
/// This blend shape raises the left side of the mouth further with a higher value. /// This blend shape raises the left side of the mouth further with a higher value.
/// </summary> /// </summary>
XR_LIP_EXPRESSION_MOUTH_SMILE_LEFT_HTC = 13, XR_LIP_EXPRESSION_MOUTH_RAISER_LEFT_HTC = 13,
/// <summary> /// <summary>
/// This blend shape lowers the right side of the mouth further with a higher value. /// This blend shape lowers the right side of the mouth further with a higher value.
/// </summary> /// </summary>
XR_LIP_EXPRESSION_MOUTH_SAD_RIGHT_HTC = 14, XR_LIP_EXPRESSION_MOUTH_STRETCHER_RIGHT_HTC = 14,
/// <summary> /// <summary>
/// This blend shape lowers the left side of the mouth further with a higher value. /// This blend shape lowers the left side of the mouth further with a higher value.
/// </summary> /// </summary>
XR_LIP_EXPRESSION_MOUTH_SAD_LEFT_HTC = 15, XR_LIP_EXPRESSION_MOUTH_STRETCHER_LEFT_HTC = 15,
/// <summary> /// <summary>
/// This blend shape puffs up the right side of the cheek further with a higher value. /// This blend shape puffs up the right side of the cheek further with a higher value.
/// </summary> /// </summary>
@@ -433,7 +433,7 @@ namespace VIVE.OpenXR.FacialTracking
/// </summary> /// </summary>
/// <param name="facialTracker">An <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> previously created by <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateFacialTrackerHTC">xrCreateFacialTrackerHTC</see>.</param> /// <param name="facialTracker">An <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> previously created by <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateFacialTrackerHTC">xrCreateFacialTrackerHTC</see>.</param>
/// <param name="facialExpressions">A pointer to <see cref="XrFacialExpressionsHTC">XrFacialExpressionsHTC</see> receiving the returned facial expressions.</param> /// <param name="facialExpressions">A pointer to <see cref="XrFacialExpressionsHTC">XrFacialExpressionsHTC</see> receiving the returned facial expressions.</param>
/// <returns></returns> /// <returns>XR_SUCCESS for success.</returns>
public delegate XrResult xrGetFacialExpressionsHTCDelegate( public delegate XrResult xrGetFacialExpressionsHTCDelegate(
XrFacialTrackerHTC facialTracker, XrFacialTrackerHTC facialTracker,
ref XrFacialExpressionsHTC facialExpressions); ref XrFacialExpressionsHTC facialExpressions);

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 6368702137725614d8d921ef6c1220f1
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: e80a989be51974a4e88bdc41872d53c9
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,185 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Runtime.InteropServices;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
using VIVE.OpenXR.SecondaryViewConfiguration;
namespace VIVE.OpenXR.FirstPersonObserver
{
/// <summary>
/// Name: FirstPersonObserver.cs
/// Role: OpenXR FirstPersonObserver Extension Class
/// Responsibility: The OpenXR extension implementation and its lifecycles logic in OpenXR
/// </summary>
#if UNITY_EDITOR
[OpenXRFeature(UiName = "XR MSFT First Person Observer",
BuildTargetGroups = new[] { BuildTargetGroup.Android },
Company = "HTC",
Desc = "Request the application to render an additional first-person view of the scene.",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = OPEN_XR_EXTENSION_STRING,
Version = "1.0.0",
FeatureId = FeatureId,
Hidden = true)]
#endif
public class ViveFirstPersonObserver : OpenXRFeature
{
private static ViveFirstPersonObserver _instance;
/// <summary>
/// ViveFirstPersonObserver static instance (Singleton).
/// </summary>
public static ViveFirstPersonObserver Instance
{
get
{
if (_instance == null)
{
_instance =
OpenXRSettings.Instance.GetFeature<ViveFirstPersonObserver>();
}
return _instance;
}
}
/// <summary>
/// The log identification.
/// </summary>
private const string LogTag = "VIVE.OpenXR.FirstPersonObserver";
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string FeatureId = "vive.openxr.feature.firstpersonobserver";
/// <summary>
/// OpenXR specification <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_MSFT_first_person_observer">12.114. XR_MSFT_first_person_observer</see>.
/// </summary>
public const string OPEN_XR_EXTENSION_STRING = "XR_MSFT_first_person_observer";
/// <summary>
/// The flag represents whether the OpenXR loader created an instance or not.
/// </summary>
private bool XrInstanceCreated { get; set; } = false;
/// <summary>
/// The instance created through xrCreateInstance.
/// </summary>
private XrInstance XrInstance { get; set; } = 0;
/// <summary>
/// The function delegate declaration of xrGetInstanceProcAddr.
/// </summary>
private OpenXRHelper.xrGetInstanceProcAddrDelegate XrGetInstanceProcAddr { get; set; }
#region OpenXR life-cycle events
/// <summary>
/// Called after xrCreateInstance.
/// </summary>
/// <param name="xrInstance">Handle of the xrInstance.</param>
/// <returns>Returns true if successful. Returns false otherwise.</returns>
protected override bool OnInstanceCreate(ulong xrInstance)
{
if (!IsExtensionEnabled())
{
Warning($"OnInstanceCreate() {OPEN_XR_EXTENSION_STRING} or " +
$"{ViveSecondaryViewConfiguration.OPEN_XR_EXTENSION_STRING} is NOT enabled.");
return false;
}
XrInstanceCreated = true;
XrInstance = xrInstance;
Debug("OnInstanceCreate() " + XrInstance);
if (!GetXrFunctionDelegates(XrInstance))
{
Error("Get function pointer of OpenXRFunctionPointerAccessor failed.");
return false;
}
Debug("Get function pointer of OpenXRFunctionPointerAccessor succeed.");
return base.OnInstanceCreate(xrInstance);
}
#endregion
/// <summary>
/// Get the OpenXR function via XrInstance.
/// </summary>
/// <param name="xrInstance">The XrInstance is provided by the Unity OpenXR Plugin.</param>
/// <returns>Return true if get successfully. False otherwise.</returns>
private bool GetXrFunctionDelegates(XrInstance xrInstance)
{
if (xrGetInstanceProcAddr != IntPtr.Zero)
{
Debug("Get function pointer of openXRFunctionPointerAccessor.");
XrGetInstanceProcAddr = Marshal.GetDelegateForFunctionPointer(xrGetInstanceProcAddr,
typeof(OpenXRHelper.xrGetInstanceProcAddrDelegate)) as OpenXRHelper.xrGetInstanceProcAddrDelegate;
if (XrGetInstanceProcAddr == null)
{
Error(
"Get function pointer of openXRFunctionPointerAccessor failed due to the XrGetInstanceProcAddr is null.");
return false;
}
}
else
{
Error(
"Get function pointer of openXRFunctionPointerAccessor failed due to the xrGetInstanceProcAddr is null.");
return false;
}
return true;
}
#region Utilities functions
/// <summary>
/// Check ViveFirstPersonObserver extension is enabled or not.
/// </summary>
/// <returns>Return true if enabled. False otherwise.</returns>
public static bool IsExtensionEnabled()
{
return OpenXRRuntime.IsExtensionEnabled(OPEN_XR_EXTENSION_STRING) &&
ViveSecondaryViewConfiguration.IsExtensionEnabled();
}
/// <summary>
/// Print log with tag "VIVE.OpenXR.FirstPersonObserver".
/// </summary>
/// <param name="msg">The log you want to print.</param>
private static void Debug(string msg)
{
UnityEngine.Debug.Log(LogTag + " " + msg);
}
/// <summary>
/// Print warning message with tag "VIVE.OpenXR.FirstPersonObserver".
/// </summary>
/// <param name="msg">The warning message you want to print.</param>
private static void Warning(string msg)
{
UnityEngine.Debug.LogWarning(LogTag + " " + msg);
}
/// <summary>
/// Print an error message with the tag "VIVE.OpenXR.FirstPersonObserver".
/// </summary>
/// <param name="msg">The error message you want to print.</param>
private static void Error(string msg)
{
UnityEngine.Debug.LogError(LogTag + " " + msg);
}
#endregion
}
}
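A short sketch of how an application script might query this feature at runtime; it only exercises the public members defined above (IsExtensionEnabled and Instance).

using UnityEngine;
using VIVE.OpenXR.FirstPersonObserver;

public class FirstPersonObserverCheck : MonoBehaviour
{
    void Start()
    {
        // True only when both XR_MSFT_first_person_observer and the
        // secondary view configuration extension are enabled by the runtime.
        if (ViveFirstPersonObserver.IsExtensionEnabled())
        {
            Debug.Log("First person observer feature instance: " + ViveFirstPersonObserver.Instance);
        }
    }
}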

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 311462c0560d6ec4ea9ed080a6a77a3b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -85,12 +85,12 @@ namespace VIVE.OpenXR
private static extern IntPtr intercept_xrGetInstanceProcAddr(IntPtr func); private static extern IntPtr intercept_xrGetInstanceProcAddr(IntPtr func);
[DllImport(ExtLib, EntryPoint = "applyFoveationHTC")] [DllImport(ExtLib, EntryPoint = "applyFoveationHTC")]
private static extern XrResult applyFoveationHTC(XrFoveationModeHTC mode, UInt32 configCount, XrFoveationConfigurationHTC[] configs, UInt64 flags); private static extern XrResult applyFoveationHTC(Foveation.XrFoveationModeHTC mode, UInt32 configCount, Foveation.XrFoveationConfigurationHTC[] configs, UInt64 flags);
/// <summary> /// <summary>
/// function to apply HTC Foveation /// function to apply HTC Foveation
/// </summary> /// </summary>
public static XrResult ApplyFoveationHTC(XrFoveationModeHTC mode, UInt32 configCount, XrFoveationConfigurationHTC[] configs, UInt64 flags = 0) public static XrResult ApplyFoveationHTC(Foveation.XrFoveationModeHTC mode, UInt32 configCount, Foveation.XrFoveationConfigurationHTC[] configs, UInt64 flags = 0)
{ {
//Debug.Log("Unity HTCFoveat:configCount " + configCount); //Debug.Log("Unity HTCFoveat:configCount " + configCount);
//if (configCount >=2) { //if (configCount >=2) {

View File

@@ -0,0 +1,38 @@
// Copyright HTC Corporation All Rights Reserved.
namespace VIVE.OpenXR.Foveation
{
#region 12.86. XR_HTC_foveation
/// <summary>
/// The XrFoveationModeHTC identifies the different foveation modes.
/// </summary>
public enum XrFoveationModeHTC
{
XR_FOVEATION_MODE_DISABLE_HTC = 0,
XR_FOVEATION_MODE_FIXED_HTC = 1,
XR_FOVEATION_MODE_DYNAMIC_HTC = 2,
XR_FOVEATION_MODE_CUSTOM_HTC = 3,
XR_FOVEATION_MODE_MAX_ENUM_HTC = 0x7FFFFFFF
}
/// <summary>
/// The XrFoveationLevelHTC identifies the pixel density drop level of periphery area.
/// </summary>
public enum XrFoveationLevelHTC
{
XR_FOVEATION_LEVEL_NONE_HTC = 0,
XR_FOVEATION_LEVEL_LOW_HTC = 1,
XR_FOVEATION_LEVEL_MEDIUM_HTC = 2,
XR_FOVEATION_LEVEL_HIGH_HTC = 3,
XR_FOVEATION_LEVEL_MAX_ENUM_HTC = 0x7FFFFFFF
}
/// <summary>
/// The XrFoveationConfigurationHTC structure contains the custom foveation settings for the corresponding views.
/// </summary>
public struct XrFoveationConfigurationHTC
{
public XrFoveationLevelHTC level;
public float clearFovDegree;
public XrVector2f focalCenterOffset;
}
#endregion
}
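A small sketch of how these structs could be filled for a custom foveation setup. The numeric values are example assumptions, and the ApplyFoveationHTC entry point shown in the hunk above lives in a class that is outside this excerpt, so only the configuration array is built here.

// Sketch only: build per-eye custom foveation settings using the structs above.
using VIVE.OpenXR;
using VIVE.OpenXR.Foveation;

static class FoveationConfigSketch
{
    public static XrFoveationConfigurationHTC[] BuildHighPeripheryConfigs()
    {
        var left = new XrFoveationConfigurationHTC
        {
            level = XrFoveationLevelHTC.XR_FOVEATION_LEVEL_HIGH_HTC,
            clearFovDegree = 38.0f,              // full-quality FOV in degrees (example value)
            focalCenterOffset = new XrVector2f() // keep the focal center at the view center
        };
        var right = left; // structs copy by value; reuse the same settings for both eyes
        return new[] { left, right };
    }
}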

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6b3c2ad651da4e5498f49d3a26038620
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 83e064b5ad501784c898651afc560f8e
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 73f495f4b0dd14245b3997ffbe23713a
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,19 @@
# 12.1. XR_HTC_frame_synchronization
## Overview
Traditionally, the runtime uses the latest frame, which can cause jitter. With Frame Synchronization, rendered frames are not discarded, giving a smoother gameplay experience.
However, if the GPU cannot consistently finish rendering on time (taking more than one vsync per frame), jitter will still occur. Therefore, reducing GPU load is key to smooth gameplay.
## Name String
XR_HTC_frame_synchronization
## Revision
1
## New Enum Constants
[XrStructureType](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrStructureType) enumeration is extended with:
- XR_TYPE_FRAME_SYNCHRONIZATION_SESSION_BEGIN_INFO_HTC
## New Enums
- XrFrameSynchronizationModeHTC
## New Structures
- XrFrameSynchronizationSessionBeginInfoHTC
## VIVE Plugin
Enable "VIVE XR Frame Synchronization" in "Project Settings > XR Plugin-in Management > OpenXR > Android Tab > OpenXR Feature Groups" to use the frame synchronization provided by VIVE OpenXR plugin.

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 190a1897e332b7f45893a24c3f696567
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a8078a459f75c5d419c46950680d6446
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,165 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Text;
using UnityEngine;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
namespace VIVE.OpenXR.FrameSynchronization
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Frame Synchronization (Beta)",
BuildTargetGroups = new[] { BuildTargetGroup.Android },
Company = "HTC",
Desc = "Support the Frame Synchronization extension.",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = kOpenxrExtensionString,
Version = "1.0.0",
FeatureId = featureId)]
#endif
public class ViveFrameSynchronization : OpenXRFeature
{
#region Log
const string LOG_TAG = "VIVE.OpenXR.FrameSynchronization.ViveFrameSynchronization";
StringBuilder m_sb = null;
StringBuilder sb {
get {
if (m_sb == null) { m_sb = new StringBuilder(); }
return m_sb;
}
}
void DEBUG(StringBuilder msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
void WARNING(StringBuilder msg) { Debug.LogWarningFormat("{0} {1}", LOG_TAG, msg); }
void ERROR(StringBuilder msg) { Debug.LogErrorFormat("{0} {1}", LOG_TAG, msg); }
#endregion
/// <summary>
/// The extension name of 12.1. XR_HTC_frame_synchronization.
/// </summary>
public const string kOpenxrExtensionString = "XR_HTC_frame_synchronization";
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string featureId = "vive.openxr.feature.framesynchronization";
#region OpenXR Life Cycle
/// <inheritdoc />
protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
{
sb.Clear().Append("HookGetInstanceProcAddr() xrBeginSession"); DEBUG(sb);
ViveInterceptors.Instance.AddRequiredFunction("xrBeginSession");
return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
}
#pragma warning disable
private bool m_XrInstanceCreated = false;
#pragma warning enable
private XrInstance m_XrInstance = 0;
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateInstance">xrCreateInstance</see> is done.
/// </summary>
/// <param name="xrInstance">The created instance.</param>
/// <returns>True for valid <see cref="XrInstance">XrInstance</see></returns>
protected override bool OnInstanceCreate(ulong xrInstance)
{
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
{
sb.Clear().Append("OnInstanceCreate() ").Append(kOpenxrExtensionString).Append(" is NOT enabled."); WARNING(sb);
return false;
}
m_XrInstance = xrInstance;
m_XrInstanceCreated = true;
sb.Clear().Append("OnInstanceCreate() ").Append(m_XrInstance); DEBUG(sb);
ActivateFrameSynchronization(true);
return true;
}
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroyInstance">xrDestroyInstance</see> is done.
/// </summary>
/// <param name="xrInstance">The instance to destroy.</param>
protected override void OnInstanceDestroy(ulong xrInstance)
{
sb.Clear().Append("OnInstanceDestroy() ").Append(xrInstance).Append(", current: ").Append(m_XrInstance); DEBUG(sb);
if (m_XrInstance == xrInstance)
{
m_XrInstanceCreated = false;
m_XrInstance = 0;
}
}
#pragma warning disable
private bool m_XrSessionCreated = false;
#pragma warning enable
private XrSession m_XrSession = 0;
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateSession">xrCreateSession</see> is done.
/// </summary>
/// <param name="xrSession">The created session ID.</param>
protected override void OnSessionCreate(ulong xrSession)
{
m_XrSession = xrSession;
m_XrSessionCreated = true;
sb.Clear().Append("OnSessionCreate() ").Append(m_XrSession); DEBUG(sb);
}
protected override void OnSessionEnd(ulong xrSession)
{
sb.Clear().Append("OnSessionEnd() ").Append(xrSession).Append(", current: ").Append(m_XrSession); DEBUG(sb);
}
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroySession">xrDestroySession</see> is done.
/// </summary>
/// <param name="xrSession">The session ID to destroy.</param>
protected override void OnSessionDestroy(ulong xrSession)
{
sb.Clear().Append("OnSessionDestroy() ").Append(xrSession).Append(", current: ").Append(m_XrSession); DEBUG(sb);
if (m_XrSession == xrSession)
{
m_XrSessionCreated = false;
m_XrSession = 0;
ActivateFrameSynchronization(false);
}
}
private XrSystemId m_XrSystemId = 0;
/// <summary>
/// Called when the <see cref="XrSystemId">XrSystemId</see> retrieved by <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystem">xrGetSystem</see> is changed.
/// </summary>
/// <param name="xrSystem">The system id.</param>
protected override void OnSystemChange(ulong xrSystem)
{
m_XrSystemId = xrSystem;
sb.Clear().Append("OnSystemChange() " + m_XrSystemId); DEBUG(sb);
}
#endregion
[SerializeField]
internal SynchronizationModeHTC m_SynchronizationMode = SynchronizationModeHTC.Stablized;
/// <summary>
/// Activate or deactivate the Frame Synchronization feature.
/// </summary>
/// <param name="active">True for activate</param>
/// <param name="mode">The <see cref="XrFrameSynchronizationModeHTC"/> used for Frame Synchronization.</param>
private void ActivateFrameSynchronization(bool active)
{
sb.Clear().Append("ActivateFrameSynchronization() ").Append(active ? "enable " : "disable ").Append(m_SynchronizationMode); DEBUG(sb);
ViveInterceptors.Instance.ActivateFrameSynchronization(active, (XrFrameSynchronizationModeHTC)m_SynchronizationMode);
}
/// <summary>
/// Retrieves current frame synchronization mode.
/// </summary>
/// <returns>The mode of <see cref="SynchronizationModeHTC"/>.</returns>
public SynchronizationModeHTC GetSynchronizationMode() { return m_SynchronizationMode; }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: cb48e1e4de80ea8498ba4e9ff34adc32
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,67 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Runtime.InteropServices;
namespace VIVE.OpenXR.FrameSynchronization
{
/// <summary>
/// The enum alias of <see cref="XrFrameSynchronizationModeHTC"/>.
/// </summary>
public enum SynchronizationModeHTC : UInt32
{
Stablized = XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_STABILIZED_HTC,
Prompt = XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_PROMPT_HTC,
//Adaptive = XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_ADAPTIVE_HTC,
}
// -------------------- 12.1. XR_HTC_frame_synchronization --------------------
#region New Enums
public enum XrFrameSynchronizationModeHTC : UInt32
{
XR_FRAME_SYNCHRONIZATION_MODE_STABILIZED_HTC = 1,
XR_FRAME_SYNCHRONIZATION_MODE_PROMPT_HTC = 2,
XR_FRAME_SYNCHRONIZATION_MODE_ADAPTIVE_HTC = 3,
XR_FRAME_SYNCHRONIZATION_MODE_MAX_ENUM_HTC = 0x7FFFFFFF
}
#endregion
#region New Structures
/// <summary>
/// Traditionally, the runtime uses the latest frame, which can cause jitter. With Frame Synchronization, rendered frames are not discarded, giving a smoother gameplay experience.
/// However, if the GPU cannot consistently finish rendering on time (taking more than one vsync per frame), jitter will still occur. Therefore, reducing GPU load is key to smooth gameplay.
/// The application can use Frame Synchronization by chaining XrFrameSynchronizationSessionBeginInfoHTC to the next pointer of <see cref="XrSessionBeginInfo"/>.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrFrameSynchronizationSessionBeginInfoHTC
{
/// <summary>
/// The XrStructureType of this structure. It must be XR_TYPE_FRAME_SYNCHRONIZATION_SESSION_BEGIN_INFO_HTC.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <summary>
/// The frame synchronization mode to be used in this session.
/// </summary>
public XrFrameSynchronizationModeHTC mode;
public XrFrameSynchronizationSessionBeginInfoHTC(XrStructureType in_type, IntPtr in_next, XrFrameSynchronizationModeHTC in_mode)
{
type = in_type;
next = in_next;
mode = in_mode;
}
public static XrFrameSynchronizationSessionBeginInfoHTC identity {
get {
return new XrFrameSynchronizationSessionBeginInfoHTC(
XrStructureType.XR_TYPE_FRAME_SYNCHRONIZATION_SESSION_BEGIN_INFO_HTC,
IntPtr.Zero,
XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_STABILIZED_HTC);
}
}
}
#endregion
}
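As a small illustration of the structure chain this extension expects, the helper below (hypothetical, not part of the shipped file) marshals the begin-info struct so it could be attached to XrSessionBeginInfo.next before xrBeginSession; inside the plugin this chaining is handled by the xrBeginSession interceptor.

using System;
using System.Runtime.InteropServices;
using VIVE.OpenXR.FrameSynchronization;

static class FrameSyncChainSketch
{
    // Marshals the begin-info struct to unmanaged memory so it could be chained
    // through XrSessionBeginInfo.next. The caller owns the returned pointer and
    // must release it with Marshal.FreeHGlobal after xrBeginSession returns.
    public static IntPtr Build(XrFrameSynchronizationModeHTC mode)
    {
        var beginInfo = XrFrameSynchronizationSessionBeginInfoHTC.identity;
        beginInfo.mode = mode;

        IntPtr chained = Marshal.AllocHGlobal(Marshal.SizeOf(beginInfo));
        Marshal.StructureToPtr(beginInfo, chained, false);
        return chained;
    }
}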

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a2877a2048174774b8e8698f159199e9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -19,7 +19,7 @@ The application should use
## VIVE Plugin ## VIVE Plugin
After adding the "VIVE Focus3 Hand Interaction" to "Project Settings > XR Plugin-in Management > OpenXR > Android Tab > Interaction Profiles", you can use the following Input Action Pathes. After adding the "VIVE XR Hand Interaction" to "Project Settings > XR Plugin-in Management > OpenXR > Android Tab > Interaction Profiles", you can use the following Input Action Pathes.
### Left Hand ### Left Hand
- <ViveHandInteraction>{LeftHand}/selectValue: Presents the left hand pinch strength. - <ViveHandInteraction>{LeftHand}/selectValue: Presents the left hand pinch strength.
@@ -29,4 +29,69 @@ After adding the "VIVE Focus3 Hand Interaction" to "Project Settings > XR Plugin
- <ViveHandInteraction>{RightHand}/selectValue: Presents the right hand pinch strength. - <ViveHandInteraction>{RightHand}/selectValue: Presents the right hand pinch strength.
- <ViveHandInteraction>{RightHand}/pointerPose: Presents the right hand pinch pose. - <ViveHandInteraction>{RightHand}/pointerPose: Presents the right hand pinch pose.
Refer to the <VIVE OpenXR sample path>/Plugin/Input/ActionMap/InputActions.inputActions about the "Input Action Path" usage and the sample <VIVE OpenXR sample path>/Plugin/Input/OpenXRInput.unity. Refer to the <VIVE OpenXR sample path>/Samples/Commons/ActionMap/InputActions.inputActions about the "Input Action Path" usage in the sample <VIVE OpenXR sample path>/Samples/Input/OpenXRInput.unity.
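For example, the selectValue path listed above can also be bound directly from script with the Unity Input System. This is a minimal sketch; the bundled .inputActions asset remains the recommended route.

```csharp
using UnityEngine;
using UnityEngine.InputSystem;

public class HandPinchStrength : MonoBehaviour
{
    InputAction selectValue;

    void OnEnable()
    {
        // Bind the right-hand pinch strength path listed above.
        selectValue = new InputAction(type: InputActionType.Value,
            binding: "<ViveHandInteraction>{RightHand}/selectValue");
        selectValue.Enable();
    }

    void Update()
    {
        float strength = selectValue.ReadValue<float>(); // 1.0 when the pinch is fully performed
    }

    void OnDisable()
    {
        selectValue.Disable();
    }
}
```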
--------------------
# 12.31. XR_EXT_hand_interaction
## Name String
XR_EXT_hand_interaction
## Revision
1
## Hand Interaction Profile
### Interaction profile path:
- /interaction_profiles/ext/hand_interaction_ext
### Valid for user paths:
- /user/hand/left
- /user/hand/right
### Supported input source
- …/input/aim/pose
- …/input/aim_activate_ext/value: a 1D analog input component indicating that the user activated the action on the target that the user is pointing at with the aim pose.
- …/input/aim_activate_ext/ready_ext: a boolean input, where the value XR_TRUE indicates that the fingers to perform the "aim_activate" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing an "aim_activate" gesture.
- …/input/grip/pose
- …/input/grasp_ext/value: a 1D analog input component indicating that the user is making a fist.
- …/input/grasp_ext/ready_ext: a boolean input, where the value XR_TRUE indicates that the hand performing the grasp action is properly tracked by the hand tracking device and it is observed to be ready to perform or is performing the grasp action.
- …/input/pinch_ext/pose
- …/input/pinch_ext/value: a 1D analog input component indicating the extent to which the user is bringing their finger and thumb together to perform a "pinch" gesture.
- …/input/pinch_ext/ready_ext: a boolean input, where the value XR_TRUE indicates that the fingers used to perform the "pinch" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing a "pinch" gesture.
- …/input/poke_ext/pose
The …/input/aim/pose is typically used for aiming at objects out of arm's reach. When using a hand interaction profile, it is typically paired with …/input/aim_activate_ext/value to optimize aiming ray stability while performing the gesture. When using a controller interaction profile, the "aim" pose is typically paired with a trigger or a button for aim and fire operations.
The …/input/grip/pose is typically used for holding a large object in the user's hand. When using a hand interaction profile, it is typically paired with …/input/grasp_ext/value for the user to directly manipulate an object held in a hand. When using a controller interaction profile, the "grip" pose is typically paired with a "squeeze" button or trigger that gives the user the sense of tightly holding an object.
The …/input/pinch_ext/pose is typically used for directly manipulating a small object using the pinch gesture. When using a hand interaction profile, it is typically paired with the …/input/pinch_ext/value gesture. When using a controller interaction profile, it is typically paired with a trigger manipulated with the index finger, which typically requires curling the index finger and applying pressure with the fingertip.
The …/input/poke_ext/pose is typically used for contact-based interactions using the motion of the hand or fingertip. It typically does not pair with other hand gestures or buttons on the controller. The application typically uses a sphere collider with the "poke" pose to visualize the pose and detect touch with a virtual object.
## VIVE Plugin
After adding the "VIVE XR Hand Interaction Ext" to "Project Settings > XR Plug-in Management > OpenXR > Android Tab > Interaction Profiles", you can use the following Input Action Paths.
### Left Hand
- <ViveHandInteraction>{LeftHand}/pointerPose: Presents the left hand aim pose used for aiming at objects out of arm's reach.
- <ViveHandInteraction>{LeftHand}/pointerValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the aimed-at target is being fully interacted with using the left hand.
- <ViveHandInteraction>{LeftHand}/pointerReady: XR_TRUE indicates that the left fingers to perform the "aim_activate" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing an "aim_activate" gesture.
- <ViveHandInteraction>{LeftHand}/gripPose: Presents the left hand grip pose used for holding a large object in the user's hand.
- <ViveHandInteraction>{LeftHand}/gripValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the left fist is tightly closed.
- <ViveHandInteraction>{LeftHand}/gripReady: XR_TRUE indicates that the left hand performing the grasp action is properly tracked by the hand tracking device and it is observed to be ready to perform or is performing the grasp action.
- <ViveHandInteraction>{LeftHand}/pinchPose: Presents the left hand pinch pose used for directly manipulating a small object using the pinch gesture.
- <ViveHandInteraction>{LeftHand}/pinchValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the left finger and thumb are touching each other.
- <ViveHandInteraction>{LeftHand}/pinchReady: XR_TRUE indicates that the left fingers used to perform the "pinch" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing a "pinch" gesture.
- <ViveHandInteraction>{LeftHand}/pokePose: Presents the left hand poke pose used for contact-based interactions using the motion of the hand or fingertip.
### Right Hand
- <ViveHandInteraction>{RightHand}/pointerPose: Presents the right hand aim pose used for aiming at objects out of arm's reach.
- <ViveHandInteraction>{RightHand}/pointerValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the aimed-at target is being fully interacted with using the right hand.
- <ViveHandInteraction>{RightHand}/pointerReady: XR_TRUE indicates that the right fingers to perform the "aim_activate" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing an "aim_activate" gesture.
- <ViveHandInteraction>{RightHand}/gripPose: Presents the right hand grip pose used for holding a large object in the user's hand.
- <ViveHandInteraction>{RightHand}/gripValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the right fist is tightly closed.
- <ViveHandInteraction>{RightHand}/gripReady: XR_TRUE indicates that the right hand performing the grasp action is properly tracked by the hand tracking device and it is observed to be ready to perform or is performing the grasp action.
- <ViveHandInteraction>{RightHand}/pinchPose: Presents the right hand pinch pose used for directly manipulating a small object using the pinch gesture.
- <ViveHandInteraction>{RightHand}/pinchValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the right finger and thumb are touching each other.
- <ViveHandInteraction>{RightHand}/pinchReady: XR_TRUE indicates that the right fingers used to perform the "pinch" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing a "pinch" gesture.
- <ViveHandInteraction>{RightHand}/pokePose: Presents the right hand poke pose used for contact-based interactions using the motion of the hand or fingertip.
Refer to the <VIVE OpenXR sample path>/Samples/HandInteractionExt/HandInteractionExt.inputActions for "Input Action Path" usage in the sample <VIVE OpenXR sample path>/Samples/HandInteractionExt/HandInteractionExt.unity.
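As a minimal sketch of wiring these paths directly in code rather than through an .inputActions asset, the following reads the left-hand pinch strength and readiness. The action names and the 0.5f threshold are illustrative; the binding paths are copied from the list above, so adjust the layout prefix if your project registers it differently.

```csharp
// Minimal sketch: create actions in code against the paths documented above.
using UnityEngine;
using UnityEngine.InputSystem;

public class LeftPinchSketch : MonoBehaviour
{
    InputAction pinchValue;
    InputAction pinchReady;

    void OnEnable()
    {
        pinchValue = new InputAction(type: InputActionType.Value,
            binding: "<ViveHandInteraction>{LeftHand}/pinchValue");
        pinchReady = new InputAction(type: InputActionType.Button,
            binding: "<ViveHandInteraction>{LeftHand}/pinchReady");
        pinchValue.Enable();
        pinchReady.Enable();
    }

    void OnDisable()
    {
        pinchValue.Disable();
        pinchReady.Disable();
    }

    void Update()
    {
        // pinchReady reports XR_TRUE (1.0f) when the hand shape is tracked and ready.
        if (pinchReady.ReadValue<float>() > 0.5f)
        {
            Debug.Log($"Left pinch strength: {pinchValue.ReadValue<float>():0.00}");
        }
    }
}
```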


@@ -31,10 +31,11 @@ namespace VIVE.OpenXR.Hand
/// </summary> /// </summary>
#if UNITY_EDITOR #if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Hand Interaction", [OpenXRFeature(UiName = "VIVE XR Hand Interaction",
Hidden = true,
BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone }, BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
Company = "HTC", Company = "HTC",
Desc = "Support for enabling the hand interaction profile. Will register the controller map for hand interaction if enabled.", Desc = "Support for enabling the VIVE hand interaction profile. Will register the controller map for hand interaction if enabled.",
DocumentationLink = "..\\Documentation", DocumentationLink = "https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XR_HTC_hand_interaction",
Version = "1.0.0", Version = "1.0.0",
OpenxrExtensionStrings = kOpenxrExtensionString, OpenxrExtensionStrings = kOpenxrExtensionString,
Category = FeatureCategory.Interaction, Category = FeatureCategory.Interaction,
@@ -42,6 +43,7 @@ namespace VIVE.OpenXR.Hand
#endif #endif
public class ViveHandInteraction : OpenXRInteractionFeature public class ViveHandInteraction : OpenXRInteractionFeature
{ {
#region Log
const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandInteraction "; const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandInteraction ";
StringBuilder m_sb = null; StringBuilder m_sb = null;
StringBuilder sb { StringBuilder sb {
@@ -50,8 +52,9 @@ namespace VIVE.OpenXR.Hand
return m_sb; return m_sb;
} }
} }
void DEBUG(StringBuilder msg) { Debug.Log(msg); } void DEBUG(StringBuilder msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
void WARNING(StringBuilder msg) { Debug.LogWarning(msg); } void WARNING(StringBuilder msg) { Debug.LogWarningFormat("{0} {1}", LOG_TAG, msg); }
#endregion
/// <summary> /// <summary>
/// OpenXR specification <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_HTC_hand_interaction">12.69. XR_HTC_hand_interaction</see>. /// OpenXR specification <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_HTC_hand_interaction">12.69. XR_HTC_hand_interaction</see>.
@@ -68,6 +71,7 @@ namespace VIVE.OpenXR.Hand
/// </summary> /// </summary>
private const string profile = "/interaction_profiles/htc/hand_interaction"; private const string profile = "/interaction_profiles/htc/hand_interaction";
#region Supported component paths
private const string leftHand = "/user/hand_htc/left"; private const string leftHand = "/user/hand_htc/left";
private const string rightHand = "/user/hand_htc/right"; private const string rightHand = "/user/hand_htc/right";
@@ -85,21 +89,22 @@ namespace VIVE.OpenXR.Hand
/// <summary> /// <summary>
/// Constant for a pose interaction binding '.../input/aim/pose' OpenXR Input Binding. Used by input subsystem to bind actions to physical inputs. /// Constant for a pose interaction binding '.../input/aim/pose' OpenXR Input Binding. Used by input subsystem to bind actions to physical inputs.
/// </summary> /// </summary>
private const string pointerPose = "/input/aim/pose"; public const string pointerPose = "/input/aim/pose";
/// <summary> /// <summary>
/// Constant for a pose interaction binding '.../input/grip/pose' OpenXR Input Binding. Used by input subsystem to bind actions to physical inputs. /// Constant for a pose interaction binding '.../input/grip/pose' OpenXR Input Binding. Used by input subsystem to bind actions to physical inputs.
/// </summary> /// </summary>
public const string devicePose = "/input/grip/pose"; public const string devicePose = "/input/grip/pose";
#endregion
[Preserve, InputControlLayout(displayName = "VIVE Hand Interaction (OpenXR)", commonUsages = new[] { "LeftHand", "RightHand" }, isGenericTypeOfDevice = true)] [Preserve, InputControlLayout(displayName = "VIVE Hand Interaction (OpenXR)", commonUsages = new[] { "LeftHand", "RightHand" }, isGenericTypeOfDevice = true)]
public class HandInteractionDevice : OpenXRDevice public class HandInteractionDevice : OpenXRDevice
{ {
const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandInteraction.HandInteractionDevice"; const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandInteraction.HandInteractionDevice";
void DEBUG(string msg) { Debug.Log(LOG_TAG + " " + msg); } void DEBUG(string msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
/// <summary> /// <summary>
/// A [AxisControl](xref:UnityEngine.InputSystem.Controls.AxisControl) that represents the <see cref="ViveHandInteraction.selectValue"/> OpenXR binding. /// A <see cref="AxisControl"/> representing the <see cref="ViveHandInteraction.selectValue"/> OpenXR binding.
/// </summary> /// </summary>
[Preserve, InputControl(aliases = new[] { "selectAxis, pinchStrength" }, usage = "Select")] [Preserve, InputControl(aliases = new[] { "selectAxis, pinchStrength" }, usage = "Select")]
public AxisControl selectValue { get; private set; } public AxisControl selectValue { get; private set; }
@@ -125,23 +130,23 @@ namespace VIVE.OpenXR.Hand
/// <summary> /// <summary>
/// A [ButtonControl](xref:UnityEngine.InputSystem.Controls.ButtonControl) required for backwards compatibility with the XRSDK layouts. This represents the overall tracking state of the device. This value is equivalent to mapping devicePose/isTracked. /// A [ButtonControl](xref:UnityEngine.InputSystem.Controls.ButtonControl) required for backwards compatibility with the XRSDK layouts. This represents the overall tracking state of the device. This value is equivalent to mapping devicePose/isTracked.
/// </summary> /// </summary>
[Preserve, InputControl(offset = 8, usage = "IsTracked")] [Preserve, InputControl(offset = 8)]
public ButtonControl isTracked { get; private set; } public ButtonControl isTracked { get; private set; }
/// <summary> /// <summary>
/// A [IntegerControl](xref:UnityEngine.InputSystem.Controls.IntegerControl) required for backwards compatibility with the XRSDK layouts. This represents the bit flag set to indicate what data is valid. This value is equivalent to mapping devicePose/trackingState. /// A [IntegerControl](xref:UnityEngine.InputSystem.Controls.IntegerControl) required for backwards compatibility with the XRSDK layouts. This represents the bit flag set to indicate what data is valid. This value is equivalent to mapping devicePose/trackingState.
/// </summary> /// </summary>
[Preserve, InputControl(offset = 12, usage = "TrackingState")] [Preserve, InputControl(offset = 12)]
public IntegerControl trackingState { get; private set; } public IntegerControl trackingState { get; private set; }
/// <summary> /// <summary>
/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the device position. For the VIVE Focus 3 device, this is both the device and the pointer position. This value is equivalent to mapping devicePose/position. /// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the device position. This value is equivalent to mapping devicePose/position.
/// </summary> /// </summary>
[Preserve, InputControl(offset = 16, alias = "gripPosition")] [Preserve, InputControl(offset = 16, alias = "gripPosition")]
public Vector3Control devicePosition { get; private set; } public Vector3Control devicePosition { get; private set; }
/// <summary> /// <summary>
/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the device orientation. For the VIVE Focus 3 device, this is both the device and the pointer rotation. This value is equivalent to mapping devicePose/rotation. /// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the device orientation. This value is equivalent to mapping devicePose/rotation.
/// </summary> /// </summary>
[Preserve, InputControl(offset = 28, alias = "gripOrientation")] [Preserve, InputControl(offset = 28, alias = "gripOrientation")]
public QuaternionControl deviceRotation { get; private set; } public QuaternionControl deviceRotation { get; private set; }
@@ -184,16 +189,15 @@ namespace VIVE.OpenXR.Hand
/// <returns>True for valid <see cref="XrInstance">XrInstance</see></returns> /// <returns>True for valid <see cref="XrInstance">XrInstance</see></returns>
protected override bool OnInstanceCreate(ulong xrInstance) protected override bool OnInstanceCreate(ulong xrInstance)
{ {
// Requires the eye tracking extension
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString)) if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
{ {
sb.Clear().Append(LOG_TAG).Append("OnInstanceCreate() ").Append(kOpenxrExtensionString).Append(" is NOT enabled."); WARNING(sb); sb.Clear().Append("OnInstanceCreate() ").Append(kOpenxrExtensionString).Append(" is NOT enabled."); WARNING(sb);
return false; return false;
} }
m_XrInstanceCreated = true; m_XrInstanceCreated = true;
m_XrInstance = xrInstance; m_XrInstance = xrInstance;
sb.Clear().Append(LOG_TAG).Append("OnInstanceCreate() " + m_XrInstance); DEBUG(sb); sb.Clear().Append("OnInstanceCreate() " + m_XrInstance); DEBUG(sb);
return base.OnInstanceCreate(xrInstance); return base.OnInstanceCreate(xrInstance);
} }
@@ -205,9 +209,7 @@ namespace VIVE.OpenXR.Hand
/// </summary> /// </summary>
protected override void RegisterDeviceLayout() protected override void RegisterDeviceLayout()
{ {
sb.Clear().Append(LOG_TAG).Append("RegisterDeviceLayout() Layout: ").Append(kLayoutName) sb.Clear().Append("RegisterDeviceLayout() ").Append(kLayoutName).Append(", product: ").Append(kDeviceLocalizedName); DEBUG(sb);
.Append(", Product: ").Append(kDeviceLocalizedName);
DEBUG(sb);
InputSystem.RegisterLayout(typeof(HandInteractionDevice), InputSystem.RegisterLayout(typeof(HandInteractionDevice),
kLayoutName, kLayoutName,
matches: new InputDeviceMatcher() matches: new InputDeviceMatcher()
@@ -220,16 +222,36 @@ namespace VIVE.OpenXR.Hand
/// </summary> /// </summary>
protected override void UnregisterDeviceLayout() protected override void UnregisterDeviceLayout()
{ {
sb.Clear().Append(LOG_TAG).Append("UnregisterDeviceLayout() ").Append(kLayoutName); DEBUG(sb); sb.Clear().Append("UnregisterDeviceLayout() ").Append(kLayoutName); DEBUG(sb);
InputSystem.RemoveLayout(kLayoutName); InputSystem.RemoveLayout(kLayoutName);
} }
#if UNITY_XR_OPENXR_1_9_1
/// <summary>
/// Return interaction profile type. HandInteractionDevice profile is Device type.
/// </summary>
/// <returns>Interaction profile type.</returns>
protected override InteractionProfileType GetInteractionProfileType()
{
return typeof(HandInteractionDevice).IsSubclassOf(typeof(XRController)) ? InteractionProfileType.XRController : InteractionProfileType.Device;
}
/// <summary>
/// Return device layout string used for registering device HandInteractionDevice in InputSystem.
/// </summary>
/// <returns>Device layout string.</returns>
protected override string GetDeviceLayoutName()
{
return kLayoutName;
}
#endif
/// <summary> /// <summary>
/// Registers action maps to Unity XR. /// Registers action maps to Unity XR.
/// </summary> /// </summary>
protected override void RegisterActionMapsWithRuntime() protected override void RegisterActionMapsWithRuntime()
{ {
sb.Clear().Append(LOG_TAG).Append("RegisterActionMapsWithRuntime() Action map vivehandinteraction") sb.Clear().Append("RegisterActionMapsWithRuntime() Action map vivehandinteraction")
.Append(", localizedName: ").Append(kDeviceLocalizedName) .Append(", localizedName: ").Append(kDeviceLocalizedName)
.Append(", desiredInteractionProfile").Append(profile); .Append(", desiredInteractionProfile").Append(profile);
DEBUG(sb); DEBUG(sb);

View File

@@ -0,0 +1,585 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEngine.Scripting;
using UnityEngine.XR.OpenXR.Features;
using UnityEngine.InputSystem.Layouts;
using UnityEngine.InputSystem.XR;
using UnityEngine.InputSystem.Controls;
using UnityEngine.XR.OpenXR;
using UnityEngine;
using UnityEngine.InputSystem;
using System.Collections.Generic;
using UnityEngine.XR;
using UnityEngine.XR.OpenXR.Input;
using System.Text;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
#if USE_INPUT_SYSTEM_POSE_CONTROL // Scripting Define Symbol added by using OpenXR Plugin 1.6.0.
using PoseControl = UnityEngine.InputSystem.XR.PoseControl;
#else
using PoseControl = UnityEngine.XR.OpenXR.Input.PoseControl;
#endif
namespace VIVE.OpenXR.Hand
{
/// <summary>
/// This <see cref="OpenXRInteractionFeature"/> enables the use of hand interaction profiles in OpenXR. It enables <see cref="ViveHandInteractionExt.kOpenxrExtensionString">XR_EXT_hand_interaction</see> in the underlying runtime.
/// </summary>
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Hand Interaction Ext",
Hidden = true,
BuildTargetGroups = new[] { BuildTargetGroup.Android },
Company = "HTC",
Desc = "Support for enabling the KHR hand interaction profile. Will register the controller map for hand interaction if enabled.",
DocumentationLink = "https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XR_EXT_hand_interaction",
Version = "1.0.0",
OpenxrExtensionStrings = kOpenxrExtensionString,
Category = FeatureCategory.Interaction,
FeatureId = featureId)]
#endif
public class ViveHandInteractionExt : OpenXRInteractionFeature
{
#region Log
const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandInteractionExt";
StringBuilder m_sb = null;
StringBuilder sb {
get {
if (m_sb == null) { m_sb = new StringBuilder(); }
return m_sb;
}
}
void DEBUG(StringBuilder msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
void WARNING(StringBuilder msg) { Debug.LogWarningFormat("{0} {1}", LOG_TAG, msg); }
#endregion
/// <summary>
/// OpenXR specification <see href="https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XR_EXT_hand_interaction">12.31. XR_EXT_hand_interaction</see>.
/// </summary>
public const string kOpenxrExtensionString = "XR_EXT_hand_interaction";
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string featureId = "vive.openxr.feature.hand.interaction.ext";
[Preserve, InputControlLayout(displayName = "VIVE Hand Interaction Ext (OpenXR)", commonUsages = new[] { "LeftHand", "RightHand" })]
public class HandInteractionExtDevice : XRController
{
#region Log
const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandInteractionExt.HandInteractionExtDevice";
void DEBUG(string msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
#endregion
#region Action Path
/// <summary>
/// A <see cref="PoseControl"/> representing the <see cref="ViveHandInteractionExt.grip"/> OpenXR binding.
/// </summary>
[Preserve, InputControl(offset = 0, aliases = new[] { "device", "gripPose" }, usage = "Device")]
public PoseControl devicePose { get; private set; }
/// <summary>
/// A <see cref="PoseControl"/> representing the <see cref="ViveHandInteractionExt.aim"/> OpenXR binding.
/// </summary>
[Preserve, InputControl(offset = 0, alias = "aimPose", usage = "Pointer")]
public PoseControl pointer { get; private set; }
/// <summary>
/// A <see cref="PoseControl"/> representing the <see cref="ViveHandInteractionExt.pinchPose"/> OpenXR binding.
/// </summary>
[Preserve, InputControl(offset = 0, usage = "Pinch")]
public PoseControl pinchPose { get; private set; }
/// <summary>
/// A <see cref="PoseControl"/> representing the <see cref="ViveHandInteractionExt.poke"/> OpenXR binding.
/// </summary>
[Preserve, InputControl(offset = 0, alias = "indexTip", usage = "Poke")]
public PoseControl pokePose { get; private set; }
/// <summary>
/// A <see cref="AxisControl"/> representing information from the <see cref="ViveHandInteractionExt.graspValue"/> OpenXR binding.
/// </summary>
[Preserve, InputControl(aliases = new[] { "gripValue" }, usage = "GraspValue")]
public AxisControl graspValue { get; private set; }
/// <summary>
/// A <see cref="ButtonControl"/> representing the <see cref="ViveHandInteractionExt.graspReady"/> OpenXR bindings, depending on handedness.
/// </summary>
[Preserve, InputControl(aliases = new[] { "isGrasped", "isGripped" }, usage = "GraspReady")]
public ButtonControl graspReady { get; private set; }
/// <summary>
/// A <see cref="AxisControl"/> representing information from the <see cref="ViveHandInteractionExt.pointerActivateValue"/> OpenXR binding.
/// </summary>
[Preserve, InputControl(aliases = new[] { "pointerValue" }, usage = "PointerActivateValue")]
public AxisControl pointerActivateValue { get; private set; }
/// <summary>
/// A <see cref="ButtonControl"/> representing the <see cref="ViveHandInteractionExt.pointerActivateReady"/> OpenXR bindings, depending on handedness.
/// </summary>
[Preserve, InputControl(aliases = new[] { "isPointed", "pointerReady" }, usage = "PointerActivateReady")]
public ButtonControl pointerActivateReady { get; private set; }
/// <summary>
/// A <see cref="AxisControl"/> representing information from the <see cref="ViveHandInteractionExt.pinchValue"/> OpenXR binding.
/// </summary>
[Preserve, InputControl(usage = "PinchValue")]
public AxisControl pinchValue { get; private set; }
/// <summary>
/// A <see cref="ButtonControl"/> representing the <see cref="ViveHandInteractionExt.pinchReady"/> OpenXR bindings, depending on handedness.
/// </summary>
[Preserve, InputControl(aliases = new[] { "isPinched" }, usage = "PinchReady")]
public ButtonControl pinchReady { get; private set; }
/// <summary>
/// A [ButtonControl](xref:UnityEngine.InputSystem.Controls.ButtonControl) required for backwards compatibility with the XRSDK layouts. This represents the overall tracking state of the device. This value is equivalent to mapping devicePose/isTracked.
/// </summary>
[Preserve, InputControl(offset = 2)]
new public ButtonControl isTracked { get; private set; }
/// <summary>
/// A [IntegerControl](xref:UnityEngine.InputSystem.Controls.IntegerControl) required for backwards compatibility with the XRSDK layouts. This represents the bit flag set to indicate what data is valid. This value is equivalent to mapping devicePose/trackingState.
/// </summary>
[Preserve, InputControl(offset = 4)]
new public IntegerControl trackingState { get; private set; }
/// <summary>
/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the device position. This value is equivalent to mapping devicePose/position.
/// </summary>
[Preserve, InputControl(offset = 8, noisy = true, alias = "gripPosition")]
new public Vector3Control devicePosition { get; private set; }
/// <summary>
/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the device orientation. This value is equivalent to mapping devicePose/rotation.
/// </summary>
[Preserve, InputControl(offset = 20, noisy = true, alias = "gripRotation")]
new public QuaternionControl deviceRotation { get; private set; }
/// <summary>
/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the aim position. This value is equivalent to mapping pointer/position.
/// </summary>
[Preserve, InputControl(offset = 72, noisy = true)]
public Vector3Control pointerPosition { get; private set; }
/// <summary>
/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the aim orientation. This value is equivalent to mapping pointer/rotation.
/// </summary>
[Preserve, InputControl(offset = 84, noisy = true)]
public QuaternionControl pointerRotation { get; private set; }
/// <summary>
/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the pinch position. This value is equivalent to mapping pinchPose/position.
/// </summary>
[Preserve, InputControl(offset = 136, noisy = true)]
public Vector3Control pinchPosition { get; private set; }
/// <summary>
/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the pinch orientation. This value is equivalent to mapping pinchPose/rotation.
/// </summary>
[Preserve, InputControl(offset = 148, noisy = true)]
public QuaternionControl pinchRotation { get; private set; }
/// <summary>
/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the poke position. This value is equivalent to mapping pokePose/position.
/// </summary>
[Preserve, InputControl(offset = 200, noisy = true)]
public Vector3Control pokePosition { get; private set; }
/// <summary>
/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the poke orientation. This value is equivalent to mapping pokePose/rotation.
/// </summary>
[Preserve, InputControl(offset = 212, noisy = true)]
public QuaternionControl pokeRotation { get; private set; }
#endregion
/// <summary>
/// Internal call used to assign controls to the correct element.
/// </summary>
protected override void FinishSetup()
{
DEBUG("FinishSetup() interfaceName: " + description.interfaceName
+ ", deviceClass: " + description.deviceClass
+ ", product: " + description.product
+ ", serial: " + description.serial
+ ", version: " + description.version);
base.FinishSetup();
pointer = GetChildControl<PoseControl>("pointer");
pointerActivateValue = GetChildControl<AxisControl>("pointerActivateValue");
pointerActivateReady = GetChildControl<ButtonControl>("pointerActivateReady");
devicePose = GetChildControl<PoseControl>("devicePose");
graspValue = GetChildControl<AxisControl>("graspValue");
graspReady = GetChildControl<ButtonControl>("graspReady");
pinchPose = GetChildControl<PoseControl>("pinchPose");
pinchValue = GetChildControl<AxisControl>("pinchValue");
pinchReady = GetChildControl<ButtonControl>("pinchReady");
pokePose = GetChildControl<PoseControl>("pokePose");
}
}
/// <summary>
/// The interaction profile string used to reference the hand interaction input device.
/// </summary>
public const string profile = "/interaction_profiles/ext/hand_interaction_ext";
#region Supported component paths
/// <summary>
/// Constant for a pose interaction binding '.../input/aim/pose' OpenXR Input Binding.<br></br>
/// Typically used for aiming at objects out of arm's reach. When using a hand interaction profile, it is typically paired with <see cref="pointerActivateValue"/> to optimize aiming ray stability while performing the gesture.<br></br>
/// When using a controller interaction profile, the "aim" pose is typically paired with a trigger or a button for aim and fire operations.
/// </summary>
public const string aim = "/input/aim/pose";
/// <summary>
/// Constant for a float interaction binding '.../input/aim_activate_ext/value' OpenXR Input Binding.<br></br>
/// A 1D analog input component indicating that the user activated the action on the target that the user is pointing at with the aim pose.
/// </summary>
public const string pointerActivateValue = "/input/aim_activate_ext/value";
/// <summary>
/// Constant for a boolean interaction binding '.../input/aim_activate_ext/ready_ext' OpenXR Input Binding.<br></br>
/// A boolean input, where the value XR_TRUE indicates that the fingers to perform the "aim_activate" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing an "aim_activate" gesture.
/// </summary>
public const string pointerActivateReady = "/input/aim_activate_ext/ready_ext";
/// <summary>
/// Constant for a pose interaction binding '.../input/grip/pose' OpenXR Input Binding.<br></br>
/// Typically used for holding a large object in the user's hand. When using a hand interaction profile, it is typically paired with <see cref="graspValue"/> for the user to directly manipulate an object held in a hand.<br></br>
/// When using a controller interaction profile, the "grip" pose is typically paired with a "squeeze" button or trigger that gives the user the sense of tightly holding an object.
/// </summary>
public const string grip = "/input/grip/pose";
/// <summary>
/// Constant for a float interaction binding '.../input/grasp_ext/value' OpenXR Input Binding.<br></br>
/// A 1D analog input component indicating that the user is making a fist.
/// </summary>
public const string graspValue = "/input/grasp_ext/value";
/// <summary>
/// Constant for a boolean interaction binding '.../input/grasp_ext/ready_ext' OpenXR Input Binding.<br></br>
/// A boolean input, where the value XR_TRUE indicates that the hand performing the grasp action is properly tracked by the hand tracking device and it is observed to be ready to perform or is performing the grasp action.
/// </summary>
public const string graspReady = "/input/grasp_ext/ready_ext";
/// <summary>
/// Constant for a pose interaction binding '.../input/pinch_ext/pose' OpenXR Input Binding.<br></br>
/// Typically used for directly manipulating a small object using the pinch gesture. When using a hand interaction profile, it is typically paired with the <see cref="pinchValue"/>.<br></br>
/// When using a controller interaction profile, it is typically paired with a trigger manipulated with the index finger, which typically requires curling the index finger and applying pressure with the fingertip.
/// </summary>
public const string pinchPose = "/input/pinch_ext/pose";
/// <summary>
/// Constant for a float interaction binding '.../input/pinch_ext/value' OpenXR Input Binding.<br></br>
/// A 1D analog input component indicating the extent to which the user is bringing their finger and thumb together to perform a "pinch" gesture.
/// </summary>
public const string pinchValue = "/input/pinch_ext/value";
/// <summary>
/// Constant for a boolean interaction binding '.../input/pinch_ext/ready_ext' OpenXR Input Binding.<br></br>
/// A boolean input, where the value XR_TRUE indicates that the fingers used to perform the "pinch" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing a "pinch" gesture.
/// </summary>
public const string pinchReady = "/input/pinch_ext/ready_ext";
/// <summary>
/// Constant for a pose interaction binding '.../input/poke_ext/pose' OpenXR Input Binding.<br></br>
/// Typically used for contact-based interactions using the motion of the hand or fingertip. It typically does not pair with other hand gestures or buttons on the controller. The application typically uses a sphere collider with the "poke" pose to visualize the pose and detect touch with a virtual object.
/// </summary>
public const string poke = "/input/poke_ext/pose";
#endregion
#pragma warning disable
private bool m_XrInstanceCreated = false;
#pragma warning restore
private XrInstance m_XrInstance = 0;
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateInstance">xrCreateInstance</see> is done.
/// </summary>
/// <param name="xrInstance">The created instance.</param>
/// <returns>True for valid <see cref="XrInstance">XrInstance</see></returns>
protected override bool OnInstanceCreate(ulong xrInstance)
{
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
{
sb.Clear().Append("OnInstanceCreate() ").Append(kOpenxrExtensionString).Append(" is NOT enabled."); WARNING(sb);
return false;
}
m_XrInstanceCreated = true;
m_XrInstance = xrInstance;
sb.Clear().Append("OnInstanceCreate() " + m_XrInstance); DEBUG(sb);
return base.OnInstanceCreate(xrInstance);
}
private const string kLayoutName = "ViveHandInteractionExt";
private const string kDeviceLocalizedName = "Vive Hand Interaction Ext OpenXR";
/// <summary>
/// Registers the <see cref="HandInteractionExtDevice"/> layout with the Input System.
/// </summary>
protected override void RegisterDeviceLayout()
{
sb.Clear().Append("RegisterDeviceLayout() ").Append(kLayoutName).Append(", product: ").Append(kDeviceLocalizedName); DEBUG(sb);
InputSystem.RegisterLayout(typeof(HandInteractionExtDevice),
kLayoutName,
matches: new InputDeviceMatcher()
.WithInterface(XRUtilities.InterfaceMatchAnyVersion)
.WithProduct(kDeviceLocalizedName));
}
/// <summary>
/// Removes the <see cref="HandInteractionExtDevice"/> layout from the Input System.
/// </summary>
protected override void UnregisterDeviceLayout()
{
sb.Clear().Append("UnregisterDeviceLayout() ").Append(kLayoutName); DEBUG(sb);
InputSystem.RemoveLayout(kLayoutName);
}
#if UNITY_XR_OPENXR_1_9_1
/// <summary>
/// Return interaction profile type. HandInteractionExtDevice derives from XRController, so the XRController profile type is returned.
/// </summary>
/// <returns>Interaction profile type.</returns>
protected override InteractionProfileType GetInteractionProfileType()
{
return typeof(HandInteractionExtDevice).IsSubclassOf(typeof(XRController)) ? InteractionProfileType.XRController : InteractionProfileType.Device;
}
/// <summary>
/// Return device layout string used for registering device HandInteractionExtDevice in InputSystem.
/// </summary>
/// <returns>Device layout string.</returns>
protected override string GetDeviceLayoutName()
{
return kLayoutName;
}
#endif
/// <summary>
/// Registers action maps to Unity XR.
/// </summary>
protected override void RegisterActionMapsWithRuntime()
{
sb.Clear().Append("RegisterActionMapsWithRuntime() Action map vivehandinteractionext")
.Append(", localizedName: ").Append(kDeviceLocalizedName)
.Append(", desiredInteractionProfile").Append(profile);
DEBUG(sb);
ActionMapConfig actionMap = new ActionMapConfig()
{
name = "vivehandinteractionext",
localizedName = kDeviceLocalizedName,
desiredInteractionProfile = profile,
manufacturer = "HTC",
serialNumber = "",
deviceInfos = new List<DeviceConfig>()
{
new DeviceConfig()
{
characteristics = (InputDeviceCharacteristics)(InputDeviceCharacteristics.HandTracking | InputDeviceCharacteristics.HeldInHand | InputDeviceCharacteristics.TrackedDevice | InputDeviceCharacteristics.Left),
userPath = UserPaths.leftHand
},
new DeviceConfig()
{
characteristics = (InputDeviceCharacteristics)(InputDeviceCharacteristics.HandTracking | InputDeviceCharacteristics.HeldInHand | InputDeviceCharacteristics.TrackedDevice | InputDeviceCharacteristics.Right),
userPath = UserPaths.rightHand
}
},
actions = new List<ActionConfig>()
{
// Grip Pose
new ActionConfig()
{
name = "devicePose",
localizedName = "Grasp Pose",
type = ActionType.Pose,
usages = new List<string>()
{
"Device"
},
bindings = new List<ActionBinding>()
{
new ActionBinding()
{
interactionPath = grip,
interactionProfileName = profile,
}
}
},
// Grip Value
new ActionConfig()
{
name = "graspValue",
localizedName = "Grip Axis",
type = ActionType.Axis1D,
usages = new List<string>()
{
"GraspValue"
},
bindings = new List<ActionBinding>()
{
new ActionBinding()
{
interactionPath = graspValue,
interactionProfileName = profile,
}
}
},
// Grip Ready
new ActionConfig()
{
name = "graspReady",
localizedName = "Is Grasped",
type = ActionType.Binary,
usages = new List<string>()
{
"GraspReady"
},
bindings = new List<ActionBinding>()
{
new ActionBinding()
{
interactionPath = graspReady,
interactionProfileName = profile,
},
}
},
// Aim Pose
new ActionConfig()
{
name = "pointer",
localizedName = "Aim Pose",
type = ActionType.Pose,
usages = new List<string>()
{
"Pointer"
},
bindings = new List<ActionBinding>()
{
new ActionBinding()
{
interactionPath = aim,
interactionProfileName = profile,
}
}
},
// Aim Value
new ActionConfig()
{
name = "pointerActivateValue",
localizedName = "Pointer Axis",
type = ActionType.Axis1D,
usages = new List<string>()
{
"PointerActivateValue"
},
bindings = new List<ActionBinding>()
{
new ActionBinding()
{
interactionPath = pointerActivateValue,
interactionProfileName = profile,
}
}
},
// Aim Ready
new ActionConfig()
{
name = "pointerActivateReady",
localizedName = "Is Pointed",
type = ActionType.Binary,
usages = new List<string>()
{
"PointerActivateReady"
},
bindings = new List<ActionBinding>()
{
new ActionBinding()
{
interactionPath = pointerActivateReady,
interactionProfileName = profile,
},
}
},
// Pinch Pose
new ActionConfig()
{
name = "pinchPose",
localizedName = "Pinch Pose",
type = ActionType.Pose,
usages = new List<string>()
{
"Pinch"
},
bindings = new List<ActionBinding>()
{
new ActionBinding()
{
interactionPath = pinchPose,
interactionProfileName = profile,
}
}
},
// Pinch Value
new ActionConfig()
{
name = "pinchValue",
localizedName = "Pinch Axis",
type = ActionType.Axis1D,
usages = new List<string>()
{
"PinchValue"
},
bindings = new List<ActionBinding>()
{
new ActionBinding()
{
interactionPath = pinchValue,
interactionProfileName = profile,
}
}
},
// Pinch Ready
new ActionConfig()
{
name = "pinchReady",
localizedName = "Is Pinched",
type = ActionType.Binary,
usages = new List<string>()
{
"PinchReady"
},
bindings = new List<ActionBinding>()
{
new ActionBinding()
{
interactionPath = pinchReady,
interactionProfileName = profile,
},
}
},
// Poke Pose
new ActionConfig()
{
name = "pokePose",
localizedName = "Index Tip",
type = ActionType.Pose,
usages = new List<string>()
{
"Poke"
},
bindings = new List<ActionBinding>()
{
new ActionBinding()
{
interactionPath = poke,
interactionProfileName = profile,
}
}
},
}
};
AddActionMap(actionMap);
}
}
}
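A minimal usage sketch for the feature defined above follows; the class name and log strings are illustrative and not part of the plugin. It fetches the feature from the OpenXR settings and confirms the runtime actually granted the extension before relying on its action map.

```csharp
// Minimal sketch: verify the "VIVE XR Hand Interaction Ext" feature at runtime.
using UnityEngine;
using UnityEngine.XR.OpenXR;
using VIVE.OpenXR.Hand;

public class HandInteractionExtCheck : MonoBehaviour
{
    void Start()
    {
        var feature = OpenXRSettings.Instance.GetFeature<ViveHandInteractionExt>();
        if (feature == null || !feature.enabled)
        {
            Debug.LogWarning("VIVE XR Hand Interaction Ext is not enabled in the OpenXR settings.");
            return;
        }

        // The extension must also be enabled by the runtime at xrCreateInstance time.
        bool runtimeEnabled =
            OpenXRRuntime.IsExtensionEnabled(ViveHandInteractionExt.kOpenxrExtensionString);
        Debug.Log("XR_EXT_hand_interaction enabled by runtime: " + runtimeEnabled);
    }
}
```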


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e1477dbc8916dff4f8e21fc343efcd46
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -1,14 +1,18 @@
// Copyright HTC Corporation All Rights Reserved. // Copyright HTC Corporation All Rights Reserved.
using UnityEngine;
using UnityEngine.XR;
using UnityEngine.XR.OpenXR; using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features; using UnityEngine.XR.OpenXR.Features;
using UnityEngine;
using System.Runtime.InteropServices;
using System; using System;
using System.Linq;
using UnityEngine.XR;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using AOT; using AOT;
using UnityEngine.InputSystem;
using UnityEngine.InputSystem.LowLevel;
#if UNITY_EDITOR #if UNITY_EDITOR
using UnityEditor; using UnityEditor;
using UnityEditor.XR.OpenXR.Features; using UnityEditor.XR.OpenXR.Features;
@@ -28,10 +32,23 @@ namespace VIVE.OpenXR.Hand
#endif #endif
public class ViveHandTracking : OpenXRFeature public class ViveHandTracking : OpenXRFeature
{ {
#region Log
const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandTracking "; const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandTracking ";
void DEBUG(string msg) { Debug.Log(LOG_TAG + " " + msg); } StringBuilder m_sb = null;
void WARNING(string msg) { Debug.LogWarning(LOG_TAG + " " + msg); } StringBuilder sb
void ERROR(string msg) { Debug.LogError(LOG_TAG + " " + msg); } {
get
{
if (m_sb == null) { m_sb = new StringBuilder(); }
return m_sb;
}
}
void DEBUG(String msg) { Debug.Log(LOG_TAG + msg); }
void DEBUG(StringBuilder msg) { Debug.Log(msg); }
void WARNING(StringBuilder msg) { Debug.LogWarning(msg); }
void ERROR(String msg) { Debug.LogError(LOG_TAG + msg); }
void ERROR(StringBuilder msg) { Debug.LogError(msg); }
#endregion
/// <summary> /// <summary>
/// OpenXR specification <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_EXT_hand_tracking">12.29 XR_EXT_hand_tracking</see>. /// OpenXR specification <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_EXT_hand_tracking">12.29 XR_EXT_hand_tracking</see>.
@@ -99,13 +116,14 @@ namespace VIVE.OpenXR.Hand
{ {
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString)) if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
{ {
WARNING("OnInstanceCreate() " + kOpenxrExtensionString + " is NOT enabled."); sb.Clear().Append(LOG_TAG).Append("OnInstanceCreate() ").Append(kOpenxrExtensionString).Append(" is NOT enabled."); WARNING(sb);
return false; return false;
} }
m_XrInstanceCreated = true; m_XrInstanceCreated = true;
m_XrInstance = xrInstance; m_XrInstance = xrInstance;
DEBUG("OnInstanceCreate() " + m_XrInstance); InputSystem.onAfterUpdate += UpdateCallback;
sb.Clear().Append(LOG_TAG).Append("OnInstanceCreate() ").Append(m_XrInstance); DEBUG(sb);
return GetXrFunctionDelegates(m_XrInstance); return GetXrFunctionDelegates(m_XrInstance);
} }
@@ -114,10 +132,14 @@ namespace VIVE.OpenXR.Hand
/// </summary> /// </summary>
/// <param name="xrInstance">The instance to destroy.</param> /// <param name="xrInstance">The instance to destroy.</param>
protected override void OnInstanceDestroy(ulong xrInstance) protected override void OnInstanceDestroy(ulong xrInstance)
{
if (m_XrInstance == xrInstance)
{ {
m_XrInstanceCreated = false; m_XrInstanceCreated = false;
m_XrInstance = 0; m_XrInstance = 0;
DEBUG("OnInstanceDestroy() " + xrInstance); InputSystem.onAfterUpdate -= UpdateCallback;
}
sb.Clear().Append(LOG_TAG).Append("OnInstanceDestroy() ").Append(xrInstance); DEBUG(sb);
} }
private XrSystemId m_XrSystemId = 0; private XrSystemId m_XrSystemId = 0;
@@ -128,7 +150,7 @@ namespace VIVE.OpenXR.Hand
protected override void OnSystemChange(ulong xrSystem) protected override void OnSystemChange(ulong xrSystem)
{ {
m_XrSystemId = xrSystem; m_XrSystemId = xrSystem;
DEBUG("OnSystemChange() " + m_XrSystemId); sb.Clear().Append(LOG_TAG).Append("OnSystemChange() ").Append(m_XrSystemId); DEBUG(sb);
} }
private bool m_XrSessionCreated = false; private bool m_XrSessionCreated = false;
@@ -146,7 +168,7 @@ namespace VIVE.OpenXR.Hand
{ {
m_XrSession = xrSession; m_XrSession = xrSession;
m_XrSessionCreated = true; m_XrSessionCreated = true;
DEBUG("OnSessionCreate() " + m_XrSession); sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() ").Append(m_XrSession); DEBUG(sb);
// Enumerate supported reference space types and create the XrSpace. // Enumerate supported reference space types and create the XrSpace.
XrReferenceSpaceType[] spaces = new XrReferenceSpaceType[Enum.GetNames(typeof(XrReferenceSpaceType)).Count()]; XrReferenceSpaceType[] spaces = new XrReferenceSpaceType[Enum.GetNames(typeof(XrReferenceSpaceType)).Count()];
@@ -158,7 +180,7 @@ namespace VIVE.OpenXR.Hand
spaces: out spaces[0]) == XrResult.XR_SUCCESS) spaces: out spaces[0]) == XrResult.XR_SUCCESS)
#pragma warning restore 0618 #pragma warning restore 0618
{ {
DEBUG("OnSessionCreate() spaceCountOutput: " + spaceCountOutput); sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() spaceCountOutput: ").Append(spaceCountOutput); DEBUG(sb);
Array.Resize(ref spaces, (int)spaceCountOutput); Array.Resize(ref spaces, (int)spaceCountOutput);
#pragma warning disable 0618 #pragma warning disable 0618
@@ -186,7 +208,7 @@ namespace VIVE.OpenXR.Hand
#pragma warning restore 0618 #pragma warning restore 0618
{ {
hasReferenceSpaceLocal = true; hasReferenceSpaceLocal = true;
DEBUG("OnSessionCreate() CreateReferenceSpace LOCAL: " + m_ReferenceSpaceLocal); sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() CreateReferenceSpace LOCAL: ").Append(m_ReferenceSpaceLocal); DEBUG(sb);
} }
else else
{ {
@@ -210,7 +232,7 @@ namespace VIVE.OpenXR.Hand
#pragma warning restore 0618 #pragma warning restore 0618
{ {
hasReferenceSpaceStage = true; hasReferenceSpaceStage = true;
DEBUG("OnSessionCreate() CreateReferenceSpace STAGE: " + m_ReferenceSpaceStage); sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() CreateReferenceSpace STAGE: ").Append(m_ReferenceSpaceStage); DEBUG(sb);
} }
else else
{ {
@@ -220,7 +242,7 @@ namespace VIVE.OpenXR.Hand
} }
else else
{ {
ERROR("OnSessionCreate() EnumerateReferenceSpaces(" + spaceCountOutput + ") failed."); sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() EnumerateReferenceSpaces(").Append(spaceCountOutput).Append(") failed."); ERROR(sb);
} }
} }
else else
@@ -233,7 +255,7 @@ namespace VIVE.OpenXR.Hand
{ {
hasLeftHandTracker = true; hasLeftHandTracker = true;
leftHandTracker = value; leftHandTracker = value;
DEBUG("OnSessionCreate() leftHandTracker " + leftHandTracker); sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() leftHandTracker ").Append(leftHandTracker); DEBUG(sb);
} }
} }
{ // right hand tracker { // right hand tracker
@@ -241,7 +263,7 @@ namespace VIVE.OpenXR.Hand
{ {
hasRightHandTracker = true; hasRightHandTracker = true;
rightHandTracker = value; rightHandTracker = value;
DEBUG("OnSessionCreate() rightHandTracker " + rightHandTracker); sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() rightHandTracker ").Append(rightHandTracker); DEBUG(sb);
} }
} }
} }
@@ -252,7 +274,7 @@ namespace VIVE.OpenXR.Hand
/// <param name="xrSession">The session ID to destroy.</param> /// <param name="xrSession">The session ID to destroy.</param>
protected override void OnSessionDestroy(ulong xrSession) protected override void OnSessionDestroy(ulong xrSession)
{ {
DEBUG("OnSessionDestroy() " + xrSession); sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() ").Append(xrSession); DEBUG(sb);
// Reference Space is binding with xrSession so we destroy the xrSpace when xrSession is destroyed. // Reference Space is binding with xrSession so we destroy the xrSpace when xrSession is destroyed.
if (hasReferenceSpaceLocal) if (hasReferenceSpaceLocal)
@@ -261,12 +283,12 @@ namespace VIVE.OpenXR.Hand
if (DestroySpace(m_ReferenceSpaceLocal) == XrResult.XR_SUCCESS) if (DestroySpace(m_ReferenceSpaceLocal) == XrResult.XR_SUCCESS)
#pragma warning restore 0618 #pragma warning restore 0618
{ {
DEBUG("OnSessionDestroy() DestroySpace LOCAL " + m_ReferenceSpaceLocal); sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() DestroySpace LOCAL ").Append(m_ReferenceSpaceLocal); DEBUG(sb);
m_ReferenceSpaceLocal = 0; m_ReferenceSpaceLocal = 0;
} }
else else
{ {
ERROR("OnSessionDestroy() DestroySpace LOCAL " + m_ReferenceSpaceLocal + " failed."); sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() DestroySpace LOCAL ").Append(m_ReferenceSpaceLocal).Append(" failed."); ERROR(sb);
} }
hasReferenceSpaceLocal = false; hasReferenceSpaceLocal = false;
} }
@@ -276,12 +298,12 @@ namespace VIVE.OpenXR.Hand
if (DestroySpace(m_ReferenceSpaceStage) == XrResult.XR_SUCCESS) if (DestroySpace(m_ReferenceSpaceStage) == XrResult.XR_SUCCESS)
#pragma warning restore 0618 #pragma warning restore 0618
{ {
DEBUG("OnSessionDestroy() DestroySpace STAGE " + m_ReferenceSpaceStage); sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() DestroySpace STAGE ").Append(m_ReferenceSpaceStage); DEBUG(sb);
m_ReferenceSpaceStage = 0; m_ReferenceSpaceStage = 0;
} }
else else
{ {
ERROR("OnSessionDestroy() DestroySpace STAGE " + m_ReferenceSpaceStage + " failed."); sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() DestroySpace STAGE ").Append(m_ReferenceSpaceStage).Append(" failed."); ERROR(sb);
} }
hasReferenceSpaceStage = false; hasReferenceSpaceStage = false;
} }
@@ -291,11 +313,11 @@ namespace VIVE.OpenXR.Hand
{ {
if (DestroyHandTrackerEXT(leftHandTracker) == XrResult.XR_SUCCESS) if (DestroyHandTrackerEXT(leftHandTracker) == XrResult.XR_SUCCESS)
{ {
DEBUG("OnSessionDestroy() Left DestroyHandTrackerEXT " + leftHandTracker); sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() Left DestroyHandTrackerEXT ").Append(leftHandTracker); DEBUG(sb);
} }
else else
{ {
ERROR("OnSessionDestroy() Left DestroyHandTrackerEXT " + leftHandTracker + " failed."); sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() Left DestroyHandTrackerEXT ").Append(leftHandTracker).Append(" failed."); ERROR(sb);
} }
hasLeftHandTracker = false; hasLeftHandTracker = false;
} }
@@ -303,11 +325,11 @@ namespace VIVE.OpenXR.Hand
{ {
if (DestroyHandTrackerEXT(rightHandTracker) == XrResult.XR_SUCCESS) if (DestroyHandTrackerEXT(rightHandTracker) == XrResult.XR_SUCCESS)
{ {
DEBUG("OnSessionDestroy() Right DestroyHandTrackerEXT " + rightHandTracker); sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() Right DestroyHandTrackerEXT ").Append(rightHandTracker); DEBUG(sb);
} }
else else
{ {
ERROR("OnSessionDestroy() Right DestroyHandTrackerEXT " + rightHandTracker + " failed."); sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() Right DestroyHandTrackerEXT ").Append(rightHandTracker).Append(" failed."); ERROR(sb);
} }
hasRightHandTracker = false; hasRightHandTracker = false;
} }
@@ -453,13 +475,13 @@ namespace VIVE.OpenXR.Hand
if (createInfo.hand == XrHandEXT.XR_HAND_LEFT_EXT && hasLeftHandTracker) if (createInfo.hand == XrHandEXT.XR_HAND_LEFT_EXT && hasLeftHandTracker)
{ {
DEBUG("CreateHandTrackerEXT() Left tracker " + leftHandTracker + " already created."); sb.Clear().Append(LOG_TAG).Append("CreateHandTrackerEXT() Left tracker ").Append(leftHandTracker).Append(" already created."); DEBUG(sb);
handTracker = leftHandTracker; handTracker = leftHandTracker;
return XrResult.XR_SUCCESS; return XrResult.XR_SUCCESS;
} }
if (createInfo.hand == XrHandEXT.XR_HAND_RIGHT_EXT && hasRightHandTracker) if (createInfo.hand == XrHandEXT.XR_HAND_RIGHT_EXT && hasRightHandTracker)
{ {
DEBUG("CreateHandTrackerEXT() Right tracker " + rightHandTracker + " already created."); sb.Clear().Append(LOG_TAG).Append("CreateHandTrackerEXT() Right tracker ").Append(rightHandTracker).Append(" already created."); DEBUG(sb);
handTracker = rightHandTracker; handTracker = rightHandTracker;
return XrResult.XR_SUCCESS; return XrResult.XR_SUCCESS;
} }
@@ -677,7 +699,7 @@ namespace VIVE.OpenXR.Hand
bool support = false; bool support = false;
for (int i = 0; i < spaceCountOutput; i++) for (int i = 0; i < spaceCountOutput; i++)
{ {
DEBUG("IsReferenceSpaceTypeSupported() supported space[" + i + "]: " + spaces[i]); sb.Clear().Append(LOG_TAG).Append("IsReferenceSpaceTypeSupported() supported space[").Append(i).Append("]: ").Append(spaces[i]); DEBUG(sb);
if (spaces[i] == space) { support = true; } if (spaces[i] == space) { support = true; }
} }
@@ -720,7 +742,7 @@ namespace VIVE.OpenXR.Hand
sys_hand_tracking_prop_ptr = new IntPtr(offset); sys_hand_tracking_prop_ptr = new IntPtr(offset);
handTrackingSystemProperties = (XrSystemHandTrackingPropertiesEXT)Marshal.PtrToStructure(sys_hand_tracking_prop_ptr, typeof(XrSystemHandTrackingPropertiesEXT)); handTrackingSystemProperties = (XrSystemHandTrackingPropertiesEXT)Marshal.PtrToStructure(sys_hand_tracking_prop_ptr, typeof(XrSystemHandTrackingPropertiesEXT));
DEBUG("IsHandTrackingSupported() XrSystemHandTrackingPropertiesEXT.supportsHandTracking: " + handTrackingSystemProperties.supportsHandTracking); sb.Clear().Append(LOG_TAG).Append("IsHandTrackingSupported() XrSystemHandTrackingPropertiesEXT.supportsHandTracking: ").Append((UInt32)handTrackingSystemProperties.supportsHandTracking); DEBUG(sb);
ret = handTrackingSystemProperties.supportsHandTracking > 0; ret = handTrackingSystemProperties.supportsHandTracking > 0;
} }
else else
@@ -736,7 +758,7 @@ namespace VIVE.OpenXR.Hand
{ {
if (!IsHandTrackingSupported()) if (!IsHandTrackingSupported())
{ {
ERROR("CreateHandTrackers() " + (isLeft ? "Left" : "Right") + " hand tracking is NOT supported."); sb.Clear().Append(LOG_TAG).Append("CreateHandTrackers() ").Append((isLeft ? "Left" : "Right")).Append(" hand tracking is NOT supported."); ERROR(sb);
handTracker = 0; handTracker = 0;
return false; return false;
} }
@@ -748,7 +770,7 @@ namespace VIVE.OpenXR.Hand
createInfo.handJointSet = XrHandJointSetEXT.XR_HAND_JOINT_SET_DEFAULT_EXT; createInfo.handJointSet = XrHandJointSetEXT.XR_HAND_JOINT_SET_DEFAULT_EXT;
var ret = CreateHandTrackerEXT(ref createInfo, out handTracker); var ret = CreateHandTrackerEXT(ref createInfo, out handTracker);
DEBUG("CreateHandTrackers() " + (isLeft ? "Left" : "Right") + " CreateHandTrackerEXT = " + ret); sb.Clear().Append(LOG_TAG).Append("CreateHandTrackers() ").Append((isLeft ? "Left" : "Right")).Append(" CreateHandTrackerEXT = ").Append(ret); DEBUG(sb);
return ret == XrResult.XR_SUCCESS; return ret == XrResult.XR_SUCCESS;
} }
@@ -773,20 +795,52 @@ namespace VIVE.OpenXR.Hand
return true; return true;
} }
private int lastUpdateFrameL = -1, lastUpdateFrameR = -1;
private void UpdateCallback()
{
// Only allow updating poses once at BeforeRender & Dynamic per frame.
if (InputState.currentUpdateType == InputUpdateType.BeforeRender ||
InputState.currentUpdateType == InputUpdateType.Dynamic)
{
lastUpdateFrameL = -1;
lastUpdateFrameR = -1;
}
}
private bool AllowUpdate(bool isLeft)
{
bool allow;
if (isLeft)
{
allow = (lastUpdateFrameL != Time.frameCount);
lastUpdateFrameL = Time.frameCount;
}
else
{
allow = (lastUpdateFrameR != Time.frameCount);
lastUpdateFrameR = Time.frameCount;
}
return allow;
}
/// <summary> /// <summary>
/// Retrieves the <see cref="XrHandJointLocationEXT"> XrHandJointLocationEXT </see> data. /// Retrieves the <see cref="XrHandJointLocationEXT"> XrHandJointLocationEXT </see> data.
/// </summary> /// </summary>
/// <param name="isLeft">Left or right hand.</param> /// <param name="isLeft">Left or right hand.</param>
/// <param name="handJointLocation">Output parameter to retrieve <see cref="XrHandJointLocationEXT"> XrHandJointLocationEXT </see> data.</param> /// <param name="handJointLocation">Output parameter to retrieve <see cref="XrHandJointLocationEXT"> XrHandJointLocationEXT </see> data.</param>
/// <param name="timestamp">The hand tracking data timestamp.</param>
/// <returns>True for valid data.</returns> /// <returns>True for valid data.</returns>
public bool GetJointLocations(bool isLeft, out XrHandJointLocationEXT[] handJointLocation) public bool GetJointLocations(bool isLeft, out XrHandJointLocationEXT[] handJointLocation, out XrTime timestamp)
{ {
bool ret = false;
handJointLocation = isLeft ? jointLocationsL : jointLocationsR; handJointLocation = isLeft ? jointLocationsL : jointLocationsR;
timestamp = m_frameState.predictedDisplayTime;
if (!AllowUpdate(isLeft)) { return true; }
bool ret = false;
if (isLeft && !hasLeftHandTracker) { return ret; } if (isLeft && !hasLeftHandTracker) { return ret; }
if (!isLeft && !hasRightHandTracker) { return ret; } if (!isLeft && !hasRightHandTracker) { return ret; }
OpenXRHelper.Trace.Begin("GetJointLocations");
TrackingOriginModeFlags origin = GetTrackingOriginMode(); TrackingOriginModeFlags origin = GetTrackingOriginMode();
if (origin == TrackingOriginModeFlags.Unknown || origin == TrackingOriginModeFlags.Unbounded) { return ret; } if (origin == TrackingOriginModeFlags.Unknown || origin == TrackingOriginModeFlags.Unbounded) { return ret; }
XrSpace baseSpace = (origin == TrackingOriginModeFlags.Device ? m_ReferenceSpaceLocal : m_ReferenceSpaceStage); XrSpace baseSpace = (origin == TrackingOriginModeFlags.Device ? m_ReferenceSpaceLocal : m_ReferenceSpaceStage);
@@ -831,6 +885,8 @@ namespace VIVE.OpenXR.Hand
locateInfo: locateInfo, locateInfo: locateInfo,
locations: ref locations) == XrResult.XR_SUCCESS) locations: ref locations) == XrResult.XR_SUCCESS)
{ {
timestamp = locateInfo.time;
if (locations.isActive) if (locations.isActive)
{ {
if (IntPtr.Size == 4) if (IntPtr.Size == 4)
@@ -858,7 +914,19 @@ namespace VIVE.OpenXR.Hand
} }
Marshal.FreeHGlobal(locations.jointLocations); Marshal.FreeHGlobal(locations.jointLocations);
OpenXRHelper.Trace.End();
return ret; return ret;
} }
/// <summary>
/// Retrieves the <see cref="XrHandJointLocationEXT"> XrHandJointLocationEXT </see> data.
/// </summary>
/// <param name="isLeft">Left or right hand.</param>
/// <param name="handJointLocation">Output parameter to retrieve <see cref="XrHandJointLocationEXT"> XrHandJointLocationEXT </see> data.</param>
/// <returns>True for valid data.</returns>
public bool GetJointLocations(bool isLeft, out XrHandJointLocationEXT[] handJointLocation)
{
return GetJointLocations(isLeft, out handJointLocation, out XrTime timestamp);
}
} }
} }
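For context, a minimal consumer of the new GetJointLocations overload shown in this diff might look like the sketch below. Everything except the ViveHandTracking API itself (the feature lookup, class name, and the using directives for the XR types' namespaces) is an assumption for illustration.

```csharp
// Minimal sketch: poll left-hand joint data from the ViveHandTracking feature.
using UnityEngine;
using UnityEngine.XR.OpenXR;
using VIVE.OpenXR;
using VIVE.OpenXR.Hand;

public class LeftHandJointLogger : MonoBehaviour
{
    ViveHandTracking m_Feature;

    void Start()
    {
        // Returns null if the feature is not enabled in the OpenXR settings.
        m_Feature = OpenXRSettings.Instance.GetFeature<ViveHandTracking>();
    }

    void Update()
    {
        if (m_Feature == null) { return; }

        if (m_Feature.GetJointLocations(true, out XrHandJointLocationEXT[] joints, out XrTime timestamp))
        {
            // joints follows the XR_EXT_hand_tracking joint order; timestamp is the
            // XrTime the poses were located at (the predicted display time by default).
            Debug.Log("Left hand joint count: " + joints.Length);
        }
    }
}
```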

Some files were not shown because too many files have changed in this diff.