version 2.5.0

This commit is contained in:
Sean Lu
2024-12-06 15:44:37 +08:00
parent dfdcd0fd7f
commit 2bfa2ad4c7
966 changed files with 238216 additions and 77239 deletions

View File

@@ -0,0 +1,38 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Runtime.InteropServices;
namespace VIVE.OpenXR.Feature
{
    /// <summary>
    /// Common contract for VIVE OpenXR feature wrappers: hook OpenXR instance
    /// creation/destruction so the wrapper can resolve and release its native
    /// function pointers.
    /// </summary>
    public interface IViveFeatureWrapper
    {
        /// <summary>
        /// Called when the XrInstance is created. Implementations resolve their
        /// native OpenXR functions here.
        /// </summary>
        /// <param name="xrInstance">The created OpenXR instance handle.</param>
        /// <param name="xrGetInstanceProcAddr">Native pointer to xrGetInstanceProcAddr.</param>
        /// <returns>True when initialization succeeded.</returns>
        public bool OnInstanceCreate(XrInstance xrInstance, IntPtr xrGetInstanceProcAddr);

        /// <summary>Called when the XrInstance is destroyed; release cached delegates here.</summary>
        public void OnInstanceDestroy();
    }

    /// <summary>
    /// Base class for feature wrappers providing a lazily created, thread-safe
    /// singleton (<see cref="Instance"/>) and the shared xrGetInstanceProcAddr delegate.
    /// </summary>
    /// <typeparam name="T">The concrete wrapper type (curiously recurring pattern).</typeparam>
    public class ViveFeatureWrapperBase<T> where T : ViveFeatureWrapperBase<T>, new()
    {
        // Lazy<T> guarantees thread-safe, once-only construction of the singleton.
        private static readonly Lazy<T> lazyInstance = new Lazy<T>(() => new T());
        public static T Instance => lazyInstance.Value;

        // Set true in your feature's OnInstanceCreate
        public bool IsInited { get; protected set; } = false;

        // Shared resolver delegate; populated by SetGetInstanceProcAddrPtr.
        public OpenXRHelper.xrGetInstanceProcAddrDelegate xrGetInstanceProcAddr;

        /// <summary>
        /// Complete the xrGetInstanceProcAddr by set the pointer received in OnInstanceCreate.
        /// </summary>
        /// <param name="intPtr">Native function pointer; must not be IntPtr.Zero.</param>
        /// <exception cref="ArgumentException">Thrown when the pointer is IntPtr.Zero.</exception>
        public void SetGetInstanceProcAddrPtr(IntPtr intPtr)
        {
            // IntPtr is a value type, so only the Zero check is meaningful;
            // the previous "== null" comparison was redundant.
            if (intPtr == IntPtr.Zero)
                throw new ArgumentException("xrGetInstanceProcAddr is null", nameof(intPtr));
            xrGetInstanceProcAddr = Marshal.GetDelegateForFunctionPointer<OpenXRHelper.xrGetInstanceProcAddrDelegate>(intPtr);
        }
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a27dc5505cdb29347aeda46676cedaa8
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,80 @@
using System;
using System.Runtime.InteropServices;
namespace VIVE.OpenXR
{
    /// <summary>
    /// Helpers for moving managed arrays into and out of unmanaged (HGlobal)
    /// memory, used when native OpenXR calls need raw buffers.
    /// </summary>
    public static class MemoryTools
    {
        /// <summary>
        /// Convert the enum array to IntPtr. Should call <see cref="ReleaseRawMemory(IntPtr)"/> after use.
        /// Each element is written to the buffer as a 32-bit int regardless of the
        /// enum's underlying type.
        /// </summary>
        /// <typeparam name="T">The enum type.</typeparam>
        /// <param name="array">The values to marshal.</param>
        /// <returns>Unmanaged buffer holding the values as int32s.</returns>
        public static IntPtr ToIntPtr<T>(T[] array) where T : Enum
        {
            // Bug fix: the buffer is filled below via Marshal.Copy with int32
            // values, so it must be sized by sizeof(int). The previous
            // Marshal.SizeOf(typeof(T)) sizing could under-allocate (and thus
            // overrun the allocation) for enums backed by a type smaller than int.
            IntPtr ptr = Marshal.AllocHGlobal(sizeof(int) * array.Length);
            int[] intArray = new int[array.Length];
            for (int i = 0; i < array.Length; i++)
            {
                // Convert.ToInt32 handles any integral underlying type; the
                // previous (int)(object) unboxing threw for non-int-backed enums.
                intArray[i] = Convert.ToInt32(array[i]);
            }
            Marshal.Copy(intArray, 0, ptr, array.Length);
            return ptr;
        }

        /// <summary>
        /// Make the same size raw buffer from input array.
        /// </summary>
        /// <typeparam name="T">Data type could be primitive type or struct. Should call <see cref="ReleaseRawMemory(IntPtr)"/> after use.</typeparam>
        /// <param name="refArray">The data array used only to compute the buffer size.</param>
        /// <returns>The memory handle. Should release by <see cref="ReleaseRawMemory(IntPtr)"/>. Contents are uninitialized.</returns>
        public static IntPtr MakeRawMemory<T>(T[] refArray)
        {
            int size = Marshal.SizeOf(typeof(T)) * refArray.Length;
            return Marshal.AllocHGlobal(size);
        }

        /// <summary>
        /// Copy the raw memory to the array. You should make sure the array has the same size as the raw memory.
        /// </summary>
        /// <typeparam name="T">Convert the memory to this type array.</typeparam>
        /// <param name="array">The output array.</param>
        /// <param name="raw">The data source in raw memory form.</param>
        /// <param name="count">Specify the copy count. Count should be less than array length; when 0 (default) or larger than the array, the full array length is used.</param>
        public static void CopyFromRawMemory<T>(T[] array, IntPtr raw, int count = 0)
        {
            int N = array.Length;
            if (count > 0 && count < array.Length)
                N = count;
            int step = Marshal.SizeOf(typeof(T));
            for (int i = 0; i < N; i++)
            {
                // Element-wise marshal; supports structs, not just primitives.
                array[i] = Marshal.PtrToStructure<T>(IntPtr.Add(raw, i * step));
            }
        }

        /// <summary>
        /// Copy the array into raw memory. Make sure the raw has enough size.
        /// </summary>
        /// <typeparam name="T">Convert this type array to raw memory.</typeparam>
        /// <param name="raw">The output data in raw memory form.</param>
        /// <param name="array">The data source.</param>
        public static void CopyToRawMemory<T>(IntPtr raw, T[] array)
        {
            int step = Marshal.SizeOf(typeof(T));
            for (int i = 0; i < array.Length; i++)
            {
                // fDeleteOld = false: destination holds no marshaled object to free.
                Marshal.StructureToPtr<T>(array[i], IntPtr.Add(raw, i * step), false);
            }
        }

        /// <summary>
        /// Release the raw memory handle which is created by <see cref="MakeRawMemory{T}(T[])"/>
        /// or <see cref="ToIntPtr{T}(T[])"/>. Safe to call with IntPtr.Zero.
        /// </summary>
        /// <param name="ptr">The handle to free.</param>
        public static void ReleaseRawMemory(IntPtr ptr)
        {
            Marshal.FreeHGlobal(ptr);
        }
    }
}

View File

@@ -3,6 +3,7 @@
using System;
using System.Runtime.InteropServices;
using UnityEngine;
using UnityEngine.Profiling;
namespace VIVE.OpenXR.Feature
{
@@ -10,23 +11,16 @@ namespace VIVE.OpenXR.Feature
/// To use this wrapper, you need to call CommonWrapper.Instance.OnInstanceCreate() in your feature's OnInstanceCreate(),
/// and call CommonWrapper.Instance.OnInstanceDestroy() in your feature's OnInstanceDestroy().
/// </summary>
public class CommonWrapper
public class CommonWrapper : ViveFeatureWrapperBase<CommonWrapper>, IViveFeatureWrapper
{
static CommonWrapper instance = null;
public static CommonWrapper Instance
{
get
{
if (instance == null)
instance = new CommonWrapper();
return instance;
}
}
bool isInited = false;
OpenXRHelper.xrGetInstanceProcAddrDelegate XrGetInstanceProcAddr;
OpenXRHelper.xrGetSystemPropertiesDelegate XrGetSystemProperties;
OpenXRHelper.xrCreateSwapchainDelegate XrCreateSwapchain;
OpenXRHelper.xrDestroySwapchainDelegate XrDestroySwapchain;
OpenXRHelper.xrEnumerateSwapchainFormatsDelegate XrEnumerateSwapchainFormats;
OpenXRHelper.xrEnumerateSwapchainImagesDelegate XrEnumerateSwapchainImages;
OpenXRHelper.xrWaitSwapchainImageDelegate XrWaitSwapchainImage;
OpenXRHelper.xrAcquireSwapchainImageDelegate XrAcquireSwapchainImage;
OpenXRHelper.xrReleaseSwapchainImageDelegate XrReleaseSwapchainImage;
/// <summary>
/// In feature's OnInstanceCreate(), call CommonWrapper.Instance.OnInstanceCreate() for init common APIs.
@@ -35,32 +29,32 @@ namespace VIVE.OpenXR.Feature
/// <param name="xrGetInstanceProcAddr">Pass OpenXRFeature.xrGetInstanceProcAddr in.</param>
/// <returns></returns>
/// <exception cref="Exception">If input data not valid.</exception>
public bool OnInstanceCreate(XrInstance xrInstance, IntPtr xrGetInstanceProcAddr)
public bool OnInstanceCreate(XrInstance xrInstance, IntPtr xrGetInstanceProcAddrPtr)
{
if (isInited) return true;
if (IsInited) return true;
if (xrInstance == 0)
throw new Exception("CommonWrapper: xrInstance is null");
Debug.Log("CommonWrapper: OnInstanceCreate()");
/// OpenXRFeature.xrGetInstanceProcAddr
if (xrGetInstanceProcAddr == null || xrGetInstanceProcAddr == IntPtr.Zero)
throw new Exception("CommonWrapper: xrGetInstanceProcAddr is null");
Debug.Log("CommonWrapper: Get function pointer of xrGetInstanceProcAddr.");
XrGetInstanceProcAddr = Marshal.GetDelegateForFunctionPointer(
xrGetInstanceProcAddr,
typeof(OpenXRHelper.xrGetInstanceProcAddrDelegate)) as OpenXRHelper.xrGetInstanceProcAddrDelegate;
SetGetInstanceProcAddrPtr(xrGetInstanceProcAddrPtr);
bool ret = true;
IntPtr funcPtr = IntPtr.Zero;
ret &= OpenXRHelper.GetXrFunctionDelegate(XrGetInstanceProcAddr, xrInstance, "xrGetSystemProperties", out XrGetSystemProperties);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrGetSystemProperties", out XrGetSystemProperties);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrCreateSwapchain", out XrCreateSwapchain);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrDestroySwapchain", out XrDestroySwapchain);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrEnumerateSwapchainFormats", out XrEnumerateSwapchainFormats);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrEnumerateSwapchainImages", out XrEnumerateSwapchainImages);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrWaitSwapchainImage", out XrWaitSwapchainImage);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrAcquireSwapchainImage", out XrAcquireSwapchainImage);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrReleaseSwapchainImage", out XrReleaseSwapchainImage);
if (!ret)
throw new Exception("CommonWrapper: Get function pointers failed.");
isInited = ret;
IsInited = ret;
return ret;
}
@@ -70,21 +64,20 @@ namespace VIVE.OpenXR.Feature
/// <returns></returns>
public void OnInstanceDestroy()
{
isInited = false;
XrGetInstanceProcAddr = null;
IsInited = false;
XrGetSystemProperties = null;
Debug.Log("CommonWrapper: OnInstanceDestroy()");
}
public XrResult GetInstanceProcAddr(XrInstance instance, string name, out IntPtr function)
{
if (isInited == false || XrGetInstanceProcAddr == null)
if (IsInited == false || xrGetInstanceProcAddr == null)
{
function = IntPtr.Zero;
return XrResult.XR_ERROR_HANDLE_INVALID;
}
return XrGetInstanceProcAddr(instance, name, out function);
return xrGetInstanceProcAddr(instance, name, out function);
}
/// <summary>
@@ -97,7 +90,7 @@ namespace VIVE.OpenXR.Feature
/// <returns></returns>
public XrResult GetSystemProperties(XrInstance instance, XrSystemId systemId, ref XrSystemProperties properties)
{
if (isInited == false || XrGetSystemProperties == null)
if (IsInited == false || XrGetSystemProperties == null)
{
return XrResult.XR_ERROR_HANDLE_INVALID;
}
@@ -136,5 +129,115 @@ namespace VIVE.OpenXR.Feature
Marshal.FreeHGlobal(systemProperties.next);
return ret;
}
/// <summary>
/// Forwards to the runtime's xrCreateSwapchain.
/// </summary>
/// <param name="session">The OpenXR session.</param>
/// <param name="createInfo">Swapchain creation parameters.</param>
/// <param name="swapchain">Receives the created swapchain, or default on failure.</param>
/// <returns>XR_ERROR_HANDLE_INVALID when the wrapper is not initialized; otherwise the runtime's result.</returns>
public XrResult CreateSwapchain(XrSession session, ref XrSwapchainCreateInfo createInfo, out XrSwapchain swapchain)
{
    if (IsInited && XrCreateSwapchain != null)
        return XrCreateSwapchain(session, ref createInfo, out swapchain);

    swapchain = default;
    return XrResult.XR_ERROR_HANDLE_INVALID;
}
/// <summary>
/// Forwards to the runtime's xrDestroySwapchain.
/// </summary>
/// <param name="swapchain">The swapchain to destroy.</param>
/// <returns>XR_ERROR_HANDLE_INVALID when the wrapper is not initialized; otherwise the runtime's result.</returns>
public XrResult DestroySwapchain(XrSwapchain swapchain)
{
    if (!IsInited || XrDestroySwapchain == null)
        return XrResult.XR_ERROR_HANDLE_INVALID;

    return XrDestroySwapchain(swapchain);
}
/// <summary>
/// Enumerates the runtime's supported swapchain image formats using the OpenXR
/// two-call idiom: call with formatCapacityInput == 0 to query the count, then
/// call again with an allocated array.
/// </summary>
/// <param name="session">The OpenXR session.</param>
/// <param name="formatCapacityInput">Capacity of <paramref name="formats"/>; 0 to query the count only.</param>
/// <param name="formatCountOutput">Receives the number of formats written (or available).</param>
/// <param name="formats">Output array of format ids; must hold at least formatCapacityInput elements.</param>
/// <returns>XR_ERROR_HANDLE_INVALID when not initialized, XR_ERROR_SIZE_INSUFFICIENT when the array is too small; otherwise the runtime's result.</returns>
public XrResult EnumerateSwapchainFormats(XrSession session, uint formatCapacityInput, ref uint formatCountOutput, ref long[] formats)
{
    if (IsInited == false || XrEnumerateSwapchainFormats == null)
    {
        formatCountOutput = 0;
        return XrResult.XR_ERROR_HANDLE_INVALID;
    }
    // Caller-supplied buffer must be large enough for the requested capacity.
    if (formatCapacityInput != 0 && (formats == null || formats.Length < formatCapacityInput))
        return XrResult.XR_ERROR_SIZE_INSUFFICIENT;
    if (formatCapacityInput == 0)
    {
        // Count-query call: no output buffer is needed.
        Debug.Log("CommonWrapper: EnumerateSwapchainFormats(ci=" + formatCapacityInput + ")");
        return XrEnumerateSwapchainFormats(session, 0, ref formatCountOutput, IntPtr.Zero);
    }
    else
    {
        Debug.Log("CommonWrapper: EnumerateSwapchainFormats(ci=" + formatCapacityInput + ", formats=long[" + formats.Length + "])");
        // Marshal through a temporary unmanaged buffer because the native call
        // writes raw int64 values.
        IntPtr formatsPtr = MemoryTools.MakeRawMemory(formats);
        var ret = XrEnumerateSwapchainFormats(session, formatCapacityInput, ref formatCountOutput, formatsPtr);
        // Only copy results back on success; formatCountOutput limits the copy.
        if (ret == XrResult.XR_SUCCESS)
            MemoryTools.CopyFromRawMemory(formats, formatsPtr, (int)formatCountOutput);
        // Release the temporary buffer on every path.
        MemoryTools.ReleaseRawMemory(formatsPtr);
        return ret;
    }
}
/// <summary>
/// Forwards to the runtime's xrEnumerateSwapchainImages. The caller owns the
/// unmanaged buffer behind <paramref name="imagesPtr"/>.
/// </summary>
/// <param name="swapchain">The swapchain to enumerate.</param>
/// <param name="imageCapacityInput">Capacity of the buffer, in elements; 0 to query the count.</param>
/// <param name="imageCountOutput">Receives the number of images written (or available).</param>
/// <param name="imagesPtr">Unmanaged buffer receiving the image structures.</param>
/// <returns>XR_ERROR_HANDLE_INVALID when the wrapper is not initialized; otherwise the runtime's result.</returns>
public XrResult EnumerateSwapchainImages(XrSwapchain swapchain, uint imageCapacityInput, ref uint imageCountOutput, IntPtr imagesPtr)
{
    if (IsInited && XrEnumerateSwapchainImages != null)
        return XrEnumerateSwapchainImages(swapchain, imageCapacityInput, ref imageCountOutput, imagesPtr);

    imageCountOutput = 0;
    return XrResult.XR_ERROR_HANDLE_INVALID;
}
[DllImport("viveopenxr", EntryPoint = "CwAcquireSwapchainImage")]
public static extern XrResult CwAcquireSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageAcquireInfo acquireInfo, out uint index);

/// <summary>
/// Forwards to the runtime's xrAcquireSwapchainImage, wrapped in a Unity
/// profiler sample ("ASW: xrAcqScImg").
/// </summary>
/// <param name="swapchain">The swapchain to acquire from.</param>
/// <param name="acquireInfo">Acquire parameters.</param>
/// <param name="index">Receives the acquired image index; 0 on failure.</param>
/// <returns>XR_ERROR_HANDLE_INVALID when the wrapper is not initialized; otherwise the runtime's result.</returns>
public XrResult AcquireSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageAcquireInfo acquireInfo, out uint index)
{
    if (IsInited && XrAcquireSwapchainImage != null)
    {
        Profiler.BeginSample("ASW: xrAcqScImg");
        XrResult result = XrAcquireSwapchainImage(swapchain, ref acquireInfo, out index);
        Profiler.EndSample();
        return result;
    }

    index = 0;
    return XrResult.XR_ERROR_HANDLE_INVALID;
}
[DllImport("viveopenxr", EntryPoint = "CwWaitSwapchainImage")]
public static extern XrResult CwWaitSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageWaitInfo waitInfo);

/// <summary>
/// Forwards to the runtime's xrWaitSwapchainImage, wrapped in a Unity profiler
/// sample ("ASW: xrWaitScImg").
/// </summary>
/// <param name="swapchain">The swapchain whose image to wait on.</param>
/// <param name="waitInfo">Wait parameters (e.g. timeout).</param>
/// <returns>XR_ERROR_HANDLE_INVALID when the wrapper is not initialized; otherwise the runtime's result.</returns>
public XrResult WaitSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageWaitInfo waitInfo)
{
    if (IsInited && XrWaitSwapchainImage != null)
    {
        Profiler.BeginSample("ASW: xrWaitScImg");
        XrResult result = XrWaitSwapchainImage(swapchain, ref waitInfo);
        Profiler.EndSample();
        return result;
    }

    return XrResult.XR_ERROR_HANDLE_INVALID;
}
[DllImport("viveopenxr", EntryPoint = "CwReleaseSwapchainImage")]
public static extern XrResult CwReleaseSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageReleaseInfo releaseInfo);

/// <summary>
/// Forwards to the runtime's xrReleaseSwapchainImage, wrapped in a Unity
/// profiler sample ("ASW: xrRelScImg").
/// </summary>
/// <param name="swapchain">The swapchain whose image to release.</param>
/// <param name="releaseInfo">Release parameters.</param>
/// <returns>XR_ERROR_HANDLE_INVALID when the wrapper is not initialized; otherwise the runtime's result.</returns>
public XrResult ReleaseSwapchainImage(XrSwapchain swapchain, ref XrSwapchainImageReleaseInfo releaseInfo)
{
    if (IsInited && XrReleaseSwapchainImage != null)
    {
        Profiler.BeginSample("ASW: xrRelScImg");
        XrResult result = XrReleaseSwapchainImage(swapchain, ref releaseInfo);
        Profiler.EndSample();
        return result;
    }

    return XrResult.XR_ERROR_HANDLE_INVALID;
}
}
}

View File

@@ -0,0 +1,207 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using UnityEngine;
using UnityEngine.XR.OpenXR;
namespace VIVE.OpenXR.Feature
{
    using XrFutureEXT = System.IntPtr;

    /// <summary>
    /// Wrapper for the XR_EXT_future OpenXR extension.
    ///
    /// To use this wrapper,
    /// 1. Add the "XR_EXT_future" extension to the instance's enabled extensions list.
    /// 2. Call FutureWrapper.Instance.OnInstanceCreate() in your feature's OnInstanceCreate().
    /// 3. Call FutureWrapper.Instance.OnInstanceDestroy() in your feature's OnInstanceDestroy().
    ///
    /// <see cref="VIVE.OpenXR.Toolkits.FutureTask.Poll"/> function helps make async Task.
    /// </summary>
    public class FutureWrapper : ViveFeatureWrapperBase<FutureWrapper>, IViveFeatureWrapper
    {
        /// <summary>State of a future as reported by xrPollFutureEXT.</summary>
        public enum XrFutureStateEXT
        {
            None = 0, // Not defined in extension. A default value.
            Pending = 1,
            Ready = 2,
            MAX = 0x7FFFFFFF
        }

        /// <summary>Input to xrPollFutureEXT identifying the future to poll.</summary>
        public struct XrFuturePollInfoEXT
        {
            public XrStructureType type; // XR_TYPE_FUTURE_POLL_INFO_EXT
            public IntPtr next;
            public XrFutureEXT future;
        }

        /// <summary>Output of xrPollFutureEXT carrying the future's state.</summary>
        public struct XrFuturePollResultEXT
        {
            public XrStructureType type; // XR_TYPE_FUTURE_POLL_RESULT_EXT
            public IntPtr next;
            public XrFutureStateEXT state;
        }

        /// <summary>Input to xrCancelFutureEXT identifying the future to cancel.</summary>
        public struct XrFutureCancelInfoEXT
        {
            public XrStructureType type; // XR_TYPE_FUTURE_CANCEL_INFO_EXT
            public IntPtr next;
            public XrFutureEXT future;
        }

        /// <summary>Base header shared by the extension's completion structures.</summary>
        public struct XrFutureCompletionBaseHeaderEXT
        {
            public XrStructureType type; // XR_TYPE_FUTURE_COMPLETION_EXT
            public IntPtr next;
            public XrResult futureResult;
        }

        /// <summary>Completion structure carrying the async operation's result.</summary>
        public struct XrFutureCompletionEXT
        {
            public XrStructureType type; // XR_TYPE_FUTURE_COMPLETION_EXT
            public IntPtr next;
            public XrResult futureResult;
        }

        public delegate XrResult XrPollFutureEXTDelegate(XrInstance instance, ref XrFuturePollInfoEXT pollInfo, out XrFuturePollResultEXT pollResult);
        public delegate XrResult XrCancelFutureEXTDelegate(XrInstance instance, ref XrFutureCancelInfoEXT cancelInfo);

        XrPollFutureEXTDelegate XrPollFutureEXT;
        XrCancelFutureEXTDelegate XrCancelFutureEXT;
        XrInstance xrInstance;

        /// <summary>
        /// Features should call FutureWrapper.Instance.OnInstanceCreate() in their OnInstanceCreate().
        /// Verifies the extension is enabled, then resolves xrPollFutureEXT and
        /// xrCancelFutureEXT and caches the instance handle.
        /// </summary>
        /// <param name="xrInstance">The created OpenXR instance handle.</param>
        /// <param name="xrGetInstanceProcAddrPtr">Native pointer to xrGetInstanceProcAddr.</param>
        /// <returns>True when the extension is enabled and both functions were resolved.</returns>
        /// <exception cref="Exception">If the instance handle or proc-addr pointer is invalid.</exception>
        public bool OnInstanceCreate(XrInstance xrInstance, IntPtr xrGetInstanceProcAddrPtr)
        {
            if (IsInited) return true;

            // Bug fix: the handle was previously compared against null, which is
            // never true for a value-type handle, so the guard could not fire.
            // Compare against 0 instead (same convention as CommonWrapper).
            if (xrInstance == 0)
                throw new Exception("FutureWrapper: xrInstance is null");
            this.xrInstance = xrInstance;

            // IntPtr is a value type; only the Zero check is meaningful.
            if (xrGetInstanceProcAddrPtr == IntPtr.Zero)
                throw new Exception("FutureWrapper: xrGetInstanceProcAddr is null");
            SetGetInstanceProcAddrPtr(xrGetInstanceProcAddrPtr);
            Debug.Log("FutureWrapper: OnInstanceCreate()");

            bool hasFuture = OpenXRRuntime.IsExtensionEnabled("XR_EXT_future");
            if (!hasFuture)
            {
                Debug.LogError("FutureWrapper: XR_EXT_future is not enabled. Check your feature's kOpenxrExtensionString.");
                return false;
            }

            bool ret = true;
            ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrPollFutureEXT", out XrPollFutureEXT);
            ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrCancelFutureEXT", out XrCancelFutureEXT);
            if (!ret)
            {
                Debug.LogError("FutureWrapper: Failed to get function pointer.");
                return false;
            }
            IsInited = ret;
            return ret;
        }

        /// <summary>Resets cached delegates and the instance handle.</summary>
        public void OnInstanceDestroy()
        {
            Debug.Log("FutureWrapper: OnInstanceDestroy()");
            IsInited = false;
            XrPollFutureEXT = null;
            XrCancelFutureEXT = null;
            xrInstance = 0;
        }

        /// <summary>
        /// Used to get the state of a future. If Ready, call complete functions to get the result.
        /// </summary>
        /// <param name="pollInfo">Filled-in poll info identifying the future.</param>
        /// <param name="pollResult">Receives the future's state; defaulted (state None) when not initialized.</param>
        /// <returns>XR_ERROR_HANDLE_INVALID when not initialized; otherwise the runtime's result.</returns>
        public XrResult PollFuture(ref XrFuturePollInfoEXT pollInfo, out XrFuturePollResultEXT pollResult)
        {
            pollResult = new XrFuturePollResultEXT()
            {
                type = XrStructureType.XR_TYPE_FUTURE_POLL_RESULT_EXT,
                next = IntPtr.Zero,
                state = XrFutureStateEXT.None
            };
            if (!IsInited)
                return XrResult.XR_ERROR_HANDLE_INVALID;
            return XrPollFutureEXT(xrInstance, ref pollInfo, out pollResult);
        }

        /// <summary>
        /// Convenience overload of <see cref="PollFuture(ref XrFuturePollInfoEXT, out XrFuturePollResultEXT)"/>
        /// that builds the poll info from a bare future handle.
        /// </summary>
        /// <param name="future">The future handle to poll.</param>
        /// <param name="pollResult">Receives the future's state; defaulted (state None) when not initialized.</param>
        /// <returns>XR_ERROR_HANDLE_INVALID when not initialized; otherwise the runtime's result.</returns>
        public XrResult PollFuture(XrFutureEXT future, out XrFuturePollResultEXT pollResult)
        {
            pollResult = new XrFuturePollResultEXT()
            {
                type = XrStructureType.XR_TYPE_FUTURE_POLL_RESULT_EXT,
                next = IntPtr.Zero,
                state = XrFutureStateEXT.None
            };
            if (!IsInited)
                return XrResult.XR_ERROR_HANDLE_INVALID;
            XrFuturePollInfoEXT pollInfo = new XrFuturePollInfoEXT()
            {
                type = XrStructureType.XR_TYPE_FUTURE_POLL_INFO_EXT,
                next = IntPtr.Zero,
                future = future
            };
            return XrPollFutureEXT(xrInstance, ref pollInfo, out pollResult);
        }

        /// <summary>
        /// This function cancels the future and signals that the async operation is not required.
        /// After a future has been cancelled any functions using this future must return XR_ERROR_FUTURE_INVALID_EXT.
        /// </summary>
        /// <param name="cancelInfo">Filled-in cancel info identifying the future.</param>
        /// <returns>XR_ERROR_HANDLE_INVALID when not initialized; otherwise the runtime's result.</returns>
        public XrResult CancelFuture(ref XrFutureCancelInfoEXT cancelInfo)
        {
            if (!IsInited)
                return XrResult.XR_ERROR_HANDLE_INVALID;
            return XrCancelFutureEXT(xrInstance, ref cancelInfo);
        }

        /// <summary>
        /// Convenience overload of <see cref="CancelFuture(ref XrFutureCancelInfoEXT)"/> for a bare future handle.
        /// </summary>
        /// <param name="future">The future handle to cancel.</param>
        /// <returns>XR_ERROR_HANDLE_INVALID when not initialized; otherwise the runtime's result.</returns>
        public XrResult CancelFuture(XrFutureEXT future)
        {
            if (!IsInited)
                return XrResult.XR_ERROR_HANDLE_INVALID;
            XrFutureCancelInfoEXT cancelInfo = new XrFutureCancelInfoEXT()
            {
                type = XrStructureType.XR_TYPE_FUTURE_CANCEL_INFO_EXT,
                next = IntPtr.Zero,
                future = future
            };
            return XrCancelFutureEXT(xrInstance, ref cancelInfo);
        }
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e8522c7af0a4127409a8800e1ddd5985
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,10 +1,5 @@
// Copyright HTC Corporation All Rights Reserved.
// Remove FAKE_DATA if editor or windows is supported.
#if UNITY_EDITOR
#define FAKE_DATA
#endif
using System;
using UnityEngine;
@@ -15,21 +10,8 @@ namespace VIVE.OpenXR.Feature
/// To use this wrapper, you need to call CommonWrapper.Instance.OnInstanceCreate() in your feature's OnInstanceCreate(),
/// and call CommonWrapper.Instance.OnInstanceDestroy() in your feature's OnInstanceDestroy().
/// </summary>
public class SpaceWrapper
public class SpaceWrapper : ViveFeatureWrapperBase<SpaceWrapper>, IViveFeatureWrapper
{
static SpaceWrapper instance = null;
public static SpaceWrapper Instance
{
get
{
if (instance == null)
instance = new SpaceWrapper();
return instance;
}
}
bool isInited = false;
delegate XrResult DelegateXrLocateSpace(XrSpace space, XrSpace baseSpace, XrTime time, ref XrSpaceLocation location);
delegate XrResult DelegateXrDestroySpace(XrSpace space);
@@ -44,31 +26,30 @@ namespace VIVE.OpenXR.Feature
/// <param name="GetAddr"></param>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public bool OnInstanceCreate(XrInstance xrInstance, OpenXRHelper.xrGetInstanceProcAddrDelegate GetAddr)
public bool OnInstanceCreate(XrInstance xrInstance, IntPtr GetAddr)
{
if (isInited) return true;
if (IsInited) return true;
if (xrInstance == null)
throw new Exception("ViveSpace: xrInstance is null");
if (GetAddr == null)
throw new Exception("ViveSpace: xrGetInstanceProcAddr is null");
SetGetInstanceProcAddrPtr(GetAddr);
Debug.Log("ViveSpace: OnInstanceCreate()");
bool ret = true;
IntPtr funcPtr = IntPtr.Zero;
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, xrInstance, "xrCreateReferenceSpace", out XrCreateReferenceSpace);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, xrInstance, "xrLocateSpace", out XrLocateSpace);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, xrInstance, "xrDestroySpace", out XrDestroySpace);
isInited = ret;
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrCreateReferenceSpace", out XrCreateReferenceSpace);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrLocateSpace", out XrLocateSpace);
ret &= OpenXRHelper.GetXrFunctionDelegate(xrGetInstanceProcAddr, xrInstance, "xrDestroySpace", out XrDestroySpace);
IsInited = ret;
return ret;
}
public void OnInstanceDestroy()
{
isInited = false;
IsInited = false;
XrCreateReferenceSpace = null;
XrLocateSpace = null;
XrDestroySpace = null;
@@ -77,8 +58,8 @@ namespace VIVE.OpenXR.Feature
/// <summary>
/// Create a reference space without create info.
/// Example:
/// CreateReferenceSpace(session, XrReferenceSpaceType.XR_REFERENCE_SPACE_TYPE_LOCAL, XrPosef.identity, out space);
/// CreateReferenceSpace(session, XrReferenceSpaceType.XR_REFERENCE_SPACE_TYPE_STAGE, XrPosef.identity, out space);
/// CreateReferenceSpace(session, XrReferenceSpaceType.XR_REFERENCE_SPACE_TYPE_LOCAL, XrPosef.Identity, out space);
/// CreateReferenceSpace(session, XrReferenceSpaceType.XR_REFERENCE_SPACE_TYPE_STAGE, XrPosef.Identity, out space);
/// </summary>
/// <param name="session"></param>
/// <param name="referenceSpaceType"></param>
@@ -87,8 +68,9 @@ namespace VIVE.OpenXR.Feature
/// <returns></returns>
public XrResult CreateReferenceSpace(XrSession session, XrReferenceSpaceType referenceSpaceType, XrPosef pose, out XrSpace space)
{
if (!isInited)
throw new Exception("ViveSpace: not initialized");
space = 0;
if (!IsInited)
return XrResult.XR_ERROR_HANDLE_INVALID;
var createInfo = new XrReferenceSpaceCreateInfo();
createInfo.type = XrStructureType.XR_TYPE_REFERENCE_SPACE_CREATE_INFO;
@@ -107,24 +89,25 @@ namespace VIVE.OpenXR.Feature
/// <returns></returns>
public XrResult CreateReferenceSpace(XrSession session, XrReferenceSpaceCreateInfo createInfo, out XrSpace space)
{
if (!isInited)
throw new Exception("ViveSpace: not initialized");
space = 0;
if (!IsInited)
return XrResult.XR_ERROR_HANDLE_INVALID;
return XrCreateReferenceSpace(session, ref createInfo, out space);
}
public XrResult LocateSpace(XrSpace space, XrSpace baseSpace, XrTime time, ref XrSpaceLocation location)
{
if (!isInited)
throw new Exception("ViveSpace: not initialized");
Debug.Log($"LocateSpace(s={space}, bs={baseSpace}, t={time}");
if (!IsInited)
return XrResult.XR_ERROR_HANDLE_INVALID;
//Debug.Log($"LocateSpace(s={space}, bs={baseSpace}, t={time}");
return XrLocateSpace(space, baseSpace, time, ref location);
}
public XrResult DestroySpace(XrSpace space)
{
if (!isInited)
throw new Exception("ViveSpace: not initialized");
if (!IsInited)
return XrResult.XR_ERROR_HANDLE_INVALID;
Debug.Log($"DestroySpace({space})");
return XrDestroySpace(space);
}
@@ -157,19 +140,6 @@ namespace VIVE.OpenXR.Feature
public bool GetRelatedPose(XrSpace baseSpace, XrTime time, out UnityEngine.Pose pose)
{
#if FAKE_DATA
if (Application.isEditor)
{
// make a random Pose
//var pos = new Vector3(UnityEngine.Random.Range(-1f, 1f), UnityEngine.Random.Range(-1f, 1f), UnityEngine.Random.Range(-1f, 1f));
//var rot = new Quaternion(UnityEngine.Random.Range(-1f, 1f), UnityEngine.Random.Range(-1f, 1f), UnityEngine.Random.Range(-1f, 1f), UnityEngine.Random.Range(-1f, 1f));
var pos = Vector3.up;
var rot = Quaternion.identity;
rot.Normalize();
pose = new Pose(pos, rot);
return true;
}
#endif
// If the xrBaseSpace is changed, the pose will be updated.
pose = default;
XrSpaceLocation location = new XrSpaceLocation();
@@ -179,14 +149,14 @@ namespace VIVE.OpenXR.Feature
if (ret != XrResult.XR_SUCCESS)
{
Debug.Log("Space: LocateSpace ret=" + ret);
//Debug.Log("Space: LocateSpace ret=" + ret);
return false;
}
Debug.Log("Space: baseSpace=" + baseSpace + ", space=" + space + ", time=" + time + ", ret=" + ret);
Debug.Log("Space: location.locationFlags=" + location.locationFlags);
Debug.Log("Space: location.pose.position=" + location.pose.position.x + "," + location.pose.position.y + "," + location.pose.position.z);
Debug.Log("Space: location.pose.orientation=" + location.pose.orientation.x + "," + location.pose.orientation.y + "," + location.pose.orientation.z + "," + location.pose.orientation.w);
//Debug.Log("Space: baseSpace=" + baseSpace + ", space=" + space + ", time=" + time + ", ret=" + ret);
//Debug.Log("Space: location.locationFlags=" + location.locationFlags);
//Debug.Log("Space: location.pose.position=" + location.pose.position.x + "," + location.pose.position.y + "," + location.pose.position.z);
//Debug.Log("Space: location.pose.orientation=" + location.pose.orientation.x + "," + location.pose.orientation.y + "," + location.pose.orientation.z + "," + location.pose.orientation.w);
if ((location.locationFlags & XrSpaceLocationFlags.XR_SPACE_LOCATION_POSITION_VALID_BIT) > 0 &&
(location.locationFlags & XrSpaceLocationFlags.XR_SPACE_LOCATION_ORIENTATION_VALID_BIT) > 0)
{
@@ -211,7 +181,7 @@ namespace VIVE.OpenXR.Feature
// Managered resource
}
// Non managered resource
Debug.Log($"Space: DestroySpace({space})");
//Debug.Log($"Space: DestroySpace({space})");
SpaceWrapper.Instance.DestroySpace(space);
space = 0;
disposed = true;

View File

@@ -3,81 +3,180 @@ using System.Runtime.InteropServices;
using System;
using UnityEngine;
using AOT;
using System.Collections.Generic;
using System.Text;
namespace VIVE.OpenXR
{
/// <summary>
/// This class is made for all features that need to intercept OpenXR API calls.
/// Some APIs will be called by Unity internally, and we need to intercept them in c# to get some information.
/// Append more interceptable functions for this class by adding a new partial class.
/// The partial class can help the delegate name be nice to read and search.
/// Please create per function in one partial class.
///
/// For all features want to use this class, please call <see cref="HookGetInstanceProcAddr" /> in your feature class.
/// For example:
/// protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
/// {
/// return HtcInterceptors.Instance.HookGetInstanceProcAddr(func);
/// }
/// </summary>
/// <summary>
/// This class is made for all features that need to intercept OpenXR API calls.
/// Some APIs will be called by Unity internally, and we need to intercept them in c# to get some information.
/// Append more interceptable functions for this class by adding a new partial class.
/// The partial class can help the delegate name be nice to read and search.
/// Please create per function in one partial class.
///
/// For all features want to use this class, please call <see cref="HookGetInstanceProcAddr" /> in your feature class.
/// For example:
/// protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
/// {
/// return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
/// }
/// </summary>
partial class ViveInterceptors
{
public const string TAG = "Interceptors";
public static ViveInterceptors instance = null;
public static ViveInterceptors Instance
{
get
{
if (instance == null)
instance = new ViveInterceptors();
return instance;
public const string TAG = "VIVE.OpenXR.ViveInterceptors";
static StringBuilder m_sb = null;
static StringBuilder sb {
get {
if (m_sb == null) { m_sb = new StringBuilder(); }
return m_sb;
}
}
static void DEBUG(StringBuilder msg) { Debug.LogFormat("{0} {1}", TAG, msg); }
static void ERROR(StringBuilder msg) { Debug.LogErrorFormat("{0} {1}", TAG, msg); }
public ViveInterceptors()
{
Debug.Log("HtcInterceptors");
}
public static ViveInterceptors instance = null;
public static ViveInterceptors Instance
{
get
{
if (instance == null)
instance = new ViveInterceptors();
return instance;
}
}
bool isInited = false;
public ViveInterceptors()
{
Debug.Log("ViveInterceptors");
}
public delegate XrResult DelegateXrGetInstanceProcAddr(XrInstance instance, string name, out IntPtr function);
private static readonly DelegateXrGetInstanceProcAddr hookXrGetInstanceProcAddrHandle = new DelegateXrGetInstanceProcAddr(XrGetInstanceProcAddrInterceptor);
private static readonly IntPtr hookGetInstanceProcAddrHandlePtr = Marshal.GetFunctionPointerForDelegate(hookXrGetInstanceProcAddrHandle);
static DelegateXrGetInstanceProcAddr XrGetInstanceProcAddrOriginal = null;
public delegate XrResult DelegateXrGetInstanceProcAddr(XrInstance instance, string name, out IntPtr function);
private static readonly DelegateXrGetInstanceProcAddr hookXrGetInstanceProcAddrHandle = new DelegateXrGetInstanceProcAddr(XrGetInstanceProcAddrInterceptor);
private static readonly IntPtr hookGetInstanceProcAddrHandlePtr = Marshal.GetFunctionPointerForDelegate(hookXrGetInstanceProcAddrHandle);
static DelegateXrGetInstanceProcAddr XrGetInstanceProcAddrOriginal = null;
[MonoPInvokeCallback(typeof(DelegateXrGetInstanceProcAddr))]
private static XrResult XrGetInstanceProcAddrInterceptor(XrInstance instance, string name, out IntPtr function)
{
// Custom interceptors
if (name == "xrWaitFrame")
{
Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
if (ret == XrResult.XR_SUCCESS)
{
XrWaitFrameOriginal = Marshal.GetDelegateForFunctionPointer<DelegateXrWaitFrame>(function);
function = xrWaitFrameInterceptorPtr;
}
return ret;
}
return XrGetInstanceProcAddrOriginal(instance, name, out function);
}
/// <summary>
/// Replacement for the runtime's xrGetInstanceProcAddr. For each function name
/// this class intercepts AND that is present in requiredFunctions, it resolves
/// the real function through XrGetInstanceProcAddrOriginal, caches it in the
/// matching *Original/*Origin delegate, and returns the pointer of the local
/// interceptor instead. All other names pass straight through.
/// </summary>
/// <param name="instance">The OpenXR instance handle (0 for the sentinel probe).</param>
/// <param name="name">The function name being resolved.</param>
/// <param name="function">Receives the (possibly substituted) function pointer.</param>
/// <returns>The original resolver's result, or XR_SUCCESS for the sentinel probe.</returns>
[MonoPInvokeCallback(typeof(DelegateXrGetInstanceProcAddr))]
private static XrResult XrGetInstanceProcAddrInterceptor(XrInstance instance, string name, out IntPtr function)
{
    // Used to check if the original function is already hooked.
    // HookGetInstanceProcAddr probes with instance == 0 and this sentinel name;
    // answering XR_SUCCESS here signals that this interceptor is installed.
    if (instance == 0 && name == "ViveInterceptorHooked")
    {
        function = IntPtr.Zero;
        return XrResult.XR_SUCCESS;
    }
    // Custom interceptors
    if (name == "xrWaitFrame" && requiredFunctions.Contains(name))
    {
        Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
        var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
        if (ret == XrResult.XR_SUCCESS)
        {
            // Cache the runtime's xrWaitFrame, then hand back our interceptor.
            XrWaitFrameOriginal = Marshal.GetDelegateForFunctionPointer<DelegateXrWaitFrame>(function);
            function = xrWaitFrameInterceptorPtr;
        }
        return ret;
    }
    if (name == "xrEndFrame" && requiredFunctions.Contains(name))
    {
        Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
        var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
        if (ret == XrResult.XR_SUCCESS)
        {
            XrEndFrameOriginal = Marshal.GetDelegateForFunctionPointer<DelegateXrEndFrame>(function);
            function = xrEndFrameInterceptorPtr;
        }
        return ret;
    }
#if PERFORMANCE_TEST
    // xrLocateSpace is only intercepted in performance-test builds.
    if (name == "xrLocateSpace" && requiredFunctions.Contains(name))
    {
        Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
        var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
        if (ret == XrResult.XR_SUCCESS)
        {
            XrLocateSpaceOriginal = Marshal.GetDelegateForFunctionPointer<DelegateXrLocateSpace>(function);
            function = xrLocateSpaceInterceptorPtr;
        }
        return ret;
    }
#endif
    if (name == "xrPollEvent" && requiredFunctions.Contains(name))
    {
        Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
        var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
        if (ret == XrResult.XR_SUCCESS)
        {
            xrPollEventOrigin = Marshal.GetDelegateForFunctionPointer<xrPollEventDelegate>(function);
            function = xrPollEventPtr;
        }
        return ret;
    }
    if (name == "xrBeginSession" && requiredFunctions.Contains(name))
    {
        Debug.Log($"{TAG}: XrGetInstanceProcAddrInterceptor() {name} is intercepted.");
        var ret = XrGetInstanceProcAddrOriginal(instance, name, out function);
        if (ret == XrResult.XR_SUCCESS)
        {
            xrBeginSessionOrigin = Marshal.GetDelegateForFunctionPointer<xrBeginSessionDelegate>(function);
            function = xrBeginSessionPtr;
        }
        return ret;
    }
    // Not an intercepted name: defer to the runtime's resolver unchanged.
    return XrGetInstanceProcAddrOriginal(instance, name, out function);
}
public IntPtr HookGetInstanceProcAddr(IntPtr func)
{
    Debug.Log($"{TAG}: HookGetInstanceProcAddr");
    // Only the first feature that calls this installs the interceptor. Every later
    // caller must get the unmodified loader function so the call chain stays intact.
    if (XrGetInstanceProcAddrOriginal != null)
        return func;

    Debug.Log($"{TAG}: registering our own xrGetInstanceProcAddr");
    XrGetInstanceProcAddrOriginal = Marshal.GetDelegateForFunctionPointer<DelegateXrGetInstanceProcAddr>(func);
#if UNITY_EDITOR
    if (Application.isEditor)
    {
        // The static XrGetInstanceProcAddrOriginal sometimes does not reflect an earlier hook,
        // so probe with instance = 0 and a fake name that only our own interceptor answers.
        Debug.Log($"{TAG}: Check if duplicate hooked by this script with instance=0 and \"ViveInterceptorHooked\" name. If following a loader error, ignore it.");
        // The loader logs a spec-violation error for the null instance; that message is expected and harmless:
        // E OpenXR-Loader: Error [SPEC | xrGetInstanceProcAddr | VUID-xrGetInstanceProcAddr-instance-parameter] : XR_NULL_HANDLE for instance but query for ViveInterceptorHooked requires a valid instance
        if (XrGetInstanceProcAddrOriginal(0, "ViveInterceptorHooked", out IntPtr probe) == XrResult.XR_SUCCESS)
        {
            // Our interceptor answered the probe, so the hook is already installed.
            Debug.Log($"{TAG}: Already hooked");
            return func;
        }
    }
#endif
    return hookGetInstanceProcAddrHandlePtr;
}
// Names of the OpenXR functions that should be routed through our interceptors.
static readonly List<string> requiredFunctions = new List<string>();

/// <summary>
/// Call before <see cref="HookGetInstanceProcAddr"/> to register an OpenXR function name to intercept.
/// </summary>
/// <param name="name">The OpenXR function name, e.g. "xrWaitFrame".</param>
public void AddRequiredFunction(string name)
{
    if (!requiredFunctions.Contains(name))
    {
        Debug.Log($"{TAG}: AddRequiredFunction({name})");
        requiredFunctions.Add(name);
    }
}
}
}
public IntPtr HookGetInstanceProcAddr(IntPtr func)
{
    Debug.Log($"{TAG}: registering our own xrGetInstanceProcAddr");
    // Install the hook only once; every later caller keeps the original chain untouched.
    if (XrGetInstanceProcAddrOriginal != null)
        return func;

    XrGetInstanceProcAddrOriginal = Marshal.GetDelegateForFunctionPointer<DelegateXrGetInstanceProcAddr>(func);
    isInited = true;
    return hookGetInstanceProcAddrHandlePtr;
}
}
}

View File

@@ -0,0 +1,85 @@
// Copyright HTC Corporation All Rights Reserved.
#define DEBUG
using AOT;
using System;
using System.Runtime.InteropServices;
using UnityEngine.Profiling;
using VIVE.OpenXR.FrameSynchronization;
namespace VIVE.OpenXR
{
partial class ViveInterceptors
{
    #region xrBeginSession
    public delegate XrResult xrBeginSessionDelegate(XrSession session, ref XrSessionBeginInfo beginInfo);
    private static xrBeginSessionDelegate xrBeginSessionOrigin = null;

    /// <summary>
    /// Intercepts xrBeginSession. When Frame Synchronization is enabled, an
    /// XrFrameSynchronizationSessionBeginInfoHTC is chained in front of the caller's
    /// next chain before forwarding to the runtime.
    /// Fix: the unmanaged chain entry is now freed after the call and beginInfo.next
    /// is restored (the previous code leaked the AllocHGlobal block on every call).
    /// </summary>
    [MonoPInvokeCallback(typeof(xrBeginSessionDelegate))]
    private static XrResult xrBeginSessionInterceptor(XrSession session, ref XrSessionBeginInfo beginInfo)
    {
        Profiler.BeginSample("ViveInterceptors:BeginSession");
        XrResult result = XrResult.XR_ERROR_FUNCTION_UNSUPPORTED;
        if (xrBeginSessionOrigin != null)
        {
            IntPtr fsBeginInfoPtr = IntPtr.Zero;
            IntPtr originalNext = beginInfo.next;
            if (m_EnableFrameSynchronization)
            {
                // Splice our struct in front of the application's next chain.
                frameSynchronizationSessionBeginInfo.mode = m_FrameSynchronizationMode;
                frameSynchronizationSessionBeginInfo.next = originalNext;
                fsBeginInfoPtr = Marshal.AllocHGlobal(Marshal.SizeOf(frameSynchronizationSessionBeginInfo));
                Marshal.StructureToPtr(frameSynchronizationSessionBeginInfo, fsBeginInfoPtr, false);
                beginInfo.next = fsBeginInfoPtr;
#if DEBUG
                // Read the struct back from unmanaged memory to verify what the runtime will see.
                XrFrameSynchronizationSessionBeginInfoHTC fsBeginInfo = (XrFrameSynchronizationSessionBeginInfoHTC)Marshal.PtrToStructure(fsBeginInfoPtr, typeof(XrFrameSynchronizationSessionBeginInfoHTC));
                sb.Clear().Append("xrBeginSessionInterceptor() beginInfo.next = (").Append(fsBeginInfo.type).Append(", ").Append(fsBeginInfo.mode).Append(")"); DEBUG(sb);
#endif
            }
            result = xrBeginSessionOrigin(session, ref beginInfo);
            if (fsBeginInfoPtr != IntPtr.Zero)
            {
                // xrBeginSession consumes its input during the call, so the chain entry
                // can be released afterwards; restore the caller's original next pointer.
                beginInfo.next = originalNext;
                Marshal.FreeHGlobal(fsBeginInfoPtr);
            }
        }
        else
        {
            sb.Clear().Append("xrBeginSessionInterceptor() Not assign xrBeginSession!"); ERROR(sb);
        }
        Profiler.EndSample();
        return result;
    }

    private static readonly xrBeginSessionDelegate xrBeginSession = new xrBeginSessionDelegate(xrBeginSessionInterceptor);
    private static readonly IntPtr xrBeginSessionPtr = Marshal.GetFunctionPointerForDelegate(xrBeginSession);
    #endregion

    // Template instance whose mode/next are filled per call before marshaling.
    private static XrFrameSynchronizationSessionBeginInfoHTC frameSynchronizationSessionBeginInfo = XrFrameSynchronizationSessionBeginInfoHTC.identity;
    private static bool m_EnableFrameSynchronization = false;
    private static XrFrameSynchronizationModeHTC m_FrameSynchronizationMode = XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_STABILIZED_HTC;

    /// <summary>
    /// Activate or deactivate the Frame Synchronization feature.
    /// Takes effect on the next xrBeginSession call.
    /// </summary>
    /// <param name="active">True for activate</param>
    /// <param name="mode">The <see cref="XrFrameSynchronizationModeHTC"/> used for Frame Synchronization.</param>
    public void ActivateFrameSynchronization(bool active, XrFrameSynchronizationModeHTC mode)
    {
        m_EnableFrameSynchronization = active;
        m_FrameSynchronizationMode = mode;
        sb.Clear().Append("ActivateFrameSynchronization() ").Append(active ? "enable " : "disable ").Append(mode); DEBUG(sb);
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8c222b96d7eb4ca4bb6390e07b1967bb
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,129 @@
// Copyright HTC Corporation All Rights Reserved.
using AOT;
using System;
using System.Runtime.InteropServices;
using UnityEngine.Profiling;
using VIVE.OpenXR.DisplayRefreshRate;
using VIVE.OpenXR.Passthrough;
using VIVE.OpenXR.UserPresence;
namespace VIVE.OpenXR
{
partial class ViveInterceptors
{
    #region xrPollEvent
    public delegate XrResult xrPollEventDelegate(XrInstance instance, ref XrEventDataBuffer eventData);
    private static xrPollEventDelegate xrPollEventOrigin = null;

    /// <summary>
    /// Intercepts xrPollEvent: forwards to the runtime, then caches data from events of
    /// interest (passthrough image rate/quality changes, display refresh rate changes,
    /// session state and user presence) before returning the event to the caller.
    /// Fixes: profiler sample was mislabeled "WaitFrame" (copy-paste), and the
    /// "fromImageRatesrc.dstImageRate" log label typo.
    /// </summary>
    [MonoPInvokeCallback(typeof(xrPollEventDelegate))]
    private static XrResult xrPollEventInterceptor(XrInstance instance, ref XrEventDataBuffer eventData)
    {
        Profiler.BeginSample("ViveInterceptors:PollEvent");
        XrResult result = XrResult.XR_SUCCESS;
        if (xrPollEventOrigin != null)
        {
            result = xrPollEventOrigin(instance, ref eventData);
            if (result == XrResult.XR_SUCCESS)
            {
                sb.Clear().Append("xrPollEventInterceptor() xrPollEvent ").Append(eventData.type); DEBUG(sb);
                switch (eventData.type)
                {
                    case XrStructureType.XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_RATE_CHANGED_HTC:
                        if (XrEventDataPassthroughConfigurationImageRateChangedHTC.Get(eventData, out XrEventDataPassthroughConfigurationImageRateChangedHTC eventDataPassthroughConfigurationImageRate))
                        {
                            fromImageRate = eventDataPassthroughConfigurationImageRate.fromImageRate;
                            toImageRate = eventDataPassthroughConfigurationImageRate.toImageRate;
                            sb.Clear().Append("xrPollEventInterceptor() XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_RATE_CHANGED_HTC")
                                .Append(", fromImageRate.srcImageRate: ").Append(fromImageRate.srcImageRate)
                                .Append(", fromImageRate.dstImageRate: ").Append(fromImageRate.dstImageRate)
                                .Append(", toImageRate.srcImageRate: ").Append(toImageRate.srcImageRate)
                                .Append(", toImageRate.dstImageRate: ").Append(toImageRate.dstImageRate);
                            DEBUG(sb);
                            VivePassthroughImageRateChanged.Send(fromImageRate.srcImageRate, fromImageRate.dstImageRate, toImageRate.srcImageRate, toImageRate.dstImageRate);
                        }
                        break;
                    case XrStructureType.XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_QUALITY_CHANGED_HTC:
                        if (XrEventDataPassthroughConfigurationImageQualityChangedHTC.Get(eventData, out XrEventDataPassthroughConfigurationImageQualityChangedHTC eventDataPassthroughConfigurationImageQuality))
                        {
                            fromImageQuality = eventDataPassthroughConfigurationImageQuality.fromImageQuality;
                            toImageQuality = eventDataPassthroughConfigurationImageQuality.toImageQuality;
                            sb.Clear().Append("xrPollEventInterceptor() XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_QUALITY_CHANGED_HTC")
                                .Append(", fromImageQuality: ").Append(fromImageQuality.scale)
                                .Append(", toImageQuality: ").Append(toImageQuality.scale);
                            DEBUG(sb);
                            VivePassthroughImageQualityChanged.Send(fromImageQuality.scale, toImageQuality.scale);
                        }
                        break;
                    case XrStructureType.XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB:
                        if (XrEventDataDisplayRefreshRateChangedFB.Get(eventData, out XrEventDataDisplayRefreshRateChangedFB eventDataDisplayRefreshRate))
                        {
                            fromDisplayRefreshRate = eventDataDisplayRefreshRate.fromDisplayRefreshRate;
                            toDisplayRefreshRate = eventDataDisplayRefreshRate.toDisplayRefreshRate;
                            sb.Clear().Append("xrPollEventInterceptor() XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB")
                                .Append(", fromDisplayRefreshRate: ").Append(fromDisplayRefreshRate)
                                .Append(", toDisplayRefreshRate: ").Append(toDisplayRefreshRate);
                            DEBUG(sb);
                            ViveDisplayRefreshRateChanged.Send(fromDisplayRefreshRate, toDisplayRefreshRate);
                        }
                        break;
                    case XrStructureType.XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED:
                        if (XrEventDataSessionStateChanged.Get(eventData, out XrEventDataSessionStateChanged eventDataSession))
                        {
                            // NOTE(review): READY/STOPPING is used here as a proxy for user
                            // presence (alongside XR_TYPE_EVENT_DATA_USER_PRESENCE_CHANGED_EXT
                            // below) — confirm this is intended for runtimes without the
                            // user-presence extension.
                            switch (eventDataSession.state)
                            {
                                case XrSessionState.XR_SESSION_STATE_READY:
                                    isUserPresent = true;
                                    break;
                                case XrSessionState.XR_SESSION_STATE_STOPPING:
                                    isUserPresent = false;
                                    break;
                                default:
                                    break;
                            }
                            sb.Clear().Append("xrPollEventInterceptor() XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED")
                                .Append(", session: ").Append(eventDataSession.session)
                                .Append(", state: ").Append(eventDataSession.state)
                                .Append(", isUserPresent: ").Append(isUserPresent);
                            DEBUG(sb);
                        }
                        break;
                    case XrStructureType.XR_TYPE_EVENT_DATA_USER_PRESENCE_CHANGED_EXT:
                        if (XrEventDataUserPresenceChangedEXT.Get(eventData, out XrEventDataUserPresenceChangedEXT eventDataUserPresence))
                        {
                            isUserPresent = eventDataUserPresence.isUserPresent;
                            sb.Clear().Append("xrPollEventInterceptor() XR_TYPE_EVENT_DATA_USER_PRESENCE_CHANGED_EXT")
                                .Append(", session: ").Append(eventDataUserPresence.session)
                                .Append(", isUserPresent: ").Append(isUserPresent);
                            DEBUG(sb);
                        }
                        break;
                    default:
                        break;
                }
            }
            //sb.Clear().Append("xrPollEventInterceptor() xrPollEvent result: ").Append(result).Append(", isUserPresent: ").Append(isUserPresent); DEBUG(sb);
        }
        Profiler.EndSample();
        return result;
    }

    private static readonly xrPollEventDelegate xrPollEvent = new xrPollEventDelegate(xrPollEventInterceptor);
    private static readonly IntPtr xrPollEventPtr = Marshal.GetFunctionPointerForDelegate(xrPollEvent);
    #endregion

    // Defaults to true until the runtime reports otherwise.
    private static bool isUserPresent = true;
    /// <summary>Last user-presence value reported by the runtime (see interceptor above).</summary>
    public bool IsUserPresent() { return isUserPresent; }

    // Cached values from the most recent XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB event.
    private static float fromDisplayRefreshRate, toDisplayRefreshRate;
    public float FromDisplayRefreshRate() { return fromDisplayRefreshRate; }
    public float ToDisplayRefreshRate() { return toDisplayRefreshRate; }

    // Cached values from the most recent passthrough configuration change events.
    private static XrPassthroughConfigurationImageRateHTC fromImageRate, toImageRate;
    private static XrPassthroughConfigurationImageQualityHTC fromImageQuality, toImageQuality;
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c2cc5716d3f563f49a47da6c1bd8ccbe
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,89 @@
// Copyright HTC Corporation All Rights Reserved.
using System.Runtime.InteropServices;
using System;
using AOT;
using UnityEngine.Profiling;
namespace VIVE.OpenXR
{
partial class ViveInterceptors
{
    // Managed mirror of XrCompositionLayerBaseHeader; layout must match the native struct.
    public struct XrCompositionLayerBaseHeader
    {
        public XrStructureType type; // This base structure itself has no associated XrStructureType value.
        public System.IntPtr next;
        public XrCompositionLayerFlags layerFlags;
        public XrSpace space;
    }
    // Managed mirror of XrFrameEndInfo passed to xrEndFrame.
    public struct XrFrameEndInfo
    {
        public XrStructureType type;
        public System.IntPtr next;
        public XrTime displayTime;
        public XrEnvironmentBlendMode environmentBlendMode;
        public uint layerCount;
        public IntPtr layers; // XrCompositionLayerBaseHeader IntPtr array
    }

    public delegate XrResult DelegateXrEndFrame(XrSession session, ref XrFrameEndInfo frameEndInfo);
    // Keep a static reference to the delegate so the GC never collects it while the
    // runtime still holds the function pointer derived from it.
    private static readonly DelegateXrEndFrame xrEndFrameInterceptorHandle = new DelegateXrEndFrame(XrEndFrameInterceptor);
    private static readonly IntPtr xrEndFrameInterceptorPtr = Marshal.GetFunctionPointerForDelegate(xrEndFrameInterceptorHandle);
    // The runtime's real xrEndFrame, captured in XrGetInstanceProcAddrInterceptor.
    static DelegateXrEndFrame XrEndFrameOriginal = null;

    // Intercepts xrEndFrame: lets BeforeOriginalEndFrame veto/replace the call,
    // forwards to the runtime, then notifies AfterOriginalEndFrame.
    // NOTE(review): assumes the static `instance` singleton is set before the runtime
    // calls this — see the commented-out guard below; confirm initialization order.
    [MonoPInvokeCallback(typeof(DelegateXrEndFrame))]
    private static XrResult XrEndFrameInterceptor(XrSession session, ref XrFrameEndInfo frameEndInfo)
    {
        // instance must not null
        //if (instance == null)
        //	return XrEndFrameOriginal(session, ref frameEndInfo);
        Profiler.BeginSample("VI:EndFrame");
        XrResult result = XrResult.XR_SUCCESS;
        // A callback returning false skips the runtime call and uses its `result`.
        if (instance.BeforeOriginalEndFrame != null &&
            !instance.BeforeOriginalEndFrame(session, ref frameEndInfo, ref result))
        {
            Profiler.EndSample();
            return result;
        }
        result = XrEndFrameOriginal(session, ref frameEndInfo);
        instance.AfterOriginalEndFrame?.Invoke(session, ref frameEndInfo, ref result);
        Profiler.EndSample();
        return result;
    }

    /// <summary>
    /// If you return false, the original function will not be called.
    /// </summary>
    /// <param name="session"></param>
    /// <param name="frameEndInfo"></param>
    /// <param name="result"></param>
    /// <returns></returns>
    public delegate bool DelegateXrEndFrameInterceptor(XrSession session, ref XrFrameEndInfo frameEndInfo, ref XrResult result);
    /// <summary>
    /// Use this to intercept the original function. This will be called before the original function.
    /// </summary>
    public DelegateXrEndFrameInterceptor BeforeOriginalEndFrame;
    /// <summary>
    /// Use this to intercept the original function. This will be called after the original function.
    /// </summary>
    public DelegateXrEndFrameInterceptor AfterOriginalEndFrame;

#if PERFORMANCE_TEST
    public delegate XrResult DelegateXrLocateSpace(XrSpace space, XrSpace baseSpace, XrTime time, ref XrSpaceLocation location);
    private static readonly DelegateXrLocateSpace xrLocateSpaceInterceptorHandle = new DelegateXrLocateSpace(XrLocateSpaceInterceptor);
    private static readonly IntPtr xrLocateSpaceInterceptorPtr = Marshal.GetFunctionPointerForDelegate(xrLocateSpaceInterceptorHandle);
    static DelegateXrLocateSpace XrLocateSpaceOriginal = null;

    // Pure pass-through wrapper: only exists to attribute xrLocateSpace cost in the profiler.
    [MonoPInvokeCallback(typeof(DelegateXrLocateSpace))]
    public static XrResult XrLocateSpaceInterceptor(XrSpace space, XrSpace baseSpace, XrTime time, ref XrSpaceLocation location)
    {
        Profiler.BeginSample("VI:LocateSpace");
        var ret = XrLocateSpaceOriginal(space, baseSpace, time, ref location);
        Profiler.EndSample();
        return ret;
    }
#endif
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6bf7cf55d82ac6343b4eda92d1197a66
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -3,59 +3,106 @@ using System.Runtime.InteropServices;
using System;
using UnityEngine;
using AOT;
using UnityEngine.Profiling;
namespace VIVE.OpenXR
{
partial class ViveInterceptors
{
#region XRWaitFrame
public struct XrFrameWaitInfo
{
public XrStructureType type;
public IntPtr next;
}
partial class ViveInterceptors
{
#region XRWaitFrame
public struct XrFrameWaitInfo
{
public XrStructureType type;
public IntPtr next;
}
public struct XrFrameState
{
public XrStructureType type;
public IntPtr next;
public XrTime predictedDisplayTime;
public XrDuration predictedDisplayPeriod;
public XrBool32 shouldRender;
}
public struct XrFrameState
{
public XrStructureType type;
public IntPtr next;
public XrTime predictedDisplayTime;
public XrDuration predictedDisplayPeriod;
public XrBool32 shouldRender;
}
public delegate XrResult DelegateXrWaitFrame(XrSession session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState);
private static readonly DelegateXrWaitFrame xrWaitFrameInterceptorHandle = new DelegateXrWaitFrame(XrWaitFrameInterceptor);
private static readonly IntPtr xrWaitFrameInterceptorPtr = Marshal.GetFunctionPointerForDelegate(xrWaitFrameInterceptorHandle);
static DelegateXrWaitFrame XrWaitFrameOriginal = null;
bool isWaitFrameIntercepted = false;
[MonoPInvokeCallback(typeof(DelegateXrWaitFrame))]
private static XrResult XrWaitFrameInterceptor(XrSession session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState)
{
var ret = XrWaitFrameOriginal(session, ref frameWaitInfo, ref frameState);
currentFrameState = frameState;
return ret;
}
public delegate XrResult DelegateXrWaitFrame(XrSession session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState);
private static readonly DelegateXrWaitFrame xrWaitFrameInterceptorHandle = new DelegateXrWaitFrame(XrWaitFrameInterceptor);
private static readonly IntPtr xrWaitFrameInterceptorPtr = Marshal.GetFunctionPointerForDelegate(xrWaitFrameInterceptorHandle);
static DelegateXrWaitFrame XrWaitFrameOriginal = null;
static XrFrameState currentFrameState = new XrFrameState() { predictedDisplayTime = 0 };
[MonoPInvokeCallback(typeof(DelegateXrWaitFrame))]
private static XrResult XrWaitFrameInterceptor(XrSession session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState)
{
// instance must not null
//if (instance == null)
// return XrWaitFrameOriginal(session, ref frameWaitInfo, ref frameState);
Profiler.BeginSample("VI:WaitFrame");
instance.isWaitFrameIntercepted = true;
XrResult result = XrResult.XR_SUCCESS;
if (instance.BeforeOriginalWaitFrame != null &&
!instance.BeforeOriginalWaitFrame(session, ref frameWaitInfo, ref frameState, ref result))
{
Profiler.EndSample();
return result;
}
var ret = XrWaitFrameOriginal(session, ref frameWaitInfo, ref frameState);
instance.AfterOriginalWaitFrame?.Invoke(session, ref frameWaitInfo, ref frameState, ref result);
currentFrameState = frameState;
Profiler.EndSample();
return result;
}
public XrFrameState GetCurrentFrameState()
{
if (!isInited) throw new Exception("ViveInterceptors is not inited");
static XrFrameState currentFrameState = new XrFrameState() { predictedDisplayTime = 0 };
return currentFrameState;
}
/// <summary>
/// Get the waitframe's result: XrFrameState. This result used in update is not matching the current frame. Use it after onBeforeRender.
/// </summary>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public XrFrameState GetCurrentFrameState()
{
if (!isWaitFrameIntercepted) throw new Exception("ViveInterceptors is not intercepted");
public XrTime GetPredictTime()
{
if (!isInited) throw new Exception("ViveInterceptors is not inited");
return currentFrameState;
}
Debug.Log($"{TAG}: XrWaitFrameInterceptor(predictedDisplayTime={currentFrameState.predictedDisplayTime}");
if (currentFrameState.predictedDisplayTime == 0)
return new XrTime((long)(1000000L * (Time.unscaledTimeAsDouble + 0.011f)));
else
return currentFrameState.predictedDisplayTime;
}
#endregion XRWaitFrame
}
}
/// <summary>
/// Must request xrWaitFrame before calling this function. This result used in update is not matching the current frame. Use it after onBeforeRender.
/// </summary>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public XrTime GetPredictTime()
{
if (!isWaitFrameIntercepted) throw new Exception("ViveInterceptors is not intercepted");
//Debug.Log($"{TAG}: XrWaitFrameInterceptor(predictedDisplayTime={currentFrameState.predictedDisplayTime}");
if (currentFrameState.predictedDisplayTime == 0)
return new XrTime((long)(1000000L * (Time.unscaledTimeAsDouble + 0.011f)));
else
return currentFrameState.predictedDisplayTime;
}
/// <summary>
/// Register WaitFrame event
/// </summary>
/// <param name="session"></param>
/// <param name="frameWaitInfo"></param>
/// <param name="frameState"></param>
/// <param name="result"></param>
/// <returns></returns>
public delegate bool DelegateXrWaitFrameInterceptor(XrSession session, ref XrFrameWaitInfo frameWaitInfo, ref XrFrameState frameState, ref XrResult result);
/// <summary>
/// Use this to intercept the original function. This will be called before the original function.
/// </summary>
public DelegateXrWaitFrameInterceptor BeforeOriginalWaitFrame;
/// <summary>
/// Use this to intercept the original function. This will be called after the original function.
/// </summary>
public DelegateXrWaitFrameInterceptor AfterOriginalWaitFrame;
#endregion XRWaitFrame
}
}

View File

@@ -0,0 +1,329 @@
using AOT;
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
namespace VIVE.OpenXR.Common.RenderThread
{
#region syncObject
public class Message
{
    // True while the message is available for reuse by MessagePool.Obtain;
    // set false by MessagePool.Lock (via Enqueue) and true again by MessagePool.Release.
    public bool isFree = true;
}
/// <summary>
/// MessagePool class manages a pool of message objects for reuse. You can enter any kind of message object.
/// However when obtain, the message object will not able to cast to the type you want.
/// You should only use one kind of message. Not mix different kind of message.
/// </summary>
public class MessagePool
{
    // Backing store for pooled messages. Grows on demand (Insert in Obtain) and never shrinks.
    private readonly List<Message> pool = new List<Message>(2) { };
    // Cursor of the slot most recently handed out; the next Obtain scans from here.
    private int index = 0;

    public MessagePool() { }

    // Advances a cursor by one, wrapping at the end of the pool.
    private int Next(int value)
    {
        if (++value >= pool.Count)
            value = 0;
        return value;
    }

    // Obtain retrieves a reusable message from the pool, allocating a new one when none is free.
    // Set up the message's state after retrieval and call Release() on it after use.
    // NOTE(review): the returned message still has isFree == true at this point; it is only
    // locked later by PreAllocatedQueue.Enqueue(). Two Obtain calls without an Enqueue in
    // between can therefore return the same instance — confirm callers always enqueue promptly.
    // The pool stores plain Message references, so the (T) cast assumes a single concrete
    // message type is ever used with one pool (see class remarks above).
    public T Obtain<T>() where T : Message, new()
    {
        int c = pool.Count;
        int i = index;
        // Scan at most one full cycle, starting at the last-used slot.
        for (int j = 0; j < c; i++, j++)
        {
            if (i >= c)
                i = 0;
            if (pool[i].isFree)
            {
                //Debug.LogError("Obtain idx=" + i);
                index = i;
                return (T)pool[i];
            }
        }
        // No free message: grow the pool. This allocates (possible GC) — game thread only.
        index = Next(i);
        var newItem = new T()
        {
            isFree = true
        };
        pool.Insert(index, newItem);
        Debug.Log("RT.MessagePool.Obtain() pool count=" + pool.Count);
        return newItem;
    }

    // Lock marks a message as "in use" so Obtain will not hand it out again.
    // Called by PreAllocatedQueue.Enqueue on the message being queued.
    public static void Lock(Message msg)
    {
        msg.isFree = false;
    }

    /// <summary>
    /// Release marks a message as "free" so the pool can reuse it.
    /// Safe to call from the render thread: it does not allocate and will not trigger GC.
    /// </summary>
    /// <param name="msg">A message previously obtained from this pool.</param>
    public static void Release(Message msg)
    {
        msg.isFree = true;
    }
}
/// <summary>
/// PreAllocatedQueue class is a message queue based on MessagePool for preallocating message objects.
/// Its main functionality is to add message objects to the queue and retrieve them from the queue.
/// Messages should be enqueued in GameThread and dequeued in RenderThread.
/// In render thread, dequeue will not trigger the GC event. Because the queue is preallocated.
/// The 'lock' expression is not used for list's size change. Because lock should be avoid used in RenderThread.
/// Set the queueSize as the double count of message you want to pass to render thread in one frame, and the
/// list will never change size during runtime. Therefore we don't need to use 'lock' to protect the list.
/// </summary>
public class PreAllocatedQueue : MessagePool
{
    // Ring buffer of message slots. Sized at construction; only grows in the
    // overflow branch of Enqueue (which the design treats as an error).
    private List<Message> list = new List<Message>();
    // Ring cursors: queueEnd advances on Enqueue (game thread),
    // queueBegin advances on Dequeue (render thread). No lock by design.
    private int queueBegin = 0;
    private int queueEnd = 0;

    /// <summary>
    /// The queueSize should be the double count of message you want to pass to render thread in one frame.
    /// </summary>
    /// <param name="queueSize">Number of preallocated slots in the ring.</param>
    public PreAllocatedQueue(int queueSize = 2) : base() {
        // Pre-fill with null placeholders so Enqueue can assign by index without growing.
        for (int i = 0; i < queueSize; i++)
        {
            list.Add(null);
        }
    }

    // Advances a ring cursor by one, wrapping at the end of the list.
    private int Next(int value)
    {
        if (++value >= list.Count)
            value = 0;
        return value;
    }

    /// <summary>
    /// Enqueue method adds a message object to the queue.
    /// If the queue is full, the new message is added to the end of the list.
    ///
    /// This function is designed to use the message object obtained from the MessagePool.
    /// Ensure only one type of message object is used in the queue.
    ///
    /// Enqueue will increase the queue size if the queue is full. This may trigger GC.Alloc.
    /// This function should be used in GameThread.
    /// </summary>
    /// <param name="msg">Message obtained from this pool (locked here until released).</param>
    public void Enqueue(Message msg)
    {
        // Mark in-use so Obtain cannot hand this message out again while queued.
        Lock(msg);
        queueEnd = Next(queueEnd);
        // If the queue is full, add the message to the end of the list. Should not let it happen.
        // Use larger queue size to avoid this issue.
        // If you see the error log here, you should increase the queue size in your design.
        if (queueEnd == queueBegin)
        {
            // Should let Insert and queueBegin be atomic. No lock protection here.
            // NOTE(review): growing the list here races with a concurrent Dequeue on the
            // render thread — the design relies on sizing the queue so this never runs.
            list.Insert(queueEnd, msg);
            queueBegin++;
            Debug.LogError("RT.MessagePool.Enqueue() list count=" + list.Count);
        }
        else
        {
            list[queueEnd] = msg;
        }
    }

    /// <summary>
    /// Dequeue method retrieves a message object from the queue.
    /// This method returns the first message object in the queue and removes it from the queue.
    /// This function will not trigger the GC event. Free to use in RenderThread.
    /// After use the Message, call Release() to the message.
    /// </summary>
    /// <returns>The oldest queued message slot.</returns>
    public Message Dequeue()
    {
        // No lock protection here. If list is not change size, it is safe.
        // However if list changed size, it is safe in most case.
        queueBegin = Next(queueBegin);
        return list[queueBegin];
    }
}
/// <summary>
/// RenderThreadTask class is used to execute specified tasks on the rendering thread.
/// You don't need to develop a native function to run your task on the rendering thread.
/// And you don't need to design how to pass data to render thread.
/// This class can be run in Unity Editor since Unity 2021. Test your code in Unity Editor can save your time.
///
/// You should only create RenderThreadTask as static readonly. Do not create RenderThreadTask in dynamic.
///
/// You should not run Unity.Engine code in RenderThread. It will cause the Unity.Engine to hang.
/// Any exception will not be caught and shown in RenderThread.
/// You should print your error message out to clearify your issue.
///
/// The 'lock' expression is not used here. Because I believe the lock is not necessary in this case.
/// And the lock will cause the performance issue. All the design here help you not to use 'lock'.
/// </summary>
public class RenderThreadTask
{
    // Thin wrapper so the class body does not need a using for Marshal.
    private static IntPtr GetFunctionPointerForDelegate(Delegate del)
    {
        return System.Runtime.InteropServices.Marshal.GetFunctionPointerForDelegate(del);
    }

    public delegate void RenderEventDelegate(int e);
    // Keep the delegate alive in a static so the GC never collects it while
    // Unity still holds the derived native function pointer.
    private static readonly RenderEventDelegate handle = new RenderEventDelegate(RunSyncObjectInRenderThread);
    private static readonly IntPtr handlePtr = GetFunctionPointerForDelegate(handle);

    // Callback executed on the render thread; receives this task's message queue.
    public delegate void Receiver(PreAllocatedQueue dataQueue);

    // CommandList stores every created RenderThreadTask; a task's id is its slot index,
    // which is passed through Unity's plugin-event mechanism as the event id.
    // Do not create RenderThreadTask dynamically: slots are never reclaimed, so the
    // list would grow without bound.
    private static List<RenderThreadTask> CommandList = new List<RenderThreadTask>();

    private PreAllocatedQueue queue;
    public PreAllocatedQueue Queue { get { return queue; } }
    private readonly Receiver receiver;
    private readonly int id;

    /// <summary>
    /// Input the receiver as render thread callback. The receiver will be executed in render thread.
    /// queueSize should be the double count of message you want to pass to render thread in one frame.
    /// </summary>
    /// <param name="render">The callback in render thread.</param>
    /// <param name="queueSize">If issue this event once in a frame, set queueSize as 2.</param>
    /// <exception cref="ArgumentNullException"></exception>
    public RenderThreadTask(Receiver render, int queueSize = 2)
    {
        queue = new PreAllocatedQueue(queueSize);
        receiver = render;
        if (receiver == null)
            throw new ArgumentNullException(nameof(render), "receiver should not be null");
        CommandList.Add(this);
        // The instance was just appended, so its slot is the last index.
        // (The previous IndexOf scan returned the same value at O(n) cost.)
        id = CommandList.Count - 1;
    }

    ~RenderThreadTask()
    {
        // Null the slot instead of RemoveAt: removing would shift the indices of every
        // later task and silently invalidate their cached ids. Never throw from a finalizer.
        try { CommandList[id] = null; } catch { }
    }

    void IssuePluginEvent(IntPtr callback, int eventID)
    {
        // Older version will hang after run script in render thread.
        GL.IssuePluginEvent(callback, eventID);
        return;
    }

    void IssuePluginEvent(CommandBuffer cmdBuf, IntPtr callback, int eventID)
    {
        cmdBuf.IssuePluginEvent(callback, eventID);
        return;
    }

    /// <summary>
    /// IssueEvent method submits this task's receiver, which is set in constructor, to be executed on the rendering thread.
    /// </summary>
    public void IssueEvent()
    {
        // Let the render thread run the RunSyncObjectInRenderThread(id)
        IssuePluginEvent(handlePtr, id);
    }

    /// <summary>
    /// Same as <see cref="IssueEvent"/> but scheduled through the given CommandBuffer.
    /// </summary>
    public void IssueInCommandBuffer(CommandBuffer cmdBuf)
    {
        // Let the render thread run the RunSyncObjectInRenderThread(id)
        IssuePluginEvent(cmdBuf, handlePtr, id);
    }

    // Called by RunSyncObjectInRenderThread() on the render thread.
    private void Receive()
    {
        receiver(queue);
    }

    // RunSyncObjectInRenderThread method is a static method used to execute a specified task on the rendering thread.
    // This method is invoked by Unity's rendering event mechanism and does not need to be called directly by developers.
    [MonoPInvokeCallback(typeof(RenderEventDelegate))]
    private static void RunSyncObjectInRenderThread(int id)
    {
        CommandList[id].Receive();
    }
}
#endregion
#region sample
// Not to compile this sample into your application. Just for reference. You can run this sample in Unity Editor and it will work.
#if UNITY_EDITOR
public class ViveRenderThreadTaskSample : MonoBehaviour
{
    // Message subtype carrying the payload for this sample. Kept internal so other
    // developers do not reuse it by accident.
    internal class SampleMessage : Message
    {
        public int dataPassedToRenderThread;
    }

    // Tasks must live as static readonly fields; never construct RenderThreadTask dynamically.
    // Each independent task gets its own RenderThreadTask and its own receiver.
    internal static readonly RenderThreadTask sampleRenderThreadTask1 = new RenderThreadTask(SampleReceiver1);
    internal static readonly RenderThreadTask sampleRenderThreadTask2 = new RenderThreadTask(SampleReceiver2);

    private static void SampleReceiver1(PreAllocatedQueue dataQueue)
    {
        // The queue only ever carries SampleMessage, so no null check is needed here.
        var message = dataQueue.Dequeue() as SampleMessage;
        // Copy the payload into a local and release the message as early as possible;
        // never keep the message instance itself after Release.
        int payload = message.dataPassedToRenderThread;
        MessagePool.Release(message);
        Debug.Log("Task1, the data passed to render thread: " + payload);
    }

    private static void SampleReceiver2(PreAllocatedQueue dataQueue)
    {
        var message = dataQueue.Dequeue() as SampleMessage;
        int payload = message.dataPassedToRenderThread;
        MessagePool.Release(message);
        Debug.Log("Task2, the data passed to render thread: " + payload);
    }

    // Sends one message per frame to the render thread.
    private void Update()
    {
        // Keep a single message type per queue.
        var message = sampleRenderThreadTask1.Queue.Obtain<SampleMessage>();
        message.dataPassedToRenderThread = 123;
        sampleRenderThreadTask1.Queue.Enqueue(message);
        sampleRenderThreadTask1.IssueEvent();
    }

    // Sends a message on click. Limit to one click per frame: the queue holds only two slots.
    public void OnClicked()
    {
        // Reusing the same message type across tasks is fine.
        var message = sampleRenderThreadTask2.Queue.Obtain<SampleMessage>();
        message.dataPassedToRenderThread = 234;
        sampleRenderThreadTask2.Queue.Enqueue(message);
        sampleRenderThreadTask2.IssueEvent();
    }
}
#endregion
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 251b4bedf6420fc4e84be778e501343f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -99,6 +99,12 @@ namespace VIVE.OpenXR.CompositionLayer
[SerializeField]
public bool isExternalSurface = false;
[SerializeField]
public bool isCustomRects = false;
[SerializeField]
public CustomRectsType customRects = CustomRectsType.TopDown;
[Tooltip("Width of external surface in pixels.")]
[SerializeField]
public uint externalSurfaceWidth = 1280;
@@ -122,8 +128,12 @@ namespace VIVE.OpenXR.CompositionLayer
[SerializeField]
public bool isProtectedSurface = false;
[SerializeField]
public Texture texture = null;
private Texture m_TextureLeft => texture;
public Texture textureLeft { get { return m_TextureLeft; } }
public Texture textureRight = null;
[SerializeField]
private uint renderPriority = 0;
@@ -150,7 +160,7 @@ namespace VIVE.OpenXR.CompositionLayer
private MeshRenderer generatedFallbackMeshRenderer = null;
private MeshFilter generatedFallbackMeshFilter = null;
private LayerTextures layerTextures;
private LayerTextures[] layerTextures = new LayerTextures[] {null, null};
private Material texture2DBlitMaterial;
private GameObject compositionLayerPlaceholderPrefabGO = null;
@@ -165,10 +175,12 @@ namespace VIVE.OpenXR.CompositionLayer
private float previousCylinderArcLength = 1f;
private float previousCylinderRadius = 1f;
private float previousAngleOfArc = 180f;
private Texture previousTexture = null;
private Texture previousTextureLeft = null;
private Texture previousTextureRight = null;
private bool previousIsDynamicLayer = false;
private int layerID; //For native
private int layerIDRight; //For native
private bool isHeadLock = false;
private bool InitStatus = false;
private bool isInitializationComplete = false;
@@ -232,10 +244,27 @@ namespace VIVE.OpenXR.CompositionLayer
if (layerID != 0)
{
DEBUG("Init completed, ID: " + layerID);
layerTextures = new LayerTextures(imageCount);
layerTextures[0] = new LayerTextures(imageCount);
InitStatus = true;
}
if (textureRight != null && textureLeft != textureRight) {
layerIDRight = compositionLayerFeature.CompositionLayer_Init(externalSurfaceWidth, externalSurfaceHeight, graphicsAPI, isDynamicLayer, isProtectedSurface, out imageCount, true);
if (layerIDRight != 0)
{
DEBUG("Init completed, ID right: " + layerIDRight);
layerTextures[1] = new LayerTextures(imageCount);
}
}
else if (isCustomRects) {
layerIDRight = compositionLayerFeature.CompositionLayer_Init(externalSurfaceWidth, externalSurfaceHeight, graphicsAPI, isDynamicLayer, isProtectedSurface, out imageCount, true);
if (layerIDRight != 0)
{
DEBUG("Init completed, ID right: " + layerIDRight);
layerTextures[1] = new LayerTextures(imageCount);
}
}
taskQueue.Release(task);
}
});
@@ -243,6 +272,7 @@ namespace VIVE.OpenXR.CompositionLayer
CompositionLayerRenderThreadTask.IssueObtainSwapchainEvent(SetupExternalAndroidSurfaceSyncObjects);
texture = new Texture2D((int)externalSurfaceWidth, (int)externalSurfaceHeight, TextureFormat.RGBA32, false, isLinear);
textureRight = new Texture2D((int)externalSurfaceWidth, (int)externalSurfaceHeight, TextureFormat.RGBA32, false, isLinear);
DEBUG("CompositionLayerInit Ext Surf");
@@ -250,16 +280,23 @@ namespace VIVE.OpenXR.CompositionLayer
return true;
}
if (texture == null)
if (textureLeft == null)
{
ERROR("CompositionLayerInit: Source Texture not found, abort init.");
return false;
}
if (textureLeft != null && textureRight == null)
{
DEBUG("CompositionLayerInit: Using Left Texture as Right Texture.");
textureRight = textureLeft;
}
DEBUG("CompositionLayerInit");
uint textureWidth = (uint)texture.width;
uint textureHeight = (uint)texture.height;
uint textureWidth = (uint)textureLeft.width;
uint textureHeight = (uint)textureLeft.height;
DEBUG("Init : textureWidth = " + textureWidth + " textureHeight = " + textureHeight);
CompositionLayerRenderThreadSyncObject ObtainLayerSwapchainSyncObject = new CompositionLayerRenderThreadSyncObject(
(taskQueue) =>
@@ -293,9 +330,28 @@ namespace VIVE.OpenXR.CompositionLayer
if (layerID != 0)
{
DEBUG("Init completed, ID: " + layerID + ", Image Count: " + imageCount);
layerTextures = new LayerTextures(imageCount);
layerTextures[0] = new LayerTextures(imageCount);
InitStatus = true;
}
if (textureRight != null && textureLeft != textureRight) {
layerIDRight = compositionLayerFeature.CompositionLayer_Init(textureWidth, textureHeight, graphicsAPI, isDynamicLayer, isProtectedSurface, out imageCount);
if (layerIDRight != 0)
{
DEBUG("Init completed, ID Right: " + layerIDRight + ", Image Count: " + imageCount);
layerTextures[1] = new LayerTextures(imageCount);
}
}
else if (isCustomRects)
{
layerIDRight = compositionLayerFeature.CompositionLayer_Init(textureWidth, textureHeight, graphicsAPI, isDynamicLayer, isProtectedSurface, out imageCount);
if (layerIDRight != 0)
{
DEBUG("Init completed, ID Right: " + layerIDRight + ", Image Count: " + imageCount);
layerTextures[1] = new LayerTextures(imageCount);
}
}
taskQueue.Release(task);
}
@@ -310,18 +366,20 @@ namespace VIVE.OpenXR.CompositionLayer
previousCylinderArcLength = m_CylinderArcLength;
previousCylinderRadius = m_CylinderRadius;
previousAngleOfArc = m_CylinderAngleOfArc;
previousTexture = texture;
previousTextureLeft = textureLeft;
previousTextureRight = textureRight;
previousIsDynamicLayer = isDynamicLayer;
return true;
}
private bool textureAcquired = false;
private bool textureAcquiredOnce = false;
private bool[] textureAcquired = new bool[] {false, false};
private bool[] textureAcquiredOnce = new bool[] {false, false};
XrOffset2Di offset = new XrOffset2Di();
XrExtent2Di extent = new XrExtent2Di();
XrRect2Di rect = new XrRect2Di();
private bool SetLayerTexture()
private bool SetLayerTexture(int eyeid)
{
if (!isInitializationComplete || !isSynchronized) return false;
@@ -332,10 +390,24 @@ namespace VIVE.OpenXR.CompositionLayer
offset.y = (int)externalSurfaceHeight;
extent.width = (int)externalSurfaceWidth;
extent.height = (int)-externalSurfaceHeight;
if (isCustomRects && customRects == CustomRectsType.TopDown)
{
extent.height = (int)-externalSurfaceHeight/2;
if (eyeid == 0)
offset.y = (int)(externalSurfaceHeight-externalSurfaceHeight/2);
}
else if (isCustomRects && customRects == CustomRectsType.LeftRight)
{
extent.width = (int)externalSurfaceWidth/2;
if (eyeid != 0)
offset.x = extent.width;
}
rect.offset = offset;
rect.extent = extent;
layerTextures.textureLayout = rect;
layerTextures[eyeid].textureLayout = rect;
return true; //No need to process texture queues
}
@@ -346,63 +418,73 @@ namespace VIVE.OpenXR.CompositionLayer
if (TextureParamsChanged())
{
//Destroy queues
DEBUG("SetLayerTexture: Texture params changed, need to re-init queues. layerID: " + layerID);
DestroyCompositionLayer();
DEBUG("SetLayerTexture: Texture params changed, need to re-init queues. layerID: " + ((eyeid ==0) ? layerID : layerIDRight));
if (layerID != 0)
{
DestroyCompositionLayer(0);
layerID = 0;
}
if (layerIDRight != 0)
{
DestroyCompositionLayer(1);
layerIDRight = 0;
}
reinitializationNeeded = true;
return false;
}
}
else
{
ERROR("SetLayerTexture: No texture found. layerID: " + layerID);
ERROR("SetLayerTexture: No texture found. layerID: " + ((eyeid ==0) ? layerID : layerIDRight));
return false;
}
if (isDynamicLayer || (!isDynamicLayer && !textureAcquiredOnce))
if (isDynamicLayer || (!isDynamicLayer && !textureAcquiredOnce[eyeid]))
{
//Get available texture id
compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>();
uint currentImageIndex;
IntPtr newTextureID = compositionLayerFeature.CompositionLayer_GetTexture(layerID, out currentImageIndex);
IntPtr newTextureID = compositionLayerFeature.CompositionLayer_GetTexture((eyeid ==0) ? layerID : layerIDRight, out currentImageIndex);
textureAcquired = true;
textureAcquiredOnce = true;
textureAcquired[eyeid] = true;
textureAcquiredOnce[eyeid] = true;
if (newTextureID == IntPtr.Zero)
{
ERROR("SetLayerTexture: Invalid Texture ID");
if (compositionLayerFeature.CompositionLayer_ReleaseTexture(layerID))
if (compositionLayerFeature.CompositionLayer_ReleaseTexture((eyeid ==0) ? layerID : layerIDRight))
{
textureAcquired = false;
textureAcquired[eyeid] = false;
}
return false;
}
bool textureIDUpdated = false;
layerTextures.currentAvailableTextureIndex = currentImageIndex;
IntPtr currentTextureID = layerTextures.GetCurrentAvailableTextureID();
layerTextures[eyeid].currentAvailableTextureIndex = currentImageIndex;
IntPtr currentTextureID = layerTextures[eyeid].GetCurrentAvailableTextureID();
if (currentTextureID == IntPtr.Zero || currentTextureID != newTextureID)
{
DEBUG("SetLayerTexture: Update Texture ID. layerID: " + layerID);
layerTextures.SetCurrentAvailableTextureID(newTextureID);
DEBUG("SetLayerTexture: Update Texture ID. layerID: " + ((eyeid ==0) ? layerID : layerIDRight));
layerTextures[eyeid].SetCurrentAvailableTextureID(newTextureID);
textureIDUpdated = true;
}
if (layerTextures.GetCurrentAvailableTextureID() == IntPtr.Zero)
if (layerTextures[eyeid].GetCurrentAvailableTextureID() == IntPtr.Zero)
{
ERROR("SetLayerTexture: Failed to get texture.");
return false;
}
// Create external texture
if (layerTextures.GetCurrentAvailableExternalTexture() == null || textureIDUpdated)
if (layerTextures[eyeid].GetCurrentAvailableExternalTexture() == null || textureIDUpdated)
{
DEBUG("SetLayerTexture: Create External Texture.");
layerTextures.SetCurrentAvailableExternalTexture(Texture2D.CreateExternalTexture(texture.width, texture.height, TextureFormat.RGBA32, false, isLinear, layerTextures.GetCurrentAvailableTextureID()));
layerTextures[eyeid].SetCurrentAvailableExternalTexture(Texture2D.CreateExternalTexture(texture.width, texture.height, TextureFormat.RGBA32, false, isLinear, layerTextures[eyeid].GetCurrentAvailableTextureID()));
}
if (layerTextures.externalTextures[layerTextures.currentAvailableTextureIndex] == null)
if (layerTextures[eyeid].externalTextures[layerTextures[eyeid].currentAvailableTextureIndex] == null)
{
ERROR("SetLayerTexture: Create External Texture Failed.");
return false;
@@ -411,28 +493,40 @@ namespace VIVE.OpenXR.CompositionLayer
//Set Texture Content
bool isContentSet = layerTextures.textureContentSet[layerTextures.currentAvailableTextureIndex];
bool isContentSet = layerTextures[eyeid].textureContentSet[layerTextures[eyeid].currentAvailableTextureIndex];
if (!isDynamicLayer && isContentSet)
{
return true;
}
int currentTextureWidth = layerTextures.GetCurrentAvailableExternalTexture().width;
int currentTextureHeight = layerTextures.GetCurrentAvailableExternalTexture().height;
int currentTextureWidth = layerTextures[eyeid].GetCurrentAvailableExternalTexture().width;
int currentTextureHeight = layerTextures[eyeid].GetCurrentAvailableExternalTexture().height;
//Set Texture Layout
offset.x = 0;
offset.y = 0;
extent.width = (int)currentTextureWidth;
extent.height = (int)currentTextureHeight;
if (isCustomRects && customRects == CustomRectsType.TopDown)
{
extent.height = (int)currentTextureHeight/2;
if (eyeid == 0)
offset.y = extent.height;
}
else if (isCustomRects && customRects == CustomRectsType.LeftRight)
{
extent.width = (int)currentTextureWidth/2;
if (eyeid != 0)
offset.x = extent.width;
}
rect.offset = offset;
rect.extent = extent;
layerTextures[eyeid].textureLayout = rect;
layerTextures.textureLayout = rect;
//Blit and copy texture
RenderTexture srcTexture = texture as RenderTexture;
RenderTexture srcTexture = ((eyeid == 0 || isCustomRects) ? textureLeft : textureRight) as RenderTexture;
int msaaSamples = 1;
if (srcTexture != null)
{
@@ -441,6 +535,8 @@ namespace VIVE.OpenXR.CompositionLayer
Material currentBlitMat = texture2DBlitMaterial;
DEBUG("RenderTextureDescriptor currentTextureWidth = " + currentTextureWidth + " currentTextureHeight = " + currentTextureHeight);
RenderTextureDescriptor rtDescriptor = new RenderTextureDescriptor(currentTextureWidth, currentTextureHeight, RenderTextureFormat.ARGB32, 0);
rtDescriptor.msaaSamples = msaaSamples;
rtDescriptor.autoGenerateMips = false;
@@ -454,8 +550,10 @@ namespace VIVE.OpenXR.CompositionLayer
}
blitTempRT.DiscardContents();
Texture dstTexture = layerTextures.GetCurrentAvailableExternalTexture();
Graphics.Blit(texture, blitTempRT, currentBlitMat);
Texture dstTexture = layerTextures[eyeid].GetCurrentAvailableExternalTexture();
Graphics.Blit((eyeid == 0) ? textureLeft : textureRight, blitTempRT, currentBlitMat);
Graphics.CopyTexture(blitTempRT, 0, 0, dstTexture, 0, 0);
//DEBUG("Blit and CopyTexture complete.");
@@ -470,12 +568,12 @@ namespace VIVE.OpenXR.CompositionLayer
return false;
}
layerTextures.textureContentSet[layerTextures.currentAvailableTextureIndex] = true;
layerTextures[eyeid].textureContentSet[layerTextures[eyeid].currentAvailableTextureIndex] = true;
bool releaseTextureResult = compositionLayerFeature.CompositionLayer_ReleaseTexture(layerID);
bool releaseTextureResult = compositionLayerFeature.CompositionLayer_ReleaseTexture((eyeid == 0) ? layerID : layerIDRight);
if (releaseTextureResult)
{
textureAcquired = false;
textureAcquired[eyeid] = false;
}
return releaseTextureResult;
@@ -527,7 +625,7 @@ namespace VIVE.OpenXR.CompositionLayer
bool enabledColorScaleBiasInShader = false;
XrCompositionLayerColorScaleBiasKHR CompositionLayerParamsColorScaleBias = new XrCompositionLayerColorScaleBiasKHR();
private void SubmitCompositionLayer() //Call at onBeforeRender
private void SubmitCompositionLayer(int eyeid, bool botheye) //Call at onBeforeRender
{
if (!isInitializationComplete && !isLayerReadyForSubmit) return;
compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>();
@@ -570,7 +668,7 @@ namespace VIVE.OpenXR.CompositionLayer
CompositionLayerParamsColorScaleBias.colorBias.a = 0.0f;
}
compositionLayerColorScaleBias.Submit_CompositionLayerColorBias(CompositionLayerParamsColorScaleBias, layerID);
compositionLayerColorScaleBias.Submit_CompositionLayerColorBias(CompositionLayerParamsColorScaleBias, (eyeid == 0) ? layerID : layerIDRight);
}
else if (enabledColorScaleBiasInShader) //Disable if color scale bias is no longer active
{
@@ -583,13 +681,13 @@ namespace VIVE.OpenXR.CompositionLayer
{
default:
case LayerShape.Quad:
compositionLayerFeature.Submit_CompositionLayerQuad(AssignCompositionLayerParamsQuad(), (OpenXR.CompositionLayer.LayerType)layerType, compositionDepth, layerID);
compositionLayerFeature.Submit_CompositionLayerQuad(AssignCompositionLayerParamsQuad(eyeid, botheye), (OpenXR.CompositionLayer.LayerType)layerType, compositionDepth, (eyeid == 0) ? layerID : layerIDRight);
break;
case LayerShape.Cylinder:
ViveCompositionLayerCylinder compositionLayerCylinderFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayerCylinder>();
if (compositionLayerCylinderFeature != null && compositionLayerCylinderFeature.CylinderExtensionEnabled)
{
compositionLayerCylinderFeature.Submit_CompositionLayerCylinder(AssignCompositionLayerParamsCylinder(), (OpenXR.CompositionLayer.LayerType)layerType, compositionDepth, layerID);
compositionLayerCylinderFeature.Submit_CompositionLayerCylinder(AssignCompositionLayerParamsCylinder(eyeid, botheye), (OpenXR.CompositionLayer.LayerType)layerType, compositionDepth, (eyeid == 0) ? layerID : layerIDRight);
}
break;
}
@@ -601,22 +699,22 @@ namespace VIVE.OpenXR.CompositionLayer
public delegate void OnDestroyCompositionLayer();
public event OnDestroyCompositionLayer OnDestroyCompositionLayerDelegate = null;
private void DestroyCompositionLayer()
private void DestroyCompositionLayer(int eyeid)
{
if (!isInitializationComplete || layerTextures == null)
if (layerTextures[eyeid] == null)
{
DEBUG("DestroyCompositionLayer: Layer already destroyed/not initialized.");
return;
}
DEBUG("DestroyCompositionLayer");
compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>();
if (textureAcquired)
if (textureAcquired[eyeid])
{
DEBUG("DestroyCompositionLayer: textureAcquired, releasing.");
textureAcquired = !compositionLayerFeature.CompositionLayer_ReleaseTexture(layerID);
textureAcquired[eyeid] = !compositionLayerFeature.CompositionLayer_ReleaseTexture((eyeid == 0) ? layerID : layerIDRight);
}
CompositionLayerRenderThreadSyncObject DestroyLayerSwapchainSyncObject = new CompositionLayerRenderThreadSyncObject(
@@ -631,26 +729,26 @@ namespace VIVE.OpenXR.CompositionLayer
if (!compositionLayerFeature.CompositionLayer_Destroy(task.layerID))
{
ERROR("estroyCompositionLayer: CompositionLayer_Destroy failed.");
ERROR("DestroyCompositionLayer: CompositionLayer_Destroy failed : " + task.layerID);
}
taskQueue.Release(task);
}
});
CompositionLayerRenderThreadTask.IssueDestroySwapchainEvent(DestroyLayerSwapchainSyncObject, layerID);
CompositionLayerRenderThreadTask.IssueDestroySwapchainEvent(DestroyLayerSwapchainSyncObject, (eyeid == 0) ? layerID : layerIDRight);
InitStatus = false;
isLayerReadyForSubmit = false;
isInitializationComplete = false;
textureAcquiredOnce = false;
textureAcquiredOnce[eyeid] = false;
foreach (Texture externalTexture in layerTextures.externalTextures)
foreach (Texture externalTexture in layerTextures[eyeid].externalTextures)
{
DEBUG("DestroyCompositionLayer: External textures");
if (externalTexture != null) Destroy(externalTexture);
}
layerTextures = null;
layerTextures[eyeid] = null;
if (generatedFallbackMeshFilter != null && generatedFallbackMeshFilter.mesh != null)
{
@@ -696,7 +794,7 @@ namespace VIVE.OpenXR.CompositionLayer
private List<XRInputSubsystem> inputSubsystems = new List<XRInputSubsystem>();
XrCompositionLayerQuad CompositionLayerParamsQuad = new XrCompositionLayerQuad();
XrExtent2Df quadSize = new XrExtent2Df();
private XrCompositionLayerQuad AssignCompositionLayerParamsQuad()
private XrCompositionLayerQuad AssignCompositionLayerParamsQuad(int eyeid, bool botheye)
{
compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>();
@@ -722,7 +820,14 @@ namespace VIVE.OpenXR.CompositionLayer
break;
}
CompositionLayerParamsQuad.subImage.imageRect = layerTextures.textureLayout;
if (!botheye) {
if (eyeid == 0)
CompositionLayerParamsQuad.eyeVisibility = XrEyeVisibility.XR_EYE_VISIBILITY_LEFT;
else
CompositionLayerParamsQuad.eyeVisibility = XrEyeVisibility.XR_EYE_VISIBILITY_RIGHT;
}
CompositionLayerParamsQuad.subImage.imageRect = layerTextures[eyeid].textureLayout;
CompositionLayerParamsQuad.subImage.imageArrayIndex = 0;
GetCompositionLayerPose(ref CompositionLayerParamsQuad.pose); //Update isHeadLock
@@ -762,13 +867,14 @@ namespace VIVE.OpenXR.CompositionLayer
quadSize.width = m_QuadWidth;
quadSize.height = m_QuadHeight;
CompositionLayerParamsQuad.size = quadSize;
return CompositionLayerParamsQuad;
}
XrCompositionLayerCylinderKHR CompositionLayerParamsCylinder = new XrCompositionLayerCylinderKHR();
private XrCompositionLayerCylinderKHR AssignCompositionLayerParamsCylinder()
private XrCompositionLayerCylinderKHR AssignCompositionLayerParamsCylinder(int eyeid, bool botheye)
{
compositionLayerFeature = OpenXRSettings.Instance.GetFeature<ViveCompositionLayer>();
@@ -830,7 +936,14 @@ namespace VIVE.OpenXR.CompositionLayer
break;
}
CompositionLayerParamsCylinder.subImage.imageRect = layerTextures.textureLayout;
if (!botheye) {
if (eyeid == 0)
CompositionLayerParamsQuad.eyeVisibility = XrEyeVisibility.XR_EYE_VISIBILITY_LEFT;
else
CompositionLayerParamsQuad.eyeVisibility = XrEyeVisibility.XR_EYE_VISIBILITY_RIGHT;
}
CompositionLayerParamsCylinder.subImage.imageRect = layerTextures[eyeid].textureLayout;
CompositionLayerParamsCylinder.subImage.imageArrayIndex = 0;
GetCompositionLayerPose(ref CompositionLayerParamsCylinder.pose);
CompositionLayerParamsCylinder.radius = m_CylinderRadius;
@@ -960,8 +1073,20 @@ namespace VIVE.OpenXR.CompositionLayer
public void TerminateLayer()
{
DEBUG("TerminateLayer: layerID: " + layerID);
DestroyCompositionLayer();
if (layerID != 0)
{
DEBUG("TerminateLayer: layerID: " + layerID);
DestroyCompositionLayer(0);
layerID = 0;
}
if (layerIDRight != 0)
{
DEBUG("TerminateLayer: layerIDRight: " + layerIDRight);
DestroyCompositionLayer(1);
layerIDRight = 0;
}
if (placeholderGenerated && compositionLayerPlaceholderPrefabGO != null)
{
@@ -977,9 +1102,10 @@ namespace VIVE.OpenXR.CompositionLayer
public bool TextureParamsChanged()
{
if (previousTexture != texture)
if (previousTextureLeft != textureLeft || previousTextureRight != textureRight)
{
previousTexture = texture;
previousTextureLeft = textureLeft;
previousTextureRight = textureRight;
return true;
}
@@ -1395,7 +1521,14 @@ namespace VIVE.OpenXR.CompositionLayer
return;
}
if (SetLayerTexture())
bool isBotheye = (textureRight == null || textureLeft == textureRight);
if (isCustomRects)
{
isBotheye = false;
}
if (SetLayerTexture(0))
{
isLayerReadyForSubmit = true;
}
@@ -1405,7 +1538,22 @@ namespace VIVE.OpenXR.CompositionLayer
DEBUG("Composition Layer Lifecycle OnBeforeRender: Layer not ready for submit.");
return;
}
SubmitCompositionLayer();
if (!isBotheye) {
if (SetLayerTexture(1))
{
isLayerReadyForSubmit = true;
}
if (!isLayerReadyForSubmit)
{
DEBUG("Composition Layer Lifecycle OnBeforeRender: Layer not ready for submit.");
return;
}
}
SubmitCompositionLayer(0, isBotheye);
if (!isBotheye)
SubmitCompositionLayer(1, isBotheye);
isLayerReadyForSubmit = false; //reset flag after submit
}
@@ -1635,6 +1783,12 @@ namespace VIVE.OpenXR.CompositionLayer
Right = 2,
}
public enum CustomRectsType
{
LeftRight = 1,
TopDown = 2,
}
#if UNITY_EDITOR
public enum CylinderLayerParamAdjustmentMode
{
@@ -1950,6 +2104,7 @@ namespace VIVE.OpenXR.CompositionLayer
return radius;
}
}
#endregion
}
}

View File

@@ -11,6 +11,7 @@ using VIVE.OpenXR.CompositionLayer.Passthrough;
namespace VIVE.OpenXR.CompositionLayer.Passthrough
{
[Obsolete("This class is deprecated. Please use PassthroughAPI instead.")]
public static class CompositionLayerPassthroughAPI
{
const string LOG_TAG = "CompositionLayerPassthroughAPI";
@@ -79,7 +80,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
new IntPtr(6), //Enter IntPtr(0) for backward compatibility (using createPassthrough to enable the passthrough feature), or enter IntPtr(6) to enable the passthrough feature based on the layer submitted to endframe.
XrPassthroughFormHTC.XR_PASSTHROUGH_FORM_PLANAR_HTC
);
XrResult res = XR_HTC_passthrough.xrCreatePassthroughHTC(createInfo, out passthrough);
XrResult res = passthroughFeature.CreatePassthroughHTC(createInfo, out passthrough);
if(res == XrResult.XR_SUCCESS)
{
ulong passthrough_ulong = passthrough;
@@ -192,7 +193,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
new IntPtr(6), //Enter IntPtr(0) for backward compatibility (using createPassthrough to enable the passthrough feature), or enter IntPtr(6) to enable the passthrough feature based on the layer submitted to endframe.
XrPassthroughFormHTC.XR_PASSTHROUGH_FORM_PROJECTED_HTC
);
XrResult res = XR_HTC_passthrough.xrCreatePassthroughHTC(createInfo, out passthrough);
XrResult res = passthroughFeature.CreatePassthroughHTC(createInfo, out passthrough);
if (res == XrResult.XR_SUCCESS)
{
ulong passthrough_ulong = passthrough;
@@ -301,7 +302,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
new IntPtr(6), //Enter IntPtr(0) for backward compatibility (using createPassthrough to enable the passthrough feature), or enter IntPtr(6) to enable the passthrough feature based on the layer submitted to endframe.
XrPassthroughFormHTC.XR_PASSTHROUGH_FORM_PROJECTED_HTC
);
XrResult res = XR_HTC_passthrough.xrCreatePassthroughHTC(createInfo, out passthrough);
XrResult res = passthroughFeature.CreatePassthroughHTC(createInfo, out passthrough);
if (res == XrResult.XR_SUCCESS)
{
ulong passthrough_ulong = passthrough;
@@ -400,7 +401,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
}
#if UNITY_STANDALONE
XrPassthroughHTC passthrough = passthrough2Layer[passthroughID].passthrough;
XR_HTC_passthrough.xrDestroyPassthroughHTC(passthrough);
passthroughFeature.DestroyPassthroughHTC(passthrough);
passthrough2IsUnderLay.Remove(passthroughID);
SubmitLayer();
passthrough2Layer.Remove(passthroughID);

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a6509bdf37b3b364eb80cb0df68435a3
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 5e0cbfbe15682c542acc5675d4503f72
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,46 @@
// Copyright HTC Corporation All Rights Reserved.
#if UNITY_EDITOR
using UnityEditor;
using UnityEngine;
using VIVE.OpenXR.CompositionLayer;
namespace VIVE.OpenXR.Editor.CompositionLayer
{
/// <summary>
/// Custom inspector for <see cref="ViveCompositionLayerExtraSettings"/>.
/// Exposes the sharpening enable flag, sharpening mode and sharpening level
/// serialized properties of the target feature.
/// </summary>
[CustomEditor(typeof(ViveCompositionLayerExtraSettings))]
internal class ViveCompositionLayerEditorExtraSettings : UnityEditor.Editor
{
	// Serialized property names on ViveCompositionLayerExtraSettings and their inspector labels.
	static string PropertyName_SharpeningEnable = "SettingsEditorEnableSharpening";
	static GUIContent Label_SharpeningEnable = new GUIContent("Enable Sharpening", "Enable Sharpening.");
	SerializedProperty Property_SharpeningEnable;
	static string PropertyName_SharpeningLevel = "SettingsEditorSharpeningLevel";
	static GUIContent Label_SharpeningLevel = new GUIContent("Sharpening Level", "Select Sharpening Level.");
	SerializedProperty Property_SharpeningLevel;
	static string PropertyName_SharpeningMode = "SettingsEditorSharpeningMode";
	static GUIContent Label_SharpeningMode = new GUIContent("Sharpening Mode", "Select Sharpening Mode.");
	SerializedProperty Property_SharpeningMode;

	// Cache the SerializedProperty handles once when the inspector is opened.
	void OnEnable()
	{
		Property_SharpeningEnable = serializedObject.FindProperty(PropertyName_SharpeningEnable);
		Property_SharpeningMode = serializedObject.FindProperty(PropertyName_SharpeningMode);
		Property_SharpeningLevel = serializedObject.FindProperty(PropertyName_SharpeningLevel);
	}

	public override void OnInspectorGUI()
	{
		serializedObject.Update();

		// Pass the static labels directly; the original wrapped each one in
		// "new GUIContent(Label_…)", allocating a throwaway copy on every repaint.
		EditorGUILayout.PropertyField(Property_SharpeningEnable, Label_SharpeningEnable);
		EditorGUILayout.PropertyField(Property_SharpeningMode, Label_SharpeningMode);
		EditorGUILayout.PropertyField(Property_SharpeningLevel, Label_SharpeningLevel);

		serializedObject.ApplyModifiedProperties();
	}
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a3dfbc6bb6d75454db700d2326157424
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 050772d662d04514ca3bb28fbe82ecd7
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,30 @@
// Copyright HTC Corporation All Rights Reserved.
#if UNITY_EDITOR
using UnityEditor;
using VIVE.OpenXR.FrameSynchronization;
namespace VIVE.OpenXR.Editor.FrameSynchronization
{
[CustomEditor(typeof(ViveFrameSynchronization))]
public class ViveFrameSynchronizationEditor : UnityEditor.Editor
{
	// Handle to the feature's "m_SynchronizationMode" serialized property, cached in OnEnable.
	SerializedProperty m_SynchronizationMode;

	// Look the property up once when the inspector becomes active.
	private void OnEnable() => m_SynchronizationMode = serializedObject.FindProperty("m_SynchronizationMode");

	// Draw the single synchronization-mode field and write any edits back to the target.
	public override void OnInspectorGUI()
	{
		serializedObject.Update();
		EditorGUILayout.PropertyField(m_SynchronizationMode);
		serializedObject.ApplyModifiedProperties();
	}
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d25b2e9fff2d6724b865e0fbd609da9d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a8bd17374612cce468393aa1acc9fa89
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,184 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEngine;
using VIVE.OpenXR.Interaction;
#if UNITY_EDITOR
using UnityEditor;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
namespace VIVE.OpenXR.Editor.Interaction
{
/// <summary>
/// Custom inspector for <see cref="ViveInteractions"/>.
/// Draws a help box plus toggle for each interaction sub-feature and, when the
/// target feature is enabled, mirrors the chosen toggles onto the corresponding
/// OpenXR interaction features of the active build target's OpenXR settings.
/// </summary>
[CustomEditor(typeof(ViveInteractions))]
public class ViveInteractionsEditor : UnityEditor.Editor
{
	SerializedProperty m_ViveHandInteraction, m_ViveWristTracker, m_ViveXRTracker;
#if UNITY_ANDROID
	SerializedProperty m_KHRHandInteraction;
#endif
	private void OnEnable()
	{
		m_ViveHandInteraction = serializedObject.FindProperty("m_ViveHandInteraction");
		m_ViveWristTracker = serializedObject.FindProperty("m_ViveWristTracker");
		m_ViveXRTracker = serializedObject.FindProperty("m_ViveXRTracker");
#if UNITY_ANDROID
		m_KHRHandInteraction = serializedObject.FindProperty("m_KHRHandInteraction");
#endif
	}

	public override void OnInspectorGUI()
	{
		serializedObject.Update();

		#region GUI
		// Style for informational help boxes.
		GUIStyle boxStyleInfo = new GUIStyle(EditorStyles.helpBox);
		boxStyleInfo.fontSize = 12;
		boxStyleInfo.wordWrap = true;

		// Bold style for warning help boxes.
		GUIStyle boxStyleWarning = new GUIStyle(EditorStyles.helpBox);
		boxStyleWarning.fontSize = 12;
		boxStyleWarning.fontStyle = FontStyle.Bold;
		// FIX: the original re-assigned boxStyleInfo.wordWrap here (copy-paste),
		// leaving the warning style without word wrap.
		boxStyleWarning.wordWrap = true;

		// ViveHandInteraction
		GUILayout.BeginHorizontal();
		GUILayout.Space(20);
		GUILayout.Label(
			"The VIVE Hand Interaction feature enables hand selection and squeezing functions of XR_HTC_hand_interaction extension.\n" +
			"Please note that enabling this feature impacts runtime performance.",
			boxStyleInfo);
		GUILayout.EndHorizontal();
		EditorGUILayout.PropertyField(m_ViveHandInteraction);

		// ViveWristTracker
		GUILayout.Space(20);
		GUILayout.BeginHorizontal();
		GUILayout.Space(20);
		GUILayout.Label(
			"The VIVE Wrist Tracker feature enables wrist tracker pose and button functions of XR_HTC_vive_wrist_tracker_interaction extension.\n" +
			"Please note that enabling this feature impacts runtime performance.",
			boxStyleInfo);
		GUILayout.EndHorizontal();
		EditorGUILayout.PropertyField(m_ViveWristTracker);

		// ViveXrTracker
		GUILayout.Space(20);
		GUILayout.BeginHorizontal();
		GUILayout.Space(20);
		GUILayout.Label(
			"The VIVE XR Tracker feature enables ultimate tracker pose and button functions.\n" +
			"WARNING:\n" +
			"Please be aware that enabling this feature significantly affects runtime performance.",
			boxStyleWarning);
		GUILayout.EndHorizontal();
		EditorGUILayout.PropertyField(m_ViveXRTracker);

#if UNITY_ANDROID
		// ViveHandInteractionExt
		GUILayout.Space(20);
		GUILayout.BeginHorizontal();
		GUILayout.Space(20);
		GUILayout.Label(
			"The KHR Hand Interaction feature enables hand functions of XR_EXT_hand_interaction extension.\n" +
			"Please note that enabling this feature impacts runtime performance.",
			boxStyleInfo);
		GUILayout.EndHorizontal();
		EditorGUILayout.PropertyField(m_KHRHandInteraction);
#endif
		#endregion

		// When the target feature is enabled, propagate the user's choices onto the
		// matching OpenXR interaction features for the current build target.
		ViveInteractions myScript = target as ViveInteractions;
		if (myScript.enabled)
		{
			bool viveHandInteraction = myScript.UseViveHandInteraction();
			bool viveWristTracker = myScript.UseViveWristTracker();
			bool viveXrTracker = myScript.UseViveXrTracker();
			bool khrHandInteraction = myScript.UseKhrHandInteraction();

			OpenXRSettings settings = null;
#if UNITY_ANDROID
			settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Android);
#elif UNITY_STANDALONE
			settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Standalone);
#endif
			if (settings != null)
			{
				bool addPathEnumeration = false;
				foreach (var feature in settings.GetFeatures<OpenXRInteractionFeature>())
				{
					if (feature is Hand.ViveHandInteraction) { feature.enabled = viveHandInteraction; }
					if (feature is Tracker.ViveWristTracker) { feature.enabled = viveWristTracker; }
					if (feature is Tracker.ViveXRTracker)
					{
						feature.enabled = viveXrTracker;
						// VivePathEnumeration is required only when the XR Tracker is in use.
						addPathEnumeration = viveXrTracker;
					}
					if (feature is Hand.ViveHandInteractionExt) { feature.enabled = khrHandInteraction; }
				}
				foreach (var feature in settings.GetFeatures<OpenXRFeature>())
				{
					if (addPathEnumeration && feature is VivePathEnumeration) { feature.enabled = true; }
				}
			}
		}

		serializedObject.ApplyModifiedProperties();
	}
}
/*public class ViveInteractionsBuildHook : OpenXRFeatureBuildHooks
{
public override int callbackOrder => 1;
public override Type featureType => typeof(VIVEFocus3Feature);
protected override void OnPostGenerateGradleAndroidProjectExt(string path)
{
}
protected override void OnPostprocessBuildExt(BuildReport report)
{
}
protected override void OnPreprocessBuildExt(BuildReport report)
{
var settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Android);
if (settings != null)
{
foreach (var feature in settings.GetFeatures<OpenXRFeature>())
{
if (feature is ViveInteractions && feature.enabled)
{
bool viveHandInteraction= ((ViveInteractions)feature).UseViveHandInteraction();
bool viveWristTracker = ((ViveInteractions)feature).UseViveWristTracker();
bool viveXrTracker = ((ViveInteractions)feature).UseViveXrTracker();
bool khrHandInteraction = ((ViveInteractions)feature).UseKhrHandInteraction();
Debug.LogFormat($"ViveInteractionsBuildHook() viveHandInteraction: {viveHandInteraction}, viveWristTracker: {viveWristTracker}, viveXrTracker: {viveXrTracker}, khrHandInteraction: {khrHandInteraction}");
EnableInteraction(viveHandInteraction, viveWristTracker, viveXrTracker, khrHandInteraction);
break;
}
}
}
}
private static void EnableInteraction(
bool viveHandInteraction = false,
bool viveWristTracker = false,
bool viveXrTracker = false,
bool khrHandInteraction = false)
{
var settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Android);
if (settings == null) { return; }
foreach (var feature in settings.GetFeatures<OpenXRInteractionFeature>())
{
if (feature is Hand.ViveHandInteraction) { feature.enabled = viveHandInteraction; Debug.LogFormat($"EnableInteraction() ViveHandInteraction: {feature.enabled}"); }
if (feature is Tracker.ViveWristTracker) { feature.enabled = viveWristTracker; Debug.LogFormat($"EnableInteraction() ViveWristTracker: {feature.enabled}"); }
if (feature is Tracker.ViveXRTracker) { feature.enabled = viveXrTracker; Debug.LogFormat($"EnableInteraction() ViveXRTracker: {feature.enabled}"); }
if (feature is Hand.ViveHandInteractionExt) { feature.enabled = khrHandInteraction; Debug.LogFormat($"EnableInteraction() ViveHandInteractionExt: {feature.enabled}"); }
}
}
}*/
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c7e32703a3206194580e534565abcf91
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,44 @@
// Copyright HTC Corporation All Rights Reserved.
#if UNITY_EDITOR
using UnityEditor;
namespace VIVE.OpenXR.Editor
{
[CustomEditor(typeof(VIVERig))]
public class VIVERigEditor : UnityEditor.Editor
{
    // Serialized properties of VIVERig cached in OnEnable to avoid repeated lookups per GUI frame.
    SerializedProperty m_TrackingOrigin, m_CameraOffset, m_CameraHeight, m_ActionAsset;

    /// <summary>
    /// Caches the VIVERig serialized properties when the inspector is enabled.
    /// </summary>
    private void OnEnable()
    {
        m_TrackingOrigin = serializedObject.FindProperty("m_TrackingOrigin");
        m_CameraOffset = serializedObject.FindProperty("m_CameraOffset");
        m_CameraHeight = serializedObject.FindProperty("m_CameraHeight");
        m_ActionAsset = serializedObject.FindProperty("m_ActionAsset");
    }

    /// <summary>
    /// Draws the VIVERig inspector: tracking origin, camera offset/height and,
    /// when the Input System is enabled, the input action asset.
    /// </summary>
    public override void OnInspectorGUI()
    {
        serializedObject.Update();

        // Cache the cast once; reused below when marking the object dirty.
        VIVERig myScript = target as VIVERig;

        EditorGUILayout.PropertyField(m_TrackingOrigin);
        EditorGUILayout.PropertyField(m_CameraOffset);
        EditorGUILayout.HelpBox(
            "Set the height of camera when the Tracking Origin is Device.",
            MessageType.Info);
        EditorGUILayout.PropertyField(m_CameraHeight);
#if ENABLE_INPUT_SYSTEM
        // The action asset is only meaningful when the Input System package is active.
        EditorGUILayout.PropertyField(m_ActionAsset);
#endif
        serializedObject.ApplyModifiedProperties();

        // Persist edits made through the inspector controls above.
        if (UnityEngine.GUI.changed)
            EditorUtility.SetDirty(myScript); // fix: use the cached cast instead of re-casting target (local was previously unused)
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4766014dc7f94c8468710cc3fd265f90
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,180 @@
# XR_HTC_anchor XR_HTC_anchor_persistence
## Name String
XR_HTC_anchor XR_HTC_anchor_persistence
## Revision
1
## Overview
This document provides an overview of how to use the AnchorManager to manage anchors in an OpenXR application, specifically using the XR_HTC_anchor and XR_HTC_anchor_persistence extensions.
## Introduction
Anchors in OpenXR allow applications to track specific points in space over time. The XR_HTC_anchor extension provides the basic functionality for creating and managing anchors, while the XR_HTC_anchor_persistence extension allows anchors to be persisted across sessions. The AnchorManager class simplifies the use of these extensions by providing high-level methods for common operations.
## Checking Extension Support
Before using any anchor-related functions, it's important to check if the extensions are supported on the current system.
```csharp
bool isAnchorSupported = AnchorManager.IsSupported();
bool isPersistedAnchorSupported = AnchorManager.IsPersistedAnchorSupported();
```
## Creating and Managing Anchors
### Creating an Anchor
To create a new anchor, use the CreateAnchor method. This method requires a Pose representing the anchor's position and orientation relative to the tracking space, and a name for the anchor.
```csharp
Pose anchorPose = new Pose(new Vector3(0, 0, 0), Quaternion.identity);
AnchorManager.Anchor newAnchor = AnchorManager.CreateAnchor(anchorPose, "MyAnchor");
```
### Getting an Anchor's Name
To retrieve the name of an existing anchor, use the GetSpatialAnchorName method.
```csharp
string anchorName;
bool success = AnchorManager.GetSpatialAnchorName(newAnchor, out anchorName);
if (success) {
Debug.Log("Anchor name: " + anchorName);
}
```
### Tracking Space and Pose
To get the current tracking space, use the GetTrackingSpace method. To retrieve the pose of an anchor relative to the current tracking space, use the GetTrackingSpacePose method.
```csharp
XrSpace trackingSpace = AnchorManager.GetTrackingSpace();
Pose anchorPose;
bool poseValid = AnchorManager.GetTrackingSpacePose(newAnchor, out anchorPose);
if (poseValid) {
Debug.Log("Anchor pose: " + anchorPose.position + ", " + anchorPose.rotation);
}
```
## Persisting Anchors
### Creating a Persisted Anchor Collection
To enable anchor persistence, create a persisted anchor collection using the CreatePersistedAnchorCollection method.
```csharp
Task createCollectionTask = AnchorManager.CreatePersistedAnchorCollection();
createCollectionTask.Wait();
```
### Persisting an Anchor
To persist an anchor, use the PersistAnchor method with the anchor and a unique name for the persisted anchor.
```csharp
string persistedAnchorName = "MyPersistedAnchor";
XrResult result = AnchorManager.PersistAnchor(newAnchor, persistedAnchorName);
if (result == XrResult.XR_SUCCESS) {
Debug.Log("Anchor persisted successfully.");
}
```
### Unpersisting an Anchor
To remove a persisted anchor, use the UnpersistAnchor method with the name of the persisted anchor.
```csharp
XrResult result = AnchorManager.UnpersistAnchor(persistedAnchorName);
if (result == XrResult.XR_SUCCESS) {
Debug.Log("Anchor unpersisted successfully.");
}
```
### Enumerating Persisted Anchors
To get a list of all persisted anchors, use the EnumeratePersistedAnchorNames method.
```csharp
string[] persistedAnchorNames;
XrResult result = AnchorManager.EnumeratePersistedAnchorNames(out persistedAnchorNames);
if (result == XrResult.XR_SUCCESS) {
foreach (var name in persistedAnchorNames) {
Debug.Log("Persisted anchor: " + name);
}
}
```
### Creating an Anchor from a Persisted Anchor
To create an anchor from a persisted anchor, use the CreateSpatialAnchorFromPersistedAnchor method.
```csharp
AnchorManager.Anchor trackableAnchor;
XrResult result = AnchorManager.CreateSpatialAnchorFromPersistedAnchor(persistedAnchorName, "NewAnchor", out trackableAnchor);
if (result == XrResult.XR_SUCCESS) {
Debug.Log("Anchor created from persisted anchor.");
}
```
## Exporting and Importing Persisted Anchors
### Exporting a Persisted Anchor
To export a persisted anchor to a buffer, use the ExportPersistedAnchor method.
```csharp
Task<(XrResult, string, byte[])> exportTask = AnchorManager.ExportPersistedAnchor(persistedAnchorName);
exportTask.Wait();
var (exportResult, exportName, buffer) = exportTask.Result;
if (exportResult == XrResult.XR_SUCCESS) {
// Save buffer to a file or use as needed
File.WriteAllBytes("anchor.pa", buffer);
}
```
### Importing a Persisted Anchor
To import a persisted anchor from a buffer, use the ImportPersistedAnchor method.
```csharp
byte[] buffer = File.ReadAllBytes("anchor.pa");
Task<XrResult> importTask = AnchorManager.ImportPersistedAnchor(buffer);
importTask.Wait();
if (importTask.Result == XrResult.XR_SUCCESS) {
Debug.Log("Anchor imported successfully.");
}
```
### Clearing Persisted Anchors
To clear all persisted anchors, use the ClearPersistedAnchors method.
```csharp
XrResult result = AnchorManager.ClearPersistedAnchors();
if (result == XrResult.XR_SUCCESS) {
Debug.Log("All persisted anchors cleared.");
}
```
## Conclusion
The AnchorManager class simplifies the management of anchors in OpenXR applications. By using the methods provided, you can easily create, persist, and manage anchors, ensuring that spatial data can be maintained across sessions. This document covers the basic operations; for more advanced usage, refer to the OpenXR specification and the implementation details of the AnchorManager class.

View File

@@ -1,4 +1,4 @@
// Copyright HTC Corporation All Rights Reserved.
// Copyright HTC Corporation All Rights Reserved.
// Remove FAKE_DATA if editor or windows is supported.
#if UNITY_EDITOR
@@ -10,222 +10,666 @@ using System.Runtime.InteropServices;
using UnityEngine;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
using VIVE.OpenXR.Feature;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
namespace VIVE.OpenXR.Anchor
namespace VIVE.OpenXR.Feature
{
using XrPersistedAnchorCollectionHTC = System.IntPtr;
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Anchor",
Desc = "VIVE's implementaion of the XR_HTC_anchor.",
Company = "HTC",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = kOpenxrExtensionString,
Version = "1.0.0",
BuildTargetGroups = new[] { BuildTargetGroup.Android },
FeatureId = featureId
)]
[OpenXRFeature(UiName = "VIVE XR Anchor (Beta)",
Desc = "VIVE's implementaion of the XR_HTC_anchor.",
Company = "HTC",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = kOpenxrExtensionString,
Version = "1.0.0",
BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
FeatureId = featureId
)]
#endif
public class ViveAnchor : OpenXRFeature
{
public const string kOpenxrExtensionString = "XR_HTC_anchor";
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string featureId = "vive.wave.openxr.feature.htcanchor";
private XrInstance m_XrInstance = 0;
private XrSession session = 0;
private XrSystemId m_XrSystemId = 0;
public class ViveAnchor : OpenXRFeature
{
public const string kOpenxrExtensionString = "XR_HTC_anchor XR_EXT_future XR_HTC_anchor_persistence";
#region struct, enum, const of this extensions
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string featureId = "vive.openxr.feature.htcanchor";
public struct XrSystemAnchorPropertiesHTC
{
public XrStructureType type;
public System.IntPtr next;
public XrBool32 supportsAnchor;
}
/// <summary>
/// Enable or disable the persisted anchor feature. Set it only valid in feature settings.
/// </summary>
public bool enablePersistedAnchor = true;
private XrInstance m_XrInstance = 0;
private XrSession session = 0;
private XrSystemId m_XrSystemId = 0;
private bool IsInited = false;
private bool IsPAInited = false;
private bool useFakeData = false;
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
public struct XrSpatialAnchorNameHTC
{
[MarshalAs(UnmanagedType.ByValTStr, SizeConst = 256)]
public string name;
}
#region struct, enum, const of this extensions
public struct XrSpatialAnchorCreateInfoHTC
{
public XrStructureType type;
public System.IntPtr next;
public XrSpace space;
public XrPosef poseInSpace;
public XrSpatialAnchorNameHTC name;
}
/// <summary>
/// An application can inspect whether the system is capable of anchor functionality by
/// chaining an XrSystemAnchorPropertiesHTC structure to the XrSystemProperties when calling
/// xrGetSystemProperties.The runtime must return XR_ERROR_FEATURE_UNSUPPORTED if
/// XrSystemAnchorPropertiesHTC::supportsAnchor was XR_FALSE.
/// supportsAnchor indicates if current system is capable of anchor functionality.
/// </summary>
public struct XrSystemAnchorPropertiesHTC
{
public XrStructureType type;
public System.IntPtr next;
public XrBool32 supportsAnchor;
}
#endregion
/// <summary>
/// name is a null-terminated UTF-8 string whose length is less than or equal to XR_MAX_SPATIAL_ANCHOR_NAME_SIZE_HTC.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrSpatialAnchorNameHTC
{
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)]
public byte[] name;
#region delegates and delegate instances
delegate XrResult DelegateXrCreateSpatialAnchorHTC(XrSession session, ref XrSpatialAnchorCreateInfoHTC createInfo, ref XrSpace anchor);
delegate XrResult DelegateXrGetSpatialAnchorNameHTC(XrSpace anchor, ref XrSpatialAnchorNameHTC name);
public XrSpatialAnchorNameHTC(string anchorName)
{
name = new byte[256];
byte[] utf8Bytes = System.Text.Encoding.UTF8.GetBytes(anchorName);
Array.Copy(utf8Bytes, name, Math.Min(utf8Bytes.Length, 255));
name[255] = 0;
}
DelegateXrCreateSpatialAnchorHTC XrCreateSpatialAnchorHTC;
DelegateXrGetSpatialAnchorNameHTC XrGetSpatialAnchorNameHTC;
#endregion delegates and delegate instances
public XrSpatialAnchorNameHTC(XrSpatialAnchorNameHTC anchorName)
{
name = new byte[256];
Array.Copy(anchorName.name, name, 256);
name[255] = 0;
}
#region override functions
/// <inheritdoc />
protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
{
Debug.Log("ViveAnchor HookGetInstanceProcAddr() ");
return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
}
public override readonly string ToString() {
if (name == null)
return string.Empty;
return System.Text.Encoding.UTF8.GetString(name).TrimEnd('\0');
}
}
/// <inheritdoc />
protected override bool OnInstanceCreate(ulong xrInstance)
{
//Debug.Log("VIVEAnchor OnInstanceCreate() ");
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
{
Debug.LogWarning("ViveAnchor OnInstanceCreate() " + kOpenxrExtensionString + " is NOT enabled.");
return false;
}
public struct XrSpatialAnchorCreateInfoHTC
{
public XrStructureType type;
public System.IntPtr next;
public XrSpace space;
public XrPosef poseInSpace;
public XrSpatialAnchorNameHTC name;
}
m_XrInstance = xrInstance;
//Debug.Log("OnInstanceCreate() " + m_XrInstance);
CommonWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddr);
SpaceWrapper.Instance.OnInstanceCreate(xrInstance, CommonWrapper.Instance.GetInstanceProcAddr);
public struct XrPersistedAnchorCollectionAcquireInfoHTC
{
public XrStructureType type;
public System.IntPtr next;
}
return GetXrFunctionDelegates(m_XrInstance);
}
public struct XrPersistedAnchorCollectionAcquireCompletionHTC
{
public XrStructureType type;
public System.IntPtr next;
public XrResult futureResult;
public System.IntPtr persistedAnchorCollection;
}
protected override void OnInstanceDestroy(ulong xrInstance)
{
CommonWrapper.Instance.OnInstanceDestroy();
SpaceWrapper.Instance.OnInstanceDestroy();
}
public struct XrSpatialAnchorPersistInfoHTC
{
public XrStructureType type;
public System.IntPtr next;
public XrSpace anchor;
public XrSpatialAnchorNameHTC persistedAnchorName;
}
/// <inheritdoc />
protected override void OnSessionCreate(ulong xrSession)
{
Debug.Log("ViveAnchor OnSessionCreate() ");
public struct XrSpatialAnchorFromPersistedAnchorCreateInfoHTC
{
public XrStructureType type;
public System.IntPtr next;
public System.IntPtr persistedAnchorCollection;
public XrSpatialAnchorNameHTC persistedAnchorName;
public XrSpatialAnchorNameHTC spatialAnchorName;
}
// here's one way you can grab the session
Debug.Log($"EXT: Got xrSession: {xrSession}");
session = xrSession;
}
public struct XrSpatialAnchorFromPersistedAnchorCreateCompletionHTC
{
public XrStructureType type;
public System.IntPtr next;
public XrResult futureResult;
public XrSpace anchor;
}
/// <inheritdoc />
protected override void OnSessionBegin(ulong xrSession)
{
Debug.Log("ViveAnchor OnSessionBegin() ");
Debug.Log($"EXT: xrBeginSession: {xrSession}");
}
public struct XrPersistedAnchorPropertiesGetInfoHTC
{
public XrStructureType type;
public System.IntPtr next;
public uint maxPersistedAnchorCount;
}
/// <inheritdoc />
protected override void OnSessionEnd(ulong xrSession)
{
Debug.Log("ViveAnchor OnSessionEnd() ");
Debug.Log($"EXT: about to xrEndSession: {xrSession}");
}
#endregion
// XXX Every millisecond the AppSpace switched from one space to another space. I don't know what is going on.
//private ulong appSpace;
//protected override void OnAppSpaceChange(ulong space)
//{
// //Debug.Log($"VIVEAnchor OnAppSpaceChange({appSpace} -> {space})");
// appSpace = space;
//}
#region delegates and delegate instances
public delegate XrResult DelegateXrCreateSpatialAnchorHTC(XrSession session, ref XrSpatialAnchorCreateInfoHTC createInfo, ref XrSpace anchor);
public delegate XrResult DelegateXrGetSpatialAnchorNameHTC(XrSpace anchor, ref XrSpatialAnchorNameHTC name);
public delegate XrResult DelegateXrAcquirePersistedAnchorCollectionAsyncHTC(XrSession session, ref XrPersistedAnchorCollectionAcquireInfoHTC acquireInfo, out IntPtr future);
public delegate XrResult DelegateXrAcquirePersistedAnchorCollectionCompleteHTC(IntPtr future, out XrPersistedAnchorCollectionAcquireCompletionHTC completion);
public delegate XrResult DelegateXrReleasePersistedAnchorCollectionHTC(IntPtr persistedAnchorCollection);
public delegate XrResult DelegateXrPersistSpatialAnchorAsyncHTC(XrPersistedAnchorCollectionHTC persistedAnchorCollection, ref XrSpatialAnchorPersistInfoHTC persistInfo, out IntPtr future);
public delegate XrResult DelegateXrPersistSpatialAnchorCompleteHTC(IntPtr future, out FutureWrapper.XrFutureCompletionEXT completion);
public delegate XrResult DelegateXrUnpersistSpatialAnchorHTC(IntPtr persistedAnchorCollection, ref XrSpatialAnchorNameHTC persistedAnchorName);
public delegate XrResult DelegateXrEnumeratePersistedAnchorNamesHTC( IntPtr persistedAnchorCollection, uint persistedAnchorNameCapacityInput, ref uint persistedAnchorNameCountOutput, [Out] XrSpatialAnchorNameHTC[] persistedAnchorNames);
public delegate XrResult DelegateXrCreateSpatialAnchorFromPersistedAnchorAsyncHTC(XrSession session, ref XrSpatialAnchorFromPersistedAnchorCreateInfoHTC spatialAnchorCreateInfo, out IntPtr future);
public delegate XrResult DelegateXrCreateSpatialAnchorFromPersistedAnchorCompleteHTC(IntPtr future, out XrSpatialAnchorFromPersistedAnchorCreateCompletionHTC completion);
public delegate XrResult DelegateXrClearPersistedAnchorsHTC(IntPtr persistedAnchorCollection);
public delegate XrResult DelegateXrGetPersistedAnchorPropertiesHTC(IntPtr persistedAnchorCollection, ref XrPersistedAnchorPropertiesGetInfoHTC getInfo);
public delegate XrResult DelegateXrExportPersistedAnchorHTC(IntPtr persistedAnchorCollection, ref XrSpatialAnchorNameHTC persistedAnchorName, uint dataCapacityInput, ref uint dataCountOutput, [Out] byte[] data);
public delegate XrResult DelegateXrImportPersistedAnchorHTC(IntPtr persistedAnchorCollection, uint dataCount, [In] byte[] data);
public delegate XrResult DelegateXrGetPersistedAnchorNameFromBufferHTC(IntPtr persistedAnchorCollection, uint bufferCount, byte[] buffer, ref XrSpatialAnchorNameHTC name);
/// <inheritdoc />
protected override void OnSystemChange(ulong xrSystem)
{
m_XrSystemId = xrSystem;
Debug.Log("ViveAnchor OnSystemChange() " + m_XrSystemId);
}
DelegateXrCreateSpatialAnchorHTC XrCreateSpatialAnchorHTC;
DelegateXrGetSpatialAnchorNameHTC XrGetSpatialAnchorNameHTC;
DelegateXrAcquirePersistedAnchorCollectionAsyncHTC XrAcquirePersistedAnchorCollectionAsyncHTC;
DelegateXrAcquirePersistedAnchorCollectionCompleteHTC XrAcquirePersistedAnchorCollectionCompleteHTC;
DelegateXrReleasePersistedAnchorCollectionHTC XrReleasePersistedAnchorCollectionHTC;
DelegateXrPersistSpatialAnchorAsyncHTC XrPersistSpatialAnchorAsyncHTC;
DelegateXrPersistSpatialAnchorCompleteHTC XrPersistSpatialAnchorCompleteHTC;
DelegateXrUnpersistSpatialAnchorHTC XrUnpersistSpatialAnchorHTC;
DelegateXrEnumeratePersistedAnchorNamesHTC XrEnumeratePersistedAnchorNamesHTC;
DelegateXrCreateSpatialAnchorFromPersistedAnchorAsyncHTC XrCreateSpatialAnchorFromPersistedAnchorAsyncHTC;
DelegateXrCreateSpatialAnchorFromPersistedAnchorCompleteHTC XrCreateSpatialAnchorFromPersistedAnchorCompleteHTC;
DelegateXrClearPersistedAnchorsHTC XrClearPersistedAnchorsHTC;
DelegateXrGetPersistedAnchorPropertiesHTC XrGetPersistedAnchorPropertiesHTC;
DelegateXrExportPersistedAnchorHTC XrExportPersistedAnchorHTC;
DelegateXrImportPersistedAnchorHTC XrImportPersistedAnchorHTC;
DelegateXrGetPersistedAnchorNameFromBufferHTC XrGetPersistedAnchorNameFromBufferHTC;
#endregion delegates and delegate instances
#endregion override functions
#region override functions
private bool GetXrFunctionDelegates(XrInstance xrInstance)
{
Debug.Log("ViveAnchor GetXrFunctionDelegates() ");
protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
{
// For LocateSpace, need WaitFrame's predictedDisplayTime.
ViveInterceptors.Instance.AddRequiredFunction("xrWaitFrame");
return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
}
bool ret = true;
IntPtr funcPtr = IntPtr.Zero;
OpenXRHelper.xrGetInstanceProcAddrDelegate GetAddr = CommonWrapper.Instance.GetInstanceProcAddr; // shorter name
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, xrInstance, "xrCreateSpatialAnchorHTC", out XrCreateSpatialAnchorHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, xrInstance, "xrGetSpatialAnchorNameHTC", out XrGetSpatialAnchorNameHTC);
/// <inheritdoc />
protected override bool OnInstanceCreate(ulong xrInstance)
{
#if FAKE_DATA
Debug.LogError("ViveAnchor OnInstanceCreate() Use FakeData");
useFakeData = true;
#endif
IsInited = false;
bool ret = true;
ret &= CommonWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddr);
ret &= SpaceWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddr);
return ret;
}
if (!ret)
{
Debug.LogError("ViveAnchor OnInstanceCreate() failed.");
return false;
}
#region functions of extension
/// <summary>
/// Helper function to get this feature' properties.
/// See <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystemProperties">xrGetSystemProperties</see>
/// </summary>
public XrResult GetProperties(out XrSystemAnchorPropertiesHTC anchorProperties)
{
anchorProperties = new XrSystemAnchorPropertiesHTC();
anchorProperties.type = XrStructureType.XR_TYPE_SYSTEM_ANCHOR_PROPERTIES_HTC;
//Debug.Log("VIVEAnchor OnInstanceCreate() ");
if (!OpenXRRuntime.IsExtensionEnabled("XR_HTC_anchor") && !useFakeData)
{
Debug.LogWarning("ViveAnchor OnInstanceCreate() XR_HTC_anchor is NOT enabled.");
return false;
}
IsInited = GetXrFunctionDelegates(xrInstance);
if (!IsInited)
{
Debug.LogError("ViveAnchor OnInstanceCreate() failed to get function delegates.");
return false;
}
m_XrInstance = xrInstance;
bool hasFuture = FutureWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddr);
// No error log because future will print.
#if FAKE_DATA
hasFuture = true;
#endif
IsPAInited = false;
bool hasPersistedAnchor = false;
do
{
if (!hasFuture)
{
Debug.LogWarning("ViveAnchor OnInstanceCreate() XR_HTC_anchor_persistence is NOT enabled because no XR_EXT_future.");
hasPersistedAnchor = false;
break;
}
hasPersistedAnchor = enablePersistedAnchor && OpenXRRuntime.IsExtensionEnabled("XR_HTC_anchor_persistence");
#if FAKE_DATA
hasPersistedAnchor = enablePersistedAnchor;
#endif
} while(false);
//Debug.Log("OnInstanceCreate() " + m_XrInstance);
if (hasPersistedAnchor)
IsPAInited = GetXrFunctionDelegatesPersistance(xrInstance);
if (!IsPAInited)
Debug.LogWarning("ViveAnchor OnInstanceCreate() XR_HTC_anchor_persistence is NOT enabled.");
return IsInited;
}
protected override void OnInstanceDestroy(ulong xrInstance)
{
m_XrInstance = 0;
IsInited = false;
IsPAInited = false;
CommonWrapper.Instance.OnInstanceDestroy();
SpaceWrapper.Instance.OnInstanceDestroy();
FutureWrapper.Instance.OnInstanceDestroy();
Debug.Log("ViveAnchor: OnInstanceDestroy()");
}
/// <inheritdoc />
protected override void OnSessionCreate(ulong xrSession)
{
//Debug.Log("ViveAnchor OnSessionCreate() ");
session = xrSession;
}
/// <inheritdoc />
protected override void OnSessionDestroy(ulong xrSession)
{
//Debug.Log("ViveAnchor OnSessionDestroy() ");
session = 0;
}
// XXX Every millisecond the AppSpace switched from one space to another space. I don't know what is going on.
//private ulong appSpace;
//protected override void OnAppSpaceChange(ulong space)
//{
// //Debug.Log($"VIVEAnchor OnAppSpaceChange({appSpace} -> {space})");
// appSpace = space;
//}
/// <inheritdoc />
protected override void OnSystemChange(ulong xrSystem)
{
m_XrSystemId = xrSystem;
//Debug.Log("ViveAnchor OnSystemChange() " + m_XrSystemId);
}
#endregion override functions
private bool GetXrFunctionDelegates(XrInstance inst)
{
Debug.Log("ViveAnchor GetXrFunctionDelegates() ");
bool ret = true;
OpenXRHelper.xrGetInstanceProcAddrDelegate GetAddr = CommonWrapper.Instance.GetInstanceProcAddr; // shorter name
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrCreateSpatialAnchorHTC", out XrCreateSpatialAnchorHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrGetSpatialAnchorNameHTC", out XrGetSpatialAnchorNameHTC);
return ret;
}
private bool GetXrFunctionDelegatesPersistance(XrInstance inst)
{
Debug.Log("ViveAnchor GetXrFunctionDelegatesPersistance() ");
bool ret = true;
OpenXRHelper.xrGetInstanceProcAddrDelegate GetAddr = CommonWrapper.Instance.GetInstanceProcAddr; // shorter name
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrAcquirePersistedAnchorCollectionAsyncHTC", out XrAcquirePersistedAnchorCollectionAsyncHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrAcquirePersistedAnchorCollectionCompleteHTC", out XrAcquirePersistedAnchorCollectionCompleteHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrReleasePersistedAnchorCollectionHTC", out XrReleasePersistedAnchorCollectionHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrPersistSpatialAnchorAsyncHTC", out XrPersistSpatialAnchorAsyncHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrPersistSpatialAnchorCompleteHTC", out XrPersistSpatialAnchorCompleteHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrUnpersistSpatialAnchorHTC", out XrUnpersistSpatialAnchorHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrEnumeratePersistedAnchorNamesHTC", out XrEnumeratePersistedAnchorNamesHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrCreateSpatialAnchorFromPersistedAnchorAsyncHTC", out XrCreateSpatialAnchorFromPersistedAnchorAsyncHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrCreateSpatialAnchorFromPersistedAnchorCompleteHTC", out XrCreateSpatialAnchorFromPersistedAnchorCompleteHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrClearPersistedAnchorsHTC", out XrClearPersistedAnchorsHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrGetPersistedAnchorPropertiesHTC", out XrGetPersistedAnchorPropertiesHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrExportPersistedAnchorHTC", out XrExportPersistedAnchorHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrImportPersistedAnchorHTC", out XrImportPersistedAnchorHTC);
ret &= OpenXRHelper.GetXrFunctionDelegate(GetAddr, inst, "xrGetPersistedAnchorNameFromBufferHTC", out XrGetPersistedAnchorNameFromBufferHTC);
return ret;
}
#region functions of extension
/// <summary>
/// Helper function to get this feature's properties.
/// See <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystemProperties">xrGetSystemProperties</see>
/// </summary>
/// <param name="anchorProperties">Output parameter to hold anchor properties.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult GetProperties(out XrSystemAnchorPropertiesHTC anchorProperties)
{
anchorProperties = new XrSystemAnchorPropertiesHTC();
anchorProperties.type = XrStructureType.XR_TYPE_SYSTEM_ANCHOR_PROPERTIES_HTC;
#if FAKE_DATA
if (Application.isEditor)
{
anchorProperties.type = XrStructureType.XR_TYPE_SYSTEM_ANCHOR_PROPERTIES_HTC;
anchorProperties.supportsAnchor = true;
return XrResult.XR_SUCCESS;
}
if (Application.isEditor)
{
anchorProperties.type = XrStructureType.XR_TYPE_SYSTEM_ANCHOR_PROPERTIES_HTC;
anchorProperties.supportsAnchor = true;
return XrResult.XR_SUCCESS;
}
#endif
return CommonWrapper.Instance.GetProperties(m_XrInstance, m_XrSystemId, ref anchorProperties);
}
return CommonWrapper.Instance.GetProperties(m_XrInstance, m_XrSystemId, ref anchorProperties);
}
public XrResult CreateSpatialAnchor(XrSpatialAnchorCreateInfoHTC createInfo, out XrSpace anchor)
{
anchor = default;
#if FAKE_DATA
if (Application.isEditor)
return XrResult.XR_SUCCESS;
#endif
var ret = XrCreateSpatialAnchorHTC(session, ref createInfo, ref anchor);
Debug.Log("ViveAnchor CreateSpatialAnchor() r=" + ret + ", a=" + anchor + ", bs=" + createInfo.space +
", pos=(" + createInfo.poseInSpace.position.x + "," + createInfo.poseInSpace.position.y + "," + createInfo.poseInSpace.position.z +
"), rot=(" + createInfo.poseInSpace.orientation.x + "," + createInfo.poseInSpace.orientation.y + "," + createInfo.poseInSpace.orientation.z + "," + createInfo.poseInSpace.orientation.w +
"), n=" + createInfo.name.name);
return ret;
}
/// <summary>
/// The CreateSpatialAnchor function creates a spatial anchor with specified base space and pose in the space.
/// The anchor is represented by an XrSpace and its pose can be tracked via xrLocateSpace.
/// Once the anchor is no longer needed, call xrDestroySpace to erase the anchor.
/// </summary>
/// <param name="createInfo">Information required to create the spatial anchor.</param>
/// <param name="anchor">Output parameter to hold the created anchor.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult CreateSpatialAnchor(XrSpatialAnchorCreateInfoHTC createInfo, out XrSpace anchor)
{
anchor = default;
if (!IsInited)
return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
if (session == 0)
return XrResult.XR_ERROR_SESSION_LOST;
public XrResult GetSpatialAnchorName(XrSpace anchor, out XrSpatialAnchorNameHTC name)
{
name = default;
#if FAKE_DATA
if (Application.isEditor)
{
name.name = "fake anchor";
return XrResult.XR_SUCCESS;
}
#endif
return XrGetSpatialAnchorNameHTC(anchor, ref name);
}
var ret = XrCreateSpatialAnchorHTC(session, ref createInfo, ref anchor);
//Debug.Log("ViveAnchor CreateSpatialAnchor() r=" + ret + ", a=" + anchor + ", bs=" + createInfo.space +
// ", pos=(" + createInfo.poseInSpace.position.x + "," + createInfo.poseInSpace.position.y + "," + createInfo.poseInSpace.position.z +
// "), rot=(" + createInfo.poseInSpace.orientation.x + "," + createInfo.poseInSpace.orientation.y + "," + createInfo.poseInSpace.orientation.z + "," + createInfo.poseInSpace.orientation.w +
// "), n=" + createInfo.name.name);
return ret;
}
#endregion
/// <summary>
/// Retrieves the human-readable name that was assigned to a spatial anchor.
/// </summary>
/// <param name="anchor">The XrSpace handle of the anchor to query.</param>
/// <param name="name">Receives the anchor's name on success.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult GetSpatialAnchorName(XrSpace anchor, out XrSpatialAnchorNameHTC name)
{
	name = default;
	if (IsInited)
		return XrGetSpatialAnchorNameHTC(anchor, ref name);
	return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
}
#region tools for user
/// <summary>
/// Reports whether the persisted anchor extension is supported and enabled.
/// </summary>
/// <returns>True if the persisted anchor extension has been initialized, false otherwise.</returns>
public bool IsPersistedAnchorSupported() => IsPAInited;
/// <summary>
/// According to XRInputSubsystem's tracking origin mode, return the corresponding XrSpace.
/// </summary>
/// <returns>The current application space handle.</returns>
public XrSpace GetTrackingSpace()
{
	XrSpace space = GetCurrentAppSpace();
	Debug.Log("ViveAnchor GetTrackingSpace() s=" + space);
	return space;
}
#endregion
}
}
/// <summary>
/// Creates a persisted anchor collection. This collection can be used to persist spatial anchors across sessions.
/// Many persisted anchor APIs need a persisted anchor collection to operate.
/// </summary>
/// <param name="future">Output the async future handle. Check the future to get the PersistedAnchorCollection handle.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult AcquirePersistedAnchorCollectionAsync(out IntPtr future)
{
	future = IntPtr.Zero;
	// Guard order matters: report a missing extension before a lost session.
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	if (session == 0)
		return XrResult.XR_ERROR_SESSION_LOST;
	var info = new XrPersistedAnchorCollectionAcquireInfoHTC
	{
		type = XrStructureType.XR_TYPE_PERSISTED_ANCHOR_COLLECTION_ACQUIRE_INFO_HTC,
		next = IntPtr.Zero,
	};
	return XrAcquirePersistedAnchorCollectionAsyncHTC(session, ref info, out future);
}
/// <summary>
/// Completes the asynchronous acquisition started by AcquirePersistedAnchorCollectionAsync.
/// Call this once the future is ready.
/// </summary>
/// <param name="future">The future handle returned by AcquirePersistedAnchorCollectionAsync.</param>
/// <param name="completion">Output parameter holding the completion data for the acquire operation.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult AcquirePersistedAnchorCollectionComplete(IntPtr future, out XrPersistedAnchorCollectionAcquireCompletionHTC completion)
{
	completion = new XrPersistedAnchorCollectionAcquireCompletionHTC();
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	return XrAcquirePersistedAnchorCollectionCompleteHTC(future, out completion);
}
/// <summary>
/// Destroys (releases) the persisted anchor collection.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to be destroyed.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult ReleasePersistedAnchorCollection(IntPtr persistedAnchorCollection)
{
	if (IsPAInited)
		return XrReleasePersistedAnchorCollectionHTC(persistedAnchorCollection);
	return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
}
/// <summary>
/// Persists a spatial anchor with the given name. The name should be unique.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="anchor">The spatial anchor to be persisted.</param>
/// <param name="name">The name of the persisted anchor.</param>
/// <param name="future">Output the async future handle for this persist operation.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult PersistSpatialAnchorAsync(IntPtr persistedAnchorCollection, XrSpace anchor, XrSpatialAnchorNameHTC name, out IntPtr future)
{
	future = IntPtr.Zero;
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	// The runtime requires the struct's type field to be set before the call.
	XrSpatialAnchorPersistInfoHTC persistInfo = new XrSpatialAnchorPersistInfoHTC
	{
		type = XrStructureType.XR_TYPE_SPATIAL_ANCHOR_PERSIST_INFO_HTC,
		anchor = anchor,
		persistedAnchorName = name
	};
	return XrPersistSpatialAnchorAsyncHTC(persistedAnchorCollection, ref persistInfo, out future);
}
/// <summary>
/// Completes the asynchronous persist operation started by PersistSpatialAnchorAsync.
/// Call this once the future is ready.
/// </summary>
/// <param name="future">The future handle returned by PersistSpatialAnchorAsync.</param>
/// <param name="completion">Output parameter holding the future's result code.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult PersistSpatialAnchorComplete(IntPtr future, out FutureWrapper.XrFutureCompletionEXT completion)
{
	completion = new FutureWrapper.XrFutureCompletionEXT() {
		type = XrStructureType.XR_TYPE_FUTURE_COMPLETION_EXT,
		next = IntPtr.Zero,
		futureResult = XrResult.XR_SUCCESS
	};
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	return XrPersistSpatialAnchorCompleteHTC(future, out completion);
}
/// <summary>
/// Unpersists the anchor with the given name.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="name">The name of the anchor to be unpersisted.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult UnpersistSpatialAnchor(IntPtr persistedAnchorCollection, XrSpatialAnchorNameHTC name)
{
	if (IsPAInited)
		return XrUnpersistSpatialAnchorHTC(persistedAnchorCollection, ref name);
	return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
}
/// <summary>
/// Enumerates all persisted anchor names.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="persistedAnchorNameCapacityInput">The capacity of the caller-supplied names array. Presumably 0 queries the required count (OpenXR two-call idiom) — TODO confirm against the runtime.</param>
/// <param name="persistedAnchorNameCountOutput">Output parameter to hold the count of persisted anchor names.</param>
/// <param name="persistedAnchorNames">Output parameter to hold the names of persisted anchors.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult EnumeratePersistedAnchorNames(IntPtr persistedAnchorCollection, uint persistedAnchorNameCapacityInput,
	ref uint persistedAnchorNameCountOutput, ref XrSpatialAnchorNameHTC[] persistedAnchorNames)
{
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	return XrEnumeratePersistedAnchorNamesHTC(persistedAnchorCollection, persistedAnchorNameCapacityInput, ref persistedAnchorNameCountOutput, persistedAnchorNames);
}
/// <summary>
/// Creates a spatial anchor from a persisted anchor, asynchronously.
/// </summary>
/// <param name="spatialAnchorCreateInfo">Information required to create the spatial anchor from persisted anchor.</param>
/// <param name="future">Output the async future handle. Complete it with CreateSpatialAnchorFromPersistedAnchorComplete to obtain the anchor.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult CreateSpatialAnchorFromPersistedAnchorAsync(XrSpatialAnchorFromPersistedAnchorCreateInfoHTC spatialAnchorCreateInfo, out IntPtr future)
{
	future = IntPtr.Zero;
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	if (session == 0)
		return XrResult.XR_ERROR_SESSION_LOST;
	return XrCreateSpatialAnchorFromPersistedAnchorAsyncHTC(session, ref spatialAnchorCreateInfo, out future);
}
/// <summary>
/// When the future is ready, call this function to get the result of the
/// spatial-anchor-from-persisted-anchor creation.
/// </summary>
/// <param name="future">The future handle returned by the async create call.</param>
/// <param name="completion">Receives the completion data, including the created anchor handle.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult CreateSpatialAnchorFromPersistedAnchorComplete(IntPtr future, out XrSpatialAnchorFromPersistedAnchorCreateCompletionHTC completion)
{
	completion = new XrSpatialAnchorFromPersistedAnchorCreateCompletionHTC();
	completion.type = XrStructureType.XR_TYPE_SPATIAL_ANCHOR_FROM_PERSISTED_ANCHOR_CREATE_COMPLETION_HTC;
	completion.next = IntPtr.Zero;
	completion.futureResult = XrResult.XR_SUCCESS;
	completion.anchor = 0;
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	return XrCreateSpatialAnchorFromPersistedAnchorCompleteHTC(future, out completion);
}
/// <summary>
/// Clears all persisted anchors.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult ClearPersistedAnchors(IntPtr persistedAnchorCollection)
{
	if (IsPAInited)
		return XrClearPersistedAnchorsHTC(persistedAnchorCollection);
	return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
}
/// <summary>
/// Gets the properties of the persisted anchor.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="getInfo">Output parameter to hold the properties of the persisted anchor.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult GetPersistedAnchorProperties(IntPtr persistedAnchorCollection, out XrPersistedAnchorPropertiesGetInfoHTC getInfo)
{
	// The struct's type field must be populated before handing it to the runtime;
	// the runtime fills in the remaining fields via the ref parameter.
	getInfo = new XrPersistedAnchorPropertiesGetInfoHTC
	{
		type = XrStructureType.XR_TYPE_PERSISTED_ANCHOR_PROPERTIES_GET_INFO_HTC
	};
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	return XrGetPersistedAnchorPropertiesHTC(persistedAnchorCollection, ref getInfo);
}
/// <summary>
/// Exports the persisted anchor to a buffer. The buffer can be used to import the anchor later or save to a file.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="persistedAnchorName">The name of the persisted anchor to be exported.</param>
/// <param name="data">Output parameter to hold the buffer containing the exported anchor; null on failure.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult ExportPersistedAnchor(IntPtr persistedAnchorCollection, XrSpatialAnchorNameHTC persistedAnchorName, out byte[] data)
{
	data = null;
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	// Two-call idiom: first query the required size with a zero-capacity call,
	// then allocate and fetch the actual bytes.
	uint dataCountOutput = 0;
	XrResult ret = XrExportPersistedAnchorHTC(persistedAnchorCollection, ref persistedAnchorName, 0, ref dataCountOutput, null);
	if (ret != XrResult.XR_SUCCESS)
	{
		// data is already null here; the original redundantly re-assigned it.
		Debug.LogError("ExportPersistedAnchor failed to get data size. ret=" + ret);
		return ret;
	}
	data = new byte[dataCountOutput];
	return XrExportPersistedAnchorHTC(persistedAnchorCollection, ref persistedAnchorName, (uint)data.Length, ref dataCountOutput, data);
}
/// <summary>
/// Imports the persisted anchor from a buffer. The buffer should be created by ExportPersistedAnchor.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="data">The buffer containing the persisted anchor data. Must not be null.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult ImportPersistedAnchor(IntPtr persistedAnchorCollection, byte[] data)
{
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	// Guard against a null buffer — consistent with GetPersistedAnchorNameFromBuffer() —
	// instead of throwing NullReferenceException on data.Length.
	if (data == null)
		return XrResult.XR_ERROR_VALIDATION_FAILURE;
	return XrImportPersistedAnchorHTC(persistedAnchorCollection, (uint)data.Length, data);
}
/// <summary>
/// Gets the name of the persisted anchor from a buffer. The buffer should be created by ExportPersistedAnchor.
/// </summary>
/// <param name="persistedAnchorCollection">The persisted anchor collection to operate.</param>
/// <param name="buffer">The exported anchor bytes; must not be null.</param>
/// <param name="name">Receives the anchor's name on success.</param>
/// <returns>XrResult indicating success or failure.</returns>
public XrResult GetPersistedAnchorNameFromBuffer(IntPtr persistedAnchorCollection, byte[] buffer, out XrSpatialAnchorNameHTC name)
{
	name = new XrSpatialAnchorNameHTC();
	if (!IsPAInited)
		return XrResult.XR_ERROR_EXTENSION_NOT_PRESENT;
	if (buffer == null)
		return XrResult.XR_ERROR_VALIDATION_FAILURE;
	uint bufferSize = (uint)buffer.Length;
	return XrGetPersistedAnchorNameFromBufferHTC(persistedAnchorCollection, bufferSize, buffer, ref name);
}
#endregion
#region tools for user
/// <summary>
/// According to XRInputSubsystem's tracking origin mode, return the corresponding XrSpace.
/// </summary>
/// <returns>The current application space handle.</returns>
public XrSpace GetTrackingSpace()
{
	var s = GetCurrentAppSpace();
	//Debug.Log("ViveAnchor GetTrackingSpace() s=" + s);
	return s;
}
#endregion
}
}

View File

@@ -0,0 +1,203 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEditor;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
using UnityEngine;
using System;
using System.Runtime.InteropServices;
#if UNITY_EDITOR
using UnityEditor.XR.OpenXR.Features;
#endif
namespace VIVE.OpenXR.CompositionLayer
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Composition Layer (Extra Settings) (Beta)",
	Desc = "Enable this feature to use the Composition Layer Extra Settings.",
	Company = "HTC",
	DocumentationLink = "..\\Documentation",
	OpenxrExtensionStrings = kOpenxrExtensionStrings,
	Version = "1.0.0",
	BuildTargetGroups = new[] { BuildTargetGroup.Android },
	FeatureId = featureId
)]
#endif
public class ViveCompositionLayerExtraSettings : OpenXRFeature
{
	const string LOG_TAG = "VIVE.OpenXR.ViveCompositionLayer.ExtraSettings";
	// Logging helpers: every message is prefixed with LOG_TAG for log filtering.
	static void DEBUG(string msg) { Debug.Log(LOG_TAG + " " + msg); }
	static void WARNING(string msg) { Debug.LogWarning(LOG_TAG + " " + msg); }
	static void ERROR(string msg) { Debug.LogError(LOG_TAG + " " + msg); }
	/// <summary>
	/// Settings Editor Enable Sharpening or Not.
	/// </summary>
	public bool SettingsEditorEnableSharpening = false;
	/// <summary>
	/// Support Sharpening or Not. Updated at session begin from the runtime.
	/// </summary>
	public bool supportSharpening = false;
	/// <summary>
	/// Settings Editor Sharpening Mode.
	/// </summary>
	public XrSharpeningModeHTC SettingsEditorSharpeningMode = XrSharpeningModeHTC.FAST;
	/// <summary>
	/// Settings Editor Sharpening Level.
	/// </summary>
	[Range(0.0f, 1.0f)]
	public float SettingsEditorSharpeningLevel = 1.0f;
	/// <summary>
	/// The feature id string. This is used to give the feature a well known id for reference.
	/// </summary>
	public const string featureId = "vive.openxr.feature.compositionlayer.extrasettings";
	/// <summary>
	/// OpenXR specification.
	/// </summary>
	public const string kOpenxrExtensionStrings = "XR_HTC_composition_layer_extra_settings";
	#region OpenXR Life Cycle
	private bool m_XrInstanceCreated = false;
	/// <summary>
	/// The XR instance is created or not.
	/// </summary>
	public bool XrInstanceCreated
	{
		get { return m_XrInstanceCreated; }
	}
	private XrInstance m_XrInstance = 0;
	/// <summary>
	/// Called after xrCreateInstance. Warns when the required extension is not enabled.
	/// </summary>
	/// <param name="xrInstance">The created instance handle.</param>
	/// <returns>Always true; the feature stays active even without the extension.</returns>
	protected override bool OnInstanceCreate(ulong xrInstance)
	{
		foreach (string kOpenxrExtensionString in kOpenxrExtensionStrings.Split(' '))
		{
			if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
			{
				WARNING("OnInstanceCreate() " + kOpenxrExtensionString + " is NOT enabled.");
			}
		}
		m_XrInstanceCreated = true;
		m_XrInstance = xrInstance;
		DEBUG("OnInstanceCreate() " + m_XrInstance);
		return true;
	}
	/// <summary>
	/// Called after xrDestroyInstance.
	/// </summary>
	/// <param name="xrInstance">The destroyed instance handle.</param>
	protected override void OnInstanceDestroy(ulong xrInstance)
	{
		// Only clear cached state when the destroyed instance matches the one we
		// recorded, consistent with other VIVE OpenXR features (e.g. FacialTracking).
		if (m_XrInstance == xrInstance)
		{
			m_XrInstanceCreated = false;
			m_XrInstance = 0;
		}
		DEBUG("OnInstanceDestroy() " + xrInstance);
	}
	private XrSystemId m_XrSystemId = 0;
	/// <summary>
	/// Called when the system id changes; caches it for later native queries.
	/// </summary>
	protected override void OnSystemChange(ulong xrSystem)
	{
		m_XrSystemId = xrSystem;
		DEBUG("OnSystemChange() " + m_XrSystemId);
	}
	private bool m_XrSessionCreated = false;
	/// <summary>
	/// The XR session is created or not.
	/// </summary>
	public bool XrSessionCreated
	{
		get { return m_XrSessionCreated; }
	}
	private XrSession m_XrSession = 0;
	/// <summary>
	/// Called after xrCreateSession; caches the session handle.
	/// </summary>
	protected override void OnSessionCreate(ulong xrSession)
	{
		m_XrSession = xrSession;
		m_XrSessionCreated = true;
		DEBUG("OnSessionCreate() " + m_XrSession);
	}
	private bool m_XrSessionEnding = false;
	/// <summary>
	/// The XR session is ending or not.
	/// </summary>
	public bool XrSessionEnding
	{
		get { return m_XrSessionEnding; }
	}
	/// <summary>
	/// Called at xrBeginSession. Applies the editor-configured sharpening when the
	/// runtime reports support for the extra-settings extension.
	/// </summary>
	protected override void OnSessionBegin(ulong xrSession)
	{
		m_XrSessionEnding = false;
		DEBUG("OnSessionBegin() " + m_XrSession);
		//enable Sharpening
		if (OpenXRRuntime.IsExtensionEnabled("XR_HTC_composition_layer_extra_settings"))
		{
			ViveCompositionLayer_UpdateSystemProperties(m_XrInstance, m_XrSystemId);
			supportSharpening = ViveCompositionLayer_IsSupportSharpening();
			if (supportSharpening && SettingsEditorEnableSharpening)
			{
				EnableSharpening(SettingsEditorSharpeningMode, SettingsEditorSharpeningLevel);
			}
		}
	}
	/// <summary>
	/// Called at xrEndSession; marks the session as ending.
	/// </summary>
	protected override void OnSessionEnd(ulong xrSession)
	{
		m_XrSessionEnding = true;
		DEBUG("OnSessionEnd() " + m_XrSession);
	}
	/// <summary>
	/// Called after xrDestroySession; clears the created flag.
	/// </summary>
	protected override void OnSessionDestroy(ulong xrSession)
	{
		m_XrSessionCreated = false;
		DEBUG("OnSessionDestroy() " + xrSession);
	}
	#endregion
	#region Wrapper Functions
	private const string ExtLib = "viveopenxr";
	[DllImportAttribute(ExtLib, EntryPoint = "viveCompositionLayer_UpdateSystemProperties")]
	private static extern int VIVEOpenXR_ViveCompositionLayer_UpdateSystemProperties(XrInstance instance, XrSystemId system_id);
	// Refreshes the native plugin's cached system properties for the given instance/system.
	private int ViveCompositionLayer_UpdateSystemProperties(XrInstance instance, XrSystemId system_id)
	{
		return VIVEOpenXR_ViveCompositionLayer_UpdateSystemProperties(instance, system_id);
	}
	[DllImportAttribute(ExtLib, EntryPoint = "viveCompositionLayer_IsSupportSharpening")]
	private static extern bool VIVEOpenXR_ViveCompositionLayer_IsSupportSharpening();
	// Queries the native plugin whether sharpening is supported on this system.
	private bool ViveCompositionLayer_IsSupportSharpening()
	{
		return VIVEOpenXR_ViveCompositionLayer_IsSupportSharpening();
	}
	[DllImportAttribute(ExtLib, EntryPoint = "viveCompositionLayer_enableSharpening")]
	private static extern int VIVEOpenXR_ViveCompositionLayer_enableSharpening(XrSharpeningModeHTC sharpeningMode, float sharpeningLevel);
	/// <summary>
	/// Enable the sharpening setting applying to the projection layer.
	/// </summary>
	/// <param name="sharpeningMode">The sharpening mode in <see cref="XrSharpeningModeHTC"/>.</param>
	/// <param name="sharpeningLevel">The sharpening level in float [0, 1].</param>
	/// <returns>True for success.</returns>
	public bool EnableSharpening(XrSharpeningModeHTC sharpeningMode, float sharpeningLevel)
	{
		return (VIVEOpenXR_ViveCompositionLayer_enableSharpening(sharpeningMode, sharpeningLevel) == 0);
	}
	[DllImportAttribute(ExtLib, EntryPoint = "viveCompositionLayer_disableSharpening")]
	private static extern int VIVEOpenXR_ViveCompositionLayer_DisableSharpening();
	/// <summary>
	/// Disable the sharpening setting on the projection layer.
	/// </summary>
	/// <returns>True for success.</returns>
	public bool DisableSharpening()
	{
		return (VIVEOpenXR_ViveCompositionLayer_DisableSharpening() == 0);
	}
	#endregion
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f26de592e4135874baf6e64cc94183be
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,4 +1,4 @@
// Copyright HTC Corporation All Rights Reserved.
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Collections.Generic;
@@ -67,65 +67,6 @@ namespace VIVE.OpenXR.CompositionLayer
}
}
public struct XrCompositionLayerFlags : IEquatable<UInt64>
{
private readonly UInt64 value;
public XrCompositionLayerFlags(UInt64 u)
{
value = u;
}
public static implicit operator UInt64(XrCompositionLayerFlags xrBool)
{
return xrBool.value;
}
public static implicit operator XrCompositionLayerFlags(UInt64 u)
{
return new XrCompositionLayerFlags(u);
}
public bool Equals(XrCompositionLayerFlags other)
{
return value == other.value;
}
public bool Equals(UInt64 other)
{
return value == other;
}
public override bool Equals(object obj)
{
return obj is XrCompositionLayerFlags && Equals((XrCompositionLayerFlags)obj);
}
public override int GetHashCode()
{
return value.GetHashCode();
}
public override string ToString()
{
return value.ToString();
}
public static bool operator ==(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.Equals(b); }
public static bool operator !=(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return !a.Equals(b); }
public static bool operator >=(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value >= b.value; }
public static bool operator <=(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value <= b.value; }
public static bool operator >(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value > b.value; }
public static bool operator <(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value < b.value; }
public static XrCompositionLayerFlags operator +(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value + b.value; }
public static XrCompositionLayerFlags operator -(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value - b.value; }
public static XrCompositionLayerFlags operator *(XrCompositionLayerFlags a, XrCompositionLayerFlags b) { return a.value * b.value; }
public static XrCompositionLayerFlags operator /(XrCompositionLayerFlags a, XrCompositionLayerFlags b)
{
if (b.value == 0)
{
throw new DivideByZeroException();
}
return a.value / b.value;
}
}
public struct XrSwapchainCreateFlags : IEquatable<UInt64>
{
@@ -288,6 +229,36 @@ namespace VIVE.OpenXR.CompositionLayer
public XrColor4f colorScale;
public XrColor4f colorBias;
}
/// <summary>
/// Sharpening setting structure — presumably chained via <c>next</c> on a composition layer; confirm against the XR_HTC_composition_layer_extra_settings spec.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrCompositionLayerSharpeningSettingHTC
{
	/// <summary>The XrStructureType of this structure.</summary>
	public XrStructureType type;
	/// <summary>NULL or a pointer to the next structure in a structure chain.</summary>
	public IntPtr next;
	/// <summary>The sharpening mode.</summary>
	public XrSharpeningModeHTC mode;
	/// <summary>The sharpening level in float [0, 1].</summary>
	public float sharpeningLevel;
}
/// <summary>
/// Super-sampling setting structure — presumably chained via <c>next</c> on a composition layer; confirm against the XR_HTC_composition_layer_extra_settings spec.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrCompositionLayerSuperSamplingSettingHTC
{
	/// <summary>The XrStructureType of this structure.</summary>
	public XrStructureType type;
	/// <summary>NULL or a pointer to the next structure in a structure chain.</summary>
	public IntPtr next;
	/// <summary>The super-sampling mode.</summary>
	public XrSuperSamplingModeHTC mode;
}
/// <summary>Sharpening modes used by <see cref="XrCompositionLayerSharpeningSettingHTC"/>.</summary>
public enum XrSharpeningModeHTC
{
	FAST = 0,
	NORMAL = 1,
	QUALITY = 2,
	AUTOMATIC = 3,
}
/// <summary>Super-sampling modes used by <see cref="XrCompositionLayerSuperSamplingSettingHTC"/>.</summary>
public enum XrSuperSamplingModeHTC
{
	FAST = 0,
	NORMAL = 1,
	QUALITY = 2,
	AUTOMATIC = 3,
}
public enum GraphicsAPI
{
GLES3 = 1,
@@ -410,29 +381,6 @@ namespace VIVE.OpenXR.CompositionLayer
}
};
/// <summary>
/// The XrCompositionLayerBaseHeader structure is not intended to be directly used, but forms a basis for defining current and future structures containing composition layer information. The XrFrameEndInfo structure contains an array of pointers to these polymorphic header structures.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrCompositionLayerBaseHeader
{
/// <summary>
/// The XrStructureType of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// Next is NULL or a pointer to the next structure in a structure chain, such as XrPassthroughMeshTransformInfoHTC.
/// </summary>
public IntPtr next;
/// <summary>
/// A bitmask of XrCompositionLayerFlagBits describing flags to apply to the layer.
/// </summary>
public XrCompositionLayerFlags layerFlags;
/// <summary>
/// The XrSpace in which the layer will be kept stable over time.
/// </summary>
public XrSpace space;
};
/// <summary>
/// The application can specify the XrPassthroughColorHTC to adjust the alpha value of the passthrough. The range is between 0.0f and 1.0f, 1.0f means opaque.
/// </summary>
[StructLayout(LayoutKind.Sequential)]

View File

@@ -18,7 +18,7 @@ using UnityEditor.XR.OpenXR.Features;
namespace VIVE.OpenXR.CompositionLayer.Passthrough
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Composition Layer (Passthrough)",
[OpenXRFeature(UiName = "VIVE XR Composition Layer (Passthrough) (Deprecated)",
Desc = "Enable this feature to use the HTC Passthrough feature.",
Company = "HTC",
DocumentationLink = "..\\Documentation",
@@ -28,6 +28,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
FeatureId = featureId
)]
#endif
[Obsolete("This class is deprecated. Please use VivePassthrough instead.")]
public class ViveCompositionLayerPassthrough : OpenXRFeature
{
const string LOG_TAG = "VIVE.OpenXR.ViveCompositionLayerPassthrough";

View File

@@ -7,6 +7,7 @@ using System.Runtime.InteropServices;
namespace VIVE.OpenXR.CompositionLayer.Passthrough
{
[Obsolete("This enumeration is deprecated. Please use XrStructureType instead.")]
//[StructLayout(LayoutKind.Sequential)]
public enum XrStructureTypeHTC
{
@@ -16,6 +17,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_HTC = 1000317004,
}
[Obsolete("This enumeration is deprecated. Please use VIVE.OpenXR.Passthrough.PassthroughLayerForm instead.")]
public enum PassthroughLayerForm
{
///<summary> Fullscreen Passthrough Form</summary>
@@ -24,6 +26,7 @@ namespace VIVE.OpenXR.CompositionLayer.Passthrough
Projected = 1
}
[Obsolete("This enumeration is deprecated. Please use VIVE.OpenXR.Passthrough.ProjectedPassthroughSpaceType instead.")]
public enum ProjectedPassthroughSpaceType
{
///<summary>

View File

@@ -49,7 +49,14 @@ namespace VIVE.OpenXR.DisplayRefreshRate
public const string featureId = "vive.openxr.feature.displayrefreshrate";
#region OpenXR Life Cycle
private XrInstance m_XrInstance = 0;
protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
{
ViveInterceptors.Instance.AddRequiredFunction("xrPollEvent");
return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
}
private XrInstance m_XrInstance = 0;
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateInstance">xrCreateInstance</see> is done.
/// </summary>

View File

@@ -0,0 +1,111 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UnityEngine;
namespace VIVE.OpenXR.DisplayRefreshRate
{
// -------------------- 12.52. XR_FB_display_refresh_rate --------------------
#region New Structures
/// <summary>
/// On platforms which support dynamically adjusting the display refresh rate, application developers may request a specific display refresh rate in order to improve the overall user experience.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrEventDataDisplayRefreshRateChangedFB
{
	/// <summary>
	/// The <see cref="XrStructureType"/> of this structure.
	/// </summary>
	public XrStructureType type;
	/// <summary>
	/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
	/// </summary>
	public IntPtr next;
	/// <summary>
	/// fromDisplayRefreshRate is the previous display refresh rate.
	/// </summary>
	public float fromDisplayRefreshRate;
	/// <summary>
	/// toDisplayRefreshRate is the new display refresh rate.
	/// </summary>
	public float toDisplayRefreshRate;
	/// <summary>
	/// The XR_FB_display_refresh_rate extension must be enabled prior to using XrEventDataDisplayRefreshRateChangedFB.
	/// </summary>
	/// <param name="in_type">The structure type; expected XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB.</param>
	/// <param name="in_next">NULL or a pointer to the next structure in a structure chain.</param>
	/// <param name="in_fromDisplayRefreshRate">The previous display refresh rate.</param>
	/// <param name="in_toDisplayRefreshRate">The new display refresh rate.</param>
	public XrEventDataDisplayRefreshRateChangedFB(XrStructureType in_type, IntPtr in_next, float in_fromDisplayRefreshRate, float in_toDisplayRefreshRate)
	{
		type = in_type;
		next = in_next;
		fromDisplayRefreshRate = in_fromDisplayRefreshRate;
		toDisplayRefreshRate = in_toDisplayRefreshRate;
	}
	/// <summary>
	/// Retrieves the identity value of XrEventDataDisplayRefreshRateChangedFB.
	/// </summary>
	public static XrEventDataDisplayRefreshRateChangedFB identity
	{
		get
		{
			return new XrEventDataDisplayRefreshRateChangedFB(XrStructureType.XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB, IntPtr.Zero, 0.0f, 0.0f); // user is default present
		}
	}
	/// <summary>
	/// Interprets a polled event buffer as a refresh-rate-changed event.
	/// </summary>
	/// <param name="eventDataBuffer">The raw event buffer (e.g. from xrPollEvent).</param>
	/// <param name="eventDataDisplayRefreshRateChangedFB">Receives the decoded event on success; the identity value otherwise.</param>
	/// <returns>True when the buffer's type is XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB.</returns>
	public static bool Get(XrEventDataBuffer eventDataBuffer, out XrEventDataDisplayRefreshRateChangedFB eventDataDisplayRefreshRateChangedFB)
	{
		eventDataDisplayRefreshRateChangedFB = identity;
		if (eventDataBuffer.type == XrStructureType.XR_TYPE_EVENT_DATA_DISPLAY_REFRESH_RATE_CHANGED_FB)
		{
			eventDataDisplayRefreshRateChangedFB.next = eventDataBuffer.next;
			// NOTE(review): assumes varying packs two floats back-to-back
			// (bytes 0-3 = from rate, bytes 4-7 = to rate) — confirm against the runtime's event layout.
			eventDataDisplayRefreshRateChangedFB.fromDisplayRefreshRate = BitConverter.ToSingle(eventDataBuffer.varying, 0);
			eventDataDisplayRefreshRateChangedFB.toDisplayRefreshRate = BitConverter.ToSingle(eventDataBuffer.varying, 4);
			return true;
		}
		return false;
	}
}
/// <summary>
/// Simple broadcast hub for display-refresh-rate change notifications.
/// </summary>
public static class ViveDisplayRefreshRateChanged
{
	/// <summary>Signature for callbacks fired when the display refresh rate changes.</summary>
	public delegate void OnDisplayRefreshRateChanged(float fromDisplayRefreshRate, float toDisplayRefreshRate);
	private static List<OnDisplayRefreshRateChanged> allEventListeners = new List<OnDisplayRefreshRateChanged>();
	/// <summary>Registers a callback; duplicate registrations are ignored.</summary>
	public static void Listen(OnDisplayRefreshRateChanged callback)
	{
		if (allEventListeners.Contains(callback))
			return;
		allEventListeners.Add(callback);
	}
	/// <summary>Unregisters a previously registered callback.</summary>
	public static void Remove(OnDisplayRefreshRateChanged callback)
	{
		if (allEventListeners.Contains(callback))
			allEventListeners.Remove(callback);
	}
	/// <summary>
	/// Invokes every registered callback. A callback that throws is logged and
	/// dropped so it cannot break later notifications.
	/// </summary>
	public static void Send(float fromDisplayRefreshRate, float toDisplayRefreshRate)
	{
		if (allEventListeners == null)
			return;
		// Iterate backwards so removing a faulty listener does not shift unvisited entries.
		for (int i = allEventListeners.Count - 1; i >= 0; i--)
		{
			OnDisplayRefreshRateChanged listener = allEventListeners[i];
			try
			{
				listener(fromDisplayRefreshRate, toDisplayRefreshRate);
			}
			catch (Exception e)
			{
				Debug.Log("Event : " + e.ToString());
				allEventListeners.Remove(listener);
				Debug.Log("Event : A listener is removed due to exception.");
			}
		}
	}
}
#endregion
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 146db425ea37c2746ad7c9ae08a5a480
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -15,8 +15,8 @@ using UnityEditor.XR.OpenXR.Features;
namespace VIVE.OpenXR.EyeTracker
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Eye Tracker",
BuildTargetGroups = new[] { BuildTargetGroup.Standalone },
[OpenXRFeature(UiName = "VIVE XR Eye Tracker (Beta)",
BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
Company = "HTC",
Desc = "Support the eye tracker extension.",
DocumentationLink = "..\\Documentation",

View File

@@ -51,10 +51,10 @@ Through feeding the blend shape values of lip expression to an avatar, its facia
XR_LIP_EXPRESSION_MOUTH_UPPER_OVERTURN_HTC = 9,
XR_LIP_EXPRESSION_MOUTH_LOWER_OVERTURN_HTC = 10,
XR_LIP_EXPRESSION_MOUTH_POUT_HTC = 11,
XR_LIP_EXPRESSION_MOUTH_SMILE_RIGHT_HTC = 12,
XR_LIP_EXPRESSION_MOUTH_SMILE_LEFT_HTC = 13,
XR_LIP_EXPRESSION_MOUTH_SAD_RIGHT_HTC = 14,
XR_LIP_EXPRESSION_MOUTH_SAD_LEFT_HTC = 15,
XR_LIP_EXPRESSION_MOUTH_RAISER_RIGHT_HTC = 12,
XR_LIP_EXPRESSION_MOUTH_RAISER_LEFT_HTC = 13,
XR_LIP_EXPRESSION_MOUTH_STRETCHER_RIGHT_HTC = 14,
XR_LIP_EXPRESSION_MOUTH_STRETCHER_LEFT_HTC = 15,
XR_LIP_EXPRESSION_CHEEK_PUFF_RIGHT_HTC = 16,
XR_LIP_EXPRESSION_CHEEK_PUFF_LEFT_HTC = 17,
XR_LIP_EXPRESSION_CHEEK_SUCK_HTC = 18,

View File

@@ -68,8 +68,11 @@ namespace VIVE.OpenXR.FacialTracking
/// <param name="xrInstance">The instance to destroy.</param>
protected override void OnInstanceDestroy(ulong xrInstance)
{
m_XrInstanceCreated = false;
m_XrInstance = 0;
if (m_XrInstance == xrInstance)
{
m_XrInstanceCreated = false;
m_XrInstance = 0;
}
DEBUG("OnInstanceDestroy() " + xrInstance);
}

View File

@@ -201,19 +201,19 @@ namespace VIVE.OpenXR.FacialTracking
/// <summary>
/// This blend shape raises the right side of the mouth further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_SMILE_RIGHT_HTC = 12,
XR_LIP_EXPRESSION_MOUTH_RAISER_RIGHT_HTC = 12,
/// <summary>
/// This blend shape raises the left side of the mouth further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_SMILE_LEFT_HTC = 13,
XR_LIP_EXPRESSION_MOUTH_RAISER_LEFT_HTC = 13,
/// <summary>
/// This blend shape lowers the right side of the mouth further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_SAD_RIGHT_HTC = 14,
XR_LIP_EXPRESSION_MOUTH_STRETCHER_RIGHT_HTC = 14,
/// <summary>
/// This blend shape lowers the left side of the mouth further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_SAD_LEFT_HTC = 15,
XR_LIP_EXPRESSION_MOUTH_STRETCHER_LEFT_HTC = 15,
/// <summary>
/// This blend shape puffs up the right side of the cheek further with a higher value.
/// </summary>
@@ -433,7 +433,7 @@ namespace VIVE.OpenXR.FacialTracking
/// </summary>
/// <param name="facialTracker">An <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> previously created by <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateFacialTrackerHTC">xrCreateFacialTrackerHTC</see>.</param>
/// <param name="facialExpressions">A pointer to <see cref="XrFacialExpressionsHTC">XrFacialExpressionsHTC</see> receiving the returned facial expressions.</param>
/// <returns></returns>
/// <returns>XR_SUCCESS for success.</returns>
public delegate XrResult xrGetFacialExpressionsHTCDelegate(
XrFacialTrackerHTC facialTracker,
ref XrFacialExpressionsHTC facialExpressions);

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 6368702137725614d8d921ef6c1220f1
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: e80a989be51974a4e88bdc41872d53c9
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,185 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Runtime.InteropServices;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
using VIVE.OpenXR.SecondaryViewConfiguration;
namespace VIVE.OpenXR.FirstPersonObserver
{
/// <summary>
/// Name: FirstPersonObserver.cs
/// Role: OpenXR FirstPersonObserver Extension Class
/// Responsibility: The OpenXR extension implementation and its lifecycles logic in OpenXR
/// </summary>
#if UNITY_EDITOR
[OpenXRFeature(UiName = "XR MSFT First Person Observer",
BuildTargetGroups = new[] { BuildTargetGroup.Android },
Company = "HTC",
Desc = "Request the application to render an additional first-person view of the scene.",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = OPEN_XR_EXTENSION_STRING,
Version = "1.0.0",
FeatureId = FeatureId,
Hidden = true)]
#endif
public class ViveFirstPersonObserver : OpenXRFeature
{
	/// <summary>
	/// The log identification.
	/// </summary>
	private const string LogTag = "VIVE.OpenXR.FirstPersonObserver";

	/// <summary>
	/// The feature id string. This is used to give the feature a well known id for reference.
	/// </summary>
	public const string FeatureId = "vive.openxr.feature.firstpersonobserver";

	/// <summary>
	/// OpenXR specification <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_MSFT_first_person_observer">12.114. XR_MSFT_first_person_observer</see>.
	/// </summary>
	public const string OPEN_XR_EXTENSION_STRING = "XR_MSFT_first_person_observer";

	private static ViveFirstPersonObserver m_Instance = null;
	/// <summary>
	/// ViveFirstPersonObserver static instance (Singleton), resolved lazily from the OpenXR settings.
	/// </summary>
	public static ViveFirstPersonObserver Instance
	{
		get
		{
			if (m_Instance == null)
				m_Instance = OpenXRSettings.Instance.GetFeature<ViveFirstPersonObserver>();
			return m_Instance;
		}
	}

	// True after this feature observed a successful xrCreateInstance.
	private bool m_XrInstanceCreated = false;
	// The instance handle received through OnInstanceCreate.
	private XrInstance m_XrInstance = 0;
	// Delegate resolved from the loader-provided xrGetInstanceProcAddr pointer.
	private OpenXRHelper.xrGetInstanceProcAddrDelegate m_XrGetInstanceProcAddr = null;

	#region OpenXR life-cycle events
	/// <summary>
	/// Called after xrCreateInstance.
	/// </summary>
	/// <param name="xrInstance">Handle of the xrInstance.</param>
	/// <returns>Returns true if successful. Returns false otherwise.</returns>
	protected override bool OnInstanceCreate(ulong xrInstance)
	{
		// Both this extension and the secondary-view-configuration extension must be enabled.
		if (!IsExtensionEnabled())
		{
			Warning($"OnInstanceCreate() {OPEN_XR_EXTENSION_STRING} or {ViveSecondaryViewConfiguration.OPEN_XR_EXTENSION_STRING} is NOT enabled.");
			return false;
		}

		m_XrInstanceCreated = true;
		m_XrInstance = xrInstance;
		Debug("OnInstanceCreate() " + m_XrInstance);

		if (!GetXrFunctionDelegates(m_XrInstance))
		{
			Error("Get function pointer of OpenXRFunctionPointerAccessor failed.");
			return false;
		}
		Debug("Get function pointer of OpenXRFunctionPointerAccessor succeed.");

		return base.OnInstanceCreate(xrInstance);
	}
	#endregion

	/// <summary>
	/// Get the OpenXR function via XrInstance.
	/// </summary>
	/// <param name="xrInstance">The XrInstance is provided by the Unity OpenXR Plugin.</param>
	/// <returns>Return true if get successfully. False otherwise.</returns>
	private bool GetXrFunctionDelegates(XrInstance xrInstance)
	{
		// Guard clause: the loader must have supplied a valid xrGetInstanceProcAddr pointer.
		if (xrGetInstanceProcAddr == IntPtr.Zero)
		{
			Error("Get function pointer of openXRFunctionPointerAccessor failed due to the xrGetInstanceProcAddr is null.");
			return false;
		}

		Debug("Get function pointer of openXRFunctionPointerAccessor.");
		m_XrGetInstanceProcAddr = Marshal.GetDelegateForFunctionPointer<OpenXRHelper.xrGetInstanceProcAddrDelegate>(xrGetInstanceProcAddr);
		if (m_XrGetInstanceProcAddr == null)
		{
			Error("Get function pointer of openXRFunctionPointerAccessor failed due to the XrGetInstanceProcAddr is null.");
			return false;
		}

		return true;
	}

	#region Utilities functions
	/// <summary>
	/// Check ViveFirstPersonObserver extension is enabled or not.
	/// </summary>
	/// <returns>Return true if enabled. False otherwise.</returns>
	public static bool IsExtensionEnabled()
	{
		return OpenXRRuntime.IsExtensionEnabled(OPEN_XR_EXTENSION_STRING) &&
			ViveSecondaryViewConfiguration.IsExtensionEnabled();
	}

	/// <summary>
	/// Print log with tag "VIVE.OpenXR.FirstPersonObserver".
	/// </summary>
	/// <param name="msg">The log you want to print.</param>
	private static void Debug(string msg)
	{
		UnityEngine.Debug.Log(LogTag + " " + msg);
	}

	/// <summary>
	/// Print warning message with tag "VIVE.OpenXR.FirstPersonObserver".
	/// </summary>
	/// <param name="msg">The warning message you want to print.</param>
	private static void Warning(string msg)
	{
		UnityEngine.Debug.LogWarning(LogTag + " " + msg);
	}

	/// <summary>
	/// Print an error message with the tag "VIVE.OpenXR.FirstPersonObserver".
	/// </summary>
	/// <param name="msg">The error message you want to print.</param>
	private static void Error(string msg)
	{
		UnityEngine.Debug.LogError(LogTag + " " + msg);
	}
	#endregion
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 311462c0560d6ec4ea9ed080a6a77a3b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -85,12 +85,12 @@ namespace VIVE.OpenXR
private static extern IntPtr intercept_xrGetInstanceProcAddr(IntPtr func);
[DllImport(ExtLib, EntryPoint = "applyFoveationHTC")]
private static extern XrResult applyFoveationHTC(XrFoveationModeHTC mode, UInt32 configCount, XrFoveationConfigurationHTC[] configs, UInt64 flags);
private static extern XrResult applyFoveationHTC(Foveation.XrFoveationModeHTC mode, UInt32 configCount, Foveation.XrFoveationConfigurationHTC[] configs, UInt64 flags);
/// <summary>
/// function to apply HTC Foveation
/// </summary>
public static XrResult ApplyFoveationHTC(XrFoveationModeHTC mode, UInt32 configCount, XrFoveationConfigurationHTC[] configs, UInt64 flags = 0)
public static XrResult ApplyFoveationHTC(Foveation.XrFoveationModeHTC mode, UInt32 configCount, Foveation.XrFoveationConfigurationHTC[] configs, UInt64 flags = 0)
{
//Debug.Log("Unity HTCFoveat:configCount " + configCount);
//if (configCount >=2) {

View File

@@ -0,0 +1,38 @@
// Copyright HTC Corporation All Rights Reserved.
namespace VIVE.OpenXR.Foveation
{
	using System.Runtime.InteropServices;

	#region 12.86. XR_HTC_foveation
	/// <summary>
	/// The XrFoveationModeHTC identifies the different foveation modes.
	/// </summary>
	public enum XrFoveationModeHTC
	{
		XR_FOVEATION_MODE_DISABLE_HTC = 0,
		XR_FOVEATION_MODE_FIXED_HTC = 1,
		XR_FOVEATION_MODE_DYNAMIC_HTC = 2,
		// Custom mode uses the per-view XrFoveationConfigurationHTC supplied by the application.
		XR_FOVEATION_MODE_CUSTOM_HTC = 3,
		XR_FOVEATION_MODE_MAX_ENUM_HTC = 0x7FFFFFFF
	}

	/// <summary>
	/// The XrFoveationLevelHTC identifies the pixel density drop level of periphery area.
	/// </summary>
	public enum XrFoveationLevelHTC
	{
		XR_FOVEATION_LEVEL_NONE_HTC = 0,
		XR_FOVEATION_LEVEL_LOW_HTC = 1,
		XR_FOVEATION_LEVEL_MEDIUM_HTC = 2,
		XR_FOVEATION_LEVEL_HIGH_HTC = 3,
		XR_FOVEATION_LEVEL_MAX_ENUM_HTC = 0x7FFFFFFF
	}

	/// <summary>
	/// The XrFoveationConfigurationHTC structure contains the custom foveation settings for the corresponding views.
	/// This struct is marshaled to the native runtime (see applyFoveationHTC), so its layout is
	/// declared explicitly to match the native XrFoveationConfigurationHTC, consistent with the
	/// other interop structs in this plugin.
	/// </summary>
	[StructLayout(LayoutKind.Sequential)]
	public struct XrFoveationConfigurationHTC
	{
		/// <summary>
		/// The pixel density drop level of the periphery area.
		/// </summary>
		public XrFoveationLevelHTC level;
		/// <summary>
		/// The field of view (in degrees) kept clear of any pixel density drop.
		/// </summary>
		public float clearFovDegree;
		/// <summary>
		/// The focal center offset of the view.
		/// </summary>
		public XrVector2f focalCenterOffset;
	}
	#endregion
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6b3c2ad651da4e5498f49d3a26038620
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 83e064b5ad501784c898651afc560f8e
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 73f495f4b0dd14245b3997ffbe23713a
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,19 @@
# 12.1. XR_HTC_frame_synchronization
## Overview
Traditionally, the runtime uses the latest frame, which causes jitter. With Frame Synchronization, the rendered frame will not be discarded, giving a smooth gameplay experience.
However, if the GPU cannot consistently finish rendering on time (rendering more than one vsync at a time), jitter will still occur. Therefore, reducing GPU load is key to smooth gameplay.
## Name String
XR_HTC_frame_synchronization
## Revision
1
## New Enum Constants
[XrStructureType](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrStructureType) enumeration is extended with:
- XR_TYPE_FRAME_SYNCHRONIZATION_SESSION_BEGIN_INFO_HTC
## New Enums
- XrFrameSynchronizationModeHTC
## New Structures
- XrFrameSynchronizationSessionBeginInfoHTC
## VIVE Plugin
Enable "VIVE XR Frame Synchronization" in "Project Settings > XR Plug-in Management > OpenXR > Android Tab > OpenXR Feature Groups" to use the frame synchronization provided by VIVE OpenXR plugin.

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 190a1897e332b7f45893a24c3f696567
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a8078a459f75c5d419c46950680d6446
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,165 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Text;
using UnityEngine;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
namespace VIVE.OpenXR.FrameSynchronization
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Frame Synchronization (Beta)",
BuildTargetGroups = new[] { BuildTargetGroup.Android },
Company = "HTC",
Desc = "Support the Frame Synchronization extension.",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = kOpenxrExtensionString,
Version = "1.0.0",
FeatureId = featureId)]
#endif
public class ViveFrameSynchronization : OpenXRFeature
{
	#region Log
	const string LOG_TAG = "VIVE.OpenXR.FrameSynchronization.ViveFrameSynchronization";
	StringBuilder m_sb = null;
	// Lazily created StringBuilder shared by the DEBUG/WARNING/ERROR helpers below.
	StringBuilder sb {
		get {
			if (m_sb == null) { m_sb = new StringBuilder(); }
			return m_sb;
		}
	}
	void DEBUG(StringBuilder msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
	void WARNING(StringBuilder msg) { Debug.LogWarningFormat("{0} {1}", LOG_TAG, msg); }
	void ERROR(StringBuilder msg) { Debug.LogErrorFormat("{0} {1}", LOG_TAG, msg); }
	#endregion

	/// <summary>
	/// The extension name of 12.1. XR_HTC_frame_synchronization.
	/// </summary>
	public const string kOpenxrExtensionString = "XR_HTC_frame_synchronization";

	/// <summary>
	/// The feature id string. This is used to give the feature a well known id for reference.
	/// </summary>
	public const string featureId = "vive.openxr.feature.framesynchronization";

	#region OpenXR Life Cycle
	/// <inheritdoc />
	protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
	{
		sb.Clear().Append("HookGetInstanceProcAddr() xrBeginSession"); DEBUG(sb);
		// xrBeginSession must be intercepted so the session begin info chain can carry
		// the frame synchronization begin info.
		ViveInterceptors.Instance.AddRequiredFunction("xrBeginSession");
		return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
	}

	// Suppress the "assigned but never used" warning for the bookkeeping flag below.
	// BUGFIX: was "#pragma warning enable" — not a valid C# directive (only disable/restore),
	// which left ALL warnings disabled for the remainder of the file.
#pragma warning disable
	private bool m_XrInstanceCreated = false;
#pragma warning restore
	private XrInstance m_XrInstance = 0;
	/// <summary>
	/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateInstance">xrCreateInstance</see> is done.
	/// </summary>
	/// <param name="xrInstance">The created instance.</param>
	/// <returns>True for valid <see cref="XrInstance">XrInstance</see></returns>
	protected override bool OnInstanceCreate(ulong xrInstance)
	{
		if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
		{
			sb.Clear().Append("OnInstanceCreate() ").Append(kOpenxrExtensionString).Append(" is NOT enabled."); WARNING(sb);
			return false;
		}

		m_XrInstance = xrInstance;
		m_XrInstanceCreated = true;
		sb.Clear().Append("OnInstanceCreate() ").Append(m_XrInstance); DEBUG(sb);

		ActivateFrameSynchronization(true);

		return true;
	}

	/// <summary>
	/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroyInstance">xrDestroyInstance</see> is done.
	/// </summary>
	/// <param name="xrInstance">The instance to destroy.</param>
	protected override void OnInstanceDestroy(ulong xrInstance)
	{
		sb.Clear().Append("OnInstanceDestroy() ").Append(xrInstance).Append(", current: ").Append(m_XrInstance); DEBUG(sb);
		if (m_XrInstance == xrInstance)
		{
			m_XrInstanceCreated = false;
			m_XrInstance = 0;
		}
	}

	// Suppress the "assigned but never used" warning for the bookkeeping flag below.
	// BUGFIX: was "#pragma warning enable" — not a valid C# directive; see note above.
#pragma warning disable
	private bool m_XrSessionCreated = false;
#pragma warning restore
	private XrSession m_XrSession = 0;
	/// <summary>
	/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateSession">xrCreateSession</see> is done.
	/// </summary>
	/// <param name="xrSession">The created session ID.</param>
	protected override void OnSessionCreate(ulong xrSession)
	{
		m_XrSession = xrSession;
		m_XrSessionCreated = true;
		sb.Clear().Append("OnSessionCreate() ").Append(m_XrSession); DEBUG(sb);
	}

	protected override void OnSessionEnd(ulong xrSession)
	{
		sb.Clear().Append("OnSessionEnd() ").Append(xrSession).Append(", current: ").Append(m_XrSession); DEBUG(sb);
	}

	/// <summary>
	/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroySession">xrDestroySession</see> is done.
	/// </summary>
	/// <param name="xrSession">The session ID to destroy.</param>
	protected override void OnSessionDestroy(ulong xrSession)
	{
		sb.Clear().Append("OnSessionDestroy() ").Append(xrSession).Append(", current: ").Append(m_XrSession); DEBUG(sb);
		if (m_XrSession == xrSession)
		{
			m_XrSessionCreated = false;
			m_XrSession = 0;
			ActivateFrameSynchronization(false);
		}
	}

	private XrSystemId m_XrSystemId = 0;
	/// <summary>
	/// Called when the <see cref="XrSystemId">XrSystemId</see> retrieved by <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystem">xrGetSystem</see> is changed.
	/// </summary>
	/// <param name="xrSystem">The system id.</param>
	protected override void OnSystemChange(ulong xrSystem)
	{
		m_XrSystemId = xrSystem;
		sb.Clear().Append("OnSystemChange() " + m_XrSystemId); DEBUG(sb);
	}
	#endregion

	// The synchronization mode configured in the feature UI. Serialized by Unity, so the
	// field name must not change.
	[SerializeField]
	internal SynchronizationModeHTC m_SynchronizationMode = SynchronizationModeHTC.Stablized;

	/// <summary>
	/// Activate or deactivate the Frame Synchronization feature using the serialized
	/// <see cref="m_SynchronizationMode"/> as the <see cref="XrFrameSynchronizationModeHTC"/>.
	/// </summary>
	/// <param name="active">True for activate</param>
	private void ActivateFrameSynchronization(bool active)
	{
		sb.Clear().Append("ActivateFrameSynchronization() ").Append(active ? "enable " : "disable ").Append(m_SynchronizationMode); DEBUG(sb);
		ViveInterceptors.Instance.ActivateFrameSynchronization(active, (XrFrameSynchronizationModeHTC)m_SynchronizationMode);
	}

	/// <summary>
	/// Retrieves current frame synchronization mode.
	/// </summary>
	/// <returns>The mode of <see cref="SynchronizationModeHTC"/>.</returns>
	public SynchronizationModeHTC GetSynchronizationMode() { return m_SynchronizationMode; }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: cb48e1e4de80ea8498ba4e9ff34adc32
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,67 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Runtime.InteropServices;
namespace VIVE.OpenXR.FrameSynchronization
{
	/// <summary>
	/// The enum alias of <see cref="XrFrameSynchronizationModeHTC"/>.
	/// NOTE(review): "Stablized" is a misspelling of "Stabilized", but it is public API
	/// (and may appear in serialized feature settings), so it is kept for backward compatibility.
	/// </summary>
	public enum SynchronizationModeHTC : UInt32
	{
		// Maps to XR_FRAME_SYNCHRONIZATION_MODE_STABILIZED_HTC.
		Stablized = XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_STABILIZED_HTC,
		// Maps to XR_FRAME_SYNCHRONIZATION_MODE_PROMPT_HTC.
		Prompt = XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_PROMPT_HTC,
		//Adaptive = XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_ADAPTIVE_HTC,
	}

	// -------------------- 12.1. XR_HTC_frame_synchronization --------------------
	#region New Enums
	/// <summary>
	/// Frame synchronization modes defined by the XR_HTC_frame_synchronization extension.
	/// Values mirror the native enumeration; XR_FRAME_SYNCHRONIZATION_MODE_MAX_ENUM_HTC only
	/// forces a 32-bit representation.
	/// </summary>
	public enum XrFrameSynchronizationModeHTC : UInt32
	{
		XR_FRAME_SYNCHRONIZATION_MODE_STABILIZED_HTC = 1,
		XR_FRAME_SYNCHRONIZATION_MODE_PROMPT_HTC = 2,
		XR_FRAME_SYNCHRONIZATION_MODE_ADAPTIVE_HTC = 3,
		XR_FRAME_SYNCHRONIZATION_MODE_MAX_ENUM_HTC = 0x7FFFFFFF
	}
	#endregion

	#region New Structures
	/// <summary>
	/// Traditionally the runtime uses the latest frame, which causes jitter. With Frame Synchronization,
	/// the rendered frame will not be discarded, giving a smooth gameplay experience.
	/// However, if the GPU cannot consistently finish rendering on time (rendering more than one vsync at a time),
	/// jitter will still occur. Therefore, reducing GPU load is key to smooth gameplay.
	/// The application can use Frame Synchronization by chaining XrFrameSynchronizationSessionBeginInfoHTC
	/// on the next pointer of <see cref="XrSessionBeginInfo"/>.
	/// </summary>
	[StructLayout(LayoutKind.Sequential)]
	public struct XrFrameSynchronizationSessionBeginInfoHTC
	{
		/// <summary>
		/// The XrStructureType of this structure. It must be XR_TYPE_FRAME_SYNCHRONIZATION_SESSION_BEGIN_INFO_HTC.
		/// </summary>
		public XrStructureType type;
		/// <summary>
		/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
		/// </summary>
		public IntPtr next;
		/// <summary>
		/// The frame synchronization mode to be used in this session.
		/// </summary>
		public XrFrameSynchronizationModeHTC mode;

		/// <summary>
		/// Constructs the struct with explicit field values.
		/// </summary>
		/// <param name="in_type">Must be XR_TYPE_FRAME_SYNCHRONIZATION_SESSION_BEGIN_INFO_HTC.</param>
		/// <param name="in_next">Pointer to the next structure in the chain, or IntPtr.Zero.</param>
		/// <param name="in_mode">The frame synchronization mode for the session.</param>
		public XrFrameSynchronizationSessionBeginInfoHTC(XrStructureType in_type, IntPtr in_next, XrFrameSynchronizationModeHTC in_mode)
		{
			type = in_type;
			next = in_next;
			mode = in_mode;
		}

		/// <summary>
		/// A ready-to-use default: correct structure type, no chained structure, stabilized mode.
		/// </summary>
		public static XrFrameSynchronizationSessionBeginInfoHTC identity {
			get {
				return new XrFrameSynchronizationSessionBeginInfoHTC(
					XrStructureType.XR_TYPE_FRAME_SYNCHRONIZATION_SESSION_BEGIN_INFO_HTC,
					IntPtr.Zero,
					XrFrameSynchronizationModeHTC.XR_FRAME_SYNCHRONIZATION_MODE_STABILIZED_HTC);
			}
		}
	}
	#endregion
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a2877a2048174774b8e8698f159199e9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -19,7 +19,7 @@ The application should use
## VIVE Plugin
After adding the "VIVE Focus3 Hand Interaction" to "Project Settings > XR Plugin-in Management > OpenXR > Android Tab > Interaction Profiles", you can use the following Input Action Pathes.
After adding the "VIVE XR Hand Interaction" to "Project Settings > XR Plugin-in Management > OpenXR > Android Tab > Interaction Profiles", you can use the following Input Action Pathes.
### Left Hand
- <ViveHandInteraction>{LeftHand}/selectValue: Presents the left hand pinch strength.
@@ -29,4 +29,69 @@ After adding the "VIVE Focus3 Hand Interaction" to "Project Settings > XR Plugin
- <ViveHandInteraction>{RightHand}/selectValue: Presents the right hand pinch strength.
- <ViveHandInteraction>{RightHand}/pointerPose: Presents the right hand pinch pose.
Refer to the <VIVE OpenXR sample path>/Plugin/Input/ActionMap/InputActions.inputActions about the "Input Action Path" usage and the sample <VIVE OpenXR sample path>/Plugin/Input/OpenXRInput.unity.
Refer to the <VIVE OpenXR sample path>/Samples/Commons/ActionMap/InputActions.inputActions about the "Input Action Path" usage in the sample <VIVE OpenXR sample path>/Samples/Input/OpenXRInput.unity.
--------------------
# 12.31. XR_EXT_hand_interaction
## Name String
XR_EXT_hand_interaction
## Revision
1
## Hand Interaction Profile
### Interaction profile path:
- /interaction_profiles/ext/hand_interaction_ext
### Valid for user paths:
- /user/hand/left
- /user/hand/right
### Supported input source
- …/input/aim/pose
- …/input/aim_activate_ext/value: a 1D analog input component indicating that the user activated the action on the target that the user is pointing at with the aim pose.
- …/input/aim_activate_ext/ready_ext: a boolean input, where the value XR_TRUE indicates that the fingers to perform the "aim_activate" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing an "aim_activate" gesture.
- …/input/grip/pose
- …/input/grasp_ext/value: a 1D analog input component indicating that the user is making a fist.
- …/input/grasp_ext/ready_ext: a boolean input, where the value XR_TRUE indicates that the hand performing the grasp action is properly tracked by the hand tracking device and it is observed to be ready to perform or is performing the grasp action.
- …/input/pinch_ext/pose
- …/input/pinch_ext/value: a 1D analog input component indicating the extent to which the user is bringing their finger and thumb together to perform a "pinch" gesture.
- …/input/pinch_ext/ready_ext: a boolean input, where the value XR_TRUE indicates that the fingers used to perform the "pinch" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing a "pinch" gesture.
- …/input/poke_ext/pose
The …/input/aim/pose is typically used for aiming at objects out of arm's reach. When using a hand interaction profile, it is typically paired with …/input/aim_activate_ext/value to optimize aiming ray stability while performing the gesture. When using a controller interaction profile, the "aim" pose is typically paired with a trigger or a button for aim and fire operations.
The …/input/grip/pose is typically used for holding a large object in the user's hand. When using a hand interaction profile, it is typically paired with …/input/grasp_ext/value for the user to directly manipulate an object held in a hand. When using a controller interaction profile, the "grip" pose is typically paired with a "squeeze" button or trigger that gives the user the sense of tightly holding an object.
The …/input/pinch_ext/pose is typically used for directly manipulating a small object using the pinch gesture. When using a hand interaction profile, it is typically paired with the …/input/pinch_ext/value gesture. When using a controller interaction profile, it is typically paired with a trigger manipulated with the index finger, which typically requires curling the index finger and applying pressure with the fingertip.
The …/input/poke_ext/pose is typically used for contact-based interactions using the motion of the hand or fingertip. It typically does not pair with other hand gestures or buttons on the controller. The application typically uses a sphere collider with the "poke" pose to visualize the pose and detect touch with a virtual object.
## VIVE Plugin
After adding the "VIVE XR Hand Interaction Ext" to "Project Settings > XR Plug-in Management > OpenXR > Android Tab > Interaction Profiles", you can use the following Input Action Paths.
### Left Hand
- <ViveHandInteraction>{LeftHand}/pointerPose: Presents the left hand aim pose used for aiming at objects out of arm's reach.
- <ViveHandInteraction>{LeftHand}/pointerValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the aimed-at target is being fully interacted with left hand.
- <ViveHandInteraction>{LeftHand}/pointerReady: XR_TRUE indicates that the left fingers to perform the "aim_activate" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing an "aim_activate" gesture.
- <ViveHandInteraction>{LeftHand}/gripPose: Presents the left hand grip pose used for holding a large object in the user's hand.
- <ViveHandInteraction>{LeftHand}/gripValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the left fist is tightly closed.
- <ViveHandInteraction>{LeftHand}/gripReady: XR_TRUE indicates that the left hand performing the grasp action is properly tracked by the hand tracking device and it is observed to be ready to perform or is performing the grasp action.
- <ViveHandInteraction>{LeftHand}/pinchPose: Presents the left hand pinch pose used for directly manipulating a small object using the pinch gesture.
- <ViveHandInteraction>{LeftHand}/pinchValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the left finger and thumb are touching each other.
- <ViveHandInteraction>{LeftHand}/pinchReady: XR_TRUE indicates that the left fingers used to perform the "pinch" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing a "pinch" gesture.
- <ViveHandInteraction>{LeftHand}/pokePose: Presents the left hand poke pose used for contact-based interactions using the motion of the hand or fingertip.
### Right Hand
- <ViveHandInteraction>{RightHand}/pointerPose: Presents the right hand aim pose used for aiming at objects out of arm's reach.
- <ViveHandInteraction>{RightHand}/pointerValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the aimed-at target is being fully interacted with right hand.
- <ViveHandInteraction>{RightHand}/pointerReady: XR_TRUE indicates that the right fingers to perform the "aim_activate" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing an "aim_activate" gesture.
- <ViveHandInteraction>{RightHand}/gripPose: Presents the right hand grip pose used for holding a large object in the user's hand.
- <ViveHandInteraction>{RightHand}/gripValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the right fist is tightly closed.
- <ViveHandInteraction>{RightHand}/gripReady: XR_TRUE indicates that the right hand performing the grasp action is properly tracked by the hand tracking device and it is observed to be ready to perform or is performing the grasp action.
- <ViveHandInteraction>{RightHand}/pinchPose: Presents the right hand pinch pose used for directly manipulating a small object using the pinch gesture.
- <ViveHandInteraction>{RightHand}/pinchValue: Can be used as either a boolean or float action type, where the value XR_TRUE or 1.0f represents that the right finger and thumb are touching each other.
- <ViveHandInteraction>{RightHand}/pinchReady: XR_TRUE indicates that the right fingers used to perform the "pinch" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing a "pinch" gesture.
- <ViveHandInteraction>{RightHand}/pokePose: Presents the right hand poke pose used for contact-based interactions using the motion of the hand or fingertip.
Refer to the <VIVE OpenXR sample path>/Samples/HandInteractionExt/HandInteractionExt.inputActions about the "Input Action Path" usage in the sample <VIVE OpenXR sample path>/Samples/HandInteractionExt/HandInteractionExt.unity.

View File

@@ -31,10 +31,11 @@ namespace VIVE.OpenXR.Hand
/// </summary>
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Hand Interaction",
BuildTargetGroups = new[] { BuildTargetGroup.Android , BuildTargetGroup.Standalone},
Hidden = true,
BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
Company = "HTC",
Desc = "Support for enabling the hand interaction profile. Will register the controller map for hand interaction if enabled.",
DocumentationLink = "..\\Documentation",
Desc = "Support for enabling the VIVE hand interaction profile. Will register the controller map for hand interaction if enabled.",
DocumentationLink = "https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XR_HTC_hand_interaction",
Version = "1.0.0",
OpenxrExtensionStrings = kOpenxrExtensionString,
Category = FeatureCategory.Interaction,
@@ -42,6 +43,7 @@ namespace VIVE.OpenXR.Hand
#endif
public class ViveHandInteraction : OpenXRInteractionFeature
{
#region Log
const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandInteraction ";
StringBuilder m_sb = null;
StringBuilder sb {
@@ -50,8 +52,9 @@ namespace VIVE.OpenXR.Hand
return m_sb;
}
}
void DEBUG(StringBuilder msg) { Debug.Log(msg); }
void WARNING(StringBuilder msg) { Debug.LogWarning(msg); }
void DEBUG(StringBuilder msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
void WARNING(StringBuilder msg) { Debug.LogWarningFormat("{0} {1}", LOG_TAG, msg); }
#endregion
/// <summary>
/// OpenXR specification <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_HTC_hand_interaction">12.69. XR_HTC_hand_interaction</see>.
@@ -68,6 +71,7 @@ namespace VIVE.OpenXR.Hand
/// </summary>
private const string profile = "/interaction_profiles/htc/hand_interaction";
#region Supported component paths
private const string leftHand = "/user/hand_htc/left";
private const string rightHand = "/user/hand_htc/right";
@@ -85,21 +89,22 @@ namespace VIVE.OpenXR.Hand
/// <summary>
/// Constant for a pose interaction binding '.../input/aim/pose' OpenXR Input Binding. Used by input subsystem to bind actions to physical inputs.
/// </summary>
private const string pointerPose = "/input/aim/pose";
public const string pointerPose = "/input/aim/pose";
/// <summary>
/// Constant for a pose interaction binding '.../input/grip/pose' OpenXR Input Binding. Used by input subsystem to bind actions to physical inputs.
/// </summary>
public const string devicePose = "/input/grip/pose";
#endregion
[Preserve, InputControlLayout(displayName = "VIVE Hand Interaction (OpenXR)", commonUsages = new[] { "LeftHand", "RightHand" }, isGenericTypeOfDevice = true)]
public class HandInteractionDevice : OpenXRDevice
{
const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandInteraction.HandInteractionDevice";
void DEBUG(string msg) { Debug.Log(LOG_TAG + " " + msg); }
void DEBUG(string msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
/// <summary>
/// A [AxisControl](xref:UnityEngine.InputSystem.Controls.AxisControl) that represents the <see cref="ViveHandInteraction.selectValue"/> OpenXR binding.
/// A <see cref="AxisControl"/> representing the <see cref="ViveHandInteraction.selectValue"/> OpenXR binding.
/// </summary>
[Preserve, InputControl(aliases = new[] { "selectAxis, pinchStrength" }, usage = "Select")]
public AxisControl selectValue { get; private set; }
@@ -122,29 +127,29 @@ namespace VIVE.OpenXR.Hand
[Preserve, InputControl(offset = 0, alias = "aimPose", usage = "Pointer")]
public PoseControl pointerPose { get; private set; }
/// <summary>
/// A [ButtonControl](xref:UnityEngine.InputSystem.Controls.ButtonControl) required for backwards compatibility with the XRSDK layouts. This represents the overall tracking state of the device. This value is equivalent to mapping devicePose/isTracked.
/// </summary>
[Preserve, InputControl(offset = 8, usage = "IsTracked")]
public ButtonControl isTracked { get; private set; }
/// <summary>
/// A [ButtonControl](xref:UnityEngine.InputSystem.Controls.ButtonControl) required for backwards compatibility with the XRSDK layouts. This represents the overall tracking state of the device. This value is equivalent to mapping devicePose/isTracked.
/// </summary>
[Preserve, InputControl(offset = 8)]
public ButtonControl isTracked { get; private set; }
/// <summary>
/// A [IntegerControl](xref:UnityEngine.InputSystem.Controls.IntegerControl) required for backwards compatibility with the XRSDK layouts. This represents the bit flag set to indicate what data is valid. This value is equivalent to mapping devicePose/trackingState.
/// </summary>
[Preserve, InputControl(offset = 12, usage = "TrackingState")]
public IntegerControl trackingState { get; private set; }
/// <summary>
/// A [IntegerControl](xref:UnityEngine.InputSystem.Controls.IntegerControl) required for backwards compatibility with the XRSDK layouts. This represents the bit flag set to indicate what data is valid. This value is equivalent to mapping devicePose/trackingState.
/// </summary>
[Preserve, InputControl(offset = 12)]
public IntegerControl trackingState { get; private set; }
/// <summary>
/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the device position. For the VIVE Focus 3 device, this is both the device and the pointer position. This value is equivalent to mapping devicePose/position.
/// </summary>
[Preserve, InputControl(offset = 16, alias = "gripPosition")]
public Vector3Control devicePosition { get; private set; }
/// <summary>
/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the device position. This value is equivalent to mapping devicePose/position.
/// </summary>
[Preserve, InputControl(offset = 16, alias = "gripPosition")]
public Vector3Control devicePosition { get; private set; }
/// <summary>
/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the device orientation. For the VIVE Focus 3 device, this is both the device and the pointer rotation. This value is equivalent to mapping devicePose/rotation.
/// </summary>
[Preserve, InputControl(offset = 28, alias = "gripOrientation")]
public QuaternionControl deviceRotation { get; private set; }
/// <summary>
/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the device orientation. This value is equivalent to mapping devicePose/rotation.
/// </summary>
[Preserve, InputControl(offset = 28, alias = "gripOrientation")]
public QuaternionControl deviceRotation { get; private set; }
/// <summary>
@@ -184,16 +189,15 @@ namespace VIVE.OpenXR.Hand
/// <returns>True for valid <see cref="XrInstance">XrInstance</see></returns>
protected override bool OnInstanceCreate(ulong xrInstance)
{
// Requires the eye tracking extension
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
{
sb.Clear().Append(LOG_TAG).Append("OnInstanceCreate() ").Append(kOpenxrExtensionString).Append(" is NOT enabled."); WARNING(sb);
sb.Clear().Append("OnInstanceCreate() ").Append(kOpenxrExtensionString).Append(" is NOT enabled."); WARNING(sb);
return false;
}
m_XrInstanceCreated = true;
m_XrInstance = xrInstance;
sb.Clear().Append(LOG_TAG).Append("OnInstanceCreate() " + m_XrInstance); DEBUG(sb);
sb.Clear().Append("OnInstanceCreate() " + m_XrInstance); DEBUG(sb);
return base.OnInstanceCreate(xrInstance);
}
@@ -205,14 +209,12 @@ namespace VIVE.OpenXR.Hand
/// </summary>
protected override void RegisterDeviceLayout()
{
sb.Clear().Append(LOG_TAG).Append("RegisterDeviceLayout() Layout: ").Append(kLayoutName)
.Append(", Product: ").Append(kDeviceLocalizedName);
DEBUG(sb);
sb.Clear().Append("RegisterDeviceLayout() ").Append(kLayoutName).Append(", product: ").Append(kDeviceLocalizedName); DEBUG(sb);
InputSystem.RegisterLayout(typeof(HandInteractionDevice),
kLayoutName,
matches: new InputDeviceMatcher()
.WithInterface(XRUtilities.InterfaceMatchAnyVersion)
.WithProduct(kDeviceLocalizedName));
kLayoutName,
matches: new InputDeviceMatcher()
.WithInterface(XRUtilities.InterfaceMatchAnyVersion)
.WithProduct(kDeviceLocalizedName));
}
/// <summary>
@@ -220,16 +222,36 @@ namespace VIVE.OpenXR.Hand
/// </summary>
protected override void UnregisterDeviceLayout()
{
sb.Clear().Append(LOG_TAG).Append("UnregisterDeviceLayout() ").Append(kLayoutName); DEBUG(sb);
sb.Clear().Append("UnregisterDeviceLayout() ").Append(kLayoutName); DEBUG(sb);
InputSystem.RemoveLayout(kLayoutName);
}
#if UNITY_XR_OPENXR_1_9_1
/// <summary>
/// Return interaction profile type. HandInteractionDevice profile is Device type.
/// </summary>
/// <returns>Interaction profile type.</returns>
protected override InteractionProfileType GetInteractionProfileType()
{
return typeof(HandInteractionDevice).IsSubclassOf(typeof(XRController)) ? InteractionProfileType.XRController : InteractionProfileType.Device;
}
/// <summary>
/// Return device layer out string used for registering device HandInteractionDevice in InputSystem.
/// </summary>
/// <returns>Device layout string.</returns>
protected override string GetDeviceLayoutName()
{
return kLayoutName;
}
#endif
/// <summary>
/// Registers action maps to Unity XR.
/// </summary>
protected override void RegisterActionMapsWithRuntime()
{
sb.Clear().Append(LOG_TAG).Append("RegisterActionMapsWithRuntime() Action map vivehandinteraction")
sb.Clear().Append("RegisterActionMapsWithRuntime() Action map vivehandinteraction")
.Append(", localizedName: ").Append(kDeviceLocalizedName)
.Append(", desiredInteractionProfile").Append(profile);
DEBUG(sb);

View File

@@ -0,0 +1,585 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEngine.Scripting;
using UnityEngine.XR.OpenXR.Features;
using UnityEngine.InputSystem.Layouts;
using UnityEngine.InputSystem.XR;
using UnityEngine.InputSystem.Controls;
using UnityEngine.XR.OpenXR;
using UnityEngine;
using UnityEngine.InputSystem;
using System.Collections.Generic;
using UnityEngine.XR;
using UnityEngine.XR.OpenXR.Input;
using System.Text;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
#if USE_INPUT_SYSTEM_POSE_CONTROL // Scripting Define Symbol added by using OpenXR Plugin 1.6.0.
using PoseControl = UnityEngine.InputSystem.XR.PoseControl;
#else
using PoseControl = UnityEngine.XR.OpenXR.Input.PoseControl;
#endif
namespace VIVE.OpenXR.Hand
{
/// <summary>
/// This <see cref="OpenXRInteractionFeature"/> enables the use of hand interaction profiles in OpenXR. It enables <see cref="ViveHandInteractionExt.kOpenxrExtensionString">XR_EXT_hand_interaction</see> in the underlying runtime.
/// </summary>
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Hand Interaction Ext",
	Hidden = true,
	BuildTargetGroups = new[] { BuildTargetGroup.Android },
	Company = "HTC",
	Desc = "Support for enabling the KHR hand interaction profile. Will register the controller map for hand interaction if enabled.",
	DocumentationLink = "https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XR_EXT_hand_interaction",
	Version = "1.0.0",
	OpenxrExtensionStrings = kOpenxrExtensionString,
	Category = FeatureCategory.Interaction,
	FeatureId = featureId)]
#endif
public class ViveHandInteractionExt : OpenXRInteractionFeature
{
	#region Log
	const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandInteractionExt";
	// Lazily-created StringBuilder shared by all log helpers; callers Clear() it before appending.
	StringBuilder m_sb = null;
	StringBuilder sb {
		get {
			if (m_sb == null) { m_sb = new StringBuilder(); }
			return m_sb;
		}
	}
	// Log helpers prefix every message with LOG_TAG so runtime logs can be filtered per feature.
	void DEBUG(StringBuilder msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
	void WARNING(StringBuilder msg) { Debug.LogWarningFormat("{0} {1}", LOG_TAG, msg); }
	#endregion

	/// <summary>
	/// OpenXR extension name enabled by this feature: <see href="https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XR_EXT_hand_interaction">XR_EXT_hand_interaction</see>.
	/// </summary>
	public const string kOpenxrExtensionString = "XR_EXT_hand_interaction";
	/// <summary>
	/// The feature id string. This is used to give the feature a well known id for reference.
	/// </summary>
	public const string featureId = "vive.openxr.feature.hand.interaction.ext";

	/// <summary>
	/// Input System device layout describing the controls exposed by the XR_EXT_hand_interaction profile.
	/// The explicit <c>offset</c> values in the <see cref="InputControlAttribute"/>s define the device's state-buffer layout and must not be changed independently.
	/// </summary>
	[Preserve, InputControlLayout(displayName = "VIVE Hand Interaction Ext (OpenXR)", commonUsages = new[] { "LeftHand", "RightHand" })]
	public class HandInteractionExtDevice : XRController
	{
		#region Log
		const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandInteractionExt.HandInteractionExtDevice";
		void DEBUG(string msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
		#endregion

		#region Action Path
		/// <summary>
		/// A <see cref="PoseControl"/> representing the <see cref="ViveHandInteractionExt.grip"/> OpenXR binding.
		/// </summary>
		[Preserve, InputControl(offset = 0, aliases = new[] { "device", "gripPose" }, usage = "Device")]
		public PoseControl devicePose { get; private set; }
		/// <summary>
		/// A <see cref="PoseControl"/> representing the <see cref="ViveHandInteractionExt.aim"/> OpenXR binding.
		/// </summary>
		[Preserve, InputControl(offset = 0, alias = "aimPose", usage = "Pointer")]
		public PoseControl pointer { get; private set; }
		/// <summary>
		/// A <see cref="PoseControl"/> representing the <see cref="ViveHandInteractionExt.pinchPose"/> OpenXR binding.
		/// </summary>
		[Preserve, InputControl(offset = 0, usage = "Pinch")]
		public PoseControl pinchPose { get; private set; }
		/// <summary>
		/// A <see cref="PoseControl"/> representing the <see cref="ViveHandInteractionExt.poke"/> OpenXR binding.
		/// </summary>
		[Preserve, InputControl(offset = 0, alias = "indexTip", usage = "Poke")]
		public PoseControl pokePose { get; private set; }
		/// <summary>
		/// A <see cref="AxisControl"/> representing information from the <see cref="ViveHandInteractionExt.graspValue"/> OpenXR binding.
		/// </summary>
		[Preserve, InputControl(aliases = new[] { "gripValue" }, usage = "GraspValue")]
		public AxisControl graspValue { get; private set; }
		/// <summary>
		/// A <see cref="ButtonControl"/> representing the <see cref="ViveHandInteractionExt.graspReady"/> OpenXR bindings, depending on handedness.
		/// </summary>
		[Preserve, InputControl(aliases = new[] { "isGrasped", "isGripped" }, usage = "GraspReady")]
		public ButtonControl graspReady { get; private set; }
		/// <summary>
		/// A <see cref="AxisControl"/> representing information from the <see cref="ViveHandInteractionExt.pointerActivateValue"/> OpenXR binding.
		/// </summary>
		[Preserve, InputControl(aliases = new[] { "pointerValue" }, usage = "PointerActivateValue")]
		public AxisControl pointerActivateValue { get; private set; }
		/// <summary>
		/// A <see cref="ButtonControl"/> representing the <see cref="ViveHandInteractionExt.pointerActivateReady"/> OpenXR bindings, depending on handedness.
		/// </summary>
		[Preserve, InputControl(aliases = new[] { "isPointed", "pointerReady" }, usage = "PointerActivateReady")]
		public ButtonControl pointerActivateReady { get; private set; }
		/// <summary>
		/// A <see cref="AxisControl"/> representing information from the <see cref="ViveHandInteractionExt.pinchValue"/> OpenXR binding.
		/// </summary>
		[Preserve, InputControl(usage = "PinchValue")]
		public AxisControl pinchValue { get; private set; }
		/// <summary>
		/// A <see cref="ButtonControl"/> representing the <see cref="ViveHandInteractionExt.pinchReady"/> OpenXR bindings, depending on handedness.
		/// </summary>
		[Preserve, InputControl(aliases = new[] { "isPinched" }, usage = "PinchReady")]
		public ButtonControl pinchReady { get; private set; }
		/// <summary>
		/// A [ButtonControl](xref:UnityEngine.InputSystem.Controls.ButtonControl) required for backwards compatibility with the XRSDK layouts. This represents the overall tracking state of the device. This value is equivalent to mapping devicePose/isTracked.
		/// </summary>
		[Preserve, InputControl(offset = 2)]
		new public ButtonControl isTracked { get; private set; }
		/// <summary>
		/// A [IntegerControl](xref:UnityEngine.InputSystem.Controls.IntegerControl) required for backwards compatibility with the XRSDK layouts. This represents the bit flag set to indicate what data is valid. This value is equivalent to mapping devicePose/trackingState.
		/// </summary>
		[Preserve, InputControl(offset = 4)]
		new public IntegerControl trackingState { get; private set; }
		/// <summary>
		/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the device position. This value is equivalent to mapping devicePose/position.
		/// </summary>
		[Preserve, InputControl(offset = 8, noisy = true, alias = "gripPosition")]
		new public Vector3Control devicePosition { get; private set; }
		/// <summary>
		/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the device orientation. This value is equivalent to mapping devicePose/rotation.
		/// </summary>
		[Preserve, InputControl(offset = 20, noisy = true, alias = "gripRotation")]
		new public QuaternionControl deviceRotation { get; private set; }
		/// <summary>
		/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the aim position. This value is equivalent to mapping pointer/position.
		/// </summary>
		[Preserve, InputControl(offset = 72, noisy = true)]
		public Vector3Control pointerPosition { get; private set; }
		/// <summary>
		/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the aim orientation. This value is equivalent to mapping pointer/rotation.
		/// </summary>
		[Preserve, InputControl(offset = 84, noisy = true)]
		public QuaternionControl pointerRotation { get; private set; }
		/// <summary>
		/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the pinch position. This value is equivalent to mapping pinchPose/position.
		/// </summary>
		[Preserve, InputControl(offset = 136, noisy = true)]
		public Vector3Control pinchPosition { get; private set; }
		/// <summary>
		/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the pinch orientation. This value is equivalent to mapping pinchPose/rotation.
		/// </summary>
		[Preserve, InputControl(offset = 148, noisy = true)]
		public QuaternionControl pinchRotation { get; private set; }
		/// <summary>
		/// A [Vector3Control](xref:UnityEngine.InputSystem.Controls.Vector3Control) required for backwards compatibility with the XRSDK layouts. This is the poke position. This value is equivalent to mapping pokePose/position.
		/// </summary>
		[Preserve, InputControl(offset = 200, noisy = true)]
		public Vector3Control pokePosition { get; private set; }
		/// <summary>
		/// A [QuaternionControl](xref:UnityEngine.InputSystem.Controls.QuaternionControl) required for backwards compatibility with the XRSDK layouts. This is the poke orientation. This value is equivalent to mapping pokePose/rotation.
		/// </summary>
		[Preserve, InputControl(offset = 212, noisy = true)]
		public QuaternionControl pokeRotation { get; private set; }
		#endregion

		/// <summary>
		/// Internal call used to assign controls to the correct element.
		/// </summary>
		protected override void FinishSetup()
		{
			DEBUG("FinishSetup() interfaceName: " + description.interfaceName
				+ ", deviceClass: " + description.deviceClass
				+ ", product: " + description.product
				+ ", serial: " + description.serial
				+ ", version: " + description.version);

			base.FinishSetup();

			// Resolve the profile's action-path controls declared above.
			// NOTE(review): the backwards-compatibility controls (isTracked, trackingState,
			// devicePosition/Rotation, pointer/pinch/poke position/rotation) are never fetched
			// here via GetChildControl — confirm whether they are expected to resolve implicitly
			// through their offsets or should also be assigned.
			pointer = GetChildControl<PoseControl>("pointer");
			pointerActivateValue = GetChildControl<AxisControl>("pointerActivateValue");
			pointerActivateReady = GetChildControl<ButtonControl>("pointerActivateReady");

			devicePose = GetChildControl<PoseControl>("devicePose");
			graspValue = GetChildControl<AxisControl>("graspValue");
			graspReady = GetChildControl<ButtonControl>("graspReady");

			pinchPose = GetChildControl<PoseControl>("pinchPose");
			pinchValue = GetChildControl<AxisControl>("pinchValue");
			pinchReady = GetChildControl<ButtonControl>("pinchReady");

			pokePose = GetChildControl<PoseControl>("pokePose");
		}
	}

	/// <summary>
	/// The interaction profile string used to reference the hand interaction input device.
	/// </summary>
	public const string profile = "/interaction_profiles/ext/hand_interaction_ext";

	#region Supported component paths
	/// <summary>
	/// Constant for a pose interaction binding '.../input/aim/pose' OpenXR Input Binding.<br></br>
	/// Typically used for aiming at objects out of arm's reach. When using a hand interaction profile, it is typically paired with <see cref="pointerActivateValue"/> to optimize aiming ray stability while performing the gesture.<br></br>
	/// When using a controller interaction profile, the "aim" pose is typically paired with a trigger or a button for aim and fire operations.
	/// </summary>
	public const string aim = "/input/aim/pose";
	/// <summary>
	/// Constant for a float interaction binding '.../input/aim_activate_ext/value' OpenXR Input Binding.<br></br>
	/// A 1D analog input component indicating that the user activated the action on the target that the user is pointing at with the aim pose.
	/// </summary>
	public const string pointerActivateValue = "/input/aim_activate_ext/value";
	/// <summary>
	/// Constant for a boolean interaction binding '.../input/aim_activate_ext/ready_ext' OpenXR Input Binding.<br></br>
	/// A boolean input, where the value XR_TRUE indicates that the fingers to perform the "aim_activate" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing an "aim_activate" gesture.
	/// </summary>
	public const string pointerActivateReady = "/input/aim_activate_ext/ready_ext";
	/// <summary>
	/// Constant for a pose interaction binding '.../input/grip/pose' OpenXR Input Binding.<br></br>
	/// Typically used for holding a large object in the user's hand. When using a hand interaction profile, it is typically paired with <see cref="graspValue"/> for the user to directly manipulate an object held in a hand.<br></br>
	/// When using a controller interaction profile, the "grip" pose is typically paired with a "squeeze" button or trigger that gives the user the sense of tightly holding an object.
	/// </summary>
	public const string grip = "/input/grip/pose";
	/// <summary>
	/// Constant for a float interaction binding '.../input/grasp_ext/value' OpenXR Input Binding.<br></br>
	/// A 1D analog input component indicating that the user is making a fist.
	/// </summary>
	public const string graspValue = "/input/grasp_ext/value";
	/// <summary>
	/// Constant for a boolean interaction binding '.../input/grasp_ext/ready_ext' OpenXR Input Binding.<br></br>
	/// A boolean input, where the value XR_TRUE indicates that the hand performing the grasp action is properly tracked by the hand tracking device and it is observed to be ready to perform or is performing the grasp action.
	/// </summary>
	public const string graspReady = "/input/grasp_ext/ready_ext";
	/// <summary>
	/// Constant for a pose interaction binding '.../input/pinch_ext/pose' OpenXR Input Binding.<br></br>
	/// Typically used for directly manipulating a small object using the pinch gesture. When using a hand interaction profile, it is typically paired with the <see cref="pinchValue"/>.<br></br>
	/// When using a controller interaction profile, it is typically paired with a trigger manipulated with the index finger, which typically requires curling the index finger and applying pressure with the fingertip.
	/// </summary>
	public const string pinchPose = "/input/pinch_ext/pose";
	/// <summary>
	/// Constant for a float interaction binding '.../input/pinch_ext/value' OpenXR Input Binding.<br></br>
	/// A 1D analog input component indicating the extent which the user is bringing their finger and thumb together to perform a "pinch" gesture.
	/// </summary>
	public const string pinchValue = "/input/pinch_ext/value";
	/// <summary>
	/// Constant for a boolean interaction binding '.../input/pinch_ext/ready_ext' OpenXR Input Binding.<br></br>
	/// A boolean input, where the value XR_TRUE indicates that the fingers used to perform the "pinch" gesture are properly tracked by the hand tracking device and the hand shape is observed to be ready to perform or is performing a "pinch" gesture.
	/// </summary>
	public const string pinchReady = "/input/pinch_ext/ready_ext";
	/// <summary>
	/// Constant for a pose interaction binding '.../input/poke_ext/pose' OpenXR Input Binding.<br></br>
	/// Typically used for contact-based interactions using the motion of the hand or fingertip. It typically does not pair with other hand gestures or buttons on the controller. The application typically uses a sphere collider with the "poke" pose to visualize the pose and detect touch with a virtual object.
	/// </summary>
	public const string poke = "/input/poke_ext/pose";
	#endregion

	// Set in OnInstanceCreate; kept for lifetime bookkeeping even though nothing reads it yet.
#pragma warning disable
	private bool m_XrInstanceCreated = false;
#pragma warning restore
	// Handle of the XrInstance received from the runtime; 0 until OnInstanceCreate succeeds.
	private XrInstance m_XrInstance = 0;
	/// <summary>
	/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateInstance">xrCreateInstance</see> is done.
	/// </summary>
	/// <param name="xrInstance">The created instance.</param>
	/// <returns>True for valid <see cref="XrInstance">XrInstance</see></returns>
	protected override bool OnInstanceCreate(ulong xrInstance)
	{
		// The feature is unusable unless the runtime enabled XR_EXT_hand_interaction.
		if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
		{
			sb.Clear().Append("OnInstanceCreate() ").Append(kOpenxrExtensionString).Append(" is NOT enabled."); WARNING(sb);
			return false;
		}

		m_XrInstanceCreated = true;
		m_XrInstance = xrInstance;
		sb.Clear().Append("OnInstanceCreate() " + m_XrInstance); DEBUG(sb);

		return base.OnInstanceCreate(xrInstance);
	}

	// Layout name registered with the Input System and the product name used for device matching.
	private const string kLayoutName = "ViveHandInteractionExt";
	private const string kDeviceLocalizedName = "Vive Hand Interaction Ext OpenXR";
	/// <summary>
	/// Registers the <see cref="HandInteractionExtDevice"/> layout with the Input System.
	/// </summary>
	protected override void RegisterDeviceLayout()
	{
		sb.Clear().Append("RegisterDeviceLayout() ").Append(kLayoutName).Append(", product: ").Append(kDeviceLocalizedName); DEBUG(sb);
		InputSystem.RegisterLayout(typeof(HandInteractionExtDevice),
					kLayoutName,
					matches: new InputDeviceMatcher()
					.WithInterface(XRUtilities.InterfaceMatchAnyVersion)
					.WithProduct(kDeviceLocalizedName));
	}

	/// <summary>
	/// Removes the <see cref="HandInteractionExtDevice"/> layout from the Input System.
	/// </summary>
	protected override void UnregisterDeviceLayout()
	{
		sb.Clear().Append("UnregisterDeviceLayout() ").Append(kLayoutName); DEBUG(sb);
		InputSystem.RemoveLayout(kLayoutName);
	}

#if UNITY_XR_OPENXR_1_9_1
	/// <summary>
	/// Return interaction profile type. HandInteractionExtDevice profile is Device type.
	/// </summary>
	/// <returns>Interaction profile type.</returns>
	protected override InteractionProfileType GetInteractionProfileType()
	{
		// HandInteractionExtDevice derives from XRController, so this resolves to XRController at runtime.
		return typeof(HandInteractionExtDevice).IsSubclassOf(typeof(XRController)) ? InteractionProfileType.XRController : InteractionProfileType.Device;
	}

	/// <summary>
	/// Return the device layout string used for registering device HandInteractionExtDevice in InputSystem.
	/// </summary>
	/// <returns>Device layout string.</returns>
	protected override string GetDeviceLayoutName()
	{
		return kLayoutName;
	}
#endif

	/// <summary>
	/// Registers action maps to Unity XR. Maps each OpenXR component path declared above
	/// to the corresponding control of <see cref="HandInteractionExtDevice"/>, for both hands.
	/// </summary>
	protected override void RegisterActionMapsWithRuntime()
	{
		sb.Clear().Append("RegisterActionMapsWithRuntime() Action map vivehandinteractionext")
			.Append(", localizedName: ").Append(kDeviceLocalizedName)
			.Append(", desiredInteractionProfile").Append(profile);
		DEBUG(sb);

		ActionMapConfig actionMap = new ActionMapConfig()
		{
			name = "vivehandinteractionext",
			localizedName = kDeviceLocalizedName,
			desiredInteractionProfile = profile,
			manufacturer = "HTC",
			serialNumber = "",
			// One device config per hand; both report as tracked hand-tracking devices.
			deviceInfos = new List<DeviceConfig>()
			{
				new DeviceConfig()
				{
					characteristics = (InputDeviceCharacteristics)(InputDeviceCharacteristics.HandTracking | InputDeviceCharacteristics.HeldInHand | InputDeviceCharacteristics.TrackedDevice | InputDeviceCharacteristics.Left),
					userPath = UserPaths.leftHand
				},
				new DeviceConfig()
				{
					characteristics = (InputDeviceCharacteristics)(InputDeviceCharacteristics.HandTracking | InputDeviceCharacteristics.HeldInHand | InputDeviceCharacteristics.TrackedDevice | InputDeviceCharacteristics.Right),
					userPath = UserPaths.rightHand
				}
			},
			actions = new List<ActionConfig>()
			{
				// Grip Pose
				new ActionConfig()
				{
					name = "devicePose",
					localizedName = "Grasp Pose",
					type = ActionType.Pose,
					usages = new List<string>()
					{
						"Device"
					},
					bindings = new List<ActionBinding>()
					{
						new ActionBinding()
						{
							interactionPath = grip,
							interactionProfileName = profile,
						}
					}
				},
				// Grip Value
				new ActionConfig()
				{
					name = "graspValue",
					localizedName = "Grip Axis",
					type = ActionType.Axis1D,
					usages = new List<string>()
					{
						"GraspValue"
					},
					bindings = new List<ActionBinding>()
					{
						new ActionBinding()
						{
							interactionPath = graspValue,
							interactionProfileName = profile,
						}
					}
				},
				// Grip Ready
				new ActionConfig()
				{
					name = "graspReady",
					localizedName = "Is Grasped",
					type = ActionType.Binary,
					usages = new List<string>()
					{
						"GraspReady"
					},
					bindings = new List<ActionBinding>()
					{
						new ActionBinding()
						{
							interactionPath = graspReady,
							interactionProfileName = profile,
						},
					}
				},
				// Aim Pose
				new ActionConfig()
				{
					name = "pointer",
					localizedName = "Aim Pose",
					type = ActionType.Pose,
					usages = new List<string>()
					{
						"Pointer"
					},
					bindings = new List<ActionBinding>()
					{
						new ActionBinding()
						{
							interactionPath = aim,
							interactionProfileName = profile,
						}
					}
				},
				// Aim Value
				new ActionConfig()
				{
					name = "pointerActivateValue",
					localizedName = "Pointer Axis",
					type = ActionType.Axis1D,
					usages = new List<string>()
					{
						"PointerActivateValue"
					},
					bindings = new List<ActionBinding>()
					{
						new ActionBinding()
						{
							interactionPath = pointerActivateValue,
							interactionProfileName = profile,
						}
					}
				},
				// Aim Ready
				new ActionConfig()
				{
					name = "pointerActivateReady",
					localizedName = "Is Pointed",
					type = ActionType.Binary,
					usages = new List<string>()
					{
						"PointerActivateReady"
					},
					bindings = new List<ActionBinding>()
					{
						new ActionBinding()
						{
							interactionPath = pointerActivateReady,
							interactionProfileName = profile,
						},
					}
				},
				// Pinch Pose
				new ActionConfig()
				{
					name = "pinchPose",
					localizedName = "Pinch Pose",
					type = ActionType.Pose,
					usages = new List<string>()
					{
						"Pinch"
					},
					bindings = new List<ActionBinding>()
					{
						new ActionBinding()
						{
							interactionPath = pinchPose,
							interactionProfileName = profile,
						}
					}
				},
				// Pinch Value
				new ActionConfig()
				{
					name = "pinchValue",
					localizedName = "Pinch Axis",
					type = ActionType.Axis1D,
					usages = new List<string>()
					{
						"PinchValue"
					},
					bindings = new List<ActionBinding>()
					{
						new ActionBinding()
						{
							interactionPath = pinchValue,
							interactionProfileName = profile,
						}
					}
				},
				// Pinch Ready
				new ActionConfig()
				{
					name = "pinchReady",
					localizedName = "Is Pinched",
					type = ActionType.Binary,
					usages = new List<string>()
					{
						"PinchReady"
					},
					bindings = new List<ActionBinding>()
					{
						new ActionBinding()
						{
							interactionPath = pinchReady,
							interactionProfileName = profile,
						},
					}
				},
				// Poke Pose
				new ActionConfig()
				{
					name = "pokePose",
					localizedName = "Index Tip",
					type = ActionType.Pose,
					usages = new List<string>()
					{
						"Poke"
					},
					bindings = new List<ActionBinding>()
					{
						new ActionBinding()
						{
							interactionPath = poke,
							interactionProfileName = profile,
						}
					}
				},
			}
		};
		AddActionMap(actionMap);
	}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e1477dbc8916dff4f8e21fc343efcd46
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,14 +1,18 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEngine;
using UnityEngine.XR;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
using UnityEngine;
using System.Runtime.InteropServices;
using System;
using System.Linq;
using UnityEngine.XR;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using AOT;
using UnityEngine.InputSystem;
using UnityEngine.InputSystem.LowLevel;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
@@ -18,7 +22,7 @@ namespace VIVE.OpenXR.Hand
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Hand Tracking",
BuildTargetGroups = new[] { BuildTargetGroup.Android , BuildTargetGroup.Standalone },
BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
Company = "HTC",
Desc = "Support the Hand Tracking extension.",
DocumentationLink = "..\\Documentation",
@@ -28,10 +32,23 @@ namespace VIVE.OpenXR.Hand
#endif
public class ViveHandTracking : OpenXRFeature
{
const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandTracking";
void DEBUG(string msg) { Debug.Log(LOG_TAG + " " + msg); }
void WARNING(string msg) { Debug.LogWarning(LOG_TAG + " " + msg); }
void ERROR(string msg) { Debug.LogError(LOG_TAG + " " + msg); }
#region Log
const string LOG_TAG = "VIVE.OpenXR.Hand.ViveHandTracking ";
StringBuilder m_sb = null;
StringBuilder sb
{
get
{
if (m_sb == null) { m_sb = new StringBuilder(); }
return m_sb;
}
}
void DEBUG(String msg) { Debug.Log(LOG_TAG + msg); }
void DEBUG(StringBuilder msg) { Debug.Log(msg); }
void WARNING(StringBuilder msg) { Debug.LogWarning(msg); }
void ERROR(String msg) { Debug.LogError(LOG_TAG + msg); }
void ERROR(StringBuilder msg) { Debug.LogError(msg); }
#endregion
/// <summary>
/// OpenXR specification <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_EXT_hand_tracking">12.29 XR_EXT_hand_tracking</see>.
@@ -99,13 +116,14 @@ namespace VIVE.OpenXR.Hand
{
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
{
WARNING("OnInstanceCreate() " + kOpenxrExtensionString + " is NOT enabled.");
sb.Clear().Append(LOG_TAG).Append("OnInstanceCreate() ").Append(kOpenxrExtensionString).Append(" is NOT enabled."); WARNING(sb);
return false;
}
m_XrInstanceCreated = true;
m_XrInstance = xrInstance;
DEBUG("OnInstanceCreate() " + m_XrInstance);
InputSystem.onAfterUpdate += UpdateCallback;
sb.Clear().Append(LOG_TAG).Append("OnInstanceCreate() ").Append(m_XrInstance); DEBUG(sb);
return GetXrFunctionDelegates(m_XrInstance);
}
@@ -115,9 +133,13 @@ namespace VIVE.OpenXR.Hand
/// <param name="xrInstance">The instance to destroy.</param>
protected override void OnInstanceDestroy(ulong xrInstance)
{
m_XrInstanceCreated = false;
m_XrInstance = 0;
DEBUG("OnInstanceDestroy() " + xrInstance);
if (m_XrInstance == xrInstance)
{
m_XrInstanceCreated = false;
m_XrInstance = 0;
InputSystem.onAfterUpdate -= UpdateCallback;
}
sb.Clear().Append(LOG_TAG).Append("OnInstanceDestroy() ").Append(xrInstance); DEBUG(sb);
}
private XrSystemId m_XrSystemId = 0;
@@ -128,7 +150,7 @@ namespace VIVE.OpenXR.Hand
protected override void OnSystemChange(ulong xrSystem)
{
m_XrSystemId = xrSystem;
DEBUG("OnSystemChange() " + m_XrSystemId);
sb.Clear().Append(LOG_TAG).Append("OnSystemChange() ").Append(m_XrSystemId); DEBUG(sb);
}
private bool m_XrSessionCreated = false;
@@ -146,7 +168,7 @@ namespace VIVE.OpenXR.Hand
{
m_XrSession = xrSession;
m_XrSessionCreated = true;
DEBUG("OnSessionCreate() " + m_XrSession);
sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() ").Append(m_XrSession); DEBUG(sb);
// Enumerate supported reference space types and create the XrSpace.
XrReferenceSpaceType[] spaces = new XrReferenceSpaceType[Enum.GetNames(typeof(XrReferenceSpaceType)).Count()];
@@ -158,7 +180,7 @@ namespace VIVE.OpenXR.Hand
spaces: out spaces[0]) == XrResult.XR_SUCCESS)
#pragma warning restore 0618
{
DEBUG("OnSessionCreate() spaceCountOutput: " + spaceCountOutput);
sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() spaceCountOutput: ").Append(spaceCountOutput); DEBUG(sb);
Array.Resize(ref spaces, (int)spaceCountOutput);
#pragma warning disable 0618
@@ -186,7 +208,7 @@ namespace VIVE.OpenXR.Hand
#pragma warning restore 0618
{
hasReferenceSpaceLocal = true;
DEBUG("OnSessionCreate() CreateReferenceSpace LOCAL: " + m_ReferenceSpaceLocal);
sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() CreateReferenceSpace LOCAL: ").Append(m_ReferenceSpaceLocal); DEBUG(sb);
}
else
{
@@ -210,7 +232,7 @@ namespace VIVE.OpenXR.Hand
#pragma warning restore 0618
{
hasReferenceSpaceStage = true;
DEBUG("OnSessionCreate() CreateReferenceSpace STAGE: " + m_ReferenceSpaceStage);
sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() CreateReferenceSpace STAGE: ").Append(m_ReferenceSpaceStage); DEBUG(sb);
}
else
{
@@ -220,7 +242,7 @@ namespace VIVE.OpenXR.Hand
}
else
{
ERROR("OnSessionCreate() EnumerateReferenceSpaces(" + spaceCountOutput + ") failed.");
sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() EnumerateReferenceSpaces(").Append(spaceCountOutput).Append(") failed."); ERROR(sb);
}
}
else
@@ -233,7 +255,7 @@ namespace VIVE.OpenXR.Hand
{
hasLeftHandTracker = true;
leftHandTracker = value;
DEBUG("OnSessionCreate() leftHandTracker " + leftHandTracker);
sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() leftHandTracker ").Append(leftHandTracker); DEBUG(sb);
}
}
{ // right hand tracker
@@ -241,7 +263,7 @@ namespace VIVE.OpenXR.Hand
{
hasRightHandTracker = true;
rightHandTracker = value;
DEBUG("OnSessionCreate() rightHandTracker " + rightHandTracker);
sb.Clear().Append(LOG_TAG).Append("OnSessionCreate() rightHandTracker ").Append(rightHandTracker); DEBUG(sb);
}
}
}
@@ -252,7 +274,7 @@ namespace VIVE.OpenXR.Hand
/// <param name="xrSession">The session ID to destroy.</param>
protected override void OnSessionDestroy(ulong xrSession)
{
DEBUG("OnSessionDestroy() " + xrSession);
sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() ").Append(xrSession); DEBUG(sb);
// Reference Space is binding with xrSession so we destroy the xrSpace when xrSession is destroyed.
if (hasReferenceSpaceLocal)
@@ -261,12 +283,12 @@ namespace VIVE.OpenXR.Hand
if (DestroySpace(m_ReferenceSpaceLocal) == XrResult.XR_SUCCESS)
#pragma warning restore 0618
{
DEBUG("OnSessionDestroy() DestroySpace LOCAL " + m_ReferenceSpaceLocal);
sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() DestroySpace LOCAL ").Append(m_ReferenceSpaceLocal); DEBUG(sb);
m_ReferenceSpaceLocal = 0;
}
else
{
ERROR("OnSessionDestroy() DestroySpace LOCAL " + m_ReferenceSpaceLocal + " failed.");
sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() DestroySpace LOCAL ").Append(m_ReferenceSpaceLocal).Append(" failed."); ERROR(sb);
}
hasReferenceSpaceLocal = false;
}
@@ -276,12 +298,12 @@ namespace VIVE.OpenXR.Hand
if (DestroySpace(m_ReferenceSpaceStage) == XrResult.XR_SUCCESS)
#pragma warning restore 0618
{
DEBUG("OnSessionDestroy() DestroySpace STAGE " + m_ReferenceSpaceStage);
sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() DestroySpace STAGE ").Append(m_ReferenceSpaceStage); DEBUG(sb);
m_ReferenceSpaceStage = 0;
}
else
{
ERROR("OnSessionDestroy() DestroySpace STAGE " + m_ReferenceSpaceStage + " failed.");
sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() DestroySpace STAGE ").Append(m_ReferenceSpaceStage).Append(" failed."); ERROR(sb);
}
hasReferenceSpaceStage = false;
}
@@ -291,11 +313,11 @@ namespace VIVE.OpenXR.Hand
{
if (DestroyHandTrackerEXT(leftHandTracker) == XrResult.XR_SUCCESS)
{
DEBUG("OnSessionDestroy() Left DestroyHandTrackerEXT " + leftHandTracker);
sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() Left DestroyHandTrackerEXT ").Append(leftHandTracker); DEBUG(sb);
}
else
{
ERROR("OnSessionDestroy() Left DestroyHandTrackerEXT " + leftHandTracker + " failed.");
sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() Left DestroyHandTrackerEXT ").Append(leftHandTracker).Append(" failed."); ERROR(sb);
}
hasLeftHandTracker = false;
}
@@ -303,11 +325,11 @@ namespace VIVE.OpenXR.Hand
{
if (DestroyHandTrackerEXT(rightHandTracker) == XrResult.XR_SUCCESS)
{
DEBUG("OnSessionDestroy() Right DestroyHandTrackerEXT " + rightHandTracker);
sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() Right DestroyHandTrackerEXT ").Append(rightHandTracker); DEBUG(sb);
}
else
{
ERROR("OnSessionDestroy() Right DestroyHandTrackerEXT " + rightHandTracker + " failed.");
sb.Clear().Append(LOG_TAG).Append("OnSessionDestroy() Right DestroyHandTrackerEXT ").Append(rightHandTracker).Append(" failed."); ERROR(sb);
}
hasRightHandTracker = false;
}
@@ -453,13 +475,13 @@ namespace VIVE.OpenXR.Hand
if (createInfo.hand == XrHandEXT.XR_HAND_LEFT_EXT && hasLeftHandTracker)
{
DEBUG("CreateHandTrackerEXT() Left tracker " + leftHandTracker + " already created.");
sb.Clear().Append(LOG_TAG).Append("CreateHandTrackerEXT() Left tracker ").Append(leftHandTracker).Append(" already created."); DEBUG(sb);
handTracker = leftHandTracker;
return XrResult.XR_SUCCESS;
}
if (createInfo.hand == XrHandEXT.XR_HAND_RIGHT_EXT && hasRightHandTracker)
{
DEBUG("CreateHandTrackerEXT() Right tracker " + rightHandTracker + " already created.");
sb.Clear().Append(LOG_TAG).Append("CreateHandTrackerEXT() Right tracker ").Append(rightHandTracker).Append(" already created."); DEBUG(sb);
handTracker = rightHandTracker;
return XrResult.XR_SUCCESS;
}
@@ -677,7 +699,7 @@ namespace VIVE.OpenXR.Hand
bool support = false;
for (int i = 0; i < spaceCountOutput; i++)
{
DEBUG("IsReferenceSpaceTypeSupported() supported space[" + i + "]: " + spaces[i]);
sb.Clear().Append(LOG_TAG).Append("IsReferenceSpaceTypeSupported() supported space[").Append(i).Append("]: ").Append(spaces[i]); DEBUG(sb);
if (spaces[i] == space) { support = true; }
}
@@ -720,7 +742,7 @@ namespace VIVE.OpenXR.Hand
sys_hand_tracking_prop_ptr = new IntPtr(offset);
handTrackingSystemProperties = (XrSystemHandTrackingPropertiesEXT)Marshal.PtrToStructure(sys_hand_tracking_prop_ptr, typeof(XrSystemHandTrackingPropertiesEXT));
DEBUG("IsHandTrackingSupported() XrSystemHandTrackingPropertiesEXT.supportsHandTracking: " + handTrackingSystemProperties.supportsHandTracking);
sb.Clear().Append(LOG_TAG).Append("IsHandTrackingSupported() XrSystemHandTrackingPropertiesEXT.supportsHandTracking: ").Append((UInt32)handTrackingSystemProperties.supportsHandTracking); DEBUG(sb);
ret = handTrackingSystemProperties.supportsHandTracking > 0;
}
else
@@ -736,7 +758,7 @@ namespace VIVE.OpenXR.Hand
{
if (!IsHandTrackingSupported())
{
ERROR("CreateHandTrackers() " + (isLeft ? "Left" : "Right") + " hand tracking is NOT supported.");
sb.Clear().Append(LOG_TAG).Append("CreateHandTrackers() ").Append((isLeft ? "Left" : "Right")).Append(" hand tracking is NOT supported."); ERROR(sb);
handTracker = 0;
return false;
}
@@ -748,7 +770,7 @@ namespace VIVE.OpenXR.Hand
createInfo.handJointSet = XrHandJointSetEXT.XR_HAND_JOINT_SET_DEFAULT_EXT;
var ret = CreateHandTrackerEXT(ref createInfo, out handTracker);
DEBUG("CreateHandTrackers() " + (isLeft ? "Left" : "Right") + " CreateHandTrackerEXT = " + ret);
sb.Clear().Append(LOG_TAG).Append("CreateHandTrackers() ").Append((isLeft ? "Left" : "Right")).Append(" CreateHandTrackerEXT = ").Append(ret); DEBUG(sb);
return ret == XrResult.XR_SUCCESS;
}
@@ -773,20 +795,52 @@ namespace VIVE.OpenXR.Hand
return true;
}
private int lastUpdateFrameL = -1, lastUpdateFrameR = -1;
private void UpdateCallback()
{
// Only allow updating poses once at BeforeRender & Dynamic per frame.
if (InputState.currentUpdateType == InputUpdateType.BeforeRender ||
InputState.currentUpdateType == InputUpdateType.Dynamic)
{
lastUpdateFrameL = -1;
lastUpdateFrameR = -1;
}
}
private bool AllowUpdate(bool isLeft)
{
bool allow;
if (isLeft)
{
allow = (lastUpdateFrameL != Time.frameCount);
lastUpdateFrameL = Time.frameCount;
}
else
{
allow = (lastUpdateFrameR != Time.frameCount);
lastUpdateFrameR = Time.frameCount;
}
return allow;
}
/// <summary>
/// Retrieves the <see cref="XrHandJointLocationEXT"> XrHandJointLocationEXT </see> data.
/// </summary>
/// <param name="isLeft">Left or right hand.</param>
/// <param name="handJointLocation">Output parameter to retrieve <see cref="XrHandJointLocationEXT"> XrHandJointLocationEXT </see> data.</param>
/// <param name="timestamp">The hand tracking data timestamp.</param>
/// <returns>True for valid data.</returns>
public bool GetJointLocations(bool isLeft, out XrHandJointLocationEXT[] handJointLocation)
public bool GetJointLocations(bool isLeft, out XrHandJointLocationEXT[] handJointLocation, out XrTime timestamp)
{
bool ret = false;
handJointLocation = isLeft ? jointLocationsL : jointLocationsR;
timestamp = m_frameState.predictedDisplayTime;
if (!AllowUpdate(isLeft)) { return true; }
bool ret = false;
if (isLeft && !hasLeftHandTracker) { return ret; }
if (!isLeft && !hasRightHandTracker) { return ret; }
OpenXRHelper.Trace.Begin("GetJointLocations");
TrackingOriginModeFlags origin = GetTrackingOriginMode();
if (origin == TrackingOriginModeFlags.Unknown || origin == TrackingOriginModeFlags.Unbounded) { return ret; }
XrSpace baseSpace = (origin == TrackingOriginModeFlags.Device ? m_ReferenceSpaceLocal : m_ReferenceSpaceStage);
@@ -831,6 +885,8 @@ namespace VIVE.OpenXR.Hand
locateInfo: locateInfo,
locations: ref locations) == XrResult.XR_SUCCESS)
{
timestamp = locateInfo.time;
if (locations.isActive)
{
if (IntPtr.Size == 4)
@@ -858,7 +914,19 @@ namespace VIVE.OpenXR.Hand
}
Marshal.FreeHGlobal(locations.jointLocations);
OpenXRHelper.Trace.End();
return ret;
}
/// <summary>
/// Retrieves the <see cref="XrHandJointLocationEXT"> XrHandJointLocationEXT </see> data.
/// </summary>
/// <param name="isLeft">Left or right hand.</param>
/// <param name="handJointLocation">Output parameter to retrieve <see cref="XrHandJointLocationEXT"> XrHandJointLocationEXT </see> data.</param>
/// <returns>True for valid data.</returns>
public bool GetJointLocations(bool isLeft, out XrHandJointLocationEXT[] handJointLocation)
{
return GetJointLocations(isLeft, out handJointLocation, out XrTime timestamp);
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: ed4392e61290f34419960f1139486f9f
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,154 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEditor;
using UnityEngine.XR.OpenXR.Features;
using UnityEngine;
using System.Text;
#if UNITY_EDITOR
using UnityEditor.XR.OpenXR.Features;
#endif
namespace VIVE.OpenXR.Interaction
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR - Interaction Group",
Category = "Interactions",
BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
Company = "HTC",
Desc = "VIVE interaction profiles management.",
OpenxrExtensionStrings = kOpenxrExtensionString,
Version = "2.5.0",
FeatureId = featureId)]
#endif
public class ViveInteractions : OpenXRFeature
{
#region Log
const string LOG_TAG = "VIVE.OpenXR.Interaction.ViveInteractions ";
static StringBuilder m_sb = null;
static StringBuilder sb
{
get
{
if (m_sb == null) { m_sb = new StringBuilder(); }
return m_sb;
}
}
static void DEBUG(StringBuilder msg) { Debug.LogFormat("{0} {1}", LOG_TAG, msg); }
static void WARNING(StringBuilder msg) { Debug.LogWarningFormat("{0} {1}", LOG_TAG, msg); }
static void ERROR(StringBuilder msg) { Debug.LogErrorFormat("{0} {1}", LOG_TAG, msg); }
#endregion
public const string kOpenxrExtensionString = "";
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string featureId = "vive.openxr.feature.interactions";
#region OpenXR Life Cycle
#pragma warning disable
private bool m_XrInstanceCreated = false;
#pragma warning enable
private XrInstance m_XrInstance = 0;
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateInstance">xrCreateInstance</see> is done.
/// </summary>
/// <param name="xrInstance">The created instance.</param>
/// <returns>True for valid <see cref="XrInstance">XrInstance</see></returns>
protected override bool OnInstanceCreate(ulong xrInstance)
{
m_XrInstance = xrInstance;
m_XrInstanceCreated = true;
sb.Clear().Append("OnInstanceCreate() ").Append(m_XrInstance); DEBUG(sb);
return true;
}
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroyInstance">xrDestroyInstance</see> is done.
/// </summary>
/// <param name="xrInstance">The instance to destroy.</param>
protected override void OnInstanceDestroy(ulong xrInstance)
{
sb.Clear().Append("OnInstanceDestroy() ").Append(xrInstance).Append(", current: ").Append(m_XrInstance); DEBUG(sb);
if (m_XrInstance == xrInstance)
{
m_XrInstanceCreated = false;
m_XrInstance = 0;
}
}
#pragma warning disable
private bool m_XrSessionCreated = false;
#pragma warning enable
private XrSession m_XrSession = 0;
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateSession">xrCreateSession</see> is done.
/// </summary>
/// <param name="xrSession">The created session ID.</param>
protected override void OnSessionCreate(ulong xrSession)
{
m_XrSession = xrSession;
m_XrSessionCreated = true;
sb.Clear().Append("OnSessionCreate() ").Append(m_XrSession); DEBUG(sb);
}
protected override void OnSessionEnd(ulong xrSession)
{
sb.Clear().Append("OnSessionEnd() ").Append(xrSession).Append(", current: ").Append(m_XrSession); DEBUG(sb);
}
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroySession">xrDestroySession</see> is done.
/// </summary>
/// <param name="xrSession">The session ID to destroy.</param>
protected override void OnSessionDestroy(ulong xrSession)
{
sb.Clear().Append("OnSessionDestroy() ").Append(xrSession).Append(", current: ").Append(m_XrSession); DEBUG(sb);
if (m_XrSession == xrSession)
{
m_XrSessionCreated = false;
m_XrSession = 0;
}
}
private XrSystemId m_XrSystemId = 0;
/// <summary>
/// Called when the <see cref="XrSystemId">XrSystemId</see> retrieved by <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystem">xrGetSystem</see> is changed.
/// </summary>
/// <param name="xrSystem">The system id.</param>
protected override void OnSystemChange(ulong xrSystem)
{
m_XrSystemId = xrSystem;
sb.Clear().Append("OnSystemChange() " + m_XrSystemId); DEBUG(sb);
}
#endregion
[SerializeField]
internal bool m_ViveHandInteraction = false;
/// <summary>
/// Checks if using <see href="https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XR_HTC_hand_interaction">XR_HTC_hand_interaction</see> or not.
/// </summary>
/// <returns>True for using.</returns>
public bool UseViveHandInteraction() { return m_ViveHandInteraction; }
[SerializeField]
internal bool m_ViveWristTracker = false;
/// <summary>
/// Checks if using <see href="https://business.vive.com/eu/product/vive-wrist-tracker/">VIVE Wrist Tracker</see> or not.
/// </summary>
/// <returns>True for using.</returns>
public bool UseViveWristTracker() { return m_ViveWristTracker; }
[SerializeField]
internal bool m_ViveXRTracker = false;
/// <summary>
/// Checks if using <see href="https://business.vive.com/eu/product/vive-ultimate-tracker/">VIVE Ultimate Tracker</see> or not.
/// </summary>
/// <returns>True for using.</returns>
public bool UseViveXrTracker() { return m_ViveXRTracker; }
[SerializeField]
internal bool m_KHRHandInteraction = false;
/// <summary>
/// Checks if using <see href="https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XR_EXT_hand_interaction">XR_EXT_hand_interaction</see> or not.
/// </summary>
/// <returns>True for using.</returns>
public bool UseKhrHandInteraction() { return m_KHRHandInteraction; }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 9396fe2350b43e04db60471cc512653e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b6c319d9f4e5e4c40b3e41224f10021c
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,76 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Runtime.InteropServices;
using UnityEngine;
using UnityEngine.XR.OpenXR.Features;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
namespace VIVE.OpenXR.Feature
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR MockRuntime",
Desc = "VIVE's mock runtime. Used with OpenXR MockRuntime to test features on Editor.",
Company = "HTC",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = kOpenxrExtensionString,
Version = "1.0.0",
BuildTargetGroups = new[] { BuildTargetGroup.Standalone },
FeatureId = featureId
)]
#endif
public class ViveMockRuntime : OpenXRFeature
{
public const string kOpenxrExtensionString = "";
[DllImport("ViveMockRuntime", EntryPoint = "HookGetInstanceProcAddr")]
public static extern IntPtr HookGetInstanceProcAddrFake(IntPtr func);
//AddRequiredFeature
[DllImport("ViveMockRuntime", EntryPoint = "AddRequiredFeature")]
public static extern void AddRequiredFeature(string featureName);
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string featureId = "vive.openxr.feature.mockruntime";
public bool enableFuture = false;
public bool enableAnchor = false;
#region override functions
protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
{
IntPtr nextProcAddr = func;
if (Application.isEditor)
{
Debug.Log("ViveMockRuntime: HookGetInstanceProcAddr");
try
{
AddRequiredFeature("Future");
AddRequiredFeature("Anchor");
nextProcAddr = HookGetInstanceProcAddrFake(nextProcAddr);
}
catch (DllNotFoundException ex)
{
Debug.LogError("ViveMockRuntime: DLL not found: " + ex.Message);
}
catch (EntryPointNotFoundException ex)
{
Debug.LogError("ViveMockRuntime: Function not found in DLL: " + ex.Message);
}
catch (Exception ex)
{
Debug.LogError("ViveMockRuntime: Unexpected error: " + ex.Message);
}
}
return nextProcAddr;
}
#endregion override functions
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 453412f6d2b83664fb0383acec06862d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 806671d2057229649a8ef0aaf2816393
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 9b16e408c09007b46bddec7439822d52
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,30 @@
# 12.89. XR_HTC_passthrough
## Name String
XR_HTC_passthrough
## Revision
1
## New Object Types
- [XrPassthroughHTC](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrPassthroughHTC)
## New Enum Constants
[XrObjectType](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrObjectType) enumeration is extended with:
- XR_OBJECT_TYPE_PASSTHROUGH_HTC
[XrStructureType](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrStructureType) enumeration is extended with:
- XR_TYPE_PASSTHROUGH_CREATE_INFO_HTC
- XR_TYPE_PASSTHROUGH_COLOR_HTC
- XR_TYPE_PASSTHROUGH_MESH_TRANSFORM_INFO_HTC
- XR_TYPE_COMPOSITION_LAYER_PASSTHROUGH_HTC
## New Enums
- [XrPassthroughFormHTC](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrPassthroughFormHTC)
## New Structures
- [XrPassthroughCreateInfoHTC](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrPassthroughCreateInfoHTC)
- [XrPassthroughColorHTC](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrPassthroughColorHTC)
- [XrPassthroughMeshTransformInfoHTC](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrPassthroughMeshTransformInfoHTC)
- [XrCompositionLayerPassthroughHTC](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrCompositionLayerPassthroughHTC)
## New Functions
- [xrCreatePassthroughHTC](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#xrCreatePassthroughHTC)
- [xrDestroyPassthroughHTC](https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#xrDestroyPassthroughHTC)
## VIVE Plugin
Enable "VIVE XR Passthrough" in "Project Settings > XR Plugin-in Management > OpenXR > Android Tab > OpenXR Feature Groups" to use the Passthrough feature provided by VIVE OpenXR plugin.

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 0fddf83b59e7c194493074db7cc7aebb
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a624175b30ff07b47920ec9a9699a1ea
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 78bb1f33f93dd034d90b1d1a27a5eb27
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,534 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UnityEngine;
namespace VIVE.OpenXR.Passthrough
{
	/// <summary>
	/// The forms of passthrough layer. Mirrors <see cref="XrPassthroughFormHTC"/> with the same numeric values.
	/// </summary>
	public enum PassthroughLayerForm
	{
		///<summary> Fullscreen Passthrough Form: the passthrough image fills the entire view. </summary>
		Planar = 0,
		///<summary> Projected Passthrough Form: the passthrough image is projected onto a caller-supplied mesh. </summary>
		Projected = 1
	}
	/// <summary>
	/// The types of passthrough space, i.e. which OpenXR reference space the
	/// projected passthrough mesh is anchored to.
	/// </summary>
	public enum ProjectedPassthroughSpaceType
	{
		///<summary>
		/// Head-locked:
		/// XR_REFERENCE_SPACE_TYPE_VIEW at (0,0,0) with orientation (0,0,0,1)
		///</summary>
		Headlock = 0,
		///<summary>
		/// World-locked; the reference space depends on the current tracking origin:
		///
		/// When TrackingOriginMode is TrackingOriginModeFlags.Floor:
		/// XR_REFERENCE_SPACE_TYPE_STAGE at (0,0,0) with orientation (0,0,0,1)
		///
		/// When TrackingOriginMode is TrackingOriginModeFlags.Device:
		/// XR_REFERENCE_SPACE_TYPE_LOCAL at (0,0,0) with orientation (0,0,0,1)
		///
		///</summary>
		Worldlock = 1
	}
// -------------------- 12.88. XR_HTC_passthrough --------------------
#region New Object Types
/// <summary>
/// An application can create an <see href="https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#XrPassthroughHTC">XrPassthroughHTC</see> handle by calling <see href="https://registry.khronos.org/OpenXR/specs/1.1/html/xrspec.html#xrCreatePassthroughHTC">xrCreatePassthroughHTC</see>. The returned passthrough handle can be subsequently used in API calls.
/// </summary>
public struct XrPassthroughHTC : IEquatable<UInt64>
{
private readonly UInt64 value;
public XrPassthroughHTC(UInt64 u)
{
value = u;
}
public static implicit operator UInt64(XrPassthroughHTC equatable)
{
return equatable.value;
}
public static implicit operator XrPassthroughHTC(UInt64 u)
{
return new XrPassthroughHTC(u);
}
public bool Equals(XrPassthroughHTC other)
{
return value == other.value;
}
public bool Equals(UInt64 other)
{
return value == other;
}
public override bool Equals(object obj)
{
return obj is XrPassthroughHTC && Equals((XrPassthroughHTC)obj);
}
public override int GetHashCode()
{
return value.GetHashCode();
}
public override string ToString()
{
return value.ToString();
}
public static bool operator ==(XrPassthroughHTC a, XrPassthroughHTC b) { return a.Equals(b); }
public static bool operator !=(XrPassthroughHTC a, XrPassthroughHTC b) { return !a.Equals(b); }
public static bool operator >=(XrPassthroughHTC a, XrPassthroughHTC b) { return a.value >= b.value; }
public static bool operator <=(XrPassthroughHTC a, XrPassthroughHTC b) { return a.value <= b.value; }
public static bool operator >(XrPassthroughHTC a, XrPassthroughHTC b) { return a.value > b.value; }
public static bool operator <(XrPassthroughHTC a, XrPassthroughHTC b) { return a.value < b.value; }
public static XrPassthroughHTC operator +(XrPassthroughHTC a, XrPassthroughHTC b) { return a.value + b.value; }
public static XrPassthroughHTC operator -(XrPassthroughHTC a, XrPassthroughHTC b) { return a.value - b.value; }
public static XrPassthroughHTC operator *(XrPassthroughHTC a, XrPassthroughHTC b) { return a.value * b.value; }
public static XrPassthroughHTC operator /(XrPassthroughHTC a, XrPassthroughHTC b)
{
if (b.value == 0)
{
throw new DivideByZeroException();
}
return a.value / b.value;
}
}
#endregion
#region New Enums
	/// <summary>
	/// The XrPassthroughFormHTC enumeration identifies the form of the passthrough, presenting the passthrough fill the full screen or project onto a specified mesh.
	/// </summary>
	public enum XrPassthroughFormHTC
	{
		/// <summary>
		/// Presents the passthrough filling the entire screen.
		/// </summary>
		XR_PASSTHROUGH_FORM_PLANAR_HTC = 0,
		/// <summary>
		/// Presents the passthrough projecting onto a custom mesh.
		/// </summary>
		XR_PASSTHROUGH_FORM_PROJECTED_HTC = 1,
	};
#endregion
#region New Structures
	/// <summary>
	/// The XrPassthroughCreateInfoHTC structure describes the information to create an <see cref="XrPassthroughHTC">XrPassthroughHTC</see> handle.
	/// </summary>
	[StructLayout(LayoutKind.Sequential)]
	public struct XrPassthroughCreateInfoHTC
	{
		/// <summary>
		/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
		/// </summary>
		public XrStructureType type;
		/// <summary>
		/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
		/// </summary>
		public IntPtr next;
		/// <summary>
		/// The form specifies the form of passthrough.
		/// </summary>
		public XrPassthroughFormHTC form;

		/// <summary>Builds the create-info from explicit field values.</summary>
		/// <param name="in_type">The <see cref="XrStructureType">XrStructureType</see> of this structure.</param>
		/// <param name="in_next">NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.</param>
		/// <param name="in_form">An <see cref="XrPassthroughFormHTC">XrPassthroughFormHTC</see> selecting planar (fullscreen) or projected (mesh) passthrough.</param>
		public XrPassthroughCreateInfoHTC(XrStructureType in_type, IntPtr in_next, XrPassthroughFormHTC in_form)
		{
			type = in_type;
			next = in_next;
			form = in_form;
		}
	};
/// <summary>
/// The application can specify the XrPassthroughColorHTC to adjust the alpha value of the passthrough. The range is between 0.0f and 1.0f, 1.0f means opaque.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrPassthroughColorHTC
{
/// <summary>
/// The XrStructureType of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// Next is NULL or a pointer to the next structure in a structure chain, such as XrPassthroughMeshTransformInfoHTC.
/// </summary>
public IntPtr next;
/// <summary>
/// The alpha value of the passthrough in the range [0, 1].
/// </summary>
public float alpha;
public XrPassthroughColorHTC(XrStructureType in_type, IntPtr in_next, float in_alpha)
{
type = in_type;
next = in_next;
alpha = in_alpha;
}
};
	/// <summary>
	/// The XrPassthroughMeshTransformInfoHTC structure describes the mesh and transformation
	/// used by the projected passthrough form.
	/// </summary>
	// NOTE(review): vertices/indices are managed arrays inside a [StructLayout(Sequential)]
	// struct without [MarshalAs] size info; default interop marshaling cannot pass these
	// by value — presumably the native-call site marshals this structure manually. Confirm
	// before relying on Marshal.StructureToPtr with this type.
	[StructLayout(LayoutKind.Sequential)]
	public struct XrPassthroughMeshTransformInfoHTC
	{
		/// <summary>
		/// The XrStructureType of this structure.
		/// </summary>
		public XrStructureType type;
		/// <summary>
		/// Next is NULL or a pointer to the next structure in a structure chain.
		/// </summary>
		public IntPtr next;
		/// <summary>
		/// The count of vertices array in the mesh.
		/// </summary>
		public UInt32 vertexCount;
		/// <summary>
		/// An array of XrVector3f. The size of the array must be equal to vertexCount.
		/// </summary>
		public XrVector3f[] vertices;
		/// <summary>
		/// The count of indices array in the mesh.
		/// </summary>
		public UInt32 indexCount;
		/// <summary>
		/// An array of triangle indices. The size of the array must be equal to indexCount.
		/// </summary>
		public UInt32[] indices;
		/// <summary>
		/// The XrSpace that defines the projected passthrough's base space for transformations.
		/// </summary>
		public XrSpace baseSpace;
		/// <summary>
		/// The XrTime that defines the time at which the transform is applied.
		/// </summary>
		public XrTime time;
		/// <summary>
		/// The XrPosef that defines the pose of the mesh
		/// </summary>
		public XrPosef pose;
		/// <summary>
		/// The XrVector3f that defines the scale of the mesh
		/// </summary>
		public XrVector3f scale;

		/// <summary>Builds the structure from explicit field values; see the field docs above for each parameter's meaning.</summary>
		public XrPassthroughMeshTransformInfoHTC(XrStructureType in_type, IntPtr in_next, UInt32 in_vertexCount,
			XrVector3f[] in_vertices, UInt32 in_indexCount, UInt32[] in_indices, XrSpace in_baseSpace, XrTime in_time,
			XrPosef in_pose, XrVector3f in_scale)
		{
			type = in_type;
			next = in_next;
			vertexCount = in_vertexCount;
			vertices = in_vertices;
			indexCount = in_indexCount;
			indices = in_indices;
			baseSpace = in_baseSpace;
			time = in_time;
			pose = in_pose;
			scale = in_scale;
		}
	};
	/// <summary>
	/// A pointer to XrCompositionLayerPassthroughHTC may be submitted in xrEndFrame as a pointer to the base structure XrCompositionLayerBaseHeader, in the desired layer order, to request the runtime to composite a passthrough layer into the final frame output.
	/// </summary>
	[StructLayout(LayoutKind.Sequential)]
	public struct XrCompositionLayerPassthroughHTC
	{
		/// <summary>
		/// The XrStructureType of this structure.
		/// </summary>
		public XrStructureType type;
		/// <summary>
		/// Next is NULL or a pointer to the next structure in a structure chain, such as XrPassthroughMeshTransformInfoHTC.
		/// </summary>
		public IntPtr next;
		/// <summary>
		/// A bitmask of XrCompositionLayerFlagBits describing flags to apply to the layer.
		/// </summary>
		public XrCompositionLayerFlags layerFlags;
		/// <summary>
		/// The XrSpace that specifies the layer's space - must be XR_NULL_HANDLE.
		/// </summary>
		public XrSpace space;
		/// <summary>
		/// The XrPassthroughHTC previously created by xrCreatePassthroughHTC.
		/// </summary>
		public XrPassthroughHTC passthrough;
		/// <summary>
		/// The XrPassthroughColorHTC describing the color information with the alpha value of the passthrough layer.
		/// </summary>
		public XrPassthroughColorHTC color;

		/// <summary>Builds the composition layer from explicit field values; see the field docs above for each parameter's meaning.</summary>
		public XrCompositionLayerPassthroughHTC(XrStructureType in_type, IntPtr in_next, XrCompositionLayerFlags in_layerFlags,
			XrSpace in_space, XrPassthroughHTC in_passthrough, XrPassthroughColorHTC in_color)
		{
			type = in_type;
			next = in_next;
			layerFlags = in_layerFlags;
			space = in_space;
			passthrough = in_passthrough;
			color = in_color;
		}
	};
	/// <summary>
	/// Base header shared by the passthrough configuration structures below
	/// (image rate / image quality); concrete configurations begin with the same two fields.
	/// </summary>
	[StructLayout(LayoutKind.Sequential)]
	public struct XrPassthroughConfigurationBaseHeaderHTC
	{
		/// <summary> The XrStructureType identifying the concrete configuration structure. </summary>
		public XrStructureType type;
		/// <summary> NULL or a pointer to the next structure in a structure chain. </summary>
		public IntPtr next;
	};
	/// <summary>
	/// Passthrough image-rate configuration. Pack = 8 to match the native struct layout.
	/// </summary>
	[StructLayout(LayoutKind.Sequential, Pack = 8)]
	public struct XrPassthroughConfigurationImageRateHTC
	{
		/// <summary> The XrStructureType of this structure. </summary>
		public XrStructureType type;
		/// <summary> NULL or a pointer to the next structure in a structure chain. </summary>
		public IntPtr next;
		/// <summary> Source image rate — presumably the camera capture rate; confirm against the runtime docs. </summary>
		public float srcImageRate;
		/// <summary> Destination image rate — presumably the output/composited rate; confirm against the runtime docs. </summary>
		public float dstImageRate;
	};
	/// <summary>
	/// Passthrough image-quality configuration. Pack = 8 to match the native struct layout.
	/// </summary>
	[StructLayout(LayoutKind.Sequential, Pack = 8)]
	public struct XrPassthroughConfigurationImageQualityHTC
	{
		/// <summary> The XrStructureType of this structure. </summary>
		public XrStructureType type;
		/// <summary> NULL or a pointer to the next structure in a structure chain. </summary>
		public IntPtr next;
		/// <summary> Image-quality scale factor; exact range/semantics not shown here — confirm against the runtime docs. </summary>
		public float scale;
	};
	/// <summary>
	/// Event payload reporting that the passthrough image-rate configuration changed,
	/// carrying the configuration before (from) and after (to) the change.
	/// </summary>
	[StructLayout(LayoutKind.Sequential)]
	public struct XrEventDataPassthroughConfigurationImageRateChangedHTC
	{
		/// <summary> The XrStructureType of this structure. </summary>
		public XrStructureType type;
		/// <summary> NULL or a pointer to the next structure in a structure chain. </summary>
		public IntPtr next;
		/// <summary> Image-rate configuration before the change. </summary>
		public XrPassthroughConfigurationImageRateHTC fromImageRate;
		/// <summary> Image-rate configuration after the change. </summary>
		public XrPassthroughConfigurationImageRateHTC toImageRate;

		/// <summary>Builds the event from explicit field values.</summary>
		public XrEventDataPassthroughConfigurationImageRateChangedHTC(XrStructureType in_type, IntPtr in_next, XrPassthroughConfigurationImageRateHTC in_fromImageRate, XrPassthroughConfigurationImageRateHTC in_toImageRate)
		{
			type = in_type;
			next = in_next;
			fromImageRate = in_fromImageRate;
			toImageRate = in_toImageRate;
		}

		/// <summary>
		/// A correctly-typed event with zeroed payloads, used as the default result of <see cref="Get"/>.
		/// </summary>
		public static XrEventDataPassthroughConfigurationImageRateChangedHTC identity
		{
			get
			{
				return new XrEventDataPassthroughConfigurationImageRateChangedHTC(
					XrStructureType.XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_RATE_CHANGED_HTC,
					IntPtr.Zero,
					new XrPassthroughConfigurationImageRateHTC { type = XrStructureType.XR_TYPE_PASSTHROUGH_CONFIGURATION_IMAGE_RATE_HTC, next = IntPtr.Zero },
					new XrPassthroughConfigurationImageRateHTC { type = XrStructureType.XR_TYPE_PASSTHROUGH_CONFIGURATION_IMAGE_RATE_HTC, next = IntPtr.Zero });
			}
		}

		/// <summary>
		/// Decodes this event from a generic event buffer's <c>varying</c> bytes.
		/// The hard-coded offsets reflect the Pack = 8 layout of
		/// XrPassthroughConfigurationImageRateHTC: type at 0 (4 bytes + 4 padding),
		/// next at 8 (read as Int64), srcImageRate at 16, dstImageRate at 20; the
		/// second struct follows at +24.
		/// NOTE(review): reading next via ToInt64 assumes a 64-bit pointer layout —
		/// this would misread on a 32-bit runtime; confirm supported targets.
		/// </summary>
		/// <param name="eventDataBuffer">Polled event buffer to decode.</param>
		/// <param name="eventDataPassthroughConfigurationImageRate">Decoded event; set to <see cref="identity"/> when the buffer holds a different event type.</param>
		/// <returns>True when the buffer holds an image-rate-changed event.</returns>
		public static bool Get(XrEventDataBuffer eventDataBuffer, out XrEventDataPassthroughConfigurationImageRateChangedHTC eventDataPassthroughConfigurationImageRate)
		{
			eventDataPassthroughConfigurationImageRate = identity;
			if (eventDataBuffer.type == XrStructureType.XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_RATE_CHANGED_HTC)
			{
				eventDataPassthroughConfigurationImageRate.next = eventDataBuffer.next;
				eventDataPassthroughConfigurationImageRate.fromImageRate.type = (XrStructureType)BitConverter.ToUInt32(eventDataBuffer.varying, 0);
				eventDataPassthroughConfigurationImageRate.fromImageRate.next = (IntPtr)BitConverter.ToInt64(eventDataBuffer.varying, 8);
				eventDataPassthroughConfigurationImageRate.fromImageRate.srcImageRate = BitConverter.ToSingle(eventDataBuffer.varying, 16);
				eventDataPassthroughConfigurationImageRate.fromImageRate.dstImageRate = BitConverter.ToSingle(eventDataBuffer.varying, 20);
				eventDataPassthroughConfigurationImageRate.toImageRate.type = (XrStructureType)BitConverter.ToUInt32(eventDataBuffer.varying, 24);
				eventDataPassthroughConfigurationImageRate.toImageRate.next = (IntPtr)BitConverter.ToInt64(eventDataBuffer.varying, 32);
				eventDataPassthroughConfigurationImageRate.toImageRate.srcImageRate = BitConverter.ToSingle(eventDataBuffer.varying, 40);
				eventDataPassthroughConfigurationImageRate.toImageRate.dstImageRate = BitConverter.ToSingle(eventDataBuffer.varying, 44);
				return true;
			}
			return false;
		}
	};
/// <summary>
/// Event payload for <c>XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_QUALITY_CHANGED_HTC</c>:
/// reports a passthrough image-quality change from <see cref="fromImageQuality"/> to <see cref="toImageQuality"/>.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrEventDataPassthroughConfigurationImageQualityChangedHTC
{
	public XrStructureType type;   // XR structure type identifier
	public IntPtr next;            // next pointer of the XR structure chain (may be null)
	public XrPassthroughConfigurationImageQualityHTC fromImageQuality; // quality before the change
	public XrPassthroughConfigurationImageQualityHTC toImageQuality;   // quality after the change
	/// <summary>Constructs the event with explicit field values.</summary>
	public XrEventDataPassthroughConfigurationImageQualityChangedHTC(XrStructureType in_type, IntPtr in_next, XrPassthroughConfigurationImageQualityHTC in_fromImageQuality, XrPassthroughConfigurationImageQualityHTC in_toImageQuality)
	{
		type = in_type;
		next = in_next;
		fromImageQuality = in_fromImageQuality;
		toImageQuality = in_toImageQuality;
	}
	/// <summary>
	/// A default-initialized instance: structure types set, next chains null,
	/// quality scales left at their defaults (filled in later by <see cref="Get"/>).
	/// </summary>
	public static XrEventDataPassthroughConfigurationImageQualityChangedHTC identity
	{
		get
		{
			return new XrEventDataPassthroughConfigurationImageQualityChangedHTC(
				XrStructureType.XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_QUALITY_CHANGED_HTC,
				IntPtr.Zero,
				new XrPassthroughConfigurationImageQualityHTC { type = XrStructureType.XR_TYPE_PASSTHROUGH_CONFIGURATION_IMAGE_QUALITY_HTC, next = IntPtr.Zero },
				new XrPassthroughConfigurationImageQualityHTC { type = XrStructureType.XR_TYPE_PASSTHROUGH_CONFIGURATION_IMAGE_QUALITY_HTC, next = IntPtr.Zero }); // scale defaults to 0 until decoded
		}
	}
	/// <summary>
	/// Decodes an image-quality-changed event from a generic <see cref="XrEventDataBuffer"/>.
	/// NOTE(review): the out parameter name "ventData..." is missing its leading 'e' (typo);
	/// kept as-is because renaming an out parameter can break named-argument callers.
	/// </summary>
	/// <param name="eventDataBuffer">The raw event buffer (e.g. returned by xrPollEvent).</param>
	/// <param name="ventDataPassthroughConfigurationImageQuality">Decoded event on success; <see cref="identity"/> when the buffer holds a different event type.</param>
	/// <returns>True if the buffer's type matched and the event was decoded; otherwise false.</returns>
	public static bool Get(XrEventDataBuffer eventDataBuffer, out XrEventDataPassthroughConfigurationImageQualityChangedHTC ventDataPassthroughConfigurationImageQuality)
	{
		ventDataPassthroughConfigurationImageQuality = identity;
		if (eventDataBuffer.type == XrStructureType.XR_TYPE_EVENT_DATA_PASSTHROUGH_CONFIGURATION_IMAGE_QUALITY_CHANGED_HTC)
		{
			ventDataPassthroughConfigurationImageQuality.next = eventDataBuffer.next;
			// Manual unpack of 'varying': offsets assume a 32-bit structure-type enum, 64-bit
			// 'next' pointers and 8-byte alignment (fromImageQuality at bytes 0-19 padded to 24,
			// toImageQuality starting at 24) — NOTE(review): confirm against the native layout.
			ventDataPassthroughConfigurationImageQuality.fromImageQuality.type = (XrStructureType)BitConverter.ToUInt32(eventDataBuffer.varying, 0);
			ventDataPassthroughConfigurationImageQuality.fromImageQuality.next = (IntPtr)BitConverter.ToInt64(eventDataBuffer.varying, 8);
			ventDataPassthroughConfigurationImageQuality.fromImageQuality.scale = BitConverter.ToSingle(eventDataBuffer.varying, 16);
			ventDataPassthroughConfigurationImageQuality.toImageQuality.type = (XrStructureType)BitConverter.ToUInt32(eventDataBuffer.varying, 24);
			ventDataPassthroughConfigurationImageQuality.toImageQuality.next = (IntPtr)BitConverter.ToInt64(eventDataBuffer.varying, 32);
			ventDataPassthroughConfigurationImageQuality.toImageQuality.scale = BitConverter.ToSingle(eventDataBuffer.varying, 40);
			return true;
		}
		return false;
	}
};
/// <summary>
/// System properties for the passthrough-configuration feature — presumably chained into
/// xrGetSystemProperties to query the support flags below (confirm against the
/// XR_HTC_passthrough_configuration specification).
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrSystemPassthroughConfigurationPropertiesHTC
{
	public XrStructureType type;          // XR structure type identifier
	public IntPtr next;                   // next pointer of the XR structure chain (may be null)
	public XrBool32 supportsImageRate;    // true when the system supports configuring passthrough image rate
	public XrBool32 supportsImageQuality; // true when the system supports configuring passthrough image quality
};
#endregion
#region New Functions
/// <summary>
/// Delegate declarations matching the native entry points of the HTC passthrough extensions;
/// function pointers are obtained at runtime via xrGetInstanceProcAddr.
/// </summary>
public static class VivePassthroughHelper
{
	/// <summary>
	/// The delegate function of <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreatePassthroughHTC">xrCreatePassthroughHTC</see>.
	/// </summary>
	/// <param name="session">An <see cref="XrSession">XrSession</see> in which the passthrough will be active.</param>
	/// <param name="createInfo">createInfo is a pointer to an <see cref="XrPassthroughCreateInfoHTC">XrPassthroughCreateInfoHTC</see> structure containing information about how to create the passthrough.</param>
	/// <param name="passthrough">passthrough is a pointer to a handle in which the created <see cref="XrPassthroughHTC">XrPassthroughHTC</see> is returned.</param>
	/// <returns>XR_SUCCESS for success.</returns>
	public delegate XrResult xrCreatePassthroughHTCDelegate(
		XrSession session,
		XrPassthroughCreateInfoHTC createInfo,
		out XrPassthroughHTC passthrough);
	/// <summary>
	/// The delegate function of <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroyPassthroughHTC">xrDestroyPassthroughHTC</see>.
	/// </summary>
	/// <param name="passthrough">passthrough is the <see cref="XrPassthroughHTC">XrPassthroughHTC</see> to be destroyed.</param>
	/// <returns>XR_SUCCESS for success.</returns>
	public delegate XrResult xrDestroyPassthroughHTCDelegate(
		XrPassthroughHTC passthrough);
	/// <summary>
	/// The delegate function of xrEnumeratePassthroughImageRatesHTC. Parameter names follow the
	/// standard OpenXR two-call idiom (query count first, then fill the caller-allocated array).
	/// </summary>
	public delegate XrResult xrEnumeratePassthroughImageRatesHTCDelegate(
		XrSession session,
		[In] UInt32 imageRateCapacityInput,
		ref UInt32 imageRateCountOutput,
		[In, Out] XrPassthroughConfigurationImageRateHTC[] imageRates);
	/// <summary>
	/// The delegate function of xrGetPassthroughConfigurationHTC. The config argument is passed
	/// as a raw pointer to an XrPassthroughConfigurationBaseHeaderHTC-derived structure.
	/// </summary>
	public delegate XrResult xrGetPassthroughConfigurationHTCDelegate(
		XrSession session,
		IntPtr/*ref XrPassthroughConfigurationBaseHeaderHTC*/ config);
	/// <summary>
	/// The delegate function of xrSetPassthroughConfigurationHTC. The config argument is passed
	/// as a raw pointer to an XrPassthroughConfigurationBaseHeaderHTC-derived structure.
	/// </summary>
	public delegate XrResult xrSetPassthroughConfigurationHTCDelegate(
		XrSession session,
		IntPtr/*ref XrPassthroughConfigurationBaseHeaderHTC*/ config);
}
/// <summary>
/// Static event hub for passthrough image-quality change notifications.
/// Listeners that throw during dispatch are logged and dropped from the list.
/// </summary>
public static class VivePassthroughImageQualityChanged
{
	/// <summary>Signature for an image-quality change notification.</summary>
	public delegate void OnImageQualityChanged(float fromQuality, float toQuality);

	// Registered listeners; Send() walks this list in reverse so removal is safe mid-dispatch.
	private static List<OnImageQualityChanged> allEventListeners = new List<OnImageQualityChanged>();

	/// <summary>Registers <paramref name="callback"/> unless it is already registered.</summary>
	public static void Listen(OnImageQualityChanged callback)
	{
		if (allEventListeners.Contains(callback)) { return; }
		allEventListeners.Add(callback);
	}

	/// <summary>Unregisters <paramref name="callback"/>; no-op when it is not registered.</summary>
	public static void Remove(OnImageQualityChanged callback)
	{
		allEventListeners.Remove(callback);
	}

	/// <summary>
	/// Dispatches the quality change to every listener. A listener that throws is removed
	/// so it cannot break subsequent dispatches.
	/// </summary>
	public static void Send(float fromQuality, float toQuality)
	{
		var listeners = allEventListeners;
		if (listeners == null) { return; }
		for (int index = listeners.Count - 1; index >= 0; index--)
		{
			OnImageQualityChanged handler = listeners[index];
			try
			{
				handler(fromQuality, toQuality);
			}
			catch (Exception e)
			{
				Debug.Log("Event : " + e.ToString());
				listeners.Remove(handler);
				Debug.Log("Event : A listener is removed due to exception.");
			}
		}
	}
}
/// <summary>
/// Static event hub for passthrough image-rate change notifications.
/// Listeners that throw during dispatch are logged and dropped from the list.
/// </summary>
public static class VivePassthroughImageRateChanged
{
	/// <summary>Signature for an image-rate change notification.</summary>
	public delegate void OnImageRateChanged(float fromSrcImageRate, float fromDestImageRate, float toSrcImageRate, float toDestImageRate);

	// Registered listeners; Send() walks this list in reverse so removal is safe mid-dispatch.
	private static List<OnImageRateChanged> allEventListeners = new List<OnImageRateChanged>();

	/// <summary>Registers <paramref name="callback"/> unless it is already registered.</summary>
	public static void Listen(OnImageRateChanged callback)
	{
		if (allEventListeners.Contains(callback)) { return; }
		allEventListeners.Add(callback);
	}

	/// <summary>Unregisters <paramref name="callback"/>; no-op when it is not registered.</summary>
	public static void Remove(OnImageRateChanged callback)
	{
		allEventListeners.Remove(callback);
	}

	/// <summary>
	/// Dispatches the rate change to every listener. A listener that throws is removed
	/// so it cannot break subsequent dispatches.
	/// </summary>
	public static void Send(float fromSrcImageRate, float fromDestImageRate, float toSrcImageRate, float toDestImageRate)
	{
		var listeners = allEventListeners;
		if (listeners == null) { return; }
		for (int index = listeners.Count - 1; index >= 0; index--)
		{
			OnImageRateChanged handler = listeners[index];
			try
			{
				handler(fromSrcImageRate, fromDestImageRate, toSrcImageRate, toDestImageRate);
			}
			catch (Exception e)
			{
				Debug.Log("Event : " + e.ToString());
				listeners.Remove(handler);
				Debug.Log("Event : A listener is removed due to exception.");
			}
		}
	}
}
#endregion
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fa28fb90ea5134443bb348ad98be69bd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -16,7 +16,7 @@ using UnityEditor.XR.OpenXR.Features;
namespace VIVE.OpenXR
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Path Enumeration",
[OpenXRFeature(UiName = "VIVE XR Path Enumeration (Beta)",
BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
Company = "HTC",
Desc = "The extension provides more flexibility for the user paths and input/output source paths related to an interaction profile. Developers can use this extension to obtain the path that the user has decided on.",

View File

@@ -0,0 +1,101 @@
# XR_EXT_plane_detection
## Name String
XR_EXT_plane_detection
## Revision
1
## Overview
The PlaneDetectionManager class provides functionality for managing plane detection using the VIVE XR SDK. It includes methods to check feature support, create and destroy plane detectors, and helper functions for interacting with the plane detection extension.
## Plane Detection Workflow
1. Check Feature Support:
```csharp
bool isSupported = PlaneDetectionManager.IsSupported();
```
Ensure the plane detection feature is supported before attempting to create a plane detector.
1. Create Plane Detector:
```csharp
PlaneDetector planeDetector = PlaneDetectionManager.CreatePlaneDetector();
```
Create a plane detector instance to begin detecting planes.
1. Begin Plane Detection:
```csharp
XrResult result = planeDetector.BeginPlaneDetection();
```
Start the plane detection process.
1. Get Plane Detection State:
```csharp
XrPlaneDetectionStateEXT state = planeDetector.GetPlaneDetectionState();
```
Check the current state of the plane detection process.
1. Retrieve Plane Detections:
```csharp
List<PlaneDetectorLocation> locations;
XrResult result = planeDetector.GetPlaneDetections(out locations);
```
Retrieve the detected planes.
1. Get Plane Vertices:
```csharp
Plane plane = planeDetector.GetPlane(planeId);
```
Retrieve the vertices of a specific plane.
1. Destroy Plane Detector:
```csharp
PlaneDetectionManager.DestroyPlaneDetector(planeDetector);
```
Destroy the plane detector to release resources.
## Example Usage
Here's a basic example of how to use the PlaneDetectionManager to detect planes:
```csharp
if (PlaneDetectionManager.IsSupported())
{
var planeDetector = PlaneDetectionManager.CreatePlaneDetector();
if (planeDetector != null)
{
planeDetector.BeginPlaneDetection();
XrPlaneDetectionStateEXT state = planeDetector.GetPlaneDetectionState();
if (state == XrPlaneDetectionStateEXT.DONE_EXT)
{
List<PlaneDetectorLocation> locations;
if (planeDetector.GetPlaneDetections(out locations) == XrResult.XR_SUCCESS)
{
foreach (var location in locations)
{
// Process detected planes
}
}
}
PlaneDetectionManager.DestroyPlaneDetector(planeDetector);
}
}
```
This example checks if the plane detection feature is supported, creates a plane detector, begins the plane detection process, retrieves the detected planes, and finally destroys the plane detector to release resources.

View File

@@ -225,6 +225,7 @@ planeDetector);
#region override functions
protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
{
ViveInterceptors.Instance.AddRequiredFunction("xrWaitFrame");
return ViveInterceptors.Instance.HookGetInstanceProcAddr(func);
}
@@ -243,7 +244,7 @@ planeDetector);
//Debug.Log("OnInstanceCreate() " + m_XrInstance);
CommonWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddr);
SpaceWrapper.Instance.OnInstanceCreate(xrInstance, CommonWrapper.Instance.GetInstanceProcAddr);
SpaceWrapper.Instance.OnInstanceCreate(xrInstance, xrGetInstanceProcAddr);
return GetXrFunctionDelegates(m_XrInstance);
}
@@ -527,4 +528,4 @@ planeDetector);
}
#endregion
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: d0d8ee71cab5ed846878c5673dc3d121
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b3b3ceea1f858a94ab34224b3d148a28
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,126 @@
// Copyright HTC Corporation All Rights Reserved.
using System.Collections;
using UnityEngine;
namespace VIVE.OpenXR.SecondaryViewConfiguration
{
/// <summary>
/// Name: AccessDebugTexture.cs
/// Role: General script
/// Responsibility: To assess the debug texture from SpectatorCameraBased.cs
/// </summary>
[RequireComponent(typeof(Renderer))]
public class AccessDebugTexture : MonoBehaviour
{
	// Shortcut to the SpectatorCameraBased singleton (may be null until it initializes).
	private static SpectatorCameraBased SpectatorCameraBased => SpectatorCameraBased.Instance;

	// Timing constants for polling the SpectatorCameraBased singleton at startup.
	private const float WaitSpectatorCameraBasedInitTime = 1.5f;
	private const float WaitSpectatorCameraBasedPeriodTime = .5f;
	private const float WaitSpectatorCameraBasedMaxTime = 10f;
	private const int QuadrupleCheckIsRecordingCount = 4;

	/// <summary>
	/// The GameObject Renderer component. Only assigned once SpectatorCameraBased has
	/// been found in <see cref="Start"/>; may still be null on early destruction.
	/// </summary>
	private Renderer Renderer { get; set; }

	/// <summary>
	/// The material originally on the Renderer; restored when spectating stops.
	/// </summary>
	private Material DefaultMaterial { get; set; }

	/// <summary>
	/// Set the Renderer material as debug material.
	/// </summary>
	private void SetDebugMaterial()
	{
		Debug.Log("SetDebugMaterial");
		if (SpectatorCameraBased)
		{
			if (SpectatorCameraBased.SpectatorCameraViewMaterial)
			{
				Renderer.material = SpectatorCameraBased.SpectatorCameraViewMaterial;
			}
			else
			{
				Debug.Log("No debug material set on SpectatorCameraBased.");
			}
		}
	}

	/// <summary>
	/// Set the Renderer material as default material.
	/// </summary>
	private void SetDefaultMaterial()
	{
		Debug.Log("SetDefaultMaterial");
		Renderer.material = DefaultMaterial ? DefaultMaterial : null;
	}

	/// <summary>
	/// Polls for the SpectatorCameraBased singleton, then hooks the spectator start/stop
	/// callbacks and applies the debug material immediately if a recording is in progress.
	/// Destroys this component when the singleton never appears within the timeout.
	/// </summary>
	private IEnumerator Start()
	{
		float waitingTime = WaitSpectatorCameraBasedMaxTime;
		bool getSpectatorCameraBased = false;
		yield return new WaitForSeconds(WaitSpectatorCameraBasedInitTime);
		do
		{
			if (!SpectatorCameraBased)
			{
				yield return new WaitForSeconds(WaitSpectatorCameraBasedPeriodTime);
				waitingTime -= WaitSpectatorCameraBasedPeriodTime;
				continue;
			}
			// Set -1 if accessed SpectatorCameraBased so we can break the while loop
			waitingTime = -1;
			getSpectatorCameraBased = true;
			Renderer = GetComponent<Renderer>();
			DefaultMaterial = Renderer.material;
			SpectatorCameraBased.OnSpectatorStart += SetDebugMaterial;
			SpectatorCameraBased.OnSpectatorStop += SetDefaultMaterial;
		} while (waitingTime > 0);
		if (!getSpectatorCameraBased)
		{
			Debug.Log($"Try to get SpectatorCameraBased " +
			          $"{WaitSpectatorCameraBasedMaxTime / WaitSpectatorCameraBasedPeriodTime} times but fail.");
			Debug.Log("Destroy AccessDebugTexture now.");
			Destroy(this);
			yield break;
		}
		// IsRecording may flip on a short delay after startup, so re-check a few frames.
		int quadrupleCheckCount = QuadrupleCheckIsRecordingCount;
		while (quadrupleCheckCount > 0)
		{
			if (SpectatorCameraBased.IsRecording)
			{
				Debug.Log("Recording. Set debug material.");
				SpectatorCameraBased.OnSpectatorStart?.Invoke();
				break;
			}
			quadrupleCheckCount--;
			yield return null;
			Debug.Log("No recording. Keep default material.");
		}
	}

	/// <summary>
	/// Restores the original material and unhooks the spectator callbacks.
	/// </summary>
	private void OnDestroy()
	{
		// BUGFIX: Renderer is only assigned after the singleton is found in Start(). When the
		// singleton never appears, Start() calls Destroy(this) and OnDestroy ran with a null
		// Renderer, throwing NullReferenceException. Guard before touching the material.
		if (Renderer)
		{
			Renderer.material = DefaultMaterial ? DefaultMaterial : null;
		}
		if (SpectatorCameraBased)
		{
			SpectatorCameraBased.OnSpectatorStart -= SetDebugMaterial;
			SpectatorCameraBased.OnSpectatorStop -= SetDefaultMaterial;
		}
	}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 69583d931f8502647afcc5c3266f4d2c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,146 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using UnityEngine;
using UnityEngine.SceneManagement;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace VIVE.OpenXR.SecondaryViewConfiguration
{
/// <summary>
/// Name: SpectatorCameraBased.Editor.cs
/// Role: General script use in Unity Editor only
/// Responsibility: Display the SpectatorCameraBased.cs in Unity Inspector
/// </summary>
public partial class SpectatorCameraBased
{
#if UNITY_EDITOR
	[SerializeField, Tooltip("State of debugging the spectator camera or not")]
	private bool isDebugSpectatorCamera;
	/// <summary>
	/// State of debugging the spectator camera or not.
	/// Turning debugging off also forces the recording flag off.
	/// </summary>
	public bool IsDebugSpectatorCamera
	{
		get => isDebugSpectatorCamera;
		set
		{
			isDebugSpectatorCamera = value;
			if (!value)
			{
				IsRecording = false;
			}
		}
	}
	/// <summary>
	/// Custom inspector for <see cref="SpectatorCameraBased"/>: exposes the view material,
	/// the debug toggle and (while debugging) the recording toggle, with undo support.
	/// </summary>
	[CustomEditor(typeof(SpectatorCameraBased))]
	public class SpectatorCameraBasedEditor : UnityEditor.Editor
	{
		/// <summary>Draws the inspector and marks the target dirty when any field changed.</summary>
		public override void OnInspectorGUI()
		{
			// Just return if not "SpectatorCameraBased" class
			if (!(target is SpectatorCameraBased))
			{
				return;
			}
			serializedObject.Update();
			EditorGUI.BeginChangeCheck();
			DrawGUI();
			if (EditorGUI.EndChangeCheck())
			{
				Debug.Log("SpectatorCameraBased script is changed.");
				EditorUtility.SetDirty(target);
			}
			serializedObject.ApplyModifiedProperties();
		}
		/// <summary>
		/// Draws each editable field inside its own change check so every change gets
		/// an individual Undo record.
		/// </summary>
		private void DrawGUI()
		{
			var script = (SpectatorCameraBased)target;
			// Spectator camera view material field.
			EditorGUI.BeginChangeCheck();
			var currentSpectatorCameraViewMaterial = EditorGUILayout.ObjectField(
				"Spectator Camera View Material",
				script.SpectatorCameraViewMaterial,
				typeof(Material),
				false) as Material;
			if (EditorGUI.EndChangeCheck())
			{
				Undo.RecordObject(target, currentSpectatorCameraViewMaterial
					? "Change Spectator Camera View Material"
					: "Set Spectator Camera View Material as NULL");
				script.SpectatorCameraViewMaterial = currentSpectatorCameraViewMaterial;
			}
			// Debug toggle.
			EditorGUI.BeginChangeCheck();
			var currentIsDebugSpectatorCamera =
				EditorGUILayout.Toggle("Active Spectator Camera Debugging", script.IsDebugSpectatorCamera);
			if (EditorGUI.EndChangeCheck())
			{
				Undo.RecordObject(target, "Change IsDebugSpectatorCamera Value");
				script.IsDebugSpectatorCamera = currentIsDebugSpectatorCamera;
			}
			// Recording toggle is only shown (and only stays on) while debugging is active.
			if (script.IsDebugSpectatorCamera)
			{
				EditorGUI.BeginChangeCheck();
				var currentIsRecording =
					EditorGUILayout.Toggle("Active Spectator Camera Recording", script.IsRecording);
				if (EditorGUI.EndChangeCheck())
				{
					Undo.RecordObject(target, "Change IsRecording Value");
					script.IsRecording = currentIsRecording;
				}
			}
			else
			{
				script.IsRecording = false;
			}
			if (script.IsDebugSpectatorCamera && GUILayout.Button("Load \"Simple_Demo_2\" scene for testing"))
			{
				if (DoesSceneExist("Simple_Demo_2"))
				{
					SceneManager.LoadScene("Simple_Demo_2");
				}
				else
				{
					Debug.LogWarning("Simple_Demo_2 scene not found. Please add it in build setting first.");
				}
			}
		}
	}
	/// <summary>
	/// Returns true if the scene 'name' exists and is in your Build settings, false otherwise.
	/// Comparison against the build-settings scene name is case-insensitive.
	/// </summary>
	private static bool DoesSceneExist(string name)
	{
		if (string.IsNullOrEmpty(name))
		{
			return false;
		}
		for (int i = 0; i < SceneManager.sceneCountInBuildSettings; i++)
		{
			var scenePath = SceneUtility.GetScenePathByBuildIndex(i);
			// Extract the bare scene name: text between the last '/' and the extension dot.
			var lastSlash = scenePath.LastIndexOf("/", StringComparison.Ordinal);
			var sceneName = scenePath.Substring(lastSlash + 1, scenePath.LastIndexOf(".", StringComparison.Ordinal) - lastSlash - 1);
			if (string.Compare(name, sceneName, StringComparison.OrdinalIgnoreCase) == 0)
			{
				return true;
			}
		}
		return false;
	}
#endif
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a040f975f97360649b37c7c9706d3970
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,836 @@
// Copyright HTC Corporation All Rights Reserved.
using System;
using UnityEngine;
using UnityEngine.SceneManagement;
using VIVE.OpenXR.FirstPersonObserver;
using VIVE.OpenXR.SecondaryViewConfiguration;
namespace VIVE.OpenXR.SecondaryViewConfiguration
{
/// <summary>
/// Name: SpectatorCameraBased.cs
/// Role: The base class cooperating with OpenXR SecondaryViewConfiguration Extension in Unity MonoBehaviour lifecycle (Singleton)
/// Responsibility: The handler responsible for the cooperation between the Unity MonoBehaviour lifecycle and OpenXR framework lifecycle
/// </summary>
public partial class SpectatorCameraBased : MonoBehaviour
{
private static SpectatorCameraBased _instance;
/// <summary>
/// SpectatorCameraBased static instance (Singleton)
/// </summary>
public static SpectatorCameraBased Instance => _instance;
#region Default value definition
/// <summary>
/// Camera texture width
/// </summary>
private const int TextureWidthDefault = 1920;
/// <summary>
/// Camera texture height
/// </summary>
private const int TextureHeightDefault = 1080;
/// <summary>
/// Camera GameObject Based Name
/// </summary>
private const string CameraGameObjectBasedName = "Spectator Camera Based Object";
/// <summary>
/// To define how long of the time (second) that the recording state is changed
/// </summary>
public const float RECORDING_STATE_CHANGE_THRESHOLD_IN_SECOND = 1f;
#endregion
#if !UNITY_EDITOR && UNITY_ANDROID
#region OpenXR Extension
/// <summary>
/// ViveFirstPersonObserver OpenXR extension
/// </summary>
private static ViveFirstPersonObserver FirstPersonObserver => ViveFirstPersonObserver.Instance;
/// <summary>
/// ViveSecondaryViewConfiguration OpenXR extension
/// </summary>
private static ViveSecondaryViewConfiguration SecondaryViewConfiguration => ViveSecondaryViewConfiguration.Instance;
#endregion
#region Locker and flag for multithread safety of texture updating
/// <summary>
/// Locker of NeedReInitTexture variables
/// </summary>
private readonly object _needReInitTextureLock = new object();
/// <summary>
/// State of whether re-init is needed for camera texture
/// </summary>
private bool NeedReInitTexture { get; set; }
/// <summary>
/// Locker of NeedUpdateTexture variables
/// </summary>
private readonly object _needUpdateTextureLock = new object();
/// <summary>
/// State of whether updated camera texture is needed
/// </summary>
private bool NeedUpdateTexture { get; set; }
#endregion
#endif
#region Spectator camera texture variables
/// <summary>
/// Camera texture size
/// </summary>
private Vector2 CameraTargetTextureSize { get; set; }
/// <summary>
/// Camera texture
/// </summary>
private RenderTexture CameraTargetTexture { get; set; }
#endregion
/// <summary>
/// GameObject of the spectator camera
/// </summary>
public GameObject SpectatorCameraGameObject { get; private set; }
/// <summary>
/// Camera component of the spectator camera
/// </summary>
public Camera SpectatorCamera { get; private set; }
public Camera MainCamera { get; private set; }
#region Debug Variables
[SerializeField] private Material spectatorCameraViewMaterial;
/// <summary>
/// Material that show the spectator camera view
/// </summary>
public Material SpectatorCameraViewMaterial
{
get => spectatorCameraViewMaterial;
set
{
spectatorCameraViewMaterial = value;
if (spectatorCameraViewMaterial && SpectatorCamera)
{
spectatorCameraViewMaterial.mainTexture = SpectatorCamera.targetTexture;
}
}
}
#endregion
private bool _followHmd;
/// <summary>
/// Is the spectator camera following the HMD or not
/// </summary>
public bool FollowHmd
{
get => _followHmd;
set
{
_followHmd = value;
if (SpectatorCamera.transform.parent != null &&
(SpectatorCamera.transform.localPosition != Vector3.zero ||
SpectatorCamera.transform.localRotation != Quaternion.identity))
{
Debug.Log("The local position or rotation should not be modified. Will reset the SpectatorCamera transform.");
SpectatorCamera.transform.localPosition = Vector3.zero;
SpectatorCamera.transform.localRotation = Quaternion.identity;
}
}
}
/// <summary>
/// State of allowing capture the 360 image or not
/// </summary>
public static bool IsAllowSpectatorCameraCapture360Image =>
#if !UNITY_EDITOR && UNITY_ANDROID
SecondaryViewConfiguration.IsAllowSpectatorCameraCapture360Image
#else
true
#endif
;
/// <summary>
/// SpectatorCameraBased init success or not
/// </summary>
private bool InitSuccess { get; set; }
/// <summary>
/// State of whether the app is not be focusing by the user
/// </summary>
private bool IsInBackground { get; set; }
[SerializeField, Tooltip("State of whether the spectator camera is recording currently")]
private bool isRecording;
/// <summary>
/// State of whether the spectator camera is recording currently
/// </summary>
public bool IsRecording
{
get => isRecording;
set
{
isRecording = value;
if (value)
{
if (IsPerformedStartRecordingCallback)
{
return;
}
IsPerformedStartRecordingCallback = true;
IsPerformedCloseRecordingCallback = false;
OnSpectatorStart?.Invoke();
}
else
{
if (IsPerformedCloseRecordingCallback ||
/* Because OpenXR periodically changes the spectator enabled flag, we need
to consider checking the state with a time delay so that we can make sure
it is changing for a long while or just periodically. */
Math.Abs(LastRecordingStateIsDisableTime - LastRecordingStateIsActiveTime) <
RECORDING_STATE_CHANGE_THRESHOLD_IN_SECOND)
{
return;
}
IsPerformedCloseRecordingCallback = true;
IsPerformedStartRecordingCallback = false;
OnSpectatorStop?.Invoke();
}
}
}
/// <summary>
/// The last time of the recording state that is active.
/// </summary>
public float LastRecordingStateIsActiveTime { get; private set; }
/// <summary>
/// The last time of the recording state that is disable.
/// </summary>
public float LastRecordingStateIsDisableTime { get; private set; }
/// <summary>
/// Flag denotes the callback is performed when the recording state changes to active
/// </summary>
private bool IsPerformedStartRecordingCallback { get; set; }
/// <summary>
/// Flag denotes the callback is performed when the recording state changes to disable
/// </summary>
private bool IsPerformedCloseRecordingCallback { get; set; }
#region Public variables for register the delegate callback functions
/// <summary>
/// Delegate type for spectator camera callbacks.
/// A delegate declaration that can encapsulate a method that takes no argument and returns void.
/// </summary>
public delegate void SpectatorCameraCallback();
/// <summary>
/// Delegate that custom code is executed when the spectator camera state changes to active.
/// </summary>
public SpectatorCameraCallback OnSpectatorStart;
/// <summary>
/// Delegate that custom code is executed when the spectator camera state changes to disable.
/// </summary>
public SpectatorCameraCallback OnSpectatorStop;
#endregion
#if !UNITY_EDITOR && UNITY_ANDROID
/// <summary>
/// Set the flag NeedReInitTexture as true
/// </summary>
/// <param name="size">The re-init texture size</param>
private void OnTextureSizeUpdated(Vector2 size)
{
lock (_needReInitTextureLock)
{
NeedReInitTexture = true;
CameraTargetTextureSize = size;
}
}
/// <summary>
/// Set the flag NeedUpdateTexture as true
/// </summary>
private void OnTextureUpdated()
{
lock (_needUpdateTextureLock)
{
NeedUpdateTexture = true;
}
}
/// <summary>
/// Init the projection matrix of spectator camera
/// </summary>
/// <param name="left">The position of the left vertical plane of the viewing frustum</param>
/// <param name="right">The position of the right vertical plane of the viewing frustum</param>
/// <param name="top">The position of the top horizontal plane of the viewing frustum</param>
/// <param name="bottom">The position of the bottom horizontal plane of the viewing frustum</param>
private void OnFovUpdated(float left, float right, float top, float bottom)
{
#region Modify the camera projection matrix (No need, just for reference)
/*
if (SpectatorCamera)
{
float far = SpectatorCamera.farClipPlane;
float near = SpectatorCamera.nearClipPlane;
SpectatorCamera.projectionMatrix = new Matrix4x4()
{
[0, 0] = 2f / (right - left),
[0, 1] = 0,
[0, 2] = (right + left) / (right - left),
[0, 3] = 0,
[1, 0] = 0,
[1, 1] = 2f / (top - bottom),
[1, 2] = (top + bottom) / (top - bottom),
[1, 3] = 0,
[2, 0] = 0,
[2, 1] = 0,
[2, 2] = -(far + near) / (far - near),
[2, 3] = -(2f * far * near) / (far - near),
[3, 0] = 0,
[3, 1] = 0,
[3, 2] = -1f,
[3, 3] = 0,
};
}
*/
#endregion
}
#endif
/// <summary>
/// Init the camera texture
/// </summary>
private void InitCameraTargetTexture()
{
if (CameraTargetTextureSize.x == 0 || CameraTargetTextureSize.y == 0)
{
#if !UNITY_EDITOR && UNITY_ANDROID
if (SecondaryViewConfiguration.TextureSize.x == 0 || SecondaryViewConfiguration.TextureSize.y == 0)
{
CameraTargetTextureSize = new Vector2(TextureWidthDefault, TextureHeightDefault);
}
else
{
CameraTargetTextureSize = SecondaryViewConfiguration.TextureSize;
}
#else
CameraTargetTextureSize = new Vector2(TextureWidthDefault, TextureHeightDefault);
#endif
}
if (!CameraTargetTexture)
{
// Texture is not create yet. Create it.
CameraTargetTexture = new RenderTexture
(
(int)CameraTargetTextureSize.x,
(int)CameraTargetTextureSize.y,
24,
RenderTextureFormat.ARGB32
);
InitPostProcessing();
return;
}
if (CameraTargetTexture.width == (int)CameraTargetTextureSize.x &&
CameraTargetTexture.height == (int)CameraTargetTextureSize.y)
{
// Texture size is same, just return.
return;
}
// Release the last time resource
SpectatorCamera.targetTexture = null;
if (SpectatorCameraViewMaterial)
{
SpectatorCameraViewMaterial.mainTexture = null;
}
CameraTargetTexture.Release();
// Re-init
CameraTargetTexture.width = (int)CameraTargetTextureSize.x;
CameraTargetTexture.height = (int)CameraTargetTextureSize.y;
CameraTargetTexture.depth = 24;
CameraTargetTexture.format = RenderTextureFormat.ARGB32;
InitPostProcessing();
return;
void InitPostProcessing()
{
if (!CameraTargetTexture.IsCreated())
{
Debug.Log("The RenderTexture is not create yet. Will create it.");
bool created = CameraTargetTexture.Create();
Debug.Log($"Try to create RenderTexture: {created}");
if (created)
{
SpectatorCamera.targetTexture = CameraTargetTexture;
if (SpectatorCameraViewMaterial)
{
SpectatorCameraViewMaterial.mainTexture = SpectatorCamera.targetTexture;
}
}
}
else
{
Debug.Log("The RenderTexture is already created.");
}
}
}
#if !UNITY_EDITOR && UNITY_ANDROID
/// <summary>
/// Update camera texture and then copy data of the camera texture to native texture space
/// </summary>
private void SecondViewTextureUpdate()
{
if (SecondaryViewConfiguration.MyTexture)
{
SpectatorCamera.enabled = true;
SpectatorCamera.Render();
SpectatorCamera.enabled = false;
if (SpectatorCamera.targetTexture)
{
// Copy Unity texture data to native texture
Graphics.CopyTexture(
SpectatorCamera.targetTexture,
0,
0,
SecondaryViewConfiguration.MyTexture,
0,
0);
}
else
{
Debug.LogError("Cannot copy the rendering data because the camera target texture is null!");
}
// Call native function that finishes the texture update
ViveSecondaryViewConfiguration.ReleaseSecondaryViewTexture();
}
else
{
Debug.LogError("Cannot copy the rendering data because SecondaryViewConfiguration.MyTexture is null!");
}
}
#endif
/// <summary>
/// Set the main texture of SpectatorCameraViewMaterial material as spectator camera texture
/// </summary>
private void SetCameraBasedTargetTexture2SpectatorCameraViewMaterial()
{
if (SpectatorCameraViewMaterial)
{
SpectatorCameraViewMaterial.mainTexture = SpectatorCamera.targetTexture;
}
}
/// <summary>
/// Set the main texture of SpectatorCameraViewMaterial material as NULL value
/// </summary>
private void SetNull2SpectatorCameraViewMaterial()
{
if (SpectatorCameraViewMaterial)
{
SpectatorCameraViewMaterial.mainTexture = null;
}
}
/// <summary>
/// Set whether the current camera viewpoint comes from HMD or not
/// </summary>
/// <param name="isViewFromHmd">The bool value represents the current view of whether the spectator camera is coming from hmd or not.</param>
public void SetViewFromHmd(bool isViewFromHmd)
{
#if !UNITY_EDITOR && UNITY_ANDROID
ViveSecondaryViewConfiguration.SetViewFromHmd(isViewFromHmd);
#endif
FollowHmd = isViewFromHmd;
}
/// <summary>
/// Get MainCamera in the current scene.
/// </summary>
/// <returns>The Camera component with MainCamera tag in the current scene</returns>
public static Camera GetMainCamera()
{
return Camera.main;
}
#region Unity life-cycle event
// Unity Start lifecycle: enforces the singleton, (on Android device builds)
// validates the required OpenXR extensions and subscribes to the native
// secondary-view callbacks, creates the dedicated spectator Camera object, and
// initializes recording bookkeeping. InitSuccess becomes true only when the
// whole sequence completes; all other lifecycle methods bail out while false.
private void Start()
{
InitSuccess = false;
// Singleton guard: a duplicate destroys itself, but first hands its material
// setting to the surviving instance so user configuration is not lost.
if (_instance != null && _instance != this)
{
Debug.Log("Destroy the SpectatorCameraBased");
if (SpectatorCameraViewMaterial)
{
Debug.Log("Copy SpectatorCameraBased material setting before destroy.");
_instance.SpectatorCameraViewMaterial = SpectatorCameraViewMaterial;
}
DestroyImmediate(this);
return;
}
else
{
_instance = this;
// To prevent this from being destroyed on load, check whether this gameObject has a parent;
// if so, detach it first (only root objects survive scene loads).
if (transform.parent != null)
{
transform.SetParent(null);
}
DontDestroyOnLoad(_instance.gameObject);
}
#if !UNITY_EDITOR && UNITY_ANDROID
// Device build: both feature references must exist before wiring callbacks.
if (SecondaryViewConfiguration && FirstPersonObserver)
{
// To check, "XR_MSFT_first_person_observer" is enough because it
// requires "XR_MSFT_secondary_view_configuration" to be enabled also.
if (!ViveFirstPersonObserver.IsExtensionEnabled())
{
Debug.LogWarning(
$"The OpenXR extension, {ViveSecondaryViewConfiguration.OPEN_XR_EXTENSION_STRING} " +
$"or {ViveFirstPersonObserver.OPEN_XR_EXTENSION_STRING}, is disabled. " +
"Please enable the extension before building the app.");
Debug.Log("Destroy the SpectatorCameraBased");
DestroyImmediate(this);
return;
}
// Receive texture-size / texture / fov updates pushed from the native plug-in.
SecondaryViewConfiguration.onTextureSizeUpdated += OnTextureSizeUpdated;
SecondaryViewConfiguration.onTextureUpdated += OnTextureUpdated;
SecondaryViewConfiguration.onFovUpdated += OnFovUpdated;
}
else
{
Debug.LogError(
"Cannot find the static instance of ViveSecondaryViewConfiguration or ViveFirstPersonObserver," +
" pls reopen the app later.");
Debug.Log("Destroy the SpectatorCameraBased");
DestroyImmediate(this);
return;
}
// If the secondary view was already enabled before this component started
// (e.g. recording in progress), schedule a texture (re)init and update that
// LateUpdate will consume.
bool isSecondaryViewAlreadyEnabled = SecondaryViewConfiguration.IsEnabled;
Debug.Log(
$"The state of ViveSecondaryViewConfiguration.IsEnabled is {isSecondaryViewAlreadyEnabled}");
lock (_needReInitTextureLock)
{
NeedReInitTexture = isSecondaryViewAlreadyEnabled;
}
lock (_needUpdateTextureLock)
{
NeedUpdateTexture = isSecondaryViewAlreadyEnabled;
}
IsRecording = isSecondaryViewAlreadyEnabled;
#endif
// Dedicated camera object for the spectator view; kept across scene loads.
SpectatorCameraGameObject = new GameObject(CameraGameObjectBasedName)
{
transform = { position = Vector3.zero, rotation = Quaternion.identity }
};
DontDestroyOnLoad(SpectatorCameraGameObject);
SpectatorCamera = SpectatorCameraGameObject.AddComponent<Camera>();
// The spectator camera never renders to the HMD eyes.
SpectatorCamera.stereoTargetEye = StereoTargetEyeMask.None;
MainCamera = GetMainCamera();
if (MainCamera != null)
{
// Set spectator camera to render after the main camera
SpectatorCamera.depth = MainCamera.depth + 1;
}
// Manually call Render() function once time at Start()
// because it can reduce the performance impact of first-time calls at SecondViewTextureUpdate
SpectatorCamera.Render();
SpectatorCamera.enabled = false;
// Reset pose-following and recording-callback state before wiring events.
FollowHmd = true;
IsInBackground = false;
IsPerformedStartRecordingCallback = false;
IsPerformedCloseRecordingCallback = false;
LastRecordingStateIsActiveTime = 0f;
LastRecordingStateIsDisableTime = 0f;
OnSpectatorStart += SetCameraBasedTargetTexture2SpectatorCameraViewMaterial;
OnSpectatorStop += SetNull2SpectatorCameraViewMaterial;
SceneManager.sceneLoaded += OnSceneLoaded;
#if !UNITY_EDITOR && UNITY_ANDROID
// Recording already active: fire the start callback immediately.
if (isSecondaryViewAlreadyEnabled)
{
OnSpectatorStart?.Invoke();
}
#endif
#if UNITY_EDITOR
// Editor-only: drive the camera directly so the spectator view can be
// debugged without the native secondary-view pipeline.
OnSpectatorStart += () => { SpectatorCamera.enabled = true; };
OnSpectatorStop += () => { SpectatorCamera.enabled = false; };
CameraTargetTextureSize = new Vector2
(
TextureWidthDefault,
TextureHeightDefault
);
InitCameraTargetTexture();
SpectatorCamera.enabled = IsDebugSpectatorCamera && IsRecording;
#endif
InitSuccess = true;
}
// Unity LateUpdate: keeps the spectator camera pose in sync (either following
// the HMD or reporting its own pose to the native plug-in), tracks the
// recording state with a debounce, and services texture re-init/update
// requests raised by the native callbacks.
private void LateUpdate()
{
if (!InitSuccess)
{
return;
}
if (IsInBackground)
{
return;
}
// When parented, the camera must sit exactly at its parent's origin; any
// local offset is reset (with a log) so the pose always comes from the
// parent transform or the HMD follow below.
if (SpectatorCamera.transform.parent != null &&
(SpectatorCamera.transform.localPosition != Vector3.zero ||
SpectatorCamera.transform.localRotation != Quaternion.identity))
{
Debug.Log("The local position or rotation should not be modified. Will reset the SpectatorCamera transform.");
SpectatorCamera.transform.localPosition = Vector3.zero;
SpectatorCamera.transform.localRotation = Quaternion.identity;
}
if (FollowHmd)
{
// Mirror the main (HMD) camera pose; re-acquire the main camera if lost.
if (MainCamera != null || (MainCamera = GetMainCamera()) != null)
{
Transform spectatorCameraTransform = SpectatorCamera.transform;
Transform hmdCameraTransform = MainCamera.transform;
spectatorCameraTransform.position = hmdCameraTransform.position;
spectatorCameraTransform.rotation = hmdCameraTransform.rotation;
}
}
else
{
#if !UNITY_EDITOR && UNITY_ANDROID
// Free camera: push its pose to the runtime while the view is active.
if (!SecondaryViewConfiguration.IsStopped)
{
Transform referenceTransform = SpectatorCamera.transform;
// Left-handed coordinate system (Unity) -> right-handed coordinate system (OpenXR)
var spectatorCameraPositionInOpenXRSpace = new XrVector3f
(
referenceTransform.position.x,
referenceTransform.position.y,
-referenceTransform.position.z
);
var spectatorCameraQuaternionInOpenXRSpace = new XrQuaternionf
(
referenceTransform.rotation.x,
referenceTransform.rotation.y,
-referenceTransform.rotation.z,
-referenceTransform.rotation.w
);
var spectatorCameraPose = new XrPosef
(
spectatorCameraQuaternionInOpenXRSpace,
spectatorCameraPositionInOpenXRSpace
);
ViveSecondaryViewConfiguration.SetNonHmdViewPose(spectatorCameraPose);
}
#endif
}
#if !UNITY_EDITOR && UNITY_ANDROID
// On device the runtime owns the recording state.
IsRecording = SecondaryViewConfiguration.IsEnabled;
#endif
if (IsRecording)
{
LastRecordingStateIsActiveTime = Time.unscaledTime;
}
else
{
LastRecordingStateIsDisableTime = Time.unscaledTime;
if (!IsPerformedCloseRecordingCallback &&
/* Because OpenXR periodically changes the spectator enabled flag, we need
to consider checking the state with a time delay so that we can make sure
it is changing for a long while or just periodically. */
Math.Abs(LastRecordingStateIsDisableTime - LastRecordingStateIsActiveTime) >
RECORDING_STATE_CHANGE_THRESHOLD_IN_SECOND)
{
IsPerformedCloseRecordingCallback = true;
IsPerformedStartRecordingCallback = false;
OnSpectatorStop?.Invoke();
}
return;
}
#if !UNITY_EDITOR && UNITY_ANDROID
// Flags are raised by native callbacks and guarded by locks (presumably
// cross-thread — confirm); consume them here on the main thread.
lock (_needReInitTextureLock)
{
if (NeedReInitTexture)
{
NeedReInitTexture = false;
InitCameraTargetTexture();
}
}
lock (_needUpdateTextureLock)
{
if (NeedUpdateTexture)
{
NeedUpdateTexture = false;
// Mark the shared image not-ready while Unity writes into it.
ViveSecondaryViewConfiguration.SetStateSecondaryViewImageDataReady(false);
SecondViewTextureUpdate();
ViveSecondaryViewConfiguration.SetStateSecondaryViewImageDataReady(true);
}
}
#endif
}
// Unity focus callback: logs the focus change; skipped (with a log) until
// Start() has completed successfully.
private void OnApplicationFocus(bool hasFocus)
{
    if (InitSuccess)
    {
        Debug.Log($"SpectatorCameraBased.OnApplicationFocus: {hasFocus}");
        return;
    }
    Debug.Log("Init unsuccessfully, just return from SpectatorCameraBased.OnApplicationFocus.");
}
// Unity pause callback: records background state (read by LateUpdate) and, on
// device, asks the native plug-in to rebuild its swapchain when the app
// resumes while recording is active.
private void OnApplicationPause(bool pauseStatus)
{
if (!InitSuccess)
{
Debug.Log("Init unsuccessfully, just return from SpectatorCameraBased.OnApplicationPause.");
return;
}
Debug.Log($"SpectatorCameraBased.OnApplicationPause: {pauseStatus}");
#if !UNITY_EDITOR && UNITY_ANDROID
// Need to re-create the swapchain when recording is active and Unity app is resumed
if (SecondaryViewConfiguration.IsEnabled && !pauseStatus)
{
ViveSecondaryViewConfiguration.RequireReinitSwapchain();
}
#endif
IsInBackground = pauseStatus;
}
// Unity destroy callback: detaches the callbacks registered in Start() and
// releases camera / texture / native resources.
private void OnDestroy()
{
    if (!InitSuccess)
    {
        Debug.Log("Init unsuccessfully, just return from SpectatorCameraBased.OnDestroy.");
        return;
    }
    Debug.Log("SpectatorCameraBased.OnDestroy");
    // Fix: Start() subscribed to the static SceneManager.sceneLoaded event but it
    // was never removed, leaving the static event holding a reference to this
    // destroyed component.
    SceneManager.sceneLoaded -= OnSceneLoaded;
#if !UNITY_EDITOR && UNITY_ANDROID
    SecondaryViewConfiguration.onTextureSizeUpdated -= OnTextureSizeUpdated;
    SecondaryViewConfiguration.onTextureUpdated -= OnTextureUpdated;
    // Fix: Start() also subscribed OnFovUpdated; it must be removed symmetrically.
    SecondaryViewConfiguration.onFovUpdated -= OnFovUpdated;
#endif
    if (SpectatorCamera)
    {
        SpectatorCamera.targetTexture = null;
    }
    if (SpectatorCameraViewMaterial)
    {
        SpectatorCameraViewMaterial.mainTexture = null;
    }
    if (CameraTargetTexture)
    {
        Destroy(CameraTargetTexture);
    }
#if !UNITY_EDITOR && UNITY_ANDROID
    ViveSecondaryViewConfiguration.ReleaseAllResources();
#endif
}
// SceneManager.sceneLoaded handler (subscribed in Start): re-acquires the main
// camera of the new scene and, on device, requests a swapchain re-init while
// the secondary view is still active.
private void OnSceneLoaded(Scene scene, LoadSceneMode mode)
{
if (!InitSuccess)
{
Debug.Log("Init unsuccessfully, just return from SpectatorCameraBased.OnSceneLoaded.");
return;
}
Debug.Log($"SpectatorCameraBased.OnSceneLoaded: {scene.name}");
MainCamera = GetMainCamera();
#if !UNITY_EDITOR && UNITY_ANDROID
if (!SecondaryViewConfiguration.IsStopped)
{
// Need to re-init the swapchain when recording is active and new Unity scene is loaded
ViveSecondaryViewConfiguration.RequireReinitSwapchain();
}
#endif
}
#endregion
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 47a6afbbc5e705041b2851122f306f76
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,93 @@
// Copyright HTC Corporation All Rights Reserved.
using UnityEngine;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace VIVE.OpenXR.SecondaryViewConfiguration
{
/// <summary>
/// Name: SecondaryViewConfiguration.Editor.cs
/// Role: General script use in Unity Editor only
/// Responsibility: Display the SecondaryViewConfiguration.cs in Unity Project Settings
/// </summary>
public partial class ViveSecondaryViewConfiguration
{
// Serialized via [field: SerializeField] so the auto-property backing fields
// persist in the feature asset.
[field: SerializeField] internal bool IsAllowSpectatorCameraCapture360Image { get; set; }
[field: SerializeField] internal bool IsEnableDebugLog { get; set; }
#if UNITY_EDITOR
// Custom inspector for the feature: draws the two toggles with Undo support.
[CustomEditor(typeof(ViveSecondaryViewConfiguration))]
public class ViveSecondaryViewConfigurationEditor : UnityEditor.Editor
{
public override void OnInspectorGUI()
{
// Just return if not "ViveSecondaryViewConfiguration" class
if (!(target is ViveSecondaryViewConfiguration))
{
return;
}
serializedObject.Update();
DrawGUI();
serializedObject.ApplyModifiedProperties();
}
// Draws both toggles. Enabling 360 capture additionally requires
// PlayerSettings.enable360StereoCapture, so the user is asked to confirm.
private void DrawGUI()
{
var script = (ViveSecondaryViewConfiguration)target;
EditorGUI.BeginChangeCheck();
var currentIsAllowSpectatorCameraCapture360Image =
EditorGUILayout.Toggle("Allow capture panorama", script.IsAllowSpectatorCameraCapture360Image);
if (EditorGUI.EndChangeCheck())
{
if (currentIsAllowSpectatorCameraCapture360Image && !PlayerSettings.enable360StereoCapture)
{
const string acceptButtonString =
"OK";
const string cancelButtonString =
"Cancel";
const string openCapture360ImageAdditionRequestTitle =
"Additional Request of Capturing 360 Image throughout the Spectator Camera";
const string openCapture360ImageAdditionRequestDescription =
"Allow the spectator camera to capture 360 images. Addition Request:\n" +
"1.) Open the \"enable360StereoCapture\" in the Unity Player Setting " +
"Page.";
bool acceptDialog1 = EditorUtility.DisplayDialog(
openCapture360ImageAdditionRequestTitle,
openCapture360ImageAdditionRequestDescription,
acceptButtonString,
cancelButtonString);
if (acceptDialog1)
{
PlayerSettings.enable360StereoCapture = true;
}
else
{
// NOTE(review): this early return also skips drawing the debug-log
// toggle below for the current GUI pass — confirm that is intended.
return;
}
}
Undo.RecordObject(target, "Modified ViveSecondaryViewConfiguration IsAllowSpectatorCameraCapture360Image");
EditorUtility.SetDirty(target);
script.IsAllowSpectatorCameraCapture360Image = currentIsAllowSpectatorCameraCapture360Image;
}
EditorGUI.BeginChangeCheck();
var currentIsEnableDebugLog =
EditorGUILayout.Toggle("Print log for debugging", script.IsEnableDebugLog);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Modified ViveSecondaryViewConfiguration IsEnableDebugLog");
EditorUtility.SetDirty(target);
script.IsEnableDebugLog = currentIsEnableDebugLog;
}
}
}
#endif
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8d6f293344377c74c910f4121af00a7b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,898 @@
// Copyright HTC Corporation All Rights Reserved.
using AOT;
using System;
using System.Runtime.InteropServices;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
namespace VIVE.OpenXR.SecondaryViewConfiguration
{
/// <summary>
/// Name: SecondaryViewConfiguration.cs
/// Role: OpenXR SecondaryViewConfiguration Extension Class
/// Responsibility: The OpenXR extension implementation and its lifecycles logic in OpenXR
/// </summary>
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Spectator Camera (Beta)",
BuildTargetGroups = new[] { BuildTargetGroup.Android },
Company = "HTC",
Desc = "Allows an application to enable support for one or more secondary view configurations.",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = OPEN_XR_EXTENSION_STRING,
Version = "1.0.0",
FeatureId = FeatureId)]
#endif
public partial class ViveSecondaryViewConfiguration : OpenXRFeature
{
#region Varibles
private static ViveSecondaryViewConfiguration _instance;
/// <summary>
/// ViveSecondaryViewConfiguration static instance (Singleton), resolved lazily
/// from the active OpenXR settings on first access.
/// </summary>
public static ViveSecondaryViewConfiguration Instance
{
    get
    {
        _instance ??= OpenXRSettings.Instance.GetFeature<ViveSecondaryViewConfiguration>();
        return _instance;
    }
}
#region OpenXR variables related to definition
/// <summary>
/// The log identification.
/// </summary>
private const string LogTag = "VIVE.OpenXR.SecondaryViewConfiguration";
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string FeatureId = "vive.openxr.feature.secondaryviewconfiguration";
/// <summary>
/// OpenXR specification <a href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_MSFT_secondary_view_configuration">12.122. XR_MSFT_secondary_view_configuration</a>.
/// </summary>
public const string OPEN_XR_EXTENSION_STRING = "XR_MSFT_secondary_view_configuration";
/// <summary>
/// The extension library name.
/// </summary>
private const string ExtLib = "libviveopenxr";
#endregion
#region OpenXR variables related to its life-cycle
/// <summary>
/// The flag represents whether the OpenXR loader created an instance or not.
/// </summary>
private bool XrInstanceCreated { get; set; } = false;
/// <summary>
/// The flag represents whether the OpenXR loader created a session or not.
/// </summary>
private bool XrSessionCreated { get; set; } = false;
/// <summary>
/// The flag represents whether the OpenXR loader started a session or not.
/// </summary>
private bool XrSessionStarted { get; set; } = false;
/// <summary>
/// The instance created through xrCreateInstance.
/// </summary>
private XrInstance XrInstance { get; set; } = 0;
/// <summary>
/// An XrSystemId is an opaque atom used by the runtime to identify a system.
/// </summary>
private XrSystemId XrSystemId { get; set; } = 0;
/// <summary>
/// A session represents an applications intention to display XR content to the user.
/// </summary>
private XrSession XrSession { get; set; } = 0;
/// <summary>
/// New possible session lifecycle states.
/// </summary>
private XrSessionState XrSessionNewState { get; set; } = XrSessionState.XR_SESSION_STATE_UNKNOWN;
/// <summary>
/// The previous state possible session lifecycle states.
/// </summary>
private XrSessionState XrSessionOldState { get; set; } = XrSessionState.XR_SESSION_STATE_UNKNOWN;
/// <summary>
/// The function delegate declaration of xrGetInstanceProcAddr.
/// </summary>
private OpenXRHelper.xrGetInstanceProcAddrDelegate XrGetInstanceProcAddr { get; set; }
#endregion
#region Variables related to handle agent functions
/// <summary>
/// A delegate declaration can encapsulate the method that takes a boolean argument and returns void. This declaration should only be used in the function "SecondaryViewConfigurationInterceptOpenXRMethod".
/// </summary>
private delegate void SetSecondaryViewConfigurationStateDelegate(bool isEnabled);
/// <summary>
/// A delegate declaration can encapsulate the method that takes a boolean argument and returns void. This declaration should only be used in the function "SecondaryViewConfigurationInterceptOpenXRMethod".
/// </summary>
private delegate void StopEnableSecondaryViewConfigurationDelegate(bool isStopped);
/// <summary>
/// A delegate declaration can encapsulate the method that takes a boolean argument and returns void. This declaration should only be used in the function "SecondaryViewConfigurationInterceptOpenXRMethod".
/// </summary>
private delegate void SetTextureSizeDelegate(UInt32 width, UInt32 height);
/// <summary>
/// A delegate declaration can encapsulate the method that takes a boolean argument and returns void. This declaration should only be used in the function "SecondaryViewConfigurationInterceptOpenXRMethod".
/// </summary>
private delegate void SetFovDelegate(XrFovf fov);
#endregion
#region Variables related to callback functions instantiation
/// <summary>
/// A delegate declaration can encapsulate the method that takes a Vector2 argument and returns void.
/// </summary>
public delegate void OnTextureSizeUpdatedDelegate(Vector2 size);
/// <summary>
/// The instantiation of the delegate OnTextureSizeUpdatedDelegate. This will be called when the texture size coming from the native plugin is updated.
/// </summary>
public OnTextureSizeUpdatedDelegate onTextureSizeUpdated;
/// <summary>
/// A delegate declaration can encapsulate the method that takes no argument and returns void.
/// </summary>
public delegate void OnTextureUpdatedDelegate();
/// <summary>
/// The instantiation of the delegate OnTextureUpdatedDelegate. This will be called when the texture coming from the native plugin is updated.
/// </summary>
public OnTextureUpdatedDelegate onTextureUpdated;
/// <summary>
/// A delegate declaration can encapsulate the method that takes four floating-point arguments, left, right, up, and down, respectively, and returns void.
/// </summary>
public delegate void OnFovUpdatedDelegate(float left, float right, float up, float down);
/// <summary>
/// The instantiation of the delegate OnFovUpdatedDelegate. This will be called when the fov setting coming from the native plugin is updated.
/// </summary>
public OnFovUpdatedDelegate onFovUpdated;
#endregion
#region Rendering and texture varibles
/// <summary>
/// Maps the active Unity graphics device type onto the plug-in's GraphicsAPI
/// enum. Only OpenGL ES 3 and Vulkan are recognized; anything else is Unknown.
/// </summary>
private static GraphicsAPI MyGraphicsAPI
{
    get
    {
        var deviceType = SystemInfo.graphicsDeviceType;
        if (deviceType == GraphicsDeviceType.OpenGLES3)
        {
            return GraphicsAPI.GLES3;
        }
        if (deviceType == GraphicsDeviceType.Vulkan)
        {
            return GraphicsAPI.Vulkan;
        }
        return GraphicsAPI.Unknown;
    }
}
// Backing field for TextureSize.
private Vector2 _textureSize;
/// <summary>
/// The value of texture size coming from the native plugin. The private setter
/// (used by the native SetTextureSize callback) also raises
/// <see cref="onTextureSizeUpdated"/>.
/// </summary>
public Vector2 TextureSize
{
get => _textureSize;
private set
{
_textureSize = value;
onTextureSizeUpdated?.Invoke(value);
}
}
// Backing field for MyTexture.
private Texture _myTexture;
/// <summary>
/// The texture handler adopts the native 2D texture object coming from the
/// native plugin. The private setter (used by the native SetTextureSize
/// callback) also raises <see cref="onTextureUpdated"/>.
/// </summary>
public Texture MyTexture
{
get => _myTexture;
private set
{
_myTexture = value;
onTextureUpdated?.Invoke();
}
}
#endregion
#region The variables (flag) represent the state related to this OpenXR extension
/// <summary>
/// The state of the second view configuration comes from runtime.
/// </summary>
public bool IsEnabled { get; set; }
/// <summary>
/// The flag represents to whether the second view configuration is disabled or not.
/// </summary>
public bool IsStopped { get; set; }
/// <summary>
/// The flag represents to whether the "SpectatorCameraBased" script exists in the Unity scene.
/// </summary>
private bool IsExistSpectatorCameraBased { get; set; }
#endregion
#endregion
#region Function
#region OpenXR life-cycle functions
/// <summary>
/// Called after xrCreateInstance. Fails (returns false) when the extension is
/// not enabled or the native function pointers cannot be resolved.
/// </summary>
/// <param name="xrInstance">Handle of the xrInstance.</param>
/// <returns>Returns true if successful. Returns false otherwise.</returns>
protected override bool OnInstanceCreate(ulong xrInstance)
{
if (!IsExtensionEnabled())
{
Warning("OnInstanceCreate() " + OPEN_XR_EXTENSION_STRING + " is NOT enabled.");
return false;
}
// Remember whether a SpectatorCameraBased already exists in the scene; when it
// does not, one is created later by the native SetTextureSize callback.
var mySpectatorCameraBasedGameObject = GetSpectatorCameraBased();
IsExistSpectatorCameraBased = mySpectatorCameraBasedGameObject != null;
XrInstanceCreated = true;
XrInstance = xrInstance;
Debug("OnInstanceCreate() " + XrInstance);
// Resolve xrGetInstanceProcAddr; without it the native interop cannot work.
if (!GetXrFunctionDelegates(XrInstance))
{
Error("Get function pointer of OpenXRFunctionPointerAccessor failed.");
return false;
}
Debug("Get function pointer of OpenXRFunctionPointerAccessor succeed.");
return base.OnInstanceCreate(xrInstance);
}
/// <summary>
/// Called after xrGetSystem; caches the system id for later use.
/// </summary>
/// <param name="xrSystem">Handle of the xrSystemId</param>
protected override void OnSystemChange(ulong xrSystem)
{
    XrSystemId = xrSystem;
    Debug($"OnSystemChange() {XrSystemId}");
    base.OnSystemChange(xrSystem);
}
/// <summary>
/// Called after xrCreateSession; caches the session handle and marks the
/// session as created.
/// </summary>
/// <param name="xrSession">Handle of the xrSession.</param>
protected override void OnSessionCreate(ulong xrSession)
{
    XrSession = xrSession;
    XrSessionCreated = true;
    Debug($"OnSessionCreate() {XrSession}");
    base.OnSessionCreate(xrSession);
}
/// <summary>
/// Called after xrSessionBegin; marks the session as started.
/// </summary>
/// <param name="xrSession">Handle of the xrSession.</param>
protected override void OnSessionBegin(ulong xrSession)
{
    XrSessionStarted = true;
    // Note: the original logs the flag (always true here), not the handle.
    Debug($"OnSessionBegin() {XrSessionStarted}");
    base.OnSessionBegin(xrSession);
}
/// <summary>
/// Handles XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED: records the previous and
/// new XrSessionState values, warning when a raw value is outside the enum.
/// </summary>
/// <param name="oldState">Previous state.</param>
/// <param name="newState">New state.</param>
protected override void OnSessionStateChange(int oldState, int newState)
{
    Debug($"OnSessionStateChange() oldState: {oldState} newState:{newState}");
    XrSessionOldState = Translate(oldState, "oldState", XrSessionOldState);
    XrSessionNewState = Translate(newState, "newState", XrSessionNewState);
    base.OnSessionStateChange(oldState, newState);

    // Converts a raw int to XrSessionState, keeping the previous value (and
    // logging a warning) when the int is not a defined enum member.
    XrSessionState Translate(int rawState, string label, XrSessionState fallback)
    {
        if (Enum.IsDefined(typeof(XrSessionState), rawState))
        {
            return (XrSessionState)rawState;
        }
        Warning($"OnSessionStateChange() {label} undefined");
        return fallback;
    }
}
/// <summary>
/// Called to hook xrGetInstanceProcAddr. Returning a different function pointer allows intercepting any OpenXR method.
/// Only OpenGL ES 3 and Vulkan backends are supported; any other backend falls
/// through to the default (unhooked) pointer.
/// </summary>
/// <param name="func">xrGetInstanceProcAddr native function pointer.</param>
/// <returns>Function pointer that Unity will use to look up OpenXR native functions.</returns>
protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
{
Debug("HookGetInstanceProcAddr Start");
if (MyGraphicsAPI is GraphicsAPI.GLES3 or GraphicsAPI.Vulkan)
{
Debug($"The app graphics API is {MyGraphicsAPI}");
// Hand the native plug-in the managed callbacks it will invoke
// (state / stop / texture-size / fov updates).
// NOTE(review): the delegate instances are created at this call site;
// confirm the native side does not retain them beyond the call, or the GC
// could collect them while native code still holds the function pointers.
return SecondaryViewConfigurationInterceptOpenXRMethod
(
MyGraphicsAPI,
SystemInfo.graphicsUVStartsAtTop,
func,
SetSecondaryViewConfigurationState,
StopEnableSecondaryViewConfiguration,
SetTextureSize,
SetFov
);
}
Error(
"The render backend is not supported. Requires OpenGL or Vulkan backend for secondary view configuration feature.");
return base.HookGetInstanceProcAddr(func);
}
/// <summary>
/// Called before xrEndSession; marks the session as no longer started.
/// </summary>
/// <param name="xrSession">Handle of the xrSession.</param>
protected override void OnSessionEnd(ulong xrSession)
{
    XrSessionStarted = false;
    // Logs the cached handle (XrSession property), not the parameter.
    Debug($"OnSessionEnd() {XrSession}");
    base.OnSessionEnd(xrSession);
}
/// <summary>
/// Called before xrDestroySession; clears the session-created flag and drops
/// the cached session handle.
/// </summary>
/// <param name="xrSession">Handle of the xrSession.</param>
protected override void OnSessionDestroy(ulong xrSession)
{
    XrSessionCreated = false;
    // Consistency fix: reset the cached handle so it does not dangle after the
    // session is destroyed, mirroring OnInstanceDestroy's reset of XrInstance.
    XrSession = 0;
    Debug("OnSessionDestroy() " + xrSession);
    base.OnSessionDestroy(xrSession);
}
/// <summary>
/// Called before xrDestroyInstance; drops the cached instance handle and
/// clears the instance-created flag.
/// </summary>
/// <param name="xrInstance">Handle of the xrInstance.</param>
protected override void OnInstanceDestroy(ulong xrInstance)
{
    XrInstance = 0;
    XrInstanceCreated = false;
    Debug($"OnInstanceDestroy() {xrInstance}");
    base.OnInstanceDestroy(xrInstance);
}
#endregion
#region Handle agent functions
/// <summary>
/// "SetSecondaryViewConfigurationStateDelegate" implementation invoked from the
/// native plug-in whenever the runtime toggles the secondary view.
/// <b>Please be careful that this function should ONLY be called by native plug-ins.
/// THIS FUNCTION IS NOT DESIGNED FOR CALLING FROM THE UNITY ENGINE SIDE.</b>
/// </summary>
/// <param name="isEnabled">The state of the second view configuration comes from runtime. True if enabled. False otherwise.</param>
[MonoPInvokeCallback(typeof(SetSecondaryViewConfigurationStateDelegate))]
private static void SetSecondaryViewConfigurationState(bool isEnabled)
{
    var feature = Instance;
    feature.IsEnabled = isEnabled;
    if (feature.IsEnableDebugLog)
    {
        Debug($"SetSecondaryViewConfigurationState: Instance.IsEnabled set as {feature.IsEnabled}");
    }
}
/// <summary>
/// "StopEnableSecondaryViewConfigurationDelegate" implementation invoked from
/// the native plug-in when the secondary view configuration is (un)stopped.
/// <b>Please be careful that this function should ONLY be called by native plug-ins.
/// THIS FUNCTION IS NOT DESIGNED FOR CALLING FROM THE UNITY ENGINE SIDE.</b>
/// </summary>
/// <param name="isStopped">The flag refers to whether the second view configuration is disabled or not. True if the second view configuration is disabled. False otherwise.</param>
[MonoPInvokeCallback(typeof(StopEnableSecondaryViewConfigurationDelegate))]
private static void StopEnableSecondaryViewConfiguration(bool isStopped)
{
    var feature = Instance;
    feature.IsStopped = isStopped;
    if (feature.IsEnableDebugLog)
    {
        Debug($"StopEnableSecondaryViewConfiguration: Instance.IsStopped set as {feature.IsStopped}");
    }
}
/// <summary>
/// This function is defined as the "SetTextureSizeDelegate" delegate function.
/// Records the swapchain texture size, lazily creates the scene-side
/// SpectatorCameraBased consumer if absent, and wraps the current native
/// swapchain image in an external Unity Texture2D.
/// <b>Please be careful that this function should ONLY be called by native plug-ins.
/// THIS FUNCTION IS NOT DESIGNED FOR CALLING FROM THE UNITY ENGINE SIDE.</b>
/// </summary>
/// <param name="width">The texture width comes from runtime.</param>
/// <param name="height">The texture height comes from runtime.</param>
[MonoPInvokeCallback(typeof(SetTextureSizeDelegate))]
private static void SetTextureSize(uint width, uint height)
{
// Scene has no SpectatorCameraBased yet (checked in OnInstanceCreate): make one.
if (!Instance.IsExistSpectatorCameraBased)
{
CreateSpectatorCameraBased();
}
// Setting TextureSize also raises onTextureSizeUpdated.
Instance.TextureSize = new Vector2(width, height);
if (Instance.IsEnableDebugLog)
{
Debug($"SetTextureSize width: {Instance.TextureSize.x}, height: {Instance.TextureSize.y}");
}
// Ask the native plug-in for the current swapchain image pointer.
IntPtr texPtr = GetSecondaryViewTextureId(out uint imageIndex);
if (Instance.IsEnableDebugLog)
{
Debug($"SetTextureSize texPtr: {texPtr}, imageIndex: {imageIndex}");
}
if (texPtr == IntPtr.Zero)
{
Error($"SetTextureSize texPtr is invalid: {texPtr}");
return;
}
if (Instance.IsEnableDebugLog)
{
Debug("Get ptr successfully");
}
// Adopt the native image as an external texture; setting MyTexture also
// raises onTextureUpdated.
Instance.MyTexture = Texture2D.CreateExternalTexture(
(int)Instance.TextureSize.x,
(int)Instance.TextureSize.y,
TextureFormat.RGBA32,
false,
false,
texPtr);
#region For development usage (Just for reference)
/*
if (Instance.IsEnableDebugLog)
{
Debug("Create texture successfully");
Debug($"Instance.MyTexture.height: {Instance.MyTexture.height}");
Debug($"Instance.MyTexture.width: {Instance.MyTexture.width}");
Debug($"Instance.MyTexture.dimension: {Instance.MyTexture.dimension}");
Debug($"Instance.MyTexture.anisoLevel: {Instance.MyTexture.anisoLevel}");
Debug($"Instance.MyTexture.filterMode: {Instance.MyTexture.filterMode}");
Debug($"Instance.MyTexture.wrapMode: {Instance.MyTexture.wrapMode}");
Debug($"Instance.MyTexture.graphicsFormat: {Instance.MyTexture.graphicsFormat}");
Debug($"Instance.MyTexture.isReadable: {Instance.MyTexture.isReadable}");
Debug($"Instance.MyTexture.texelSize: {Instance.MyTexture.texelSize}");
Debug($"Instance.MyTexture.mipmapCount: {Instance.MyTexture.mipmapCount}");
Debug($"Instance.MyTexture.updateCount: {Instance.MyTexture.updateCount}");
Debug($"Instance.MyTexture.mipMapBias: {Instance.MyTexture.mipMapBias}");
Debug($"Instance.MyTexture.wrapModeU: {Instance.MyTexture.wrapModeU}");
Debug($"Instance.MyTexture.wrapModeV: {Instance.MyTexture.wrapModeV}");
Debug($"Instance.MyTexture.wrapModeW: {Instance.MyTexture.wrapModeW}");
Debug($"Instance.MyTexture.filterMode: {Instance.MyTexture.name}");
Debug($"Instance.MyTexture.hideFlags: {Instance.MyTexture.hideFlags}");
Debug($"Instance.MyTexture.GetInstanceID(): {Instance.MyTexture.GetInstanceID()}");
Debug($"Instance.MyTexture.GetType(): {Instance.MyTexture.GetType()}");
Debug($"Instance.MyTexture.GetNativeTexturePtr(): {Instance.MyTexture.GetNativeTexturePtr()}");
// Print imageContentsHash will cause an error
// Debug($"Instance.MyTexture.imageContentsHash: {Instance.MyTexture.imageContentsHash}");
}
*/
#endregion
}
/// <summary>
/// "SetFovDelegate" implementation invoked from the native plug-in when the
/// runtime updates the secondary view field of view; forwards the four angles
/// to <see cref="onFovUpdated"/>.
/// <b>Please be careful that this function should ONLY be called by native plug-ins.
/// THIS FUNCTION IS NOT DESIGNED FOR CALLING FROM THE UNITY ENGINE SIDE.</b>
/// </summary>
/// <param name="fov">The fov value comes from runtime.</param>
[MonoPInvokeCallback(typeof(SetFovDelegate))]
private static void SetFov(XrFovf fov)
{
    var feature = Instance;
    if (feature.IsEnableDebugLog)
    {
        Debug($"fov.AngleDown {fov.angleDown}");
        Debug($"fov.AngleLeft {fov.angleLeft}");
        Debug($"fov.AngleRight {fov.angleRight}");
        Debug($"fov.AngleUp {fov.angleUp}");
    }
    feature.onFovUpdated?.Invoke(fov.angleLeft, fov.angleRight, fov.angleUp, fov.angleDown);
}
#endregion
#region C++ interop functions
/// <summary>
/// Call this function to trigger the native plug-in that gets a specific OpenXR function for services to the
/// Unity engine, such as dispatching the Unity data to runtime and returning the data from runtime to the
/// Unity engine.
/// </summary>
/// <param name="xrInstance">The XrInstance is provided by the Unity OpenXR Plugin.</param>
/// <param name="xrGetInstanceProcAddrFuncPtr">Accessor for xrGetInstanceProcAddr function pointer.</param>
/// <returns>Return true if get successfully. False otherwise.</returns>
[DllImport(
dllName: ExtLib,
CallingConvention = CallingConvention.Cdecl,
EntryPoint = "secondary_view_configuration_get_function_address")]
private static extern bool SecondaryViewConfigurationGetFunctionAddress
(
XrInstance xrInstance,
IntPtr xrGetInstanceProcAddrFuncPtr
);
/// <summary>
/// Call this function to dispatch/hook all OpenXR functions to native plug-ins.
/// </summary>
/// <param name="graphicsAPI">The graphics backend adopted in the Unity engine.</param>
/// <param name="graphicsUVStartsAtTop">The bool value represents whether the texture UV coordinate convention for this platform has Y starting at the top of the image.</param>
/// <param name="func">xrGetInstanceProcAddr native function pointer.</param>
/// <param name="setSecondaryViewConfigurationStateDelegate">The delegate function pointer that functions types as "SetSecondaryViewConfigurationStateDelegate".</param>
/// <param name="stopEnableSecondaryViewConfigurationDelegate">The delegate function pointer that functions types as "StopEnableSecondaryViewConfigurationDelegate".</param>
/// <param name="setTextureSizeDelegate">The delegate function pointer that functions types as "SetTextureSizeDelegate".</param>
/// <param name="setFovDelegate">The delegate function pointer that functions types as "SetFovDelegate".</param>
/// <returns></returns>
[DllImport(
dllName: ExtLib,
CallingConvention = CallingConvention.Cdecl,
EntryPoint = "secondary_view_configuration_intercept_openxr_method")]
private static extern IntPtr SecondaryViewConfigurationInterceptOpenXRMethod
(
GraphicsAPI graphicsAPI,
bool graphicsUVStartsAtTop,
IntPtr func,
[MarshalAs(UnmanagedType.FunctionPtr)]
SetSecondaryViewConfigurationStateDelegate setSecondaryViewConfigurationStateDelegate,
[MarshalAs(UnmanagedType.FunctionPtr)]
StopEnableSecondaryViewConfigurationDelegate stopEnableSecondaryViewConfigurationDelegate,
[MarshalAs(UnmanagedType.FunctionPtr)] SetTextureSizeDelegate setTextureSizeDelegate,
[MarshalAs(UnmanagedType.FunctionPtr)] SetFovDelegate setFovDelegate
);
/// <summary>
/// Call this function to get the current swapchain image handler (its ID and memory address).
/// </summary>
/// <param name="imageIndex">The current handler index.</param>
/// <returns>The current handler memory address.</returns>
[DllImport(
dllName: ExtLib,
CallingConvention = CallingConvention.Cdecl,
EntryPoint = "get_secondary_view_texture_id")]
private static extern IntPtr GetSecondaryViewTextureId
(
out UInt32 imageIndex
);
/// <summary>
/// Call this function to tell native plug-in submit the swapchain image immediately.
/// </summary>
/// <returns>Return true if submit the swapchain image successfully. False otherwise.</returns>
[DllImport(
dllName: ExtLib,
CallingConvention = CallingConvention.Cdecl,
EntryPoint = "release_secondary_view_texture")]
public static extern bool ReleaseSecondaryViewTexture();
/// <summary>
/// Call this function to release all resources in native plug-in. Please be careful that this function should
/// ONLY call in the Unity "OnDestroy" lifecycle event in the class "SpectatorCameraBased".
/// </summary>
[DllImport(
dllName: ExtLib,
CallingConvention = CallingConvention.Cdecl,
EntryPoint = "release_all_resources")]
public static extern void ReleaseAllResources();
/// <summary>
/// Call this function if requiring swapchain re-initialization. The native plug-in will set a re-initialization
/// flag. Once the secondary view is enabled after that, the swapchain will re-init immediately.
/// </summary>
[DllImport(
dllName: ExtLib,
CallingConvention = CallingConvention.Cdecl,
EntryPoint = "require_reinit_swapchain")]
public static extern void RequireReinitSwapchain();
/// <summary>
/// Call this function to tell the native plug-in where the current spectator camera source comes from.
/// Bound to the native entry point "set_view_from_hmd" in <c>ExtLib</c>.
/// </summary>
/// <param name="isViewFromHmd">Please set true if the source comes from hmd. Otherwise, please set false.</param>
// NOTE(review): with no [MarshalAs], a C# bool marshals as a 4-byte Win32 BOOL by default —
// confirm the native parameter has the same width (use UnmanagedType.I1 if it is a C++ bool).
[DllImport(
dllName: ExtLib,
CallingConvention = CallingConvention.Cdecl,
EntryPoint = "set_view_from_hmd")]
public static extern void SetViewFromHmd(bool isViewFromHmd);
/// <summary>
/// Call this function to tell the non-hmd pose to the native plug-in.
/// Bound to the native entry point "set_non_hmd_view_pose" in <c>ExtLib</c>.
/// </summary>
/// <param name="pose">The current non-hmd pose</param>
// NOTE(review): XrPosef is passed by value; this assumes its managed layout is blittable and
// matches the native struct — TODO confirm against the plug-in's XrPosef definition.
[DllImport(
dllName: ExtLib,
CallingConvention = CallingConvention.Cdecl,
EntryPoint = "set_non_hmd_view_pose")]
public static extern void SetNonHmdViewPose(XrPosef pose);
/// <summary>
/// Call this function to tell the native plug-in whether the texture data is ready or not.
/// Bound to the native entry point "set_state_secondary_view_image_data_ready" in <c>ExtLib</c>.
/// </summary>
/// <param name="isReady">The texture data written by Unity Engine is ready or not.</param>
// NOTE(review): default bool marshaling (4-byte Win32 BOOL) applies here as well — confirm the
// native parameter width matches.
[DllImport(
dllName: ExtLib,
CallingConvention = CallingConvention.Cdecl,
EntryPoint = "set_state_secondary_view_image_data_ready")]
public static extern void SetStateSecondaryViewImageDataReady(bool isReady);
#endregion
#region Utilities functions
/// <summary>
/// Check ViveSecondaryViewConfiguration extension is enabled or not.
/// </summary>
/// <returns>Return true if enabled. False otherwise.</returns>
public static bool IsExtensionEnabled()
{
#if UNITY_2022_1_OR_NEWER
// OpenXRRuntime.IsExtensionEnabled queries the runtime for OPEN_XR_EXTENSION_STRING;
// this API path is only compiled on Unity 2022.1 or newer.
return OpenXRRuntime.IsExtensionEnabled(OPEN_XR_EXTENSION_STRING);
#else
// Does not support 2021 or lower
return false;
#endif
}
/// <summary>
/// Get the OpenXR function via XrInstance.
/// Resolves the xrGetInstanceProcAddr delegate first, then forwards to the
/// XR_MSFT_secondary_view_configuration address lookup.
/// </summary>
/// <param name="xrInstance">The XrInstance is provided by the Unity OpenXR Plugin.</param>
/// <returns>Return true if get successfully. False otherwise.</returns>
private bool GetXrFunctionDelegates(XrInstance xrInstance)
{
    // Guard clause: without a valid accessor pointer nothing else can be resolved.
    if (xrGetInstanceProcAddr == IntPtr.Zero)
    {
        Error(
            "Get function pointer of openXRFunctionPointerAccessor failed due to the xrGetInstanceProcAddr is null.");
        return false;
    }

    Debug("Get function pointer of openXRFunctionPointerAccessor.");
    XrGetInstanceProcAddr = Marshal.GetDelegateForFunctionPointer(xrGetInstanceProcAddr,
        typeof(OpenXRHelper.xrGetInstanceProcAddrDelegate)) as OpenXRHelper.xrGetInstanceProcAddrDelegate;

    // Defensive re-check: the cast above yields null only if the delegate type mismatches.
    if (XrGetInstanceProcAddr == null)
    {
        Error(
            "Get function pointer of openXRFunctionPointerAccessor failed due to the XrGetInstanceProcAddr is null.");
        return false;
    }

    Debug("Try to get the function pointer for XR_MSFT_secondary_view_configuration.");
    return SecondaryViewConfigurationGetFunctionAddress(xrInstance, xrGetInstanceProcAddr);

    #region Get function in C# (Just for reference)
    /* if (GetOpenXRDelegateFunction(
        XrGetInstanceProcAddr,
        xrInstance,
        "xrEnumerateViewConfigurations",
        out _xrEnumerateViewConfigurations) is false)
    {
        Error("Get delegate function of XrEnumerateViewConfigurations failed.");
        return false;
    } */
    #endregion
}
/// <summary>
/// Get the specific OpenXR function.
/// Queries the runtime through the supplied accessor and converts the returned
/// native pointer into a managed delegate of type <typeparamref name="T"/>.
/// </summary>
/// <param name="openXRFunctionPointerAccessor">The function pointer accessor provide by OpenXR.</param>
/// <param name="openXRInstance">The XrInstance is provided by the Unity OpenXR Plugin.</param>
/// <param name="functionName">The specific OpenXR function.</param>
/// <param name="delegateFunction">Override value. The specific OpenXR function.</param>
/// <typeparam name="T">The class of the delegate function.</typeparam>
/// <returns>Return true if get successfully. False otherwise.</returns>
private static bool GetOpenXRDelegateFunction<T>
(
    in OpenXRHelper.xrGetInstanceProcAddrDelegate openXRFunctionPointerAccessor,
    in XrInstance openXRInstance,
    in string functionName,
    out T delegateFunction
) where T : class
{
    delegateFunction = default(T);

    // All three inputs are mandatory for the runtime query.
    bool inputsInvalid = openXRFunctionPointerAccessor == null
                         || openXRInstance == 0
                         || string.IsNullOrEmpty(functionName);
    if (inputsInvalid)
    {
        Error($"Get OpenXR delegate function, {functionName}, failed due to the invalid parameter(s).");
        return false;
    }

    XrResult queryState = openXRFunctionPointerAccessor(openXRInstance, functionName, out IntPtr nativePtr);
    bool pointerMissing = nativePtr == IntPtr.Zero;
    Debug($"Get OpenXR delegate function, {functionName}, state: {queryState}");
    Debug($"Get OpenXR delegate function, {functionName}, funcPtrIsNull: {pointerMissing}");

    if (queryState != XrResult.XR_SUCCESS || pointerMissing)
    {
        Error(
            $"Get OpenXR delegate function, {functionName}, failed due to the native error or invalid return.");
        return false;
    }

    // Marshal can throw (e.g. T is not a delegate type); report instead of propagating.
    try
    {
        delegateFunction = Marshal.GetDelegateForFunctionPointer(nativePtr, typeof(T)) as T;
    }
    catch (Exception e)
    {
        Error($"Get OpenXR delegate function, {functionName}, failed due to the exception: {e.Message}");
        return false;
    }

    Debug($"Get OpenXR delegate function, {functionName}, succeed.");
    return true;
}
/// <summary>
/// Print log with tag "VIVE.OpenXR.SecondaryViewConfiguration".
/// </summary>
/// <param name="msg">The log you want to print.</param>
private static void Debug(string msg) => UnityEngine.Debug.Log($"{LogTag} {msg}");
/// <summary>
/// Print warning message with tag "VIVE.OpenXR.SecondaryViewConfiguration".
/// </summary>
/// <param name="msg">The warning message you want to print.</param>
private static void Warning(string msg) => UnityEngine.Debug.LogWarning($"{LogTag} {msg}");
/// <summary>
/// Print an error message with the tag "VIVE.OpenXR.SecondaryViewConfiguration."
/// </summary>
/// <param name="msg">The error message you want to print.</param>
private static void Error(string msg) => UnityEngine.Debug.LogError($"{LogTag} {msg}");
/// <summary>
/// Get the SpectatorCameraBased component in the current Unity scene.
/// </summary>
/// <returns>SpectatorCameraBased array if there are any SpectatorCameraBased components. Otherwise, return null.</returns>
private static SpectatorCameraBased[] GetSpectatorCameraBased()
{
    var foundComponents = (SpectatorCameraBased[])FindObjectsOfType(typeof(SpectatorCameraBased));
    // Normalize "nothing found" (null or empty array) to null for callers.
    if (foundComponents == null || foundComponents.Length == 0)
    {
        return null;
    }
    return foundComponents;
}
/// <summary>
/// Create a GameObject that includes SpectatorCameraBased script in Unity scene for cooperation with extension native plugins.
/// No-op (with a log) when the required extension is disabled or the component already exists.
/// </summary>
private static void CreateSpectatorCameraBased()
{
    // Guard clause: nothing to do unless the related extension is enabled.
    if (!IsExtensionEnabled())
    {
        Debug("Create Spectator Camera Base GameObject failed because the related extensions are not enabled.");
        return;
    }

    // Log the flag's previous value before marking the component as present.
    Debug($"Instance.IsExistSpectatorCameraBased = {Instance.IsExistSpectatorCameraBased}");
    Instance.IsExistSpectatorCameraBased = true;

    if (GetSpectatorCameraBased() != null)
    {
        Debug("No need to add SpectatorCameraBased because the scene already exist.");
        return;
    }

    Debug("Start to add SpectatorCameraBased.");
    var cameraBaseObject = new GameObject("Spectator Camera Base", typeof(SpectatorCameraBased))
    {
        transform =
        {
            position = Vector3.zero,
            rotation = Quaternion.identity
        }
    };
    Debug($"Create Spectator Camera Base GameObject successfully: {cameraBaseObject != null}");
    Debug(
        $"Included SpectatorCameraBased component: {cameraBaseObject.GetComponent<SpectatorCameraBased>() != null}");
}
#endregion
#endregion
#region Enum definition
/// <summary>
/// The enum definition of supporting rendering backend.
/// </summary>
private enum GraphicsAPI
{
// Backend not detected or not supported.
Unknown = 0,
// OpenGL ES 3.x rendering backend.
GLES3 = 1,
// Vulkan rendering backend.
Vulkan = 2
}
#endregion
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8f3afcc00e190534d8c0c8ebd2246d84
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Some files were not shown because too many files have changed in this diff Show More