version 2.0.0

This commit is contained in:
srl87
2023-09-14 18:17:47 +08:00
parent 13e9d00b37
commit ca21423a06
953 changed files with 125887 additions and 21229 deletions

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 734a59e6cccf84f4f9407d01c4d88727
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,87 @@
# 12.68. XR_HTC_facial_tracking
## Name String
XR_HTC_facial_tracking
## Revision
1
## Overview
This extension allows an application to track and integrate users' eye and lip movements, empowering developers to read intention and model facial expressions.
## VIVE Plugin
By feeding the eye expression blend shape values to an avatar, its facial expression can be animated with the player's eye movement. The following enumeration lists the eye expression blend shapes.
public enum XrEyeExpressionHTC
{
XR_EYE_EXPRESSION_LEFT_BLINK_HTC = 0,
XR_EYE_EXPRESSION_LEFT_WIDE_HTC = 1,
XR_EYE_EXPRESSION_RIGHT_BLINK_HTC = 2,
XR_EYE_EXPRESSION_RIGHT_WIDE_HTC = 3,
XR_EYE_EXPRESSION_LEFT_SQUEEZE_HTC = 4,
XR_EYE_EXPRESSION_RIGHT_SQUEEZE_HTC = 5,
XR_EYE_EXPRESSION_LEFT_DOWN_HTC = 6,
XR_EYE_EXPRESSION_RIGHT_DOWN_HTC = 7,
XR_EYE_EXPRESSION_LEFT_OUT_HTC = 8,
XR_EYE_EXPRESSION_RIGHT_IN_HTC = 9,
XR_EYE_EXPRESSION_LEFT_IN_HTC = 10,
XR_EYE_EXPRESSION_RIGHT_OUT_HTC = 11,
XR_EYE_EXPRESSION_LEFT_UP_HTC = 12,
XR_EYE_EXPRESSION_RIGHT_UP_HTC = 13,
XR_EYE_EXPRESSION_MAX_ENUM_HTC = 14
};
You can use the following ViveFacialTracking API to retrieve the array of eye expression values; the weights are valid only when the return value is true.
using VIVE.OpenXR.FacialTracking;
public bool GetFacialExpressions(XrFacialTrackingTypeHTC facialTrackingType, out float[] expressionWeightings)
Pass XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC as facialTrackingType to read the eye expression weights.
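Below is a minimal usage sketch, not taken from the plugin documentation: it assumes the VIVE XR Facial Tracking feature is enabled in the project's OpenXR settings, and the MonoBehaviour wrapper and field names are illustrative only.
using UnityEngine;
using UnityEngine.XR.OpenXR;
using VIVE.OpenXR.FacialTracking;

public class EyeExpressionReader : MonoBehaviour
{
    ViveFacialTracking facialTracking;

    void Start()
    {
        // Retrieve the feature instance registered with the OpenXR loader.
        facialTracking = OpenXRSettings.Instance.GetFeature<ViveFacialTracking>();
    }

    void Update()
    {
        if (facialTracking == null) { return; }
        // Poll the eye expression weights once per frame.
        if (facialTracking.GetFacialExpressions(
                XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC,
                out float[] weights))
        {
            // The array is indexed by XrEyeExpressionHTC, e.g. the left-blink weight:
            float leftBlink = weights[(int)XrEyeExpressionHTC.XR_EYE_EXPRESSION_LEFT_BLINK_HTC];
            Debug.Log("Left blink weight: " + leftBlink);
        }
    }
}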
By feeding the lip expression blend shape values to an avatar, its facial expression can be animated with the player's lip movement. The following enumeration lists the lip expression blend shapes.
public enum XrLipExpressionHTC
{
XR_LIP_EXPRESSION_JAW_RIGHT_HTC = 0,
XR_LIP_EXPRESSION_JAW_LEFT_HTC = 1,
XR_LIP_EXPRESSION_JAW_FORWARD_HTC = 2,
XR_LIP_EXPRESSION_JAW_OPEN_HTC = 3,
XR_LIP_EXPRESSION_MOUTH_APE_SHAPE_HTC = 4,
XR_LIP_EXPRESSION_MOUTH_UPPER_RIGHT_HTC = 5,
XR_LIP_EXPRESSION_MOUTH_UPPER_LEFT_HTC = 6,
XR_LIP_EXPRESSION_MOUTH_LOWER_RIGHT_HTC = 7,
XR_LIP_EXPRESSION_MOUTH_LOWER_LEFT_HTC = 8,
XR_LIP_EXPRESSION_MOUTH_UPPER_OVERTURN_HTC = 9,
XR_LIP_EXPRESSION_MOUTH_LOWER_OVERTURN_HTC = 10,
XR_LIP_EXPRESSION_MOUTH_POUT_HTC = 11,
XR_LIP_EXPRESSION_MOUTH_SMILE_RIGHT_HTC = 12,
XR_LIP_EXPRESSION_MOUTH_SMILE_LEFT_HTC = 13,
XR_LIP_EXPRESSION_MOUTH_SAD_RIGHT_HTC = 14,
XR_LIP_EXPRESSION_MOUTH_SAD_LEFT_HTC = 15,
XR_LIP_EXPRESSION_CHEEK_PUFF_RIGHT_HTC = 16,
XR_LIP_EXPRESSION_CHEEK_PUFF_LEFT_HTC = 17,
XR_LIP_EXPRESSION_CHEEK_SUCK_HTC = 18,
XR_LIP_EXPRESSION_MOUTH_UPPER_UPRIGHT_HTC = 19,
XR_LIP_EXPRESSION_MOUTH_UPPER_UPLEFT_HTC = 20,
XR_LIP_EXPRESSION_MOUTH_LOWER_DOWNRIGHT_HTC = 21,
XR_LIP_EXPRESSION_MOUTH_LOWER_DOWNLEFT_HTC = 22,
XR_LIP_EXPRESSION_MOUTH_UPPER_INSIDE_HTC = 23,
XR_LIP_EXPRESSION_MOUTH_LOWER_INSIDE_HTC = 24,
XR_LIP_EXPRESSION_MOUTH_LOWER_OVERLAY_HTC = 25,
XR_LIP_EXPRESSION_TONGUE_LONGSTEP1_HTC = 26,
XR_LIP_EXPRESSION_TONGUE_LEFT_HTC = 27,
XR_LIP_EXPRESSION_TONGUE_RIGHT_HTC = 28,
XR_LIP_EXPRESSION_TONGUE_UP_HTC = 29,
XR_LIP_EXPRESSION_TONGUE_DOWN_HTC = 30,
XR_LIP_EXPRESSION_TONGUE_ROLL_HTC = 31,
XR_LIP_EXPRESSION_TONGUE_LONGSTEP2_HTC = 32,
XR_LIP_EXPRESSION_TONGUE_UPRIGHT_MORPH_HTC = 33,
XR_LIP_EXPRESSION_TONGUE_UPLEFT_MORPH_HTC = 34,
XR_LIP_EXPRESSION_TONGUE_DOWNRIGHT_MORPH_HTC = 35,
XR_LIP_EXPRESSION_TONGUE_DOWNLEFT_MORPH_HTC = 36,
XR_LIP_EXPRESSION_MAX_ENUM_HTC = 37
};
You can call the same API with XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC to retrieve the array of lip expression values; the weights are valid only when the return value is true.
using VIVE.OpenXR.FacialTracking;
public bool GetFacialExpressions(XrFacialTrackingTypeHTC facialTrackingType, out float[] expressionWeightings)
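As a hedged sketch of driving an avatar from the lip weights, the values can be forwarded to a SkinnedMeshRenderer. The avatarFace reference, the jawOpenBlendShapeIndex mapping, and the assumption that the runtime reports normalized 0 to 1 weights are illustrative and depend on your avatar setup.
using UnityEngine;
using UnityEngine.XR.OpenXR;
using VIVE.OpenXR.FacialTracking;

public class LipExpressionToAvatar : MonoBehaviour
{
    public SkinnedMeshRenderer avatarFace;   // avatar mesh with a matching blend shape (assumed)
    public int jawOpenBlendShapeIndex = 0;   // index of the avatar's "jaw open" blend shape (assumed)

    ViveFacialTracking facialTracking;

    void Start()
    {
        facialTracking = OpenXRSettings.Instance.GetFeature<ViveFacialTracking>();
    }

    void Update()
    {
        if (facialTracking == null || avatarFace == null) { return; }
        if (facialTracking.GetFacialExpressions(
                XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC,
                out float[] weights))
        {
            // Unity blend shape weights range 0..100, so scale the assumed 0..1 runtime weight.
            float jawOpen = weights[(int)XrLipExpressionHTC.XR_LIP_EXPRESSION_JAW_OPEN_HTC];
            avatarFace.SetBlendShapeWeight(jawOpenBlendShapeIndex, jawOpen * 100f);
        }
    }
}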

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: be0ee5c3ad3e1b64590d426f9d79b274
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 3aca591eccf98834c902458afd7c5bc0
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,595 @@
// "VIVE SDK
// © 2020 HTC Corporation. All Rights Reserved.
//
// Unless otherwise required by copyright law and practice,
// upon the execution of HTC SDK license agreement,
// HTC grants you access to and use of the VIVE SDK(s).
// You shall fully comply with all of HTC's SDK license agreement terms and
// conditions signed by you and all SDK and API requirements,
// specifications, and documentation provided by HTC to You."
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;
using UnityEngine;
using System.Runtime.InteropServices;
using System;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
#endif
namespace VIVE.OpenXR.FacialTracking
{
#if UNITY_EDITOR
[OpenXRFeature(UiName = "VIVE XR Facial Tracking",
BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
Company = "HTC",
Desc = "Support the facial tracking extension.",
DocumentationLink = "..\\Documentation",
OpenxrExtensionStrings = kOpenxrExtensionString,
Version = "1.0.0",
FeatureId = featureId)]
#endif
public class ViveFacialTracking : OpenXRFeature
{
const string LOG_TAG = "VIVE.OpenXR.FacialTracking.ViveFacialTracking";
void DEBUG(string msg) { Debug.Log(LOG_TAG + " " + msg); }
void WARNING(string msg) { Debug.LogWarning(LOG_TAG + " " + msg); }
void ERROR(string msg) { Debug.LogError(LOG_TAG + " " + msg); }
/// <summary>
/// OpenXR specification <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_HTC_facial_tracking">12.68. XR_HTC_facial_tracking</see>.
/// </summary>
public const string kOpenxrExtensionString = "XR_HTC_facial_tracking";
/// <summary>
/// The feature id string. This is used to give the feature a well known id for reference.
/// </summary>
public const string featureId = "vive.openxr.feature.facial.tracking";
#region OpenXR Life Cycle
private bool m_XrInstanceCreated = false;
private XrInstance m_XrInstance = 0;
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateInstance">xrCreateInstance</see> is done.
/// </summary>
/// <param name="xrInstance">The created instance.</param>
/// <returns>True for valid <see cref="XrInstance">XrInstance</see></returns>
protected override bool OnInstanceCreate(ulong xrInstance)
{
if (!OpenXRRuntime.IsExtensionEnabled(kOpenxrExtensionString))
{
WARNING("OnInstanceCreate() " + kOpenxrExtensionString + " is NOT enabled.");
return false;
}
m_XrInstanceCreated = true;
m_XrInstance = xrInstance;
DEBUG("OnInstanceCreate() " + m_XrInstance);
return GetXrFunctionDelegates(m_XrInstance);
}
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroyInstance">xrDestroyInstance</see> is done.
/// </summary>
/// <param name="xrInstance">The instance to destroy.</param>
protected override void OnInstanceDestroy(ulong xrInstance)
{
m_XrInstanceCreated = false;
m_XrInstance = 0;
DEBUG("OnInstanceDestroy() " + xrInstance);
}
private XrSystemId m_XrSystemId = 0;
/// <summary>
/// Called when the <see cref="XrSystemId">XrSystemId</see> retrieved by <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystem">xrGetSystem</see> is changed.
/// </summary>
/// <param name="xrSystem">The system id.</param>
protected override void OnSystemChange(ulong xrSystem)
{
m_XrSystemId = xrSystem;
DEBUG("OnSystemChange() " + m_XrSystemId);
}
private bool m_XrSessionCreated = false;
private XrSession m_XrSession = 0;
private bool hasEyeTracker = false, hasLipTracker = false;
private XrFacialTrackerHTC m_EyeTracker = 0, m_LipTracker = 0;
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateSession">xrCreateSession</see> is done.
/// </summary>
/// <param name="xrSession">The created session ID.</param>
protected override void OnSessionCreate(ulong xrSession)
{
m_XrSession = xrSession;
m_XrSessionCreated = true;
DEBUG("OnSessionCreate() " + m_XrSession);
if (CreateFacialTracker(XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC)) { DEBUG("OnSessionCreate() m_EyeTracker " + m_EyeTracker); }
if (CreateFacialTracker(XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC)) { DEBUG("OnSessionCreate() m_LipTracker " + m_LipTracker); }
}
/// <summary>
/// Called when <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroySession">xrDestroySession</see> is done.
/// </summary>
/// <param name="xrSession">The session ID to destroy.</param>
protected override void OnSessionDestroy(ulong xrSession)
{
DEBUG("OnSessionDestroy() " + xrSession);
// Facial tracking is bound to the xrSession, so we destroy the trackers when the xrSession is destroyed.
DestroyFacialTracker(XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC);
DestroyFacialTracker(XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC);
m_XrSession = 0;
m_XrSessionCreated = false;
}
#endregion
#region OpenXR function delegates
/// xrGetInstanceProcAddr
OpenXRHelper.xrGetInstanceProcAddrDelegate XrGetInstanceProcAddr;
/// xrGetSystemProperties
OpenXRHelper.xrGetSystemPropertiesDelegate xrGetSystemProperties;
private XrResult GetSystemProperties(ref XrSystemProperties properties)
{
if (!m_XrSessionCreated)
{
ERROR("GetSystemProperties() XR_ERROR_SESSION_LOST.");
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("GetSystemProperties() XR_ERROR_INSTANCE_LOST.");
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrGetSystemProperties(m_XrInstance, m_XrSystemId, ref properties);
}
/// xrDestroySpace
OpenXRHelper.xrDestroySpaceDelegate xrDestroySpace;
private XrResult DestroySpace(XrSpace space)
{
if (!m_XrSessionCreated)
{
ERROR("DestroySpace() XR_ERROR_SESSION_LOST.");
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("DestroySpace() XR_ERROR_INSTANCE_LOST.");
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrDestroySpace(space);
}
ViveFacialTrackingHelper.xrCreateFacialTrackerHTCDelegate xrCreateFacialTrackerHTC;
private XrResult CreateFacialTrackerHTC(XrFacialTrackerCreateInfoHTC createInfo, out XrFacialTrackerHTC facialTracker)
{
if (!m_XrSessionCreated)
{
ERROR("CreateFacialTrackerHTC() XR_ERROR_SESSION_LOST.");
facialTracker = 0;
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("CreateFacialTrackerHTC() XR_ERROR_INSTANCE_LOST.");
facialTracker = 0;
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrCreateFacialTrackerHTC(m_XrSession, createInfo, out facialTracker);
}
ViveFacialTrackingHelper.xrDestroyFacialTrackerHTCDelegate xrDestroyFacialTrackerHTC;
private XrResult DestroyFacialTrackerHTC(XrFacialTrackerHTC facialTracker)
{
if (!m_XrSessionCreated)
{
ERROR("DestroyFacialTrackerHTC() XR_ERROR_SESSION_LOST.");
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("DestroyFacialTrackerHTC() XR_ERROR_INSTANCE_LOST.");
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrDestroyFacialTrackerHTC(facialTracker);
}
ViveFacialTrackingHelper.xrGetFacialExpressionsHTCDelegate xrGetFacialExpressionsHTC;
private XrResult GetFacialExpressionsHTC(XrFacialTrackerHTC facialTracker, ref XrFacialExpressionsHTC facialExpressions)
{
if (!m_XrSessionCreated)
{
ERROR("GetFacialExpressionsHTC() XR_ERROR_SESSION_LOST.");
return XrResult.XR_ERROR_SESSION_LOST;
}
if (!m_XrInstanceCreated)
{
ERROR("GetFacialExpressionsHTC() XR_ERROR_INSTANCE_LOST.");
return XrResult.XR_ERROR_INSTANCE_LOST;
}
return xrGetFacialExpressionsHTC(facialTracker, ref facialExpressions);
}
private bool GetXrFunctionDelegates(XrInstance xrInstance)
{
/// xrGetInstanceProcAddr
if (xrGetInstanceProcAddr != null && xrGetInstanceProcAddr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrGetInstanceProcAddr.");
XrGetInstanceProcAddr = Marshal.GetDelegateForFunctionPointer(
xrGetInstanceProcAddr,
typeof(OpenXRHelper.xrGetInstanceProcAddrDelegate)) as OpenXRHelper.xrGetInstanceProcAddrDelegate;
}
else
{
ERROR("xrGetInstanceProcAddr");
return false;
}
IntPtr funcPtr = IntPtr.Zero;
/// xrGetSystemProperties
if (XrGetInstanceProcAddr(xrInstance, "xrGetSystemProperties", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrGetSystemProperties.");
xrGetSystemProperties = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(OpenXRHelper.xrGetSystemPropertiesDelegate)) as OpenXRHelper.xrGetSystemPropertiesDelegate;
}
}
else
{
ERROR("xrGetSystemProperties");
return false;
}
/// xrDestroySpace
if (XrGetInstanceProcAddr(xrInstance, "xrDestroySpace", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrDestroySpace.");
xrDestroySpace = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(OpenXRHelper.xrDestroySpaceDelegate)) as OpenXRHelper.xrDestroySpaceDelegate;
}
}
else
{
ERROR("xrDestroySpace");
return false;
}
/// xrCreateFacialTrackerHTC
if (XrGetInstanceProcAddr(xrInstance, "xrCreateFacialTrackerHTC", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrCreateFacialTrackerHTC.");
xrCreateFacialTrackerHTC = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(ViveFacialTrackingHelper.xrCreateFacialTrackerHTCDelegate)) as ViveFacialTrackingHelper.xrCreateFacialTrackerHTCDelegate;
}
}
else
{
ERROR("xrCreateFacialTrackerHTC");
return false;
}
/// xrDestroyFacialTrackerHTC
if (XrGetInstanceProcAddr(xrInstance, "xrDestroyFacialTrackerHTC", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrDestroyFacialTrackerHTC.");
xrDestroyFacialTrackerHTC = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(ViveFacialTrackingHelper.xrDestroyFacialTrackerHTCDelegate)) as ViveFacialTrackingHelper.xrDestroyFacialTrackerHTCDelegate;
}
}
else
{
ERROR("xrDestroyFacialTrackerHTC");
return false;
}
/// xrGetFacialExpressionsHTC
if (XrGetInstanceProcAddr(xrInstance, "xrGetFacialExpressionsHTC", out funcPtr) == XrResult.XR_SUCCESS)
{
if (funcPtr != IntPtr.Zero)
{
DEBUG("Get function pointer of xrGetFacialExpressionsHTC.");
xrGetFacialExpressionsHTC = Marshal.GetDelegateForFunctionPointer(
funcPtr,
typeof(ViveFacialTrackingHelper.xrGetFacialExpressionsHTCDelegate)) as ViveFacialTrackingHelper.xrGetFacialExpressionsHTCDelegate;
}
}
else
{
ERROR("xrGetFacialExpressionsHTC");
return false;
}
return true;
}
#endregion
XrSystemFacialTrackingPropertiesHTC facialTrackingSystemProperties;
XrSystemProperties systemProperties;
private bool IsFacialTrackingSupported(XrFacialTrackingTypeHTC facialTrackingType)
{
if (!m_XrSessionCreated)
{
ERROR("IsFacialTrackingSupported() session is not created.");
return false;
}
facialTrackingSystemProperties.type = XrStructureType.XR_TYPE_SYSTEM_FACIAL_TRACKING_PROPERTIES_HTC;
systemProperties.type = XrStructureType.XR_TYPE_SYSTEM_PROPERTIES;
systemProperties.next = Marshal.AllocHGlobal(Marshal.SizeOf(facialTrackingSystemProperties));
long offset = 0;
if (IntPtr.Size == 4)
offset = systemProperties.next.ToInt32();
else
offset = systemProperties.next.ToInt64();
IntPtr sys_facial_tracking_prop_ptr = new IntPtr(offset);
Marshal.StructureToPtr(facialTrackingSystemProperties, sys_facial_tracking_prop_ptr, false);
if (GetSystemProperties(ref systemProperties) == XrResult.XR_SUCCESS)
{
if (IntPtr.Size == 4)
offset = systemProperties.next.ToInt32();
else
offset = systemProperties.next.ToInt64();
sys_facial_tracking_prop_ptr = new IntPtr(offset);
facialTrackingSystemProperties = (XrSystemFacialTrackingPropertiesHTC)Marshal.PtrToStructure(sys_facial_tracking_prop_ptr, typeof(XrSystemFacialTrackingPropertiesHTC));
DEBUG("IsFacialTrackingSupported() XrSystemFacialTrackingPropertiesHTC.supportEyeFacialTracking: "
+ facialTrackingSystemProperties.supportEyeFacialTracking
+ ", supportLipFacialTracking: "
+ facialTrackingSystemProperties.supportLipFacialTracking);
return (facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC ?
(facialTrackingSystemProperties.supportEyeFacialTracking > 0) : (facialTrackingSystemProperties.supportLipFacialTracking > 0));
}
else
{
ERROR("IsFacialTrackingSupported() GetSystemProperties failed.");
}
return false;
}
/// <summary>
/// An application can create an <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> handle using CreateFacialTracker.
/// </summary>
/// <param name="createInfo">The <see cref="XrFacialTrackerCreateInfoHTC">XrFacialTrackerCreateInfoHTC</see> describing which type of facial tracking should be used for this handle.</param>
/// <param name="facialTracker">The returned XrFacialTrackerHTC handle.</param>
/// <returns>XR_SUCCESS for success.</returns>
public XrResult CreateFacialTracker(XrFacialTrackerCreateInfoHTC createInfo, out XrFacialTrackerHTC facialTracker)
{
if (createInfo.facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC && hasEyeTracker)
{
facialTracker = m_EyeTracker;
DEBUG("CreateFacialTracker() m_EyeTracker: " + facialTracker + " already created before.");
return XrResult.XR_SUCCESS;
}
if (createInfo.facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC && hasLipTracker)
{
facialTracker = m_LipTracker;
DEBUG("CreateFacialTracker() m_LipTracker: " + facialTracker + " already created before.");
return XrResult.XR_SUCCESS;
}
if (!IsFacialTrackingSupported(createInfo.facialTrackingType))
{
ERROR("CreateFacialTracker() " + createInfo.facialTrackingType + " is NOT supported.");
facialTracker = 0;
return XrResult.XR_ERROR_VALIDATION_FAILURE;
}
var result = CreateFacialTrackerHTC(createInfo, out facialTracker);
DEBUG("CreateFacialTracker() " + createInfo.facialTrackingType + ", CreateFacialTrackerHTC = " + result + ", facialTracker: " + facialTracker);
if (result == XrResult.XR_SUCCESS)
{
if (createInfo.facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC)
{
hasEyeTracker = true;
m_EyeTracker = facialTracker;
DEBUG("CreateFacialTracker() m_EyeTracker " + m_EyeTracker);
}
if (createInfo.facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC)
{
hasLipTracker = true;
m_LipTracker = facialTracker;
DEBUG("CreateFacialTracker() m_LipTracker " + m_LipTracker);
}
}
return result;
}
/// <summary>
/// An application can create an <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> handle using CreateFacialTracker.
/// </summary>
/// <param name="facialTrackingType">The XrFacialTrackingTypeHTC describes which type of tracking the <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> is using.</param>
/// <returns>True for success.</returns>
public bool CreateFacialTracker(XrFacialTrackingTypeHTC facialTrackingType)
{
XrFacialTrackerCreateInfoHTC createInfo = new XrFacialTrackerCreateInfoHTC(
in_type: XrStructureType.XR_TYPE_FACIAL_TRACKER_CREATE_INFO_HTC,
in_next: IntPtr.Zero,
in_facialTrackingType: facialTrackingType);
var result = CreateFacialTracker(createInfo, out XrFacialTrackerHTC value);
DEBUG("CreateFacialTracker() " + createInfo.facialTrackingType + " tracker: " + value);
return result == XrResult.XR_SUCCESS;
}
/// <summary>
/// Releases the facial tracker and the underlying resources of the <see cref="XrFacialTrackingTypeHTC">facial tracking type</see> when finished with facial tracking experiences.
/// </summary>
/// <param name="facialTracker">Facial tracker in <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see>.</param>
/// <returns>XR_SUCCESS for success.</returns>
public XrResult DestroyFacialTracker(XrFacialTrackerHTC facialTracker)
{
XrResult result = DestroyFacialTrackerHTC(facialTracker);
DEBUG("DestroyFacialTracker() " + facialTracker + ", result: " + result);
return result;
}
/// <summary>
/// Releases the facial tracker and the underlying resources of the <see cref="XrFacialTrackingTypeHTC">facial tracking type</see> when finished with facial tracking experiences.
/// </summary>
/// <param name="facialTrackingType">The <see cref="XrFacialTrackingTypeHTC">XrFacialTrackingTypeHTC</see> describes which type of tracking the <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> is using.</param>
/// <returns>True for success.</returns>
public bool DestroyFacialTracker(XrFacialTrackingTypeHTC facialTrackingType)
{
if (facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC && !hasEyeTracker)
{
DEBUG("DestroyFacialTracker() no " + facialTrackingType + "tracker.");
return true;
}
if (facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC && !hasLipTracker)
{
DEBUG("DestroyFacialTracker() no " + facialTrackingType + "tracker.");
return true;
}
XrResult ret = XrResult.XR_ERROR_VALIDATION_FAILURE;
if (facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC)
{
ret = DestroyFacialTracker(m_EyeTracker);
hasEyeTracker = false;
m_EyeTracker = 0;
}
if (facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC)
{
ret = DestroyFacialTracker(m_LipTracker);
hasLipTracker = false;
m_LipTracker = 0;
}
return ret == XrResult.XR_SUCCESS;
}
private int eyeUpdateFrame = -1, lipUpdateFrame = -1;
private float[] defExpressionData = new float[(int)XrEyeExpressionHTC.XR_EYE_EXPRESSION_MAX_ENUM_HTC];
private float[] s_EyeExpressionData = new float[(int)XrEyeExpressionHTC.XR_EYE_EXPRESSION_MAX_ENUM_HTC];
private float[] s_LipExpressionData = new float[(int)XrLipExpressionHTC.XR_LIP_EXPRESSION_MAX_ENUM_HTC];
XrFacialExpressionsHTC facialExpressionsDef = new XrFacialExpressionsHTC(XrStructureType.XR_TYPE_FACIAL_EXPRESSIONS_HTC, IntPtr.Zero, false, 0, 0, IntPtr.Zero);
XrFacialExpressionsHTC m_FacialExpressionsEye = new XrFacialExpressionsHTC(XrStructureType.XR_TYPE_FACIAL_EXPRESSIONS_HTC, IntPtr.Zero, false, 0, 0, IntPtr.Zero);
XrFacialExpressionsHTC m_FacialExpressionsLip = new XrFacialExpressionsHTC(XrStructureType.XR_TYPE_FACIAL_EXPRESSIONS_HTC, IntPtr.Zero, false, 0, 0, IntPtr.Zero);
/// <summary>
/// Retrieves an array of blend shape values for a facial expression at a given time.
/// </summary>
/// <param name="facialTrackingType">The <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XrFacialTrackingTypeHTC">XrFacialTrackingTypeHTC</see> describes which type of tracking the <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> is using.</param>
/// <param name="expressionWeightings">A float array filled in by the runtime, specifying the weightings for each blend shape. The array size is <see cref="XrEyeExpressionHTC.XR_EYE_EXPRESSION_MAX_ENUM_HTC">XR_EYE_EXPRESSION_MAX_ENUM_HTC</see> for eye expression and <see cref="XrLipExpressionHTC.XR_LIP_EXPRESSION_MAX_ENUM_HTC">XR_LIP_EXPRESSION_MAX_ENUM_HTC</see> for lip expression.</param>
/// <returns>True for success.</returns>
public bool GetFacialExpressions(XrFacialTrackingTypeHTC facialTrackingType, out float[] expressionWeightings)
{
expressionWeightings = defExpressionData;
if (facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC)
{
if (GetFacialExpressions(m_EyeTracker, out XrFacialExpressionsHTC facialExpressions) == XrResult.XR_SUCCESS)
{
if (facialExpressions.isActive)
{
Marshal.Copy(facialExpressions.expressionWeightings, s_EyeExpressionData, 0, (int)facialExpressions.expressionCount);
expressionWeightings = s_EyeExpressionData;
return true;
}
}
}
if (facialTrackingType == XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC)
{
if (GetFacialExpressions(m_LipTracker, out XrFacialExpressionsHTC facialExpressions) == XrResult.XR_SUCCESS)
{
if (facialExpressions.isActive)
{
Marshal.Copy(facialExpressions.expressionWeightings, s_LipExpressionData, 0, (int)facialExpressions.expressionCount);
expressionWeightings = s_LipExpressionData;
return true;
}
}
}
return false;
}
/// <summary>
/// Retrieves the <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XrFacialExpressionsHTC">XrFacialExpressionsHTC</see> data of a <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XrFacialTrackerHTC">XrFacialTrackerHTC</see>.
/// </summary>
/// <param name="facialTracker">The <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XrFacialTrackerHTC">XrFacialTrackerHTC</see> handle represents the resources for an facial tracker of the specific facial tracking type.</param>
/// <param name="facialExpressions">Structure returns data of a lip facial expression or an eye facial expression.</param>
/// <returns>XR_SUCCESS for success.</returns>
public XrResult GetFacialExpressions(XrFacialTrackerHTC facialTracker, out XrFacialExpressionsHTC facialExpressions)
{
facialExpressions = facialExpressionsDef;
XrResult result = XrResult.XR_ERROR_VALIDATION_FAILURE;
if (facialTracker == m_EyeTracker)
{
if (eyeUpdateFrame == Time.frameCount)
{
facialExpressions = m_FacialExpressionsEye;
return XrResult.XR_SUCCESS;
}
eyeUpdateFrame = Time.frameCount;
// Initialize the XrFacialExpressionsHTC struct of Eye.
if (m_FacialExpressionsEye.expressionCount == 0)
{
m_FacialExpressionsEye.type = XrStructureType.XR_TYPE_FACIAL_EXPRESSIONS_HTC;
m_FacialExpressionsEye.next = IntPtr.Zero;
m_FacialExpressionsEye.isActive = false;
m_FacialExpressionsEye.sampleTime = 0;
m_FacialExpressionsEye.expressionCount = (UInt32)XrEyeExpressionHTC.XR_EYE_EXPRESSION_MAX_ENUM_HTC;
m_FacialExpressionsEye.expressionWeightings = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(float)) * (int)m_FacialExpressionsEye.expressionCount);
}
result = GetFacialExpressionsHTC(facialTracker, ref m_FacialExpressionsEye);
if (result == XrResult.XR_SUCCESS) { facialExpressions = m_FacialExpressionsEye; }
}
if (facialTracker == m_LipTracker)
{
if (lipUpdateFrame == Time.frameCount)
{
facialExpressions = m_FacialExpressionsLip;
return XrResult.XR_SUCCESS;
}
lipUpdateFrame = Time.frameCount;
// Initialize the XrFacialExpressionsHTC struct of Lip.
if (m_FacialExpressionsLip.expressionCount == 0)
{
m_FacialExpressionsLip.type = XrStructureType.XR_TYPE_FACIAL_EXPRESSIONS_HTC;
m_FacialExpressionsLip.next = IntPtr.Zero;
m_FacialExpressionsLip.isActive = false;
m_FacialExpressionsLip.sampleTime = 0;
m_FacialExpressionsLip.expressionCount = (UInt32)XrLipExpressionHTC.XR_LIP_EXPRESSION_MAX_ENUM_HTC;
m_FacialExpressionsLip.expressionWeightings = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(float)) * (int)m_FacialExpressionsLip.expressionCount);
}
result = GetFacialExpressionsHTC(facialTracker, ref m_FacialExpressionsLip);
if (result == XrResult.XR_SUCCESS) { facialExpressions = m_FacialExpressionsLip; }
}
return result;
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 178a55074d1e6624d8cfbdc23367a56f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,449 @@
// "VIVE SDK
// © 2020 HTC Corporation. All Rights Reserved.
//
// Unless otherwise required by copyright law and practice,
// upon the execution of HTC SDK license agreement,
// HTC grants you access to and use of the VIVE SDK(s).
// You shall fully comply with all of HTC's SDK license agreement terms and
// conditions signed by you and all SDK and API requirements,
// specifications, and documentation provided by HTC to You."
using System;
using System.Runtime.InteropServices;
namespace VIVE.OpenXR.FacialTracking
{
/// <summary>
/// The XrFacialTrackerHTC handle represents the resources for a facial tracker of the specific facial tracking type.
/// </summary>
public struct XrFacialTrackerHTC : IEquatable<UInt64>
{
private readonly UInt64 value;
public XrFacialTrackerHTC(UInt64 u)
{
value = u;
}
public static implicit operator UInt64(XrFacialTrackerHTC equatable)
{
return equatable.value;
}
public static implicit operator XrFacialTrackerHTC(UInt64 u)
{
return new XrFacialTrackerHTC(u);
}
public bool Equals(XrFacialTrackerHTC other)
{
return value == other.value;
}
public bool Equals(UInt64 other)
{
return value == other;
}
public override bool Equals(object obj)
{
return obj is XrFacialTrackerHTC && Equals((XrFacialTrackerHTC)obj);
}
public override int GetHashCode()
{
return value.GetHashCode();
}
public override string ToString()
{
return value.ToString();
}
public static bool operator ==(XrFacialTrackerHTC a, XrFacialTrackerHTC b) { return a.Equals(b); }
public static bool operator !=(XrFacialTrackerHTC a, XrFacialTrackerHTC b) { return !a.Equals(b); }
public static bool operator >=(XrFacialTrackerHTC a, XrFacialTrackerHTC b) { return a.value >= b.value; }
public static bool operator <=(XrFacialTrackerHTC a, XrFacialTrackerHTC b) { return a.value <= b.value; }
public static bool operator >(XrFacialTrackerHTC a, XrFacialTrackerHTC b) { return a.value > b.value; }
public static bool operator <(XrFacialTrackerHTC a, XrFacialTrackerHTC b) { return a.value < b.value; }
public static XrFacialTrackerHTC operator +(XrFacialTrackerHTC a, XrFacialTrackerHTC b) { return a.value + b.value; }
public static XrFacialTrackerHTC operator -(XrFacialTrackerHTC a, XrFacialTrackerHTC b) { return a.value - b.value; }
public static XrFacialTrackerHTC operator *(XrFacialTrackerHTC a, XrFacialTrackerHTC b) { return a.value * b.value; }
public static XrFacialTrackerHTC operator /(XrFacialTrackerHTC a, XrFacialTrackerHTC b)
{
if (b.value == 0)
{
throw new DivideByZeroException();
}
return a.value / b.value;
}
}
/// <summary>
/// The XrFacialTrackingTypeHTC describes which type of tracking the <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> is using.
/// </summary>
public enum XrFacialTrackingTypeHTC
{
/// <summary>
/// Specifies this handle will observe eye expressions, with values indexed by <see cref="XrEyeExpressionHTC">XrEyeExpressionHTC</see> whose count is <see cref="ViveFacialTrackingHelper.XR_FACIAL_EXPRESSION_EYE_COUNT_HTC">XR_FACIAL_EXPRESSION_EYE_COUNT_HTC</see>.
/// </summary>
XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC = 1,
/// <summary>
/// Specifies this handle will observe lip expressions, with values indexed by <see cref="XrLipExpressionHTC">XrLipExpressionHTC</see> whose count is <see cref="ViveFacialTrackingHelper.XR_FACIAL_EXPRESSION_LIP_COUNT_HTC">XR_FACIAL_EXPRESSION_LIP_COUNT_HTC</see>.
/// </summary>
XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC = 2,
};
/// <summary>
/// Indicates the eye expressions. Refer to <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XrEyeExpressionHTC">XrEyeExpressionHTC</see> about the illustrations.
/// </summary>
public enum XrEyeExpressionHTC
{
/// <summary>
/// This blend shape influences blinking of the left eye. When this value goes higher, the left eye approaches closed.
/// </summary>
XR_EYE_EXPRESSION_LEFT_BLINK_HTC = 0,
/// <summary>
/// This blend shape keeps the left eye wide open; at that time the XR_EYE_EXPRESSION_LEFT_BLINK_HTC value is 0.
/// </summary>
XR_EYE_EXPRESSION_LEFT_WIDE_HTC = 1,
/// <summary>
/// This blend shape influences blinking of the right eye. When this value goes higher, the right eye approaches closed.
/// </summary>
XR_EYE_EXPRESSION_RIGHT_BLINK_HTC = 2,
/// <summary>
/// This blend shape keeps the right eye wide open; at that time the XR_EYE_EXPRESSION_RIGHT_BLINK_HTC value is 0.
/// </summary>
XR_EYE_EXPRESSION_RIGHT_WIDE_HTC = 3,
/// <summary>
/// This blend shape closes the left eye tightly; at that time the XR_EYE_EXPRESSION_LEFT_BLINK_HTC value is 1.
/// </summary>
XR_EYE_EXPRESSION_LEFT_SQUEEZE_HTC = 4,
/// <summary>
/// This blend shape closes the right eye tightly; at that time the XR_EYE_EXPRESSION_RIGHT_BLINK_HTC value is 1.
/// </summary>
XR_EYE_EXPRESSION_RIGHT_SQUEEZE_HTC = 5,
/// <summary>
/// This blendShape influences the muscles around the left eye, moving these muscles further downward with a higher value.
/// </summary>
XR_EYE_EXPRESSION_LEFT_DOWN_HTC = 6,
/// <summary>
/// This blendShape influences the muscles around the right eye, moving these muscles further downward with a higher value.
/// </summary>
XR_EYE_EXPRESSION_RIGHT_DOWN_HTC = 7,
/// <summary>
/// This blendShape influences the muscles around the left eye, moving these muscles further leftward with a higher value.
/// </summary>
XR_EYE_EXPRESSION_LEFT_OUT_HTC = 8,
/// <summary>
/// This blendShape influences the muscles around the right eye, moving these muscles further leftward with a higher value.
/// </summary>
XR_EYE_EXPRESSION_RIGHT_IN_HTC = 9,
/// <summary>
/// This blendShape influences the muscles around the left eye, moving these muscles further rightward with a higher value.
/// </summary>
XR_EYE_EXPRESSION_LEFT_IN_HTC = 10,
/// <summary>
/// This blendShape influences the muscles around the right eye, moving these muscles further rightward with a higher value.
/// </summary>
XR_EYE_EXPRESSION_RIGHT_OUT_HTC = 11,
/// <summary>
/// This blendShape influences the muscles around the left eye, moving these muscles further upward with a higher value.
/// </summary>
XR_EYE_EXPRESSION_LEFT_UP_HTC = 12,
/// <summary>
/// This blendShape influences the muscles around the right eye, moving these muscles further upward with a higher value.
/// </summary>
XR_EYE_EXPRESSION_RIGHT_UP_HTC = 13,
XR_EYE_EXPRESSION_MAX_ENUM_HTC = 14
};
/// <summary>
/// Indicates the lip expressions. Refer to <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XrLipExpressionHTC">XrLipExpressionHTC</see> about the illustrations.
/// </summary>
public enum XrLipExpressionHTC
{
/// <summary>
/// This blend shape moves the jaw further rightward with a higher value.
/// </summary>
XR_LIP_EXPRESSION_JAW_RIGHT_HTC = 0,
/// <summary>
/// This blend shape moves the jaw further leftward with a higher value.
/// </summary>
XR_LIP_EXPRESSION_JAW_LEFT_HTC = 1,
/// <summary>
/// This blend shape moves the jaw forward with a higher value.
/// </summary>
XR_LIP_EXPRESSION_JAW_FORWARD_HTC = 2,
/// <summary>
/// This blend shape opens the mouth further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_JAW_OPEN_HTC = 3,
/// <summary>
/// This blend shape stretches the jaw further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_APE_SHAPE_HTC = 4,
/// <summary>
/// This blend shape moves your upper lip rightward.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_UPPER_RIGHT_HTC = 5,
/// <summary>
/// This blend shape moves your upper lip leftward.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_UPPER_LEFT_HTC = 6,
/// <summary>
/// This blend shape moves your lower lip rightward.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_LOWER_RIGHT_HTC = 7,
/// <summary>
/// This blend shape moves your lower lip leftward.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_LOWER_LEFT_HTC = 8,
/// <summary>
/// This blend shape pouts the upper lip. It can be used with <see cref="XrLipExpressionHTC.XR_LIP_EXPRESSION_MOUTH_UPPER_UPRIGHT_HTC">XR_LIP_EXPRESSION_MOUTH_UPPER_UPRIGHT_HTC</see> and <see cref="XrLipExpressionHTC.XR_LIP_EXPRESSION_MOUTH_UPPER_UPLEFT_HTC">XR_LIP_EXPRESSION_MOUTH_UPPER_UPLEFT_HTC</see> to complete the upper O mouth shape.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_UPPER_OVERTURN_HTC = 9,
/// <summary>
/// This blend shape pouts the lower lip. It can be used with <see cref="XrLipExpressionHTC.XR_LIP_EXPRESSION_MOUTH_LOWER_DOWNRIGHT_HTC">XR_LIP_EXPRESSION_MOUTH_LOWER_DOWNRIGHT_HTC</see> and <see cref="XrLipExpressionHTC.XR_LIP_EXPRESSION_MOUTH_LOWER_DOWNLEFT_HTC">XR_LIP_EXPRESSION_MOUTH_LOWER_DOWNLEFT_HTC</see> to complete the lower O mouth shape.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_LOWER_OVERTURN_HTC = 10,
/// <summary>
/// This blend shape allows the lips to pout more with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_POUT_HTC = 11,
/// <summary>
/// This blend shape raises the right side of the mouth further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_SMILE_RIGHT_HTC = 12,
/// <summary>
/// This blend shape raises the left side of the mouth further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_SMILE_LEFT_HTC = 13,
/// <summary>
/// This blend shape lowers the right side of the mouth further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_SAD_RIGHT_HTC = 14,
/// <summary>
/// This blend shape lowers the left side of the mouth further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_SAD_LEFT_HTC = 15,
/// <summary>
/// This blend shape puffs up the right side of the cheek further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_CHEEK_PUFF_RIGHT_HTC = 16,
/// <summary>
/// This blend shape puffs up the left side of the cheek further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_CHEEK_PUFF_LEFT_HTC = 17,
/// <summary>
/// This blend shape sucks in the cheeks on both sides further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_CHEEK_SUCK_HTC = 18,
/// <summary>
/// This blend shape raises the right upper lip further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_UPPER_UPRIGHT_HTC = 19,
/// <summary>
/// This blend shape raises the left upper lip further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_UPPER_UPLEFT_HTC = 20,
/// <summary>
/// This blend shape lowers the right lower lip further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_LOWER_DOWNRIGHT_HTC = 21,
/// <summary>
/// This blend shape lowers the left lower lip further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_LOWER_DOWNLEFT_HTC = 22,
/// <summary>
/// This blend shape rolls in the upper lip further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_UPPER_INSIDE_HTC = 23,
/// <summary>
/// This blend shape rolls in the lower lip further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_LOWER_INSIDE_HTC = 24,
/// <summary>
/// This blend shape stretches the lower lip further and lays it on the upper lip further with a higher value.
/// </summary>
XR_LIP_EXPRESSION_MOUTH_LOWER_OVERLAY_HTC = 25,
/// <summary>
/// This blend shape sticks the tongue out slightly.
///
/// In step 1 of extending the tongue, the tongue mainly lifts up and extends only slightly beyond the teeth.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_LONGSTEP1_HTC = 26,
/// <summary>
/// This blend shape sticks the tongue out and left extremely.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_LEFT_HTC = 27,
/// <summary>
/// This blend shape sticks the tongue out and right extremely.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_RIGHT_HTC = 28,
/// <summary>
/// This blend shape sticks the tongue out and up extremely.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_UP_HTC = 29,
/// <summary>
/// This blend shape sticks the tongue out and down extremely.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_DOWN_HTC = 30,
/// <summary>
/// This blend shape sticks the tongue out with roll type.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_ROLL_HTC = 31,
/// <summary>
/// This blend shape sticks the tongue out extremely.
///
/// Continuing from step 1, this extends the tongue to its full length.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_LONGSTEP2_HTC = 32,
/// <summary>
/// This blend shape is not meaningful on its own. When both the right and up blend shapes appear at the same time, the tongue will be deformed.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_UPRIGHT_MORPH_HTC = 33,
/// <summary>
/// This blend shape is not meaningful on its own. When both the left and up blend shapes appear at the same time, the tongue will be deformed.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_UPLEFT_MORPH_HTC = 34,
/// <summary>
/// This blend shape is not meaningful on its own. When both the right and down blend shapes appear at the same time, the tongue will be deformed.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_DOWNRIGHT_MORPH_HTC = 35,
/// <summary>
/// This blend shape is not meaningful on its own. When both the left and down blend shapes appear at the same time, the tongue will be deformed.
/// </summary>
XR_LIP_EXPRESSION_TONGUE_DOWNLEFT_MORPH_HTC = 36,
XR_LIP_EXPRESSION_MAX_ENUM_HTC = 37
};
/// <summary>
/// An application can inspect whether the system supports the two facial tracking types by extending the <see cref="XrSystemProperties">XrSystemProperties</see> with the <see cref="XrSystemFacialTrackingPropertiesHTC">XrSystemFacialTrackingPropertiesHTC</see> structure when calling <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetSystemProperties">xrGetSystemProperties</see>.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrSystemFacialTrackingPropertiesHTC
{
/// <summary>
/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <summary>
/// Indicates if the current system is capable of generating eye expressions.
/// </summary>
public XrBool32 supportEyeFacialTracking;
/// <summary>
/// Indicates if the current system is capable of generating lip expressions.
/// </summary>
public XrBool32 supportLipFacialTracking;
};
/// <summary>
/// The XrFacialTrackerCreateInfoHTC structure describes the information to create an <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> handle.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrFacialTrackerCreateInfoHTC
{
/// <summary>
/// The <see cref="XrStructureType">XrStructureType</see> of this structure.
/// </summary>
public XrStructureType type;
/// <summary>
/// NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.
/// </summary>
public IntPtr next;
/// <summary>
/// An XrFacialTrackingTypeHTC which describes which type of facial tracking should be used for this handle.
/// </summary>
public XrFacialTrackingTypeHTC facialTrackingType;
/// <param name="in_type">The <see cref="XrStructureType">XrStructureType</see> of this structure.</param>
/// <param name="in_next">NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.</param>
/// <param name="in_facialTrackingType">An XrFacialTrackingTypeHTC which describes which type of facial tracking should be used for this handle.</param>
public XrFacialTrackerCreateInfoHTC(XrStructureType in_type, IntPtr in_next, XrFacialTrackingTypeHTC in_facialTrackingType)
{
type = in_type;
next = in_next;
facialTrackingType = in_facialTrackingType;
}
};
/// <summary>
/// XrFacialExpressionsHTC structure returns data of a lip facial expression or an eye facial expression.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct XrFacialExpressionsHTC
{
/// <summary>The XrStructureType of this structure.</summary>
public XrStructureType type;
/// <summary>NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.</summary>
public IntPtr next;
/// <summary>An XrBool32 indicating if the facial tracker is active.</summary>
public XrBool32 isActive;
/// <summary>The time at which the expression is expressed.</summary>
public XrTime sampleTime;
/// <summary>A uint32_t describing the count of elements in expressionWeightings array.</summary>
public UInt32 expressionCount;
/// <summary>A float array filled in by the runtime, specifying the weightings for each blend shape.</summary>
public IntPtr expressionWeightings;
/// <param name="in_type">The XrStructureType of this structure.</param>
/// <param name="in_next">NULL or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR or this extension.</param>
/// <param name="in_isActive">An XrBool32 indicating if the facial tracker is active.</param>
/// <param name="in_sampleTime">When in time the expression is expressed.</param>
/// <param name="in_expressionCount">>A uint32_t describing the count of elements in expressionWeightings array.</param>
/// <param name="in_expressionWeightings">A float array filled in by the runtime, specifying the weightings for each blend shape.</param>
public XrFacialExpressionsHTC(
XrStructureType in_type,
IntPtr in_next,
XrBool32 in_isActive,
XrTime in_sampleTime,
UInt32 in_expressionCount,
IntPtr in_expressionWeightings)
{
type = in_type;
next = in_next;
isActive = in_isActive;
sampleTime = in_sampleTime;
expressionCount = in_expressionCount;
expressionWeightings = in_expressionWeightings;
}
};
public static class ViveFacialTrackingHelper
{
/// <summary> The number of blend shapes in an expression of type <see cref="XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC">XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC</see>. </summary>
public const UInt32 XR_FACIAL_EXPRESSION_EYE_COUNT_HTC = 14;
/// <summary> The number of blend shapes in an expression of type <see cref="XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC">XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC.</see> </summary>
public const UInt32 XR_FACIAL_EXPRESSION_LIP_COUNT_HTC = 37;
/// <summary>
/// The delegate function of <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateFacialTrackerHTC">xrCreateFacialTrackerHTC</see>.
/// </summary>
/// <param name="session">An XrSession in which the facial expression will be active.</param>
/// <param name="createInfo">The <see cref="XrFacialTrackerCreateInfoHTC">XrFacialTrackerCreateInfoHTC</see> used to specify the facial tracking type.</param>
/// <param name="facialTracker">The returned <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> handle.</param>
/// <returns>XR_SUCCESS for success.</returns>
public delegate XrResult xrCreateFacialTrackerHTCDelegate(
XrSession session,
XrFacialTrackerCreateInfoHTC createInfo,
out XrFacialTrackerHTC facialTracker);
/// <summary>
/// The delegate function of <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrDestroyFacialTrackerHTC">xrDestroyFacialTrackerHTC</see>.
/// </summary>
/// <param name="facialTracker">An XrFacialTrackerHTC previously created by xrCreateFacialTrackerHTC.</param>
/// <returns>XR_SUCCESS for success.</returns>
public delegate XrResult xrDestroyFacialTrackerHTCDelegate(
XrFacialTrackerHTC facialTracker);
/// <summary>
/// The delegate function of <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrGetFacialExpressionsHTC">xrGetFacialExpressionsHTC</see>.
/// </summary>
/// <param name="facialTracker">An <see cref="XrFacialTrackerHTC">XrFacialTrackerHTC</see> previously created by <see href="https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#xrCreateFacialTrackerHTC">xrCreateFacialTrackerHTC</see>.</param>
/// <param name="facialExpressions">A pointer to <see cref="XrFacialExpressionsHTC">XrFacialExpressionsHTC</see> receiving the returned facial expressions.</param>
/// <returns>XR_SUCCESS for success.</returns>
public delegate XrResult xrGetFacialExpressionsHTCDelegate(
XrFacialTrackerHTC facialTracker,
ref XrFacialExpressionsHTC facialExpressions);
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0ff2614ff91df1649bef349243f26eff
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: