3 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Andy_YC_Chen | 8c0baa899c | Update package file to 1.0.13 | 2023-06-26 16:37:49 +08:00 |
| VR164000 | 2c956c9090 | Rename installer | 2023-06-16 15:30:18 +08:00 |
| VR164000 | 0206889c0b | add installer | 2023-06-15 11:48:52 +08:00 |
13 changed files with 45 additions and 12 deletions

Binary file not shown.

View File

@@ -1 +1,10 @@
# VIVE-OpenXR-PC
# VIVE OpenXR Plugin - Windows
The "VIVE OpenXR Plugin - Windows" plugin package contains some OpenXR Unity features for developers to use.
## How to install:
### 1. Download the unitypackage from GitHub:
https://github.com/ViveSoftware/VIVE-OpenXR-PC/blob/master/Installer/ViveOpenXRInstaller.unitypackage
### 2. Import the unitypackage file into the Unity Editor.
### 3. In the Unity menu bar, select "VIVE/OpenXR_PC Installer", then press "Install or Update latest version".
Note: For "Install specific version", copy a release version number from https://github.com/ViveSoftware/VIVE-OpenXR-PC/releases,
then paste it (for example, "1.0.12") to install that specific version.
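If you prefer a scripted import over the menu route, Unity's `AssetDatabase.ImportPackage` can load the downloaded file. A minimal editor-only sketch (the menu path and package location are illustrative assumptions, not part of the plugin):

```csharp
#if UNITY_EDITOR
using UnityEditor;
using UnityEngine;

// Editor-only helper that imports the downloaded .unitypackage from a fixed path.
// The path below is an example; point it at wherever ViveOpenXRInstaller.unitypackage was saved.
public static class ViveOpenXRPackageImporter
{
    [MenuItem("Tools/Import VIVE OpenXR Installer Package")]   // hypothetical menu entry
    public static void Import()
    {
        const string packagePath = "Downloads/ViveOpenXRInstaller.unitypackage";  // assumed location
        // 'true' opens Unity's interactive import dialog so the package contents can be reviewed.
        AssetDatabase.ImportPackage(packagePath, true);
        Debug.Log($"Requested import of {packagePath}");
    }
}
#endif
```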

View File

@@ -1,4 +1,4 @@
# **VIVE OpenXR Plugin - Windows** For Unity - v1.0.12
# **VIVE OpenXR Plugin - Windows** For Unity - v1.0.13
Copyright HTC Corporation. All Rights Reserved.
**VIVE OpenXR Plugin - Windows**: This plugin provides OpenXR support based on the following specifications.
@@ -10,6 +10,11 @@ Copyright HTC Corporation. All Rights Reserved.
- [Hand Interaction](https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_HTC_hand_interaction)
- [Palm pose](https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#XR_EXT_palm_pose)
---
## Changes for v1.0.13 - 2023/06/26
- Fix an issue where "OpenXR.Input.PoseControl" and "OpenXR.Input.Pose" are deprecated as of OpenXR Plugin 1.6.0.
1. When **USE_INPUT_SYSTEM_POSE_CONTROL** is defined, use InputSystem.XR.PoseControl; otherwise, fall back to OpenXR.Input.PoseControl for backward compatibility.
2. When **USE_INPUT_SYSTEM_POSE_CONTROL** is defined, use InputSystem.XR.PoseState; otherwise, fall back to OpenXR.Input.Pose for backward compatibility.
## Changes for v1.0.12 - 2023/06/02
- Remove the Eye gaze sample. It is recommended to use the Controller sample provided by the Unity OpenXR Plugin to test eye gaze.
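The v1.0.13 pose fix above comes down to a pair of compile-time aliases, applied in the hunks later in this diff. A condensed sketch of how the aliased type is then consumed (the component name and `InputActionReference` field are assumptions for illustration):

```csharp
using UnityEngine;
using UnityEngine.InputSystem;
#if USE_INPUT_SYSTEM_POSE_CONTROL
// OpenXR Plugin 1.6.0+: the pose type lives in the Input System package.
using Pose = UnityEngine.InputSystem.XR.PoseState;
#else
// Older OpenXR Plugin versions keep the pose type under OpenXR.Input.
using Pose = UnityEngine.XR.OpenXR.Input.Pose;
#endif

public class PoseReader : MonoBehaviour
{
    // Assumed to reference a pose-typed input action (e.g. a device pose binding).
    [SerializeField] private InputActionReference poseAction;

    void Update()
    {
        if (poseAction == null || poseAction.action == null || !poseAction.action.enabled)
            return;

        // The alias lets the same ReadValue<> call compile against either package layout.
        Pose pose = poseAction.action.ReadValue<Pose>();
        transform.SetPositionAndRotation(pose.position, pose.rotation);
    }
}
```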

View File

@@ -9,15 +9,18 @@ using UnityEngine.InputSystem.XR;
using UnityEditor;
#endif
#if USE_INPUT_SYSTEM_POSE_CONTROL
using PoseControl = UnityEngine.InputSystem.XR.PoseControl;
#else
using PoseControl = UnityEngine.XR.OpenXR.Input.PoseControl;
#endif
namespace UnityEngine.XR.OpenXR.Features
{
/// <summary>
/// This <see cref="OpenXRInteractionFeature"/> enables the use of HTC Vive Controllers interaction profiles in OpenXR.
/// </summary>
#if UNITY_EDITOR
[UnityEditor.XR.OpenXR.Features.OpenXRFeature(UiName = "HTC Vive Cosmos Controller Support",
BuildTargetGroups = new[] { BuildTargetGroup.Standalone, BuildTargetGroup.WSA },
Company = "HTC",

View File

@@ -9,7 +9,7 @@ To help software developers create an application with actual facial expressions
1. **Edit** > **Project Settings** > **XR Plug-in Management** > Select **OpenXR**, click the exclamation mark next to it, then choose **Fix All**.
2. **Edit** > **Project Settings** > **XR Plug-in Management** > **OpenXR** > Add Interaction profiles for your device.
3. **Edit** > **Project Settings** > **XR Plug-in Management** > **OpenXR** > Select **Facial Tracking** under **VIVE OpenXR** Feature Groups.
4. In the Unity Project window, select the sample scene file in **Assets** > **Samples** > **VIVE OpenXR Plugin - Windows** > **1.0.12** > **FacialTracking Example** > **Scenes** > **FaceSample.unity** then click Play.
4. In the Unity Project window, select the sample scene file in **Assets** > **Samples** > **VIVE OpenXR Plugin - Windows** > **1.0.13** > **FacialTracking Example** > **Scenes** > **FaceSample.unity** then click Play.
## How to use VIVE OpenXR Facial Tracking Unity Feature
1. Import VIVE OpenXR Plugin - Windows
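As a quick sanity check that step 3 took effect, the active OpenXR settings can be listed at runtime. A small sketch using only the generic OpenXR feature API (no VIVE-specific calls are assumed):

```csharp
using UnityEngine;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features;

// Logs every OpenXR feature in the active settings and whether it is enabled,
// which confirms that the VIVE Facial Tracking feature was actually turned on.
public class OpenXRFeatureReport : MonoBehaviour
{
    void Start()
    {
        OpenXRSettings settings = OpenXRSettings.Instance;
        if (settings == null)
        {
            Debug.LogWarning("No active OpenXR settings; is the OpenXR loader selected in XR Plug-in Management?");
            return;
        }

        foreach (OpenXRFeature feature in settings.GetFeatures<OpenXRFeature>())
            Debug.Log($"{feature.name}: enabled = {feature.enabled}");
    }
}
```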

View File

@@ -9,7 +9,11 @@ using UnityEngine.XR.OpenXR.Input;
using UnityEditor;
#endif
#if USE_INPUT_SYSTEM_POSE_CONTROL
using PoseControl = UnityEngine.InputSystem.XR.PoseControl;
#else
using PoseControl = UnityEngine.XR.OpenXR.Input.PoseControl;
#endif
namespace UnityEngine.XR.OpenXR.Features
{

View File

@@ -9,7 +9,11 @@ using UnityEngine.XR.OpenXR.Input;
using UnityEditor;
#endif
#if USE_INPUT_SYSTEM_POSE_CONTROL
using PoseControl = UnityEngine.InputSystem.XR.PoseControl;
#else
using PoseControl = UnityEngine.XR.OpenXR.Input.PoseControl;
#endif
namespace UnityEngine.XR.OpenXR.Features
{

View File

@@ -9,7 +9,7 @@ To help software developers create an application for locating hand joints with
1. **Edit** > **Project Settings** > **XR Plug-in Management** > Select **OpenXR**, click the exclamation mark next to it, then choose **Fix All**.
2. **Edit** > **Project Settings** > **XR Plug-in Management** > **OpenXR** > Add Interaction profiles for your device.
3. **Edit** > **Project Settings** > **XR Plug-in Management** > **OpenXR** > Select **Hand Tracking** under **VIVE OpenXR** Feature Groups.
4. In the Unity Project window, select the sample scene file in **Assets** > **Samples** > **VIVE OpenXR Plugin - Windows** > **1.0.12** > **HandTracking Example** > **Scenes** > **HandTrackingScene.unity** then click Play.
4. In the Unity Project window, select the sample scene file in **Assets** > **Samples** > **VIVE OpenXR Plugin - Windows** > **1.0.13** > **HandTracking Example** > **Scenes** > **HandTrackingScene.unity** then click Play.
## Use the VIVE OpenXR Hand Tracking Unity Feature to draw a skeleton hand.
1. Import VIVE OpenXR Plugin - Windows
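The drawing side of the skeleton-hand sample reduces to connecting joint positions with a `LineRenderer`. A minimal sketch; how the joint positions are obtained is left to the plugin's hand tracking API (as in the HandTrackingScene sample), so `SetJoints` is an assumed entry point:

```csharp
using UnityEngine;

// Connects consecutive hand joint positions with a LineRenderer.
// This only handles the visualization; joint positions must be supplied
// each frame (e.g. from the plugin's hand tracking feature) via SetJoints().
[RequireComponent(typeof(LineRenderer))]
public class HandSkeletonLine : MonoBehaviour
{
    private LineRenderer line;
    private Vector3[] joints = new Vector3[0];

    void Awake()
    {
        line = GetComponent<LineRenderer>();
        line.widthMultiplier = 0.005f;   // thin bone lines
        line.useWorldSpace = true;
    }

    // Feed the latest joint positions, ordered along the bones to draw.
    public void SetJoints(Vector3[] jointPositions)
    {
        joints = jointPositions ?? new Vector3[0];
    }

    void Update()
    {
        line.positionCount = joints.Length;
        line.SetPositions(joints);
    }
}
```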

View File

@@ -2,7 +2,7 @@
// <auto-generated>
// This code was auto-generated by com.unity.inputsystem:InputActionCodeGenerator
// version 1.4.1
// from Assets/Samples/VIVE OpenXR Plugin - Windows/1.0.12/HandTracking Example/Scripts/InputMaster.inputactions
// from Assets/Samples/VIVE OpenXR Plugin - Windows/1.0.13/HandTracking Example/Scripts/InputMaster.inputactions
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.

View File

@@ -3,6 +3,11 @@ using UnityEngine.InputSystem;
using UnityEngine.UI;
using UnityEngine;
#if USE_INPUT_SYSTEM_POSE_CONTROL
using Pose = UnityEngine.InputSystem.XR.PoseState;
#else
using Pose = UnityEngine.XR.OpenXR.Input.Pose;
#endif
public class LineRender : MonoBehaviour
{
@@ -22,7 +27,7 @@ public class LineRender : MonoBehaviour
&& actionReferencePose.action.enabled && actionReferencePose.action.controls.Count > 0)
{
//GazeRayRenderer.SetActive(true);
UnityEngine.XR.OpenXR.Input.Pose poseval = actionReferencePose.action.ReadValue<UnityEngine.XR.OpenXR.Input.Pose>();
Pose poseval = actionReferencePose.action.ReadValue<Pose>();
Quaternion gazeRotation = poseval.rotation;
Quaternion orientation = new Quaternion(
1 * (gazeRotation.x),

View File

@@ -13,7 +13,7 @@ public class EnablePassThrough_cmd : MonoBehaviour
// Start is called before the first frame update
private void Awake() {
#if UNITY_EDITOR
path = System.IO.Path.Combine(Directory.GetCurrentDirectory() + "\\Assets\\Samples\\VIVE OpenXR Plugin - Windows\\1.0.12\\SceneUnderstanding Example\\PassThrough Plugin\\OfflineTool.exe");
path = System.IO.Path.Combine(Directory.GetCurrentDirectory() + "\\Assets\\Samples\\VIVE OpenXR Plugin - Windows\\1.0.13\\SceneUnderstanding Example\\PassThrough Plugin\\OfflineTool.exe");
#elif UNITY_STANDALONE
path = System.IO.Path.Combine(Directory.GetCurrentDirectory() + "/OfflineTool.exe");
#endif

View File

@@ -8,7 +8,7 @@ Demonstrate configuring, calculating and generating mesh of surrounding environm
1. **Edit** > **Project Settings** > **XR Plug-in Management** > Select **OpenXR**, click the exclamation mark next to it, then choose **Fix All**.
2. **Edit** > **Project Settings** > **XR Plug-in Management** > **OpenXR** > Add Interaction profiles for your device.
3. **Edit** > **Project Settings** > **XR Plug-in Management** > **OpenXR** > Select **Scene UnderStanding** and **Meshing Subsystem** under **VIVE OpenXR** Feature Groups.
4. In the Unity Project window, select the sample scene file in **Assets** > **Samples** > **VIVE OpenXR Plugin - Windows** > **1.0.12** > **SceneUnderstanding Example** > **Meshing Subsystem Feature** > **MeshingFeature.unity** then click Play.
4. In the Unity Project window, select the sample scene file in **Assets** > **Samples** > **VIVE OpenXR Plugin - Windows** > **1.0.13** > **SceneUnderstanding Example** > **Meshing Subsystem Feature** > **MeshingFeature.unity** then click Play.
## How to use VIVE OpenXR SceneUnderstanding Unity Feature
For the available OpenXR SceneUnderstanding functions, please refer to **SceneUnderstanding.cs**.
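The Meshing Subsystem feature surfaces environment meshes through Unity's standard `XRMeshSubsystem`. A rough polling sketch (the subsystem lookup and the `meshPrefab` field are illustrative assumptions, not the sample's exact code):

```csharp
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR;

// Polls the XRMeshSubsystem for added/updated meshes and bakes them into MeshFilters.
// Generic subsystem usage only; the sample's own logic lives in SceneUnderstanding.cs.
public class MeshingPoller : MonoBehaviour
{
    [SerializeField] private MeshFilter meshPrefab;   // assumed prefab with MeshFilter + MeshRenderer

    private XRMeshSubsystem meshSubsystem;
    private readonly List<MeshInfo> meshInfos = new List<MeshInfo>();
    private readonly Dictionary<MeshId, MeshFilter> meshes = new Dictionary<MeshId, MeshFilter>();

    void Start()
    {
        var subsystems = new List<XRMeshSubsystem>();
        SubsystemManager.GetSubsystems(subsystems);
        if (subsystems.Count > 0)
        {
            meshSubsystem = subsystems[0];
            if (!meshSubsystem.running)
                meshSubsystem.Start();   // the Meshing Subsystem feature may already have started it
        }
    }

    void Update()
    {
        if (meshSubsystem == null || !meshSubsystem.TryGetMeshInfos(meshInfos))
            return;

        foreach (var info in meshInfos)
        {
            if (info.ChangeState != MeshChangeState.Added && info.ChangeState != MeshChangeState.Updated)
                continue;

            if (!meshes.TryGetValue(info.MeshId, out var filter))
            {
                filter = Instantiate(meshPrefab, transform);
                filter.mesh = new Mesh();             // mesh instance the subsystem writes into
                meshes[info.MeshId] = filter;
            }

            // Asynchronously bake the subsystem's mesh data into the Unity Mesh.
            meshSubsystem.GenerateMeshAsync(info.MeshId, filter.mesh, null,
                MeshVertexAttributes.Normals,
                result => { /* result.Status reports MeshGenerationStatus */ });
        }
    }
}
```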

View File

@@ -1,7 +1,7 @@
{
"name": "com.htc.upm.vive.openxr",
"displayName": "VIVE OpenXR Plugin - Windows",
"version": "1.0.12",
"version": "1.0.13",
"unity": "2020.3",
"description": "VIVE OpenXR Plugin - Windows provides (1) Vive Facial Tracking (2) Vive Cosmos Controller (3) Scene Understanding (4) Hand Tracking (5) Vive Focus3 Controller support for Unity OpenXR (6) Hand Interaction (7) Palm pose.",
"keywords": [