// TrackedPoseDriver.cs
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
#if ENABLE_VR || ENABLE_AR
using UnityEngine.XR;
using UnityEngine.Experimental.XR.Interaction;
#endif

[assembly: InternalsVisibleTo("UnityEditor.SpatialTracking")]
namespace UnityEngine.SpatialTracking
{
    internal class TrackedPoseDriverDataDescription
    {
        internal struct PoseData
        {
            public List<string> PoseNames;
            public List<TrackedPoseDriver.TrackedPose> Poses;
        }

        internal static List<PoseData> DeviceData = new List<PoseData>
        {
            // Generic XR device
            new PoseData
            {
                PoseNames = new List<string>
                {
                    "Left Eye", "Right Eye", "Center Eye - HMD Reference", "Head", "Color Camera"
                },
                Poses = new List<TrackedPoseDriver.TrackedPose>
                {
                    TrackedPoseDriver.TrackedPose.LeftEye,
                    TrackedPoseDriver.TrackedPose.RightEye,
                    TrackedPoseDriver.TrackedPose.Center,
                    TrackedPoseDriver.TrackedPose.Head,
                    TrackedPoseDriver.TrackedPose.ColorCamera
                }
            },
            // Generic controller
            new PoseData
            {
                PoseNames = new List<string>
                {
                    "Left Controller", "Right Controller"
                },
                Poses = new List<TrackedPoseDriver.TrackedPose>
                {
                    TrackedPoseDriver.TrackedPose.LeftPose,
                    TrackedPoseDriver.TrackedPose.RightPose
                }
            },
            // Generic remote
            new PoseData
            {
                PoseNames = new List<string>
                {
                    "Device Pose"
                },
                Poses = new List<TrackedPoseDriver.TrackedPose>
                {
                    TrackedPoseDriver.TrackedPose.RemotePose,
                }
            },
        };
    }
    /// <summary>
    /// Bitflag enum which represents what data was set on an associated Pose struct
    /// </summary>
    [Flags]
    public enum PoseDataFlags
    {
        /// <summary>
        /// No data was actually set on the pose
        /// </summary>
        NoData = 0,
        /// <summary>
        /// If this flag is set, position data was updated on the associated pose struct
        /// </summary>
        Position = 1 << 0,
        /// <summary>
        /// If this flag is set, rotation data was updated on the associated pose struct
        /// </summary>
        Rotation = 1 << 1,
    }
    /// <summary>
    /// The PoseDataSource class acts as a container for the GetDataFromSource method, which pose providers should use to query data for a particular pose.
    /// </summary>
    public static class PoseDataSource
    {
#if ENABLE_AR || ENABLE_VR
        internal static List<XR.XRNodeState> nodeStates = new List<XR.XRNodeState>();

        internal static PoseDataFlags GetNodePoseData(XR.XRNode node, out Pose resultPose)
        {
            PoseDataFlags retData = PoseDataFlags.NoData;
            XR.InputTracking.GetNodeStates(nodeStates);
            foreach (XR.XRNodeState nodeState in nodeStates)
            {
                if (nodeState.nodeType == node)
                {
                    if (nodeState.TryGetPosition(out resultPose.position))
                    {
                        retData |= PoseDataFlags.Position;
                    }
                    if (nodeState.TryGetRotation(out resultPose.rotation))
                    {
                        retData |= PoseDataFlags.Rotation;
                    }
                    return retData;
                }
            }
            resultPose = Pose.identity;
            return retData;
        }
#endif
        /// <summary>The TryGetDataFromSource method is used to query data from the XRNode subsystem based on the provided pose source.</summary>
        /// <param name="poseSource">The pose source to request data for.</param>
        /// <param name="resultPose">The resulting pose data.</param>
        /// <returns>True if both position and rotation data were retrieved for the pose source, otherwise false.</returns>
        public static bool TryGetDataFromSource(TrackedPoseDriver.TrackedPose poseSource, out Pose resultPose)
        {
            return GetDataFromSource(poseSource, out resultPose) == (PoseDataFlags.Position | PoseDataFlags.Rotation);
        }
        /// <summary>The GetDataFromSource method is used to query data from the XRNode subsystem based on the provided pose source.</summary>
        /// <param name="poseSource">The pose source to request data for.</param>
        /// <param name="resultPose">The resulting pose data. This function will return the Center Eye pose if the Color Camera pose is not available.</param>
        /// <returns>Returns a bitflag which represents which data has been retrieved from the provided pose source.</returns>
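        /// <example>
        /// A minimal usage sketch (illustrative only, written against the API in this file). It polls the HMD head pose
        /// from inside a MonoBehaviour and applies only the components that were actually reported:
        /// <code>
        /// Pose headPose;
        /// PoseDataFlags flags = PoseDataSource.GetDataFromSource(TrackedPoseDriver.TrackedPose.Head, out headPose);
        /// if (flags.HasFlag(PoseDataFlags.Position))
        ///     transform.localPosition = headPose.position;
        /// if (flags.HasFlag(PoseDataFlags.Rotation))
        ///     transform.localRotation = headPose.rotation;
        /// </code>
        /// </example>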
        public static PoseDataFlags GetDataFromSource(TrackedPoseDriver.TrackedPose poseSource, out Pose resultPose)
        {
#if ENABLE_AR || ENABLE_VR
            switch (poseSource)
            {
                case TrackedPoseDriver.TrackedPose.RemotePose:
                {
                    PoseDataFlags retFlags = GetNodePoseData(XR.XRNode.RightHand, out resultPose);
                    if (retFlags == PoseDataFlags.NoData)
                        return GetNodePoseData(XR.XRNode.LeftHand, out resultPose);
                    return retFlags;
                }
                case TrackedPoseDriver.TrackedPose.LeftEye:
                {
                    return GetNodePoseData(XR.XRNode.LeftEye, out resultPose);
                }
                case TrackedPoseDriver.TrackedPose.RightEye:
                {
                    return GetNodePoseData(XR.XRNode.RightEye, out resultPose);
                }
                case TrackedPoseDriver.TrackedPose.Head:
                {
                    return GetNodePoseData(XR.XRNode.Head, out resultPose);
                }
                case TrackedPoseDriver.TrackedPose.Center:
                {
                    return GetNodePoseData(XR.XRNode.CenterEye, out resultPose);
                }
                case TrackedPoseDriver.TrackedPose.LeftPose:
                {
                    return GetNodePoseData(XR.XRNode.LeftHand, out resultPose);
                }
                case TrackedPoseDriver.TrackedPose.RightPose:
                {
                    return GetNodePoseData(XR.XRNode.RightHand, out resultPose);
                }
                case TrackedPoseDriver.TrackedPose.ColorCamera:
                {
                    // We fall back to CenterEye because we can't currently extend the XRNode structure, nor are we ready to replace it.
                    return GetNodePoseData(XR.XRNode.CenterEye, out resultPose);
                }
                default:
                {
                    Debug.LogWarningFormat("Unable to retrieve pose data for poseSource: {0}", poseSource.ToString());
                    break;
                }
            }
#endif
            resultPose = Pose.identity;
            return PoseDataFlags.NoData;
        }
    }
    // The DefaultExecutionOrder is needed because TrackedPoseDriver does some
    // of its work in regular Update and FixedUpdate calls, but this needs to
    // be done before regular user scripts have their own Update and
    // FixedUpdate calls, so that they correctly get the values for this
    // frame and not the previous one.
    // -32000 is the minimal possible execution order value; -30000 makes it
    // unlikely that users will accidentally choose lower values for their
    // scripts, but still allows for the possibility.
    /// <summary>
    /// The TrackedPoseDriver component applies the current Pose value of a tracked device to the transform of the GameObject.
    /// TrackedPoseDriver can track multiple types of devices including XR HMDs, controllers, and remotes.
    /// </summary>
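    /// <example>
    /// A minimal setup sketch (illustrative only; assumes an XR-enabled project with a main camera in the scene).
    /// It adds the component at runtime and asks it to track the HMD center eye:
    /// <code>
    /// GameObject cameraObject = Camera.main.gameObject;
    /// TrackedPoseDriver driver = cameraObject.AddComponent(typeof(TrackedPoseDriver)) as TrackedPoseDriver;
    /// driver.SetPoseSource(TrackedPoseDriver.DeviceType.GenericXRDevice, TrackedPoseDriver.TrackedPose.Center);
    /// driver.trackingType = TrackedPoseDriver.TrackingType.RotationAndPosition;
    /// driver.updateType = TrackedPoseDriver.UpdateType.UpdateAndBeforeRender;
    /// </code>
    /// </example>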
    [DefaultExecutionOrder(-30000)]
    [Serializable]
    [AddComponentMenu("XR/Tracked Pose Driver")]
    [HelpURL("https://docs.unity3d.com/Packages/com.unity.xr.legacyinputhelpers@2.1/manual/index.html")]
    public class TrackedPoseDriver : MonoBehaviour
    {
        /// <summary>
        /// The device being tracked by the tracked pose driver
        /// </summary>
        public enum DeviceType
        {
            /// <summary>
            /// A generic XR device; use this value for HMD and AR mobile device tracking
            /// </summary>
            GenericXRDevice = 0,
            /// <summary>
            /// An XR controller; use this value for controllers
            /// </summary>
            GenericXRController = 1,
            /// <summary>
            /// An XR remote; use this value for mobile remotes
            /// </summary>
            GenericXRRemote = 2
        }
        /// <summary>
        /// The list of endpoints that users can track with the <see cref="TrackedPoseDriver"/>
        /// </summary>
        public enum TrackedPose
        {
            /// <summary>
            /// The left eye of an HMD style device
            /// </summary>
            LeftEye = 0,
            /// <summary>
            /// The right eye of an HMD style device
            /// </summary>
            RightEye = 1,
            /// <summary>
            /// The center eye of an HMD style device; this is usually the default for most HMDs
            /// </summary>
            Center = 2,
            /// <summary>
            /// The head of an HMD style device
            /// </summary>
            Head = 3,
            /// <summary>
            /// The left hand controller pose
            /// </summary>
            LeftPose = 4,
            /// <summary>
            /// The right hand controller pose
            /// </summary>
            RightPose = 5,
            /// <summary>
            /// The color camera of a mobile device
            /// </summary>
            ColorCamera = 6,
            /// <summary>
            /// No longer used
            /// </summary>
            DepthCameraDeprecated = 7,
            /// <summary>
            /// No longer used
            /// </summary>
            FisheyeCameraDeprected = 8,
            /// <summary>
            /// No longer used
            /// </summary>
            DeviceDeprecated = 9,
            /// <summary>
            /// The pose of a mobile remote
            /// </summary>
            RemotePose = 10,
        }
        [SerializeField]
        DeviceType m_Device;

        /// <summary>
        /// This is used to indicate which device type the TrackedPoseDriver is currently tracking.
        /// </summary>
        public DeviceType deviceType
        {
            get { return m_Device; }
            internal set { m_Device = value; }
        }

        [SerializeField]
        TrackedPose m_PoseSource = TrackedPoseDriver.TrackedPose.Center;

        /// <summary>
        /// The pose being tracked by the tracked pose driver
        /// </summary>
        public TrackedPose poseSource
        {
            get { return m_PoseSource; }
            internal set { m_PoseSource = value; }
        }
        /// <summary>
        /// This method is used to set the device / pose pair for the SpatialTracking.TrackedPoseDriver. Setting an invalid combination of these values will return false.
        /// </summary>
        /// <param name="deviceType">The device type that we wish to track</param>
        /// <param name="pose">The pose source that we wish to track</param>
        /// <returns>True if the device / pose combination is valid, otherwise false</returns>
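        /// <example>
        /// A minimal usage sketch (illustrative only; the field <c>driver</c> is assumed to be a serialized
        /// reference to a TrackedPoseDriver in the scene):
        /// <code>
        /// bool accepted = driver.SetPoseSource(TrackedPoseDriver.DeviceType.GenericXRController,
        ///                                      TrackedPoseDriver.TrackedPose.LeftPose);
        /// if (!accepted)
        ///     Debug.LogWarning("Invalid device / pose combination for this TrackedPoseDriver.");
        /// </code>
        /// </example>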
        public bool SetPoseSource(DeviceType deviceType, TrackedPose pose)
        {
            if ((int)deviceType < TrackedPoseDriverDataDescription.DeviceData.Count)
            {
                TrackedPoseDriverDataDescription.PoseData val = TrackedPoseDriverDataDescription.DeviceData[(int)deviceType];
                for (int i = 0; i < val.Poses.Count; ++i)
                {
                    if (val.Poses[i] == pose)
                    {
                        this.deviceType = deviceType;
                        poseSource = pose;
                        return true;
                    }
                }
            }
            return false;
        }
#if ENABLE_VR || ENABLE_AR
        [SerializeField]
        BasePoseProvider m_PoseProviderComponent = null;
        /// <summary>
        /// Optional: this field holds a reference to a PoseProvider instance that, if set, overrides the default behavior of
        /// the TrackedPoseDriver. When this field is empty, the TrackedPoseDriver operates as usual, with pose data
        /// retrieved according to the device and pose settings of the TrackedPoseDriver. When this field is set, the pose data
        /// is provided by the attached PoseProvider instead. In that case the device and pose fields are hidden, as they are
        /// no longer used to control the parent GameObject's Transform.
        /// </summary>
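        /// <example>
        /// A minimal sketch of a custom provider (illustrative only; the class name and fixed height are hypothetical).
        /// It derives from BasePoseProvider and reports a fixed standing-height pose through GetPoseFromProvider,
        /// the same call used by GetPoseData below:
        /// <code>
        /// public class FixedHeightPoseProvider : UnityEngine.Experimental.XR.Interaction.BasePoseProvider
        /// {
        ///     public override PoseDataFlags GetPoseFromProvider(out Pose output)
        ///     {
        ///         output = new Pose(new Vector3(0f, 1.6f, 0f), Quaternion.identity);
        ///         return PoseDataFlags.Position | PoseDataFlags.Rotation;
        ///     }
        /// }
        /// </code>
        /// </example>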
        public BasePoseProvider poseProviderComponent
        {
            get { return m_PoseProviderComponent; }
            set
            {
                m_PoseProviderComponent = value;
            }
        }
#endif
        PoseDataFlags GetPoseData(DeviceType device, TrackedPose poseSource, out Pose resultPose)
        {
#if ENABLE_VR || ENABLE_AR
            if (m_PoseProviderComponent != null)
            {
                return m_PoseProviderComponent.GetPoseFromProvider(out resultPose);
            }
#endif
            return PoseDataSource.GetDataFromSource(poseSource, out resultPose);
        }
        /// <summary>
        /// This enum is used to indicate which parts of the pose will be applied to the parent transform
        /// </summary>
        public enum TrackingType
        {
            /// <summary>
            /// With this setting, both the pose's rotation and position will be applied to the parent transform
            /// </summary>
            RotationAndPosition,
            /// <summary>
            /// With this setting, only the pose's rotation will be applied to the parent transform
            /// </summary>
            RotationOnly,
            /// <summary>
            /// With this setting, only the pose's position will be applied to the parent transform
            /// </summary>
            PositionOnly
        }
        [SerializeField]
        TrackingType m_TrackingType;

        /// <summary>
        /// The tracking type being used by the tracked pose driver
        /// </summary>
        public TrackingType trackingType
        {
            get { return m_TrackingType; }
            set { m_TrackingType = value; }
        }
        /// <summary>
        /// The update type being used by the tracked pose driver
        /// </summary>
        public enum UpdateType
        {
            /// <summary>
            /// Sample input both during Update and directly before rendering. For smooth head pose tracking,
            /// we recommend using this value, as it provides the lowest input latency for the device.
            /// This is the default value for the UpdateType option.
            /// </summary>
            UpdateAndBeforeRender,
            /// <summary>
            /// Only sample input during the Update phase of the frame.
            /// </summary>
            Update,
            /// <summary>
            /// Only sample input directly before rendering.
            /// </summary>
            BeforeRender,
        }
        [SerializeField]
        UpdateType m_UpdateType = UpdateType.UpdateAndBeforeRender;

        /// <summary>
        /// The update type being used by the tracked pose driver
        /// </summary>
        public UpdateType updateType
        {
            get { return m_UpdateType; }
            set { m_UpdateType = value; }
        }
        [SerializeField]
        bool m_UseRelativeTransform = false;

        /// <summary>
        /// This is used to indicate whether the TrackedPoseDriver will use the object's original transform as its basis.
        /// </summary>
        public bool UseRelativeTransform
        {
            get { return m_UseRelativeTransform; }
            set { m_UseRelativeTransform = value; }
        }
        /// <summary>
        /// The origin pose is the offset applied to any tracking data. This is only used when in legacy compatibility mode.
        /// </summary>
        protected Pose m_OriginPose;
        /// <summary>
        /// originPose is an offset applied to any tracking data read by this object.
        /// Setting this value should be reserved for dealing with edge cases, such as
        /// achieving parity between room-scale (floor-centered) and stationary (head-centered)
        /// tracking, without having to alter the transform hierarchy.
        /// For user locomotion and gameplay purposes you are usually better off just
        /// moving the parent transform of this object.
        /// </summary>
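        /// <example>
        /// A minimal usage sketch (illustrative only; <c>driver</c> is assumed to be a reference to this component).
        /// It raises all tracked poses by 1.5 meters by combining <see cref="UseRelativeTransform"/> with an origin offset:
        /// <code>
        /// driver.UseRelativeTransform = true;
        /// driver.originPose = new Pose(new Vector3(0f, 1.5f, 0f), Quaternion.identity);
        /// </code>
        /// </example>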
        public Pose originPose
        {
            get { return m_OriginPose; }
            set { m_OriginPose = value; }
        }
        private void CacheLocalPosition()
        {
            m_OriginPose.position = transform.localPosition;
            m_OriginPose.rotation = transform.localRotation;
        }

        private void ResetToCachedLocalPosition()
        {
            SetLocalTransform(m_OriginPose.position, m_OriginPose.rotation, PoseDataFlags.Position | PoseDataFlags.Rotation);
        }
        /// <inheritdoc />
        protected virtual void Awake()
        {
            CacheLocalPosition();
#if UNITY_2019_3_OR_NEWER
            // deprecated functionality in 2020.1
#else
            if (HasStereoCamera())
            {
#if ENABLE_AR || ENABLE_VR
                XRDevice.DisableAutoXRCameraTracking(GetComponent<Camera>(), true);
#endif
            }
#endif
        }

        /// <inheritdoc />
        protected virtual void OnDestroy()
        {
#if UNITY_2019_3_OR_NEWER
            // deprecated functionality in 2020.1
#else
            if (HasStereoCamera())
            {
#if ENABLE_AR || ENABLE_VR
                XRDevice.DisableAutoXRCameraTracking(GetComponent<Camera>(), false);
#endif
            }
#endif
        }
        /// <inheritdoc />
        protected virtual void OnEnable()
        {
            Application.onBeforeRender += OnBeforeRender;
        }

        /// <inheritdoc />
        protected virtual void OnDisable()
        {
            // remove delegate registration
            ResetToCachedLocalPosition();
            Application.onBeforeRender -= OnBeforeRender;
        }
        /// <inheritdoc />
        protected virtual void FixedUpdate()
        {
            if (m_UpdateType == UpdateType.Update ||
                m_UpdateType == UpdateType.UpdateAndBeforeRender)
            {
                PerformUpdate();
            }
        }

        /// <inheritdoc />
        protected virtual void Update()
        {
            if (m_UpdateType == UpdateType.Update ||
                m_UpdateType == UpdateType.UpdateAndBeforeRender)
            {
                PerformUpdate();
            }
        }

        // For the same reason as DefaultExecutionOrder, a callback order is specified to
        // apply the pose to the Transform before default user scripts execute.
        /// <inheritdoc />
        [BeforeRenderOrder(-30000)]
        protected virtual void OnBeforeRender()
        {
            if (m_UpdateType == UpdateType.BeforeRender ||
                m_UpdateType == UpdateType.UpdateAndBeforeRender)
            {
                PerformUpdate();
            }
        }
        /// <summary>
        /// Sets the transform that is being driven by the <see cref="TrackedPoseDriver"/>. Only the rotation and/or position are applied, depending on the <see cref="PoseDataFlags"/> and the current tracking type.
        /// </summary>
        /// <param name="newPosition">The position to apply.</param>
        /// <param name="newRotation">The rotation to apply.</param>
        /// <param name="poseFlags">The flags indicating which of the position/rotation values are provided by the calling code.</param>
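        /// <example>
        /// A minimal sketch of a subclass customizing how poses are applied (illustrative only; the class name and
        /// smoothing factor are hypothetical). It interpolates toward the new position instead of snapping to it:
        /// <code>
        /// public class SmoothedTrackedPoseDriver : TrackedPoseDriver
        /// {
        ///     protected override void SetLocalTransform(Vector3 newPosition, Quaternion newRotation, PoseDataFlags poseFlags)
        ///     {
        ///         if (poseFlags.HasFlag(PoseDataFlags.Position))
        ///             newPosition = Vector3.Lerp(transform.localPosition, newPosition, 0.5f);
        ///         base.SetLocalTransform(newPosition, newRotation, poseFlags);
        ///     }
        /// }
        /// </code>
        /// </example>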
        protected virtual void SetLocalTransform(Vector3 newPosition, Quaternion newRotation, PoseDataFlags poseFlags)
        {
            if ((m_TrackingType == TrackingType.RotationAndPosition ||
                 m_TrackingType == TrackingType.RotationOnly) &&
                (poseFlags & PoseDataFlags.Rotation) > 0)
            {
                transform.localRotation = newRotation;
            }

            if ((m_TrackingType == TrackingType.RotationAndPosition ||
                 m_TrackingType == TrackingType.PositionOnly) &&
                (poseFlags & PoseDataFlags.Position) > 0)
            {
                transform.localPosition = newPosition;
            }
        }
        /// <summary>
        /// This is only used when running in legacy mode, and fakes the behavior of the old implicit camera tracking by transforming the pose by the origin pose if necessary.
        /// </summary>
        /// <param name="pose">Pose to transform by the origin if in relative transform mode.</param>
        /// <returns>The pose, with the origin transform applied if in relative transform mode.</returns>
        protected Pose TransformPoseByOriginIfNeeded(Pose pose)
        {
            if (m_UseRelativeTransform)
            {
                return pose.GetTransformedBy(m_OriginPose);
            }
            else
            {
                return pose;
            }
        }
        private bool HasStereoCamera()
        {
            Camera camera = GetComponent<Camera>();
            return camera != null && camera.stereoEnabled;
        }
        /// <summary>
        /// PerformUpdate queries the data from the selected pose source, and then calls <see cref="SetLocalTransform"/> to apply the pose.
        /// </summary>
        protected virtual void PerformUpdate()
        {
            if (!enabled)
                return;

            Pose currentPose = Pose.identity;
            PoseDataFlags poseFlags = GetPoseData(m_Device, m_PoseSource, out currentPose);
            if (poseFlags != PoseDataFlags.NoData)
            {
                Pose localPose = TransformPoseByOriginIfNeeded(currentPose);
                SetLocalTransform(localPose.position, localPose.rotation, poseFlags);
            }
        }
    }
}