// IMHOTEP Framework — source listing of SteamVR_TrackedCamera.cs
1 //======= Copyright (c) Valve Corporation, All rights reserved. ===============
2 //
3 // Purpose: Provides access to video feed and poses of tracked cameras.
4 //
5 // Usage:
6 // var source = SteamVR_TrackedCamera.Distorted();
7 // var source = SteamVR_TrackedCamera.Undistorted();
8 // or
9 // var undistorted = true; // or false
10 // var source = SteamVR_TrackedCamera.Source(undistorted);
11 //
12 // - Distorted feeds are the decoded images from the camera.
13 // - Undistorted feeds correct for the camera lens distortion (a.k.a. fisheye)
14 // to make straight lines straight.
15 //
16 // VideoStreamTexture objects must be symmetrically Acquired and Released to
17 // ensure the video stream is activated, and shutdown properly once there are
18 // no more consumers. You only need to Acquire once when starting to use a
19 // stream, and Release when you are done using it (as opposed to every frame).
20 //
21 //=============================================================================
22 
23 using UnityEngine;
24 using Valve.VR;
25 
27 {
public class VideoStreamTexture
{
    /// <summary>
    /// Exposes the video feed of a single tracked camera as a Unity texture,
    /// together with the camera pose recorded with the most recent frame.
    /// Instances share one ref-counted VideoStream per device (see Stream()).
    /// </summary>
    /// <param name="deviceIndex">OpenVR tracked device index owning the camera.</param>
    /// <param name="undistorted">True to request lens-corrected (fisheye-removed) frames.</param>
    public VideoStreamTexture(uint deviceIndex, bool undistorted)
    {
        this.undistorted = undistorted;
        videostream = Stream(deviceIndex);
    }
    public bool undistorted { get; private set; }
    public uint deviceIndex { get { return videostream.deviceIndex; } }
    public bool hasCamera { get { return videostream.hasCamera; } }
    // True only once a frame with a valid standing pose has been received.
    public bool hasTracking { get { Update(); return header.standingTrackedDevicePose.bPoseIsValid; } }
    // Sequence number of the latest decoded frame (from the frame header).
    public uint frameId { get { Update(); return header.nFrameSequence; } }
    // UV bounds of the usable texture area; v is flipped for Unity (see Update()).
    public VRTextureBounds_t frameBounds { get; private set; }
    public EVRTrackedCameraFrameType frameType { get { return undistorted ? EVRTrackedCameraFrameType.Undistorted : EVRTrackedCameraFrameType.Distorted; } }

    Texture2D _texture;
    // Latest camera frame; refreshed at most once per Unity frame by Update().
    public Texture2D texture { get { Update(); return _texture; } }

    // Camera pose in absolute (standing) tracking space at frame capture time.
    public SteamVR_Utils.RigidTransform transform { get { Update(); return new SteamVR_Utils.RigidTransform(header.standingTrackedDevicePose.mDeviceToAbsoluteTracking); } }
    // Sign flips convert OpenVR's right-handed coordinates to Unity's left-handed space.
    public Vector3 velocity { get { Update(); var pose = header.standingTrackedDevicePose; return new Vector3(pose.vVelocity.v0, pose.vVelocity.v1, -pose.vVelocity.v2); } }
    public Vector3 angularVelocity { get { Update(); var pose = header.standingTrackedDevicePose; return new Vector3(-pose.vAngularVelocity.v0, -pose.vAngularVelocity.v1, pose.vAngularVelocity.v2); } }

    public TrackedDevicePose_t GetPose() { Update(); return header.standingTrackedDevicePose; }

    /// <summary>Adds a reference to (and if needed starts) the underlying video stream.</summary>
    /// <returns>The new reference count.</returns>
    public ulong Acquire()
    {
        return videostream.Acquire();
    }
    /// <summary>Releases one reference; destroys the texture once the stream has shut down.</summary>
    /// <returns>The remaining reference count.</returns>
    public ulong Release()
    {
        var result = videostream.Release();

        if (videostream.handle == 0)
        {
            Object.Destroy(_texture);
            _texture = null;
        }

        return result;
    }

    int prevFrameCount = -1; // last Unity frame serviced; throttles Update() to once per frame
    void Update()
    {
        if (Time.frameCount == prevFrameCount)
            return;

        prevFrameCount = Time.frameCount;

        if (videostream.handle == 0)
            return;

        var vr = SteamVR.instance;
        if (vr == null)
            return;

        var trackedCamera = OpenVR.TrackedCamera;
        if (trackedCamera == null)
            return;

        var nativeTex = System.IntPtr.Zero;
        // D3D11 needs an existing Unity texture to share into; reuse ours once created.
        var deviceTexture = (_texture != null) ? _texture : new Texture2D(2, 2);
        var headerSize = (uint)System.Runtime.InteropServices.Marshal.SizeOf(header.GetType());

        if (vr.textureType == ETextureType.OpenGL)
        {
            // Release the previously bound GL texture before fetching the next frame.
            if (glTextureId != 0)
                trackedCamera.ReleaseVideoStreamTextureGL(videostream.handle, glTextureId);

            if (trackedCamera.GetVideoStreamTextureGL(videostream.handle, frameType, ref glTextureId, ref header, headerSize) != EVRTrackedCameraError.None)
                return;

            nativeTex = (System.IntPtr)glTextureId;
        }
        else if (vr.textureType == ETextureType.DirectX)
        {
            if (trackedCamera.GetVideoStreamTextureD3D11(videostream.handle, frameType, deviceTexture.GetNativeTexturePtr(), ref nativeTex, ref header, headerSize) != EVRTrackedCameraError.None)
                return;
        }

        if (_texture == null)
        {
            // First frame: wrap the native texture and cache the (v-flipped) frame bounds.
            _texture = Texture2D.CreateExternalTexture((int)header.nWidth, (int)header.nHeight, TextureFormat.RGBA32, false, false, nativeTex);

            uint width = 0, height = 0;
            var frameBounds = new VRTextureBounds_t();
            if (trackedCamera.GetVideoStreamTextureSize(deviceIndex, frameType, ref frameBounds, ref width, ref height) == EVRTrackedCameraError.None)
            {
                // Account for textures being upside-down in Unity.
                frameBounds.vMin = 1.0f - frameBounds.vMin;
                frameBounds.vMax = 1.0f - frameBounds.vMax;
                this.frameBounds = frameBounds;
            }
        }
        else
        {
            _texture.UpdateExternalTexture(nativeTex);
        }
    }

    uint glTextureId;
    // FIX: this declaration was missing from the listing, yet 'header' is read
    // throughout the class (nFrameSequence, nWidth/nHeight, standingTrackedDevicePose)
    // and is populated by GetVideoStreamTextureGL/D3D11 each frame. Restored here.
    CameraVideoStreamFrameHeader_t header;
    VideoStream videostream;
}
132 
133  #region Top level accessors.
134 
/// <summary>
/// Returns the lazily created distorted (raw decoded) feed for the given device.
/// </summary>
public static VideoStreamTexture Distorted(int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
{
    if (distorted == null)
    {
        distorted = new VideoStreamTexture[OpenVR.k_unMaxTrackedDeviceCount];
    }
    var source = distorted[deviceIndex];
    if (source == null)
    {
        source = new VideoStreamTexture((uint)deviceIndex, false);
        distorted[deviceIndex] = source;
    }
    return source;
}
143 
/// <summary>
/// Returns the lazily created undistorted (lens-corrected) feed for the given device.
/// </summary>
public static VideoStreamTexture Undistorted(int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
{
    if (undistorted == null)
    {
        undistorted = new VideoStreamTexture[OpenVR.k_unMaxTrackedDeviceCount];
    }
    var source = undistorted[deviceIndex];
    if (source == null)
    {
        source = new VideoStreamTexture((uint)deviceIndex, true);
        undistorted[deviceIndex] = source;
    }
    return source;
}
152 
/// <summary>
/// Convenience accessor selecting between the Undistorted and Distorted feeds.
/// </summary>
public static VideoStreamTexture Source(bool undistorted, int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
{
    if (undistorted)
        return Undistorted(deviceIndex);
    return Distorted(deviceIndex);
}
157 
// Per-device caches for the two feed types, lazily allocated; indexed by tracked device index.
private static VideoStreamTexture[] distorted, undistorted;
159 
160  #endregion
161 
162  #region Internal class to manage lifetime of video streams (per device).
163 
class VideoStream
{
    /// <summary>
    /// Ref-counts the native video streaming service for one device: the stream
    /// is started on the first Acquire and shut down on the last Release.
    /// </summary>
    public VideoStream(uint deviceIndex)
    {
        this.deviceIndex = deviceIndex;
        var api = OpenVR.TrackedCamera;
        if (api != null)
            api.HasCamera(deviceIndex, ref _hasCamera);
    }
    public uint deviceIndex { get; private set; }

    ulong _handle;
    // Native streaming-service handle; zero while the stream is inactive.
    public ulong handle { get { return _handle; } }

    bool _hasCamera;
    // Cached result of IVRTrackedCamera.HasCamera queried at construction.
    public bool hasCamera { get { return _hasCamera; } }

    ulong refCount;
    /// <summary>Adds a reference, starting the native stream if it is not running yet.</summary>
    /// <returns>The new reference count.</returns>
    public ulong Acquire()
    {
        var needsStart = (_handle == 0) && _hasCamera;
        if (needsStart)
        {
            var api = OpenVR.TrackedCamera;
            if (api != null)
                api.AcquireVideoStreamingService(deviceIndex, ref _handle);
        }
        refCount++;
        return refCount;
    }
    /// <summary>Drops a reference, shutting the native stream down when none remain.</summary>
    /// <returns>The remaining reference count.</returns>
    public ulong Release()
    {
        if (refCount > 0)
        {
            refCount--;
            if (refCount == 0 && _handle != 0)
            {
                var api = OpenVR.TrackedCamera;
                if (api != null)
                    api.ReleaseVideoStreamingService(_handle);
                _handle = 0;
            }
        }
        return refCount;
    }
}
204 
/// <summary>
/// Returns the shared per-device VideoStream, creating the cache and entry on demand.
/// </summary>
static VideoStream Stream(uint deviceIndex)
{
    if (videostreams == null)
    {
        videostreams = new VideoStream[OpenVR.k_unMaxTrackedDeviceCount];
    }
    var stream = videostreams[deviceIndex];
    if (stream == null)
    {
        stream = new VideoStream(deviceIndex);
        videostreams[deviceIndex] = stream;
    }
    return stream;
}
213 
// One shared ref-counted stream per tracked device, lazily allocated by Stream().
static VideoStream[] videostreams;
215 
216  #endregion
217 }
218