/********************************************************************************/ /**
\file OVR_CAPI_Util.h
\brief This header provides LibOVR utility function declarations.
\copyright Copyright 2015-2016 Oculus VR, LLC All Rights reserved.
*************************************************************************************/

#ifndef OVR_CAPI_Util_h
#define OVR_CAPI_Util_h

#include "OVR_CAPI.h"

/// Enumerates modifications to the projection matrix based on the application's needs.
///
/// \see ovrMatrix4f_Projection
///
typedef enum ovrProjectionModifier_ {
  /// Use for generating a default projection matrix that is:
  /// * Right-handed.
  /// * Near depth values stored in the depth buffer are smaller than far depth values.
  /// * Both near and far are explicitly defined.
  /// * With a clipping range that is (0 to w).
  ovrProjection_None = 0x00,

  /// Enable if using left-handed transformations in your application.
  ovrProjection_LeftHanded = 0x01,

  /// After the projection transform is applied, far values stored in the depth buffer will be less
  /// than closer depth values.
  /// NOTE: Enable only if the application is using a floating-point depth buffer for proper
  /// precision.
  ovrProjection_FarLessThanNear = 0x02,

  /// When this flag is used, the zfar value pushed into ovrMatrix4f_Projection() will be ignored.
  /// NOTE: Enable only if ovrProjection_FarLessThanNear is also enabled, where the far clipping
  /// plane will be pushed to infinity.
  ovrProjection_FarClipAtInfinity = 0x04,

  /// Enable if the application is rendering with OpenGL and expects a projection matrix with a
  /// clipping range of (-w to w).
  /// Ignore this flag if your application already handles the conversion from D3D range (0 to w)
  /// to OpenGL range (-w to w).
  ovrProjection_ClipRangeOpenGL = 0x08,
} ovrProjectionModifier;

/// Return values for ovr_Detect.
///
typedef struct OVR_ALIGNAS(8) ovrDetectResult_ {
  /// Is ovrFalse when the Oculus Service is not running.
  /// This means that the Oculus Service is either uninstalled or stopped.
  /// IsOculusHMDConnected will be ovrFalse in this case.
  /// Is ovrTrue when the Oculus Service is running.
  /// This means that the Oculus Service is installed and running.
  /// IsOculusHMDConnected will reflect the state of the HMD.
  ovrBool IsOculusServiceRunning;

  /// Is ovrFalse when an Oculus HMD is not detected.
  /// If the Oculus Service is not running, this will be ovrFalse.
  /// Is ovrTrue when an Oculus HMD is detected.
  /// This implies that the Oculus Service is also installed and running.
  ovrBool IsOculusHMDConnected;

  OVR_UNUSED_STRUCT_PAD(pad0, 6) ///< \internal struct padding
} ovrDetectResult;

OVR_STATIC_ASSERT(sizeof(ovrDetectResult) == 8, "ovrDetectResult size mismatch");

/// Modes used to generate Touch Haptics from an audio PCM buffer.
typedef enum ovrHapticsGenMode_ {
  /// Point sample original signal at Haptics frequency
  ovrHapticsGenMode_PointSample,
  ovrHapticsGenMode_Count
} ovrHapticsGenMode;

/// Stores audio PCM data (as 32b float samples) for an audio channel.
/// Note: needs to be released with ovr_ReleaseAudioChannelData to avoid a memory leak.
typedef struct ovrAudioChannelData_ {
  /// Samples stored as floats [-1.0f, 1.0f].
  const float* Samples;

  /// Number of samples
  int SamplesCount;

  /// Frequency (e.g. 44100)
  int Frequency;
} ovrAudioChannelData;

/// Stores a full Haptics clip, which can be used as a data source for multiple ovrHapticsBuffers.
typedef struct ovrHapticsClip_ {
  /// Samples stored in opaque format
  const void* Samples;

  /// Number of samples
  int SamplesCount;
} ovrHapticsClip;

/// Detects Oculus Runtime and Device Status
///
/// Checks for Oculus Runtime and Oculus HMD device status without loading the LibOVRRT
/// shared library. This may be called before ovr_Initialize() to help decide whether or
/// not to initialize LibOVR.
///
/// \param[in] timeoutMilliseconds Specifies a timeout to wait for HMD to be attached or 0 to poll.
///
/// \return Returns an ovrDetectResult object indicating the result of detection.
///
/// \see ovrDetectResult
///
OVR_PUBLIC_FUNCTION(ovrDetectResult) ovr_Detect(int timeoutMilliseconds);

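/* Usage sketch (editor's addition, not part of the original header): poll once for the
 * runtime and an attached HMD before deciding whether to initialize LibOVR. Only
 * ovr_Detect() above plus ovr_Initialize(), ovrInitParams and OVR_SUCCESS from OVR_CAPI.h
 * are assumed; the non-VR fallback branch is application-specific.
 *
 *   ovrDetectResult detect = ovr_Detect(0); // 0 == poll without waiting
 *   if (detect.IsOculusServiceRunning && detect.IsOculusHMDConnected) {
 *     ovrInitParams params = {0}; // default initialization parameters
 *     if (OVR_SUCCESS(ovr_Initialize(&params))) {
 *       // Safe to create a session with ovr_Create() and start VR rendering.
 *     }
 *   } else {
 *     // Run in non-VR mode; LibOVRRT was never loaded.
 *   }
 */
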
// On the Windows platform:
#ifdef _WIN32
/// This is the Windows Named Event name that is used to check for HMD connected state.
#define OVR_HMD_CONNECTED_EVENT_NAME L"OculusHMDConnected"
#endif // _WIN32

/// Used to generate projection from ovrEyeRenderDesc::Fov.
///
/// \param[in] fov Specifies the ovrFovPort to use.
/// \param[in] znear Distance to near Z limit.
/// \param[in] zfar Distance to far Z limit.
/// \param[in] projectionModFlags A combination of the ovrProjectionModifier flags.
///
/// \return Returns the calculated projection matrix.
///
/// \see ovrProjectionModifier
///
OVR_PUBLIC_FUNCTION(ovrMatrix4f)
ovrMatrix4f_Projection(ovrFovPort fov, float znear, float zfar, unsigned int projectionModFlags);

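/* Usage sketch (editor's addition): build per-eye projection matrices from the HMD's default
 * FOV. A valid ovrSession (session) and ovr_GetHmdDesc() from OVR_CAPI.h are assumed; the
 * near/far distances are illustrative.
 *
 *   ovrHmdDesc hmdDesc = ovr_GetHmdDesc(session);
 *   ovrMatrix4f proj[ovrEye_Count];
 *   for (int eye = 0; eye < ovrEye_Count; ++eye) {
 *     // Default flags: right-handed, near < far in the depth buffer, D3D clip range (0 to w).
 *     proj[eye] = ovrMatrix4f_Projection(hmdDesc.DefaultEyeFov[eye], 0.1f, 1000.0f,
 *                                        ovrProjection_None);
 *     // An OpenGL renderer would pass ovrProjection_ClipRangeOpenGL instead.
 *   }
 */
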
/// Extracts the required data from the result of ovrMatrix4f_Projection.
///
/// \param[in] projection Specifies the projection matrix from which to
///            extract ovrTimewarpProjectionDesc.
/// \param[in] projectionModFlags A combination of the ovrProjectionModifier flags.
/// \return Returns the extracted ovrTimewarpProjectionDesc.
/// \see ovrTimewarpProjectionDesc
///
OVR_PUBLIC_FUNCTION(ovrTimewarpProjectionDesc)
ovrTimewarpProjectionDesc_FromProjection(ovrMatrix4f projection, unsigned int projectionModFlags);

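/* Usage sketch (editor's addition): an application submitting depth stores the extracted
 * description in its ovrLayerEyeFovDepth layer so the compositor can reproject correctly.
 * proj[] and the flags are assumed to come from the ovrMatrix4f_Projection example above;
 * ovrLayerEyeFovDepth and ovrLayerType_EyeFovDepth come from OVR_CAPI.h.
 *
 *   ovrLayerEyeFovDepth depthLayer = {0};
 *   depthLayer.Header.Type = ovrLayerType_EyeFovDepth;
 *   depthLayer.ProjectionDesc =
 *       ovrTimewarpProjectionDesc_FromProjection(proj[0], ovrProjection_None);
 */
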
/// Generates an orthographic sub-projection.
///
/// Used for 2D rendering; Y is down.
///
/// \param[in] projection The perspective matrix that the orthographic matrix is derived from.
/// \param[in] orthoScale Equal to 1.0f / pixelsPerTanAngleAtCenter.
/// \param[in] orthoDistance Equal to the distance from the camera in meters, such as 0.8m.
/// \param[in] HmdToEyeOffsetX Specifies the offset of the eye from the center.
///
/// \return Returns the calculated projection matrix.
///
OVR_PUBLIC_FUNCTION(ovrMatrix4f)
ovrMatrix4f_OrthoSubProjection(
    ovrMatrix4f projection,
    ovrVector2f orthoScale,
    float orthoDistance,
    float HmdToEyeOffsetX);

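/* Usage sketch (editor's addition): derive an orthographic matrix for a HUD quad from one
 * eye's perspective projection. eyeRenderDesc is assumed to come from ovr_GetRenderDesc and
 * proj[0] from the projection example above; the 0.8f distance mirrors the parameter docs.
 *
 *   ovrVector2f orthoScale;
 *   orthoScale.x = 1.0f / eyeRenderDesc.PixelsPerTanAngleAtCenter.x;
 *   orthoScale.y = 1.0f / eyeRenderDesc.PixelsPerTanAngleAtCenter.y;
 *   ovrMatrix4f hudProj = ovrMatrix4f_OrthoSubProjection(
 *       proj[0],
 *       orthoScale,
 *       0.8f, // orthoDistance: roughly 0.8 meters in front of the viewer
 *       eyeRenderDesc.HmdToEyePose.Position.x);
 */
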
/// Computes offset eye poses based on headPose returned by ovrTrackingState.
///
/// \param[in] headPose Indicates the HMD position and orientation to use for the calculation.
/// \param[in] hmdToEyePose Can be ovrEyeRenderDesc.HmdToEyePose returned from
///            ovr_GetRenderDesc. For monoscopic rendering, use a position vector that is the
///            average of the two eye position vectors.
/// \param[out] outEyePoses If outEyePoses are used for rendering, they should be passed to
///             ovr_SubmitFrame in ovrLayerEyeFov::RenderPose or ovrLayerEyeFovDepth::RenderPose.
///
#undef ovr_CalcEyePoses
OVR_PUBLIC_FUNCTION(void)
ovr_CalcEyePoses(ovrPosef headPose, const ovrVector3f hmdToEyeOffset[2], ovrPosef outEyePoses[2]);
OVR_PRIVATE_FUNCTION(void)
ovr_CalcEyePoses2(ovrPosef headPose, const ovrPosef HmdToEyePose[2], ovrPosef outEyePoses[2]);
#define ovr_CalcEyePoses ovr_CalcEyePoses2

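/* Usage sketch (editor's addition): compute eye poses manually from a tracking state instead
 * of calling ovr_GetEyePoses. A valid session, frameIndex, and per-eye eyeRenderDesc[] from
 * ovr_GetRenderDesc are assumed; ovr_GetPredictedDisplayTime and ovr_GetTrackingState come
 * from OVR_CAPI.h. The macro above maps this call to the ovrPosef-based ovr_CalcEyePoses2.
 *
 *   double predictedTime = ovr_GetPredictedDisplayTime(session, frameIndex);
 *   ovrTrackingState ts = ovr_GetTrackingState(session, predictedTime, ovrTrue);
 *   ovrPosef hmdToEyePose[2] = {eyeRenderDesc[0].HmdToEyePose, eyeRenderDesc[1].HmdToEyePose};
 *   ovrPosef eyePoses[2];
 *   ovr_CalcEyePoses(ts.HeadPose.ThePose, hmdToEyePose, eyePoses);
 */
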
/// Returns the predicted eye poses in outEyePoses, offset from the predicted head pose.
///
/// This is a thread-safe function where the caller should increment frameIndex with every frame
/// and pass that index where applicable to functions called on the rendering thread.
/// Assuming outEyePoses are used for rendering, they should be passed as part of ovrLayerEyeFov.
/// The caller does not need to worry about applying HmdToEyePose to the returned outEyePoses
/// variables.
///
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
/// \param[in] frameIndex Specifies the targeted frame index, or 0 to refer to one frame after
///            the last time ovr_SubmitFrame was called.
/// \param[in] latencyMarker Specifies that this call is the point in time where
///            the "App-to-Mid-Photon" latency timer starts from. If a given ovrLayer
///            provides "SensorSampleTimestamp", that will override the value stored here.
/// \param[in] hmdToEyePose Can be ovrEyeRenderDesc.HmdToEyePose returned from
///            ovr_GetRenderDesc. For monoscopic rendering, use a position vector that is the
///            average of the two eye position vectors.
/// \param[out] outEyePoses The predicted eye poses.
/// \param[out] outSensorSampleTime The time when this function was called. May be NULL, in which
///             case it is ignored.
///
#undef ovr_GetEyePoses
OVR_PUBLIC_FUNCTION(void)
ovr_GetEyePoses(
    ovrSession session,
    long long frameIndex,
    ovrBool latencyMarker,
    const ovrVector3f hmdToEyeOffset[2],
    ovrPosef outEyePoses[2],
    double* outSensorSampleTime);
OVR_PRIVATE_FUNCTION(void)
ovr_GetEyePoses2(
    ovrSession session,
    long long frameIndex,
    ovrBool latencyMarker,
    const ovrPosef HmdToEyePose[2],
    ovrPosef outEyePoses[2],
    double* outSensorSampleTime);
#define ovr_GetEyePoses ovr_GetEyePoses2

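/* Usage sketch (editor's addition): typical per-frame query feeding an ovrLayerEyeFov that is
 * later passed to ovr_SubmitFrame. A valid session, frameIndex, and per-eye eyeRenderDesc[]
 * from ovr_GetRenderDesc are assumed; the layer's other fields are set elsewhere. The macro
 * above maps this call to the ovrPosef-based ovr_GetEyePoses2.
 *
 *   ovrPosef hmdToEyePose[2] = {eyeRenderDesc[0].HmdToEyePose, eyeRenderDesc[1].HmdToEyePose};
 *   ovr_GetEyePoses(session, frameIndex, ovrTrue, hmdToEyePose,
 *                   layer.RenderPose, &layer.SensorSampleTime);
 */
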
/// Tracking poses provided by the SDK come in a right-handed coordinate system. If an application
/// is passing in ovrProjection_LeftHanded into ovrMatrix4f_Projection, then it should also use
/// this function to flip the HMD tracking poses to be left-handed.
///
/// While this utility function is intended to convert a left-handed ovrPosef into a right-handed
/// coordinate system, it will also work for converting right-handed to left-handed since the
/// flip operation is the same for both cases.
///
/// \param[in] inPose The pose that is right-handed.
/// \param[out] outPose The pose that is requested to be left-handed (can be the same pointer as inPose).
///
OVR_PUBLIC_FUNCTION(void) ovrPosef_FlipHandedness(const ovrPosef* inPose, ovrPosef* outPose);

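/* Usage sketch (editor's addition): a left-handed renderer (one passing
 * ovrProjection_LeftHanded to ovrMatrix4f_Projection) flips the poses returned by
 * ovr_GetEyePoses before building its view matrices. layer.RenderPose is assumed to hold the
 * right-handed poses from the previous example.
 *
 *   ovrPosef eyePoseLH[2];
 *   for (int eye = 0; eye < ovrEye_Count; ++eye) {
 *     ovrPosef_FlipHandedness(&layer.RenderPose[eye], &eyePoseLH[eye]);
 *     // eyePoseLH[eye] now feeds the left-handed view matrix for this eye.
 *   }
 */
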
/// Reads an audio channel from Wav (Waveform Audio File) data.
/// Input must be a byte buffer representing a valid Wav file. Audio samples from the specified
/// channel are read, converted to float [-1.0f, 1.0f] and returned through ovrAudioChannelData.
///
/// Supported formats: PCM 8b, 16b, 32b and IEEE float (little-endian only).
///
/// \param[out] outAudioChannel Output audio channel data.
/// \param[in] inputData A binary buffer representing valid Wav file data.
/// \param[in] dataSizeInBytes Size of the buffer in bytes.
/// \param[in] stereoChannelToUse Audio channel index to extract (0 for mono).
///
OVR_PUBLIC_FUNCTION(ovrResult)
ovr_ReadWavFromBuffer(
    ovrAudioChannelData* outAudioChannel,
    const void* inputData,
    int dataSizeInBytes,
    int stereoChannelToUse);

/// Generates playable Touch Haptics data from an audio channel.
///
/// \param[out] outHapticsClip Generated Haptics clip.
/// \param[in] audioChannel Input audio channel data.
/// \param[in] genMode Mode used to convert the audio channel data to Haptics data.
///
OVR_PUBLIC_FUNCTION(ovrResult)
ovr_GenHapticsFromAudioData(
    ovrHapticsClip* outHapticsClip,
    const ovrAudioChannelData* audioChannel,
    ovrHapticsGenMode genMode);

/// Releases memory allocated for ovrAudioChannelData. Must be called to avoid a memory leak.
/// \param[in] audioChannel Pointer to an audio channel.
///
OVR_PUBLIC_FUNCTION(void) ovr_ReleaseAudioChannelData(ovrAudioChannelData* audioChannel);

/// Releases memory allocated for ovrHapticsClip. Must be called to avoid a memory leak.
/// \param[in] hapticsClip Pointer to a haptics clip.
///
OVR_PUBLIC_FUNCTION(void) ovr_ReleaseHapticsClip(ovrHapticsClip* hapticsClip);

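/* Usage sketch (editor's addition): full lifecycle of the audio-to-haptics utilities above,
 * from Wav data already loaded into memory to a clip played on a Touch controller.
 * wavBytes/wavSize and session are assumed to be provided by the application;
 * ovrHapticsBuffer, ovrHapticsBufferSubmit_Enqueue, ovrControllerType_RTouch and
 * ovr_SubmitControllerVibration come from OVR_CAPI.h.
 *
 *   ovrAudioChannelData channel = {0};
 *   ovrHapticsClip clip = {0};
 *   if (OVR_SUCCESS(ovr_ReadWavFromBuffer(&channel, wavBytes, (int)wavSize, 0))) {
 *     if (OVR_SUCCESS(ovr_GenHapticsFromAudioData(&clip, &channel, ovrHapticsGenMode_PointSample))) {
 *       ovrHapticsBuffer buffer;
 *       buffer.Samples = clip.Samples;
 *       buffer.SamplesCount = clip.SamplesCount;
 *       buffer.SubmitMode = ovrHapticsBufferSubmit_Enqueue;
 *       ovr_SubmitControllerVibration(session, ovrControllerType_RTouch, &buffer);
 *       ovr_ReleaseHapticsClip(&clip); // required to avoid leaking the generated clip
 *     }
 *     ovr_ReleaseAudioChannelData(&channel); // required to avoid leaking the decoded samples
 *   }
 */
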
#endif // Header include guard