0029384: Visualization, TKOpenGl - basic integration with OpenVR

V3d_View::AutoZFit() is now called only before redraw
within methods V3d_View::Redraw() and V3d_View::Update().

Graphic3d_CView now holds an Aspect_XRSession object.
Aspect_OpenVRSession implements the new interface on top of the optional OpenVR library.
Graphic3d_CView::ProcessXRInput() - added new interface method,
which should be called to process positional input (head tracking).
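
As an illustration, a minimal per-frame loop driving the new view-level XR methods could look as follows (a sketch only, not part of this change; the helper name renderXRFrame is hypothetical, and the V3d_View wrappers View(), Camera(), ChangeRenderingParams() and Redraw() are assumed from the existing API):

#include <Graphic3d_CView.hxx>
#include <Graphic3d_StereoMode.hxx>
#include <V3d_View.hxx>

// hypothetical per-frame VR loop (sketch)
static void renderXRFrame (const Handle(V3d_View)& theView)
{
  // request OpenVR output; ProcessXRInput() opens the session lazily
  // when a stereo camera and Graphic3d_StereoMode_OpenVR are active
  theView->ChangeRenderingParams().StereoMode = Graphic3d_StereoMode_OpenVR;
  theView->Camera()->SetProjectionType (Graphic3d_Camera::Projection_Stereo);

  const Handle(Graphic3d_CView)& aCView = theView->View();
  aCView->ProcessXRInput();     // poll head tracking, update camera FOV/IOD/projection
  if (!aCView->IsActiveXR())
  {
    return;                     // no HMD connected or OpenVR library unavailable
  }

  aCView->SetupXRPosedCamera(); // substitute the camera with the head-posed one
  theView->Redraw();            // render the frame
  aCView->UnsetXRPosedCamera(); // restore the base camera and sync modifications back
}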

Graphic3d_Camera now allows setting custom stereoscopic Projection matrices.
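
For example, per-eye data received from a VR runtime can be plugged in roughly like this (a sketch; the helper functions and their frustum/matrix arguments are placeholders for whatever the runtime provides, equivalent to what Aspect_OpenVRSession feeds into the camera internally):

#include <Aspect_FrustumLRBT.hxx>
#include <Graphic3d_Camera.hxx>
#include <Graphic3d_Mat4d.hxx>

// sketch: raw per-eye frustum tangents (preferred path);
// IOD translation and ZNear scaling remain handled by the camera
static void setHmdStereoFrustums (const Handle(Graphic3d_Camera)& theCamera,
                                  const Aspect_FrustumLRBT<Standard_Real>& theFrustumL,
                                  const Aspect_FrustumLRBT<Standard_Real>& theFrustumR)
{
  theCamera->SetCustomStereoFrustums (theFrustumL, theFrustumR);
}

// sketch: ready per-eye projection matrices; the head-to-eye transform is folded into
// the projection, since Graphic3d_Camera keeps a single (mono) orientation matrix
static void setHmdStereoMatrices (const Handle(Graphic3d_Camera)& theCamera,
                                  const Graphic3d_Mat4d& theProjL, const Graphic3d_Mat4d& thePoseL,
                                  const Graphic3d_Mat4d& theProjR, const Graphic3d_Mat4d& thePoseR)
{
  theCamera->SetCustomStereoProjection (theProjL * thePoseL, theProjR * thePoseR);
}

// Graphic3d_Camera::ResetCustomProjection() reverts to the standard frustum-based setup.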

OpenGl_Context::Camera() - context now holds Camera object
in addition to active camera matrices.

genproj.tcl has been extended to handle the optional CSF_OpenVR dependency.
author kgv
date 2020-04-16 18:44:50 +03:00
parent 2615c2d705
commit b40cdc2b55
69 changed files with 4819 additions and 267 deletions

src/Graphic3d/Graphic3d_CView.cxx

@@ -13,6 +13,7 @@
#include <Graphic3d_CView.hxx>
#include <Aspect_OpenVRSession.hxx>
#include <Graphic3d_Layer.hxx>
#include <Graphic3d_MapIteratorOfMapOfStructure.hxx>
#include <Graphic3d_StructureManager.hxx>
@@ -32,7 +33,8 @@ Graphic3d_CView::Graphic3d_CView (const Handle(Graphic3d_StructureManager)& theM
myIsActive (Standard_False),
myIsRemoved (Standard_False),
myShadingModel (Graphic3d_TOSM_FRAGMENT),
myVisualization (Graphic3d_TOV_WIREFRAME)
myVisualization (Graphic3d_TOV_WIREFRAME),
myUnitFactor (1.0)
{
myId = myStructureManager->Identification (this);
}
@@ -43,6 +45,7 @@ Graphic3d_CView::Graphic3d_CView (const Handle(Graphic3d_StructureManager)& theM
//=======================================================================
Graphic3d_CView::~Graphic3d_CView()
{
myXRSession.Nullify();
if (!IsRemoved())
{
myStructureManager->UnIdentification (this);
@@ -1083,3 +1086,311 @@ void Graphic3d_CView::SetShadingModel (Graphic3d_TypeOfShadingModel theModel)
myShadingModel = theModel;
}
// =======================================================================
// function : SetUnitFactor
// purpose :
// =======================================================================
void Graphic3d_CView::SetUnitFactor (Standard_Real theFactor)
{
if (theFactor <= 0.0)
{
throw Standard_ProgramError ("Graphic3d_CView::SetUnitFactor() - invalid unit factor");
}
myUnitFactor = theFactor;
if (!myXRSession.IsNull())
{
myXRSession->SetUnitFactor (theFactor);
}
}
// =======================================================================
// function : IsActiveXR
// purpose :
// =======================================================================
bool Graphic3d_CView::IsActiveXR() const
{
return !myXRSession.IsNull()
&& myXRSession->IsOpen();
}
// =======================================================================
// function : InitXR
// purpose :
// =======================================================================
bool Graphic3d_CView::InitXR()
{
if (myXRSession.IsNull())
{
myXRSession = new Aspect_OpenVRSession();
myXRSession->SetUnitFactor (myUnitFactor);
}
if (!myXRSession->IsOpen())
{
myXRSession->Open();
if (myBackXRCamera.IsNull())
{
// backup camera properties
myBackXRCamera = new Graphic3d_Camera (myCamera);
}
}
return myXRSession->IsOpen();
}
// =======================================================================
// function : ReleaseXR
// purpose :
// =======================================================================
void Graphic3d_CView::ReleaseXR()
{
if (!myXRSession.IsNull())
{
if (myXRSession->IsOpen()
&& !myBackXRCamera.IsNull())
{
// restore projection properties overridden by HMD
myCamera->SetFOV2d (myBackXRCamera->FOV2d());
myCamera->SetFOVy (myBackXRCamera->FOVy());
myCamera->SetAspect(myBackXRCamera->Aspect());
myCamera->SetIOD (myBackXRCamera->GetIODType(), myBackXRCamera->IOD());
myCamera->SetZFocus(myBackXRCamera->ZFocusType(), myBackXRCamera->ZFocus());
myCamera->ResetCustomProjection();
myBackXRCamera.Nullify();
}
myXRSession->Close();
}
}
//=======================================================================
//function : ProcessXRInput
//purpose :
//=======================================================================
void Graphic3d_CView::ProcessXRInput()
{
if (myRenderParams.StereoMode == Graphic3d_StereoMode_OpenVR
&& myCamera->ProjectionType() == Graphic3d_Camera::Projection_Stereo)
{
InitXR();
}
else
{
ReleaseXR();
}
if (!IsActiveXR())
{
myBaseXRCamera.Nullify();
myPosedXRCamera.Nullify();
return;
}
myXRSession->ProcessEvents();
Invalidate();
myCamera->SetFOV2d (myRenderParams.HmdFov2d);
myCamera->SetAspect(myXRSession->Aspect());
myCamera->SetFOVy (myXRSession->FieldOfView());
myCamera->SetIOD (Graphic3d_Camera::IODType_Absolute, myXRSession->IOD());
myCamera->SetZFocus(Graphic3d_Camera::FocusType_Absolute, 1.0 * myUnitFactor);
// VR APIs tend to decompose camera orientation-projection matrices into the following components:
// @begincode
// Model * [View * Eye^-1] * [Projection]
// @endcode
// so that Eye position is encoded into Orientation matrix, and there should be 2 Orientation matrices and 2 Projection matrices to make the stereo.
// Graphic3d_Camera historically follows different decomposition, with Eye position encoded into Projection matrix,
// so that there is only 1 Orientation matrix (matching mono view) and 2 Projection matrices.
if (myXRSession->HasProjectionFrustums())
{
// note that this definition does not include a small forward/backward offset from head to eye
myCamera->SetCustomStereoFrustums (myXRSession->ProjectionFrustum (Aspect_Eye_Left),
myXRSession->ProjectionFrustum (Aspect_Eye_Right));
}
else
{
const Graphic3d_Mat4d aPoseL = myXRSession->HeadToEyeTransform (Aspect_Eye_Left);
const Graphic3d_Mat4d aPoseR = myXRSession->HeadToEyeTransform (Aspect_Eye_Right);
const Graphic3d_Mat4d aProjL = myXRSession->ProjectionMatrix (Aspect_Eye_Left, myCamera->ZNear(), myCamera->ZFar());
const Graphic3d_Mat4d aProjR = myXRSession->ProjectionMatrix (Aspect_Eye_Right, myCamera->ZNear(), myCamera->ZFar());
myCamera->SetCustomStereoProjection (aProjL * aPoseL, aProjR * aPoseR);
}
myBaseXRCamera = myCamera;
if (myPosedXRCamera.IsNull())
{
myPosedXRCamera = new Graphic3d_Camera();
}
SynchronizeXRBaseToPosedCamera();
}
//=======================================================================
//function : SynchronizeXRBaseToPosedCamera
//purpose :
//=======================================================================
void Graphic3d_CView::SynchronizeXRBaseToPosedCamera()
{
if (!myPosedXRCamera.IsNull())
{
ComputeXRPosedCameraFromBase (*myPosedXRCamera, myXRSession->HeadPose());
}
}
//=======================================================================
//function : ComputeXRPosedCameraFromBase
//purpose :
//=======================================================================
void Graphic3d_CView::ComputeXRPosedCameraFromBase (Graphic3d_Camera& theCam,
const gp_Trsf& theXRTrsf) const
{
theCam.Copy (myBaseXRCamera);
// convert head pose into camera transformation
const gp_Ax3 anAxVr (gp::Origin(), gp::DZ(), gp::DX());
const gp_Ax3 aCameraCS (gp::Origin(), -myBaseXRCamera->Direction(), -myBaseXRCamera->SideRight());
gp_Trsf aTrsfCS;
aTrsfCS.SetTransformation (aCameraCS, anAxVr);
const gp_Trsf aTrsfToCamera = aTrsfCS * theXRTrsf * aTrsfCS.Inverted();
gp_Trsf aTrsfToEye;
aTrsfToEye.SetTranslation (myBaseXRCamera->Eye().XYZ());
const gp_Trsf aTrsf = aTrsfToEye * aTrsfToCamera;
const gp_Dir anUpNew = myBaseXRCamera->Up().Transformed (aTrsf);
const gp_Dir aDirNew = myBaseXRCamera->Direction().Transformed (aTrsf);
const gp_Pnt anEyeNew = gp::Origin().Translated (aTrsf.TranslationPart());
theCam.SetUp (anUpNew);
theCam.SetDirectionFromEye (aDirNew);
theCam.MoveEyeTo (anEyeNew);
}
//=======================================================================
//function : SynchronizeXRPosedToBaseCamera
//purpose :
//=======================================================================
void Graphic3d_CView::SynchronizeXRPosedToBaseCamera()
{
if (myPosedXRCameraCopy.IsNull()
|| myPosedXRCamera.IsNull()
|| myBaseXRCamera.IsNull()
|| myCamera != myPosedXRCamera)
{
return;
}
if (myPosedXRCameraCopy->Eye().IsEqual (myPosedXRCamera->Eye(), gp::Resolution())
&& (myPosedXRCameraCopy->Distance() - myPosedXRCamera->Distance()) <= gp::Resolution()
&& myPosedXRCameraCopy->Direction().IsEqual (myPosedXRCamera->Direction(), gp::Resolution())
&& myPosedXRCameraCopy->Up().IsEqual (myPosedXRCamera->Up(), gp::Resolution()))
{
// avoid floating point math in case of no changes
return;
}
// re-compute myBaseXRCamera from myPosedXRCamera by applying reversed head pose transformation
ComputeXRBaseCameraFromPosed (*myPosedXRCamera, myXRSession->HeadPose());
myPosedXRCameraCopy->Copy (myPosedXRCamera);
}
//=======================================================================
//function : ComputeXRBaseCameraFromPosed
//purpose :
//=======================================================================
void Graphic3d_CView::ComputeXRBaseCameraFromPosed (const Graphic3d_Camera& theCamPosed,
const gp_Trsf& thePoseTrsf)
{
const gp_Ax3 anAxVr (gp::Origin(), gp::DZ(), gp::DX());
const gp_Ax3 aCameraCS (gp::Origin(), -myBaseXRCamera->Direction(), -myBaseXRCamera->SideRight());
gp_Trsf aTrsfCS;
aTrsfCS.SetTransformation (aCameraCS, anAxVr);
const gp_Trsf aTrsfToCamera = aTrsfCS * thePoseTrsf * aTrsfCS.Inverted();
const gp_Trsf aTrsfCamToHead = aTrsfToCamera.Inverted();
const gp_Dir anUpNew = theCamPosed.Up().Transformed (aTrsfCamToHead);
const gp_Dir aDirNew = theCamPosed.Direction().Transformed (aTrsfCamToHead);
const gp_Pnt anEyeNew = theCamPosed.Eye().Translated (aTrsfToCamera.TranslationPart().Reversed());
myBaseXRCamera->SetUp (anUpNew);
myBaseXRCamera->SetDirectionFromEye (aDirNew);
myBaseXRCamera->MoveEyeTo (anEyeNew);
}
//=======================================================================
//function : TurnViewXRCamera
//purpose :
//=======================================================================
void Graphic3d_CView::TurnViewXRCamera (const gp_Trsf& theTrsfTurn)
{
// use current eye position as an anchor
const Handle(Graphic3d_Camera)& aCamBase = myBaseXRCamera;
gp_Trsf aHeadTrsfLocal;
aHeadTrsfLocal.SetTranslationPart (myXRSession->HeadPose().TranslationPart());
const gp_Pnt anEyeAnchor = PoseXRToWorld (aHeadTrsfLocal).TranslationPart();
// turn the view
aCamBase->SetDirectionFromEye (aCamBase->Direction().Transformed (theTrsfTurn));
// recompute new eye
const gp_Ax3 anAxVr (gp::Origin(), gp::DZ(), gp::DX());
const gp_Ax3 aCameraCS (gp::Origin(), -aCamBase->Direction(), -aCamBase->SideRight());
gp_Trsf aTrsfCS;
aTrsfCS.SetTransformation (aCameraCS, anAxVr);
const gp_Trsf aTrsfToCamera = aTrsfCS * aHeadTrsfLocal * aTrsfCS.Inverted();
const gp_Pnt anEyeNew = anEyeAnchor.Translated (aTrsfToCamera.TranslationPart().Reversed());
aCamBase->MoveEyeTo (anEyeNew);
SynchronizeXRBaseToPosedCamera();
}
//=======================================================================
//function : SetupXRPosedCamera
//purpose :
//=======================================================================
void Graphic3d_CView::SetupXRPosedCamera()
{
if (!myPosedXRCamera.IsNull())
{
myCamera = myPosedXRCamera;
if (myPosedXRCameraCopy.IsNull())
{
myPosedXRCameraCopy = new Graphic3d_Camera();
}
myPosedXRCameraCopy->Copy (myPosedXRCamera);
}
}
//=======================================================================
//function : UnsetXRPosedCamera
//purpose :
//=======================================================================
void Graphic3d_CView::UnsetXRPosedCamera()
{
if (myCamera == myPosedXRCamera
&& !myBaseXRCamera.IsNull())
{
SynchronizeXRPosedToBaseCamera();
myCamera = myBaseXRCamera;
}
}
//=======================================================================
//function : DiagnosticInformation
//purpose :
//=======================================================================
void Graphic3d_CView::DiagnosticInformation (TColStd_IndexedDataMapOfStringString& theDict,
Graphic3d_DiagnosticInfo theFlags) const
{
if ((theFlags & Graphic3d_DiagnosticInfo_Device) != 0
&& !myXRSession.IsNull())
{
TCollection_AsciiString aVendor = myXRSession->GetString (Aspect_XRSession::InfoString_Vendor);
TCollection_AsciiString aDevice = myXRSession->GetString (Aspect_XRSession::InfoString_Device);
TCollection_AsciiString aTracker = myXRSession->GetString (Aspect_XRSession::InfoString_Tracker);
TCollection_AsciiString aSerial = myXRSession->GetString (Aspect_XRSession::InfoString_SerialNumber);
TCollection_AsciiString aDisplay = TCollection_AsciiString()
+ myXRSession->RecommendedViewport().x() + "x" + myXRSession->RecommendedViewport().y()
+ "@" + (int )Round (myXRSession->DisplayFrequency())
+ " [FOVy: " + (int )Round (myXRSession->FieldOfView()) + "]";
theDict.ChangeFromIndex (theDict.Add ("VRvendor", aVendor)) = aVendor;
theDict.ChangeFromIndex (theDict.Add ("VRdevice", aDevice)) = aDevice;
theDict.ChangeFromIndex (theDict.Add ("VRtracker", aTracker)) = aTracker;
theDict.ChangeFromIndex (theDict.Add ("VRdisplay", aDisplay)) = aDisplay;
theDict.ChangeFromIndex (theDict.Add ("VRserial", aSerial)) = aSerial;
}
}

src/Graphic3d/Graphic3d_CView.hxx

@@ -45,6 +45,7 @@
#include <Standard_Transient.hxx>
#include <TColStd_IndexedDataMapOfStringString.hxx>
class Aspect_XRSession;
class Graphic3d_CView;
class Graphic3d_GraphicDriver;
class Graphic3d_Layer;
@@ -427,8 +428,8 @@ public:
//! The format of returned information (e.g. key-value layout)
//! is NOT part of this API and can be changed at any time.
//! Thus application should not parse returned information to weed out specific parameters.
virtual void DiagnosticInformation (TColStd_IndexedDataMapOfStringString& theDict,
Graphic3d_DiagnosticInfo theFlags) const = 0;
Standard_EXPORT virtual void DiagnosticInformation (TColStd_IndexedDataMapOfStringString& theDict,
Graphic3d_DiagnosticInfo theFlags) const = 0;
//! Returns string with statistic performance info.
virtual TCollection_AsciiString StatisticInformation() const = 0;
@@ -436,6 +437,84 @@ public:
//! Fills in the dictionary with statistic performance info.
virtual void StatisticInformation (TColStd_IndexedDataMapOfStringString& theDict) const = 0;
public:
//! Return unit scale factor defined as scale factor for m (meters); 1.0 by default.
//! Normally, view definition is unitless; however, some operations like VR input require proper units mapping.
Standard_Real UnitFactor() const { return myUnitFactor; }
//! Set unit scale factor.
Standard_EXPORT void SetUnitFactor (Standard_Real theFactor);
//! Return XR session.
const Handle(Aspect_XRSession)& XRSession() const { return myXRSession; }
//! Set XR session.
void SetXRSession (const Handle(Aspect_XRSession)& theSession) { myXRSession = theSession; }
//! Return TRUE if there is active XR session.
Standard_EXPORT bool IsActiveXR() const;
//! Initialize XR session.
Standard_EXPORT virtual bool InitXR();
//! Release XR session.
Standard_EXPORT virtual void ReleaseXR();
//! Process input.
Standard_EXPORT virtual void ProcessXRInput();
//! Compute PosedXRCamera() based on current XR head pose and make it active.
Standard_EXPORT void SetupXRPosedCamera();
//! Set current camera back to BaseXRCamera() and copy temporary modifications of PosedXRCamera().
//! Calls SynchronizeXRPosedToBaseCamera() beforehand.
Standard_EXPORT void UnsetXRPosedCamera();
//! Returns transient XR camera position with tracked head orientation applied.
const Handle(Graphic3d_Camera)& PosedXRCamera() const { return myPosedXRCamera; }
//! Sets transient XR camera position with tracked head orientation applied.
void SetPosedXRCamera (const Handle(Graphic3d_Camera)& theCamera) { myPosedXRCamera = theCamera; }
//! Returns anchor camera definition (without tracked head orientation).
const Handle(Graphic3d_Camera)& BaseXRCamera() const { return myBaseXRCamera; }
//! Sets anchor camera definition.
void SetBaseXRCamera (const Handle(Graphic3d_Camera)& theCamera) { myBaseXRCamera = theCamera; }
//! Convert XR pose to world space.
//! @param thePoseXR [in] transformation defined in VR local coordinate system,
//! oriented as Y-up, X-right and -Z-forward
//! @return transformation defining orientation of XR pose in world space
gp_Trsf PoseXRToWorld (const gp_Trsf& thePoseXR) const
{
const Handle(Graphic3d_Camera)& anOrigin = myBaseXRCamera;
const gp_Ax3 anAxVr (gp::Origin(), gp::DZ(), gp::DX());
const gp_Ax3 aCameraCS (anOrigin->Eye().XYZ(), -anOrigin->Direction(), -anOrigin->SideRight());
gp_Trsf aTrsfCS;
aTrsfCS.SetTransformation (aCameraCS, anAxVr);
return aTrsfCS * thePoseXR;
}
//! Recomputes PosedXRCamera() based on BaseXRCamera() and head orientation.
Standard_EXPORT void SynchronizeXRBaseToPosedCamera();
//! Checks if PosedXRCamera() has been modified since SetupXRPosedCamera()
//! and copies these modifications to BaseXRCamera().
Standard_EXPORT void SynchronizeXRPosedToBaseCamera();
//! Compute camera position based on XR pose.
Standard_EXPORT void ComputeXRPosedCameraFromBase (Graphic3d_Camera& theCam,
const gp_Trsf& theXRTrsf) const;
//! Update base camera from posed camera by applying reversed transformation.
Standard_EXPORT void ComputeXRBaseCameraFromPosed (const Graphic3d_Camera& theCamPosed,
const gp_Trsf& thePoseTrsf);
//! Turn XR camera direction using current (head) eye position as anchor.
Standard_EXPORT void TurnViewXRCamera (const gp_Trsf& theTrsfTurn);
public: //! @name obsolete Graduated Trihedron functionality
//! Returns data of a graduated trihedron
@@ -490,6 +569,13 @@ protected:
Graphic3d_TypeOfShadingModel myShadingModel;
Graphic3d_TypeOfVisualization myVisualization;
Handle(Aspect_XRSession) myXRSession;
Handle(Graphic3d_Camera) myBackXRCamera; //!< camera projection parameters to restore after closing XR session (FOV, aspect and similar)
Handle(Graphic3d_Camera) myBaseXRCamera; //!< neutral camera orientation defining coordinate system in which head tracking is defined
Handle(Graphic3d_Camera) myPosedXRCamera; //!< transient XR camera orientation with tracked head orientation applied (based on myBaseXRCamera)
Handle(Graphic3d_Camera) myPosedXRCameraCopy; //!< neutral camera orientation copy at the beginning of processing input
Standard_Real myUnitFactor; //!< unit scale factor defined as scale factor for m (meters)
protected:
Graphic3d_GraduatedTrihedron myGTrihedronData;

src/Graphic3d/Graphic3d_Camera.cxx

@@ -81,6 +81,8 @@ Graphic3d_Camera::Graphic3d_Camera()
myAxialScale (1.0, 1.0, 1.0),
myProjType (Projection_Orthographic),
myFOVy (45.0),
myFOVx (45.0),
myFOV2d (180.0),
myFOVyTan (Tan (DTR_HALF * 45.0)),
myZNear (DEFAULT_ZNEAR),
myZFar (DEFAULT_ZFAR),
@@ -89,7 +91,10 @@ Graphic3d_Camera::Graphic3d_Camera()
myZFocus (1.0),
myZFocusType (FocusType_Relative),
myIOD (0.05),
myIODType (IODType_Relative)
myIODType (IODType_Relative),
myIsCustomProjMatM (false),
myIsCustomProjMatLR(false),
myIsCustomFrustomLR(false)
{
myWorldViewProjState.Initialize ((Standard_Size)Standard_Atomic_Increment (&THE_STATE_COUNTER),
(Standard_Size)Standard_Atomic_Increment (&THE_STATE_COUNTER),
@@ -108,6 +113,8 @@ Graphic3d_Camera::Graphic3d_Camera (const Handle(Graphic3d_Camera)& theOther)
myAxialScale (1.0, 1.0, 1.0),
myProjType (Projection_Orthographic),
myFOVy (45.0),
myFOVx (45.0),
myFOV2d (180.0),
myFOVyTan (Tan (DTR_HALF * 45.0)),
myZNear (DEFAULT_ZNEAR),
myZFar (DEFAULT_ZFAR),
@@ -116,7 +123,10 @@ Graphic3d_Camera::Graphic3d_Camera (const Handle(Graphic3d_Camera)& theOther)
myZFocus (1.0),
myZFocusType (FocusType_Relative),
myIOD (0.05),
myIODType (IODType_Relative)
myIODType (IODType_Relative),
myIsCustomProjMatM (false),
myIsCustomProjMatLR(false),
myIsCustomFrustomLR(false)
{
myWorldViewProjState.Initialize (this);
@@ -130,6 +140,7 @@ Graphic3d_Camera::Graphic3d_Camera (const Handle(Graphic3d_Camera)& theOther)
void Graphic3d_Camera::CopyMappingData (const Handle(Graphic3d_Camera)& theOtherCamera)
{
SetFOVy (theOtherCamera->FOVy());
SetFOV2d (theOtherCamera->FOV2d());
SetZRange (theOtherCamera->ZNear(), theOtherCamera->ZFar());
SetAspect (theOtherCamera->Aspect());
SetScale (theOtherCamera->Scale());
@@ -137,6 +148,20 @@ void Graphic3d_Camera::CopyMappingData (const Handle(Graphic3d_Camera)& theOther
SetIOD (theOtherCamera->GetIODType(), theOtherCamera->IOD());
SetProjectionType (theOtherCamera->ProjectionType());
SetTile (theOtherCamera->myTile);
ResetCustomProjection();
if (theOtherCamera->IsCustomStereoProjection())
{
SetCustomStereoProjection (theOtherCamera->myCustomProjMatL, theOtherCamera->myCustomProjMatR);
}
else if (theOtherCamera->IsCustomStereoFrustum())
{
SetCustomStereoFrustums (theOtherCamera->myCustomFrustumL, theOtherCamera->myCustomFrustumR);
}
if (theOtherCamera->IsCustomMonoProjection())
{
SetCustomMonoProjection (theOtherCamera->myCustomProjMatM);
}
}
// =======================================================================
@@ -419,11 +444,27 @@ void Graphic3d_Camera::SetFOVy (const Standard_Real theFOVy)
}
myFOVy = theFOVy;
myFOVx = theFOVy * myAspect;
myFOVyTan = Tan(DTR_HALF * myFOVy);
InvalidateProjection();
}
// =======================================================================
// function : SetFOV2d
// purpose :
// =======================================================================
void Graphic3d_Camera::SetFOV2d (const Standard_Real theFOV)
{
if (FOV2d() == theFOV)
{
return;
}
myFOV2d = theFOV;
InvalidateProjection();
}
// =======================================================================
// function : SetZRange
// purpose :
@@ -462,6 +503,7 @@ void Graphic3d_Camera::SetAspect (const Standard_Real theAspect)
}
myAspect = theAspect;
myFOVx = myFOVy * theAspect;
InvalidateProjection();
}
@@ -871,6 +913,62 @@ const Graphic3d_Mat4& Graphic3d_Camera::ProjectionStereoRightF() const
return UpdateProjection (myMatricesF).RProjection;
}
// =======================================================================
// function : ResetCustomProjection
// purpose :
// =======================================================================
void Graphic3d_Camera::ResetCustomProjection()
{
if (myIsCustomFrustomLR
|| myIsCustomProjMatLR
|| myIsCustomProjMatM)
{
myIsCustomFrustomLR = false;
myIsCustomProjMatLR = false;
myIsCustomProjMatM = false;
InvalidateProjection();
}
}
// =======================================================================
// function : SetCustomStereoFrustums
// purpose :
// =======================================================================
void Graphic3d_Camera::SetCustomStereoFrustums (const Aspect_FrustumLRBT<Standard_Real>& theFrustumL,
const Aspect_FrustumLRBT<Standard_Real>& theFrustumR)
{
myCustomFrustumL = theFrustumL;
myCustomFrustumR = theFrustumR;
myIsCustomFrustomLR = true;
myIsCustomProjMatLR = false;
InvalidateProjection();
}
// =======================================================================
// function : SetCustomStereoProjection
// purpose :
// =======================================================================
void Graphic3d_Camera::SetCustomStereoProjection (const Graphic3d_Mat4d& theProjL,
const Graphic3d_Mat4d& theProjR)
{
myCustomProjMatL = theProjL;
myCustomProjMatR = theProjR;
myIsCustomProjMatLR = true;
myIsCustomFrustomLR = false;
InvalidateProjection();
}
// =======================================================================
// function : SetCustomMonoProjection
// purpose :
// =======================================================================
void Graphic3d_Camera::SetCustomMonoProjection (const Graphic3d_Mat4d& theProj)
{
myCustomProjMatM = theProj;
myIsCustomProjMatM = true;
InvalidateProjection();
}
// =======================================================================
// function : UpdateProjection
// purpose :
@@ -894,13 +992,11 @@ Graphic3d_Camera::TransformMatrices<Elem_t>&
Elem_t aDXHalf = 0.0, aDYHalf = 0.0;
if (IsOrthographic())
{
aDXHalf = aScale * Elem_t (0.5);
aDYHalf = aScale * Elem_t (0.5);
aDXHalf = aDYHalf = aScale * Elem_t (0.5);
}
else
{
aDXHalf = aZNear * Elem_t (myFOVyTan);
aDYHalf = aZNear * Elem_t (myFOVyTan);
aDXHalf = aDYHalf = aZNear * Elem_t (myFOVyTan);
}
if (anAspect > 1.0)
@@ -913,10 +1009,11 @@ Graphic3d_Camera::TransformMatrices<Elem_t>&
}
// sets right of frustum based on aspect ratio
Elem_t aLeft = -aDXHalf;
Elem_t aRight = aDXHalf;
Elem_t aBot = -aDYHalf;
Elem_t aTop = aDYHalf;
Aspect_FrustumLRBT<Elem_t> anLRBT;
anLRBT.Left = -aDXHalf;
anLRBT.Right = aDXHalf;
anLRBT.Bottom = -aDYHalf;
anLRBT.Top = aDYHalf;
Elem_t aIOD = myIODType == IODType_Relative
? static_cast<Elem_t> (myIOD * Distance())
@@ -931,56 +1028,83 @@ Graphic3d_Camera::TransformMatrices<Elem_t>&
const Elem_t aDXFull = Elem_t(2) * aDXHalf;
const Elem_t aDYFull = Elem_t(2) * aDYHalf;
const Graphic3d_Vec2i anOffset = myTile.OffsetLowerLeft();
aLeft = -aDXHalf + aDXFull * static_cast<Elem_t> (anOffset.x()) / static_cast<Elem_t> (myTile.TotalSize.x());
aRight = -aDXHalf + aDXFull * static_cast<Elem_t> (anOffset.x() + myTile.TileSize.x()) / static_cast<Elem_t> (myTile.TotalSize.x());
aBot = -aDYHalf + aDYFull * static_cast<Elem_t> (anOffset.y()) / static_cast<Elem_t> (myTile.TotalSize.y());
aTop = -aDYHalf + aDYFull * static_cast<Elem_t> (anOffset.y() + myTile.TileSize.y()) / static_cast<Elem_t> (myTile.TotalSize.y());
anLRBT.Left = -aDXHalf + aDXFull * static_cast<Elem_t> (anOffset.x()) / static_cast<Elem_t> (myTile.TotalSize.x());
anLRBT.Right = -aDXHalf + aDXFull * static_cast<Elem_t> (anOffset.x() + myTile.TileSize.x()) / static_cast<Elem_t> (myTile.TotalSize.x());
anLRBT.Bottom = -aDYHalf + aDYFull * static_cast<Elem_t> (anOffset.y()) / static_cast<Elem_t> (myTile.TotalSize.y());
anLRBT.Top = -aDYHalf + aDYFull * static_cast<Elem_t> (anOffset.y() + myTile.TileSize.y()) / static_cast<Elem_t> (myTile.TotalSize.y());
}
if (myIsCustomProjMatM)
{
theMatrices.MProjection.ConvertFrom (myCustomProjMatM);
}
switch (myProjType)
{
case Projection_Orthographic :
OrthoProj (aLeft, aRight, aBot, aTop, aZNear, aZFar, theMatrices.MProjection);
break;
case Projection_Perspective :
PerspectiveProj (aLeft, aRight, aBot, aTop, aZNear, aZFar, theMatrices.MProjection);
break;
case Projection_MonoLeftEye :
case Projection_Orthographic:
{
StereoEyeProj (aLeft, aRight, aBot, aTop,
aZNear, aZFar, aIOD, aFocus,
Standard_True, theMatrices.MProjection);
theMatrices.LProjection = theMatrices.MProjection;
if (!myIsCustomProjMatM)
{
orthoProj (theMatrices.MProjection, anLRBT, aZNear, aZFar);
}
break;
}
case Projection_MonoRightEye :
case Projection_Perspective:
{
StereoEyeProj (aLeft, aRight, aBot, aTop,
aZNear, aZFar, aIOD, aFocus,
Standard_False, theMatrices.MProjection);
theMatrices.RProjection = theMatrices.MProjection;
if (!myIsCustomProjMatM)
{
perspectiveProj (theMatrices.MProjection, anLRBT, aZNear, aZFar);
}
break;
}
case Projection_Stereo :
case Projection_MonoLeftEye:
case Projection_MonoRightEye:
case Projection_Stereo:
{
PerspectiveProj (aLeft, aRight, aBot, aTop, aZNear, aZFar, theMatrices.MProjection);
if (!myIsCustomProjMatM)
{
perspectiveProj (theMatrices.MProjection, anLRBT, aZNear, aZFar);
}
if (myIsCustomProjMatLR)
{
theMatrices.LProjection.ConvertFrom (myCustomProjMatL);
theMatrices.RProjection.ConvertFrom (myCustomProjMatR);
}
else if (myIsCustomFrustomLR)
{
anLRBT = Aspect_FrustumLRBT<Elem_t> (myCustomFrustumL).Multiplied (aZNear);
perspectiveProj (theMatrices.LProjection, anLRBT, aZNear, aZFar);
if (aIOD != Elem_t (0.0))
{
theMatrices.LProjection.Translate (NCollection_Vec3<Elem_t> (Elem_t (0.5) * aIOD, Elem_t (0.0), Elem_t (0.0)));
}
StereoEyeProj (aLeft, aRight, aBot, aTop,
aZNear, aZFar, aIOD, aFocus,
Standard_True,
theMatrices.LProjection);
StereoEyeProj (aLeft, aRight, aBot, aTop,
aZNear, aZFar, aIOD, aFocus,
Standard_False,
theMatrices.RProjection);
anLRBT = Aspect_FrustumLRBT<Elem_t> (myCustomFrustumR).Multiplied (aZNear);
perspectiveProj (theMatrices.RProjection, anLRBT, aZNear, aZFar);
if (aIOD != Elem_t (0.0))
{
theMatrices.RProjection.Translate (NCollection_Vec3<Elem_t> (Elem_t (-0.5) * aIOD, Elem_t (0.0), Elem_t (0.0)));
}
}
else
{
stereoEyeProj (theMatrices.LProjection,
anLRBT, aZNear, aZFar, aIOD, aFocus,
Aspect_Eye_Left);
stereoEyeProj (theMatrices.RProjection,
anLRBT, aZNear, aZFar, aIOD, aFocus,
Aspect_Eye_Right);
}
break;
}
}
if (myProjType == Projection_MonoLeftEye)
{
theMatrices.MProjection = theMatrices.LProjection;
}
else if (myProjType == Projection_MonoRightEye)
{
theMatrices.MProjection = theMatrices.RProjection;
}
return theMatrices; // for inline accessors
}
@@ -1044,29 +1168,26 @@ void Graphic3d_Camera::InvalidateOrientation()
}
// =======================================================================
// function : OrthoProj
// function : orthoProj
// purpose :
// =======================================================================
template <typename Elem_t>
void Graphic3d_Camera::OrthoProj (const Elem_t theLeft,
const Elem_t theRight,
const Elem_t theBottom,
const Elem_t theTop,
void Graphic3d_Camera::orthoProj (NCollection_Mat4<Elem_t>& theOutMx,
const Aspect_FrustumLRBT<Elem_t>& theLRBT,
const Elem_t theNear,
const Elem_t theFar,
NCollection_Mat4<Elem_t>& theOutMx)
const Elem_t theFar)
{
// row 0
theOutMx.ChangeValue (0, 0) = Elem_t (2.0) / (theRight - theLeft);
theOutMx.ChangeValue (0, 0) = Elem_t (2.0) / (theLRBT.Right - theLRBT.Left);
theOutMx.ChangeValue (0, 1) = Elem_t (0.0);
theOutMx.ChangeValue (0, 2) = Elem_t (0.0);
theOutMx.ChangeValue (0, 3) = - (theRight + theLeft) / (theRight - theLeft);
theOutMx.ChangeValue (0, 3) = - (theLRBT.Right + theLRBT.Left) / (theLRBT.Right - theLRBT.Left);
// row 1
theOutMx.ChangeValue (1, 0) = Elem_t (0.0);
theOutMx.ChangeValue (1, 1) = Elem_t (2.0) / (theTop - theBottom);
theOutMx.ChangeValue (1, 1) = Elem_t (2.0) / (theLRBT.Top - theLRBT.Bottom);
theOutMx.ChangeValue (1, 2) = Elem_t (0.0);
theOutMx.ChangeValue (1, 3) = - (theTop + theBottom) / (theTop - theBottom);
theOutMx.ChangeValue (1, 3) = - (theLRBT.Top + theLRBT.Bottom) / (theLRBT.Top - theLRBT.Bottom);
// row 2
theOutMx.ChangeValue (2, 0) = Elem_t (0.0);
@@ -1086,29 +1207,26 @@ void Graphic3d_Camera::OrthoProj (const Elem_t theLeft,
// purpose :
// =======================================================================
template <typename Elem_t>
void Graphic3d_Camera::PerspectiveProj (const Elem_t theLeft,
const Elem_t theRight,
const Elem_t theBottom,
const Elem_t theTop,
void Graphic3d_Camera::perspectiveProj (NCollection_Mat4<Elem_t>& theOutMx,
const Aspect_FrustumLRBT<Elem_t>& theLRBT,
const Elem_t theNear,
const Elem_t theFar,
NCollection_Mat4<Elem_t>& theOutMx)
const Elem_t theFar)
{
// column 0
theOutMx.ChangeValue (0, 0) = (Elem_t (2.0) * theNear) / (theRight - theLeft);
theOutMx.ChangeValue (0, 0) = (Elem_t (2.0) * theNear) / (theLRBT.Right - theLRBT.Left);
theOutMx.ChangeValue (1, 0) = Elem_t (0.0);
theOutMx.ChangeValue (2, 0) = Elem_t (0.0);
theOutMx.ChangeValue (3, 0) = Elem_t (0.0);
// column 1
theOutMx.ChangeValue (0, 1) = Elem_t (0.0);
theOutMx.ChangeValue (1, 1) = (Elem_t (2.0) * theNear) / (theTop - theBottom);
theOutMx.ChangeValue (1, 1) = (Elem_t (2.0) * theNear) / (theLRBT.Top - theLRBT.Bottom);
theOutMx.ChangeValue (2, 1) = Elem_t (0.0);
theOutMx.ChangeValue (3, 1) = Elem_t (0.0);
// column 2
theOutMx.ChangeValue (0, 2) = (theRight + theLeft) / (theRight - theLeft);
theOutMx.ChangeValue (1, 2) = (theTop + theBottom) / (theTop - theBottom);
theOutMx.ChangeValue (0, 2) = (theLRBT.Right + theLRBT.Left) / (theLRBT.Right - theLRBT.Left);
theOutMx.ChangeValue (1, 2) = (theLRBT.Top + theLRBT.Bottom) / (theLRBT.Top - theLRBT.Bottom);
theOutMx.ChangeValue (2, 2) = -(theFar + theNear) / (theFar - theNear);
theOutMx.ChangeValue (3, 2) = Elem_t (-1.0);
@@ -1124,25 +1242,22 @@ void Graphic3d_Camera::PerspectiveProj (const Elem_t theLeft,
// purpose :
// =======================================================================
template <typename Elem_t>
void Graphic3d_Camera::StereoEyeProj (const Elem_t theLeft,
const Elem_t theRight,
const Elem_t theBottom,
const Elem_t theTop,
void Graphic3d_Camera::stereoEyeProj (NCollection_Mat4<Elem_t>& theOutMx,
const Aspect_FrustumLRBT<Elem_t>& theLRBT,
const Elem_t theNear,
const Elem_t theFar,
const Elem_t theIOD,
const Elem_t theZFocus,
const Standard_Boolean theIsLeft,
NCollection_Mat4<Elem_t>& theOutMx)
const Aspect_Eye theEyeIndex)
{
Elem_t aDx = theIsLeft ? Elem_t (0.5) * theIOD : Elem_t (-0.5) * theIOD;
Elem_t aDx = theEyeIndex == Aspect_Eye_Left ? Elem_t (0.5) * theIOD : Elem_t (-0.5) * theIOD;
Elem_t aDXStereoShift = aDx * theNear / theZFocus;
// construct eye projection matrix
PerspectiveProj (theLeft + aDXStereoShift,
theRight + aDXStereoShift,
theBottom, theTop, theNear, theFar,
theOutMx);
Aspect_FrustumLRBT<Elem_t> aLRBT = theLRBT;
aLRBT.Left = theLRBT.Left + aDXStereoShift;
aLRBT.Right = theLRBT.Right + aDXStereoShift;
perspectiveProj (theOutMx, aLRBT, theNear, theFar);
if (theIOD != Elem_t (0.0))
{

src/Graphic3d/Graphic3d_Camera.hxx

@@ -16,6 +16,8 @@
#ifndef _Graphic3d_Camera_HeaderFile
#define _Graphic3d_Camera_HeaderFile
#include <Aspect_Eye.hxx>
#include <Aspect_FrustumLRBT.hxx>
#include <Graphic3d_CameraTile.hxx>
#include <Graphic3d_Mat4d.hxx>
#include <Graphic3d_Mat4.hxx>
@@ -185,6 +187,12 @@ public:
//! Return a copy of orthogonalized up direction vector.
Standard_EXPORT gp_Dir OrthogonalizedUp() const;
//! Right side direction.
gp_Dir SideRight() const
{
return -(gp_Vec (Direction()) ^ gp_Vec (OrthogonalizedUp()));
}
//! Get camera Eye position.
//! @return camera eye location.
const gp_Pnt& Eye() const { return myEye; }
@@ -284,15 +292,26 @@ public:
}
//! Set Field Of View (FOV) in y axis for perspective projection.
//! Field of View in x axis is automatically scaled from view aspect ratio.
//! @param theFOVy [in] the FOV in degrees.
Standard_EXPORT void SetFOVy (const Standard_Real theFOVy);
//! Get Field Of View (FOV) in y axis.
//! @return the FOV value in degrees.
Standard_Real FOVy() const
{
return myFOVy;
}
Standard_Real FOVy() const { return myFOVy; }
//! Get Field Of View (FOV) in x axis.
//! @return the FOV value in degrees.
Standard_Real FOVx() const { return myFOVx; }
//! Get Field Of View (FOV) restriction for 2D on-screen elements; 180 degrees by default.
//! When 2D FOV is smaller than FOVy or FOVx, 2D elements defined within offset from view corner
//! will be extended to fit into specified 2D FOV.
//! This can be useful to make 2D elements sharply visible, like in case of HMD normally having extra large FOVy.
Standard_Real FOV2d() const { return myFOV2d; }
//! Set Field Of View (FOV) restriction for 2D on-screen elements.
Standard_EXPORT void SetFOV2d (Standard_Real theFOV);
//! Estimate Z-min and Z-max planes of projection volume to match the
//! displayed objects. The methods ensures that view volume will
@@ -427,6 +446,24 @@ public:
//! @return values in form of gp_Pnt (Width, Height, Depth).
Standard_EXPORT gp_XYZ ViewDimensions (const Standard_Real theZValue) const;
//! Return offset to the view corner in NDC space within dimension X for 2d on-screen elements, which is normally 0.5.
//! Can be clamped when FOVx exceeds FOV2d.
Standard_Real NDC2dOffsetX() const
{
return myFOV2d >= myFOVx
? 0.5
: 0.5 * myFOV2d / myFOVx;
}
//! Return offset to the view corner in NDC space within dimension Y for 2d on-screen elements, which is normally 0.5.
//! Can be clamped when FOVy exceeds FOV2d.
Standard_Real NDC2dOffsetY() const
{
return myFOV2d >= myFOVy
? 0.5
: 0.5 * myFOV2d / myFOVy;
}
//! Calculate WCS frustum planes for the camera projection volume.
//! Frustum is a convex volume determined by six planes directing
//! inwards.
@@ -552,6 +589,32 @@ public:
//! The matrix will be updated on request.
Standard_EXPORT void InvalidateOrientation();
public:
//! Unset all custom frustums and projection matrices.
Standard_EXPORT void ResetCustomProjection();
//! Return TRUE if custom stereo frustums are set.
bool IsCustomStereoFrustum() const { return myIsCustomFrustomLR; }
//! Set custom stereo frustums.
//! These can be retrieved from APIs like OpenVR.
Standard_EXPORT void SetCustomStereoFrustums (const Aspect_FrustumLRBT<Standard_Real>& theFrustumL,
const Aspect_FrustumLRBT<Standard_Real>& theFrustumR);
//! Return TRUE if custom stereo projection matrices are set.
bool IsCustomStereoProjection() const { return myIsCustomProjMatLR; }
//! Set custom stereo projection matrices.
Standard_EXPORT void SetCustomStereoProjection (const Graphic3d_Mat4d& theProjL,
const Graphic3d_Mat4d& theProjR);
//! Return TRUE if custom projection matrix is set.
bool IsCustomMonoProjection() const { return myIsCustomProjMatM; }
//! Set custom projection matrix.
Standard_EXPORT void SetCustomMonoProjection (const Graphic3d_Mat4d& theProj);
//! Dumps the content of me into the stream
Standard_EXPORT void DumpJson (Standard_OStream& theOStream, Standard_Integer theDepth = -1) const;
@@ -572,68 +635,44 @@ private:
private:
//! Compose orthographic projection matrix for
//! the passed camera volume mapping.
//! @param theLeft [in] the left mapping (clipping) coordinate.
//! @param theRight [in] the right mapping (clipping) coordinate.
//! @param theBottom [in] the bottom mapping (clipping) coordinate.
//! @param theTop [in] the top mapping (clipping) coordinate.
//! @param theNear [in] the near mapping (clipping) coordinate.
//! @param theFar [in] the far mapping (clipping) coordinate.
//! @param theOutMx [out] the projection matrix.
//! Compose orthographic projection matrix for the passed camera volume mapping.
//! @param theOutMx [out] the projection matrix
//! @param theLRBT [in] the left/right/bottom/top mapping (clipping) coordinates
//! @param theNear [in] the near mapping (clipping) coordinate
//! @param theFar [in] the far mapping (clipping) coordinate
template <typename Elem_t>
static void
OrthoProj (const Elem_t theLeft,
const Elem_t theRight,
const Elem_t theBottom,
const Elem_t theTop,
const Elem_t theNear,
const Elem_t theFar,
NCollection_Mat4<Elem_t>& theOutMx);
static void orthoProj (NCollection_Mat4<Elem_t>& theOutMx,
const Aspect_FrustumLRBT<Elem_t>& theLRBT,
const Elem_t theNear,
const Elem_t theFar);
//! Compose perspective projection matrix for
//! the passed camera volume mapping.
//! @param theLeft [in] the left mapping (clipping) coordinate.
//! @param theRight [in] the right mapping (clipping) coordinate.
//! @param theBottom [in] the bottom mapping (clipping) coordinate.
//! @param theTop [in] the top mapping (clipping) coordinate.
//! @param theNear [in] the near mapping (clipping) coordinate.
//! @param theFar [in] the far mapping (clipping) coordinate.
//! @param theOutMx [out] the projection matrix.
//! Compose perspective projection matrix for the passed camera volume mapping.
//! @param theOutMx [out] the projection matrix
//! @param theLRBT [in] the left/right/bottom/top mapping (clipping) coordinates
//! @param theNear [in] the near mapping (clipping) coordinate
//! @param theFar [in] the far mapping (clipping) coordinate
template <typename Elem_t>
static void
PerspectiveProj (const Elem_t theLeft,
const Elem_t theRight,
const Elem_t theBottom,
const Elem_t theTop,
const Elem_t theNear,
const Elem_t theFar,
NCollection_Mat4<Elem_t>& theOutMx);
static void perspectiveProj (NCollection_Mat4<Elem_t>& theOutMx,
const Aspect_FrustumLRBT<Elem_t>& theLRBT,
const Elem_t theNear,
const Elem_t theFar);
//! Compose projection matrix for L/R stereo eyes.
//! @param theLeft [in] the left mapping (clipping) coordinate.
//! @param theRight [in] the right mapping (clipping) coordinate.
//! @param theBottom [in] the bottom mapping (clipping) coordinate.
//! @param theTop [in] the top mapping (clipping) coordinate.
//! @param theNear [in] the near mapping (clipping) coordinate.
//! @param theFar [in] the far mapping (clipping) coordinate.
//! @param theIOD [in] the Intraocular distance.
//! @param theZFocus [in] the z coordinate of off-axis
//! projection plane with zero parallax.
//! @param theIsLeft [in] boolean flag to choose between L/R eyes.
//! @param theOutMx [out] the projection matrix.
//! @param theOutMx [out] the projection matrix
//! @param theLRBT [in] the left/right/bottom/top mapping (clipping) coordinates
//! @param theNear [in] the near mapping (clipping) coordinate
//! @param theFar [in] the far mapping (clipping) coordinate
//! @param theIOD [in] the Intraocular distance
//! @param theZFocus [in] the z coordinate of off-axis projection plane with zero parallax
//! @param theEyeIndex [in] choose between L/R eyes
template <typename Elem_t>
static void
StereoEyeProj (const Elem_t theLeft,
const Elem_t theRight,
const Elem_t theBottom,
const Elem_t theTop,
const Elem_t theNear,
const Elem_t theFar,
const Elem_t theIOD,
const Elem_t theZFocus,
const Standard_Boolean theIsLeft,
NCollection_Mat4<Elem_t>& theOutMx);
static void stereoEyeProj (NCollection_Mat4<Elem_t>& theOutMx,
const Aspect_FrustumLRBT<Elem_t>& theLRBT,
const Elem_t theNear,
const Elem_t theFar,
const Elem_t theIOD,
const Elem_t theZFocus,
const Aspect_Eye theEyeIndex);
//! Construct "look at" orientation transformation.
//! Reference point differs for perspective and ortho modes
@@ -684,6 +723,8 @@ private:
Projection myProjType; //!< Projection type used for rendering.
Standard_Real myFOVy; //!< Field Of View in y axis.
Standard_Real myFOVx; //!< Field Of View in x axis.
Standard_Real myFOV2d; //!< Field Of View limit for 2d on-screen elements
Standard_Real myFOVyTan; //!< Field Of View as Tan(DTR_HALF * myFOVy)
Standard_Real myZNear; //!< Distance to near clipping plane.
Standard_Real myZFar; //!< Distance to far clipping plane.
@@ -698,6 +739,15 @@ private:
Graphic3d_CameraTile myTile;//!< Tile defining sub-area for drawing
Graphic3d_Mat4d myCustomProjMatM;
Graphic3d_Mat4d myCustomProjMatL;
Graphic3d_Mat4d myCustomProjMatR;
Aspect_FrustumLRBT<Standard_Real> myCustomFrustumL; //!< left custom frustum
Aspect_FrustumLRBT<Standard_Real> myCustomFrustumR; //!< right custom frustum
Standard_Boolean myIsCustomProjMatM; //!< flag indicating usage of custom projection matrix
Standard_Boolean myIsCustomProjMatLR; //!< flag indicating usage of custom stereo projection matrices
Standard_Boolean myIsCustomFrustomLR; //!< flag indicating usage of custom stereo frustums
mutable TransformMatrices<Standard_Real> myMatricesD;
mutable TransformMatrices<Standard_ShortReal> myMatricesF;

src/Graphic3d/Graphic3d_RenderingParams.hxx

@@ -134,8 +134,10 @@ public:
WhitePoint (1.f),
// stereoscopic parameters
StereoMode (Graphic3d_StereoMode_QuadBuffer),
HmdFov2d (30.0f),
AnaglyphFilter (Anaglyph_RedCyan_Optimized),
ToReverseStereo (Standard_False),
ToMirrorComposer (Standard_True),
//
StatsPosition (new Graphic3d_TransformPers (Graphic3d_TMF_2d, Aspect_TOTP_LEFT_UPPER, Graphic3d_Vec2i (20, 20))),
ChartPosition (new Graphic3d_TransformPers (Graphic3d_TMF_2d, Aspect_TOTP_RIGHT_UPPER, Graphic3d_Vec2i (20, 20))),
@@ -225,10 +227,12 @@ public:
Standard_ShortReal WhitePoint; //!< white point value used in filmic tone mapping (path tracing), 1.0 by default
Graphic3d_StereoMode StereoMode; //!< stereoscopic output mode, Graphic3d_StereoMode_QuadBuffer by default
Standard_ShortReal HmdFov2d; //!< sharp field of view range in degrees for displaying on-screen 2D elements, 30.0 by default;
Anaglyph AnaglyphFilter; //!< filter for anaglyph output, Anaglyph_RedCyan_Optimized by default
Graphic3d_Mat4 AnaglyphLeft; //!< left anaglyph filter (in normalized colorspace), Color = AnaglyphRight * theColorRight + AnaglyphLeft * theColorLeft;
Graphic3d_Mat4 AnaglyphRight; //!< right anaglyph filter (in normalized colorspace), Color = AnaglyphRight * theColorRight + AnaglyphLeft * theColorLeft;
Standard_Boolean ToReverseStereo; //!< flag to reverse stereo pair, FALSE by default
Standard_Boolean ToMirrorComposer; //!< if output device is an external composer - mirror rendering results in window in addition to sending frame to composer, TRUE by default
Handle(Graphic3d_TransformPers) StatsPosition; //!< location of stats, upper-left position by default
Handle(Graphic3d_TransformPers) ChartPosition; //!< location of stats chart, upper-right position by default

src/Graphic3d/Graphic3d_StereoMode.hxx

@@ -27,6 +27,7 @@ enum Graphic3d_StereoMode
Graphic3d_StereoMode_SideBySide, //!< horizontal pair
Graphic3d_StereoMode_OverUnder, //!< vertical pair
Graphic3d_StereoMode_SoftPageFlip, //!< software PageFlip for shutter glasses, should NOT be used!
Graphic3d_StereoMode_OpenVR, //!< OpenVR (HMD)
Graphic3d_StereoMode_NB //!< the number of modes
};

src/Graphic3d/Graphic3d_TransformPers.cxx

@@ -364,7 +364,7 @@ void Graphic3d_TransformPers::Apply (const Handle(Graphic3d_Camera)& theCamera,
{
const Standard_Real anOffsetX = (Standard_Real(myParams.Params2d.OffsetX) + aJitterComp) * aScale;
const gp_Dir aSide = aForward.Crossed (theCamera->Up());
const gp_XYZ aDeltaX = aSide.XYZ() * (Abs(aViewDim.X()) * 0.5 - anOffsetX);
const gp_XYZ aDeltaX = aSide.XYZ() * (Abs(aViewDim.X()) * theCamera->NDC2dOffsetX() - anOffsetX);
if ((myParams.Params2d.Corner & Aspect_TOTP_RIGHT) != 0)
{
aCenter += aDeltaX;
@@ -377,7 +377,7 @@ void Graphic3d_TransformPers::Apply (const Handle(Graphic3d_Camera)& theCamera,
if ((myParams.Params2d.Corner & (Aspect_TOTP_TOP | Aspect_TOTP_BOTTOM)) != 0)
{
const Standard_Real anOffsetY = (Standard_Real(myParams.Params2d.OffsetY) + aJitterComp) * aScale;
const gp_XYZ aDeltaY = theCamera->Up().XYZ() * (Abs(aViewDim.Y()) * 0.5 - anOffsetY);
const gp_XYZ aDeltaY = theCamera->Up().XYZ() * (Abs(aViewDim.Y()) * theCamera->NDC2dOffsetY() - anOffsetY);
if ((myParams.Params2d.Corner & Aspect_TOTP_TOP) != 0)
{
aCenter += aDeltaY;
@@ -408,7 +408,7 @@ void Graphic3d_TransformPers::Apply (const Handle(Graphic3d_Camera)& theCamera,
gp_XYZ aCenter (0.0, 0.0, -aFocus);
if ((myParams.Params2d.Corner & (Aspect_TOTP_LEFT | Aspect_TOTP_RIGHT)) != 0)
{
aCenter.SetX (-aViewDim.X() * 0.5 + (Standard_Real(myParams.Params2d.OffsetX) + aJitterComp) * aScale);
aCenter.SetX (-aViewDim.X() * theCamera->NDC2dOffsetX() + (Standard_Real(myParams.Params2d.OffsetX) + aJitterComp) * aScale);
if ((myParams.Params2d.Corner & Aspect_TOTP_RIGHT) != 0)
{
aCenter.SetX (-aCenter.X());
@@ -416,7 +416,7 @@ void Graphic3d_TransformPers::Apply (const Handle(Graphic3d_Camera)& theCamera,
}
if ((myParams.Params2d.Corner & (Aspect_TOTP_TOP | Aspect_TOTP_BOTTOM)) != 0)
{
aCenter.SetY (-aViewDim.Y() * 0.5 + (Standard_Real(myParams.Params2d.OffsetY) + aJitterComp) * aScale);
aCenter.SetY (-aViewDim.Y() * theCamera->NDC2dOffsetY() + (Standard_Real(myParams.Params2d.OffsetY) + aJitterComp) * aScale);
if ((myParams.Params2d.Corner & Aspect_TOTP_TOP) != 0)
{
aCenter.SetY (-aCenter.Y());