
0030612: Visualization - provide texture map with video as image source

Introduced the package Media wrapping FFmpeg structures.
The Media_PlayerContext class manages video decoding into the Media_IFrameQueue interface.
Graphic3d_MediaTextureSet implements the Media_IFrameQueue interface and can be used as a texture source.
AIS_MediaPlayer implements a simple AIS object displaying video.
kgv 2019-03-28 00:57:17 +03:00 committed by apn
parent 9491df8c1b
commit 98e6c6d17b
36 changed files with 4012 additions and 77 deletions
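
The classes introduced here plug together as follows: AIS_MediaPlayer owns a Graphic3d_MediaTextureSet, which spawns a Media_PlayerContext decoding thread on OpenInput(). A minimal usage sketch based on the API added below (not part of this commit; the input path, frame size and redraw handling are placeholder assumptions):

    #include <AIS_InteractiveContext.hxx>
    #include <AIS_MediaPlayer.hxx>
    #include <Graphic3d_Vec2i.hxx>
    #include <V3d_Viewer.hxx>

    //! Update callback invoked from the decoding thread whenever a new frame is ready;
    //! here it only invalidates the viewer so that the application redraws
    //! (assumption: the application drives redraws from its own event loop).
    static void onMediaUpdate (void* theViewerPtr)
    {
      static_cast<V3d_Viewer*> (theViewerPtr)->Invalidate();
    }

    //! Display a video file as a 2D overlay in the given context (sketch).
    static Handle(AIS_MediaPlayer) displayVideo (const Handle(AIS_InteractiveContext)& theCtx,
                                                 const Handle(V3d_Viewer)& theViewer)
    {
      Handle(AIS_MediaPlayer) aPlayer = new AIS_MediaPlayer();
      aPlayer->SetCallback (&onMediaUpdate, theViewer.get());
      aPlayer->OpenInput ("sample.mp4", Standard_False); // hypothetical input path
      theCtx->Display (aPlayer, 0, 0, false);
      return aPlayer;
    }

    // Within the rendering loop, each redraw should fetch the latest decoded frame, e.g.:
    //   aPlayer->PresentFrame (Graphic3d_Vec2i (0, 0), Graphic3d_Vec2i (960, 540));
    // SetClosePlayer() schedules removal of the presentation on the next frame.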


@ -203,6 +203,7 @@ n Aspect
n DsgPrs
n Graphic3d
n Image
n Media
n MeshVS
n OpenGl
n D3DHost


@ -65,6 +65,7 @@ if (WIN32)
set (CSF_shell32 "shell32.lib")
set (CSF_wsock32 "wsock32.lib")
set (CSF_psapi "psapi.lib")
set (CSF_winmm "winmm.lib")
set (CSF_d3d9 "D3D9.lib")
if ("${CMAKE_SYSTEM_NAME}" STREQUAL "WindowsStore" OR USE_GLES2)
set (CSF_OpenGlLibs "libEGL libGLESv2")


@ -1359,6 +1359,7 @@ proc osutils:csfList { theOS theCsfLibsMap theCsfFrmsMap } {
set aLibsMap(CSF_opengl32) "opengl32"
set aLibsMap(CSF_wsock32) "wsock32"
set aLibsMap(CSF_netapi32) "netapi32"
set aLibsMap(CSF_winmm) "winmm"
set aLibsMap(CSF_OpenGlLibs) "opengl32"
if { "$::HAVE_GLES2" == "true" } {
set aLibsMap(CSF_OpenGlLibs) "libEGL libGLESv2"


@ -168,7 +168,7 @@ void AIS_Animation::StartTimer (const Standard_Real theStartPts,
{
if (myTimer.IsNull())
{
myTimer = new AIS_AnimationTimer();
myTimer = new Media_Timer();
}
myTimer->Stop();
myTimer->Seek (theStartPts);


@ -191,7 +191,7 @@ protected:
protected:
Handle(AIS_AnimationTimer) myTimer;
Handle(Media_Timer) myTimer;
TCollection_AsciiString myName; //!< animation name
NCollection_Sequence<Handle(AIS_Animation)>


@ -15,60 +15,7 @@
#ifndef _AIS_AnimationTimer_HeaderFile
#define _AIS_AnimationTimer_HeaderFile
#include <OSD_Timer.hxx>
#include <Standard_Transient.hxx>
#include <Standard_Type.hxx>
//! Auxiliary class defining the animation timer.
class AIS_AnimationTimer : public Standard_Transient
{
DEFINE_STANDARD_RTTIEXT(AIS_AnimationTimer, Standard_Transient)
public:
//! Empty constructor.
AIS_AnimationTimer() : myTimerFrom (0.0), myTimerSpeed (1.0) {}
//! Return elapsed time in seconds.
Standard_Real ElapsedTime() const
{
return myTimerFrom + myTimer.ElapsedTime() * myTimerSpeed;
}
//! Return playback speed coefficient (1.0 means normal speed).
Standard_Real PlaybackSpeed() const { return myTimerSpeed; }
//! Setup playback speed coefficient.
Standard_EXPORT void SetPlaybackSpeed (const Standard_Real theSpeed);
//! Return true if timer has been started.
Standard_Boolean IsStarted() const
{
return myTimer.IsStarted();
}
//! Start the timer.
void Start()
{
myTimer.Start();
}
//! Pause the timer.
Standard_EXPORT void Pause();
//! Stop the timer.
Standard_EXPORT void Stop();
//! Seek the timer to specified position.
Standard_EXPORT void Seek (const Standard_Real theTime);
protected:
OSD_Timer myTimer;
Standard_Real myTimerFrom;
Standard_Real myTimerSpeed;
};
DEFINE_STANDARD_HANDLE(AIS_AnimationTimer, Standard_Transient)
#include <Media_Timer.hxx>
typedef Media_Timer AIS_AnimationTimer;
#endif // _AIS_AnimationTimer_HeaderFile

src/AIS/AIS_MediaPlayer.cxx (new file)

@ -0,0 +1,250 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#include <AIS_MediaPlayer.hxx>
#include <AIS_InteractiveContext.hxx>
#include <Media_PlayerContext.hxx>
#include <Message.hxx>
#include <Message_Messenger.hxx>
#include <Graphic3d_ArrayOfTriangles.hxx>
#include <Graphic3d_MediaTexture.hxx>
#include <SelectMgr_EntityOwner.hxx>
#include <Select3D_SensitivePrimitiveArray.hxx>
#include <V3d_Viewer.hxx>
IMPLEMENT_STANDARD_RTTIEXT(AIS_MediaPlayer, AIS_InteractiveObject)
//! Create an array of triangles defining a rectangle.
static Handle(Graphic3d_ArrayOfTriangles) createRectangleArray (const Graphic3d_Vec2i& theLower,
const Graphic3d_Vec2i& theUpper,
Graphic3d_ArrayFlags theFlags)
{
Handle(Graphic3d_ArrayOfTriangles) aRectTris = new Graphic3d_ArrayOfTriangles (4, 6, theFlags);
aRectTris->AddVertex (gp_Pnt (theLower.x(), theLower.y(), 0.0), gp_Pnt2d (0.0, 1.0));
aRectTris->AddVertex (gp_Pnt (theLower.x(), theUpper.y(), 0.0), gp_Pnt2d (0.0, 0.0));
aRectTris->AddVertex (gp_Pnt (theUpper.x(), theUpper.y(), 0.0), gp_Pnt2d (1.0, 0.0));
aRectTris->AddVertex (gp_Pnt (theUpper.x(), theLower.y(), 0.0), gp_Pnt2d (1.0, 1.0));
aRectTris->AddEdges (1, 2, 3);
aRectTris->AddEdges (1, 3, 4);
return aRectTris;
}
//================================================================
// Function : AIS_MediaPlayer
// Purpose :
//================================================================
AIS_MediaPlayer::AIS_MediaPlayer()
: myFramePair (new Graphic3d_MediaTextureSet()),
myFrameSize (1, 1),
myToClosePlayer (false)
{
SetTransformPersistence (new Graphic3d_TransformPers (Graphic3d_TMF_2d, Aspect_TOTP_LEFT_LOWER));
SetZLayer (Graphic3d_ZLayerId_TopOSD);
SetInfiniteState (true);
Graphic3d_MaterialAspect aMat;
myFrameAspect = new Graphic3d_AspectFillArea3d (Aspect_IS_SOLID, Quantity_NOC_WHITE, Quantity_NOC_BLACK, Aspect_TOL_SOLID, 1.0f, aMat, aMat);
myFrameAspect->SetShadingModel (Graphic3d_TOSM_UNLIT);
myFrameAspect->SetTextureMapOn (true);
myFrameAspect->SetTextureSet (myFramePair);
}
//================================================================
// Function : ~AIS_MediaPlayer
// Purpose :
//================================================================
AIS_MediaPlayer::~AIS_MediaPlayer()
{
// stop threads
myFramePair.Nullify();
}
// =======================================================================
// function : OpenInput
// purpose :
// =======================================================================
void AIS_MediaPlayer::OpenInput (const TCollection_AsciiString& thePath,
Standard_Boolean theToWait)
{
if (myFramePair->PlayerContext().IsNull()
&& thePath.IsEmpty())
{
return;
}
myFramePair->OpenInput (thePath, theToWait);
SynchronizeAspects();
}
// =======================================================================
// function : PresentFrame
// purpose :
// =======================================================================
bool AIS_MediaPlayer::PresentFrame (const Graphic3d_Vec2i& theLeftCorner,
const Graphic3d_Vec2i& theMaxSize)
{
if (myToClosePlayer)
{
myToClosePlayer = false;
if (!HasInteractiveContext())
{
return false;
}
if (!myFramePair->PlayerContext().IsNull())
{
myFramePair->PlayerContext()->Pause();
}
Handle(AIS_InteractiveContext) aCtx = GetContext();
Handle(AIS_InteractiveObject) aThis = this;
aCtx->Remove (aThis, false);
aCtx->CurrentViewer()->Invalidate();
return true;
}
if (myFramePair->PlayerContext().IsNull())
{
return false;
}
bool toRedraw = myFramePair->SwapFrames();
toRedraw = updateSize (theLeftCorner, theMaxSize) || toRedraw;
if (toRedraw)
{
myFrameAspect->SetShaderProgram (myFramePair->ShaderProgram());
SynchronizeAspects();
}
return toRedraw;
}
// =======================================================================
// function : updateSize
// purpose :
// =======================================================================
bool AIS_MediaPlayer::updateSize (const Graphic3d_Vec2i& theLeftCorner,
const Graphic3d_Vec2i& theMaxSize)
{
const Graphic3d_Vec2i aFrameSize = myFramePair->FrameSize();
Graphic3d_Vec2i aNewPos = theLeftCorner;
Graphic3d_Vec2i aNewSize = myFrameSize;
if (aFrameSize.x() > 0
&& aFrameSize.y() > 0)
{
const double anAspect = double(theMaxSize.x()) / double(theMaxSize.y());
const double aFitAspect = double(aFrameSize.x()) / double(aFrameSize.y());
aNewSize = aFrameSize;
if (aFitAspect >= anAspect)
{
aNewSize.y() = int(double(aFrameSize.x()) / aFitAspect);
}
else
{
aNewSize.x() = int(double(aFrameSize.y()) * aFitAspect);
}
for (int aCoord = 0; aCoord < 2; ++aCoord)
{
if (aNewSize[aCoord] > theMaxSize[aCoord])
{
const double aScale = double(theMaxSize[aCoord]) / double(aNewSize[aCoord]);
aNewSize.x() = int(double(aNewSize.x()) * aScale);
aNewSize.y() = int(double(aNewSize.y()) * aScale);
}
}
aNewPos = theLeftCorner + theMaxSize / 2 - aNewSize / 2;
}
else if (myFrameSize.x() < 2
|| myFrameSize.y() < 2)
{
aNewSize = theMaxSize;
}
if (myFrameSize == aNewSize
&& myFrameBottomLeft == aNewPos)
{
return false;
}
myFrameSize = aNewSize;
myFrameBottomLeft = aNewPos;
if (HasInteractiveContext())
{
SetToUpdate();
GetContext()->Redisplay (this, false);
GetContext()->CurrentViewer()->Invalidate();
}
return true;
}
// =======================================================================
// function : PlayPause
// purpose :
// =======================================================================
void AIS_MediaPlayer::PlayPause()
{
if (myFramePair->PlayerContext().IsNull())
{
return;
}
Standard_Real aProgress = 0.0, aDuration = 0.0;
bool isPaused = false;
myFramePair->PlayerContext()->PlayPause (isPaused, aProgress, aDuration);
}
// =======================================================================
// function : Compute
// purpose :
// =======================================================================
void AIS_MediaPlayer::Compute (const Handle(PrsMgr_PresentationManager3d)& ,
const Handle(Prs3d_Presentation)& thePrs,
const Standard_Integer theMode)
{
thePrs->SetInfiniteState (IsInfinite());
if (theMode != 0)
{
return;
}
// main frame
{
Handle(Graphic3d_ArrayOfTriangles) aTris = createRectangleArray (myFrameBottomLeft, myFrameBottomLeft + myFrameSize, Graphic3d_ArrayFlags_VertexTexel);
Handle(Graphic3d_Group) aMainGroup = thePrs->NewGroup();
aMainGroup->SetGroupPrimitivesAspect (myFrameAspect);
aMainGroup->AddPrimitiveArray (aTris);
}
}
// =======================================================================
// function : ComputeSelection
// purpose :
// =======================================================================
void AIS_MediaPlayer::ComputeSelection (const Handle(SelectMgr_Selection)& theSel,
const Standard_Integer theMode)
{
if (theMode != 0)
{
return;
}
Handle(Graphic3d_ArrayOfTriangles) aTris = createRectangleArray (myFrameBottomLeft, myFrameBottomLeft + myFrameSize, Graphic3d_ArrayFlags_None);
Handle(SelectMgr_EntityOwner) anOwner = new SelectMgr_EntityOwner (this, 5);
Handle(Select3D_SensitivePrimitiveArray) aSens = new Select3D_SensitivePrimitiveArray (anOwner);
aSens->InitTriangulation (aTris->Attributes(), aTris->Indices(), TopLoc_Location());
theSel->Add (aSens);
}


@ -0,0 +1,96 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _AIS_MediaPlayer_HeaderFile
#define _AIS_MediaPlayer_HeaderFile
#include <AIS_InteractiveObject.hxx>
#include <Graphic3d_MediaTextureSet.hxx>
class Media_PlayerContext;
//! Presentation for video playback.
class AIS_MediaPlayer : public AIS_InteractiveObject
{
DEFINE_STANDARD_RTTIEXT(AIS_MediaPlayer, AIS_InteractiveObject)
public:
//! Empty constructor.
Standard_EXPORT AIS_MediaPlayer();
//! Destructor.
Standard_EXPORT virtual ~AIS_MediaPlayer();
//! Setup callback to be called on queue progress (e.g. when new frame should be displayed).
void SetCallback (Graphic3d_MediaTextureSet::CallbackOnUpdate_t theCallbackFunction, void* theCallbackUserPtr)
{
myFramePair->SetCallback (theCallbackFunction, theCallbackUserPtr);
}
//! Open specified file.
Standard_EXPORT void OpenInput (const TCollection_AsciiString& thePath,
Standard_Boolean theToWait);
//! Display new frame.
Standard_EXPORT bool PresentFrame (const Graphic3d_Vec2i& theLeftCorner,
const Graphic3d_Vec2i& theMaxSize);
//! Return player context.
const Handle(Media_PlayerContext)& PlayerContext() const { return myFramePair->PlayerContext(); }
//! Switch playback state.
Standard_EXPORT void PlayPause();
//! Schedule player to be closed.
void SetClosePlayer()
{
myToClosePlayer = true;
myFramePair->Notify();
}
//! Return duration.
double Duration() const { return myFramePair->Duration(); }
//! @name AIS_InteractiveObject interface
protected:
//! Accept only display mode 0.
virtual Standard_Boolean AcceptDisplayMode (const Standard_Integer theMode) const Standard_OVERRIDE { return theMode == 0; }
//! Compute presentation.
Standard_EXPORT virtual void Compute (const Handle(PrsMgr_PresentationManager3d)& thePrsMgr,
const Handle(Prs3d_Presentation)& thePrs,
const Standard_Integer theMode) Standard_OVERRIDE;
//! Compute selection
Standard_EXPORT virtual void ComputeSelection (const Handle(SelectMgr_Selection)& theSel,
const Standard_Integer theMode) Standard_OVERRIDE;
protected:
//! Update frame size.
Standard_EXPORT bool updateSize (const Graphic3d_Vec2i& theLeftCorner,
const Graphic3d_Vec2i& theMaxSize);
protected:
Handle(Graphic3d_MediaTextureSet) myFramePair;
Handle(Graphic3d_AspectFillArea3d) myFrameAspect;
Graphic3d_Vec2i myFrameBottomLeft;
Graphic3d_Vec2i myFrameSize;
bool myToClosePlayer;
};
#endif // _AIS_MediaPlayer_HeaderFile


@ -2,7 +2,6 @@ AIS.cxx
AIS.hxx
AIS_Animation.cxx
AIS_Animation.hxx
AIS_AnimationTimer.cxx
AIS_AnimationTimer.hxx
AIS_AnimationCamera.cxx
AIS_AnimationCamera.hxx
@ -105,6 +104,8 @@ AIS_MapIteratorOfMapOfInteractive.hxx
AIS_MapOfInteractive.hxx
AIS_MaxRadiusDimension.cxx
AIS_MaxRadiusDimension.hxx
AIS_MediaPlayer.cxx
AIS_MediaPlayer.hxx
AIS_MidPointRelation.cxx
AIS_MidPointRelation.hxx
AIS_MidPointRelation.lxx


@ -92,6 +92,10 @@ Graphic3d_Mat4d.hxx
Graphic3d_MaterialAspect.cxx
Graphic3d_MaterialAspect.hxx
Graphic3d_MaterialDefinitionError.hxx
Graphic3d_MediaTexture.cxx
Graphic3d_MediaTexture.hxx
Graphic3d_MediaTextureSet.cxx
Graphic3d_MediaTextureSet.hxx
Graphic3d_NameOfMaterial.hxx
Graphic3d_NameOfTexture1D.hxx
Graphic3d_NameOfTexture2D.hxx


@ -0,0 +1,102 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
// activate some C99 macros like UINT64_C in "stdint.h" which are used by FFmpeg
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#include <Graphic3d_MediaTexture.hxx>
#include <Graphic3d_TextureParams.hxx>
#include <Media_Frame.hxx>
#include <Message.hxx>
#include <Message_Messenger.hxx>
#ifdef HAVE_FFMPEG
#include <Standard_WarningsDisable.hxx>
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
};
#include <Standard_WarningsRestore.hxx>
#endif
IMPLEMENT_STANDARD_RTTIEXT(Graphic3d_MediaTexture, Graphic3d_Texture2D)
// ================================================================
// Function : Graphic3d_MediaTexture
// Purpose :
// ================================================================
Graphic3d_MediaTexture::Graphic3d_MediaTexture (const Handle(Media_HMutex)& theMutex,
Standard_Integer thePlane)
: Graphic3d_Texture2D ("", Graphic3d_TOT_2D),
myMutex (theMutex),
myPlane (thePlane)
{
myParams->SetModulate(false);
myParams->SetRepeat (false);
myParams->SetFilter (Graphic3d_TOTF_BILINEAR);
myParams->SetTextureUnit (Graphic3d_TextureUnit(int(Graphic3d_TextureUnit_0) + thePlane));
}
// ================================================================
// Function : GetImage
// Purpose :
// ================================================================
Handle(Image_PixMap) Graphic3d_MediaTexture::GetImage() const
{
Standard_Mutex::Sentry aLock (myMutex.get());
if (myFrame.IsNull()
|| myFrame->IsLocked()
|| myFrame->IsEmpty()
|| myFrame->SizeX() < 1
|| myFrame->SizeY() < 1)
{
return Handle(Image_PixMap)();
}
if (myPixMapWrapper.IsNull())
{
myPixMapWrapper = new Image_PixMap();
}
#ifdef HAVE_FFMPEG
const AVFrame* aFrame = myFrame->Frame();
const Image_Format anOcctFmt = Media_Frame::FormatFFmpeg2Occt (myFrame->Format());
if (anOcctFmt != Image_Format_UNKNOWN)
{
if (myPlane != 0
|| !myPixMapWrapper->InitWrapper (anOcctFmt, aFrame->data[0], aFrame->width, aFrame->height, aFrame->linesize[0]))
{
return Handle(Image_PixMap)();
}
return myPixMapWrapper;
}
else if (myFrame->Format() == AV_PIX_FMT_YUV420P
|| myFrame->Format() == AV_PIX_FMT_YUVJ420P)
{
const Graphic3d_Vec2i aSize = myPlane == 0 ? myFrame->Size() : myFrame->Size() / 2;
if (myPlane > 3
|| !myPixMapWrapper->InitWrapper (Image_Format_Gray, aFrame->data[myPlane], aSize.x(), aSize.y(), aFrame->linesize[myPlane]))
{
return Handle(Image_PixMap)();
}
return myPixMapWrapper;
}
#endif
return Handle(Image_PixMap)();
}


@ -0,0 +1,57 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Graphic3d_MediaTexture_HeaderFile
#define _Graphic3d_MediaTexture_HeaderFile
#include <Graphic3d_Texture2D.hxx>
#include <NCollection_DefineAlloc.hxx>
#include <NCollection_Shared.hxx>
#include <Standard_Mutex.hxx>
class Media_Frame;
typedef NCollection_Shared<Standard_Mutex> Media_HMutex;
//! Texture adapter for Media_Frame.
class Graphic3d_MediaTexture : public Graphic3d_Texture2D
{
DEFINE_STANDARD_RTTIEXT(Graphic3d_MediaTexture, Graphic3d_Texture2D)
public:
//! Main constructor.
Standard_EXPORT Graphic3d_MediaTexture (const Handle(Media_HMutex)& theMutex,
Standard_Integer thePlane = -1);
//! Image reader.
Standard_EXPORT virtual Handle(Image_PixMap) GetImage() const Standard_OVERRIDE;
//! Return the frame.
const Handle(Media_Frame)& Frame() const { return myFrame; }
//! Set the frame.
void SetFrame (const Handle(Media_Frame)& theFrame) { myFrame = theFrame; }
//! Regenerate a new texture id
void GenerateNewId() { generateId(); }
protected:
mutable Handle(Media_HMutex) myMutex;
Handle(Media_Frame) myFrame;
Standard_Integer myPlane;
mutable Handle(Image_PixMap) myPixMapWrapper;
};
#endif // _Graphic3d_MediaTexture_HeaderFile


@ -0,0 +1,260 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
// activate some C99 macros like UINT64_C in "stdint.h" which are used by FFmpeg
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#include <Graphic3d_MediaTextureSet.hxx>
#include <Media_Frame.hxx>
#include <Media_PlayerContext.hxx>
#include <Graphic3d_ShaderProgram.hxx>
#ifdef HAVE_FFMPEG
#include <Standard_WarningsDisable.hxx>
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
};
#include <Standard_WarningsRestore.hxx>
#endif
IMPLEMENT_STANDARD_RTTIEXT(Graphic3d_MediaTextureSet, Graphic3d_TextureSet)
// ================================================================
// Function : Graphic3d_MediaTextureSet
// Purpose :
// ================================================================
Graphic3d_MediaTextureSet::Graphic3d_MediaTextureSet()
: Graphic3d_TextureSet (4),
myMutex (new Media_HMutex()),
myCallbackFunction(NULL),
myCallbackUserPtr (NULL),
myProgress (0.0),
myDuration (0.0),
myFront (0),
myToPresentFrame (Standard_False),
myIsPlanarYUV (Standard_False),
myIsFullRangeYUV (Standard_True)
{
myFramePair[0] = new Media_Frame();
myFramePair[1] = new Media_Frame();
for (Standard_Integer aPlaneIter = 0; aPlaneIter < Size(); ++aPlaneIter)
{
Handle(Graphic3d_MediaTexture) aTexture = new Graphic3d_MediaTexture (myMutex, aPlaneIter);
SetValue (Lower() + aPlaneIter, aTexture);
}
#define EOL "\n"
TCollection_AsciiString aSrcVert =
EOL"out vec2 TexCoord;"
EOL"void main()"
EOL"{"
EOL" TexCoord = occTexCoord.st;"
EOL " gl_Position = occProjectionMatrix * occWorldViewMatrix * occModelWorldMatrix * occVertex;"
EOL"}";
TCollection_AsciiString F_SHADER_YUV2RGB_MPEG =
EOL"const float TheRangeBits = 1.0;"
EOL"vec3 convertToRGB (in vec3 theYUV)"
EOL"{"
EOL" vec3 aYUV = theYUV.rgb;"
EOL" aYUV *= TheRangeBits;"
EOL" aYUV.x = 1.1643 * (aYUV.x - 0.0625);"
EOL" aYUV.y -= 0.5;"
EOL" aYUV.z -= 0.5;"
EOL" vec3 aColor = vec3(0.0);"
EOL" aColor.r = aYUV.x + 1.5958 * aYUV.z;"
EOL" aColor.g = aYUV.x - 0.39173 * aYUV.y - 0.81290 * aYUV.z;"
EOL" aColor.b = aYUV.x + 2.017 * aYUV.y;"
EOL" return aColor;"
EOL"}";
TCollection_AsciiString F_SHADER_YUV2RGB_FULL =
EOL"const float TheRangeBits = 1.0;"
EOL"vec3 convertToRGB (in vec3 theYUV)"
EOL"{"
EOL" vec3 aYUV = theYUV.rgb;"
EOL" aYUV *= TheRangeBits;"
EOL" aYUV.x = aYUV.x;"
EOL" aYUV.y -= 0.5;"
EOL" aYUV.z -= 0.5;"
EOL" vec3 aColor = vec3(0.0);"
EOL" aColor.r = aYUV.x + 1.402 * aYUV.z;"
EOL" aColor.g = aYUV.x - 0.344 * aYUV.y - 0.714 * aYUV.z;"
EOL" aColor.b = aYUV.x + 1.772 * aYUV.y;"
EOL" return aColor;"
EOL"}";
TCollection_AsciiString aSrcFrag =
EOL"in vec2 TexCoord;"
EOL"uniform sampler2D occSampler1;"
EOL"uniform sampler2D occSampler2;"
EOL"vec3 convertToRGB (in vec3 theYUV);"
EOL"void main()"
EOL"{"
EOL" vec3 aYUV = vec3 (occTexture2D (occSampler0, TexCoord.st).r,"
EOL" occTexture2D (occSampler1, TexCoord.st).r,"
EOL" occTexture2D (occSampler2, TexCoord.st).r);"
EOL " occSetFragColor (vec4 (convertToRGB (aYUV), 1.0));"
EOL"}";
myShaderYUV = new Graphic3d_ShaderProgram();
myShaderYUV->SetHeader ("#version 150");
myShaderYUV->SetNbLightsMax (0);
myShaderYUV->SetNbClipPlanesMax (0);
myShaderYUV->AttachShader (Graphic3d_ShaderObject::CreateFromSource (Graphic3d_TOS_VERTEX, aSrcVert));
myShaderYUV->AttachShader (Graphic3d_ShaderObject::CreateFromSource (Graphic3d_TOS_FRAGMENT, aSrcFrag + F_SHADER_YUV2RGB_MPEG));
myShaderYUVJ = new Graphic3d_ShaderProgram();
myShaderYUVJ->SetHeader ("#version 150");
myShaderYUVJ->SetNbLightsMax (0);
myShaderYUVJ->SetNbClipPlanesMax (0);
myShaderYUVJ->AttachShader (Graphic3d_ShaderObject::CreateFromSource (Graphic3d_TOS_VERTEX, aSrcVert));
myShaderYUVJ->AttachShader (Graphic3d_ShaderObject::CreateFromSource (Graphic3d_TOS_FRAGMENT, aSrcFrag + F_SHADER_YUV2RGB_FULL));
}
// =======================================================================
// function : SetCallback
// purpose :
// =======================================================================
void Graphic3d_MediaTextureSet::SetCallback (CallbackOnUpdate_t theCallbackFunction,
void* theCallbackUserPtr)
{
myCallbackFunction = theCallbackFunction;
myCallbackUserPtr = theCallbackUserPtr;
}
// =======================================================================
// function : Notify
// purpose :
// =======================================================================
void Graphic3d_MediaTextureSet::Notify()
{
if (myCallbackFunction != NULL)
{
myCallbackFunction (myCallbackUserPtr);
}
}
// =======================================================================
// function : OpenInput
// purpose :
// =======================================================================
void Graphic3d_MediaTextureSet::OpenInput (const TCollection_AsciiString& thePath,
Standard_Boolean theToWait)
{
if (myPlayerCtx.IsNull())
{
if (thePath.IsEmpty())
{
myInput.Clear();
return;
}
myPlayerCtx = new Media_PlayerContext (this);
}
myProgress = 0.0;
myDuration = 0.0;
myPlayerCtx->SetInput (thePath, theToWait);
myInput = thePath;
}
// =======================================================================
// function : LockFrame
// purpose :
// =======================================================================
Handle(Media_Frame) Graphic3d_MediaTextureSet::LockFrame()
{
{
Standard_Mutex::Sentry aLock (myMutex.get());
if (!myToPresentFrame)
{
Handle(Media_Frame) aFrame = myFramePair[myFront == 0 ? 1 : 0];
if (aFrame->IsLocked())
{
return Handle(Media_Frame)();
}
aFrame->SetLocked (true);
return aFrame;
}
}
Notify();
return Handle(Media_Frame)();
}
// =======================================================================
// function : ReleaseFrame
// purpose :
// =======================================================================
void Graphic3d_MediaTextureSet::ReleaseFrame (const Handle(Media_Frame)& theFrame)
{
{
Standard_Mutex::Sentry aLock (myMutex.get());
theFrame->SetLocked (false);
myToPresentFrame = true;
}
if (myCallbackFunction != NULL)
{
myCallbackFunction (myCallbackUserPtr);
}
}
// ================================================================
// Function : SwapFrames
// Purpose :
// ================================================================
Standard_Boolean Graphic3d_MediaTextureSet::SwapFrames()
{
if (myPlayerCtx.IsNull())
{
return Standard_False;
}
Standard_Boolean isPaused = Standard_False;
myPlayerCtx->PlaybackState (isPaused, myProgress, myDuration);
Standard_Mutex::Sentry aLock (myMutex.get());
if (!myToPresentFrame)
{
return Standard_False;
}
myToPresentFrame = false;
myFront = myFront == 0 ? 1 : 0;
const Handle(Media_Frame)& aFront = myFramePair[myFront];
myFrameSize = aFront->Size();
#ifdef HAVE_FFMPEG
myIsPlanarYUV = aFront->Format() == AV_PIX_FMT_YUV420P
|| aFront->Format() == AV_PIX_FMT_YUVJ420P;
#endif
myIsFullRangeYUV = aFront->IsFullRangeYUV();
for (int aPlaneIter = Lower(); aPlaneIter <= Upper(); ++aPlaneIter)
{
if (Graphic3d_MediaTexture* aTexture = dynamic_cast<Graphic3d_MediaTexture*> (Value (aPlaneIter).get()))
{
aTexture->SetFrame (aFront);
aTexture->UpdateRevision();
}
}
return Standard_True;
}


@ -0,0 +1,113 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Graphic3d_MediaTextureSet_HeaderFile
#define _Graphic3d_MediaTextureSet_HeaderFile
#include <Media_IFrameQueue.hxx>
#include <Graphic3d_MediaTexture.hxx>
#include <Graphic3d_TextureSet.hxx>
class Graphic3d_ShaderProgram;
class Media_PlayerContext;
//! Texture adapter for Media_Frame.
class Graphic3d_MediaTextureSet : public Graphic3d_TextureSet, public Media_IFrameQueue
{
DEFINE_STANDARD_RTTIEXT(Graphic3d_MediaTextureSet, Graphic3d_TextureSet)
public:
//! Callback definition.
typedef void (*CallbackOnUpdate_t)(void* theUserPtr);
public:
//! Empty constructor.
Standard_EXPORT Graphic3d_MediaTextureSet();
//! Setup callback to be called on queue progress (e.g. when new frame should be displayed).
Standard_EXPORT void SetCallback (CallbackOnUpdate_t theCallbackFunction, void* theCallbackUserPtr);
//! Call callback.
Standard_EXPORT void Notify();
//! Return input media.
const TCollection_AsciiString& Input() const { return myInput; }
//! Open specified file.
//! Passing an empty path would close current input.
Standard_EXPORT void OpenInput (const TCollection_AsciiString& thePath,
Standard_Boolean theToWait);
//! Return player context; it can be NULL until first OpenInput().
const Handle(Media_PlayerContext)& PlayerContext() const { return myPlayerCtx; }
//! Swap front/back frames.
Standard_EXPORT Standard_Boolean SwapFrames();
//! Return front frame dimensions.
Graphic3d_Vec2i FrameSize() const { return myFrameSize; }
//! Return shader program for displaying texture set.
Handle(Graphic3d_ShaderProgram) ShaderProgram() const
{
if (myIsPlanarYUV)
{
return myIsFullRangeYUV ? myShaderYUVJ : myShaderYUV;
}
return Handle(Graphic3d_ShaderProgram)();
}
//! Return TRUE if texture set defines 3 YUV planes.
Standard_Boolean IsPlanarYUV() const { return myIsPlanarYUV; }
//! Return TRUE if YUV range is full.
Standard_Boolean IsFullRangeYUV() const { return myIsFullRangeYUV; }
//! Return duration in seconds.
double Duration() const { return myDuration; }
//! Return playback progress in seconds.
double Progress() const { return myProgress; }
//! @name Media_IFrameQueue interface
private:
//! Lock the frame for decoding into.
virtual Handle(Media_Frame) LockFrame() Standard_OVERRIDE;
//! Release the frame to present decoding results.
virtual void ReleaseFrame (const Handle(Media_Frame)& theFrame) Standard_OVERRIDE;
protected:
Handle(Media_PlayerContext) myPlayerCtx; //!< player context
Handle(Media_Frame) myFramePair[2]; //!< front/back frames pair
Handle(Graphic3d_ShaderProgram) myShaderYUV; //!< shader program for YUV texture set
Handle(Graphic3d_ShaderProgram) myShaderYUVJ; //!< shader program for YUVJ texture set
Handle(Media_HMutex) myMutex; //!< mutex for accessing frames
TCollection_AsciiString myInput; //!< input media
CallbackOnUpdate_t myCallbackFunction; //!< callback function
void* myCallbackUserPtr; //!< callback data
Graphic3d_Vec2i myFrameSize; //!< front frame size
Standard_Real myProgress; //!< playback progress in seconds
Standard_Real myDuration; //!< stream duration
Standard_Integer myFront; //!< index of front texture
Standard_Boolean myToPresentFrame; //!< flag indicating that a new decoded frame is ready to be swapped to front
Standard_Boolean myIsPlanarYUV; //!< front frame contains planar YUV data or native texture format
Standard_Boolean myIsFullRangeYUV; //!< front frame defines full-range or reduced-range YUV
};
#endif // _Graphic3d_MediaTextureSet_HeaderFile
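
For illustration, the texture set declared above can also be attached directly to an arbitrary shaded aspect, in the same way AIS_MediaPlayer wires it up; this is a sketch only (the helper name and call site are assumptions, not part of the commit):

    #include <Graphic3d_AspectFillArea3d.hxx>
    #include <Graphic3d_MediaTextureSet.hxx>
    #include <TCollection_AsciiString.hxx>

    //! Attach a video texture set to an existing fill-area aspect (sketch).
    static Handle(Graphic3d_MediaTextureSet) attachVideoTexture (const Handle(Graphic3d_AspectFillArea3d)& theAspect,
                                                                 const TCollection_AsciiString& thePath)
    {
      Handle(Graphic3d_MediaTextureSet) aMedia = new Graphic3d_MediaTextureSet();
      aMedia->OpenInput (thePath, Standard_False); // spawns the Media_PlayerContext decoding thread
      theAspect->SetShadingModel (Graphic3d_TOSM_UNLIT);
      theAspect->SetTextureMapOn (true);
      theAspect->SetTextureSet (aMedia);
      return aMedia;
    }

    // On each redraw the presentation should swap in the newest decoded frame and,
    // for planar YUV input, pick up the YUV->RGB shader program:
    //   if (aMedia->SwapFrames()) { theAspect->SetShaderProgram (aMedia->ShaderProgram()); }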


@ -283,7 +283,7 @@ Standard_Boolean Image_VideoRecorder::addVideoStream (const Image_VideoParams& t
// some formats want stream headers to be separate
if (myAVContext->oformat->flags & AVFMT_GLOBALHEADER)
{
aCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
aCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
return Standard_True;
#else
@ -456,18 +456,6 @@ Standard_Boolean Image_VideoRecorder::writeVideoFrame (const Standard_Boolean th
AVPacket aPacket;
memset (&aPacket, 0, sizeof(aPacket));
av_init_packet (&aPacket);
if ((myAVContext->oformat->flags & AVFMT_RAWPICTURE) != 0
&& !theToFlush)
{
// raw video case - directly store the picture in the packet
aPacket.flags |= AV_PKT_FLAG_KEY;
aPacket.stream_index = myVideoStream->index;
aPacket.data = myFrame->data[0];
aPacket.size = sizeof(AVPicture);
aResAv = av_interleaved_write_frame (myAVContext, &aPacket);
}
else
{
// encode the image
myFrame->pts = myFrameCount;

src/Media/FILES (new file)

@ -0,0 +1,17 @@
Media_BufferPool.cxx
Media_BufferPool.hxx
Media_CodecContext.cxx
Media_CodecContext.hxx
Media_FormatContext.cxx
Media_FormatContext.hxx
Media_Frame.cxx
Media_Frame.hxx
Media_Packet.cxx
Media_Packet.hxx
Media_PlayerContext.cxx
Media_PlayerContext.hxx
Media_Scaler.cxx
Media_Scaler.hxx
Media_Timer.cxx
Media_Timer.hxx
Media_IFrameQueue.hxx


@ -0,0 +1,107 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
// activate some C99 macros like UINT64_C in "stdint.h" which are used by FFmpeg
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#include <Media_BufferPool.hxx>
#include <Media_Frame.hxx>
#ifdef HAVE_FFMPEG
#include <Standard_WarningsDisable.hxx>
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
};
#include <Standard_WarningsRestore.hxx>
#endif
IMPLEMENT_STANDARD_RTTIEXT(Media_BufferPool, Standard_Transient)
// =======================================================================
// function : Media_BufferPool
// purpose :
// =======================================================================
Media_BufferPool::Media_BufferPool()
: myPool (NULL),
myBufferSize (0)
{
//
}
// =======================================================================
// function : ~Media_BufferPool
// purpose :
// =======================================================================
Media_BufferPool::~Media_BufferPool()
{
Release();
}
// =======================================================================
// function : Release
// purpose :
// =======================================================================
void Media_BufferPool::Release()
{
if (myPool != NULL)
{
#ifdef HAVE_FFMPEG
av_buffer_pool_uninit (&myPool);
#endif
myPool = NULL;
myBufferSize = 0;
}
}
// =======================================================================
// function : Init
// purpose :
// =======================================================================
bool Media_BufferPool::Init (int theBufferSize)
{
if (myBufferSize == theBufferSize)
{
return true;
}
Release();
if (theBufferSize == 0)
{
return true;
}
#ifdef HAVE_FFMPEG
myPool = av_buffer_pool_init (theBufferSize, NULL);
#endif
myBufferSize = theBufferSize;
return myPool != NULL;
}
// =======================================================================
// function : GetBuffer
// purpose :
// =======================================================================
AVBufferRef* Media_BufferPool::GetBuffer()
{
#ifdef HAVE_FFMPEG
return av_buffer_pool_get (myPool);
#else
return NULL;
#endif
}


@ -0,0 +1,61 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Media_BufferPool_HeaderFile
#define _Media_BufferPool_HeaderFile
#include <Standard_Transient.hxx>
#include <Standard_Type.hxx>
struct AVBufferPool;
struct AVBufferRef;
//! AVBufferPool wrapper.
class Media_BufferPool : public Standard_Transient
{
DEFINE_STANDARD_RTTIEXT(Media_BufferPool, Standard_Transient)
public:
//! Empty constructor
Standard_EXPORT Media_BufferPool();
//! Destructor
Standard_EXPORT ~Media_BufferPool();
//! Release the pool (reference-counted buffer will be released when needed).
Standard_EXPORT void Release();
//! (Re-)initialize the pool.
Standard_EXPORT bool Init (int theBufferSize);
//! Return buffer size within the pool.
int BufferSize() const { return myBufferSize; }
//! Get new buffer from the pool.
Standard_EXPORT AVBufferRef* GetBuffer();
private:
// prevent copies
Media_BufferPool (const Media_BufferPool& theCopy);
Media_BufferPool& operator=(const Media_BufferPool& theCopy);
protected:
AVBufferPool* myPool;
int myBufferSize;
};
#endif // _Media_BufferPool_HeaderFile


@ -0,0 +1,277 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
// activate some C99 macros like UINT64_C in "stdint.h" which are used by FFmpeg
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#include <Media_CodecContext.hxx>
#include <Media_Frame.hxx>
#include <Media_FormatContext.hxx>
#include <Message.hxx>
#include <Message_Messenger.hxx>
#include <OSD_Parallel.hxx>
#ifdef HAVE_FFMPEG
#include <Standard_WarningsDisable.hxx>
extern "C"
{
#include <libavformat/avformat.h>
};
#include <Standard_WarningsRestore.hxx>
#endif
IMPLEMENT_STANDARD_RTTIEXT(Media_CodecContext, Standard_Transient)
// =======================================================================
// function : Media_CodecContext
// purpose :
// =======================================================================
Media_CodecContext::Media_CodecContext()
: myCodecCtx (NULL),
myCodec (NULL),
myPtsStartBase (0.0),
myPtsStartStream(0.0),
myTimeBase (1.0),
myStreamIndex (0),
myPixelAspectRatio (1.0f)
{
#ifdef HAVE_FFMPEG
myCodecCtx = avcodec_alloc_context3 (NULL);
#endif
}
// =======================================================================
// function : ~Media_CodecContext
// purpose :
// =======================================================================
Media_CodecContext::~Media_CodecContext()
{
Close();
}
// =======================================================================
// function : Init
// purpose :
// =======================================================================
bool Media_CodecContext::Init (const AVStream& theStream,
double thePtsStartBase,
int theNbThreads)
{
#ifdef HAVE_FFMPEG
return Init (theStream, thePtsStartBase, theNbThreads, AV_CODEC_ID_NONE);
#else
return Init (theStream, thePtsStartBase, theNbThreads, 0);
#endif
}
// =======================================================================
// function : Init
// purpose :
// =======================================================================
bool Media_CodecContext::Init (const AVStream& theStream,
double thePtsStartBase,
int theNbThreads,
int theCodecId)
{
#ifdef HAVE_FFMPEG
myStreamIndex = theStream.index;
if (avcodec_parameters_to_context (myCodecCtx, theStream.codecpar) < 0)
{
Message::DefaultMessenger()->Send ("Internal error: unable to copy codec parameters", Message_Fail);
Close();
return false;
}
myTimeBase = av_q2d (theStream.time_base);
myPtsStartBase = thePtsStartBase;
myPtsStartStream = Media_FormatContext::StreamUnitsToSeconds (theStream, theStream.start_time);
const AVCodecID aCodecId = theCodecId != AV_CODEC_ID_NONE ? (AVCodecID )theCodecId : theStream.codecpar->codec_id;
myCodec = avcodec_find_decoder (aCodecId);
if (myCodec == NULL)
{
Message::DefaultMessenger()->Send ("FFmpeg: unable to find decoder", Message_Fail);
Close();
return false;
}
myCodecCtx->codec_id = aCodecId;
AVDictionary* anOpts = NULL;
av_dict_set (&anOpts, "refcounted_frames", "1", 0);
if (theStream.codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
myCodecCtx->thread_count = theNbThreads <= -1 ? OSD_Parallel::NbLogicalProcessors() : theNbThreads;
}
if (avcodec_open2 (myCodecCtx, myCodec, &anOpts) < 0)
{
Message::DefaultMessenger()->Send ("FFmpeg: unable to open decoder", Message_Fail);
Close();
return false;
}
myPixelAspectRatio = 1.0f;
if (theStream.sample_aspect_ratio.num && av_cmp_q(theStream.sample_aspect_ratio, myCodecCtx->sample_aspect_ratio))
{
myPixelAspectRatio = float(theStream.sample_aspect_ratio.num) / float(theStream.sample_aspect_ratio.den);
}
else
{
if (myCodecCtx->sample_aspect_ratio.num == 0
|| myCodecCtx->sample_aspect_ratio.den == 0)
{
myPixelAspectRatio = 1.0f;
}
else
{
myPixelAspectRatio = float(myCodecCtx->sample_aspect_ratio.num) / float(myCodecCtx->sample_aspect_ratio.den);
}
}
if (theStream.codecpar->codec_type == AVMEDIA_TYPE_VIDEO
&& (myCodecCtx->width <= 0
|| myCodecCtx->height <= 0))
{
Message::DefaultMessenger()->Send ("FFmpeg: video stream has invalid dimensions", Message_Fail);
Close();
return false;
}
return true;
#else
(void )&theStream;
(void )thePtsStartBase;
(void )theNbThreads;
(void )theCodecId;
return false;
#endif
}
// =======================================================================
// function : Close
// purpose :
// =======================================================================
void Media_CodecContext::Close()
{
if (myCodecCtx != NULL)
{
#ifdef HAVE_FFMPEG
avcodec_free_context (&myCodecCtx);
#endif
}
}
// =======================================================================
// function : Flush
// purpose :
// =======================================================================
void Media_CodecContext::Flush()
{
if (myCodecCtx != NULL)
{
#ifdef HAVE_FFMPEG
avcodec_flush_buffers (myCodecCtx);
#endif
}
}
// =======================================================================
// function : SizeX
// purpose :
// =======================================================================
int Media_CodecContext::SizeX() const
{
#ifdef HAVE_FFMPEG
return (myCodecCtx != NULL) ? myCodecCtx->width : 0;
#else
return 0;
#endif
}
// =======================================================================
// function : SizeY
// purpose :
// =======================================================================
int Media_CodecContext::SizeY() const
{
#ifdef HAVE_FFMPEG
return (myCodecCtx != NULL) ? myCodecCtx->height : 0;
#else
return 0;
#endif
}
// =======================================================================
// function : CanProcessPacket
// purpose :
// =======================================================================
bool Media_CodecContext::CanProcessPacket (const Handle(Media_Packet)& thePacket) const
{
return !thePacket.IsNull()
&& myStreamIndex == thePacket->StreamIndex();
}
// =======================================================================
// function : SendPacket
// purpose :
// =======================================================================
bool Media_CodecContext::SendPacket (const Handle(Media_Packet)& thePacket)
{
if (!CanProcessPacket (thePacket))
{
return false;
}
#ifdef HAVE_FFMPEG
const int aRes = avcodec_send_packet (myCodecCtx, thePacket->Packet());
if (aRes < 0 && aRes != AVERROR_EOF)
{
return false;
}
return true;
#else
return false;
#endif
}
// =======================================================================
// function : ReceiveFrame
// purpose :
// =======================================================================
bool Media_CodecContext::ReceiveFrame (const Handle(Media_Frame)& theFrame)
{
if (theFrame.IsNull())
{
return false;
}
#ifdef HAVE_FFMPEG
const int aRes2 = avcodec_receive_frame (myCodecCtx, theFrame->ChangeFrame());
if (aRes2 < 0)
{
return false;
}
const int64_t aPacketPts = theFrame->BestEffortTimestamp() != AV_NOPTS_VALUE ? theFrame->BestEffortTimestamp() : 0;
const double aFramePts = double(aPacketPts) * myTimeBase - myPtsStartBase;
theFrame->SetPts (aFramePts);
return true;
#else
return false;
#endif
}


@ -0,0 +1,100 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Media_CodecContext_HeaderFile
#define _Media_CodecContext_HeaderFile
#include <Media_Packet.hxx>
struct AVCodec;
struct AVCodecContext;
struct AVStream;
class Media_Frame;
//! AVCodecContext wrapper - the coder/decoder holder.
class Media_CodecContext : public Standard_Transient
{
DEFINE_STANDARD_RTTIEXT(Media_CodecContext, Standard_Transient)
public:
//! Constructor.
Standard_EXPORT Media_CodecContext();
//! Destructor.
Standard_EXPORT virtual ~Media_CodecContext();
//! Return context.
AVCodecContext* Context() const { return myCodecCtx; }
//! Open codec specified within the stream.
//! @param theStream stream to open
//! @param thePtsStartBase PTS start in seconds
//! @param theNbThreads number of threads to use for the AVMEDIA_TYPE_VIDEO stream;
//! -1 means OSD_Parallel::NbLogicalProcessors(),
//! 0 means auto by FFmpeg itself
//! >0 means specified number of threads (decoder should support multi-threading to take effect)
Standard_EXPORT bool Init (const AVStream& theStream,
double thePtsStartBase,
int theNbThreads = -1);
//! Open codec.
//! @param theStream stream to open
//! @param thePtsStartBase PTS start in seconds
//! @param theNbThreads number of threads to use for the AVMEDIA_TYPE_VIDEO stream;
//! -1 means OSD_Parallel::NbLogicalProcessors(),
//! 0 means auto by FFmpeg itself
//! >0 means specified number of threads (decoder should support multi-threading to take effect)
//! @param theCodecId codec (AVCodecID) to open
Standard_EXPORT bool Init (const AVStream& theStream,
double thePtsStartBase,
int theNbThreads,
int theCodecId);
//! Close input.
Standard_EXPORT void Close();
//! @return source frame width
Standard_EXPORT int SizeX() const;
//! @return source frame height
Standard_EXPORT int SizeY() const;
//! Return stream index.
int StreamIndex() const { return myStreamIndex; }
//! avcodec_flush_buffers() wrapper.
Standard_EXPORT void Flush();
//! Return true if packet belongs to this stream.
Standard_EXPORT bool CanProcessPacket (const Handle(Media_Packet)& thePacket) const;
//! avcodec_send_packet() wrapper.
Standard_EXPORT bool SendPacket (const Handle(Media_Packet)& thePacket);
//! avcodec_receive_frame() wrapper.
Standard_EXPORT bool ReceiveFrame (const Handle(Media_Frame)& theFrame);
protected:
AVCodecContext* myCodecCtx; //!< codec context
AVCodec* myCodec; //!< opened codec
double myPtsStartBase; //!< starting PTS in context
double myPtsStartStream; //!< starting PTS in the stream
double myTimeBase; //!< stream timebase
int myStreamIndex; //!< stream index
float myPixelAspectRatio; //!< pixel aspect ratio
};
#endif // _Media_CodecContext_HeaderFile


@ -0,0 +1,558 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
// activate some C99 macros like UINT64_C in "stdint.h" which are used by FFmpeg
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#include <Media_FormatContext.hxx>
#include <Message.hxx>
#include <Message_Messenger.hxx>
#ifdef HAVE_FFMPEG
#include <Standard_WarningsDisable.hxx>
extern "C"
{
#include <libavformat/avformat.h>
};
#include <Standard_WarningsRestore.hxx>
#endif
IMPLEMENT_STANDARD_RTTIEXT(Media_FormatContext, Standard_Transient)
namespace
{
static const double THE_SECONDS_IN_HOUR = 3600.0;
static const double THE_SECONDS_IN_MINUTE = 60.0;
static const double THE_SECOND_IN_HOUR = 1.0 / THE_SECONDS_IN_HOUR;
static const double THE_SECOND_IN_MINUTE = 1.0 / THE_SECONDS_IN_MINUTE;
#ifdef HAVE_FFMPEG
static const AVRational ST_AV_TIME_BASE_Q = {1, AV_TIME_BASE};
static const double ST_AV_TIME_BASE_D = av_q2d (ST_AV_TIME_BASE_Q);
//! Format framerate value.
static TCollection_AsciiString formatFps (double theVal)
{
const uint64_t aVal = uint64_t(theVal * 100.0 + 0.5);
char aBuff[256];
if(aVal == 0)
{
Sprintf (aBuff, "%1.4f", theVal);
}
else if (aVal % 100)
{
Sprintf (aBuff, "%3.2f", theVal);
}
else if (aVal % (100 * 1000))
{
Sprintf (aBuff, "%1.0f", theVal);
}
else
{
Sprintf (aBuff, "%1.0fk", theVal / 1000);
}
return aBuff;
}
#endif
}
// =======================================================================
// function : FormatAVErrorDescription
// purpose :
// =======================================================================
TCollection_AsciiString Media_FormatContext::FormatAVErrorDescription (int theErrCodeAV)
{
#ifdef HAVE_FFMPEG
char aBuff[4096];
memset (aBuff, 0, sizeof(aBuff));
if (av_strerror (theErrCodeAV, aBuff, 4096) != -1)
{
return TCollection_AsciiString (aBuff);
}
#ifdef _MSC_VER
wchar_t aBuffW[4096];
memset (aBuffW, 0, sizeof(aBuffW));
if (_wcserror_s (aBuffW, 4096, AVUNERROR(theErrCodeAV)) == 0)
{
return TCollection_AsciiString (aBuffW);
}
#elif defined(_WIN32)
// MinGW has only thread-unsafe variant
char* anErrDesc = strerror (AVUNERROR(theErrCodeAV));
if (anErrDesc != NULL)
{
return TCollection_AsciiString (anErrDesc);
}
#endif
return TCollection_AsciiString (aBuff);
#else
return TCollection_AsciiString ("AVError #") + theErrCodeAV;
#endif
}
// =======================================================================
// function : FormatUnitsToSeconds
// purpose :
// =======================================================================
double Media_FormatContext::FormatUnitsToSeconds (int64_t theTimeUnits)
{
#ifdef HAVE_FFMPEG
return (theTimeUnits != AV_NOPTS_VALUE)
? (ST_AV_TIME_BASE_D * theTimeUnits) : 0.0;
#else
(void )theTimeUnits;
return 0.0;
#endif
}
// =======================================================================
// function : UnitsToSeconds
// purpose :
// =======================================================================
double Media_FormatContext::UnitsToSeconds (const AVRational& theTimeBase,
int64_t theTimeUnits)
{
#ifdef HAVE_FFMPEG
return (theTimeUnits != AV_NOPTS_VALUE)
? (av_q2d (theTimeBase) * theTimeUnits) : 0.0;
#else
(void )&theTimeBase;
(void )theTimeUnits;
return 0.0;
#endif
}
// =======================================================================
// function : StreamUnitsToSeconds
// purpose :
// =======================================================================
double Media_FormatContext::StreamUnitsToSeconds (const AVStream& theStream,
int64_t theTimeUnits)
{
#ifdef HAVE_FFMPEG
return UnitsToSeconds (theStream.time_base, theTimeUnits);
#else
(void )&theStream;
(void )theTimeUnits;
return 0.0;
#endif
}
// =======================================================================
// function : SecondsToUnits
// purpose :
// =======================================================================
int64_t Media_FormatContext::SecondsToUnits (double theTimeSeconds)
{
#ifdef HAVE_FFMPEG
return int64_t(theTimeSeconds / ST_AV_TIME_BASE_D);
#else
(void )theTimeSeconds;
return 0;
#endif
}
// =======================================================================
// function : SecondsToUnits
// purpose :
// =======================================================================
int64_t Media_FormatContext::SecondsToUnits (const AVRational& theTimeBase,
double theTimeSeconds)
{
#ifdef HAVE_FFMPEG
return int64_t(theTimeSeconds / av_q2d (theTimeBase));
#else
(void )&theTimeBase;
(void )theTimeSeconds;
return 0;
#endif
}
// =======================================================================
// function : StreamSecondsToUnits
// purpose :
// =======================================================================
int64_t Media_FormatContext::StreamSecondsToUnits (const AVStream& theStream,
double theTimeSeconds)
{
#ifdef HAVE_FFMPEG
return SecondsToUnits (theStream.time_base, theTimeSeconds);
#else
(void )&theStream;
(void )theTimeSeconds;
return 0;
#endif
}
// =======================================================================
// function : Media_FormatContext
// purpose :
// =======================================================================
Media_FormatContext::Media_FormatContext()
: myFormatCtx (NULL),
myPtsStartBase(0.0),
myDuration (0.0)
{
//
}
// =======================================================================
// function : ~Media_FormatContext
// purpose :
// =======================================================================
Media_FormatContext::~Media_FormatContext()
{
Close();
}
// =======================================================================
// function : NbSteams
// purpose :
// =======================================================================
unsigned int Media_FormatContext::NbSteams() const
{
#ifdef HAVE_FFMPEG
return myFormatCtx->nb_streams;
#else
return 0;
#endif
}
// =======================================================================
// function : Stream
// purpose :
// =======================================================================
const AVStream& Media_FormatContext::Stream (unsigned int theIndex) const
{
#ifdef HAVE_FFMPEG
return *myFormatCtx->streams[theIndex];
#else
(void )theIndex;
throw Standard_ProgramError("Media_FormatContext::Stream()");
#endif
}
// =======================================================================
// function : OpenInput
// purpose :
// =======================================================================
bool Media_FormatContext::OpenInput (const TCollection_AsciiString& theInput)
{
#ifdef HAVE_FFMPEG
const int avErrCode = avformat_open_input (&myFormatCtx, theInput.ToCString(), NULL, NULL);
if (avErrCode != 0)
{
Message::DefaultMessenger()->Send (TCollection_AsciiString ("FFmpeg: Couldn't open video file '") + theInput
+ "'\nError: " + FormatAVErrorDescription (avErrCode), Message_Fail);
Close();
return false;
}
// retrieve stream information
if (avformat_find_stream_info (myFormatCtx, NULL) < 0)
{
Message::DefaultMessenger()->Send (TCollection_AsciiString ("FFmpeg: Couldn't find stream information in '") + theInput + "'", Message_Fail);
Close();
return false;
}
#ifdef _DEBUG
av_dump_format (myFormatCtx, 0, theInput.ToCString(), false);
#endif
myDuration = 0.0;
myPtsStartBase = 0.0;
TCollection_AsciiString anExt = theInput;
anExt.LowerCase();
if (anExt.EndsWith (".png")
|| anExt.EndsWith (".jpg")
|| anExt.EndsWith (".jpeg")
|| anExt.EndsWith (".mpo")
|| anExt.EndsWith (".bmp")
|| anExt.EndsWith (".tif")
|| anExt.EndsWith (".tiff"))
{
// black-list images to work around non-zero duration
return true;
}
myDuration = FormatUnitsToSeconds (myFormatCtx->duration);
if (myFormatCtx->nb_streams != 0)
{
myPtsStartBase = 2.e+100;
for (unsigned int aStreamId = 0; aStreamId < myFormatCtx->nb_streams; ++aStreamId)
{
const AVStream& aStream = *myFormatCtx->streams[aStreamId];
myPtsStartBase = Min (myPtsStartBase, StreamUnitsToSeconds (aStream, aStream.start_time));
myDuration = Max (myDuration, StreamUnitsToSeconds (aStream, aStream.duration));
}
}
return true;
#else
Message::DefaultMessenger()->Send ("Error: FFmpeg library is unavailable", Message_Fail);
(void )theInput;
return false;
#endif
}
// =======================================================================
// function : Close
// purpose :
// =======================================================================
void Media_FormatContext::Close()
{
if (myFormatCtx != NULL)
{
#ifdef HAVE_FFMPEG
avformat_close_input (&myFormatCtx);
//avformat_free_context (myFormatCtx);
#endif
}
}
// =======================================================================
// function : FormatTime
// purpose :
// =======================================================================
TCollection_AsciiString Media_FormatContext::FormatTime (double theSeconds)
{
double aSecIn = theSeconds;
unsigned int aHours = (unsigned int )(aSecIn * THE_SECOND_IN_HOUR);
aSecIn -= double(aHours) * THE_SECONDS_IN_HOUR;
unsigned int aMinutes = (unsigned int )(aSecIn * THE_SECOND_IN_MINUTE);
aSecIn -= double(aMinutes) * THE_SECONDS_IN_MINUTE;
unsigned int aSeconds = (unsigned int )aSecIn;
aSecIn -= double(aSeconds);
double aMilliSeconds = 1000.0 * aSecIn;
char aBuffer[64];
if (aHours > 0)
{
Sprintf (aBuffer, "%02u:%02u:%02u", aHours, aMinutes, aSeconds);
return aBuffer;
}
else if (aMinutes > 0)
{
Sprintf (aBuffer, "%02u:%02u", aMinutes, aSeconds);
return aBuffer;
}
else if (aSeconds > 0)
{
Sprintf (aBuffer, "%2u s", aSeconds);
return aBuffer;
}
return TCollection_AsciiString (aMilliSeconds) + " ms";
}
// =======================================================================
// function : FormatTimeProgress
// purpose :
// =======================================================================
TCollection_AsciiString Media_FormatContext::FormatTimeProgress (double theProgress,
double theDuration)
{
double aSecIn1 = theProgress;
unsigned int aHours1 = (unsigned int )(aSecIn1 * THE_SECOND_IN_HOUR);
aSecIn1 -= double(aHours1) * THE_SECONDS_IN_HOUR;
unsigned int aMinutes1 = (unsigned int )(aSecIn1 * THE_SECOND_IN_MINUTE);
aSecIn1 -= double(aMinutes1) * THE_SECONDS_IN_MINUTE;
unsigned int aSeconds1 = (unsigned int )aSecIn1;
aSecIn1 -= double(aSeconds1);
double aSecIn2 = theDuration;
unsigned int aHours2 = (unsigned int )(aSecIn2 * THE_SECOND_IN_HOUR);
aSecIn2 -= double(aHours2) * THE_SECONDS_IN_HOUR;
unsigned int aMinutes2 = (unsigned int )(aSecIn2 * THE_SECOND_IN_MINUTE);
aSecIn2 -= double(aMinutes2) * THE_SECONDS_IN_MINUTE;
unsigned int aSeconds2 = (unsigned int )aSecIn2;
aSecIn2 -= double(aSeconds2);
char aBuffer[256];
if (aHours1 > 0
|| aHours2 > 0)
{
Sprintf (aBuffer, "%02u:%02u:%02u / %02u:%02u:%02u", aHours1, aMinutes1, aSeconds1, aHours2, aMinutes2, aSeconds2);
return aBuffer;
}
Sprintf (aBuffer, "%02u:%02u / %02u:%02u", aMinutes1, aSeconds1, aMinutes2, aSeconds2);
return aBuffer;
}
// =======================================================================
// function : StreamInfo
// purpose :
// =======================================================================
TCollection_AsciiString Media_FormatContext::StreamInfo (unsigned int theIndex,
AVCodecContext* theCodecCtx) const
{
#ifdef HAVE_FFMPEG
const AVStream& aStream = *myFormatCtx->streams[theIndex];
AVCodecContext* aCodecCtx = theCodecCtx;
if (aCodecCtx == NULL)
{
Standard_DISABLE_DEPRECATION_WARNINGS
aCodecCtx = aStream.codec;
Standard_ENABLE_DEPRECATION_WARNINGS
}
char aFrmtBuff[4096] = {};
avcodec_string (aFrmtBuff, sizeof(aFrmtBuff), aCodecCtx, 0);
TCollection_AsciiString aStreamInfo (aFrmtBuff);
if (aStream.sample_aspect_ratio.num && av_cmp_q(aStream.sample_aspect_ratio, aStream.codecpar->sample_aspect_ratio))
{
AVRational aDispAspectRatio;
av_reduce (&aDispAspectRatio.num, &aDispAspectRatio.den,
aStream.codecpar->width * int64_t(aStream.sample_aspect_ratio.num),
aStream.codecpar->height * int64_t(aStream.sample_aspect_ratio.den),
1024 * 1024);
aStreamInfo = aStreamInfo + ", SAR " + aStream.sample_aspect_ratio.num + ":" + aStream.sample_aspect_ratio.den
+ " DAR " + aDispAspectRatio.num + ":" + aDispAspectRatio.den;
}
if (aStream.codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
if (aStream.avg_frame_rate.den != 0 && aStream.avg_frame_rate.num != 0)
{
aStreamInfo += TCollection_AsciiString(", ") + formatFps (av_q2d (aStream.avg_frame_rate)) + " fps";
}
if (aStream.r_frame_rate.den != 0 && aStream.r_frame_rate.num != 0)
{
aStreamInfo += TCollection_AsciiString(", ") + formatFps (av_q2d (aStream.r_frame_rate)) + " tbr";
}
if (aStream.time_base.den != 0 && aStream.time_base.num != 0)
{
aStreamInfo += TCollection_AsciiString(", ") + formatFps(1 / av_q2d (aStream.time_base)) + " tbn";
}
if (aCodecCtx->time_base.den != 0 && aCodecCtx->time_base.num != 0)
{
aStreamInfo += TCollection_AsciiString(", ") + formatFps(1 / av_q2d (aCodecCtx->time_base)) + " tbc";
}
}
if (myDuration > 0.0)
{
aStreamInfo += TCollection_AsciiString(", duration: ") + FormatTime (myDuration);
}
return aStreamInfo;
#else
(void )theIndex;
(void )theCodecCtx;
return TCollection_AsciiString();
#endif
}
// =======================================================================
// function : ReadPacket
// purpose :
// =======================================================================
bool Media_FormatContext::ReadPacket (const Handle(Media_Packet)& thePacket)
{
if (thePacket.IsNull())
{
return false;
}
#ifdef HAVE_FFMPEG
return av_read_frame (myFormatCtx, thePacket->ChangePacket()) >= 0;
#else
return false;
#endif
}
// =======================================================================
// function : SeekStream
// purpose :
// =======================================================================
bool Media_FormatContext::SeekStream (unsigned int theStreamId,
double theSeekPts,
bool theToSeekBack)
{
#ifdef HAVE_FFMPEG
const int aFlags = theToSeekBack ? AVSEEK_FLAG_BACKWARD : 0;
AVStream& aStream = *myFormatCtx->streams[theStreamId];
if ((aStream.disposition & AV_DISPOSITION_ATTACHED_PIC) != 0)
{
return false;
}
int64_t aSeekTarget = StreamSecondsToUnits (aStream, theSeekPts + StreamUnitsToSeconds (aStream, aStream.start_time));
bool isSeekDone = av_seek_frame (myFormatCtx, theStreamId, aSeekTarget, aFlags) >= 0;
// try 10 more times in backward direction to work around huge gaps between key frames;
// this will not work for some streams with undefined cur_dts (AV_NOPTS_VALUE)!
for (int aTries = 10; isSeekDone && theToSeekBack && aTries > 0 && (aStream.cur_dts > aSeekTarget); --aTries)
{
aSeekTarget -= StreamSecondsToUnits (aStream, 1.0);
isSeekDone = av_seek_frame (myFormatCtx, theStreamId, aSeekTarget, aFlags) >= 0;
}
if (isSeekDone)
{
return true;
}
TCollection_AsciiString aStreamType = aStream.codecpar->codec_type == AVMEDIA_TYPE_VIDEO
? "Video"
: (aStream.codecpar->codec_type == AVMEDIA_TYPE_AUDIO
? "Audio"
: "");
Message::DefaultMessenger()->Send (TCollection_AsciiString ("Error while seeking ") + aStreamType + " stream to "
+ theSeekPts + " sec (" + (theSeekPts + StreamUnitsToSeconds (aStream, aStream.start_time)) + " sec)",
Message_Warning);
return false;
#else
(void )theStreamId;
(void )theSeekPts;
(void )theToSeekBack;
return false;
#endif
}
// =======================================================================
// function : Seek
// purpose :
// =======================================================================
bool Media_FormatContext::Seek (double theSeekPts,
bool theToSeekBack)
{
#ifdef HAVE_FFMPEG
const int aFlags = theToSeekBack ? AVSEEK_FLAG_BACKWARD : 0;
int64_t aSeekTarget = SecondsToUnits (theSeekPts);
if (av_seek_frame (myFormatCtx, -1, aSeekTarget, aFlags) >= 0)
{
return true;
}
const char* aFileName =
#if(LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(58, 7, 100))
myFormatCtx->url;
#else
myFormatCtx->filename;
#endif
Message::DefaultMessenger()->Send (TCollection_AsciiString("Disaster! Seeking to ") + theSeekPts + " [" + aFileName + "] has failed.", Message_Warning);
return false;
#else
(void )theSeekPts;
(void )theToSeekBack;
return false;
#endif
}

View File

@ -0,0 +1,133 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Media_FormatContext_HeaderFile
#define _Media_FormatContext_HeaderFile
#include <Media_Packet.hxx>
#include <TCollection_AsciiString.hxx>
struct AVCodecContext;
struct AVFormatContext;
struct AVStream;
struct AVRational;
//! AVFormatContext wrapper - the media input/output stream holder.
class Media_FormatContext : public Standard_Transient
{
DEFINE_STANDARD_RTTIEXT(Media_FormatContext, Standard_Transient)
public:
//! Returns string description for AVError code.
Standard_EXPORT static TCollection_AsciiString FormatAVErrorDescription (int theErrCodeAV);
//! Convert time units into seconds for context.
//! @param theTimeUnits value to convert
//! @return converted time units in seconds
Standard_EXPORT static double FormatUnitsToSeconds (int64_t theTimeUnits);
//! Convert time units into seconds. Returns zero for invalid value.
//! @param theTimeBase the timebase
//! @param theTimeUnits value to convert
//! @return converted time units in seconds
Standard_EXPORT static double UnitsToSeconds (const AVRational& theTimeBase,
int64_t theTimeUnits);
//! Convert time units into seconds using stream base.
//! @param theStream the stream;
//! @param theTimeUnits value to convert;
//! @return converted time units in seconds.
Standard_EXPORT static double StreamUnitsToSeconds (const AVStream& theStream,
int64_t theTimeUnits);
//! Convert seconds into time units for context.
//! @param theTimeSeconds value to convert
//! @return time units
Standard_EXPORT static int64_t SecondsToUnits (double theTimeSeconds);
//! Convert seconds into time units.
//! @param theTimeBase the timebase
//! @param theTimeSeconds value to convert
//! @return time units
Standard_EXPORT static int64_t SecondsToUnits (const AVRational& theTimeBase,
double theTimeSeconds);
//! Convert seconds into time units for stream.
//! @param theStream the stream
//! @param theTimeSeconds value to convert
//! @return time units
Standard_EXPORT static int64_t StreamSecondsToUnits (const AVStream& theStream,
double theTimeSeconds);
//! Time formatter.
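//! For example (illustrative values): 3725.0 seconds would be formatted as "01:02:05", and 125.0 seconds as "02:05".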
Standard_EXPORT static TCollection_AsciiString FormatTime (double theSeconds);
//! Time progress / duration formatter.
Standard_EXPORT static TCollection_AsciiString FormatTimeProgress (double theProgress,
double theDuration);
public:
//! Constructor.
Standard_EXPORT Media_FormatContext();
//! Destructor.
Standard_EXPORT virtual ~Media_FormatContext();
//! Return context.
AVFormatContext* Context() const { return myFormatCtx; }
//! Open input.
Standard_EXPORT bool OpenInput (const TCollection_AsciiString& theInput);
//! Close input.
Standard_EXPORT void Close();
//! Return the number of streams.
Standard_EXPORT unsigned int NbSteams() const;
//! Return stream.
Standard_EXPORT const AVStream& Stream (unsigned int theIndex) const;
//! Format stream info.
Standard_EXPORT TCollection_AsciiString StreamInfo (unsigned int theIndex,
AVCodecContext* theCodecCtx = NULL) const;
//! Return PTS start base in seconds.
double PtsStartBase() const { return myPtsStartBase; }
//! Return duration in seconds.
double Duration() const { return myDuration; }
//! av_read_frame() wrapper.
Standard_EXPORT bool ReadPacket (const Handle(Media_Packet)& thePacket);
//! Seek stream to specified position.
Standard_EXPORT bool SeekStream (unsigned int theStreamId,
double theSeekPts,
bool theToSeekBack);
//! Seek context to specified position.
Standard_EXPORT bool Seek (double theSeekPts,
bool theToSeekBack);
protected:
AVFormatContext* myFormatCtx; //!< format context
double myPtsStartBase; //!< start time
double myDuration; //!< duration
};
#endif // _Media_FormatContext_HeaderFile
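
A minimal usage sketch of the demuxer API declared above (illustration only, not part of the change-set); it assumes an FFmpeg-enabled build, and the helper name and input path are placeholders:

#include <Media_FormatContext.hxx>
#include <Media_Packet.hxx>
#include <Message.hxx>
#include <Message_Messenger.hxx>

//! Hypothetical helper: print per-stream info and count packets of the given input.
static void printStreamsAndCountPackets (const TCollection_AsciiString& theInput)
{
  Handle(Media_FormatContext) aFormatCtx = new Media_FormatContext();
  if (!aFormatCtx->OpenInput (theInput))
  {
    return; // error message has already been printed by OpenInput()
  }
  for (unsigned int aStreamId = 0; aStreamId < aFormatCtx->NbSteams(); ++aStreamId)
  {
    Message::DefaultMessenger()->Send (aFormatCtx->StreamInfo (aStreamId), Message_Info);
  }
  Handle(Media_Packet) aPacket = new Media_Packet();
  int aNbPackets = 0;
  while (aFormatCtx->ReadPacket (aPacket))
  {
    ++aNbPackets; // a real application would feed the packet into Media_CodecContext here
    aPacket->Unref();
  }
  Message::DefaultMessenger()->Send (TCollection_AsciiString ("Packets read: ") + aNbPackets, Message_Info);
  aFormatCtx->Close();
}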

296
src/Media/Media_Frame.cxx Normal file
View File

@ -0,0 +1,296 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
// activate some C99 macros like UINT64_C in "stdint.h" which are used by FFmpeg
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#include <Media_Frame.hxx>
#include <Media_Scaler.hxx>
#ifdef HAVE_FFMPEG
#include <Standard_WarningsDisable.hxx>
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
};
#include <Standard_WarningsRestore.hxx>
#endif
IMPLEMENT_STANDARD_RTTIEXT(Media_Frame, Standard_Transient)
// =======================================================================
// function : FormatFFmpeg2Occt
// purpose :
// =======================================================================
Image_Format Media_Frame::FormatFFmpeg2Occt (int theFormat)
{
#ifdef HAVE_FFMPEG
switch (theFormat)
{
case AV_PIX_FMT_RGBA:
return Image_Format_RGBA;
case AV_PIX_FMT_BGRA:
return Image_Format_BGRA;
case AV_PIX_FMT_RGB0:
return Image_Format_RGB32;
case AV_PIX_FMT_BGR0:
return Image_Format_BGR32;
case AV_PIX_FMT_RGB24:
return Image_Format_RGB;
case AV_PIX_FMT_BGR24:
return Image_Format_BGR;
case AV_PIX_FMT_GRAY8:
return Image_Format_Gray;
default:
return Image_Format_UNKNOWN;
}
#else
(void )theFormat;
return Image_Format_UNKNOWN;
#endif
}
// =======================================================================
// function : FormatOcct2FFmpeg
// purpose :
// =======================================================================
int Media_Frame::FormatOcct2FFmpeg (Image_Format theFormat)
{
#ifdef HAVE_FFMPEG
switch (theFormat)
{
case Image_Format_RGBA:
return AV_PIX_FMT_RGBA;
case Image_Format_BGRA:
return AV_PIX_FMT_BGRA;
case Image_Format_RGB32:
return AV_PIX_FMT_RGB0;
case Image_Format_BGR32:
return AV_PIX_FMT_BGR0;
case Image_Format_RGB:
return AV_PIX_FMT_RGB24;
case Image_Format_BGR:
return AV_PIX_FMT_BGR24;
case Image_Format_Gray:
return AV_PIX_FMT_GRAY8;
case Image_Format_Alpha:
return AV_PIX_FMT_GRAY8;
case Image_Format_GrayF:
case Image_Format_AlphaF:
case Image_Format_RGBAF:
case Image_Format_RGBF:
case Image_Format_BGRAF:
case Image_Format_BGRF:
case Image_Format_UNKNOWN:
return AV_PIX_FMT_NONE; // unsupported
}
return AV_PIX_FMT_NONE;
#else
(void )theFormat;
return 0;
#endif
}
// =======================================================================
// function : Media_Frame
// purpose :
// =======================================================================
Media_Frame::Media_Frame()
: myFrame (NULL),
myFramePts (0.0),
myPixelRatio(1.0f),
myIsLocked (false)
{
#ifdef HAVE_FFMPEG
myFrame = av_frame_alloc();
#endif
Unref();
}
// =======================================================================
// function : ~Media_Frame
// purpose :
// =======================================================================
Media_Frame::~Media_Frame()
{
#ifdef HAVE_FFMPEG
av_frame_free (&myFrame);
#endif
}
// =======================================================================
// function : Unref
// purpose :
// =======================================================================
void Media_Frame::Unref()
{
#ifdef HAVE_FFMPEG
av_frame_unref (myFrame);
#endif
}
// =======================================================================
// function : IsFullRangeYUV
// purpose :
// =======================================================================
bool Media_Frame::IsFullRangeYUV() const
{
#ifdef HAVE_FFMPEG
return Format() == AV_PIX_FMT_YUVJ420P
|| myFrame->color_range == AVCOL_RANGE_JPEG;
#else
return true;
#endif
}
// =======================================================================
// function : Swap
// purpose :
// =======================================================================
void Media_Frame::Swap (const Handle(Media_Frame)& theFrame1,
const Handle(Media_Frame)& theFrame2)
{
std::swap (theFrame1->myFrame, theFrame2->myFrame);
}
// =======================================================================
// function : IsEmpty
// purpose :
// =======================================================================
bool Media_Frame::IsEmpty() const
{
#ifdef HAVE_FFMPEG
return myFrame->format == -1; // AV_PIX_FMT_NONE
#else
return true;
#endif
}
// =======================================================================
// function : SizeX
// purpose :
// =======================================================================
int Media_Frame::SizeX() const
{
#ifdef HAVE_FFMPEG
return myFrame->width;
#else
return 0;
#endif
}
// =======================================================================
// function : SizeY
// purpose :
// =======================================================================
int Media_Frame::SizeY() const
{
#ifdef HAVE_FFMPEG
return myFrame->height;
#else
return 0;
#endif
}
// =======================================================================
// function : Format
// purpose :
// =======================================================================
int Media_Frame::Format() const
{
#ifdef HAVE_FFMPEG
return myFrame->format;
#else
return 0;
#endif
}
// =======================================================================
// function : Plane
// purpose :
// =======================================================================
uint8_t* Media_Frame::Plane (int thePlaneId) const
{
#ifdef HAVE_FFMPEG
return myFrame->data[thePlaneId];
#else
(void )thePlaneId;
return NULL;
#endif
}
// =======================================================================
// function : LineSize
// purpose :
// =======================================================================
int Media_Frame::LineSize (int thePlaneId) const
{
#ifdef HAVE_FFMPEG
return myFrame->linesize[thePlaneId];
#else
(void )thePlaneId;
return 0;
#endif
}
// =======================================================================
// function : BestEffortTimestamp
// purpose :
// =======================================================================
int64_t Media_Frame::BestEffortTimestamp() const
{
#ifdef HAVE_FFMPEG
return myFrame->best_effort_timestamp;
#else
return 0;
#endif
}
// =======================================================================
// function : InitWrapper
// purpose :
// =======================================================================
bool Media_Frame::InitWrapper (const Handle(Image_PixMap)& thePixMap)
{
Unref();
if (thePixMap.IsNull())
{
return false;
}
#ifdef HAVE_FFMPEG
myFrame->format = FormatOcct2FFmpeg (thePixMap->Format());
if (myFrame->format == AV_PIX_FMT_NONE)
{
return false;
}
myFrame->width = (int )thePixMap->SizeX();
myFrame->height = (int )thePixMap->SizeY();
myFrame->data[0] = (uint8_t* )thePixMap->ChangeData();
myFrame->linesize[0] = (int )thePixMap->SizeRowBytes();
for (int aPlaneIter = 1; aPlaneIter < AV_NUM_DATA_POINTERS; ++aPlaneIter)
{
myFrame->data [aPlaneIter] = NULL;
myFrame->linesize[aPlaneIter] = 0;
}
return true;
#else
return false;
#endif
}

118
src/Media/Media_Frame.hxx Normal file
View File

@ -0,0 +1,118 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Media_Frame_HeaderFile
#define _Media_Frame_HeaderFile
#include <Graphic3d_Vec2.hxx>
#include <Image_PixMap.hxx>
#include <Standard_Transient.hxx>
#include <Standard_Type.hxx>
struct AVFrame;
//! AVFrame wrapper - the frame (decoded image/audio sample data) holder.
class Media_Frame : public Standard_Transient
{
DEFINE_STANDARD_RTTIEXT(Media_Frame, Standard_Transient)
public:
//! Convert pixel format from FFmpeg (AVPixelFormat) to OCCT.
Standard_EXPORT static Image_Format FormatFFmpeg2Occt (int theFormat);
//! Convert pixel format from OCCT to FFmpeg (AVPixelFormat).
//! Returns -1 (AV_PIX_FMT_NONE) if undefined.
Standard_EXPORT static int FormatOcct2FFmpeg (Image_Format theFormat);
//! Swap AVFrame* within two frames.
Standard_EXPORT static void Swap (const Handle(Media_Frame)& theFrame1,
const Handle(Media_Frame)& theFrame2);
public:
//! Empty constructor
Standard_EXPORT Media_Frame();
//! Destructor
Standard_EXPORT virtual ~Media_Frame();
//! Return true if frame does not contain any data.
Standard_EXPORT bool IsEmpty() const;
//! av_frame_unref() wrapper.
Standard_EXPORT void Unref();
//! Return image dimensions.
Graphic3d_Vec2i Size() const { return Graphic3d_Vec2i (SizeX(), SizeY()); }
//! Return image width.
Standard_EXPORT int SizeX() const;
//! Return image height.
Standard_EXPORT int SizeY() const;
//! Return pixel format (AVPixelFormat).
Standard_EXPORT int Format() const;
//! Return TRUE if YUV range is full.
Standard_EXPORT bool IsFullRangeYUV() const;
//! Access data plane for specified Id.
Standard_EXPORT uint8_t* Plane (int thePlaneId) const;
//! @return linesize in bytes for specified data plane
Standard_EXPORT int LineSize (int thePlaneId) const;
//! @return frame timestamp estimated using various heuristics, in stream time base
Standard_EXPORT int64_t BestEffortTimestamp() const;
//! Return frame.
const AVFrame* Frame() const { return myFrame; }
//! Return frame.
AVFrame* ChangeFrame() { return myFrame; }
//! Return presentation timestamp (PTS).
double Pts() const { return myFramePts; }
//! Set presentation timestamp (PTS).
void SetPts (double thePts) { myFramePts = thePts; }
//! Return PAR.
float PixelAspectRatio() const { return myPixelRatio; }
//! Set PAR.
void SetPixelAspectRatio (float theRatio) { myPixelRatio = theRatio; }
//! Return locked state.
bool IsLocked() const { return myIsLocked; }
//! Lock/free frame for edition.
void SetLocked (bool theToLock) { myIsLocked = theToLock; }
public:
//! Wrap allocated image pixmap.
Standard_EXPORT bool InitWrapper (const Handle(Image_PixMap)& thePixMap);
protected:
AVFrame* myFrame; //!< frame
double myFramePts; //!< presentation timestamp
float myPixelRatio; //!< pixel aspect ratio
bool myIsLocked; //!< locked state
};
#endif // _Media_Frame_HeaderFile
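
A short sketch of InitWrapper() usage declared above (illustration only); the helper name is a placeholder, and note that Media_Frame does not own the wrapped memory:

#include <Image_PixMap.hxx>
#include <Media_Frame.hxx>

//! Hypothetical helper: wrap an existing RGB pixmap, so that FFmpeg-side code writes into its memory directly.
//! The pixmap must outlive the wrapper, since Media_Frame does not own the wrapped memory.
static Handle(Media_Frame) wrapPixmap (const Handle(Image_PixMap)& thePixMap)
{
  Handle(Media_Frame) aWrapper = new Media_Frame();
  if (!aWrapper->InitWrapper (thePixMap))
  {
    return Handle(Media_Frame)(); // unsupported pixel format (e.g. floating-point)
  }
  // aWrapper->Plane (0) now points to thePixMap->ChangeData(),
  // so tools like Media_Scaler::Convert() can fill the pixmap without an extra copy
  return aWrapper;
}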

View File

@ -0,0 +1,32 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Media_IFrameQueue_HeaderFile
#define _Media_IFrameQueue_HeaderFile
#include <Media_Frame.hxx>
//! Interface defining frame queuing.
class Media_IFrameQueue
{
public:
//! Lock a frame, i.e. take ownership of a single (not currently displayed) frame from the queue to decode into.
virtual Handle(Media_Frame) LockFrame() = 0;
//! Release the previously locked frame, so that it can be displayed on the screen.
virtual void ReleaseFrame (const Handle(Media_Frame)& theFrame) = 0;
};
#endif // _Media_IFrameQueue_HeaderFile
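
A deliberately simplified single-buffer implementation of this interface (illustration only; the class name is a placeholder, and a real consumer such as Graphic3d_MediaTextureSet needs multi-buffering and proper synchronization between decoding and displaying threads):

#include <Media_Frame.hxx>
#include <Media_IFrameQueue.hxx>

//! Hypothetical single-frame queue.
class MySingleFrameQueue : public Media_IFrameQueue
{
public:
  MySingleFrameQueue() : myFrame (new Media_Frame()), myIsLocked (false) {}

  //! Give the decoder exclusive access to the frame while it is not displayed.
  virtual Handle(Media_Frame) LockFrame() Standard_OVERRIDE
  {
    if (myIsLocked)
    {
      return Handle(Media_Frame)(); // decoder has to retry later
    }
    myIsLocked = true;
    return myFrame;
  }

  //! Mark the decoded frame as ready to be displayed.
  virtual void ReleaseFrame (const Handle(Media_Frame)& theFrame) Standard_OVERRIDE
  {
    (void )theFrame;
    myIsLocked = false;
  }

private:
  Handle(Media_Frame) myFrame;
  volatile bool       myIsLocked;
};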

180
src/Media/Media_Packet.cxx Normal file
View File

@ -0,0 +1,180 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
// activate some C99 macros like UINT64_C in "stdint.h" which are used by FFmpeg
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#include <Media_Packet.hxx>
#ifdef HAVE_FFMPEG
#include <Standard_WarningsDisable.hxx>
extern "C"
{
#include <libavcodec/avcodec.h>
};
#include <Standard_WarningsRestore.hxx>
#endif
IMPLEMENT_STANDARD_RTTIEXT(Media_Packet, Standard_Transient)
// =======================================================================
// function : Media_Packet
// purpose :
// =======================================================================
Media_Packet::Media_Packet()
: myPacket (NULL),
  myDurationSec (0.0)
{
#ifdef HAVE_FFMPEG
myPacket = av_packet_alloc();
#endif
}
// =======================================================================
// function : ~Media_Packet
// purpose :
// =======================================================================
Media_Packet::~Media_Packet()
{
#ifdef HAVE_FFMPEG
av_packet_free (&myPacket);
#endif
}
// =======================================================================
// function : Unref
// purpose :
// =======================================================================
void Media_Packet::Unref()
{
#ifdef HAVE_FFMPEG
av_packet_unref (myPacket);
#endif
}
// =======================================================================
// function : Data
// purpose :
// =======================================================================
const uint8_t* Media_Packet::Data() const
{
#ifdef HAVE_FFMPEG
return myPacket->data;
#else
return NULL;
#endif
}
// =======================================================================
// function : ChangeData
// purpose :
// =======================================================================
uint8_t* Media_Packet::ChangeData()
{
#ifdef HAVE_FFMPEG
return myPacket->data;
#else
return NULL;
#endif
}
// =======================================================================
// function : Size
// purpose :
// =======================================================================
int Media_Packet::Size() const
{
#ifdef HAVE_FFMPEG
return myPacket->size;
#else
return 0;
#endif
}
// =======================================================================
// function : Pts
// purpose :
// =======================================================================
int64_t Media_Packet::Pts() const
{
#ifdef HAVE_FFMPEG
return myPacket->pts;
#else
return 0;
#endif
}
// =======================================================================
// function : Dts
// purpose :
// =======================================================================
int64_t Media_Packet::Dts() const
{
#ifdef HAVE_FFMPEG
return myPacket->dts;
#else
return 0;
#endif
}
// =======================================================================
// function : Duration
// purpose :
// =======================================================================
int64_t Media_Packet::Duration() const
{
#ifdef HAVE_FFMPEG
return myPacket->duration;
#else
return 0;
#endif
}
// =======================================================================
// function : StreamIndex
// purpose :
// =======================================================================
int Media_Packet::StreamIndex() const
{
#ifdef HAVE_FFMPEG
return myPacket->stream_index;
#else
return 0;
#endif
}
// =======================================================================
// function : IsKeyFrame
// purpose :
// =======================================================================
bool Media_Packet::IsKeyFrame() const
{
#ifdef HAVE_FFMPEG
return (myPacket->flags & AV_PKT_FLAG_KEY) != 0;
#else
return false;
#endif
}
// =======================================================================
// function : SetKeyFrame
// purpose :
// =======================================================================
void Media_Packet::SetKeyFrame()
{
#ifdef HAVE_FFMPEG
myPacket->flags |= AV_PKT_FLAG_KEY;
#endif
}

View File

@ -0,0 +1,84 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Media_Packet_HeaderFile
#define _Media_Packet_HeaderFile
#include <Standard_Transient.hxx>
#include <Standard_Type.hxx>
struct AVPacket;
//! AVPacket wrapper - the packet (data chunk for decoding/encoding) holder.
class Media_Packet : public Standard_Transient
{
DEFINE_STANDARD_RTTIEXT(Media_Packet, Standard_Transient)
public:
//! Empty constructor
Standard_EXPORT Media_Packet();
//! Destructor.
Standard_EXPORT virtual ~Media_Packet();
//! av_packet_unref() wrapper.
Standard_EXPORT void Unref();
//! Return packet.
const AVPacket* Packet() const { return myPacket; }
//! Return packet.
AVPacket* ChangePacket() { return myPacket; }
//! Return data.
Standard_EXPORT const uint8_t* Data() const;
//! Return data.
Standard_EXPORT uint8_t* ChangeData();
//! Return data size.
Standard_EXPORT int Size() const;
//! Return presentation timestamp (PTS).
Standard_EXPORT int64_t Pts() const;
//! Return decoding timestamp (DTS).
Standard_EXPORT int64_t Dts() const;
//! Return Duration.
Standard_EXPORT int64_t Duration() const;
//! Return Duration in seconds.
double DurationSeconds() const { return myDurationSec; }
//! Set Duration in seconds.
void SetDurationSeconds (double theDurationSec) { myDurationSec = theDurationSec; }
//! Return stream index.
Standard_EXPORT int StreamIndex() const;
//! Return TRUE for a key frame.
Standard_EXPORT bool IsKeyFrame() const;
//! Mark as key frame.
Standard_EXPORT void SetKeyFrame();
protected:
AVPacket* myPacket; //!< packet
double myDurationSec; //!< packet duration in seconds
};
#endif // _Media_Packet_HeaderFile
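
A small sketch built on the packet API above (illustration only; the helper name is a placeholder and an already opened Media_FormatContext is assumed):

#include <Media_FormatContext.hxx>
#include <Media_Packet.hxx>

//! Hypothetical helper: count key-frame packets of the given stream.
static int countKeyFrames (const Handle(Media_FormatContext)& theFormatCtx,
                           int theStreamIndex)
{
  Handle(Media_Packet) aPacket = new Media_Packet();
  int aNbKeyFrames = 0;
  while (theFormatCtx->ReadPacket (aPacket))
  {
    if (aPacket->StreamIndex() == theStreamIndex
     && aPacket->IsKeyFrame())
    {
      ++aNbKeyFrames;
    }
    aPacket->Unref();
  }
  return aNbKeyFrames;
}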

View File

@ -0,0 +1,668 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
// activate some C99 macros like UINT64_C in "stdint.h" which are used by FFmpeg
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#ifdef _WIN32
#include <windows.h>
#endif
#include <Media_PlayerContext.hxx>
#include <Image_AlienPixMap.hxx>
#include <Media_BufferPool.hxx>
#include <Media_FormatContext.hxx>
#include <Media_CodecContext.hxx>
#include <Media_Scaler.hxx>
#include <Message.hxx>
#include <Message_Messenger.hxx>
#include <OSD.hxx>
#ifdef HAVE_FFMPEG
#include <Standard_WarningsDisable.hxx>
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
};
#include <Standard_WarningsRestore.hxx>
#endif
IMPLEMENT_STANDARD_RTTIEXT(Media_PlayerContext, Standard_Transient)
//================================================================
// Function : Media_PlayerContext
// Purpose :
//================================================================
Media_PlayerContext::Media_PlayerContext (Media_IFrameQueue* theFrameQueue)
: myFrameQueue (theFrameQueue),
myThread (doThreadWrapper),
myWakeEvent (false),
myNextEvent (false),
myDuration (0.0),
myToForceRgb(true),
myToShutDown(false),
mySeekTo (0.0),
myPlayEvent (Media_PlayerEvent_NONE)
{
myThread.Run (this);
#if defined(_WIN32) && !defined(OCCT_UWP)
// Adjust system timer
// By default, the Windows 2000+ timer has coarse precision,
// so Sleep(1) may actually last about 14 ms!
// We request the best available precision to make Sleep() more accurate;
// note that this affects the whole system while the application is running!
TIMECAPS aTimeCaps = {0, 0};
if (timeGetDevCaps (&aTimeCaps, sizeof(aTimeCaps)) == TIMERR_NOERROR)
{
timeBeginPeriod (aTimeCaps.wPeriodMin);
}
else
{
timeBeginPeriod (1);
}
#endif
}
//================================================================
// Function : ~Media_PlayerContext
// Purpose :
//================================================================
Media_PlayerContext::~Media_PlayerContext()
{
myToShutDown = Standard_True;
myWakeEvent.Set();
myThread.Wait();
#if defined(_WIN32) && !defined(OCCT_UWP)
// restore timer adjustments
TIMECAPS aTimeCaps = {0, 0};
if (timeGetDevCaps (&aTimeCaps, sizeof(aTimeCaps)) == TIMERR_NOERROR)
{
timeEndPeriod (aTimeCaps.wPeriodMin);
}
else
{
timeEndPeriod (1);
}
#endif
}
//================================================================
// Function : DumpFirstFrame
// Purpose :
//================================================================
Handle(Media_Frame) Media_PlayerContext::DumpFirstFrame (const TCollection_AsciiString& theSrcVideo,
TCollection_AsciiString& theMediaInfo)
{
theMediaInfo.Clear();
Handle(Media_FormatContext) aFormatCtx = new Media_FormatContext();
if (!aFormatCtx->OpenInput (theSrcVideo))
{
return Handle(Media_Frame)();
}
Handle(Media_CodecContext) aVideoCtx;
#ifdef HAVE_FFMPEG
for (unsigned int aStreamId = 0; aStreamId < aFormatCtx->NbSteams(); ++aStreamId)
{
const AVStream& aStream = aFormatCtx->Stream (aStreamId);
const AVMediaType aCodecType = aStream.codecpar->codec_type;
if (aCodecType == AVMEDIA_TYPE_VIDEO)
{
aVideoCtx = new Media_CodecContext();
if (!aVideoCtx->Init (aStream, aFormatCtx->PtsStartBase(), 1))
{
return Handle(Media_Frame)();
}
theMediaInfo = aFormatCtx->StreamInfo (aStreamId, aVideoCtx->Context());
break;
}
}
#endif
if (aVideoCtx.IsNull())
{
Message::DefaultMessenger()->Send (TCollection_AsciiString ("FFmpeg: no video stream in '") + theSrcVideo + "'", Message_Fail);
return Handle(Media_Frame)();
}
Handle(Media_Packet) aPacket = new Media_Packet();
Handle(Media_Frame) aFrame = new Media_Frame();
for (;;)
{
if (!aFormatCtx->ReadPacket (aPacket))
{
Message::DefaultMessenger()->Send (TCollection_AsciiString ("FFmpeg: unable to read from '") + theSrcVideo + "'", Message_Fail);
return Handle(Media_Frame)();
}
if (!aVideoCtx->CanProcessPacket (aPacket))
{
continue;
}
if (aVideoCtx->SendPacket (aPacket)
&& aVideoCtx->ReceiveFrame (aFrame))
{
break;
}
}
if (aFrame->IsEmpty()
|| aFrame->SizeX() < 1
|| aFrame->SizeY() < 1)
{
Message::DefaultMessenger()->Send (TCollection_AsciiString ("FFmpeg: unable to decode first video frame from '") + theSrcVideo + "'", Message_Fail);
return Handle(Media_Frame)();
}
return aFrame;
}
//================================================================
// Function : DumpFirstFrame
// Purpose :
//================================================================
bool Media_PlayerContext::DumpFirstFrame (const TCollection_AsciiString& theSrcVideo,
const TCollection_AsciiString& theOutImage,
TCollection_AsciiString& theMediaInfo,
int theMaxSize)
{
Handle(Media_Frame) aFrame = DumpFirstFrame (theSrcVideo, theMediaInfo);
if (aFrame.IsNull())
{
return false;
}
Handle(Image_AlienPixMap) aPixMap = new Image_AlienPixMap();
int aResSizeX = aFrame->SizeX(), aResSizeY = aFrame->SizeY();
if (theMaxSize > 0)
{
if (aResSizeX > aResSizeY)
{
aResSizeX = theMaxSize;
aResSizeY = int((double(aFrame->SizeY()) / double(aFrame->SizeX())) * double(aResSizeX));
}
else
{
aResSizeY = theMaxSize;
aResSizeX = int((double(aFrame->SizeX()) / double(aFrame->SizeY())) * double(aResSizeY));
}
}
if (!aPixMap->InitZero (Image_Format_RGB, aResSizeX, aResSizeY))
{
Message::DefaultMessenger()->Send ("FFmpeg: Failed allocation of RGB frame (out of memory)", Message_Fail);
return false;
}
//Image_Format aFormat = aFrame->FormatFFmpeg2Occt (aFrame->Format());
//if (aFormat == Image_Format_UNKNOWN || theMaxSize > 0)
{
Handle(Media_Frame) anRgbFrame = new Media_Frame();
anRgbFrame->InitWrapper (aPixMap);
Media_Scaler aScaler;
if (!aScaler.Convert (aFrame, anRgbFrame))
{
Message::DefaultMessenger()->Send (TCollection_AsciiString ("FFmpeg: unable to convert frame into RGB '") + theSrcVideo + "'", Message_Fail);
return false;
}
}
aPixMap->SetTopDown (true);
return aPixMap->Save (theOutImage);
}
//================================================================
// Function : SetInput
// Purpose :
//================================================================
void Media_PlayerContext::SetInput (const TCollection_AsciiString& theInputPath,
Standard_Boolean theToWait)
{
{
Standard_Mutex::Sentry aLock (myMutex);
if (theToWait)
{
myNextEvent.Reset();
}
myInputPath = theInputPath;
myPlayEvent = Media_PlayerEvent_NEXT;
myWakeEvent.Set();
}
if (theToWait)
{
myNextEvent.Wait();
}
}
//================================================================
// Function : PlaybackState
// Purpose :
//================================================================
void Media_PlayerContext::PlaybackState (Standard_Boolean& theIsPaused,
Standard_Real& theProgress,
Standard_Real& theDuration)
{
Standard_Mutex::Sentry aLock (myMutex);
theIsPaused = !myTimer.IsStarted();
theProgress = myTimer.ElapsedTime();
theDuration = myDuration;
}
//================================================================
// Function : PlayPause
// Purpose :
//================================================================
void Media_PlayerContext::PlayPause (Standard_Boolean& theIsPaused,
Standard_Real& theProgress,
Standard_Real& theDuration)
{
Standard_Mutex::Sentry aLock (myMutex);
theProgress = myTimer.ElapsedTime();
theDuration = myDuration;
if (myTimer.IsStarted())
{
pushPlayEvent (Media_PlayerEvent_PAUSE);
theIsPaused = true;
}
else
{
pushPlayEvent (Media_PlayerEvent_RESUME);
theIsPaused = false;
}
}
//================================================================
// Function : Seek
// Purpose :
//================================================================
void Media_PlayerContext::Seek (Standard_Real thePosSec)
{
Standard_Mutex::Sentry aLock (myMutex);
mySeekTo = thePosSec;
pushPlayEvent (Media_PlayerEvent_SEEK);
}
//================================================================
// Function : pushPlayEvent
// Purpose :
//================================================================
void Media_PlayerContext::pushPlayEvent (Media_PlayerEvent thePlayEvent)
{
Standard_Mutex::Sentry aLock (myMutex);
myPlayEvent = thePlayEvent;
myWakeEvent.Set();
}
//================================================================
// Function : popPlayEvent
// Purpose :
//================================================================
bool Media_PlayerContext::popPlayEvent (Media_PlayerEvent& thePlayEvent,
const Handle(Media_FormatContext)& theFormatCtx,
const Handle(Media_CodecContext)& theVideoCtx,
const Handle(Media_Frame)& theFrame)
{
if (myPlayEvent == Media_PlayerEvent_NONE)
{
thePlayEvent = Media_PlayerEvent_NONE;
return false;
}
Standard_Mutex::Sentry aLock (myMutex);
thePlayEvent = myPlayEvent;
if (thePlayEvent == Media_PlayerEvent_PAUSE)
{
myTimer.Pause();
}
else if (thePlayEvent == Media_PlayerEvent_RESUME)
{
myTimer.Start();
}
else if (thePlayEvent == Media_PlayerEvent_SEEK)
{
if (!theFormatCtx.IsNull()
&& !theVideoCtx.IsNull())
{
if (!theFormatCtx->SeekStream (theVideoCtx->StreamIndex(), mySeekTo, false))
{
theFormatCtx->Seek (mySeekTo, false);
}
theVideoCtx->Flush();
if (!theFrame.IsNull())
{
theFrame->Unref();
}
myTimer.Seek (mySeekTo);
}
}
myPlayEvent = Media_PlayerEvent_NONE;
return thePlayEvent != Media_PlayerEvent_NONE;
}
//! Returns nearest (greater or equal) aligned number.
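//! For example: getAligned (100) == 128 and getAligned (128) == 128 with the default 32-byte alignment.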
static int getAligned (size_t theNumber,
size_t theAlignment = 32)
{
return int(theNumber + theAlignment - 1 - (theNumber - 1) % theAlignment);
}
//================================================================
// Function : receiveFrame
// Purpose :
//================================================================
bool Media_PlayerContext::receiveFrame (const Handle(Media_Frame)& theFrame,
const Handle(Media_CodecContext)& theVideoCtx)
{
if (myFrameTmp.IsNull())
{
myFrameTmp = new Media_Frame();
}
if (!theVideoCtx->ReceiveFrame (myFrameTmp))
{
return false;
}
theFrame->SetPts (myFrameTmp->Pts());
theFrame->SetPixelAspectRatio (myFrameTmp->PixelAspectRatio());
Image_Format anOcctFmt = Media_Frame::FormatFFmpeg2Occt (myFrameTmp->Format());
if (anOcctFmt != Image_Format_UNKNOWN)
{
Media_Frame::Swap (theFrame, myFrameTmp);
return true;
}
#ifdef HAVE_FFMPEG
else if (!myToForceRgb
&& (myFrameTmp->Format() == AV_PIX_FMT_YUV420P
|| myFrameTmp->Format() == AV_PIX_FMT_YUVJ420P))
{
Media_Frame::Swap (theFrame, myFrameTmp);
return true;
}
#endif
theFrame->Unref();
if (myFrameTmp->IsEmpty()
|| myFrameTmp->Size().x() < 1
|| myFrameTmp->Size().y() < 1)
{
theFrame->Unref();
return false;
}
const Graphic3d_Vec2i aSize = myFrameTmp->Size();
const Graphic3d_Vec2i aSizeUV = myFrameTmp->Size() / 2;
AVFrame* aFrame = theFrame->ChangeFrame();
if (myToForceRgb)
{
if (myBufferPools[0].IsNull())
{
myBufferPools[0] = new Media_BufferPool();
}
const int aLineSize = getAligned (aSize.x() * 3);
const int aBufSize = aLineSize * aSize.y();
if (!myBufferPools[0]->Init (aBufSize))
{
Message::DefaultMessenger()->Send ("FFmpeg: unable to allocate RGB24 frame buffer", Message_Fail);
return false;
}
#ifdef HAVE_FFMPEG
aFrame->buf[0] = myBufferPools[0]->GetBuffer();
if (aFrame->buf[0] == NULL)
{
theFrame->Unref();
Message::DefaultMessenger()->Send ("FFmpeg: unable to allocate RGB24 frame buffer", Message_Fail);
return false;
}
aFrame->format = AV_PIX_FMT_RGB24;
aFrame->width = aSize.x();
aFrame->height = aSize.y();
aFrame->linesize[0] = aLineSize;
aFrame->data[0] = aFrame->buf[0]->data;
#else
(void )aFrame;
#endif
}
else
{
for (int aPlaneIter = 0; aPlaneIter < 3; ++aPlaneIter)
{
if (myBufferPools[aPlaneIter].IsNull())
{
myBufferPools[aPlaneIter] = new Media_BufferPool();
}
}
const int aLineSize = getAligned (aSize.x());
const int aLineSizeUV = getAligned (aSizeUV.x());
const int aBufSize = aLineSize * aSize.y();
const int aBufSizeUV = aLineSizeUV * aSizeUV.y();
if (!myBufferPools[0]->Init (aBufSize)
|| !myBufferPools[1]->Init (aBufSizeUV)
|| !myBufferPools[2]->Init (aBufSizeUV))
{
Message::DefaultMessenger()->Send ("FFmpeg: unable to allocate YUV420P frame buffers", Message_Fail);
return false;
}
#ifdef HAVE_FFMPEG
aFrame->buf[0] = myBufferPools[0]->GetBuffer();
aFrame->buf[1] = myBufferPools[1]->GetBuffer();
aFrame->buf[2] = myBufferPools[2]->GetBuffer();
if (aFrame->buf[0] == NULL
|| aFrame->buf[1] == NULL
|| aFrame->buf[2] == NULL)
{
theFrame->Unref();
Message::DefaultMessenger()->Send ("FFmpeg: unable to allocate YUV420P frame buffers", Message_Fail);
return false;
}
aFrame->format = AV_PIX_FMT_YUV420P;
aFrame->width = aSize.x();
aFrame->height = aSize.y();
aFrame->linesize[0] = aLineSize;
aFrame->linesize[1] = aLineSizeUV;
aFrame->linesize[2] = aLineSizeUV;
aFrame->data[0] = aFrame->buf[0]->data;
aFrame->data[1] = aFrame->buf[1]->data;
aFrame->data[2] = aFrame->buf[2]->data;
#endif
}
if (myScaler.IsNull())
{
myScaler = new Media_Scaler();
}
if (!myScaler->Convert (myFrameTmp, theFrame))
{
return false;
}
myFrameTmp->Unref();
return true;
}
//================================================================
// Function : doThreadLoop
// Purpose :
//================================================================
void Media_PlayerContext::doThreadLoop()
{
OSD::SetSignal (false);
Handle(Media_Frame) aFrame;
bool wasSeeked = false;
for (;;)
{
myWakeEvent.Wait();
myWakeEvent.Reset();
if (myToShutDown)
{
return;
}
TCollection_AsciiString anInput;
{
Standard_Mutex::Sentry aLock (myMutex);
std::swap (anInput, myInputPath);
if (myPlayEvent == Media_PlayerEvent_NEXT)
{
myPlayEvent = Media_PlayerEvent_NONE;
}
}
myNextEvent.Set();
if (anInput.IsEmpty())
{
continue;
}
Handle(Media_FormatContext) aFormatCtx = new Media_FormatContext();
if (!aFormatCtx->OpenInput (anInput))
{
continue;
}
Handle(Media_CodecContext) aVideoCtx;
#ifdef HAVE_FFMPEG
for (unsigned int aStreamId = 0; aStreamId < aFormatCtx->NbSteams(); ++aStreamId)
{
const AVStream& aStream = aFormatCtx->Stream (aStreamId);
const AVMediaType aCodecType = aStream.codecpar->codec_type;
if (aCodecType == AVMEDIA_TYPE_VIDEO)
{
aVideoCtx = new Media_CodecContext();
if (!aVideoCtx->Init (aStream, aFormatCtx->PtsStartBase(), 1))
{
aVideoCtx.Nullify();
}
else
{
break;
}
}
}
#endif
if (aVideoCtx.IsNull())
{
Message::DefaultMessenger()->Send (TCollection_AsciiString ("FFmpeg: no video stream in '") + anInput + "'", Message_Fail);
continue;
}
Handle(Media_Packet) aPacket = new Media_Packet();
Media_PlayerEvent aPlayEvent = Media_PlayerEvent_NONE;
{
Standard_Mutex::Sentry aLock (myMutex);
myTimer.Stop();
myTimer.Start();
myDuration = aFormatCtx->Duration();
}
if (!aFrame.IsNull())
{
aFrame->Unref();
}
const double anUploadDelaySec = 1.0 / 60.0 + 0.0001;
for (;;)
{
if (myToShutDown)
{
return;
}
else if (!aFormatCtx->ReadPacket (aPacket))
{
break;
}
popPlayEvent (aPlayEvent, aFormatCtx, aVideoCtx, aFrame);
if (aPlayEvent == Media_PlayerEvent_NEXT)
{
break;
}
else if (aPlayEvent == Media_PlayerEvent_SEEK)
{
wasSeeked = true;
}
bool isAccepted = false;
if (aVideoCtx->CanProcessPacket (aPacket))
{
isAccepted = true;
aVideoCtx->SendPacket (aPacket);
}
aPacket->Unref();
if (!isAccepted)
{
continue;
}
for (;;)
{
if (myToShutDown)
{
return;
}
else if (popPlayEvent (aPlayEvent, aFormatCtx, aVideoCtx, aFrame))
{
if (aPlayEvent == Media_PlayerEvent_NEXT)
{
break;
}
else if (aPlayEvent == Media_PlayerEvent_SEEK)
{
wasSeeked = true;
}
}
if (aFrame.IsNull())
{
aFrame = myFrameQueue->LockFrame();
if (aFrame.IsNull())
{
OSD::MilliSecSleep (1);
continue;
}
aFrame->Unref();
}
if (aFrame->IsEmpty()
&& !receiveFrame (aFrame, aVideoCtx))
{
break;
}
const double aTime = myTimer.ElapsedTime() - anUploadDelaySec;
if (wasSeeked
|| (aFrame->Pts() <= aTime
&& myTimer.IsStarted()))
{
wasSeeked = false;
myFrameQueue->ReleaseFrame (aFrame);
aFrame.Nullify();
break;
}
OSD::MilliSecSleep (1);
}
if (aPlayEvent == Media_PlayerEvent_NEXT)
{
break;
}
}
}
}

View File

@ -0,0 +1,155 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Media_PlayerContext_HeaderFile
#define _Media_PlayerContext_HeaderFile
#include <Media_IFrameQueue.hxx>
#include <Media_Timer.hxx>
#include <OSD_Thread.hxx>
#include <Standard_Condition.hxx>
#include <Standard_Mutex.hxx>
#include <Standard_Transient.hxx>
#include <Standard_Type.hxx>
#include <TCollection_AsciiString.hxx>
class Media_BufferPool;
class Media_CodecContext;
class Media_FormatContext;
class Media_Scaler;
//! Player context.
class Media_PlayerContext : public Standard_Transient
{
DEFINE_STANDARD_RTTIEXT(Media_PlayerContext, Standard_Transient)
public:
//! Dump first video frame.
//! @param theSrcVideo [in] path to the video
//! @param theMediaInfo [out] video description
Standard_EXPORT static Handle(Media_Frame) DumpFirstFrame (const TCollection_AsciiString& theSrcVideo,
TCollection_AsciiString& theMediaInfo);
//! Dump first video frame.
//! @param theSrcVideo [in] path to the video
//! @param theOutImage [in] path to make a screenshot
//! @param theMediaInfo [out] video description
//! @param theMaxSize [in] when positive - downscales image to specified size
Standard_EXPORT static bool DumpFirstFrame (const TCollection_AsciiString& theSrcVideo,
const TCollection_AsciiString& theOutImage,
TCollection_AsciiString& theMediaInfo,
int theMaxSize = 0);
public:
//! Main constructor.
//! Note that the Frame Queue is stored as a plain pointer,
//! since this context is expected to be stored as a class field of the Frame Queue itself.
Standard_EXPORT Media_PlayerContext (Media_IFrameQueue* theFrameQueue);
//! Destructor.
Standard_EXPORT virtual ~Media_PlayerContext();
public:
//! Set new input for playback.
Standard_EXPORT void SetInput (const TCollection_AsciiString& theInputPath,
Standard_Boolean theToWait);
//! Return playback state.
Standard_EXPORT void PlaybackState (Standard_Boolean& theIsPaused,
Standard_Real& theProgress,
Standard_Real& theDuration);
//! Pause/Resume playback depending on the current state.
Standard_EXPORT void PlayPause (Standard_Boolean& theIsPaused,
Standard_Real& theProgress,
Standard_Real& theDuration);
//! Seek to specified position.
Standard_EXPORT void Seek (Standard_Real thePosSec);
//! Pause playback.
void Pause() { pushPlayEvent (Media_PlayerEvent_PAUSE); }
//! Resume playback.
void Resume() { pushPlayEvent (Media_PlayerEvent_RESUME); }
//! Return TRUE if the queue requires RGB pixel format, or FALSE if it can also handle YUV pixel format; TRUE by default.
bool ToForceRgb() const { return myToForceRgb; }
//! Set whether the queue requires RGB pixel format (TRUE) or can also handle YUV pixel format (FALSE).
void SetForceRgb (bool theToForce) { myToForceRgb = theToForce; }
private:
//! Internal enumeration for events.
enum Media_PlayerEvent
{
Media_PlayerEvent_NONE = 0,
Media_PlayerEvent_PAUSE,
Media_PlayerEvent_RESUME,
Media_PlayerEvent_SEEK,
Media_PlayerEvent_NEXT,
};
private:
//! Thread loop.
Standard_EXPORT void doThreadLoop();
//! Push new playback event.
Standard_EXPORT void pushPlayEvent (Media_PlayerEvent thePlayEvent);
//! Fetch new playback event.
Standard_EXPORT bool popPlayEvent (Media_PlayerEvent& thePlayEvent,
const Handle(Media_FormatContext)& theFormatCtx,
const Handle(Media_CodecContext)& theVideoCtx,
const Handle(Media_Frame)& theFrame);
//! Decode new frame.
bool receiveFrame (const Handle(Media_Frame)& theFrame,
const Handle(Media_CodecContext)& theVideoCtx);
//! Thread creation callback.
static Standard_Address doThreadWrapper (Standard_Address theData)
{
Media_PlayerContext* aThis = (Media_PlayerContext* )theData;
aThis->doThreadLoop();
return 0;
}
private:
Media_IFrameQueue* myFrameQueue; //!< frame queue
OSD_Thread myThread; //!< working thread
Standard_Mutex myMutex; //!< mutex for events
Standard_Condition myWakeEvent; //!< event to wake up working thread and proceed new playback event
Standard_Condition myNextEvent; //!< event to check if working thread processed next file event (e.g. released file handles of previous input)
Media_Timer myTimer; //!< playback timer
Standard_Real myDuration; //!< playback duration
Handle(Media_BufferPool) myBufferPools[4]; //!< per-plane pools
Handle(Media_Frame) myFrameTmp; //!< temporary object holding decoded frame
Handle(Media_Scaler) myScaler; //!< pixel format conversion tool
bool myToForceRgb; //!< flag indicating if queue requires RGB pixel format or can handle also YUV pixel format
volatile bool myToShutDown; //!< flag to terminate working thread
TCollection_AsciiString myInputPath; //!< new input to open
volatile Standard_Real mySeekTo; //!< new seeking position
volatile Media_PlayerEvent myPlayEvent; //!< playback event
};
#endif // _Media_PlayerContext_HeaderFile
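
A usage sketch of the static thumbnail helper declared above (illustration only; the helper name, file names and the 512 px size are placeholders):

#include <Media_PlayerContext.hxx>
#include <Message.hxx>
#include <Message_Messenger.hxx>

//! Hypothetical helper: dump a downscaled first frame of a video and report its stream info.
static bool makeThumbnail()
{
  TCollection_AsciiString aMediaInfo;
  const bool isDone = Media_PlayerContext::DumpFirstFrame ("movie.mp4",     // placeholder input
                                                           "thumbnail.png", // placeholder output
                                                           aMediaInfo,
                                                           512);            // downscale to 512 px
  if (isDone)
  {
    Message::DefaultMessenger()->Send (aMediaInfo, Message_Info);
  }
  return isDone;
}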

151
src/Media/Media_Scaler.cxx Normal file
View File

@ -0,0 +1,151 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
// activate some C99 macros like UINT64_C in "stdint.h" which are used by FFmpeg
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#include <Media_Scaler.hxx>
#ifdef HAVE_FFMPEG
#include <Standard_WarningsDisable.hxx>
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
};
#include <Standard_WarningsRestore.hxx>
#endif
IMPLEMENT_STANDARD_RTTIEXT(Media_Scaler, Standard_Transient)
// =======================================================================
// function : Media_Scaler
// purpose :
// =======================================================================
Media_Scaler::Media_Scaler()
: mySwsContext (NULL),
mySrcFormat (0),
myResFormat (0)
{
#ifdef HAVE_FFMPEG
mySrcFormat = AV_PIX_FMT_NONE;
myResFormat = AV_PIX_FMT_NONE;
#endif
}
// =======================================================================
// function : ~Media_Scaler
// purpose :
// =======================================================================
Media_Scaler::~Media_Scaler()
{
Release();
}
// =======================================================================
// function : Release
// purpose :
// =======================================================================
void Media_Scaler::Release()
{
if (mySwsContext != NULL)
{
#ifdef HAVE_FFMPEG
sws_freeContext (mySwsContext);
#endif
mySwsContext = NULL;
}
}
// =======================================================================
// function : Init
// purpose :
// =======================================================================
bool Media_Scaler::Init (const Graphic3d_Vec2i& theSrcDims,
int theSrcFormat,
const Graphic3d_Vec2i& theResDims,
int theResFormat)
{
#ifdef HAVE_FFMPEG
if (theSrcDims.x() < 1
|| theSrcDims.y() < 1
|| theResDims.x() < 1
|| theResDims.y() < 1
|| theSrcFormat == AV_PIX_FMT_NONE
|| theResFormat == AV_PIX_FMT_NONE)
{
Release();
return false;
}
else if (mySrcDims == theSrcDims
&& myResDims == theResDims
&& mySrcFormat == theSrcFormat
&& myResFormat == theResFormat)
{
return mySwsContext != NULL;
}
Release();
mySrcDims = theSrcDims;
myResDims = theResDims;
mySrcFormat = theSrcFormat;
myResFormat = theResFormat;
mySwsContext = sws_getContext (theSrcDims.x(), theSrcDims.y(), (AVPixelFormat )theSrcFormat,
theResDims.x(), theResDims.y(), (AVPixelFormat )theResFormat,
SWS_BICUBIC, NULL, NULL, NULL);
return mySwsContext != NULL;
#else
(void )theSrcDims;
(void )theSrcFormat;
(void )theResDims;
(void )theResFormat;
return false;
#endif
}
// =======================================================================
// function : Convert
// purpose :
// =======================================================================
bool Media_Scaler::Convert (const Handle(Media_Frame)& theSrc,
const Handle(Media_Frame)& theRes)
{
if (theSrc.IsNull()
|| theSrc->IsEmpty()
|| theRes.IsNull()
|| theRes->IsEmpty()
|| theSrc == theRes)
{
return false;
}
if (!Init (theSrc->Size(), theSrc->Format(),
theRes->Size(), theRes->Format()))
{
return false;
}
#ifdef HAVE_FFMPEG
sws_scale (mySwsContext,
theSrc->Frame()->data, theSrc->Frame()->linesize,
0, theSrc->SizeY(),
theRes->ChangeFrame()->data, theRes->Frame()->linesize);
return true;
#else
return false;
#endif
}

View File

@ -0,0 +1,68 @@
// Created by: Kirill GAVRILOV
// Copyright (c) 2019 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Media_Scaler_HeaderFile
#define _Media_Scaler_HeaderFile
#include <Media_Frame.hxx>
#include <Standard_Transient.hxx>
#include <Standard_Type.hxx>
#include <Graphic3d_Vec2.hxx>
struct SwsContext;
//! SwsContext wrapper - tool performing image scaling and pixel format conversion.
class Media_Scaler : public Standard_Transient
{
DEFINE_STANDARD_RTTIEXT(Media_Scaler, Standard_Transient)
public:
//! Empty constructor.
Standard_EXPORT Media_Scaler();
//! Destructor.
Standard_EXPORT virtual ~Media_Scaler();
//! sws_freeContext() wrapper.
Standard_EXPORT void Release();
//! sws_getContext() wrapper - creates conversion context.
//! @param theSrcDims dimensions of input frame
//! @param theSrcFormat pixel format (AVPixelFormat) of input frame
//! @param theResDims dimensions of destination frame
//! @param theResFormat pixel format (AVPixelFormat) of destination frame
Standard_EXPORT bool Init (const Graphic3d_Vec2i& theSrcDims,
int theSrcFormat,
const Graphic3d_Vec2i& theResDims,
int theResFormat);
//! Convert one frame to another.
Standard_EXPORT bool Convert (const Handle(Media_Frame)& theSrc,
const Handle(Media_Frame)& theRes);
//! Return TRUE if context was initialized.
bool IsValid() const { return mySwsContext != NULL; }
protected:
SwsContext* mySwsContext; //!< conversion context
Graphic3d_Vec2i mySrcDims; //!< dimensions of input frame
int mySrcFormat; //!< pixel format (AVPixelFormat) of input frame
Graphic3d_Vec2i myResDims; //!< dimensions of destination frame
int myResFormat; //!< pixel format (AVPixelFormat) of destination frame
};
#endif // _Media_Scaler_HeaderFile
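For reference, a minimal usage sketch of this class; the helper name and the pre-allocated source/destination frames (e.g. a decoded YUV420P frame and an RGBA frame of the target size) are assumptions made for the example, not code from this commit:

#include <Media_Frame.hxx>
#include <Media_Scaler.hxx>

//! Hypothetical helper: convert a decoded frame into a pre-allocated destination frame.
static bool convertFrame (const Handle(Media_Frame)& theSrc,
                          const Handle(Media_Frame)& theRes)
{
  Handle(Media_Scaler) aScaler = new Media_Scaler();
  // Convert() calls Init() on its own, but an explicit call makes failures
  // (build without FFmpeg, invalid dimensions or formats) visible up front.
  if (!aScaler->Init (theSrc->Size(), theSrc->Format(),
                      theRes->Size(), theRes->Format()))
  {
    return false;
  }
  return aScaler->Convert (theSrc, theRes);
}

Since Init() returns early when the parameters are unchanged, a single Media_Scaler instance can be reused for a whole sequence of frames sharing the same dimensions and formats.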

View File

@ -12,15 +12,15 @@
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#include <AIS_AnimationTimer.hxx>
#include <Media_Timer.hxx>
IMPLEMENT_STANDARD_RTTIEXT(AIS_AnimationTimer, Standard_Transient)
IMPLEMENT_STANDARD_RTTIEXT(Media_Timer, Standard_Transient)
//=============================================================================
//function : Pause
//purpose :
//=============================================================================
void AIS_AnimationTimer::Pause()
void Media_Timer::Pause()
{
myTimer.Stop();
myTimerFrom += myTimer.ElapsedTime() * myTimerSpeed;
@ -31,7 +31,7 @@ void AIS_AnimationTimer::Pause()
//function : Stop
//purpose :
//=============================================================================
void AIS_AnimationTimer::Stop()
void Media_Timer::Stop()
{
myTimer.Stop();
myTimer.Reset();
@ -42,7 +42,7 @@ void AIS_AnimationTimer::Stop()
//function : SetPlaybackSpeed
//purpose :
//=============================================================================
void AIS_AnimationTimer::SetPlaybackSpeed (const Standard_Real theSpeed)
void Media_Timer::SetPlaybackSpeed (const Standard_Real theSpeed)
{
if (!myTimer.IsStarted())
{
@ -61,7 +61,7 @@ void AIS_AnimationTimer::SetPlaybackSpeed (const Standard_Real theSpeed)
//function : Seek
//purpose :
//=============================================================================
void AIS_AnimationTimer::Seek (const Standard_Real theTime)
void Media_Timer::Seek (const Standard_Real theTime)
{
const Standard_Boolean isStarted = myTimer.IsStarted();
myTimer.Stop();

74
src/Media/Media_Timer.hxx Normal file
View File

@ -0,0 +1,74 @@
// Created by: Kirill Gavrilov
// Copyright (c) 2016 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _Media_Timer_HeaderFile
#define _Media_Timer_HeaderFile
#include <OSD_Timer.hxx>
#include <Standard_Transient.hxx>
#include <Standard_Type.hxx>
//! Auxiliary class defining the animation timer.
class Media_Timer : public Standard_Transient
{
DEFINE_STANDARD_RTTIEXT(Media_Timer, Standard_Transient)
public:
//! Empty constructor.
Media_Timer() : myTimerFrom (0.0), myTimerSpeed (1.0) {}
//! Return elapsed time in seconds.
Standard_Real ElapsedTime() const
{
return myTimerFrom + myTimer.ElapsedTime() * myTimerSpeed;
}
//! Return playback speed coefficient (1.0 means normal speed).
Standard_Real PlaybackSpeed() const { return myTimerSpeed; }
//! Setup playback speed coefficient.
Standard_EXPORT void SetPlaybackSpeed (const Standard_Real theSpeed);
//! Return true if timer has been started.
Standard_Boolean IsStarted() const
{
return myTimer.IsStarted();
}
//! Start the timer.
void Start()
{
myTimer.Start();
}
//! Pause the timer.
Standard_EXPORT void Pause();
//! Stop the timer.
Standard_EXPORT void Stop();
//! Seek the timer to the specified position.
Standard_EXPORT void Seek (const Standard_Real theTime);
protected:
OSD_Timer myTimer;
Standard_Real myTimerFrom;
Standard_Real myTimerSpeed;
};
DEFINE_STANDARD_HANDLE(Media_Timer, Standard_Transient)
#endif // _Media_Timer_HeaderFile
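A short sketch of the intended call sequence, assuming a caller that polls ElapsedTime() as the presentation time; the playback speed and the seek position are arbitrary example values:

#include <Media_Timer.hxx>

static void exampleTimerUsage()
{
  Handle(Media_Timer) aTimer = new Media_Timer();
  aTimer->SetPlaybackSpeed (2.0); // reported time advances twice as fast
  aTimer->Start();
  // ... render frames, using aTimer->ElapsedTime() as the presentation timestamp ...
  aTimer->Pause();                // ElapsedTime() keeps its current value while paused
  aTimer->Seek (1.5);             // reposition to 1.5 s
  aTimer->Start();                // resume from the new position
  aTimer->Stop();                 // stop and reset
}

Note how Pause() (see the Media_Timer.cxx hunk above) folds the scaled elapsed time into myTimerFrom before stopping the internal OSD_Timer: after running for 2 seconds of wall time at PlaybackSpeed() == 2.0, ElapsedTime() reports 0.0 + 2.0 * 2.0 = 4.0 s, and Pause() moves those 4.0 s into myTimerFrom, so the same 4.0 s are still reported while paused and counting resumes from there on the next Start().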

View File

@ -20,3 +20,4 @@ CSF_IOKit
CSF_FreeImagePlus
CSF_FREETYPE
CSF_FFmpeg
CSF_winmm

View File

@ -3,6 +3,7 @@ InterfaceGraphic
SelectBasics
Xw
Image
Media
WNT
Cocoa
TColQuantity

View File

@ -50,6 +50,7 @@
#include <Graphic3d_CStructure.hxx>
#include <Graphic3d_Texture2Dmanual.hxx>
#include <Graphic3d_GraphicDriver.hxx>
#include <Graphic3d_MediaTextureSet.hxx>
#include <Image_AlienPixMap.hxx>
#include <OSD_File.hxx>
#include <Prs3d_Drawer.hxx>
@ -4134,6 +4135,38 @@ Standard_Integer VTexture (Draw_Interpretor& theDi, Standard_Integer theArgsNb,
{
toSetDefaults = true;
}
else if ((aNameCase == "-video")
&& anArgIter + 1 < theArgsNb)
{
const TCollection_AsciiString anInput (theArgVec[++anArgIter]);
Handle(Graphic3d_MediaTextureSet) aMedia = Handle(Graphic3d_MediaTextureSet)::DownCast (aTextureSetOld);
if (aMedia.IsNull())
{
aMedia = new Graphic3d_MediaTextureSet();
}
if (aMedia->Input() != anInput)
{
aMedia->OpenInput (anInput, false);
}
else
{
if (aMedia->SwapFrames()
&& !aCtx->CurrentViewer()->ZLayerSettings (aTexturedIO->ZLayer()).IsImmediate())
{
ViewerTest::CurrentView()->Invalidate();
}
}
if (aTexturedIO->Attributes()->SetupOwnShadingAspect (aCtx->DefaultDrawer())
&& aTexturedShape.IsNull())
{
aTexturedIO->SetToUpdate();
}
toComputeUV = aTextureSetOld.IsNull();
aTexturedIO->Attributes()->ShadingAspect()->Aspect()->SetTextureMapOn (true);
aTexturedIO->Attributes()->ShadingAspect()->Aspect()->SetTextureSet (aMedia);
aTextureSetOld.Nullify();
}
else if (aCommandName == "vtexture"
&& (aTextureVecNew.IsEmpty()
|| aNameCase.StartsWith ("-tex")))
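The "-video" branch added above shows the calls needed to plug Graphic3d_MediaTextureSet into an object's shading aspect. Outside of the Draw Harness the same wiring could look roughly as follows; the helper name, the path argument and the final Redisplay() call are assumptions of this sketch rather than code from the commit:

#include <AIS_InteractiveContext.hxx>
#include <AIS_InteractiveObject.hxx>
#include <Graphic3d_AspectFillArea3d.hxx>
#include <Graphic3d_MediaTextureSet.hxx>
#include <Prs3d_Drawer.hxx>
#include <Prs3d_ShadingAspect.hxx>
#include <TCollection_AsciiString.hxx>

//! Hypothetical helper: attach a video file as a texture to a displayed object.
static void attachVideoTexture (const Handle(AIS_InteractiveContext)& theCtx,
                                const Handle(AIS_InteractiveObject)&  thePrs,
                                const TCollection_AsciiString&        theVideoPath)
{
  Handle(Graphic3d_MediaTextureSet) aMedia = new Graphic3d_MediaTextureSet();
  aMedia->OpenInput (theVideoPath, false);
  thePrs->Attributes()->SetupOwnShadingAspect (theCtx->DefaultDrawer());
  thePrs->Attributes()->ShadingAspect()->Aspect()->SetTextureMapOn (true);
  thePrs->Attributes()->ShadingAspect()->Aspect()->SetTextureSet (aMedia);
  theCtx->Redisplay (thePrs, true);
}

From the Draw Harness the equivalent appears to be "vtexture <object> -video <file>", with SwapFrames() triggering view invalidation as new frames are decoded.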