DirectShow + OpenGL Video Playback
#include <DShow.h>
#pragma include_alias( "dxtrans.h", "qedit.h" )
#define __IDxtCompositor_INTERFACE_DEFINED__
#define __IDxtAlphaSetter_INTERFACE_DEFINED__
#define __IDxtJpeg_INTERFACE_DEFINED__
#define __IDxtKey_INTERFACE_DEFINED__
#include <uuids.h>
#include <Aviriff.h>
#include <Windows.h>
//for threading
#include <process.h>
#pragma comment(lib,"uuid.lib")
#pragma comment(lib,"Strmiids.lib")
// Due to a missing qedit.h in recent Platform SDKs, we've replicated the relevant contents here
// #include <qedit.h>
MIDL_INTERFACE("0579154A-2B53-4994-B0D0-E773148EFF85")
ISampleGrabberCB : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE SampleCB(
double SampleTime,
IMediaSample *pSample) = 0;
virtual HRESULT STDMETHODCALLTYPE BufferCB(
double SampleTime,
BYTE *pBuffer,
long BufferLen) = 0;
};
MIDL_INTERFACE("6B652FFF-11FE-4fce-92AD-0266B5D7C78F")
ISampleGrabber : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE SetOneShot(
BOOL OneShot) = 0;
virtual HRESULT STDMETHODCALLTYPE SetMediaType(
const AM_MEDIA_TYPE *pType) = 0;
virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType(
AM_MEDIA_TYPE *pType) = 0;
virtual HRESULT STDMETHODCALLTYPE SetBufferSamples(
BOOL BufferThem) = 0;
virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer(
/* [out][in] */ long *pBufferSize,
/* [out] */ long *pBuffer) = 0;
virtual HRESULT STDMETHODCALLTYPE GetCurrentSample(
/* [retval][out] */ IMediaSample **ppSample) = 0;
virtual HRESULT STDMETHODCALLTYPE SetCallback(
ISampleGrabberCB *pCallback,
long WhichMethodToCallback) = 0;
};
EXTERN_C const CLSID CLSID_SampleGrabber;
EXTERN_C const IID IID_ISampleGrabber;
EXTERN_C const CLSID CLSID_NullRenderer;
// GetUnconnectedPin
// Finds an unconnected pin on a filter in the desired direction
HRESULT GetUnconnectedPin(
IBaseFilter *pFilter, // Pointer to the filter.
PIN_DIRECTION PinDir, // Direction of the pin to find.
IPin **ppPin) // Receives a pointer to the pin.
{
*ppPin = NULL;
IEnumPins *pEnum = NULL;
IPin *pPin = NULL;
HRESULT hr = pFilter->EnumPins(&pEnum);
if (FAILED(hr))
{
return hr;
}
while (pEnum->Next(1, &pPin, NULL) == S_OK)
{
PIN_DIRECTION ThisPinDir;
pPin->QueryDirection(&ThisPinDir);
if (ThisPinDir == PinDir)
{
IPin *pTmp = NULL;
hr = pPin->ConnectedTo(&pTmp);
if (SUCCEEDED(hr)) // Already connected, not the pin we want.
{
pTmp->Release();
}
else // Unconnected, this is the pin we want.
{
pEnum->Release();
*ppPin = pPin;
return S_OK;
}
}
pPin->Release();
}
pEnum->Release();
// Did not find a matching pin.
return E_FAIL;
}
// Disconnect any connections to the filter.
HRESULT DisconnectPins(IBaseFilter *pFilter)
{
IEnumPins *pEnum = NULL;
IPin *pPin = NULL;
HRESULT hr = pFilter->EnumPins(&pEnum);
if (FAILED(hr))
{
return hr;
}
while (pEnum->Next(1, &pPin, NULL) == S_OK)
{
pPin->Disconnect();
pPin->Release();
}
pEnum->Release();
// All pins have been disconnected.
return S_OK;
}
// ConnectFilters
// Connects a pin of an upstream filter to the pDest downstream filter
HRESULT ConnectFilters(
IGraphBuilder *pGraph, // Filter Graph Manager.
IPin *pOut, // Output pin on the upstream filter.
IBaseFilter *pDest) // Downstream filter.
{
if ((pGraph == NULL) || (pOut == NULL) || (pDest == NULL))
{
return E_POINTER;
}
#ifdef debug
PIN_DIRECTION PinDir;
pOut->QueryDirection(&PinDir);
_ASSERTE(PinDir == PINDIR_OUTPUT);
#endif
// Find an input pin on the downstream filter.
IPin *pIn = NULL;
HRESULT hr = GetUnconnectedPin(pDest, PINDIR_INPUT, &pIn);
if (FAILED(hr))
{
return hr;
}
// Try to connect them.
hr = pGraph->Connect(pOut, pIn);
pIn->Release();
return hr;
}
// ConnectFilters
// Connects two filters
HRESULT ConnectFilters(
IGraphBuilder *pGraph,
IBaseFilter *pSrc,
IBaseFilter *pDest)
{
if ((pGraph == NULL) || (pSrc == NULL) || (pDest == NULL))
{
return E_POINTER;
}
// Find an output pin on the first filter.
IPin *pOut = NULL;
HRESULT hr = GetUnconnectedPin(pSrc, PINDIR_OUTPUT, &pOut);
if (FAILED(hr))
{
return hr;
}
hr = ConnectFilters(pGraph, pOut, pDest);
pOut->Release();
return hr;
}
// LocalFreeMediaType
// Free the format buffer in the media type
void LocalFreeMediaType(AM_MEDIA_TYPE& mt)
{
if (mt.cbFormat != 0)
{
CoTaskMemFree((PVOID)mt.pbFormat);
mt.cbFormat = 0;
mt.pbFormat = NULL;
}
if (mt.pUnk != NULL)
{
// Unnecessary because pUnk should not be used, but safest.
mt.pUnk->Release();
mt.pUnk = NULL;
}
}
// LocalDeleteMediaType
// Free the format buffer in the media type,
// then delete the MediaType ptr itself
void LocalDeleteMediaType(AM_MEDIA_TYPE *pmt)
{
if (pmt != NULL)
{
LocalFreeMediaType(*pmt); // See FreeMediaType for the implementation.
CoTaskMemFree(pmt);
}
}
HRESULT SaveGraphFile(IGraphBuilder *pGraph, WCHAR *wszPath)
{
const WCHAR wszStreamName[] = L"ActiveMovieGraph";
HRESULT hr;
IStorage *pStorage = NULL;
hr = StgCreateDocfile(
wszPath,
STGM_CREATE | STGM_TRANSACTED | STGM_READWRITE | STGM_SHARE_EXCLUSIVE,
0, &pStorage);
if(FAILED(hr))
{
return hr;
}
IStream *pStream;
hr = pStorage->CreateStream(
wszStreamName,
STGM_WRITE | STGM_CREATE | STGM_SHARE_EXCLUSIVE,
0, 0, &pStream);
if (FAILED(hr))
{
pStorage->Release();
return hr;
}
IPersistStream *pPersist = NULL;
pGraph->QueryInterface(IID_IPersistStream, (void**)&pPersist);
hr = pPersist->Save(pStream, TRUE);
pStream->Release();
pPersist->Release();
if (SUCCEEDED(hr))
{
hr = pStorage->Commit(STGC_DEFAULT);
}
pStorage->Release();
return hr;
}
//-------------------------------------------------------------------------------------------------------------------------------------------------------------
//-------------------------------------------------------------------------------------------------------------------------------------------------------------
// DirectShowVideo - contains a simple directshow video player implementation
//-------------------------------------------------------------------------------------------------------------------------------------------------------------
//-------------------------------------------------------------------------------------------------------------------------------------------------------------
static int comRefCount = 0;
static void retainCom(){
if( comRefCount == 0 ){
//printf("com is initialized!\n");
CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
}
comRefCount++;
}
static void releaseCom(){
comRefCount--;
if( comRefCount == 0 ){
//printf("com is uninitialized!\n");
CoUninitialize();
}
}
class DirectShowVideo : public ISampleGrabberCB{
public:
DirectShowVideo(){
retainCom();
clearValues();
InitializeCriticalSection(&critSection);
}
~DirectShowVideo(){
tearDown();
releaseCom();
DeleteCriticalSection(&critSection);
}
void tearDown(){
//printf("tearDown\n");
if(m_pControl){
m_pControl->Release();
}
if(m_pEvent){
m_pEvent->Release();
}
if(m_pSeek){
m_pSeek->Release();
}
if(m_pAudio){
m_pAudio->Release();
}
if(m_pBasicVideo){
m_pBasicVideo->Release();
}
if(m_pGrabber){
m_pGrabber->Release();
}
if(m_pGrabberF){
m_pGrabberF->Release();
}
if(m_pGraph){
m_pGraph->Release();
}
if(m_pNullRenderer){
m_pNullRenderer->Release();
}
if( m_pSourceFile ){
m_pSourceFile->Release();
}
if( m_pPosition ){
m_pPosition->Release();
}
if(rawBuffer){
delete[] rawBuffer; //allocated with new[], so use array delete
}
clearValues();
}
void clearValues(){
hr = S_OK;
m_pGraph = NULL;
m_pControl = NULL;
m_pEvent = NULL;
m_pSeek = NULL;
m_pAudio = NULL;
m_pGrabber = NULL;
m_pGrabberF = NULL;
m_pBasicVideo = NULL;
m_pNullRenderer = NULL;
m_pSourceFile = NULL;
m_pPosition = NULL;
rawBuffer = NULL;
timeNow = 0;
lPositionInSecs = 0;
lDurationInNanoSecs = 0;
lTotalDuration = 0;
rtNew = 0;
lPosition = 0;
lvolume = -1000;
evCode = 0;
width = height = 0;
videoSize = 0;
bVideoOpened = false;
bLoop = true;
bPaused = false;
bPlaying = false;
bEndReached = false;
bNewPixels = false;
bFrameNew = false;
curMovieFrame = -1;
frameCount = -1;
movieRate = 1.0;
averageTimePerFrame = 1.0/30.0;
}
//------------------------------------------------
STDMETHODIMP_(ULONG) AddRef() { return 1; }
STDMETHODIMP_(ULONG) Release() { return 2; }
//------------------------------------------------
STDMETHODIMP QueryInterface(REFIID riid, void **ppvObject){
*ppvObject = static_cast<ISampleGrabberCB*>(this);
return S_OK;
}
//------------------------------------------------
STDMETHODIMP SampleCB(double Time, IMediaSample *pSample){
BYTE * ptrBuffer = NULL;
HRESULT hr = pSample->GetPointer(&ptrBuffer);
if(hr == S_OK){
long latestBufferLength = pSample->GetActualDataLength();
if(latestBufferLength == videoSize ){
EnterCriticalSection(&critSection);
memcpy(rawBuffer, ptrBuffer, latestBufferLength);
bNewPixels = true;
//this is just so we know if there is a new frame
frameCount++;
LeaveCriticalSection(&critSection);
}else{
printf("ERROR: SampleCB() - buffer sizes do not match\n");
}
}
return S_OK;
}
//This method has more overhead than SampleCB, so it is left unimplemented
STDMETHODIMP BufferCB(double Time, BYTE *pBuffer, long BufferLen){
return E_NOTIMPL;
}
bool loadMovie(std::string path)
{
tearDown();
// Create the Filter Graph Manager and query for interfaces.
//printf("step 1\n");
hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void **)&m_pGraph);
if (FAILED(hr)){
tearDown();
return false;
}
//printf("step 2\n");
hr = m_pGraph->QueryInterface(IID_IMediaSeeking, (void**)&m_pSeek);
if (FAILED(hr)){
tearDown();
return false;
}
hr = m_pGraph->QueryInterface(IID_IMediaPosition, (LPVOID *)&m_pPosition);
if (FAILED(hr)){
tearDown();
return false;
}
hr = m_pGraph->QueryInterface(IID_IBasicAudio,(void**)&m_pAudio);
if (FAILED(hr)){
tearDown();
return false;
}
// Use IGraphBuilder::QueryInterface (inherited from IUnknown) to get the IMediaControl interface.
//printf("step 4\n");
hr = m_pGraph->QueryInterface(IID_IMediaControl, (void **)&m_pControl);
if (FAILED(hr)){
tearDown();
return false;
}
// And get the Media Event interface, too.
//printf("step 5\n");
hr = m_pGraph->QueryInterface(IID_IMediaEvent, (void **)&m_pEvent);
if (FAILED(hr)){
tearDown();
return false;
}
//SAMPLE GRABBER (ALLOWS US TO GRAB THE BUFFER)//
// Create the Sample Grabber.
hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,IID_IBaseFilter, (void**)&m_pGrabberF);
if (FAILED(hr)){
tearDown();
return false;
}
hr = m_pGraph->AddFilter(m_pGrabberF, L"Sample Grabber");
if (FAILED(hr)){
tearDown();
return false;
}
hr = m_pGrabberF->QueryInterface(IID_ISampleGrabber, (void**)&m_pGrabber);
if (FAILED(hr)){
tearDown();
return false;
}
hr = m_pGrabber->SetCallback(this, 0); //0 = deliver frames via SampleCB
if (FAILED(hr)){
tearDown();
return false;
}
//MEDIA CONVERSION
//Get video properties from the stream's mediatype and apply to the grabber (otherwise we don't get an RGB image)
AM_MEDIA_TYPE mt;
ZeroMemory(&mt,sizeof(AM_MEDIA_TYPE));
mt.majortype = MEDIATYPE_Video;
mt.subtype = MEDIASUBTYPE_RGB24;
mt.formattype = FORMAT_VideoInfo;
//printf("step 5.5\n");
hr = m_pGrabber->SetMediaType(&mt);
if (FAILED(hr)){
tearDown();
return false;
}
//printf("step 6\n");
std::wstring filePathW = std::wstring(path.begin(), path.end());
//this is the easier way to connect the graph, but we have to remove the video window manually
hr = m_pGraph->RenderFile(filePathW.c_str(), NULL);
//this is the more manual way to do it - it's a pain though because the audio won't be connected by default
/*hr = m_pGraph->AddSourceFilter(filePathW.c_str(), L"Source", &m_pSourceFile);
if (FAILED(hr)){
printf("unable to AddSourceFilter\n");
tearDown();
return false;
}*/
//hr = ConnectFilters(m_pGraph, m_pSourceFile, m_pGrabberF);
//if (FAILED(hr)){
// printf("unable to ConnectFilters(m_pGraph, m_pSourceFile, m_pGrabberF)\n");
// tearDown();
// return false;
//}
//printf("step 7\n");
if (SUCCEEDED(hr)){
//Set Params - One Shot should be false unless you want to capture just one buffer
hr = m_pGrabber->SetOneShot(FALSE);
if (FAILED(hr)){
printf("unable to set one shot\n");
tearDown();
return false;
}
hr = m_pGrabber->SetBufferSamples(TRUE);
if (FAILED(hr)){
printf("unable to set buffer samples\n");
tearDown();
return false;
}
//NULL RENDERER//
//used to give the video stream somewhere to go to.
hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)(&m_pNullRenderer));
if (FAILED(hr)){
printf("null renderer error\n");
tearDown();
return false;
}
hr = m_pGraph->AddFilter(m_pNullRenderer, L"Render");
if (FAILED(hr)){
printf("unable to add null renderer\n");
tearDown();
return false;
}
//hr = ConnectFilters(m_pGraph, m_pGrabberF, m_pNullRenderer);
//if (FAILED(hr)){
// printf("unable to ConnectFilters(m_pGraph, m_pGrabberF, m_pNullRenderer)\n");
// tearDown();
// return false;
//}
AM_MEDIA_TYPE mt;
ZeroMemory(&mt,sizeof(AM_MEDIA_TYPE));
hr = m_pGrabber->GetConnectedMediaType(&mt);
if (FAILED(hr)){
printf("unable to call GetConnectedMediaType\n");
tearDown();
return false;
}
VIDEOINFOHEADER * infoheader = (VIDEOINFOHEADER*)mt.pbFormat;
width = infoheader->bmiHeader.biWidth;
height = infoheader->bmiHeader.biHeight;
averageTimePerFrame = infoheader->AvgTimePerFrame / 10000000.0;
videoSize = width * height * 3; //RGB24: 3 bytes per pixel
//printf("video dimensions are %i %i\n", width, height);
//we need to manually change the output from the renderer window to the null renderer
IBaseFilter * m_pVideoRenderer = NULL;
IPin* pinIn = NULL;
IPin* pinOut = NULL;
IBaseFilter * m_pColorSpace = NULL;
hr = m_pGraph->FindFilterByName(L"Video Renderer", &m_pVideoRenderer);
if (FAILED(hr)){
printf("failed to find the video renderer\n");
tearDown();
return false;
}
//we disconnect the video renderer window by finding the output pin of the sample grabber
hr = m_pGrabberF->FindPin(L"Out", &pinOut);
if (FAILED(hr)){
printf("failed to find the sample grabber output pin\n");
tearDown();
return false;
}
hr = pinOut->Disconnect();
if (FAILED(hr)){
printf("failed to disconnect grabber output pin\n");
tearDown();
return false;
}
//SaveGraphFile(m_pGraph, L"test1.grf");
//we have to remove it as well otherwise the graph builder will reconnect it
hr = m_pGraph->RemoveFilter(m_pVideoRenderer);
if (FAILED(hr)){
printf("failed to remove the default renderer\n");
tearDown();
return false;
}else{
m_pVideoRenderer->Release();
}
//now connect the null renderer to the grabber output; if we don't do this, no frames will be captured
hr = m_pNullRenderer->FindPin(L"In", &pinIn);
if (FAILED(hr)){
printf("failed to find the input pin of the null renderer\n");
tearDown();
return false;
}
hr = pinOut->Connect(pinIn, NULL);
if (FAILED(hr)){
printf("failed to connect the null renderer\n");
tearDown();
return false;
}
//printf("step 8\n");
// Run the graph.
//SaveGraphFile(m_pGraph, L"test2.grf");
hr = m_pControl->Run();
//SaveGraphFile(m_pGraph, L"test3.grf");
// Now stop the graph.
hr = m_pControl->Stop();
updatePlayState();
if( hr != S_OK || width == 0 || height == 0 ){
tearDown();
printf("Error occurred while playing or pausing or opening the file\n");
return false;
}else{
rawBuffer = new unsigned char[videoSize];
//printf("success!\n");
}
}else{
tearDown();
printf("Error occurred while playing or pausing or opening the file\n");
return false;
}
bVideoOpened = true;
return true;
}
void update(){
if( bVideoOpened ){
long eventCode = 0;
#ifdef _WIN64
long long ptrParam1 = 0;
long long ptrParam2 = 0;
#else
long ptrParam1 = 0;
long ptrParam2 = 0;
#endif
if( curMovieFrame != frameCount ){
bFrameNew = true;
}else{
bFrameNew = false;
}
curMovieFrame = frameCount;
while( S_OK == m_pEvent->GetEvent(&eventCode, (LONG_PTR*)&ptrParam1, (LONG_PTR*)&ptrParam2, 0) ){
if (eventCode == EC_COMPLETE ){
if(bLoop){
//printf("Restarting!\n");
setPosition(0.0);
}else{
bEndReached = true;
//printf("movie end reached!\n");
stop();
updatePlayState();
}
}
//printf("Event code: %#04x\n Params: %d, %d\n", eventCode, ptrParam1, ptrParam2);
m_pEvent->FreeEventParams(eventCode, ptrParam1, ptrParam2);
}
}
}
bool isLoaded(){
return bVideoOpened;
}
//volume has to be log corrected/converted
void setVolume(float volPct){
if( isLoaded() ){
if( volPct < 0.0 ) volPct = 0.0;
if( volPct > 1.0 ) volPct = 1.0;
long vol = log10(volPct) * 4000.0;
m_pAudio->put_Volume(vol);
}
}
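//For the conversion above (illustrative numbers, not from the original post):
//volPct = 0.5 gives vol = 4000 * log10(0.5) ~= -1204, i.e. about -12 dB,
//since put_Volume() takes units of 1/100th of a dB (0 = full volume, -10000 = silence).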
float getVolume(){
float volPct = 0.0;
if( isLoaded() ){
long vol = 0;
m_pAudio->get_Volume(&vol);
volPct = powf(10, (float)vol/4000.0);
}
return volPct;
}
double getDurationInSeconds(){
if( isLoaded() ){
long long lDurationInNanoSecs = 0;
m_pSeek->GetDuration(&lDurationInNanoSecs);
double timeInSeconds = (double)lDurationInNanoSecs/10000000.0;
return timeInSeconds;
}
return 0.0;
}
double getCurrentTimeInSeconds(){
if( isLoaded() ){
long long lCurrentTimeInNanoSecs = 0;
m_pSeek->GetCurrentPosition(&lCurrentTimeInNanoSecs);
double timeInSeconds = (double)lCurrentTimeInNanoSecs/10000000.0;
return timeInSeconds;
}
return 0.0;
}
void setPosition(float pct){
if( bVideoOpened ){
if( pct < 0.0 ) pct = 0.0;
if( pct > 1.0 ) pct = 1.0;
long long lDurationInNanoSecs = 0;
m_pSeek->GetDuration(&lDurationInNanoSecs);
rtNew = ((float)lDurationInNanoSecs * pct);
hr = m_pSeek->SetPositions(&rtNew, AM_SEEKING_AbsolutePositioning,NULL,AM_SEEKING_NoPositioning);
}
}
float getPosition(){
if( bVideoOpened ){
float timeDur = getDurationInSeconds();
if( timeDur > 0.0 ){
return getCurrentTimeInSeconds() / timeDur;
}
}
return 0.0;
}
void setSpeed(float speed){
if( bVideoOpened ){
m_pPosition->put_Rate(speed);
m_pPosition->get_Rate(&movieRate);
}
}
double getSpeed(){
return movieRate;
}
void processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip){
int widthInBytes = width * 3;
int numBytes = widthInBytes * height;
if(!bRGB){
//copy the rows straight across, optionally flipped vertically
if(bFlip){
for(int y = 0; y < height; y++){
memcpy(dst + (y * widthInBytes), src + ( (height - y - 1) * widthInBytes), widthInBytes);
}
}else{
memcpy(dst, src, numBytes);
}
}else{
//swap BGR to RGB while copying
if(bFlip){
int x = 0;
int y = (height - 1) * widthInBytes;
src += y; //start at the last row
for(int i = 0; i < numBytes; i+=3){
if(x >= width){
x = 0;
src -= widthInBytes*2; //step back to the start of the previous row
}
*dst = *(src+2);
dst++;
*dst = *(src+1);
dst++;
*dst = *src;
dst++;
src+=3;
x++;
}
}
else{
for(int i = 0; i < numBytes; i+=3){
*dst = *(src+2);
dst++;
*dst = *(src+1);
dst++;
*dst = *src;
dst++;
src+=3;
}
}
}
}
void play(){
if( bVideoOpened ){
m_pControl->Run();
bEndReached = false;
updatePlayState();
}
}
void stop(){
if( bVideoOpened ){
if( isPlaying() ){
setPosition(0.0);
}
m_pControl->Stop();
updatePlayState();
}
}
void setPaused(bool bPaused){
if( bVideoOpened ){
if( bPaused ){
m_pControl->Pause();
}else{
m_pControl->Run();
}
updatePlayState();
}
}
void updatePlayState(){
if( bVideoOpened ){
FILTER_STATE fs;
hr = m_pControl->GetState(4000, (OAFilterState*)&fs);
if(hr==S_OK){
if( fs == State_Running ){
bPlaying = true;
bPaused = false;
}
else if( fs == State_Paused ){
bPlaying = false;
bPaused = true;
}else if( fs == State_Stopped ){
bPlaying = false;
bPaused = false;
}
}
}
}
bool isPlaying(){
return bPlaying;
}
bool isPaused(){
return bPaused;
}
bool isLooping(){
return bLoop;
}
void setLoop(bool loop){
bLoop = loop;
}
bool isMovieDone(){
return bEndReached;
}
float getWidth(){
return width;
}
float getHeight(){
return height;
}
bool isFrameNew(){
return bFrameNew;
}
void nextFrame(){
//we have to do it like this as the frame based approach is not very accurate
if( bVideoOpened && ( isPlaying() || isPaused() ) ){
int curFrame = getCurrentFrameNo();
float curFrameF = curFrame;
for(int i = 1; i < 6; i++){
setAproximateFrameF( curFrameF + 0.3 * (float)i );
if( getCurrentFrameNo() >= curFrame + 1 ){
break;
}
}
}
}
void preFrame(){
//we have to do it like this as the frame based approach is not very accurate
if( bVideoOpened && ( isPlaying() || isPaused() ) ){
int curFrame = getCurrentFrameNo();
float curFrameF = curFrame;
for(int i = 1; i < 6; i++){
setAproximateFrameF( curFrameF - 0.3 * (float)i );
if( getCurrentFrameNo() <= curFrame - 1 ){
break;
}
}
}
}
void setAproximateFrameF(float frameF){
if( bVideoOpened ){
float pct = frameF / (float)getAproximateNoFrames();
if( pct > 1.0 ) pct = 1.0;
if( pct < 0.0 ) pct = 0.0;
setPosition(pct);
}
}
void setAproximateFrame(int frame){
if( bVideoOpened ){
float pct = (float)frame / (float)getAproximateNoFrames();
if( pct > 1.0 ) pct = 1.0;
if( pct < 0.0 ) pct = 0.0;
setPosition(pct);
}
}
int getCurrentFrameNo(){
if( bVideoOpened ){
return getPosition() * (float) getAproximateNoFrames();
}
return 0;
}
int getAproximateNoFrames(){
if( bVideoOpened && averageTimePerFrame > 0.0 ){
return getDurationInSeconds() / averageTimePerFrame;
}
return 0;
}
void getPixels(unsigned char * dstBuffer){
if(bVideoOpened && bNewPixels){
EnterCriticalSection(&critSection);
processPixels(rawBuffer, dstBuffer, width, height, true, true);
bNewPixels = false;
LeaveCriticalSection(&critSection);
}
}
//this is the non-callback approach
//void getPixels(unsigned char * dstBuffer){
//
// if(bVideoOpened && isFrameNew()){
// long bufferSize = videoSize;
// HRESULT hr = m_pGrabber->GetCurrentBuffer(&bufferSize, (long *)rawBuffer);
//
// if(hr==S_OK){
// if (videoSize == bufferSize){
// processPixels(rawBuffer, dstBuffer, width, height, true, true);
// }else{
// printf("ERROR: GetPixels() - bufferSizes do not match!\n");
// }
// }else{
// printf("ERROR: GetPixels() - Unable to get pixels for device bufferSize = %i \n", bufferSize);
// }
// }
//}
protected:
HRESULT hr; // COM return value
IGraphBuilder *m_pGraph; // Graph Builder interface
IMediaControl *m_pControl; // Media Control interface
IMediaEvent *m_pEvent; // Media Event interface
IMediaSeeking *m_pSeek; // Media Seeking interface
IMediaPosition * m_pPosition;
IBasicAudio *m_pAudio; // Audio Settings interface
ISampleGrabber * m_pGrabber;
IBaseFilter * m_pSourceFile;
IBaseFilter * m_pGrabberF;
IBasicVideo * m_pBasicVideo;
IBaseFilter * m_pNullRenderer;
REFERENCE_TIME timeNow; // Used for FF & REW of movie, current time
LONGLONG lPositionInSecs; // Time in seconds
LONGLONG lDurationInNanoSecs; // Duration in nanoseconds
LONGLONG lTotalDuration; // Total duration
REFERENCE_TIME rtNew; // Reference time of movie
long lPosition; // Desired position of movie used in FF & REW
long lvolume; // Volume in 1/100ths of a dB; valid values range from -10,000 (silence) to 0 (full volume)
long evCode; // Event code returned when waiting on graph events
long width, height;
long videoSize;
double averageTimePerFrame;
bool bFrameNew;
bool bNewPixels;
bool bVideoOpened;
bool bPlaying;
bool bPaused;
bool bLoop;
bool bEndReached;
double movieRate;
int curMovieFrame;
int frameCount;
CRITICAL_SECTION critSection;
unsigned char * rawBuffer;
};
class MiDirectShowPlayer
{
public:
DirectShowVideo m_Player;
unsigned char* buffer;
GLuint m_Texid;
int m_Width;
int m_Height;
public:
bool Load(std::string filename)
{
buffer = NULL;
m_Texid = 0; //make sure BuildTex() doesn't test an uninitialized id
return m_Player.loadMovie(filename);
}
void close()
{
if(buffer){ delete[] buffer; buffer = NULL; }
}
void update()
{
if(m_Player.isLoaded())
{
m_Width = m_Player.getWidth();
m_Height = m_Player.getHeight();
m_Player.update();
if(buffer == NULL)
{
int sizei = m_Width * m_Height * 3; //RGB24: 3 bytes per pixel
buffer = new unsigned char[sizei];
}
m_Player.getPixels(buffer);
BuildTex();
}
}
void BuildTex()
{
if(m_Texid != 0)
{
glDeleteTextures(1,&m_Texid);
}
glGenTextures(1,&m_Texid);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D,m_Texid);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGB,m_Width,m_Height,0,GL_RGB,GL_UNSIGNED_BYTE,buffer);
printf("texid:%d \n",m_Texid);
}
void Play()
{
if(m_Player.isLoaded())
{
m_Player.play();
}
}
void Stop()
{
if(m_Player.isLoaded())
{
m_Player.stop();
}
}
bool isFrameNew()
{
if(m_Player.isLoaded())
{
return m_Player.isFrameNew();
}
return false;
}
float GetWidth()
{
if(m_Player.isLoaded())
{
return m_Player.getWidth();
}
return 0.0f;
}
float GetHeight()
{
if(m_Player.isLoaded())
{
return m_Player.getHeight();
}
return 0.0f;
}
bool isPaused()
{
return m_Player.isPaused();
}
bool isLoaded()
{
return m_Player.isLoaded();
}
bool isPlaying()
{
return m_Player.isPlaying();
}
float GetPostion()
{
if(m_Player.isLoaded())
{
return m_Player.getPosition();
}
return 0.0f;
}
float GetSpeed()
{
if(m_Player.isLoaded())
{
return m_Player.getSpeed();
}
return 0.0f;
}
float getDuration()
{
if(m_Player.isLoaded())
{
return m_Player.getDurationInSeconds();
}
return 0.0f;
}
void SetPause(bool p)
{
if(m_Player.isLoaded())
{
m_Player.setPaused(p);
}
}
void setPostion(float f)
{
if(m_Player.isLoaded())
{
m_Player.setPosition(f);
}
}
void setVolume(float volume)
{
if(m_Player.isLoaded())
{
m_Player.setVolume(volume);
}
}
void setLoopState(bool b)
{
if(m_Player.isLoaded())
{
m_Player.setLoop(b);
}
}
int getCurrentFrame()
{
if(m_Player.isLoaded())
{
return m_Player.getCurrentFrameNo();
}
return 0;
}
int getTotalNumFrames()
{
if(m_Player.isLoaded())
{
return m_Player.getAproximateNoFrames();
}
return 0;
}
void SetFrame(int f)
{
if(m_Player.isLoaded())
{
if(f < 0)
f = 0;
if(f > getTotalNumFrames())
f = getTotalNumFrames();
m_Player.setAproximateFrame(f);
}
}
};
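For context, here is a minimal usage sketch of the wrapper class (an assumption, not part of the original post; the file name and function name are hypothetical):

//Hypothetical usage sketch -- assumes a window with a current OpenGL context.
MiDirectShowPlayer player;

void InitPlayback()
{
    if (player.Load("movie.avi")) { //hypothetical file name
        player.setLoopState(true);
        player.setVolume(0.8f);
        player.Play();
    }
}
//Each frame: call player.update(), then draw a quad textured with
//player.m_Texid, as the snippet below does.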

The code first grabs each frame of the video into a buffer, then builds an OpenGL texture from that buffer and draws the texture onto the window.
The per-frame OpenGL update looks like this:
m_DShowPlayer.update();
/* OpenGL animation code goes here */
glClearColor (0.0f, 0.0f, 0.0f, 0.0f);
glClear (GL_COLOR_BUFFER_BIT);
glPushMatrix ();
// glRotatef (0, 0.0f, 0.0f, 1.0f);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D,m_DShowPlayer.m_Texid);
DrawRect(0.0f, 0.0f, (float)m_DShowPlayer.GetWidth(), (float)m_DShowPlayer.GetHeight()); //original arguments were lost; these extents are a guess
glPopMatrix ();
SwapBuffers (hDC);
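DrawRect is a helper the original post never defines. A plausible fixed-function sketch, consistent with the immediate-mode GL used above and with the guessed pixel-space arguments (assuming an orthographic projection in pixel units), might be:

//Hypothetical helper -- the original implementation was not shown.
//Draws a quad at (x, y) of size w x h using the currently bound texture.
void DrawRect(float x, float y, float w, float h)
{
    glBegin(GL_QUADS);
    glTexCoord2f(0.0f, 0.0f); glVertex2f(x,     y);
    glTexCoord2f(1.0f, 0.0f); glVertex2f(x + w, y);
    glTexCoord2f(1.0f, 1.0f); glVertex2f(x + w, y + h);
    glTexCoord2f(0.0f, 1.0f); glVertex2f(x,     y + h);
    glEnd();
}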