Source code for using DirectShow instead of OpenCV's VFW capture


http://wmnmtm.blog.163.com/blog/static/38245714201071063355663/

This article provides source code for capturing camera frames with DirectShow instead of OpenCV's VFW backend, together with the conversion of the captured buffer to IplImage format.
In my experiments, OpenCV's cvCaptureFromCAM uses VFW, which relies on the Windows message mechanism and is slow: tests showed only about 9-12 fps. After switching to DirectShow, the rate rose to about 60 frames/s. From the OpenCV group it appears this is a common problem; others may have done the conversion, but no complete example with code was available, so it is posted here. It should help anyone who wants to speed up video analysis with OpenCV.
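For comparison, here is a minimal sketch of the VFW-based capture loop that the DirectShow code replaces (OpenCV 1.x C API; device index 0 and the analysis placeholder are illustrative, not from the original post):

#include <cv.h>
#include <highgui.h>

CvCapture *capture = cvCaptureFromCAM(0);       // uses the VFW backend on Windows
for (;;)
{
    IplImage *frame = cvQueryFrame(capture);    // pumps the message loop; ~9-12 fps observed
    if (!frame)
        break;
    /* video analysis goes here */
}
cvReleaseCapture(&capture);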

Usage:
static ARFrameGrabber frameGrabber;
IplImage ds_frame;

frameGrabber.Init(0, true);
frameGrabber.SetFlippedImage(true);

for (;;)
{
    IplImage *frame = 0;

    frameGrabber.GrabByteFrame();
    BYTE *myBuffer = frameGrabber.GetByteBuffer();

    int width  = frameGrabber.GetWidth();
    int height = frameGrabber.GetHeight();
    int stride = (width * sizeof(RGBTRIPLE) + 3) & -4;

    cvInitImageHeader(&ds_frame, cvSize(width, height), IPL_DEPTH_8U, 3, IPL_ORIGIN_BL, 4);
    ds_frame.widthStep = stride;
    cvSetData(&ds_frame, myBuffer, stride);
    frame = &ds_frame;

    /* video analysis goes here */
}
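The expression (width * sizeof(RGBTRIPLE) + 3) & -4 rounds the row size in bytes up to the next multiple of 4, matching the 4-byte row alignment passed to cvInitImageHeader. To verify the frame-rate improvement, the loop can be timed with OpenCV's tick counter; a minimal sketch (not part of the original code):

int frames = 0;
double start = (double)cvGetTickCount();

/* ... run the capture loop above, incrementing frames once per iteration ... */

double seconds = ((double)cvGetTickCount() - start) / (cvGetTickFrequency() * 1e6);
printf("fps = %.1f\n", frames / seconds);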

The DirectShow source files follow; add them to your project and they are ready to use.


// ARFrameGrabber.h: interface for the ARFrameGrabber class.
//
//////////////////////////////////////////////////////////////////////

#if !defined(AFX_ARFRAMEGRABBER_H__C5553937_4BAB_4FEF_B4A6_1693AB0C99E3__INCLUDED_)
#define AFX_ARFRAMEGRABBER_H__C5553937_4BAB_4FEF_B4A6_1693AB0C99E3__INCLUDED_

#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000

//#include <AR/config.h>

#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#include <dshow.h>
//#include <streams.h>
#include <atlbase.h>
#include <qedit.h>

class SmartString
{
public:
 SmartString():str(NULL) {}

 SmartString(char* pStr):str(NULL)
 {
  if (pStr)
  {
   int size = int(strlen(pStr));
   str = new char[size+1];
   strcpy(str, pStr);
  }
 }

 SmartString(SmartString& sStr):str(NULL)   // initialize str so SetString does not delete an uninitialized pointer
 {
  SetString(sStr.GetBuffer());
 }

 ~SmartString()
 {
  if (str)
   delete[] str;
 }

 SmartString& operator =(char* pStr)
 {
  SetString(pStr);
  return *this;
 }

 SmartString& operator =(SmartString& sStr)
 {
  SetString(sStr.GetBuffer());
  return *this;
 }

 char* GetBuffer() {return str;}

protected:
 void SetString(char *pStr)
 {
  if (str)
   delete[] str;

  if (!pStr)
  {
   str = NULL;
  }
  else
  {
   int size = int(strlen(pStr));
   str = new char[size + 1];
   strcpy(str, pStr);
  }

 }


 char* str;
};

class DeviceInfo
{
public:
 DeviceInfo():next(NULL), deviceId(-1)
 {
 }
 ~DeviceInfo()
 {
  if (next)
   delete next;
 }


 SmartString friendlyName;
 int   deviceId; 
 DeviceInfo* next;
};


class  ARFrameGrabber 
{
public:
 ARFrameGrabber();
 virtual ~ARFrameGrabber();

 void Init(int deviceId, bool displayProperties = true);
 void BindFilter(int deviceId, IBaseFilter **pFilter);
 void GrabFrame(long* size, long** pBuffer);
 void GrabFrame();
 void Grab32BitFrame();
 void GrabByteFrame();
 void SetCrossBar();
 


 long  GetBufferSize() {return bufferSize;}
 long* GetBuffer() {return pBuffer;}
 BYTE* GetByteBuffer() {return pBYTEbuffer;}
 
 void SetFlippedImage(bool flag) {flipImage = flag;}

 void DisplayProperties();
 void EnumDevices(DeviceInfo *head);
    int GetWidth();
    int GetHeight();

protected:
 CComPtr<IGraphBuilder> pGraph;
 CComPtr<IBaseFilter> pDeviceFilter;
 CComPtr<IMediaControl> pMediaControl;
 CComPtr<IBaseFilter> pSampleGrabberFilter;
 CComPtr<ISampleGrabber> pSampleGrabber;
 CComPtr<IPin> pGrabberInput;
 CComPtr<IPin> pGrabberOutput;
 CComPtr<IPin> pCameraOutput;
 CComPtr<IMediaEvent> pMediaEvent;
 CComPtr<IBaseFilter> pNullFilter;
 CComPtr<IPin> pNullInputPin;

 void FlipImage(long* pBuf);

private:
 void ReportError(char *msg);

 bool flipImage;
 long bufferSize;
 long *pBuffer;
 BYTE *pBYTEbuffer;
    bool connected;
    int width;
    int height;
};

#endif // !defined(AFX_ARFRAMEGRABBER_H__C5553937_4BAB_4FEF_B4A6_1693AB0C99E3__INCLUDED_)

 

ARFrameGrabber.cpp
// ARFrameGrabber.cpp: implementation of the ARFrameGrabber class.
//
//////////////////////////////////////////////////////////////////////

#include "stdafx.h"
#include <assert.h>
#include "ARFrameGrabber.h"

//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////

ARFrameGrabber::ARFrameGrabber()
    : pBuffer(NULL), pBYTEbuffer(NULL), bufferSize(0), flipImage(false), connected(false), width(0), height(0)
{
}

ARFrameGrabber::~ARFrameGrabber()
{
 if (pMediaControl)    // Init() may never have been called
  pMediaControl->Stop();
 if (pBuffer)
  delete[] pBuffer;
 if (pBYTEbuffer)
  delete[] pBYTEbuffer;
}


void ARFrameGrabber::Init(int deviceId, bool displayProperties)
{
 HRESULT hr = S_OK;
 CoInitialize(NULL);
 // Create the Filter Graph Manager.
 hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC,
  IID_IGraphBuilder, (void **)&pGraph);

 hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
    IID_IBaseFilter, (LPVOID *)&pSampleGrabberFilter);

 hr = pGraph->QueryInterface(IID_IMediaControl, (void **) &pMediaControl);
 hr = pGraph->QueryInterface(IID_IMediaEvent, (void **) &pMediaEvent);

 hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
  IID_IBaseFilter, (LPVOID*) &pNullFilter);

 hr = pGraph->AddFilter(pNullFilter, L"NullRenderer");

 hr = pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber, (void**)&pSampleGrabber);

 

 AM_MEDIA_TYPE   mt;
 ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
 mt.majortype = MEDIATYPE_Video;
 mt.subtype = MEDIASUBTYPE_RGB32;
 mt.formattype = FORMAT_VideoInfo;
 hr = pSampleGrabber->SetMediaType(&mt);

 pGraph->AddFilter(pSampleGrabberFilter, L"Grabber");
 


 // Bind Device Filter.  We know the device because the id was passed in
 BindFilter(deviceId, &pDeviceFilter);
 pGraph->AddFilter(pDeviceFilter, NULL);

 CComPtr<IEnumPins> pEnum;
 pDeviceFilter->EnumPins(&pEnum);
 
 hr = pEnum->Reset();
 hr = pEnum->Next(1, &pCameraOutput, NULL);


 pEnum = NULL;
 pSampleGrabberFilter->EnumPins(&pEnum);
 pEnum->Reset();
 hr = pEnum->Next(1, &pGrabberInput, NULL);

 pEnum = NULL;
 pSampleGrabberFilter->EnumPins(&pEnum);
 pEnum->Reset();
 pEnum->Skip(1);
 hr = pEnum->Next(1, &pGrabberOutput, NULL);


 

 pEnum = NULL;
 pNullFilter->EnumPins(&pEnum);
 pEnum->Reset();
 hr = pEnum->Next(1, &pNullInputPin, NULL);

  SetCrossBar();

    if (displayProperties) {
   CComPtr<ISpecifyPropertyPages> pPages;

   HRESULT hr = pCameraOutput->QueryInterface(IID_ISpecifyPropertyPages, (void**)&pPages);
   if (SUCCEEDED(hr))
   {
    PIN_INFO PinInfo;
    pCameraOutput->QueryPinInfo(&PinInfo);

    CAUUID caGUID;
    pPages->GetPages(&caGUID);

    OleCreatePropertyFrame(
     NULL,
     0,
     0,
     L"Property Sheet",
     1,
     (IUnknown **)&(pCameraOutput.p),
     caGUID.cElems,
     caGUID.pElems,
     0,
     0,
     NULL);
    CoTaskMemFree(caGUID.pElems);
    PinInfo.pFilter->Release();
   }
    }

 // connect the graph: capture device -> sample grabber -> null renderer
 hr = pGraph->Connect(pCameraOutput, pGrabberInput);
 hr = pGraph->Connect(pGrabberOutput, pNullInputPin);

// hr = pGraph->Render(pGrabberOutput);

 if (FAILED(hr))
 {
  switch(hr)
  {
  case VFW_S_NOPREVIEWPIN :
   break;
  case E_FAIL :
   break;
  case E_INVALIDARG :
   break;
  case E_POINTER :
   break;
  }
 }

 // buffer each sample and make the graph stop after delivering one sample;
 // each Grab*Frame() call below re-runs the graph and waits for completion
 pSampleGrabber->SetBufferSamples(TRUE);
 pSampleGrabber->SetOneShot(TRUE);
   
    hr = pSampleGrabber->GetConnectedMediaType(&mt);
    VIDEOINFOHEADER *videoHeader;
    assert(mt.formattype == FORMAT_VideoInfo);
    videoHeader = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
    width = videoHeader->bmiHeader.biWidth;
    height = videoHeader->bmiHeader.biHeight;
    connected = true;
}

void ARFrameGrabber::GrabFrame(long* size, long** pBuffer)
{
    if (!size)
  return;

 // don't want to leak mem, pBuffer must be NULL
 if (!pBuffer || *pBuffer)
  return;

 long evCode;


 pMediaControl->Run();
 pMediaEvent->WaitForCompletion(INFINITE, &evCode);
 pSampleGrabber->GetCurrentBuffer(size, NULL);
 if (*size)
 {
  *pBuffer = new long[*size];
 }

 pSampleGrabber->GetCurrentBuffer(size, *pBuffer);
}

void ARFrameGrabber::GrabFrame()
{
 long evCode;
 long size = 0;

 pMediaControl->Run();
 pMediaEvent->WaitForCompletion(INFINITE, &evCode);
 pSampleGrabber->GetCurrentBuffer(&size, NULL);

 // if buffer is not the same size as before, create a new one
 if (size != bufferSize)
 {
  if (pBuffer)
   delete[] pBuffer;

  bufferSize = size;

  pBuffer = new long[bufferSize];
 }

 pSampleGrabber->GetCurrentBuffer(&size, pBuffer);
 if (flipImage)
  FlipImage(pBuffer);
}

void ARFrameGrabber::FlipImage(long* pBuf)
{
 DWORD *ptr = (DWORD*)pBuf;
 int pixelCount = bufferSize/4;


 if (!pBuf)
  return;

 for (int index = 0; index < pixelCount/2; index++)
 {
  ptr[index] = ptr[index] ^ ptr[pixelCount - index - 1];
  ptr[pixelCount - index - 1] = ptr[index] ^ ptr[pixelCount - index - 1];
  ptr[index] = ptr[index] ^ ptr[pixelCount - index - 1];
 }

}
// added by hardy
void ARFrameGrabber::GrabByteFrame()
{

 long evCode;
 long size = 0;

 pMediaControl->Run();
 pMediaEvent->WaitForCompletion(INFINITE, &evCode);
 pSampleGrabber->GetCurrentBuffer(&size, NULL);

 // if buffer is not the same size as before, create a new one
 if (size != bufferSize)
 {
  if (pBuffer)
   delete[] pBuffer;
  bufferSize = size;
  pBuffer = new long[bufferSize];
  if(pBYTEbuffer)
   delete[] pBYTEbuffer;
  pBYTEbuffer = new BYTE[bufferSize/4*3];

 }

 pSampleGrabber->GetCurrentBuffer(&size, pBuffer);
 // convert the RGB32 buffer (B,G,R,A per pixel) to tightly packed B,G,R by dropping every 4th byte
 BYTE *pTemp = (BYTE*) pBuffer;
 BYTE *pBYTETemp = pBYTEbuffer;
 for (int i = 0; i < bufferSize; i++)
 {
  if ((i + 1) % 4 == 0)
  {
   pTemp++;   // skip the alpha/padding byte
  }
  else
  {
   *(pBYTETemp) = *(pTemp);
   pBYTETemp++;
   pTemp++;
  }
 }
}

void ARFrameGrabber::Grab32BitFrame()
{
 long evCode;
 long size = 0;
 long* pData;
 unsigned char* pTemp;
 unsigned char* ptr;

 pMediaControl->Run();
 pMediaEvent->WaitForCompletion(INFINITE, &evCode);
 pSampleGrabber->GetCurrentBuffer(&size, NULL);

 if (size != bufferSize)
 {
  if (pBuffer)
   delete[] pBuffer;

  bufferSize = size/3*4;  // add space for padding

  pBuffer = new long[bufferSize];
 }

 pData= (long*) new unsigned char[size];
 pSampleGrabber->GetCurrentBuffer(&size, pData);
 ptr = ((unsigned char*)pBuffer) + bufferSize - 1;
 pTemp = (unsigned char*) pData;


 // do the padding
 for (int index = 0; index < size/3; index++)
 {
  unsigned char r = *(pTemp++);
  unsigned char g = *(pTemp++);
  unsigned char b = *(pTemp++);

  *(ptr--) = 0;
  *(ptr--) = b;
  *(ptr--) = g;
  *(ptr--) = r;
 }
/*
 for (int index = 0; index < size; index++)
 {
  *ptr = ((unsigned char *)pTemp)[index];
  ptr--;
  if (index % 3 == 2)
  {
   *ptr = 0;
   ptr--;
  }
 }
*/
 delete[] pData;
}

void ARFrameGrabber::BindFilter(int deviceId, IBaseFilter **pFilter)
{
 if (deviceId < 0)
  return;
 
    // enumerate all video capture devices
 CComPtr<ICreateDevEnum> pCreateDevEnum;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
     IID_ICreateDevEnum, (void**)&pCreateDevEnum);
    if (hr != NOERROR)
 {
//  ErrMsg("Error Creating Device Enumerator");
  return;
 }

    CComPtr<IEnumMoniker> pEm;
    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
        &pEm, 0);
    if (hr != NOERROR)
 {
//  ErrMsg("Sorry, you have no video capture hardware");
  return;
    }

    pEm->Reset();
    ULONG cFetched;
    IMoniker *pM;
 int index = 0;
    while (index <= deviceId && pEm->Next(1, &pM, &cFetched) == S_OK)
    {
  IPropertyBag *pBag;
  hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
  if(SUCCEEDED(hr))
  {
   VARIANT var;
   var.vt = VT_BSTR;
   hr = pBag->Read(L"FriendlyName", &var, NULL);
   if (hr == NOERROR)
   {
    if (index == deviceId)
    {
     pM->BindToObject(0, 0, IID_IBaseFilter, (void**)pFilter);
    }
    SysFreeString(var.bstrVal);
   }
   pBag->Release();
  }
  pM->Release();
  index++;
    }
}

int ARFrameGrabber::GetWidth()
{
  return width;
}

int ARFrameGrabber::GetHeight()
{
  return height;
}

void ARFrameGrabber::EnumDevices(DeviceInfo *head)
{
 if (!head)
  return;

 DeviceInfo *ptr = head;
 int id = 0;
 
    // enumerate all video capture devices
 CComPtr<ICreateDevEnum> pCreateDevEnum;
//    ICreateDevEnum *pCreateDevEnum;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
     IID_ICreateDevEnum, (void**)&pCreateDevEnum);
    if (hr != NOERROR)
 {
//  ErrMsg("Error Creating Device Enumerator");
  return;
 }

    CComPtr<IEnumMoniker> pEm;
    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
        &pEm, 0);
    if (hr != NOERROR)
 {
//  ErrMsg("Sorry, you have no video capture hardware");
  return;
    }

    pEm->Reset();
    ULONG cFetched;
    IMoniker *pM;
    while (pEm->Next(1, &pM, &cFetched) == S_OK)
    {
  IPropertyBag *pBag;
  hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
  if(SUCCEEDED(hr))
  {
   VARIANT var;
   var.vt = VT_BSTR;
   hr = pBag->Read(L"FriendlyName", &var, NULL);
   if (hr == NOERROR)
   {
    char str[2048];
    
    if (ptr->deviceId != -1)
    {
     ptr->next = new DeviceInfo();
     ptr = ptr->next;
    }


    ptr->deviceId = id++;
    WideCharToMultiByte(CP_ACP,0,var.bstrVal, -1, str, 2048, NULL, NULL);
    
    ptr->friendlyName = str;

    SysFreeString(var.bstrVal);
   }
   pBag->Release();
  }
  pM->Release();
    }
}


void ARFrameGrabber::ReportError(char *msg)
{
 //MessageBox(NULL, msg, "ARFrameGrabber Error", MB_ICONSTOP);
}
// route the crossbar so that the PhysConn_Video_Composite input feeds the video decoder output
void ARFrameGrabber::SetCrossBar()
{
 IAMCrossbar *pXBar1 = NULL;
 ICaptureGraphBuilder2 *pBuilder = NULL;


 HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL,
        CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2,
        (void **)&pBuilder);
 if (FAILED(hr))
  return;

 hr = pBuilder->SetFiltergraph(pGraph);

 hr = pBuilder->FindInterface(&LOOK_UPSTREAM_ONLY, NULL,
  pDeviceFilter, IID_IAMCrossbar, (void**)&pXBar1);

 if (SUCCEEDED(hr))
 {
  
  long OutputPinCount;
  long InputPinCount;
  long PinIndexRelated;
  long PhysicalType;
  long inPort = 0;
  long outPort = 0;

  pXBar1->get_PinCounts(&OutputPinCount,&InputPinCount);
  for(int i =0;i<InputPinCount;i++)
  {
   pXBar1->get_CrossbarPinInfo(TRUE,i,&PinIndexRelated,&PhysicalType);
   if(PhysConn_Video_Composite==PhysicalType)
   {
    inPort = i;
    break;
   }
  }
  for(int i =0;i<OutputPinCount;i++)
  {
   pXBar1->get_CrossbarPinInfo(FALSE,i,&PinIndexRelated,&PhysicalType);
   if(PhysConn_Video_VideoDecoder==PhysicalType)
   {
    outPort = i;
    break;
   }
  }
  
  if(S_OK==pXBar1->CanRoute(outPort,inPort))
  {
   pXBar1->Route(outPort,inPort);
  }
  pXBar1->Release();  
 }
 pBuilder->Release();
}
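
The class also provides EnumDevices for listing the available capture devices. A possible way to pick a device id before calling Init (a sketch built on the classes above; the printf formatting and the explicit CoInitialize call are illustrative, not part of the original post):

CoInitialize(NULL);                 // EnumDevices needs COM initialized; Init() also calls CoInitialize

ARFrameGrabber grabber;
DeviceInfo devices;
grabber.EnumDevices(&devices);

for (DeviceInfo *p = &devices; p; p = p->next)
{
    if (p->deviceId != -1)          // the head node stays at -1 when no device was found
        printf("%d: %s\n", p->deviceId, p->friendlyName.GetBuffer());
}

grabber.Init(0, true);              // replace 0 with the chosen deviceId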