DirectShow Virtual Camera

Code for a DirectShow virtual camera on Windows:

Some sample code for study can be downloaded here:
http://download.csdn.net/detail/ab7936573/9868274

#pragma once

#ifndef POINTER_64
#define POINTER_64 __ptr64  // work-around for older SDK headers pulled in by streams.h/qedit.h
#endif

#include <streams.h>
#include <initguid.h>
#include "LogPrint.h"

#include "CCaptureVideo.h"
#include "EvoPacket.h"
#include "PacketPool.h"
#include "EvoPacketAllocator.h"

extern "C" const GUID CLSID_VirtualCamera;

// {982434C1-D408-4984-A4D0-33DF357BD7ED}
DEFINE_GUID(CLSID_VirtualCamera,
    0x982434c1, 0xd408, 0x4984, 0xa4, 0xd0, 0x33, 0xdf, 0x35, 0x7b, 0xd7, 0xed);

#define VirtualCamera_FilterName    L"Virtual Camera (By evomotion)"

class CVirtualSource : public CSource {
public:
    static CUnknown * WINAPI CreateInstance(LPUNKNOWN lpunk, HRESULT *phr);
    STDMETHODIMP QueryInterface(REFIID riid, void **ppv);
private:
    CVirtualSource(LPUNKNOWN lpunk, HRESULT *phr);
    CSourceStream *m_pPin;
};


#define DECLARE_PTR(type, ptr, expr) type* ptr = (type*)(expr);

const REFERENCE_TIME FPS_30 = UNITS / 30;   // UNITS = 10,000,000 (100 ns units), so FPS_30 = 333,333
const REFERENCE_TIME FPS_20 = UNITS / 20;
const REFERENCE_TIME FPS_10 = UNITS / 10;
const REFERENCE_TIME FPS_5 = UNITS / 5;
const REFERENCE_TIME FPS_4 = UNITS / 4;
const REFERENCE_TIME FPS_3 = UNITS / 3;
const REFERENCE_TIME FPS_2 = UNITS / 2;
const REFERENCE_TIME FPS_1 = UNITS / 1;

const REFERENCE_TIME rtDefaultFrameLength = FPS_30;

class CVirtualStream : public CSourceStream, 
    public IAMStreamConfig, public IKsPropertySet,
    public CVideoFrameHandler
{
protected:
    int m_iImageHeight;                 // The current image height
    int m_iImageWidth;                  // And current image width

    int m_iFrameNumber;
    const REFERENCE_TIME m_rtFrameLength;

    // Capture device
    int m_nDeviceID;
    char * m_DeviceName;
    CCaptureVideo m_capture;
    // Packet pool (buffering)
    PacketPool pool;
    // Current frame data
    EvoPacket *m_packet;
    EvoPacket *m_lastPacket;
    // Locks
    CCritSec m_cSharedData;
    CCritSec m_cSharedState;            // Protects our internal state

    IReferenceClock *m_pClock;
    REFERENCE_TIME m_rtStop;
public:
    CVirtualStream(HRESULT *phr, CSource *pFilter);
    ~CVirtualStream();

    //////////////////////////////////////////////////////////////////////////
    //  CBasePin
    //////////////////////////////////////////////////////////////////////////

    // Quality control
    // Not implemented: we pace delivery off the reference clock and never
    // drop frames; if downstream is slow we simply block until unblocked.
    STDMETHODIMP Notify(IBaseFilter *pSender, Quality q);


    //////////////////////////////////////////////////////////////////////////
    //  IUnknown
    //////////////////////////////////////////////////////////////////////////
    STDMETHODIMP QueryInterface(REFIID riid, void **ppv) {
        if (riid == __uuidof(IAMStreamConfig))
            *ppv = (IAMStreamConfig*)this;
        else 
        if (riid == __uuidof(IKsPropertySet))
            *ppv = (IKsPropertySet*)this;
        else
            return CSourceStream::QueryInterface(riid, ppv);

        AddRef();
        return S_OK;
    }
    STDMETHODIMP_(ULONG) AddRef() { return GetOwner()->AddRef(); }
    STDMETHODIMP_(ULONG) Release() { return GetOwner()->Release(); }

    //////////////////////////////////////////////////////////////////////////
    //  CSourceStream
    //////////////////////////////////////////////////////////////////////////

    // Override the version that offers exactly one media type
    HRESULT FillBuffer(IMediaSample *pSample);
    HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pRequest);

    // Set the agreed media type and set up the necessary parameters
    HRESULT SetMediaType(const CMediaType *pMediaType);
    // Support multiple display formats
    HRESULT CheckMediaType(const CMediaType *pMediaType);
    HRESULT GetMediaType(int iPosition, CMediaType *pmt);

    //////////////////////////////////////////////////////////////////////////
    //  IAMStreamConfig
    //////////////////////////////////////////////////////////////////////////
    HRESULT STDMETHODCALLTYPE SetFormat(AM_MEDIA_TYPE *pmt);
    HRESULT STDMETHODCALLTYPE GetFormat(AM_MEDIA_TYPE **ppmt);
    HRESULT STDMETHODCALLTYPE GetNumberOfCapabilities(int *piCount, int *piSize);
    HRESULT STDMETHODCALLTYPE GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC);

    //////////////////////////////////////////////////////////////////////////
    //  IKsPropertySet
    //////////////////////////////////////////////////////////////////////////
    HRESULT STDMETHODCALLTYPE Set(REFGUID guidPropSet, DWORD dwID, void *pInstanceData, DWORD cbInstanceData, void *pPropData, DWORD cbPropData);
    HRESULT STDMETHODCALLTYPE Get(REFGUID guidPropSet, DWORD dwPropID, void *pInstanceData, DWORD cbInstanceData, void *pPropData, DWORD cbPropData, DWORD *pcbReturned);
    HRESULT STDMETHODCALLTYPE QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD *pTypeSupport);

    //////////////////////////////////////////////////////////////////////////
    //  CVideoFrameHandler
    //////////////////////////////////////////////////////////////////////////
    void VideoFrameData(double dblSampleTime, BYTE * pBuffer, long lBufferSize);
private:
    // Packet pool helpers
    EvoPacket *PoolNew(uint32_t dataSize);
    void PoolDelete(EvoPacket **packet);
    void Delete(EvoPacket **packet);
    PacketPool *GetPool();

    // Fill the buffer with rainbow color bars
    void FillRainbowBar(char * buffer,int size,int width,int height);
};
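
The post does not show the DLL entry points, but the CLSID and filter name defined above are only useful once the filter is registered, both as a COM server and under the video input device category so that applications enumerating cameras can find it. A typical sketch for a baseclasses filter follows; it assumes the project links the DirectShow baseclasses and their dllentry.cpp (which supplies DllGetClassObject/DllCanUnloadNow), and RegisterFilters is just an illustrative helper name:

#include <olectl.h>

const AMOVIESETUP_MEDIATYPE sudPinTypes = { &MEDIATYPE_Video, &MEDIASUBTYPE_NULL };
const AMOVIESETUP_PIN sudPin = { L"Output", FALSE, TRUE, FALSE, FALSE,
    &CLSID_NULL, NULL, 1, &sudPinTypes };
const AMOVIESETUP_FILTER sudFilter = { &CLSID_VirtualCamera,
    VirtualCamera_FilterName, MERIT_DO_NOT_USE, 1, &sudPin };

// Factory table consumed by the baseclasses DLL entry points
CFactoryTemplate g_Templates[] = {
    { VirtualCamera_FilterName, &CLSID_VirtualCamera,
      CVirtualSource::CreateInstance, NULL, &sudFilter }
};
int g_cTemplates = sizeof(g_Templates) / sizeof(g_Templates[0]);

STDAPI RegisterFilters(BOOL bRegister)
{
    // Register/unregister the COM server, then list the filter under the
    // video capture category so apps that enumerate cameras will see it.
    HRESULT hr = AMovieDllRegisterServer2(bRegister);
    if (FAILED(hr)) return hr;

    IFilterMapper2 *pFM2 = NULL;
    hr = CoCreateInstance(CLSID_FilterMapper2, NULL, CLSCTX_INPROC_SERVER,
        IID_IFilterMapper2, (void**)&pFM2);
    if (FAILED(hr)) return hr;

    if (bRegister) {
        REGFILTER2 rf2;
        rf2.dwVersion = 1;
        rf2.dwMerit = MERIT_DO_NOT_USE;
        rf2.cPins = 1;
        rf2.rgPins = &sudPin;
        hr = pFM2->RegisterFilter(CLSID_VirtualCamera, VirtualCamera_FilterName,
            NULL, &CLSID_VideoInputDeviceCategory, NULL, &rf2);
    }
    else {
        hr = pFM2->UnregisterFilter(&CLSID_VideoInputDeviceCategory,
            NULL, CLSID_VirtualCamera);
    }
    pFM2->Release();
    return hr;
}

STDAPI DllRegisterServer()   { return RegisterFilters(TRUE); }
STDAPI DllUnregisterServer() { return RegisterFilters(FALSE); }

After building, register the DLL (the file name here is whatever your project produces) with an elevated regsvr32; 32-bit applications need the 32-bit build registered with the 32-bit regsvr32.

Back to the filter implementation (VirtualSource.cpp):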
#include "VirtualSource.h"
#include "EvoAttributeMemory.h"

CUnknown * WINAPI CVirtualSource::CreateInstance(LPUNKNOWN lpunk, HRESULT *phr)
{
    ASSERT(phr);
    DbgSetModuleLevel(LOG_MEMORY, 2);
    CUnknown *punk = new CVirtualSource(lpunk, phr);
    LLOG("CVirtualSource::CreateInstance:0x%X\n", punk);
    return punk;
}

CVirtualSource::CVirtualSource(LPUNKNOWN lpunk, HRESULT *phr) :
    CSource(VirtualCamera_FilterName, lpunk, CLSID_VirtualCamera)
{
    ASSERT(phr);
    CAutoLock cAutoLock(&m_cStateLock);
    m_paStreams = new CSourceStream *[1];
    m_paStreams[0] = m_pPin = new CVirtualStream(phr, this);
    m_iPins = 1;
}

STDMETHODIMP CVirtualSource::QueryInterface(REFIID riid, void **ppv)
{
    if (riid == __uuidof(IAMStreamConfig) || riid == __uuidof(IKsPropertySet))
        return m_paStreams[0]->QueryInterface(riid, ppv);
    else
        return CSource::QueryInterface(riid, ppv);
}


// Device names to look for
static char * devicesName[] = {
    "Video Control",
    "Condor"
};

//#include <gl/glew.h>
//#pragma comment(lib,"glew32.lib")

CVirtualStream::CVirtualStream(HRESULT *phr, CSource *pFilter)
    : CSourceStream(NAME("Push Source Desktop"), phr, pFilter, L"Out"),
    m_iFrameNumber(0),
    m_rtFrameLength(rtDefaultFrameLength),
    m_packet(NULL),
    m_lastPacket(NULL),
    m_nDeviceID(-1),
    m_DeviceName(NULL)
{
    // Save dimensions for later use in FillBuffer()
    m_iImageWidth = 320;
    m_iImageHeight = 240;

    // Enumerate capture devices and bind the first match
    std::vector<std::string > devices;
    m_capture.EnumDevices(devices);
    int count = sizeof(devicesName) / sizeof(devicesName[0]);
    // Stop as soon as one device has been opened successfully
    for (int i = 0; i < count && m_nDeviceID == -1; i++) {
        char * name = devicesName[i];
        for (size_t j = 0; j < devices.size(); j++) {
            if (devices[j].compare(name) == 0) {
                HRESULT ret = m_capture.Open((int)j);
                if (ret == S_OK) {
                    m_nDeviceID = (int)j;
                    m_DeviceName = name;
                    break;
                }
            }
        }
    }
    if (m_nDeviceID != -1) {
        CSampleGrabberCB * cb = m_capture.GetSampleGrabberCB();
        if (cb != NULL) {
            m_iImageWidth = cb->lWidth;
            m_iImageHeight = cb->lHeight;
        }
        m_capture.GrabVideoFrames(TRUE,this);
        m_capture.Play();
    }

    // Initialize the default media type
    GetMediaType(0, &m_mt);

    CoCreateInstance(CLSID_SystemClock, NULL, CLSCTX_INPROC_SERVER,
        IID_IReferenceClock, (LPVOID*)&m_pClock);
    m_rtStop = 0;
    //glewInit();
}

CVirtualStream::~CVirtualStream()
{
    LLOG("CVCamPin::~CVCamPin\n");
    m_capture.Close();
    if (m_packet != NULL) {
        delete m_packet;
        m_packet = NULL;
    }
    if (m_lastPacket != NULL) {
        delete m_lastPacket;
        m_lastPacket = NULL;
    }

    if (m_pClock != NULL) {
        m_pClock->Release();
        m_pClock = NULL;
    }
}


STDMETHODIMP CVirtualStream::Notify(IBaseFilter * pSender, Quality q)
{
    return E_FAIL;
} // Notify

//
// GetMediaType
//
// This pin offers exactly one media type: 24-bit RGB at the dimensions
// taken from the capture device (or the 320x240 default). iPosition 0
// returns that type; any higher index returns VFW_S_NO_MORE_ITEMS.
//
HRESULT CVirtualStream::GetMediaType(int iPosition, CMediaType *pmt)
{
    LLOG("CVCamPin::GetMediaType:.%d\n", iPosition);
    CheckPointer(pmt, E_POINTER);

    CAutoLock cAutoLock(m_pFilter->pStateLock());

    if (iPosition < 0) {
        return E_INVALIDARG;
    }

    // Have we run off the end of types?
    if (iPosition > 0) {
        return VFW_S_NO_MORE_ITEMS;
    }
    VIDEOINFO *pvi = (VIDEOINFO*)pmt->Format();
    if (pvi == NULL || pmt->cbFormat != sizeof(VIDEOINFO)) {
        pvi = (VIDEOINFO *)pmt->AllocFormatBuffer(sizeof(VIDEOINFO));
        if (NULL == pvi) {
            LERROR("CVCamPin::GetMediaType:AllocFormatBuffer == NULL\n");
            return(E_OUTOFMEMORY);
        }
        // Initialize the VideoInfo structure before configuring its members
        ZeroMemory(pvi, sizeof(VIDEOINFO));
    }
    else {
        LLOG("CVCamPin::GetMediaType:No Alloc\n");
    }

    {   // Return our 24bit format
        pvi->bmiHeader.biCompression = BI_RGB;
        pvi->bmiHeader.biBitCount = 24;
    }

    // Adjust the parameters common to all formats
    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth = m_iImageWidth;
    pvi->bmiHeader.biHeight = m_iImageHeight;
    pvi->bmiHeader.biPlanes = 1;
    pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
    pvi->bmiHeader.biClrImportant = 0;

    SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
    SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
    pvi->rcSource.right = m_iImageWidth;
    pvi->rcSource.bottom = abs(m_iImageHeight);
    pvi->AvgTimePerFrame = rtDefaultFrameLength;    // UNITS / 30, i.e. 30 fps in 100 ns units
    pvi->dwBitRate = m_iImageWidth * abs(m_iImageHeight) * 3 * 8 * 30;  // bits per second at 30 fps

    pmt->SetType(&MEDIATYPE_Video);
    pmt->SetFormatType(&FORMAT_VideoInfo);
    pmt->SetTemporalCompression(FALSE);
    pmt->bFixedSizeSamples = FALSE;

    // Work out the GUID for the subtype from the header info.
    const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader);
    pmt->SetSubtype(&SubTypeGUID);
    pmt->SetSampleSize(pvi->bmiHeader.biSizeImage);

    LLOG("CVCamPin::GetMediaType:S_OK\n");
    return NOERROR;

} // GetMediaType


  //
  // CheckMediaType
  //
  // Accept 8, 16, 24 or 32 bit RGB video, but only at the image size we
  // are currently configured for.
  // Returns E_INVALIDARG if the media type is not acceptable.
  //
HRESULT CVirtualStream::CheckMediaType(const CMediaType *pMediaType)
{
    LLOG("CVCamPin::CheckMediaType:.\n");
    CheckPointer(pMediaType, E_POINTER);
    //CAutoLock cAutoLock(m_pFilter->pStateLock());

    if ((*(pMediaType->Type()) != MEDIATYPE_Video) ||   // we only output video
        !(pMediaType->IsFixedSize()))                  // in fixed size samples
    {
        LERROR("CVCamPin::CheckMediaType: Error 1\n");
        return E_INVALIDARG;
    }

    // Check for the subtypes we support
    const GUID *SubType = pMediaType->Subtype();
    if (SubType == NULL) {
        LERROR("CVCamPin::CheckMediaType: Error 2\n");
        return E_INVALIDARG;
    }

    if ((*SubType != MEDIASUBTYPE_RGB8)
        && (*SubType != MEDIASUBTYPE_RGB565)
        && (*SubType != MEDIASUBTYPE_RGB555)
        && (*SubType != MEDIASUBTYPE_RGB24)
        && (*SubType != MEDIASUBTYPE_RGB32))
    {
        LERROR("CVCamPin::CheckMediaType: Error 3\n");
        return E_INVALIDARG;
    }

    // Get the format area of the media type
    VIDEOINFO *pvi = (VIDEOINFO *)pMediaType->Format();

    if (pvi == NULL) {
        LERROR("CVCamPin::CheckMediaType: Error 4\n");
        return E_INVALIDARG;
    }

    // Check if the image width & height have changed
    if (pvi->bmiHeader.biWidth != m_iImageWidth ||
        abs(pvi->bmiHeader.biHeight) != m_iImageHeight)
    {
        // If the image width/height is changed, fail CheckMediaType() to force
        // the renderer to resize the image.
        LERROR("CVCamPin::CheckMediaType: Error 5\n");
        return E_INVALIDARG;
    }

    // Don't accept formats with negative height, which would cause the desktop
    // image to be displayed upside down.
    if (pvi->bmiHeader.biHeight < 0) {
        LERROR("CVCamPin::CheckMediaType: Error 6\n");
        return E_INVALIDARG;
    }
    LLOG("CVCamPin::CheckMediaType:S_OK\n");
    return S_OK;  // This format is acceptable.

} // CheckMediaType

  //
  // SetMediaType
  //
  // Called when a media type is agreed between filters
  //
HRESULT CVirtualStream::SetMediaType(const CMediaType *pMediaType)
{
    LLOG("CVCamPin::SetMediaType:.\n");
    CAutoLock cAutoLock(m_pFilter->pStateLock());

    // Pass the call up to my base class
    HRESULT hr = CSourceStream::SetMediaType(pMediaType);

    if (SUCCEEDED(hr))
    {
        VIDEOINFO * pvi = (VIDEOINFO *)m_mt.Format();
        if (pvi == NULL) {
            LERROR("CVCamPin::SetMediaType: Error 1\n");
            return E_UNEXPECTED;
        }

        switch (pvi->bmiHeader.biBitCount)
        {
            //case 8:     // 8-bit palettized
            //case 16:    // RGB565, RGB555
            case 24:    // RGB24
            //case 32:    // RGB32
                hr = S_OK;
                break;

            default:{
                LLOG("CVCamPin::SetMediaType:%d\n", pvi->bmiHeader.biBitCount);
                // We should never agree any other media types
                ASSERT(FALSE);
                hr = E_INVALIDARG;
                break;
            }
        }
    }
    LLOG("CVCamPin::SetMediaType:%s (%X)\n", SUCCEEDED(hr) ? "S_OK" : "S_FAULE", hr);
    return hr;

} // SetMediaType

  //
  // DecideBufferSize
  //
  // This will always be called after the format has been successfully
  // negotiated. So we have a look at m_mt to see what size image we agreed.
  // Then we can ask for buffers of the correct size to contain them.
  //
HRESULT CVirtualStream::DecideBufferSize(IMemAllocator *pAlloc,
    ALLOCATOR_PROPERTIES *pProperties)
{
    LLOG("CVCamPin::DecideBufferSize:.\n");
    CheckPointer(pAlloc, E_POINTER);
    CheckPointer(pProperties, E_POINTER);
    CAutoLock cAutoLock(m_pFilter->pStateLock());

    HRESULT hr = NOERROR;
    VIDEOINFO *pvi = (VIDEOINFO *)m_mt.Format();
    ASSERT(pvi != NULL);

    pProperties->cBuffers = 1;
    pProperties->cbBuffer = pvi->bmiHeader.biSizeImage;

    ASSERT(pProperties->cbBuffer);

    // Ask the allocator to reserve us some sample memory. NOTE: the function
    // can succeed (return NOERROR) but still not have allocated the
    // memory that we requested, so we must check we got whatever we wanted.
    ALLOCATOR_PROPERTIES Actual = { 0 };
    hr = pAlloc->SetProperties(pProperties, &Actual);
    if (FAILED(hr))
    {
        LERROR("CVCamPin::DecideBufferSize: Error 1(%X)\n", hr);
        return hr;
    }

    // Is this allocator unsuitable?
    if (Actual.cbBuffer < pProperties->cbBuffer)
    {
        LERROR("CVCamPin::DecideBufferSize: Error 2(%d %d)\n", Actual.cbBuffer, pProperties->cbBuffer);
        return E_FAIL;
    }

    // We asked for exactly one buffer, so the allocator should report one back.
    ASSERT(Actual.cBuffers == 1);
    LLOG("CVCamPin::DecideBufferSize:S_OK\n");
    return NOERROR;

} // DecideBufferSize

  // This is where we insert the DIB bits into the video stream.
  // FillBuffer is called once for every sample in the stream.
HRESULT CVirtualStream::FillBuffer(IMediaSample *pSample)
{
    CheckPointer(pSample, E_POINTER);
    CAutoLock cAutoLockShared(&m_cSharedState);

    BYTE *pData = NULL;
    long cbData;
    // Access the sample's data buffer
    pSample->GetPointer(&pData);
    cbData = pSample->GetSize();

    REFERENCE_TIME rtStart = 0;

    // Pace delivery against the reference clock: wait (in 1 ms steps) until
    // the stop time of the previous frame has passed.
    while (true) {
        m_pClock->GetTime(&rtStart);
        if (m_rtStop <= rtStart) {
            break;
        }
        Sleep(1);
    }

    {
        ASSERT(pData != NULL);

        // Check that we're still using video
        ASSERT(m_mt.formattype == FORMAT_VideoInfo);

        VIDEOINFO *pVih = (VIDEOINFO*)m_mt.pbFormat;
        ASSERT(pVih != NULL);

        // Copy the DIB bits over into our filter's output buffer.
        // Since sample size may be larger than the image size, bound the copy size.

        EvoPacket * packet = NULL;
        {
            CAutoLock cAutoLockShared(&m_cSharedData);
            packet = m_packet;
            if (packet == NULL) {
                packet = m_lastPacket;
            }
            else {
                if (m_lastPacket != NULL) {
                    PoolDelete(&m_lastPacket);
                }
                m_lastPacket = m_packet;
                m_packet = NULL;
            }
        }
        if (packet != NULL && packet->GetPacketSize() == cbData)
        {
            EvoAttributeMemory * buffer = (EvoAttributeMemory*)packet;
            memcpy(pData, buffer->GetData(), cbData);
        }
        else 
        {
            if (m_nDeviceID == -1) 
            {
                FillRainbowBar((char*)pData, cbData, pVih->bmiHeader.biWidth, abs(pVih->bmiHeader.biHeight));
            }
            else {
                memset(pData, 0, cbData);
            }
        }
    }
    pSample->SetActualDataLength(cbData);

    // Set the timestamps that will govern playback frame rate.
    // If this file is getting written out as an AVI,
    // then you'll also need to configure the AVI Mux filter to 
    // set the Average Time Per Frame for the AVI Header.
    // The current time is the sample's start.

    REFERENCE_TIME rtStop = rtStart + m_rtFrameLength;
    pSample->SetTime((REFERENCE_TIME *)&rtStart, (REFERENCE_TIME *)&rtStop);
    pSample->SetMediaTime((REFERENCE_TIME *)&rtStart, (REFERENCE_TIME *)&rtStop);
    m_rtStop = rtStop;

    m_iFrameNumber++;
    pSample->SetDiscontinuity(FALSE);
    pSample->SetPreroll(FALSE);
    // Set TRUE on every sample for uncompressed frames
    pSample->SetSyncPoint(TRUE);

    return S_OK;
}

//////////////////////////////////////////////////////////////////////////
//  IAMStreamConfig
//////////////////////////////////////////////////////////////////////////

HRESULT STDMETHODCALLTYPE CVirtualStream::SetFormat(AM_MEDIA_TYPE *pmt)
{
    LLOG("CVCamPin::SetFormat:.\n");
    if (pmt == NULL) {
        LLOG("CVCamPin::SetFormat:E_POINTER\n");
        return E_POINTER;
    }

    //CAutoLock cAutoLockShared(&m_cSharedState);

    m_mt = *pmt;

    IPin* pin = NULL;
    ConnectedTo(&pin);
    if (pin)
    {
        ASSERT(m_pFilter != NULL);
        IFilterGraph *pGraph = m_pFilter->GetFilterGraph();
        ASSERT(pGraph != NULL);
        pGraph->Reconnect(this);
        pin->Release();   // ConnectedTo() AddRef'd the pin
    }
    // Not being connected yet is not an error; the new type takes effect on connect.
    LLOG("CVCamPin::SetFormat:S_OK\n");
    return S_OK;
}

HRESULT STDMETHODCALLTYPE CVirtualStream::GetFormat(AM_MEDIA_TYPE **ppmt)
{
    //CAutoLock cAutoLockShared(&m_cSharedState);

    if (ppmt == NULL) return E_POINTER;
    *ppmt = CreateMediaType(&m_mt);
    LLOG("CVCamPin::GetFormat:S_OK\n");
    return S_OK;
}

HRESULT STDMETHODCALLTYPE CVirtualStream::GetNumberOfCapabilities(int *piCount, int *piSize)
{
    LLOG("CVCamPin::GetNumberOfCapabilities:S_OK\n");
    *piCount = 1;
    *piSize = sizeof(VIDEO_STREAM_CONFIG_CAPS);
    return S_OK;
}

HRESULT STDMETHODCALLTYPE CVirtualStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC)
{
    LLOG("CVCamPin::GetStreamCaps:.%d\n", iIndex);
    if (pmt == NULL) {
        LERROR("CVCamPin::GetStreamCaps:pmt == NULL\n");
        return E_POINTER;
    }

    if (m_mt.majortype != MEDIATYPE_Video) {
        LERROR("CVCamPin::GetStreamCaps:m_mt.majortype != MEDIATYPE_Video\n");
        return  VFW_E_NOT_CONNECTED;
    }

    if (*pmt == NULL) {
        *pmt = CreateMediaType(&m_mt);
    }
    if (*pmt == NULL) {
        LERROR("CVCamPin::GetStreamCaps:*pmt == NULL\n");
        return E_POINTER;
    }

    DECLARE_PTR(VIDEOINFO, pvi, (*pmt)->pbFormat);

    if (pvi == NULL) {
        LERROR("CVCamPin::GetStreamCaps:pvi == NULL %d (%d %d)\n", iIndex, m_mt.cbFormat, m_mt.lSampleSize);
        DeleteMediaType(*pmt);
        *pmt = NULL;
        return E_POINTER;
    }
    else {
        LLOG("CVCamPin::GetStreamCaps:%d (%d %d)\n", iIndex, m_mt.cbFormat, m_mt.lSampleSize);

        if (iIndex != 0) return S_FALSE; // we expose exactly one capability

        pvi->bmiHeader.biCompression = BI_RGB;
        pvi->bmiHeader.biBitCount    = 24;
        pvi->bmiHeader.biSize       = sizeof(BITMAPINFOHEADER);
        pvi->bmiHeader.biWidth      = m_iImageWidth;
        pvi->bmiHeader.biHeight     = m_iImageHeight;
        pvi->bmiHeader.biPlanes     = 1;
        pvi->bmiHeader.biSizeImage  = GetBitmapSize(&pvi->bmiHeader);
        pvi->bmiHeader.biClrImportant = 0;

        SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
        SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
        pvi->rcSource.right = m_iImageWidth;
        pvi->rcSource.bottom = abs(m_iImageHeight);
        pvi->AvgTimePerFrame = rtDefaultFrameLength;    // UNITS / 30, i.e. 30 fps
        pvi->dwBitRate = m_iImageWidth * abs(m_iImageHeight) * 3 * 8 * 30;  // bits per second

        (*pmt)->majortype = MEDIATYPE_Video;
        (*pmt)->subtype = MEDIASUBTYPE_RGB24;
        (*pmt)->formattype = FORMAT_VideoInfo;
        (*pmt)->bTemporalCompression = FALSE;
        (*pmt)->bFixedSizeSamples= FALSE;
        (*pmt)->lSampleSize = pvi->bmiHeader.biSizeImage;
        (*pmt)->cbFormat = sizeof(VIDEOINFO);
    }

    if (pSCC == NULL) {
        LERROR("CVCamPin::GetStreamCaps:pSCC == NULL\n");
        return E_POINTER;
    }
    else {
        DECLARE_PTR(VIDEO_STREAM_CONFIG_CAPS, pvscc, pSCC);

        pvscc->guid = FORMAT_VideoInfo;
        pvscc->VideoStandard = AnalogVideo_None;
        pvscc->InputSize.cx = 0;
        pvscc->InputSize.cy = 0;
        pvscc->MinCroppingSize.cx = m_iImageWidth;
        pvscc->MinCroppingSize.cy = m_iImageHeight;
        pvscc->MaxCroppingSize.cx = m_iImageWidth;
        pvscc->MaxCroppingSize.cy = m_iImageHeight;
        pvscc->CropGranularityX = m_iImageWidth;
        pvscc->CropGranularityY = m_iImageHeight;
        pvscc->CropAlignX = 0;
        pvscc->CropAlignY = 0;

        pvscc->MinOutputSize.cx = m_iImageWidth;
        pvscc->MinOutputSize.cy = m_iImageHeight;
        pvscc->MaxOutputSize.cx = m_iImageWidth;
        pvscc->MaxOutputSize.cy = m_iImageHeight;

        pvscc->OutputGranularityX = 0;
        pvscc->OutputGranularityY = 0;
        pvscc->StretchTapsX = 0;
        pvscc->StretchTapsY = 0;
        pvscc->ShrinkTapsX = 0;
        pvscc->ShrinkTapsY = 0;
        pvscc->MinFrameInterval = FPS_30; // 333,333 * 100 ns = 30 fps
        pvscc->MaxFrameInterval = FPS_30; // fixed frame rate
        pvscc->MinBitsPerSecond = (m_iImageWidth * m_iImageHeight * 3 * 8) * 30;
        pvscc->MaxBitsPerSecond = (m_iImageWidth * m_iImageHeight * 3 * 8) * 30;
    }
    LLOG("CVCamPin::GetStreamCaps:S_OK\n");

    return S_OK;
}

//////////////////////////////////////////////////////////////////////////
// IKsPropertySet
//////////////////////////////////////////////////////////////////////////
HRESULT CVirtualStream::Set(REFGUID guidPropSet, DWORD dwID, void *pInstanceData,
    DWORD cbInstanceData, void *pPropData, DWORD cbPropData)
{   // Set: we cannot set any properties.
    LLOG("CVCamPin::Set:E_NOTIMPL\n");
    return E_NOTIMPL;
}

// Get: Return the pin category (our only property). 
HRESULT CVirtualStream::Get(REFGUID guidPropSet,   // Which property set.
    DWORD dwPropID,        // Which property in that set.
    void *pInstanceData,   // Instance data (ignore).
    DWORD cbInstanceData,  // Size of the instance data (ignore).
    void *pPropData,       // Buffer to receive the property data.
    DWORD cbPropData,      // Size of the buffer.
    DWORD *pcbReturned     // Return the size of the property.
    )
{
    LLOG("CVCamPin::Get:.\n");
    if (guidPropSet != AMPROPSETID_Pin)             return E_PROP_SET_UNSUPPORTED;
    if (dwPropID != AMPROPERTY_PIN_CATEGORY)        return E_PROP_ID_UNSUPPORTED;
    if (pPropData == NULL && pcbReturned == NULL)   return E_POINTER;

    if (pcbReturned) *pcbReturned = sizeof(GUID);
    if (pPropData == NULL)          return S_OK; // Caller just wants to know the size. 
    if (cbPropData < sizeof(GUID))  return E_UNEXPECTED;// The buffer is too small.

    *(GUID *)pPropData = PIN_CATEGORY_CAPTURE;
    LLOG("CVCamPin::Get:OK\n");
    return S_OK;
}

// QuerySupported: Query whether the pin supports the specified property.
HRESULT CVirtualStream::QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD *pTypeSupport)
{

    if (guidPropSet != AMPROPSETID_Pin) return E_PROP_SET_UNSUPPORTED;
    if (dwPropID != AMPROPERTY_PIN_CATEGORY) return E_PROP_ID_UNSUPPORTED;
    // We support getting this property, but not setting it.
    if (pTypeSupport) *pTypeSupport = KSPROPERTY_SUPPORT_GET;
    return S_OK;
}

static int index = 0;  // frame counter used to name the debug BMP dumps below

void DumpBMP(BITMAPINFOHEADER bmiHeader, BYTE *pBuffer, long lBufferSize)
{
    BITMAPFILEHEADER header = { 0 };
    header.bfType = 0x4D42;   // 'BM'
    header.bfOffBits = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER);
    header.bfSize = header.bfOffBits + lBufferSize;
    char buffer[1024];
    sprintf_s(buffer, 1024, "E:/tmp/%d.bmp", index++);
    FILE * file = NULL;
    fopen_s(&file, buffer, "wb+");
    if (file == NULL) return;   // target directory may not exist; skip the dump
    fwrite(&header, sizeof(BITMAPFILEHEADER), 1, file);
    fwrite(&bmiHeader, sizeof(BITMAPINFOHEADER), 1, file);
    fwrite(pBuffer, lBufferSize, 1, file);
    fclose(file);
}

void CVirtualStream::VideoFrameData(double dblSampleTime, BYTE * pBuffer, long lBufferSize)
{
    EvoAttributeMemory * data = (EvoAttributeMemory*)PoolNew(lBufferSize);
    if (data != NULL) {
        uint8_t * buffer = data->GetData();
        memcpy(buffer, pBuffer, lBufferSize);
        data->SetPTS(dblSampleTime);

        VIDEOINFO *pVih = (VIDEOINFO*)m_mt.pbFormat;
        ASSERT(pVih != NULL);

        // Debug only: dump each incoming frame straight to a BMP file
        DumpBMP(pVih->bmiHeader,pBuffer,lBufferSize);

        CAutoLock cAutoLockShared(&m_cSharedData);

        EvoPacket * tmp = m_packet;
        m_packet = data;
        PoolDelete(&tmp);
    }
}

EvoPacket *CVirtualStream::PoolNew(uint32_t dataSize)
{
    PacketPool * pool = GetPool();
    EvoPacket * packet = NULL;
    if (pool != NULL)
    {
        packet = pool->GetPacket(EvoAttributeMemory::GetTag(dataSize), EvoPacket::TypeEnum::TYPE_ATTRIBUTE_MEMORY, dataSize);
        if (packet != NULL)
        {
            packet->Validate();
        }
    }
    if (packet == NULL)
    {
        return EvoPacketAllocator::CreateNew(dataSize, EvoPacket::TypeEnum::TYPE_ATTRIBUTE_MEMORY);
    }
    return packet;
}

void CVirtualStream::PoolDelete(EvoPacket **packet)
{
    PacketPool * pool = PacketPool::GetInstance(PACKET_LIST_MAX_SIZE);
    if (packet != NULL) {
        if (*packet == NULL) return;
        if (pool != NULL)
        {
            pool->PutPacket(packet);
        }
        else
        {
            Delete(packet);
        }
    }
}

void CVirtualStream::Delete(EvoPacket **packet)
{
    if (packet == NULL) return;
    if (*packet == NULL) return;
    delete *packet;
    *packet = NULL;
}

PacketPool *CVirtualStream::GetPool()
{
    return &pool;
}

void CVirtualStream::FillRainbowBar(char * buffer, int size, int width, int height)
{
    if (size != width*height * 3) return;

    //RGB
    int ColorTable[] = {0xFFFFFF,0xFFFF01,0x01FFFF,0x00FF01,0xFF00FF,0x0000FE,0x000000};
    int ColorWidth[] = { 0,0,0,0,0,0,0 };
    int space = width / 7;
    int border = width % 7;
    ColorWidth[0] = ColorWidth[1] = ColorWidth[2] = ColorWidth[3] = ColorWidth[4] = ColorWidth[5] = ColorWidth[6] = space;
    switch (border) {
    case 1:
        ColorWidth[0] += 1;
        break;
    case 2:
        ColorWidth[0] += 1;
        ColorWidth[6] += 1;
        break;
    case 3:
        ColorWidth[0] += 1;
        ColorWidth[3] += 1;
        ColorWidth[6] += 1;
        break;
    case 4:
        ColorWidth[0] += 1;
        ColorWidth[2] += 1;
        ColorWidth[4] += 1;
        ColorWidth[6] += 1;
        break;
    case 5:
        ColorWidth[1] += 1;
        ColorWidth[2] += 1;
        ColorWidth[3] += 1;
        ColorWidth[4] += 1;
        ColorWidth[5] += 1;
        break;
    case 6:
        ColorWidth[0] += 1;
        ColorWidth[1] += 1;
        ColorWidth[2] += 1;
        ColorWidth[4] += 1;
        ColorWidth[5] += 1;
        ColorWidth[6] += 1;
        break;
    }
    int index = 0;
    int index_width = 0;

    char r = 0, g = 0, b = 0;
    int color = ColorTable[index];
    r = (char)(color >> 16);
    g = (char)((color >> 8) & 0xFF);
    b = (char)(color & 0xFF);
    int ci = ColorWidth[index];

    for (int i = 0; i < width; i++) {
        buffer[i * 3] = r;
        buffer[i * 3 + 1] = g;
        buffer[i * 3 + 2] = b;

        if (index_width  > ci) {
            index++;
            index_width = 0;

            if (index > 6) index = 6;
            int color = ColorTable[index];
            r = (char)(color >> 16);
            g = (char)((color >> 8) & 0xFF);
            b = (char)(color & 0xFF);
            ci = ColorWidth[index];
        }
        else {
            index_width++;
        }
    }
    for (int i = 1; i < height; i++) {
        memcpy(&buffer[i*width*3], buffer, width * 3);
    }
}

CVirtualStream inherits IAMStreamConfig, the interface through which applications change the camera's resolution, and IKsPropertySet, which exposes extended pin properties (here, the pin category). CVideoFrameHandler receives the frames delivered by the capture callback.
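
For context, this is roughly what the client side of those two interfaces looks like; pFilter here is a hypothetical IBaseFilter* to the registered virtual camera (note that CVirtualSource::QueryInterface above forwards both IIDs to the pin):

// Change the advertised format through IAMStreamConfig
IAMStreamConfig *pConfig = NULL;
if (SUCCEEDED(pFilter->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig))) {
    AM_MEDIA_TYPE *pmt = NULL;
    if (SUCCEEDED(pConfig->GetFormat(&pmt))) {
        // inspect/adjust ((VIDEOINFOHEADER*)pmt->pbFormat)->bmiHeader, then:
        pConfig->SetFormat(pmt);
        DeleteMediaType(pmt);
    }
    pConfig->Release();
}

// Confirm the pin category through IKsPropertySet; this is the handshake
// capture applications perform to find the capture pin.
IKsPropertySet *pKs = NULL;
if (SUCCEEDED(pFilter->QueryInterface(IID_IKsPropertySet, (void**)&pKs))) {
    GUID category;
    DWORD cbReturned = 0;
    if (SUCCEEDED(pKs->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
            &category, sizeof(category), &cbReturned))
        && category == PIN_CATEGORY_CAPTURE) {
        // this pin delivers capture data
    }
    pKs->Release();
}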

// Packet pool
PacketPool pool;
// Current frame data
EvoPacket *m_packet;
EvoPacket *m_lastPacket;
These classes are internal company modules and cannot be uploaded; if you only want to see the filter work, they can all be removed. CCaptureVideo m_capture; is the camera capture module.
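
Since those modules are not available, here is a minimal stand-in sketch, reconstructed purely from the calls the code above makes; every detail of the real internal interface is an assumption. It does no actual pooling (GetPacket always fails over to CreateNew), which is enough to get frames flowing:

#pragma once
#include <stdint.h>

#define PACKET_LIST_MAX_SIZE 8   // assumed constant referenced by PoolDelete()

class EvoPacket {
public:
    enum TypeEnum { TYPE_ATTRIBUTE_MEMORY = 1 };
    explicit EvoPacket(uint32_t size) : m_size(size) {}
    virtual ~EvoPacket() {}
    uint32_t GetPacketSize() const { return m_size; }
    void Validate() {}   // the real pool presumably re-initializes recycled packets here
protected:
    uint32_t m_size;
};

class EvoAttributeMemory : public EvoPacket {
public:
    explicit EvoAttributeMemory(uint32_t size)
        : EvoPacket(size), m_data(new uint8_t[size]), m_pts(0) {}
    ~EvoAttributeMemory() { delete[] m_data; }
    uint8_t *GetData() { return m_data; }
    void SetPTS(double pts) { m_pts = pts; }
    static int GetTag(uint32_t) { return 0; }   // pool lookup key in the real module
private:
    uint8_t *m_data;
    double m_pts;
};

class EvoPacketAllocator {
public:
    static EvoPacket *CreateNew(uint32_t size, EvoPacket::TypeEnum) {
        return new EvoAttributeMemory(size);
    }
};

// Degenerate "pool": allocate on demand, free on return.
class PacketPool {
public:
    static PacketPool *GetInstance(int) { static PacketPool instance; return &instance; }
    EvoPacket *GetPacket(int, EvoPacket::TypeEnum, uint32_t) { return NULL; }
    void PutPacket(EvoPacket **pp) { delete *pp; *pp = NULL; }
};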

Capture module code:

#pragma once

//http://blog.csdn.net/gaoguide/article/details/47867783

#ifndef POINTER_64
#define POINTER_64 __ptr64
#endif

#include <dshow.h>
#include <qedit.h>
#include <d3d9.h>

#include <streams.h>
#include <initguid.h>

#include <vector>
#include <string>

#include "SampleGrabberCB.h"

#ifndef SAFE_RELEASE
#define SAFE_RELEASE( x ) \
if ( NULL != x ) \
{ \
  x->Release( ); \
  x = NULL; \
}
#endif

class CCaptureVideo
{
public:
    // Open video capture on the given device
    HRESULT Open(int iDeviceID, bool ShowVideo = false);
    // Start capturing
    HRESULT Play();
    // Close the capture graph
    HRESULT Close();

    CCaptureVideo();
    virtual ~CCaptureVideo();

    // Get the frame-callback object
    CSampleGrabberCB *GetSampleGrabberCB();
    // Install and enable the frame handler
    void GrabVideoFrames(BOOL bGrabVideoFrames, CVideoFrameHandler * frame_handler);
    // Enumerate video capture devices
    int EnumDevices(std::vector<std::string> &Devices);
protected:
    IGraphBuilder *   m_pGB;
    ICaptureGraphBuilder2* m_pCapture;
    IBaseFilter*   m_pBF;
    IMediaControl*   m_pMC;
    IVideoWindow*   m_pVW;
    IBaseFilter*    m_pGrabberFilter;
    ISampleGrabber*   m_pGrabber;
    CSampleGrabberCB mCB;
protected:
    // Free a media type's format block
    void FreeMediaType(AM_MEDIA_TYPE& mt);
    // Bind the capture device filter
    bool BindFilter(int deviceId, IBaseFilter **pFilter);
    // Initialize the capture graph builder
    HRESULT InitCaptureGraphBuilder();
};
#include "CCaptureVideo.h"
#include <atlbase.h>

#ifdef _DEBUG
#undef THIS_FILE
static char THIS_FILE[] = __FILE__;
#define new DEBUG_NEW
#endif

#pragma comment(lib,"Strmiids.lib") 

// Logging is stubbed out in this file
#define LLOG
#define LERROR

//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////
CCaptureVideo::CCaptureVideo()
{
    // COM library initialization
    if (FAILED(CoInitialize(NULL)))
    {
        LERROR("CCaptureVideo CoInitialize Failed!\r\n");
        return;
    }
    m_pVW = NULL;
    m_pMC = NULL;
    m_pGB = NULL;
    m_pBF = NULL;
    m_pGrabber = NULL;
    m_pGrabberFilter = NULL;
    m_pCapture = NULL;
}

CCaptureVideo::~CCaptureVideo()
{
    if (m_pMC)m_pMC->StopWhenReady();
    if (m_pVW) {
        m_pVW->put_Visible(OAFALSE);
        m_pVW->put_Owner(NULL);
    }
    SAFE_RELEASE(m_pMC);
    SAFE_RELEASE(m_pVW);
    SAFE_RELEASE(m_pGB);
    SAFE_RELEASE(m_pBF);
    SAFE_RELEASE(m_pGrabber);
    SAFE_RELEASE(m_pGrabberFilter);
    SAFE_RELEASE(m_pCapture);
    CoUninitialize();
}

CSampleGrabberCB *CCaptureVideo::GetSampleGrabberCB()
{
    return &mCB;
}

int CCaptureVideo::EnumDevices(std::vector<std::string> &Devices)
{
    int id = 0;
    // Enumerate video capture devices
    ICreateDevEnum *pCreateDevEnum;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void**)&pCreateDevEnum);
    if (hr != NOERROR)return -1;
    CComPtr<IEnumMoniker> pEm;
    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEm, 0);
    if (hr != NOERROR)return -1;
    pEm->Reset();
    ULONG cFetched;
    IMoniker *pM;
    while (hr = pEm->Next(1, &pM, &cFetched), hr == S_OK) {
        IPropertyBag *pBag;
        hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
        if (SUCCEEDED(hr)) {
            VARIANT var;
            var.vt = VT_BSTR;
            hr = pBag->Read(L"FriendlyName", &var, NULL);
            if (hr == NOERROR) {
                char str[2048];
                id++;
                // BSTR is always wide, so convert unconditionally
                WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, str, 2048, NULL, NULL);
                Devices.push_back(str);
                SysFreeString(var.bstrVal);
            }
            pBag->Release();
        }
        pM->Release();
    }
    return id;
}

HRESULT CCaptureVideo::Close()
{
    // Stop media playback
    if (m_pMC)m_pMC->StopWhenReady();
    if (m_pVW) {
        m_pVW->put_Visible(OAFALSE);
        m_pVW->put_Owner(NULL);
    }
    SAFE_RELEASE(m_pMC);
    SAFE_RELEASE(m_pVW);
    SAFE_RELEASE(m_pGB);
    SAFE_RELEASE(m_pBF);
    SAFE_RELEASE(m_pGrabber);
    SAFE_RELEASE(m_pGrabberFilter);
    SAFE_RELEASE(m_pCapture);
    return S_OK;
}

HRESULT CCaptureVideo::Open(int iDeviceID,bool ShowVideo)
{
    HRESULT hr;
    hr = InitCaptureGraphBuilder();
    if (FAILED(hr)) {
        LERROR("Failed to get video interfaces!");
        return hr;
    }
    // Bind Device Filter. We know the device because the id was passed in
    if (!BindFilter(iDeviceID, &m_pBF)) return S_FALSE;
    hr = m_pGB->AddFilter(m_pBF, L"Capture Filter");

    // create a sample grabber
    hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&m_pGrabberFilter);
    if (FAILED(hr)) {
        LERROR("Fail to create SampleGrabber, maybe qedit.dll is not registered?");
        return hr;
    }

    m_pGrabberFilter->QueryInterface(IID_ISampleGrabber, (void**)&m_pGrabber);
    // Set the desired media type (RGB24 video)
    AM_MEDIA_TYPE mt;
    ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
    mt.majortype = MEDIATYPE_Video;
    mt.subtype = MEDIASUBTYPE_RGB24;
    mt.formattype = FORMAT_VideoInfo;
    hr = m_pGrabber->SetMediaType(&mt);
    if (FAILED(hr)) {
        LERROR("Fail to set media type!");
        return hr;
    }
    hr = m_pGB->AddFilter(m_pGrabberFilter, L"Grabber");
    if (FAILED(hr)) {
        LERROR("Fail to put sample grabber in graph");
        return hr;
    }

    IBaseFilter *pNull = NULL;
    if (!ShowVideo) {
        hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&pNull);
        if (FAILED(hr)) {
            LERROR("Fail to create NullRenderer.");
            return hr;
        }
        hr = m_pGB->AddFilter(pNull, L"NullRender");
        if (FAILED(hr)) {
            LERROR("Fail to put NullRender in graph");
            return hr;
        }
    }

    // try to render preview/capture pin
    hr = m_pCapture->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, m_pBF, m_pGrabberFilter, pNull);
    if (FAILED(hr))
        hr = m_pCapture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, m_pBF, m_pGrabberFilter, pNull);
    if (FAILED(hr)) {
        LERROR("Can’t build the graph");
        return hr;
    }

    hr = m_pGrabber->GetConnectedMediaType(&mt);
    if (FAILED(hr)) {
        LERROR("Failed to read the connected media type");
        return hr;
    }

    if (mt.subtype != MEDIASUBTYPE_RGB24 || mt.majortype != MEDIATYPE_Video) {
        LERROR("Connected media type is not RGB24 video");
        FreeMediaType(mt);
        return E_FAIL;
    }

    VIDEOINFOHEADER * vih = (VIDEOINFOHEADER*)mt.pbFormat;
    mCB.lWidth = vih->bmiHeader.biWidth;
    mCB.lHeight = vih->bmiHeader.biHeight;
    FreeMediaType(mt);

    mCB.bGrabVideo = FALSE;
    mCB.frame_handler = NULL;
    hr = m_pGrabber->SetBufferSamples(FALSE);
    hr = m_pGrabber->SetOneShot(FALSE);
    hr = m_pGrabber->SetCallback(&mCB, 1);

    return S_OK;
}

HRESULT CCaptureVideo::Play() {
    if (m_pMC == NULL) return S_FALSE;
    HRESULT hr = m_pMC->Run(); // start video capture
    if (FAILED(hr)) {
        LERROR("Couldn’t run the graph!");
        return hr;
    }
    return S_OK;
}

bool CCaptureVideo::BindFilter(int deviceId, IBaseFilter **pFilter)
{
    if (deviceId < 0)
        return false;
    // enumerate all video capture devices
    CComPtr<ICreateDevEnum> pCreateDevEnum;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
        IID_ICreateDevEnum, (void**)&pCreateDevEnum);
    if (hr != NOERROR)
    {
        return false;
    }
    CComPtr<IEnumMoniker> pEm;
    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEm, 0);
    if (hr != NOERROR)
    {
        return false;
    }
    pEm->Reset();
    ULONG cFetched;
    IMoniker *pM;
    int index = 0;
    while (hr = pEm->Next(1, &pM, &cFetched), hr == S_OK && index <= deviceId)
    {
        IPropertyBag *pBag;
        hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
        if (SUCCEEDED(hr))
        {
            VARIANT var;
            var.vt = VT_BSTR;
            hr = pBag->Read(L"FriendlyName", &var, NULL);
            if (hr == NOERROR)
            {
                if (index == deviceId)
                {
                    pM->BindToObject(0, 0, IID_IBaseFilter, (void**)pFilter);
                }
                SysFreeString(var.bstrVal);
            }
            pBag->Release();
        }
        pM->Release();
        index++;
    }
    return true;
}

HRESULT CCaptureVideo::InitCaptureGraphBuilder()
{
    HRESULT hr;
    // Create the IGraphBuilder interface
    hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&m_pGB);
    if (FAILED(hr))return hr;
    // Create the ICaptureGraphBuilder2 interface
    hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
        IID_ICaptureGraphBuilder2, (void **)&m_pCapture);
    if (FAILED(hr))return hr;
    m_pCapture->SetFiltergraph(m_pGB);
    hr = m_pGB->QueryInterface(IID_IMediaControl, (void **)&m_pMC);
    if (FAILED(hr))return hr;
    hr = m_pGB->QueryInterface(IID_IVideoWindow, (LPVOID *)&m_pVW);
    if (FAILED(hr))return hr;
    return hr;
}

void CCaptureVideo::FreeMediaType(AM_MEDIA_TYPE& mt)
{
    if (mt.cbFormat != 0) {
        CoTaskMemFree((PVOID)mt.pbFormat);
        // Strictly unnecessary but tidier
        mt.cbFormat = 0;
        mt.pbFormat = NULL;
    }
    if (mt.pUnk != NULL) {
        mt.pUnk->Release();
        mt.pUnk = NULL;
    }
}

void CCaptureVideo::GrabVideoFrames(BOOL bGrabVideoFrames, CVideoFrameHandler * frame_handler)
{
    mCB.frame_handler = frame_handler;
    mCB.bGrabVideo = bGrabVideoFrames;
}
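
Before wiring the capture class into the filter, it can be sanity-checked on its own. A hypothetical console harness (the device index 0 and the five-second run are arbitrary choices):

#include <stdio.h>
#include "CCaptureVideo.h"

// Minimal handler that just reports each frame as it arrives.
class PrintFrameHandler : public CVideoFrameHandler {
public:
    void VideoFrameData(double dblSampleTime, BYTE *pBuffer, long lBufferSize) {
        printf("frame at %.3f s, %ld bytes\n", dblSampleTime, lBufferSize);
    }
};

int main()
{
    CCaptureVideo cap;                       // constructor initializes COM
    std::vector<std::string> devices;
    int n = cap.EnumDevices(devices);
    for (int i = 0; i < n; i++) printf("%d: %s\n", i, devices[i].c_str());

    if (n > 0 && cap.Open(0) == S_OK) {      // open the first device
        PrintFrameHandler handler;
        cap.GrabVideoFrames(TRUE, &handler); // route frames to the handler
        cap.Play();                          // run the graph
        Sleep(5000);                         // capture for five seconds
        cap.Close();
    }
    return 0;
}

The grabber callback used by CCaptureVideo (SampleGrabberCB.h and its implementation):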
#pragma once

#ifndef POINTER_64
#define POINTER_64 __ptr64
#endif

#include <qedit.h>

// Video frame callback interface
class CVideoFrameHandler {
public:
    virtual void VideoFrameData(double dblSampleTime, BYTE * pBuffer, long lBufferSize) = 0;
};

// Sample grabber callback
class CSampleGrabberCB : public ISampleGrabberCB
{
public:
    long       lWidth;
    long       lHeight;
    CVideoFrameHandler *  frame_handler;
    BOOL       bGrabVideo;
public:
    CSampleGrabberCB();
    STDMETHODIMP_(ULONG) AddRef();
    STDMETHODIMP_(ULONG) Release();
    STDMETHODIMP QueryInterface(REFIID riid, void ** ppv);

    // Frame capture callbacks
    STDMETHODIMP SampleCB(double SampleTime, IMediaSample * pSample);
    STDMETHODIMP BufferCB(double dblSampleTime, BYTE * pBuffer, long lBufferSize);
};
#include "SampleGrabberCB.h"

CSampleGrabberCB::CSampleGrabberCB() {
    lWidth = 0;
    lHeight = 0;
    bGrabVideo = FALSE;
    frame_handler = NULL;
}

// Fake reference counting: the callback object lives as a member of
// CCaptureVideo, so its lifetime is managed by its owner, not by COM.
STDMETHODIMP_(ULONG) CSampleGrabberCB::AddRef() { return 2; }

STDMETHODIMP_(ULONG) CSampleGrabberCB::Release() { return 1; }

STDMETHODIMP CSampleGrabberCB::QueryInterface(REFIID riid, void ** ppv) {
    if (riid == IID_ISampleGrabberCB || riid == IID_IUnknown) {
        *ppv = (void *) static_cast<ISampleGrabberCB*>(this);
        return NOERROR;
    }
    return E_NOINTERFACE;
}

STDMETHODIMP CSampleGrabberCB::SampleCB(double SampleTime, IMediaSample * pSample) {
    return 0;
}

STDMETHODIMP CSampleGrabberCB::BufferCB(double dblSampleTime, BYTE * pBuffer, long lBufferSize) {
    if (!pBuffer) return E_POINTER;
    if (bGrabVideo && frame_handler) frame_handler->VideoFrameData(dblSampleTime, pBuffer, lBufferSize);
    return 0;
}