DirectShow: webcam preview and image capture

After looking at a very similar question and seeing almost identical code, I've decided to ask this question separately. I want to show a video preview of the webcam's video stream to the default window DirectShow uses, and I also want the ability to "take a picture" of the video stream at any given moment.

I started with the DirectShow examples on MSDN, as well as the AMCap sample code, and have something I believe should handle the preview part, but it does not. I've found no examples of grabbing an image from the video stream except ones using SampleGrabber, which is deprecated, so I am trying not to use it.

Below is my code, line for line. Note that most of the code in EnumerateCameras is commented out. That code would have been for attaching to another window, which I don't want to do. The MSDN documentation explicitly states that the VMR-7 creates its own window to display the video stream. I get no errors in my app, but this window never appears.

My question then is this: What am I doing wrong? Alternatively, if you know of a simple example of what I am trying to do, link me to it. AMCap is not a simple example, for reference.

NOTE: InitializeVMR is for running in windowless mode, which is my ultimate goal (integrating into a DirectX game). For now, however, I just want it to run in the simplest mode possible.

EDIT: The first portion of this question, previewing the camera stream, is solved. I am now just looking for an alternative to the deprecated SampleGrabber class so I can snap a photo at any moment and save it to a file.

EDIT: After searching Google for almost an hour, the general consensus seems to be that you HAVE to use ISampleGrabber. Please let me know if you find anything different.
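
For reference, the ISampleGrabber approach everyone points to looks roughly like the sketch below (deprecated qedit.h interfaces; error handling and format negotiation trimmed, and the member variable names are just borrowed from my code further down):

// Sketch only: insert a Sample Grabber between the capture pin and the renderer,
// then copy the most recent frame out of it on demand.
IBaseFilter *pGrabberFilter = NULL;
ISampleGrabber *pGrabber = NULL;

HRESULT hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
    IID_IBaseFilter, (void**)&pGrabberFilter);
if (SUCCEEDED(hr))
    hr = m_pFilterGraph->AddFilter(pGrabberFilter, L"Sample Grabber");
if (SUCCEEDED(hr))
    hr = pGrabberFilter->QueryInterface(IID_ISampleGrabber, (void**)&pGrabber);
if (SUCCEEDED(hr))
{
    AM_MEDIA_TYPE mt;
    ZeroMemory(&mt, sizeof(mt));
    mt.majortype = MEDIATYPE_Video;
    mt.subtype = MEDIASUBTYPE_RGB24;   // ask for uncompressed RGB frames
    pGrabber->SetMediaType(&mt);
    pGrabber->SetBufferSamples(TRUE);  // keep a copy of the latest frame

    // Route the capture stream through the grabber to the default renderer.
    hr = m_pCaptureGraph->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
        m_pCameraFilter, pGrabberFilter, NULL);
}

// Later, to snap a picture:
long cbBuffer = 0;
pGrabber->GetCurrentBuffer(&cbBuffer, NULL);            // query the required size
BYTE *pFrame = new BYTE[cbBuffer];
pGrabber->GetCurrentBuffer(&cbBuffer, (long*)pFrame);   // copy out the frame bits
// write pFrame out as a BMP (BITMAPFILEHEADER + the format from GetConnectedMediaType)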

Testing code (main.cpp):

CWebcam* camera = new CWebcam();
HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
MessageBox(NULL, L"text", L"caption", NULL);
if (SUCCEEDED(hr))
{
    camera->Create();
    camera->EnumerateCameras();
    camera->StartCamera();
}
int d;
cin >> d;

Webcam.cpp:

#include "Webcam.h"

CWebcam::CWebcam() {
    HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
    //m_pTexInst = nullptr;
    //m_pTexRes = nullptr;
}

CWebcam::~CWebcam() {
    // Release COM interfaces before shutting COM down.
    m_pDeviceMonikers->Release();
    m_pMediaController->Release();
    CoUninitialize();
}

BOOL CWebcam::Create() {
    InitCaptureGraphBuilder(&m_pFilterGraph, &m_pCaptureGraph);
    hr = m_pFilterGraph->QueryInterface(IID_IMediaControl, (void **)&m_pMediaController);
    return TRUE;
}

void CWebcam::Destroy() {
}

void CWebcam::EnumerateCameras() {  
    HRESULT hr = EnumerateDevices(CLSID_VideoInputDeviceCategory, &m_pDeviceMonikers);
    if (SUCCEEDED(hr))
    {
        //DisplayDeviceInformation(m_pDeviceMonikers);
        //m_pDeviceMonikers->Release();

        IMoniker *pMoniker = NULL;
        if(m_pDeviceMonikers->Next(1, &pMoniker, NULL) == S_OK)
        {
            hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&m_pCameraFilter);
            if (SUCCEEDED(hr))
            {
                hr = m_pFilterGraph->AddFilter(m_pCameraFilter, L"Capture Filter");
            }
        }

        // connect the output pin to the video renderer
        if(SUCCEEDED(hr))
        {
            hr = m_pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, 
                m_pCameraFilter, NULL, NULL);
        }
        //InitializeVMR(hwnd, m_pFilterGraph, &m_pVMRControl, 1, FALSE);
        //get the video window that will be displayed from the filter graph
        IVideoWindow *pVideoWindow = NULL;
        hr = m_pFilterGraph->QueryInterface(IID_IVideoWindow, (void **)&pVideoWindow);
        /*if(hr != NOERROR)
        {
            printf("This graph cannot preview properly");
        }
        else
        {
            //get the video stream configurations
            hr = m_pCaptureGraph->FindInterface(&PIN_CATEGORY_CAPTURE,
                &MEDIATYPE_Video, m_pCameraFilter,
                IID_IAMStreamConfig, (void **)&m_pVideoStreamConfig);

            //Find out if this is a DV stream
            AM_MEDIA_TYPE *pMediaTypeDV;

            //fake window handle
            HWND window = NULL;
            if(m_pVideoStreamConfig && SUCCEEDED(m_pVideoStreamConfig->GetFormat(&pMediaTypeDV)))
            {
                if(pMediaTypeDV->formattype == FORMAT_DvInfo)
                {
                    // in this case we want to set the size of the parent window to that of
                    // current DV resolution.
                    // We get that resolution from the IVideoWindow.
                    IBasicVideo* pBasivVideo;

                    // If we got here, gcap.pVW is not NULL 
                    //ASSERT(pVideoWindow != NULL);
                    hr = pVideoWindow->QueryInterface(IID_IBasicVideo, (void**)&pBasivVideo);

                    /*if(SUCCEEDED(hr))
                    {
                        HRESULT hr1, hr2;
                        long lWidth, lHeight;

                        hr1 = pBasivVideo->get_VideoHeight(&lHeight);
                        hr2 = pBasivVideo->get_VideoWidth(&lWidth);
                        if(SUCCEEDED(hr1) && SUCCEEDED(hr2))
                        {
                            ResizeWindow(lWidth, abs(lHeight));
                        }
                    }
                }
            }

            RECT rc;
            pVideoWindow->put_Owner((OAHWND)window);    // We own the window now
            pVideoWindow->put_WindowStyle(WS_CHILD);    // you are now a child

            GetClientRect(window, &rc);
            pVideoWindow->SetWindowPosition(0, 0, rc.right, rc.bottom); // be this big
            pVideoWindow->put_Visible(OATRUE);
        }*/
    }   
}

BOOL CWebcam::StartCamera() {
    if(m_bIsStreaming == FALSE)
    {
        m_bIsStreaming = TRUE;
        hr = m_pMediaController->Run();
        if(FAILED(hr))
        {
            // stop parts that ran
            m_pMediaController->Stop();
            return FALSE;
        }
        return TRUE;
    }
    return FALSE;
}

void CWebcam::EndCamera() {
    if(m_bIsStreaming)
    {
        hr = m_pMediaController->Stop();
        m_bIsStreaming = FALSE;
        //invalidate client rect as well so that it must redraw
    }
}

BOOL CWebcam::CaptureToTexture() {
    return TRUE;
}

HRESULT CWebcam::InitCaptureGraphBuilder(
  IGraphBuilder **ppGraph,  // Receives the pointer.
  ICaptureGraphBuilder2 **ppBuild  // Receives the pointer.
)
{
    if (!ppGraph || !ppBuild)
    {
        return E_POINTER;
    }
    IGraphBuilder *pGraph = NULL;
    ICaptureGraphBuilder2 *pBuild = NULL;

    // Create the Capture Graph Builder.
    HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, 
        CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void**)&pBuild );
    if (SUCCEEDED(hr))
    {
        // Create the Filter Graph Manager.
        hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,
            IID_IGraphBuilder, (void**)&pGraph);
        if (SUCCEEDED(hr))
        {
            // Initialize the Capture Graph Builder.
            pBuild->SetFiltergraph(pGraph);

            // Return both interface pointers to the caller.
            *ppBuild = pBuild;
            *ppGraph = pGraph; // The caller must release both interfaces.
            return S_OK;
        }
        else
        {
            pBuild->Release();
        }
    }
    return hr; // Failed
}

HRESULT CWebcam::EnumerateDevices(REFGUID category, IEnumMoniker **ppEnum)
{
    // Create the System Device Enumerator.
    ICreateDevEnum *pSystemDeviceEnumerator;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,  
        CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSystemDeviceEnumerator));

    if (SUCCEEDED(hr))
    {
        // Create an enumerator for the category.
        hr = pSystemDeviceEnumerator->CreateClassEnumerator(category, ppEnum, 0);
        if (hr == S_FALSE)
        {
            hr = VFW_E_NOT_FOUND;  // The category is empty. Treat as an error.
        }
        pSystemDeviceEnumerator->Release();
    }
    return hr;
}

void CWebcam::DisplayDeviceInformation(IEnumMoniker *pEnum)
{
    IMoniker *pMoniker = NULL;
    int counter = 0;

    while (pEnum->Next(1, &pMoniker, NULL) == S_OK)
    {
        IPropertyBag *pPropBag;
        HRESULT hr = pMoniker->BindToStorage(0, 0, IID_PPV_ARGS(&pPropBag));
        if (FAILED(hr))
        {
            pMoniker->Release();
            continue;  
        } 

        VARIANT var;
        VariantInit(&var);

        // Get description or friendly name.
        hr = pPropBag->Read(L"Description", &var, 0);
        if (FAILED(hr))
        {
            hr = pPropBag->Read(L"FriendlyName", &var, 0);
        }
        if (SUCCEEDED(hr))
        {
            printf("%d: %S\n", counter, var.bstrVal);
            VariantClear(&var); 
        }

        hr = pPropBag->Write(L"FriendlyName", &var);

        // WaveInID applies only to audio capture devices.
        hr = pPropBag->Read(L"WaveInID", &var, 0);
        if (SUCCEEDED(hr))
        {
            printf("%d: WaveIn ID: %d\n", counter, var.lVal);
            VariantClear(&var); 
        }

        hr = pPropBag->Read(L"DevicePath", &var, 0);
        if (SUCCEEDED(hr))
        {
            // The device path is not intended for display.
            printf("%d: Device path: %S\n", counter, var.bstrVal);
            VariantClear(&var); 
        }

        pPropBag->Release();
        pMoniker->Release();
        counter++;
    }
}

HRESULT CWebcam::InitializeVMR(
    HWND hwndApp,         // Application window.
    IGraphBuilder* pFG,    // Pointer to the Filter Graph Manager.
    IVMRWindowlessControl** ppWc,  // Receives the interface.
    DWORD dwNumStreams,  // Number of streams to use.
    BOOL fBlendAppImage  // Are we alpha-blending a bitmap?
    )
{
    IBaseFilter* pVmr = NULL;
    IVMRWindowlessControl* pWc = NULL;
    *ppWc = NULL;

    // Create the VMR and add it to the filter graph.
    HRESULT hr = CoCreateInstance(CLSID_VideoMixingRenderer, NULL,
       CLSCTX_INPROC, IID_IBaseFilter, (void**)&pVmr);
    if (FAILED(hr))
    {
        return hr;
    }
    hr = pFG->AddFilter(pVmr, L"Video Mixing Renderer");
    if (FAILED(hr))
    {
        pVmr->Release();
        return hr;
    }

    // Set the rendering mode and number of streams.  
    IVMRFilterConfig* pConfig;
    hr = pVmr->QueryInterface(IID_IVMRFilterConfig, (void**)&pConfig);
    if (SUCCEEDED(hr)) 
    {
        pConfig->SetRenderingMode(VMRMode_Windowless);

        // Set the VMR-7 to mixing mode if you want more than one video
        // stream, or you want to mix a static bitmap over the video.
        // (The VMR-9 defaults to mixing mode with four inputs.)
        if (dwNumStreams > 1 || fBlendAppImage) 
        {
            pConfig->SetNumberOfStreams(dwNumStreams);
        }
        pConfig->Release();

        hr = pVmr->QueryInterface(IID_IVMRWindowlessControl, (void**)&pWc);
        if (SUCCEEDED(hr)) 
        {
            pWc->SetVideoClippingWindow(hwndApp);
            *ppWc = pWc;  // The caller must release this interface.
        }
    }
    pVmr->Release();

    // Now the VMR can be connected to other filters.
    return hr;
}


In windowless mode the VMR does not create a separate window. Since you started initialization for windowless mode, you have to follow SetVideoClippingWindow with a call to IVMRWindowlessControl::SetVideoPosition to tell the VMR where to draw the video within your window; see VMR Windowless Mode on MSDN.
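
The missing step looks roughly like this (a minimal sketch, assuming pWc is the IVMRWindowlessControl* your InitializeVMR returns and hwndApp is the window you pass in; the whole client area is used as the destination):

RECT rcDest;
GetClientRect(hwndApp, &rcDest);         // draw into the full client area
pWc->SetVideoClippingWindow(hwndApp);    // you already do this inside InitializeVMR
pWc->SetVideoPosition(NULL, &rcDest);    // NULL source rect = show the entire video frame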

Another sample code snippet for you: http://www.assembla.com/code/roatl-utilities/subversion/nodes/trunk/FullScreenWindowlessVmrSample01/MainDialog.h#ln188
