I am using the program below to capture frames from the video stream using the Sample Grabber. When I use MEDIASUBTYPE_RGB32, the images are saved and I can open them. However, when I use MEDIASUBTYPE_YUY2 or MEDIASUBTYPE_MJPG, the saved image does not open (the Photos app says the format is not supported).
I have tried setting the desired format with SetFormat and with pSampleGrabber->SetMediaType(&mt). I was expecting the sample grabber to save images that I could then view.
Can someone explain how I can get this to work with the other formats? I also don't understand why RGB32 works when the camera does not even report supporting it; it only reports MJPG and YUY2 as supported.
#include <dshow.h>
#include <iostream>
#include <time.h>
EXTERN_C const CLSID CLSID_NullRenderer;
EXTERN_C const CLSID CLSID_SampleGrabber;
static const IID IID_ISampleGrabber =
{ 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
class __declspec(uuid("{C1F400A0-3F08-11D3-9F0B-006008039E37}")) SampleGrabber;
#pragma region SampleGrabber
struct __declspec(uuid("0579154a-2b53-4994-b0d0-e773148eff85"))
ISampleGrabberCB : IUnknown
{
//
// Raw methods provided by interface
//
virtual HRESULT __stdcall SampleCB (
double SampleTime,
struct IMediaSample * pSample ) = 0;
virtual HRESULT __stdcall BufferCB (
double SampleTime,
unsigned char * pBuffer,
long BufferLen ) = 0;
};
struct __declspec(uuid("6b652fff-11fe-4fce-92ad-0266b5d7c78f"))
ISampleGrabber : IUnknown
{
//
// Raw methods provided by interface
//
virtual HRESULT __stdcall SetOneShot (
long OneShot ) = 0;
virtual HRESULT __stdcall SetMediaType (
struct _AMMediaType * pType ) = 0;
virtual HRESULT __stdcall GetConnectedMediaType (
struct _AMMediaType * pType ) = 0;
virtual HRESULT __stdcall SetBufferSamples (
long BufferThem ) = 0;
virtual HRESULT __stdcall GetCurrentBuffer (
/*[in,out]*/ long * pBufferSize,
/*[out]*/ long * pBuffer ) = 0;
virtual HRESULT __stdcall GetCurrentSample (
/*[out,retval]*/ struct IMediaSample * * ppSample ) = 0;
virtual HRESULT __stdcall SetCallback (
struct ISampleGrabberCB * pCallback,
long WhichMethodToCallback ) = 0;
};
struct __declspec(uuid("c1f400a0-3f08-11d3-9f0b-006008039e37"))
SampleGrabber;
// [ default ] interface ISampleGrabber
#pragma endregion
// DirectShow objects
HRESULT hr;
ICreateDevEnum *pDevEnum = NULL;
IEnumMoniker *pEnum = NULL;
IMoniker *pMoniker = NULL;
IPropertyBag *pPropBag = NULL;
IGraphBuilder *pGraph = NULL;
ICaptureGraphBuilder2 *pBuilder = NULL;
IBaseFilter *pCap = NULL;
IBaseFilter *pSampleGrabberFilter = NULL;
IBaseFilter *pSampleGrabberStillFilter = NULL;
ISampleGrabber *pSampleGrabber = NULL;
ISampleGrabber *pSampleGrabberStill = NULL;
IBaseFilter *pNullRenderer = NULL;
IBaseFilter *pNullRendererStill = NULL;
IMediaControl *pMediaControl = NULL;
IAMVideoControl *pAMVidControl = NULL;
IAMCameraControl *pAMVCamControl = NULL;
int img_index_curr = 0;
int img_index_prev = 0;
AM_MEDIA_TYPE g_VideoMediaType;
AM_MEDIA_TYPE *pmtConfig;
// Find the still pin.
IPin *pPin = NULL;
char *pBuffer = NULL;
void exit_message(const char* error_message, int error)
{
// Print an error message (use a format string so '%' in the message is handled safely)
fprintf(stderr, "%s\n", error_message);
// Clean up DirectShow / COM stuff
if (pBuffer != NULL) delete[] pBuffer;
if (pMediaControl != NULL) pMediaControl->Release();
if (pNullRenderer != NULL) pNullRenderer->Release();
if (pNullRendererStill != NULL) pNullRendererStill->Release();
if (pSampleGrabber != NULL) pSampleGrabber->Release();
if (pSampleGrabberStill != NULL) pSampleGrabberStill->Release();
if (pSampleGrabberFilter != NULL) pSampleGrabberFilter->Release();
if (pSampleGrabberStillFilter != NULL) pSampleGrabberStillFilter->Release();
if (pCap != NULL) pCap->Release();
if (pBuilder != NULL) pBuilder->Release();
if (pGraph != NULL) pGraph->Release();
if (pPropBag != NULL) pPropBag->Release();
if (pMoniker != NULL) pMoniker->Release();
if (pEnum != NULL) pEnum->Release();
if (pDevEnum != NULL) pDevEnum->Release();
if (pPin != NULL) pPin->Release();
if (pAMVidControl != NULL) pAMVidControl->Release();
if (pAMVCamControl != NULL) pAMVCamControl->Release();
CoUninitialize();
// Exit the program
exit(error);
}
class SampleGrabberCallback : public ISampleGrabberCB
{
public:
// Fake reference counting; the callback object has static lifetime.
STDMETHODIMP_(ULONG) AddRef() { return 1; }
STDMETHODIMP_(ULONG) Release() { return 2; }
char filename[100];
STDMETHODIMP QueryInterface(REFIID riid, void **ppvObject)
{
if (NULL == ppvObject) return E_POINTER;
if (riid == __uuidof(IUnknown))
{
*ppvObject = static_cast<IUnknown*>(this);
return S_OK;
}
if (riid == __uuidof(ISampleGrabberCB))
{
*ppvObject = static_cast<ISampleGrabberCB*>(this);
return S_OK;
}
*ppvObject = NULL;
return E_NOINTERFACE;
}
STDMETHODIMP SampleCB(double Time, IMediaSample *pSample)
{
fprintf(stderr, "Callback triggered SampleCB\n");
return E_NOTIMPL;
}
STDMETHODIMP BufferCB(double Time, BYTE *pBuffer, long BufferLen)
{
if ((g_VideoMediaType.majortype != MEDIATYPE_Video) ||
(g_VideoMediaType.formattype != FORMAT_VideoInfo) ||
(g_VideoMediaType.cbFormat < sizeof(VIDEOINFOHEADER)) ||
(g_VideoMediaType.pbFormat == NULL) ||
(g_VideoMediaType.subtype != MEDIASUBTYPE_MJPG)
)
{
fprintf(stderr, "Invalid Format\n");
return VFW_E_INVALIDMEDIATYPE;
}
//fprintf(stderr, "Save file\n");
sprintf(filename, "video/Image_Video_%d.bmp", img_index_curr);
img_index_curr++;
HANDLE hf = CreateFile(filename, GENERIC_WRITE,
FILE_SHARE_WRITE, NULL, CREATE_ALWAYS, 0, NULL);
if (hf == INVALID_HANDLE_VALUE)
{
return E_FAIL;
}
long cbBitmapInfoSize = g_VideoMediaType.cbFormat - SIZE_PREHEADER;
VIDEOINFOHEADER *pVideoHeader =
(VIDEOINFOHEADER*)g_VideoMediaType.pbFormat;
BITMAPFILEHEADER bfh;
ZeroMemory(&bfh, sizeof(bfh));
bfh.bfType = 'MB'; // Little-endian for "BM".
bfh.bfSize = sizeof( bfh ) + BufferLen + cbBitmapInfoSize;
bfh.bfOffBits = sizeof( BITMAPFILEHEADER ) + cbBitmapInfoSize;
// Write the file header.
DWORD dwWritten = 0;
WriteFile( hf, &bfh, sizeof( bfh ), &dwWritten, NULL );
WriteFile(hf, HEADER(pVideoHeader), cbBitmapInfoSize, &dwWritten, NULL);
WriteFile( hf, pBuffer, BufferLen, &dwWritten, NULL );
CloseHandle( hf );
return S_OK;
}
};
// Global instance of the class.
SampleGrabberCallback g_VideoCapCB;
int main(int argc, char **argv)
{
// Capture settings
int show_preview_window = 0;
int device_number = 2;
char device_name[100];
// Other variables
char char_buffer[100];
// Parse command line arguments. Available options:
// /preview
int n = 1;
while (n < argc)
{
// Process next command line argument
if (strcmp(argv[n], "/preview") == 0)
{
// Enable preview window
show_preview_window = 1;
}
else
{
// Unknown command line argument
fprintf(stderr, "Unrecognised option: %s\n", argv[n]);
exit_message("", 1);
}
// Increment command line argument counter
n++;
}
// Initialise COM
hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
if (hr != S_OK)
exit_message("Could not initialise COM", 1);
// Create filter graph
hr = CoCreateInstance(CLSID_FilterGraph, NULL,
CLSCTX_INPROC_SERVER, IID_IGraphBuilder,
(void**)&pGraph);
if (hr != S_OK)
exit_message("Could not create filter graph", 1);
// Create capture graph builder.
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL,
CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2,
(void **)&pBuilder);
if (hr != S_OK)
exit_message("Could not create capture graph builder", 1);
// Attach capture graph builder to graph
hr = pBuilder->SetFiltergraph(pGraph);
if (hr != S_OK)
exit_message("Could not attach capture graph builder to graph", 1);
// Create system device enumerator
hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pDevEnum));
if (hr != S_OK)
exit_message("Could not crerate system device enumerator", 1);
// Video input device enumerator
hr = pDevEnum->CreateClassEnumerator(
CLSID_VideoInputDeviceCategory, &pEnum, 0);
if (hr != S_OK)
exit_message("No video devices found", 1);
// Get moniker for specified video input device,
// or for the first device if no device number
// was specified.
VARIANT var;
n = 0;
while(1)
{
// Access next device
hr = pEnum->Next(1, &pMoniker, NULL);
if (hr == S_OK)
{
n++; // increment device count
}
else
{
// device_name is never filled in, so always report the device number
fprintf(stderr,
"Video capture device %d not found\n",
device_number);
exit_message("", 1);
}
if (n >= device_number)
{
break;
}
}
// Get video input device name
hr = pMoniker->BindToStorage(0, 0, IID_PPV_ARGS(&pPropBag));
VariantInit(&var);
hr = pPropBag->Read(L"FriendlyName", &var, 0);
fprintf(stderr, "Capture device: %ls\n", var.bstrVal);
VariantClear(&var);
// Create capture filter and add to graph
hr = pMoniker->BindToObject(0, 0,
IID_IBaseFilter, (void**)&pCap);
if (hr != S_OK) exit_message("Could not create capture filter", 1);
// Add capture filter to graph
hr = pGraph->AddFilter(pCap, L"Capture Filter");
if (hr != S_OK) exit_message("Could not add capture filter to graph", 1);
// Create sample grabber filter
hr = CoCreateInstance(CLSID_SampleGrabber, NULL,
CLSCTX_INPROC_SERVER, IID_IBaseFilter,
(void**)&pSampleGrabberFilter);
if (hr != S_OK)
exit_message("Could not create Sample Grabber filter", 1);
// Query the ISampleGrabber interface of the sample grabber filter
hr = pSampleGrabberFilter->QueryInterface(
IID_ISampleGrabber, (void**)&pSampleGrabber);
if (hr != S_OK)
exit_message("Could not get ISampleGrabber interface to sample grabber filter", 1);
// Enable sample buffering in the sample grabber filter
hr = pSampleGrabber->SetBufferSamples(TRUE);
if (hr != S_OK)
exit_message("Could not enable sample buffering in the sample grabber", 1);
hr = pSampleGrabber->SetCallback(&g_VideoCapCB, 1);
if (hr != S_OK)
exit_message("Could not set sample grabber callback", 1);
fprintf(stderr, "Here");
AM_MEDIA_TYPE mt;
ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
mt.majortype = MEDIATYPE_Video;
mt.subtype = MEDIASUBTYPE_MJPG;
// Set media type in sample grabber filter
hr = pSampleGrabber->SetMediaType(&mt);
if (hr != S_OK)
exit_message("Could not set media type in sample grabber", 1);
// Add sample grabber filter to filter graph
hr = pGraph->AddFilter(pSampleGrabberFilter, L"SampleGrab");
if (hr != S_OK)
exit_message("Could not add Sample Grabber to filter graph", 1);
// Create Null Renderer filter
hr = CoCreateInstance(CLSID_NullRenderer, NULL,
CLSCTX_INPROC_SERVER, IID_IBaseFilter,
(void**)&pNullRenderer);
if (hr != S_OK)
exit_message("Could not create Null Renderer filter", 1);
// Add Null Renderer filter to filter graph
hr = pGraph->AddFilter(pNullRenderer, L"NullRender");
if (hr != S_OK)
exit_message("Could not add Null Renderer to filter graph", 1);
// Get pointer to IAMStreamConfig
IAMStreamConfig *pConfig = NULL;
hr = pBuilder->FindInterface(
&PIN_CATEGORY_CAPTURE, // Preview pin.
0, // Any media type.
pCap, // Pointer to the capture filter.
IID_IAMStreamConfig, (void**)&pConfig);
if (hr != S_OK)
exit_message("Could not find IAMStreamConfig", 1);
int iCount = 0,iSize = 0;
hr = pConfig->GetNumberOfCapabilities(&iCount,&iSize);
if(iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
{
for(int iFormat = 0;iFormat < iCount;iFormat++)
{
fprintf(stderr, "iFormat: %d\n", iFormat);
VIDEO_STREAM_CONFIG_CAPS scc;
hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
if(hr == S_OK)
{
fprintf(stderr, "Looking for Format\n");
VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
fprintf(stderr, "Width = %d , Height = %d \n", pVih->bmiHeader.biWidth, pVih->bmiHeader.biHeight);
OLECHAR* guidString;
StringFromCLSID(pmtConfig->subtype, &guidString);
fprintf(stderr, "Media Type = %ls\n", guidString);
// Without breaking out of the loop below, MJPG ends up selected; otherwise YUY2 is.
// I am not explicitly looking for RGB32 because it does not appear in this list, even though it is the only subtype that works.
if(pVih->bmiHeader.biWidth == 1920 && pVih->bmiHeader.biHeight == 1080)
{
fprintf(stderr, "Set Format -----------------------\n");
hr = pConfig->SetFormat(pmtConfig);
if (hr != S_OK)
exit_message("Could not set format Directly", 1);
}
}
else
{
fprintf(stderr, "Could not get streams cap\n");
}
}
}
fprintf(stderr, "------------------------\n\n");
// Connect up the filter graph's capture stream
hr = pBuilder->RenderStream(
&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
pCap, pSampleGrabberFilter, pNullRenderer);
if (hr != S_OK)
exit_message("Could not render capture video stream", 1);
// Connect up the filter graph's preview stream
if (show_preview_window > 0)
{
hr = pBuilder->RenderStream(
&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
pCap, NULL, NULL);
if (hr != S_OK && hr != VFW_S_NOPREVIEWPIN)
exit_message("Could not render preview video stream", 1);
}
// Get media control interfaces to graph builder object
hr = pGraph->QueryInterface(IID_IMediaControl,
(void**)&pMediaControl);
if (hr != S_OK) exit_message("Could not get media control interface", 1);
// Run graph
while(1)
{
hr = pMediaControl->Run();
// Hopefully, the return value was S_OK or S_FALSE
if (hr == S_OK) break; // graph is now running
if (hr == S_FALSE) continue; // graph still preparing to run
// If the Run function returned something else,
// there must be a problem
fprintf(stderr, "Error: %u\n", hr);
exit_message("Could not run filter graph", 1);
}
// Query the capture filter for IAMVideoControl
hr = pCap->QueryInterface(IID_IAMVideoControl, (void**)&pAMVidControl);
if (hr != S_OK) exit_message("Could not get IAMVideoControl", 1);
// Query the capture filter for IAMCameraControl
hr = pCap->QueryInterface(IID_IAMCameraControl, (void**)&pAMVCamControl);
if (hr != S_OK) exit_message("Could not get IAMCameraControl", 1);
long Min, Max, Step, Default, Flags, Val;
// Get the range and default value.
hr = pAMVCamControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step,
&Default, &Flags);
// Set Default exposure
hr = pAMVCamControl->Set(CameraControl_Exposure, 1, CameraControl_Flags_Manual);
if (hr != S_OK) exit_message("Could not set exposure", 1);
hr = pAMVCamControl->Get(CameraControl_Exposure, &Val, &Flags);
if (hr != S_OK) exit_message("Could not get current camera exposure", 1);
// Get the media type from the video grabber filter
hr = pSampleGrabber->GetConnectedMediaType(
&g_VideoMediaType);
if (hr != S_OK) exit_message("Could not get media type", 1);
int count = 0;
clock_t begin = clock();
// Loop to change exposure and capture still image
while(true)
{
if(img_index_curr > img_index_prev)
{
clock_t end = clock();
double time_spent = (double)(end - begin) / CLOCKS_PER_SEC;
fprintf(stderr, "Image = %d, Time = %f, \n", img_index_curr, time_spent);
if(img_index_curr == 20)
{
break;
}
if(img_index_curr == 10)
{
fprintf(stderr, "Set New expo\n");
Val = -10;
Flags = CameraControl_Flags_Manual;
hr = pAMVCamControl->Set(CameraControl_Exposure, Val, Flags);
clock_t set_exposure_time = clock();
double time_spent_expo = (double)(set_exposure_time - begin) / CLOCKS_PER_SEC;
fprintf(stderr, "Expo set Time = %f, \n", time_spent_expo);
if (hr != S_OK) exit_message("Could not set exposure", 1);
}
count++;
hr = pAMVCamControl->Get(CameraControl_Exposure, &Val, &Flags);
if (hr != S_OK) exit_message("Could not get exposure", 1);
fprintf(stderr, "Expo Val: %ld\n", Val);
fprintf(stderr, "Expo Flags: %ld\n", Flags);
img_index_prev = img_index_curr;
fprintf(stderr, "\n");
}
//Sleep(500);
}
if (pPin != NULL) pPin->Release();
// Stop the graph
pMediaControl->Stop();
// Clean up and exit
exit_message("", 0);
}
You are saving to a Windows Bitmap (.bmp) file, and this image format generally does not assume YUY2 or MJPG content. It might technically make some sense to push such data there, but you will have a hard time finding applications capable of recognizing the result.
You should be able to save MJPG content to a .jpg file as is (that is, without prepending bitmap headers), because every MJPG sample is a complete JPEG image.
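As a rough illustration, here is a minimal sketch of a BufferCB that dumps each sample straight to disk as a .jpg. It assumes the sample grabber really is connected with MEDIASUBTYPE_MJPG, and it reuses the filename buffer and img_index_curr counter from your code; the output directory is just an example:

STDMETHODIMP BufferCB(double Time, BYTE *pBuffer, long BufferLen)
{
    // Each MJPG sample is a self-contained JPEG, so write the raw bytes only,
    // with no BITMAPFILEHEADER / BITMAPINFOHEADER in front of them.
    sprintf(filename, "video/Image_Video_%d.jpg", img_index_curr);
    img_index_curr++;
    HANDLE hf = CreateFile(filename, GENERIC_WRITE,
        FILE_SHARE_WRITE, NULL, CREATE_ALWAYS, 0, NULL);
    if (hf == INVALID_HANDLE_VALUE)
        return E_FAIL;
    DWORD dwWritten = 0;
    WriteFile(hf, pBuffer, BufferLen, &dwWritten, NULL);
    CloseHandle(hf);
    return S_OK;
}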
To the best of my knowledge there is no well-known file format that holds raw YUY2 data in a way that is viewable right away. You will need to either convert it (for example to RGB) or build your own viewer.
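If you go the conversion route, the sketch below shows one way to turn a single YUY2 frame into a 24-bit bottom-up BMP using the usual BT.601 integer math. The function name, the even-width assumption, and the use of std::vector are my own choices, not something your graph requires:

#include <windows.h>
#include <vector>

static BYTE Clamp(int v) { return (BYTE)(v < 0 ? 0 : (v > 255 ? 255 : v)); }

bool SaveYUY2AsBmp(const char *path, const BYTE *yuy2, int width, int height)
{
    const int stride = ((width * 3 + 3) / 4) * 4;     // BMP rows are 4-byte aligned
    std::vector<BYTE> bgr(stride * height, 0);
    for (int y = 0; y < height; y++)
    {
        const BYTE *src = yuy2 + y * width * 2;        // YUY2 packs Y0 U Y1 V per 2 pixels
        BYTE *dst = &bgr[(height - 1 - y) * stride];   // BMP stores rows bottom-up
        for (int x = 0; x < width; x += 2, src += 4, dst += 6)
        {
            int c0 = src[0] - 16, d = src[1] - 128, c1 = src[2] - 16, e = src[3] - 128;
            dst[0] = Clamp((298 * c0 + 516 * d + 128) >> 8);            // B
            dst[1] = Clamp((298 * c0 - 100 * d - 208 * e + 128) >> 8);  // G
            dst[2] = Clamp((298 * c0 + 409 * e + 128) >> 8);            // R
            dst[3] = Clamp((298 * c1 + 516 * d + 128) >> 8);            // second pixel shares U/V
            dst[4] = Clamp((298 * c1 - 100 * d - 208 * e + 128) >> 8);
            dst[5] = Clamp((298 * c1 + 409 * e + 128) >> 8);
        }
    }
    BITMAPFILEHEADER bfh = { 0 };
    BITMAPINFOHEADER bih = { 0 };
    bfh.bfType = 'MB';                                 // little-endian "BM"
    bfh.bfOffBits = sizeof(bfh) + sizeof(bih);
    bfh.bfSize = bfh.bfOffBits + (DWORD)bgr.size();
    bih.biSize = sizeof(bih);
    bih.biWidth = width;
    bih.biHeight = height;                             // positive height = bottom-up
    bih.biPlanes = 1;
    bih.biBitCount = 24;
    bih.biCompression = BI_RGB;
    HANDLE hf = CreateFile(path, GENERIC_WRITE, FILE_SHARE_WRITE,
        NULL, CREATE_ALWAYS, 0, NULL);
    if (hf == INVALID_HANDLE_VALUE) return false;
    DWORD dw = 0;
    WriteFile(hf, &bfh, sizeof(bfh), &dw, NULL);
    WriteFile(hf, &bih, sizeof(bih), &dw, NULL);
    WriteFile(hf, bgr.data(), (DWORD)bgr.size(), &dw, NULL);
    CloseHandle(hf);
    return true;
}

You would call this from BufferCB with the width and height taken from the connected format (pVideoHeader->bmiHeader.biWidth / biHeight).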