Reference: DirectShow capture graph code (Windows Mobile / Windows CE)
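The listing below builds a DirectShow capture graph on Windows Mobile / Windows CE: a camera source filter, a WMV9 encoder DMO and ASF multiplexer for recording, a preview window, and a still-image sink. It appears to follow the conventions of the Windows Mobile camera-capture samples, including the CHK/ERR error-handling macros and the Cleanup: label. The macros are not shown in this post; the following is only a minimal sketch of what they are assumed to do (jump to Cleanup: on a failed HRESULT), not the author's actual definitions.

// Assumed error-handling helpers (a sketch; the original post does not show them).
// CHK() stores the HRESULT of an expression and jumps to Cleanup: on failure;
// ERR() records a specific failure code and jumps immediately.
#define CHK( x )              \
    do {                      \
        hr = ( x );           \
        if( FAILED( hr ))     \
        {                     \
            goto Cleanup;     \
        }                     \
    } while( 0 )

#define ERR( x )              \
    do {                      \
        hr = ( x );           \
        goto Cleanup;         \
    } while( 0 )

Both macros assume a local HRESULT named hr and a Cleanup: label in the enclosing function, exactly as in CreateCaptureGraphInternal() below.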
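CPropertyBag is another sample helper that is not shown here: a small IPropertyBag implementation used to hand the "VCapName" value to the capture filter through IPersistPropertyBag::Load. The class below is a hypothetical single-property sketch of such a bag, not the implementation the author used.

// Hypothetical minimal property bag (an assumption -- the real CPropertyBag used by
// the listing is a helper class that is not part of this post). It stores a single
// named VARIANT so that PropBag.Write( L"VCapName", &varCamName ) followed by
// pPropertyBag->Load( &PropBag, NULL ) can pass the camera driver name to the filter.
#include <windows.h>
#include <ocidl.h>      // IPropertyBag, IErrorLog
#include <atlbase.h>    // CComBSTR, CComVariant

class CPropertyBag : public IPropertyBag
{
public:
    CPropertyBag() : m_ref( 1 ) {}

    // IPropertyBag: return the stored value when asked for it by name.
    STDMETHODIMP Read( LPCOLESTR pszPropName, VARIANT *pVar, IErrorLog * /*pErrorLog*/ )
    {
        if( pszPropName == NULL || pVar == NULL )
            return E_POINTER;
        if( m_name.Length() > 0 && wcscmp( m_name, pszPropName ) == 0 )
            return ::VariantCopy( pVar, &m_value );   // caller passes an initialized VARIANT
        return E_FAIL;
    }

    // IPropertyBag: remember the (single) property written by the caller.
    STDMETHODIMP Write( LPCOLESTR pszPropName, VARIANT *pVar )
    {
        if( pszPropName == NULL || pVar == NULL )
            return E_POINTER;
        m_name = pszPropName;
        return m_value.Copy( pVar );
    }

    // IUnknown boilerplate; the object lives on the stack in the listing,
    // so Release() does not delete it.
    STDMETHODIMP QueryInterface( REFIID riid, void **ppv )
    {
        if( ppv == NULL )
            return E_POINTER;
        if( riid == IID_IUnknown || riid == IID_IPropertyBag )
        {
            *ppv = static_cast<IPropertyBag*>( this );
            AddRef();
            return S_OK;
        }
        *ppv = NULL;
        return E_NOINTERFACE;
    }
    STDMETHODIMP_(ULONG) AddRef()  { return ++m_ref; }
    STDMETHODIMP_(ULONG) Release() { return --m_ref; }

private:
    ULONG       m_ref;
    CComBSTR    m_name;
    CComVariant m_value;
};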
HRESULT
CGraphManager::CreateCaptureGraphInternal()
{
    HRESULT       hr = S_OK;
    CComVariant   varCamName;
    CPropertyBag  PropBag;
    OAEVENT       oaEvent;
    WCHAR         wzDeviceName[ MAX_PATH + 1 ];
    CComPtr<IMediaEvent>         pMediaEvent;
    CComPtr<IGraphBuilder>       pFilterGraph;
    CComPtr<IBaseFilter>         pVideoEncoder;
    CComPtr<IBaseFilter>         pASFMultiplexer;
    CComPtr<IFileSinkFilter>     pFileSinkFilter;
    CComPtr<IPersistPropertyBag> pPropertyBag;
    CComPtr<IDMOWrapperFilter>   pWrapperFilter;
    CComPtr<IBaseFilter>         pImageSinkFilter;
    CComPtr<IAMVideoControl>     pVideoControl;
    CComPtr<IPin>                pStillPin;
    CComPtr<IUnknown>            pUnkCaptureFilter;
    CComPtr<IVideoWindow>        pVideoWindow;
    IMediaControl               *pControl = NULL;
    CComPtr<IBaseFilter>         nullRenderer;
    CComPtr<IFilterGraph>        pGrabber;
    CComPtr<IUnknown>            pUnk1;
    CComPtr<IPin>                pCaptPin;
    RECT rect;
    //
    // Create the capture graph builder and register the filter graph manager.
    //
    CHK( m_pCaptureGraphBuilder.CoCreateInstance( CLSID_CaptureGraphBuilder ));
    CHK( pFilterGraph.CoCreateInstance( CLSID_FilterGraph ));
    CHK( m_pCaptureGraphBuilder->SetFiltergraph( pFilterGraph ));

    //
    // Create and initialize the video capture filter.
    //
    CHK( m_pVideoCaptureFilter.CoCreateInstance( CLSID_VideoCapture ));
    CHK( m_pVideoCaptureFilter.QueryInterface( &pPropertyBag ));

    // Load the camera driver (CAM1) into the video capture filter.
    CHK( GetFirstCameraDriver( wzDeviceName ));
    varCamName = wzDeviceName;
    if( varCamName.vt != VT_BSTR )
    {
        ERR( E_OUTOFMEMORY );
    }
    CHK( PropBag.Write( L"VCapName", &varCamName ));
    CHK( pPropertyBag->Load( &PropBag, NULL ));

    // Everything succeeded; add the video capture filter to the filter graph.
    CHK( pFilterGraph->AddFilter( m_pVideoCaptureFilter, L"Video Capture Filter Source" ));
    //
    // Third step: create the video encoder DMO, load the WMV9 encoder,
    // and add it to the graph.
    //

    // Create the video encoder.
    CHK( pVideoEncoder.CoCreateInstance( CLSID_DMOWrapperFilter ));
    CHK( pVideoEncoder.QueryInterface( &pWrapperFilter ));

    // Load the WMV9 DMO.
    CHK( pWrapperFilter->Init( CLSID_CWMV9EncMediaObject, DMOCATEGORY_VIDEO_ENCODER ));

    // Everything succeeded; add the encoder to the graph.
    CHK( pFilterGraph->AddFilter( pVideoEncoder, L"WMV9 DMO Encoder" ));

    //
    // Create the ASF multiplexer and add it to the graph.
    //
    CHK( m_pCaptureGraphBuilder->SetOutputFileName( &MEDIASUBTYPE_Asf,
        L"\\video1.asf", &pASFMultiplexer, &pFileSinkFilter ));

    //
    // Connect the video capture filter, the encoder, and the multiplexer together.
    //
    CHK( m_pCaptureGraphBuilder->RenderStream( &PIN_CATEGORY_CAPTURE,
        &MEDIATYPE_Video, m_pVideoCaptureFilter, pVideoEncoder, pASFMultiplexer ));

    CHK( pFilterGraph->QueryInterface( IID_IVideoWindow, (void **)&pVideoWindow ));
    CHK( m_pCaptureGraphBuilder->RenderStream( &PIN_CATEGORY_PREVIEW,
        &MEDIATYPE_Video, m_pVideoCaptureFilter, NULL, NULL ));
    CHK( pVideoWindow->put_Owner( (OAHWND)g_h1 ));
    hr = pVideoWindow->put_WindowStyle( WS_CHILD | WS_VISIBLE | WS_CLIPSIBLINGS );
    GetClientRect( g_h1, &rect );
    CHK( pVideoWindow->SetWindowPosition( rect.left, rect.top,
        rect.right - rect.left, rect.bottom - rect.top ));
    CHK( pVideoWindow->put_AutoShow( OATRUE ));
    CHK( pVideoWindow->put_Visible( OATRUE ));
    CHK( pVideoWindow->put_WindowState( SW_SHOW ));
    // CHK( pVideoWindow->put_FullScreenMode( OATRUE ));
    // CHK( pVideoWindow->SetWindowForeground( OATRUE ));
    // CHK( m_pCaptureGraphBuilder->QueryInterface( IID_IMediaControl, (void **)&pControl ));
    // CHK( pControl->Run());
    //
    // Create the still image filter and connect it to the video capture filter.
    //
    CHK( pImageSinkFilter.CoCreateInstance( CLSID_IMGSinkFilter ));
    CHK( pFilterGraph->AddFilter( pImageSinkFilter, L"Still image filter" ));
    CHK( m_pCaptureGraphBuilder->RenderStream( &PIN_CATEGORY_STILL,
        &MEDIATYPE_Video, m_pVideoCaptureFilter, NULL, pImageSinkFilter ));
    CHK( pImageSinkFilter.QueryInterface( &m_pImageSinkFilter ));

    //
    // Prevent the data from flowing into the capture stream.
    //
    CHK( m_pCaptureGraphBuilder->ControlStream( &PIN_CATEGORY_CAPTURE,
        &MEDIATYPE_Video, m_pVideoCaptureFilter, 0, 0, 0, 0 ));

    IAMStreamConfig *pStillStreamConfig;
    // IBaseFilter *pCap;
    // GetDefaultCapDevice( &pCap );
    CHK( m_pCaptureGraphBuilder->FindInterface( &PIN_CATEGORY_STILL,
        &MEDIATYPE_Video, m_pVideoCaptureFilter, IID_IAMStreamConfig,
        (void**)&pStillStreamConfig ));

    AM_MEDIA_TYPE *pType = NULL, pType1;
    int iCount = 0, iSize = 0;
    /* // Does not work for Mio
    VIDEO_STREAM_CONFIG_CAPS caps;
    pStillStreamConfig->GetNumberOfCapabilities( &iCount, &iSize );
    pStillStreamConfig->GetFormat( &pType );
    // pType->majortype = MEDIATYPE_Video;
    // pType->subtype = MEDIASUBTYPE_RGB555;
    int i;
    for( i = 0; i < iCount; i++ )
    {
        // GetStreamCaps allocates the AM_MEDIA_TYPE, which must be freed with DeleteMediaType.
        if( pStillStreamConfig->GetStreamCaps( i, &pType, (BYTE*)&caps ) == S_OK )
        {
            VIDEOINFOHEADER* m_fVideoHeader = (VIDEOINFOHEADER*)pType->pbFormat;
            int t = 0;
            t++;    // Used only while debugging to pick the best mode.
        }
    }
    pStillStreamConfig->GetStreamCaps( 2, &pType, (BYTE*)&caps );   // Capability index 2 means 320x240.
    VIDEOINFOHEADER* wert = (VIDEOINFOHEADER*)pType->pbFormat;      // View the format block as a VIDEOINFOHEADER.
    CHK( pStillStreamConfig->SetFormat( pType ));
    pStillStreamConfig->GetFormat( &pType );                        // Check the connected stream format here.
    wert = (VIDEOINFOHEADER*)pType->pbFormat;                       // View it as a VIDEOINFOHEADER again.
    */

    //
    // Get the handle for DShow events. The main loop will listen both for
    // notifications from the UI thread and for DShow notifications.
    //
    CHK( pFilterGraph->QueryInterface( IID_IMediaEvent, (void**)&pMediaEvent ));
    CHK( pMediaEvent->GetEventHandle( &oaEvent ));
    m_handle[1] = (HANDLE)oaEvent;
    m_fGraphBuilt = TRUE;

    CHK( nullRenderer.CoCreateInstance( CLSID_NullRend ));
    CHK( pFilterGraph->AddFilter( nullRenderer, L"NullRenderer" ));
    // CHK( pGrabber.CoCreateInstance( CLSID_CSampleGrabber ));

    ///////////////////////// No influence? ////////////////
    CHK( m_pVideoCaptureFilter.QueryInterface( &pVideoControl ));
    CHK( m_pVideoCaptureFilter.QueryInterface( &pUnk1 ));
    CHK( m_pCaptureGraphBuilder->FindPin( pUnk1, PINDIR_INPUT,
        &PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, FALSE, 0, &pCaptPin ));
    // CHK( pVideoControl->SetMode( pStillPin, VideoControlFlag_FlipHorizontal ));
    //////////////////////////////////////////////
    // pCaptPin->Release();

    CHK( m_pVideoCaptureFilter.QueryInterface( &pUnkCaptureFilter ));
    // CHK( pFilterGraph->AddFilter( pGrabber, L"SamGrabber" ));
    // IPin *pGrabOut = GetOutPin( pGrab, 0 );
    // IPin *pNullIn  = GetInPin( nullRenderer, 0 );
    // NotifyMessage( MESSAGE_ERROR, L"Building the graph failed" );
    IBaseFilter *pSourceFilter = NULL;
    hr = S_FALSE;
    IOverlay *pOverlay;
    if( m_pCaptureGraphBuilder )
    {
        if( SUCCEEDED( pFilterGraph->FindFilterByName( L"Video Capture Filter Source", &pSourceFilter )))
        {
            IAMCameraControl *pControl = NULL;
            if( SUCCEEDED( pSourceFilter->QueryInterface( IID_IAMCameraControl, (void**)&pControl )))
            {
                // CHK( pControl->Set( CameraControl_Zoom, 150, CameraControl_Flags_Manual ));
                // CHK( pControl-
                // CHK( pControl-
                // CHK( pControl-
                // CHK( pControl-
            }
            IAMVideoProcAmp *pVideoControl = NULL;
            if( SUCCEEDED( pSourceFilter->QueryInterface( IID_IAMVideoProcAmp, (void**)&pVideoControl )))
            {
                long nMin, nMax, nDefault, nStep, nCaps;
                // hr = pVideoControl-
                // CHK( pVideoControl->Set( VideoProcAmp_Scene, 1, CameraControl_Flags_Auto ));
            }
            // ConnectPins( nullRenderer, 0, pSourceFilter, 0 );
            // SafeRelease( pControl );
        }
        // SafeRelease( pSourceFilter );

        /*
        IBaseFilter *pFilter = NULL;
        IPin *pin = NULL;
        if( SUCCEEDED( pFilterGraph->FindFilterByName( L"Video Renderer", &pFilter )))
        {
            if( SUCCEEDED( pFilter->FindPin( L"VMR Input0", &pin )))
            {
                if( SUCCEEDED( pin->QueryInterface( IID_IOverlay, (void **)&pOverlay )))
                {
                    pin->Release();
                    pFilter->Release();
                }
                // SafeRelease( pControl );
            }
            // SafeRelease( pSourceFilter );
        } */
    }
Cleanup:
    if( FAILED( hr ))
    {
        TCHAR s[40];
        _stprintf( s, _T("Message=%x\n"), hr );
        OutputDebugString( s );
        NotifyMessage( MESSAGE_ERROR, L"Building the graph failed" );
    }
    return hr;
}
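After CreateCaptureGraphInternal() returns S_OK, the graph still has to be started, and a still image is triggered through the STILL pin. The sketch below shows one way a caller could drive the graph built above; m_pCaptureGraphBuilder and m_pVideoCaptureFilter come from the listing, while RunGraph() and TriggerStill() are hypothetical helper names, not part of the original class.

// A rough usage sketch (assumptions marked): start the preview graph and fire the
// still pin. Only standard DirectShow calls are used; RunGraph/TriggerStill are
// hypothetical helpers and reuse the CHK macro sketched at the top of this post.
HRESULT CGraphManager::RunGraph()            // hypothetical helper
{
    HRESULT hr = S_OK;
    CComPtr<IGraphBuilder>  pGraph;
    CComPtr<IMediaControl>  pMediaControl;

    // IMediaControl lives on the filter graph that the capture graph builder was given.
    CHK( m_pCaptureGraphBuilder->GetFiltergraph( &pGraph ));
    CHK( pGraph.QueryInterface( &pMediaControl ));
    CHK( pMediaControl->Run());              // starts the graph; see the note on ControlStream below

Cleanup:
    return hr;
}

HRESULT CGraphManager::TriggerStill()        // hypothetical helper
{
    HRESULT hr = S_OK;
    CComPtr<IPin>            pStillPin;
    CComPtr<IAMVideoControl> pVideoControl;
    CComPtr<IUnknown>        pUnkCaptureFilter;

    // Find the still pin on the capture filter and pulse it once.
    CHK( m_pVideoCaptureFilter.QueryInterface( &pUnkCaptureFilter ));
    CHK( m_pCaptureGraphBuilder->FindPin( pUnkCaptureFilter, PINDIR_OUTPUT,
        &PIN_CATEGORY_STILL, &MEDIATYPE_Video, FALSE, 0, &pStillPin ));
    CHK( m_pVideoCaptureFilter.QueryInterface( &pVideoControl ));
    CHK( pVideoControl->SetMode( pStillPin, VideoControlFlag_Trigger ));

Cleanup:
    return hr;
}

Per the listing's own comment, the ControlStream( &PIN_CATEGORY_CAPTURE, ... ) call blocks the capture stream, so Run() by itself shows the preview; recording into \video1.asf would start only once the capture stream is re-enabled with another ControlStream call that supplies start/stop times.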