// Filters.cpp
#pragma warning(disable:4244)
#pragma warning(disable:4711)
#include <windows.h>
#include <streams.h>
#include <stdio.h>
#include <olectl.h>
#include <dvdmedia.h>
#include <Shlwapi.h>
#include "DibHelper.h"
#include "filters.h"
const REFERENCE_TIME FPS_30 = UNITS / 30;
const REFERENCE_TIME FPS_20 = UNITS / 20;
const REFERENCE_TIME FPS_10 = UNITS / 10;
const REFERENCE_TIME FPS_5 = UNITS / 5;
const REFERENCE_TIME FPS_4 = UNITS / 4;
const REFERENCE_TIME FPS_3 = UNITS / 3;
const REFERENCE_TIME FPS_2 = UNITS / 2;
const REFERENCE_TIME FPS_1 = UNITS / 1;
//////////////////////////////////////////////////////////////////////////
// CVCam is the source filter which masquerades as a capture device
//////////////////////////////////////////////////////////////////////////
CUnknown * WINAPI CVCam::CreateInstance(LPUNKNOWN lpunk, HRESULT *phr)
{
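// Factory entry point used by DirectShow's class factory to create the filter instance.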
ASSERT(phr);
CUnknown *punk = new CVCam(lpunk, phr);
return punk;
}
CVCam::CVCam(LPUNKNOWN lpunk, HRESULT *phr) :
CSource(NAME("Virtual Cam"), lpunk, CLSID_VirtualCam)
{
ASSERT(phr);
CAutoLock cAutoLock(&m_cStateLock);
// Create the one and only output pin
m_paStreams = (CSourceStream **) new CVCamStream*[1];
m_paStreams[0] = new CVCamStream(phr, this, TEXT("Virtual Cam"));
}
HRESULT CVCam::QueryInterface(REFIID riid, void **ppv)
{
//Forward request for IAMStreamConfig & IKsPropertySet to the pin
if(riid == _uuidof(IAMStreamConfig) || riid == _uuidof(IKsPropertySet))
return m_paStreams[0]->QueryInterface(riid, ppv);
else
return CSource::QueryInterface(riid, ppv);
}
EXTERN_C IMAGE_DOS_HEADER __ImageBase;
//////////////////////////////////////////////////////////////////////////
// CVCamStream is the one and only output pin of CVCam; it generates the video
// frames and implements IAMStreamConfig and IKsPropertySet.
//////////////////////////////////////////////////////////////////////////
CVCamStream::CVCamStream(HRESULT *phr, CVCam *pParent, LPCWSTR pPinName) :
CSourceStream(NAME("Virtual Cam"),phr, pParent, pPinName),
m_pParent(pParent), m_rtFrameLength(FPS_5)
{
TCHAR path[_MAX_PATH];
::GetModuleFileName((HINSTANCE)&__ImageBase, path, _MAX_PATH);
::PathRenameExtension(path, TEXT(".ini"));
m_rScreen.top = ::GetPrivateProfileInt(TEXT("setting"), TEXT("top"), 100, path);
m_rScreen.left = ::GetPrivateProfileInt(TEXT("setting"), TEXT("left"), 100, path);
m_rScreen.right = ::GetPrivateProfileInt(TEXT("setting"), TEXT("right"), 580, path);
m_rScreen.bottom = ::GetPrivateProfileInt(TEXT("setting"), TEXT("bottom"), 372, path);
if (m_rScreen.right <= m_rScreen.left) m_rScreen.right = m_rScreen.left + 480;
if (m_rScreen.bottom <= m_rScreen.top) m_rScreen.bottom = m_rScreen.top + 360;
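// Write the (possibly corrected) values back so the .ini always exists and holds a valid rectangle.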
TCHAR buf[12]; // large enough for any 32-bit value
_snwprintf_s(buf, _countof(buf), _TRUNCATE, TEXT("%d"), m_rScreen.top);
::WritePrivateProfileString(TEXT("setting"), TEXT("top"), buf, path);
_snwprintf_s(buf, _countof(buf), _TRUNCATE, TEXT("%d"), m_rScreen.left);
::WritePrivateProfileString(TEXT("setting"), TEXT("left"), buf, path);
_snwprintf_s(buf, _countof(buf), _TRUNCATE, TEXT("%d"), m_rScreen.right);
::WritePrivateProfileString(TEXT("setting"), TEXT("right"), buf, path);
_snwprintf_s(buf, _countof(buf), _TRUNCATE, TEXT("%d"), m_rScreen.bottom);
::WritePrivateProfileString(TEXT("setting"), TEXT("bottom"), buf, path);
GetMediaType(1, &m_mt);
}
CVCamStream::~CVCamStream()
{
}
HRESULT CVCamStream::QueryInterface(REFIID riid, void **ppv)
{
// Expose IAMStreamConfig and IKsPropertySet directly; everything else is delegated to the base pin class.
if(riid == _uuidof(IAMStreamConfig))
*ppv = (IAMStreamConfig*)this;
else if(riid == _uuidof(IKsPropertySet))
*ppv = (IKsPropertySet*)this;
else
return CSourceStream::QueryInterface(riid, ppv);
AddRef();
return S_OK;
}
//////////////////////////////////////////////////////////////////////////
// This is the routine where we create the data being output by the Virtual
// Camera device.
//////////////////////////////////////////////////////////////////////////
HRESULT CVCamStream::FillBuffer(IMediaSample *pSample)
{
BYTE *pData;
long cbData;
CheckPointer(pSample, E_POINTER);
CAutoLock cAutoLockShared(&m_cSharedState);
// Access the sample's data buffer
pSample->GetPointer(&pData);
cbData = pSample->GetSize();
// Check that we're still using video
ASSERT(m_mt.formattype == FORMAT_VideoInfo);
VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)m_mt.pbFormat;
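// pVih describes the negotiated output format; its BITMAPINFOHEADER tells the
// screen-grab helper how large a frame to produce.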
// Copy the DIB bits over into our filter's output buffer.
// Since sample size may be larger than the image size, bound the copy size.
HDIB hDib = CopyScreenToBitmap(&m_rScreen, pData, (BITMAPINFO *) &(pVih->bmiHeader));
if (hDib)
DeleteObject(hDib);
// Set the timestamps that will govern playback frame rate.
// If this file is getting written out as an AVI,
// then you'll also need to configure the AVI Mux filter to
// set the Average Time Per Frame for the AVI Header.
// The current time is the sample's start.
REFERENCE_TIME rtStart = m_iFrameNumber * m_rtFrameLength;
REFERENCE_TIME rtStop = rtStart + m_rtFrameLength;
pSample->SetTime(&rtStart, &rtStop);
m_iFrameNumber++;
// Set TRUE on every sample for uncompressed frames
pSample->SetSyncPoint(TRUE);
return S_OK;
}
//
// Notify
// Ignore quality management messages sent from the downstream filter
STDMETHODIMP CVCamStream::Notify(IBaseFilter * pSender, Quality q)
{
return E_NOTIMPL;
} // Notify
//////////////////////////////////////////////////////////////////////////
// This is called when the output format has been negotiated
//////////////////////////////////////////////////////////////////////////
HRESULT CVCamStream::SetMediaType(const CMediaType *pMediaType)
{
CAutoLock cAutoLock(m_pFilter->pStateLock());
HRESULT hr = CSourceStream::SetMediaType(pMediaType);
return hr;
} // SetMediaType
// See the DirectShow documentation for IAMStreamConfig for details on this method
HRESULT CVCamStream::GetMediaType(int iPosition, CMediaType *pmt)
{
CheckPointer(pmt,E_POINTER);
CAutoLock cAutoLock(m_pFilter->pStateLock());
if(iPosition < 0)
return E_INVALIDARG;
// Have we run off the end of types?
if(iPosition > 1)
return VFW_S_NO_MORE_ITEMS;
if(iPosition == 0)
{
*pmt = m_mt;
return S_OK;
}
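// iPosition == 1: build the default media type from the configured capture rectangle.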
VIDEOINFO *pvi = (VIDEOINFO *) pmt->AllocFormatBuffer(sizeof(VIDEOINFO));
if(NULL == pvi)
return(E_OUTOFMEMORY);
// Zero the whole VIDEOINFO block we just allocated before filling in its members
ZeroMemory(pvi, sizeof(VIDEOINFO));
pvi->bmiHeader.biCompression = BI_RGB;
pvi->bmiHeader.biBitCount = 24;
pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
pvi->bmiHeader.biWidth = m_rScreen.right - m_rScreen.left;
pvi->bmiHeader.biHeight = m_rScreen.bottom - m_rScreen.top;
pvi->bmiHeader.biPlanes = 1;
pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
pvi->bmiHeader.biClrImportant = 0;
pvi->AvgTimePerFrame = m_rtFrameLength;
SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
pmt->SetType(&MEDIATYPE_Video);
pmt->SetFormatType(&FORMAT_VideoInfo);
pmt->SetTemporalCompression(FALSE);
// Work out the GUID for the subtype from the header info.
const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader);
pmt->SetSubtype(&SubTypeGUID);
pmt->SetSampleSize(pvi->bmiHeader.biSizeImage);
return NOERROR;
} // GetMediaType
// This method is called to see if a given output format is supported
HRESULT CVCamStream::CheckMediaType(const CMediaType *pMediaType)
{
if(*pMediaType != m_mt)
return E_INVALIDARG;
return S_OK;
} // CheckMediaType
// This method is called after the pins are connected to allocate buffers to stream data
HRESULT CVCamStream::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProperties)
{
CheckPointer(pAlloc,E_POINTER);
CheckPointer(pProperties,E_POINTER);
CAutoLock cAutoLock(m_pFilter->pStateLock());
HRESULT hr = NOERROR;
VIDEOINFO *pvi = (VIDEOINFO *) m_mt.Format();
pProperties->cBuffers = 1;
pProperties->cbBuffer = pvi->bmiHeader.biSizeImage;
ASSERT(pProperties->cbBuffer);
// Ask the allocator to reserve us some sample memory. NOTE: the function
// can succeed (return NOERROR) but still not have allocated the
// memory that we requested, so we must check we got whatever we wanted.
ALLOCATOR_PROPERTIES Actual;
hr = pAlloc->SetProperties(pProperties,&Actual);
if(FAILED(hr))
{
return hr;
}
// Is this allocator unsuitable?
if(Actual.cbBuffer < pProperties->cbBuffer)
{
return E_FAIL;
}
// Make sure that we have only 1 buffer: we asked for exactly one above, and each
// frame is a complete screen grab that overwrites it, so nothing more is needed.
ASSERT(Actual.cBuffers == 1);
return NOERROR;
} // DecideBufferSize
// Called when graph is run
HRESULT CVCamStream::OnThreadCreate()
{
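// Reset the frame counter so the timestamps generated in FillBuffer start at zero
// each time the streaming thread is (re)started.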
m_iFrameNumber = 0;
return NOERROR;
} // OnThreadCreate
HRESULT CVCamStream::OnThreadDestroy()
{
return NOERROR;
} // OnThreadDestroy
//////////////////////////////////////////////////////////////////////////
// IAMStreamConfig
//////////////////////////////////////////////////////////////////////////
HRESULT STDMETHODCALLTYPE CVCamStream::SetFormat(AM_MEDIA_TYPE *pmt)
{
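// Accept only the native capture size in 24-bit RGB; the filter does not scale
// or colour-convert, so any other format is rejected.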
DECLARE_PTR(VIDEOINFOHEADER, pvi, pmt->pbFormat);
if (pvi->bmiHeader.biHeight != m_rScreen.bottom - m_rScreen.top ||
pvi->bmiHeader.biWidth != m_rScreen.right - m_rScreen.left ||
pvi->bmiHeader.biCompression != BI_RGB ||
pvi->bmiHeader.biBitCount != 24)
return VFW_E_INVALIDMEDIATYPE;
m_mt = *pmt;
m_rtFrameLength = pvi->AvgTimePerFrame;
IPin* pin;
ConnectedTo(&pin);
if(pin)
{
// Already connected: ask the graph to renegotiate so the new format takes effect.
IFilterGraph *pGraph = m_pParent->GetGraph();
pGraph->Reconnect(this);
pin->Release(); // ConnectedTo() returned an AddRef'd pointer; don't leak it
}
return S_OK;
}
HRESULT STDMETHODCALLTYPE CVCamStream::GetFormat(AM_MEDIA_TYPE **ppmt)
{
*ppmt = CreateMediaType(&m_mt);
return S_OK;
}
HRESULT STDMETHODCALLTYPE CVCamStream::GetNumberOfCapabilities(int *piCount, int *piSize)
{
*piCount = 1;
*piSize = sizeof(VIDEO_STREAM_CONFIG_CAPS);
return S_OK;
}
HRESULT STDMETHODCALLTYPE CVCamStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC)
{
// Validate the index before allocating anything; this pin advertises exactly one capability.
if (iIndex != 0) return E_INVALIDARG;
*pmt = CreateMediaType(&m_mt);
DECLARE_PTR(VIDEOINFOHEADER, pvi, (*pmt)->pbFormat);
pvi->bmiHeader.biCompression = BI_RGB;
pvi->bmiHeader.biBitCount = 24;
pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
pvi->bmiHeader.biWidth = m_rScreen.right - m_rScreen.left;
pvi->bmiHeader.biHeight = m_rScreen.bottom - m_rScreen.top;
pvi->bmiHeader.biPlanes = 1;
pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
pvi->bmiHeader.biClrImportant = 0;
SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
(*pmt)->majortype = MEDIATYPE_Video;
(*pmt)->subtype = MEDIASUBTYPE_RGB24;
(*pmt)->formattype = FORMAT_VideoInfo;
(*pmt)->bTemporalCompression = FALSE;
(*pmt)->bFixedSizeSamples = FALSE;
(*pmt)->lSampleSize = pvi->bmiHeader.biSizeImage;
(*pmt)->cbFormat = sizeof(VIDEOINFOHEADER);
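// Describe the single fixed capability: output size equals the capture rectangle,
// with frame intervals anywhere from 20 ms (50 fps) to 5 s (0.2 fps).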
DECLARE_PTR(VIDEO_STREAM_CONFIG_CAPS, pvscc, pSCC);
pvscc->guid = FORMAT_VideoInfo;
pvscc->VideoStandard = AnalogVideo_None;
pvscc->InputSize.cx = m_rScreen.right - m_rScreen.left;
pvscc->InputSize.cy = m_rScreen.bottom - m_rScreen.top;
pvscc->MinCroppingSize.cx = m_rScreen.right - m_rScreen.left;
pvscc->MinCroppingSize.cy = m_rScreen.bottom - m_rScreen.top;
pvscc->MaxCroppingSize.cx = m_rScreen.right - m_rScreen.left;
pvscc->MaxCroppingSize.cy = m_rScreen.bottom - m_rScreen.top;
pvscc->CropGranularityX = m_rScreen.right - m_rScreen.left;
pvscc->CropGranularityY = m_rScreen.bottom - m_rScreen.top;
pvscc->CropAlignX = 0;
pvscc->CropAlignY = 0;
pvscc->MinOutputSize.cx = m_rScreen.right - m_rScreen.left;
pvscc->MinOutputSize.cy = m_rScreen.bottom - m_rScreen.top;
pvscc->MaxOutputSize.cx = m_rScreen.right - m_rScreen.left;
pvscc->MaxOutputSize.cy = m_rScreen.bottom - m_rScreen.top;
pvscc->OutputGranularityX = 0;
pvscc->OutputGranularityY = 0;
pvscc->StretchTapsX = 0;
pvscc->StretchTapsY = 0;
pvscc->ShrinkTapsX = 0;
pvscc->ShrinkTapsY = 0;
pvscc->MinFrameInterval = 200000; //50 fps
pvscc->MaxFrameInterval = 50000000; // 0.2 fps
pvscc->MinBitsPerSecond = ((m_rScreen.bottom - m_rScreen.top) * (m_rScreen.right - m_rScreen.left) * 3 * 8) / 5; // RGB24 at 0.2 fps
pvscc->MaxBitsPerSecond = (m_rScreen.bottom - m_rScreen.top) * (m_rScreen.right - m_rScreen.left) * 3 * 8 * 50; // RGB24 at 50 fps
return S_OK;
}
//////////////////////////////////////////////////////////////////////////
// IKsPropertySet
//////////////////////////////////////////////////////////////////////////
HRESULT CVCamStream::Set(REFGUID guidPropSet, DWORD dwID, void *pInstanceData,
DWORD cbInstanceData, void *pPropData, DWORD cbPropData)
{
// Set: cannot set any properties on this pin.
return E_NOTIMPL;
}
// Get: Return the pin category (our only property).
HRESULT CVCamStream::Get(
REFGUID guidPropSet, // Which property set.
DWORD dwPropID, // Which property in that set.
void *pInstanceData, // Instance data (ignore).
DWORD cbInstanceData, // Size of the instance data (ignore).
void *pPropData, // Buffer to receive the property data.
DWORD cbPropData, // Size of the buffer.
DWORD *pcbReturned // Return the size of the property.
)
{
if (guidPropSet != AMPROPSETID_Pin) return E_PROP_SET_UNSUPPORTED;
if (dwPropID != AMPROPERTY_PIN_CATEGORY) return E_PROP_ID_UNSUPPORTED;
if (pPropData == NULL && pcbReturned == NULL) return E_POINTER;
if (pcbReturned) *pcbReturned = sizeof(GUID);
if (pPropData == NULL) return S_OK; // Caller just wants to know the size.
if (cbPropData < sizeof(GUID)) return E_UNEXPECTED;// The buffer is too small.
*(GUID *)pPropData = PIN_CATEGORY_CAPTURE;
return S_OK;
}
// QuerySupported: Query whether the pin supports the specified property.
HRESULT CVCamStream::QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD *pTypeSupport)
{
if (guidPropSet != AMPROPSETID_Pin) return E_PROP_SET_UNSUPPORTED;
if (dwPropID != AMPROPERTY_PIN_CATEGORY) return E_PROP_ID_UNSUPPORTED;
// We support getting this property, but not setting it.
if (pTypeSupport) *pTypeSupport = KSPROPERTY_SUPPORT_GET;
return S_OK;
}
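#if 0
// Illustrative sketch only (never compiled, not part of the filter): one way a
// client application might locate this virtual camera and preview its output
// once the DLL has been registered with regsvr32. The friendly name
// "Virtual Cam" is an assumption here; the real name comes from the
// registration code elsewhere in the project. Error handling is minimal.
#include <dshow.h>

HRESULT PreviewVirtualCam()
{
    HRESULT hr = CoInitialize(NULL);
    if (FAILED(hr)) return hr;

    // Enumerate the registered video capture devices.
    ICreateDevEnum *pDevEnum = NULL;
    IEnumMoniker *pEnum = NULL;
    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (void**)&pDevEnum);
    if (FAILED(hr)) return hr;
    hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
    if (hr != S_OK) return E_FAIL; // no capture devices registered at all

    // Bind the device whose FriendlyName matches the virtual camera.
    IBaseFilter *pCam = NULL;
    IMoniker *pMoniker = NULL;
    while (pCam == NULL && pEnum->Next(1, &pMoniker, NULL) == S_OK)
    {
        IPropertyBag *pBag = NULL;
        if (SUCCEEDED(pMoniker->BindToStorage(NULL, NULL, IID_IPropertyBag, (void**)&pBag)))
        {
            VARIANT var; VariantInit(&var);
            if (SUCCEEDED(pBag->Read(L"FriendlyName", &var, NULL)))
            {
                if (wcscmp(var.bstrVal, L"Virtual Cam") == 0)
                    pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pCam);
                VariantClear(&var);
            }
            pBag->Release();
        }
        pMoniker->Release();
    }
    if (pCam == NULL) return E_FAIL;

    // Build a preview graph around the capture pin and run it.
    IGraphBuilder *pGraph = NULL;
    ICaptureGraphBuilder2 *pBuilder = NULL;
    IMediaControl *pControl = NULL;
    CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
                     IID_IGraphBuilder, (void**)&pGraph);
    CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
                     IID_ICaptureGraphBuilder2, (void**)&pBuilder);
    pBuilder->SetFiltergraph(pGraph);
    pGraph->AddFilter(pCam, L"Virtual Cam");
    pBuilder->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, pCam, NULL, NULL);
    pGraph->QueryInterface(IID_IMediaControl, (void**)&pControl);
    pControl->Run();
    // ... wait / pump messages, then pControl->Stop() and release everything ...
    return S_OK;
}
#endif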