//+-------------------------------------------------------------------------
//
// Microsoft Windows
//
// Copyright (C) Microsoft Corporation, 1999 - 1999
//
// File: filter.cpp
//
//--------------------------------------------------------------------------
#include "avssamp.h"
#define MAX_TIMESTAMPS 200
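//
// Up to MAX_TIMESTAMPS system-time snapshots are recorded in m_rgTimestamps
// by Process() below, as a small debug log of when processing actually ran
// relative to the stream start time.
//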
#ifdef ALLOC_DATA_PRAGMA
#pragma const_seg("PAGECONST")
#endif // ALLOC_DATA_PRAGMA
VOID
NTAPI
CCapFilter::TimerDeferredRoutine(
IN PKDPC Dpc,
IN PVOID DeferredContext,
IN PVOID SystemArgument1,
IN PVOID SystemArgument2
)
/*++
Routine Description:
Deferred routine to simulate data interrupts.
Arguments:
IN PKDPC Dpc -
pointer to DPC
IN PVOID DeferredContext -
pointer to context
IN PVOID SystemArgument1 -
not used
IN PVOID SystemArgument2 -
not used
Return:
No return value.
--*/
{
/// _DbgPrintF(DEBUGLVL_BLAB,("TimerDeferredRoutine"));
CCapFilter *filter = reinterpret_cast<CCapFilter *>(DeferredContext);
//
// Force this to be completed before the compare.
//
InterlockedExchange(&filter->m_TimerScheduled,FALSE);
if (filter->m_Active)
{
//
// Compute next moment when this Dpc should be scheduled.
// Note that schedule time is computed in absolute system
// time and should not be affected by cumulated errors,
// although changing the system clock will mess this up.
//
LARGE_INTEGER NextTime;
NextTime.QuadPart = filter->m_llStartTime +
(filter->m_iTick + 1) * filter->m_VideoInfoHeader->AvgTimePerFrame;
filter->m_TimerScheduled = TRUE;
KeSetTimer(&filter->m_TimerObject,NextTime,&filter->m_TimerDpc);
}
else
{
_DbgPrintF(DEBUGLVL_TERSE,("DPC noticed we are inactive now"));
}
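    //
    // Kick filter-centric processing; the Process() dispatch below runs now
    // if frames are available on the required queues. Then advance the tick
    // that drives the synthesized presentation times.
    //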
KsFilterAttemptProcessing(filter->m_pKsFilter,FALSE);
filter->m_iTick++;
}
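//
// The timer/DPC pair used above is initialized elsewhere in the sample; a
// minimal sketch of how it could be armed when streaming starts is shown
// below. Only m_TimerObject, m_TimerDpc, m_llStartTime, m_iTick,
// m_TimerScheduled and m_VideoInfoHeader come from this file; the placement
// (for example in a run-state transition) is an assumption.
//
//     KeInitializeDpc(&m_TimerDpc, TimerDeferredRoutine, this);
//     KeInitializeTimer(&m_TimerObject);
//     ...
//     LARGE_INTEGER Now;
//     KeQuerySystemTime(&Now);                  // absolute stream start time
//     m_llStartTime = Now.QuadPart;
//     m_iTick = 0;
//     m_TimerScheduled = TRUE;
//     LARGE_INTEGER Due;
//     Due.QuadPart = m_llStartTime + m_VideoInfoHeader->AvgTimePerFrame;
//     KeSetTimer(&m_TimerObject, Due, &m_TimerDpc); // first frame tick
//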
NTSTATUS
CCapFilter::
Process(
IN PKSFILTER KsFilter,
IN KSPROCESSPIN_INDEXENTRY ProcessPinsIndex[]
)
/*++
Routine Description:
This routine is called when there is data to be processed.
Arguments:
    KsFilter -
        Contains a pointer to the filter structure.
ProcessPinsIndex -
Contains a pointer to an array of process pin index entries. This
array is indexed by pin ID. An index entry indicates the number
of pin instances for the corresponding filter type and points to the
first corresponding process pin structure in the ProcessPins array.
This allows process pin structures to be quickly accessed by pin ID
when the number of instances per type is not known in advance.
Return Value:
    Indication of whether more processing should be done if frames are
    available. A value of STATUS_PENDING indicates that processing should
    not continue even if frames are available on all required queues. Any
    other return value indicates processing should continue if frames are
    available on all required queues.
--*/
{
/// _DbgPrintF(DEBUGLVL_BLAB,("Process"));
ASSERT(ProcessPinsIndex[0].Count == 1);
// uncomment when audio is required. ASSERT(ProcessPinsIndex[1].Count == 1);
CCapFilter *pFilter = reinterpret_cast<CCapFilter *>(KsFilter->Context);
PKSPROCESSPIN processVideoPin = ProcessPinsIndex[0].Pins[0];
PKSPROCESSPIN processAudioPin = NULL; // uncomment when audio is required. ProcessPinsIndex[1].Pins[0];
//
// compute the presentation time associated to current frame
//
LONGLONG llCrtTime = pFilter->m_iTick * pFilter->m_VideoInfoHeader->AvgTimePerFrame;
//
// save system clock value when processing is invoked
//
if ( pFilter->m_iTimestamp < MAX_TIMESTAMPS) {
LARGE_INTEGER liTimestamp;
KeQuerySystemTime(&liTimestamp);
pFilter->m_rgTimestamps[pFilter->m_iTimestamp].Abs = liTimestamp.QuadPart;
pFilter->m_rgTimestamps[pFilter->m_iTimestamp].Rel = liTimestamp.QuadPart - pFilter->m_llStartTime;
}
//
// If there is frame space available for audio data,
// copy as much audio data as possible.
//
if(processAudioPin != NULL)
{
if ( processAudioPin->BytesAvailable > 0 ) {
ULONGLONG ullCrtAudioPos;
ULONGLONG ullAudioBytes;
PKSSTREAM_HEADER pStreamHeader;
//
// Compute the current position into audio stream and the number
// of samples to be "captured" in order to reach that position.
// Then as much samples as possible are output into frame and
// position of audio stream is updated
//
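            //
            // (m_ulPeriod is presumably the duration of one audio byte in
            // 100 ns units, i.e. 10,000,000 / average bytes per second, so
            // the division yields the byte offset corresponding to the
            // current presentation time.)
            //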
ullCrtAudioPos = llCrtTime / pFilter->m_ulPeriod;
ullAudioBytes = ullCrtAudioPos - pFilter->m_ullNextAudioPos + 1;
ullAudioBytes = min(processAudioPin->BytesAvailable, ullAudioBytes);
pFilter->m_ullNextAudioPos = ullAudioBytes + pFilter->m_ullNextAudioPos;
pFilter->CopyAudioData(processAudioPin->Data, (ULONG)ullAudioBytes);
processAudioPin->BytesUsed += (ULONG)ullAudioBytes;
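            //
            // Terminate the frame so it is completed and delivered
            // downstream even if it is not completely filled.
            //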
processAudioPin->Terminate = TRUE;
pStreamHeader = processAudioPin->StreamPointer->StreamHeader;
pStreamHeader->Duration = 0;
pStreamHeader->PresentationTime.Time = llCrtTime;
pStreamHeader->PresentationTime.Numerator = 1;
pStreamHeader->PresentationTime.Denominator = 1;
pStreamHeader->OptionsFlags |= KSSTREAM_HEADER_OPTIONSF_TIMEVALID;
pFilter->RegisterSample(Audio,llCrtTime,ullAudioBytes);
}
else {
pFilter->m_ulAudioDroppedFrames ++;
if ( pFilter->m_ulAudioDroppedFrames % 100 == 0 ) {
DbgPrint("Dropped %lu/%lu audio frames \n",
pFilter->m_ulAudioDroppedFrames,
pFilter->m_iTick);
}
}
}
//
// If there is space available on the video frame,
// generate current video frame data.
//
if ( processVideoPin->BytesAvailable > 0 ) {
PKSSTREAM_HEADER pStreamHeader;
    pFilter->m_FrameInfo.PictureNumber = pFilter->m_iTick;
processVideoPin->BytesUsed =
pFilter->ImageSynth(
processVideoPin->Data,
processVideoPin->BytesAvailable,
IMAGE_XFER_NTSC_EIA_100AMP_100SAT,
0);
processVideoPin->Terminate = TRUE;
pStreamHeader = processVideoPin->StreamPointer->StreamHeader;
pStreamHeader->PresentationTime.Time = llCrtTime;
pStreamHeader->PresentationTime.Numerator = 1;
pStreamHeader->PresentationTime.Denominator = 1;
pStreamHeader->Duration = pFilter->m_VideoInfoHeader->AvgTimePerFrame;
pStreamHeader->OptionsFlags |= KSSTREAM_HEADER_OPTIONSF_TIMEVALID |
KSSTREAM_HEADER_OPTIONSF_DURATIONVALID;
pFilter->RegisterSample(Video,llCrtTime,processVideoPin->BytesUsed);
}
else {
pFilter->m_ulVideoDroppedFrames ++;
if ( pFilter->m_ulVideoDroppedFrames % 100 == 0 ) {
DbgPrint("Dropped %lu/%lu video frames \n",
pFilter->m_ulVideoDroppedFrames,
pFilter->m_iTick);
}
}
//
// Run only when the timer fires.
//
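    // Returning STATUS_PENDING keeps AVStream from resuming processing on
    // its own when more frames arrive; the timer DPC paces processing by
    // calling KsFilterAttemptProcessing once per frame interval.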
return STATUS_PENDING;
}
//
// EIA-189-A Standard color bar definitions
//
// 75% Amplitude, 100% Saturation
const static UCHAR NTSCColorBars75Amp100SatRGB24 [3][8] =
{
// Whi Yel Cya Grn Mag Red Blu Blk
191, 0,191, 0,191, 0,191, 0, // Blue
191,191,191,191, 0, 0, 0, 0, // Green
191,191, 0, 0,191,191, 0, 0, // Red
};
// 100% Amplitude, 100% Saturation
const static UCHAR NTSCColorBars100Amp100SatRGB24 [3][8] =
{
// Whi Yel Cya Grn Mag Red Blu Blk
255, 0,255, 0,255, 0,255, 0, // Blue
255,255,255,255, 0, 0, 0, 0, // Green
255,255, 0, 0,255,255, 0, 0, // Red
};
const static UCHAR NTSCColorBars100Amp100SatYUV [4][8] =
{
// Whi Yel Cya Grn Mag Red Blu Blk
128, 16,166, 54,202, 90,240,128, // U
235,211,170,145,106, 81, 41, 16, // Y
128,146, 16, 34,222,240,109,128, // V
235,211,170,145,106, 81, 41, 16 // Y
};
ULONG
CCapFilter::ImageSynth(
OUT PVOID Data,
IN ULONG ByteCount,
IN ImageXferCommands Command,
IN BOOL FlipHorizontal
)
/*++
Routine Description:
Synthesizes NTSC color bars, white, black, and grayscale images.
Arguments:
Return Value:
size of transfer in bytes
--*/
{
    _DbgPrintF(DEBUGLVL_BLAB,("ImageSynth Data=%p Count=%d Cmd=%d Flip=%d", Data, ByteCount, Command, FlipHorizontal));
UINT biWidth = m_VideoInfoHeader->bmiHeader.biWidth;
    LONG biHeight = m_VideoInfoHeader->bmiHeader.biHeight;
UINT biSizeImage = m_VideoInfoHeader->bmiHeader.biSizeImage;
UINT biWidthBytes = KS_DIBWIDTHBYTES( m_VideoInfoHeader->bmiHeader );
UINT biBitCount = m_VideoInfoHeader->bmiHeader.biBitCount;
UINT LinesToCopy = abs( biHeight );
DWORD biCompression = m_VideoInfoHeader->bmiHeader.biCompression;
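    //
    // KS_DIBWIDTHBYTES yields the DWORD-aligned byte stride of one line;
    // a negative biHeight denotes a top-down image, hence abs() above.
    //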
    _DbgPrintF(DEBUGLVL_BLAB,(" biWidth=%d biHeight=%d biSizeImage=%d biWidthBytes=%d biBitCount=%d LinesToCopy=%d",
        biWidth, biHeight, biSizeImage, biWidthBytes, biBitCount, LinesToCopy));
UINT Line;
PUCHAR Image = (PUCHAR)Data;
//
// Synthesize a single line of image data, which will then be replicated
//
if (!ByteCount) {
return 0;
}
if ((biBitCount == 24) && (biCompression == KS_BI_RGB)) {
switch (Command) {
case IMAGE_XFER_NTSC_EIA_100AMP_100SAT:
// 100% saturation
{
UINT x, col;
PUCHAR Temp = m_LineBuffer;
for (x = 0; x < biWidth; x++) {
col = (x * 8) / biWidth;
col = FlipHorizontal ? (7 - col) : col;
                *Temp++ = NTSCColorBars100Amp100SatRGB24[0][col]; // Blue
                *Temp++ = NTSCColorBars100Amp100SatRGB24[1][col]; // Green
                *Temp++ = NTSCColorBars100Amp100SatRGB24[2][col]; // Red
}
}
break;
case IMAGE_XFER_NTSC_EIA_75AMP_100SAT:
// 75% Saturation
{
UINT x, col;
PUCHAR Temp = m_LineBuffer;
for (x = 0; x < biWidth; x++) {
col = (x * 8) / biWidth;
col = FlipHorizontal ? (7 - col) : col;
                *Temp++ = NTSCColorBars75Amp100SatRGB24[0][col]; // Blue
                *Temp++ = NTSCColorBars75Amp100SatRGB24[1][col]; // Green
                *Temp++ = NTSCColorBars75Amp100SatRGB24[2][col]; // Red
}
}
break;
case IMAGE_XFER_BLACK:
        // Gamma corrected grayscale ramp
{
UINT x, col;
PUCHAR Temp = m_LineBuffer;
for (x = 0; x < biWidth; x++) {
col = (255 * (x * 10) / biWidth) / 10;
col = FlipHorizontal ? (255 - col) : col;
*Temp++ = (BYTE) col; // Red
*Temp++ = (BYTE) col; // Green
*Temp++ = (BYTE) col; // Blue
}
}
break;
case IMAGE_XFER_WHITE:
// All white
RtlFillMemory(
m_LineBuffer,
biWidthBytes,
(UCHAR) 255);
break;
case IMAGE_XFER_GRAY_INCREASING:
// grayscale increasing with each image captured
RtlFillMemory(
m_LineBuffer,
biWidthBytes,
(UCHAR) (m_FrameInfo.PictureNumber * 8));
break;
default:
break;
}
} // endif RGB24
else if ((biBitCount == 16) && (biCompression == FOURCC_YUV422)) {
switch (Command) {
case IMAGE_XFER_NTSC_EIA_100AMP_100SAT:
default:
{
UINT x, col;
PUCHAR Temp = m_LineBuffer;
for (x = 0; x < (biWidth / 2); x++) {
col = (x * 8) / (biWidth / 2);
col = FlipHorizontal ? (7 - col) : col;
*Temp++ = NTSCColorBars100Amp100SatYUV[0][col]; // U
*Temp++ = NTSCColorBars100Amp100SatYUV[1][col]; // Y
*Temp++ = NTSCColorBars100Amp100SatYUV[2][col]; // V
*Temp++ = NTSCColorBars100Amp100SatYUV[3][col]; // Y
}
}
break;
}
}
else {
_DbgPrintF( DEBUGLVL_ERROR, ("Unknown format!!!") );
}
//
// Copy the single line synthesized to all rows of the image
//
    ULONG cutoff = ULONG
        ( ( ( ( ULONGLONG(m_VideoInfoHeader->AvgTimePerFrame)
              * ULONGLONG(m_FrameInfo.PictureNumber)
              )
            % ULONGLONG(10000000)
            )
          * ULONGLONG(LinesToCopy)
          )
        / ULONGLONG(10000000)
        );
    for (Line = 0; Line < LinesToCopy; Line++, Image += biWidthBytes) {
        RtlCopyMemory(Image, m_LineBuffer, biWidthBytes);
        //
        // Black out the lines below the cutoff (the fraction of the current
        // second already covered by this frame) as a simple progress marker.
        //
        if (((biHeight < 0) && (Line < cutoff)) ||
            ((biHeight >= 0) && (LinesToCopy - Line < cutoff))) {
            RtlFillMemory(Image, biWidthBytes, (UCHAR) 16);
        }
    }
    return biSizeImage;
}