video.cpp

WDM source code from the WinDDK src directory archive.

NTSTATUS
CVideoCapturePin::
Stop (
    IN KSSTATE FromState
    )

/*++

Routine Description:

    Called when the video capture pin transitions from acquire to stop.
    This routine releases the image synthesizer and any other data
    structures that must be cleaned up on stop.

Arguments:

    FromState -
        The state the pin is transitioning away from.  This should
        always be KSSTATE_ACQUIRE for this call.

Return Value:

    STATUS_SUCCESS

--*/

{
    PAGED_CODE();

    ASSERT (FromState == KSSTATE_ACQUIRE);

    //
    // Remove the image synthesizer from the object bag and free it.
    //
    KsRemoveItemFromObjectBag (
        m_Pin -> Bag,
        m_ImageSynth,
        TRUE
        );

    m_ImageSynth = NULL;

    if (m_SynthesisBuffer) {
        ExFreePool (m_SynthesisBuffer);
        m_SynthesisBuffer = NULL;
    }

    return STATUS_SUCCESS;

}

/**************************************************************************

    LOCKED CODE

**************************************************************************/

#ifdef ALLOC_PRAGMA
#pragma code_seg()
#endif // ALLOC_PRAGMA
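
//
// The blank code_seg() above returns to the default, nonpaged text section.
// The paged section that Stop() lives in is presumably opened earlier in
// this file with the matching form:
//
//     #ifdef ALLOC_PRAGMA
//     #pragma code_seg("PAGE")
//     #endif // ALLOC_PRAGMA
//
// Code from here on stays in locked (nonpaged) memory because it can be
// reached from the timer DPC at DISPATCH_LEVEL; that is also why
// CaptureFrame below does not call PAGED_CODE().
//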


NTSTATUS
CVideoCapturePin::
CaptureFrame (
    IN PKSPROCESSPIN ProcessPin,
    IN ULONG Tick
    )

/*++

Routine Description:

    This routine is called from the filter processing function to capture
    a frame for the video capture pin.  The process pin to capture to is
    passed.

Arguments:

    ProcessPin -
        The process pin associated with this pin.

    Tick -
        The tick count on the filter.  This is the number of timer DPCs that
        have fired since the timer DPC started (see the sketch following
        this routine).

Return Value:

    STATUS_SUCCESS

--*/

{

    ASSERT (ProcessPin -> Pin == m_Pin);

    //
    // Increment the frame number.  This is the total count of frames for
    // which capture has been attempted.
    //
    m_FrameNumber++;

    //
    // Because this pin is marked KSPIN_FLAG_FRAMES_NOT_REQUIRED_FOR_PROCESSING,
    // frames do not need to be available in order for processing to occur.
    // This routine can therefore be called from our DPC with no buffers
    // available to capture into.  In that case, we increment our dropped
    // frame counter and do nothing.
    //
    if (ProcessPin -> BytesAvailable) {

        //
        // Because we adjusted the allocator framing when the format was set,
        // every buffer available here should be large enough to hold a
        // complete capture image.
        //
        ASSERT (ProcessPin -> BytesAvailable >= 
            m_VideoInfoHeader -> bmiHeader.biSizeImage);

        //
        // If we get an invalid buffer, kick it out.
        //
        if (ProcessPin -> BytesAvailable < 
            m_VideoInfoHeader -> bmiHeader.biSizeImage) {

            ProcessPin -> BytesUsed = 0;
            ProcessPin -> Terminate = TRUE;
            m_DroppedFrames++;
            return STATUS_SUCCESS;
        }

        //
        // Generate a synthesized image.
        //
        m_ImageSynth -> SynthesizeBars ();

        //
        // Overlay some activity onto the bars.
        //
        ULONG DropLength = (Tick * 2) % 
            (ABS (m_VideoInfoHeader -> bmiHeader.biHeight));
    
        //
        // Create a drop flowing down DropLength lines from the top of the 
        // image.
        //
        m_ImageSynth -> Fill (
            0, 0, 
            m_VideoInfoHeader -> bmiHeader.biWidth - 1, DropLength, 
            GREEN
            );

        //
        // Overlay the dropped frame count over the image.
        //
        char Text [256];
        sprintf (Text, "Video Skipped: %ld", m_DroppedFrames);

        m_ImageSynth -> OverlayText (
            10,
            10,
            1,
            Text,
            TRANSPARENT,
            BLUE
            );

        //
        // A value of (ULONG)-1 indicates that there is no audio pin; only
        // overlay the audio skip count when an audio pin exists.
        //
        if (m_NotifyAudDrop != (ULONG)-1) {
            sprintf (Text, "Audio Skipped: %ld", m_NotifyAudDrop);

            m_ImageSynth -> OverlayText (
                10,
                20,
                1,
                Text,
                TRANSPARENT,
                BLUE
                );
        }

        //
        // Copy the synthesized image into the buffer.
        //
        RtlCopyMemory (
            ProcessPin -> Data,
            m_SynthesisBuffer,
            m_VideoInfoHeader -> bmiHeader.biSizeImage
            );
        
        ProcessPin -> BytesUsed = m_VideoInfoHeader -> bmiHeader.biSizeImage;
        ProcessPin -> Terminate = TRUE;


        PKSSTREAM_HEADER StreamHeader = 
            ProcessPin -> StreamPointer -> StreamHeader;

        //
        // If there is a clock assigned to the pin, time stamp the sample.
        //
        if (m_Clock) {

            StreamHeader -> PresentationTime.Time = GetTime ();
            StreamHeader -> Duration = m_VideoInfoHeader -> AvgTimePerFrame;

            StreamHeader -> OptionsFlags =
                KSSTREAM_HEADER_OPTIONSF_TIMEVALID |
                KSSTREAM_HEADER_OPTIONSF_DURATIONVALID;

        }

        //
        // Update the extended header info.
        //
        ASSERT (StreamHeader -> Size >= sizeof (KSSTREAM_HEADER) +
            sizeof (KS_FRAME_INFO));

        //
        // Double check the stream header size.  AVStream does not guarantee
        // that setting StreamHeaderSize to a specific value means every
        // header you receive will actually be that size.  If the proper data
        // type handlers are not installed, the stream header will be of
        // default size.
        //
        if (StreamHeader -> Size >= sizeof (KSSTREAM_HEADER) +
            sizeof (KS_FRAME_INFO)) {

            PKS_FRAME_INFO FrameInfo = reinterpret_cast <PKS_FRAME_INFO> (
                StreamHeader + 1
                );
    
            FrameInfo -> ExtendedHeaderSize = sizeof (KS_FRAME_INFO);
            FrameInfo -> PictureNumber = (LONGLONG)m_FrameNumber;
            FrameInfo -> DropCount = (LONGLONG)m_DroppedFrames;

        }
    
    } else {
        m_DroppedFrames++;
    }

    return STATUS_SUCCESS;
        
}
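
#if 0
//
// Illustrative sketch only (compiled out): how a filter-centric process
// callback typically drives CaptureFrame above.  AVStream hands the filter
// a KSPROCESSPIN_INDEXENTRY table indexed by pin ID; if the video pin
// currently has a process pin available, it is handed down together with
// the filter's timer tick count.  VIDEO_PIN_ID and the helper name are
// hypothetical stand-ins -- the sample's real processing routine lives in
// its filter source, not in this file.
//
NTSTATUS
SketchDeliverVideoFrame (
    IN PKSPROCESSPIN_INDEXENTRY ProcessPinsIndex,
    IN ULONG Tick
    )
{
    if (ProcessPinsIndex [VIDEO_PIN_ID].Count) {

        PKSPROCESSPIN VideoProcessPin =
            ProcessPinsIndex [VIDEO_PIN_ID].Pins [0];

        //
        // The CVideoCapturePin instance is recoverable from the pin's
        // context, which is set up when the pin object is created.
        //
        CVideoCapturePin *VideoPin =
            reinterpret_cast <CVideoCapturePin *> (
                VideoProcessPin -> Pin -> Context
                );

        return VideoPin -> CaptureFrame (VideoProcessPin, Tick);
    }

    return STATUS_SUCCESS;
}
#endif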

/**************************************************************************

    DESCRIPTOR AND DISPATCH LAYOUT

**************************************************************************/

#define D_X 320
#define D_Y 240

//
// FormatRGB24Bpp_Capture:
//
// This is the data range description of the RGB24 capture format we support.
//
const 
KS_DATARANGE_VIDEO 
FormatRGB24Bpp_Capture = {

    //
    // KSDATARANGE
    //
    {   
        sizeof (KS_DATARANGE_VIDEO),                // FormatSize
        0,                                          // Flags
        D_X * D_Y * 3,                              // SampleSize
        0,                                          // Reserved

        STATICGUIDOF (KSDATAFORMAT_TYPE_VIDEO),     // aka. MEDIATYPE_Video
        0xe436eb7d, 0x524f, 0x11ce, 0x9f, 0x53, 0x00, 0x20, 
            0xaf, 0x0b, 0xa7, 0x70,                 // aka. MEDIASUBTYPE_RGB24,
        STATICGUIDOF (KSDATAFORMAT_SPECIFIER_VIDEOINFO) // aka. FORMAT_VideoInfo
    },

    TRUE,               // BOOL,  bFixedSizeSamples (all samples same size?)
    TRUE,               // BOOL,  bTemporalCompression (all I frames?)
    0,                  // Reserved (was StreamDescriptionFlags)
    0,                  // Reserved (was MemoryAllocationFlags   
                        //           (KS_VIDEO_ALLOC_*))

    //
    // _KS_VIDEO_STREAM_CONFIG_CAPS  
    //
    {
        STATICGUIDOF( KSDATAFORMAT_SPECIFIER_VIDEOINFO ), // GUID
        KS_AnalogVideo_NTSC_M |
        KS_AnalogVideo_PAL_B,                    // AnalogVideoStandard
        720,480,        // InputSize, (the inherent size of the incoming signal
                        //             with every digitized pixel unique)
        160,120,        // MinCroppingSize, smallest rcSrc cropping rect allowed
        720,480,        // MaxCroppingSize, largest  rcSrc cropping rect allowed
        8,              // CropGranularityX, granularity of cropping size
        1,              // CropGranularityY
        8,              // CropAlignX, alignment of cropping rect 
        1,              // CropAlignY;
        160, 120,       // MinOutputSize, smallest bitmap stream can produce
        720, 480,       // MaxOutputSize, largest  bitmap stream can produce
        8,              // OutputGranularityX, granularity of output bitmap size
        1,              // OutputGranularityY;
        0,              // StretchTapsX  (0 no stretch, 1 pix dup, 2 interp...)
        0,              // StretchTapsY
        0,              // ShrinkTapsX 
        0,              // ShrinkTapsY 
        333667,         // MinFrameInterval, 100 nS units
        640000000,      // MaxFrameInterval, 100 nS units
        8 * 3 * 30 * 160 * 120,  // MinBitsPerSecond;
        8 * 3 * 30 * 720 * 480   // MaxBitsPerSecond;
    }, 
        
    //
    // KS_VIDEOINFOHEADER (default format)
    //
    {
        0,0,0,0,                            // RECT  rcSource; 
        0,0,0,0,                            // RECT  rcTarget; 
        D_X * D_Y * 3 * 30,                 // DWORD dwBitRate;
        0L,                                 // DWORD dwBitErrorRate; 
        333667,                             // REFERENCE_TIME  AvgTimePerFrame;   
        sizeof (KS_BITMAPINFOHEADER),       // DWORD biSize;
        D_X,                                // LONG  biWidth;
        -D_Y,                               // LONG  biHeight;
        1,                                  // WORD  biPlanes;
        24,                                 // WORD  biBitCount;
        KS_BI_RGB,                          // DWORD biCompression;
        D_X * D_Y * 3,                      // DWORD biSizeImage;
        0,                                  // LONG  biXPelsPerMeter;
        0,                                  // LONG  biYPelsPerMeter;
        0,                                  // DWORD biClrUsed;
        0                                   // DWORD biClrImportant;
    }
}; 
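
//
// Size sanity check (illustrative addition): RGB24 carries 3 bytes per
// pixel, so the SampleSize and biSizeImage above are D_X * D_Y * 3 =
// 230,400 bytes at the 320 x 240 default.  The AvgTimePerFrame of 333667
// (100 ns units) is 33.3667 ms per frame, i.e. the ~29.97 fps NTSC rate,
// and the negative biHeight declares a top-down DIB.
//
C_ASSERT (D_X * D_Y * 3 == 230400);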

#undef D_X
#undef D_Y

#define D_X 320
#define D_Y 240

//
// FormatUYU2_Capture:
//
// This is the data range description of the UYVY format we support.
//
const 
KS_DATARANGE_VIDEO 
FormatUYU2_Capture = {

    //
    // KSDATARANGE
    //
    {   
        sizeof (KS_DATARANGE_VIDEO),            // FormatSize
        0,                                      // Flags
        D_X * D_Y * 2,                          // SampleSize
        0,                                      // Reserved
        STATICGUIDOF (KSDATAFORMAT_TYPE_VIDEO), // aka. MEDIATYPE_Video
        0x59565955, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 
            0x00, 0x38, 0x9b, 0x71,             // aka. MEDIASUBTYPE_UYVY,
        STATICGUIDOF (KSDATAFORMAT_SPECIFIER_VIDEOINFO) // aka. FORMAT_VideoInfo
    },

    TRUE,               // BOOL,  bFixedSizeSamples (all samples same size?)
    TRUE,               // BOOL,  bTemporalCompression (all I frames?)
    0,                  // Reserved (was StreamDescriptionFlags)
    0,                  // Reserved (was MemoryAllocationFlags   
                        //           (KS_VIDEO_ALLOC_*))

    //
    // _KS_VIDEO_STREAM_CONFIG_CAPS  
    //
    {
        STATICGUIDOF( KSDATAFORMAT_SPECIFIER_VIDEOINFO ), // GUID
        KS_AnalogVideo_NTSC_M |
        KS_AnalogVideo_PAL_B,                    // AnalogVideoStandard
        720,480,        // InputSize, (the inherent size of the incoming signal
                        //             with every digitized pixel unique)
        160,120,        // MinCroppingSize, smallest rcSrc cropping rect allowed
        720,480,        // MaxCroppingSize, largest  rcSrc cropping rect allowed
        8,              // CropGranularityX, granularity of cropping size
        1,              // CropGranularityY
        8,              // CropAlignX, alignment of cropping rect 
        1,              // CropAlignY;
        160, 120,       // MinOutputSize, smallest bitmap stream can produce
        720, 480,       // MaxOutputSize, largest  bitmap stream can produce
        8,              // OutputGranularityX, granularity of output bitmap size
        1,              // OutputGranularityY;
        0,              // StretchTapsX  (0 no stretch, 1 pix dup, 2 interp...)
        0,              // StretchTapsY
        0,              // ShrinkTapsX 
        0,              // ShrinkTapsY 
        333667,         // MinFrameInterval, 100 nS units
        640000000,      // MaxFrameInterval, 100 nS units
        8 * 2 * 30 * 160 * 120,  // MinBitsPerSecond;
        8 * 2 * 30 * 720 * 480   // MaxBitsPerSecond;
    }, 
        
    //
    // KS_VIDEOINFOHEADER (default format)
    //
    {
        0,0,0,0,                            // RECT  rcSource; 
        0,0,0,0,                            // RECT  rcTarget; 
        D_X * D_Y * 2 * 30,                 // DWORD dwBitRate;
        0L,                                 // DWORD dwBitErrorRate; 
        333667,                             // REFERENCE_TIME  AvgTimePerFrame;   
        sizeof (KS_BITMAPINFOHEADER),       // DWORD biSize;
        D_X,                                // LONG  biWidth;
        D_Y,                                // LONG  biHeight;
        1,                                  // WORD  biPlanes;
        16,                                 // WORD  biBitCount;
        FOURCC_YUV422,                      // DWORD biCompression;
        D_X * D_Y * 2,                      // DWORD biSizeImage;
        0,                                  // LONG  biXPelsPerMeter;
        0,                                  // LONG  biYPelsPerMeter;
        0,                                  // DWORD biClrUsed;
        0                                   // DWORD biClrImportant;
    }
}; 
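
//
// Size and FOURCC sanity checks (illustrative addition): UYVY is a packed
// 4:2:2 format at 2 bytes per pixel, so SampleSize and biSizeImage above
// are D_X * D_Y * 2 = 153,600 bytes at 320 x 240.  The subtype GUID's Data1
// above is simply the FOURCC 'UYVY' stored little-endian, paired with the
// standard FOURCC base GUID {xxxxxxxx-0000-0010-8000-00AA00389B71}.
//
C_ASSERT (D_X * D_Y * 2 == 153600);
C_ASSERT ((('U') | ('Y' << 8) | ('V' << 16) | ('Y' << 24)) == 0x59565955);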

//
// VideoCapturePinDispatch:
//
// This is the dispatch table for the capture pin.  It provides notifications
// about creation, closure, processing, data formats, etc...
//
const
KSPIN_DISPATCH
VideoCapturePinDispatch = {
    CVideoCapturePin::DispatchCreate,       // Pin Create
    NULL,                                   // Pin Close
    NULL,                                   // Pin Process
    NULL,                                   // Pin Reset
    CVideoCapturePin::DispatchSetFormat,    // Pin Set Data Format
    CCapturePin::DispatchSetState,          // Pin Set Device State
    NULL,                                   // Pin Connect
    NULL,                                   // Pin Disconnect
    NULL,                                   // Clock Dispatch
    NULL                                    // Allocator Dispatch
};

//
// VideoCapturePinAllocatorFraming:
//
// This is the simple framing structure for the capture pin.  Note that this
// will be modified via KsEdit when the actual capture format is determined.
//
DECLARE_SIMPLE_FRAMING_EX (
    VideoCapturePinAllocatorFraming,
    STATICGUIDOF (KSMEMORY_TYPE_KERNEL_NONPAGED),
    KSALLOCATOR_REQUIREMENTF_SYSTEM_MEMORY |
        KSALLOCATOR_REQUIREMENTF_PREFERENCES_ONLY,
    2,
    0,
    2 * PAGE_SIZE,
    2 * PAGE_SIZE
    );
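
#if 0
//
// Illustrative sketch only (compiled out): the KsEdit adjustment referred
// to above.  Once the capture format is known, the AVStream capture samples
// edit the pin descriptor and its allocator framing so that a private copy
// can be rewritten, then pin every frame size to biSizeImage.  The helper
// name and pool tag here are hypothetical; the sample's real adjustment is
// made in its format/state handling code.
//
static
NTSTATUS
SketchAdjustCaptureFraming (
    IN PKSPIN Pin,
    IN ULONG FrameSize          // m_VideoInfoHeader -> bmiHeader.biSizeImage
    )
{
    NTSTATUS Status = KsEdit (Pin, &Pin -> Descriptor, 'hctS');

    if (NT_SUCCESS (Status)) {
        Status = KsEdit (
            Pin,
            &(Pin -> Descriptor -> AllocatorFraming),
            'hctS'
            );
    }

    if (NT_SUCCESS (Status)) {

        //
        // The edit succeeded, so the framing copy is private to this pin
        // instance and it is safe to cast away constness.
        //
        PKSALLOCATOR_FRAMING_EX Framing =
            const_cast <PKSALLOCATOR_FRAMING_EX> (
                Pin -> Descriptor -> AllocatorFraming
                );

        Framing -> FramingItem [0].Frames = 2;
        Framing -> FramingItem [0].PhysicalRange.MinFrameSize =
            Framing -> FramingItem [0].PhysicalRange.MaxFrameSize =
            Framing -> FramingItem [0].FramingRange.Range.MinFrameSize =
            Framing -> FramingItem [0].FramingRange.Range.MaxFrameSize =
            FrameSize;
        Framing -> FramingItem [0].PhysicalRange.Stepping =
            Framing -> FramingItem [0].FramingRange.Range.Stepping = 0;
    }

    return Status;
}
#endif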

//
// VideoCapturePinDataRanges:
//
// This is the list of data ranges supported on the capture pin.  We support
// two: one RGB24, and one UYVY.
//
const 
PKSDATARANGE 
VideoCapturePinDataRanges [CAPTURE_PIN_DATA_RANGE_COUNT] = {
    (PKSDATARANGE) &FormatRGB24Bpp_Capture,
    (PKSDATARANGE) &FormatUYU2_Capture
    };
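
#if 0
//
// Illustrative sketch only (compiled out): how the dispatch table, framing,
// and data ranges above are typically stitched into the pin table of the
// filter descriptor.  The real KSPIN_DESCRIPTOR_EX for this pin lives in
// the sample's filter source; the name and the exact flag/instance choices
// below are hypothetical, shown only to make the wiring concrete.
//
const
KSPIN_DESCRIPTOR_EX
SketchVideoCapturePinDescriptor = {
    &VideoCapturePinDispatch,                   // Dispatch (above)
    NULL,                                       // AutomationTable
    {
        0,                                      // Interfaces count
        NULL,                                   // Interfaces
        0,                                      // Mediums count
        NULL,                                   // Mediums
        SIZEOF_ARRAY (VideoCapturePinDataRanges), // Data range count
        VideoCapturePinDataRanges,              // Data ranges (above)
        KSPIN_DATAFLOW_OUT,                     // Dataflow
        KSPIN_COMMUNICATION_BOTH,               // Communication
        &PIN_CATEGORY_CAPTURE,                  // Category
        &PINNAME_VIDEO_CAPTURE,                 // Name
        0                                       // Reserved
    },
    KSPIN_FLAG_FRAMES_NOT_REQUIRED_FOR_PROCESSING, // Flags (see CaptureFrame)
    1,                                          // Instances possible
    1,                                          // Instances necessary
    &VideoCapturePinAllocatorFraming,           // Allocator framing (above)
    NULL                                        // IntersectHandler (a capture
                                                // pin typically supplies one
                                                // for KS_DATARANGE_VIDEO)
};
#endif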

