/**************************************************************************
AVStream Simulated Hardware Sample
Copyright (c) 2001, Microsoft Corporation.
File:
capture.cpp
Abstract:
This file contains source for the video capture pin on the capture
filter. The capture sample performs "fake" DMA directly into
the capture buffers. Common buffer DMA will work slightly differently.
For common buffer DMA, the general technique would be for the DPC to
schedule processing with KsPinAttemptProcessing. The processing routine
grabs the leading edge, copies data out of the common buffer, and
advances. Cloning would not be necessary with this technique. It would
be similar to the way "AVSSamp" works, but it would be pin-centric.
A sketch of that common buffer approach follows this header comment.
History:
created 3/8/2001
**************************************************************************/
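//
// The block below is a minimal, illustrative sketch (disabled with #if 0)
// of the common buffer DMA technique described in the abstract above; it
// is not part of this sample. The class CCommonBufferPin and its members
// (m_Pin, m_CommonBuffer, m_ReadOffset, m_BytesAvailable) are hypothetical
// stand-ins for whatever state a real common buffer driver would keep.
// Only the AVStream calls (KsPinAttemptProcessing,
// KsPinGetLeadingEdgeStreamPointer, KsStreamPointerAdvanceOffsets,
// KsStreamPointerUnlock) are real API.
//
#if 0
//
// DPC for the hypothetical common buffer hardware: no stream pointer work
// is done here; the DPC merely requests asynchronous pin-centric
// processing, and the paged Process routine below does the copying at
// PASSIVE_LEVEL.
//
void
CCommonBufferPin::
InterruptDpc (
    )
{
    KsPinAttemptProcessing (m_Pin, TRUE);
}
//
// Processing routine: grab the leading edge, copy data out of the common
// buffer into the capture buffers, and advance. No cloning is necessary
// because each chunk of data is consumed synchronously inside Process().
//
NTSTATUS
CCommonBufferPin::
Process (
    )
{
    PAGED_CODE();
    NTSTATUS Status = STATUS_SUCCESS;
    PKSSTREAM_POINTER Leading =
        KsPinGetLeadingEdgeStreamPointer (
            m_Pin,
            KSSTREAM_POINTER_STATE_LOCKED
            );
    while (NT_SUCCESS (Status) && Leading && m_BytesAvailable) {
        //
        // Copy no more than the space remaining in the current frame and
        // no more than the data available in the common buffer.
        //
        ULONG BytesToCopy = Leading -> OffsetOut.Remaining;
        if (BytesToCopy > m_BytesAvailable) {
            BytesToCopy = m_BytesAvailable;
        }
        RtlCopyMemory (
            Leading -> OffsetOut.Data,
            m_CommonBuffer + m_ReadOffset,
            BytesToCopy
            );
        Leading -> StreamHeader -> DataUsed += BytesToCopy;
        m_ReadOffset += BytesToCopy;
        m_BytesAvailable -= BytesToCopy;
        //
        // Advance past the copied data. Advancing off the end of the
        // current frame completes the frame and moves the leading edge to
        // the next frame in the queue (if any).
        //
        Status = KsStreamPointerAdvanceOffsets (
            Leading,
            0,
            BytesToCopy,
            FALSE
            );
    }
    //
    // Only unlock the leading edge if the last advance succeeded; if it
    // failed, the leading edge is assumed to have run off the end of the
    // queue and is not unlocked here.
    //
    if (NT_SUCCESS (Status) && Leading) {
        KsStreamPointerUnlock (Leading, FALSE);
    }
    //
    // Returning STATUS_PENDING suspends processing until the next DPC
    // requests it again with KsPinAttemptProcessing.
    //
    return STATUS_PENDING;
}
#endif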
#include "avshws.h"
/**************************************************************************
PAGEABLE CODE
**************************************************************************/
#ifdef ALLOC_PRAGMA
#pragma code_seg("PAGE")
#endif // ALLOC_PRAGMA
CCapturePin::
CCapturePin (
IN PKSPIN Pin
) :
m_Pin (Pin)
/*++
Routine Description:
Construct a new capture pin.
Arguments:
Pin -
The AVStream pin object corresponding to the capture pin
Return Value:
None
--*/
{
PAGED_CODE();
PKSDEVICE Device = KsPinGetDevice (Pin);
//
// Set up our device pointer. This gives us access to "hardware I/O"
// during the capture routines.
//
m_Device = reinterpret_cast <CCaptureDevice *> (Device -> Context);
}
/*************************************************/
NTSTATUS
CCapturePin::
DispatchCreate (
IN PKSPIN Pin,
IN PIRP Irp
)
/*++
Routine Description:
Create a new capture pin. This is the creation dispatch for
the video capture pin.
Arguments:
Pin -
The pin being created
Irp -
The creation Irp
Return Value:
Success / Failure
--*/
{
PAGED_CODE();
NTSTATUS Status = STATUS_SUCCESS;
CCapturePin *CapPin = new (NonPagedPool) CCapturePin (Pin);
if (!CapPin) {
//
// Return failure if we couldn't create the pin.
//
Status = STATUS_INSUFFICIENT_RESOURCES;
} else {
//
// Add the item to the object bag if we were successful.
// Whenever the pin closes, the bag is cleaned up and we will be
// freed.
//
Status = KsAddItemToObjectBag (
Pin -> Bag,
reinterpret_cast <PVOID> (CapPin),
reinterpret_cast <PFNKSFREE> (CCapturePin::Cleanup)
);
if (!NT_SUCCESS (Status)) {
delete CapPin;
} else {
Pin -> Context = reinterpret_cast <PVOID> (CapPin);
}
}
//
// If we succeeded so far, stash the video info header away and change
// our allocator framing to reflect the fact that only now do we know
// the framing requirements based on the connection format.
//
PKS_VIDEOINFOHEADER VideoInfoHeader = NULL;
if (NT_SUCCESS (Status)) {
VideoInfoHeader = CapPin -> CaptureVideoInfoHeader ();
if (!VideoInfoHeader) {
Status = STATUS_INSUFFICIENT_RESOURCES;
}
}
if (NT_SUCCESS (Status)) {
//
// We need to edit the descriptor to ensure we don't mess up any other
// pins using the descriptor or touch read-only memory.
//
Status = KsEdit (Pin, &Pin -> Descriptor, 'aChS');
if (NT_SUCCESS (Status)) {
Status = KsEdit (
Pin,
&(Pin -> Descriptor -> AllocatorFraming),
'aChS'
);
}
//
// If the edits proceeded without running out of memory, adjust
// the framing based on the video info header.
//
if (NT_SUCCESS (Status)) {
//
// We've KsEdit'ed this, so it is safe to cast away constness as
// long as the edit succeeded.
//
PKSALLOCATOR_FRAMING_EX Framing =
const_cast <PKSALLOCATOR_FRAMING_EX> (
Pin -> Descriptor -> AllocatorFraming
);
Framing -> FramingItem [0].Frames = 2;
//
// The physical and optimal ranges must be biSizeImage. We only
// support one frame size, precisely the size of each capture
// image.
//
Framing -> FramingItem [0].PhysicalRange.MinFrameSize =
Framing -> FramingItem [0].PhysicalRange.MaxFrameSize =
Framing -> FramingItem [0].FramingRange.Range.MinFrameSize =
Framing -> FramingItem [0].FramingRange.Range.MaxFrameSize =
VideoInfoHeader -> bmiHeader.biSizeImage;
Framing -> FramingItem [0].PhysicalRange.Stepping =
Framing -> FramingItem [0].FramingRange.Range.Stepping =
0;
}
}
return Status;
}
/*************************************************/
PKS_VIDEOINFOHEADER
CCapturePin::
CaptureVideoInfoHeader (
)
/*++
Routine Description:
Capture the video info header out of the connection format. This
is what the synthesized images are based on.
Arguments:
None
Return Value:
The captured video info header or NULL if there is insufficient
memory.
--*/
{
PAGED_CODE();
PKS_VIDEOINFOHEADER ConnectionHeader =
&((reinterpret_cast <PKS_DATAFORMAT_VIDEOINFOHEADER>
(m_Pin -> ConnectionFormat)) ->
VideoInfoHeader);
m_VideoInfoHeader = reinterpret_cast <PKS_VIDEOINFOHEADER> (
ExAllocatePool (
NonPagedPool,
KS_SIZE_VIDEOHEADER (ConnectionHeader)
)
);
if (!m_VideoInfoHeader)
return NULL;
//
// Bag the newly allocated header space. This will get cleaned up
// automatically when the pin closes.
//
NTSTATUS Status =
KsAddItemToObjectBag (
m_Pin -> Bag,
reinterpret_cast <PVOID> (m_VideoInfoHeader),
NULL
);
if (!NT_SUCCESS (Status)) {
ExFreePool (m_VideoInfoHeader);
return NULL;
} else {
//
// Copy the connection format video info header into the newly
// allocated "captured" video info header.
//
RtlCopyMemory (
m_VideoInfoHeader,
ConnectionHeader,
KS_SIZE_VIDEOHEADER (ConnectionHeader)
);
}
return m_VideoInfoHeader;
}
/*************************************************/
NTSTATUS
CCapturePin::
Process (
)
/*++
Routine Description:
The process dispatch for the pin bridges to this location.
We handle setting up scatter gather mappings, etc...
Arguments:
None
Return Value:
Success / Failure
--*/
{
PAGED_CODE();
NTSTATUS Status = STATUS_SUCCESS;
PKSSTREAM_POINTER Leading;
Leading = KsPinGetLeadingEdgeStreamPointer (
m_Pin,
KSSTREAM_POINTER_STATE_LOCKED
);
while (NT_SUCCESS (Status) && Leading) {
PKSSTREAM_POINTER ClonePointer;
PSTREAM_POINTER_CONTEXT SPContext;
//
// For the sake of optimization in this particular sample, I will only
// keep one clone stream pointer per frame. This complicates the logic
// here but simplifies the completions.
//
// I'm also choosing to do this because I need to keep track of the
// virtual addresses corresponding to each mapping while faking
// DMA. It simplifies that too.
//
if (!m_PreviousStreamPointer) {
//
// First thing we need to do is clone the leading edge. This allows
// us to keep reference on the frames while they're in DMA.
//
Status = KsStreamPointerClone (
Leading,
NULL,
sizeof (STREAM_POINTER_CONTEXT),
&ClonePointer
);
//
// I use this for easy chunking of the buffer. We're not really
// dealing with physical addresses. This keeps track of what
// virtual address in the buffer the current scatter / gather
// mapping corresponds to for the fake hardware.
//
if (NT_SUCCESS (Status)) {
//
// Set the stream header data used to 0. We update this
// in the DMA completions. For queues with DMA, we must
// update this field ourselves.
//
ClonePointer -> StreamHeader -> DataUsed = 0;
SPContext = reinterpret_cast <PSTREAM_POINTER_CONTEXT>
(ClonePointer -> Context);
SPContext -> BufferVirtual =
reinterpret_cast <PUCHAR> (
ClonePointer -> StreamHeader -> Data
);
}
} else {
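//
// A clone for this frame is left over from a previous pass in which the
// fake hardware could not accept all of the frame's mappings; reuse it
// rather than cloning the leading edge again.
//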
ClonePointer = m_PreviousStreamPointer;
SPContext = reinterpret_cast <PSTREAM_POINTER_CONTEXT>
(ClonePointer -> Context);
Status = STATUS_SUCCESS;
}
//
// If the clone failed, likely we're out of resources. Break out
// of the loop for now. We may end up starving DMA.
//
if (!NT_SUCCESS (Status)) {
KsStreamPointerUnlock (Leading, FALSE);
break;
}
//
// Program the fake hardware. I would use Clone -> OffsetOut.*, but
// because of the optimization of one stream pointer per frame, it
// doesn't make complete sense.
//
ULONG MappingsUsed =
m_Device -> ProgramScatterGatherMappings (
&(SPContext -> BufferVirtual),
Leading -> OffsetOut.Mappings,
Leading -> OffsetOut.Remaining
);
//
// In order to keep one clone per frame and simplify the fake DMA