📄 axextend.idl
字号:
// Notify the resource manager that a given object has been given the
// user's focus. In ActiveMovie, this will normally be a video renderer
// whose window has received the focus. The filter graph will switch
// contended resources to (in order):
// requests made with this same focus object
// requests whose focus object shares a common source with this
// requests whose focus object shares a common filter graph
// After calling this, you *must* call ReleaseFocus before the IUnknown
// becomes invalid, unless you can guarantee that another SetFocus
// of a different object is done in the meantime. No addref is held.
//
// The resource manager will hold this pointer until replaced or cancelled,
// and will use it to resolve resource contention. It will call
// QueryInterface for IBaseFilter at least and if found will call methods on
// that interface.
HRESULT
SetFocus(
[in] IUnknown* pFocusObject);
// Sets the focus to NULL if the current focus object is still
// pFocusObject. Call this when
// the focus object is about to be destroyed to ensure that no-one is
// still referencing the object.
HRESULT
ReleaseFocus(
[in] IUnknown* pFocusObject);
// !!! still need
// -- app override (some form of SetPriority)
// -- enumeration and description of resources
}
//
// Interface representing an object that can be notified about state
// and other changes within a filter graph. The filtergraph will call plug-in
// distributors that expose this optional interface so that they can
// respond to appropriate changes.
//
// Implement if: you are a plug-in distributor (your class id is found
// under HKCR\Interface\<IID>\Distributor= for some interface).
//
// Use if: you are the filtergraph.
//
// Note: every method here is invoked BEFORE the corresponding
// notification is delivered to the filters themselves.
[
object,
uuid(56a868af-0ad4-11ce-b03a-0020af0ba770),
pointer_default(unique)
]
interface IDistributorNotify : IUnknown
{
// Called when the graph is entering the stopped state, before the
// filters are stopped.
HRESULT Stop(void);
// Called when the graph is entering the paused state, before the
// filters are notified.
HRESULT Pause(void);
// Called when the graph is entering the running state, before the
// filters are notified. tStart is the stream-time offset parameter
// that will be given to each filter's IBaseFilter::Run method.
HRESULT Run(REFERENCE_TIME tStart);
// Called when the graph's clock is changing, with the new clock.
// AddRef the clock if you hold it beyond this method. Called before
// the filters are notified.
HRESULT SetSyncSource(
[in] IReferenceClock * pClock);
// Called when the set of filters or their connections has changed:
// on every AddFilter, RemoveFilter or ConnectDirect (or anything
// that will lead to one of these). You don't need to rebuild your
// list of interesting filters at this point, but you should release
// any refcounts you hold on any filters that have been removed.
HRESULT NotifyGraphChange(void);
}
// Flags for the dwFlags field of AM_STREAM_INFO, describing the
// current IAMStreamControl state of a pin (see IAMStreamControl below).
typedef enum {
// tStart holds a valid scheduled start time (set via StartAt)
AM_STREAM_INFO_START_DEFINED = 0x00000001,
// tStop holds a valid scheduled stop time (set via StopAt)
AM_STREAM_INFO_STOP_DEFINED = 0x00000002,
// NOTE(review): presumably set while the pin is discarding samples
// outside the start/stop window - confirm against the implementation
AM_STREAM_INFO_DISCARDING = 0x00000004,
// one extra sample will be sent after the stop time
// (the bSendExtra argument of StopAt was TRUE)
AM_STREAM_INFO_STOP_SEND_EXTRA = 0x00000010
} AM_STREAM_INFO_FLAGS;
// Stream information returned by IAMStreamControl::GetInfo, reporting
// the currently scheduled start/stop times and their cookies.
typedef struct {
// scheduled start time; valid only if AM_STREAM_INFO_START_DEFINED is set
REFERENCE_TIME tStart;
// scheduled stop time; valid only if AM_STREAM_INFO_STOP_DEFINED is set
REFERENCE_TIME tStop;
// cookie passed to StartAt, echoed in EC_STREAM_CONTROL_STARTED
DWORD dwStartCookie;
// cookie passed to StopAt, echoed in EC_STREAM_CONTROL_STOPPED
DWORD dwStopCookie;
// combination of AM_STREAM_INFO_FLAGS bits
DWORD dwFlags;
} AM_STREAM_INFO;
//
// IAMStreamControl - pin interface for scheduling the times at which
// a pin starts and stops streaming
//
[
object,
uuid(36b73881-c2c8-11cf-8b46-00805f6cef60),
pointer_default(unique)
]
interface IAMStreamControl : IUnknown
{
// Schedule streaming to start at *ptStart / stop at *ptStop.
//
// The REFERENCE_TIME pointers may be NULL, which indicates
// "immediately". If the pointer is non-NULL and dwCookie is
// non-zero, then pins should send EC_STREAM_CONTROL_STOPPED /
// EC_STREAM_CONTROL_STARTED with an IPin pointer and the cookie,
// thus allowing apps to tie the events back to their requests.
// If either dwCookie is zero, or the pointer is NULL, then no
// event is sent.
//
// bSendExtra: if you have a capture pin hooked up to a MUX input
// pin and they both support IAMStreamControl, you'll want the MUX
// to signal the stop so you know the last frame was written out.
// In order for the MUX to know it's finished, the capture pin will
// have to send one extra sample after it was supposed to stop, so
// the MUX can trigger off that. So you would set bSendExtra to
// TRUE for the capture pin. Leave it FALSE in all other cases.
HRESULT StartAt( [in] const REFERENCE_TIME * ptStart,
[in] DWORD dwCookie );
HRESULT StopAt( [in] const REFERENCE_TIME * ptStop,
[in] BOOL bSendExtra,
[in] DWORD dwCookie );
// Retrieve the currently scheduled start/stop times, their cookies
// and the AM_STREAM_INFO_FLAGS state for this pin.
HRESULT GetInfo( [out] AM_STREAM_INFO *pInfo);
}
//
// ISeekingPassThru
//
// One-shot initialization of a seeking pass-through object with the
// pin it should operate on behalf of.
// NOTE(review): presumably seeking/positioning calls are then forwarded
// upstream through pPin's connection - confirm against the implementation;
// the forwarding behavior is not visible in this file.
[
object,
uuid(36b73883-c2c8-11cf-8b46-00805f6cef60),
pointer_default(unique)
]
interface ISeekingPassThru : IUnknown
{
// bSupportRendering: NOTE(review): exact semantics not visible here -
//                    presumably whether the object supports rendering
//                    positions; confirm before relying on it
// pPin:              the pin this pass-through object works for
HRESULT Init( [in] BOOL bSupportRendering,
[in] IPin *pPin);
}
//
// IAMStreamConfig - pin interface
//
// A capture filter or compression filter's output pin
// supports this interface - no matter what data type you produce.
// This interface can be used to set the output format of a pin (as an
// alternative to connecting the pin using a specific media type).
// After setting an output format, the pin will use that format
// the next time it connects to somebody, so you can just Render that
// pin and get a desired format without using Connect(CMediaType)
// Your pin should do that by ONLY OFFERING the media type set in SetFormat
// in its enumeration of media types, and no others. This will ensure that
// that format is indeed used for connection (or at least offer it first).
// An application interested in enumerating accepted mediatypes may have to
// do so BEFORE calling SetFormat.
// But this interface's GetStreamCaps function can get more information
// about accepted media types than the traditional way of enumerating a pin's
// media types, so it should typically be used instead.
// GetStreamCaps gets information about the kinds of formats allowed... how
// it can stretch and crop, and the frame rate and data rates allowed (for
// video)
// VIDEO EXAMPLE
//
// GetStreamCaps returns a whole array of {MediaType, Capabilities}.
// Let's say your capture card supports JPEG anywhere between 160x120 and
// 320x240, and also the size 640x480. Also, say it supports RGB24 at
// resolutions between 160x120 and 320x240 but only multiples of 8. You would
// expose these properties by offering a media type of 320 x 240 JPEG
// (if that is your default or preferred size) coupled with
// capabilities saying minimum 160x120 and maximum 320x240 with granularity of
// 1. The next pair you expose is a media type of 640x480 JPEG coupled with
// capabilities of min 640x480 max 640x480. The third pair is media type
// 320x240 RGB24 with capabilities min 160x120 max 320x240 granularity 8.
// In this way you can expose almost every quirk your card might have.
// An application interested in knowing what compression formats you provide
// can get all the pairs and make a list of all the unique sub types of the
// media types.
//
// If a filter's output pin is connected with a media type that has rcSource
// and rcTarget not empty, it means the filter is being asked to stretch the
// rcSource sub-rectangle of its InputSize (the format of the input pin for
// a compressor, and the largest bitmap a capture filter can generate with
// every pixel unique) into the rcTarget sub-rectangle of its output format.
// For instance, if a video compressor has as input 160x120 RGB, and as output
// 320x240 MPEG with an rcSource of (10,10,20,20) and rcTarget of (0,0,100,100)
// this means the compressor is being asked to take a 10x10 piece of the 160x120
// RGB bitmap, and make it fill the top 100x100 area of a 320x240 bitmap,
// leaving the rest of the 320x240 bitmap untouched.
// A filter does not have to support this and can fail to connect with a
// media type where rcSource and rcTarget are not empty.
//
// Your output pin is connected to the next filter with a certain media
// type (either directly or using the media type passed by SetFormat),
// and you need to look at the AvgBytesPerSecond field of the format
// of that mediatype to see what data rate you are being asked to compress
// the video to, and use that data rate. Using the number of frames per
// second in AvgTimePerFrame, you can figure out how many bytes each frame
// is supposed to be. You can make it smaller, but NEVER EVER make a bigger
// data rate. For a video compressor, your input pin's media type tells you
// the frame rate (use that AvgTimePerFrame). For a capture filter, the
// output media type tells you, so use that AvgTimePerFrame.
//
// The cropping rectangle described below is the same as the rcSrc of the
// output pin's media type.
//
// The output rectangle described below is the same as the width and height
// of the BITMAPINFOHEADER of the media type of the output pin's media type
// AUDIO EXAMPLE
//
// This API can return an array of pairs of (media type, capabilities).
// This can be used to expose all kinds of weird capabilities. Let's say you
// do any PCM frequency from 11,025 to 44,100 at 8 or 16 bit mono or
// stereo, and you also do 48,000 16bit stereo as a special combination.
// You would expose 3 pairs. The first pair would have Min Freq of 11025 and
// Max Freq of 44100, with MaxChannels=2 and MinBits=8 and MaxBits=8 for the
// capabilities structure, and a media type of anything you like, maybe
// 22kHz, 8bit stereo as a default.
// The 2nd pair would be the same except for MinBits=16 and MaxBits=16 in
// the capabilities structure and the media type could be something like
// 44kHz, 16bit stereo as a default (the media type in the pair should always
// be something legal as described by the capabilities structure... the
// structure tells you how you can change the media type to produce other
// legal media types... for instance changing 44kHz to 29010Hz would be legal,
// but changing bits from 16 to 14 would not be.)
// The 3rd pair would be MinFreq=48000 MaxFreq=48000 MaxChannels=2
// MinBits=16 and MaxBits=16, and the media type would be 48kHz 16bit stereo.
// You can also use the Granularity elements of the structure (like the example
// for video) if you support values that are multiples of n, e.g. you could say
// minimum bits per sample 8, max 16, and granularity 8 to describe doing
// either 8 or 16 bit all in one structure
//
// If you support non-PCM formats, the media type returned in GetStreamCaps
// can show which non-PCM formats you support (with a default sample rate,
// bit rate and channels) and the capabilities structure going with that
// media type can describe which other sample rates, bit rates and channels
// you support.
[
object,
uuid(C6E13340-30AC-11d0-A18C-00A0C9118956),
pointer_default(unique)
]
interface IAMStreamConfig : IUnknown
{
// this is the structure returned by a VIDEO filter from
// IAMStreamConfig::GetStreamCaps. It describes how the media type
// returned in the same {MediaType, Capabilities} pair may legally be
// varied (see the VIDEO EXAMPLE above).
//
typedef struct _VIDEO_STREAM_CONFIG_CAPS {
GUID guid; // will be MEDIATYPE_Video
// the logical or of all the AnalogVideoStandard's supported
// typically zero if not supported
ULONG VideoStandard;
// the inherent size of the incoming signal... taken from the input
// pin for a compressor, or the largest size a capture filter can
// digitize the signal with every pixel still unique
SIZE InputSize;
// The input of a compressor filter may have to be connected for these
// to be known
// smallest rcSrc cropping rect allowed
SIZE MinCroppingSize;
// largest rcSrc cropping rect allowed
SIZE MaxCroppingSize;
// granularity of cropping size - eg only widths a multiple of 4 allowed
int CropGranularityX;
int CropGranularityY;
// alignment of cropping rect - eg rect must start on multiple of 4
int CropAlignX;
int CropAlignY;
// The input of a compressor filter may have to be connected for these
// to be known
// smallest bitmap this pin can produce
SIZE MinOutputSize;
// largest bitmap this pin can produce
SIZE MaxOutputSize;
// granularity of output bitmap size
int OutputGranularityX;
int OutputGranularityY;
// !!! what about alignment of rcTarget inside BIH if different?
// how well can you stretch in the x direction? 0==not at all
// 1=pixel doubling 2=interpolation(2 taps) 3=better interpolation
// etc. (the Y fields describe the y direction the same way)
int StretchTapsX;
int StretchTapsY;
// how well can you shrink in the x direction? 0==not at all
// 1=pixel doubling 2=interpolation(2 taps) 3=better interpolation
// etc. (the Y fields describe the y direction the same way)
int ShrinkTapsX;
int ShrinkTapsY;
// CAPTURE filter only - what frame rates are allowed?
// NOTE(review): presumably expressed in 100ns REFERENCE_TIME units
// like AvgTimePerFrame - confirm
LONGLONG MinFrameInterval;
LONGLONG MaxFrameInterval;
// what data rates can this pin produce?
LONG MinBitsPerSecond;
LONG MaxBitsPerSecond;
} VIDEO_STREAM_CONFIG_CAPS;
// this is the structure returned by an AUDIO filter from
// IAMStreamConfig::GetStreamCaps. It describes the range of legal
// sample rates, bit depths and channel counts reachable from the media
// type returned in the same pair (see the AUDIO EXAMPLE above). Each
// Granularity field gives the legal step between the Minimum and
// Maximum values of the corresponding parameter.
//
typedef struct _AUDIO_STREAM_CONFIG_CAPS {
GUID guid; // will be MEDIATYPE_Audio
ULONG MinimumChannels;
ULONG MaximumChannels;
ULONG ChannelsGranularity;
// e.g. min 8, max 16, granularity 8 describes "8-bit or 16-bit only"
ULONG MinimumBitsPerSample;
ULONG MaximumBitsPerSample;
ULONG BitsPerSampleGranularity;
// sample frequencies in Hz, e.g. 11025 to 44100
ULONG MinimumSampleFrequency;
ULONG MaximumSampleFrequency;
ULONG SampleFrequencyGranularity;
} AUDIO_STREAM_CONFIG_CAPS;
// - only allowed when pin is not streaming, else the call will FAIL
// - If your output pin is not yet connected, and you can
// connect your output pin with this media type, you should
// succeed the call, and start offering it first (
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -