// source.cpp
// Avisynth v0.3. Copyright 2000 Ben Rudiak-Gould. For distribution
// conditions, please see http://www.math.berkeley.edu/~benrg/avisynth.html .
#include "avisynth.h"
#include "internal-filters.h"
#include "AVIReadHandler.h"
class AVISource : public VideoFilterWithRefcount {
IAVIReadHandler *pfile;
IAVIReadStream *pvideo, *paudio;
HIC hic;
VideoInfo vi;
unsigned char* srcbuffer;
int srcbuffer_size;
BITMAPINFOHEADER* pbiSrc;
BITMAPINFOHEADER biDst;
bool ex;
int last_decompressed_frame;
DWORD DecompressBegin(LPBITMAPINFOHEADER lpbiSrc, LPBITMAPINFOHEADER lpbiDst);
void DecompressFrame(int n, bool preroll, unsigned char* buf);
public:
AVISource(const char filename[], int mode); // mode: 0=detect, 1=avifile, 2=opendml
~AVISource();
void GetVideoInfo(VideoInfo* pvi);
void GetFrame(int n, unsigned char* buf);
void GetAudio(void* buf, int start, int count);
bool GetParity(int n);
static PVideoFilter __cdecl Create(const FilterInfo*, const Arg* args, const char*) { return new AVISource(args[0].string, 0); }
static PVideoFilter __cdecl CreateAVIFile(const FilterInfo*, const Arg* args, const char*) { return new AVISource(args[0].string, 1); }
static PVideoFilter __cdecl CreateOpenDML(const FilterInfo*, const Arg* args, const char*) { return new AVISource(args[0].string, 2); }
};
DWORD AVISource::DecompressBegin(LPBITMAPINFOHEADER lpbiSrc, LPBITMAPINFOHEADER lpbiDst) {
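// Try the standard ICDecompressBegin first; some codecs only implement the
// Ex variants, so on ICERR_UNSUPPORTED switch to ICDecompressExBegin and
// remember that choice in 'ex' for all later calls.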
if (!ex) {
DWORD result = ICDecompressBegin(hic, lpbiSrc, lpbiDst);
if (result != ICERR_UNSUPPORTED)
return result;
else
ex = true;
// and fall thru
}
return ICDecompressExBegin(hic, 0,
lpbiSrc, 0, 0, 0, lpbiSrc->biWidth, lpbiSrc->biHeight,
lpbiDst, 0, 0, 0, lpbiDst->biWidth, lpbiDst->biHeight);
}
void AVISource::DecompressFrame(int n, bool preroll, unsigned char* buf) {
// bool noframe = (n != AVIStreamFindSample(pvideo, n, FIND_NEXT | FIND_ANY));
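// No decompressor open: the stream is already in a directly usable format
// (YUY2 or 24-bit RGB), so hand the raw frame data straight to the caller.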
if (!hic) {
pvideo->Read(n, 1, buf, vi.ImageSize(), NULL, NULL);
return;
}
// if (!noframe) {
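// Read the compressed frame into srcbuffer, growing the buffer and retrying
// if the stream reports AVIERR_BUFFERTOOSMALL.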
reread:
long bytes_read = srcbuffer_size;
DWORD err = pvideo->Read(n, 1, srcbuffer, srcbuffer_size, &bytes_read, NULL);
if (err == AVIERR_BUFFERTOOSMALL || (err == 0 && !srcbuffer)) {
if (srcbuffer) delete[] srcbuffer;
pvideo->Read(n, 1, 0, srcbuffer_size, &bytes_read, NULL);
srcbuffer = new unsigned char[srcbuffer_size = bytes_read];
goto reread;
} else if (FAILED(err)) {
memset(buf, 0, vi.ImageSize());
return;
}
// }
int flags = preroll * ICDECOMPRESS_PREROLL /*+ noframe * ICDECOMPRESS_NULLFRAME*/;
if (!pvideo->IsKeyFrame(n))
flags |= ICDECOMPRESS_NOTKEYFRAME;
pbiSrc->biSizeImage = bytes_read;
if (ICERR_OK != (!ex ? ICDecompress(hic, flags, pbiSrc, srcbuffer, &biDst, buf)
: ICDecompressEx(hic, flags, pbiSrc, srcbuffer, 0, 0, vi.width, vi.height, &biDst, buf, 0, 0, vi.width, vi.height)))
{
memset(buf, 0, vi.ImageSize());
}
}
AVISource::AVISource(const char filename[], int mode) {
srcbuffer = 0; srcbuffer_size = 0;
pfile = 0; pvideo = 0; paudio = 0; pbiSrc = 0; hic = 0;
memset(&vi, 0, sizeof(vi));
ex = false;
last_decompressed_frame = -65536;
AVIFileInit();
if (mode == 0) {
// if it looks like an AVI file, open in OpenDML mode; otherwise AVIFile mode
HANDLE h = CreateFile(filename, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, 0, NULL);
if (h == INVALID_HANDLE_VALUE)
throw FilterChainError("AVISource autodetect: couldn't open file");
unsigned buf[3];
DWORD bytes_read;
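// Multi-character constants are stored byte-reversed on little-endian x86,
// so 'FFIR' matches "RIFF" and ' IVA' matches "AVI ".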
if (ReadFile(h, buf, 12, &bytes_read, NULL) && bytes_read == 12 && buf[0] == 'FFIR' && buf[2] == ' IVA')
mode = 2;
else
mode = 1;
CloseHandle(h);
}
if (mode == 1) { // AVIFile mode
PAVIFILE paf;
if (FAILED(AVIFileOpen(&paf, filename, OF_READ, 0)))
throw FilterChainError("AVIFileSource: couldn't open file");
pfile = CreateAVIReadHandler(paf);
} else { // OpenDML mode
pfile = CreateAVIReadHandler(filename);
}
// check for video stream
hic = 0;
pvideo = pfile->GetStream(streamtypeVIDEO, 0);
if (pvideo) {
AVISTREAMINFO asi;
long size = sizeof(BITMAPINFOHEADER);
if (SUCCEEDED(pvideo->Info(&asi, sizeof(asi)))
&& SUCCEEDED(pvideo->ReadFormat(0, 0, &size))
&& (pbiSrc = (LPBITMAPINFOHEADER)malloc(size))
&& SUCCEEDED(pvideo->ReadFormat(0, pbiSrc, &size))) {
// try to find a way to decompress it...
// first, see if we can handle it directly
if (pbiSrc->biCompression == '2YUY') {
vi.pixel_type = VideoInfo::YUY2;
} else if ((pbiSrc->biCompression == 0 || pbiSrc->biCompression == ' BID') && pbiSrc->biBitCount == 24) {
vi.pixel_type = VideoInfo::BGR24;
// next, see if someone will decompress it to YUY2, or failing that, RGB
} else {
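// Build a BITMAPINFOHEADER describing the output we want and ask the
// VfW codec system (ICLocate) for a decompressor that can produce it.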
memset(&biDst, 0, sizeof(biDst));
biDst.biSize = sizeof(biDst);
biDst.biWidth = pbiSrc->biWidth;
biDst.biHeight = pbiSrc->biHeight;
biDst.biPlanes = 1;
biDst.biBitCount = 16;
biDst.biCompression = '2YUY';
biDst.biSizeImage = pbiSrc->biWidth*pbiSrc->biHeight*2;
hic = ICLocate(ICTYPE_VIDEO, pbiSrc->biCompression, pbiSrc, &biDst, ICMODE_DECOMPRESS);
if (hic) {
vi.pixel_type = VideoInfo::YUY2;
} else {
biDst.biCompression = BI_RGB;
biDst.biBitCount = 24;
biDst.biSizeImage = pbiSrc->biWidth*pbiSrc->biHeight*3;
hic = ICLocate(ICTYPE_VIDEO, pbiSrc->biCompression, pbiSrc, &biDst, ICMODE_DECOMPRESS);
if (hic) {
vi.pixel_type = VideoInfo::BGR24;
}
}
if (hic)
DecompressBegin(pbiSrc, &biDst);
}
vi.width = pbiSrc->biWidth;
vi.height = pbiSrc->biHeight;
vi.fps_numerator = asi.dwRate;
vi.fps_denominator = asi.dwScale;
vi.num_frames = asi.dwLength;
vi.field_based = false;
}
if (!vi.pixel_type)
throw FilterChainError("AVISource: couldn't decompress video");
}
paudio = pfile->GetStream(streamtypeAUDIO, 0);
if (paudio) {
AVISTREAMINFO asi;
WAVEFORMATEX wfx;
LONG size = sizeof(wfx);
if (SUCCEEDED(paudio->Info(&asi, sizeof(asi))) && SUCCEEDED(paudio->ReadFormat(0, &wfx, &size)) && wfx.wFormatTag == 1) {
vi.num_audio_samples = asi.dwLength;
vi.audio_samples_per_second = wfx.nSamplesPerSec;
vi.stereo = (wfx.nChannels == 2);
vi.sixteen_bit = (wfx.wBitsPerSample == 16);
}
}
}
AVISource::~AVISource() {
if (hic) {
if (!ex)
ICDecompressEnd(hic);
else
ICDecompressExEnd(hic);
ICClose(hic);
}
if (paudio)
delete paudio;
if (pvideo)
delete pvideo;
if (pfile)
pfile->Release();
AVIFileExit();
if (pbiSrc)
free(pbiSrc);
}
void AVISource::GetVideoInfo(VideoInfo* pvi) { *pvi = vi; }
void AVISource::GetFrame(int n, unsigned char* buf) {
try {
// find the last keyframe
int keyframe = pvideo->NearestKeyFrame(n);
// maybe we don't need to go back that far
if (last_decompressed_frame < n && last_decompressed_frame >= keyframe)
keyframe = last_decompressed_frame+1;
if (n < 0) n = 0;
if (keyframe < 0) keyframe = 0;
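// Decode forward from the keyframe to frame n; intermediate frames are
// decoded with preroll=true since we only need them to update codec state.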
for (int i = keyframe; i <= n; ++i)
DecompressFrame(i, i != n, buf);
last_decompressed_frame = n;
}
catch (...) {
memset(buf, 0, vi.ImageSize());
}
}
void AVISource::GetAudio(void* buf, int start, int count) {
LONG bytes_read=0, samples_read=0;
try {
if (paudio)
paudio->Read(start, vi.AudioSamplesFromBytes(count), buf, count, &bytes_read, &samples_read);
}
catch (...) {
bytes_read = 0;
}
if (bytes_read < count)
memset((char*)buf + bytes_read, 0, count - bytes_read);
}
bool AVISource::GetParity(int n) { return false; }
/********************************************************************
********************************************************************/
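// Blackness: a synthetic source that produces black frames and silent audio,
// copying its format from an explicit VideoInfo or from an existing clip.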
class Blackness : public VideoFilterWithRefcount {
VideoInfo vi;
public:
Blackness(VideoInfo* pvi);
Blackness(PVideoFilter pvf, int num_frames=0);
void GetFrame(int n, unsigned char* buf);
void GetAudio(void* buf, int start, int count);
void GetVideoInfo(VideoInfo* pvi);
bool GetParity(int n);
static PVideoFilter __cdecl Create(const FilterInfo*, const Arg* args, const char*) {
return new Blackness(args[1].clip, args[0].integer);
}
};
Blackness::Blackness(VideoInfo* pvi) : vi(*pvi) {}
Blackness::Blackness(PVideoFilter pvf, int num_frames) {
pvf->GetVideoInfo(&vi);
if (num_frames) vi.num_frames = num_frames;
}
void Blackness::GetVideoInfo(VideoInfo* pvi) {
*pvi = vi;
if (vi.HasVideo())
pvi->num_audio_samples = vi.AudioSamplesFromFrames(vi.num_frames);
}
void Blackness::GetAudio(void* buf, int start, int count) { memset(buf, 0, count); }
bool Blackness::GetParity(int n) { return false; }
void Blackness::GetFrame(int n, unsigned char* buf) {
if (vi.IsYUY2()) {
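// 0x80108010 packs two black YUY2 pixels: Y = 0x10, U = V = 0x80 (neutral chroma).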
unsigned* p = (unsigned*)buf;
for (int i=vi.ImageSize()>>2; i; --i)
*p++ = 0x80108010;
} else if (vi.IsRGB()) {
memset(buf, 0, vi.ImageSize());
}
}
/********************************************************************
********************************************************************/
#include "ipc.h"
class IPCSource : public VideoFilterWithRefcount {
HANDLE mutex, request, reply, filemap;
AvisynthIPCData* ipc_data;
public:
IPCSource(const char name[]) {
mutex = OpenMutex(MUTEX_ALL_ACCESS, FALSE, Sprintf("Avisynth__semaphore__%s", name));
request = OpenEvent(EVENT_ALL_ACCESS, FALSE, Sprintf("Avisynth__request__%s", name));
reply = OpenEvent(EVENT_ALL_ACCESS, FALSE, Sprintf("Avisynth__reply__%s", name));
filemap = OpenFileMapping(FILE_MAP_ALL_ACCESS, FALSE, Sprintf("Avisynth__data__%s", name));
if (!mutex || !request || !reply || !filemap)
throw FilterChainError("IPCSource: Unable to open IPC objects. Try using a different file name.");
ipc_data = (AvisynthIPCData*) MapViewOfFile(filemap, FILE_MAP_READ | FILE_MAP_WRITE, 0, 0, 0);
if (!ipc_data)
throw FilterChainError("IPCSource: couldn't map shared memory");
if (ipc_data->magic != 'LSVA')
throw FilterChainError("IPCSource: bad magic");
if (ipc_data->ver != 1)
throw FilterChainError("IPCSource: wrong server version");
}
~IPCSource() {
CloseHandle(mutex);
CloseHandle(request);
CloseHandle(reply);
UnmapViewOfFile(ipc_data);
CloseHandle(filemap);
}
void GetFrame(int n, unsigned char* buf) {
if (ipc_data->magic == 'ENOG')
throw FilterChainError("IPCSource: frame server has terminated");
WaitForSingleObject(mutex, INFINITE);
ipc_data->requested_frame = n;
ipc_data->audio = false;
SetEvent(request);
WaitForSingleObject(reply, INFINITE);
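// The shared buffer appears to hold 32-bit RGB rows ('size' bytes per row);
// repack into the caller's 24-bit buffer by dropping every fourth byte.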
unsigned char* src = ipc_data->data;
unsigned char* dst = buf;
for (int z = ipc_data->size * ipc_data->vi.height / 4; z; --z) {
dst[0] = src[0];
dst[1] = src[1];
dst[2] = src[2];
dst += 3;
src += 4;
}
ReleaseMutex(mutex);
}
virtual void GetAudio(void* buf, int start, int count) {
if (ipc_data->magic == 'ENOG')
throw FilterChainError("IPCSource: frame server has terminated");
WaitForSingleObject(mutex, INFINITE);
memset(buf, 0, count);
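// The server delivers audio one video frame at a time: request every frame
// whose samples overlap [start, end) and copy the overlapping span into buf.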
const VideoInfo& vi = ipc_data->vi;
int end = start+vi.AudioSamplesFromBytes(count);
int first_frame = vi.FramesFromAudioSamples(start);
int last_frame = 1+vi.FramesFromAudioSamples(end);
for (int i = first_frame; i <= last_frame; ++i) {
int a = vi.AudioSamplesFromFrames(i);
int b = vi.AudioSamplesFromFrames(i+1);
if (a >= end || b <= start)
continue;
ipc_data->requested_frame = i;
ipc_data->audio = true;
SetEvent(request);
WaitForSingleObject(reply, INFINITE);
if (ipc_data->size != vi.BytesFromAudioSamples(b-a))
throw FilterChainError("IPCSource: audio misalignment");
int p = vi.BytesFromAudioSamples(max(a, start) - start);
int q = vi.BytesFromAudioSamples(max(a, start) - a);
int r = vi.BytesFromAudioSamples(min(b,end) - max(a,start));
memcpy((char*)buf + p, ipc_data->data + q, r);
}
ReleaseMutex(mutex);
}
virtual void GetVideoInfo(VideoInfo* pvi) { *pvi = ipc_data->vi; pvi->field_based=false; } // FIXME
virtual bool GetParity(int n) { return false; }
static PVideoFilter __cdecl Create(const FilterInfo*, const Arg* args, const char*) {
return new IPCSource(args[0].string);
}
};
/********************************************************************
********************************************************************/
#include <evcode.h>
#include <control.h>
#include <mmsystem.h>
#include <amstream.h>
#include <initguid.h>
#include <uuids.h>
#include <math.h>
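// DirectShowSource: reads media through the DirectShow multimedia streaming
// interfaces (IAMMultiMediaStream / IDirectDrawMediaStream).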
class DirectShowSource : public VideoFilterWithRefcount {
IDirectDraw* pDD;
IAMMultiMediaStream* pMMStream;
IAMMultiMediaStream* pAudMMStream;
IMediaStream* pVidStream;
IMediaStream* pAudStream;
IDirectDrawMediaStream* pDDMStream;
IDirectDrawStreamSample* pDDSample;