misc.cpp
// Avisynth v0.3. Copyright 2000 Ben Rudiak-Gould. For distribution
// conditions, please see http://www.math.berkeley.edu/~benrg/avisynth.html .
#include "avisynth.h"
#include "internal-filters.h"
_PixelClip PixelClip;
/********************************************************************
********************************************************************/
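// convert one Y,U,V triple to packed BGR.  Rec.601 coefficients, TV-range
// (16-235) luma, computed in 16.16 fixed point (constants scaled by 65536)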
static inline void YUV2RGB(int y, int u, int v, unsigned char* out) {
const int crv = int(1.596*65536+0.5);
const int cgv = int(0.813*65536+0.5);
const int cgu = int(0.391*65536+0.5);
const int cbu = int(2.018*65536+0.5);
int scaled_y = (y - 16) * int((255.0/219.0)*65536+0.5);
out[0] = ScaledPixelClip(scaled_y + (u-128) * cbu); // blue
out[1] = ScaledPixelClip(scaled_y - (u-128) * cgu - (v-128) * cgv); // green
out[2] = ScaledPixelClip(scaled_y + (v-128) * crv); // red
}
// not used here, but useful to other filters
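// takes a packed 0x00RRGGBB pixel and returns packed 0x00YYUUVV, using
// Rec.601 coefficients and TV-range (16-235) luma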
int RGB2YUV(int rgb) {
const int cyb = int(0.114*219/255*65536+0.5);
const int cyg = int(0.587*219/255*65536+0.5);
const int cyr = int(0.299*219/255*65536+0.5);
// y can't overflow
int y = (cyb*(rgb&255) + cyg*((rgb>>8)&255) + cyr*((rgb>>16)&255) + 0x108000) >> 16;
int scaled_y = (y - 16) * int(255.0/219.0*65536+0.5);
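// u = (B - Y')/2.018 + 128 and v = (R - Y')/1.596 + 128, where Y' is the
// full-range luma.  b_y and r_y are 16.16 fixed point; shifting down 10 bits
// before multiplying by a 10-bit reciprocal keeps the product within 32 bits,
// and 0x800000 is the +128 chroma bias (128<<16)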
int b_y = ((rgb&255) << 16) - scaled_y;
int u = ScaledPixelClip((b_y >> 10) * int(1/2.018*1024+0.5) + 0x800000);
int r_y = (rgb & 0xFF0000) - scaled_y;
int v = ScaledPixelClip((r_y >> 10) * int(1/1.596*1024+0.5) + 0x800000);
return (y*256+u)*256+v;
}
/********************************************************************
********************************************************************/
class ConvertToRGB : public GenericVideoFilter {
unsigned char* mybuffer;
public:
ConvertToRGB(PVideoFilter _child) : GenericVideoFilter(_child), mybuffer(0) {}
void GetFrame(int n, unsigned char* buf) {
if (!vi.IsYUY2()) {
child->GetFrame(n, buf);
return;
}
if (!mybuffer)
mybuffer = new unsigned char[vi.ImageSize()];
child->GetFrame(n, mybuffer);
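// RGB frames are stored upside-down relative to YUY2, so source and
// destination rows are traversed in opposite directions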
for (int y=0; y<vi.height; ++y) {
unsigned char* src = &mybuffer[(vi.height-y-1)*vi.width*2];
unsigned char* dst = &buf[y*vi.width*3];
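// each 4-byte YUY2 group is Y0 U Y1 V: two pixels sharing one chroma pair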
for (int x=0; x<vi.width; x+=2) {
YUV2RGB(src[x*2+0], src[x*2+1], src[x*2+3], &dst[x*3]);
YUV2RGB(src[x*2+2], src[x*2+1], src[x*2+3], &dst[x*3+3]);
}
}
}
virtual void GetVideoInfo(VideoInfo* pvi) {
*pvi = vi;
if (pvi->IsYUY2())
pvi->pixel_type = VideoInfo::BGR24;
}
virtual ~ConvertToRGB() { if (mybuffer) delete[] mybuffer; }
static PVideoFilter __cdecl Create(const FilterInfo*, const Arg* args, const char*) {
return new ConvertToRGB(args[0].clip);
}
};
class ConvertToYUY2 : public GenericVideoFilter {
unsigned char* mybuffer;
public:
ConvertToYUY2(PVideoFilter _child) : GenericVideoFilter(_child), mybuffer(0) {
if (vi.width & 1)
throw FilterChainError("ConvertToYUY2: image width must be even");
}
void GetFrame(int n, unsigned char* buf) {
if (!vi.IsRGB()) {
child->GetFrame(n, buf);
return;
}
if (!mybuffer)
mybuffer = new unsigned char[vi.ImageSize()];
child->GetFrame(n, mybuffer);
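// RGB frames are stored upside-down relative to YUY2, so the source rows are
// read in reverse; each inner iteration packs two RGB pixels into one
// Y0 U Y1 V group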
unsigned char* yuv = buf;
for (int y=0; y<vi.height; ++y) {
unsigned char* rgb = &mybuffer[(vi.height-y-1)*vi.width*3];
for (int x = vi.width>>1; x; --x) {
const int cyb = int(0.114*219/255*65536+0.5);
const int cyg = int(0.587*219/255*65536+0.5);
const int cyr = int(0.299*219/255*65536+0.5);
// y1 and y2 can't overflow
int y1 = (cyb*rgb[0] + cyg*rgb[1] + cyr*rgb[2] + 0x108000) >> 16;
yuv[0] = y1;
int y2 = (cyb*rgb[3] + cyg*rgb[4] + cyr*rgb[5] + 0x108000) >> 16;
yuv[2] = y2;
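// shared chroma comes from the sum of the two pixels; scaling by 32768
// instead of 65536 folds the divide-by-two into the fixed-point factor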
int scaled_y = (y1+y2 - 32) * int(255.0/219.0*32768+0.5);
int b_y = ((rgb[0]+rgb[3]) << 15) - scaled_y;
yuv[1] = ScaledPixelClip((b_y >> 10) * int(1/2.018*1024+0.5) + 0x800000); // u
int r_y = ((rgb[2]+rgb[5]) << 15) - scaled_y;
yuv[3] = ScaledPixelClip((r_y >> 10) * int(1/1.596*1024+0.5) + 0x800000); // v
yuv += 4;
rgb += 6;
}
}
}
virtual void GetVideoInfo(VideoInfo* pvi) {
*pvi = vi;
if (pvi->IsRGB())
pvi->pixel_type = VideoInfo::YUY2;
}
virtual ~ConvertToYUY2() { if (mybuffer) delete[] mybuffer; }
static PVideoFilter __cdecl Create(const FilterInfo*, const Arg* args, const char*) {
return new ConvertToYUY2(args[0].clip);
}
};
/********************************************************************
********************************************************************/
class Subtract : public VideoFilterWithRefcount {
const PVideoFilter child1, child2;
VideoInfo vi1, vi2;
unsigned char* mybuffer;
public:
Subtract(PVideoFilter _child1, PVideoFilter _child2)
: child1(_child1), child2(_child2), mybuffer(0)
{
child1->GetVideoInfo(&vi1);
child2->GetVideoInfo(&vi2);
if (vi1.width != vi2.width || vi1.height != vi2.height)
throw FilterChainError("Subtract: image dimensions don't match");
if (vi1.pixel_type != vi2.pixel_type)
throw FilterChainError("Subtract: image formats don't match");
}
void GetFrame(int n, unsigned char* buf) {
if (!mybuffer)
mybuffer = new unsigned char[vi1.ImageSize()];
child1->GetFrame(n, buf);
child2->GetFrame(n, mybuffer);
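// per-byte difference biased by +128, so identical frames come out as a flat
// mid-grey; note the arithmetic wraps modulo 256 rather than clamping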
for (int i=0; i<vi1.ImageSize(); ++i)
buf[i] = buf[i] - mybuffer[i] + 128;
}
virtual void GetAudio(void* buf, int start, int count) {
child1->GetAudio(buf, start, count);
}
virtual void GetVideoInfo(VideoInfo* pvi) {
*pvi = vi1;
pvi->num_frames = max(pvi->num_frames, vi2.num_frames);
pvi->num_audio_samples = max(pvi->num_audio_samples, vi2.num_audio_samples);
}
virtual bool GetParity(int n) { return child1->GetParity(n); }
virtual ~Subtract() {
if (mybuffer) delete[] mybuffer;
}
static PVideoFilter __cdecl Create(const FilterInfo* self, const Arg* args, const char* arg_types) {
return new Subtract(args[0].clip, args[1].clip);
}
};
/********************************************************************
********************************************************************/
class StackVertical : public VideoFilterWithRefcount {
/*const*/ PVideoFilter child1, child2;
VideoInfo vi1, vi2;
public:
StackVertical(PVideoFilter _child1, PVideoFilter _child2) {
// swap the order of the parameters in RGB mode because it's upside-down
_child1->GetVideoInfo(&vi1);
if (vi1.IsYUY2()) {
child1 = _child1; child2 = _child2;
child2->GetVideoInfo(&vi2);
} else {
child1 = _child2; child2 = _child1;
vi2 = vi1; child1->GetVideoInfo(&vi1);
}
if (vi1.width != vi2.width)
throw FilterChainError("StackVertical: image widths don't match");
if (vi1.pixel_type != vi2.pixel_type)
throw FilterChainError("StackVertical: image formats don't match");
}
void GetFrame(int n, unsigned char* buf) {
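// the two frames sit back to back in the output buffer: child1's frame
// first, then child2's.  The constructor already swapped the children for
// RGB, where bottom-up storage puts the frame written second on top.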
child1->GetFrame(n, buf);
child2->GetFrame(n, buf+vi1.ImageSize());
}
void GetAudio(void* buf, int start, int count) { child1->GetAudio(buf, start, count); }
void GetVideoInfo(VideoInfo* pvi) {
*pvi = vi1;
pvi->height += vi2.height;
pvi->num_frames = max(pvi->num_frames, vi2.num_frames);
pvi->num_audio_samples = max(pvi->num_audio_samples, vi2.num_audio_samples);
}
bool GetParity(int n) { return child1->GetParity(n); }
static PVideoFilter __cdecl Create(const FilterInfo* self, const Arg* args, const char* arg_types) {
if (arg_types[1])
return new StackVertical(args[0].clip, Create(self, args+1, arg_types+1));
else
return args[0].clip;
}
};
/********************************************************************
********************************************************************/