// videoio.cxx

BOOL PVideoDevice::SetVideoFormat(VideoFormat videoFmt)
{
  videoFormat = videoFmt;
  return TRUE;
}

PVideoDevice::VideoFormat PVideoDevice::GetVideoFormat() const
{
  return videoFormat;
}

int PVideoDevice::GetNumChannels()
{
  return 1;
}

BOOL PVideoDevice::SetChannel(int channelNum)
{
  if (channelNum < 0) { // Seek out the first available channel
    for (int c = 0; c < GetNumChannels(); c++) {
      if (SetChannel(c))
        return TRUE;
    }
    return FALSE;
  }

  if (channelNum >= GetNumChannels())
    return FALSE;

  channelNumber = channelNum;
  return TRUE;
}

int PVideoDevice::GetChannel() const
{
  return channelNumber;
}

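// Illustrative sketch, not part of the original file: how the channel API above is
// typically driven by application code.  "grabber" is a hypothetical, already-opened
// capture device; the base class reports a single channel, real drivers override
// GetNumChannels().
//
//   PVideoDevice * grabber = /* opened elsewhere */;
//   if (!grabber->SetChannel(-1))        // negative: probe for the first channel
//     PTRACE(2, "No usable channel");    // that the device accepts
//   int active = grabber->GetChannel();  // 0..GetNumChannels()-1 once set
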
// Colour format bits-per-pixel table.
// These are in rough order of colour gamut size.
static struct {
  const char * colourFormat;
  unsigned     bitsPerPixel;
} colourFormatBPPTab[] = {
  { "RGB32",   32 },
  { "BGR32",   32 },
  { "RGB24",   24 },
  { "BGR24",   24 },
  { "MJPEG",   16 },
  { "JPEG",    16 },
  { "YUY2",    16 },
  { "YUV422",  16 },
  { "YUV422P", 16 },
  { "YUV411",  12 },
  { "YUV411P", 12 },
  { "RGB565",  16 },
  { "RGB555",  16 },
  { "YUV420",  12 },
  { "YUV420P", 12 },
  { "IYUV",    12 },
  { "I420",    12 },
  { "YUV410",  10 },
  { "YUV410P", 10 },
  { "Grey",     8 },
  { "GreyF",    8 },
  { "UYVY422", 16 },
  { "UYV444",  24 },
  { "SBGGR8",   8 }
};

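// Illustrative note, not part of the original file: this table serves two purposes.
// SetColourFormatConverter() below walks it top to bottom when the requested format
// is not natively available, so the ordering doubles as a preference list, and
// CalculateFrameBytes() at the end of the file uses the bitsPerPixel column to size
// uncompressed frame buffers.  For example, a camera that offers none of the RGB
// formats but accepts "YUY2" will be set to YUY2 and a YUY2-to-requested-format
// converter will be created.
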
BOOL PVideoDevice::SetColourFormatConverter(const PString & colourFmt)
{
  if (converter != NULL) {
    if (CanCaptureVideo()) {
      if (converter->GetDstColourFormat() == colourFmt)
        return TRUE;
    }
    else {
      if (converter->GetSrcColourFormat() == colourFmt)
        return TRUE;
    }
    delete converter;
    converter = NULL;
  }

  if (!preferredColourFormat.IsEmpty()) {
    PTRACE(4, "PVidDev\tSetColourFormatConverter, want " << colourFmt << " trying " << preferredColourFormat);
    if (SetColourFormat(preferredColourFormat)) {
      if (CanCaptureVideo()) {
        PTRACE(4, "PVidDev\tSetColourFormatConverter set camera to native " << preferredColourFormat);
        if (preferredColourFormat != colourFmt)
          converter = PColourConverter::Create(preferredColourFormat, colourFmt, frameWidth, frameHeight);
      }
      else {
        PTRACE(4, "PVidDev\tSetColourFormatConverter set renderer to " << preferredColourFormat);
        if (preferredColourFormat != colourFmt)
          converter = PColourConverter::Create(colourFmt, preferredColourFormat, frameWidth, frameHeight);
      }

      if (nativeVerticalFlip && converter == NULL)
        converter = PColourConverter::Create(colourFmt, colourFmt, frameWidth, frameHeight);

      if (converter != NULL) {
        converter->SetVFlipState(nativeVerticalFlip);
        return TRUE;
      }
    }
  }

  if (SetColourFormat(colourFmt)) {
    if (nativeVerticalFlip) {
      converter = PColourConverter::Create(colourFmt, colourFmt, frameWidth, frameHeight);
      if (PAssertNULL(converter) == NULL)
        return FALSE;
      converter->SetVFlipState(nativeVerticalFlip);
    }
    PTRACE(3, "PVidDev\tSetColourFormatConverter success for native " << colourFmt);
    return TRUE;
  }

  /************************
    Eventually we need something more sophisticated than this, but for the
    moment pick the known colour formats that the device is very likely to
    support and then look for a conversion routine from that to the
    destination format.

    What we really want is a better heuristic that looks at the computational
    requirements of each converter and picks a pair of formats that the
    hardware supports and that uses the least CPU.
  */

  PINDEX knownFormatIdx = 0;
  while (knownFormatIdx < PARRAYSIZE(colourFormatBPPTab)) {
    PString formatToTry = colourFormatBPPTab[knownFormatIdx].colourFormat;
    PTRACE(4, "PVidDev\tSetColourFormatConverter, want " << colourFmt << " trying " << formatToTry);
    if (SetColourFormat(formatToTry)) {
      if (CanCaptureVideo()) {
        PTRACE(4, "PVidDev\tSetColourFormatConverter set camera to " << formatToTry);
        converter = PColourConverter::Create(formatToTry, colourFmt, frameWidth, frameHeight);
      }
      else {
        PTRACE(4, "PVidDev\tSetColourFormatConverter set renderer to " << formatToTry);
        converter = PColourConverter::Create(colourFmt, formatToTry, frameWidth, frameHeight);
      }
      if (converter != NULL) {
        // Set converter properties that depend on this colour format.
        PTRACE(3, "PVidDev\tSetColourFormatConverter succeeded for " << colourFmt << " and device using " << formatToTry);
        converter->SetVFlipState(nativeVerticalFlip);
        return TRUE;
      }
    }
    knownFormatIdx++;
  }

  PTRACE(2, "PVidDev\tSetColourFormatConverter FAILED for " << colourFmt);
  return FALSE;
}

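// Illustrative sketch, not part of the original file: typical use of the converter
// logic above from application code.  The device variable and the choice of
// "YUV420P" are assumptions for the example only; any format known to
// colourFormatBPPTab could be requested.
//
//   PVideoInputDevice * grabber = /* opened elsewhere */;
//   if (!grabber->SetColourFormatConverter("YUV420P")) {
//     // Neither the device's native/preferred format nor any entry of
//     // colourFormatBPPTab could be converted to YUV420P.
//   }
//   // On success GetColourFormat() reports what the hardware was actually set to,
//   // while frames delivered to the caller are already converted to YUV420P.
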
BOOL PVideoDevice::SetColourFormat(const PString & colourFmt)
{
  if (!colourFmt.IsEmpty()) {
    // A specific format was requested: record it (derived classes override this
    // to actually program the hardware).
    colourFormat = colourFmt.ToUpper();
    return TRUE;
  }

  // An empty format means "anything": try each known format until one is accepted.
  for (PINDEX i = 0; i < PARRAYSIZE(colourFormatBPPTab); i++) {
    if (SetColourFormat(colourFormatBPPTab[i].colourFormat))
      return TRUE;
  }

  return FALSE;
}

const PString & PVideoDevice::GetColourFormat() const
{
  return colourFormat;
}

BOOL PVideoDevice::SetFrameRate(unsigned rate)
{
  if (rate < 1) {
    frameRate = 0;
    return TRUE;
  }

  frameRate = rate;
  previousFrameTime = PTime();
  msBetweenFrames = 1000/rate;
  frameTimeError = 0;

  return TRUE;
}

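// Illustrative note, not part of the original file: the pacing state set above is
// plain integer arithmetic, for example:
//
//   SetFrameRate(25)  ->  msBetweenFrames = 1000 / 25 = 40 ms
//   SetFrameRate(30)  ->  msBetweenFrames = 1000 / 30 = 33 ms (rounded down)
//
// A rate of 0 leaves frameRate at 0, i.e. no software pacing.  frameTimeError is
// only reset here; presumably the grab loops of derived classes accumulate it to
// compensate for the rounding in msBetweenFrames.
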
BOOL PVideoDevice::GetVFlipState()
{
  if (converter != NULL)
    return converter->GetVFlipState() ^ nativeVerticalFlip;

  return nativeVerticalFlip;
}

BOOL PVideoDevice::SetVFlipState(BOOL newVFlip)
{
  if (newVFlip && converter == NULL)
    converter = PColourConverter::Create(colourFormat, colourFormat, frameWidth, frameHeight);

  if (converter != NULL)
    converter->SetVFlipState(newVFlip ^ nativeVerticalFlip);

  return TRUE;
}

unsigned PVideoDevice::GetFrameRate() const
{
  return frameRate;
}

BOOL PVideoDevice::GetFrameSizeLimits(unsigned & minWidth,
                                      unsigned & minHeight,
                                      unsigned & maxWidth,
                                      unsigned & maxHeight)
{
  // Default implementation: no real limits are known, so return FALSE; derived
  // classes return TRUE with the hardware's actual range.
  minWidth = minHeight = 1;
  maxWidth = maxHeight = UINT_MAX;
  return FALSE;
}

static struct {
  unsigned asked_width, asked_height, device_width, device_height;
} framesizeTab[] = {
  { 704, 576,  640, 480 },
  { 640, 480,  704, 576 },
  { 640, 480,  352, 288 },
  { 352, 288,  704, 576 },
  { 352, 288,  640, 480 },
  { 352, 288,  352, 240 },
  { 352, 288,  320, 240 },
  { 352, 288,  176, 144 },
  { 352, 240,  352, 288 },
  { 352, 240,  320, 240 },
  { 352, 288, 1024, 576 }, /* High resolutions need to be listed last */
  { 352, 288, 1280, 960 },
  { 320, 240,  352, 288 },
  { 320, 240,  352, 240 },
  { 176, 144,  352, 288 },
  { 176, 144,  352, 240 },
  { 176, 144,  320, 240 },
  { 176, 144,  176, 120 },
  { 176, 144,  160, 120 },
  { 176, 120,  352, 288 },
  { 176, 120,  352, 240 },
  { 176, 120,  320, 240 },
  { 176, 120,  176, 144 },
  { 176, 120,  160, 120 },
  { 176, 144, 1024, 576 },
  { 176, 144, 1280, 960 }, /* High resolutions need to be listed last */
  { 160, 120,  352, 288 },
  { 160, 120,  352, 240 },
  { 160, 120,  320, 240 },
  { 160, 120,  176, 144 },
  { 160, 120,  176, 120 },
};

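// Illustrative note, not part of the original file: SetFrameSizeConverter() below
// scans this table in order, so for a CIF request on hardware that only captures
// VGA the sequence is roughly:
//
//   SetFrameSizeConverter(352, 288, TRUE)
//     -> SetFrameSize(352, 288) is rejected by the device
//     -> { 352, 288, 704, 576 } is tried and rejected, then
//        { 352, 288, 640, 480 } succeeds, so the device captures 640x480
//     -> the colour converter is told to scale (or crop) 640x480 down to 352x288
//
// The high-resolution fallbacks are deliberately listed last so that cheaper
// capture sizes are preferred when several entries match.
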
BOOL PVideoDevice::SetFrameSizeConverter(unsigned width, unsigned height,
                                         BOOL bScaleNotCrop)
{
  if (SetFrameSize(width, height)) {
    if (nativeVerticalFlip && converter == NULL) {
      converter = PColourConverter::Create(colourFormat, colourFormat, frameWidth, frameHeight);
      if (PAssertNULL(converter) == NULL)
        return FALSE;
    }
    if (converter != NULL) {
      converter->SetFrameSize(frameWidth, frameHeight);
      converter->SetVFlipState(nativeVerticalFlip);
    }
    return TRUE;
  }

  // Could not set the requested size directly, so pick a size the device can do
  // and convert to the size the caller asked for.
  if (converter == NULL) {
    converter = PColourConverter::Create(colourFormat, colourFormat, frameWidth, frameHeight);
    if (converter == NULL) {
      PTRACE(1, "PVidDev\tSetFrameSizeConverter Colour converter creation failed");
      return FALSE;
    }
  }

  PTRACE(3, "PVidDev\tColour converter used for " << width << 'x' << height);

  unsigned minWidth, minHeight, maxWidth, maxHeight;
  BOOL limits = GetFrameSizeLimits(minWidth, minHeight, maxWidth, maxHeight);

  for (PINDEX i = 0; i < PARRAYSIZE(framesizeTab); i++) {
    if (framesizeTab[i].asked_width == width &&
        framesizeTab[i].asked_height == height &&
        (!limits ||
         (framesizeTab[i].device_width >= minWidth &&
          framesizeTab[i].device_width <= maxWidth &&
          framesizeTab[i].device_height >= minHeight &&
          framesizeTab[i].device_height <= maxHeight)) &&
        SetFrameSize(framesizeTab[i].device_width,
                     framesizeTab[i].device_height)) {
      if (CanCaptureVideo() ?
            converter->SetDstFrameSize(width, height, bScaleNotCrop)
          :
            converter->SetSrcFrameSize(width, height) &&
            converter->SetDstFrameSize(framesizeTab[i].device_width,
                                       framesizeTab[i].device_height,
                                       bScaleNotCrop)) {
        PTRACE(4, "PVideoDevice\tSetFrameSizeConverter succeeded for converting from "
                  << framesizeTab[i].device_width << 'x' << framesizeTab[i].device_height
                  << " to " << width << 'x' << height);
        converter->SetVFlipState(converter->GetVFlipState() ^ nativeVerticalFlip);
        return TRUE;
      }
    }
  }

  if (CanCaptureVideo()) {
    // Failed to find a resolution the device can do so far, so try
    // using the maximum width and height it claims it can do.
    if (limits &&
        SetFrameSize(maxWidth, maxHeight)) {
      if (converter->SetDstFrameSize(width, height, bScaleNotCrop)) {
        PTRACE(4, "PVideoDevice\tSetFrameSizeConverter succeeded for converting from "
                  << maxWidth << 'x' << maxHeight
                  << " to " << width << 'x' << height);
        converter->SetVFlipState(converter->GetVFlipState() ^ nativeVerticalFlip);
        return TRUE;
      }
    }
  }

  PTRACE(2, "PVideoDevice\tSetFrameSizeConverter failed for " << width << 'x' << height);
  return FALSE;
}

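// Illustrative note, not part of the original file: for an output (rendering)
// device CanCaptureVideo() is FALSE, so the conversion above runs the other way
// round: the application's frame size becomes the converter source and the table's
// device size the converter destination.  A sketch with assumed sizes, where
// "renderer" is a hypothetical output device:
//
//   renderer->SetFrameSizeConverter(176, 144, TRUE);
//     // If the renderer only accepts 352x288, the converter ends up configured as
//     // SetSrcFrameSize(176, 144) followed by SetDstFrameSize(352, 288, TRUE).
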
BOOL PVideoDevice::SetFrameSize(unsigned width, unsigned height)
{
#if PTRACING
  unsigned oldWidth = frameWidth;
  unsigned oldHeight = frameHeight;
#endif

  unsigned minWidth, minHeight, maxWidth, maxHeight;
  GetFrameSizeLimits(minWidth, minHeight, maxWidth, maxHeight);

  if (width < minWidth)
    frameWidth = minWidth;
  else if (width > maxWidth)
    frameWidth = maxWidth;
  else
    frameWidth = width;

  if (height < minHeight)
    frameHeight = minHeight;
  else if (height > maxHeight)
    frameHeight = maxHeight;
  else
    frameHeight = height;

  if (converter != NULL) {
    if (!converter->SetSrcFrameSize(width, height) ||
        !converter->SetDstFrameSize(width, height, FALSE)) {
      PTRACE(1, "PVidDev\tSetFrameSize with converter failed with " << width << 'x' << height);
      return FALSE;
    }
  }

  PTRACE_IF(2, oldWidth != frameWidth || oldHeight != frameHeight,
            "PVidDev\tSetFrameSize to " << frameWidth << 'x' << frameHeight);
  return TRUE;
}

BOOL PVideoDevice::GetFrameSize(unsigned & width, unsigned & height)
{
#if 1
  // Channels get very upset at this not returning the output size.
  if (converter)
    return converter->GetDstFrameSize(width, height);
#endif

  width = frameWidth;
  height = frameHeight;
  return TRUE;
}

unsigned PVideoDevice::GetFrameWidth() const
{
#if 1
  unsigned w, h;
  // Channels get very upset at this not returning the output size.
  if (converter) {
    converter->GetDstFrameSize(w, h);
    return w;
  }
#endif

  return frameWidth;
}

unsigned PVideoDevice::GetFrameHeight() const
{
#if 1
  unsigned w, h;
  // Channels get very upset at this not returning the output size.
  if (converter) {
    converter->GetDstFrameSize(w, h);
    return h;
  }
#endif

  return frameHeight;
}

unsigned PVideoDevice::CalculateFrameBytes(unsigned width, unsigned height,
                                           const PString & colourFormat)
{
  for (PINDEX i = 0; i < PARRAYSIZE(colourFormatBPPTab); i++) {
    // Note: PString::operator*= is PTLib's case-insensitive string comparison,
    // not an arithmetic assignment.
    if (colourFormat *= colourFormatBPPTab[i].colourFormat)
      return width * height * colourFormatBPPTab[i].bitsPerPixel/8;
  }
  return 0;
}
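
// Illustrative note, not part of the original file: worked examples of the
// calculation above using entries from colourFormatBPPTab:
//
//   CalculateFrameBytes(176, 144, "YUV420P") = 176 * 144 * 12 / 8 =  38016 bytes
//   CalculateFrameBytes(640, 480, "RGB24")   = 640 * 480 * 24 / 8 = 921600 bytes
//
// An unrecognised colour format returns 0, which callers should treat as
// "size unknown" rather than as a valid buffer length.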