vidinput_v4l2.cxx
}
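// Select the analogue video standard (PAL, NTSC or SECAM). "Auto" is resolved by
// trying each standard in turn. The requested standard is matched against the
// driver's list via VIDIOC_ENUMSTD and then applied with VIDIOC_S_STD.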
BOOL PVideoInputDevice_V4L2::SetVideoFormat(VideoFormat newFormat)
{
  if (newFormat == Auto) {
    if (SetVideoFormat(PAL) ||
        SetVideoFormat(NTSC) ||
        SetVideoFormat(SECAM))
      return TRUE;
    else
      return FALSE;
  }

  if (!PVideoDevice::SetVideoFormat(newFormat)) {
    PTRACE(1,"PVideoDevice::SetVideoFormat failed for format " << newFormat);
    return FALSE;
  }

  struct {
    __u32 code;
    const char * name;
  } static const fmt[3] = { {V4L2_STD_PAL, "PAL"},
                            {V4L2_STD_NTSC, "NTSC"},
                            {V4L2_STD_SECAM, "SECAM"} };

  struct v4l2_standard videoEnumStd;
  memset(&videoEnumStd, 0, sizeof(struct v4l2_standard));
  videoEnumStd.index = 0;
  while (1) {
    if (::ioctl(videoFd, VIDIOC_ENUMSTD, &videoEnumStd) < 0) {
      PTRACE(1,"VideoInputDevice\tEnumStd failed : " << ::strerror(errno));
      videoEnumStd.id = V4L2_STD_PAL;
      break;
    }
    if (videoEnumStd.id == fmt[newFormat].code) {
      break;
    }
    videoEnumStd.index++;
  }

  // set the video standard
  if (::ioctl(videoFd, VIDIOC_S_STD, &videoEnumStd.id) < 0) {
    PTRACE(1,"VideoInputDevice\tS_STD failed : " << ::strerror(errno));
  }

  PTRACE(6,"PVidInDev\tset video format \"" << fmt[newFormat].name << "\", fd=" << videoFd);

  return TRUE;
}
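// Report how many inputs (channels) the device exposes by stepping through
// VIDIOC_ENUMINPUT until the driver rejects the index.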
int PVideoInputDevice_V4L2::GetNumChannels()
{
  // if opened, return the capability value, else 1 as in videoio.cxx
  if (IsOpen ()) {
    struct v4l2_input videoEnumInput;
    memset(&videoEnumInput, 0, sizeof(struct v4l2_input));
    videoEnumInput.index = 0;
    while (1) {
      // enumerating past the last input fails (normally with EINVAL), which ends the count
      if (::ioctl(videoFd, VIDIOC_ENUMINPUT, &videoEnumInput) < 0) {
        PTRACE(1,"VideoInputDevice\tEnumInput failed : " << ::strerror(errno));
        break;
      }
      else
        videoEnumInput.index++;
    }
    return videoEnumInput.index;
  }
  else
    return 1;
}
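// Select the capture input with VIDIOC_S_INPUT; the base class validates and
// stores channelNumber first.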
BOOL PVideoInputDevice_V4L2::SetChannel(int newChannel)
{
  if (!PVideoDevice::SetChannel(newChannel)) {
    PTRACE(1,"PVideoDevice::SetChannel failed for channel " << newChannel);
    return FALSE;
  }

  // set the channel
  if (::ioctl(videoFd, VIDIOC_S_INPUT, &channelNumber) < 0) {
    PTRACE(1,"VideoInputDevice\tS_INPUT failed : " << ::strerror(errno));
    return FALSE;
  }

  PTRACE(6,"PVidInDev\tset channel " << newChannel << ", fd=" << videoFd);

  return TRUE;
}
BOOL PVideoInputDevice_V4L2::SetVideoChannelFormat (int newChannel, VideoFormat videoFormat)
{
  if (!SetChannel(newChannel) ||
      !SetVideoFormat(videoFormat))
    return FALSE;

  return TRUE;
}
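// Negotiate the capture pixel format. The PTLib colour format name is mapped to a
// V4L2 fourcc via colourFormatTab, streaming is stopped and the buffers unmapped,
// the current frame interval is saved, the new format is applied with VIDIOC_S_FMT
// and read back to check the driver really accepted it, and finally the frame
// interval is restored since S_FMT may have changed it.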
BOOL PVideoInputDevice_V4L2::SetColourFormat(const PString & newFormat)
{
  PINDEX colourFormatIndex = 0;
  while (newFormat != colourFormatTab[colourFormatIndex].colourFormat) {
    colourFormatIndex++;
    if (colourFormatIndex >= PARRAYSIZE(colourFormatTab))
      return FALSE;
  }

  if (!PVideoDevice::SetColourFormat(newFormat)) {
    PTRACE(3,"PVidInDev\tSetColourFormat failed for colour format " << newFormat);
    return FALSE;
  }

  BOOL resume = started;
  Stop();
  ClearMapping();

  struct v4l2_format videoFormat;
  memset(&videoFormat, 0, sizeof(struct v4l2_format));
  videoFormat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  // get the frame rate so we can preserve it throughout the S_FMT call
  struct v4l2_streamparm streamParm;
  memset(&streamParm, 0, sizeof(struct v4l2_streamparm));
  unsigned int fi_n = 0, fi_d = 0;
  streamParm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (::ioctl(videoFd, VIDIOC_G_PARM, &streamParm) == 0 &&
      (streamParm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME)) {
    fi_n = streamParm.parm.capture.timeperframe.numerator;
    fi_d = streamParm.parm.capture.timeperframe.denominator;
  } else {
    PTRACE(1,"PVidInDev\tG_PARM failed (preserving frame rate may not work) : " << ::strerror(errno));
  }

  // get the colour format
  if (::ioctl(videoFd, VIDIOC_G_FMT, &videoFormat) < 0) {
    PTRACE(1,"PVidInDev\tG_FMT failed : " << ::strerror(errno));
    return FALSE;
  }

  videoFormat.fmt.pix.pixelformat = colourFormatTab[colourFormatIndex].code;

  // set the colour format
  if (::ioctl(videoFd, VIDIOC_S_FMT, &videoFormat) < 0) {
    PTRACE(1,"PVidInDev\tS_FMT failed : " << ::strerror(errno));
    PTRACE(1,"\tused code of " << videoFormat.fmt.pix.pixelformat << " for palette: " << colourFormatTab[colourFormatIndex].colourFormat);
    return FALSE;
  }

  // get the colour format again to be careful about broken drivers
  if (::ioctl(videoFd, VIDIOC_G_FMT, &videoFormat) < 0) {
    PTRACE(1,"PVidInDev\tG_FMT failed : " << ::strerror(errno));
    return FALSE;
  }

  if (videoFormat.fmt.pix.pixelformat != colourFormatTab[colourFormatIndex].code) {
    PTRACE(3,"PVidInDev\tcolour format mismatch.");
    return FALSE;
  }

  // reset the frame rate because it may have been overridden by the call to S_FMT
  if (fi_n == 0 || fi_d == 0 || ::ioctl(videoFd, VIDIOC_S_PARM, &streamParm) < 0) {
    PTRACE(3,"PVidInDev\tunable to reset frame rate.");
  } else if (streamParm.parm.capture.timeperframe.numerator != fi_n ||
             streamParm.parm.capture.timeperframe.denominator != fi_d) {
    PTRACE(3, "PVidInDev\tnew frame interval (" << streamParm.parm.capture.timeperframe.numerator
           << "/" << streamParm.parm.capture.timeperframe.denominator
           << ") differs from what was requested (" << fi_n << "/" << fi_d << ").");
  }

  frameBytes = videoFormat.fmt.pix.sizeimage;

  PTRACE(6,"PVidInDev\tset colour format \"" << newFormat << "\", fd=" << videoFd);

  if (resume)
    return Start();

  return TRUE;
}
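// Apply the requested frame rate by programming the capture frame interval
// (timeperframe = 1/rate) with VIDIOC_S_PARM, when the driver advertises that
// capability. Failures are ignored so capture can still proceed.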
BOOL PVideoInputDevice_V4L2::SetFrameRate(unsigned rate)
{
  if (!PVideoDevice::SetFrameRate(rate)) {
    PTRACE(3,"PVidInDev\tSetFrameRate failed for rate " << rate);
    return TRUE; // Ignore
  }

  if (canSetFrameRate) {
    videoStreamParm.parm.capture.timeperframe.numerator = 1;
    videoStreamParm.parm.capture.timeperframe.denominator = (rate ? rate : 1);

    // set the stream parameters
    if (::ioctl(videoFd, VIDIOC_S_PARM, &videoStreamParm) < 0) {
      PTRACE(1,"PVidInDev\tS_PARM failed : " << ::strerror(errno));
      return TRUE;
    }

    PTRACE(6,"PVidInDev\tset frame rate " << rate << "fps, fd=" << videoFd);
  }

  return TRUE;
}
BOOL PVideoInputDevice_V4L2::GetFrameSizeLimits(unsigned & minWidth,
                                                unsigned & minHeight,
                                                unsigned & maxWidth,
                                                unsigned & maxHeight)
{
  /* Not used in V4L2 */
  minWidth  = 0;
  maxWidth  = 65535;
  minHeight = 0;
  maxHeight = 65535;

  return FALSE;
}
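// Change the capture frame size. Streaming is stopped and the buffers unmapped
// before the size is validated against the hardware, then capture is resumed if
// it was running.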
BOOL PVideoInputDevice_V4L2::SetFrameSize(unsigned width, unsigned height)
{
  if (!PVideoDevice::SetFrameSize(width, height)) {
    PTRACE(3,"PVidInDev\tSetFrameSize failed for size " << width << "x" << height);
    return FALSE;
  }

  BOOL resume = started;
  Stop();
  ClearMapping();

  if (!VerifyHardwareFrameSize(width, height)) {
    PTRACE(3,"PVidInDev\tVerifyHardwareFrameSize failed for size " << width << "x" << height);
    return FALSE;
  }

  PTRACE(6,"PVidInDev\tset frame size " << width << "x" << height << ", fd=" << videoFd);

  if (resume)
    return Start();

  return TRUE;
}
PINDEX PVideoInputDevice_V4L2::GetMaxFrameBytes()
{
  return GetMaxFrameBytesConverted(frameBytes);
}
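// Set up memory-mapped streaming I/O: ask the driver for NUM_VIDBUF capture
// buffers with VIDIOC_REQBUFS, query each buffer's offset and length with
// VIDIOC_QUERYBUF, and mmap() it into the process so frames can be dequeued
// without copying through read().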
BOOL PVideoInputDevice_V4L2::SetMapping()
{
  if (!canStream)
    return FALSE;

  struct v4l2_requestbuffers reqbuf;
  memset(&reqbuf, 0, sizeof(struct v4l2_requestbuffers));
  reqbuf.count = NUM_VIDBUF;
  reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  reqbuf.memory = V4L2_MEMORY_MMAP;

  if (::ioctl(videoFd, VIDIOC_REQBUFS, &reqbuf) < 0) {
    PTRACE(3,"PVidInDev\tREQBUFS failed : " << ::strerror(errno));
    return FALSE;
  }

  if (reqbuf.count < 1) {
    PTRACE(3,"PVidInDev\tNot enough video buffers available. (got " << reqbuf.count << ")");
    return FALSE;
  }

  if (reqbuf.count > NUM_VIDBUF) {
    PTRACE(3,"PVidInDev\tToo many video buffers allocated. (got " << reqbuf.count << ")");
    return FALSE;
  }

  struct v4l2_buffer buf;
  memset(&buf, 0, sizeof(buf));
  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buf.memory = V4L2_MEMORY_MMAP;

  videoBufferCount = reqbuf.count;
  for (buf.index = 0; buf.index < videoBufferCount; buf.index++) {
    if (::ioctl(videoFd, VIDIOC_QUERYBUF, &buf) < 0) {
      PTRACE(3,"PVidInDev\tQUERYBUF failed : " << ::strerror(errno));
      return FALSE;
    }

    if ((videoBuffer[buf.index] = (BYTE *)::mmap(0, buf.length, PROT_READ|PROT_WRITE, MAP_SHARED, videoFd, buf.m.offset)) == MAP_FAILED) {
      PTRACE(3,"PVidInDev\tmmap failed : " << ::strerror(errno));
      return FALSE;
    }
  }

  isMapped = TRUE;

  PTRACE(7,"PVidInDev\tset mapping for " << videoBufferCount << " buffers, fd=" << videoFd);

  return TRUE;
}
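// Undo SetMapping(): walk the buffers the driver still knows about via
// VIDIOC_QUERYBUF and munmap() each one.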
void PVideoInputDevice_V4L2::ClearMapping()
{
  if (!canStream) // 'isMapped' wouldn't handle partial mappings
    return;

  struct v4l2_buffer buf;
  memset(&buf, 0, sizeof(struct v4l2_buffer));
  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buf.memory = V4L2_MEMORY_MMAP;

  for (buf.index = 0; ; buf.index++) {
    if (::ioctl(videoFd, VIDIOC_QUERYBUF, &buf) < 0)
      break;

    ::munmap(videoBuffer[buf.index], buf.length);
  }

  isMapped = FALSE;

  PTRACE(7,"PVidInDev\tclear mapping, fd=" << videoFd);
}
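// Fetch one frame, pacing delivery to the configured frame rate: keep grabbing
// (and discarding) frames until at least msBetweenFrames has elapsed since the
// previously delivered frame.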
BOOL PVideoInputDevice_V4L2::GetFrameData(BYTE * buffer, PINDEX * bytesReturned)
{
  PTRACE(1,"PVidInDev\tGetFrameData()");

  if (frameRate > 0) {
    PTimeInterval delay;

    do {
      if (!GetFrameDataNoDelay(buffer, bytesReturned))
        return FALSE;

      delay = PTime() - previousFrameTime;
    } while (delay.GetMilliSeconds() < msBetweenFrames);

    previousFrameTime = PTime();

    return TRUE;
  }

  return GetFrameDataNoDelay(buffer, bytesReturned);
}
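// Grab the next available frame without pacing. If streaming has not been
// started, fall back to read()-based capture via NormalReadProcess(); otherwise
// dequeue a filled buffer with VIDIOC_DQBUF and convert or copy it into the
// caller's buffer.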
BOOL PVideoInputDevice_V4L2::GetFrameDataNoDelay(BYTE * buffer, PINDEX * bytesReturned)
{
  PTRACE(1,"PVidInDev\tGetFrameDataNoDelay()\tstarted:" << started << " canSelect:" << canSelect);

  if (!started)
    return NormalReadProcess(buffer, bytesReturned);

  struct v4l2_buffer buf;
  memset(&buf, 0, sizeof(struct v4l2_buffer));
  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buf.memory = V4L2_MEMORY_MMAP;
  buf.index = currentvideoBuffer;

  if (::ioctl(videoFd, VIDIOC_DQBUF, &buf) < 0) {
    PTRACE(1,"PVidInDev\tDQBUF failed : " << ::strerror(errno));
    return FALSE;
  }

  currentvideoBuffer = (currentvideoBuffer + 1) % NUM_VIDBUF;

  // If converting on the fly do it from frame store to output buffer,
  // otherwise do straight copy.
  if (converter != NULL)
    converter->Convert(videoBuffer[buf.index], buffer, buf.bytesused, bytesReturned);
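  // The listing breaks off here. A minimal sketch of how such a function would
  // typically finish, assuming the straight-copy branch and the usual V4L2
  // requeue step (not the verbatim original source):
  else {
    memcpy(buffer, videoBuffer[buf.index], buf.bytesused);
    if (bytesReturned != NULL)
      *bytesReturned = buf.bytesused;
  }

  // hand the buffer back to the driver so it can be filled again
  if (::ioctl(videoFd, VIDIOC_QBUF, &buf) < 0) {
    PTRACE(1,"PVidInDev\tQBUF failed : " << ::strerror(errno));
  }

  return TRUE;
}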