📄 encode.c
ERR("VIDDEC_process() failed, status=%ld\n", status);
return FAILURE;
}
return SUCCESS;
}
/******************************************************************************
* flipDisplayBuffers
******************************************************************************/
static int flipDisplayBuffers(int fd, int displayIdx)
{
struct fb_var_screeninfo vInfo;
if (ioctl(fd, FBIOGET_VSCREENINFO, &vInfo) == -1) {
ERR("Failed FBIOGET_VSCREENINFO (%s)\n", strerror(errno));
return FAILURE;
}
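/* Each display page is yres lines tall in the virtual frame buffer, so
panning to yres * displayIdx makes page displayIdx the visible one */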
vInfo.yoffset = vInfo.yres * displayIdx;
/* Swap the working buffer for the displayed buffer */
if (ioctl(fd, FBIOPAN_DISPLAY, &vInfo) == -1) {
ERR("Failed FBIOPAN_DISPLAY (%s)\n", strerror(errno));
return FAILURE;
}
return SUCCESS;
}
/******************************************************************************
* initCaptureDevice
******************************************************************************/
static int initCaptureDevice(enum Resolution resolution,
VideoBuffer **vidBufsPtr, int *numVidBufsPtr,
int *captureWidth, int *captureHeight)
{
struct v4l2_requestbuffers req;
struct v4l2_capability cap;
struct v4l2_cropcap cropCap;
struct v4l2_crop crop;
struct v4l2_format fmt;
struct v4l2_buffer buf;
v4l2_std_id std;
enum v4l2_buf_type type;
int fd;
int ret;
VideoBuffer *buffers;
int numBufs;
fd = open(V4L2_DEVICE, O_RDWR | O_NONBLOCK, 0);
if (fd == -1) {
ERR("Cannot open %s (%s)\n", V4L2_DEVICE, strerror(errno));
return FAILURE;
}
/* Query for capture device capabilities */
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {
if (errno == EINVAL) {
ERR("%s is no V4L2 device\n", V4L2_DEVICE);
return FAILURE;
}
ERR("Failed VIDIOC_QUERYCAP on %s (%s)\n", V4L2_DEVICE,
strerror(errno));
return FAILURE;
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
ERR("%s is no video capture device\n", V4L2_DEVICE);
return FAILURE;
}
if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
ERR("%s does not support streaming i/o\n", V4L2_DEVICE);
return FAILURE;
}
/* Select video input, video standard and tune here. */
cropCap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_CROPCAP, &cropCap) == -1) {
ERR("VIDIOC_CROPCAP failed %d, %s\n", errno, strerror(errno));
return FAILURE;
}
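/* Reset cropping to the driver's default rectangle, which covers the full
active picture area reported by VIDIOC_CROPCAP */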
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropCap.defrect;
if (ioctl(fd, VIDIOC_S_CROP, &crop) == -1) {
ERR("VIDIOC_S_CROP failed %d, %s\n", errno, strerror(errno));
return FAILURE;
}
/* Auto detect PAL or NTSC using the capture driver as sanity check */
std = VPFE_STD_AUTO;
if (ioctl(fd, VIDIOC_S_STD, &std) == -1) {
ERR("VIDIOC_S_STD (auto) failed on %s (%s)\n", V4L2_DEVICE,
strerror(errno));
return FAILURE;
}
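/* The capture device was opened with O_NONBLOCK, and VIDIOC_QUERYSTD can
return EAGAIN while the decoder is still locking onto the input signal, so
retry until a standard is reported or a real error occurs */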
do {
ret = ioctl(fd, VIDIOC_QUERYSTD, &std);
} while (ret == -1 && errno == EAGAIN);
if (ret == -1) {
ERR("VIDIOC_QUERYSTD failed on %s (%s)\n", V4L2_DEVICE,
strerror(errno));
return FAILURE;
}
switch (std) {
case V4L2_STD_NTSC:
DBG("NTSC detected!\n");
if (getYFactor() == PAL) {
ERR("NTSC camera connected but PAL selected.\n");
return FAILURE;
}
break;
case V4L2_STD_PAL:
DBG("PAL detected!\n");
if (getYFactor() == NTSC) {
ERR("PAL camera connected but NTSC selected.\n");
return FAILURE;
}
break;
default:
ERR("Camera connected using unsupported video standard.\n");
return FAILURE;
}
/* Use either NTSC or PAL depending on display kernel parameter */
std = getYFactor() == NTSC ? V4L2_STD_NTSC : V4L2_STD_PAL;
if (ioctl(fd, VIDIOC_S_STD, &std) == -1) {
ERR("VIDIOC_S_STD failed on %s (%s)\n", V4L2_DEVICE,
strerror(errno));
return FAILURE;
}
/* Set the video capture image format */
CLEAR(fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (resolution == D1) {
*captureHeight = 480; /* Should be D1_HEIGHT with D1 PAL support */
*captureWidth = D1_WIDTH;
}
else {
*captureHeight = CIF_HEIGHT;
*captureWidth = CIF_WIDTH;
}
fmt.fmt.pix.width = *captureWidth;
fmt.fmt.pix.height = *captureHeight;
/* Set the video capture format */
if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
ERR("VIDIOC_S_FMT failed on %s (%s)\n", V4L2_DEVICE,
strerror(errno));
return FAILURE;
}
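/* The driver may have adjusted the requested width and height to values it
supports, so report the dimensions actually returned by VIDIOC_S_FMT */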
printf("Capturing %dx%d video\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
CLEAR(req);
req.count = NUM_BUFS;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
/* Allocate buffers inside the capture device driver */
if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) {
ERR("VIDIOC_REQBUFS failed on %s (%s)\n", V4L2_DEVICE,
strerror(errno));
return FAILURE;
}
DBG("%d capture buffers were successfully allocated.\n", req.count);
if (req.count < NUM_BUFS) {
ERR("Insufficient buffer memory on %s\n", V4L2_DEVICE);
return FAILURE;
}
buffers = calloc(req.count, sizeof(*buffers));
if (!buffers) {
ERR("Failed to allocate memory for capture buffer structs.\n");
return FAILURE;
}
/* Map the allocated buffers to user space */
for (numBufs = 0; numBufs < req.count; numBufs++) {
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = numBufs;
if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {
ERR("Failed VIDIOC_QUERYBUF on %s (%s)\n", V4L2_DEVICE,
strerror(errno));
return FAILURE;
}
buffers[numBufs].length = buf.length;
buffers[numBufs].start = mmap(NULL,
buf.length,
PROT_READ | PROT_WRITE,
MAP_SHARED,
fd, buf.m.offset);
if (buffers[numBufs].start == MAP_FAILED) {
ERR("Failed to mmap buffer on %s (%s)\n", V4L2_DEVICE,
strerror(errno));
return FAILURE;
}
DBG("Buffer %d mmapped to %#x.\n", numBufs,
(unsigned int) buffers[numBufs].start);
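/* Queue the buffer so the driver can fill it with captured video data */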
if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
ERR("VIODIOC_QBUF failed on %s (%s)\n", V4L2_DEVICE,
strerror(errno));
return FAILURE;
}
}
/* Start the video streaming */
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_STREAMON, &type) == -1) {
ERR("VIDIOC_STREAMON failed on %s (%s)\n", V4L2_DEVICE,
strerror(errno));
return FAILURE;
}
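/* Streaming is now active: filled buffers can be dequeued with VIDIOC_DQBUF
and handed back to the driver with VIDIOC_QBUF */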
*vidBufsPtr = buffers;
*numVidBufsPtr = numBufs;
return fd;
}
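/*
 * Typical caller usage (a sketch only, not part of the original demo; the
 * variable names below are illustrative):
 *
 *   VideoBuffer *vidBufs;
 *   int numVidBufs, width, height;
 *   int captureFd = initCaptureDevice(D1, &vidBufs, &numVidBufs,
 *                                     &width, &height);
 *   if (captureFd == FAILURE) { ... handle the error ... }
 *   ... dequeue, encode and requeue frames ...
 *   cleanupCaptureDevice(captureFd, vidBufs, numVidBufs);
 */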
/******************************************************************************
* cleanupCaptureDevice
******************************************************************************/
static void cleanupCaptureDevice(int fd, VideoBuffer *vidBufs, int numVidBufs)
{
enum v4l2_buf_type type;
unsigned int i;
/* Shut off the video capture */
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_STREAMOFF, &type) == -1) {
ERR("VIDIOC_STREAMOFF failed (%s)\n", strerror(errno));
}
/* Unmap the capture frame buffers from user space */
for (i = 0; i < numVidBufs; ++i) {
if (munmap(vidBufs[i].start, vidBufs[i].length) == -1) {
ERR("Failed to unmap capture buffer %d\n", i);
}
}
free(vidBufs);
if (close(fd) == -1) {
ERR("Failed to close capture device (%s)\n", strerror(errno));
}
}
/******************************************************************************
* initDisplayDevice
******************************************************************************/
static int initDisplayDevice(enum Resolution resolution, char *displays[])
{
struct fb_var_screeninfo varInfo;
struct Zoom_Params zoom;
int fd;
unsigned int *buf;
int i;
int std;
fd = open(FBVID_DEVICE, O_RDWR);
if (fd == -1) {
ERR("Failed to open fb device %s (%s)\n", FBVID_DEVICE,
strerror(errno));
return FAILURE;
}
if (ioctl(fd, FBIOGET_VSCREENINFO, &varInfo) == -1) {
ERR("Failed FBIOGET_VSCREENINFO on %s (%s)\n", FBVID_DEVICE,
strerror(errno));
return FAILURE;
}
if (ioctl(fd, FBIO_GETSTD, &std) == -1) {
ERR("Failed to get video standard from display device driver\n");
return FAILURE;
}
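/* This display driver reports the video standard in the upper 16 bits of the
FBIO_GETSTD value: 0x1 indicates NTSC, anything else is treated as PAL */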
if ((std >> 16) == 0x1) {
setYFactor(NTSC);
}
else {
setYFactor(PAL);
}
/* Display is always D1 */
varInfo.xres = D1_WIDTH;
varInfo.yres = D1_HEIGHT;
varInfo.bits_per_pixel = SCREEN_BPP;
if (ioctl(fd, FBIOPUT_VSCREENINFO, &varInfo) == -1) {
ERR("Failed FBIOPUT_VSCREENINFO on %s (%s)\n", FBVID_DEVICE,
strerror(errno));
return FAILURE;
}