
📄 v4l2.c.svn-base

📁 mediastreamer2 is an open-source library for transmitting media streams over the network
💻 SVN-BASE
📖 Page 1 of 2
/*
 * Video4Linux2 grab interface
 * Copyright (c) 2000,2001 Fabrice Bellard.
 * Copyright (c) 2006 Luca Abeni.
 *
 * Part of this file is based on the V4L2 video capture example
 * (http://v4l2spec.bytesex.org/v4l2spec/capture.c)
 *
 * Thanks to Michael Niedermayer for providing the mapping between
 * V4L2_PIX_FMT_* and PIX_FMT_*
 *
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "avformat.h"
#include <unistd.h>
#include <fcntl.h>
#include <string.h>   /* memset(), strerror() */
#include <errno.h>    /* errno */
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <asm/types.h>
#include <linux/videodev2.h>
#include <time.h>

static const int desired_video_buffers = 256;

enum io_method {
    io_read,
    io_mmap,
    io_userptr
};

struct video_data {
    int fd;
    int frame_format; /* V4L2_PIX_FMT_* */
    enum io_method io_method;
    int width, height;
    int frame_rate;
    int frame_rate_base;
    int frame_size;
    int top_field_first;

    int buffers;
    void **buf_start;
    unsigned int *buf_len;
};

struct buff_data {
    int index;
    int fd;
};

struct fmt_map {
    enum PixelFormat ff_fmt;
    int32_t v4l2_fmt;
};

static struct fmt_map fmt_conversion_table[] = {
    {
        .ff_fmt = PIX_FMT_YUV420P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV420,
    },
    {
        .ff_fmt = PIX_FMT_YUV422P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV422P,
    },
    {
        .ff_fmt = PIX_FMT_YUYV422,
        .v4l2_fmt = V4L2_PIX_FMT_YUYV,
    },
    {
        .ff_fmt = PIX_FMT_UYVY422,
        .v4l2_fmt = V4L2_PIX_FMT_UYVY,
    },
    {
        .ff_fmt = PIX_FMT_YUV411P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV411P,
    },
    {
        .ff_fmt = PIX_FMT_YUV410P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV410,
    },
    {
        .ff_fmt = PIX_FMT_BGR24,
        .v4l2_fmt = V4L2_PIX_FMT_BGR24,
    },
    {
        .ff_fmt = PIX_FMT_RGB24,
        .v4l2_fmt = V4L2_PIX_FMT_RGB24,
    },
    /*
    {
        .ff_fmt = PIX_FMT_RGB32,
        .v4l2_fmt = V4L2_PIX_FMT_BGR32,
    },
    */
    {
        .ff_fmt = PIX_FMT_GRAY8,
        .v4l2_fmt = V4L2_PIX_FMT_GREY,
    },
};

static int device_open(AVFormatContext *ctx, uint32_t *capabilities)
{
    struct v4l2_capability cap;
    int fd;
    int res;
    int flags = O_RDWR;

    if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
        flags |= O_NONBLOCK;
    }
    fd = open(ctx->filename, flags, 0);
    if (fd < 0) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s : %s\n",
               ctx->filename, strerror(errno));

        return -1;
    }

    res = ioctl(fd, VIDIOC_QUERYCAP, &cap);
    // ENOIOCTLCMD definition only available on __KERNEL__
    if (res < 0 && errno == 515) {
        av_log(ctx, AV_LOG_ERROR, "QUERYCAP not implemented, probably V4L device but not supporting V4L2\n");
        close(fd);

        return -1;
    }
    if (res < 0) {
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
               strerror(errno));
        close(fd);

        return -1;
    }
    if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
        av_log(ctx, AV_LOG_ERROR, "Not a video capture device\n");
        close(fd);

        return -1;
    }
    *capabilities = cap.capabilities;

    return fd;
}

static int device_init(AVFormatContext *ctx, int *width, int *height, int pix_fmt)
{
    struct video_data *s = ctx->priv_data;
    int fd = s->fd;
    struct v4l2_format fmt;
    int res;

    memset(&fmt, 0, sizeof(struct v4l2_format));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = *width;
    fmt.fmt.pix.height = *height;
    fmt.fmt.pix.pixelformat = pix_fmt;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    res = ioctl(fd, VIDIOC_S_FMT, &fmt);
    if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
        av_log(ctx, AV_LOG_INFO,
               "The V4L2 driver changed the video from %dx%d to %dx%d\n",
               *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
        *width = fmt.fmt.pix.width;
        *height = fmt.fmt.pix.height;
    }

    return res;
}

static int first_field(int fd)
{
    int res;
    v4l2_std_id std;

    res = ioctl(fd, VIDIOC_G_STD, &std);
    if (res < 0) {
        return 0;
    }
    if (std & V4L2_STD_NTSC) {
        return 0;
    }

    return 1;
}

static uint32_t fmt_ff2v4l(enum PixelFormat pix_fmt)
{
    int i;

    for (i = 0; i < sizeof(fmt_conversion_table) / sizeof(struct fmt_map); i++) {
        if (fmt_conversion_table[i].ff_fmt == pix_fmt) {
            return fmt_conversion_table[i].v4l2_fmt;
        }
    }

    return 0;
}

static enum PixelFormat fmt_v4l2ff(uint32_t pix_fmt)
{
    int i;

    for (i = 0; i < sizeof(fmt_conversion_table) / sizeof(struct fmt_map); i++) {
        if (fmt_conversion_table[i].v4l2_fmt == pix_fmt) {
            return fmt_conversion_table[i].ff_fmt;
        }
    }

    return -1;
}

static int mmap_init(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_requestbuffers req;
    int i, res;

    memset(&req, 0, sizeof(struct v4l2_requestbuffers));
    req.count = desired_video_buffers;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    res = ioctl(s->fd, VIDIOC_REQBUFS, &req);
    if (res < 0) {
        if (errno == EINVAL) {
            av_log(ctx, AV_LOG_ERROR, "Device does not support mmap\n");
        } else {
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS)\n");
        }

        return -1;
    }

    if (req.count < 2) {
        av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");

        return -1;
    }
    s->buffers = req.count;
    s->buf_start = av_malloc(sizeof(void *) * s->buffers);
    if (s->buf_start == NULL) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");

        return -1;
    }
    s->buf_len = av_malloc(sizeof(unsigned int) * s->buffers);
    if (s->buf_len == NULL) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
        av_free(s->buf_start);

        return -1;
    }

    for (i = 0; i < req.count; i++) {
        struct v4l2_buffer buf;

        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        res = ioctl(s->fd, VIDIOC_QUERYBUF, &buf);
        if (res < 0) {
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF)\n");

            return -1;
        }

        s->buf_len[i] = buf.length;
        if (s->buf_len[i] < s->frame_size) {
            av_log(ctx, AV_LOG_ERROR, "Buffer len [%d] = %d != %d\n",
                   i, s->buf_len[i], s->frame_size);

            return -1;
        }
        s->buf_start[i] = mmap(NULL, buf.length,
                               PROT_READ | PROT_WRITE, MAP_SHARED,
                               s->fd, buf.m.offset);
        if (s->buf_start[i] == MAP_FAILED) {
            av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", strerror(errno));

            return -1;
        }
    }

    return 0;
}

static int read_init(AVFormatContext *ctx)
{
    return -1;
}

static void mmap_release_buffer(AVPacket *pkt)
{
    struct v4l2_buffer buf;
    int res, fd;
    struct buff_data *buf_descriptor = pkt->priv;

    memset(&buf, 0, sizeof(struct v4l2_buffer));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = buf_descriptor->index;
    fd = buf_descriptor->fd;
    av_free(buf_descriptor);

    res = ioctl(fd, VIDIOC_QBUF, &buf);
    if (res < 0) {
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF)\n");
    }
    pkt->data = NULL;
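
The listing cuts off here (the file continues on page 2, before the buffer-dequeue read path is shown). To make the buffer handling above easier to follow, here is a minimal standalone sketch of the standard V4L2 mmap streaming sequence that mmap_init() prepares for: queue every mapped buffer, start streaming, then dequeue a filled buffer with VIDIOC_DQBUF and requeue it with VIDIOC_QBUF. This is not part of the file above; the device path /dev/video0, the 640x480 YUYV format, and the buffer count of 4 are illustrative assumptions, and error handling is trimmed for brevity.

/* Minimal sketch of the standard V4L2 mmap capture sequence (illustrative). */
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

int main(void)
{
    int fd = open("/dev/video0", O_RDWR);   /* assumed device node */
    if (fd < 0)
        return 1;

    /* Negotiate a capture format, as device_init() does above. */
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = 640;
    fmt.fmt.pix.height = 480;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    ioctl(fd, VIDIOC_S_FMT, &fmt);

    /* Request and map driver buffers, as mmap_init() does above. */
    struct v4l2_requestbuffers req;
    memset(&req, 0, sizeof(req));
    req.count = 4;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    ioctl(fd, VIDIOC_REQBUFS, &req);
    if (req.count > 4)
        req.count = 4;                      /* keep the fixed-size arrays below safe */

    void *start[4];
    size_t length[4];
    for (unsigned i = 0; i < req.count; i++) {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        ioctl(fd, VIDIOC_QUERYBUF, &buf);
        length[i] = buf.length;
        start[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
                        MAP_SHARED, fd, buf.m.offset);
        ioctl(fd, VIDIOC_QBUF, &buf);       /* queue every buffer before streaming */
    }

    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ioctl(fd, VIDIOC_STREAMON, &type);

    /* Capture a few frames: dequeue a filled buffer, use it, hand it back. */
    for (int n = 0; n < 10; n++) {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        ioctl(fd, VIDIOC_DQBUF, &buf);      /* blocks until a frame is ready */
        printf("frame %d: %u bytes at %p\n", n, buf.bytesused, start[buf.index]);
        ioctl(fd, VIDIOC_QBUF, &buf);       /* requeue, like mmap_release_buffer() */
    }

    ioctl(fd, VIDIOC_STREAMOFF, &type);
    for (unsigned i = 0; i < req.count; i++)
        munmap(start[i], length[i]);
    close(fd);
    return 0;
}

Note the ordering: all buffers must be queued before VIDIOC_STREAMON, and VIDIOC_DQBUF blocks in blocking mode until a filled buffer is available, which is why device_open() above passes O_NONBLOCK through when AVFMT_FLAG_NONBLOCK is set.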
