📄 video_v4l_source.cpp
字号:
// NOTE(review): this chunk begins mid-function -- the lines below are the
// tail of an audio mute setter whose signature is outside the visible range.
// It reads the device's audio state, toggles the V4L1 mute bit, and writes
// the state back.
    rc = ioctl(m_videoDevice, VIDIOCGAUDIO, &videoAudio);
    if (rc == 0 && (videoAudio.flags & VIDEO_AUDIO_MUTABLE)) {
        // Device reports its audio as mutable; set or clear the mute flag.
        if (mute) {
            videoAudio.flags |= VIDEO_AUDIO_MUTE;
        } else {
            videoAudio.flags &= ~VIDEO_AUDIO_MUTE;
        }
        rc = ioctl(m_videoDevice, VIDIOCSAUDIO, &videoAudio);
        if (rc < 0) {
            debug_message("Can't set video audio for %s",
                m_pConfig->m_videoCapabilities->m_deviceName);
        }
    }
}

/*
 * Push the configured picture controls (brightness, hue, color, contrast)
 * down to the open V4L1 capture device.
 *
 * Returns false if no device is open or if either the get or set ioctl
 * fails; true on success.
 */
bool CV4LVideoSource::SetPictureControls()
{
    if (m_videoDevice == -1) {
        return false;
    }

    struct video_picture videoPicture;
    int rc;

    // Read the current settings first so fields we do not manage
    // (depth, palette, whiteness) are written back unchanged.
    rc = ioctl(m_videoDevice, VIDIOCGPICT, &videoPicture);
    if (rc < 0) {
        return false;
    }

    // Config values are percentages (0..100); V4L1 expects 0..0xFFFF.
    videoPicture.brightness = (u_int16_t)
        ((m_pConfig->GetIntegerValue(CONFIG_VIDEO_BRIGHTNESS) * 0xFFFF) / 100);
    videoPicture.hue = (u_int16_t)
        ((m_pConfig->GetIntegerValue(CONFIG_VIDEO_HUE) * 0xFFFF) / 100);
    videoPicture.colour = (u_int16_t)
        ((m_pConfig->GetIntegerValue(CONFIG_VIDEO_COLOR) * 0xFFFF) / 100);
    videoPicture.contrast = (u_int16_t)
        ((m_pConfig->GetIntegerValue(CONFIG_VIDEO_CONTRAST) * 0xFFFF) / 100);

    rc = ioctl(m_videoDevice, VIDIOCSPICT, &videoPicture);
    if (rc < 0) {
        return false;
    }
    return true;
}

/*
 * Wait for the frame at m_captureHead to finish capturing and hand its
 * buffer index to the caller.
 *
 * frameTimestamp - out: the frame's timestamp, either cached at capture
 *                  (re)queue time or sampled now, per m_cacheTimestamp.
 * Returns the captured buffer index, or -1 if VIDIOCSYNC fails.
 */
int8_t CV4LVideoSource::AcquireFrame(Timestamp &frameTimestamp)
{
    int rc;

    // Requeue any buffers the downstream consumers have released.
    ReleaseFrames();

    // Block until the oldest outstanding capture buffer is filled.
    rc = ioctl(m_videoDevice, VIDIOCSYNC, &m_videoFrameMap[m_captureHead]);
    if (rc != 0) {
        return -1;
    }

    if (m_cacheTimestamp)
        frameTimestamp = m_videoFrameMapTimestamp[m_captureHead];
    else
        frameTimestamp = GetTimestamp();

    int8_t capturedFrame = m_captureHead;
    // Advance the ring-buffer head over the device's mmap'd frame set.
    m_captureHead = (m_captureHead + 1) % m_videoMbuf.frames;

    return capturedFrame;
}

/*
 * CMediaFrame free callback for YUV frames produced by this source.
 *
 * If the Y plane was malloc'd (RGB->YUV conversion path) it is freed here;
 * otherwise the frame lives in the driver's mmap'd buffer and its index is
 * handed back to the owning CV4LVideoSource for requeueing.  The
 * yuv_media_frame_t wrapper itself is always freed.
 */
void c_ReleaseFrame (void *f)
{
    yuv_media_frame_t *yuv = (yuv_media_frame_t *)f;

    if (yuv->free_y) {
        CHECK_AND_FREE(yuv->y);
    } else {
        CV4LVideoSource *s = (CV4LVideoSource *)yuv->hardware;
        s->IndicateReleaseFrame(yuv->hardware_index);
    }
    free(yuv);
}

/*
 * Requeue capture buffers that consumers have released (via
 * IndicateReleaseFrame) back to the driver, assigning each a frame number
 * and timestamp as it is re-armed with VIDIOCMCAPTURE.
 *
 * Buffers are only requeued in ring order starting at m_encodeHead; a
 * released bit that is not at the head stops the scan (released_mask is
 * zeroed to exit the loop).
 */
void CV4LVideoSource::ReleaseFrames (void)
{
    uint32_t index_mask = 1;
    uint32_t released_mask;
    uint32_t back_on_queue_mask = 0;

    // Snapshot the release bitmask under the lock.
    SDL_LockMutex(m_v4l_mutex);
    released_mask = m_release_index_mask;
    SDL_UnlockMutex(m_v4l_mutex);

    // NOTE(review): this clear happens OUTSIDE the mutex, so a bit set by
    // another thread between the unlock above and this store could be lost.
    // It also seems redundant with the masked clear at the bottom of this
    // function -- looks like the clear belongs inside the locked region
    // above; confirm against the threading model before changing.
    m_release_index_mask = 0;

    while (released_mask != 0) {
        index_mask = 1 << m_encodeHead;
        if ((index_mask & released_mask) != 0) {
            back_on_queue_mask |= index_mask;

            // Decide the timestamp/frame number for the re-armed buffer.
            Timestamp calc = GetTimestamp();
            if (calc > m_videoSrcFrameDuration + m_lastVideoFrameMapTimestampLoaded) {
                // We fell behind by more than one frame duration: restart
                // the frame count from now rather than extrapolating.
#ifdef DEBUG_TIMESTAMPS
                debug_message("video frame delay past end of buffer - time is "U64" should be "U64,
                    calc,
                    m_videoSrcFrameDuration + m_lastVideoFrameMapTimestampLoaded);
#endif
                m_videoCaptureStartTimestamp = calc;
                m_videoFrameMapFrame[m_encodeHead] = 0;
                m_videoFrameMapTimestamp[m_encodeHead] = calc;
            } else {
                // Normal case: next consecutive frame number, derived timestamp.
                m_videoFrameMapFrame[m_encodeHead] = m_lastVideoFrameMapFrameLoaded + 1;
                m_videoFrameMapTimestamp[m_encodeHead] =
                    CalculateVideoTimestampFromFrames(m_videoFrameMapFrame[m_encodeHead]);
            }
            m_lastVideoFrameMapFrameLoaded = m_videoFrameMapFrame[m_encodeHead];
            m_lastVideoFrameMapTimestampLoaded = m_videoFrameMapTimestamp[m_encodeHead];

            // Hand the buffer back to the driver for the next capture.
            ioctl(m_videoDevice, VIDIOCMCAPTURE, &m_videoFrameMap[m_encodeHead]);
            m_encodeHead = (m_encodeHead + 1) % m_videoMbuf.frames;
        } else {
            // Head buffer not released yet -- stop scanning.
            released_mask = 0;
        }
    }

    if (back_on_queue_mask != 0) {
        // Clear only the bits we actually requeued.
        SDL_LockMutex(m_v4l_mutex);
        m_release_index_mask &= ~back_on_queue_mask;
        SDL_UnlockMutex(m_v4l_mutex);
    }
}

/*
 * Main capture loop body: acquire up to m_maxPasses frames, convert
 * RGB->YUV when the device cannot deliver YUV directly, optionally run the
 * decimate filter, wrap each frame in a CMediaFrame, and forward it
 * downstream.  Frame buffers are released either here (malloc'd conversion
 * buffers) or later via the c_ReleaseFrame callback (mmap'd buffers).
 */
void CV4LVideoSource::ProcessVideo(void)
{
    // for efficiency, process ~1 second before returning to check for commands
    Timestamp frameTimestamp;

    for (int pass = 0; pass < m_maxPasses; pass++) {

        // get next frame from video capture device
        int8_t index;
        index = AcquireFrame(frameTimestamp);
        if (index == -1) {
            continue;
        }

        u_int8_t* mallocedYuvImage = NULL;
        u_int8_t* pY;
        u_int8_t* pU;
        u_int8_t* pV;

        // perform colorspace conversion if necessary
        if (m_videoNeedRgbToYuv) {
            mallocedYuvImage = (u_int8_t*)Malloc(m_videoSrcYUVSize);
            pY = mallocedYuvImage;
            pU = pY + m_videoSrcYSize;
            // NOTE(review): comma operator below (',' instead of ';') --
            // harmless as written, but almost certainly a typo for a
            // semicolon; preserved byte-for-byte here.
            pV = pU + m_videoSrcUVSize,

            RGB2YUV(
                m_videoSrcWidth,
                m_videoSrcHeight,
                (u_int8_t*)m_videoMap + m_videoMbuf.offsets[index],
                pY,
                pU,
                pV,
                1,
                false);
        } else {
            // Planar YUV straight out of the driver's mmap'd buffer.
            pY = (u_int8_t*)m_videoMap + m_videoMbuf.offsets[index];
            pU = pY + m_videoSrcYSize;
            pV = pU + m_videoSrcUVSize;
        }

        if (m_decimate_filter) {
            video_filter_decimate(pY,
                                  m_videoSrcWidth,
                                  m_videoSrcHeight);
        }

        // Build the frame descriptor handed downstream.
        yuv_media_frame_t *yuv = MALLOC_STRUCTURE(yuv_media_frame_t);
        yuv->y = pY;
        yuv->u = pU;
        yuv->v = pV;
        yuv->y_stride = m_videoSrcWidth;
        yuv->uv_stride = m_videoSrcWidth >> 1;
        yuv->w = m_videoSrcWidth;
        yuv->h = m_videoSrcHeight;
        yuv->hardware = this;
        yuv->hardware_index = index;

        // Honor a pending key-frame request once audio has started.
        if (m_videoWantKeyFrame && frameTimestamp >= m_audioStartTimestamp) {
            yuv->force_iframe = true;
            m_videoWantKeyFrame = false;
            debug_message("Frame "U64" request key frame", frameTimestamp);
        } else
            yuv->force_iframe = false;

        // free_y tells c_ReleaseFrame whether the Y plane was malloc'd here.
        yuv->free_y = (mallocedYuvImage != NULL);

        CMediaFrame *frame = new CMediaFrame(YUVVIDEOFRAME,
                                             yuv,
                                             0,
                                             frameTimestamp);
        frame->SetMediaFreeFunction(c_ReleaseFrame);
        ForwardFrame(frame);
        //debug_message("video source forward");

        // enqueue the frame to video capture buffer
        if (mallocedYuvImage != NULL) {
            // Conversion path copied the data out, so the mmap'd buffer can
            // be requeued immediately.
            IndicateReleaseFrame(index);
        }
    }
}

/*
 * Find a usable V4L capture device.  First probes the configured device
 * name; on failure, scans a fixed list of common /dev/video* nodes.  On
 * success the config's device name and m_videoCapabilities are updated
 * (ownership of the CVideoCapabilities passes to pConfig).
 * Returns true if any device probed valid.
 */
bool CV4LVideoSource::InitialVideoProbe(CLiveConfig* pConfig)
{
    static char* devices[] = {
        "/dev/video",
        "/dev/video0",
        "/dev/video1",
        "/dev/video2",
        "/dev/video3"
    };
    const char* deviceName = pConfig->GetStringValue(CONFIG_VIDEO_SOURCE_NAME);
    CVideoCapabilities* pVideoCaps;

    // first try the device we're configured with
    pVideoCaps = new CVideoCapabilities(deviceName);
    if (pVideoCaps->IsValid()) {
        pConfig->m_videoCapabilities = pVideoCaps;
        return true;
    }
    delete pVideoCaps;

    // no luck, go searching
    for (u_int32_t i = 0; i < sizeof(devices) / sizeof(char*); i++) {

        // don't waste time trying something that's already failed
        if (!strcmp(devices[i], deviceName)) {
            continue;
        }

        pVideoCaps = new CVideoCapabilities(devices[i]);
        if (pVideoCaps->IsValid()) {
            pConfig->SetStringValue(CONFIG_VIDEO_SOURCE_NAME, devices[i]);
            pConfig->m_videoCapabilities = pVideoCaps;
            return true;
        }
        delete pVideoCaps;
    }

    return false;
}

/*
 * Open the device and interrogate its V4L1 capabilities: capture support,
 * geometry limits, and per-input (channel) names, signal types, and tuner
 * info.  Populates the m_* capability members; the device is closed before
 * returning.  Returns false if the device cannot be opened or is not a
 * capture device.
 */
bool CVideoCapabilities::ProbeDevice()
{
    int rc;

    int videoDevice = open(m_deviceName, O_RDWR);
    if (videoDevice < 0) {
        return false;
    }
    m_canOpen = true;

    // get device capabilities
    struct video_capability videoCapability;
    rc = ioctl(videoDevice, VIDIOCGCAP, &videoCapability);
    if (rc < 0) {
        debug_message("Failed to get video capabilities for %s", m_deviceName);
        m_canCapture = false;
        close(videoDevice);
        return false;
    }

    if (!(videoCapability.type & VID_TYPE_CAPTURE)) {
        debug_message("Device %s is not capable of video capture!",
            m_deviceName);
        m_canCapture = false;
        close(videoDevice);
        return false;
    }
    m_canCapture = true;

    m_driverName = strdup(videoCapability.name);
    m_numInputs = videoCapability.channels;

    m_minWidth = videoCapability.minwidth;
    m_minHeight = videoCapability.minheight;
    m_maxWidth = videoCapability.maxwidth;
    m_maxHeight = videoCapability.maxheight;
    m_hasAudio = videoCapability.audios;

    // Allocate the zero-initialized per-input tables.
    m_inputNames = (char**)malloc(m_numInputs * sizeof(char*));
    memset(m_inputNames, 0, m_numInputs * sizeof(char*));

    m_inputSignalTypes = (u_int8_t*)malloc(m_numInputs * sizeof(u_int8_t));
    memset(m_inputSignalTypes, 0, m_numInputs * sizeof(u_int8_t));

    m_inputHasTuners = (bool*)malloc(m_numInputs * sizeof(bool));
    memset(m_inputHasTuners, 0, m_numInputs * sizeof(bool));

    m_inputTunerSignalTypes = (u_int8_t*)malloc(m_numInputs * sizeof(u_int8_t));
    memset(m_inputTunerSignalTypes, 0, m_numInputs * sizeof(u_int8_t));

    for (int i = 0; i < m_numInputs; i++) {
        // N.B. "channel" here is really an input source
        struct video_channel videoChannel;
        videoChannel.channel = i;
        rc = ioctl(videoDevice, VIDIOCGCHAN, &videoChannel);
        if (rc < 0) {
            debug_message("Failed to get video channel info for %s:%u",
                m_deviceName, i);
            continue;
        }
        m_inputNames[i] = strdup(videoChannel.name);
        m_inputSignalTypes[i] = videoChannel.norm;

        if (videoChannel.flags & VIDEO_VC_TUNER) {
            // ignore videoChannel.tuners for now
            // current bt drivers only support 1 tuner per input port
            struct video_tuner videoTuner;
            videoTuner.tuner = 0;
            rc = ioctl(videoDevice, VIDIOCGTUNER, &videoTuner);
            if (rc < 0) {
                debug_message("Failed to get video tuner info for %s:%u",
                    m_deviceName, i);
                continue;
            }
            m_inputHasTuners[i] = true;
            // Low three flag bits carry the supported signal types.
            m_inputTunerSignalTypes[i] = videoTuner.flags & 0x7;
        }
    }

    close(videoDevice);
    return true;
}

// Display names indexed by the CONFIG_VIDEO_SIGNAL setting.
static const char *signals[] = {
    "PAL", "NTSC", "SECAM",
};

/*
 * Format a one-line human-readable summary of the current video setup
 * (device, geometry, signal standard, input, and -- when the input has a
 * tuner -- channel list and channel) into msg (at most max_len bytes).
 *
 * NOTE(review): signals[] has 3 entries; this assumes
 * CONFIG_VIDEO_SIGNAL is always in 0..2 -- presumably enforced where the
 * config value is set; confirm.
 */
void CVideoCapabilities::Display (CLiveConfig *pConfig,
                                  char *msg,
                                  uint32_t max_len)
{
    uint32_t port = pConfig->GetIntegerValue(CONFIG_VIDEO_INPUT);
    if (port >= m_numInputs) {
        snprintf(msg, max_len, "Video port has illegal value");
        return;
    }

    if (m_inputHasTuners[port] == false) {
        snprintf(msg, max_len, "%s, %ux%u, %s, %s",
            pConfig->GetStringValue(CONFIG_VIDEO_SOURCE_NAME),
            pConfig->m_videoWidth,
            pConfig->m_videoHeight,
            signals[pConfig->GetIntegerValue(CONFIG_VIDEO_SIGNAL)],
            m_inputNames[port]);
    } else {
        snprintf(msg, max_len, "%s, %ux%u, %s, %s, %s, channel %s",
            pConfig->GetStringValue(CONFIG_VIDEO_SOURCE_NAME),
            pConfig->m_videoWidth,
            pConfig->m_videoHeight,
            signals[pConfig->GetIntegerValue(CONFIG_VIDEO_SIGNAL)],
            m_inputNames[port],
            chanlists[pConfig->GetIntegerValue(CONFIG_VIDEO_CHANNEL_LIST_INDEX)].name,
            chanlists[pConfig->GetIntegerValue(CONFIG_VIDEO_CHANNEL_LIST_INDEX)].list[pConfig->GetIntegerValue(CONFIG_VIDEO_CHANNEL_INDEX)].name
            );
    }
}
#endif
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -