⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 video_v4l_source.cpp

📁 网络MPEG4IP流媒体开发源代码
💻 CPP
📖 第 1 页 / 共 2 页
字号:
		// NOTE(review): tail of a partially-visible function (its opening is
		// outside this chunk) -- a device-init failure path that unmaps the
		// capture buffers, closes the device fd, and reports failure.
		munmap(m_videoMap, m_videoMbuf.size);
		m_videoMap = NULL;
	}
	close(m_videoDevice);
	m_videoDevice = -1;
	return false;
}

// Release the V4L capture device: mute the card's audio output, unmap the
// mmap'ed capture buffer region, and close the device file descriptor.
// Leaves m_videoMap NULL and m_videoDevice at the "closed" sentinel (-1).
void CV4LVideoSource::ReleaseDevice()
{
	SetVideoAudioMute(true);

	// release device resources
	munmap(m_videoMap, m_videoMbuf.size);
	m_videoMap = NULL;

	close(m_videoDevice);
	m_videoDevice = -1;
}

// Mute (true) or unmute (false) the capture card's audio, but only when the
// probed capabilities say the device has audio and the driver reports the
// audio as mutable (VIDEO_AUDIO_MUTABLE).  Best effort: failures are logged
// via debug_message and otherwise ignored.
void CV4LVideoSource::SetVideoAudioMute(bool mute)
{
	// nothing to do if capabilities are unknown or the device has no audio
	if (!m_pConfig->m_videoCapabilities
	  || !m_pConfig->m_videoCapabilities->m_hasAudio) {
		return;
	}

	int rc;
	struct video_audio videoAudio;

	// read-modify-write the audio flags so other settings are preserved
	rc = ioctl(m_videoDevice, VIDIOCGAUDIO, &videoAudio);

	if (rc == 0 && (videoAudio.flags & VIDEO_AUDIO_MUTABLE)) {
		if (mute) {
			videoAudio.flags |= VIDEO_AUDIO_MUTE;
		} else {
			videoAudio.flags &= ~VIDEO_AUDIO_MUTE;
		}

		rc = ioctl(m_videoDevice, VIDIOCSAUDIO, &videoAudio);

		if (rc < 0) {
			debug_message("Can't set video audio for %s",
				m_pConfig->m_videoCapabilities->m_deviceName);
		}
	}
}

// Push the configured picture controls (brightness, hue, color, contrast)
// down to the driver.  Config values are percentages (0..100) and are
// rescaled to the V4L 16-bit range (0..0xFFFF).
// Returns false if the device is not open or either ioctl fails.
bool CV4LVideoSource::SetPictureControls()
{
	if (m_videoDevice == -1) {
		return false;
	}

	struct video_picture videoPicture;
	int rc;

	// fetch current settings first so unrelated fields stay intact
	rc = ioctl(m_videoDevice, VIDIOCGPICT, &videoPicture);

	if (rc < 0) {
		return false;
	}

	// scale each 0..100 percentage into the driver's 0..65535 range
	videoPicture.brightness = (u_int16_t)
		((m_pConfig->GetIntegerValue(CONFIG_VIDEO_BRIGHTNESS) * 0xFFFF) / 100);
	videoPicture.hue = (u_int16_t)
		((m_pConfig->GetIntegerValue(CONFIG_VIDEO_HUE) * 0xFFFF) / 100);
	videoPicture.colour = (u_int16_t)
		((m_pConfig->GetIntegerValue(CONFIG_VIDEO_COLOR) * 0xFFFF) / 100);
	videoPicture.contrast = (u_int16_t)
		((m_pConfig->GetIntegerValue(CONFIG_VIDEO_CONTRAST) * 0xFFFF) / 100);

	rc = ioctl(m_videoDevice, VIDIOCSPICT, &videoPicture);

	if (rc < 0) {
		return false;
	}

	return true;
}

// Block until the capture buffer at the ring head is filled (VIDIOCSYNC),
// report its timestamp through frameTimestamp, and return that buffer's
// index, advancing m_captureHead around the mmap'ed buffer ring.
// Returns -1 if the sync ioctl fails (frameTimestamp untouched).
// When m_cacheTimestamp is set, the timestamp computed at capture-queue time
// (see ReleaseFrame) is used; otherwise the current time is taken now.
int8_t CV4LVideoSource::AcquireFrame(Timestamp &frameTimestamp)
{
	int rc;
	rc = ioctl(m_videoDevice, VIDIOCSYNC, &m_videoFrameMap[m_captureHead]);
	if (rc != 0) {
		return -1;
	}

	if (m_cacheTimestamp)
	  frameTimestamp = m_videoFrameMapTimestamp[m_captureHead];
	else
	  frameTimestamp = GetTimestamp();

	int8_t capturedFrame = m_captureHead;
	m_captureHead = (m_captureHead + 1) % m_videoMbuf.frames;
	return capturedFrame;
}

// Hand buffer frameNumber back to the driver for the next capture
// (VIDIOCMCAPTURE), first precomputing the timestamp that AcquireFrame will
// later report for it.  Normally the timestamp is derived from a running
// frame counter; if we have fallen more than one frame duration behind the
// last loaded timestamp, the clock is resynced to "now" and the frame
// counter restarts at 0.  Returns true if the ioctl succeeds.
bool CV4LVideoSource::ReleaseFrame(int8_t frameNumber)
{
  Timestamp calc = GetTimestamp();

  if (calc > m_videoSrcFrameDuration + m_lastVideoFrameMapTimestampLoaded) {
    // we've slipped past the expected schedule -- resync to wall clock
    error_message("video frame delay past end of buffer - time is %llu should be %llu",
		  calc,
		  m_videoSrcFrameDuration + m_lastVideoFrameMapTimestampLoaded);
    m_videoCaptureStartTimestamp = calc;
    m_videoFrameMapFrame[frameNumber] = 0;
    m_videoFrameMapTimestamp[frameNumber] = calc;
  } else {
    // on schedule: next frame number, timestamp derived from frame count
    m_videoFrameMapFrame[frameNumber] = m_lastVideoFrameMapFrameLoaded + 1;
    m_videoFrameMapTimestamp[frameNumber] =
      CalculateVideoTimestampFromFrames(m_videoFrameMapFrame[frameNumber]);
  }

  m_lastVideoFrameMapFrameLoaded = m_videoFrameMapFrame[frameNumber];
  m_lastVideoFrameMapTimestampLoaded = m_videoFrameMapTimestamp[frameNumber];

  return (ioctl(m_videoDevice, VIDIOCMCAPTURE, 
		&m_videoFrameMap[frameNumber]) == 0);
}

// Main capture pump: acquire up to m_maxPasses frames, convert RGB sources
// to YUV when necessary, hand each planar YUV frame to
// ProcessVideoYUVFrame(), and requeue the buffer with ReleaseFrame().
void CV4LVideoSource::ProcessVideo(void)
{
	// for efficiency, process ~1 second before returning to check for commands
  Timestamp frameTimestamp;
	for (int pass = 0; pass < m_maxPasses; pass++) {

		// get next frame from video capture device
		m_encodeHead = AcquireFrame(frameTimestamp);
		if (m_encodeHead == -1) {
			continue;
		}

		u_int8_t* mallocedYuvImage = NULL;
		u_int8_t* pY;
		u_int8_t* pU;
		u_int8_t* pV;

		// perform colorspace conversion if necessary
		if (m_videoSrcType == RGBVIDEOFRAME) {
			// convert into a temporary YUV buffer; planes are laid out
			// contiguously: Y, then U, then V
			mallocedYuvImage = (u_int8_t*)Malloc(m_videoSrcYUVSize);

			pY = mallocedYuvImage;
			pU = pY + m_videoSrcYSize;
			// NOTE(review): trailing ',' below is the comma operator --
			// behavior is identical, but ';' was almost certainly intended
			pV = pU + m_videoSrcUVSize,

			RGB2YUV(
				m_videoSrcWidth,
				m_videoSrcHeight,
				(u_int8_t*)m_videoMap + m_videoMbuf.offsets[m_encodeHead],
				pY,
				pU,
				pV,
				1);
		} else {
			// already YUV: point straight into the mmap'ed capture buffer
			pY = (u_int8_t*)m_videoMap + m_videoMbuf.offsets[m_encodeHead];
			pU = pY + m_videoSrcYSize;
			pV = pU + m_videoSrcUVSize;
		}

		// width is the Y stride; chroma stride is half (4:2:0 planes)
		ProcessVideoYUVFrame(
			pY, 
			pU, 
			pV,
			m_videoSrcWidth,
			m_videoSrcWidth >> 1,
			frameTimestamp);

		// release video frame buffer back to video capture device
		if (ReleaseFrame(m_encodeHead)) {
			m_encodeHead = (m_encodeHead + 1) % m_videoMbuf.frames;
		} else {
			debug_message("Couldn't release capture buffer!");
		}

		if (mallocedYuvImage != NULL) {
		  free(mallocedYuvImage);
		}
	}
}

// Find a usable V4L capture device.  First probes the configured device
// name; failing that, scans the well-known /dev/video* names (skipping the
// one already tried).  On success stores the capabilities object (ownership
// transferred to pConfig) and, if a fallback device was used, rewrites
// CONFIG_VIDEO_SOURCE_NAME.  Returns false if no device probes valid.
bool CV4LVideoSource::InitialVideoProbe(CLiveConfig* pConfig)
{
	static char* devices[] = {
		"/dev/video", 
		"/dev/video0", 
		"/dev/video1", 
		"/dev/video2", 
		"/dev/video3"
	};
	char* deviceName = pConfig->GetStringValue(CONFIG_VIDEO_SOURCE_NAME);
	CVideoCapabilities* pVideoCaps;

	// first try the device we're configured with
	pVideoCaps = new CVideoCapabilities(deviceName);

	if (pVideoCaps->IsValid()) {
		pConfig->m_videoCapabilities = pVideoCaps;
		return true;
	}

	delete pVideoCaps;

	// no luck, go searching
	for (u_int32_t i = 0; i < sizeof(devices) / sizeof(char*); i++) {

		// don't waste time trying something that's already failed
		if (!strcmp(devices[i], deviceName)) {
			continue;
		} 

		pVideoCaps = new CVideoCapabilities(devices[i]);

		if (pVideoCaps->IsValid()) {
			pConfig->SetStringValue(CONFIG_VIDEO_SOURCE_NAME, devices[i]);
			pConfig->m_videoCapabilities = pVideoCaps;
			return true;
		}
		
		delete pVideoCaps;
	}

	return false;
}

// Probe m_deviceName with the V4L1 API and populate this capabilities
// object: driver name, input count, min/max capture geometry, audio
// presence, and per-input name/signal-type/tuner info.
// Sets m_canOpen/m_canCapture along the way; returns false if the device
// can't be opened, queried, or can't capture.  The device is opened only
// for the duration of the probe and closed before returning.
bool CVideoCapabilities::ProbeDevice()
{
	int rc;

	int videoDevice = open(m_deviceName, O_RDWR);

	if (videoDevice < 0) {
		return false;
	}
	m_canOpen = true;

	// get device capabilities
	struct video_capability videoCapability;
	rc = ioctl(videoDevice, VIDIOCGCAP, &videoCapability);
	if (rc < 0) {
		debug_message("Failed to get video capabilities for %s", m_deviceName);
		m_canCapture = false;
		close(videoDevice);
		return false;
	}

	if (!(videoCapability.type & VID_TYPE_CAPTURE)) {
		debug_message("Device %s is not capable of video capture!", 
			m_deviceName);
		m_canCapture = false;
		close(videoDevice);
		return false;
	}
	m_canCapture = true;

	m_driverName = stralloc(videoCapability.name);
	m_numInputs = videoCapability.channels;

	m_minWidth = videoCapability.minwidth;
	m_minHeight = videoCapability.minheight;
	m_maxWidth = videoCapability.maxwidth;
	m_maxHeight = videoCapability.maxheight;
	// audios is a count of audio devices; nonzero is treated as "has audio"
	m_hasAudio = videoCapability.audios;

	// per-input info arrays, zero-initialized; entries stay zero/NULL for
	// inputs whose channel query fails below
	// NOTE(review): malloc results are not checked for NULL here
	m_inputNames = (char**)malloc(m_numInputs * sizeof(char*));
	memset(m_inputNames, 0, m_numInputs * sizeof(char*));

	m_inputSignalTypes = (u_int8_t*)malloc(m_numInputs * sizeof(u_int8_t));
	memset(m_inputSignalTypes, 0, m_numInputs * sizeof(u_int8_t));

	m_inputHasTuners = (bool*)malloc(m_numInputs * sizeof(bool));
	memset(m_inputHasTuners, 0, m_numInputs * sizeof(bool));

	m_inputTunerSignalTypes = (u_int8_t*)malloc(m_numInputs * sizeof(u_int8_t));
	memset(m_inputTunerSignalTypes, 0, m_numInputs * sizeof(u_int8_t));

	for (int i = 0; i < m_numInputs; i++) {
		// N.B. "channel" here is really an input source
		struct video_channel videoChannel;
		videoChannel.channel = i;
		rc = ioctl(videoDevice, VIDIOCGCHAN, &videoChannel);
		if (rc < 0) {
			debug_message("Failed to get video channel info for %s:%u",
				m_deviceName, i);
			continue;
		}
		m_inputNames[i] = stralloc(videoChannel.name);
		m_inputSignalTypes[i] = videoChannel.norm;

		if (videoChannel.flags & VIDEO_VC_TUNER) {
			// ignore videoChannel.tuners for now
			// current bt drivers only support 1 tuner per input port
			struct video_tuner videoTuner;
			videoTuner.tuner = 0;
			rc = ioctl(videoDevice, VIDIOCGTUNER, &videoTuner);
			if (rc < 0) {
				debug_message("Failed to get video tuner info for %s:%u",
					m_deviceName, i);
				continue;
			}
			
			m_inputHasTuners[i] = true;
			// low 3 bits of tuner flags encode the supported signal types
			m_inputTunerSignalTypes[i] = videoTuner.flags & 0x7;
		}
	}

	close(videoDevice);
	return true;
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -