
rtsp_datasource.cpp

				context->video_fmt.height = subsession->videoHeight();
				lib::logger::get_logger()->debug("ambulant::net::rtsp_demux(net::url& url), width: %d, height: %d, FPS: %f", context->video_fmt.width, context->video_fmt.height, 1000000.0/context->video_fmt.frameduration);
			}
		} else {
			AM_DBG lib::logger::get_logger()->debug("rtsp_demux: ignoring \"%s\" subsession", subsession->mediumName());
		}
		context->nstream++;
#if 0 // XXXJack: Corresponding code in live playCommon.cpp is pretty different...
		int rtp_sock_num = subsession->rtpSource()->RTPgs()->socketNum();
		int buf_size = increaseReceiveBufferTo(*context->env, rtp_sock_num, desired_buf_size);
		(void)buf_size; // Forestall compiler warning
#endif
		if (!context->rtsp_client->setupMediaSubsession(*subsession, false, false)) {
			lib::logger::get_logger()->error("ambulant::net::rtsp_demux(net::url& url) failed to send setup command to subsession");
			//lib::logger::get_logger()->error("RTSP Connection Failed");
			delete context;
			return NULL;
		}
	}

	lib::logger::get_logger()->debug("rtsp_demux::supported(%s): duration=%ld", ch_url, context->time_left);
	return context;
}

timestamp_t
ambulant::net::rtsp_demux::get_clip_begin()
{
	timestamp_t rv;
	m_critical_section.enter();
	rv = m_clip_begin;
	m_critical_section.leave();
	return rv;
}

timestamp_t
ambulant::net::rtsp_demux::get_clip_end()
{
	timestamp_t rv;
	m_critical_section.enter();
	rv = m_clip_end;
	m_critical_section.leave();
	return rv;
}

void
ambulant::net::rtsp_demux::seek(timestamp_t time)
{
	m_critical_section.enter();
	m_clip_begin = time;
	m_clip_begin_set = false;
	m_critical_section.leave();
}

void
ambulant::net::rtsp_demux::set_position(timestamp_t time)
{
	float time_sec;

	m_critical_section.enter();
	time_sec = time / 1000000.0;
	MediaSubsession* subsession;
	MediaSubsessionIterator iter(*m_context->media_session);
	while ((subsession = iter.next()) != NULL) {
		m_context->rtsp_client->playMediaSubsession(*subsession, time_sec);
	}
	m_critical_section.leave();
}

unsigned long
ambulant::net::rtsp_demux::run()
{
	AM_DBG lib::logger::get_logger()->debug("ambulant::net::rtsp_demux::run() called (%d)", m_context->need_audio);
	m_context->blocking_flag = 0;
	if (m_context->media_session) {
		if (!m_context->rtsp_client->playMediaSession(*m_context->media_session)) {
			lib::logger::get_logger()->error("playing RTSP connection failed");
			return 1;
		}
	} else {
		lib::logger::get_logger()->error("playing RTSP connection failed");
		return 1;
	}
	AM_DBG lib::logger::get_logger()->debug("ambulant::net::rtsp_demux::run() starting the loop");
	add_ref();
	int firstTime = 0;
	while (!m_context->eof && !exit_requested()) {
		AM_DBG lib::logger::get_logger()->debug("ambulant::net::rtsp_demux::run: start another loop iteration");
#if 0 // XXXJack: suspect...
		if (!m_clip_begin_set) {
			set_position(m_clip_begin);
			m_clip_begin_set = true;
		}
#endif
		MediaSubsession* subsession;
		MediaSubsessionIterator iter(*m_context->media_session);
		// Only the audio/video subsessions get a read scheduled here.
		while ((subsession = iter.next()) != NULL) {
			if (strcmp(subsession->mediumName(), "audio") == 0) {
				if (m_context->need_audio) {
					assert(!m_context->audio_packet);
					m_context->audio_packet = (unsigned char*) malloc(MAX_RTP_FRAME_SIZE);
					AM_DBG lib::logger::get_logger()->debug("ambulant::net::rtsp_demux::run() Calling getNextFrame for an audio frame");
					m_context->need_audio = false;
					subsession->readSource()->getNextFrame(m_context->audio_packet, MAX_RTP_FRAME_SIZE, after_reading_audio, m_context, on_source_close, m_context);
				}
			} else if (strcmp(subsession->mediumName(), "video") == 0) {
				if (m_context->need_video) {
					assert(!m_context->video_packet);
					m_context->configDataLen = 0; // Required by after_reading_video
					if (firstTime == 0) {
						// For the MP4V-ES video format we need to insert a decoder config header
						// into the stream. It should be carried in the 'config' MIME parameter,
						// which is hopefully present in the SDP description.
						// This idea was copied from mplayer libmpdemux/demux_rtp.cpp.
						firstTime = 1;
						//if (strcmp(gettext(m_context->video_codec_name), "MP4V-ES") == 0)
						// Optional check (therefore removed), since it should not matter for other formats.
						//{
							AM_DBG lib::logger::get_logger()->debug("rtsp_demux::run: video codec %s, parsing SDP config data", m_context->video_codec_name);
							unsigned configLen;
							unsigned char* configData = parseGeneralConfigStr(subsession->fmtp_config(), configLen);
							m_context->configData = configData;
							m_context->configDataLen = configLen;
						//}
					}
					m_context->video_packet = (unsigned char*) malloc(MAX_RTP_FRAME_SIZE);
					//std::cout << " MAX_RTP_FRAME_SIZE = " << MAX_RTP_FRAME_SIZE;
					AM_DBG lib::logger::get_logger()->debug("ambulant::net::rtsp_demux::run() Calling getNextFrame for a video frame");
					m_context->need_video = false;
					AM_DBG lib::logger::get_logger()->debug("ambulant::net::rtsp_demux::run() video_packet 0x%x", m_context->video_packet);
					subsession->readSource()->getNextFrame(m_context->video_packet, MAX_RTP_FRAME_SIZE, after_reading_video, m_context, on_source_close, m_context);
				}
			} else {
				AM_DBG lib::logger::get_logger()->debug("ambulant::net::rtsp_demux::run() not interested in this data");
			}
		}

		AM_DBG lib::logger::get_logger()->debug("ambulant::net::rtsp_demux::run() blocking_flag: 0x%x, %d, need_audio %d", &m_context->blocking_flag, m_context->blocking_flag, m_context->need_audio);

		TaskScheduler& scheduler = m_context->env->taskScheduler();
#ifdef DUMMYTASK
		dummyTask(m_context->env);
#endif /*DUMMYTASK*/
		scheduler.doEventLoop(&m_context->blocking_flag);
		m_context->blocking_flag = 0;
	}
	for (int i = 0; i < MAX_STREAMS; i++) {
		demux_datasink *sink = m_context->sinks[i];
		if (sink)
			sink->data_avail(0, 0, 0);
	}
	AM_DBG lib::logger::get_logger()->debug("ambulant::net::rtsp_demux::run(0x%x): returning", (void*)this);
	release();
	return 0;
}

void
ambulant::net::rtsp_demux::_cancel()
{
	AM_DBG lib::logger::get_logger()->debug("ambulant::net::rtsp_demux::_cancel(0x%x): m_context=0x%x rtspClient=0x%x mediaSession=0x%x", (void*)this, m_context, m_context ? m_context->rtsp_client : 0, m_context ? m_context->media_session : 0);
	if (m_context) {
		m_context->eof = true;
		m_context->blocking_flag = 0;
	}
//	if (is_running())
//		stop();
	release();
}

void
ambulant::net::rtsp_demux::cancel()
{
	m_critical_section.enter();
	_cancel();
	m_critical_section.leave();
}

static void
after_reading_audio(void* data, unsigned sz, unsigned truncated, struct timeval pts, unsigned duration)
{
	AM_DBG lib::logger::get_logger()->debug("after_reading_audio: called data = 0x%x, sz = %d, truncated = %d", data, sz, truncated);
	rtsp_context_t* context = NULL;
	if (data) {
		context = (rtsp_context_t*) data;
	}
	AM_DBG lib::logger::get_logger()->debug("after_reading_audio: audio data available (client data: 0x%x)", data);
	if (context) {
		timestamp_t rpts = (pts.tv_sec * 1000000) + pts.tv_usec;
		if (context->sinks[context->audio_stream]) {
			AM_DBG lib::logger::get_logger()->debug("after_reading_audio: calling data_avail");
			context->sinks[context->audio_stream]->data_avail(rpts, (uint8_t*) context->audio_packet, sz);
			AM_DBG lib::logger::get_logger()->debug("after_reading_audio: calling data_avail done");
		}
		assert(context->audio_packet);
		free(context->audio_packet);
		context->audio_packet = NULL;
		//XXX Do we need to free data here?
	}
	context->blocking_flag = ~0;
	context->need_audio = true;
}

static void
after_reading_video(void* data, unsigned sz, unsigned truncated, struct timeval pts, unsigned duration)
{
	AM_DBG lib::logger::get_logger()->debug("after_reading_video: called data = 0x%x, sz = %d, truncated = %d pts=(%d s, %d us), dur=%d", data, sz, truncated, pts.tv_sec, pts.tv_usec, duration);
	rtsp_context_t* context = (rtsp_context_t*) data;
	assert(context);
	assert(context->video_packet);

	// For the first packet, we remember the timestamp so we can convert Live's wallclock
	// timestamps to our zero-based timestamps.
	if (context->first_sync_time.tv_sec == 0 && context->first_sync_time.tv_usec == 0) {
		context->first_sync_time.tv_sec = pts.tv_sec;
		context->first_sync_time.tv_usec = pts.tv_usec;
		context->last_pts = 0;
		if (context->configDataLen > 0) // Required by MP4V-ES. Only needed for the first packet.
			context->sinks[context->video_stream]->data_avail(0, (uint8_t*) context->configData, context->configDataLen);
		//memcpy(context->vbuffer, context->video_packet, sz);
		//context->vbufferlen = sz;
	}
	timestamp_t rpts = (pts.tv_sec - context->first_sync_time.tv_sec) * 1000000 + (timestamp_t) (pts.tv_usec - context->first_sync_time.tv_usec);
	AM_DBG lib::logger::get_logger()->debug("after_reading_video: called timestamp %lld, sec = %d, usec = %d", rpts, pts.tv_sec, pts.tv_usec);

	// Frame alignment for MPEG-1/2 frames; Live doesn't do it. Packets with the same pts
	// belong to the same frame and are accumulated in vbuffer.
	// If the accumulated frame grows beyond 20kB we drop it and start over.
	// TODO: also hand over what we have so far: the partial frame.
	if (rpts == context->last_pts) {
		if ((sz + context->vbufferlen) > 20000) {
			lib::logger::get_logger()->trace("Frame too large to display");
			context->vbufferlen = 0;
		} else {
			memcpy((context->vbuffer + context->vbufferlen), context->video_packet, sz);
			context->vbufferlen += sz;
		}
	} else {
		// Send the data to our sink, which is responsible for copying/saving it before returning.
		if (context->sinks[context->video_stream]) {
			AM_DBG lib::logger::get_logger()->debug("Video packet length %d", context->vbufferlen);
			context->sinks[context->video_stream]->data_avail(context->last_pts, (uint8_t*) context->vbuffer, context->vbufferlen);
		}
		context->last_pts = rpts;
		// Copy the first packet of the next frame.
		memcpy(context->vbuffer, context->video_packet, sz);
		context->vbufferlen = sz;
	}

	// Tell the main demux loop that we're ready for another packet.
	context->need_video = true;
	free(context->video_packet);
	if (context->configDataLen > 0) {
		free(context->configData);
		context->configData = NULL;
	}
	context->video_packet = NULL;
	if (context->last_pts >= context->time_left) {
		lib::logger::get_logger()->debug("after_reading_video: last_pts = %ld", context->last_pts);
		context->eof = true;
	}
	context->blocking_flag = ~0;
	//XXX Do we need to free data here?
}

static void
on_source_close(void* data)
{
	rtsp_context_t* context = (rtsp_context_t*) data;
	if (context) {
		context->eof = true;
		context->blocking_flag = ~0;
	}
}
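The main loop in rtsp_demux::run() above relies on Live555's watch-variable idiom: TaskScheduler::doEventLoop() keeps dispatching scheduled events until the char it watches becomes non-zero, which the after_reading_* callbacks do by setting blocking_flag. Below is a minimal standalone sketch of that idiom; it uses only the stock Live555 BasicUsageEnvironment API, and names such as stopFlag and wakeUp are illustrative, not taken from the file above.

// Sketch of the watch-variable idiom: run the event loop one "batch" of callbacks at a time.
#include <BasicUsageEnvironment.hh>

static char volatile stopFlag = 0;      // plays the role of m_context->blocking_flag

static void wakeUp(void* /*clientData*/) {
	stopFlag = ~0;                      // any non-zero value makes doEventLoop() return
}

int main() {
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

	// In the demuxer the flag is set by after_reading_audio/after_reading_video
	// when a frame arrives; here a delayed task stands in for that.
	scheduler->scheduleDelayedTask(100000 /*usec*/, wakeUp, NULL);

	scheduler->doEventLoop(&stopFlag);  // returns once stopFlag != 0
	stopFlag = 0;                       // reset before the next iteration, as run() does

	env->reclaim();
	delete scheduler;
	return 0;
}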
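The MP4V-ES handling in run() (feeding the decoder configuration bytes from the SDP 'config' fmtp parameter into the stream before the first frame, an idea credited to mplayer's demux_rtp.cpp) reduces to the Live555 helper parseGeneralConfigStr(). A hedged sketch follows, assuming an already set-up MediaSubsession; the function name send_mp4v_config and the printf dump are illustrative only.

// Sketch: turn the SDP "config=..." hex string of an MP4V-ES subsession into the
// raw bytes that must reach the decoder before the first RTP payload.
#include <liveMedia.hh>
#include <cstdio>

static void send_mp4v_config(MediaSubsession& subsession) {
	char const* configStr = subsession.fmtp_config();
	if (configStr == NULL || configStr[0] == '\0') return;  // no config parameter in the SDP

	unsigned configLen = 0;
	unsigned char* configData = parseGeneralConfigStr(configStr, configLen);
	if (configData == NULL || configLen == 0) { delete[] configData; return; }

	// The demuxer hands these bytes to the video sink with timestamp 0,
	// before the first real frame; here we merely dump them.
	for (unsigned i = 0; i < configLen; i++) printf("%02x", configData[i]);
	printf("\n");

	delete[] configData;  // parseGeneralConfigStr() allocates with new[]
}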
