/* main.c */
			/*extensions for some mobile phones*/
			sprintf(sdpLine, "a=framesize:%d %d-%d", rtp->packetizer->PayloadType, w, h);
			fprintf(sdp_out, "%s\n", sdpLine);
		}
		/*AMR*/
		if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR_WB)) {
			sprintf(sdpLine, "a=fmtp:%d octet-align", rtp->packetizer->PayloadType);
			fprintf(sdp_out, "%s\n", sdpLine);
		}
		/*Text*/
		else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_3GPP_TEXT) {
			gf_hinter_format_ttxt_sdp(rtp->packetizer, payloadName, sdpLine, rtp->session->mp4File, rtp->track);
			fprintf(sdp_out, "%s\n", sdpLine);
		}
		/*EVRC/SMV in non header-free mode*/
		else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_EVRC_SMV) && (rtp->packetizer->auh_size > 1)) {
			sprintf(sdpLine, "a=fmtp:%d maxptime=%d", rtp->packetizer->PayloadType, rtp->packetizer->auh_size*20);
			fprintf(sdp_out, "%s\n", sdpLine);
		}
		/*H264/AVC*/
		else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_AVC) {
			GF_AVCConfig *avcc = gf_isom_avc_config_get(rtp->session->mp4File, rtp->track, 1);
			sprintf(sdpLine, "a=fmtp:%d profile-level-id=%02X%02X%02X; packetization-mode=1", rtp->packetizer->PayloadType, avcc->AVCProfileIndication, avcc->profile_compatibility, avcc->AVCLevelIndication);
			if (gf_list_count(avcc->pictureParameterSets) || gf_list_count(avcc->sequenceParameterSets)) {
				u32 i, count, b64s;
				char b64[200];
				strcat(sdpLine, "; sprop-parameter-sets=");
				count = gf_list_count(avcc->sequenceParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->sequenceParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s] = 0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
				/*separate the SPS list from the PPS list*/
				if (i) strcat(sdpLine, ",");
				count = gf_list_count(avcc->pictureParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->pictureParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s] = 0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
			}
			fprintf(sdp_out, "%s\n", sdpLine);
			gf_odf_avc_cfg_del(avcc);
		}
		/*MPEG-4 decoder config*/
		else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_MPEG4) {
			GF_DecoderConfig *dcd;
			dcd = gf_isom_get_decoder_config(rtp->session->mp4File, rtp->track, 1);
			if (dcd && dcd->decoderSpecificInfo && dcd->decoderSpecificInfo->data) {
				gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, dcd->decoderSpecificInfo->data, dcd->decoderSpecificInfo->dataLength);
			} else {
				gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, NULL, 0);
			}
			if (dcd) gf_odf_desc_del((GF_Descriptor *)dcd);
			if (rtp->packetizer->slMap.IV_length) {
				const char *kms;
				gf_isom_get_ismacryp_info(rtp->session->mp4File, rtp->track, 1, NULL, NULL, NULL, NULL, &kms, NULL, NULL, NULL);
				if (!strnicmp(kms, "(key)", 5) || !strnicmp(kms, "(ipmp)", 6) || !strnicmp(kms, "(uri)", 5)) {
					strcat(sdpLine, "; ISMACrypKey=");
				} else {
					strcat(sdpLine, "; ISMACrypKey=(uri)");
				}
				strcat(sdpLine, kms);
			}
			fprintf(sdp_out, "%s\n", sdpLine);
		}
		/*MPEG-4 Audio LATM*/
		else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_LATM) {
			GF_DecoderConfig *dcd;
			GF_BitStream *bs;
			char *config_bytes;
			u32 config_size;
			/* form the StreamMuxConfig */
			bs = gf_bs_new(NULL, 32, GF_BITSTREAM_WRITE);
			gf_bs_write_int(bs, 0, 1);	/* audioMuxVersion */
			gf_bs_write_int(bs, 1, 1);	/* allStreamsSameTimeFraming */
			gf_bs_write_int(bs, 0, 6);	/* numSubFrames */
			gf_bs_write_int(bs, 0, 4);	/* numProgram */
			gf_bs_write_int(bs, 0, 3);	/* numLayer */
			/* audio-specific config */
			dcd = gf_isom_get_decoder_config(rtp->session->mp4File, rtp->track, 1);
			if (dcd && dcd->decoderSpecificInfo) {
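				/* For an AAC track the DecoderSpecificInfo carried in the ESD is the
				   AudioSpecificConfig; it is appended bit-for-bit to the StreamMuxConfig
				   built above and then handed to gf_rtp_builder_format_sdp() below, so the
				   muxed configuration can be advertised in the fmtp line (for MP4A-LATM,
				   RFC 3016 carries it as a hexadecimal "config" parameter). */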
				gf_bs_write_data(bs, dcd->decoderSpecificInfo->data, dcd->decoderSpecificInfo->dataLength);
				gf_odf_desc_del((GF_Descriptor *)dcd);
			}
			/* other data */
			gf_bs_write_int(bs, 0, 3);	/* frameLengthType */
			gf_bs_write_int(bs, 0xff, 8);	/* latmBufferFullness */
			gf_bs_write_int(bs, 0, 1);	/* otherDataPresent */
			gf_bs_write_int(bs, 0, 1);	/* crcCheckPresent */
			gf_bs_get_content(bs, &config_bytes, &config_size);
			gf_bs_del(bs);
			gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, config_bytes, config_size);
			fprintf(sdp_out, "%s\n", sdpLine);
			free(config_bytes);
		}
		rtp = rtp->next;
	}
	fprintf(sdp_out, "\n");
	fclose(sdp_out);
	return GF_OK;
} /* rtp_init_packetizer */

GF_Err rtp_init_channel(RTP_Stream *rtp, u32 path_mtu, char *dest, int port)
{
	GF_RTSPTransport tr;
	GF_Err res;

	rtp->channel = gf_rtp_new();
	gf_rtp_set_ports(rtp->channel, 0);

	tr.IsUnicast = gf_sk_is_multicast_address(dest) ? 0 : 1;
	tr.Profile = "RTP/AVP";
	tr.destination = dest;
	tr.source = "0.0.0.0";
	tr.IsRecord = 0;
	tr.Append = 0;
	tr.SSRC = rand();
	tr.port_first = port;
	tr.port_last = port+1;
	if (tr.IsUnicast) {
		tr.client_port_first = port;
		tr.client_port_last = port+1;
	} else {
		tr.source = dest;
	}
	res = gf_rtp_setup_transport(rtp->channel, &tr, dest);
	if (res != GF_OK) {
		fprintf(stdout, "Cannot setup RTP transport info\n");
		return res;
	}
	res = gf_rtp_initialize(rtp->channel, 0, 1, 1500, 0, 0, NULL);
	if (res != GF_OK) {
		fprintf(stdout, "Cannot initialize RTP sockets\n");
		return res;
	}
	return GF_OK;
} /* rtp_init_channel */

// ---------------------------------------------------------------------------------------------------
/*
 * packetization of a burst:
 * process the AUs of each stream until the burst size is reached.
 */
void burst_process_session(RTP_Session *session)
{
	RTP_Stream *rtp, *to_send;
	Bool first = 1;
	u32 time;

	time = gf_sys_clock();
	if (!session->timelineOrigin) session->timelineOrigin = time;

	rtp = session->stream;
	while (rtp) {
		rtp->process_burst = 1;
		rtp = rtp->next;
	}
	rtp = NULL;
	while (1) {
		u64 min_ts = (u64) -1;
		to_send = NULL;
		/*for each stream, locate next time*/
		rtp = session->stream;
		while (rtp) {
			/*channel is no longer active in current burst*/
			if (!rtp->process_burst) {
				rtp = rtp->next;
				continue;
			}
			/*flush prev stream if needed*/
			rtp_flush_channel(rtp);
			/*load next AU*/
			if (!rtp->au) {
				if (rtp->current_au >= rtp->nb_aus) {
					if (!session->looping) {
						rtp->process_burst = 0;
						rtp = rtp->next;
						continue;
					}
					rtp->process_burst = 1;
					rtp->ts_offset = rtp->next_ts;
					rtp->microsec_ts_offset = (u32) (rtp->next_ts*(1000000.0/rtp->packetizer->sl_config.timestampResolution)) + session->timelineOrigin;
					rtp->current_au = 0;
				}
				if (rtp->current_au + 1 == rtp->nb_aus) {
					rtp->current_au = rtp->current_au;	/* no-op (self-assignment), likely a leftover breakpoint spot for the last AU */
				}
				rtp->au = gf_isom_get_sample(session->mp4File, rtp->track, rtp->current_au + 1, &rtp->sample_desc_index);
				rtp->current_au++;
				if (rtp->au) {
					u64 ts;
					rtp->sample_duration = gf_isom_get_sample_duration(session->mp4File, rtp->track, rtp->current_au);
					rtp->sample_duration = (u32) (rtp->sample_duration*rtp->ts_scale);
					rtp->microsec_dts = (u64) (rtp->microsec_ts_scale * (s64) (rtp->au->DTS)) + rtp->microsec_ts_offset + session->timelineOrigin;
					ts = (u64) (rtp->ts_scale * (s64) (rtp->au->DTS));
					rtp->packetizer->sl_header.decodingTimeStamp = ts + rtp->ts_offset;
					ts = (u64) (rtp->ts_scale * (s64) (rtp->au->DTS + rtp->au->CTS_Offset));
					rtp->packetizer->sl_header.compositionTimeStamp = ts + rtp->ts_offset;
					rtp->packetizer->sl_header.randomAccessPointFlag = rtp->au->IsRAP;
				}
			}
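			/* among the streams still active in this burst, remember the one whose
			   pending AU has the smallest decoding time in microseconds: it is the
			   next one to be packetized */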
			if (rtp->au) {
				if (min_ts > rtp->microsec_dts) {
					min_ts = rtp->microsec_dts;
					to_send = rtp;
				}
			}
			rtp = rtp->next;
		}
		/*burst is full or no packet to write due to timing*/
		if (!to_send) break;
		rtp = to_send;

		/*compute drift*/
		if (first) {
			first = 0;
			session->drift = (s32) ((s64) (time - session->timelineOrigin) - ((s64) rtp->next_ts*1000/rtp->packetizer->sl_config.timestampResolution));
			if (session->streamer->log_level >= LOG_BURST) {
				fprintf(stdout, "Time %u - Burst %u - Session %u (Time %u) - TS %d - Drift %d ms\n", time, session->streamer->nbBurstSent, session->id, time - session->timelineOrigin, rtp->next_ts, session->drift);
			} else {
				fprintf(stdout, "Time %u - Burst %u - Session %u (Time %u) - TS %d - Drift %d ms\r", time, session->streamer->nbBurstSent, session->id, time - session->timelineOrigin, rtp->next_ts, session->drift);
				fflush(stdout);
			}
		}
		if (session->streamer->log_level >= LOG_AU)
			fprintf(stdout, "Sess %d - stream %d - Processing AU %d - DTS "LLD" - CTS "LLD"\n", session->id, rtp->track, rtp->current_au, rtp->packetizer->sl_header.decodingTimeStamp, rtp->packetizer->sl_header.compositionTimeStamp);

		gf_rtp_builder_process(rtp->packetizer, rtp->au->data, rtp->au->dataLength, (u8) 1, rtp->au->dataLength, rtp->sample_duration, (u8) rtp->sample_desc_index);
		rtp->next_ts = (u32) (rtp->packetizer->sl_header.decodingTimeStamp + rtp->sample_duration);
		/*done with this AU, delete the sample*/
		gf_isom_sample_del(&rtp->au);
	}
	if (session->streamer->log_level >= LOG_BURST)
		fprintf(stdout, " Actual Burst Size %d bytes - Actual Bit Rate %d kbps\n", session->dataLengthInBurst, 8*session->dataLengthInBurst/(session->streamer->burstDuration+session->streamer->offDuration));
	session->nbBurstSent++;
	session->streamer->nbBurstSent++;
}

void process_sessions(Streamer *streamer)
{
	RTP_Session *session;
	RTP_Stream *rtp, *to_send;
	u32 time;
	s32 diff;
	u64 min_ts;

	time = gf_sys_clock();
	/*browse all sessions and locate the most mature stream*/
	to_send = NULL;
	min_ts = (u64) -1;
	session = streamer->session;
	while (session) {
		/*init session timeline - all sessions are sync'ed for packet scheduling purposes*/
		if (!session->timelineOrigin) session->timelineOrigin = time*1000;
		rtp = session->stream;
		while (rtp) {
			/*load next AU*/
			if (!rtp->au) {
				if (rtp->current_au >= rtp->nb_aus) {
					Double scale;
					if (!rtp->session->looping) {
						rtp = rtp->next;
						continue;
					}
					/*increment ts offset*/
					scale = rtp->packetizer->sl_config.timestampResolution/1000.0;
					rtp->ts_offset += (u32) (session->duration * scale);
					rtp->microsec_ts_offset = (u32) (rtp->ts_offset*(1000000.0/rtp->packetizer->sl_config.timestampResolution)) + session->timelineOrigin;
					rtp->current_au = 0;
				}
				if (rtp->current_au + 1 == rtp->nb_aus) {
					rtp->current_au = rtp->current_au;	/* no-op (self-assignment), likely a leftover breakpoint spot for the last AU */
				}
				rtp->au = gf_isom_get_sample(rtp->session->mp4File, rtp->track, rtp->current_au + 1, &rtp->sample_desc_index);
				rtp->current_au++;
				if (rtp->au) {
					u64 ts;
					rtp->sample_duration = gf_isom_get_sample_duration(rtp->session->mp4File, rtp->track, rtp->current_au);
					rtp->sample_duration = (u32) (rtp->sample_duration*rtp->ts_scale);
					rtp->microsec_dts = (u64) (rtp->microsec_ts_scale * (s64) (rtp->au->DTS)) + rtp->microsec_ts_offset + session->timelineOrigin;
					ts = (u64) (rtp->ts_scale * (s64) (rtp->au->DTS));
					rtp->packetizer->sl_header.decodingTimeStamp = ts + rtp->ts_offset;
					ts = (u64) (rtp->ts_scale * (s64) (rtp->au->DTS + rtp->au->CTS_Offset));
					rtp->packetizer->sl_header.compositionTimeStamp = ts + rtp->ts_offset;
					rtp->packetizer->sl_header.randomAccessPointFlag = rtp->au->IsRAP;
				}
			}
			/*check timing*/
			if (rtp->au) {
				if (min_ts > rtp->microsec_dts) {
					min_ts = rtp->microsec_dts;
					to_send = rtp;
				}
			}
			rtp = rtp->next;
		}
		session = session->next;
	}
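	/* if at least one AU was loaded, min_ts now holds the earliest DTS (in
	   microseconds) across every stream of every session; below it is converted
	   to milliseconds and compared against gf_sys_clock() so the packet is not
	   sent before its due time */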
	/*no input data ...*/
	if (!to_send) return;

	min_ts /= 1000;
	/*sleep until the TS is mature*/
	while (1) {
		diff = (u32) (min_ts) - gf_sys_clock();
		if (diff > 2) {
			//fprintf(stdout, "RTP session %d stream %d - sleeping %d ms\n", to_send->session->id, to_send->track, diff);
			gf_sleep(1);
		} else {
			if (diff < 0) fprintf(stdout, "WARNING: RTP session %d stream %d - sending packet %d ms too late\n", to_send->session->id, to_send->track, -diff);
			break;
		}
	}
	/*send packets*/
	gf_rtp_builder_process(to_send->packetizer, to_send->au->data, to_send->au->dataLength, (u8) 1, to_send->au->dataLength, to_send->sample_duration, (u8) to_send->sample_desc_index);
	/*delete sample*/
	gf_isom_sample_del(&to_send->au);
}

// ---------------------------------------------------------------------------------------------------

u16 check_next_port(Streamer *streamer, u16 first_port)
{
	RTP_Session *session = streamer->session;
	while (session) {