📄 rtp_depacketizer.c
    memset(&rtp->sl_map, 0, sizeof(GP_RTPSLMap));
    if (!stricmp(map->payload_name, "enc-mpeg4-generic")) rtp->flags |= GF_RTP_HAS_ISMACRYP;

    /*then process all FMTPs*/
    i=0;
    while ((fmtp = (GF_SDP_FMTP*)gf_list_enum(media->FMTP, &i))) {
        GF_X_Attribute *att;
        //we work with only one PayloadType for now
        if (fmtp->PayloadType != map->PayloadType) continue;
        j=0;
        while ((att = (GF_X_Attribute *)gf_list_enum(fmtp->Attributes, &j))) {
            payt_set_param(rtp, att->Name, att->Value);
        }
    }

    switch (rtp->payt) {
    case GF_RTP_PAYT_LATM:
    {
        u32 AudioMuxVersion, AllStreamsSameTime, numSubFrames, numPrograms, numLayers;
        GF_M4ADecSpecInfo cfg;
        char *latm_dsi = rtp->sl_map.config;
        GF_BitStream *bs = gf_bs_new(latm_dsi, rtp->sl_map.configSize, GF_BITSTREAM_READ);
        AudioMuxVersion = gf_bs_read_int(bs, 1);
        AllStreamsSameTime = gf_bs_read_int(bs, 1);
        numSubFrames = gf_bs_read_int(bs, 6);
        numPrograms = gf_bs_read_int(bs, 4);
        numLayers = gf_bs_read_int(bs, 3);

        if (AudioMuxVersion || !AllStreamsSameTime || numSubFrames || numPrograms || numLayers) {
            gf_bs_del(bs);
            return GF_NOT_SUPPORTED;
        }
        memset(&cfg, 0, sizeof(cfg));
        cfg.base_object_type = gf_bs_read_int(bs, 5);
        cfg.base_sr_index = gf_bs_read_int(bs, 4);
        if (cfg.base_sr_index == 0x0F) {
            cfg.base_sr = gf_bs_read_int(bs, 24);
        } else {
            cfg.base_sr = GF_M4ASampleRates[cfg.base_sr_index];
        }
        cfg.nb_chan = gf_bs_read_int(bs, 4);
        if (cfg.base_object_type==5) {
            cfg.has_sbr = 1;
            cfg.sbr_sr_index = gf_bs_read_int(bs, 4);
            if (cfg.sbr_sr_index == 0x0F) {
                cfg.sbr_sr = gf_bs_read_int(bs, 24);
            } else {
                cfg.sbr_sr = GF_M4ASampleRates[cfg.sbr_sr_index];
            }
            cfg.sbr_object_type = gf_bs_read_int(bs, 5);
        }
        gf_bs_del(bs);
        free(rtp->sl_map.config);
        bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
        /*write as regular AAC*/
        gf_bs_write_int(bs, cfg.base_object_type, 5);
        gf_bs_write_int(bs, cfg.base_sr_index, 4);
        gf_bs_write_int(bs, cfg.nb_chan, 4);
        gf_bs_align(bs);
        gf_bs_get_content(bs, &rtp->sl_map.config, &rtp->sl_map.configSize);
        gf_bs_del(bs);

        rtp->sl_map.StreamType = GF_STREAM_AUDIO;
        rtp->sl_map.ObjectTypeIndication = 0x40;
        /*assign depacketizer*/
        rtp->depacketize = gf_rtp_parse_latm;
    }
        break;
    case GF_RTP_PAYT_MPEG4:
        /*mark if AU header is present*/
        rtp->sl_map.auh_first_min_len = 0;
        if (rtp->flags & GF_RTP_HAS_ISMACRYP) {
            if (!rtp->isma_scheme) rtp->isma_scheme = GF_4CC('i','A','E','C');
            if (!rtp->sl_map.IV_length) rtp->sl_map.IV_length = 4;

            if (rtp->flags & GF_RTP_ISMA_SEL_ENC) rtp->sl_map.auh_first_min_len += 8;
            else rtp->sl_map.auh_first_min_len += 8*(rtp->sl_map.IV_length + rtp->sl_map.KI_length);
        }
        rtp->sl_map.auh_first_min_len += rtp->sl_map.CTSDeltaLength;
        rtp->sl_map.auh_first_min_len += rtp->sl_map.DTSDeltaLength;
        rtp->sl_map.auh_first_min_len += rtp->sl_map.SizeLength;
        rtp->sl_map.auh_first_min_len += rtp->sl_map.RandomAccessIndication;
        rtp->sl_map.auh_first_min_len += rtp->sl_map.StreamStateIndication;
        rtp->sl_map.auh_min_len = rtp->sl_map.auh_first_min_len;
        rtp->sl_map.auh_first_min_len += rtp->sl_map.IndexLength;
        rtp->sl_map.auh_min_len += rtp->sl_map.IndexDeltaLength;
        /*RFC3016 flags*/
        if (!stricmp(map->payload_name, "MP4V-ES")) {
            rtp->sl_map.StreamType = GF_STREAM_VISUAL;
            rtp->sl_map.ObjectTypeIndication = 0x20;
        }
        else if (!stricmp(map->payload_name, "MP4A-LATM")) {
            rtp->sl_map.StreamType = GF_STREAM_AUDIO;
            rtp->sl_map.ObjectTypeIndication = 0x40;
        }
        /*MPEG-4 video, check RAPs if not indicated*/
        if ((rtp->sl_map.StreamType == GF_STREAM_VISUAL) && (rtp->sl_map.ObjectTypeIndication == 0x20) && !rtp->sl_map.RandomAccessIndication) {
            rtp->flags |= GF_RTP_M4V_CHECK_RAP;
        }
        /*assign depacketizer*/
        rtp->depacketize = gf_rtp_parse_mpeg4;
        break;
    case GF_RTP_PAYT_MPEG12_AUDIO:
        rtp->sl_map.StreamType = GF_STREAM_AUDIO;
        rtp->sl_map.ObjectTypeIndication = 0x69;
        /*assign depacketizer*/
        rtp->depacketize = gf_rtp_parse_mpeg12_audio;
        break;
    case GF_RTP_PAYT_MPEG12_VIDEO:
        /*we signal RAPs*/
        rtp->sl_map.RandomAccessIndication = 1;
        rtp->sl_map.StreamType = GF_STREAM_VISUAL;
        /*FIXME: how to differentiate MPEG1 from MPEG2 video before any frame is received??*/
        rtp->sl_map.ObjectTypeIndication = 0x6A;
        /*assign depacketizer*/
        rtp->depacketize = gf_rtp_parse_mpeg12_video;
        break;
    case GF_RTP_PAYT_AMR:
    case GF_RTP_PAYT_AMR_WB:
    {
        GF_BitStream *bs;
        rtp->sl_map.StreamType = GF_STREAM_AUDIO;
        rtp->sl_map.ObjectTypeIndication = GPAC_EXTRA_CODECS_OTI;
        /*create DSI*/
        bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
        if (rtp->payt == GF_RTP_PAYT_AMR) {
            gf_bs_write_u32(bs, GF_4CC('s', 'a', 'm', 'r'));
            gf_bs_write_u16(bs, 8000);
            gf_bs_write_u16(bs, 160);
        } else {
            gf_bs_write_u32(bs, GF_4CC('s', 'a', 'w', 'b'));
            gf_bs_write_u16(bs, 16000);
            gf_bs_write_u16(bs, 320);
        }
        gf_bs_write_u8(bs, 1);
        gf_bs_write_u8(bs, 16);
        gf_bs_write_u8(bs, 1);
        gf_bs_get_content(bs, &rtp->sl_map.config, &rtp->sl_map.configSize);
        gf_bs_del(bs);
        /*assign depacketizer*/
        rtp->depacketize = gf_rtp_parse_amr;
    }
        break;
    case GF_RTP_PAYT_H263:
    {
        u32 x, y, w, h;
        GF_X_Attribute *att;
        GF_BitStream *bs;
        x = y = w = h = 0;
        j=0;
        while ((att = (GF_X_Attribute *)gf_list_enum(media->Attributes, &j))) {
            if (stricmp(att->Name, "cliprect")) continue;
            /*only get the display area*/
            sscanf(att->Value, "%d,%d,%d,%d", &y, &x, &h, &w);
        }

        rtp->sl_map.StreamType = GF_STREAM_VISUAL;
        rtp->sl_map.ObjectTypeIndication = GPAC_EXTRA_CODECS_OTI;
        /*create DSI*/
        bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
        gf_bs_write_u32(bs, GF_4CC('h', '2', '6', '3'));
        gf_bs_write_u16(bs, w);
        gf_bs_write_u16(bs, h);
        gf_bs_get_content(bs, &rtp->sl_map.config, &rtp->sl_map.configSize);
        gf_bs_del(bs);
        /*we signal RAPs*/
        rtp->sl_map.RandomAccessIndication = 1;
        /*assign depacketizer*/
        rtp->depacketize = gf_rtp_parse_h263;
    }
        break;
    case GF_RTP_PAYT_3GPP_TEXT:
    {
        char *tx3g, *a_tx3g;
        GF_BitStream *bs;
        u32 nb_desc;
        GF_SDP_FMTP *fmtp;
        GF_TextConfig tcfg;
        memset(&tcfg, 0, sizeof(GF_TextConfig));
        tcfg.tag = GF_ODF_TEXT_CFG_TAG;
        tcfg.Base3GPPFormat = 0x10;
        tcfg.MPEGExtendedFormat = 0x10;
        tcfg.profileLevel = 0x10;
        tcfg.timescale = map->ClockRate;
        tcfg.sampleDescriptionFlags = 1;
        tx3g = NULL;

        i=0;
        while ((fmtp = (GF_SDP_FMTP*)gf_list_enum(media->FMTP, &i))) {
            GF_X_Attribute *att;
            if (fmtp->PayloadType != map->PayloadType) continue;

            j=0;
            while ((att = (GF_X_Attribute *)gf_list_enum(fmtp->Attributes, &j))) {
                if (!stricmp(att->Name, "width")) tcfg.text_width = atoi(att->Value);
                else if (!stricmp(att->Name, "height")) tcfg.text_height = atoi(att->Value);
                else if (!stricmp(att->Name, "tx")) tcfg.horiz_offset = atoi(att->Value);
                else if (!stricmp(att->Name, "ty")) tcfg.vert_offset = atoi(att->Value);
                else if (!stricmp(att->Name, "layer")) tcfg.layer = atoi(att->Value);
                else if (!stricmp(att->Name, "max-w")) tcfg.video_width = atoi(att->Value);
                else if (!stricmp(att->Name, "max-h")) tcfg.video_height = atoi(att->Value);
                else if (!stricmp(att->Name, "tx3g")) tx3g = att->Value;
            }
        }
        if (!tx3g) return GF_NON_COMPLIANT_BITSTREAM;

        bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
        gf_bs_write_u8(bs, tcfg.Base3GPPFormat);
        gf_bs_write_u8(bs, tcfg.MPEGExtendedFormat); /*MPEGExtendedFormat*/
        gf_bs_write_u8(bs, tcfg.profileLevel); /*profileLevel*/
        gf_bs_write_u24(bs, tcfg.timescale);
        gf_bs_write_int(bs, 0, 1); /*no alt formats*/
        gf_bs_write_int(bs, tcfg.sampleDescriptionFlags, 2);
        gf_bs_write_int(bs, 1, 1); /*we will write sample desc*/
        gf_bs_write_int(bs, 1, 1); /*video info*/
        gf_bs_write_int(bs, 0, 3); /*reserved, spec doesn't say the values*/
        gf_bs_write_u8(bs, tcfg.layer);
        gf_bs_write_u16(bs, tcfg.text_width);
        gf_bs_write_u16(bs, tcfg.text_height);
        /*get all tx3g (comma separated)*/
        nb_desc = 1;
        a_tx3g = tx3g;
        while ((a_tx3g = strchr(a_tx3g, ',')) ) {
            a_tx3g ++;
            nb_desc ++;
        }
        a_tx3g = tx3g;
        gf_bs_write_u8(bs, nb_desc);
        while (1) {
            char *next_tx3g, szOut[1000];
            u32 len;
            strcpy(a_tx3g, tx3g);
            next_tx3g = strchr(a_tx3g, ',');
            if (next_tx3g) next_tx3g[0] = 0;
            len = gf_base64_decode(a_tx3g, strlen(a_tx3g), szOut, 1000);
            gf_bs_write_data(bs, szOut, len);
            tx3g = strchr(tx3g, ',');
            if (!tx3g) break;
            tx3g += 1;
            while (tx3g[0] == ' ') tx3g += 1;
        }
        /*write video cfg*/
        gf_bs_write_u16(bs, tcfg.video_width);
        gf_bs_write_u16(bs, tcfg.video_height);
        gf_bs_write_u16(bs, tcfg.horiz_offset);
        gf_bs_write_u16(bs, tcfg.vert_offset);
        gf_bs_get_content(bs, &rtp->sl_map.config, &rtp->sl_map.configSize);
        rtp->sl_map.StreamType = GF_STREAM_TEXT;
        rtp->sl_map.ObjectTypeIndication = 0x08;
        gf_bs_del(bs);
        /*assign depacketizer*/
        rtp->depacketize = gf_rtp_parse_ttxt;
    }
        break;
    case GF_RTP_PAYT_H264_AVC:
    {
        GF_SDP_FMTP *fmtp;
        GF_AVCConfig *avcc = gf_odf_avc_cfg_new();
        avcc->AVCProfileIndication = (rtp->sl_map.PL_ID>>16) & 0xFF;
        avcc->profile_compatibility = (rtp->sl_map.PL_ID>>8) & 0xFF;
        avcc->AVCLevelIndication = rtp->sl_map.PL_ID & 0xFF;
        avcc->configurationVersion = 1;
        avcc->nal_unit_size = 4;
        rtp->sl_map.StreamType = 4;
        rtp->sl_map.ObjectTypeIndication = 0x21;
        /*we will signal RAPs*/
        rtp->sl_map.RandomAccessIndication = 1;
        /*rewrite sps and pps*/
        i=0;
        while ((fmtp = (GF_SDP_FMTP*)gf_list_enum(media->FMTP, &i))) {
            GF_X_Attribute *att;
            if (fmtp->PayloadType != map->PayloadType) continue;
            j=0;
            while ((att = (GF_X_Attribute *)gf_list_enum(fmtp->Attributes, &j))) {
                char *nal_ptr, *sep;
                if (stricmp(att->Name, "sprop-parameter-sets")) continue;

                nal_ptr = att->Value;
                while (nal_ptr) {
                    u32 nalt, b64size, ret;
                    char *b64_d;

                    sep = strchr(nal_ptr, ',');
                    if (sep) sep[0] = 0;

                    b64size = strlen(nal_ptr);
                    b64_d = (char*)malloc(sizeof(char)*b64size);
                    ret = gf_base64_decode(nal_ptr, b64size, b64_d, b64size);
                    b64_d[ret] = 0;

                    nalt = b64_d[0] & 0x1F;
                    if (/*SPS*/(nalt==0x07) || /*PPS*/(nalt==0x08)) {
                        GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)malloc(sizeof(GF_AVCConfigSlot));
                        sl->size = ret;
                        sl->data = (char*)malloc(sizeof(char)*sl->size);
                        memcpy(sl->data, b64_d, sizeof(char)*sl->size);
                        if (nalt==0x07) {
                            gf_list_add(avcc->sequenceParameterSets, sl);
                        } else {
                            gf_list_add(avcc->pictureParameterSets, sl);
                        }
                    }
                    free(b64_d);

                    if (sep) {
                        sep[0] = ',';
                        nal_ptr = sep+1;
                    } else {
                        break;
                    }
                }
            }
        }
        gf_odf_avc_cfg_write(avcc, &rtp->sl_map.config, &rtp->sl_map.configSize);
        gf_odf_avc_cfg_del(avcc);
    }
        /*assign depacketizer*/
        rtp->depacketize = gf_rtp_parse_h264;
        break;
    default:
        return GF_NOT_SUPPORTED;
    }
    return GF_OK;
}

GF_EXPORT
GF_RTPDepacketizer *gf_rtp_depacketizer_new(GF_SDPMedia *media, void (*sl_packet_cbk)(void *udta, char *payload, u32 size, GF_SLHeader *hdr, GF_Err e), void *udta)
{
    GF_Err e;
    GF_RTPMap *map;
    u32 payt;
    GF_RTPDepacketizer *tmp;

    /*check RTP map - for now we only support 1 RTPMap*/
    if (!sl_packet_cbk || !media || media->fmt_list || (gf_list_count(media->RTPMaps) > 1)) return NULL;

    /*check payload type*/
    map = (GF_RTPMap *)gf_list_get(media->RTPMaps, 0);
    payt = gf_rtp_get_payload_type(map, media);
    if (!payt) return NULL;

    GF_SAFEALLOC(tmp, GF_RTPDepacketizer);
    tmp->payt = payt;
    e = gf_rtp_payt_setup(tmp, map, media);
    if (e) {
        free(tmp);
        return NULL;
    }

    assert(tmp->depacketize);
    tmp->clock_rate = map->ClockRate;
    tmp->on_sl_packet = sl_packet_cbk;
    tmp->udta = udta;
    return tmp;
}

GF_EXPORT
void gf_rtp_depacketizer_reset(GF_RTPDepacketizer *rtp, Bool full_reset)
{
    if (rtp) {
        if (rtp->inter_bs) gf_bs_del(rtp->inter_bs);
        rtp->inter_bs = NULL;
        rtp->flags |= GF_RTP_NEW_AU;
        if (full_reset) memset(&rtp->sl_hdr, 0, sizeof(GF_SLHeader));
    }
}

GF_EXPORT
void gf_rtp_depacketizer_del(GF_RTPDepacketizer *rtp)
{
    if (rtp) {
        gf_rtp_depacketizer_reset(rtp, 0);
        if (rtp->sl_map.config) free(rtp->sl_map.config);
        if (rtp->key) free(rtp->key);
        free(rtp);
    }
}

GF_EXPORT
void gf_rtp_depacketizer_process(GF_RTPDepacketizer *rtp, GF_RTPHeader *hdr, char *payload, u32 size)
{
    assert(rtp && rtp->depacketize);
    rtp->depacketize(rtp, hdr, payload, size);
}

GF_EXPORT
void gf_rtp_depacketizer_get_slconfig(GF_RTPDepacketizer *rtp, GF_SLConfig *slc)
{
    memset(slc, 0, sizeof(GF_SLConfig));
    slc->tag = GF_ODF_SLC_TAG;

    slc->AULength = rtp->sl_map.ConstantSize;
    if (rtp->sl_map.ConstantDuration) {
        slc->CUDuration = slc->AUDuration = rtp->sl_map.ConstantDuration;
    } else {
        slc->CUDuration = slc->AUDuration = rtp->sl_hdr.au_duration;
    }
    slc->AUSeqNumLength = rtp->sl_map.StreamStateIndication;

    /*RTP SN is on 16 bits*/
    slc->packetSeqNumLength = 0;
    /*RTP TS is on 32 bits*/
    slc->timestampLength = 32;
    slc->timeScale = slc->timestampResolution = rtp->clock_rate;
    slc->useTimestampsFlag = 1;

    /*we override these flags because we emulate them through the marker bit*/
    slc->useAccessUnitEndFlag = slc->useAccessUnitStartFlag = 1;
    slc->useRandomAccessPointFlag = rtp->sl_map.RandomAccessIndication;
    /*by default all packets are RAP if not signaled - this is true for audio, should NEVER be seen with systems streams and is overridden for video (cf below)*/
    slc->hasRandomAccessUnitsOnlyFlag = rtp->sl_map.RandomAccessIndication ? 0 : 1;
    /*checking RAP for video*/
    if (rtp->flags & GF_RTP_M4V_CHECK_RAP) {
        slc->useRandomAccessPointFlag = 1;
        slc->hasRandomAccessUnitsOnlyFlag = 0;
    }
    /*should work for simple carousel without streamState indicated*/
    slc->AUSeqNumLength = rtp->sl_map.IndexLength;
}
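The listing above ends with the exported entry points of the depacketizer. For orientation, here is a minimal usage sketch (not part of the original file) showing how those entry points fit together: build a depacketizer from an SDP media description, export the derived GF_SLConfig, feed it RTP payloads, and release it. The on_sl_unit and setup_and_run names, the callback body, and the <gpac/ietf.h> include are assumptions for illustration only; the SDP parsing and RTP header parsing that produce the arguments live elsewhere in GPAC.

#include <gpac/ietf.h>

/*hypothetical callback: invoked by the payload-specific depacketizer for each reconstructed SL unit or fragment*/
static void on_sl_unit(void *udta, char *payload, u32 size, GF_SLHeader *hdr, GF_Err e)
{
    /*udta is the opaque pointer passed to gf_rtp_depacketizer_new - forward payload/hdr to the decoder here*/
}

static GF_Err setup_and_run(GF_SDPMedia *media, GF_RTPHeader *rtp_hdr, char *rtp_payload, u32 payload_size, void *my_session)
{
    GF_SLConfig slc;
    GF_RTPDepacketizer *depack;

    /*fails for media with more than one RTPMap or an unsupported payload type*/
    depack = gf_rtp_depacketizer_new(media, on_sl_unit, my_session);
    if (!depack) return GF_NOT_SUPPORTED;

    /*export the SL configuration derived from the payload mapping*/
    gf_rtp_depacketizer_get_slconfig(depack, &slc);

    /*for each received RTP packet: rtp_hdr is the parsed RTP header, rtp_payload points past it*/
    gf_rtp_depacketizer_process(depack, rtp_hdr, rtp_payload, payload_size);

    /*on seek or stream restart, drop any pending AU state*/
    gf_rtp_depacketizer_reset(depack, 1);

    gf_rtp_depacketizer_del(depack);
    return GF_OK;
}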