📄 rtp_payloads.c
字号:
gf_bs_write_u8(bs, nb_desc);
/* NOTE(review): this span is the interior of a function whose opening lines are
 * outside this view - bs, nb_desc, a_tx3g, tx3g, tcfg, ch, media, map, i and j
 * are declared/initialized earlier; comments only added here, code untouched. */
/* decode every comma-separated base64 tx3g sample description and append the
 * raw bytes to the decoder-config bitstream */
while (1) {
	char *next_tx3g, szOut[1000];
	u32 len;
	/* NOTE(review): unbounded strcpy - presumably a_tx3g was allocated with at
	 * least strlen(tx3g)+1 bytes by the enclosing code; confirm upstream */
	strcpy(a_tx3g, tx3g);
	/* isolate the current entry by cutting at the next comma */
	next_tx3g = strchr(a_tx3g, ',');
	if (next_tx3g) next_tx3g[0] = 0;
	len = gf_base64_decode(a_tx3g, strlen(a_tx3g), szOut, 1000);
	gf_bs_write_data(bs, szOut, len);
	/* advance in the untouched source list, skipping separator spaces */
	tx3g = strchr(tx3g, ',');
	if (!tx3g) break;
	tx3g += 1;
	while (tx3g[0] == ' ') tx3g += 1;
}
/*write video cfg: text area dimensions and offsets*/
gf_bs_write_u16(bs, tcfg.video_width);
gf_bs_write_u16(bs, tcfg.video_height);
gf_bs_write_u16(bs, tcfg.horiz_offset);
gf_bs_write_u16(bs, tcfg.vert_offset);
/* hand the serialized config over to the SL map (ownership transferred) */
gf_bs_get_content(bs, &ch->sl_map.config, &ch->sl_map.configSize);
ch->sl_map.StreamType = GF_STREAM_TEXT;
ch->sl_map.ObjectTypeIndication = 0x08;
gf_bs_del(bs);
}
break;
case GP_RTP_PAYT_H264_AVC:
{
	GF_SDP_FMTP *fmtp;
	GF_AVCConfig *avcc = gf_odf_avc_cfg_new();
	/* PL_ID packs profile_idc | profile_compatibility | level_idc, one byte each */
	avcc->AVCProfileIndication = (ch->sl_map.PL_ID>>16) & 0xFF;
	avcc->profile_compatibility = (ch->sl_map.PL_ID>>8) & 0xFF;
	avcc->AVCLevelIndication = ch->sl_map.PL_ID & 0xFF;
	avcc->configurationVersion = 1;
	avcc->nal_unit_size = 4;
	ch->sl_map.StreamType = 4;
	ch->sl_map.ObjectTypeIndication = 0x21;
	/*we will signal RAPs*/
	ch->sl_map.RandomAccessIndication = 1;
	/* rebuild the SPS/PPS lists from the SDP sprop-parameter-sets attribute
	 * of the fmtp line matching this payload type */
	i=0;
	while ((fmtp = (GF_SDP_FMTP*)gf_list_enum(media->FMTP, &i))) {
		GF_X_Attribute *att;
		if (fmtp->PayloadType != map->PayloadType) continue;
		j=0;
		while ((att = (GF_X_Attribute *)gf_list_enum(fmtp->Attributes, &j))) {
			char *nal_ptr, *sep;
			if (stricmp(att->Name, "sprop-parameter-sets")) continue;
			/* attribute value is a comma-separated list of base64 NAL units */
			nal_ptr = att->Value;
			while (nal_ptr) {
				u32 nalt, b64size, ret;
				char *b64_d;
				sep = strchr(nal_ptr, ',');
				if (sep) sep[0] = 0;
				b64size = strlen(nal_ptr);
				/* NOTE(review): decoded output is at most 3/4 of b64size so the
				 * buffer fits, but an empty entry (b64size==0) would make this a
				 * zero-byte alloc followed by the b64_d[ret]=0 write - verify
				 * the SDP parser never yields empty list entries */
				b64_d = (char*)malloc(sizeof(char)*b64size);
				ret = gf_base64_decode(nal_ptr, b64size, b64_d, b64size);
				b64_d[ret] = 0;
				/* NAL unit type is the low 5 bits of the first byte */
				nalt = b64_d[0] & 0x1F;
				if (/*SPS*/(nalt==0x07) || /*PPS*/(nalt==0x08)) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)malloc(sizeof(GF_AVCConfigSlot));
					sl->size = ret;
					sl->data = (char*)malloc(sizeof(char)*sl->size);
					memcpy(sl->data, b64_d,
sizeof(char)*sl->size);
					if (nalt==0x07) {
						gf_list_add(avcc->sequenceParameterSets, sl);
					} else {
						gf_list_add(avcc->pictureParameterSets, sl);
					}
				}
				free(b64_d);
				/* restore the comma and step to the next base64 entry */
				if (sep) {
					sep[0] = ',';
					nal_ptr = sep+1;
				} else {
					break;
				}
			}
		}
	}
	/* serialize the AVC decoder config into the SL map (ownership transferred) */
	gf_odf_avc_cfg_write(avcc, &ch->sl_map.config, &ch->sl_map.configSize);
	gf_odf_avc_cfg_del(avcc);
}
break;
}
return 1;
}

/* Depacketize an RFC 3016 / RFC 3640 (MPEG-4 generic) RTP payload and forward
 * each contained access unit to the terminal as an SL packet.
 * ch      - receiving RTP stream (SL mapping + running SL header state)
 * hdr     - parsed RTP header of this packet
 * payload - RTP payload bytes
 * size    - payload size in bytes */
void RP_ParsePayloadMPEG4(RTPStream *ch, GF_RTPHeader *hdr, char *payload, u32 size)
{
	u32 aux_size, au_size, first_idx, au_hdr_size, pay_start, num_au;
	s32 au_idx;
	GF_BitStream *hdr_bs, *aux_bs;

	/* two readers over the same payload: hdr_bs walks the AU-header section,
	 * aux_bs is only used to locate where the AU data starts */
	hdr_bs = gf_bs_new(payload, size, GF_BITSTREAM_READ);
	aux_bs = gf_bs_new(payload, size, GF_BITSTREAM_READ);

	/*global AU header len (in bits), present only when signaled in the SL map*/
	au_hdr_size = 0;
	if (ch->sl_map.auh_first_min_len) {
		au_hdr_size = gf_bs_read_u16(hdr_bs);
		gf_bs_read_u16(aux_bs);
	}
	/*jump to aux section, skip it and get payload start*/
	gf_bs_read_int(aux_bs, au_hdr_size);
	gf_bs_align(aux_bs);
	if (ch->sl_map.AuxiliaryDataSizeLength) {
		aux_size = gf_bs_read_int(aux_bs, ch->sl_map.AuxiliaryDataSizeLength);
		gf_bs_read_int(aux_bs, aux_size);
		gf_bs_align(aux_bs);
	}
	pay_start = (u32) gf_bs_get_position(aux_bs);
	gf_bs_del(aux_bs);

	first_idx = 0;
	au_idx = 0;
	ch->sl_hdr.compositionTimeStamp = hdr->TimeStamp;
	ch->sl_hdr.decodingTimeStamp = hdr->TimeStamp;
	num_au = 0;
	/* RTP marker bit flags the last fragment of an access unit */
	ch->sl_hdr.accessUnitEndFlag = hdr->Marker;
	/*override some defaults for RFC 3016*/
	if (ch->flags & RTP_NEW_AU) {
		ch->sl_hdr.accessUnitStartFlag = 1;
	} else {
		ch->sl_hdr.accessUnitStartFlag = 0;
	}
	ch->sl_hdr.randomAccessPointFlag = 0;

	/* iterate over every AU carried in this packet */
	while (1) {
		/*get default AU size*/
		au_size = ch->sl_map.ConstantSize;
		/*not signaled, assume max one AU per packet*/
		if (!au_size) au_size = size - pay_start;
		/* parse the per-AU header only when an AU-header section exists */
		if ((!num_au && ch->sl_map.auh_first_min_len) || (num_au && ch->sl_map.auh_min_len)) {
			/*ISMACryp*/
			if (ch->flags & RTP_HAS_ISMACRYP) {
				ch->sl_hdr.isma_encrypted = 1;
				if (ch->flags & RTP_ISMA_SEL_ENC) {
					ch->sl_hdr.isma_encrypted = gf_bs_read_int(hdr_bs, 1);
					gf_bs_read_int(hdr_bs, 7);
				}
				/*Note: ISMACryp ALWAYS indicates IV (BSO) and KEYIDX, even when
				sample is not encrypted. This is quite a waste when using
				selective encryption....*/
				if (!num_au) {
					ch->sl_hdr.isma_BSO = gf_bs_read_int(hdr_bs, 8*ch->sl_map.IV_length);
				}
				/*NOT SUPPORTED YET*/
				else if (ch->sl_map.IV_delta_length) {
					ch->sl_hdr.isma_BSO += gf_bs_read_int(hdr_bs, 8*ch->sl_map.IV_delta_length);
				}
				if (ch->sl_map.KI_length) {
					/*NOT SUPPORTED YET*/
					if (!num_au || !(ch->flags & RTP_ISMA_HAS_KEY_IDX) ) {
						gf_bs_skip_bytes(hdr_bs, ch->sl_map.KI_length);
					}
				}
			}
			/*AU size*/
			if (ch->sl_map.SizeLength) {
				au_size = gf_bs_read_int(hdr_bs, ch->sl_map.SizeLength);
				/* clamp to what the packet actually carries */
				if (au_size > size - pay_start) au_size = size - pay_start;
				au_hdr_size -= ch->sl_map.SizeLength;
			}
			/*AU index - subsequent AUs carry a delta-minus-one index*/
			if (! num_au) {
				au_idx = first_idx = gf_bs_read_int(hdr_bs, ch->sl_map.IndexLength);
				au_hdr_size -= ch->sl_map.IndexLength;
			} else {
				au_idx += 1 + (s32) gf_bs_read_int(hdr_bs, ch->sl_map.IndexDeltaLength);
				au_hdr_size -= ch->sl_map.IndexDeltaLength;
			}
			/*CTS flag*/
			if (ch->sl_map.CTSDeltaLength) {
				ch->sl_hdr.compositionTimeStampFlag = gf_bs_read_int(hdr_bs, 1);
				au_hdr_size -= 1;
			} else {
				/*get CTS from IDX*/
				if (ch->sl_map.ConstantDuration) {
					ch->sl_hdr.compositionTimeStamp = hdr->TimeStamp + (au_idx - first_idx) * ch->sl_map.ConstantDuration;
				} else {
					ch->sl_hdr.compositionTimeStamp = hdr->TimeStamp + (au_idx - first_idx) * ch->unit_duration;
				}
			}
			/*CTS in-band*/
			if (ch->sl_hdr.compositionTimeStampFlag) {
				ch->sl_hdr.compositionTimeStamp = hdr->TimeStamp + (s32) gf_bs_read_int(hdr_bs, ch->sl_map.CTSDeltaLength);
				au_hdr_size -= ch->sl_map.CTSDeltaLength;
			}
			/*DTS flag is always present (needed for reconstruction of TSs in
			case of packet loss)*/
			if (ch->sl_map.DTSDeltaLength) {
				ch->sl_hdr.decodingTimeStampFlag = gf_bs_read_int(hdr_bs, 1);
				au_hdr_size -= 1;
			} else {
				/*NO DTS otherwise*/
				ch->sl_hdr.decodingTimeStampFlag = 0;
			}
			if (ch->sl_hdr.decodingTimeStampFlag) {
				u32 ts_off = gf_bs_read_int(hdr_bs, ch->sl_map.DTSDeltaLength);
				/*TODO FIXME may not be true in case of TS wrapping*/
				if (hdr->TimeStamp > ts_off) ch->sl_hdr.decodingTimeStamp = hdr->TimeStamp - ts_off;
				au_hdr_size -= ch->sl_map.DTSDeltaLength;
			}
			/*RAP flag*/
			if (ch->sl_map.RandomAccessIndication) {
				ch->sl_hdr.randomAccessPointFlag = gf_bs_read_int(hdr_bs, 1);
				au_hdr_size -= 1;
			}
			/*stream state - map directly to seqNum*/
			if (ch->sl_map.StreamStateIndication) {
				ch->sl_hdr.AU_sequenceNumber = gf_bs_read_int(hdr_bs, ch->sl_map.StreamStateIndication);
				au_hdr_size -= ch->sl_map.StreamStateIndication;
			} else {
				ch->sl_hdr.AU_sequenceNumber = au_idx;
			}
		}
		/*no header present, update CTS/DTS - note we're sure there's no
		interleaving*/
		else {
			if (num_au) {
				ch->sl_hdr.compositionTimeStamp += ch->sl_map.ConstantDuration;
				ch->sl_hdr.decodingTimeStamp += ch->sl_map.ConstantDuration;
			}
		}
		/*we cannot map RTP SN to SL SN since an RTP packet may carry several SL
		ones - only inc by 1 seq nums*/
		ch->sl_hdr.packetSequenceNumber += 1;
		/*force indication of CTS whenever we have a new AU*/
		ch->sl_hdr.compositionTimeStampFlag = (ch->flags & RTP_NEW_AU) ? 1 : 0;
		/*locate VOP start code (00 00 01 B6) to detect I-VOPs (RAPs)*/
		if (ch->sl_hdr.accessUnitStartFlag && (ch->flags & RTP_M4V_CHECK_RAP)) {
			u32 i;
			Bool is_rap = 0;
			unsigned char *pay = (unsigned char *) payload + pay_start;
			i=0;
			/* NOTE(review): au_size-4 underflows (u32) when au_size<4, turning
			 * this into a near-unbounded scan past the buffer - verify AU sizes
			 * are always >= 5 when RTP_M4V_CHECK_RAP is set */
			while (i<au_size-4) {
				if (!pay[i] && !pay[i+1] && (pay[i+2]==1) && (pay[i+3]==0xB6)) {
					/* vop_coding_type (top 2 bits after the start code) == 0 -> I-VOP */
					is_rap = ((pay[i+4] & 0xC0)==0) ? 1 : 0;
					break;
				}
				i++;
			}
			ch->sl_hdr.randomAccessPointFlag = is_rap ? 1 : 0;
		}
		/* packet-drop simulation: once first_packet_drop is reached, drop one
		 * packet every frequency_drop packets */
		if (ch->owner->first_packet_drop && (ch->sl_hdr.packetSequenceNumber >= ch->owner->first_packet_drop) ) {
			/* NOTE(review): modulo by frequency_drop - presumably guaranteed
			 * non-zero whenever first_packet_drop is set; confirm at setup */
			if ( (ch->sl_hdr.packetSequenceNumber - ch->owner->first_packet_drop) % ch->owner->frequency_drop)
				gf_term_on_sl_packet(ch->owner->service, ch->channel, payload + pay_start, au_size, &ch->sl_hdr, GF_OK);
		} else {
			gf_term_on_sl_packet(ch->owner->service, ch->channel, payload + pay_start, au_size, &ch->sl_hdr, GF_OK);
		}
		ch->sl_hdr.compositionTimeStampFlag = 0;
		if (ch->flags & RTP_HAS_ISMACRYP) ch->sl_hdr.isma_BSO += au_size;
		/* stop once the AU-header section is exhausted */
		if (au_hdr_size < ch->sl_map.auh_min_len) break;
		pay_start += au_size;
		if (pay_start >= size) break;
		num_au ++;
	}

	/* remember whether the next packet starts a new AU */
	if (hdr->Marker)
		ch->flags |= RTP_NEW_AU;
	else
		ch->flags &= ~RTP_NEW_AU;

	gf_bs_del(hdr_bs);
}

/* Depacketize an RFC 2250 MPEG-1/2 audio payload: one or more complete MP3
 * frames, or a single fragment of a larger frame. */
void RP_ParsePayloadMPEG12Audio(RTPStream *ch, GF_RTPHeader *hdr, char *payload, u32 size)
{
	u16 offset;
	u32 mp3hdr, ts;
	GF_BitStream *bs;

	ch->sl_hdr.compositionTimeStamp = hdr->TimeStamp;
	ch->sl_hdr.decodingTimeStamp = hdr->TimeStamp;

	ch->sl_hdr.accessUnitStartFlag = ch->sl_hdr.accessUnitEndFlag ? 1 : 0;
	if (ch->flags & RTP_NEW_AU) ch->sl_hdr.accessUnitStartFlag = 1;

	/*get frag header: 16-bit MBZ then 16-bit fragmentation offset*/
	bs = gf_bs_new(payload, size, GF_BITSTREAM_READ);
	gf_bs_read_u16(bs);
	offset = gf_bs_read_u16(bs);
	gf_bs_del(bs);
	payload += 4;
	size -= 4;

	mp3hdr = 0;
	while (1) {
		/*frame start if no offset*/
		ch->sl_hdr.accessUnitStartFlag = offset ?
0 : 1;
		/*new frame, store size*/
		ch->sl_hdr.compositionTimeStampFlag = 0;
		if (ch->sl_hdr.accessUnitStartFlag) {
			/* NOTE(review): reads payload[0..3] with no check that size >= 4
			 * after the 4-byte payload header was stripped - verify caller */
			mp3hdr = GF_4CC((u8) payload[0], (u8) payload[1], (u8) payload[2], (u8) payload[3]);
			ch->sl_hdr.accessUnitLength = gf_mp3_frame_size(mp3hdr);
			ch->sl_hdr.compositionTimeStampFlag = 1;
		}
		/* frame size 0: header not parsable as MP3, stop */
		if (!ch->sl_hdr.accessUnitLength) break;
		/*fragmented frame*/
		if (ch->sl_hdr.accessUnitLength>size) {
			gf_term_on_sl_packet(ch->owner->service, ch->channel, payload, ch->sl_hdr.accessUnitLength, &ch->sl_hdr, GF_OK);
			/* carry the remaining byte count over to the next packet */
			ch->sl_hdr.accessUnitLength -= size;
			ch->sl_hdr.accessUnitStartFlag = ch->sl_hdr.accessUnitEndFlag = 0;
			return;
		}
		/*complete frame*/
		ch->sl_hdr.accessUnitEndFlag = 1;
		gf_term_on_sl_packet(ch->owner->service, ch->channel, payload, ch->sl_hdr.accessUnitLength, &ch->sl_hdr, GF_OK);
		payload += ch->sl_hdr.accessUnitLength;
		size -= ch->sl_hdr.accessUnitLength;
		ch->sl_hdr.accessUnitLength = 0;
		/*if fragmented there shall not be other frames in the packet*/
		if (!ch->sl_hdr.accessUnitStartFlag) return;
		if (!size) break;
		offset = 0;
		/*get ts - frame duration; assumed to be in the RTP clock's units,
		TODO confirm gf_mp3_window_size semantics vs stream clock rate*/
		ts = gf_mp3_window_size(mp3hdr);
		ch->sl_hdr.compositionTimeStamp += ts;
		ch->sl_hdr.decodingTimeStamp += ts;
	}
	ch->flags |= RTP_NEW_AU;
}

/* Depacketize an RFC 2250 MPEG-1/2 video payload (4-byte video-specific
 * header followed by picture data). */
void RP_ParsePayloadMPEG12Video(RTPStream *ch, GF_RTPHeader *hdr, char *payload, u32 size)
{
	u8 pic_type;

	ch->sl_hdr.compositionTimeStamp = hdr->TimeStamp;
	ch->sl_hdr.decodingTimeStamp = hdr->TimeStamp;

	/* picture_type field of the MPEG video-specific header */
	pic_type = payload[2] & 0x7;
	payload += 4;
	size -= 4;

	/*missed something*/
	/* NOTE(review): this test can never be true - compositionTimeStamp was
	 * assigned hdr->TimeStamp just above, so this "missed packet" resync is
	 * dead code (the comparison probably belongs before the assignment) */
	if (ch->sl_hdr.compositionTimeStamp != hdr->TimeStamp) ch->flags |= RTP_NEW_AU;

	ch->sl_hdr.accessUnitStartFlag = (ch->flags & RTP_NEW_AU) ? 1 : 0;
	ch->sl_hdr.accessUnitEndFlag = hdr->Marker ? 1 : 0;
	/* picture_type 1 == I-frame -> random access point */
	ch->sl_hdr.randomAccessPointFlag = (pic_type==1) ? 1 : 0;

	if (ch->sl_hdr.accessUnitStartFlag) {
		ch->sl_hdr.compositionTimeStamp = hdr->TimeStamp;
		ch->sl_hdr.compositionTimeStampFlag = 1;
	} else {
		ch->sl_hdr.compositionTimeStampFlag = 0;
	}
	gf_term_on_sl_packet(ch->owner->service, ch->channel, payload, size, &ch->sl_hdr, GF_OK);

	/* marker bit == end of picture */
	if (hdr->Marker) {
		ch->flags |= RTP_NEW_AU;
	} else {
		ch->flags &= ~RTP_NEW_AU;
	}
}

/* Dispatch an RFC 2250 MPEG-1/2 payload to the audio or video depacketizer
 * according to the stream type established at channel setup. */
void RP_ParsePayloadMPEG12(RTPStream *ch, GF_RTPHeader *hdr, char *payload, u32 size)
{
	switch (ch->sl_map.StreamType) {
	case GF_STREAM_VISUAL:
		RP_ParsePayloadMPEG12Video(ch, hdr, payload, size);
		break;
	case GF_STREAM_AUDIO:
		RP_ParsePayloadMPEG12Audio(ch, hdr, payload, size);
		break;
	}
}
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -