channel.c
		ch->len = 0;
		return;
	}
	au->CTS = ch->CTS;
	au->DTS = ch->DTS;
	au->RAP = ch->IsRap;
	au->data = ch->buffer;
	au->dataLength = ch->len;
	au->PaddingBits = ch->padingBits;
	ch->IsRap = 0;
	ch->padingBits = 0;
	au->next = NULL;
	ch->buffer = NULL;

	if (ch->len + ch->media_padding_bytes != ch->allocSize) {
		au->data = (char*)realloc(au->data, sizeof(char) * (au->dataLength + ch->media_padding_bytes));
	}
	if (ch->media_padding_bytes) memset(au->data + au->dataLength, 0, sizeof(char)*ch->media_padding_bytes);
	ch->len = ch->allocSize = 0;

	gf_es_lock(ch, 1);

	if (ch->service && ch->service->cache) {
		GF_SLHeader slh;
		memset(&slh, 0, sizeof(GF_SLHeader));
		slh.accessUnitEndFlag = slh.accessUnitStartFlag = 1;
		slh.compositionTimeStampFlag = slh.decodingTimeStampFlag = 1;
		slh.decodingTimeStamp = ch->net_dts;
		slh.compositionTimeStamp = ch->net_cts;
		slh.randomAccessPointFlag = au->RAP;
		ch->service->cache->Write(ch->service->cache, ch, au->data, au->dataLength, &slh);
	}

	if (!ch->AU_buffer_first) {
		ch->AU_buffer_first = au;
		ch->AU_buffer_last = au;
		ch->AU_Count = 1;
	} else {
		if (ch->AU_buffer_last->DTS <= au->DTS) {
			ch->AU_buffer_last->next = au;
			ch->AU_buffer_last = ch->AU_buffer_last->next;
		}
		/*enable deinterleaving only for audio channels (some video transports may not be able to compute
		DTS, cf MPEG1-2/RTP). HOWEVER, we must recompute a monotonically increasing DTS in case the decoder
		performs frame reordering, since the DTS is then used for presentation time!*/
		else if (ch->odm->codec && (ch->odm->codec->type != GF_STREAM_AUDIO)) {
			GF_DBUnit *au_prev, *ins_au;
			u32 DTS;
			/*append AU at the tail*/
			ch->AU_buffer_last->next = au;
			ch->AU_buffer_last = ch->AU_buffer_last->next;

			DTS = au->DTS;
			au_prev = ch->AU_buffer_first;
			/*locate first AU in buffer with DTS greater than the new unit's DTS*/
			while (au_prev->next && (au_prev->DTS < DTS)) au_prev = au_prev->next;
			/*remember insertion point*/
			ins_au = au_prev;
			/*shift the DTS of all following frames*/
			while (au_prev->next) {
				au_prev->next->DTS = au_prev->DTS;
				au_prev = au_prev->next;
			}
			/*and apply*/
			ins_au->DTS = DTS;
		} else {
			GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] Audio deinterleaving OD %d ch %d\n", ch->esd->ESID, ch->odm->OD->objectDescriptorID));
			/*de-interleaving of AUs*/
			if (ch->AU_buffer_first->DTS > au->DTS) {
				au->next = ch->AU_buffer_first;
				ch->AU_buffer_first = au;
			} else {
				GF_DBUnit *au_prev = ch->AU_buffer_first;
				while (au_prev->next && au_prev->next->DTS < au->DTS) {
					au_prev = au_prev->next;
				}
				assert(au_prev);
				if (au_prev->next->DTS == au->DTS) {
					/*duplicate AU - drop it*/
					free(au->data);
					free(au);
				} else {
					au->next = au_prev->next;
					au_prev->next = au;
				}
			}
		}
		ch->AU_Count += 1;
	}

	Channel_UpdateBufferTime(ch);
	ch->au_duration = 0;
	if (duration) ch->au_duration = (u32) ((u64) 1000 * duration / ch->ts_res);

	GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d - Dispatch AU CTS %d time %d Buffer %d Nb AUs %d\n", ch->esd->ESID, ch->CTS, gf_clock_time(ch->clock), ch->BufferTime, ch->AU_Count));

	if (ch->BufferOn) {
		ch->last_au_time = gf_term_get_time(ch->odm->term);
		Channel_UpdateBuffering(ch, 1);
	}

	/*small optimization: if this is an OD AU, try to set up the object right away*/
	if (ch->esd->decoderConfig->streamType == GF_STREAM_OD) {
		gf_term_lock_net(ch->odm->term, 1);
		gf_codec_process(ch->odm->subscene->od_codec, 100);
		gf_term_lock_net(ch->odm->term, 0);
	}
	gf_es_lock(ch, 0);
	return;
}
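/*
 Illustrative aside (not part of channel.c): the non-audio branch above appends the
 new AU at the tail to preserve decoding order, then re-labels DTS values so they
 stay monotonically increasing. The sketch below reproduces just that list
 manipulation on a minimal structure; toy_au and toy_append_monotone_dts are
 hypothetical names, not GPAC API. With buffered DTS {10, 20, 30} and a new AU of
 DTS 15, the list keeps its decode order but ends up carrying DTS {10, 15, 20, 30}.
*/
typedef struct toy_au {
	unsigned int DTS;
	struct toy_au *next;
} toy_au;

static void toy_append_monotone_dts(toy_au *first, toy_au *last, toy_au *new_au)
{
	toy_au *prev, *ins;
	unsigned int DTS = new_au->DTS;
	/*append at the tail, mirroring the video branch of Channel_DispatchAU*/
	new_au->next = NULL;
	last->next = new_au;
	/*locate the first AU whose DTS is not smaller than the new one*/
	prev = first;
	while (prev->next && (prev->DTS < DTS)) prev = prev->next;
	ins = prev;
	/*shift every following DTS one slot toward the tail*/
	while (prev->next) {
		prev->next->DTS = prev->DTS;
		prev = prev->next;
	}
	/*the insertion point takes the new DTS*/
	ins->DTS = DTS;
}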
void Channel_RecieveSkipSL(GF_ClientService *serv, GF_Channel *ch, char *StreamBuf, u32 StreamLength)
{
	GF_DBUnit *au;
	if (!StreamLength) return;

	gf_es_lock(ch, 1);
	au = gf_db_unit_new();
	au->RAP = 1;
	au->DTS = gf_clock_time(ch->clock);
	au->data = (char*)malloc(sizeof(char) * (ch->media_padding_bytes + StreamLength));
	memcpy(au->data, StreamBuf, sizeof(char) * StreamLength);
	if (ch->media_padding_bytes) memset(au->data + StreamLength, 0, sizeof(char)*ch->media_padding_bytes);
	au->dataLength = StreamLength;
	au->next = NULL;

	/*if the channel owns the clock, start it*/
	if (ch->clock && !ch->IsClockInit) {
		if (gf_es_owns_clock(ch)) {
			gf_clock_set_time(ch->clock, 0);
			ch->IsClockInit = 1;
			ch->seed_ts = 0;
		}
		if (ch->clock->clock_init && !ch->IsClockInit) {
			ch->IsClockInit = 1;
			ch->seed_ts = gf_clock_time(ch->clock);
		}
	}

	if (!ch->AU_buffer_first) {
		ch->AU_buffer_first = au;
		ch->AU_buffer_last = au;
		ch->AU_Count = 1;
	} else {
		ch->AU_buffer_last->next = au;
		ch->AU_buffer_last = ch->AU_buffer_last->next;
		ch->AU_Count += 1;
	}

	Channel_UpdateBufferTime(ch);
	if (ch->BufferOn) {
		ch->last_au_time = gf_term_get_time(ch->odm->term);
		Channel_UpdateBuffering(ch, 1);
	}
	gf_es_lock(ch, 0);
}
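/*
 Illustrative aside (not part of channel.c): gf_es_receive_sl_packet below converts
 SL timestamps, expressed in the stream's timescale (ch->ts_res units per second),
 into the terminal's millisecond clock after subtracting the seed TS of the first
 received AU. rescale_ts_to_ms is a hypothetical helper isolating that arithmetic.
*/
static unsigned int rescale_ts_to_ms(unsigned long long net_ts, unsigned int ts_offset, unsigned int ts_res)
{
	/*e.g. with a 90 kHz timescale (ts_res=90000), net_ts=180000 yields ts_offset + 2000 ms*/
	return (unsigned int) (ts_offset + (long long) net_ts * 1000 / ts_res);
}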
/*handles reception of an SL-PDU, logical or physical*/
void gf_es_receive_sl_packet(GF_ClientService *serv, GF_Channel *ch, char *StreamBuf, u32 StreamLength, GF_SLHeader *header, GF_Err reception_status)
{
	GF_SLHeader hdr;
	u32 nbAU, OldLength, size, AUSeqNum, SLHdrLen;
	Bool EndAU, NewAU;
	char *payload;

	if (ch->skip_sl) {
		Channel_RecieveSkipSL(serv, ch, StreamBuf, StreamLength);
		return;
	}

	/*physical SL-PDU - depacketize*/
	if (!header) {
		if (!StreamLength) return;
		gf_sl_depacketize(ch->esd->slConfig, &hdr, StreamBuf, StreamLength, &SLHdrLen);
		StreamLength -= SLHdrLen;
	} else {
		hdr = *header;
		SLHdrLen = 0;
	}
	payload = StreamBuf + SLHdrLen;

	/*check state*/
	if (!ch->codec_resilient && (reception_status == GF_CORRUPTED_DATA)) {
		Channel_WaitRAP(ch);
		return;
	}

	if (!ch->esd->slConfig->useAccessUnitStartFlag) {
		/*no AU signaling - each packet is an AU*/
		if (!ch->esd->slConfig->useAccessUnitEndFlag)
			hdr.accessUnitEndFlag = hdr.accessUnitStartFlag = 1;
		/*otherwise AUs are signaled by the end of the previous packet*/
		else
			hdr.accessUnitStartFlag = ch->NextIsAUStart;
	}

	/*get RAP*/
	if (ch->esd->slConfig->hasRandomAccessUnitsOnlyFlag) {
		hdr.randomAccessPointFlag = 1;
	} else if (!ch->esd->slConfig->useRandomAccessPointFlag || ch->codec_resilient) {
		ch->stream_state = 0;
	}

	if (ch->esd->slConfig->packetSeqNumLength) {
		if (ch->pck_sn && hdr.packetSequenceNumber) {
			/*repeated packet -> drop*/
			if (ch->pck_sn == hdr.packetSequenceNumber) {
				GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: repeated packet, dropping\n", ch->esd->ESID));
				return;
			}
			/*if the codec has no resiliency, check for packet drops*/
			if (!ch->codec_resilient && !hdr.accessUnitStartFlag) {
				if (ch->pck_sn == (u32) (1<<ch->esd->slConfig->packetSeqNumLength) ) {
					if (hdr.packetSequenceNumber) {
						GF_LOG(GF_LOG_WARNING, GF_LOG_SYNC, ("[SyncLayer] ES%d: packet loss, dropping & waiting for RAP\n", ch->esd->ESID));
						Channel_WaitRAP(ch);
						return;
					}
				} else if (ch->pck_sn + 1 != hdr.packetSequenceNumber) {
					GF_LOG(GF_LOG_WARNING, GF_LOG_SYNC, ("[SyncLayer] ES%d: packet loss, dropping & waiting for RAP\n", ch->esd->ESID));
					Channel_WaitRAP(ch);
					return;
				}
			}
		}
		ch->pck_sn = hdr.packetSequenceNumber;
	}

	/*if idle or empty, skip the packet*/
	if (hdr.idleFlag || (hdr.paddingFlag && !hdr.paddingBits)) {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: Idle or empty packet - skipping\n", ch->esd->ESID));
		return;
	}

	NewAU = 0;
	if (hdr.accessUnitStartFlag) {
		NewAU = 1;
		ch->NextIsAUStart = 0;
		/*if we have a pending AU, dispatch or drop it*/
		if (ch->buffer) {
			if (ch->esd->slConfig->useAccessUnitEndFlag) {
				GF_LOG(GF_LOG_WARNING, GF_LOG_SYNC, ("[SyncLayer] ES%d: missed end of AU (DTS %d)\n", ch->esd->ESID, ch->DTS));
			}
			if (ch->codec_resilient) {
				Channel_DispatchAU(ch, 0);
			} else {
				free(ch->buffer);
				ch->buffer = NULL;
				ch->AULength = 0;
				ch->len = ch->allocSize = 0;
			}
		}
		AUSeqNum = hdr.AU_sequenceNumber;

		/*get CTS*/
		if (hdr.compositionTimeStampFlag) {
			ch->net_dts = ch->net_cts = hdr.compositionTimeStamp;
			/*get DTS*/
			if (hdr.decodingTimeStampFlag) ch->net_dts = hdr.decodingTimeStamp;

			/*while the clock is not initialized, keep the smallest TS seen as the seed*/
			if (!ch->IsClockInit && (ch->net_dts < ch->seed_ts)) ch->seed_ts = ch->net_dts;

			ch->net_dts -= ch->seed_ts;
			ch->net_cts -= ch->seed_ts;

			/*TS wrapping not tested*/
			ch->CTS = (u32) (ch->ts_offset + (s64) (ch->net_cts) * 1000 / ch->ts_res);
			ch->DTS = (u32) (ch->ts_offset + (s64) (ch->net_dts) * 1000 / ch->ts_res);
		} else {
			/*no TS signaled - use the CU duration*/
			if (!ch->IsClockInit) ch->DTS = ch->CTS = ch->ts_offset;

			if (!ch->esd->slConfig->AUSeqNumLength) {
				if (!ch->au_sn) {
					ch->CTS = ch->ts_offset;
					ch->au_sn = 1;
				} else {
					ch->CTS += ch->esd->slConfig->CUDuration;
				}
			} else {
				/*use the sequence number to derive the TS*/
				if (AUSeqNum < ch->au_sn) {
					nbAU = ( (1<<ch->esd->slConfig->AUSeqNumLength) - ch->au_sn) + AUSeqNum;
				} else {
					nbAU = AUSeqNum - ch->au_sn;
				}
				ch->CTS += nbAU * ch->esd->slConfig->CUDuration;
			}
		}

		if (!ch->IsClockInit) {
			/*the first data received on the channel inits the clock, regardless of:
			- whether the channel owns the clock or not
			- whether the data can be processed or not (carousel)
			We then offset the other streams' timelines by the time elapsed since clock initialization*/
#if 1
			if (!ch->clock->clock_init) {
				gf_clock_set_time(ch->clock, ch->CTS);
				GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: initializing clock at STB %d - AU DTS %d\n", ch->esd->ESID, gf_term_get_time(ch->odm->term), ch->DTS));
			} else if (ch->clock->no_time_ctrl && 0) {
				/*disabled branch kept from the original code*/
				s32 offset = gf_term_get_time(ch->odm->term);
				offset -= ch->clock->StartTime;
				if (offset > 0) {
					ch->ts_offset += offset;
					ch->DTS += offset;
					ch->CTS += offset;
				}
			}
			ch->IsClockInit = 1;
#else
			if (!ch->clock->clock_init && gf_es_owns_clock(ch)) {
				gf_clock_set_time(ch->clock, ch->DTS);
			}
			if (ch->clock->clock_init) ch->IsClockInit = 1;
#endif
		}

		/*if the AU length is carried in SL, get its size*/
		if (ch->esd->slConfig->AULength > 0) {
			ch->AULength = hdr.accessUnitLength;
		} else {
			ch->AULength = 0;
		}

		/*carousel for repeated AUs*/
		if (ch->esd->slConfig->AUSeqNumLength) {
			if (hdr.randomAccessPointFlag) {
				/*initial tune-in*/
				if (ch->stream_state==1) {
					GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: RAP Carousel found - tuning in\n", ch->esd->ESID));
					ch->au_sn = AUSeqNum;
					ch->stream_state = 0;
				}
				/*carousel RAP*/
				else if (AUSeqNum == ch->au_sn) {
					/*error recovery*/
					if (ch->stream_state==2) {
						GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: RAP Carousel found - recovering\n", ch->esd->ESID));
						ch->stream_state = 0;
					} else {
						GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: RAP Carousel found - skipping\n", ch->esd->ESID));
						return;
					}
				}
				/*regular RAP*/
				else {
					ch->au_sn = AUSeqNum;
					ch->stream_state = 0;
				}
			}
			/*regular AU but waiting for RAP*/
			else if (ch->stream_state) {
				GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: Waiting for RAP Carousel - skipping\n", ch->esd->ESID));
				return;
			} else {
				ch->au_sn = AUSeqNum;
				GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: NON-RAP AU received\n", ch->esd->ESID));
			}
		}
		/*no carousel signaling, tune in at the first RAP*/
		else if (hdr.randomAccessPointFlag) {
			ch->stream_state = 0;
			GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: RAP AU received\n", ch->esd->ESID));
		}
		/*waiting for RAP, return*/
		else if (ch->stream_state) {
			GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: Waiting for RAP - skipping AU (DTS %d)\n", ch->esd->ESID, ch->DTS));
			return;
		}
	}

	/*update the RAP marker on a per-packet basis (to cope with AVC/H264 NALU->AU reconstruction)*/
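/*
 Illustrative aside (the excerpt above stops mid-function): when no timestamps are
 signaled, the code derives CTS from the AU sequence number, handling counter
 wrap-around modulo 2^AUSeqNumLength. elapsed_aus is a hypothetical helper that
 isolates that arithmetic.
*/
static unsigned int elapsed_aus(unsigned int prev_sn, unsigned int cur_sn, unsigned int seq_num_len)
{
	if (cur_sn < prev_sn) {
		/*counter wrapped: e.g. with a 4-bit counter, prev_sn=14 and cur_sn=1 give (16-14)+1 = 3 AUs*/
		return ((1u << seq_num_len) - prev_sn) + cur_sn;
	}
	return cur_sn - prev_sn;
}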