📄 play_mp4.c
	status = RMGetMP4SubtitleTrackIDByIndex(pSendContext->mp4c, index, &subtitletrackID);
	if (status != RM_OK) {
		RMDBGLOG((ENABLE, "cannot get id for subtitle track %lu!\n", index));
		return status;
	}

	if (pSendContext->mp4tSubT) {
		RMDBGLOG((ENABLE, "subtitle track already open, closing\n"));
		RMCloseMP4Track(pSendContext->mp4c, pSendContext->mp4tSubT);
		pSendContext->mp4tSubT = (ExternalRMmp4Track) NULL;
	}

	if (subtitletrackID > 0) {
		RMDBGLOG((ENABLE, "selecting subtitle track %lu, id %lu\n", index, subtitletrackID));
		pSendContext->mp4tSubT = RMOpenMP4Track(pSendContext->mp4c, subtitletrackID);
		if (pSendContext->mp4tSubT == NULL) {
			RMDBGLOG((ENABLE, "could not open subtitle track!!\n"));
			return RM_ERROR;
		}
		pSendContext->subtCTSTimeScale = RMGetMP4TrackTimeScale(pSendContext->mp4tSubT);
		RMDBGLOG((ENABLE, "subtitle timeScale %lu\n", pSendContext->subtCTSTimeScale));
	} else
		return RM_ERROR;

	return RM_OK;
}

/* Send the video decoder specific info (DSI) to the video decoder if it is still pending. */
static RMstatus send_videoDSI(struct SendMP4DataContext *pSendContext)
{
	RMuint32 i;
	RMstatus status;

	if (!pSendContext->sendVideoDSI) {
		return RM_OK;
	}

	while (RUAGetBuffer(pSendContext->pDMA, &(pSendContext->videosample.buf), COMMON_TIMEOUT_US) != RM_OK)
		RMDBGLOG((ENABLE, "there are no free buffers for video DSI right now, retry later...\n"));

	memcpy(pSendContext->videosample.buf, pSendContext->videodsiBuf, pSendContext->videodsiSize);
	pSendContext->videosample.size = pSendContext->videodsiSize;
	pSendContext->videosample.flags = 0;
	pSendContext->video_Info.ValidFields = 0;
	pSendContext->video_Info.TimeStamp = 0;

	for (i = 0; i < pSendContext->videodsiSize; i++) {
		RMDBGLOG((DISABLE, "videoDSI[%lu]=%02X\n", i, pSendContext->videodsiBuf[i]));
	}

	if (pSendContext->SendVideoData) {
		while (RUASendData(pSendContext->pRUA, pSendContext->dcc_info->video_decoder, pSendContext->pDMA,
			pSendContext->videosample.buf, pSendContext->videosample.size,
			&pSendContext->video_Info, sizeof(pSendContext->video_Info)) != RM_OK) {
			RMDBGLOG((ENABLE, "waiting to send video DSI\n"));
		}
	}
	RMDBGLOG((ENABLE, "video DSI sent, size %ld\n", pSendContext->videodsiSize));

	status = dump_data_into_file(pSendContext->play_opt, RMVDEMUX_VIDEO,
		pSendContext->videosample.buf, pSendContext->videosample.size,
		pSendContext->video_Info.TimeStamp,
		(pSendContext->video_Info.ValidFields & TIME_STAMP_INFO),
		pSendContext->video_Info.FirstAccessUnitPointer);
	if (RMFAILED(status)) {
		RMDBGLOG((ENABLE, "Cannot dump data %d\n", status));
		return status;
	}

	if (pSendContext->videosample.buf != NULL) {
		RUAReleaseBuffer(pSendContext->pDMA, pSendContext->videosample.buf);
		pSendContext->videosample.buf = NULL;
	}

	pSendContext->sendVideoDSI = FALSE;
	return RM_OK;
}

/* Stop the devices selected in the devices bitmask (STC, video decoder, audio decoders). */
static RMstatus Stop(struct SendMP4DataContext *pSendContext, RMuint32 devices)
{
	RMstatus err = RM_OK;
	struct dcc_context *dcc_info = pSendContext->dcc_info;

	if (devices & RM_DEVICES_STC) {
		RMDBGLOG((ENABLE, "STOP: stc\n"));
		DCCSTCStop(dcc_info->pStcSource);
	}

	if (devices & RM_DEVICES_VIDEO) {
		if (pSendContext->SendVideoData) {
			RMDBGLOG((ENABLE, "STOP: video decoder\n"));
			err = DCCStopVideoSource(dcc_info->pVideoSource, DCCStopMode_LastFrame);
			if (RMFAILED(err)) {
				RMDBGLOG((ENABLE, "Error stopping video source %d\n", err));
				return err;
			}
			pSendContext->lastVideoPTS = 0;
			pSendContext->lastSPUPTS = 0;
			pSendContext->sendVideoDSI = TRUE;
		}
	}

	if (devices & RM_DEVICES_AUDIO) {
		if ((pSendContext->SendAudioData) && (dcc_info->pMultipleAudioSource)) {
			RMDBGLOG((ENABLE, "STOP: multiple audio decoders\n"));
			err = DCCStopMultipleAudioSource(dcc_info->pMultipleAudioSource);
			if (RMFAILED(err)) {
				RMDBGLOG((ENABLE, "Error stopping multiple audio source %d\n", err));
				return err;
			}
			pSendContext->lastAudioPTS = 0;
		}
	}

	if ((devices & RM_DEVICES_AUDIO) && (devices & RM_DEVICES_VIDEO)) {
		pSendContext->FirstSystemTimeStamp = TRUE;
	}

	if (pSendContext->monitorFIFOs) {
		RMDBGLOG((ENABLE, "FIFO STATUS after stop\n"));
		monitorFIFO(pSendContext, TRUE);
		RMDBGLOG((ENABLE, "**********************\n"));
	}

	return err;
}

/* Start playback on the devices selected in the devices bitmask. */
static RMstatus Play(struct SendMP4DataContext *pSendContext, RMuint32 devices, enum DCCVideoPlayCommand mode)
{
	RMstatus err = RM_OK;
	struct dcc_context *dcc_info = pSendContext->dcc_info;

	if (pSendContext->monitorFIFOs) {
		RMDBGLOG((ENABLE, "FIFO STATUS before play\n"));
		monitorFIFO(pSendContext, TRUE);
		RMDBGLOG((ENABLE, "**********************\n"));
	}

	if (devices & RM_DEVICES_STC) {
		RMDBGLOG((ENABLE, "PLAY: stc\n"));
		DCCSTCPlay(dcc_info->pStcSource);
	}

	if (devices & RM_DEVICES_VIDEO) {
		if (pSendContext->SendVideoData) {
			RMDBGLOG((ENABLE, "PLAY: video decoder\n"));
			err = DCCPlayVideoSource(dcc_info->pVideoSource, mode);
			if (RMFAILED(err)) {
				RMDBGLOG((ENABLE, "Cannot play video decoder %d\n", err));
				return err;
			}
		}
		send_videoDSI(pSendContext);
	}

	if (devices & RM_DEVICES_AUDIO) {
		if ((pSendContext->SendAudioData) && (dcc_info->pMultipleAudioSource)) {
			RMDBGLOG((ENABLE, "PLAY: multiple audio decoders\n"));
			err = DCCPlayMultipleAudioSource(dcc_info->pMultipleAudioSource);
			if (RMFAILED(err)) {
				RMDBGLOG((ENABLE, "Cannot play audio decoder %d\n", err));
				return err;
			}
		}
	}

	return err;
}

// used for prebuffering
static RMstatus Pause(struct SendMP4DataContext *pSendContext, RMuint32 devices)
{
	RMstatus err = RM_OK;

	if (devices & RM_DEVICES_STC) {
		RMDBGLOG((ENABLE, "PAUSE: stc\n"));
		DCCSTCStop(pSendContext->dcc_info->pStcSource);
	}

	if (devices & RM_DEVICES_VIDEO) {
		if (pSendContext->SendVideoData) {
			RMDBGLOG((ENABLE, "PAUSE: video decoder\n"));
			err = DCCPauseVideoSource(pSendContext->dcc_info->pVideoSource);
			if (RMFAILED(err)) {
				RMDBGLOG((ENABLE, "Cannot pause video decoder %d\n", err));
				return err;
			}
		}
	}

	if (devices & RM_DEVICES_AUDIO) {
		if ((pSendContext->SendAudioData) && (pSendContext->dcc_info->pMultipleAudioSource)) {
			RMDBGLOG((ENABLE, "PAUSE: audio decoder\n"));
			err = DCCPauseMultipleAudioSource(pSendContext->dcc_info->pMultipleAudioSource);
			if (RMFAILED(err)) {
				RMDBGLOG((ENABLE, "Cannot pause audio decoder %d\n", err));
				return err;
			}
		}
	}

	return err;
}

#define PLAY_TIMERS 0

/* Read the next audio sample from the MP4 audio track, rescale its PTS if needed, and send it to the audio decoder(s). */
static RMstatus send_MP4_audio(struct SendMP4DataContext *pSendContext)
{
	RMstatus status;
	RMuint64 pts;
	RMuint8 *buf = NULL;
	enum RM_PSM_State PlaybackStatus = RM_PSM_GetState(pSendContext->PSMcontext, &(pSendContext->dcc_info));
	RMuint32 nextSampleSize = 0;

	if (PlaybackStatus == RM_PSM_Stopped) {
		return RM_OK;
	}
	if ((PlaybackStatus != RM_PSM_Playing) && (PlaybackStatus != RM_PSM_Paused) && (PlaybackStatus != RM_PSM_Prebuffering))
		return RM_OK;

	if (pSendContext->audioTransferPending)
		goto send_data;

	if (pSendContext->play_opt->disk_ctrl_state == DISK_CONTROL_STATE_SLEEPING)
		RMDBGLOG((ENABLE, "trying to read from a suspended drive!!\n"));

	pSendContext->audiosample.buf = pSendContext->audioDMABuffer + pSendContext->audioDMABufferOffset;
	pSendContext->audiosample.size = (1 << pSendContext->dmaBufferSizeLog2) - pSendContext->audioDMABufferOffset;

	if (pSendContext->isAACaudioPayload) {
		/* Reserve ADTS_HEADER_SIZE bytes in front of the sample; the ADTS header is filled in below, once the sample size is known. */
		buf = pSendContext->audiosample.buf;
		pSendContext->audiosample.size -= ADTS_HEADER_SIZE;
		pSendContext->audiosample.buf += ADTS_HEADER_SIZE;
	}

	if (!RMGetNextMP4Sample(pSendContext->mp4tA, &(pSendContext->audiosample), pSendContext->audiosample.size)) {
		RMDBGLOG((ENABLE, "Failed to get Audio sample -> end of audio stream\n"));
		pSendContext->audioEOS = TRUE;
		return RM_EOS;
	}

	if (!pSendContext->audiosample.size)
		RMDBGLOG((ENABLE, ">>> got an audio sample of size 0!\n"));

	RMGetMP4TrackNextSampleSize(pSendContext->mp4tA, &nextSampleSize);
	RMDBGLOG((DISABLE, "got audio sample of size %ld, nextSampleSize %lu, last audioDataSent %lu\n",
		pSendContext->audiosample.size, nextSampleSize, pSendContext->audioDataSent));

	if (pSendContext->isAACaudioPayload) {
		FillAdtsHeader(buf, pSendContext->audiosample.size, pSendContext->AAC_SampleRateFromDSI,
			pSendContext->AAC_ChannelsFromDSI, pSendContext->AAC_ObjectIDFromDSI);
		pSendContext->audiosample.buf = buf;
		pSendContext->audiosample.size += ADTS_HEADER_SIZE;
	}

	if (pSendContext->audiosample.flags & MP4_CTS_VALID) {
		pSendContext->audio_Info.ValidFields = TIME_STAMP_INFO;
		pts = RMuint64from2RMuint32(pSendContext->audiosample.CTS_MSB, pSendContext->audiosample.CTS_LSB);
		pSendContext->audio_Info.TimeStamp = pts;
		if (pSendContext->audio_vop_tir != pSendContext->AudioCTSTimeScale) {
			pSendContext->audio_Info.TimeStamp = round_int_div(pts * pSendContext->audio_vop_tir, pSendContext->AudioCTSTimeScale);
			RMDBGLOG((DISABLE, "audio pts scaling! old %lld => %lld thru a factor %ld/%ld\n",
				pts, pSendContext->audio_Info.TimeStamp, pSendContext->audio_vop_tir, pSendContext->AudioCTSTimeScale));
		} else
			RMDBGLOG((DISABLE, "setting pts to %llu\n", pts));
		pSendContext->lastAudioPTS = (1000 * pSendContext->audio_Info.TimeStamp) / pSendContext->audio_vop_tir;
	}

	if (!pSendContext->SendAudioPts) {
		pSendContext->audio_Info.TimeStamp = 0;
		pSendContext->audio_Info.ValidFields = 0;
	}

	// happens only once
	if (!pSendContext->audio_ptsScalingMsg) {
		if (pSendContext->audio_vop_tir != pSendContext->AudioCTSTimeScale) {
			RMDBGLOG((ENABLE, ">>> audio pts scaling! thru a factor %ld/%ld\n",
				pSendContext->audio_vop_tir, pSendContext->AudioCTSTimeScale));
		} else
			RMDBGLOG((ENABLE, ">>> no audio pts scaling required\n"));
		pSendContext->audio_ptsScalingMsg = TRUE;
	}

	if (pSendContext->FirstSystemTimeStamp && (pSendContext->audio_Info.ValidFields & TIME_STAMP_INFO)) {
		RMuint64 dummyPTS = pSendContext->audio_Info.TimeStamp;
		RMDBGLOG((ENABLE, "FirstSystemTimeStamp from audio = %lld(0x%llX) at %ld/sec = %llu s\n",
			dummyPTS, dummyPTS, pSendContext->audio_vop_tir, dummyPTS / pSendContext->audio_vop_tir));
		pSendContext->FirstSystemTimeStamp = FALSE;
		RMDBGLOG((ENABLE, "setting timers\n"));
		DCCSTCSetTime(pSendContext->dcc_info->pStcSource,
			dummyPTS + pSendContext->stc_offset_ms * ((RMint64)(pSendContext->audio_vop_tir / 1000)),
			pSendContext->audio_vop_tir);
#if PLAY_TIMERS
		if ((PlaybackStatus != RM_PSM_Prebuffering) && (PlaybackStatus != RM_PSM_Paused) && (PlaybackStatus != RM_PSM_Stopped))
			DCCSTCPlay(pSendContext->dcc_info->pStcSource);
#endif
	} else if (pSendContext->FirstSystemTimeStamp) {
		if (pSendContext->forceTimersToZero) {
			RMDBGLOG((ENABLE, ">>> no pts, force timers to zero\n"));
			DCCSTCSetTime(pSendContext->dcc_info->pStcSource,
				pSendContext->stc_offset_ms * ((RMint64)(pSendContext->audio_vop_tir / 1000)),
				pSendContext->audio_vop_tir);
#if PLAY_TIMERS
			if ((PlaybackStatus != RM_PSM_Prebuffering) && (PlaybackStatus != RM_PSM_Paused) && (PlaybackStatus != RM_PSM_Stopped))
				DCCSTCPlay(pSendContext->dcc_info->pStcSource);
#endif
			pSendContext->FirstSystemTimeStamp = FALSE;
		} else {
			RMDBGLOG((ENABLE, "waiting for first time stamp to be detected, timer not set!!!, audio skipped\n"));
			return RM_OK;
		}
	}

	RMDBGLOG((DISABLE, "about to send a packet audio dma 0x%08X, sample 0x%08X, size %ld, pts %lld(0x%llx) %s\n",
		pSendContext->pDMA, pSendContext->audiosample.buf, pSendContext->audiosample.size,
		pSendContext->audio_Info.TimeStamp, pSendContext->audio_Info.TimeStamp,
		(pSendContext->audio_Info.ValidFields & TIME_STAMP_INFO) ? "valid" : ""));

	pSendContext->audioDMABufferOffset += pSendContext->audiosample.size;

send_data:
	if (pSendContext->audiosample.size != 0) {
		if (pSendContext->SendAudioData) {
			RMint32 lastOKinstance;
			status = DCCMultipleAudioSendData(pSendContext->dcc_info->pMultipleAudioSource, pSendContext->pDMA,
				pSendContext->audiosample.buf, pSendContext->audiosample.size,
				&pSendContext->audio_Info, sizeof(pSendContext->audio_Info), &lastOKinstance);
			if (status != RM_OK) {
				if (status == RM_PENDING) {
					RMDBGLOG((ENABLE, "audio transfer pending\n"));
					pSendContext->audioTransferPending = TRUE;
				}
				return status;
			}
			pSendContext->audioTransferPending = FALSE;
		}

		status = dump_data_into_file(pSendContext->play_opt, RMVDEMUX_AUDIO,
			pSendContext->audiosample.buf, pSendContext->audiosample.size,
			pSendContext->audio_Info.TimeStamp,
			(pSendContext->audio_Info.ValidFields & TIME_STAMP_INFO),
			pSendContext->audio_Info.FirstAccessUnitPointer);
		if (RMFAILED(status)) {
			RMDBGLOG((ENABLE, "Cannot dump data %d\n", status));
		}

		if (pSendContext->SendAudioData) {
			RMDBGLOG((SENDDBG, "sent a packet audio dma 0x%08X, sample 0x%08X, size %ld, total %ld, pts %lld(0x%llx) %s\n",
				pSendContext->pDMA, pSendContext->audiosample.buf, pSendContext->audiosample.size,
				pSendContext->audioDataSent, pSendContext->audio_Info.TimeStamp, pSendContext->audio_Info.TimeStamp,
				(pSendContext->audio_Info.ValidFields & TIME_STAMP_INFO) ? "valid" : ""));
		}
		// required for prebuffering routine
		pSendContext->audioDataSent += pSendContext->audiosample.size;
	} else {
		RMDBGLOG((ENABLE, "got audio sample of size 0!\n"));
	}

	return RM_OK;
}

static RMstatus send_video_payload(struct SendMP4DataContext *pSendContext)
{
	RMstatus status;
	RMuint32 sizeBuffer = (1 << pSendContext->dmaBufferSizeLog2);
	RMmp4Sample *sample = &(pSendContext->videosample);
	RMuint8 *videoDMAStartCodeBuffer;

	if (pSendContext->isH264) {
		// h264 streams packed into MP4 system files require startcode reinsertion.
		RMDBGLOG((H264_SC_DBG, "send_video_payload (size %lu)\n", sample->size));
		if (sample->size < 3)
			RMDBGLOG((ENABLE, "warning: read a sampleSize < 3\n"));