// h264videofileservermediasubsession.cpp
//filename H264ServerMesiaSubsession.cpp
/*
* Written by Engin Kurutepe.
* kurutepe@xxxxxxxxxxxxxxxx
*/
#include "H264VideoFileServerMediaSubsession.hh"
#include "H264VideoStreamFramer.hh"
#include "H264VideoRTPSink.hh"
#include "ByteStreamFileSource.hh"
// Constructor: forwards to the MPEG-4 file-subsession base class, which
// records the file name and the reuse-first-source policy.
// NOTE(review): the original ", fDoneFlag(0)" initializer was commented out
// by a previous author -- presumably fDoneFlag is handled elsewhere; confirm
// against the class declaration (getAuxSDPLine() spins on this flag).
H264VideoFileServerMediaSubsession::H264VideoFileServerMediaSubsession(
    UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource)
  : MPEG4VideoFileServerMediaSubsession(env, fileName, reuseFirstSource)
{
}
// Static factory (the usual live555 construction idiom).
H264VideoFileServerMediaSubsession*
H264VideoFileServerMediaSubsession::createNew(UsageEnvironment& env,
                                              char const* fileName,
                                              Boolean reuseFirstSource)
{
  H264VideoFileServerMediaSubsession* subsession =
      new H264VideoFileServerMediaSubsession(env, fileName, reuseFirstSource);
  return subsession;
}
// Destructor: nothing to release beyond what the base classes clean up.
H264VideoFileServerMediaSubsession::~H264VideoFileServerMediaSubsession()
{
}
static void afterPlayingDummy(void* clientData) {
H264VideoFileServerMediaSubsession* subsess = (H264VideoFileServerMediaSubsession*)clientData;
subsess->afterPlayingDummy1();
}
void H264VideoFileServerMediaSubsession::afterPlayingDummy1() {
// Unschedule any pending 'checking' task:
envir().taskScheduler().unscheduleDelayedTask(nextTask());
// Signal the event loop that we're done:
setDoneFlag();
}
static void checkForAuxSDPLine(void* clientData)
{
H264VideoFileServerMediaSubsession* subsess = (H264VideoFileServerMediaSubsession*)clientData;
subsess->checkForAuxSDPLine1();
}
// Poll the dummy sink until its aux SDP line becomes available.
void H264VideoFileServerMediaSubsession::checkForAuxSDPLine1() {
  if (fDummyRTPSink->auxSDPLine() == NULL) {
    // Not ready yet -- schedule another check shortly:
    int pollIntervalUSecs = 100000; // 100 ms
    nextTask() = envir().taskScheduler().scheduleDelayedTask(
        pollIntervalUSecs, (TaskFunc*)checkForAuxSDPLine, this);
  } else {
    // The sink now knows its aux SDP line; release the nested event loop:
    setDoneFlag();
  }
}
// Return the SDP "a=fmtp:..." line for this H.264 stream.
//
// For H.264 files the 'config' information (SPS/PPS) isn't known until we
// start reading the file, so rtpSink->auxSDPLine() is NULL at first.  We
// start playing the source into the sink and run a nested event loop,
// polling (checkForAuxSDPLine) until the line appears or playback ends.
//
// @param rtpSink      RTP sink whose aux SDP line is wanted
// @param inputSource  framed source to read from while probing
// @return the aux SDP line (owned by the sink), or NULL if none was produced
char const* H264VideoFileServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
  fDummyRTPSink = rtpSink;

  // FIX: reset the completion flag before entering the nested event loop.
  // The constructor's ", fDoneFlag(0)" initializer was commented out, so
  // without this reset the flag may hold garbage on the first call -- or a
  // stale non-zero value from a previous call -- making doEventLoop() return
  // before the aux SDP line is actually ready.
  fDoneFlag = 0;

  // Start reading the file into the sink:
  fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);

  // Begin polling; setDoneFlag() (via the poll or afterPlayingDummy) ends
  // the loop below:
  checkForAuxSDPLine(this);
  envir().taskScheduler().doEventLoop(&fDoneFlag);

  char const* auxSDPLine = fDummyRTPSink->auxSDPLine();
  return auxSDPLine;
}
// Create the per-client source chain:
//   ByteStreamFileSource (raw bytes) -> H264VideoStreamFramer (NAL framing).
FramedSource* H264VideoFileServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate)
{
  estBitrate = 500; // kbps, estimate

  // Open the raw H.264 elementary-stream file:
  ByteStreamFileSource* byteSource =
      ByteStreamFileSource::createNew(envir(), fFileName);
  if (byteSource == NULL) return NULL;
  fFileSize = byteSource->fileSize();

  // Wrap the byte stream in a framer for the video elementary stream:
  H264VideoStreamFramer* framer =
      H264VideoStreamFramer::createNew(envir(), byteSource);
  // (Previously the framer's parameter sets were captured here:)
  //source_sps = framer->getSPS();
  //source_pps = framer->getPPS();
  //source_pli = framer->getProfileLevelID();
  return framer;
}
// Create the RTP sink for one client session.  The trailing 0 / "" args are
// presumably the profile-level-id and sprop-parameter-sets (empty here, with
// the real values supplied later via the aux SDP line) -- confirm against
// this project's H264VideoRTPSink::createNew() declaration.
RTPSink* H264VideoFileServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/)
{
  // (Previously the SPS/PPS were concatenated into a parameter-set string:)
  //char *param_sets = new char[strlen(source_sps)+strlen(source_pps)+2];
  //strcpy(param_sets, source_sps);
  //strcat(param_sets, ",");
  //strcat(param_sets, source_pps);
  // delete [] param_sets;
  H264VideoRTPSink* videoSink =
      H264VideoRTPSink::createNew(envir(), rtpGroupsock,
                                  rtpPayloadTypeIfDynamic, 0, "");
  return videoSink;
}
// (End of file. Web-page footer text -- keyboard-shortcut help from the
// code-hosting site this file was scraped from -- removed; it was not part
// of the source and would not compile.)