// mediasession.cpp
// (Implementation of "MediaSession" / "MediaSubsession" — RTSP/SDP media session handling.)
case 26: {temp = "JPEG"; freq = 90000; nCh = 1; break;} case 28: {temp = "NV"; freq = 90000; nCh = 1; break;} case 31: {temp = "H261"; freq = 90000; nCh = 1; break;} case 32: {temp = "MPV"; freq = 90000; nCh = 1; break;} case 33: {temp = "MP2T"; freq = 90000; nCh = 1; break;} case 34: {temp = "H263"; freq = 90000; nCh = 1; break;} }; return strDup(temp);}unsigned MediaSession::guessRTPTimestampFrequency(char const* mediumName, char const* codecName) { // By default, we assume that audio sessions use a frequency of 8000, // and that video sessions use a frequency of 90000. // Begin by checking for known exceptions to this rule // (where the frequency is known unambiguously (e.g., not like "DVI4")) if (strcmp(codecName, "L16") == 0) return 44100; if (strcmp(codecName, "MPA") == 0 || strcmp(codecName, "MPA-ROBUST") == 0 || strcmp(codecName, "X-MP3-DRAFT-00")) return 90000; // Now, guess default values: if (strcmp(mediumName, "video") == 0) return 90000; return 8000; // for "audio", and any other medium}static unsigned computeSeqNumStagger(unsigned staggerSeconds) { // To compute the sequence number stagger, assume // - one MP3 frame per packet // - 1152 samples per MP3 frame (defined by MP3 standard) // - a sampling frequency of 44100 Hz // (Later, allow this to be parameterized)##### unsigned const samplesPerFrame = 1152; unsigned samplesPerSecond = 44100; double secondsPerFrame = (double)samplesPerFrame/samplesPerSecond; return (unsigned)(staggerSeconds/secondsPerFrame);}Boolean MediaSession::initiateByMediaType(char const* mimeType, MediaSubsession*& resultSubsession, PrioritizedRTPStreamSelector*& resultMultiSource, int& resultMultiSourceSessionId, int useSpecialRTPoffset) { // Look through this session's subsessions for media that match "mimeType" resultSubsession = NULL; resultMultiSource = NULL; resultMultiSourceSessionId = 0; unsigned maxStaggerSeconds = 0; MediaSubsessionIterator iter(*this); MediaSubsession* subsession; while ((subsession = iter.next()) != 
NULL) { if (resultMultiSourceSessionId != 0 && subsession->mctSLAPSessionId() != resultMultiSourceSessionId) { // We're using a multi-source SLAP session, but this subsession // isn't part of it continue; } Boolean wasAlreadyInitiated = subsession->readSource() != NULL; if (!wasAlreadyInitiated) { // Try to create a source for this subsession: if (!subsession->initiate(useSpecialRTPoffset)) return False; } // Make sure the source's MIME type is one that we handle: if (strcmp(subsession->readSource()->MIMEtype(), mimeType) != 0) { if (!wasAlreadyInitiated) subsession->deInitiate(); continue; } if (subsession->mctSLAPSessionId() == 0) { // Normal case: a single session resultSubsession = subsession; break; // use this } else { // Special case: a multi-source SLAP session resultMultiSourceSessionId = subsession->mctSLAPSessionId(); unsigned subsessionStaggerSeconds = subsession->mctSLAPStagger(); if (subsessionStaggerSeconds > maxStaggerSeconds) { maxStaggerSeconds = subsessionStaggerSeconds; } } } if (resultSubsession == NULL && resultMultiSourceSessionId == 0) { envir().setResultMsg("Session has no usable media subsession"); return False; } if (resultMultiSourceSessionId != 0) { // We have a multi-source MCT SLAP session; create a selector for it: unsigned seqNumStagger = computeSeqNumStagger(maxStaggerSeconds); resultMultiSource = PrioritizedRTPStreamSelector::createNew(envir(), seqNumStagger); if (resultMultiSource == NULL) return False; // Note: each subsession has its own RTCP instance; we don't return them // Then run through the subsessions again, adding each of the sources: iter.reset(); while ((subsession = iter.next()) != NULL) { if (subsession->mctSLAPSessionId() == resultMultiSourceSessionId) { resultMultiSource->addInputRTPStream(subsession->rtpSource(), subsession->rtcpInstance()); } } } return True;}////////// MediaSubsessionIterator //////////MediaSubsessionIterator::MediaSubsessionIterator(MediaSession& session) : fOurSession(session) { 
reset();}MediaSubsessionIterator::~MediaSubsessionIterator() {}MediaSubsession* MediaSubsessionIterator::next() { MediaSubsession* result = fNextPtr; if (fNextPtr != NULL) fNextPtr = fNextPtr->fNext; return result;}void MediaSubsessionIterator::reset() { fNextPtr = fOurSession.fSubsessionsHead;}////////// MediaSubsession //////////MediaSubsession::MediaSubsession(MediaSession& parent) : sessionId(NULL), serverPortNum(0), sink(NULL), miscPtr(NULL), fParent(parent), fNext(NULL), fConnectionEndpointName(NULL), fClientPortNum(0), fRTPPayloadFormat(0xFF), fSavedSDPLines(NULL), fMediumName(NULL), fCodecName(NULL), fProtocolName(NULL), fRTPTimestampFrequency(0), fControlPath(NULL), fSourceFilterAddr(parent.sourceFilterAddr()), fAuxiliarydatasizelength(0), fConstantduration(0), fConstantsize(0), fCRC(0), fCtsdeltalength(0), fDe_interleavebuffersize(0), fDtsdeltalength(0), fIndexdeltalength(0), fIndexlength(0), fInterleaving(0), fMaxdisplacement(0), fObjecttype(0), fOctetalign(0), fProfile_level_id(0), fRobustsorting(0), fSizelength(0), fStreamstateindication(0), fStreamtype(0), fCpresent(False), fRandomaccessindication(False), fConfig(NULL), fMode(NULL), fSpropParameterSets(NULL), fPlayEndTime(0.0), fMCT_SLAP_SessionId(0), fMCT_SLAP_Stagger(0), fVideoWidth(0), fVideoHeight(0), fVideoFPS(0), fNumChannels(1), fScale(1.0f), fRTPSocket(NULL), fRTCPSocket(NULL), fRTPSource(NULL), fRTCPInstance(NULL), fReadSource(NULL) {#ifdef SUPPORT_REAL_RTSP RealInitSDPAttributes(this);#endif}MediaSubsession::~MediaSubsession() { deInitiate(); delete[] fConnectionEndpointName; delete[] fSavedSDPLines; delete[] fMediumName; delete[] fCodecName; delete[] fProtocolName; delete[] fControlPath; delete[] fConfig; delete[] fMode; delete[] fSpropParameterSets; delete fNext;#ifdef SUPPORT_REAL_RTSP RealReclaimSDPAttributes(this);#endif}float MediaSubsession::playEndTime() const { if (fPlayEndTime > 0) return fPlayEndTime; return fParent.playEndTime();}Boolean MediaSubsession::initiate(int 
useSpecialRTPoffset) {
  if (fReadSource != NULL) return True; // has already been initiated

  do {
    if (fCodecName == NULL) {
      env().setResultMsg("Codec is unspecified");
      break;
    }

    // Create RTP and RTCP 'Groupsocks' on which to receive incoming data.
    // (Groupsocks will work even for unicast addresses)
    Groupsock* oldGroupsock = NULL;
    Boolean success = False;
    struct in_addr tempAddr;
    tempAddr.s_addr = connectionEndpointAddress();
      // This could get changed later, as a result of a RTSP "SETUP"

    while (1) {
      // RTP uses the even port of a port pair; round our candidate down:
      unsigned short rtpPortNum = fClientPortNum&~1;
      if (isSSM()) {
	fRTPSocket = new Groupsock(env(), tempAddr, fSourceFilterAddr,
				   rtpPortNum);
      } else {
	fRTPSocket = new Groupsock(env(), tempAddr, rtpPortNum, 255);
      }
      if (fRTPSocket == NULL) {
	env().setResultMsg("Failed to create RTP socket");
	break;
      }

      // Get the client port number, to make sure that it's even (for RTP):
      Port clientPort(0);
      if (!getSourcePort(env(), fRTPSocket->socketNum(), clientPort)) {
	break;
      }
      fClientPortNum = ntohs(clientPort.num());
      // If the port number's not even, try again:
      if ((fClientPortNum&1) == 0) {
	success = True;
	break;
      }

      // Try again (deferring deletion of the old socket, so the kernel
      // doesn't hand us the same odd port right back):
      delete oldGroupsock;
      oldGroupsock = fRTPSocket;
      fClientPortNum = 0;
    }
    delete oldGroupsock;
    if (!success) break;

    // Set our RTCP port to be the RTP port +1
    unsigned short const rtcpPortNum = fClientPortNum|1;
    if (isSSM()) {
      fRTCPSocket = new Groupsock(env(), tempAddr, fSourceFilterAddr,
				  rtcpPortNum);
      // Also, send RTCP packets back to the source via unicast:
      if (fRTCPSocket != NULL) {
	fRTCPSocket->changeDestinationParameters(fSourceFilterAddr,0,~0);
      }
    } else {
      fRTCPSocket = new Groupsock(env(), tempAddr, rtcpPortNum, 255);
    }
    if (fRTCPSocket == NULL) {
      char tmpBuf[100];
      sprintf(tmpBuf, "Failed to create RTCP socket (port %d)", rtcpPortNum);
      env().setResultMsg(tmpBuf);
      break;
    }

    // Check "fProtocolName"
    if (strcmp(fProtocolName, "UDP") == 0) {
      // A UDP-packetized stream (*not* a RTP stream)
      fReadSource = BasicUDPSource::createNew(env(), fRTPSocket);
      fRTPSource = NULL; // Note!

      if (strcmp(fCodecName, "MP2T") == 0) { // MPEG-2 Transport Stream
	fReadSource = MPEG2TransportStreamFramer::createNew(env(), fReadSource);
	  // this sets "durationInMicroseconds" correctly, based on the PCR values
      }
    } else {
      // Check "fCodecName" against the set of codecs that we support,
      // and create our RTP source accordingly
      // (Later make this code more efficient, as this set grows #####)
      // (Also, add more fmts that can be implemented by SimpleRTPSource#####)
      Boolean createSimpleRTPSource = False;
      Boolean doNormalMBitRule = False; // used if "createSimpleRTPSource"
      if (strcmp(fCodecName, "QCELP") == 0) { // QCELP audio
	fReadSource =
	  QCELPAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource,
					 fRTPPayloadFormat,
					 fRTPTimestampFrequency);
	// Note that fReadSource will differ from fRTPSource in this case
      } else if (strcmp(fCodecName, "AMR") == 0) { // AMR audio (narrowband)
	fReadSource =
	  AMRAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource,
				       fRTPPayloadFormat, 0 /*isWideband*/,
				       fNumChannels, fOctetalign,
				       fInterleaving, fRobustsorting, fCRC);
	// Note that fReadSource will differ from fRTPSource in this case
      } else if (strcmp(fCodecName, "AMR-WB") == 0) { // AMR audio (wideband)
	fReadSource =
	  AMRAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource,
				       fRTPPayloadFormat, 1 /*isWideband*/,
				       fNumChannels, fOctetalign,
				       fInterleaving, fRobustsorting, fCRC);
	// Note that fReadSource will differ from fRTPSource in this case
      } else if (strcmp(fCodecName, "MPA") == 0) { // MPEG-1 or 2 audio
	fReadSource = fRTPSource
	  = MPEG1or2AudioRTPSource::createNew(env(), fRTPSocket,
					      fRTPPayloadFormat,
					      fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MPA-ROBUST") == 0) { // robust MP3 audio
	fRTPSource
	  = MP3ADURTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
				       fRTPTimestampFrequency);
	if (fRTPSource == NULL) break;

	// Add a filter that deinterleaves the ADUs after depacketizing them:
	MP3ADUdeinterleaver* deinterleaver
	  = MP3ADUdeinterleaver::createNew(env(), fRTPSource);
	if (deinterleaver == NULL) break;

	// Add another filter that converts these ADUs to MP3 frames:
	fReadSource = MP3FromADUSource::createNew(env(), deinterleaver);
      } else if (strcmp(fCodecName, "X-MP3-DRAFT-00") == 0) {
	// a non-standard variant of "MPA-ROBUST" used by RealNetworks
	// (one 'ADU'ized MP3 frame per packet; no headers)
	fRTPSource
	  = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
				       fRTPTimestampFrequency,
				       "audio/MPA-ROBUST" /*hack*/);
	if (fRTPSource == NULL) break;

	// Add a filter that converts these ADUs to MP3 frames:
	fReadSource = MP3FromADUSource::createNew(env(), fRTPSource,
						  False /*no ADU header*/);
      } else if (strcmp(fCodecName, "MP4A-LATM") == 0) { // MPEG-4 LATM audio
	fReadSource = fRTPSource
	  = MPEG4LATMAudioRTPSource::createNew(env(), fRTPSocket,
					       fRTPPayloadFormat,
					       fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "AC3") == 0) { // AC3 audio
	fReadSource = fRTPSource
	  = AC3AudioRTPSource::createNew(env(), fRTPSocket,
					 fRTPPayloadFormat,
					 fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MP4V-ES") == 0) { // MPEG-4 Elem Str vid
	fReadSource = fRTPSource
	  = MPEG4ESVideoRTPSource::createNew(env(), fRTPSocket,
					     fRTPPayloadFormat,
					     fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MPEG4-GENERIC") == 0) {
	fReadSource = fRTPSource
	  = MPEG4GenericRTPSource::createNew(env(), fRTPSocket,
					     fRTPPayloadFormat,
					     fRTPTimestampFrequency,
					     fMediumName, fMode,
					     fSizelength, fIndexlength,
					     fIndexdeltalength);
      } else if (strcmp(fCodecName, "MPV") == 0) { // MPEG-1 or 2 video
	fReadSource = fRTPSource
	  = MPEG1or2VideoRTPSource::createNew(env(), fRTPSocket,
					      fRTPPayloadFormat,
					      fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MP2T") == 0) { // MPEG-2 Transport Stream
	fRTPSource = SimpleRTPSource::createNew(env(), fRTPSocket,
						fRTPPayloadFormat,
						fRTPTimestampFrequency,
						"video/MP2T", 0, False);
	fReadSource = MPEG2TransportStreamFramer::createNew(env(), fRTPSource);
	  // this sets "durationInMicroseconds" correctly, based on the PCR values
      } else if (strcmp(fCodecName, "H261") == 0) { // H.261
	fReadSource = fRTPSource
	  = H261VideoRTPSource::createNew(env(), fRTPSocket,
					  fRTPPayloadFormat,
					  fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "H263-1998") == 0 ||
		 strcmp(fCodecName, "H263-2000") == 0) { // H.263+
	fReadSource = fRTPSource
	  = H263plusVideoRTPSource::createNew(env(), fRTPSocket,
					      fRTPPayloadFormat,
					      fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "H264") == 0) {
	fReadSource = fRTPSource
	  = H264VideoRTPSource::createNew(env(), fRTPSocket,
					  fRTPPayloadFormat,
					  fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "JPEG") == 0) { // motion JPEG
	fReadSource = fRTPSource
	  = JPEGVideoRTPSource::createNew(env(), fRTPSocket,
					  fRTPPayloadFormat,
					  fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "X-QT") == 0 ||
		 strcmp(fCodecName, "X-QUICKTIME") == 0) {
	// Generic QuickTime streams, as defined in
	// <http://developer.apple.com/quicktime/icefloe/dispatch026.html>
	char* mimeType
	  = new char[strlen(mediumName()) + strlen(codecName()) + 2];
	sprintf(mimeType, "%s/%s", mediumName(), codecName());
// [NOTE: this extract is truncated here — the remainder of
//  MediaSubsession::initiate() (and the rest of the file) is not included.]