📄 wis-streamer.cpp
/*
 * Copyright (C) 2005 WIS Technologies International Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and the associated README documentation file (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

// An application that streams audio/video captured by a WIS GO7007,
// using a built-in RTSP server.
// main program

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <getopt.h>
#include <unistd.h> // for gethostname()
#include "Options.hh"
#include "WISJPEGVideoServerMediaSubsession.hh"
#include "WISMPEG1or2VideoServerMediaSubsession.hh"
#include "WISMPEG4VideoServerMediaSubsession.hh"
#include "WISPCMAudioServerMediaSubsession.hh"
#include "WISJPEGStreamSource.hh"

#define PCM_AUDIO_IS_LITTLE_ENDIAN 1

static void setupUnicastStreaming(WISInput& inputDevice, ServerMediaSession* sms);
static void setupMulticastStreaming(WISInput& inputDevice, ServerMediaSession* sms);
static void reclaimMulticastStreaming();

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // Print an introduction message:
  *env << "wis-streamer: A streaming server for the WIS GO7007 encoder driver\n";
  *env << "\tFor driver documentation and source code, see: <http://oss.wischip.com/>\n";
  *env << "\tFor server documentation and source code, see: <http://www.live.com/wis-streamer/>\n";
  *env << "\tBuilt using \"LIVE.COM Streaming Media\": <http://www.live.com/liveMedia/>\n";

  // Parse command-line options:
  checkArgs(*env, argc, argv);

  *env << "Initializing...";

  // Initialize the WIS input device:
  WISInput* inputDevice = WISInput::createNew(*env);
  if (inputDevice == NULL) {
    *env << "Failed to create WIS input device\n";
    exit(1);
  }

  // Create the RTSP server:
  RTSPServer* rtspServer = RTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  *env << "...done\n";

  // Create a record describing the media to be streamed:
  ServerMediaSession* sms
    = ServerMediaSession::createNew(*env, "", NULL, streamDescription,
                                    streamingMode == STREAMING_MULTICAST_SSM);
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL:\n\t" << url << "\n";
  delete[] url;

  // Configure it for unicast or multicast streaming:
  if (streamingMode == STREAMING_UNICAST) {
    setupUnicastStreaming(*inputDevice, sms);
  } else {
    setupMulticastStreaming(*inputDevice, sms);
  }

  // Begin the LIVE.COM event loop:
  env->taskScheduler().doEventLoop(); // does not return

  // If "doEventLoop()" *did* return, we'd now do this to reclaim resources before exiting:
  if (streamingMode != STREAMING_UNICAST) reclaimMulticastStreaming();
  Medium::close(rtspServer); // will also reclaim "sms" and its "ServerMediaSubsession"s
  Medium::close(inputDevice);
  reclaimArgs();
  env->reclaim();
  delete scheduler;

  return 0; // only to prevent compiler warning
}
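// Note: the "WIS...ServerMediaSubsession" classes used below are on-demand
// subsessions: RTP packets for a track are generated only while at least one
// RTSP client has requested that track, rather than being sent continuously.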
static void setupUnicastStreaming(WISInput& inputDevice, ServerMediaSession* sms) {
  // Add a subsession for the desired video format (if any):
  switch (videoFormat) {
    case VFMT_NONE:
      break; // do nothing
    case VFMT_MJPEG:
      sms->addSubsession(WISJPEGVideoServerMediaSubsession
                         ::createNew(sms->envir(), inputDevice, videoBitrate));
      break;
    case VFMT_MPEG1:
    case VFMT_MPEG2:
      sms->addSubsession(WISMPEG1or2VideoServerMediaSubsession
                         ::createNew(sms->envir(), inputDevice, videoBitrate));
      break;
    case VFMT_MPEG4:
      sms->addSubsession(WISMPEG4VideoServerMediaSubsession
                         ::createNew(sms->envir(), inputDevice, videoBitrate));
      break;
  }

  // Add a subsession for the desired audio format (if any):
  Boolean convertToULaw = False; // by default
  switch (audioFormat) {
    case AFMT_NONE:
      break; // do nothing
    case AFMT_PCM_ULAW:
      convertToULaw = True;
      // Fall through to:
    case AFMT_PCM_RAW16:
      sms->addSubsession(WISPCMAudioServerMediaSubsession
                         ::createNew(sms->envir(), inputDevice,
                                     audioSamplingFrequency, audioIsStereo ? 2 : 1,
                                     PCM_AUDIO_IS_LITTLE_ENDIAN, convertToULaw));
      break;
  }
}
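// For multicast, the setup is different: the RTP/RTCP packets are sent
// continuously to a fixed multicast address, and the RTSP server merely
// *describes* these pre-existing streams (via "PassiveServerMediaSubsession"s),
// so all clients receive the same packets.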
// Objects used for multicast streaming:
static Groupsock* rtpGroupsockAudio = NULL;
static Groupsock* rtcpGroupsockAudio = NULL;
static Groupsock* rtpGroupsockVideo = NULL;
static Groupsock* rtcpGroupsockVideo = NULL;
static FramedSource* sourceAudio = NULL;
static RTPSink* sinkAudio = NULL;
static RTCPInstance* rtcpAudio = NULL;
static FramedSource* sourceVideo = NULL;
static RTPSink* sinkVideo = NULL;
static RTCPInstance* rtcpVideo = NULL;

static void setupMulticastStreaming(WISInput& inputDevice, ServerMediaSession* sms) {
  UsageEnvironment& env = sms->envir();
  struct in_addr dest;
  dest.s_addr = multicastAddress;
  const unsigned char ttl = 255;

  // For RTCP:
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen + 1];
  gethostname((char*) CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case

  /****************** audio ***********************/
  if (audioFormat != AFMT_NONE) {
    // Create the audio source:
    FramedSource* pcmSource = inputDevice.audioSource();

    // Add in any filter necessary to transform the data prior to streaming:
    if (audioFormat == AFMT_PCM_ULAW) { // stream u-law
      // Add a filter that converts from raw 16-bit PCM audio
      // to 8-bit u-law audio:
      sourceAudio = uLawFromPCMAudioSource::createNew(env, pcmSource);
    } else { // stream raw PCM
      if (PCM_AUDIO_IS_LITTLE_ENDIAN) {
        // The 16-bit samples are in little-endian order.  Add a filter
        // that converts them to network (i.e., big-endian) order:
        sourceAudio = EndianSwap16::createNew(env, pcmSource);
      } else {
        // The 16-bit samples are already in big-endian order:
        sourceAudio = pcmSource;
      }
    }

    // Create 'groupsocks' for RTP and RTCP:
    const Port rtpPortAudio(audioRTPPortNum);
    const Port rtcpPortAudio(audioRTPPortNum + 1);
    rtpGroupsockAudio = new Groupsock(env, dest, rtpPortAudio, ttl);
    rtcpGroupsockAudio = new Groupsock(env, dest, rtcpPortAudio, ttl);
    if (streamingMode == STREAMING_MULTICAST_SSM) {
      rtpGroupsockAudio->multicastSendOnly();
      rtcpGroupsockAudio->multicastSendOnly();
    }

    // Create a 'Simple RTP' sink from the RTP 'groupsock' (to stream raw or u-law PCM):
    char const* mimeType; // ("const" added: these are string literals)
    unsigned char payloadFormatCode = 96; // by default
    unsigned audioNumChannels = audioIsStereo ? 2 : 1;
    unsigned bitsPerSecond = audioSamplingFrequency*16/*bits-per-sample*/*audioNumChannels;
    if (audioFormat == AFMT_PCM_ULAW) { // stream u-law
      mimeType = "PCMU";
      if (audioSamplingFrequency == 8000 && audioNumChannels == 1) {
        payloadFormatCode = 0; // a static RTP payload type
      }
      bitsPerSecond /= 2;
    } else { // stream raw PCM
      mimeType = "L16";
      if (audioSamplingFrequency == 44100 && audioNumChannels == 2) {
        payloadFormatCode = 10; // a static RTP payload type
      } else if (audioSamplingFrequency == 44100 && audioNumChannels == 1) {
        payloadFormatCode = 11; // a static RTP payload type
      }
    }
    setAudioRTPSinkBufferSize();
    sinkAudio = SimpleRTPSink::createNew(env, rtpGroupsockAudio, payloadFormatCode,
                                         audioSamplingFrequency, "audio", mimeType,
                                         audioNumChannels);

    // Create (and start) a 'RTCP instance' for this RTP sink:
    unsigned totalSessionBandwidthAudio
      = (bitsPerSecond + 500)/1000; // in kbps; for RTCP b/w share
    rtcpAudio = RTCPInstance::createNew(env, rtcpGroupsockAudio,
                                        totalSessionBandwidthAudio, CNAME,
                                        sinkAudio, NULL /* we're a server */,
                                        streamingMode == STREAMING_MULTICAST_SSM);
    // Note: This starts RTCP running automatically

    sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkAudio, rtcpAudio));

    // Start streaming:
    sinkAudio->startPlaying(*sourceAudio, NULL, NULL);
  }

  /****************** video ***********************/
  if (videoFormat != VFMT_NONE) {
    // Create the video source:
    switch (videoFormat) {
      case VFMT_NONE: // not used
        break;
      case VFMT_MJPEG: {
        sourceVideo = WISJPEGStreamSource::createNew(inputDevice.videoSource());
        break;
      }
      case VFMT_MPEG1:
      case VFMT_MPEG2: {
        sourceVideo
          = MPEG1or2VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
        break;
      }
      case VFMT_MPEG4: {
        sourceVideo
          = MPEG4VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
        break;
      }
    }

    // Create 'groupsocks' for RTP and RTCP:
    const Port rtpPortVideo(videoRTPPortNum);
    const Port rtcpPortVideo(videoRTPPortNum + 1);
    rtpGroupsockVideo = new Groupsock(env, dest, rtpPortVideo, ttl);
    rtcpGroupsockVideo = new Groupsock(env, dest, rtcpPortVideo, ttl);
    if (streamingMode == STREAMING_MULTICAST_SSM) {
      rtpGroupsockVideo->multicastSendOnly();
      rtcpGroupsockVideo->multicastSendOnly();
    }

    // Create an appropriate 'Video RTP' sink from the RTP 'groupsock':
    unsigned char payloadFormatCode = 97; // if dynamic
    setVideoRTPSinkBufferSize();
    switch (videoFormat) {
      case VFMT_NONE: // not used
        break;
      case VFMT_MJPEG: {
        sinkVideo = JPEGVideoRTPSink::createNew(env, rtpGroupsockVideo);
        break;
      }
      case VFMT_MPEG1:
      case VFMT_MPEG2: {
        sinkVideo = MPEG1or2VideoRTPSink::createNew(env, rtpGroupsockVideo);
        break;
      }
      case VFMT_MPEG4: {
        sinkVideo = MPEG4ESVideoRTPSink::createNew(env, rtpGroupsockVideo, payloadFormatCode);
        break;
      }
    }

    // Create (and start) a 'RTCP instance' for this RTP sink:
    unsigned totalSessionBandwidthVideo
      = (videoBitrate + 500)/1000; // in kbps; for RTCP b/w share
    rtcpVideo = RTCPInstance::createNew(env, rtcpGroupsockVideo,
                                        totalSessionBandwidthVideo, CNAME,
                                        sinkVideo, NULL /* we're a server */,
                                        streamingMode == STREAMING_MULTICAST_SSM);
    // Note: This starts RTCP running automatically

    sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkVideo, rtcpVideo));

    // Start streaming:
    sinkVideo->startPlaying(*sourceVideo, NULL, NULL);
  }
}
static void reclaimMulticastStreaming() {
  Medium::close(rtcpAudio);
  Medium::close(sinkAudio);
  Medium::close(sourceAudio);
  delete rtpGroupsockAudio;
  delete rtcpGroupsockAudio;

  Medium::close(rtcpVideo);
  Medium::close(sinkVideo);
  Medium::close(sourceVideo);
  delete rtpGroupsockVideo;
  delete rtcpGroupsockVideo;
}
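The skeleton that main() follows above (scheduler, then environment, then RTSP server, then session, then event loop) is the standard LIVE555 setup pattern. Below is a minimal, self-contained sketch of that same pattern with the WIS-specific pieces stripped out; the port number 8554 and the stream name "test" are arbitrary choices for illustration, not taken from wis-streamer (which uses the empty stream name and a configurable port/auth database):

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>

int main() {
  // Event-loop plumbing: every LIVE555 object needs a UsageEnvironment.
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // An RTSP server listening on an arbitrary port (8554 here), no authentication:
  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, NULL);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    return 1;
  }

  // A (so far empty) session; real code would now call "addSubsession()"
  // once per media track, as wis-streamer does above:
  ServerMediaSession* sms
    = ServerMediaSession::createNew(*env, "test", NULL, "example stream");
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL: " << url << "\n";
  delete[] url;

  env->taskScheduler().doEventLoop(); // does not return
  return 0;
}

Once such a server is running, the printed rtsp:// URL can be opened with any standard RTSP client (e.g. openRTSP or VLC).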