
mediademux.cpp
A mobile digital TV driver written for the WinCE environment (C++)
Page 1 of 2
/////////////////
//Author: Wenson Chen.
//Company: Innofidei.com
//Date: 2007/09/26-
//License Notes:
//

/*++

Copyright (C) 2007-2008 Innofidei, Inc.

Module Name: MediaDemux.cpp

Demux Audio, Video and Program guide

History: 
--*/

#include "Demux.h"
#include "string.h"
#include "stdio.h"

#ifndef _DEBUG
//#define printf(...)	//uncomment to silence debug output in release builds
#endif
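
//--------------------------------------------------------------------------
//NOTE: GET1 and STEP1/STEP2/STEP3/STEP4 are byte-reader helpers declared in
//Demux.h and are not shown in this listing.  The sketch below is only an
//assumption of their behaviour, inferred from how they are used in this file:
//GET1 peeks one byte without advancing, STEPn reads n bytes (assumed
//big-endian) and advances the pointer.  It is illustrative and compiled out.
//--------------------------------------------------------------------------
#if 0
#define GET1(p)   ((BYTE)(p)[0])
#define STEP1(p)  (*(p)++)
#define STEP2(p)  ((p) += 2, (WORD)(((p)[-2] << 8) | (p)[-1]))
#define STEP3(p)  ((p) += 3, (DWORD)(((p)[-3] << 16) | ((p)[-2] << 8) | (p)[-1]))
#define STEP4(p)  ((p) += 4, (DWORD)(((p)[-4] << 24) | ((p)[-3] << 16) | ((p)[-2] << 8) | (p)[-1]))
#endif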
//This function is used when the segment header has been parsed correctly.
//We simply remove the mode 2 header from each unit.
//Returns true if all the fragments are correctly combined.
//Parameter:
//wUnitLen is an IN/OUT parameter. The input is the unit length (with mode 2 headers);
//the output is the combined length, without the mode 2 headers.
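//Mode 2 unit header layout, as used by the parsing below:
//  byte 0       : sync byte, always 0x55
//  next 16 bits : bit14 = last-fragment flag, bits 13-12 = packet type,
//                 bits 11-0 = payload length (pkLen)
//  next byte    : unit type, present only when packet type == 2
//  next byte    : CRC-8 over the preceding header bytes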
bool CDemuxer::CombineMode2Fragment(LPBYTE lpIn, WORD& wUnitLen)
{
	LPBYTE lpEnd	= lpIn + wUnitLen;
	WORD pure_len	= 0;
	LPBYTE dest		= lpIn;//DO NOT alloc another buffer, just compact to the input buffer

	while(lpIn + 4 < lpEnd)
	{
		BYTE startByte		= STEP1(lpIn);
		if(startByte != 0x55)
		{
			printf("CombineMode2Fragment fail: NOT 0x55\n");
			return false;
		}
		WORD temp			= STEP2(lpIn);
		WORD pkLen			= temp&0xfff;

		BYTE unitType		= 0xff;
		BYTE mode2HeadLen	= 3;
		BYTE packetType = (temp>>12) & 3;
		if(packetType == 2)
		{
			unitType	= STEP1(lpIn);
			mode2HeadLen+=1;
		}

		BYTE crc8			= STEP1(lpIn);
		BYTE realcrc8		= CalcCRC8(lpIn-mode2HeadLen-1, mode2HeadLen);

		if(realcrc8 == crc8)//Checksum ok
		{
			memcpy(dest, lpIn, pkLen);
			pure_len		+=pkLen;
			if((temp>>14)&1)	//This is the last fragment
			{
				wUnitLen	= pure_len;
				return true;
			}
			dest			+= pkLen;
			lpIn			+= pkLen;
			continue;
		}
		printf("CombineMode2Fragment fail!!!!!!!!!!!!!!!!!!!!\n");
		//We can recover some data here
		return false;
	}
	return false;
}
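
//--------------------------------------------------------------------------
//CalcCRC8 is implemented elsewhere in this project and its polynomial is not
//visible in this listing.  The sketch below only illustrates what a typical
//bitwise CRC-8 (polynomial 0x07, initial value 0x00) looks like; the real
//implementation may use a different polynomial or a lookup table.
//--------------------------------------------------------------------------
#if 0
static BYTE ExampleCrc8(const BYTE* data, int len)
{
	BYTE crc = 0;
	for(int i = 0; i < len; i++)
	{
		crc ^= data[i];
		for(int b = 0; b < 8; b++)
			crc = (crc & 0x80) ? (BYTE)((crc << 1) ^ 0x07) : (BYTE)(crc << 1);
	}
	return crc;
}
#endif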

//If a checksum error occurs in the frame header, subframe header or segment header,
//use this function to recover as much data as possible
void CDemuxer::RecoverMode2Fragment(LPBYTE lpIn, DWORD dwDataLen, int data_type)
{
	LPBYTE lpEnd = lpIn + dwDataLen;

	while(lpIn + 4 < lpEnd)
	{
		//OnAVMode2Fragment (declared elsewhere) is expected to advance lpIn past
		//the fragment it consumes; otherwise this loop would not terminate.
		OnAVMode2Fragment(0, lpIn, lpEnd, UNKNOWN);
	}
}

void CDemuxer::ParseXpePayload(LPBYTE dest, WORD length, bool is_file_mode)
{
	if(is_file_mode == false)	//the payload is an IP packet carrying RTP
	{
		dest+=28;				//skip the IPv4 (20 bytes, no options) and UDP (8 bytes) headers
		length-=28;
#ifdef RTP_OUTPUT_SUPPORT
		OnAvRtpPacket(dest, length);
#else
		CRtpHeader rtp_header;
		rtp_header.Parse(dest);
		switch(rtp_header.pt)
		{
		case RTP_PT_H264:	RtpToNal(rtp_header.get_ts(), dest+12, length-12,WHOLE_FRAME);	break;
		case RTP_PT_AACP:	RtpToAacFrame(rtp_header.get_ts(), dest+12, length-12); //OnAudioUnit(rtp_header.get_ts(), dest+12, length-12);	
							break;
		default:	printf("Wrong RTP payload type %d\n", rtp_header.pt);	break;
		}
#endif
		return;
	}
	OnFileModeXpe(dest, length);
}

void CDemuxer::ParseVideoSegment(LPBYTE lpIn, DWORD dwStartTimeStamp, DWORD dwVideoSegLen)
{
	CVideoDataSegHeader	tVideoSegHeader;
	DWORD dwRet = tVideoSegHeader.Parse(lpIn/*, dwVideoSegLen*/);
	if(DEMUX_NO_ERROR != dwRet)
	{
		RecoverMode2Fragment(lpIn, dwVideoSegLen, VIDEO);
		return;
	}

	OnVideoSegmentHeader(tVideoSegHeader);

	for (unsigned i=0;i<tVideoSegHeader.wVideoUnitNum;i++)
	{
		WORD wUnitLen = tVideoSegHeader.tVideoUnitParams[i].wUnitLen;

		DWORD absoluteTime = tVideoSegHeader.tVideoUnitParams[i].wRelativeTime + dwStartTimeStamp;
		switch(tSubFrame[m_subFrameId].packetMode)
		{
		case 0://MODE 2
			OnVideoUnit(absoluteTime, lpIn, wUnitLen);
			break;
		case 1://MODE 1
#ifndef RTP_OUTPUT_SUPPORT
			OnVideoNalData(absoluteTime, lpIn, wUnitLen,WHOLE_FRAME);
#else
			// directly support RTP packet casting:
			// split the Annex-B stream on 00 00 01 start codes and send each NAL unit via NalToRtp
			{
				BYTE* data = lpIn;
				if(data)
				{
					int last = 0;
					int stage = 0;	//count of consecutive zero bytes
					for(DWORD j=2;j<wUnitLen;j++)
					{
						if (data[j] == 0)
						{
							stage ++;
							continue;
						}
						if (data[j] == 1 && stage >= 2)
						{
							//start code begins at j-2: flush the preceding NAL unit
							NalToRtp(m_videoSeq, absoluteTime, data+last, j-2-last);

							last=j-2;
							stage=0;
							continue;
						}
						stage=0;
					}
					//flush the final NAL unit
					NalToRtp(m_videoSeq, absoluteTime, data+last, wUnitLen-last);
				}
			}
#endif
			break;
		}
		lpIn += wUnitLen;
	}
	printf("-video(%d units)-\n", tVideoSegHeader.wVideoUnitNum);
}

void CDemuxer::ParseAudioSegment(LPBYTE lpIn, DWORD dwStartTimeStamp, DWORD dwAudioSegLen)
{
	CAudioDataSegHeader	tAudioSegHeader;
	DWORD dwRet = tAudioSegHeader.Parse(lpIn/*, dwAudioSegLen*/);

	if(DEMUX_NO_ERROR != dwRet)
	{
		RecoverMode2Fragment(lpIn, dwAudioSegLen, AUDIO);
		return;
	}

	OnAudioSegmentHeader(tAudioSegHeader);
	for(unsigned i=0;i<tAudioSegHeader.nUnitNum;i++)
	{
		WORD wUnitLen = tAudioSegHeader.tAudioUnitParams[i].wUnitLen;
		DWORD absoluteTime = tAudioSegHeader.tAudioUnitParams[i].wRelativeTime + dwStartTimeStamp;
		switch(tSubFrame[m_subFrameId].packetMode)
		{
		case 0://MODE 2
			OnAudioUnit(absoluteTime, lpIn, wUnitLen);
			break;
		case 1://MODE 1 (Obsolete)
#ifndef RTP_OUTPUT_SUPPORT
			RtpToAacFrame(absoluteTime, lpIn, wUnitLen);
#else
			//
			// directly support RTP packet casting
			//
			{
				//rebuild a 12-byte RTP header in place, just in front of the audio unit
				CRtp * pRtp     = (CRtp*)(lpIn - 12);
				pRtp->set_seq(m_audioSeq++);
				pRtp->set_ts(absoluteTime);

				pRtp->x         = 0;
				pRtp->cc        = 0;    
				pRtp->m         = 1;
				pRtp->pt        = RTP_PT_AACP;
				pRtp->p         = 0;
				pRtp->ssrc_0    = 0;
				pRtp->ssrc_1    = 0;
				pRtp->ssrc_2    = 0;
				pRtp->ssrc_3    = 0;
				pRtp->version   = 2;
				OnAvRtpPacket((LPBYTE)pRtp, wUnitLen + 12);
			}
#endif
			break;
		}

		lpIn += wUnitLen;
	}
	printf("-audio(%d units)-\n", tAudioSegHeader.nUnitNum);
}

void CDemuxer::ParseProgramGuideInfo(LPBYTE lpIn, WORD wUnitLen)
{
	CProgramGuide pf;
	pf.Parse(lpIn, wUnitLen);
	OnProgramGuide(pf);
}

void CDemuxer::ParseXpe(LPBYTE lpIn, WORD wUnitLen)
{
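	//XPE reassembly overview (see the branches below):
	//  startFlag=1, endFlag=1 : the XPE carries a complete IP packet; hand it to ParseXpePayload
	//  startFlag=1, endFlag=0 : first fragment; reset XPE_pos and start buffering into XPE_pl
	//  startFlag=0 (XPE_State): continuation; append to XPE_pl, and on endFlag=1 parse the assembled packet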
	//We assume that the XPE might be split, with the remainder transmitted in a later frame
	WORD wXpeLen = wUnitLen;//there is no CRC32 here, so we do not subtract 4
	LPBYTE pXpe = lpIn;

	CXpe xpe;
	if(false == xpe.ParseHeader(pXpe, wXpeLen))
	{
		XPE_pos = 0;
		XPE_State = false;
		return;
	}
	if(xpe.startFlag == 1)
	{
		XPE_pos = 0;
		serviceMode = xpe.serviceModeFlag;
		if(xpe.endFlag == 1)
		{
			printf("This XPE is a complete IP packet\n");
			ParseXpePayload(pXpe, xpe.pl, serviceMode);
			XPE_State = false;
		}
		else
		{
			XPE_total_len = xpe.dataLength;
			printf("This XPE is %d bytes, buffer %d bytes\n", XPE_total_len, xpe.pl);
			memcpy(XPE_pl+XPE_pos, pXpe, xpe.pl);
			XPE_pos+=xpe.pl;
			XPE_State = true;
		}
	}
	else if(XPE_State == true)
	{
		if(xpe.endFlag == 1)
		{
			printf("buffer more %d bytes\n", xpe.pl);
			memcpy(XPE_pl+XPE_pos, pXpe, xpe.pl);
			ParseXpePayload(XPE_pl, XPE_pos+xpe.pl, serviceMode);
			XPE_pos = 0;
			XPE_State = false;
		}
		else
		{
			printf("buffer more %d bytes\n", xpe.pl);
			memcpy(XPE_pl+XPE_pos, pXpe, xpe.pl);
			XPE_pos+=xpe.pl;
		}
	}
}

void CDemuxer::ParseDataSegment(LPBYTE lpIn, DWORD dwDataSegLen)
{
	CDataDataSegHeader	tDataSegHeader;
	DWORD dwRet = tDataSegHeader.Parse(lpIn, dwDataSegLen);
	if(DEMUX_NO_ERROR != dwRet)
	{
		RecoverMode2Fragment(lpIn, dwDataSegLen, DATA);
		return;
	}

	OnDataSegmentHeader(tDataSegHeader);

	for (unsigned i=0;i<tDataSegHeader.nUnitNum;i++)
	{
		WORD wUnitLen = tDataSegHeader.tDataUnitParams[i].wUnitLen;
		switch(tSubFrame[m_subFrameId].packetMode)
		{
		case 0:
			OnDataUnit(tDataSegHeader.tDataUnitParams[i].nUnitType, lpIn, wUnitLen);
			break;
		case 1:
			OnMode1DataUnit(tDataSegHeader.tDataUnitParams[i].nUnitType, lpIn, wUnitLen);
			break;
		}

		lpIn += tDataSegHeader.tDataUnitParams[i].wUnitLen;
	}
	printf("-data(%d)-\n", tDataSegHeader.nUnitNum);
}

void CDemuxer::OnDataUnit(BYTE nUnitType, LPBYTE lpIn, WORD wUnitLen)
{
	if(CombineMode2Fragment(lpIn, wUnitLen) == false)
	{
		XPE_pos		= 0;
		XPE_State	= false;
		return;
	}
	OnMode1DataUnit(nUnitType, lpIn, wUnitLen);
}

DWORD CDemuxer::ParseSubframe(int subId, LPBYTE lpIn)
{
	m_subFrameId = subId;//2008,3,12
	DWORD dwRet = tSubFrame[subId].ParseHeader(lpIn);//Parse SubFrameHeader
	if(DEMUX_NO_ERROR != dwRet)
	{
		RecoverMode2Fragment(lpIn, dwSubFrameLen[subId]);//UNKNOWN type
		return dwRet;
	}

	lpIn += tSubFrame[subId].nHeaderLen+4;	//skip the subframe header and its 4-byte CRC32
	if(tSubFrame[subId].dwVideoSegLen > 0 )
	{ 
		ParseVideoSegment(lpIn, tSubFrame[subId].dwStartTimeStamp, tSubFrame[subId].dwVideoSegLen);
		lpIn += tSubFrame[subId].dwVideoSegLen;
	}

	if(tSubFrame[subId].dwAudioSegLen > 0 )
	{
		ParseAudioSegment(lpIn, tSubFrame[subId].dwStartTimeStamp, tSubFrame[subId].dwAudioSegLen);
		lpIn += tSubFrame[subId].dwAudioSegLen;
	}

	if(tSubFrame[subId].dwDataSegLen > 0 )
	{
		ParseDataSegment(lpIn, tSubFrame[subId].dwDataSegLen);
	}

	return DEMUX_NO_ERROR;
}

void CRtpHeader::Parse(LPBYTE lpIn)
{
	BYTE b = STEP1(lpIn);
	version = b>>6;				// protocol version 
	p		= (b>>5)&1;         // padding flag 
	x		= (b>>4)&1;         // header extension flag 
	cc		= b&15;				// CSRC count 
	b		= STEP1(lpIn);
	m		= b>>7;				// marker bit 
	pt		= b&0x7f;			// payload type 
	seq_h	= STEP1(lpIn);      // sequence number 
	seq_l	= STEP1(lpIn);
	ts_0	= STEP1(lpIn);
	ts_1	= STEP1(lpIn);
	ts_2	= STEP1(lpIn);
	ts_3	= STEP1(lpIn);
	ssrc_0	= STEP1(lpIn);
	ssrc_1	= STEP1(lpIn);
	ssrc_2	= STEP1(lpIn);
	ssrc_3	= STEP1(lpIn);
}
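
//--------------------------------------------------------------------------
//Assumption: get_ts() is declared in Demux.h and is not shown in this listing.
//Since the RTP timestamp is carried in network (big-endian) byte order, it
//presumably recombines the four bytes parsed above like this (the sequence
//number bytes seq_h/seq_l would be combined in the same way).
//--------------------------------------------------------------------------
#if 0
DWORD CRtpHeader::get_ts()
{
	return ((DWORD)ts_0 << 24) | ((DWORD)ts_1 << 16) | ((DWORD)ts_2 << 8) | ts_3;
}
#endif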

DWORD CSubframe::ParseHeader(LPBYTE lpIn)
{
	DWORD i=0;
	nHeaderLen						= GET1(lpIn);
	if(false == CheckCRC32(lpIn, nHeaderLen))
	{
		return DEMUX_CRC_ERROR;
	}
	/*====================================================================*/
	lpIn ++;
	BYTE nTemp						= STEP1(lpIn);
	nStartTimeFlag					= nTemp >> 7;		//bit7
	nVideoSegLenFlag				= (nTemp >> 6) & 1;	//bit6
	nAudioSegLenFlag				= (nTemp >> 5) & 1;	//bit5
	nDataSegLenFlag					= (nTemp >> 4) & 1;	//bit4
	nExtendZoneFlag					= (nTemp >> 3) & 1;	//bit3
	nCyptInd						= (nTemp>>1) & 3;	//bits 1-2: encryption indication
	packetMode						= nTemp & 1;		//bit0

	if(nStartTimeFlag)
	{
		dwStartTimeStamp			= STEP4(lpIn);
		printf("Subframe StartTimeStamp: %u\n", dwStartTimeStamp/22500);
	}
	else
	{
		dwStartTimeStamp = 0;
	}

	if(nVideoSegLenFlag)
	{
		DWORD u32Tmp				= STEP3(lpIn);
		dwVideoSegLen				= u32Tmp >> 3;
		nVideoStreamNum				= BYTE(u32Tmp & 0x7);
	}
	else
	{
		dwVideoSegLen				= 0;
		nVideoStreamNum				= 0;
	}
	if(nAudioSegLenFlag)
	{
		DWORD u32Tmp				= STEP3(lpIn);
		dwAudioSegLen				= u32Tmp>>3;
		nAudioStreamNum				= BYTE(u32Tmp & 0x7);
	}
	else
	{
		dwAudioSegLen				= 0;
		nAudioStreamNum				= 0;
	}
	if(nDataSegLenFlag)
	{
		DWORD u32Tmp				= STEP3(lpIn);
		dwDataSegLen				= u32Tmp>>3;
		ECMInd						= (BYTE)((u32Tmp>>2)&1);	//2008, Jan
	}
	else
	{
		dwDataSegLen				= 0;
		ECMInd						= 0;	//2008, Jan
	}
	if(nExtendZoneFlag)
	{
		for(i = 0; i < nVideoStreamNum; i++)
		{
			nTemp = STEP1(lpIn);
			taVideoPara[i].nAlgorithmType	= nTemp >> 5;	//3 bits
			taVideoPara[i].nBRFlag			= (nTemp >> 4) & 1;
			taVideoPara[i].nPPFlag			= (nTemp >> 3) & 1;
			taVideoPara[i].bResolution		= (nTemp >> 2) & 1;
			taVideoPara[i].bFR				= (nTemp >> 1) & 1;
			//Reserved 1 bit

			if(taVideoPara[i].nBRFlag)					//bit rate field present
			{                                        
				taVideoPara[i].wBitRate		= STEP2(lpIn);
			}
			else
			{
				taVideoPara[i].wBitRate = 0;
			}
			if(taVideoPara[i].nPPFlag)					//picture position field present
			{                    
				WORD wTemp = STEP2(lpIn);
				taVideoPara[i].nXCoordinate = wTemp >> 10;			//6 bits
				taVideoPara[i].nYCoordinate = (wTemp >> 4) & 0x3f;	//6 bits
				taVideoPara[i].nDisplayPRI  = (wTemp >> 1) & 7;		//3 bits
				//Reserved 1 bit
			}
			else
			{
				taVideoPara[i].nXCoordinate = 0;
				taVideoPara[i].nYCoordinate = 0;
				taVideoPara[i].nDisplayPRI  = 0;
			}
			if(taVideoPara[i].bResolution)				//resolution field present
			{                    
				DWORD u32Tmp = STEP3(lpIn);
				taVideoPara[i].wXResolution = WORD((u32Tmp>>10) & 0x3ff);	//10 bits
				taVideoPara[i].wYResolution = WORD(u32Tmp & 0x3ff);		//10 bits
			}
			else
			{
				taVideoPara[i].wXResolution = 0;
				taVideoPara[i].wYResolution = 0;
			}
			if(taVideoPara[i].bFR)				//frame rate field present
			{                    
				nTemp						= STEP1(lpIn);
				taVideoPara[i].nFrameRate	= nTemp >> 4;
			}
			else
			{
				taVideoPara[i].nFrameRate	= 0;
			}
		}

		for(i = 0; i < nAudioStreamNum; i++)
		{
			nTemp = STEP1(lpIn);
			taAudioPara[i].nAlgorithmType	= nTemp >> 4;
			taAudioPara[i].nBitRateFlag		= (nTemp >> 3) & 1;
			taAudioPara[i].nSampleRateFlag	= (nTemp >> 2) & 1;
			taAudioPara[i].nStreamDescFlag	= (nTemp >> 1) & 1;

			if(taAudioPara[i].nBitRateFlag)	// Audio Bitrate
			{                                        
				WORD wTemp					= STEP2(lpIn);
				taAudioPara[i].wBitRate		= wTemp >> 2;
			}
			else
			{
				taAudioPara[i].wBitRate = 0;
			}

			if(taAudioPara[i].nSampleRateFlag)	//Audio Sample Rate
			{                                        
				nTemp = STEP1(lpIn);
				taAudioPara[i].nSampleRate	= nTemp & 0xF;
			}
			else
			{
				taAudioPara[i].nSampleRate = 0;
			}

			if(taAudioPara[i].nStreamDescFlag)	//Audio Stream
			{                    
				taAudioPara[i].dwAudioStreamDesc = STEP3(lpIn);
			}
			else
			{
				taAudioPara[i].dwAudioStreamDesc = 0;
			}
		}
	}

	return DEMUX_NO_ERROR;
}

/* Video data segment header */
DWORD CVideoDataSegHeader::Parse(LPBYTE& rlpIn)
{
