/*
 * frameparser.c
 *
 * Frame parser for an early Ingenic (君正) uCOS-based DTV/MPLAYER system.
 * This chunk is page 1 of 5 of a 1,611-line original file.
 */
        fp->ParserState = TP_FP_STATE_END;
    }
    else
        fp->Error = TP_RET_DATA_NONE_ERR;
}

/*
 * Read the emergency-broadcast (EB) field header: if the broadcast-id bits
 * indicate an EB payload is present, record its length and advance the
 * parser to TP_FP_STATE_GET_EB_DATA; otherwise end parsing of this frame.
 * The read pointer is not advanced here — GetEbData consumes the whole
 * field at once.
 */
VOID GetEbDataLength(FRAME_PARSER_T *fp, TP_DTV_STREAM_T *stream)
{
    TP_UINT8   broadcastid;

    if (fp->FrameDataLen <= EB_PART1_WIDTH)
    {
        /* not enough buffered data yet */
        fp->Error = TP_RET_DATA_NONE_ERR;
        return;
    }

    broadcastid = *fp->FrameData & 0x03;
    if (broadcastid == 0)
    {
        /* no EB data in this frame */
        fp->ParserState = TP_FP_STATE_END;
        return;
    }

    /* 16-bit big-endian payload length in bytes 2..3 */
    stream->EbStream.DataLen = (*(fp->FrameData+2)<<8) | *(fp->FrameData+3);
    fp->ParserState = TP_FP_STATE_GET_EB_DATA;
}

/*
 * Once the complete EB field (part-1 header + payload + CRC) is buffered,
 * verify its CRC, copy the payload into a freshly allocated buffer on
 * stream->EbStream, consume the field from the input, and end parsing of
 * this frame.  Sets fp->Error on short data or CRC failure.
 */
VOID GetEbData(FRAME_PARSER_T *fp, TP_DTV_STREAM_T *stream)
{
    if (fp->FrameDataLen <= EB_PART1_WIDTH+stream->EbStream.DataLen+HEADER_CRC_FIELD_WIDTH)
    {
        /* whole field not buffered yet */
        fp->Error = TP_RET_DATA_NONE_ERR;
        return;
    }

    /* CRC covers part-1 header plus payload; CRC bytes follow them */
    if (!CheckCRC(fp->FrameData, EB_PART1_WIDTH+stream->EbStream.DataLen,
                  fp->FrameData+EB_PART1_WIDTH+stream->EbStream.DataLen))
    {
        fp->Error = TP_RET_CRC_ERR;
        return;
    }

    /* skip the part-1 header to reach the broadcast payload */
    fp->FrameData += EB_PART1_WIDTH;
    /* NOTE(review): pf_alloc result is not NULL-checked (consistent with
       the rest of this file); a failed allocation would fault in pf_memcpy. */
    stream->EbStream.Data = (TP_UINT8*)pf_alloc(stream->EbStream.DataLen);
    pf_memcpy(stream->EbStream.Data, fp->FrameData, stream->EbStream.DataLen);
    stream->EbStream.Valid = TRUE;
    stream->EbStream.State = TP_FRAME_DATA_STATE_READY;

    /* consume payload + CRC; combined with the header skip above, the
       total consumed equals the length subtracted from FrameDataLen */
    fp->FrameData += stream->EbStream.DataLen+HEADER_CRC_FIELD_WIDTH;
    fp->FrameDataLen -= EB_PART1_WIDTH+stream->EbStream.DataLen+HEADER_CRC_FIELD_WIDTH;

    fp->ParserState = TP_FP_STATE_END;
}

/*
 * Read the one-byte frame-header length field and advance the parser to
 * TP_FP_STATE_GET_HEADER_CONTENT.  The read pointer is not advanced here;
 * GetFrameHeaderContent consumes the whole header (length byte included).
 */
VOID GetFrameHeaderLength(FRAME_PARSER_T *fp, TP_DTV_STREAM_T *stream)
{
    if (fp->FrameDataLen <= HEADER_LENGTH_FIELD_WIDTH)
    {
        /* not enough buffered data yet */
        fp->Error = TP_RET_DATA_NONE_ERR;
        return;
    }

    fp->HeaderLength = (TP_UINT8)*(fp->FrameData);
    fp->ParserState = TP_FP_STATE_GET_HEADER_CONTENT;
}

/*
 * Parse the frame header once fp->HeaderLength bytes plus the CRC are
 * buffered: verify the CRC, read the header tag, then conditionally read
 * each tag-enabled field (start play time, video/audio/other field lengths,
 * extended per-stream parameters), updating stream->StreamParam if the
 * extended parameters changed.  Finally consume the header+CRC and advance
 * to the video-data-header state.  Sets fp->Error on short data or bad CRC.
 *
 * NOTE(review): statement order is load-bearing — fp->FrameData is walked
 * forward field by field, and the per-field advances are assumed to sum to
 * fp->HeaderLength so that the final `FrameData += HEADER_CRC_FIELD_WIDTH`
 * lands exactly past the CRC.  Confirm against the header spec before
 * restructuring.
 */
VOID GetFrameHeaderContent(FRAME_PARSER_T *fp, TP_DTV_STREAM_T *stream)
{
    TP_STREAM_PARAM_T   newStreamParam;
    
    if (fp->FrameDataLen >= fp->HeaderLength + HEADER_CRC_FIELD_WIDTH)
    {
        /* CRC covers the whole header, length byte included */
        if (!CheckCRC(fp->FrameData, (TP_UINT32)fp->HeaderLength, fp->FrameData+fp->HeaderLength))
        {
            fp->Error = TP_RET_CRC_ERR;
            return;
        }
        
        pf_memset(&newStreamParam, 0, sizeof(TP_STREAM_PARAM_T));
        
        fp->FrameData += HEADER_LENGTH_FIELD_WIDTH; 
        /* NOTE(review): copies sizeof(HEADER_TAG_T) but advances by the
           separate HEADER_TAG_FIELD_WIDTH constant — assumes they agree. */
        pf_memcpy(&fp->HeaderTag, fp->FrameData, sizeof(HEADER_TAG_T));
        fp->FrameData += HEADER_TAG_FIELD_WIDTH;
        if (fp->HeaderTag.StartPlayTime_isValid) //get start play time
        {
            /* 32-bit big-endian start play time */
            newStreamParam.StartPlayTime = *fp->FrameData++;
            newStreamParam.StartPlayTime <<= 8;
            newStreamParam.StartPlayTime += *fp->FrameData++;
            newStreamParam.StartPlayTime <<= 8;
            newStreamParam.StartPlayTime += *fp->FrameData++;
            newStreamParam.StartPlayTime <<= 8;
            newStreamParam.StartPlayTime += *fp->FrameData++;
        }
        if (fp->HeaderTag.VideoField_isValid) // get video field length and total number
        {
            /* 21-bit field length (8+8+5 bits), then 3-bit stream count */
            fp->VideoFieldLength = *fp->FrameData++;
            fp->VideoFieldLength <<= 8;
            fp->VideoFieldLength += *fp->FrameData++;
            fp->VideoFieldLength <<= 5;
            fp->VideoFieldLength += *fp->FrameData >> 3;
            fp->VideoStreamTotal = *fp->FrameData & 0x07;
            fp->FrameData++;
        }
        if (fp->HeaderTag.AudioField_isValid) // get audio field length and total number
        {
            /* same 21-bit length + 3-bit count layout as the video field */
            fp->AudioFieldLength = *fp->FrameData++;
            fp->AudioFieldLength <<= 8;
            fp->AudioFieldLength += *fp->FrameData++;
            fp->AudioFieldLength <<= 5;
            fp->AudioFieldLength += *fp->FrameData >> 3;
            fp->AudioStreamTotal = *fp->FrameData & 0x07;
            fp->FrameData++;
        }
        if (fp->HeaderTag.OtherField_isValid)  //get data field length
        {
            /* 21-bit length; the low 3 bits of the last byte are unused here */
            fp->OtherFieldLength = *fp->FrameData++;
            fp->OtherFieldLength <<= 8;
            fp->OtherFieldLength += *fp->FrameData++;
            fp->OtherFieldLength <<= 5;
            fp->OtherFieldLength += *fp->FrameData >> 3;
            fp->FrameData++;
        }
        if (fp->HeaderTag.ExtendField_isValid)
        {
            /* per-stream parameters; only commit to the stream (and mark
               Updated) when they differ from the current ones */
            GetFrameHeaderExtend(fp, &newStreamParam);
            if(!CmpStreamParam(&stream->StreamParam, &newStreamParam))
            {
                pf_memcpy(&stream->StreamParam, &newStreamParam, sizeof(TP_STREAM_PARAM_T));
                stream->StreamParam.Updated = TRUE;
            }
        }
            
        fp->FrameData += HEADER_CRC_FIELD_WIDTH;
        fp->FrameDataLen -= fp->HeaderLength + HEADER_CRC_FIELD_WIDTH;
        fp->ParserState = TP_FP_STATE_GET_VIDEO_DATA_HEADER_LENGTH;
    }
    else
        fp->Error = TP_RET_DATA_NONE_ERR;
}

/*
 * Parse the extended header field: one parameter block per video stream,
 * then one per audio stream.  Advances fp->FrameData past every byte it
 * consumes and fills in *streamparam (stream counts included).
 *
 * NOTE(review): each ParamTag is copied with sizeof(*_TAG_T) while the
 * read pointer advances by only one byte — assumes the tag structs are
 * one-byte bitfields; confirm against their declarations.
 */
VOID GetFrameHeaderExtend(FRAME_PARSER_T *fp, TP_STREAM_PARAM_T *streamparam)
{
    TP_INT32 i,j;

    for (i=0; i<fp->VideoStreamTotal; i++)
    {       
        pf_memcpy(&fp->VideoStreamParamTag[i], fp->FrameData++, sizeof(VIDEO_STREAM_PARAM_TAG_T));
        streamparam->VideoParam[i].ArithmeticType = (TP_VIDEO_ARITHMETIC_TYPE_E)fp->VideoStreamParamTag[i].ArithmeticType;
        if (fp->VideoStreamParamTag[i].BitRateField_isValid)
        {
            /* 16-bit big-endian bit rate */
            streamparam->VideoParam[i].BitRate = (*fp->FrameData<<8) + *(fp->FrameData+1);
            fp->FrameData += 2;
        }
        if (fp->VideoStreamParamTag[i].DisplayAreaField_isValid)
        {
            /* xStart: 6 bits; yStart: 2 + 4 bits across the byte boundary */
            streamparam->VideoParam[i].xStart = *fp->FrameData >> 2;
            streamparam->VideoParam[i].yStart = *fp->FrameData & 0x03;
            fp->FrameData++;
            streamparam->VideoParam[i].yStart <<= 4;
            streamparam->VideoParam[i].yStart += *fp->FrameData >> 4;
            /* NOTE(review): mask 0x0E keeps bits 1..3 un-shifted — possibly
               intended as (*fp->FrameData >> 1) & 0x07; confirm with spec. */
            streamparam->VideoParam[i].Priority = *fp->FrameData & 0x0E;
            fp->FrameData++;
        }
        
        streamparam->VideoParam[i].HrWidth = DEFAULT_DISPLAY_HORIZON_WIDTH;
        streamparam->VideoParam[i].VrWidth = DEFAULT_DISPLAY_VERTICAL_WIDTH;
        if (fp->VideoStreamParamTag[i].ResolutionField_isValid)
        {
            /* horizontal width: 4 + 6 bits; vertical width: 2 + 8 bits */
            streamparam->VideoParam[i].HrWidth = *fp->FrameData++ & 0x0F;
            streamparam->VideoParam[i].HrWidth <<= 6;
            streamparam->VideoParam[i].HrWidth += *fp->FrameData >> 2;
            streamparam->VideoParam[i].VrWidth = *fp->FrameData++ & 0x03;
            streamparam->VideoParam[i].VrWidth <<= 8;
            /* BUG FIX: the final byte is the low 8 bits of the VERTICAL
               width; the original added it to HrWidth (copy-paste error),
               corrupting HrWidth and truncating VrWidth to its top 2 bits. */
            streamparam->VideoParam[i].VrWidth += *fp->FrameData++;
        }
        if (fp->VideoStreamParamTag[i].FramFreqField_isValid)
        {
            /* frame frequency in the high nibble */
            streamparam->VideoParam[i].FrameFreq = (TP_VIDEO_FRAME_FREQ_E)(*fp->FrameData++ >> 4);
        }
    }
    streamparam->VideoStreamNum = fp->VideoStreamTotal;
    
    for (i=0; i< fp->AudioStreamTotal; i++)
    {
        pf_memcpy(&fp->AudioStreamParamTag[i], fp->FrameData++, sizeof(AUDIO_STREAM_PARAM_TAG_T));
        streamparam->AudioParam[i].ArithmeticType = fp->AudioStreamParamTag[i].ArithmeticType;
        if (fp->AudioStreamParamTag[i].BitRateField_isValid)
        {
            /* 14-bit bit rate: 8 bits + top 6 bits of the next byte */
            streamparam->AudioParam[i].BitRate = (*fp->FrameData<<6) + (*(fp->FrameData+1)>>2);
            fp->FrameData += 2;
        }

        if (fp->AudioStreamParamTag[i].SamplerateField_isValid)
            streamparam->AudioParam[i].SampleRate = (TP_AUDIO_SAMPLERATE_E)(*fp->FrameData++ & 0x0F);

        if (fp->AudioStreamParamTag[i].DescriptionField_isValid)
        {
            /* 3-byte language code, matched against the known table;
               unknown codes fall back to DEFAULT_LANGUAGE */
            streamparam->AudioParam[i].Language = DEFAULT_LANGUAGE;
            for(j=0; j<TP_LANGUAGE_TYPE_MAX; j++)
            {
                if(pf_memcmp((VOID*)Language_Tab[j], fp->FrameData, 3)==0)
                {
                    streamparam->AudioParam[i].Language = j;
                    break;
                }
            }
            fp->FrameData += 3;
        }
    }
    streamparam->AudioStreamNum = fp->AudioStreamTotal;
}

/*
 * Read the 12-bit video-data-header length field and advance the parser
 * to TP_FP_STATE_GET_VIDEO_DATA_HEADER.  Frames without a video field
 * jump straight to the audio section.  The read pointer is not advanced
 * here; GetVideoDataHeader consumes the whole header at once.
 */
VOID GetVideoDataHeaderLength(FRAME_PARSER_T *fp, TP_DTV_STREAM_T *stream)
{
    /* no video field in this frame — go straight to audio */
    if (!fp->HeaderTag.VideoField_isValid)
    {
        fp->ParserState = TP_FP_STATE_GET_AUDIO_DATA_HEADER_UNITNUM;
        return;
    }

    if (fp->FrameDataLen <= VIDEO_DATA_HEADER_LENGTH_FIELD_WIDTH)
    {
        /* not enough buffered data yet */
        fp->Error = TP_RET_DATA_NONE_ERR;
        return;
    }

    /* 12 bits: all of byte 0, top nibble of byte 1 */
    fp->VideoDataHeaderLength = (*fp->FrameData<<4) | (*(fp->FrameData+1)>>4);
    fp->ParserState = TP_FP_STATE_GET_VIDEO_DATA_HEADER;
}

/*
 * Verify and capture the video data header.  On CRC failure the entire
 * video field is discarded (recording any not-yet-buffered remainder in
 * DisCardDatalen) and parsing skips ahead to the audio section; on success
 * the header content (minus its length field) is copied into a fresh
 * buffer and the parser advances to the per-unit content state.
 *
 * NOTE(review): the order of the four updates after the copy matters —
 * VideoDataHeaderLength is consumed from FrameData/FrameDataLen/
 * VideoFieldLength at its full width BEFORE being shrunk to the content
 * width used by GetVideoDataHeaderContent.  Do not reorder.
 */
VOID GetVideoDataHeader(FRAME_PARSER_T *fp, TP_DTV_STREAM_T *stream)
{    
    if (fp->FrameDataLen >= fp->VideoDataHeaderLength+HEADER_CRC_FIELD_WIDTH)
    {
        if (!CheckCRC(fp->FrameData, (TP_UINT32)fp->VideoDataHeaderLength, fp->FrameData+fp->VideoDataHeaderLength))
        {
            /* bad CRC: throw away the whole video field rather than parse
               unreliable per-unit headers */
            if (fp->VideoFieldLength > fp->FrameDataLen)
            {
                /* field extends past the buffer — remember how much of the
                   next input to discard before resuming */
                fp->DisCardDatalen = fp->VideoFieldLength - fp->FrameDataLen;
                fp->FrameData += fp->FrameDataLen;
                fp->FrameDataLen = 0;
                fp->Error = TP_RET_DATA_NONE_ERR;
            }
            else
            {
                fp->FrameData += fp->VideoFieldLength;
                fp->FrameDataLen -= fp->VideoFieldLength;
            }
            fp->ParserState = TP_FP_STATE_GET_AUDIO_DATA_HEADER_UNITNUM;
            return;
        }
        
        /* keep a private copy of the header content (length field stripped)
           for GetVideoDataHeaderContent to walk unit by unit.
           NOTE(review): pf_alloc result is not NULL-checked (consistent
           with the rest of this file). */
        fp->VideoDataHeader = pf_alloc(fp->VideoDataHeaderLength - VIDEO_DATA_HEADER_LENGTH_FIELD_WIDTH);
        pf_memcpy(fp->VideoDataHeader, fp->FrameData + VIDEO_DATA_HEADER_LENGTH_FIELD_WIDTH, fp->VideoDataHeaderLength-VIDEO_DATA_HEADER_LENGTH_FIELD_WIDTH);  
        fp->CurUnitHeader = fp->VideoDataHeader;
        
        fp->FrameData += fp->VideoDataHeaderLength+HEADER_CRC_FIELD_WIDTH;
        fp->FrameDataLen -= fp->VideoDataHeaderLength + HEADER_CRC_FIELD_WIDTH;
        fp->VideoFieldLength -= fp->VideoDataHeaderLength + HEADER_CRC_FIELD_WIDTH;
        /* from here on, VideoDataHeaderLength counts only the copied content */
        fp->VideoDataHeaderLength -= VIDEO_DATA_HEADER_LENGTH_FIELD_WIDTH;
        fp->ParserState = TP_FP_STATE_GET_VIDEO_DATA_HEADER_CONTENT;
    }
    else
        fp->Error = TP_RET_DATA_NONE_ERR;

}

VOID GetVideoDataHeaderContent(FRAME_PARSER_T *fp, TP_DTV_STREAM_T *stream)
{
    TP_UINT32  sid;
    TP_BOOL buffound=FALSE;
    TP_UINT32  framelen;
    TP_STREAM_VIDEO_FRAME_T *curf;
    TP_UINT8 *buf;

    if (fp->VideoDataHeaderLength >= VIDEO_DATA_HEADER_UNIT_PARAM_WIDTH)
    {
        framelen = (*fp->CurUnitHeader<<8) + *(fp->CurUnitHeader+1); 
        fp->CurUnitLen = framelen;
        sid = *(fp->CurUnitHeader+2) >> 2;
        sid &= 0x07;
        
        //append on a frame end
        for (curf=stream->VideoFrame; curf; curf=curf->next)
        {
            if ((sid == curf->StreamId)
               &&(curf->State == TP_FRAME_DATA_STATE_READING))
            {
                fp->CurVideoFrame = curf;
                buffound = TRUE;
                break;
            }
        }

        if (buffound)
        {
            buf = pf_alloc(framelen + fp->CurVideoFrame->Size);
            pf_memcpy(buf, fp->CurVideoFrame->Data, fp->CurVideoFrame->Size);
            pf_free(fp->CurVideoFrame->Data);
            fp->CurVideoFrame->Data = buf;
            if (*(fp->CurUnitHeader+2) & 0x01) //have starttime
                fp->CurVideoFrame->StartPlayTime = (*(fp->CurUnitHeader+3)<<8) + *(fp->CurUnitHeader+4);
        }
        else
        {
            //alloc a frame buffer
            if (stream->VideoFrame == 0)
            {
                 stream->VideoFrame = pf_alloc(sizeof(TP_STREAM_VIDEO_FRAME_T));
                 fp->CurVideoFrame = stream->VideoFrame;
            }
            else
            {
                fp->CurVideoFrame->next = pf_alloc(sizeof(TP_STREAM_VIDEO_FRAME_T));
                fp->CurVideoFrame = fp->CurVideoFrame->next;
            }
            
            pf_memset(fp->CurVideoFrame, 0, sizeof(TP_STREAM_VIDEO_FRAME_T));
            fp->CurVideoFrame->Data = pf_alloc(framelen);
            fp->CurVideoFrame->StreamId = sid;
            fp->CurVideoFrame->Type = (TP_VIDEO_FRAME_TYPE_E)(*(fp->CurUnitHeader+2) >> 5);
            if (*(fp->CurUnitHeader+2) & 0x01) //have starttime
                fp->CurVideoFrame->StartPlayTime = (*(fp->CurUnitHeader+3)<<8) + *(fp->CurUnitHeader+4);
            fp->CurVideoFrame->State = TP_FRAME_DATA_STATE_READING;
            fp->CurVideoFrame->Size = 0;


/*
 * NOTE: the source is truncated here (page 1 of 5 of the original
 * 1,611-line file ends mid-function); GetVideoDataHeaderContent
 * continues on the following page.
 */