
📄 ffmpeg_demux.c

📁 A multimedia library for smartphones: a cross-platform development library suitable for S60 and WinCE
💻 C
📖 Page 1 of 2
/*
 *			GPAC - Multimedia Framework C SDK
 *
 *			Copyright (c) Jean Le Feuvre 2000-2005
 *					All rights reserved
 *
 *  This file is part of GPAC / FFMPEG module
 *
 *  GPAC is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU Lesser General Public License as published by
 *  the Free Software Foundation; either version 2, or (at your option)
 *  any later version.
 *
 *  GPAC is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; see the file COPYING.  If not, write to
 *  the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 */

#include "ffmpeg_in.h"

/*default buffer is 200 ms per channel*/
#define FFD_DATA_BUFFER		800

//#if defined(__DARWIN__) || defined(__APPLE__)
#if !defined(WIN32) && !defined(_WIN32_WCE) && !defined(__SYMBIAN32__)
#include <errno.h>
#endif

static u32 FFDemux_Run(void *par)
{
	AVPacket pkt;
	s64 seek_to;
	u64 seek_audio, seek_video;
	Bool video_init, do_seek, map_audio_time, map_video_time;
	GF_NetworkCommand com;
	GF_NetworkCommand map;
	GF_SLHeader slh;
	FFDemux *ffd = (FFDemux *) par;

	memset(&map, 0, sizeof(GF_NetworkCommand));
	map.command_type = GF_NET_CHAN_MAP_TIME;

	memset(&com, 0, sizeof(GF_NetworkCommand));
	com.command_type = GF_NET_CHAN_BUFFER_QUERY;

	memset(&slh, 0, sizeof(GF_SLHeader));
	slh.compositionTimeStampFlag = slh.decodingTimeStampFlag = 1;

	seek_to = (s64) (AV_TIME_BASE*ffd->seek_time);
	map_video_time = !ffd->seekable;

	video_init = (seek_to && ffd->video_ch) ? 0 : 1;
	seek_audio = seek_video = 0;
	if (ffd->seekable && (ffd->audio_st>=0)) seek_audio = (u64) (s64) (ffd->seek_time*ffd->audio_tscale.den);
	if (ffd->seekable && (ffd->video_st>=0)) seek_video = (u64) (s64) (ffd->seek_time*ffd->video_tscale.den);

	/*it appears that ffmpeg has trouble resyncing on some mpeg files - we trick it by restarting at 0 to get the
	first video frame, and only then seek*/
	if (ffd->seekable) av_seek_frame(ffd->ctx, -1, video_init ? seek_to : 0, AVSEEK_FLAG_BACKWARD);
	do_seek = !video_init;
	map_audio_time = video_init ? ffd->unreliable_audio_timing : 0;

	while (ffd->is_running) {
		pkt.stream_index = -1;
		/*EOF*/
		if (av_read_frame(ffd->ctx, &pkt) < 0) break;
		if (pkt.pts == AV_NOPTS_VALUE) pkt.pts = pkt.dts;
		if (!pkt.dts) pkt.dts = pkt.pts;
		slh.compositionTimeStamp = pkt.pts;
		slh.decodingTimeStamp = pkt.dts;

		gf_mx_p(ffd->mx);
		/*blindly send audio as soon as video is init*/
		if (ffd->audio_ch && (pkt.stream_index == ffd->audio_st) && !do_seek) {
			slh.compositionTimeStamp *= ffd->audio_tscale.num;
			slh.decodingTimeStamp *= ffd->audio_tscale.num;
			if (map_audio_time) {
				map.base.on_channel = ffd->audio_ch;
				map.map_time.media_time = ffd->seek_time;
				/*map with TS=0 since we don't use SL*/
				map.map_time.timestamp = 0;
				map.map_time.reset_buffers = 1;
				map_audio_time = 0;
				gf_term_on_command(ffd->service, &map, GF_OK);
			}
			else if (slh.compositionTimeStamp < seek_audio) {
				slh.decodingTimeStamp = slh.compositionTimeStamp = seek_audio;
			}
			gf_term_on_sl_packet(ffd->service, ffd->audio_ch, pkt.data, pkt.size, &slh, GF_OK);
		}
		else if (ffd->video_ch && (pkt.stream_index == ffd->video_st)) {
			slh.compositionTimeStamp *= ffd->video_tscale.num;
			slh.decodingTimeStamp *= ffd->video_tscale.num;
			/*if we get pts = 0 after a seek the demuxer is resetting PTSs, so force map time*/
			if ((!do_seek && seek_to && !slh.compositionTimeStamp) || (map_video_time) ) {
				seek_to = 0;
				map_video_time = 0;

				map.base.on_channel = ffd->video_ch;
				map.map_time.timestamp = (u64) pkt.pts;
//				map.map_time.media_time = ffd->seek_time;
				map.map_time.media_time = 0;
				map.map_time.reset_buffers = 0;
				gf_term_on_command(ffd->service, &map, GF_OK);
			}
			else if (slh.compositionTimeStamp < seek_video) {
				slh.decodingTimeStamp = slh.compositionTimeStamp = seek_video;
			}
			gf_term_on_sl_packet(ffd->service, ffd->video_ch, pkt.data, pkt.size, &slh, GF_OK);
			video_init = 1;
		}
		gf_mx_v(ffd->mx);

		av_free_packet(&pkt);

		/*here's the trick - only seek after sending the first packets of each stream - this allows ffmpeg video decoders
		to resync properly*/
		if (do_seek && video_init && ffd->seekable) {
			av_seek_frame(ffd->ctx, -1, seek_to, AVSEEK_FLAG_BACKWARD);
			do_seek = 0;
			map_audio_time = ffd->unreliable_audio_timing;
		}
		/*sleep until the buffer occupancy drops below the threshold - note that this works because all streams in this
		demuxer are synchronized*/
		while (1) {
			if (ffd->audio_ch) {
				com.base.on_channel = ffd->audio_ch;
				gf_term_on_command(ffd->service, &com, GF_OK);
				if (com.buffer.occupancy < ffd->data_buffer_ms) break;
			}
			if (ffd->video_ch) {
				com.base.on_channel = ffd->video_ch;
				gf_term_on_command(ffd->service, &com, GF_OK);
				if (com.buffer.occupancy < ffd->data_buffer_ms) break;
			}
			gf_sleep(10);

			/*escape if disconnect*/
			if (!ffd->audio_run && !ffd->video_run) break;
		}
		if (!ffd->audio_run && !ffd->video_run) break;
	}
	/*signal EOS*/
	if (ffd->audio_ch) gf_term_on_sl_packet(ffd->service, ffd->audio_ch, NULL, 0, NULL, GF_EOS);
	if (ffd->video_ch) gf_term_on_sl_packet(ffd->service, ffd->video_ch, NULL, 0, NULL, GF_EOS);
	ffd->is_running = 0;
	return 0;
}

static Bool FFD_CanHandleURL(GF_InputService *plug, const char *url)
{
	Bool has_audio, has_video;
	s32 i;
	AVFormatContext *ctx;
	AVOutputFormat *fmt_out;
	Bool ret = 0;
	char *ext, szName[1000], szExt[20];
	const char *szExtList;

	strcpy(szName, url);
	ext = strrchr(szName, '#');
	if (ext) ext[0] = 0;

	/*disable RTP/RTSP from ffmpeg*/
	if (!strnicmp(szName, "rtsp://", 7)) return 0;
	if (!strnicmp(szName, "rtspu://", 8)) return 0;
	if (!strnicmp(szName, "rtp://", 6)) return 0;

	ext = strrchr(szName, '.');
	if (ext) {
		strcpy(szExt, &ext[1]);
		strlwr(szExt);
		if (!strcmp(szExt, "ts")) return 0;

		/*note we forbid ffmpeg to handle files we support*/
		if (!strcmp(szExt, "mp4") || !strcmp(szExt, "mpg4") || !strcmp(szExt, "m4a") || !strcmp(szExt, "m21")
			|| !strcmp(szExt, "m4v")
			|| !strcmp(szExt, "3gp") || !strcmp(szExt, "3gpp") || !strcmp(szExt, "3gp2") || !strcmp(szExt, "3g2")
			|| !strcmp(szExt, "mp3")
			|| !strcmp(szExt, "amr")
			|| !strcmp(szExt, "bt") || !strcmp(szExt, "wrl") || !strcmp(szExt, "x3dv")
			|| !strcmp(szExt, "xmt") || !strcmp(szExt, "xmta") || !strcmp(szExt, "x3d")
			) return 0;

		/*check any default stuff that should work with ffmpeg*/
		if (gf_term_check_extension(plug, "video/mpeg", "mpg mpeg mp2 mpa mpe mpv2", "MPEG 1/2 Movies", ext)) return 1;
		if (gf_term_check_extension(plug, "video/x-mpeg", "mpg mpeg mp2 mpa mpe mpv2", "MPEG 1/2 Movies", ext)) return 1;
		if (gf_term_check_extension(plug, "video/x-mpeg-systems", "mpg mpeg mp2 mpa mpe mpv2", "MPEG 1/2 Movies", ext)) return 1;
		if (gf_term_check_extension(plug, "audio/basic", "snd au", "Basic Audio", ext)) return 1;
		if (gf_term_check_extension(plug, "audio/x-wav", "wav", "WAV Audio", ext)) return 1;
		if (gf_term_check_extension(plug, "video/x-ms-asf", "asf wma wmv asx asr", "WindowsMedia Movies", ext)) return 1;
		if (gf_term_check_extension(plug, "video/x-ms-wmv", "asf wma wmv asx asr", "WindowsMedia Movies", ext)) return 1;
		if (gf_term_check_extension(plug, "video/avi", "avi", "AVI Movies", ext)) return 1;
		if (gf_term_check_extension(plug, "video/H263", "h263 263", "H263 Video", ext)) return 1;
		if (gf_term_check_extension(plug, "video/H264", "h264 264", "H264 Video", ext)) return 1;
		if (gf_term_check_extension(plug, "video/MPEG4", "cmp", "MPEG-4 Video", ext)) return 1;
		/*we let ffmpeg handle mov because some QT files with uncompressed or adpcm audio use 1 audio sample
		per MP4 sample, which is a killer for our MP4 lib, whereas ffmpeg handles these as complete audio chunks;
		moreover ffmpeg handles cmov, we don't*/
		if (gf_term_check_extension(plug, "video/quicktime", "mov qt", "QuickTime Movies", ext)) return 1;
	}

	ctx = NULL;
	if (av_open_input_file(&ctx, szName, NULL, 0, NULL) < 0) {
		AVInputFormat *av_in = NULL;
		/*some extensions not supported by ffmpeg*/
		if (ext && !strcmp(szExt, "cmp")) av_in = av_find_input_format("m4v");

		if (av_open_input_file(&ctx, szName, av_in, 0, NULL) < 0) {
			return 0;
		}
	}
	if (!ctx || av_find_stream_info(ctx) < 0) goto exit;

	/*figure out if we can use codecs or not*/
	has_video = has_audio = 0;
	for (i = 0; i < ctx->nb_streams; i++) {
		AVCodecContext *enc = ctx->streams[i]->codec;
		switch (enc->codec_type) {
		case CODEC_TYPE_AUDIO:
			if (!has_audio) has_audio = 1;
			break;
		case CODEC_TYPE_VIDEO:
			if (!has_video) has_video = 1;
			break;
		default:
			break;
		}
	}
	if (!has_audio && !has_video) goto exit;
	ret = 1;

	fmt_out = guess_stream_format(NULL, url, NULL);
	if (fmt_out) gf_term_register_mime_type(plug, fmt_out->mime_type, fmt_out->extensions, fmt_out->name);
	else {
		ext = strrchr(szName, '.');
		if (ext) {
			strcpy(szExt, &ext[1]);
			strlwr(szExt);

			szExtList = gf_modules_get_option((GF_BaseInterface *)plug, "MimeTypes", "application/x-ffmpeg");
			if (!szExtList) {
				gf_term_register_mime_type(plug, "application/x-ffmpeg", szExt, "Other Movies (FFMPEG)");
			} else if (!strstr(szExtList, szExt)) {
				u32 len;
				char *buf;
				len = strlen(szExtList) + strlen(szExt) + 2;
				buf = malloc(sizeof(char)*len);
				sprintf(buf, "\"%s ", szExt);
				strcat(buf, &szExtList[1]);
				gf_modules_set_option((GF_BaseInterface *)plug, "MimeTypes", "application/x-ffmpeg", buf);
				free(buf);
			}
		}
	}

exit:
	if (ctx) av_close_input_file(ctx);
	return ret;
}

static GF_ESD *FFD_GetESDescriptor(FFDemux *ffd, Bool for_audio)
{
	GF_BitStream *bs;
	Bool dont_use_sl;
	GF_ESD *esd = (GF_ESD *) gf_odf_desc_esd_new(0);
	esd->ESID = 1 + (for_audio ? ffd->audio_st : ffd->video_st);
	esd->decoderConfig->streamType = for_audio ? GF_STREAM_AUDIO : GF_STREAM_VISUAL;
	esd->decoderConfig->avgBitrate = esd->decoderConfig->maxBitrate = 0;

	/*remap std object types - depending on input formats, FFMPEG may not have separate DSI from the initial frame.
	In this case we have no choice but to use FFMPEG decoders*/
	if (for_audio) {
		AVCodecContext *dec = ffd->ctx->streams[ffd->audio_st]->codec;
		esd->slConfig->timestampResolution = ffd->audio_tscale.den;
		switch (dec->codec_id) {
		case CODEC_ID_MP2:
			esd->decoderConfig->objectTypeIndication = 0x6B;
			break;
		case CODEC_ID_MP3:
			esd->decoderConfig->objectTypeIndication = 0x69;
			break;
		case CODEC_ID_AAC:
			if (!dec->extradata_size) goto opaque_audio;
			esd->decoderConfig->objectTypeIndication = 0x40;
			esd->decoderConfig->decoderSpecificInfo->dataLength = dec->extradata_size;
			esd->decoderConfig->decoderSpecificInfo->data = malloc(sizeof(char)*dec->extradata_size);
			memcpy(esd->decoderConfig->decoderSpecificInfo->data,
					dec->extradata,
					sizeof(char)*dec->extradata_size);
			break;
		default:
opaque_audio:
			esd->decoderConfig->objectTypeIndication = GPAC_FFMPEG_CODECS_OTI;
			bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
			gf_bs_write_u32(bs, dec->codec_id);
			gf_bs_write_u32(bs, dec->sample_rate);
			gf_bs_write_u16(bs, dec->channels);
			gf_bs_write_u16(bs, dec->bits_per_sample);
			gf_bs_write_u16(bs, dec->frame_size);
			gf_bs_write_u16(bs, dec->block_align);
			gf_bs_write_u32(bs, dec->codec_tag);
			gf_bs_write_u32(bs, dec->bit_rate);
			if (dec->extradata_size) {
				gf_bs_write_data(bs, dec->extradata, dec->extradata_size);
			}
			gf_bs_get_content(bs, (unsigned char **) &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
			gf_bs_del(bs);
			break;
		}
		dont_use_sl = ffd->unreliable_audio_timing;
	} else {
		AVCodecContext *dec = ffd->ctx->streams[ffd->video_st]->codec;
		esd->slConfig->timestampResolution = ffd->video_tscale.den;
		switch (dec->codec_id) {
		case CODEC_ID_MPEG4:
		case CODEC_ID_H264:
			/*if dsi not detected force use of ffmpeg*/
			if (!dec->extradata_size) goto opaque_video;
			/*otherwise use any MPEG-4 Visual*/
			esd->decoderConfig->objectTypeIndication = (dec->codec_id==CODEC_ID_H264) ? 0x21 : 0x20;
			esd->decoderConfig->decoderSpecificInfo->dataLength = dec->extradata_size;
			esd->decoderConfig->decoderSpecificInfo->data = malloc(sizeof(char)*dec->extradata_size);
			memcpy(esd->decoderConfig->decoderSpecificInfo->data,
					dec->extradata,
					sizeof(char)*dec->extradata_size);
			break;
		case CODEC_ID_MPEG1VIDEO:
			esd->decoderConfig->objectTypeIndication = 0x6A;
			break;
		case CODEC_ID_MPEG2VIDEO:
			esd->decoderConfig->objectTypeIndication = 0x65;
			break;
		default:
opaque_video:
			esd->decoderConfig->objectTypeIndication = GPAC_FFMPEG_CODECS_OTI;
			bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
			gf_bs_write_u32(bs, dec->codec_id);
			gf_bs_write_u32(bs, dec->width);
			gf_bs_write_u32(bs, dec->height);
			gf_bs_write_u32(bs, dec->codec_tag);
			gf_bs_write_u32(bs, dec->bit_rate);
			if (dec->extradata_size) {
				gf_bs_write_data(bs, dec->extradata, dec->extradata_size);
			}
			gf_bs_get_content(bs, (unsigned char **) &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
			gf_bs_del(bs);
			break;
		}
		dont_use_sl = 0;
	}

	if (dont_use_sl) {
		esd->slConfig->predefined = SLPredef_SkipSL;
	} else {
		/*only send full AUs*/
		esd->slConfig->useAccessUnitStartFlag = esd->slConfig->useAccessUnitEndFlag = 0;
		esd->slConfig->hasRandomAccessUnitsOnlyFlag = 1;
		esd->slConfig->useTimestampsFlag = 1;
	}
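For codecs that cannot be mapped to a standard MPEG-4 objectTypeIndication, FFD_GetESDescriptor above falls back to GPAC_FFMPEG_CODECS_OTI and serializes the raw FFMPEG codec parameters into the decoder-specific info with the GPAC bitstream writer. Whoever consumes that ESD (presumably the companion FFMPEG decoder module, not shown on this page) has to read the fields back in exactly the same order they were written. The snippet below is only an illustrative sketch of the audio read side under that assumption: the FFOpaqueAudioCfg struct and the parse_ffmpeg_audio_dsi function are hypothetical names introduced here, while the gf_bs_* calls are the regular GPAC bitstream API. It assumes the same includes as this file (ffmpeg_in.h pulls in the GPAC tools headers).

/*ILLUSTRATIVE SKETCH (not part of ffmpeg_demux.c): parses the opaque audio
configuration written in the opaque_audio branch of FFD_GetESDescriptor -
codec_id, sample_rate, channels, bits_per_sample, frame_size, block_align,
codec_tag, bit_rate, then an optional FFMPEG extradata blob*/
typedef struct {
	u32 codec_id, sample_rate, codec_tag, bit_rate;
	u16 channels, bits_per_sample, frame_size, block_align;
	char *extradata;
	u32 extradata_size;
} FFOpaqueAudioCfg;

static GF_Err parse_ffmpeg_audio_dsi(char *data, u32 size, FFOpaqueAudioCfg *cfg)
{
	GF_BitStream *bs;
	/*the fixed part written above is 4 u32 + 4 u16 = 24 bytes*/
	if (!data || (size < 24)) return GF_NON_COMPLIANT_BITSTREAM;
	bs = gf_bs_new(data, size, GF_BITSTREAM_READ);
	/*read back in the exact write order used by FFD_GetESDescriptor*/
	cfg->codec_id = gf_bs_read_u32(bs);
	cfg->sample_rate = gf_bs_read_u32(bs);
	cfg->channels = gf_bs_read_u16(bs);
	cfg->bits_per_sample = gf_bs_read_u16(bs);
	cfg->frame_size = gf_bs_read_u16(bs);
	cfg->block_align = gf_bs_read_u16(bs);
	cfg->codec_tag = gf_bs_read_u32(bs);
	cfg->bit_rate = gf_bs_read_u32(bs);
	/*anything left in the bitstream is the FFMPEG extradata blob*/
	cfg->extradata_size = (u32) gf_bs_available(bs);
	cfg->extradata = NULL;
	if (cfg->extradata_size) {
		cfg->extradata = malloc(cfg->extradata_size);
		gf_bs_read_data(bs, cfg->extradata, cfg->extradata_size);
	}
	gf_bs_del(bs);
	return GF_OK;
}

The 24-byte minimum corresponds to the eight fixed fields written in the demuxer (four u32 and four u16 values); whatever remains after them is treated as the codec extradata, mirroring the optional gf_bs_write_data call above.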
