/*
 * GPAC - Multimedia Framework C SDK
 *
 * Copyright (c) Jean Le Feuvre 2000-2005
 * All rights reserved
 *
 * This file is part of GPAC / Media terminal sub-project
 *
 * GPAC is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation; either version 2, or (at your option)
 * any later version.
 *
 * GPAC is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; see the file COPYING. If not, write to
 * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 */

#include <gpac/internal/terminal_dev.h>
#include <gpac/internal/renderer_dev.h>
#include <gpac/internal/scenegraph_dev.h>
#include <gpac/nodes_x3d.h>
#include "media_memory.h"
#include "media_control.h"
#include <gpac/nodes_svg_sa.h>
#include <gpac/nodes_svg_sani.h>
#include <gpac/nodes_svg_da.h>

/*resolves an SVG syncReference IRI to the media object it designates.
iri: the reference (either a stream-id or a string/node reference)
o_type: expected media type (GF_MEDIA_OBJECT_*), refined when the target node is an SVG audio/video element
orig_ref: first node of the resolution chain, used to break cyclic references
post_pone: set to 1 when the reference is valid but the object does not exist yet (caller should retry later)
returns the media object, or NULL if unresolved*/
static GF_MediaObject *get_sync_reference(GF_InlineScene *is, XMLRI *iri, u32 o_type, GF_Node *orig_ref, Bool *post_pone)
{
	MFURL mfurl;
	SFURL sfurl;
	GF_MediaObject *res;
	GF_Node *ref = NULL;
	u32 stream_id = 0;

	if (iri->type==XMLRI_STREAMID) {
		stream_id = iri->lsr_stream_id;
	} else if (!iri->string) {
		return NULL;
	} else {
		if (iri->target) ref = iri->target;
		else if (iri->string[0]=='#') ref = gf_sg_find_node_by_name(is->graph, iri->string+1);
		else ref = gf_sg_find_node_by_name(is->graph, iri->string);

		if (ref) {
			GF_FieldInfo info;
			/*safety check, break cyclic references*/
			if (ref==orig_ref) return NULL;

			switch (ref->sgprivate->tag) {
			case TAG_SVG_audio:
				o_type = GF_MEDIA_OBJECT_AUDIO;
				/*follow the target element's xlink:href, keeping the original ref for cycle detection*/
				if (gf_svg_get_attribute_by_tag(ref, TAG_SVG_ATT_xlink_href, 0, 0, &info)==GF_OK) {
					return get_sync_reference(is, info.far_ptr, o_type, orig_ref ? orig_ref : ref, post_pone);
				}
				return NULL;
			case TAG_SVG_video:
				o_type = GF_MEDIA_OBJECT_VIDEO;
				if (gf_svg_get_attribute_by_tag(ref, TAG_SVG_ATT_xlink_href, 0, 0, &info)==GF_OK) {
					return get_sync_reference(is, info.far_ptr, o_type, orig_ref ? orig_ref : ref, post_pone);
				}
				return NULL;
			default:
				return NULL;
			}
		}
	}
	*post_pone = 0;
	/*build a one-entry URL list out of the IRI and look the object up in the scene*/
	mfurl.count = 1;
	mfurl.vals = &sfurl;
	mfurl.vals[0].OD_ID = stream_id;
	mfurl.vals[0].url = iri->string;
	res = gf_is_get_media_object(is, &mfurl, o_type, 0);
	if (!res) *post_pone = 1;
	return res;
}

/*locates (or creates) the media object attached to the given scene node and URL.
The node tag determines the kind of object expected when the URL does not use the OD scheme;
for SVG audio/video, a syncReference attribute is resolved first and may post-pone the request.*/
GF_EXPORT
GF_MediaObject *gf_mo_find(GF_Node *node, MFURL *url, Bool lock_timelines)
{
	u32 obj_type;
	Bool post_pone;
	GF_FieldInfo info;
	GF_InlineScene *is;
	GF_MediaObject *res, *syncRef;
	GF_SceneGraph *sg = gf_node_get_graph(node);
	if (!sg) return NULL;
	is = (GF_InlineScene*)gf_sg_get_private(sg);
	if (!is) return NULL;

	syncRef = NULL;

	/*keep track of the kind of object expected if URL is not using OD scheme*/
	switch (gf_node_get_tag(node)) {
	/*MPEG4 only*/
	case TAG_MPEG4_AudioSource: obj_type = GF_MEDIA_OBJECT_AUDIO; break;
	case TAG_MPEG4_AnimationStream: obj_type = GF_MEDIA_OBJECT_BIFS; break;
	case TAG_MPEG4_InputSensor: obj_type = GF_MEDIA_OBJECT_INTERACT; break;
	/*MPEG4/X3D*/
	case TAG_MPEG4_MovieTexture: case TAG_X3D_MovieTexture: obj_type = GF_MEDIA_OBJECT_VIDEO; break;
	case TAG_MPEG4_Background2D: obj_type = GF_MEDIA_OBJECT_VIDEO; break;
	case TAG_MPEG4_Background: case TAG_X3D_Background: obj_type = GF_MEDIA_OBJECT_VIDEO; break;
	case TAG_MPEG4_ImageTexture: case TAG_X3D_ImageTexture: obj_type = GF_MEDIA_OBJECT_VIDEO; break;
	case TAG_MPEG4_AudioClip: case TAG_X3D_AudioClip: obj_type = GF_MEDIA_OBJECT_AUDIO; break;
	case TAG_MPEG4_Inline: case TAG_X3D_Inline: obj_type = GF_MEDIA_OBJECT_SCENE; break;
	/*SVG*/
#ifdef GPAC_ENABLE_SVG_SA
	case TAG_SVG_SA_audio: obj_type = GF_MEDIA_OBJECT_AUDIO; break;
	case TAG_SVG_SA_image: obj_type = GF_MEDIA_OBJECT_VIDEO; break;
	case TAG_SVG_SA_video: obj_type = GF_MEDIA_OBJECT_VIDEO; break;
#endif
#ifdef GPAC_ENABLE_SVG_SANI
	case TAG_SVG_SANI_audio: obj_type = GF_MEDIA_OBJECT_AUDIO; break;
	case TAG_SVG_SANI_image: obj_type = GF_MEDIA_OBJECT_VIDEO; break;
	case TAG_SVG_SANI_video: obj_type = GF_MEDIA_OBJECT_VIDEO; break;
#endif
	case TAG_SVG_audio:
		obj_type = GF_MEDIA_OBJECT_AUDIO;
		if (gf_svg_get_attribute_by_tag(node, TAG_SVG_ATT_syncReference, 0, 0, &info)==GF_OK) {
			syncRef = get_sync_reference(is, info.far_ptr, GF_MEDIA_OBJECT_UNDEF, node, &post_pone);
			/*syncRef is specified but doesn't exist yet, post-pone*/
			if (post_pone) return NULL;
		}
		break;
	case TAG_SVG_image:
		obj_type = GF_MEDIA_OBJECT_VIDEO;
		break;
	case TAG_SVG_video:
		obj_type = GF_MEDIA_OBJECT_VIDEO;
		if (gf_svg_get_attribute_by_tag(node, TAG_SVG_ATT_syncReference, 0, 0, &info)==GF_OK) {
			syncRef = get_sync_reference(is, info.far_ptr, GF_MEDIA_OBJECT_UNDEF, node, &post_pone);
			/*syncRef is specified but doesn't exist yet, post-pone*/
			if (post_pone) return NULL;
		}
		break;
	default:
		obj_type = GF_MEDIA_OBJECT_UNDEF;
		break;
	}
	res = gf_is_get_media_object_ex(is, url, obj_type, lock_timelines, syncRef);
	return res;
}

/*allocates a new zero-initialized media object with default speed FIX_ONE.
Returns NULL on allocation failure.*/
GF_MediaObject *gf_mo_new()
{
	GF_MediaObject *mo;
	mo = (GF_MediaObject *) malloc(sizeof(GF_MediaObject));
	/*FIX: original dereferenced an unchecked malloc result*/
	if (!mo) return NULL;
	memset(mo, 0, sizeof(GF_MediaObject));
	mo->speed = FIX_ONE;
	mo->URLs.count = 0;
	mo->URLs.vals = NULL;
	return mo;
}

/*queries the decoder for the visual properties of a video/text media object.
Any out-parameter may be NULL when the caller does not need it.
pixel_ar is reset to 0 when either 16-bit half of the packed num:den value is 0 (i.e. PAR unset).
returns 0 when the object is not video/text or has no codec, 1 otherwise*/
GF_EXPORT
Bool gf_mo_get_visual_info(GF_MediaObject *mo, u32 *width, u32 *height, u32 *stride, u32 *pixel_ar, u32 *pixelFormat)
{
	GF_CodecCapability cap;
	/*FIX: guard against missing object manager / codec, consistent with gf_mo_get_audio_info
	(original dereferenced mo->odm->codec unconditionally)*/
	if (!mo->odm || !mo->odm->codec) return 0;
	if ((mo->type != GF_MEDIA_OBJECT_VIDEO) && (mo->type!=GF_MEDIA_OBJECT_TEXT)) return 0;

	if (width) {
		cap.CapCode = GF_CODEC_WIDTH;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*width = cap.cap.valueInt;
	}
	if (height) {
		cap.CapCode = GF_CODEC_HEIGHT;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*height = cap.cap.valueInt;
	}
	/*text objects expose only width/height*/
	if (mo->type==GF_MEDIA_OBJECT_TEXT) return 1;

	if (stride) {
		cap.CapCode = GF_CODEC_STRIDE;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*stride = cap.cap.valueInt;
	}
	if (pixelFormat) {
		cap.CapCode = GF_CODEC_PIXEL_FORMAT;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*pixelFormat = cap.cap.valueInt;
	}
	/*get PAR settings*/
	if (pixel_ar) {
		cap.CapCode = GF_CODEC_PAR;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*pixel_ar = cap.cap.valueInt;
		if (! (*pixel_ar & 0x0000FFFF)) *pixel_ar = 0;
		if (! (*pixel_ar & 0xFFFF0000)) *pixel_ar = 0;
	}
	return 1;
}

/*queries the decoder for the audio properties of an audio media object.
Any out-parameter may be NULL when the caller does not need it.
returns 0 when the object is not audio or has no codec, 1 otherwise*/
GF_EXPORT
Bool gf_mo_get_audio_info(GF_MediaObject *mo, u32 *sample_rate, u32 *bits_per_sample, u32 *num_channels, u32 *channel_config)
{
	GF_CodecCapability cap;
	if (!mo->odm || !mo->odm->codec || (mo->type != GF_MEDIA_OBJECT_AUDIO)) return 0;

	if (sample_rate) {
		cap.CapCode = GF_CODEC_SAMPLERATE;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*sample_rate = cap.cap.valueInt;
	}
	if (num_channels) {
		cap.CapCode = GF_CODEC_NB_CHAN;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*num_channels = cap.cap.valueInt;
	}
	if (bits_per_sample) {
		cap.CapCode = GF_CODEC_BITS_PER_SAMPLE;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*bits_per_sample = cap.cap.valueInt;
	}
	if (channel_config) {
		cap.CapCode = GF_CODEC_CHANNEL_CONFIG;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*channel_config = cap.cap.valueInt;
	}
	return 1;
}

/*refreshes cached codec capabilities on the media object after (re)configuration:
fps for video, bytes-per-second for audio. Clears the GF_MO_IS_INIT flag.*/
void MO_UpdateCaps(GF_MediaObject *mo)
{
	GF_CodecCapability cap;
	mo->flags &= ~GF_MO_IS_INIT;

	if (mo->type == GF_MEDIA_OBJECT_VIDEO) {
		cap.CapCode = GF_CODEC_FPS;
		gf_codec_get_capability(mo->odm->codec, &cap);
		mo->odm->codec->fps = cap.cap.valueFloat;
	}
	else if (mo->type == GF_MEDIA_OBJECT_AUDIO) {
		/*FIX: initialize - gf_mo_get_audio_info may return without writing these,
		and the original then read uninitialized values (UB)*/
		u32 sr = 0, nb_ch = 0, bps = 0;
		gf_mo_get_audio_info(mo, &sr, &bps, &nb_ch, NULL);
		mo->odm->codec->bytes_per_sec = sr * nb_ch * bps / 8;
	}
}

/*fetches the current decoded frame of the object and locks it (see gf_mo_release_data).
resync: if set, drops late frames against the object clock to find the closest one in time
eos: set when the composition buffer reached end of stream (deferred while the last frame is first fetched)
timestamp: frame CTS in ms, adjusted for partially-consumed audio chunks
size: number of valid bytes remaining in the frame
returns the frame data pointer, or NULL when nothing is available*/
GF_EXPORT
char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestamp, u32 *size)
{
	u32 obj_time;
	GF_CMUnit *CU;

	*eos = 0;
	if (!gf_odm_lock_mo(mo)) return NULL;

	if (!mo->odm->codec || !mo->odm->codec->CB) {
		gf_odm_lock(mo->odm, 0);
		return NULL;
	}

	/*if frame locked return it*/
	if (mo->nb_fetch) {
		*eos = 0;
		*timestamp = mo->timestamp;
		*size = mo->framesize;
		mo->nb_fetch ++;
		gf_odm_lock(mo->odm, 0);
		return mo->frame;
	}

	/*end of stream */
	*eos = gf_cm_is_eos(mo->odm->codec->CB);

	/*not running and no resync (ie audio)*/
	if (!resync && !gf_cm_is_running(mo->odm->codec->CB)) {
		gf_odm_lock(mo->odm, 0);
		return NULL;
	}

	/*new frame to fetch, lock*/
	CU = gf_cm_get_output(mo->odm->codec->CB);
	/*no output*/
	if (!CU || (CU->RenderedLength == CU->dataLength)) {
		gf_odm_lock(mo->odm, 0);
		return NULL;
	}

	/*note this assert is NOT true when recomputing DTS from CTS on the fly (MPEG1/2 RTP and H264/AVC RTP)*/
	//assert(CU->TS >= mo->odm->codec->CB->LastRenderedTS);

	/*cannot drop the only unit available*/
	if (mo->odm->codec->CB->UnitCount==1) resync = 0;

	/*resync*/
	if (resync) {
		u32 nb_droped = 0;
		obj_time = gf_clock_time(mo->odm->codec->ck);
		while (CU->TS < obj_time) {
			if (!CU->next->dataLength) break;
			/*figure out closest time*/
			if (CU->next->TS > obj_time) {
				*eos = 0;
				break;
			}
			nb_droped ++;
			/*first frame dropped is normal rendering progress, only log/count from the second on*/
			if (nb_droped>1) {
				GF_LOG(GF_LOG_INFO, GF_LOG_MEDIA, ("[ODM%d] At OTB %d dropped frame TS %d\n", mo->odm->OD->objectDescriptorID, obj_time, CU->TS));
				mo->odm->codec->nb_droped++;
			}
			/*discard*/
			CU->RenderedLength = CU->dataLength = 0;
			gf_cm_drop_output(mo->odm->codec->CB);
			/*get next*/
			CU = gf_cm_get_output(mo->odm->codec->CB);
			*eos = gf_cm_is_eos(mo->odm->codec->CB);
		}
	}

	mo->framesize = CU->dataLength - CU->RenderedLength;
	mo->frame = CU->data + CU->RenderedLength;
	if (mo->timestamp != CU->TS) {
		MS_UpdateTiming(mo->odm, *eos);
		mo->timestamp = CU->TS;
		GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] At OTB %d fetch frame TS %d size %d - %d unit in CB\n", mo->odm->OD->objectDescriptorID, gf_clock_time(mo->odm->codec->ck), mo->timestamp, mo->framesize, mo->odm->codec->CB->UnitCount));
		/*signal EOS after rendering last frame, not while rendering it*/
		*eos = 0;
	}
	/*also adjust CU time based on consumed bytes in input, since some codecs output very large audio chunks*/
	if (mo->odm->codec->bytes_per_sec) mo->timestamp += CU->RenderedLength * 1000 / mo->odm->codec->bytes_per_sec;
	mo->nb_fetch ++;
	*timestamp = mo->timestamp;
	*size = mo->framesize;
	gf_odm_lock(mo->odm, 0);
	return mo->frame;
}

/*releases a frame previously locked by gf_mo_fetch_data.
nb_bytes: number of bytes consumed, or 0xFFFFFFFF to mark the whole unit consumed
NOTE(review): this capture of the file is TRUNCATED inside this function - the remainder
of its body (and any following code) is missing from the source as seen here.*/
GF_EXPORT
void gf_mo_release_data(GF_MediaObject *mo, u32 nb_bytes, s32 forceDrop)
{
	u32 obj_time;
	if (!gf_odm_lock_mo(mo)) return;

	if (!mo->nb_fetch) {
		gf_odm_lock(mo->odm, 0);
		return;
	}
	mo->nb_fetch--;
	/*frame still locked by another fetcher*/
	if (mo->nb_fetch) {
		gf_odm_lock(mo->odm, 0);
		return;
	}

	/*perform a sanity check on TS since the CB may have changed status - this may happen in temporal scalability only*/
	if (mo->odm->codec->CB->output->dataLength ) {
		if (nb_bytes==0xFFFFFFFF) {
			mo->odm->codec->CB->output->RenderedLength = mo->odm->codec->CB->output->dataLength;
		} else {
			assert(mo->odm->codec->CB->output->RenderedLength + nb_bytes <= mo->odm->codec->CB->output->dataLength);
			mo->odm->codec->CB->output->RenderedLength += nb_bytes;
		}