
audio_stacks.c

A multimedia library for smartphones; a cross-platform development library suitable for S60 and WinCE.
Language: C
Page 1 of 2
/*
 *			GPAC - Multimedia Framework C SDK
 *
 *			Copyright (c) Jean Le Feuvre 2000-2005
 *					All rights reserved
 *
 *  This file is part of GPAC / Scene Rendering sub-project
 *
 *  GPAC is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU Lesser General Public License as published by
 *  the Free Software Foundation; either version 2, or (at your option)
 *  any later version.
 *
 *  GPAC is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; see the file COPYING.  If not, write to
 *  the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 */

#include "common_stacks.h"
#include <gpac/nodes_mpeg4.h>
#include <gpac/nodes_x3d.h>

typedef struct
{
	GF_AudioInput input;
	GF_TimeNode time_handle;
	Double start_time;
	Bool set_duration;
} AudioClipStack;

static void AC_Activate(AudioClipStack *st, M_AudioClip *ac)
{
	gf_sr_audio_open(&st->input, &ac->url, 0, -1);
	ac->isActive = 1;
	gf_node_event_out_str((GF_Node *)ac, "isActive");
	gf_mo_set_speed(st->input.stream, st->input.speed);
	/*rerender all graph to get parent audio group*/
	gf_sr_invalidate(st->input.compositor, NULL);
}

static void AC_Deactivate(AudioClipStack *st, M_AudioClip *ac)
{
	gf_sr_audio_stop(&st->input);
	ac->isActive = 0;
	gf_node_event_out_str((GF_Node *)ac, "isActive");
	st->time_handle.needs_unregister = 1;
}

static void RenderAudioClip(GF_Node *node, void *rs, Bool is_destroy)
{
	GF_BaseEffect *eff = (GF_BaseEffect *)rs;
	M_AudioClip *ac = (M_AudioClip *)node;
	AudioClipStack *st = (AudioClipStack *)gf_node_get_private(node);

	if (is_destroy) {
		gf_sr_audio_stop(&st->input);
		gf_sr_audio_unregister(&st->input);
		if (st->time_handle.is_registered) {
			gf_sr_unregister_time_node(st->input.compositor, &st->time_handle);
		}
		free(st);
		return;
	}

	/*check end of stream*/
	if (st->input.stream && st->input.stream_finished) {
		if (gf_mo_get_loop(st->input.stream, ac->loop)) {
			gf_sr_audio_restart(&st->input);
		} else if (ac->isActive && gf_mo_should_deactivate(st->input.stream)) {
			/*deactivate*/
			AC_Deactivate(st, ac);
		}
	}
	if (ac->isActive) {
		gf_sr_audio_register(&st->input, (GF_BaseEffect*)rs);
	}
	if (st->set_duration && st->input.stream) {
		ac->duration_changed = gf_mo_get_duration(st->input.stream);
		gf_node_event_out_str(node, "duration_changed");
		st->set_duration = 0;
	}

	/*store mute flag*/
	st->input.is_muted = (eff->trav_flags & GF_SR_TRAV_SWITCHED_OFF);
}

static void AC_UpdateTime(GF_TimeNode *tn)
{
	Double time;
	M_AudioClip *ac = (M_AudioClip *)tn->obj;
	AudioClipStack *st = (AudioClipStack *)gf_node_get_private(tn->obj);

	if (!ac->isActive) {
		st->start_time = ac->startTime;
		st->input.speed = ac->pitch;
	}
	time = gf_node_get_scene_time(tn->obj);
	if ((time < st->start_time) || (st->start_time < 0)) return;

	if (ac->isActive) {
		if ((ac->stopTime > st->start_time) && (time >= ac->stopTime)) {
			AC_Deactivate(st, ac);
			return;
		}
	}
	if (!ac->isActive) AC_Activate(st, ac);
}

void InitAudioClip(GF_Renderer *sr, GF_Node *node)
{
	AudioClipStack *st;
	GF_SAFEALLOC(st, AudioClipStack);
	gf_sr_audio_setup(&st->input, sr, node);

	st->time_handle.UpdateTimeNode = AC_UpdateTime;
	st->time_handle.obj = node;
	st->set_duration = 1;

	gf_node_set_private(node, st);
	gf_node_set_callback_function(node, RenderAudioClip);
	gf_sr_register_time_node(sr, &st->time_handle);
}

void AudioClipModified(GF_Node *node)
{
	M_AudioClip *ac = (M_AudioClip *)node;
	AudioClipStack *st = (AudioClipStack *) gf_node_get_private(node);
	if (!st) return;

	/*MPEG4 spec is not clear about that, so this is not forbidden*/
	if (st->input.is_open) {
		if (gf_sr_audio_check_url(&st->input, &ac->url)) {
			gf_sr_audio_stop(&st->input);
			gf_sr_audio_open(&st->input, &ac->url, 0, -1);
			/*force unregister to resetup audio cfg*/
			gf_sr_audio_unregister(&st->input);
			gf_sr_invalidate(st->input.compositor, NULL);
		}
	}

	//update state if we're active
	if (ac->isActive) {
		AC_UpdateTime(&st->time_handle);
		/*we're no longer active, don't check for reactivation*/
		if (!ac->isActive) return;
	}

	/*make sure we are still registered*/
	if (!st->time_handle.is_registered && !st->time_handle.needs_unregister)
		gf_sr_register_time_node(st->input.compositor, &st->time_handle);
	else
		st->time_handle.needs_unregister = 0;
}

typedef struct
{
	GF_AudioInput input;
	GF_TimeNode time_handle;
	Bool is_active;
	Double start_time;
} AudioSourceStack;

static void AS_Activate(AudioSourceStack *st, M_AudioSource *as)
{
	gf_sr_audio_open(&st->input, &as->url, 0, -1);
	st->is_active = 1;
	gf_mo_set_speed(st->input.stream, st->input.speed);
	/*rerender all graph to get parent audio group*/
	gf_sr_invalidate(st->input.compositor, NULL);
}

static void AS_Deactivate(AudioSourceStack *st, M_AudioSource *as)
{
	gf_sr_audio_stop(&st->input);
	st->is_active = 0;
	st->time_handle.needs_unregister = 1;
}

static void RenderAudioSource(GF_Node *node, void *rs, Bool is_destroy)
{
	GF_BaseEffect *eff = (GF_BaseEffect *)rs;
	M_AudioSource *as = (M_AudioSource *)node;
	AudioSourceStack *st = (AudioSourceStack *)gf_node_get_private(node);

	if (is_destroy) {
		gf_sr_audio_stop(&st->input);
		gf_sr_audio_unregister(&st->input);
		if (st->time_handle.is_registered) {
			gf_sr_unregister_time_node(st->input.compositor, &st->time_handle);
		}
		free(st);
		return;
	}

	/*check end of stream*/
	if (st->input.stream && st->input.stream_finished) {
		if (gf_mo_get_loop(st->input.stream, 0)) {
			gf_sr_audio_restart(&st->input);
		} else if (st->is_active && gf_mo_should_deactivate(st->input.stream)) {
			/*deactivate*/
			AS_Deactivate(st, as);
		}
	}
	if (st->is_active) {
		gf_sr_audio_register(&st->input, (GF_BaseEffect*)rs);
	}

	/*store mute flag*/
	st->input.is_muted = (eff->trav_flags & GF_SR_TRAV_SWITCHED_OFF);
}

static void AS_UpdateTime(GF_TimeNode *tn)
{
	Double time;
	M_AudioSource *as = (M_AudioSource *)tn->obj;
	AudioSourceStack *st = (AudioSourceStack *)gf_node_get_private(tn->obj);

	if (!st->is_active) {
		st->start_time = as->startTime;
		st->input.speed = as->speed;
	}
	time = gf_node_get_scene_time(tn->obj);
	if ((time < st->start_time) || (st->start_time < 0)) return;

	if (st->input.input_ifce.GetSpeed(st->input.input_ifce.callback) && st->is_active) {
		if ((as->stopTime > st->start_time) && (time >= as->stopTime)) {
			AS_Deactivate(st, as);
			return;
		}
	}
	if (!st->is_active) AS_Activate(st, as);
}

void InitAudioSource(GF_Renderer *sr, GF_Node *node)
{
	AudioSourceStack *st;
	GF_SAFEALLOC(st, AudioSourceStack);
	gf_sr_audio_setup(&st->input, sr, node);

	st->time_handle.UpdateTimeNode = AS_UpdateTime;
	st->time_handle.obj = node;

	gf_node_set_private(node, st);
	gf_node_set_callback_function(node, RenderAudioSource);
	gf_sr_register_time_node(sr, &st->time_handle);
}

void AudioSourceModified(GF_Node *node)
{
	M_AudioSource *as = (M_AudioSource *)node;
	AudioSourceStack *st = (AudioSourceStack *) gf_node_get_private(node);
	if (!st) return;

	/*MPEG4 spec is not clear about that, so this is not forbidden*/
	if (st->input.is_open) {
		if (gf_sr_audio_check_url(&st->input, &as->url)) {
			gf_sr_audio_stop(&st->input);
			gf_sr_audio_open(&st->input, &as->url, 0, -1);
			/*force unregister to resetup audio cfg*/
			gf_sr_audio_unregister(&st->input);
			gf_sr_invalidate(st->input.compositor, NULL);
		}
	}

	//update state if we're active
	if (st->is_active) {
		AS_UpdateTime(&st->time_handle);
		if (!st->is_active) return;
	}

	/*make sure we are still registered*/
	if (!st->time_handle.is_registered && !st->time_handle.needs_unregister)
		gf_sr_register_time_node(st->input.compositor, &st->time_handle);
	else
		st->time_handle.needs_unregister = 0;
}

typedef struct
{
	AUDIO_GROUP_NODE
	GF_TimeNode time_handle;
	Double start_time;
	Bool set_duration;
	/*AudioBuffer mixes its children*/
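
The heart of both node stacks is the time-node callback (AC_UpdateTime / AS_UpdateTime): while the node is inactive it latches startTime and speed, it does nothing until scene time reaches that start time, it activates once it does, and it deactivates and asks to be unregistered when a valid stopTime (greater than the latched start) has been passed. The following is a minimal, self-contained sketch of that activation pattern only; it does not use the GPAC API, and the names TimedNode, timed_node_update and the driver loop in main are hypothetical.

/* Sketch of the startTime/stopTime activation pattern, independent of GPAC. */
#include <stdio.h>

typedef struct {
	double startTime;    /* when to activate (scene time, seconds) */
	double stopTime;     /* when to deactivate; ignored unless > latched start */
	int    isActive;
	int    unregistered; /* mirrors time_handle.needs_unregister: stop updating after deactivation */
	double armed_start;  /* copy of startTime latched while inactive */
} TimedNode;

static void timed_node_update(TimedNode *n, double scene_time)
{
	/* once deactivated, the node is no longer driven by the clock */
	if (n->unregistered) return;

	/* while inactive, keep latching the (possibly edited) startTime */
	if (!n->isActive) n->armed_start = n->startTime;

	/* nothing to do before the armed start time, or if it is unset/negative */
	if ((scene_time < n->armed_start) || (n->armed_start < 0)) return;

	/* active and past a valid stopTime: deactivate and unregister */
	if (n->isActive) {
		if ((n->stopTime > n->armed_start) && (scene_time >= n->stopTime)) {
			n->isActive = 0;
			n->unregistered = 1;
			printf("t=%.1f deactivate\n", scene_time);
			return;
		}
	}
	/* inactive and past startTime: activate */
	if (!n->isActive) {
		n->isActive = 1;
		printf("t=%.1f activate\n", scene_time);
	}
}

int main(void)
{
	TimedNode clip = { 1.0, 3.0, 0, 0, 0.0 };
	double t;
	/* drive the node with a fake scene clock: it activates at t=1.0 and deactivates at t=3.0 */
	for (t = 0.0; t <= 4.0; t += 0.5) timed_node_update(&clip, t);
	return 0;
}

In the real stacks above, activation additionally opens the audio URL and invalidates the scene graph so the parent audio group picks the stream up, and deactivation stops the stream and sets needs_unregister on the time node.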
