📄 inline.c
	if (gf_odm_find_segment(is->root_od, seg_name) != NULL) return NULL;
	return seg_name;
}

GF_EXPORT
Bool gf_is_default_scene_viewpoint(GF_Node *node)
{
	const char *nname, *sname;
	GF_SceneGraph *sg = gf_node_get_graph(node);
	GF_InlineScene *is = sg ? (GF_InlineScene *) gf_sg_get_private(sg) : NULL;
	if (!is) return 0;

	nname = gf_node_get_name(node);
	if (!nname) return 0;
	sname = IS_GetSceneViewName(is);
	if (!sname) return 0;
	return (!strcmp(nname, sname));
}

GF_EXPORT
void gf_is_register_extra_graph(GF_InlineScene *is, GF_SceneGraph *extra_scene, Bool do_remove)
{
	if (do_remove) {
		if (gf_list_find(is->extra_scenes, extra_scene)<0) return;
		gf_list_del_item(is->extra_scenes, extra_scene);
		/*for root scene*/
		if (is->root_od->term->root_scene == is) {
			gf_sr_register_extra_graph(is->root_od->term->renderer, extra_scene, 1);
		}
	} else {
		if (gf_list_find(is->extra_scenes, extra_scene)>=0) return;
		gf_list_add(is->extra_scenes, extra_scene);
		/*for root scene*/
		if (is->root_od->term->root_scene == is) {
			gf_sr_register_extra_graph(is->root_od->term->renderer, extra_scene, 0);
		}
	}
}

static void gf_is_get_video_size(GF_MediaObject *mo, u32 *w, u32 *h)
{
	u32 pixel_ar;
	if (!gf_mo_get_visual_info(mo, w, h, NULL, &pixel_ar, NULL)) return;
	if (pixel_ar) {
		u32 n, d;
		n = (pixel_ar>>16) & 0xFFFF;
		d = (pixel_ar) & 0xFFFF;
		*w = (*w * n) / d;
	}
}

static void IS_UpdateVideoPos(GF_InlineScene *is)
{
	MFURL url;
	M_Transform2D *tr;
	GF_MediaObject *mo;
	u32 w, h, v_w, v_h;
	if (!is->visual_url.OD_ID && !is->visual_url.url) return;

	url.count = 1;
	url.vals = &is->visual_url;
	mo = IS_CheckExistingObject(is, &url);
	if (!mo) return;
	tr = (M_Transform2D *) gf_sg_find_node_by_name(is->graph, "DYN_TRANS");
	if (!tr) return;

	gf_sg_get_scene_size_info(is->graph, &w, &h);
	if (!w || !h) return;
	gf_is_get_video_size(mo, &v_w, &v_h);
	tr->translation.x = INT2FIX((s32) (w - v_w)) / 2;
	tr->translation.y = INT2FIX((s32) (h - v_h)) / 2;
	gf_node_dirty_set((GF_Node *)tr, 0, 0);

	if (is->root_od->term->root_scene == is) {
		//if (is->graph_attached) gf_sr_set_scene(is->root_od->term->renderer, NULL);
		gf_sr_set_scene(is->root_od->term->renderer, is->graph);
	}
}

static GF_Node *is_create_node(GF_SceneGraph *sg, u32 tag, const char *def_name)
{
	GF_Node *n = gf_node_new(sg, tag);
	if (n) {
		if (def_name) gf_node_set_id(n, gf_sg_get_next_available_node_id(sg), def_name);
		gf_node_init(n);
	}
	return n;
}

static Bool is_odm_url(SFURL *url, GF_ObjectManager *odm)
{
	if (!url->OD_ID && !url->url) return 0;
	if (odm->OD->objectDescriptorID != GF_ESM_DYNAMIC_OD_ID) return (url->OD_ID==odm->OD->objectDescriptorID) ? 1 : 0;
	if (!url->url || !odm->OD->URLString) return 0;
	return !stricmp(url->url, odm->OD->URLString);
}

void gf_is_force_scene_size_video(GF_InlineScene *is, GF_MediaObject *mo)
{
	u32 w, h;
	gf_is_get_video_size(mo, &w, &h);
	gf_is_force_scene_size(is, w, h);
}

/*Regenerates the scene graph for a dynamic scene. This will also try to reload any previously presented streams.
Note that in the usual case the scene is generated just once, when receiving the first OD AU (resources are NOT
destroyed when seeking), but since the network may need to update the OD resources, we still take care of it*/
void gf_is_regenerate(GF_InlineScene *is)
{
	u32 i, nb_obj, w, h;
	GF_Node *n1, *n2;
	SFURL *sfu;
	GF_Event evt;
	GF_ObjectManager *first_odm, *odm;
	M_AudioClip *ac;
	M_MovieTexture *mt;
	M_AnimationStream *as;

	if (!is->is_dynamic_scene) return;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Inline] Regenerating scene graph for service %s\n", is->root_od->net_service->url));

	if (is->root_od->term->root_scene == is) gf_sr_set_scene(is->root_od->term->renderer, NULL);

	gf_sg_reset(is->graph);
	gf_sg_get_scene_size_info(is->graph, &w, &h);
	gf_sg_set_scene_size_info(is->graph, w, h, 1);

	n1 = is_create_node(is->graph, TAG_MPEG4_OrderedGroup, NULL);
	gf_sg_set_root_node(is->graph, n1);
	gf_node_register(n1, NULL);

	n2 = is_create_node(is->graph, TAG_MPEG4_Sound2D, NULL);
	gf_node_list_add_child( &((GF_ParentNode *)n1)->children, n2);
	gf_node_register(n2, n1);

	ac = (M_AudioClip *) is_create_node(is->graph, TAG_MPEG4_AudioClip, "DYN_AUDIO");
	ac->startTime = gf_is_get_time(is);
	((M_Sound2D *)n2)->source = (GF_Node *)ac;
	gf_node_register((GF_Node *)ac, n2);

	nb_obj = 0;
	first_odm = NULL;
	i=0;
	while ((odm = (GF_ObjectManager*)gf_list_enum(is->ODlist, &i))) {
		if (!odm->codec || (odm->codec->type!=GF_STREAM_AUDIO)) continue;

		if (is_odm_url(&is->audio_url, odm)) {
			gf_sg_vrml_mf_append(&ac->url, GF_SG_VRML_MFURL, (void **) &sfu);
			sfu->OD_ID = is->audio_url.OD_ID;
			if (is->audio_url.url) sfu->url = strdup(is->audio_url.url);
			first_odm = NULL;
			nb_obj++;
			break;
		}
		if (!first_odm) first_odm = odm;
	}
	if (first_odm) {
		if (is->audio_url.url) free(is->audio_url.url);
		is->audio_url.url = NULL;
		is->audio_url.OD_ID = first_odm->OD->objectDescriptorID;
		if (is->audio_url.OD_ID==GF_ESM_DYNAMIC_OD_ID) is->audio_url.url = strdup(first_odm->net_service->url);
		gf_sg_vrml_mf_append(&ac->url, GF_SG_VRML_MFURL, (void **) &sfu);
		sfu->OD_ID = is->audio_url.OD_ID;
		if (is->audio_url.url) sfu->url = strdup(is->audio_url.url);
		nb_obj++;
		if (!is->dyn_ck) is->dyn_ck = first_odm->codec->ck;
	}

	/*transform for any translation due to scene resize (3GPP)*/
	n2 = is_create_node(is->graph, TAG_MPEG4_Transform2D, "DYN_TRANS");
	gf_node_list_add_child( &((GF_ParentNode *)n1)->children, n2);
	gf_node_register(n2, n1);
	n1 = n2;

	n2 = is_create_node(is->graph, TAG_MPEG4_Shape, NULL);
	gf_node_list_add_child( &((GF_ParentNode *)n1)->children, n2);
	gf_node_register(n2, n1);
	n1 = n2;
	n2 = is_create_node(is->graph, TAG_MPEG4_Appearance, NULL);
	((M_Shape *)n1)->appearance = n2;
	gf_node_register(n2, n1);

	/*note we create a movie texture even for images...*/
	mt = (M_MovieTexture *) is_create_node(is->graph, TAG_MPEG4_MovieTexture, "DYN_VIDEO");
	mt->startTime = gf_is_get_time(is);
	((M_Appearance *)n2)->texture = (GF_Node *)mt;
	gf_node_register((GF_Node *)mt, n2);

	first_odm = NULL;
	i=0;
	while ((odm = (GF_ObjectManager*)gf_list_enum(is->ODlist, &i))) {
		if (!odm->codec || (odm->codec->type!=GF_STREAM_VISUAL)) continue;

		if (is_odm_url(&is->visual_url, odm)) {
			gf_sg_vrml_mf_append(&mt->url, GF_SG_VRML_MFURL, (void **) &sfu);
			sfu->OD_ID = is->visual_url.OD_ID;
			if (is->visual_url.url) sfu->url = strdup(is->visual_url.url);
			if (odm->mo) {
				gf_is_get_video_size(odm->mo, &w, &h);
				gf_sg_set_scene_size_info(is->graph, w, h, 1);
			}
			first_odm = NULL;
			nb_obj++;
			break;
		}
		if (!first_odm) first_odm = odm;
	}
	if (first_odm) {
		if (is->visual_url.url) free(is->visual_url.url);
		is->visual_url.url = NULL;
		is->visual_url.OD_ID = first_odm->OD->objectDescriptorID;
		if (is->visual_url.OD_ID==GF_ESM_DYNAMIC_OD_ID) is->visual_url.url = strdup(first_odm->net_service->url);
		gf_sg_vrml_mf_append(&mt->url, GF_SG_VRML_MFURL, (void **) &sfu);
		sfu->OD_ID = is->visual_url.OD_ID;
		if (is->visual_url.url) sfu->url = strdup(is->visual_url.url);
		if (first_odm->mo) {
			gf_is_get_video_size(first_odm->mo, &w, &h);
			gf_sg_set_scene_size_info(is->graph, w, h, 1);
		}
		nb_obj++;
		if (!is->dyn_ck) is->dyn_ck = first_odm->codec->ck;
	}

	n2 = is_create_node(is->graph, TAG_MPEG4_Bitmap, NULL);
	((M_Shape *)n1)->geometry = n2;
	gf_node_register(n2, n1);

	/*text streams controlled through AnimationStream*/
	n1 = gf_sg_get_root_node(is->graph);
	as = (M_AnimationStream *) is_create_node(is->graph, TAG_MPEG4_AnimationStream, "DYN_TEXT");
	gf_node_list_add_child( &((GF_ParentNode *)n1)->children, (GF_Node*)as);
	gf_node_register((GF_Node *)as, n1);

	first_odm = NULL;
	i=0;
	while ((odm = (GF_ObjectManager*)gf_list_enum(is->ODlist, &i))) {
		if (!odm->codec || ((odm->codec->type!=GF_STREAM_TEXT) && (odm->codec->type!=GF_STREAM_ND_SUBPIC)) ) continue;

		if (!nb_obj || is_odm_url(&is->text_url, odm)) {
			if (is->text_url.url) free(is->text_url.url);
			is->text_url.url = NULL;

			gf_sg_vrml_mf_append(&as->url, GF_SG_VRML_MFURL, (void **) &sfu);
			sfu->OD_ID = is->text_url.OD_ID = odm->OD->objectDescriptorID;
			if (odm->OD->objectDescriptorID == GF_ESM_DYNAMIC_OD_ID) {
				sfu->url = strdup(odm->net_service->url);
				is->text_url.url = strdup(odm->net_service->url);
			}
			first_odm = NULL;
			if (!is->dyn_ck) is->dyn_ck = odm->codec->ck;
			break;
		}
		if (!first_odm) first_odm = odm;
	}
	if (first_odm) {
		if (is->text_url.url) free(is->text_url.url);
		is->text_url.url = NULL;
		gf_sg_vrml_mf_append(&as->url, GF_SG_VRML_MFURL, (void **) &sfu);
		sfu->OD_ID = is->text_url.OD_ID = first_odm->OD->objectDescriptorID;
		if (is->text_url.OD_ID==GF_ESM_DYNAMIC_OD_ID) {
			is->text_url.url = strdup(first_odm->net_service->url);
			sfu->url = strdup(first_odm->net_service->url);
		}
		if (!is->dyn_ck) is->dyn_ck = first_odm->codec->ck;
	}

	/*disconnect to force resize*/
	if (is->root_od->term->root_scene == is) {
		if (is->graph_attached) gf_sr_set_scene(is->root_od->term->renderer, NULL);
		gf_sr_set_scene(is->root_od->term->renderer, is->graph);
		is->graph_attached = 1;

		evt.type = GF_EVENT_STREAMLIST;
		GF_USER_SENDEVENT(is->root_od->term->user, &evt);
		IS_UpdateVideoPos(is);
	} else {
		is->graph_attached = 1;
		gf_term_invalidate_renderer(is->root_od->term);
	}
}

static Bool check_odm_deactivate(SFURL *url, GF_ObjectManager *odm, GF_Node *n)
{
	GF_FieldInfo info;
	if (!is_odm_url(url, odm) || !n) return 0;
	if (url->url) free(url->url);
	url->url = NULL;
	url->OD_ID = 0;

	gf_node_get_field_by_name(n, "url", &info);
	gf_sg_vrml_mf_reset(info.far_ptr, GF_SG_VRML_MFURL);
	gf_node_get_field_by_name(n, "stopTime", &info);
	*((SFTime *)info.far_ptr) = gf_node_get_scene_time(n);
	gf_node_changed(n, NULL);
	return 1;
}

void gf_is_select_object(GF_InlineScene *is, GF_ObjectManager *odm)
{
	if (!is->is_dynamic_scene || !is->graph_attached || !odm) return;
	if (!odm->codec) return;

	if (odm->state) {
		if (check_odm_deactivate(&is->audio_url, odm, gf_sg_find_node_by_name(is->graph, "DYN_AUDIO")) ) return;
		if (check_odm_deactivate(&is->visual_url, odm, gf_sg_find_node_by_name(is->graph, "DYN_VIDEO") )) return;
		if (check_odm_deactivate(&is->text_url, odm, gf_sg_find_node_by_name(is->graph, "DYN_TEXT") )) return;
	}

	if (odm->codec->type == GF_STREAM_AUDIO) {
		M_AudioClip *ac = (M_AudioClip *) gf_sg_find_node_by_name(is->graph, "DYN_AUDIO");
		if (!ac) return;
		if (is->audio_url.url) free(is->audio_url.url);
		is->audio_url.url = NULL;
		is->audio_url.OD_ID = odm->OD->objectDescriptorID;
		if (!ac->url.count) gf_sg_vrml_mf_alloc(&ac->url, GF_SG_VRML_MFURL, 1);
		ac->url.vals[0].OD_ID = odm->OD->objectDescriptorID;
		if (ac->url.vals[0].url) free(ac->url.vals[0].url);
		if (odm->OD->URLString) {
			is->audio_url.url = strdup(odm->OD->URLString);
			ac->url.vals[0].url = strdup(odm->OD->URLString);
		}
		ac->startTime = gf_is_get_time(is);
		gf_node_changed((GF_Node *)ac, NULL);
		return;
	}
	if (odm->codec->type == GF_STREAM_VISUAL) {
		M_MovieTexture *mt = (M_MovieTexture*) gf_sg_find_node_by_name(is->graph, "DYN_VIDEO");
		if (!mt) return;
		if (is->visual_url.url) free(is->visual_url.url);
		is->visual_url.url = NULL;
		is->visual_url.OD_ID = odm->OD->objectDescriptorID;
		if (!mt->url.count) gf_sg_vrml_mf_alloc(&mt->url, GF_SG_VRML_MFURL, 1);
		mt->url.vals[0].OD_ID = odm->OD->objectDescriptorID;
		if (mt->url.vals[0].url) free(mt->url.vals[0].url);
		if (odm->OD->URLString) {
			is->visual_url.url = strdup(odm->OD->URLString);
			mt->url.vals[0].url = strdup(odm->OD->URLString);
		}
		mt->startTime = gf_is_get_time(is);
		gf_node_changed((GF_Node *)mt, NULL);
		if (odm->mo) gf_is_force_scene_size_video(is, odm->mo);
		return;
	}
	if (odm->codec->type == GF_STREAM_TEXT) {
		M_AnimationStream *as = (M_AnimationStream*) gf_sg_find_node_by_name(is->graph, "DYN_TEXT");
		if (!as) return;
		if (is->text_url.url) free(is->text_url.url);
		is->text_url.url = NULL;
		is->text_url.OD_ID = odm->OD->objectDescriptorID;
		if (!as->url.count) gf_sg_vrml_mf_alloc(&as->url, GF_SG_VRML_MFURL, 1);
		as->url.vals[0].OD_ID = odm->OD->objectDescriptorID;
		if (as->url.vals[0].url) free(as->url.vals[0].url);
		if (odm->OD->URLString) {
			is->text_url.url = strdup(odm->OD->URLString);
			as->url.vals[0].url = strdup(odm->OD->URLString);
		}
		as->startTime = gf_is_get_time(is);
		gf_node_changed((GF_Node *)as, NULL);
		return;
	}
}

GF_EXPORT
void gf_is_force_scene_size(GF_InlineScene *is, u32 width, u32 height)
{
	/*for now only allowed when no scene info*/
	if (!is->is_dynamic_scene) return;
	gf_sg_set_scene_size_info(is->graph, width, height, gf_sg_use_pixel_metrics(is->graph));
	if (is->root_od->term->root_scene != is) return;
	gf_sr_set_scene(is->root_od->term->renderer, is->graph);
	IS_UpdateVideoPos(is);
}

void gf_is_restart_dynamic(GF_InlineScene *is, u64 from_time)
{
	u32 i;
	GF_List *to_restart;
	GF_ObjectManager *odm;
	GF_LOG(GF_LOG_INFO, GF_LOG_MEDIA, ("[InlineScene] Restarting from "LLD"\n", LLD_CAST from_time));

	to_restart = gf_list_new();
	i=0;
	while ((odm = (GF_ObjectManager*)gf_list_enum(is->ODlist, &i))) {
		if (odm->state) {
			gf_list_add(to_restart, odm);
			gf_odm_stop(odm, 1);
		}
	}

	/*reset clock*/
	if (is->dyn_ck) gf_clock_reset(is->dyn_ck);

	/*restart objects*/
	i=0;
	while ((odm = (GF_ObjectManager*)gf_list_enum(to_restart, &i))) {
		odm->media_start_time = from_time;
		gf_odm_start(odm);
	}
	gf_list_del(to_restart);

	/*also check nodes if no media control, since they may be deactivated (end of stream)*/
	if (!is->root_od->media_ctrl) {
		M_AudioClip *ac = (M_AudioClip *) gf_sg_find_node_by_name(is->graph, "DYN_AUDIO");
		M_MovieTexture *mt = (M_MovieTexture *) gf_sg_find_node_by_name(is->graph, "DYN_VIDEO");
		M_AnimationStream *as = (M_AnimationStream *) gf_sg_find_node_by_name(is->graph, "DYN_TEXT");
		if (ac) {
			ac->startTime = gf_is_get_time(is);
			gf_node_changed((GF_Node *)ac, NULL);
		}
		if (mt) {
			mt->startTime = gf_is_get_time(is);
			gf_node_changed((GF_Node *)mt, NULL);
		}
		if (as) {
			as->startTime = gf_is_get_time(is);
			gf_node_changed((GF_Node *)as, NULL);
		}
	}
}

GF_EXPORT
Bool gf_is_process_anchor(GF_Node *caller, GF_Event *evt)
{
	u32 i;
	GF_Terminal *term;
	M_Inline *inl;
	GF_InlineScene *is;
	GF_SceneGraph *sg = gf_node_get_graph(caller);
	if (!sg) return 1;
	is = (GF_InlineScene *)gf_sg_get_private(sg);
	if (!is) return 1;

	term = is->root_od->term;
	/*if main scene, forward to user. If no params, or first one is not "self", forward to user*/
	if ((term->root_scene==is)
		|| !evt->navigate.parameters
		|| !evt->navigate.param_count
		|| (stricmp(evt->navigate.parameters[0], "self") && stricmp(evt->navigate.parameters[0], "_self"))) {
		if (term->user->EventProc) return term->user->EventProc(term->user->opaque, evt);
		return 1;
	}

	/*FIXME: this is too restrictive, we assume the navigate URL is really a presentation one...*/
	i=0;
	while ((inl = (M_Inline*)gf_list_enum(is->inline_nodes, &i))) {
		gf_sg_vrml_mf_reset(&inl->url, GF_SG_VRML_MFURL);
		gf_sg_vrml_mf_alloc(&inl->url, GF_SG_VRML_MFURL, 1);
		inl->url.vals[0].url = strdup(evt->navigate.to_url ? evt->navigate.to_url : "");
		/*signal URL change but don't destroy the inline scene now, since we got this event from inside the scene:
		that could crash renderers*/
		is->needs_restart = 2;
	}
	return 1;
}

GF_EXPORT
GF_Renderer *gf_sr_get_renderer(GF_Node *node)
{
	GF_InlineScene *is;
	GF_SceneGraph *sg = gf_node_get_graph(node);
	if (!sg) return NULL;
	is = (GF_InlineScene *)gf_sg_get_private(sg);
	if (!is) return NULL;
	return is->root_od->term->renderer;
}

void InitInline(GF_InlineScene *is, GF_Node *node)
{
	gf_node_set_callback_function(node, gf_is_render);
}
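
The audio, video and text passes of gf_is_regenerate() all follow the same selection rule: an object matching the remembered URL/OD ID is used directly, otherwise the first object of the requested stream type becomes the fallback (tracked in first_odm). The sketch below restates that rule in plain, self-contained C; demo_object, select_object and main are hypothetical names for illustration only and are not part of inline.c or the GPAC API.

/* Illustrative sketch of the "prefer the remembered OD ID, else first object of the
 * requested stream type" pattern used in gf_is_regenerate(). Hypothetical types only. */
#include <stdio.h>

typedef struct {
	unsigned int od_id;       /* stand-in for the object descriptor ID */
	unsigned int stream_type; /* 0 = audio, 1 = visual, 2 = text */
} demo_object;

/* Return the object whose od_id matches wanted_od_id; when no match exists,
 * return the first object of the requested stream type (the first_odm fallback). */
static const demo_object *select_object(const demo_object *objs, unsigned int count,
                                        unsigned int stream_type, unsigned int wanted_od_id)
{
	const demo_object *first_of_type = NULL;
	unsigned int i;
	for (i = 0; i < count; i++) {
		if (objs[i].stream_type != stream_type) continue;
		if (objs[i].od_id == wanted_od_id) return &objs[i];
		if (!first_of_type) first_of_type = &objs[i];
	}
	return first_of_type;
}

int main(void)
{
	const demo_object objects[] = { {10, 1}, {11, 0}, {12, 1} };
	/* remembered visual OD ID 12 exists, so OD 12 is preferred over OD 10 */
	const demo_object *video = select_object(objects, 3, 1, 12);
	/* no audio object has OD ID 99, so the first audio object (OD 11) is the fallback */
	const demo_object *audio = select_object(objects, 3, 0, 99);
	printf("video OD: %u, audio OD: %u\n", video ? video->od_id : 0, audio ? audio->od_id : 0);
	return 0;
}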