⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 video_stream.c

📁 基于linux的DVD播放器程序
💻 C
📖 第 1 页 / 共 5 页
字号:
    /* Tail of the MPEG-2 picture-header parser (ISO/IEC 13818-2, 6.2.3):
       validates the start code, latches the packet PTS for this picture,
       reads temporal_reference / picture_coding_type / vbv_delay and the
       MPEG-1 style f_code fields.  Returns 0 on success, -1 on a bad
       start code.  (Function opening is on an earlier page.) */
  if(picture_start_code != MPEG2_VS_PICTURE_START_CODE) {
    WARNING("wrong start_code picture_start_code: %08x\n",
            picture_start_code);
    DINDENT(-2);
    return -1;
  }

  /*
   * TODO better check if pts really is for this picture
   *
   * the first picture_start_code in a packet belongs to the
   * picture that the pts in the packet corresponds to.
   */
  if(PTS_DTS_flags & 0x02) {
    /* Latch the packet PTS for this picture and remember which SCR
       (system clock reference) context it belongs to. */
    PTS_DTS_flags = 0;
    last_pts = PTS;
    prev_scr_nr = last_scr_nr;
    last_scr_nr = scr_nr;
    picture_has_pts = 1;
    DPRINTFI(1, "PTS: %016llx %lld.%06lld\n",
             PTS,
             PTS/PTS_BASE,
             (PTS%PTS_BASE)*1000000/PTS_BASE);
  } else {
    picture_has_pts = 0;
  }

  /* NOTE(review): PTS_DTS_flags was cleared above whenever the PTS bit
     was set, so this branch can never be taken; its body is only
     commented-out debug output anyway. */
  if(PTS_DTS_flags & 0x02) {
    if(last_scr_nr != prev_scr_nr) {
      /*
      fprintf(stderr, "=== last_scr_nr: %d, prev_scr_nr: %d\n",
              last_scr_nr, prev_scr_nr);
      */
      /*
      fprintf(stderr, "--- last_scr: %ld.%09ld, prev_scr: %ld.%09ld\n",
              TIME_S (ctrl_time[last_scr_nr].realtime_offset),
              TIME_SS(ctrl_time[last_scr_nr].realtime_offset),
              TIME_S (ctrl_time[prev_scr_nr].realtime_offset),
              TIME_SS(ctrl_time[prev_scr_nr].realtime_offset));
      */
      /*
      fprintf(stderr, "+++ last_pts: %lld\n", last_pts);
      */
    }
  }
  pic.header.temporal_reference = GETBITS(10, "temporal_reference");
  pic.header.picture_coding_type = GETBITS(3, "picture_coding_type");

  DPRINTFI(1, "temporal_reference: %d\n", pic.header.temporal_reference);
#ifdef DEBUG
  /* Table 6-12 --- picture_coding_type */
  DPRINTFI(1, "picture_coding_type: %01x, ", pic.header.picture_coding_type);
  switch(pic.header.picture_coding_type) {
  case PIC_CODING_TYPE_FORBIDDEN:
    DPRINTF(1, "forbidden\n");
    break;
  case PIC_CODING_TYPE_I:
    DPRINTF(1, "intra-coded (I)\n");
    break;
  case PIC_CODING_TYPE_P:
    DPRINTF(1, "predictive-coded (P)\n");
    break;
  case PIC_CODING_TYPE_B:
    DPRINTF(1, "bidirectionally-predictive-coded (B)\n");
    break;
  case PIC_CODING_TYPE_D:
    DPRINTF(1, "shall not be used (dc intra-coded (D) in ISO/IEC11172-2)\n");
    break;
  default:
    DPRINTF(1, "reserved\n");
    break;
  }
#endif

  /* Sanitize the coding type: anything that is not I/P/B is patched to P
     so decoding can continue instead of failing the whole picture. */
  switch(pic.header.picture_coding_type) {
  case PIC_CODING_TYPE_I:
  case PIC_CODING_TYPE_P:
  case PIC_CODING_TYPE_B:
    break;
  case PIC_CODING_TYPE_FORBIDDEN:
  case PIC_CODING_TYPE_D:
  default:
    /* FIXME: Workaround, should return error and skip whole picture */
    pic.header.picture_coding_type = PIC_CODING_TYPE_P;
  }
  pic.header.vbv_delay = GETBITS(16, "vbv_delay");

  /* To be able to use the same motion vector code for MPEG-1 and 2 we
     use f_code[] instead of forward_f_code/backward_f_code.
     In MPEG-2 f_code[] values will be read from the bitstream (later) in
     picture_coding_extension(). */
  if((pic.header.picture_coding_type == PIC_CODING_TYPE_P) ||
     (pic.header.picture_coding_type == PIC_CODING_TYPE_B)) {
    pic.header.full_pel_vector[0] = GETBITS(1, "full_pel_forward_vector");
    pic.header.forward_f_code = GETBITS(3, "forward_f_code");
    if(pic.header.forward_f_code == 0) {
      /* f_code 0 is forbidden by the spec; force 1 to keep the motion
         vector decoder sane. */
      pic.header.forward_f_code = 1;
      WARNING("%s", " ** forward_f_code == ZERO\n");
    }
    pic.coding_ext.f_code[0][0] = pic.header.forward_f_code;
    pic.coding_ext.f_code[0][1] = pic.header.forward_f_code;
  }

  if(pic.header.picture_coding_type == PIC_CODING_TYPE_B) {
    pic.header.full_pel_vector[1] = GETBITS(1, "full_pel_backward_vector");
    pic.header.backward_f_code = GETBITS(3, "backward_f_code");
    if(pic.header.backward_f_code == 0) {
      /* Same forbidden-value workaround as for the forward f_code. */
      pic.header.backward_f_code = 1;
      WARNING("%s", "** backward_f_code == ZERO\n");
    }
    pic.coding_ext.f_code[1][0] = pic.header.backward_f_code;
    pic.coding_ext.f_code[1][1] = pic.header.backward_f_code;
  }

  /* Skip over any extra_information_picture bytes (reserved by the
     standard, ignored here). */
  while(nextbits(1) == 1) {
    GETBITS(1, "extra_bit_picture");
    GETBITS(8, "extra_information_picture");
  }
  GETBITS(1, "extra_bit_picture");

  DINDENT(-2);
  next_start_code();
  return 0;
}

// Get id of empty buffer.  Blocks (pumping the message queue) until one
// of the queue's data elements is neither a decoder reference frame nor
// still waiting to be displayed; returns its index.
int get_picture_buf(data_q_t *data_q)
{
  int n;
  MsgEvent_t ev;
  data_buf_head_t *data_head;
/* get_picture_buf() continued: scan for a free element, otherwise wait
   for a notification from the display side and re-scan. */
  data_head = data_q->data_head;

  //fprintf(stderr, "vs: searching for free picture\n");
  //  search for empty buffer
  while(1) {
    ev.type = MsgEventQNone;
    for(n = 0; n < data_head->nr_of_dataelems; n++) {
      /* Free means: not a reference frame and already displayed. */
      if((data_q->data_elems[n].is_reference == 0) &&
         (data_q->data_elems[n].displayed == 1)) {
        data_q->data_elems[n].displayed = 0;
        // found buf
        //fprintf(stderr, "vs: found free picture buf: @%d\n", n);
        //fprintf(stderr, "|\n");
        return n;
      }
    }

    // no empty buffer; ask the consumer to notify us when one is released
    data_q->q_head->writer_requests_notification = 1;
    //fprintf(stderr, "vs: didn't find free picture, setting notification\n");
    /*
    for(n = 0; n < data_q->data_head->nr_of_dataelems; n++) {
      fprintf(stderr, "vs: [%d] is_ref: @%d, disp: @%d\n",
              n+4,
              picture_ctrl_data[n].is_reference,
              picture_ctrl_data[n].displayed);
    }
    */
    while(ev.type != MsgEventQNotify) {
      //fprintf(stderr, ".");
      /* Re-scan before blocking: a buffer may have been freed between
         setting the notification flag and waiting on the queue. */
      for(n = 0; n < data_q->data_head->nr_of_dataelems; n++) {
        if((data_q->data_elems[n].is_reference == 0) &&
           (data_q->data_elems[n].displayed == 1)) {
          data_q->data_elems[n].displayed = 0;
          // found buf;
          //fprintf(stderr, "vs: found free2 picture buf: @%d\n", n);
          //fprintf(stderr, "+\n");
          return n;
        }
      }
      //fprintf(stderr, "video_decode: waiting for notification1\n");
      if(MsgNextEvent(msgq, &ev) != -1) {
        handle_events(msgq, &ev);
      }
      /*
      for(n = 0; n < picture_ctrl_head->nr_of_dataelems; n++) {
        fprintf(stderr, "vs: [%d] is_ref: @%d, disp: @%d\n",
                n+4,
                picture_ctrl_data[n].is_reference,
                picture_ctrl_data[n].displayed);
      }
      */
    }
  }
}

// Put decoded picture into display queue.
// id is the data-element index returned by get_picture_buf(); blocks if
// the next queue slot is still in use, then fills it, advances write_nr
// (circularly) and wakes the reader if it requested a notification.
void dpy_q_put(int id, data_q_t *data_q)
{
  MsgEvent_t ev;
  int elem;

  elem = data_q->q_head->write_nr;
  /*
  fprintf(stderr, "DEBUG[vs]: try put picture in q, elem: @%d, bufid: @%d\n",
          elem, id);
  */
  /* Slot busy: request a notification and pump the message queue until
     the reader marks it free. */
  if(data_q->q_elems[elem].in_use) {
    data_q->q_head->writer_requests_notification = 1;
    //fprintf(stderr, "vs:  elem in use, setting notification\n");
    while(data_q->q_elems[elem].in_use) {
      //fprintf(stderr, "video_decode: waiting for notification2\n");
      if(MsgNextEvent(msgq, &ev) != -1) {
        handle_events(msgq, &ev);
      }
    }
  }
  //fprintf(stderr, "vs:  elem free to fill\n");

  data_q->q_elems[elem].data_elem_index = id;
  data_q->q_elems[elem].in_use = 1;

  /* Advance the circular write index. */
  data_q->q_head->write_nr =
    (data_q->q_head->write_nr + 1) % data_q->q_head->nr_of_qelems;

  /* Wake the reader if it asked to be told about new pictures. */
  if(data_q->q_head->reader_requests_notification) {
    //fprintf(stderr, "vs:  reader wants notify, sending...\n");
    data_q->q_head->reader_requests_notification = 0;
    ev.type = MsgEventQNotify;
    ev.notify.qid = data_q->q_head->qid;
    if(MsgSendEvent(msgq, data_q->q_head->reader, &ev, 0) == -1) {
      fprintf(stderr, "video_decode: couldn't send notification\n");
    }
  }
}

/* 6.2.3.6 Picture data
   Decodes one picture: manages the forward/backward reference buffers,
   queues finished reference frames for display and assigns a PTS
   (measured or predicted) to the picture being decoded.
   (Function body continues past this page.) */
void picture_data(void)
{
  static int buf_id;
  static int fwd_ref_buf_id = -1;          /* oldest reference frame */
  static int bwd_ref_buf_id  = -1;         /* newest reference frame */
  int err;
  picture_data_elem_t *pinfos;
  static int bwd_ref_temporal_reference = -1;
  static int last_timestamped_temp_ref = -1;
  static int drop_frame = 0;
  int temporal_reference_error = 0;

  pinfos = cur_data_q->data_elems;

  DPRINTFI(1, "picture_data()\n");
  DINDENT(2);
#ifdef HAVE_MMX
  /* Reset the FPU state after any MMX use in the previous picture. */
  emms();
#endif

  /* Drain any pending control messages before decoding. */
  if(msgqid != -1) {
    MsgEvent_t ev;
    while(MsgCheckEvent(msgq, &ev) != -1) {
      handle_events(msgq, &ev);
    }
    //chk_for_msg();
  }

  DPRINTFI(1, "last_temporal_ref_to_dpy: %d\n", last_temporal_ref_to_dpy);
  DPRINTFI(1, "bwd_ref_temporal_reference: %d\n", bwd_ref_temporal_reference);
  if(prev_coded_temp_ref != pic.header.temporal_reference) {

    /* If this is a I/P picture then we must release the reference
       frame that is going to be replaced. (It might not have been
       displayed yet so it is not necessarily free for reuse.)
*/

    /* I and P frames: the incoming picture replaces the oldest reference
       frame, and the previous backward reference can now be queued for
       display. */
    switch(pic.header.picture_coding_type) {
    case PIC_CODING_TYPE_I:
    case PIC_CODING_TYPE_P:

      /* check to see if a temporal reference has been skipped */

      if(bwd_ref_temporal_reference != -1) {

        WARNING("%s", "** temporal reference skipped\n");

        /* If we are in a new GOP and there is an old
           undisplayed bwd_ref_temporal_reference, _don't_ use
           that as the last last_temporal_ref_to_dpy (since
           temporal_reference are only valid within a GOP). */
        /* FIXME: Care will have to be taken for the time calculations,
           if the new image doesn't have a PTS stamp. This is not
           handled correctly yet. */
        if(prev_coded_temp_ref != -2)
          last_temporal_ref_to_dpy = bwd_ref_temporal_reference;

        /* bwd_ref should not be in the dpy_q more than one time */
        bwd_ref_temporal_reference = -1;

        /* put bwd_ref in dpy_q */

        last_pts_to_dpy = pinfos[bwd_ref_buf_id].PTS;
        last_scr_nr_to_dpy = pinfos[bwd_ref_buf_id].scr_nr;

        if(flush_to_scrid != -1) {
          /* Flushing: release the frame instead of queueing it. */
          pinfos[bwd_ref_buf_id].is_reference = 0;
          pinfos[bwd_ref_buf_id].displayed = 1;
        } else {
          dpy_q_put(bwd_ref_buf_id, cur_data_q);
        }
      }
      DPRINTFI(1, "last_temporal_ref_to_dpy: %d\n", last_temporal_ref_to_dpy);
      DPRINTFI(1, "bwd_ref_temporal_reference: %d\n", bwd_ref_temporal_reference);

      if(fwd_ref_buf_id != -1) {
        /* current fwd_ref_image is not used as reference any more */
        pinfos[fwd_ref_buf_id].is_reference = 0;
      }

      /* get new buffer */
      buf_id = get_picture_buf(cur_data_q);
      dst_image = &cur_data_q->image_bufs[buf_id];

      /* Age the reference frame */
      fwd_ref_buf_id = bwd_ref_buf_id;
      fwd_ref_image = bwd_ref_image;

      /* and add the new (to be decoded) frame */
      bwd_ref_image = dst_image;
      bwd_ref_buf_id = buf_id;

      bwd_ref_temporal_reference = pic.header.temporal_reference;

      /* this buffer is used as reference picture by the decoder */
      pinfos[buf_id].is_reference = 1;

      break;
    case PIC_CODING_TYPE_B:

      /* B frames are never used as references; just get a buffer to
         decode into. */
      buf_id = get_picture_buf(cur_data_q);
      dst_image = &cur_data_q->image_bufs[buf_id];

      break;
    }
    DPRINTFI(1, "last_temporal_ref_to_dpy: %d\n", last_temporal_ref_to_dpy);
    DPRINTFI(1, "bwd_ref_temporal_reference: %d\n", bwd_ref_temporal_reference);

    /*
     * temporal reference is incremented by 1 for every frame.
     *
     *  this can be used to keep track of the order in which the pictures
     *  shall be displayed.
     *  it can not be used to calculate the time when a specific picture
     *  should be displayed (one can make a guess, but
     *  it isn't necessarily true that a frame with a temporal reference
     *  1 greater than the previous picture should be displayed
     *  1 frame interval later)
     *
     * time stamp
     *
     * this tells when a picture shall be displayed;
     * not all pictures have time stamps
     */

    /*
     * Time stamps
     *
     * case 1:
     *  The packet containing the picture header start code
     *  had a time stamp.
     *
     *  In this case the time stamp is used for this picture.
     *
     * case 2:
     *  The packet containing the picture header start code
     *  didn't have a time stamp.
     *
     *  In this case the time stamp for this picture must be calculated.
     *
     *  case 2.1:
     *   There is a previously decoded picture with a time stamp
     *   in the same temporal reference context.
     *
     *   If the temporal reference for the previous picture is lower
     *   than the temp_ref for this picture then we take
     *   the difference between the two temp_refs and multiply
     *   with the frame interval time and then add this
     *   to the original time stamp to get the time stamp for this picture:
     *
     *   timestamp = (this_temp_ref - timestamped_temp_ref)*
     *                frame_interval+timestamped_temp_ref_timestamp
     *
     *   todo: We must take into account that the temporal reference wraps
     *   at 1024.
     *
     *   If the temporal reference for the previous picture is higher
     *   than the current, we do the same.
     *
     *  case 2.2:
     *   There is no previously decoded picture with a time stamp.
     */

    /* If the packet containing the picture header start code had
       a time stamp, that time stamp is used.

       Otherwise a time stamp is calculated from the last picture
       produced for viewing.

       Iff we predict the time stamp then we must also make sure to use
       the same scr as the picture we predict from.
    */
    if(picture_has_pts) {
      last_timestamped_temp_ref = pic.header.temporal_reference;
      pinfos[buf_id].PTS = last_pts;
      pinfos[buf_id].PTS_DTS_flags = 0x02;
      pinfos[buf_id].scr_nr = last_scr_nr;
      /*
        fprintf(stderr, "#%ld.%09ld\n",
        TIME_S (pinfos[buf_id].pts_time),
        TIME_SS(pinfos[buf_id].pts_time));
      */
    } else {
      /* Predict if we don't already have a pts for the frame. */
      uint64_t calc_pts;
      switch(pic.header.picture_coding_type) {
      case PIC_CODING_TYPE_I:
      case PIC_CODING_TYPE_P:
        /* TODO: Is this correct? */

        /* First case: we don't use the temporal_reference
         * In this case we can calculate the time stamp for
         * the oldest reference frame (fwd_ref) to be displayed when
         * we get a new reference frame
         *
         * The forward ref time stamp should be the
         * previous displayed frame's timestamp plus one frame interval,
         * because when we get a new reference frame we know that the
         * next frame to display is the old reference frame(fwd_ref)
         */

        /* In case the fwd_ref picture already has a time stamp, do nothing
         * Also check to see that we do have a fwd_ref picture
         */

        /* Second case: We use the temporal_reference
         * In this case we can look at the previous temporal ref
         */
        if(last_timestamped_temp_ref != -1) {
          /* Extrapolate from the last picture that carried a real PTS. */
          calc_pts = last_pts +
            (pic.header.temporal_reference - last_timestamped_temp_ref) *
            frame_interval;
          /*
            calc_pts = last_pts_to_dpy +
            buf_ctrl_head->frame_interval;
          */
        } else {
          if(last_temporal_ref_to_dpy == -1) {
            /* NOTE(review): last_timestamped_temp_ref is -1 on this path,
               so this adds (temporal_reference + 1) frame intervals to
               last_pts_to_dpy — verify that offset is intended. */
            calc_pts = last_pts_to_dpy +
              (pic.header.temporal_reference - last_timestamped_temp_ref) *
              frame_interval;
          } else {

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -