📄 gxfenc.c
字号:
} return url_ftell(pb) - pos;}static int gxf_write_umf_media_mpeg(ByteIOContext *pb, GXFStreamContext *stream){ if (stream->codec->pix_fmt == PIX_FMT_YUV422P) put_le32(pb, 2); else put_le32(pb, 1); /* default to 420 */ put_le32(pb, stream->first_gop_closed == 1); /* closed = 1, open = 0, unknown = 255 */ put_le32(pb, 3); /* top = 1, bottom = 2, frame = 3, unknown = 0 */ put_le32(pb, 1); /* I picture per GOP */ put_le32(pb, stream->p_per_gop); put_le32(pb, stream->b_per_gop); if (stream->codec->codec_id == CODEC_ID_MPEG2VIDEO) put_le32(pb, 2); else if (stream->codec->codec_id == CODEC_ID_MPEG1VIDEO) put_le32(pb, 1); else put_le32(pb, 0); put_le32(pb, 0); /* reserved */ return 32;}static int gxf_write_umf_media_timecode(ByteIOContext *pb, GXFStreamContext *track){ /* FIXME implement */ put_be32(pb, 0); /* drop frame flag */ put_be32(pb, 0); /* reserved */ put_be32(pb, 0); /* reserved */ put_be32(pb, 0); /* reserved */ put_be32(pb, 0); /* reserved */ put_be32(pb, 0); /* reserved */ put_be32(pb, 0); /* reserved */ put_be32(pb, 0); /* reserved */ return 32;}static int gxf_write_umf_media_dv(ByteIOContext *pb, GXFStreamContext *track){ int i; for (i = 0; i < 8; i++) { put_be32(pb, 0); } return 32;}static int gxf_write_umf_media_audio(ByteIOContext *pb, GXFStreamContext *track){ put_le64(pb, av_dbl2int(1)); /* sound level to begin to */ put_le64(pb, av_dbl2int(1)); /* sound level to begin to */ put_le32(pb, 0); /* number of fields over which to ramp up sound level */ put_le32(pb, 0); /* number of fields over which to ramp down sound level */ put_le32(pb, 0); /* reserved */ put_le32(pb, 0); /* reserved */ return 32;}#if 0static int gxf_write_umf_media_mjpeg(ByteIOContext *pb, GXFStreamContext *track){ put_be64(pb, 0); /* FIXME FLOAT max chroma quant level */ put_be64(pb, 0); /* FIXME FLOAT max luma quant level */ put_be64(pb, 0); /* FIXME FLOAT min chroma quant level */ put_be64(pb, 0); /* FIXME FLOAT min luma quant level */ return 32;}#endifstatic int 
/**
 * Write one UMF media-description entry per stream: media file name,
 * track type, sample rate/size and a 32-byte per-codec extension block.
 * Each entry's 16-bit length field is patched afterwards by seeking back.
 * @return total number of bytes written for all entries
 */
gxf_write_umf_media_description(ByteIOContext *pb, GXFContext *ctx)
{
    offset_t pos;
    int i;

    pos = url_ftell(pb);
    /* record this section's offset relative to the UMF payload start */
    ctx->umf_media_offset = pos - ctx->umf_start_offset;
    for (i = 0; i < ctx->fc->nb_streams; ++i) {
        GXFStreamContext *sc = &ctx->streams[i];
        char buffer[88]; /* fixed-size media file name field */
        offset_t startpos, curpos;
        int path_size = strlen(ES_NAME_PATTERN);

        memset(buffer, 0, 88);
        startpos = url_ftell(pb);
        put_le16(pb, 0); /* length -- placeholder, patched below once the entry size is known */
        put_le16(pb, sc->media_info);
        put_le16(pb, 0); /* reserved */
        put_le16(pb, 0); /* reserved */
        put_le32(pb, ctx->nb_frames);
        put_le32(pb, 0); /* attributes rw, ro */
        put_le32(pb, 0); /* mark in */
        put_le32(pb, ctx->nb_frames); /* mark out */
        /* 88-byte name field: ES_NAME_PATTERN prefix, a big-endian media_info
           id, then zero padding (buffer was zeroed above) */
        strncpy(buffer, ES_NAME_PATTERN, path_size);
        put_buffer(pb, (uint8_t *)buffer, path_size);
        put_be16(pb, sc->media_info);
        put_buffer(pb, (uint8_t *)buffer + path_size + 2, 88 - path_size - 2);
        put_le32(pb, sc->track_type);
        put_le32(pb, sc->sample_rate);
        put_le32(pb, sc->sample_size);
        put_le32(pb, 0); /* reserved */
        /* 32-byte codec-specific extension */
        switch (sc->codec->codec_id) {
        case CODEC_ID_MPEG2VIDEO:
            gxf_write_umf_media_mpeg(pb, sc);
            break;
        case CODEC_ID_PCM_S16LE:
            gxf_write_umf_media_audio(pb, sc);
            break;
        case CODEC_ID_DVVIDEO:
            gxf_write_umf_media_dv(pb, sc);
            break;
        default:
            gxf_write_umf_media_timecode(pb, sc); /* writes 8 zero 32-bit words (32 bytes) */
        }
        /* go back and fill in the real entry length */
        curpos = url_ftell(pb);
        url_fseek(pb, startpos, SEEK_SET);
        put_le16(pb, curpos - startpos);
        url_fseek(pb, curpos, SEEK_SET);
    }
    return url_ftell(pb) - pos;
}

/**
 * Write the fixed 20-byte UMF user-data section (all zero apart from its
 * own length word).
 * @return number of bytes written (always 20)
 */
static int gxf_write_umf_user_data(ByteIOContext *pb, GXFContext *ctx)
{
    offset_t pos = url_ftell(pb);
    ctx->umf_user_data_offset = pos - ctx->umf_start_offset;
    put_le32(pb, 20); /* section length, including this word */
    put_le32(pb, 0);
    put_le16(pb, 0);
    put_le16(pb, 0);
    put_le32(pb, 0);
    put_byte(pb, 0);
    put_byte(pb, 0);
    put_byte(pb, 0);
    put_byte(pb, 0);
    return 20;
}

/**
 * Write a complete UMF packet: packet header, preamble, then the payload,
 * material, track, media and user-data sections.  (Body continues on the
 * following line of this chunk.)
 * @return packet size as reported by updatePacketSize()
 */
static int gxf_write_umf_packet(ByteIOContext *pb, GXFContext *ctx)
{
    offset_t pos = url_ftell(pb);

    gxf_write_packet_header(pb, PKT_UMF);

    /* preamble */
    put_byte(pb, 3); /* first and last (only) packet */
    put_be32(pb, ctx->umf_length); /* data length */
/* body of gxf_write_umf_packet() continues: emit the UMF sections and
   remember their sizes/offsets so the packet can be rewritten later with
   final values (see gxf_write_trailer) */
    ctx->umf_start_offset = url_ftell(pb);
    gxf_write_umf_payload(pb, ctx);
    gxf_write_umf_material_description(pb, ctx);
    ctx->umf_track_size = gxf_write_umf_track_description(pb, ctx);
    ctx->umf_media_size = gxf_write_umf_media_description(pb, ctx);
    ctx->umf_user_data_size = gxf_write_umf_user_data(pb, ctx);
    ctx->umf_length = url_ftell(pb) - ctx->umf_start_offset;
    return updatePacketSize(pb, pos);
}

/* sentinel meaning "dts_delay not yet initialized" (see gxf_interleave_packet) */
#define GXF_NODELAY -5000

/**
 * Muxer write_header callback: validate the streams (16-bit mono 48kHz PCM
 * audio; MPEG-1/2 or DV video), fill in per-stream GXF parameters and the
 * global flags word, then emit the initial MAP and UMF packets.
 * @return 0 on success, -1 on unsupported stream parameters
 */
static int gxf_write_header(AVFormatContext *s)
{
    ByteIOContext *pb = &s->pb;
    GXFContext *gxf = s->priv_data;
    int i;

    gxf->fc = s;
    gxf->flags |= 0x00080000; /* material is simple clip */
    for (i = 0; i < s->nb_streams; ++i) {
        AVStream *st = s->streams[i];
        GXFStreamContext *sc = &gxf->streams[i];

        sc->codec = st->codec;
        sc->index = i;
        sc->media_type = codec_get_tag(gxf_media_types, sc->codec->codec_id);
        if (st->codec->codec_type == CODEC_TYPE_AUDIO) {
            /* GXF audio here is restricted to 48kHz mono 16-bit PCM */
            if (st->codec->codec_id != CODEC_ID_PCM_S16LE) {
                av_log(s, AV_LOG_ERROR, "only 16 BIT PCM LE allowed for now\n");
                return -1;
            }
            if (st->codec->sample_rate != 48000) {
                av_log(s, AV_LOG_ERROR, "only 48000hz sampling rate is allowed\n");
                return -1;
            }
            if (st->codec->channels != 1) {
                av_log(s, AV_LOG_ERROR, "only mono tracks are allowed\n");
                return -1;
            }
            sc->track_type = 2;
            sc->sample_rate = st->codec->sample_rate;
            av_set_pts_info(st, 64, 1, sc->sample_rate);
            sc->sample_size = 16;
            /* -2: video-only fields, not applicable to audio tracks */
            sc->frame_rate_index = -2;
            sc->lines_index = -2;
            sc->fields = -2;
            gxf->audio_tracks++;
            gxf->flags |= 0x04000000; /* audio is 16 bit pcm */
            /* buffer audio so it can be cut into fixed-size GXF packets;
               NOTE(review): return value of av_fifo_init is not checked */
            av_fifo_init(&sc->audio_buffer, 3*GXF_AUDIO_PACKET_SIZE);
        } else if (sc->codec->codec_type == CODEC_TYPE_VIDEO) {
            /* FIXME check from time_base ?
*/
            /* pick field rate / standard from the coded height:
               480/512 lines => NTSC (60 fields/s), otherwise assume PAL */
            if (sc->codec->height == 480 || sc->codec->height == 512) { /* NTSC or NTSC+VBI */
                sc->frame_rate_index = 5;
                sc->sample_rate = 60;
                gxf->flags |= 0x00000080;
            } else { /* assume PAL */
                sc->frame_rate_index = 6;
                sc->media_type++; /* presumably the PAL variant of the media type -- verify against gxf_media_types */
                sc->sample_rate = 50;
                gxf->flags |= 0x00000040;
            }
            gxf->sample_rate = sc->sample_rate;
            av_set_pts_info(st, 64, 1, st->codec->time_base.den);
            sc->dts_delay = GXF_NODELAY; /* not yet known; set on first packet */
            if (gxf_find_lines_index(sc) < 0)
                sc->lines_index = -1;
            sc->sample_size = st->codec->bit_rate;
            sc->fields = 2; /* interlaced */
            switch (sc->codec->codec_id) {
            case CODEC_ID_MPEG2VIDEO:
                sc->first_gop_closed = -1; /* unknown until first GOP header is parsed */
                sc->track_type = 4;
                gxf->mpeg_tracks++;
                gxf->flags |= 0x00008000;
                break;
            case CODEC_ID_DVVIDEO:
                if (sc->codec->pix_fmt == PIX_FMT_YUV422P) {
                    sc->media_type += 2;
                    sc->track_type = 6;
                    gxf->flags |= 0x00002000;
                } else {
                    sc->track_type = 5;
                    gxf->flags |= 0x00001000;
                }
                break;
            default:
                av_log(s, AV_LOG_ERROR, "video codec not supported\n");
                return -1;
            }
        }
    }
    /* initial MAP and UMF packets; both are rewritten in the trailer */
    gxf_write_map_packet(pb, gxf);
    //gxf_write_flt_packet(pb, gxf);
    gxf_write_umf_packet(pb, gxf);
    put_flush_packet(pb);
    return 0;
}

/**
 * Write an end-of-stream packet (header only, no payload).
 * @return packet size as reported by updatePacketSize()
 */
static int gxf_write_eos_packet(ByteIOContext *pb, GXFContext *ctx)
{
    offset_t pos = url_ftell(pb);
    gxf_write_packet_header(pb, PKT_EOS);
    return updatePacketSize(pb, pos);
}

/**
 * Muxer write_trailer callback: free the audio FIFOs, record the final
 * frame count, write EOS, then seek back to the start of the file and
 * rewrite the MAP and UMF packets with the now-known values.
 * @return 0
 */
static int gxf_write_trailer(AVFormatContext *s)
{
    ByteIOContext *pb = &s->pb;
    GXFContext *gxf = s->priv_data;
    offset_t end;
    int i;

    for (i = 0; i < s->nb_streams; ++i) {
        if (s->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO) {
            av_fifo_free(&gxf->streams[i].audio_buffer);
        } else if (s->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) {
            /* 2 * frames -- presumably a count in fields (interlaced), verify */
            gxf->nb_frames = 2 * s->streams[i]->codec->frame_number;
        }
    }
    gxf_write_eos_packet(pb, gxf);
    end = url_ftell(pb);
    url_fseek(pb, 0, SEEK_SET);
    /* overwrite map and umf packets with new values */
    gxf_write_map_packet(pb, gxf);
    //gxf_write_flt_packet(pb, gxf);
    gxf_write_umf_packet(pb, gxf);
    url_fseek(pb, end, SEEK_SET);
    return 0;
}

/**
 * Scan an MPEG video frame: note whether the first GOP is closed and return
 * the picture coding type.  (Parameter list continues on the next line.)
 */
static int gxf_parse_mpeg_frame(GXFStreamContext *sc, const uint8_t
*buf, int size)
{
    uint32_t c = -1;
    int i;

    /* scan for start codes until a picture start code (0x00000100) is found */
    for (i = 0; i < size - 4 && c != 0x100; i++) {
        c = (c << 8) + buf[i];
        if (c == 0x1B8 && sc->first_gop_closed == -1) /* GOP start code */
            sc->first_gop_closed = (buf[i+4] >> 6) & 1; /* closed_gop flag */
    }
    /* picture_coding_type from the byte after the picture start code;
       NOTE(review): assumes a picture start code exists within the buffer,
       otherwise this returns bits of an arbitrary byte */
    return (buf[i+1] >> 3) & 7;
}

/**
 * Write the 16-byte media-packet preamble: track id, stream index, a 32-bit
 * dts (rescaled to field units), a codec-dependent size/frame-type field,
 * the dts again, flags and a reserved byte.
 * @param size payload size including padding
 * @return number of bytes written (always 16)
 */
static int gxf_write_media_preamble(ByteIOContext *pb, GXFContext *ctx, AVPacket *pkt, int size)
{
    GXFStreamContext *sc = &ctx->streams[pkt->stream_index];
    /* rescale dts from the stream time base to the field rate */
    int64_t dts = av_rescale_rnd(pkt->dts, ctx->sample_rate, sc->codec->time_base.den, AV_ROUND_UP);

    put_byte(pb, sc->media_type);
    put_byte(pb, sc->index);
    put_be32(pb, dts);
    if (sc->codec->codec_type == CODEC_TYPE_AUDIO) {
        put_be16(pb, 0);
        put_be16(pb, size / 2); /* size in 16-bit samples */
    } else if (sc->codec->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* frame type byte: 0x0d = I, 0x0e = P, 0x0f = B */
        int frame_type = gxf_parse_mpeg_frame(sc, pkt->data, pkt->size);
        if (frame_type == FF_I_TYPE) {
            put_byte(pb, 0x0d);
            sc->iframes++;
        } else if (frame_type == FF_B_TYPE) {
            put_byte(pb, 0x0f);
            sc->bframes++;
        } else {
            put_byte(pb, 0x0e);
            sc->pframes++;
        }
        put_be24(pb, size);
    } else if (sc->codec->codec_id == CODEC_ID_DVVIDEO) {
        put_byte(pb, size / 4096); /* size in 4096-byte DV blocks */
        put_be24(pb, 0);
    } else
        put_be32(pb, size);
    put_be32(pb, dts);
    put_byte(pb, 1); /* flags */
    put_byte(pb, 0); /* reserved */
    return 16;
}

/**
 * Write one media packet: header, preamble, payload and padding.
 * MPEG-2 payloads are padded to a multiple of 4 bytes; audio payloads are
 * padded up to GXF_AUDIO_PACKET_SIZE.
 * @return packet size as reported by updatePacketSize()
 */
static int gxf_write_media_packet(ByteIOContext *pb, GXFContext *ctx, AVPacket *pkt)
{
    GXFStreamContext *sc = &ctx->streams[pkt->stream_index];
    offset_t pos = url_ftell(pb);
    int padding = 0;

    gxf_write_packet_header(pb, PKT_MEDIA);
    if (sc->codec->codec_id == CODEC_ID_MPEG2VIDEO && pkt->size % 4) /* MPEG-2 frames must be padded */
        padding = 4 - pkt->size % 4;
    else if (sc->codec->codec_type == CODEC_TYPE_AUDIO)
        padding = GXF_AUDIO_PACKET_SIZE - pkt->size;
    gxf_write_media_preamble(pb, ctx, pkt, pkt->size + padding);
    put_buffer(pb, pkt->data, pkt->size);
    gxf_write_padding(pb, padding);
    return updatePacketSize(pb, pos);
}

/**
 * Muxer write_packet callback: emit one media packet and flush.
 * @return 0
 */
static int gxf_write_packet(AVFormatContext *s, AVPacket *pkt)
{
    GXFContext *gxf = s->priv_data;
    gxf_write_media_packet(&s->pb, gxf, pkt);
    put_flush_packet(&s->pb);
    return 0;
}

static int
/**
 * Pull buffered PCM out of a stream's FIFO into a new packet.
 * Normally takes exactly GXF_AUDIO_PACKET_SIZE bytes; on flush it drains
 * whatever remains.  Advances current_dts by the sample count.
 * @return packet size in bytes, or 0 if there is nothing to emit
 */
gxf_new_audio_packet(GXFContext *gxf, GXFStreamContext *sc, AVPacket *pkt, int flush)
{
    int size = flush ? av_fifo_size(&sc->audio_buffer) : GXF_AUDIO_PACKET_SIZE;

    if (!size)
        return 0;
    av_new_packet(pkt, size);
    av_fifo_read(&sc->audio_buffer, pkt->data, size);
    pkt->stream_index = sc->index;
    pkt->dts = sc->current_dts;
    sc->current_dts += size / 2; /* we only support 16 bit pcm mono for now */
    return size;
}

/**
 * Muxer interleave callback: buffer incoming audio into per-stream FIFOs and
 * re-emit it in fixed-size chunks; shift video dts so the first packet starts
 * at 0; then fall through to the generic dts-based interleaver.
 * @return result of av_interleave_packet_per_dts()
 */
static int gxf_interleave_packet(AVFormatContext *s, AVPacket *out, AVPacket *pkt, int flush)
{
    GXFContext *gxf = s->priv_data;
    AVPacket new_pkt;
    int i;

    for (i = 0; i < s->nb_streams; i++) {
        AVStream *st = s->streams[i];
        GXFStreamContext *sc = &gxf->streams[i];

        if (st->codec->codec_type == CODEC_TYPE_AUDIO) {
            /* swallow the caller's audio packet into the FIFO */
            if (pkt && pkt->stream_index == i) {
                av_fifo_write(&sc->audio_buffer, pkt->data, pkt->size);
                pkt = NULL;
            }
            /* emit a fixed-size audio chunk once enough data is buffered */
            if (flush || av_fifo_size(&sc->audio_buffer) >= GXF_AUDIO_PACKET_SIZE) {
                if (!pkt && gxf_new_audio_packet(gxf, sc, &new_pkt, flush) > 0) {
                    pkt = &new_pkt;
                    break; /* add pkt right now into list */
                }
            }
        } else if (pkt && pkt->stream_index == i) {
            if (sc->dts_delay == GXF_NODELAY) /* adjust dts if needed */
                sc->dts_delay = pkt->dts; /* remember the first dts as the offset */
            pkt->dts -= sc->dts_delay;
        }
    }
    return av_interleave_packet_per_dts(s, out, pkt, flush);
}

/* Muxer registration; positional initializers follow the AVOutputFormat
   layout of this FFmpeg version. */
AVOutputFormat gxf_muxer = {
    "gxf",                 /* name */
    "GXF format",          /* long_name */
    NULL,                  /* mime_type */
    "gxf",                 /* extensions */
    sizeof(GXFContext),    /* priv_data_size */
    CODEC_ID_PCM_S16LE,    /* default audio codec */
    CODEC_ID_MPEG2VIDEO,   /* default video codec */
    gxf_write_header,
    gxf_write_packet,
    gxf_write_trailer,
    0,                     /* flags */
    NULL,                  /* set_parameters */
    gxf_interleave_packet,
};
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -