xan.c
            rgb_plane[index++] = s->palette[pix * 4 + 0];
            rgb_plane[index++] = s->palette[pix * 4 + 1];
            rgb_plane[index++] = s->palette[pix * 4 + 2];
            ADVANCE_CURRENT_X();
        }
        break;

    case PIX_FMT_RGBA32:
        rgb32_plane = (unsigned int *)s->current_frame.data[0];
        palette32 = (unsigned int *)s->palette;
        stride = s->current_frame.linesize[0] / 4;
        line_inc = stride - width;
        index = y * stride + x;
        current_x = x;
        while(pixel_count--) {
            rgb32_plane[index++] = palette32[*pixel_buffer++];
            ADVANCE_CURRENT_X();
        }
        break;

    case PIX_FMT_YUV444P:
        y_plane = s->current_frame.data[0];
        u_plane = s->current_frame.data[1];
        v_plane = s->current_frame.data[2];
        stride = s->current_frame.linesize[0];
        line_inc = stride - width;
        index = y * stride + x;
        current_x = x;
        while(pixel_count--) {
            pix = *pixel_buffer++;
            y_plane[index] = s->palette[pix * 4 + 0];
            u_plane[index] = s->palette[pix * 4 + 1];
            v_plane[index] = s->palette[pix * 4 + 2];
            index++;
            ADVANCE_CURRENT_X();
        }
        break;

    default:
        av_log(s->avctx, AV_LOG_ERROR, " Xan WC3: Unhandled colorspace\n");
        break;
    }
}

#define ADVANCE_CURFRAME_X() \
    curframe_x++; \
    if (curframe_x >= width) { \
        curframe_index += line_inc; \
        curframe_x = 0; \
    }

#define ADVANCE_PREVFRAME_X() \
    prevframe_x++; \
    if (prevframe_x >= width) { \
        prevframe_index += line_inc; \
        prevframe_x = 0; \
    }

static void inline xan_wc3_copy_pixel_run(XanContext *s,
    int x, int y,
    int pixel_count, int motion_x, int motion_y)
{
    int stride;
    int line_inc;
    int curframe_index, prevframe_index;
    int curframe_x, prevframe_x;
    int width = s->avctx->width;
    unsigned char *palette_plane, *prev_palette_plane;
    unsigned char *y_plane, *u_plane, *v_plane;
    unsigned char *prev_y_plane, *prev_u_plane, *prev_v_plane;
    unsigned char *rgb_plane, *prev_rgb_plane;
    unsigned short *rgb16_plane, *prev_rgb16_plane;
    unsigned int *rgb32_plane, *prev_rgb32_plane;

    switch (s->avctx->pix_fmt) {

    case PIX_FMT_PAL8:
        palette_plane = s->current_frame.data[0];
        prev_palette_plane = s->last_frame.data[0];
        stride = s->current_frame.linesize[0];
        line_inc = stride - width;
        curframe_index = y * stride + x;
        curframe_x = x;
        prevframe_index = (y + motion_y) * stride + x + motion_x;
        prevframe_x = x + motion_x;
        while(pixel_count--) {
            palette_plane[curframe_index++] =
                prev_palette_plane[prevframe_index++];
            ADVANCE_CURFRAME_X();
            ADVANCE_PREVFRAME_X();
        }
        break;

    case PIX_FMT_RGB555:
    case PIX_FMT_RGB565:
        rgb16_plane = (unsigned short *)s->current_frame.data[0];
        prev_rgb16_plane = (unsigned short *)s->last_frame.data[0];
        stride = s->current_frame.linesize[0] / 2;
        line_inc = stride - width;
        curframe_index = y * stride + x;
        curframe_x = x;
        prevframe_index = (y + motion_y) * stride + x + motion_x;
        prevframe_x = x + motion_x;
        while(pixel_count--) {
            rgb16_plane[curframe_index++] =
                prev_rgb16_plane[prevframe_index++];
            ADVANCE_CURFRAME_X();
            ADVANCE_PREVFRAME_X();
        }
        break;

    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
        rgb_plane = s->current_frame.data[0];
        prev_rgb_plane = s->last_frame.data[0];
        stride = s->current_frame.linesize[0];
        line_inc = stride - width * 3;
        curframe_index = y * stride + x * 3;
        curframe_x = x;
        prevframe_index = (y + motion_y) * stride + (3 * (x + motion_x));
        prevframe_x = x + motion_x;
        while(pixel_count--) {
            rgb_plane[curframe_index++] = prev_rgb_plane[prevframe_index++];
            rgb_plane[curframe_index++] = prev_rgb_plane[prevframe_index++];
            rgb_plane[curframe_index++] = prev_rgb_plane[prevframe_index++];
            ADVANCE_CURFRAME_X();
            ADVANCE_PREVFRAME_X();
        }
        break;

    case PIX_FMT_RGBA32:
        rgb32_plane = (unsigned int *)s->current_frame.data[0];
        prev_rgb32_plane = (unsigned int *)s->last_frame.data[0];
        stride = s->current_frame.linesize[0] / 4;
        line_inc = stride - width;
        curframe_index = y * stride + x;
        curframe_x = x;
        prevframe_index = (y + motion_y) * stride + x + motion_x;
        prevframe_x = x + motion_x;
        while(pixel_count--) {
            rgb32_plane[curframe_index++] =
                prev_rgb32_plane[prevframe_index++];
            ADVANCE_CURFRAME_X();
            ADVANCE_PREVFRAME_X();
        }
        break;

    case PIX_FMT_YUV444P:
        y_plane = s->current_frame.data[0];
        u_plane = s->current_frame.data[1];
        v_plane = s->current_frame.data[2];
        prev_y_plane = s->last_frame.data[0];
        prev_u_plane = s->last_frame.data[1];
        prev_v_plane = s->last_frame.data[2];
        stride = s->current_frame.linesize[0];
        line_inc = stride - width;
        curframe_index = y * stride + x;
        curframe_x = x;
        prevframe_index = (y + motion_y) * stride + x + motion_x;
        prevframe_x = x + motion_x;
        while(pixel_count--) {
            y_plane[curframe_index] = prev_y_plane[prevframe_index];
            u_plane[curframe_index] = prev_u_plane[prevframe_index];
            v_plane[curframe_index] = prev_v_plane[prevframe_index];
            curframe_index++;
            ADVANCE_CURFRAME_X();
            prevframe_index++;
            ADVANCE_PREVFRAME_X();
        }
        break;

    default:
        av_log(s->avctx, AV_LOG_ERROR, " Xan WC3: Unhandled colorspace\n");
        break;
    }
}

static void xan_wc3_decode_frame(XanContext *s) {

    int width = s->avctx->width;
    int height = s->avctx->height;
    int total_pixels = width * height;
    unsigned char opcode;
    unsigned char flag = 0;
    int size = 0;
    int motion_x, motion_y;
    int x, y;

    unsigned char *opcode_buffer = s->buffer1;
    unsigned char *imagedata_buffer = s->buffer2;

    /* pointers to segments inside the compressed chunk */
    unsigned char *huffman_segment;
    unsigned char *size_segment;
    unsigned char *vector_segment;
    unsigned char *imagedata_segment;

    huffman_segment =   s->buf + LE_16(&s->buf[0]);
    size_segment =      s->buf + LE_16(&s->buf[2]);
    vector_segment =    s->buf + LE_16(&s->buf[4]);
    imagedata_segment = s->buf + LE_16(&s->buf[6]);

    xan_huffman_decode(opcode_buffer, huffman_segment);

    if (imagedata_segment[0] == 2)
        xan_unpack(imagedata_buffer, &imagedata_segment[1]);
    else
        imagedata_buffer = &imagedata_segment[1];

    /* use the decoded data segments to build the frame */
    x = y = 0;
    while (total_pixels) {

        opcode = *opcode_buffer++;
        size = 0;

        switch (opcode) {

        case 0:
            flag ^= 1;
            continue;

        case 1:
        case 2:
        case 3:
        case 4:
        case 5:
        case 6:
        case 7:
        case 8:
            size = opcode;
            break;

        case 12:
        case 13:
        case 14:
        case 15:
        case 16:
        case 17:
        case 18:
            size += (opcode - 10);
            break;

        case 9:
        case 19:
            size = *size_segment++;
            break;

        case 10:
        case 20:
            size = BE_16(&size_segment[0]);
            size_segment += 2;
            break;

        case 11:
        case 21:
            size = (size_segment[0] << 16) | (size_segment[1] << 8) |
                size_segment[2];
            size_segment += 3;
            break;
        }

        if (opcode < 12) {
            flag ^= 1;
            if (flag) {
                /* run of (size) pixels is unchanged from last frame */
                xan_wc3_copy_pixel_run(s, x, y, size, 0, 0);
            } else {
                /* output a run of pixels from imagedata_buffer */
                xan_wc3_output_pixel_run(s, imagedata_buffer, x, y, size);
                imagedata_buffer += size;
            }
        } else {
            /* run-based motion compensation from last frame */
            motion_x = (*vector_segment >> 4) & 0xF;
            motion_y = *vector_segment & 0xF;
            vector_segment++;

            /* sign extension */
            if (motion_x & 0x8)
                motion_x |= 0xFFFFFFF0;
            if (motion_y & 0x8)
                motion_y |= 0xFFFFFFF0;

            /* copy a run of pixels from the previous frame */
            xan_wc3_copy_pixel_run(s, x, y, size, motion_x, motion_y);

            flag = 0;
        }

        /* coordinate accounting */
        total_pixels -= size;
        while (size) {
            if (x + size >= width) {
                y++;
                size -= (width - x);
                x = 0;
            } else {
                x += size;
                size = 0;
            }
        }
    }

    /* for PAL8, make the palette available on the way out */
    if (s->avctx->pix_fmt == PIX_FMT_PAL8) {
        memcpy(s->current_frame.data[1], s->palette, PALETTE_COUNT * 4);
        s->current_frame.palette_has_changed = 1;
        s->avctx->palctrl->palette_changed = 0;
    }
}

static void xan_wc4_decode_frame(XanContext *s) {
}

static int xan_decode_frame(AVCodecContext *avctx,
                            void *data, int *data_size,
                            uint8_t *buf, int buf_size)
{
    XanContext *s = avctx->priv_data;
    AVPaletteControl *palette_control = avctx->palctrl;
    int keyframe = 0;

    if (palette_control->palette_changed) {
        /* load the new palette and reset the palette control */
        xan_wc3_build_palette(s, palette_control->palette);
        /* If pal8 we clear flag when we copy palette */
        if (s->avctx->pix_fmt != PIX_FMT_PAL8)
            palette_control->palette_changed = 0;
        keyframe = 1;
    }

    if (avctx->get_buffer(avctx, &s->current_frame)) {
        av_log(s->avctx, AV_LOG_ERROR, " Xan Video: get_buffer() failed\n");
        return -1;
    }
    s->current_frame.reference = 3;

    s->buf = buf;
    s->size = buf_size;

    if (avctx->codec->id == CODEC_ID_XAN_WC3)
        xan_wc3_decode_frame(s);
    else if (avctx->codec->id == CODEC_ID_XAN_WC4)
        xan_wc4_decode_frame(s);

    /* release the last frame if it is allocated */
    if (s->last_frame.data[0])
        avctx->release_buffer(avctx, &s->last_frame);

    /* shuffle frames */
    s->last_frame = s->current_frame;

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* always report that the buffer was completely consumed */
    return buf_size;
}

static int xan_decode_end(AVCodecContext *avctx)
{
    XanContext *s = avctx->priv_data;

    /* release the last frame */
    avctx->release_buffer(avctx, &s->last_frame);

    av_free(s->buffer1);
    av_free(s->buffer2);

    return 0;
}

AVCodec xan_wc3_decoder = {
    "xan_wc3",
    CODEC_TYPE_VIDEO,
    CODEC_ID_XAN_WC3,
    sizeof(XanContext),
    xan_decode_init,
    NULL,
    xan_decode_end,
    xan_decode_frame,
    CODEC_CAP_DR1,
};

/*
AVCodec xan_wc4_decoder = {
    "xan_wc4",
    CODEC_TYPE_VIDEO,
    CODEC_ID_XAN_WC4,
    sizeof(XanContext),
    xan_decode_init,
    NULL,
    xan_decode_end,
    xan_decode_frame,
    CODEC_CAP_DR1,
};
*/
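For reference, the WC3 chunk that xan_wc3_decode_frame() parses begins with four little-endian 16-bit offsets (bytes 0, 2, 4, 6) locating the huffman, size, vector, and image-data segments, and each decoded opcode selects a run length and, for opcodes 12 and above, a motion vector. The sketch below is not part of xan.c: it simply restates the size-opcode table and nibble sign extension from the switch statement above in standalone form, and the helper names wc3_run_length() and wc3_motion() are hypothetical.

/* Illustrative sketch only -- mirrors the opcode handling in
 * xan_wc3_decode_frame() above; helper names are hypothetical. */
static int wc3_run_length(unsigned char opcode, const unsigned char **size_segment)
{
    const unsigned char *p = *size_segment;
    int size;

    if (opcode == 0) {
        size = 0;                        /* opcode 0 only toggles the copy/output flag */
    } else if (opcode >= 1 && opcode <= 8) {
        size = opcode;                   /* size encoded directly in the opcode */
    } else if (opcode >= 12 && opcode <= 18) {
        size = opcode - 10;              /* short motion-compensated run */
    } else if (opcode == 9 || opcode == 19) {
        size = *p++;                     /* one extra size byte */
    } else if (opcode == 10 || opcode == 20) {
        size = (p[0] << 8) | p[1];       /* 16-bit big-endian size */
        p += 2;
    } else if (opcode == 11 || opcode == 21) {
        size = (p[0] << 16) | (p[1] << 8) | p[2];   /* 24-bit size */
        p += 3;
    } else {
        size = 0;                        /* opcodes outside the table leave size at 0 */
    }

    *size_segment = p;
    return size;
}

/* Opcodes >= 12 also consume one byte from the vector segment: a signed
 * 4-bit x offset in the high nibble and a signed 4-bit y offset in the
 * low nibble, sign-extended as in the decoder above. */
static void wc3_motion(unsigned char vector, int *motion_x, int *motion_y)
{
    *motion_x = (vector >> 4) & 0xF;
    *motion_y = vector & 0xF;
    if (*motion_x & 0x8)
        *motion_x |= ~0xF;               /* extend the sign into the upper bits */
    if (*motion_y & 0x8)
        *motion_y |= ~0xF;
}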