/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 The FFmpeg project
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently only supports a PAL8 output colorspace.
 *
 * An Interplay video frame consists of 2 parts: The decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define BITSTREAM_READER_LE
#include "avcodec.h"
#include "bitstream.h"
#include "bytestream.h"
#include "hpeldsp.h"
#include "internal.h"

#define PALETTE_COUNT 256

typedef struct IpvideoContext {

    AVCodecContext *avctx;
    HpelDSPContext hdsp;
    AVFrame *second_last_frame;
    AVFrame *last_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    int is_16bpp;
    GetByteContext stream_ptr, mv_ptr;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

    uint32_t pal[256];
} IpvideoContext;

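/* Note on the context fields above: decoding_map holds one 4-bit opcode per
 * 8x8 block of the frame, and stride/line_inc are expressed in pixels rather
 * than bytes (bytes for PAL8, 16-bit words for RGB555); both are set up in
 * ipvideo_decode_opcodes() below. */
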
static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - dst->data[0];
    int motion_offset = current_offset + delta_y * dst->linesize[0]
                      + delta_x * (1 + s->is_16bpp);
    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
        return AVERROR_INVALIDDATA;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return AVERROR_INVALIDDATA;
    }
    if (!src->data[0]) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
        return AVERROR(EINVAL);
    }
    s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                            dst->linesize[0], 8);
    return 0;
}

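/* Worked example for copy_from(), assuming a hypothetical 8bpp frame with
 * linesize 320: delta_x = 2, delta_y = -1 gives
 * motion_offset = current_offset + (-1) * 320 + 2 * 1, i.e. the source block
 * starts one row up and two pixels to the right of the destination block. */
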
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->last_frame, frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->second_last_frame, frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    ff_dlog(NULL, "    motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}

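/* Worked example for the motion byte above: B = 20 decodes to
 * x = 8 + (20 % 7) = 14, y = 20 / 7 = 2, while B = 60 decodes to
 * x = -14 + (4 % 29) = -10, y = 8 + (4 / 29) = 8. Opcode 0x3 below applies
 * the same mapping with both components negated. */
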
static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    ff_dlog(NULL, "    motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    ff_dlog(NULL, "    motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}

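/* Worked example for the nibble-packed motion byte above: B = 0x34 gives
 * BL = 4, BH = 3, so (x, y) = (-8 + 4, -8 + 3) = (-4, -5); each component is
 * therefore limited to the range [-8, 7]. */
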
static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_dlog(NULL, "    motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}

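/* The "expanded range" above comes from reading each motion byte into a
 * signed char, so both components cover [-128, 127] instead of the packed
 * ranges used by opcodes 0x2-0x4. */
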
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}

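/* In the 2-color opcode above, the ordering of the two palette indices acts
 * as a mode flag: P[0] <= P[1] selects per-pixel 1-bit flags (8 more bytes,
 * one per row, with the 0x100 sentinel marking the end of each row), while
 * P[0] > P[1] selects a 2x2-upscaled pattern that needs only 2 more bytes. */
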
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_byte(&s->stream_ptr);
                    P[1] = bytestream2_get_byte(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_byte(&s->stream_ptr);
        P[3] = bytestream2_get_byte(&s->stream_ptr);

        if (P[2] <= P[3]) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

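/* Opcode 0x8 above chooses between three layouts based on the ordering of the
 * color pairs: per-4x4-quadrant 2-color blocks when P[0] <= P[1], otherwise a
 * vertical (left/right) split when P[2] <= P[3] or a horizontal (top/bottom)
 * split when P[2] > P[3]. The y == 7 adjustments step the pixel pointer from
 * the bottom of the left 4-pixel-wide column back up to the top of the right
 * column. */
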
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

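/* Opcode 0x9 above packs four sub-modes into the ordering of the four colors:
 * per-pixel 2-bit indices (16 more bytes), 2x2 blocks (4 more bytes), and
 * 2x1 or 1x2 blocks (8 more bytes each), so denser pixel detail costs more
 * stream data. */
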
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[8];
    int flags = 0;

    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
        vert = P[4] <= P[5];

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 4);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}

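/* Opcode 0xA above mirrors the splitting logic of opcode 0x8 but with 2-bit
 * color indices: P[0] <= P[1] selects four independent 4-color quadrants,
 * otherwise the ordering of the second color quartet (P[4] <= P[5]) picks a
 * vertical or horizontal half split. */
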
static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    for (y = 0; y < 8; y++) {
        bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char P[2];

    /* 4-color block encoding: each 4x4 block is a different color */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_byte(&s->stream_ptr);
            P[1] = bytestream2_get_byte(&s->stream_ptr);
        }

        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    pix = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    sample[0] = bytestream2_get_byte(&s->stream_ptr);
    sample[1] = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}

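/* The dithered opcode above alternates the two samples within each row and
 * swaps their order on every other row, producing an 8x8 checkerboard from
 * just 2 bytes of stream data. */
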
static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the second last frame using an expanded range */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_dlog(NULL, "    motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}

static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_le16(&s->stream_ptr);
                    P[1] = bytestream2_get_le16(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_le16(&s->stream_ptr);
        P[3] = bytestream2_get_le16(&s->stream_ptr);

        if (!(P[2] & 0x8000)) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[8];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (!(P[0] & 0x8000)) {

        /* 4-color encoding for each quadrant */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y)
                    for (x = 0; x < 4; x++)
                        P[x] = bytestream2_get_le16(&s->stream_ptr);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        for (x = 4; x < 8; x++)
            P[x] = bytestream2_get_le16(&s->stream_ptr);
        vert = !(P[4] & 0x8000);

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 8);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 64-color encoding (each pixel in block is a different color) */
    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color block encoding: each 4x4 block is a different color */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_le16(&s->stream_ptr);
            P[1] = bytestream2_get_le16(&s->stream_ptr);
        }

        for (x = 0; x < 8; x++)
            pixel_ptr[x] = P[x >> 2];
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t pix;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 1-color encoding: the whole block is 1 solid color */
    pix = bytestream2_get_le16(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = pix;
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};

static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};

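/* Notes on the 16bpp table above: the raw pixels are RGB555, so the unused
 * top bit of each color value serves as the mode flag that the 8bpp opcodes
 * express through the ordering of palette indices (hence the (P[n] & 0x8000)
 * tests); opcode 0x6 becomes an expanded-range copy from the second-last
 * frame, and the 0xF slot falls back to the opcode 0x1 handler. */
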
static void ipvideo_decode_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char opcode;
    int ret;
    BitstreamContext bc;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);

        s->stride = frame->linesize[0];
    } else {
        s->stride = frame->linesize[0] >> 1;
        s->mv_ptr = s->stream_ptr;
        bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                   + (s->avctx->width - 8) * (1 + s->is_16bpp);
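    /* upper_motion_limit_offset is the byte offset of the top-left pixel of
     * the bottom-right-most 8x8 block; copy_from() rejects any motion source
     * beyond it so an 8x8 copy cannot read past the end of the frame. */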

    bitstream_init8(&bc, s->decoding_map, s->decoding_map_size);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = bitstream_read(&bc, 4);

            ff_dlog(s->avctx,
                    "  block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
                    x, y, opcode, bytestream2_tell(&s->stream_ptr));

            if (!s->is_16bpp) {
                s->pixel_ptr = frame->data[0] + x
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block[opcode](s, frame);
            } else {
                s->pixel_ptr = frame->data[0] + x*2
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block16[opcode](s, frame);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                       s->avctx->frame_number, x, y);
                return;
            }
        }
    }
    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_ERROR,
               "Interplay video: decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}

static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;

    ff_hpeldsp_init(&s->hdsp, avctx->flags);

    s->last_frame        = av_frame_alloc();
    s->second_last_frame = av_frame_alloc();
    if (!s->last_frame || !s->second_last_frame) {
        av_frame_free(&s->last_frame);
        av_frame_free(&s->second_last_frame);
        return AVERROR(ENOMEM);
    }

    return 0;
}

static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *got_frame,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVFrame *frame = data;
    int ret;

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);
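    /* e.g. for hypothetical 320x200 dimensions this is 320 * 200 / 128 = 500
     * bytes of decoding map, followed in the same packet by the opcode data */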

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    bytestream2_init(&s->stream_ptr, buf + s->decoding_map_size,
                     buf_size - s->decoding_map_size);

    if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "  Interplay Video: get_buffer() failed\n");
        return ret;
    }

    if (!s->is_16bpp) {
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
        if (pal) {
            frame->palette_has_changed = 1;
            memcpy(s->pal, pal, AVPALETTE_SIZE);
        }
    }

    ipvideo_decode_opcodes(s, frame);

    *got_frame = 1;

    /* shuffle frames */
    av_frame_unref(s->second_last_frame);
    FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
    if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
        return ret;

    /* report that the buffer was completely consumed */
    return buf_size;
}

static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    av_frame_free(&s->last_frame);
    av_frame_free(&s->second_last_frame);

    return 0;
}

AVCodec ff_interplay_video_decoder = {
    .name           = "interplayvideo",
    .long_name      = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    .decode         = ipvideo_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
};