/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 The FFmpeg project
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
/**
 * @file
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently supports PAL8 and RGB555 output colorspaces.
 *
 * An Interplay video frame consists of 2 parts: The decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */
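/* A rough sketch of the packet layout this decoder consumes (derived from
 * ipvideo_decode_frame() and ipvideo_decode_opcodes() below, not from the
 * format specification): the first width * height / (8 * 8 * 2) bytes are
 * the decoding map -- one 4-bit opcode per 8x8 block, two blocks per byte --
 * and the remainder is the opcode payload, which begins with a 14-byte
 * header that is skipped.  For a 320x200 frame that is (320/8) * (200/8)
 * = 1000 blocks, i.e. a 500-byte decoding map. */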
#include <string.h>

#define BITSTREAM_READER_LE
#include "avcodec.h"
#include "bitstream.h"
#include "bytestream.h"
#include "hpeldsp.h"
#include "internal.h"

#define PALETTE_COUNT 256
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    HpelDSPContext hdsp;
    AVFrame *second_last_frame;
    AVFrame *last_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    int is_16bpp;
    GetByteContext stream_ptr, mv_ptr;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

    uint32_t pal[256];
} IpvideoContext;
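/* Copy one 8x8 block of pixels into the frame being decoded from 'src',
 * offset by (delta_x, delta_y) relative to the current block position,
 * after checking that the source offset stays within the picture.
 * Shared by the motion-compensated opcodes (0x0-0x5 and the 16bpp 0x6). */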
static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - dst->data[0];
    int motion_offset  = current_offset + delta_y * dst->linesize[0]
                       + delta_x * (1 + s->is_16bpp);
    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
        return AVERROR_INVALIDDATA;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return AVERROR_INVALIDDATA;
    }
    if (!src->data[0]) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
        return AVERROR(EINVAL);
    }
    s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                            dst->linesize[0], 8);
    return 0;
}
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->last_frame, frame, 0, 0);
}
static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->second_last_frame, frame, 0, 0);
}
static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    ff_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}
static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    ff_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, frame, frame, x, y);
}
static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x  = -8 + BL;
    y  = -8 + BH;

    ff_dlog(NULL, " motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}
static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_dlog(NULL, " motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_byte(&s->stream_ptr);
                    P[1] = bytestream2_get_byte(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2]  = bytestream2_get_byte(&s->stream_ptr);
        P[3]  = bytestream2_get_byte(&s->stream_ptr);

        if (P[2] <= P[3]) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[8];
    int flags = 0;

    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
        vert = P[4] <= P[5];

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 4);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    for (y = 0; y < 8; y++) {
        bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char P[2];

    /* 4-color block encoding: each 4x4 block is a different color */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_byte(&s->stream_ptr);
            P[1] = bytestream2_get_byte(&s->stream_ptr);
        }
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    pix = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    sample[0] = bytestream2_get_byte(&s->stream_ptr);
    sample[1] = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}
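/* The *_16 variants below handle the RGB555 mode.  Pixels are 16-bit
 * little-endian values, so where the paletted handlers pick a sub-encoding
 * by comparing P[0] <= P[1], these use the (otherwise unused) top bit of
 * the first colour read instead, e.g. !(P[0] & 0x8000). */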
static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the second last frame using an expanded range */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_dlog(NULL, " motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_le16(&s->stream_ptr);
                    P[1] = bytestream2_get_le16(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2]  = bytestream2_get_le16(&s->stream_ptr);
        P[3]  = bytestream2_get_le16(&s->stream_ptr);

        if (!(P[2] & 0x8000)) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[8];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (!(P[0] & 0x8000)) {

        /* 4-color encoding for each quadrant */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y)
                    for (x = 0; x < 4; x++)
                        P[x] = bytestream2_get_le16(&s->stream_ptr);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        for (x = 4; x < 8; x++)
            P[x] = bytestream2_get_le16(&s->stream_ptr);
        vert = !(P[4] & 0x8000);

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 8);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 64-color encoding (each pixel in block is a different color) */
    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color block encoding: each 4x4 block is a different color */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_le16(&s->stream_ptr);
            P[1] = bytestream2_get_le16(&s->stream_ptr);
        }
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = P[x >> 2];
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t pix;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 1-color encoding: the whole block is 1 solid color */
    pix = bytestream2_get_le16(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = pix;
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};
static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};
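/* Opcode dispatch tables, indexed by the 4-bit value read from the decoding
 * map for each 8x8 block.  The 16bpp table shares the motion-compensation
 * handlers (0x0-0x5) with the paletted path and maps opcode 0xF onto the
 * 0x1 handler; there is no 16bpp equivalent of the dithered mode. */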
static void ipvideo_decode_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char opcode;
    int ret;
    BitstreamContext bc;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);

        s->stride = frame->linesize[0];
    } else {
        s->stride = frame->linesize[0] >> 1;
        s->mv_ptr = s->stream_ptr;
        bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                  + (s->avctx->width  - 8) * (1 + s->is_16bpp);

    bitstream_init8(&bc, s->decoding_map, s->decoding_map_size);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = bitstream_read(&bc, 4);

            ff_dlog(s->avctx,
                    " block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
                    x, y, opcode, bytestream2_tell(&s->stream_ptr));

            if (!s->is_16bpp) {
                s->pixel_ptr = frame->data[0] + x
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block[opcode](s, frame);
            } else {
                s->pixel_ptr = frame->data[0] + x*2
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block16[opcode](s, frame);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                       s->avctx->frame_number, x, y);
                return;
            }
        }
    }
    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_ERROR,
               "Interplay video: decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}
static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;

    ff_hpeldsp_init(&s->hdsp, avctx->flags);

    s->last_frame        = av_frame_alloc();
    s->second_last_frame = av_frame_alloc();
    if (!s->last_frame || !s->second_last_frame) {
        av_frame_free(&s->last_frame);
        av_frame_free(&s->second_last_frame);
        return AVERROR(ENOMEM);
    }

    return 0;
}
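/* Top-level decode call: split the packet into decoding map and opcode data,
 * decode all blocks into 'frame', then rotate the reference frames so that
 * later packets can copy from the last and second-last pictures. */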
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *got_frame,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVFrame *frame = data;
    int ret;

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    bytestream2_init(&s->stream_ptr, buf + s->decoding_map_size,
                     buf_size - s->decoding_map_size);

    if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0) {
        av_log(avctx, AV_LOG_ERROR, " Interplay Video: get_buffer() failed\n");
        return ret;
    }

    if (!s->is_16bpp) {
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
        if (pal) {
            frame->palette_has_changed = 1;
            memcpy(s->pal, pal, AVPALETTE_SIZE);
        }
    }

    ipvideo_decode_opcodes(s, frame);

    *got_frame = 1;

    /* shuffle frames */
    av_frame_unref(s->second_last_frame);
    FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
    if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
        return ret;

    /* report that the buffer was completely consumed */
    return buf_size;
}
static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    av_frame_free(&s->last_frame);
    av_frame_free(&s->second_last_frame);

    return 0;
}
AVCodec ff_interplay_video_decoder = {
    .name           = "interplayvideo",
    .long_name      = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    .decode         = ipvideo_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
};