media/base/video_frame_unittest.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/base/video_frame.h"

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/format_macros.h"
#include "base/memory/aligned_memory.h"
#include "base/memory/scoped_ptr.h"
#include "base/strings/stringprintf.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/yuv_convert.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace media {

using base::MD5DigestToBase16;

// Helper function that initializes a YV12 frame with white and black scan
// lines based on the |white_to_black| parameter. If 0, then the entire
// frame will be black, if 1 then the entire frame will be white.
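// For example, a |white_to_black| of 0.5 leaves the top half of the Y plane
// white (0xFF) and the bottom half black (0x00); the chroma planes are always
// filled with the neutral value 0x80.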
void InitializeYV12Frame(VideoFrame* frame, double white_to_black) {
  EXPECT_EQ(PIXEL_FORMAT_YV12, frame->format());
  const int first_black_row =
      static_cast<int>(frame->coded_size().height() * white_to_black);
  uint8* y_plane = frame->data(VideoFrame::kYPlane);
  for (int row = 0; row < frame->coded_size().height(); ++row) {
    int color = (row < first_black_row) ? 0xFF : 0x00;
    memset(y_plane, color, frame->stride(VideoFrame::kYPlane));
    y_plane += frame->stride(VideoFrame::kYPlane);
  }
  uint8* u_plane = frame->data(VideoFrame::kUPlane);
  uint8* v_plane = frame->data(VideoFrame::kVPlane);
  for (int row = 0; row < frame->coded_size().height(); row += 2) {
    memset(u_plane, 0x80, frame->stride(VideoFrame::kUPlane));
    memset(v_plane, 0x80, frame->stride(VideoFrame::kVPlane));
    u_plane += frame->stride(VideoFrame::kUPlane);
    v_plane += frame->stride(VideoFrame::kVPlane);
  }
}

// Given a |yv12_frame| this method converts the YV12 frame to RGBA and
// makes sure that all the pixels of the RGB frame equal |expect_rgb_color|.
void ExpectFrameColor(media::VideoFrame* yv12_frame, uint32 expect_rgb_color) {
  ASSERT_EQ(PIXEL_FORMAT_YV12, yv12_frame->format());
  ASSERT_EQ(yv12_frame->stride(VideoFrame::kUPlane),
            yv12_frame->stride(VideoFrame::kVPlane));
  ASSERT_EQ(
      yv12_frame->coded_size().width() & (VideoFrame::kFrameSizeAlignment - 1),
      0);
  ASSERT_EQ(
      yv12_frame->coded_size().height() & (VideoFrame::kFrameSizeAlignment - 1),
      0);

  size_t bytes_per_row = yv12_frame->coded_size().width() * 4u;
  uint8* rgb_data = reinterpret_cast<uint8*>(
      base::AlignedAlloc(bytes_per_row * yv12_frame->coded_size().height() +
                             VideoFrame::kFrameSizePadding,
                         VideoFrame::kFrameAddressAlignment));
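  // The extra kFrameSizePadding bytes and the kFrameAddressAlignment mirror
  // what VideoFrame allocates for its own buffers, presumably so that any
  // SIMD-optimized conversion path can safely over-read past the last row.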

  media::ConvertYUVToRGB32(yv12_frame->data(VideoFrame::kYPlane),
                           yv12_frame->data(VideoFrame::kUPlane),
                           yv12_frame->data(VideoFrame::kVPlane),
                           rgb_data,
                           yv12_frame->coded_size().width(),
                           yv12_frame->coded_size().height(),
                           yv12_frame->stride(VideoFrame::kYPlane),
                           yv12_frame->stride(VideoFrame::kUPlane),
                           bytes_per_row,
                           media::YV12);

  for (int row = 0; row < yv12_frame->coded_size().height(); ++row) {
    uint32* rgb_row_data = reinterpret_cast<uint32*>(
        rgb_data + (bytes_per_row * row));
    for (int col = 0; col < yv12_frame->coded_size().width(); ++col) {
      SCOPED_TRACE(base::StringPrintf("Checking (%d, %d)", row, col));
      EXPECT_EQ(expect_rgb_color, rgb_row_data[col]);
    }
  }

  base::AlignedFree(rgb_data);
}

// Fill each plane to its reported extents and verify the data, stride, rows,
// and row_bytes accessors all report non-zero values, then hash the frame
// contents and compare against |expected_hash|.
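// An undersized allocation shows up either as an out-of-bounds write while the
// planes are filled or as a hash mismatch, so this doubles as an allocation
// sanity check.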
void ExpectFrameExtents(VideoPixelFormat format, const char* expected_hash) {
  const unsigned char kFillByte = 0x80;
  const int kWidth = 61;
  const int kHeight = 31;
  const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);

  gfx::Size size(kWidth, kHeight);
  scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
      format, size, gfx::Rect(size), size, kTimestamp);
  ASSERT_TRUE(frame.get());

  int planes = VideoFrame::NumPlanes(format);
  for (int plane = 0; plane < planes; plane++) {
    SCOPED_TRACE(base::StringPrintf("Checking plane %d", plane));
    EXPECT_TRUE(frame->data(plane));
    EXPECT_TRUE(frame->stride(plane));
    EXPECT_TRUE(frame->rows(plane));
    EXPECT_TRUE(frame->row_bytes(plane));

    memset(frame->data(plane), kFillByte,
           frame->stride(plane) * frame->rows(plane));
  }

  base::MD5Context context;
  base::MD5Init(&context);
  VideoFrame::HashFrameForTesting(&context, frame);
  base::MD5Digest digest;
  base::MD5Final(&digest, &context);
  EXPECT_EQ(MD5DigestToBase16(digest), expected_hash);
}

TEST(VideoFrame, CreateFrame) {
  const int kWidth = 64;
  const int kHeight = 48;
  const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);

  // Create a YV12 Video Frame.
  gfx::Size size(kWidth, kHeight);
  scoped_refptr<media::VideoFrame> frame = VideoFrame::CreateFrame(
      media::PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, kTimestamp);
  ASSERT_TRUE(frame.get());

  // Test VideoFrame implementation.
  EXPECT_EQ(media::PIXEL_FORMAT_YV12, frame->format());
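  // Each initialize-and-check pair below runs in its own block so that the
  // empty SCOPED_TRACE applies only to that pair.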
  {
    SCOPED_TRACE("");
    InitializeYV12Frame(frame.get(), 0.0f);
    ExpectFrameColor(frame.get(), 0xFF000000);
  }
  base::MD5Digest digest;
  base::MD5Context context;
  base::MD5Init(&context);
  VideoFrame::HashFrameForTesting(&context, frame);
  base::MD5Final(&digest, &context);
  EXPECT_EQ(MD5DigestToBase16(digest), "9065c841d9fca49186ef8b4ef547e79b");
  {
    SCOPED_TRACE("");
    InitializeYV12Frame(frame.get(), 1.0f);
    ExpectFrameColor(frame.get(), 0xFFFFFFFF);
  }
  base::MD5Init(&context);
  VideoFrame::HashFrameForTesting(&context, frame);
  base::MD5Final(&digest, &context);
  EXPECT_EQ(MD5DigestToBase16(digest), "911991d51438ad2e1a40ed5f6fc7c796");

  // Test an empty frame.
  frame = VideoFrame::CreateEOSFrame();
  EXPECT_TRUE(
      frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
}

TEST(VideoFrame, CreateZeroInitializedFrame) {
  const int kWidth = 2;
  const int kHeight = 2;
  const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);

  // Create a YV12 Video Frame.
  gfx::Size size(kWidth, kHeight);
  scoped_refptr<media::VideoFrame> frame =
      VideoFrame::CreateZeroInitializedFrame(media::PIXEL_FORMAT_YV12, size,
                                             gfx::Rect(size), size, kTimestamp);
  ASSERT_TRUE(frame.get());
  EXPECT_TRUE(frame->IsMappable());

  // Verify that frame is initialized with zeros.
  // TODO(emircan): Check all the contents when we know the exact size of the
  // allocated buffer.
  for (size_t i = 0; i < VideoFrame::NumPlanes(frame->format()); ++i)
    EXPECT_EQ(0, frame->data(i)[0]);
}

TEST(VideoFrame, CreateBlackFrame) {
  const int kWidth = 2;
  const int kHeight = 2;
  const uint8 kExpectedYRow[] = { 0, 0 };
  const uint8 kExpectedUVRow[] = { 128 };
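  // In YV12, black is Y = 0 with neutral chroma U = V = 128. Chroma is
  // subsampled 2x2, so a 2x2 frame carries a single U and a single V sample
  // per chroma row, hence the one-byte expected UV row.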

  scoped_refptr<media::VideoFrame> frame =
      VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
  ASSERT_TRUE(frame.get());
  EXPECT_TRUE(frame->IsMappable());

  // Test basic properties.
  EXPECT_EQ(0, frame->timestamp().InMicroseconds());
  EXPECT_FALSE(
      frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));

  // Test |frame| properties.
  EXPECT_EQ(PIXEL_FORMAT_YV12, frame->format());
  EXPECT_EQ(kWidth, frame->coded_size().width());
  EXPECT_EQ(kHeight, frame->coded_size().height());

  // Test frames themselves.
  uint8* y_plane = frame->data(VideoFrame::kYPlane);
  for (int y = 0; y < frame->coded_size().height(); ++y) {
    EXPECT_EQ(0, memcmp(kExpectedYRow, y_plane, arraysize(kExpectedYRow)));
    y_plane += frame->stride(VideoFrame::kYPlane);
  }

  uint8* u_plane = frame->data(VideoFrame::kUPlane);
  uint8* v_plane = frame->data(VideoFrame::kVPlane);
  for (int y = 0; y < frame->coded_size().height() / 2; ++y) {
    EXPECT_EQ(0, memcmp(kExpectedUVRow, u_plane, arraysize(kExpectedUVRow)));
    EXPECT_EQ(0, memcmp(kExpectedUVRow, v_plane, arraysize(kExpectedUVRow)));
    u_plane += frame->stride(VideoFrame::kUPlane);
    v_plane += frame->stride(VideoFrame::kVPlane);
  }
}

static void FrameNoLongerNeededCallback(
    const scoped_refptr<media::VideoFrame>& frame,
    bool* triggered) {
  *triggered = true;
}

TEST(VideoFrame, WrapVideoFrame) {
  const int kWidth = 4;
  const int kHeight = 4;
  scoped_refptr<media::VideoFrame> frame;
  bool done_callback_was_run = false;

  scoped_refptr<media::VideoFrame> wrapped_frame =
      VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
  ASSERT_TRUE(wrapped_frame.get());

  gfx::Rect visible_rect(1, 1, 1, 1);
  gfx::Size natural_size = visible_rect.size();
  frame = media::VideoFrame::WrapVideoFrame(
      wrapped_frame, visible_rect, natural_size);
  frame->AddDestructionObserver(
      base::Bind(&FrameNoLongerNeededCallback, wrapped_frame,
                 &done_callback_was_run));
  EXPECT_EQ(wrapped_frame->coded_size(), frame->coded_size());
  EXPECT_EQ(wrapped_frame->data(media::VideoFrame::kYPlane),
            frame->data(media::VideoFrame::kYPlane));
  EXPECT_NE(wrapped_frame->visible_rect(), frame->visible_rect());
  EXPECT_EQ(visible_rect, frame->visible_rect());
  EXPECT_NE(wrapped_frame->natural_size(), frame->natural_size());
  EXPECT_EQ(natural_size, frame->natural_size());
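
  // Dropping the wrapping |frame| is what signals that the wrapped frame is no
  // longer needed, so the destruction observer must fire only after the reset
  // below.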
  EXPECT_FALSE(done_callback_was_run);
  frame = NULL;
  EXPECT_TRUE(done_callback_was_run);
}

// Ensure each frame is properly sized and allocated. Will trigger OOB reads
// and writes as well as incorrect frame hashes otherwise.
TEST(VideoFrame, CheckFrameExtents) {
  // Each call consists of a Format and the expected hash of all
  // planes if filled with kFillByte (defined in ExpectFrameExtents).
  ExpectFrameExtents(PIXEL_FORMAT_YV12, "8e5d54cb23cd0edca111dd35ffb6ff05");
  ExpectFrameExtents(PIXEL_FORMAT_YV16, "cce408a044b212db42a10dfec304b3ef");
}

static void TextureCallback(uint32* called_sync_point,
                            uint32 release_sync_point) {
  *called_sync_point = release_sync_point;
}

// Verify the gpu::MailboxHolder::ReleaseCallback is called when VideoFrame is
// destroyed with the default release sync point.
TEST(VideoFrame, TextureNoLongerNeededCallbackIsCalled) {
  uint32 called_sync_point = 1;
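
  // The frame lives in a nested scope so that it is destroyed, and its release
  // callback runs, before the expectation at the end of the test.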
  {
    scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTexture(
        PIXEL_FORMAT_ARGB,
        gpu::MailboxHolder(gpu::Mailbox::Generate(), 5, 0 /* sync_point */),
        base::Bind(&TextureCallback, &called_sync_point),
        gfx::Size(10, 10),   // coded_size
        gfx::Rect(10, 10),   // visible_rect
        gfx::Size(10, 10),   // natural_size
        base::TimeDelta());  // timestamp
    EXPECT_EQ(PIXEL_FORMAT_ARGB, frame->format());
    EXPECT_EQ(VideoFrame::STORAGE_OPAQUE, frame->storage_type());
    EXPECT_TRUE(frame->HasTextures());
  }
  // Nobody set a release sync point on |frame|, so its destruction ran the
  // callback with the default value of 0, which is now in |called_sync_point|.
  EXPECT_EQ(0u, called_sync_point);
}

namespace {
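// A minimal SyncPointClient stub: it hands back a fixed sync point and ignores
// waits, which lets the test control the release sync point recorded on the
// frame.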
class SyncPointClientImpl : public VideoFrame::SyncPointClient {
 public:
  explicit SyncPointClientImpl(uint32 sync_point) : sync_point_(sync_point) {}
  ~SyncPointClientImpl() override {}
  uint32 InsertSyncPoint() override { return sync_point_; }
  void WaitSyncPoint(uint32 sync_point) override {}

 private:
  uint32 sync_point_;
};

}  // namespace

// Verify the gpu::MailboxHolder::ReleaseCallback is called when the VideoFrame
// is destroyed with the release sync point, which was updated by clients
// (e.g. the compositor or WebGL).
TEST(VideoFrame,
     TexturesNoLongerNeededCallbackAfterTakingAndReleasingMailboxes) {
  const int kPlanesNum = 3;
  gpu::Mailbox mailbox[kPlanesNum];
  for (int i = 0; i < kPlanesNum; ++i) {
    mailbox[i].name[0] = 50 + 1;
  }

  uint32 sync_point = 7;
  uint32 target = 9;
  uint32 release_sync_point = 111;
  uint32 called_sync_point = 0;
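
  // As above, the frame lives in a nested scope so that the release callback
  // has already run by the time |called_sync_point| is checked.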
  {
    scoped_refptr<VideoFrame> frame = VideoFrame::WrapYUV420NativeTextures(
        gpu::MailboxHolder(mailbox[VideoFrame::kYPlane], target, sync_point),
        gpu::MailboxHolder(mailbox[VideoFrame::kUPlane], target, sync_point),
        gpu::MailboxHolder(mailbox[VideoFrame::kVPlane], target, sync_point),
        base::Bind(&TextureCallback, &called_sync_point),
        gfx::Size(10, 10),   // coded_size
        gfx::Rect(10, 10),   // visible_rect
        gfx::Size(10, 10),   // natural_size
        base::TimeDelta());  // timestamp

    EXPECT_EQ(VideoFrame::STORAGE_OPAQUE, frame->storage_type());
    EXPECT_EQ(PIXEL_FORMAT_I420, frame->format());
    EXPECT_EQ(3u, VideoFrame::NumPlanes(frame->format()));
    EXPECT_TRUE(frame->HasTextures());
    for (size_t i = 0; i < VideoFrame::NumPlanes(frame->format()); ++i) {
      const gpu::MailboxHolder& mailbox_holder = frame->mailbox_holder(i);
      EXPECT_EQ(mailbox[i].name[0], mailbox_holder.mailbox.name[0]);
      EXPECT_EQ(target, mailbox_holder.texture_target);
      EXPECT_EQ(sync_point, mailbox_holder.sync_point);
    }

    SyncPointClientImpl client(release_sync_point);
    frame->UpdateReleaseSyncPoint(&client);
    EXPECT_EQ(sync_point,
              frame->mailbox_holder(VideoFrame::kYPlane).sync_point);
  }
  EXPECT_EQ(release_sync_point, called_sync_point);
}

TEST(VideoFrame, IsValidConfig_OddCodedSize) {
  // Odd sizes are valid for all formats. Odd sizes may be internally rounded
  // in VideoFrame::CreateFrame because VideoFrame owns the allocation and can
  // pad the requested coded_size to ensure the UV sample boundaries line up
  // with the Y plane after subsample scaling. See CreateFrame_OddWidth.
  gfx::Size odd_size(677, 288);

  // First choosing a format with sub-sampling for UV.
  EXPECT_TRUE(VideoFrame::IsValidConfig(
      PIXEL_FORMAT_I420, VideoFrame::STORAGE_OWNED_MEMORY, odd_size,
      gfx::Rect(odd_size), odd_size));

  // Next try a format with no sub-sampling for UV.
  EXPECT_TRUE(VideoFrame::IsValidConfig(
      PIXEL_FORMAT_YV24, VideoFrame::STORAGE_OWNED_MEMORY, odd_size,
      gfx::Rect(odd_size), odd_size));
}

TEST(VideoFrame, CreateFrame_OddWidth) {
  // Odd sizes are non-standard for YUV formats that subsample the UV, but they
  // do exist in the wild and should be gracefully handled by VideoFrame in
  // situations where VideoFrame allocates the YUV memory. See discussion in
  // crrev.com/1240833003
  const gfx::Size odd_size(677, 288);
  const base::TimeDelta kTimestamp = base::TimeDelta();

  // First create a frame that sub-samples UV.
  scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
      PIXEL_FORMAT_I420, odd_size, gfx::Rect(odd_size), odd_size, kTimestamp);
  ASSERT_TRUE(frame.get());
  // I420 aligns UV to every 2 Y pixels. Hence, 677 should be rounded to 678,
  // which is the nearest value such that width % 2 == 0.
  EXPECT_EQ(678, frame->coded_size().width());

  // Next create a frame that does not sub-sample UV.
  frame = VideoFrame::CreateFrame(PIXEL_FORMAT_YV24, odd_size,
                                  gfx::Rect(odd_size), odd_size, kTimestamp);
  ASSERT_TRUE(frame.get());
  // No sub-sampling for YV24 means an odd width can remain odd, since every
  // pixel in the Y plane has a corresponding pixel in the UV planes at the
  // same index.
  EXPECT_EQ(677, frame->coded_size().width());
}

TEST(VideoFrameMetadata, SetAndThenGetAllKeysForAllTypes) {
  VideoFrameMetadata metadata;
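  // For every key, store a value of each supported type, read it back, and
  // Clear() so that the next type starts from an empty metadata map.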
  for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) {
    const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i);

    EXPECT_FALSE(metadata.HasKey(key));
    metadata.SetBoolean(key, true);
    EXPECT_TRUE(metadata.HasKey(key));
    bool bool_value = false;
    EXPECT_TRUE(metadata.GetBoolean(key, &bool_value));
    EXPECT_EQ(true, bool_value);
    metadata.Clear();

    EXPECT_FALSE(metadata.HasKey(key));
    metadata.SetInteger(key, i);
    EXPECT_TRUE(metadata.HasKey(key));
    int int_value = -999;
    EXPECT_TRUE(metadata.GetInteger(key, &int_value));
    EXPECT_EQ(i, int_value);
    metadata.Clear();

    EXPECT_FALSE(metadata.HasKey(key));
    metadata.SetDouble(key, 3.14 * i);
    EXPECT_TRUE(metadata.HasKey(key));
    double double_value = -999.99;
    EXPECT_TRUE(metadata.GetDouble(key, &double_value));
    EXPECT_EQ(3.14 * i, double_value);
    metadata.Clear();

    EXPECT_FALSE(metadata.HasKey(key));
    metadata.SetString(key, base::StringPrintf("\xfe%d\xff", i));
    EXPECT_TRUE(metadata.HasKey(key));
    std::string string_value;
    EXPECT_TRUE(metadata.GetString(key, &string_value));
    EXPECT_EQ(base::StringPrintf("\xfe%d\xff", i), string_value);
    metadata.Clear();

    EXPECT_FALSE(metadata.HasKey(key));
    metadata.SetTimeDelta(key, base::TimeDelta::FromInternalValue(42 + i));
    EXPECT_TRUE(metadata.HasKey(key));
    base::TimeDelta delta_value;
    EXPECT_TRUE(metadata.GetTimeDelta(key, &delta_value));
    EXPECT_EQ(base::TimeDelta::FromInternalValue(42 + i), delta_value);
    metadata.Clear();

    EXPECT_FALSE(metadata.HasKey(key));
    metadata.SetTimeTicks(key, base::TimeTicks::FromInternalValue(~(0LL) + i));
    EXPECT_TRUE(metadata.HasKey(key));
    base::TimeTicks ticks_value;
    EXPECT_TRUE(metadata.GetTimeTicks(key, &ticks_value));
    EXPECT_EQ(base::TimeTicks::FromInternalValue(~(0LL) + i), ticks_value);
    metadata.Clear();

    EXPECT_FALSE(metadata.HasKey(key));
    metadata.SetValue(key, base::Value::CreateNullValue());
    EXPECT_TRUE(metadata.HasKey(key));
    const base::Value* const null_value = metadata.GetValue(key);
    EXPECT_TRUE(null_value);
    EXPECT_EQ(base::Value::TYPE_NULL, null_value->GetType());
    metadata.Clear();
  }
}

TEST(VideoFrameMetadata, PassMetadataViaIntermediary) {
  VideoFrameMetadata expected;
  for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) {
    const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i);
    expected.SetInteger(key, i);
  }

  base::DictionaryValue tmp;
  expected.MergeInternalValuesInto(&tmp);
  EXPECT_EQ(static_cast<size_t>(VideoFrameMetadata::NUM_KEYS), tmp.size());
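
  // Rebuilding a fresh VideoFrameMetadata from the dictionary should give back
  // exactly the integers stored above, one entry per key.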
  VideoFrameMetadata result;
  result.MergeInternalValuesFrom(tmp);

  for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) {
    const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i);
    int value = -1;
    EXPECT_TRUE(result.GetInteger(key, &value));
    EXPECT_EQ(i, value);
  }
}

}  // namespace media