// Provenance: chromium-blink-merge.git / media/base/video_frame_unittest.cc
// (blob 405c1e969b3abd6fdabd2a0ef2671758952cfdab; commit subject:
// "Remove existing Skia suppressions")
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
5 #include "media/base/video_frame.h"
7 #include "base/bind.h"
8 #include "base/callback_helpers.h"
9 #include "base/format_macros.h"
10 #include "base/memory/aligned_memory.h"
11 #include "base/memory/scoped_ptr.h"
12 #include "base/strings/stringprintf.h"
13 #include "gpu/command_buffer/common/mailbox_holder.h"
14 #include "media/base/buffers.h"
15 #include "media/base/yuv_convert.h"
16 #include "testing/gtest/include/gtest/gtest.h"
18 namespace media {
20 using base::MD5DigestToBase16;
22 // Helper function that initializes a YV12 frame with white and black scan
23 // lines based on the |white_to_black| parameter. If 0, then the entire
24 // frame will be black, if 1 then the entire frame will be white.
25 void InitializeYV12Frame(VideoFrame* frame, double white_to_black) {
26 EXPECT_EQ(VideoFrame::YV12, frame->format());
27 const int first_black_row =
28 static_cast<int>(frame->coded_size().height() * white_to_black);
29 uint8* y_plane = frame->data(VideoFrame::kYPlane);
30 for (int row = 0; row < frame->coded_size().height(); ++row) {
31 int color = (row < first_black_row) ? 0xFF : 0x00;
32 memset(y_plane, color, frame->stride(VideoFrame::kYPlane));
33 y_plane += frame->stride(VideoFrame::kYPlane);
35 uint8* u_plane = frame->data(VideoFrame::kUPlane);
36 uint8* v_plane = frame->data(VideoFrame::kVPlane);
37 for (int row = 0; row < frame->coded_size().height(); row += 2) {
38 memset(u_plane, 0x80, frame->stride(VideoFrame::kUPlane));
39 memset(v_plane, 0x80, frame->stride(VideoFrame::kVPlane));
40 u_plane += frame->stride(VideoFrame::kUPlane);
41 v_plane += frame->stride(VideoFrame::kVPlane);
45 // Given a |yv12_frame| this method converts the YV12 frame to RGBA and
46 // makes sure that all the pixels of the RBG frame equal |expect_rgb_color|.
47 void ExpectFrameColor(media::VideoFrame* yv12_frame, uint32 expect_rgb_color) {
48 ASSERT_EQ(VideoFrame::YV12, yv12_frame->format());
49 ASSERT_EQ(yv12_frame->stride(VideoFrame::kUPlane),
50 yv12_frame->stride(VideoFrame::kVPlane));
51 ASSERT_EQ(
52 yv12_frame->coded_size().width() & (VideoFrame::kFrameSizeAlignment - 1),
53 0);
54 ASSERT_EQ(
55 yv12_frame->coded_size().height() & (VideoFrame::kFrameSizeAlignment - 1),
56 0);
58 size_t bytes_per_row = yv12_frame->coded_size().width() * 4u;
59 uint8* rgb_data = reinterpret_cast<uint8*>(
60 base::AlignedAlloc(bytes_per_row * yv12_frame->coded_size().height() +
61 VideoFrame::kFrameSizePadding,
62 VideoFrame::kFrameAddressAlignment));
64 media::ConvertYUVToRGB32(yv12_frame->data(VideoFrame::kYPlane),
65 yv12_frame->data(VideoFrame::kUPlane),
66 yv12_frame->data(VideoFrame::kVPlane),
67 rgb_data,
68 yv12_frame->coded_size().width(),
69 yv12_frame->coded_size().height(),
70 yv12_frame->stride(VideoFrame::kYPlane),
71 yv12_frame->stride(VideoFrame::kUPlane),
72 bytes_per_row,
73 media::YV12);
75 for (int row = 0; row < yv12_frame->coded_size().height(); ++row) {
76 uint32* rgb_row_data = reinterpret_cast<uint32*>(
77 rgb_data + (bytes_per_row * row));
78 for (int col = 0; col < yv12_frame->coded_size().width(); ++col) {
79 SCOPED_TRACE(base::StringPrintf("Checking (%d, %d)", row, col));
80 EXPECT_EQ(expect_rgb_color, rgb_row_data[col]);
84 base::AlignedFree(rgb_data);
87 // Fill each plane to its reported extents and verify accessors report non
88 // zero values. Additionally, for the first plane verify the rows and
89 // row_bytes values are correct.
90 void ExpectFrameExtents(VideoFrame::Format format, const char* expected_hash) {
91 const unsigned char kFillByte = 0x80;
92 const int kWidth = 61;
93 const int kHeight = 31;
94 const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
96 gfx::Size size(kWidth, kHeight);
97 scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
98 format, size, gfx::Rect(size), size, kTimestamp);
99 ASSERT_TRUE(frame.get());
101 int planes = VideoFrame::NumPlanes(format);
102 for (int plane = 0; plane < planes; plane++) {
103 SCOPED_TRACE(base::StringPrintf("Checking plane %d", plane));
104 EXPECT_TRUE(frame->data(plane));
105 EXPECT_TRUE(frame->stride(plane));
106 EXPECT_TRUE(frame->rows(plane));
107 EXPECT_TRUE(frame->row_bytes(plane));
109 memset(frame->data(plane), kFillByte,
110 frame->stride(plane) * frame->rows(plane));
113 base::MD5Context context;
114 base::MD5Init(&context);
115 frame->HashFrameForTesting(&context);
116 base::MD5Digest digest;
117 base::MD5Final(&digest, &context);
118 EXPECT_EQ(MD5DigestToBase16(digest), expected_hash);
121 TEST(VideoFrame, CreateFrame) {
122 const int kWidth = 64;
123 const int kHeight = 48;
124 const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
126 // Create a YV12 Video Frame.
127 gfx::Size size(kWidth, kHeight);
128 scoped_refptr<media::VideoFrame> frame =
129 VideoFrame::CreateFrame(media::VideoFrame::YV12, size, gfx::Rect(size),
130 size, kTimestamp);
131 ASSERT_TRUE(frame.get());
133 // Test VideoFrame implementation.
134 EXPECT_EQ(media::VideoFrame::YV12, frame->format());
136 SCOPED_TRACE("");
137 InitializeYV12Frame(frame.get(), 0.0f);
138 ExpectFrameColor(frame.get(), 0xFF000000);
140 base::MD5Digest digest;
141 base::MD5Context context;
142 base::MD5Init(&context);
143 frame->HashFrameForTesting(&context);
144 base::MD5Final(&digest, &context);
145 EXPECT_EQ(MD5DigestToBase16(digest), "9065c841d9fca49186ef8b4ef547e79b");
147 SCOPED_TRACE("");
148 InitializeYV12Frame(frame.get(), 1.0f);
149 ExpectFrameColor(frame.get(), 0xFFFFFFFF);
151 base::MD5Init(&context);
152 frame->HashFrameForTesting(&context);
153 base::MD5Final(&digest, &context);
154 EXPECT_EQ(MD5DigestToBase16(digest), "911991d51438ad2e1a40ed5f6fc7c796");
156 // Test an empty frame.
157 frame = VideoFrame::CreateEOSFrame();
158 EXPECT_TRUE(
159 frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
162 TEST(VideoFrame, CreateBlackFrame) {
163 const int kWidth = 2;
164 const int kHeight = 2;
165 const uint8 kExpectedYRow[] = { 0, 0 };
166 const uint8 kExpectedUVRow[] = { 128 };
168 scoped_refptr<media::VideoFrame> frame =
169 VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
170 ASSERT_TRUE(frame.get());
171 EXPECT_TRUE(frame->IsMappable());
173 // Test basic properties.
174 EXPECT_EQ(0, frame->timestamp().InMicroseconds());
175 EXPECT_FALSE(
176 frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
178 // Test |frame| properties.
179 EXPECT_EQ(VideoFrame::YV12, frame->format());
180 EXPECT_EQ(kWidth, frame->coded_size().width());
181 EXPECT_EQ(kHeight, frame->coded_size().height());
183 // Test frames themselves.
184 uint8* y_plane = frame->data(VideoFrame::kYPlane);
185 for (int y = 0; y < frame->coded_size().height(); ++y) {
186 EXPECT_EQ(0, memcmp(kExpectedYRow, y_plane, arraysize(kExpectedYRow)));
187 y_plane += frame->stride(VideoFrame::kYPlane);
190 uint8* u_plane = frame->data(VideoFrame::kUPlane);
191 uint8* v_plane = frame->data(VideoFrame::kVPlane);
192 for (int y = 0; y < frame->coded_size().height() / 2; ++y) {
193 EXPECT_EQ(0, memcmp(kExpectedUVRow, u_plane, arraysize(kExpectedUVRow)));
194 EXPECT_EQ(0, memcmp(kExpectedUVRow, v_plane, arraysize(kExpectedUVRow)));
195 u_plane += frame->stride(VideoFrame::kUPlane);
196 v_plane += frame->stride(VideoFrame::kVPlane);
200 static void FrameNoLongerNeededCallback(
201 const scoped_refptr<media::VideoFrame>& frame,
202 bool* triggered) {
203 *triggered = true;
206 TEST(VideoFrame, WrapVideoFrame) {
207 const int kWidth = 4;
208 const int kHeight = 4;
209 scoped_refptr<media::VideoFrame> frame;
210 bool done_callback_was_run = false;
212 scoped_refptr<media::VideoFrame> wrapped_frame =
213 VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
214 ASSERT_TRUE(wrapped_frame.get());
216 gfx::Rect visible_rect(1, 1, 1, 1);
217 gfx::Size natural_size = visible_rect.size();
218 frame = media::VideoFrame::WrapVideoFrame(
219 wrapped_frame, visible_rect, natural_size);
220 frame->AddDestructionObserver(
221 base::Bind(&FrameNoLongerNeededCallback, wrapped_frame,
222 &done_callback_was_run));
223 EXPECT_EQ(wrapped_frame->coded_size(), frame->coded_size());
224 EXPECT_EQ(wrapped_frame->data(media::VideoFrame::kYPlane),
225 frame->data(media::VideoFrame::kYPlane));
226 EXPECT_NE(wrapped_frame->visible_rect(), frame->visible_rect());
227 EXPECT_EQ(visible_rect, frame->visible_rect());
228 EXPECT_NE(wrapped_frame->natural_size(), frame->natural_size());
229 EXPECT_EQ(natural_size, frame->natural_size());
232 EXPECT_FALSE(done_callback_was_run);
233 frame = NULL;
234 EXPECT_TRUE(done_callback_was_run);
237 // Ensure each frame is properly sized and allocated. Will trigger OOB reads
238 // and writes as well as incorrect frame hashes otherwise.
239 TEST(VideoFrame, CheckFrameExtents) {
240 // Each call consists of a VideoFrame::Format and the expected hash of all
241 // planes if filled with kFillByte (defined in ExpectFrameExtents).
242 ExpectFrameExtents(VideoFrame::YV12, "8e5d54cb23cd0edca111dd35ffb6ff05");
243 ExpectFrameExtents(VideoFrame::YV16, "cce408a044b212db42a10dfec304b3ef");
246 static void TextureCallback(uint32* called_sync_point,
247 uint32 release_sync_point) {
248 *called_sync_point = release_sync_point;
251 // Verify the gpu::MailboxHolder::ReleaseCallback is called when VideoFrame is
252 // destroyed with the default release sync point.
253 TEST(VideoFrame, TextureNoLongerNeededCallbackIsCalled) {
254 uint32 called_sync_point = 1;
257 scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTexture(
258 VideoFrame::ARGB,
259 gpu::MailboxHolder(gpu::Mailbox::Generate(), 5, 0 /* sync_point */),
260 base::Bind(&TextureCallback, &called_sync_point),
261 gfx::Size(10, 10), // coded_size
262 gfx::Rect(10, 10), // visible_rect
263 gfx::Size(10, 10), // natural_size
264 base::TimeDelta()); // timestamp
265 EXPECT_EQ(VideoFrame::ARGB, frame->format());
266 EXPECT_EQ(VideoFrame::STORAGE_OPAQUE, frame->storage_type());
267 EXPECT_TRUE(frame->HasTextures());
269 // Nobody set a sync point to |frame|, so |frame| set |called_sync_point| to 0
270 // as default value.
271 EXPECT_EQ(0u, called_sync_point);
274 namespace {
276 class SyncPointClientImpl : public VideoFrame::SyncPointClient {
277 public:
278 explicit SyncPointClientImpl(uint32 sync_point) : sync_point_(sync_point) {}
279 ~SyncPointClientImpl() override {}
280 uint32 InsertSyncPoint() override { return sync_point_; }
281 void WaitSyncPoint(uint32 sync_point) override {}
283 private:
284 uint32 sync_point_;
287 } // namespace
289 // Verify the gpu::MailboxHolder::ReleaseCallback is called when VideoFrame is
290 // destroyed with the release sync point, which was updated by clients.
291 // (i.e. the compositor, webgl).
292 TEST(VideoFrame,
293 TexturesNoLongerNeededCallbackAfterTakingAndReleasingMailboxes) {
294 const int kPlanesNum = 3;
295 gpu::Mailbox mailbox[kPlanesNum];
296 for (int i = 0; i < kPlanesNum; ++i) {
297 mailbox[i].name[0] = 50 + 1;
300 uint32 sync_point = 7;
301 uint32 target = 9;
302 uint32 release_sync_point = 111;
303 uint32 called_sync_point = 0;
305 scoped_refptr<VideoFrame> frame = VideoFrame::WrapYUV420NativeTextures(
306 gpu::MailboxHolder(mailbox[VideoFrame::kYPlane], target, sync_point),
307 gpu::MailboxHolder(mailbox[VideoFrame::kUPlane], target, sync_point),
308 gpu::MailboxHolder(mailbox[VideoFrame::kVPlane], target, sync_point),
309 base::Bind(&TextureCallback, &called_sync_point),
310 gfx::Size(10, 10), // coded_size
311 gfx::Rect(10, 10), // visible_rect
312 gfx::Size(10, 10), // natural_size
313 base::TimeDelta()); // timestamp
315 EXPECT_EQ(VideoFrame::STORAGE_OPAQUE, frame->storage_type());
316 EXPECT_EQ(VideoFrame::I420, frame->format());
317 EXPECT_EQ(3u, VideoFrame::NumPlanes(frame->format()));
318 EXPECT_TRUE(frame->HasTextures());
319 for (size_t i = 0; i < VideoFrame::NumPlanes(frame->format()); ++i) {
320 const gpu::MailboxHolder& mailbox_holder = frame->mailbox_holder(i);
321 EXPECT_EQ(mailbox[i].name[0], mailbox_holder.mailbox.name[0]);
322 EXPECT_EQ(target, mailbox_holder.texture_target);
323 EXPECT_EQ(sync_point, mailbox_holder.sync_point);
326 SyncPointClientImpl client(release_sync_point);
327 frame->UpdateReleaseSyncPoint(&client);
328 EXPECT_EQ(sync_point,
329 frame->mailbox_holder(VideoFrame::kYPlane).sync_point);
331 EXPECT_EQ(release_sync_point, called_sync_point);
334 TEST(VideoFrame, ZeroInitialized) {
335 const int kWidth = 64;
336 const int kHeight = 48;
337 const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
339 gfx::Size size(kWidth, kHeight);
340 scoped_refptr<media::VideoFrame> frame = VideoFrame::CreateFrame(
341 media::VideoFrame::YV12, size, gfx::Rect(size), size, kTimestamp);
343 for (size_t i = 0; i < VideoFrame::NumPlanes(frame->format()); ++i)
344 EXPECT_EQ(0, frame->data(i)[0]);
347 TEST(VideoFrameMetadata, SetAndThenGetAllKeysForAllTypes) {
348 VideoFrameMetadata metadata;
350 for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) {
351 const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i);
353 EXPECT_FALSE(metadata.HasKey(key));
354 metadata.SetBoolean(key, true);
355 EXPECT_TRUE(metadata.HasKey(key));
356 bool bool_value = false;
357 EXPECT_TRUE(metadata.GetBoolean(key, &bool_value));
358 EXPECT_EQ(true, bool_value);
359 metadata.Clear();
361 EXPECT_FALSE(metadata.HasKey(key));
362 metadata.SetInteger(key, i);
363 EXPECT_TRUE(metadata.HasKey(key));
364 int int_value = -999;
365 EXPECT_TRUE(metadata.GetInteger(key, &int_value));
366 EXPECT_EQ(i, int_value);
367 metadata.Clear();
369 EXPECT_FALSE(metadata.HasKey(key));
370 metadata.SetDouble(key, 3.14 * i);
371 EXPECT_TRUE(metadata.HasKey(key));
372 double double_value = -999.99;
373 EXPECT_TRUE(metadata.GetDouble(key, &double_value));
374 EXPECT_EQ(3.14 * i, double_value);
375 metadata.Clear();
377 EXPECT_FALSE(metadata.HasKey(key));
378 metadata.SetString(key, base::StringPrintf("\xfe%d\xff", i));
379 EXPECT_TRUE(metadata.HasKey(key));
380 std::string string_value;
381 EXPECT_TRUE(metadata.GetString(key, &string_value));
382 EXPECT_EQ(base::StringPrintf("\xfe%d\xff", i), string_value);
383 metadata.Clear();
385 EXPECT_FALSE(metadata.HasKey(key));
386 metadata.SetTimeDelta(key, base::TimeDelta::FromInternalValue(42 + i));
387 EXPECT_TRUE(metadata.HasKey(key));
388 base::TimeDelta delta_value;
389 EXPECT_TRUE(metadata.GetTimeDelta(key, &delta_value));
390 EXPECT_EQ(base::TimeDelta::FromInternalValue(42 + i), delta_value);
391 metadata.Clear();
393 EXPECT_FALSE(metadata.HasKey(key));
394 metadata.SetTimeTicks(key, base::TimeTicks::FromInternalValue(~(0LL) + i));
395 EXPECT_TRUE(metadata.HasKey(key));
396 base::TimeTicks ticks_value;
397 EXPECT_TRUE(metadata.GetTimeTicks(key, &ticks_value));
398 EXPECT_EQ(base::TimeTicks::FromInternalValue(~(0LL) + i), ticks_value);
399 metadata.Clear();
401 EXPECT_FALSE(metadata.HasKey(key));
402 metadata.SetValue(key, base::Value::CreateNullValue());
403 EXPECT_TRUE(metadata.HasKey(key));
404 const base::Value* const null_value = metadata.GetValue(key);
405 EXPECT_TRUE(null_value);
406 EXPECT_EQ(base::Value::TYPE_NULL, null_value->GetType());
407 metadata.Clear();
411 TEST(VideoFrameMetadata, PassMetadataViaIntermediary) {
412 VideoFrameMetadata expected;
413 for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) {
414 const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i);
415 expected.SetInteger(key, i);
418 base::DictionaryValue tmp;
419 expected.MergeInternalValuesInto(&tmp);
420 EXPECT_EQ(static_cast<size_t>(VideoFrameMetadata::NUM_KEYS), tmp.size());
422 VideoFrameMetadata result;
423 result.MergeInternalValuesFrom(tmp);
425 for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) {
426 const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i);
427 int value = -1;
428 EXPECT_TRUE(result.GetInteger(key, &value));
429 EXPECT_EQ(i, value);
433 } // namespace media