[chromium-blink-merge.git] / media/base/video_frame_unittest.cc (blob 618d68f458f774de68fc2792e3e6693247308eae)
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/base/video_frame.h"

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/format_macros.h"
#include "base/memory/aligned_memory.h"
#include "base/memory/scoped_ptr.h"
#include "base/strings/stringprintf.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/buffers.h"
#include "media/base/yuv_convert.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {

using base::MD5DigestToBase16;
// Helper function that initializes a YV12 frame with white and black scan
// lines based on the |white_to_black| parameter. If 0, the entire frame is
// black; if 1, the entire frame is white.
void InitializeYV12Frame(VideoFrame* frame, double white_to_black) {
  EXPECT_EQ(VideoFrame::YV12, frame->format());
  int first_black_row = static_cast<int>(frame->coded_size().height() *
                                         white_to_black);
  uint8* y_plane = frame->data(VideoFrame::kYPlane);
  for (int row = 0; row < frame->coded_size().height(); ++row) {
    int color = (row < first_black_row) ? 0xFF : 0x00;
    memset(y_plane, color, frame->stride(VideoFrame::kYPlane));
    y_plane += frame->stride(VideoFrame::kYPlane);
  }
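
  // The chroma planes are half the luma height, hence the step of two below;
  // 0x80 is the neutral chroma value, so every generated frame stays
  // grayscale regardless of |white_to_black|.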
  uint8* u_plane = frame->data(VideoFrame::kUPlane);
  uint8* v_plane = frame->data(VideoFrame::kVPlane);
  for (int row = 0; row < frame->coded_size().height(); row += 2) {
    memset(u_plane, 0x80, frame->stride(VideoFrame::kUPlane));
    memset(v_plane, 0x80, frame->stride(VideoFrame::kVPlane));
    u_plane += frame->stride(VideoFrame::kUPlane);
    v_plane += frame->stride(VideoFrame::kVPlane);
  }
}
// Given a |yv12_frame|, this method converts the YV12 frame to RGBA and
// makes sure that all the pixels of the RGB frame equal |expect_rgb_color|.
void ExpectFrameColor(media::VideoFrame* yv12_frame, uint32 expect_rgb_color) {
  ASSERT_EQ(VideoFrame::YV12, yv12_frame->format());
  ASSERT_EQ(yv12_frame->stride(VideoFrame::kUPlane),
            yv12_frame->stride(VideoFrame::kVPlane));
  ASSERT_EQ(
      yv12_frame->coded_size().width() & (VideoFrame::kFrameSizeAlignment - 1),
      0);
  ASSERT_EQ(
      yv12_frame->coded_size().height() &
          (VideoFrame::kFrameSizeAlignment - 1),
      0);
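
  // Allocate an RGBA destination buffer spanning the coded size, reusing
  // VideoFrame's padding and address-alignment constants; this presumably
  // lets the optimized YUV-to-RGB conversion read and write whole rows
  // without running past the allocation.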
  size_t bytes_per_row = yv12_frame->coded_size().width() * 4u;
  uint8* rgb_data = reinterpret_cast<uint8*>(
      base::AlignedAlloc(bytes_per_row * yv12_frame->coded_size().height() +
                             VideoFrame::kFrameSizePadding,
                         VideoFrame::kFrameAddressAlignment));

  media::ConvertYUVToRGB32(yv12_frame->data(VideoFrame::kYPlane),
                           yv12_frame->data(VideoFrame::kUPlane),
                           yv12_frame->data(VideoFrame::kVPlane),
                           rgb_data,
                           yv12_frame->coded_size().width(),
                           yv12_frame->coded_size().height(),
                           yv12_frame->stride(VideoFrame::kYPlane),
                           yv12_frame->stride(VideoFrame::kUPlane),
                           bytes_per_row,
                           media::YV12);

  for (int row = 0; row < yv12_frame->coded_size().height(); ++row) {
    uint32* rgb_row_data = reinterpret_cast<uint32*>(
        rgb_data + (bytes_per_row * row));
    for (int col = 0; col < yv12_frame->coded_size().width(); ++col) {
      SCOPED_TRACE(
          base::StringPrintf("Checking (%d, %d)", row, col));
      EXPECT_EQ(expect_rgb_color, rgb_row_data[col]);
    }
  }

  base::AlignedFree(rgb_data);
}
// Fill each plane to its reported extents and verify the data, stride, rows,
// and row_bytes accessors all report non-zero values. The frame contents are
// then hashed and compared against |expected_hash|.
void ExpectFrameExtents(VideoFrame::Format format, const char* expected_hash) {
  const unsigned char kFillByte = 0x80;
  const int kWidth = 61;
  const int kHeight = 31;
  const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);

  gfx::Size size(kWidth, kHeight);
  scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
      format, size, gfx::Rect(size), size, kTimestamp);
  ASSERT_TRUE(frame.get());

  int planes = VideoFrame::NumPlanes(format);
  for (int plane = 0; plane < planes; plane++) {
    SCOPED_TRACE(base::StringPrintf("Checking plane %d", plane));
    EXPECT_TRUE(frame->data(plane));
    EXPECT_TRUE(frame->stride(plane));
    EXPECT_TRUE(frame->rows(plane));
    EXPECT_TRUE(frame->row_bytes(plane));

    memset(frame->data(plane), kFillByte,
           frame->stride(plane) * frame->rows(plane));
  }
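
  // Hash the filled planes; a digest mismatch against |expected_hash| points
  // at a wrong allocation size or misreported plane extents.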
  base::MD5Context context;
  base::MD5Init(&context);
  frame->HashFrameForTesting(&context);
  base::MD5Digest digest;
  base::MD5Final(&digest, &context);
  EXPECT_EQ(MD5DigestToBase16(digest), expected_hash);
}
TEST(VideoFrame, CreateFrame) {
  const int kWidth = 64;
  const int kHeight = 48;
  const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);

  // Create a YV12 Video Frame.
  gfx::Size size(kWidth, kHeight);
  scoped_refptr<media::VideoFrame> frame =
      VideoFrame::CreateFrame(media::VideoFrame::YV12, size, gfx::Rect(size),
                              size, kTimestamp);
  ASSERT_TRUE(frame.get());

  // Test VideoFrame implementation.
  EXPECT_EQ(media::VideoFrame::YV12, frame->format());
  {
    SCOPED_TRACE("");
    InitializeYV12Frame(frame.get(), 0.0f);
    ExpectFrameColor(frame.get(), 0xFF000000);
  }
  base::MD5Digest digest;
  base::MD5Context context;
  base::MD5Init(&context);
  frame->HashFrameForTesting(&context);
  base::MD5Final(&digest, &context);
  EXPECT_EQ(MD5DigestToBase16(digest), "9065c841d9fca49186ef8b4ef547e79b");
  {
    SCOPED_TRACE("");
    InitializeYV12Frame(frame.get(), 1.0f);
    ExpectFrameColor(frame.get(), 0xFFFFFFFF);
  }
  base::MD5Init(&context);
  frame->HashFrameForTesting(&context);
  base::MD5Final(&digest, &context);
  EXPECT_EQ(MD5DigestToBase16(digest), "911991d51438ad2e1a40ed5f6fc7c796");

  // Test an empty frame.
  frame = VideoFrame::CreateEOSFrame();
  EXPECT_TRUE(frame->end_of_stream());
}
TEST(VideoFrame, CreateBlackFrame) {
  const int kWidth = 2;
  const int kHeight = 2;
  const uint8 kExpectedYRow[] = { 0, 0 };
  const uint8 kExpectedUVRow[] = { 128 };

  scoped_refptr<media::VideoFrame> frame =
      VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
  ASSERT_TRUE(frame.get());

  // Test basic properties.
  EXPECT_EQ(0, frame->timestamp().InMicroseconds());
  EXPECT_FALSE(frame->end_of_stream());

  // Test |frame| properties.
  EXPECT_EQ(VideoFrame::YV12, frame->format());
  EXPECT_EQ(kWidth, frame->coded_size().width());
  EXPECT_EQ(kHeight, frame->coded_size().height());

  // Test the frame data itself: in YV12, black is Y = 0 with neutral chroma
  // (U = V = 128).
  uint8* y_plane = frame->data(VideoFrame::kYPlane);
  for (int y = 0; y < frame->coded_size().height(); ++y) {
    EXPECT_EQ(0, memcmp(kExpectedYRow, y_plane, arraysize(kExpectedYRow)));
    y_plane += frame->stride(VideoFrame::kYPlane);
  }

  uint8* u_plane = frame->data(VideoFrame::kUPlane);
  uint8* v_plane = frame->data(VideoFrame::kVPlane);
  for (int y = 0; y < frame->coded_size().height() / 2; ++y) {
    EXPECT_EQ(0, memcmp(kExpectedUVRow, u_plane, arraysize(kExpectedUVRow)));
    EXPECT_EQ(0, memcmp(kExpectedUVRow, v_plane, arraysize(kExpectedUVRow)));
    u_plane += frame->stride(VideoFrame::kUPlane);
    v_plane += frame->stride(VideoFrame::kVPlane);
  }
}
static void FrameNoLongerNeededCallback(
    const scoped_refptr<media::VideoFrame>& frame,
    bool* triggered) {
  *triggered = true;
}
TEST(VideoFrame, WrapVideoFrame) {
  const int kWidth = 4;
  const int kHeight = 4;
  scoped_refptr<media::VideoFrame> frame;
  bool no_longer_needed_triggered = false;
  {
    scoped_refptr<media::VideoFrame> wrapped_frame =
        VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
    ASSERT_TRUE(wrapped_frame.get());

    gfx::Rect visible_rect(1, 1, 1, 1);
    gfx::Size natural_size = visible_rect.size();
    frame = media::VideoFrame::WrapVideoFrame(
        wrapped_frame, visible_rect, natural_size,
        base::Bind(&FrameNoLongerNeededCallback, wrapped_frame,
                   &no_longer_needed_triggered));
    EXPECT_EQ(wrapped_frame->coded_size(), frame->coded_size());
    EXPECT_EQ(wrapped_frame->data(media::VideoFrame::kYPlane),
              frame->data(media::VideoFrame::kYPlane));
    EXPECT_NE(wrapped_frame->visible_rect(), frame->visible_rect());
    EXPECT_EQ(visible_rect, frame->visible_rect());
    EXPECT_NE(wrapped_frame->natural_size(), frame->natural_size());
    EXPECT_EQ(natural_size, frame->natural_size());
  }
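
  // |wrapped_frame| has gone out of scope, but the wrapping |frame| still
  // keeps it alive through the bound done callback, so the callback must not
  // have fired yet; dropping the last reference should fire it.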
  EXPECT_FALSE(no_longer_needed_triggered);
  frame = NULL;
  EXPECT_TRUE(no_longer_needed_triggered);
}
// Ensure each frame is properly sized and allocated; otherwise this test will
// trigger OOB reads and writes as well as incorrect frame hashes.
TEST(VideoFrame, CheckFrameExtents) {
  // Each call consists of a VideoFrame::Format and the expected hash of all
  // planes if filled with kFillByte (defined in ExpectFrameExtents).
  ExpectFrameExtents(VideoFrame::YV12, "8e5d54cb23cd0edca111dd35ffb6ff05");
  ExpectFrameExtents(VideoFrame::YV16, "cce408a044b212db42a10dfec304b3ef");
}
static void TextureCallback(std::vector<uint32>* called_sync_point,
                            const std::vector<uint32>& release_sync_points) {
  called_sync_point->assign(release_sync_points.begin(),
                            release_sync_points.end());
}
// Verify the gpu::MailboxHolder::ReleaseCallback is called when VideoFrame is
// destroyed with the default release sync points.
TEST(VideoFrame, TextureNoLongerNeededCallbackIsCalled) {
  std::vector<uint32> called_sync_points;
  called_sync_points.push_back(1);

  {
    scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTexture(
        make_scoped_ptr(
            new gpu::MailboxHolder(gpu::Mailbox(), 5, 0 /* sync_point */)),
        base::Bind(&TextureCallback, &called_sync_points),
        gfx::Size(10, 10),            // coded_size
        gfx::Rect(10, 10),            // visible_rect
        gfx::Size(10, 10),            // natural_size
        base::TimeDelta(),            // timestamp
        VideoFrame::ReadPixelsCB());  // read_pixels_cb

    EXPECT_EQ(1u, called_sync_points.size());
  }
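  // Destroying |frame| at the end of the scope above runs the release
  // callback with the frame's (still empty) release sync point list, which
  // overwrites the seeded value.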
  EXPECT_TRUE(called_sync_points.empty());
}
// Verify the gpu::MailboxHolder::ReleaseCallback is called when the
// VideoFrame is destroyed with release sync points that were updated by
// clients (i.e. the compositor or WebGL).
TEST(VideoFrame, TextureNoLongerNeededCallbackAfterTakingAndReleasingMailbox) {
  std::vector<uint32> called_sync_points;

  gpu::Mailbox mailbox;
  mailbox.name[0] = 50;
  uint32 sync_point = 7;
  uint32 target = 9;
  std::vector<uint32> release_sync_points;
  release_sync_points.push_back(1);
  release_sync_points.push_back(2);
  release_sync_points.push_back(3);

  {
    scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTexture(
        make_scoped_ptr(new gpu::MailboxHolder(mailbox, target, sync_point)),
        base::Bind(&TextureCallback, &called_sync_points),
        gfx::Size(10, 10),            // coded_size
        gfx::Rect(10, 10),            // visible_rect
        gfx::Size(10, 10),            // natural_size
        base::TimeDelta(),            // timestamp
        VideoFrame::ReadPixelsCB());  // read_pixels_cb
    EXPECT_TRUE(called_sync_points.empty());

    const gpu::MailboxHolder* mailbox_holder = frame->mailbox_holder();

    EXPECT_EQ(mailbox.name[0], mailbox_holder->mailbox.name[0]);
    EXPECT_EQ(target, mailbox_holder->texture_target);
    EXPECT_EQ(sync_point, mailbox_holder->sync_point);

    frame->AppendReleaseSyncPoint(release_sync_points[0]);
    frame->AppendReleaseSyncPoint(release_sync_points[1]);
    frame->AppendReleaseSyncPoint(release_sync_points[2]);
    EXPECT_EQ(sync_point, mailbox_holder->sync_point);
  }
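  // Destroying |frame| runs the release callback, which should receive
  // exactly the sync points appended above.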
  EXPECT_EQ(release_sync_points, called_sync_points);
}

}  // namespace media