content/browser/renderer_host/media/video_capture_texture_wrapper.cc
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/browser/renderer_host/media/video_capture_texture_wrapper.h"

#include "base/bind.h"
#include "content/browser/compositor/image_transport_factory.h"
#include "content/browser/gpu/browser_gpu_channel_host_factory.h"
#include "content/browser/gpu/browser_gpu_memory_buffer_manager.h"
#include "content/browser/gpu/gpu_data_manager_impl.h"
#include "content/browser/renderer_host/media/video_capture_controller.h"
#include "content/common/gpu/client/context_provider_command_buffer.h"
#include "content/common/gpu/client/gl_helper.h"
#include "content/common/gpu/client/gpu_channel_host.h"
#include "content/common/gpu/client/webgraphicscontext3d_command_buffer_impl.h"
#include "content/common/gpu/gpu_process_launch_causes.h"
#include "content/public/browser/browser_thread.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_capture_types.h"
#include "media/base/video_frame.h"
#include "third_party/khronos/GLES2/gl2ext.h"
#include "third_party/libyuv/include/libyuv.h"
namespace content {

namespace {

// VideoCaptureController has at most 3 capture frames in flight.
const size_t kNumGpuMemoryBuffers = 3;

uint32 VideoPixelFormatToFourCC(media::VideoPixelFormat pixel_format) {
  switch (pixel_format) {
    // I420 is needed by Fake and FileVideoCaptureDevice
    case media::PIXEL_FORMAT_I420:
      return libyuv::FOURCC_I420;
    case media::PIXEL_FORMAT_UYVY:
      return libyuv::FOURCC_UYVY;
    case media::PIXEL_FORMAT_YUY2:
      return libyuv::FOURCC_YUY2;
    case media::PIXEL_FORMAT_MJPEG:
      return libyuv::FOURCC_MJPG;
    default:
      NOTREACHED() << "Bad captured pixel format: "
                   << media::VideoCaptureFormat::PixelFormatToString(
                          pixel_format);
  }
  return libyuv::FOURCC_ANY;
}
// Modelled after GpuProcessTransportFactory::CreateContextCommon().
scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl> CreateContextCommon(
    scoped_refptr<content::GpuChannelHost> gpu_channel_host,
    int surface_id) {
  if (!content::GpuDataManagerImpl::GetInstance()->
          CanUseGpuBrowserCompositor()) {
    DLOG(ERROR) << "No accelerated graphics found. Check chrome://gpu";
    return scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl>();
  }
  blink::WebGraphicsContext3D::Attributes attrs;
  attrs.shareResources = true;
  attrs.depth = false;
  attrs.stencil = false;
  attrs.antialias = false;
  attrs.noAutomaticFlushes = true;

  if (!gpu_channel_host.get()) {
    DLOG(ERROR) << "Failed to establish GPU channel.";
    return scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl>();
  }
  GURL url("chrome://gpu/GpuProcessTransportFactory::CreateCaptureContext");
  return make_scoped_ptr(
      new WebGraphicsContext3DCommandBufferImpl(
          surface_id,
          url,
          gpu_channel_host.get(),
          attrs,
          true /* lose_context_when_out_of_memory */,
          content::WebGraphicsContext3DCommandBufferImpl::SharedMemoryLimits(),
          NULL));
}
// Modelled after
// GpuProcessTransportFactory::CreateOffscreenCommandBufferContext().
scoped_ptr<content::WebGraphicsContext3DCommandBufferImpl>
CreateOffscreenCommandBufferContext() {
  content::CauseForGpuLaunch cause = content::CAUSE_FOR_GPU_LAUNCH_CANVAS_2D;
  scoped_refptr<content::GpuChannelHost> gpu_channel_host(
      content::BrowserGpuChannelHostFactory::instance()->
          EstablishGpuChannelSync(cause));
  DCHECK(gpu_channel_host);
  return CreateContextCommon(gpu_channel_host, 0);
}

typedef base::Callback<void(scoped_refptr<ContextProviderCommandBuffer>)>
    ProcessContextCallback;

void CreateContextOnUIThread(ProcessContextCallback bottom_half) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  bottom_half.Run(ContextProviderCommandBuffer::Create(
      CreateOffscreenCommandBufferContext(), "Offscreen-CaptureThread"));
  return;
}

void ResetLostContextCallback(
    const scoped_refptr<ContextProviderCommandBuffer>& capture_thread_context) {
  capture_thread_context->SetLostContextCallback(
      cc::ContextProvider::LostContextCallback());
}

}  // anonymous namespace
// Internal ref-counted class to manage a pool of GpuMemoryBuffers. The contents
// of an incoming captured frame are copied into the first available buffer
// from the pool and sent to our client, ultimately wrapped into a VideoFrame.
// This VideoFrame creation is balanced by waiting on the associated
// |sync_point|. After VideoFrame consumption the inserted ReleaseCallback()
// will be called, where the GpuMemoryBuffer is recycled.
//
// This class jumps between threads due to GPU-related thread limitations, i.e.
// some objects cannot be accessed from the IO Thread, where we are constructed,
// while others need to be constructed on the UI Thread. For this reason most
// of the operations are carried out on the Capture Thread
// (|capture_task_runner_|).
//
// TODO(mcasas): ctor |capture_format| is used for early GpuMemoryBuffer pool
// allocation, but VideoCaptureDevices might provide a different resolution when
// calling OnIncomingCapturedData(), be that due to driver preferences or to
// its ResolutionChangePolicy. Make the GpuMemoryBuffer allocated on demand.
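//
// Per-frame flow, as implemented below: on the IO Thread the raw capture data
// is converted to ARGB into a shared-memory |argb_buffer|; on the Capture
// Thread that ARGB data is copied into a pooled GpuMemoryBuffer, bound to a GL
// image and texture, and exported via a mailbox; the resulting VideoFrame is
// then posted back to |controller_| on the IO Thread.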
class VideoCaptureTextureWrapper::TextureWrapperDelegate final
    : public base::RefCountedThreadSafe<TextureWrapperDelegate> {
 public:
  TextureWrapperDelegate(
      const base::WeakPtr<VideoCaptureController>& controller,
      const scoped_refptr<base::SingleThreadTaskRunner>& capture_task_runner,
      const media::VideoCaptureFormat& capture_format);

  // Copy-converts the incoming data into a GpuMemoryBuffer-backed Texture, and
  // sends it to |controller_| wrapped in a VideoFrame, with |texture_buffer|
  // as storage backend.
  void OnIncomingCapturedData(
      const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>&
          texture_buffer,
      const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>&
          argb_buffer,
      const gfx::Size& frame_size,
      const base::TimeTicks& timestamp);

 private:
  friend class base::RefCountedThreadSafe<TextureWrapperDelegate>;
  ~TextureWrapperDelegate();

  // Creates some necessary members in |capture_task_runner_|.
  void Init(const media::VideoCaptureFormat& capture_format);
  // Runs the bottom half of the GLHelper creation.
  void CreateGlHelper(
      scoped_refptr<ContextProviderCommandBuffer> capture_thread_context);

  // Recycles |memory_buffer| and deletes the Image and Texture on VideoFrame
  // release.
  void ReleaseCallback(GLuint image_id,
                       GLuint texture_id,
                       linked_ptr<gfx::GpuMemoryBuffer> memory_buffer,
                       uint32 sync_point);

  // The Command Buffer lost the GL context, e.g. because the GPU process
  // crashed. Signal the error to our owner so the capture can be torn down.
  void LostContextCallback();

  // Prints the error |message| and notifies |controller_| of an error.
  void OnError(const std::string& message);

  const base::WeakPtr<VideoCaptureController> controller_;
  const scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner_;

  // Command buffer reference, needs to be destroyed when unused. It is created
  // on the UI Thread and bound to the Capture Thread. In particular, it cannot
  // be used from the IO Thread.
  scoped_refptr<ContextProviderCommandBuffer> capture_thread_context_;
  // Created and used from the Capture Thread. Cannot be used from the IO
  // Thread.
  scoped_ptr<GLHelper> gl_helper_;

  // A pool of GpuMemoryBuffers that are used to wrap incoming captured frames;
  // recycled via ReleaseCallback().
  std::queue<linked_ptr<gfx::GpuMemoryBuffer>> gpu_memory_buffers_;

  DISALLOW_COPY_AND_ASSIGN(TextureWrapperDelegate);
};
VideoCaptureTextureWrapper::VideoCaptureTextureWrapper(
    const base::WeakPtr<VideoCaptureController>& controller,
    const scoped_refptr<VideoCaptureBufferPool>& buffer_pool,
    const scoped_refptr<base::SingleThreadTaskRunner>& capture_task_runner,
    const media::VideoCaptureFormat& capture_format)
    : VideoCaptureDeviceClient(controller, buffer_pool),
      wrapper_delegate_(new TextureWrapperDelegate(controller,
                                                   capture_task_runner,
                                                   capture_format)),
      capture_task_runner_(capture_task_runner) {
  DCHECK_CURRENTLY_ON(BrowserThread::IO);
}

VideoCaptureTextureWrapper::~VideoCaptureTextureWrapper() {
}
void VideoCaptureTextureWrapper::OnIncomingCapturedData(
    const uint8* data,
    int length,
    const media::VideoCaptureFormat& frame_format,
    int clockwise_rotation,
    const base::TimeTicks& timestamp) {

  // Reserve a temporary Buffer for conversion to ARGB.
  scoped_refptr<media::VideoCaptureDevice::Client::Buffer> argb_buffer =
      ReserveOutputBuffer(media::VideoFrame::ARGB, frame_format.frame_size);
  DVLOG_IF(1, !argb_buffer) << "Couldn't allocate ARGB Buffer";
  if (!argb_buffer)
    return;

  DCHECK(argb_buffer->data());
  // TODO(mcasas): Take |clockwise_rotation| into account.
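  // Let libyuv demux/convert the whole frame to ARGB in one pass; the source
  // layout is selected via the FourCC mapped from |frame_format.pixel_format|,
  // and the destination stride is width * 4 bytes (tightly packed ARGB).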
  int ret = libyuv::ConvertToARGB(data,
                                  length,
                                  reinterpret_cast<uint8*>(argb_buffer->data()),
                                  frame_format.frame_size.width() * 4,
                                  0 /* crop_x_pos */,
                                  0 /* crop_y_pos */,
                                  frame_format.frame_size.width(),
                                  frame_format.frame_size.height(),
                                  frame_format.frame_size.width(),
                                  frame_format.frame_size.height(),
                                  libyuv::kRotate0,
                                  VideoPixelFormatToFourCC(
                                      frame_format.pixel_format));
  DLOG_IF(ERROR, ret != 0) << "Error converting incoming frame";
  if (ret != 0)
    return;
  // Reserve an output Buffer to track the texture on its way across the IPC
  // boundary.
  scoped_refptr<media::VideoCaptureDevice::Client::Buffer> texture_buffer =
      ReserveOutputBuffer(media::VideoFrame::NATIVE_TEXTURE, gfx::Size());
  DVLOG_IF(1, !texture_buffer) << "Couldn't allocate Texture Buffer";
  if (!texture_buffer)
    return;

  capture_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(
          &TextureWrapperDelegate::OnIncomingCapturedData,
          wrapper_delegate_,
          texture_buffer,
          argb_buffer,
          frame_format.frame_size,
          timestamp));
}
VideoCaptureTextureWrapper::TextureWrapperDelegate::TextureWrapperDelegate(
    const base::WeakPtr<VideoCaptureController>& controller,
    const scoped_refptr<base::SingleThreadTaskRunner>& capture_task_runner,
    const media::VideoCaptureFormat& capture_format)
    : controller_(controller),
      capture_task_runner_(capture_task_runner) {
  DCHECK_CURRENTLY_ON(BrowserThread::IO);
  capture_task_runner_->PostTask(FROM_HERE,
      base::Bind(&TextureWrapperDelegate::Init, this, capture_format));
}
void VideoCaptureTextureWrapper::TextureWrapperDelegate::OnIncomingCapturedData(
    const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>&
        texture_buffer,
    const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& argb_buffer,
    const gfx::Size& frame_size,
    const base::TimeTicks& timestamp) {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  DVLOG_IF(1, !gl_helper_) << " Skipping ingress frame, no GL context.";
  if (!gl_helper_)
    return;

  DVLOG_IF(1, gpu_memory_buffers_.empty()) << " Skipping ingress frame, 0 GMBs";
  if (gpu_memory_buffers_.empty())
    return;

  linked_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer =
      gpu_memory_buffers_.front();
  gpu_memory_buffers_.pop();
  DCHECK(gpu_memory_buffer.get());
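  // Map the GpuMemoryBuffer into CPU-visible memory and copy the ARGB pixels
  // into it, honouring the buffer's own stride (which may be wider than
  // width * 4).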
  void* data = NULL;
  bool rv = gpu_memory_buffer->Map(&data);
  DCHECK(rv);
  uint32 stride;
  gpu_memory_buffer->GetStride(&stride);

  uint8* mapped_buffer = static_cast<uint8*>(data);
  DCHECK(mapped_buffer);
  libyuv::ARGBCopy(
      reinterpret_cast<uint8*>(argb_buffer->data()), frame_size.width() * 4,
      mapped_buffer, stride,
      frame_size.width(), frame_size.height());
  gpu_memory_buffer->Unmap();
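  // Wrap the GpuMemoryBuffer as a GL image and attach it to a freshly created
  // texture, so the captured pixels can be handed off as a native texture.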
  gpu::gles2::GLES2Interface* gl = capture_thread_context_->ContextGL();
  GLuint image_id = gl->CreateImageCHROMIUM(gpu_memory_buffer->AsClientBuffer(),
                                            frame_size.width(),
                                            frame_size.height(), GL_BGRA_EXT);
  DCHECK(image_id);

  GLuint texture_id = gl_helper_->CreateTexture();
  DCHECK(texture_id);
  {
    content::ScopedTextureBinder<GL_TEXTURE_2D> texture_binder(gl, texture_id);
    gl->BindTexImage2DCHROMIUM(GL_TEXTURE_2D, image_id);
  }
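  // Export the texture through a mailbox so it can be consumed from another
  // context/process; the mailbox's sync point orders the consumer after the
  // producing commands issued above.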
  scoped_ptr<gpu::MailboxHolder> mailbox_holder(new gpu::MailboxHolder(
      gl_helper_->ProduceMailboxHolderFromTexture(texture_id)));
  DCHECK(!mailbox_holder->mailbox.IsZero());
  DCHECK(mailbox_holder->mailbox.Verify());
  DCHECK(mailbox_holder->texture_target);
  DCHECK(mailbox_holder->sync_point);
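  // Wrap the mailbox in a VideoFrame. The release callback is bounced back to
  // the Capture Thread via BindToCurrentLoop and, in ReleaseCallback(),
  // recycles the GpuMemoryBuffer and deletes the image and texture.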
  scoped_refptr<media::VideoFrame> video_frame =
      media::VideoFrame::WrapNativeTexture(
          mailbox_holder.Pass(),
          media::BindToCurrentLoop(
              base::Bind(&VideoCaptureTextureWrapper::TextureWrapperDelegate::
                             ReleaseCallback,
                         this, image_id, texture_id, gpu_memory_buffer)),
          frame_size,
          gfx::Rect(frame_size),
          frame_size,
          base::TimeDelta(),
          true /* allow_overlay */);

  BrowserThread::PostTask(
      BrowserThread::IO, FROM_HERE,
      base::Bind(
          &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
          controller_, texture_buffer, video_frame, timestamp));
}
VideoCaptureTextureWrapper::TextureWrapperDelegate::~TextureWrapperDelegate() {
  // This might not be running on |capture_task_runner_|'s thread, so make sure
  // the owned objects are destroyed on the correct threads.
  while (!gpu_memory_buffers_.empty()) {
    capture_task_runner_->DeleteSoon(FROM_HERE,
                                     gpu_memory_buffers_.front().release());
    gpu_memory_buffers_.pop();
  }
  if (gl_helper_)
    capture_task_runner_->DeleteSoon(FROM_HERE, gl_helper_.release());
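  // Hand the last reference to |capture_thread_context_| over to the Capture
  // Thread so the context is released (and possibly destroyed) there, rather
  // than on whichever thread runs this destructor.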
  if (capture_thread_context_) {
    capture_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&ResetLostContextCallback, capture_thread_context_));
    capture_thread_context_->AddRef();
    ContextProviderCommandBuffer* raw_capture_thread_context =
        capture_thread_context_.get();
    capture_thread_context_ = nullptr;
    capture_task_runner_->ReleaseSoon(FROM_HERE, raw_capture_thread_context);
  }
}
void VideoCaptureTextureWrapper::TextureWrapperDelegate::Init(
    const media::VideoCaptureFormat& capture_format) {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  // BrowserGpuMemoryBufferManager::current() may not be accessed on IO Thread.
  // TODO(mcasas): At this point |capture_format| represents the format we want
  // to get from the VCDevice, but the device might send another format.
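  // Pre-allocate the fixed-size pool of BGRA GpuMemoryBuffers; incoming frames
  // are dropped in OnIncomingCapturedData() whenever the pool is empty.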
  for (size_t i = 0; i < kNumGpuMemoryBuffers; ++i) {
    linked_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer(
        BrowserGpuMemoryBufferManager::current()->AllocateGpuMemoryBuffer(
            capture_format.frame_size,
            gfx::GpuMemoryBuffer::BGRA_8888,
            gfx::GpuMemoryBuffer::MAP).release());
    if (!gpu_memory_buffer.get()) {
      OnError("Could not allocate GpuMemoryBuffer");
      while (!gpu_memory_buffers_.empty())
        gpu_memory_buffers_.pop();
      return;
    }
    gpu_memory_buffers_.push(gpu_memory_buffer);
  }
  // In threaded compositing mode, we have to create our own context for
  // Capture to avoid using the GPU command queue from multiple threads.
  // Context creation must happen on the UI thread; the context then needs to
  // be bound to the appropriate thread, which is done in CreateGlHelper().
  BrowserThread::PostTask(
      BrowserThread::UI, FROM_HERE,
      base::Bind(&CreateContextOnUIThread,
                 media::BindToCurrentLoop(
                     base::Bind(&VideoCaptureTextureWrapper::
                                    TextureWrapperDelegate::CreateGlHelper,
                                this))));
}
void VideoCaptureTextureWrapper::TextureWrapperDelegate::CreateGlHelper(
    scoped_refptr<ContextProviderCommandBuffer> capture_thread_context) {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  if (!capture_thread_context.get()) {
    DLOG(ERROR) << "No offscreen GL Context!";
    return;
  }
  // This may not happen on the IO Thread. The destructor resets the lost
  // context callback, so base::Unretained is safe; otherwise it would be a
  // circular ref-counted dependency.
  capture_thread_context->SetLostContextCallback(media::BindToCurrentLoop(
      base::Bind(
          &VideoCaptureTextureWrapper::TextureWrapperDelegate::
              LostContextCallback,
          base::Unretained(this))));
  if (!capture_thread_context->BindToCurrentThread()) {
    capture_thread_context = NULL;
    DLOG(ERROR) << "Couldn't bind the Capture Context to the Capture Thread.";
    return;
  }
  DCHECK(capture_thread_context);
  capture_thread_context_ = capture_thread_context;

  // At this point, |capture_thread_context| is a cc::ContextProvider. Creation
  // of our GLHelper should happen on the Capture Thread.
  gl_helper_.reset(new GLHelper(capture_thread_context->ContextGL(),
                                capture_thread_context->ContextSupport()));
  DCHECK(gl_helper_);
}
void VideoCaptureTextureWrapper::TextureWrapperDelegate::ReleaseCallback(
    GLuint image_id,
    GLuint texture_id,
    linked_ptr<gfx::GpuMemoryBuffer> memory_buffer,
    uint32 sync_point) {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  // TODO(mcasas): Before recycling |memory_buffer| we have to make sure it has
  // been consumed and fully used.
  gpu_memory_buffers_.push(memory_buffer);
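  // The texture and image live in the capture thread's GL context, so they are
  // deleted here as well (skipped if the context has already been lost).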
  if (gl_helper_) {
    gl_helper_->DeleteTexture(texture_id);
    capture_thread_context_->ContextGL()->DestroyImageCHROMIUM(image_id);
  }
}

void VideoCaptureTextureWrapper::TextureWrapperDelegate::LostContextCallback() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());
  // Prevent incoming frames from being processed while the error is handled.
  gl_helper_.reset();
  OnError("GLContext lost");
}
void VideoCaptureTextureWrapper::TextureWrapperDelegate::OnError(
    const std::string& message) {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());
  DLOG(ERROR) << message;
  BrowserThread::PostTask(
      BrowserThread::IO, FROM_HERE,
      base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_));
}

}  // namespace content