1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/browser/renderer_host/media/video_capture_device_client.h"
10 #include "base/strings/stringprintf.h"
11 #include "base/trace_event/trace_event.h"
12 #include "content/browser/compositor/image_transport_factory.h"
13 #include "content/browser/gpu/browser_gpu_channel_host_factory.h"
14 #include "content/browser/gpu/browser_gpu_memory_buffer_manager.h"
15 #include "content/browser/gpu/gpu_data_manager_impl.h"
16 #include "content/browser/renderer_host/media/video_capture_buffer_pool.h"
17 #include "content/browser/renderer_host/media/video_capture_controller.h"
18 #include "content/browser/renderer_host/media/video_capture_gpu_jpeg_decoder.h"
19 #include "content/common/gpu/client/context_provider_command_buffer.h"
20 #include "content/common/gpu/client/gl_helper.h"
21 #include "content/common/gpu/client/gpu_channel_host.h"
22 #include "content/common/gpu/client/webgraphicscontext3d_command_buffer_impl.h"
23 #include "content/common/gpu/gpu_process_launch_causes.h"
24 #include "content/public/browser/browser_thread.h"
25 #include "gpu/command_buffer/common/mailbox_holder.h"
26 #include "media/base/bind_to_current_loop.h"
27 #include "media/base/video_capture_types.h"
28 #include "media/base/video_frame.h"
29 #include "third_party/khronos/GLES2/gl2ext.h"
30 #include "third_party/libyuv/include/libyuv.h"
32 using media::VideoCaptureFormat
;
33 using media::VideoFrame
;
34 using media::VideoFrameMetadata
;
40 #if !defined(OS_ANDROID)
41 // Modelled after GpuProcessTransportFactory::CreateContextCommon().
// Creates an offscreen WebGraphicsContext3D command-buffer context on top of
// the given |gpu_channel_host|. Returns a null scoped_ptr when the browser
// compositor cannot use the GPU or when the GPU channel is unavailable.
// NOTE(review): several original source lines are elided in this excerpt
// (e.g. a second parameter, closing braces, and remaining constructor args).
42 scoped_ptr
<content::WebGraphicsContext3DCommandBufferImpl
> CreateContextCommon(
43 scoped_refptr
<content::GpuChannelHost
> gpu_channel_host
,
// Bail out early when accelerated compositing is unavailable.
45 if (!content::GpuDataManagerImpl::GetInstance()->
46 CanUseGpuBrowserCompositor()) {
47 DLOG(ERROR
) << "No accelerated graphics found. Check chrome://gpu";
48 return scoped_ptr
<content::WebGraphicsContext3DCommandBufferImpl
>();
// Attributes for the offscreen capture context: shared resources, no
// stencil/antialiasing, and no automatic flushes.
50 blink::WebGraphicsContext3D::Attributes attrs
;
51 attrs
.shareResources
= true;
53 attrs
.stencil
= false;
54 attrs
.antialias
= false;
55 attrs
.noAutomaticFlushes
= true;
57 if (!gpu_channel_host
.get()) {
58 DLOG(ERROR
) << "Failed to establish GPU channel.";
59 return scoped_ptr
<content::WebGraphicsContext3DCommandBufferImpl
>();
// Debugging URL identifying this context's creator in chrome://gpu.
61 GURL
url("chrome://gpu/GpuProcessTransportFactory::CreateCaptureContext");
62 return make_scoped_ptr(
63 new WebGraphicsContext3DCommandBufferImpl(
66 gpu_channel_host
.get(),
68 true /* lose_context_when_out_of_memory */,
69 content::WebGraphicsContext3DCommandBufferImpl::SharedMemoryLimits(),
74 // GpuProcessTransportFactory::CreateOffscreenCommandBufferContext().
// Synchronously establishes a GPU channel (must run on a thread where
// EstablishGpuChannelSync() is allowed, i.e. not Android) and creates an
// offscreen command-buffer context via CreateContextCommon().
75 scoped_ptr
<content::WebGraphicsContext3DCommandBufferImpl
>
76 CreateOffscreenCommandBufferContext() {
77 content::CauseForGpuLaunch cause
= content::CAUSE_FOR_GPU_LAUNCH_CANVAS_2D
;
78 // Android does not support synchronous opening of GPU channels. Should use
79 // EstablishGpuChannel() instead.
80 if (!content::BrowserGpuChannelHostFactory::instance())
81 return scoped_ptr
<content::WebGraphicsContext3DCommandBufferImpl
>();
82 scoped_refptr
<content::GpuChannelHost
> gpu_channel_host(
83 content::BrowserGpuChannelHostFactory::instance()->
84 EstablishGpuChannelSync(cause
));
85 DCHECK(gpu_channel_host
);
86 return CreateContextCommon(gpu_channel_host
, 0);
// Callback type invoked with the newly created context provider; used to run
// the "bottom half" of context creation back on the caller's thread.
90 typedef base::Callback
<void(scoped_refptr
<ContextProviderCommandBuffer
>)>
91 ProcessContextCallback
;
// Runs on the UI thread: creates the offscreen context and hands the wrapped
// ContextProviderCommandBuffer to |bottom_half|. On Android (OS_ANDROID) the
// creation path is compiled out; the matching #endif is elided in this excerpt.
93 void CreateContextOnUIThread(ProcessContextCallback bottom_half
) {
94 DCHECK_CURRENTLY_ON(BrowserThread::UI
);
95 #if !defined(OS_ANDROID)
96 bottom_half
.Run(ContextProviderCommandBuffer::Create(
97 CreateOffscreenCommandBufferContext(), OFFSCREEN_VIDEO_CAPTURE_CONTEXT
));
// Clears the lost-context callback on |capture_thread_context| by installing
// a default-constructed (null) LostContextCallback. Used during teardown so
// the context no longer notifies a destroyed owner.
102 void ResetLostContextCallback(
103 const scoped_refptr
<ContextProviderCommandBuffer
>& capture_thread_context
) {
104 capture_thread_context
->SetLostContextCallback(
105 cc::ContextProvider::LostContextCallback());
108 } // anonymous namespace
110 // Class combining a Client::Buffer interface implementation and a pool buffer
111 // implementation to guarantee proper cleanup on destruction on our side.
// The destructor returns the producer reservation to the pool, so simply
// letting the scoped_ptr<Buffer> go out of scope recycles the buffer.
// NOTE(review): constructor parameter lines and access specifiers are elided
// in this excerpt.
112 class AutoReleaseBuffer
: public media::VideoCaptureDevice::Client::Buffer
{
114 AutoReleaseBuffer(const scoped_refptr
<VideoCaptureBufferPool
>& pool
,
118 buffer_handle_(pool_
->GetBufferHandle(buffer_id
).Pass()) {
// Accessors simply forward to the underlying pool buffer handle.
121 int id() const override
{ return id_
; }
122 size_t size() const override
{ return buffer_handle_
->size(); }
123 void* data() override
{ return buffer_handle_
->data(); }
124 ClientBuffer
AsClientBuffer() override
{
125 return buffer_handle_
->AsClientBuffer();
127 #if defined(OS_POSIX)
128 base::FileDescriptor
AsPlatformFile() override
{
129 return buffer_handle_
->AsPlatformFile();
// Returning the reservation here is what guarantees pool cleanup.
134 ~AutoReleaseBuffer() override
{ pool_
->RelinquishProducerReservation(id_
); }
137 const scoped_refptr
<VideoCaptureBufferPool
> pool_
;
138 const scoped_ptr
<VideoCaptureBufferPool::BufferHandle
> buffer_handle_
;
141 // Internal ref-counted class wrapping an incoming GpuMemoryBuffer into a
142 // Texture backed VideoFrame. This VideoFrame creation is balanced by a waiting
143 // on the associated |sync_point|. After VideoFrame consumption the inserted
144 // ReleaseCallback() will be called, where the Texture is destroyed.
146 // This class jumps between threads due to GPU-related thread limitations, i.e.
147 // some objects cannot be accessed from IO Thread whereas others need to be
148 // constructed on UI Thread. For this reason most of the operations are carried
149 // out on Capture Thread (|capture_task_runner_|).
// NOTE(review): access specifiers and some member declarations (e.g. Init(),
// CreateGlHelper()) are elided in this excerpt; the definitions appear later
// in the file.
150 class VideoCaptureDeviceClient::TextureWrapHelper final
151 : public base::RefCountedThreadSafe
<TextureWrapHelper
> {
154 const base::WeakPtr
<VideoCaptureController
>& controller
,
155 const scoped_refptr
<base::SingleThreadTaskRunner
>& capture_task_runner
);
157 // Wraps the GpuMemoryBuffer-backed |buffer| into a Texture, and sends it to
158 // |controller_| wrapped in a VideoFrame.
159 void OnIncomingCapturedGpuMemoryBuffer(
160 scoped_ptr
<media::VideoCaptureDevice::Client::Buffer
> buffer
,
161 const media::VideoCaptureFormat
& frame_format
,
162 const base::TimeTicks
& timestamp
);
165 friend class base::RefCountedThreadSafe
<TextureWrapHelper
>;
166 ~TextureWrapHelper();
168 // Creates some necessary members in |capture_task_runner_|.
170 // Runs the bottom half of the GlHelper creation.
172 scoped_refptr
<ContextProviderCommandBuffer
> capture_thread_context
);
174 // Recycles |memory_buffer|, deletes Image and Texture on VideoFrame release.
175 void ReleaseCallback(GLuint image_id
,
179 // The Command Buffer lost the GL context, f.i. GPU process crashed. Signal
180 // error to our owner so the capture can be torn down.
181 void LostContextCallback();
183 // Prints the error |message| and notifies |controller_| of an error.
184 void OnError(const std::string
& message
);
186 // |controller_| should only be used on IO thread.
187 const base::WeakPtr
<VideoCaptureController
> controller_
;
188 const scoped_refptr
<base::SingleThreadTaskRunner
> capture_task_runner_
;
190 // Command buffer reference, needs to be destroyed when unused. It is created
191 // on UI Thread and bound to Capture Thread. In particular, it cannot be used
193 scoped_refptr
<ContextProviderCommandBuffer
> capture_thread_context_
;
194 // Created and used from Capture Thread. Cannot be used from IO Thread.
195 scoped_ptr
<GLHelper
> gl_helper_
;
197 DISALLOW_COPY_AND_ASSIGN(TextureWrapHelper
);
// Constructed on the IO thread (enforced by the DCHECK below). Holds a weak
// pointer to the controller, the shared buffer pool, and the capture task
// runner used for GPU-related work. The JPEG decoder is created lazily in
// OnIncomingCapturedData(), hence |external_jpeg_decoder_initialized_| starts
// false and the last seen pixel format starts UNKNOWN.
200 VideoCaptureDeviceClient::VideoCaptureDeviceClient(
201 const base::WeakPtr
<VideoCaptureController
>& controller
,
202 const scoped_refptr
<VideoCaptureBufferPool
>& buffer_pool
,
203 const scoped_refptr
<base::SingleThreadTaskRunner
>& capture_task_runner
)
204 : controller_(controller
),
205 external_jpeg_decoder_initialized_(false),
206 buffer_pool_(buffer_pool
),
207 capture_task_runner_(capture_task_runner
),
208 last_captured_pixel_format_(media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN
) {
209 DCHECK_CURRENTLY_ON(BrowserThread::IO
);
// Destructor. Per the comment below it is expected to run on the same thread
// that delivered frames via OnIncomingCapturedData, so that
// |external_jpeg_decoder_| is destroyed on its usage thread.
// NOTE(review): the destructor body lines are elided in this excerpt.
212 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {
213 // This should be on the platform auxiliary thread since
214 // |external_jpeg_decoder_| need to be destructed on the same thread as
215 // OnIncomingCapturedData.
// Entry point for raw captured frames (CPU-backed storage only). Logs pixel
// format changes, lazily initializes the GPU JPEG decoder for MJPEG sources,
// validates the format, computes even-sized/rotated output dimensions,
// reserves an I420 output buffer, then either hands MJPEG data to the
// external decoder or converts/rotates into I420 via libyuv and forwards the
// result through OnIncomingCapturedBuffer().
// NOTE(review): many original lines are elided in this excerpt (parameter
// list, switch `break`s, early returns, ConvertToI420 arguments).
218 void VideoCaptureDeviceClient::OnIncomingCapturedData(
221 const VideoCaptureFormat
& frame_format
,
223 const base::TimeTicks
& timestamp
) {
224 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData");
225 DCHECK_EQ(media::PIXEL_STORAGE_CPU
, frame_format
.pixel_storage
);
// Log (once per change) the incoming pixel format for diagnostics.
227 if (last_captured_pixel_format_
!= frame_format
.pixel_format
) {
228 OnLog("Pixel format: " +
229 VideoCaptureFormat::PixelFormatToString(frame_format
.pixel_format
));
230 last_captured_pixel_format_
= frame_format
.pixel_format
;
// Lazily create the GPU JPEG decoder the first time MJPEG is seen.
232 if (frame_format
.pixel_format
== media::VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG
) {
233 if (!external_jpeg_decoder_initialized_
) {
234 external_jpeg_decoder_initialized_
= true;
235 // base::Unretained is safe because |this| outlives
236 // |external_jpeg_decoder_| and the callbacks are never called after
237 // |external_jpeg_decoder_| is destroyed.
238 external_jpeg_decoder_
.reset(new VideoCaptureGpuJpegDecoder(
240 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread
,
242 // TODO(kcwu): fallback to software decode if error.
243 // https://crbug.com/503532
244 base::Bind(&VideoCaptureDeviceClient::OnError
,
245 base::Unretained(this))));
246 external_jpeg_decoder_
->Initialize();
251 if (!frame_format
.IsValid())
254 // |chopped_{width,height} and |new_unrotated_{width,height}| are the lowest
255 // bit decomposition of {width, height}, grabbing the odd and even parts.
256 const int chopped_width
= frame_format
.frame_size
.width() & 1;
257 const int chopped_height
= frame_format
.frame_size
.height() & 1;
258 const int new_unrotated_width
= frame_format
.frame_size
.width() & ~1;
259 const int new_unrotated_height
= frame_format
.frame_size
.height() & ~1;
// 90/270 degree rotations swap output width and height.
261 int destination_width
= new_unrotated_width
;
262 int destination_height
= new_unrotated_height
;
263 if (rotation
== 90 || rotation
== 270)
264 std::swap(destination_width
, destination_height
);
266 DCHECK_EQ(0, rotation
% 90)
267 << " Rotation must be a multiple of 90, now: " << rotation
;
// Map the integer rotation to the corresponding libyuv rotation mode.
268 libyuv::RotationMode rotation_mode
= libyuv::kRotate0
;
270 rotation_mode
= libyuv::kRotate90
;
271 else if (rotation
== 180)
272 rotation_mode
= libyuv::kRotate180
;
273 else if (rotation
== 270)
274 rotation_mode
= libyuv::kRotate270
;
276 const gfx::Size
dimensions(destination_width
, destination_height
);
277 if (!VideoFrame::IsValidConfig(media::PIXEL_FORMAT_I420
,
278 VideoFrame::STORAGE_UNKNOWN
, dimensions
,
279 gfx::Rect(dimensions
), dimensions
)) {
// Reserve a CPU-backed I420 buffer of the rotated output size.
283 scoped_ptr
<Buffer
> buffer(
284 ReserveOutputBuffer(dimensions
, media::VIDEO_CAPTURE_PIXEL_FORMAT_I420
,
285 media::PIXEL_STORAGE_CPU
));
// Compute the I420 plane layout inside the contiguous output buffer:
// Y plane first, then U, then V.
289 const size_t y_plane_size
=
290 VideoFrame::PlaneSize(media::PIXEL_FORMAT_I420
, VideoFrame::kYPlane
,
291 dimensions
).GetArea();
292 const size_t u_plane_size
=
293 VideoFrame::PlaneSize(media::PIXEL_FORMAT_I420
, VideoFrame::kUPlane
,
294 dimensions
).GetArea();
295 uint8
* const yplane
= reinterpret_cast<uint8
*>(buffer
->data());
296 uint8
* const uplane
= yplane
+ y_plane_size
;
297 uint8
* const vplane
= uplane
+ u_plane_size
;
299 const int yplane_stride
= dimensions
.width();
300 const int uv_plane_stride
= yplane_stride
/ 2;
// Translate the capture pixel format into a libyuv FourCC for conversion.
303 libyuv::FourCC origin_colorspace
= libyuv::FOURCC_ANY
;
306 switch (frame_format
.pixel_format
) {
307 case media::VIDEO_CAPTURE_PIXEL_FORMAT_UNKNOWN
: // Color format not set.
309 case media::VIDEO_CAPTURE_PIXEL_FORMAT_I420
:
310 DCHECK(!chopped_width
&& !chopped_height
);
311 origin_colorspace
= libyuv::FOURCC_I420
;
313 case media::VIDEO_CAPTURE_PIXEL_FORMAT_YV12
:
314 DCHECK(!chopped_width
&& !chopped_height
);
315 origin_colorspace
= libyuv::FOURCC_YV12
;
317 case media::VIDEO_CAPTURE_PIXEL_FORMAT_NV12
:
318 DCHECK(!chopped_width
&& !chopped_height
);
319 origin_colorspace
= libyuv::FOURCC_NV12
;
321 case media::VIDEO_CAPTURE_PIXEL_FORMAT_NV21
:
322 DCHECK(!chopped_width
&& !chopped_height
);
323 origin_colorspace
= libyuv::FOURCC_NV21
;
325 case media::VIDEO_CAPTURE_PIXEL_FORMAT_YUY2
:
326 DCHECK(!chopped_width
&& !chopped_height
);
327 origin_colorspace
= libyuv::FOURCC_YUY2
;
329 case media::VIDEO_CAPTURE_PIXEL_FORMAT_UYVY
:
330 DCHECK(!chopped_width
&& !chopped_height
);
331 origin_colorspace
= libyuv::FOURCC_UYVY
;
333 case media::VIDEO_CAPTURE_PIXEL_FORMAT_RGB24
:
334 origin_colorspace
= libyuv::FOURCC_24BG
;
336 // TODO(wjia): Currently, for RGB24 on WIN, capture device always
337 // passes in positive src_width and src_height. Remove this hardcoded
338 // value when nagative src_height is supported. The negative src_height
339 // indicates that vertical flipping is needed.
343 case media::VIDEO_CAPTURE_PIXEL_FORMAT_RGB32
:
344 // Fallback to VIDEO_CAPTURE_PIXEL_FORMAT_ARGB setting |flip| in Windows
349 case media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB
:
350 origin_colorspace
= libyuv::FOURCC_ARGB
;
352 case media::VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG
:
353 origin_colorspace
= libyuv::FOURCC_MJPG
;
359 // The input |length| can be greater than the required buffer size because of
360 // paddings and/or alignments, but it cannot be smaller.
361 DCHECK_GE(static_cast<size_t>(length
), frame_format
.ImageAllocationSize());
// Prefer the external (GPU) JPEG decoder for unrotated, unflipped MJPEG.
// A failed decoder initialization permanently disables it.
363 if (external_jpeg_decoder_
) {
364 VideoCaptureGpuJpegDecoder::Status status
=
365 external_jpeg_decoder_
->GetStatus();
366 if (status
== VideoCaptureGpuJpegDecoder::INIT_FAILED
) {
367 external_jpeg_decoder_
.reset();
368 } else if (status
== VideoCaptureGpuJpegDecoder::INIT_PASSED
&&
369 frame_format
.pixel_format
== media::VIDEO_CAPTURE_PIXEL_FORMAT_MJPEG
&&
370 rotation
== 0 && !flip
) {
371 external_jpeg_decoder_
->DecodeCapturedData(data
, length
, frame_format
,
372 timestamp
, buffer
.Pass());
// Software path: convert (and rotate/flip) into the reserved I420 buffer.
// A negative height tells libyuv to vertically flip the source.
377 if (libyuv::ConvertToI420(data
,
387 frame_format
.frame_size
.width(),
388 (flip
? -1 : 1) * frame_format
.frame_size
.height(),
390 new_unrotated_height
,
392 origin_colorspace
) != 0) {
393 DLOG(WARNING
) << "Failed to convert buffer's pixel format to I420 from "
394 << VideoCaptureFormat::PixelFormatToString(
395 frame_format
.pixel_format
);
// Forward the converted frame with its output (I420/CPU) format.
399 const VideoCaptureFormat output_format
= VideoCaptureFormat(
400 dimensions
, frame_format
.frame_rate
,
401 media::VIDEO_CAPTURE_PIXEL_FORMAT_I420
, media::PIXEL_STORAGE_CPU
);
402 OnIncomingCapturedBuffer(buffer
.Pass(), output_format
, timestamp
);
// Entry point for already-planar I420 data delivered as separate y/u/v
// pointers with their own strides. Rotation is not supported here (DCHECK).
// Copies the source planes into a freshly reserved pool buffer (so the
// caller's buffer can be returned to the driver synchronously) and forwards
// it via OnIncomingCapturedBuffer().
// NOTE(review): parameter lines for the y/u/v data pointers and strides, and
// some I420Copy arguments, are elided in this excerpt.
406 VideoCaptureDeviceClient::OnIncomingCapturedYuvData(
413 const VideoCaptureFormat
& frame_format
,
414 int clockwise_rotation
,
415 const base::TimeTicks
& timestamp
) {
416 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedYuvData");
417 DCHECK_EQ(media::VIDEO_CAPTURE_PIXEL_FORMAT_I420
, frame_format
.pixel_format
);
418 DCHECK_EQ(media::PIXEL_STORAGE_CPU
, frame_format
.pixel_storage
);
419 DCHECK_EQ(0, clockwise_rotation
) << "Rotation not supported";
421 scoped_ptr
<Buffer
> buffer(ReserveOutputBuffer(frame_format
.frame_size
,
422 frame_format
.pixel_format
,
423 frame_format
.pixel_storage
));
427 // Blit (copy) here from y,u,v into buffer.data()). Needed so we can return
428 // the parameter buffer synchronously to the driver.
// Destination plane layout within the contiguous buffer: Y, then U, then V.
429 const size_t y_plane_size
=
430 VideoFrame::PlaneSize(media::PIXEL_FORMAT_I420
, VideoFrame::kYPlane
,
431 frame_format
.frame_size
).GetArea();
432 const size_t u_plane_size
=
433 VideoFrame::PlaneSize(media::PIXEL_FORMAT_I420
, VideoFrame::kUPlane
,
434 frame_format
.frame_size
).GetArea();
435 uint8
* const dst_y
= reinterpret_cast<uint8
*>(buffer
->data());
436 uint8
* const dst_u
= dst_y
+ y_plane_size
;
437 uint8
* const dst_v
= dst_u
+ u_plane_size
;
// Tightly packed destination strides for each plane.
439 const size_t dst_y_stride
=
440 VideoFrame::RowBytes(VideoFrame::kYPlane
, media::PIXEL_FORMAT_I420
,
441 frame_format
.frame_size
.width());
442 const size_t dst_u_stride
=
443 VideoFrame::RowBytes(VideoFrame::kUPlane
, media::PIXEL_FORMAT_I420
,
444 frame_format
.frame_size
.width());
445 const size_t dst_v_stride
=
446 VideoFrame::RowBytes(VideoFrame::kVPlane
, media::PIXEL_FORMAT_I420
,
447 frame_format
.frame_size
.width());
// Source strides may include padding, but must cover the destination rows.
448 DCHECK_GE(y_stride
, dst_y_stride
);
449 DCHECK_GE(u_stride
, dst_u_stride
);
450 DCHECK_GE(v_stride
, dst_v_stride
);
452 if (libyuv::I420Copy(y_data
, y_stride
,
458 frame_format
.frame_size
.width(),
459 frame_format
.frame_size
.height())) {
460 DLOG(WARNING
) << "Failed to copy buffer";
464 OnIncomingCapturedBuffer(buffer
.Pass(), frame_format
, timestamp
);
// Reserves a buffer from |buffer_pool_| for a frame of |frame_size| in
// |pixel_format|/|pixel_storage|. Returns the buffer wrapped in an
// AutoReleaseBuffer so the reservation is relinquished automatically.
// Lazily creates |texture_wrap_helper_| the first time GpuMemoryBuffer
// storage is requested. If the pool had to drop an old buffer to make room,
// the controller is notified on the IO thread.
467 scoped_ptr
<media::VideoCaptureDevice::Client::Buffer
>
468 VideoCaptureDeviceClient::ReserveOutputBuffer(
469 const gfx::Size
& frame_size
,
470 media::VideoCapturePixelFormat pixel_format
,
471 media::VideoPixelStorage pixel_storage
) {
472 DCHECK(pixel_format
== media::VIDEO_CAPTURE_PIXEL_FORMAT_I420
||
473 pixel_format
== media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB
);
474 DCHECK_GT(frame_size
.width(), 0);
475 DCHECK_GT(frame_size
.height(), 0);
477 if (pixel_storage
== media::PIXEL_STORAGE_GPUMEMORYBUFFER
&&
478 !texture_wrap_helper_
) {
479 texture_wrap_helper_
=
480 new TextureWrapHelper(controller_
, capture_task_runner_
);
483 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if
484 // it's a ShMem GMB or a DmaBuf GMB.
485 int buffer_id_to_drop
= VideoCaptureBufferPool::kInvalidId
;
486 const int buffer_id
= buffer_pool_
->ReserveForProducer(
487 pixel_format
, pixel_storage
, frame_size
, &buffer_id_to_drop
);
488 if (buffer_id
== VideoCaptureBufferPool::kInvalidId
)
491 scoped_ptr
<media::VideoCaptureDevice::Client::Buffer
> output_buffer(
492 new AutoReleaseBuffer(buffer_pool_
, buffer_id
));
// Tell the controller (on IO thread) about any buffer evicted by the pool.
494 if (buffer_id_to_drop
!= VideoCaptureBufferPool::kInvalidId
) {
495 BrowserThread::PostTask(BrowserThread::IO
,
497 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread
,
498 controller_
, buffer_id_to_drop
));
501 return output_buffer
.Pass();
// Dispatches a filled buffer. GpuMemoryBuffer-backed frames are posted to the
// capture thread for texture wrapping via TextureWrapHelper; CPU-backed I420
// (or ARGB) frames are wrapped into a VideoFrame around the buffer's memory
// and forwarded directly via OnIncomingCapturedVideoFrame().
// NOTE(review): some argument lines of the posted task and of
// WrapExternalData are elided in this excerpt.
504 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer(
505 scoped_ptr
<Buffer
> buffer
,
506 const VideoCaptureFormat
& frame_format
,
507 const base::TimeTicks
& timestamp
) {
508 if (frame_format
.pixel_storage
== media::PIXEL_STORAGE_GPUMEMORYBUFFER
) {
// GPU path: hand off to the capture thread for texture wrapping.
509 capture_task_runner_
->PostTask(
511 base::Bind(&TextureWrapHelper::OnIncomingCapturedGpuMemoryBuffer
,
512 texture_wrap_helper_
,
513 base::Passed(&buffer
),
517 DCHECK(frame_format
.pixel_format
==
518 media::VIDEO_CAPTURE_PIXEL_FORMAT_I420
||
519 frame_format
.pixel_format
== media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB
);
// CPU path: zero-copy wrap of the buffer's memory as an I420 VideoFrame.
520 scoped_refptr
<VideoFrame
> video_frame
= VideoFrame::WrapExternalData(
521 media::PIXEL_FORMAT_I420
, frame_format
.frame_size
,
522 gfx::Rect(frame_format
.frame_size
), frame_format
.frame_size
,
523 reinterpret_cast<uint8
*>(buffer
->data()),
524 VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420
,
525 frame_format
.frame_size
),
527 DCHECK(video_frame
.get());
528 video_frame
->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE
,
529 frame_format
.frame_rate
);
530 OnIncomingCapturedVideoFrame(buffer
.Pass(), video_frame
, timestamp
);
// Posts the finished VideoFrame (together with its backing pool buffer, whose
// ownership is transferred into the task) to the controller on the IO thread.
// NOTE(review): some arguments of the PostTask/Bind call are elided in this
// excerpt.
534 void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame(
535 scoped_ptr
<Buffer
> buffer
,
536 const scoped_refptr
<VideoFrame
>& frame
,
537 const base::TimeTicks
& timestamp
) {
538 BrowserThread::PostTask(
542 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread
,
544 base::Passed(&buffer
),
// Formats |reason| together with the last OS error code into a log message,
// logs it, and notifies the controller of the error on the IO thread.
549 void VideoCaptureDeviceClient::OnError(
550 const std::string
& reason
) {
551 const std::string log_message
= base::StringPrintf(
552 "Error on video capture: %s, OS message: %s",
554 logging::SystemErrorCodeToString(
555 logging::GetLastSystemErrorCode()).c_str());
556 DLOG(ERROR
) << log_message
;
558 BrowserThread::PostTask(BrowserThread::IO
,
560 base::Bind(&VideoCaptureController::DoErrorOnIOThread
, controller_
));
// Forwards a log |message| to the controller on the IO thread.
563 void VideoCaptureDeviceClient::OnLog(
564 const std::string
& message
) {
565 BrowserThread::PostTask(BrowserThread::IO
, FROM_HERE
,
566 base::Bind(&VideoCaptureController::DoLogOnIOThread
,
567 controller_
, message
));
// Returns the pool's current utilization; safe to call from any thread per
// the comment below.
570 double VideoCaptureDeviceClient::GetBufferPoolUtilization() const {
571 // VideoCaptureBufferPool::GetBufferPoolUtilization() is thread-safe.
572 return buffer_pool_
->GetBufferPoolUtilization();
// Stores the controller weak pointer and capture task runner, then kicks off
// asynchronous initialization (Init) on the capture thread.
575 VideoCaptureDeviceClient::TextureWrapHelper::TextureWrapHelper(
576 const base::WeakPtr
<VideoCaptureController
>& controller
,
577 const scoped_refptr
<base::SingleThreadTaskRunner
>& capture_task_runner
)
578 : controller_(controller
),
579 capture_task_runner_(capture_task_runner
) {
580 capture_task_runner_
->PostTask(FROM_HERE
,
581 base::Bind(&TextureWrapHelper::Init
, this));
// Runs on the capture thread. Binds the GpuMemoryBuffer-backed |buffer| to a
// GL image and texture, produces a mailbox holder from the texture, wraps it
// all into a native-texture VideoFrame (whose release callback tears the
// image/texture down via ReleaseCallback), attaches platform handles on
// Linux, and posts the frame to the controller on the IO thread.
// NOTE(review): several lines are elided in this excerpt (the gl_helper_
// early-return guard, CreateImageCHROMIUM's internalformat argument, #else /
// #endif lines of the platform-specific section).
585 VideoCaptureDeviceClient::TextureWrapHelper::OnIncomingCapturedGpuMemoryBuffer(
586 scoped_ptr
<media::VideoCaptureDevice::Client::Buffer
> buffer
,
587 const media::VideoCaptureFormat
& frame_format
,
588 const base::TimeTicks
& timestamp
) {
589 DCHECK(capture_task_runner_
->BelongsToCurrentThread());
590 DCHECK_EQ(media::VIDEO_CAPTURE_PIXEL_FORMAT_ARGB
, frame_format
.pixel_format
);
591 DCHECK_EQ(media::PIXEL_STORAGE_GPUMEMORYBUFFER
, frame_format
.pixel_storage
);
593 // |gl_helper_| might not exist due to asynchronous initialization not
594 // finished or due to termination in process after a context loss.
595 DVLOG(1) << " Skipping ingress frame, no GL context.";
// Create a GL image from the client buffer and bind it to a new texture.
599 gpu::gles2::GLES2Interface
* gl
= capture_thread_context_
->ContextGL();
600 GLuint image_id
= gl
->CreateImageCHROMIUM(buffer
->AsClientBuffer(),
601 frame_format
.frame_size
.width(),
602 frame_format
.frame_size
.height(),
606 const GLuint texture_id
= gl_helper_
->CreateTexture();
609 content::ScopedTextureBinder
<GL_TEXTURE_2D
> texture_binder(gl
, texture_id
);
610 gl
->BindTexImage2DCHROMIUM(GL_TEXTURE_2D
, image_id
);
// Produce a mailbox so the texture can be consumed in another context.
613 const gpu::MailboxHolder
& mailbox_holder(
614 gl_helper_
->ProduceMailboxHolderFromTexture(texture_id
));
615 DCHECK(!mailbox_holder
.mailbox
.IsZero());
616 DCHECK(mailbox_holder
.mailbox
.Verify());
617 DCHECK(mailbox_holder
.texture_target
);
618 DCHECK(mailbox_holder
.sync_point
);
// Wrap the mailbox in a VideoFrame; the bound ReleaseCallback (run on this
// loop) destroys the image and texture after consumption.
620 scoped_refptr
<media::VideoFrame
> video_frame
=
621 media::VideoFrame::WrapNativeTexture(
622 media::PIXEL_FORMAT_ARGB
, mailbox_holder
,
623 media::BindToCurrentLoop(base::Bind(
624 &VideoCaptureDeviceClient::TextureWrapHelper::ReleaseCallback
,
625 this, image_id
, texture_id
)),
626 frame_format
.frame_size
, gfx::Rect(frame_format
.frame_size
),
627 frame_format
.frame_size
, base::TimeDelta());
628 video_frame
->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY
, true);
629 video_frame
->metadata()->SetDouble(VideoFrameMetadata::FRAME_RATE
,
630 frame_format
.frame_rate
);
631 #if defined(OS_LINUX)
632 // TODO(mcasas): After http://crev.com/1179323002, use |frame_format| to query
633 // the storage type of the buffer and use the appropriate |video_frame| method.
634 #if defined(USE_OZONE)
635 DCHECK_EQ(1u, media::VideoFrame::NumPlanes(video_frame
->format()));
636 video_frame
->DuplicateFileDescriptors(
637 std::vector
<int>(1, buffer
->AsPlatformFile().fd
));
639 video_frame
->AddSharedMemoryHandle(buffer
->AsPlatformFile());
643 //TODO(mcasas): use AddSharedMemoryHandle() for gfx::SHARED_MEMORY_BUFFER.
// Deliver the wrapped frame (and its backing buffer) to the controller.
645 BrowserThread::PostTask(
646 BrowserThread::IO
, FROM_HERE
,
648 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread
,
649 controller_
, base::Passed(&buffer
), video_frame
, timestamp
));
// Destructor may run on any thread, so objects that must die on the capture
// thread are handed to |capture_task_runner_|: |gl_helper_| via DeleteSoon,
// and |capture_thread_context_| via a manual AddRef + ReleaseSoon after its
// lost-context callback has been reset there.
652 VideoCaptureDeviceClient::TextureWrapHelper::~TextureWrapHelper() {
653 // Might not be running on capture_task_runner_'s thread. Ensure owned objects
654 // are destroyed on the correct threads.
656 capture_task_runner_
->DeleteSoon(FROM_HERE
, gl_helper_
.release());
658 if (capture_thread_context_
) {
659 capture_task_runner_
->PostTask(
661 base::Bind(&ResetLostContextCallback
, capture_thread_context_
));
// Keep the context alive past this destructor; the final Release happens
// on the capture thread via ReleaseSoon.
662 capture_thread_context_
->AddRef();
663 ContextProviderCommandBuffer
* raw_capture_thread_context
=
664 capture_thread_context_
.get();
665 capture_thread_context_
= nullptr;
666 capture_task_runner_
->ReleaseSoon(FROM_HERE
, raw_capture_thread_context
);
// Runs on the capture thread. Posts context creation to the UI thread; the
// resulting context provider is delivered back to this thread through
// CreateGlHelper (bound to the current loop).
// NOTE(review): some arguments of the PostTask/Bind call are elided in this
// excerpt.
670 void VideoCaptureDeviceClient::TextureWrapHelper::Init() {
671 DCHECK(capture_task_runner_
->BelongsToCurrentThread());
673 // In threaded compositing mode, we have to create our own context for Capture
674 // to avoid using the GPU command queue from multiple threads. Context
675 // creation must happen on UI thread; then the context needs to be bound to
676 // the appropriate thread, which is done in CreateGlHelper().
677 BrowserThread::PostTask(
678 BrowserThread::UI
, FROM_HERE
,
680 &CreateContextOnUIThread
,
681 media::BindToCurrentLoop(base::Bind(
682 &VideoCaptureDeviceClient::TextureWrapHelper::CreateGlHelper
,
// Bottom half of GL initialization, run on the capture thread with the
// context created on the UI thread. Installs the lost-context callback,
// binds the context to this thread, stores it, and creates the GLHelper.
686 void VideoCaptureDeviceClient::TextureWrapHelper::CreateGlHelper(
687 scoped_refptr
<ContextProviderCommandBuffer
> capture_thread_context
) {
688 DCHECK(capture_task_runner_
->BelongsToCurrentThread());
690 if (!capture_thread_context
.get()) {
691 DLOG(ERROR
) << "No offscreen GL Context!";
694 // This may not happen in IO Thread. The destructor resets the context lost
695 // callback, so base::Unretained is safe; otherwise it'd be a circular ref
696 // counted dependency.
697 capture_thread_context
->SetLostContextCallback(media::BindToCurrentLoop(
699 &VideoCaptureDeviceClient::TextureWrapHelper::LostContextCallback
,
700 base::Unretained(this))));
701 if (!capture_thread_context
->BindToCurrentThread()) {
702 capture_thread_context
= NULL
;
703 DLOG(ERROR
) << "Couldn't bind the Capture Context to the Capture Thread.";
706 DCHECK(capture_thread_context
);
707 capture_thread_context_
= capture_thread_context
;
709 // At this point, |capture_thread_context| is a cc::ContextProvider. Creation
710 // of our GLHelper should happen on Capture Thread.
711 gl_helper_
.reset(new GLHelper(capture_thread_context
->ContextGL(),
712 capture_thread_context
->ContextSupport()));
// Runs on the capture thread when a wrapped VideoFrame is released: deletes
// the texture and destroys the GL image created for the frame.
// NOTE(review): the parameter lines (image_id, texture_id, sync point) are
// elided in this excerpt.
716 void VideoCaptureDeviceClient::TextureWrapHelper::ReleaseCallback(
720 DCHECK(capture_task_runner_
->BelongsToCurrentThread());
723 gl_helper_
->DeleteTexture(texture_id
);
724 capture_thread_context_
->ContextGL()->DestroyImageCHROMIUM(image_id
);
// Invoked on the capture thread when the command buffer loses the GL context
// (e.g. GPU process crash); reports the failure through OnError().
// NOTE(review): the line(s) that stop frame processing, referenced by the
// comment below, are elided in this excerpt.
728 void VideoCaptureDeviceClient::TextureWrapHelper::LostContextCallback() {
729 DCHECK(capture_task_runner_
->BelongsToCurrentThread());
730 // Prevent incoming frames from being processed while OnError gets groked.
732 OnError("GLContext lost");
// Logs |message| and notifies the controller of the error on the IO thread.
735 void VideoCaptureDeviceClient::TextureWrapHelper::OnError(
736 const std::string
& message
) {
737 DCHECK(capture_task_runner_
->BelongsToCurrentThread());
738 DLOG(ERROR
) << message
;
739 BrowserThread::PostTask(
740 BrowserThread::IO
, FROM_HERE
,
741 base::Bind(&VideoCaptureController::DoErrorOnIOThread
, controller_
));
744 } // namespace content