/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "ImageLogging.h"  // Must appear first

#include "nsAVIFDecoder.h"

#include <aom/aomdx.h>

#include "DAV1DDecoder.h"
#include "gfxPlatform.h"
#include "YCbCrUtils.h"
#include "SurfacePipeFactory.h"
#include "mozilla/glean/ImageDecodersMetrics.h"
#include "mozilla/Telemetry.h"
#include "mozilla/UniquePtrExtensions.h"

using namespace mozilla::gfx;

using Telemetry::LABELS_AVIF_A1LX;
using Telemetry::LABELS_AVIF_A1OP;
using Telemetry::LABELS_AVIF_ALPHA;
using Telemetry::LABELS_AVIF_AOM_DECODE_ERROR;
using Telemetry::LABELS_AVIF_BIT_DEPTH;
using Telemetry::LABELS_AVIF_CICP_CP;
using Telemetry::LABELS_AVIF_CICP_MC;
using Telemetry::LABELS_AVIF_CICP_TC;
using Telemetry::LABELS_AVIF_CLAP;
using Telemetry::LABELS_AVIF_COLR;
using Telemetry::LABELS_AVIF_DECODE_RESULT;
using Telemetry::LABELS_AVIF_DECODER;
using Telemetry::LABELS_AVIF_GRID;
using Telemetry::LABELS_AVIF_IPRO;
using Telemetry::LABELS_AVIF_ISPE;
using Telemetry::LABELS_AVIF_LSEL;
using Telemetry::LABELS_AVIF_MAJOR_BRAND;
using Telemetry::LABELS_AVIF_PASP;
using Telemetry::LABELS_AVIF_PIXI;
using Telemetry::LABELS_AVIF_SEQUENCE;
using Telemetry::LABELS_AVIF_YUV_COLOR_SPACE;
static LazyLogModule sAVIFLog("AVIFDecoder");

static const LABELS_AVIF_BIT_DEPTH gColorDepthLabel[] = {
    LABELS_AVIF_BIT_DEPTH::color_8, LABELS_AVIF_BIT_DEPTH::color_10,
    LABELS_AVIF_BIT_DEPTH::color_12, LABELS_AVIF_BIT_DEPTH::color_16};

static const LABELS_AVIF_YUV_COLOR_SPACE gColorSpaceLabel[] = {
    LABELS_AVIF_YUV_COLOR_SPACE::BT601, LABELS_AVIF_YUV_COLOR_SPACE::BT709,
    LABELS_AVIF_YUV_COLOR_SPACE::BT2020, LABELS_AVIF_YUV_COLOR_SPACE::identity};
static Maybe<IntSize> GetImageSize(const Mp4parseAvifInfo& aInfo) {
  // Note this does not take cropping via CleanAperture (clap) into account
  const struct Mp4parseImageSpatialExtents* ispe = aInfo.spatial_extents;

  // Decoder::PostSize takes int32_t, but ispe contains uint32_t
  CheckedInt<int32_t> width = ispe->image_width;
  CheckedInt<int32_t> height = ispe->image_height;

  if (width.isValid() && height.isValid()) {
    return Some(IntSize{width.value(), height.value()});
  }

  return Nothing();
}
// Translate the MIAF/HEIF-based orientation transforms (imir, irot) into
// ImageLib's representation. Note that the interpretation of imir was reversed
// between HEIF (ISO 23008-12:2017) and ISO/IEC 23008-12:2017/DAmd 2. This is
// handled by mp4parse. See mp4parse::read_imir for details.
Orientation GetImageOrientation(const Mp4parseAvifInfo& aInfo) {
  // Per MIAF (ISO/IEC 23000-22:2019) § 7.3.6.7
  // These properties, if used, shall be indicated to be applied in the
  // following order: clean aperture first, then rotation, then mirror.
  // The Orientation type applies them in the same order, but with the opposite
  // rotation direction.
  const Mp4parseIrot heifRot = aInfo.image_rotation;
  const Mp4parseImir* heifMir = aInfo.image_mirror;
  Angle mozRot;
  Flip mozFlip;

  if (!heifMir) {  // No mirroring
    mozFlip = Flip::Unflipped;

    switch (heifRot) {
      case MP4PARSE_IROT_D0:
        // ⥠ UPWARDS HARPOON WITH BARB LEFT FROM BAR
        mozRot = Angle::D0;
        break;
      case MP4PARSE_IROT_D90:
        // ⥞ LEFTWARDS HARPOON WITH BARB DOWN FROM BAR
        mozRot = Angle::D270;
        break;
      case MP4PARSE_IROT_D180:
        // ⥝ DOWNWARDS HARPOON WITH BARB RIGHT FROM BAR
        mozRot = Angle::D180;
        break;
      case MP4PARSE_IROT_D270:
        // ⥛ RIGHTWARDS HARPOON WITH BARB UP FROM BAR
        mozRot = Angle::D90;
        break;
      default:
        MOZ_ASSERT_UNREACHABLE();
    }
  } else {  // With mirroring
    mozFlip = Flip::Horizontal;

    enum class HeifFlippedOrientation : uint8_t {
      IROT_D0_IMIR_V = (MP4PARSE_IROT_D0 << 1) | MP4PARSE_IMIR_LEFT_RIGHT,
      IROT_D0_IMIR_H = (MP4PARSE_IROT_D0 << 1) | MP4PARSE_IMIR_TOP_BOTTOM,
      IROT_D90_IMIR_V = (MP4PARSE_IROT_D90 << 1) | MP4PARSE_IMIR_LEFT_RIGHT,
      IROT_D90_IMIR_H = (MP4PARSE_IROT_D90 << 1) | MP4PARSE_IMIR_TOP_BOTTOM,
      IROT_D180_IMIR_V = (MP4PARSE_IROT_D180 << 1) | MP4PARSE_IMIR_LEFT_RIGHT,
      IROT_D180_IMIR_H = (MP4PARSE_IROT_D180 << 1) | MP4PARSE_IMIR_TOP_BOTTOM,
      IROT_D270_IMIR_V = (MP4PARSE_IROT_D270 << 1) | MP4PARSE_IMIR_LEFT_RIGHT,
      IROT_D270_IMIR_H = (MP4PARSE_IROT_D270 << 1) | MP4PARSE_IMIR_TOP_BOTTOM,
    };

    HeifFlippedOrientation heifO =
        HeifFlippedOrientation((heifRot << 1) | *heifMir);

    switch (heifO) {
      case HeifFlippedOrientation::IROT_D0_IMIR_V:
      case HeifFlippedOrientation::IROT_D180_IMIR_H:
        // ⥜ UPWARDS HARPOON WITH BARB RIGHT FROM BAR
        mozRot = Angle::D0;
        break;
      case HeifFlippedOrientation::IROT_D270_IMIR_V:
      case HeifFlippedOrientation::IROT_D90_IMIR_H:
        // ⥚ LEFTWARDS HARPOON WITH BARB UP FROM BAR
        mozRot = Angle::D90;
        break;
      case HeifFlippedOrientation::IROT_D180_IMIR_V:
      case HeifFlippedOrientation::IROT_D0_IMIR_H:
        // ⥡ DOWNWARDS HARPOON WITH BARB LEFT FROM BAR
        mozRot = Angle::D180;
        break;
      case HeifFlippedOrientation::IROT_D90_IMIR_V:
      case HeifFlippedOrientation::IROT_D270_IMIR_H:
        // ⥟ RIGHTWARDS HARPOON WITH BARB DOWN FROM BAR
        mozRot = Angle::D270;
        break;
      default:
        MOZ_ASSERT_UNREACHABLE();
    }
  }

  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("GetImageOrientation: (rot%d, imir(%s)) -> (Angle%d, "
           "Flip%d)",
           static_cast<int>(heifRot),
           heifMir ? (*heifMir == MP4PARSE_IMIR_LEFT_RIGHT ? "left-right"
                                                           : "top-bottom")
                   : "none",
           static_cast<int>(mozRot), static_cast<int>(mozFlip)));
  return Orientation{mozRot, mozFlip};
}
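// Worked example of the mapping above (illustrative only, not part of the
// original build): an item carrying irot = 90° plus a left-right imir packs to
// HeifFlippedOrientation::IROT_D90_IMIR_V, which the switch translates to
// Orientation{Angle::D270, Flip::Horizontal}; that is, the mirror is
// normalized to a horizontal flip and the rotation direction is inverted to
// match ImageLib's convention.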
bool AVIFDecoderStream::ReadAt(int64_t offset, void* data, size_t size,
                               size_t* bytes_read) {
  size = std::min(size, size_t(mBuffer->length() - offset));
  memcpy(data, mBuffer->begin() + offset, size);
  *bytes_read = size;
  return true;
}

bool AVIFDecoderStream::Length(int64_t* size) {
  *size =
      static_cast<int64_t>(std::min<uint64_t>(mBuffer->length(), INT64_MAX));
  return true;
}

const uint8_t* AVIFDecoderStream::GetContiguousAccess(int64_t aOffset,
                                                      size_t aSize) {
  if (aOffset + aSize >= mBuffer->length()) {
    return nullptr;
  }

  return mBuffer->begin() + aOffset;
}
AVIFParser::~AVIFParser() {
  MOZ_LOG(sAVIFLog, LogLevel::Debug, ("Destroy AVIFParser=%p", this));
}

Mp4parseStatus AVIFParser::Create(const Mp4parseIo* aIo, ByteStream* aBuffer,
                                  UniquePtr<AVIFParser>& aParserOut,
                                  bool aAllowSequences,
                                  bool aAnimateAVIFMajor) {
  MOZ_ASSERT(!aParserOut);

  UniquePtr<AVIFParser> p(new AVIFParser(aIo));
  Mp4parseStatus status = p->Init(aBuffer, aAllowSequences, aAnimateAVIFMajor);

  if (status == MP4PARSE_STATUS_OK) {
    MOZ_ASSERT(p->mParser);
    aParserOut = std::move(p);
  }

  return status;
}
uint32_t AVIFParser::GetFrameCount() {
  // Note that because this consumes the frame iterators, this can only be
  // requested for metadata decodes. Since we had to partially decode the
  // first frame to determine the size, we need to add one to the result.
  // This means we return 0 for 1 frame, 1 for 2 frames, etc.
  uint32_t frameCount = 0;

  RefPtr<MediaRawData> header = mColorSampleIter->GetNextHeader();
  if (mAlphaSampleIter) {
    header = mAlphaSampleIter->GetNextHeader();
nsAVIFDecoder::DecodeResult AVIFParser::GetImage(AVIFImage& aImage) {
  // If the AVIF is animated, get next frame and yield if sequence is not done.
  if (IsAnimated()) {
    aImage.mColorImage = mColorSampleIter->GetNext();
    if (!aImage.mColorImage) {
      return AsVariant(nsAVIFDecoder::NonDecoderResult::NoSamples);
    }

    aImage.mFrameNum = mFrameNum++;
    int64_t durationMs = aImage.mColorImage->mDuration.ToMilliseconds();
    aImage.mDuration = FrameTimeout::FromRawMilliseconds(
        static_cast<int32_t>(std::min<int64_t>(durationMs, INT32_MAX)));

    if (mAlphaSampleIter) {
      aImage.mAlphaImage = mAlphaSampleIter->GetNext();
      if (!aImage.mAlphaImage) {
        return AsVariant(nsAVIFDecoder::NonDecoderResult::NoSamples);
      }
    }

    bool hasNext = mColorSampleIter->HasNext();
    if (mAlphaSampleIter && (hasNext != mAlphaSampleIter->HasNext())) {
      MOZ_LOG(
          sAVIFLog, LogLevel::Warning,
          ("[this=%p] The %s sequence ends before frame %d, aborting decode.",
           this, hasNext ? "alpha" : "color", mFrameNum));
      return AsVariant(nsAVIFDecoder::NonDecoderResult::NoSamples);
    }

    if (!hasNext) {
      return AsVariant(nsAVIFDecoder::NonDecoderResult::Complete);
    }
    return AsVariant(nsAVIFDecoder::NonDecoderResult::OutputAvailable);
  }

  if (!mInfo.has_primary_item) {
    return AsVariant(nsAVIFDecoder::NonDecoderResult::NoSamples);
  }

  // If the AVIF is not animated, get the pitm image and return Complete.
  Mp4parseAvifImage image = {};
  Mp4parseStatus status = mp4parse_avif_get_image(mParser.get(), &image);
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] mp4parse_avif_get_image -> %d; primary_item length: "
           "%zu, alpha_item length: %zu",
           this, status, image.primary_image.length, image.alpha_image.length));
  if (status != MP4PARSE_STATUS_OK) {
    return AsVariant(status);
  }

  // Ideally has_primary_item and no errors would guarantee primary_image.data
  // exists, but it doesn't, so we check it too.
  if (!image.primary_image.data) {
    return AsVariant(nsAVIFDecoder::NonDecoderResult::NoSamples);
  }

  RefPtr<MediaRawData> colorImage =
      new MediaRawData(image.primary_image.data, image.primary_image.length);
  RefPtr<MediaRawData> alphaImage = nullptr;

  if (image.alpha_image.length) {
    alphaImage =
        new MediaRawData(image.alpha_image.data, image.alpha_image.length);
  }

  aImage.mFrameNum = 0;
  aImage.mDuration = FrameTimeout::Forever();
  aImage.mColorImage = colorImage;
  aImage.mAlphaImage = alphaImage;
  return AsVariant(nsAVIFDecoder::NonDecoderResult::Complete);
}
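// Summary of GetImage's contract as implemented above: for animated AVIFs it
// yields OutputAvailable for each decoded frame and Complete once the sample
// iterators are exhausted; for still images it returns the primary (pitm) item
// with an infinite FrameTimeout and Complete immediately. NoSamples signals
// missing or truncated sample data on either path.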
AVIFParser::AVIFParser(const Mp4parseIo* aIo) : mIo(aIo) {
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("Create AVIFParser=%p, image.avif.compliance_strictness: %d", this,
           StaticPrefs::image_avif_compliance_strictness()));
}
static Mp4parseStatus CreateSampleIterator(
    Mp4parseAvifParser* aParser, ByteStream* aBuffer, uint32_t trackID,
    UniquePtr<SampleIterator>& aIteratorOut) {
  Mp4parseByteData data;
  uint64_t timescale;
  Mp4parseStatus rv =
      mp4parse_avif_get_indice_table(aParser, trackID, &data, &timescale);
  if (rv != MP4PARSE_STATUS_OK) {
    return rv;
  }

  UniquePtr<IndiceWrapper> wrapper = MakeUnique<IndiceWrapper>(data);
  RefPtr<MP4SampleIndex> index = new MP4SampleIndex(
      *wrapper, aBuffer, trackID, false, AssertedCast<int32_t>(timescale));
  aIteratorOut = MakeUnique<SampleIterator>(index);
  return MP4PARSE_STATUS_OK;
}
Mp4parseStatus AVIFParser::Init(ByteStream* aBuffer, bool aAllowSequences,
                                bool aAnimateAVIFMajor) {
#define CHECK_MP4PARSE_STATUS(v)   \
  if ((v) != MP4PARSE_STATUS_OK) { \
    return (v);                    \
  }

  MOZ_ASSERT(!mParser);

  Mp4parseAvifParser* parser = nullptr;
  Mp4parseStatus status =
      mp4parse_avif_new(mIo,
                        static_cast<enum Mp4parseStrictness>(
                            StaticPrefs::image_avif_compliance_strictness()),
                        &parser);
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] mp4parse_avif_new status: %d", this, status));
  CHECK_MP4PARSE_STATUS(status);

  mParser.reset(parser);

  status = mp4parse_avif_get_info(mParser.get(), &mInfo);
  CHECK_MP4PARSE_STATUS(status);

  bool useSequence = mInfo.has_sequence;
  if (useSequence) {
    if (!aAllowSequences) {
      MOZ_LOG(sAVIFLog, LogLevel::Debug,
              ("[this=%p] AVIF sequences disabled", this));
      useSequence = false;
    } else if (!aAnimateAVIFMajor &&
               !!memcmp(mInfo.major_brand, "avis", sizeof(mInfo.major_brand))) {
      useSequence = false;
      MOZ_LOG(sAVIFLog, LogLevel::Debug,
              ("[this=%p] AVIF prefers still image", this));
    }
  }

  if (useSequence) {
    status = CreateSampleIterator(parser, aBuffer, mInfo.color_track_id,
                                  mColorSampleIter);
    CHECK_MP4PARSE_STATUS(status);
    MOZ_ASSERT(mColorSampleIter);

    if (mInfo.alpha_track_id) {
      status = CreateSampleIterator(parser, aBuffer, mInfo.alpha_track_id,
                                    mAlphaSampleIter);
      CHECK_MP4PARSE_STATUS(status);
      MOZ_ASSERT(mAlphaSampleIter);
    }
  }

  return status;
}

bool AVIFParser::IsAnimated() const { return !!mColorSampleIter; }
// The gfx::YUVColorSpace value is only used in the conversion from YUV -> RGB.
// Typically this comes directly from the CICP matrix_coefficients value, but
// certain values require additionally considering the colour_primaries value.
// See `gfxUtils::CicpToColorSpace` for details. We return a gfx::YUVColorSpace
// rather than CICP::MatrixCoefficients, since that's what
// `gfx::ConvertYCbCrATo[A]RGB` uses. `aBitstreamColorSpaceFunc` abstracts the
// fact that different decoder libraries require different methods for
// extracting the CICP values from the AV1 bitstream, and we don't want to do
// that work unnecessarily because, in addition to the wasted effort, it would
// make the logging more confusing.
template <typename F>
static gfx::YUVColorSpace GetAVIFColorSpace(
    const Mp4parseNclxColourInformation* aNclx, F&& aBitstreamColorSpaceFunc) {
  return ToMaybe(aNclx)
      .map([=](const auto& nclx) {
        return gfxUtils::CicpToColorSpace(
            static_cast<CICP::MatrixCoefficients>(nclx.matrix_coefficients),
            static_cast<CICP::ColourPrimaries>(nclx.colour_primaries),
            sAVIFLog);
      })
      .valueOrFrom(aBitstreamColorSpaceFunc)
      .valueOr(gfx::YUVColorSpace::BT601);
}
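// Illustrative usage only (see the Dav1d and AOM conversion functions below
// for the real call sites): the caller passes a lambda that is evaluated
// lazily, so the AV1 bitstream is inspected only when the colr box is absent
// or does not yield a usable value, e.g.
//
//   data->mYUVColorSpace = GetAVIFColorSpace(aNclx, [&]() {
//     return DAV1DDecoder::GetColorSpace(*aPicture, sAVIFLog);
//   });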
static gfx::ColorRange GetAVIFColorRange(
    const Mp4parseNclxColourInformation* aNclx,
    const gfx::ColorRange av1ColorRange) {
  return ToMaybe(aNclx)
      .map([=](const auto& nclx) {
        return aNclx->full_range_flag ? gfx::ColorRange::FULL
                                      : gfx::ColorRange::LIMITED;
      })
      .valueOr(av1ColorRange);
}
void AVIFDecodedData::SetCicpValues(
    const Mp4parseNclxColourInformation* aNclx,
    const gfx::CICP::ColourPrimaries aAv1ColourPrimaries,
    const gfx::CICP::TransferCharacteristics aAv1TransferCharacteristics,
    const gfx::CICP::MatrixCoefficients aAv1MatrixCoefficients) {
  auto cp = CICP::ColourPrimaries::CP_UNSPECIFIED;
  auto tc = CICP::TransferCharacteristics::TC_UNSPECIFIED;
  auto mc = CICP::MatrixCoefficients::MC_UNSPECIFIED;

  if (aNclx) {
    cp = static_cast<CICP::ColourPrimaries>(aNclx->colour_primaries);
    tc = static_cast<CICP::TransferCharacteristics>(
        aNclx->transfer_characteristics);
    mc = static_cast<CICP::MatrixCoefficients>(aNclx->matrix_coefficients);
  }

  if (cp == CICP::ColourPrimaries::CP_UNSPECIFIED) {
    if (aAv1ColourPrimaries != CICP::ColourPrimaries::CP_UNSPECIFIED) {
      cp = aAv1ColourPrimaries;
      MOZ_LOG(sAVIFLog, LogLevel::Info,
              ("Unspecified colour_primaries value specified in colr box, "
               "using AV1 sequence header (%hhu)",
               cp));
    } else {
      cp = CICP::ColourPrimaries::CP_BT709;
      MOZ_LOG(sAVIFLog, LogLevel::Warning,
              ("Unspecified colour_primaries value specified in colr box "
               "or AV1 sequence header, using fallback value (%hhu)",
               cp));
    }
  } else if (cp != aAv1ColourPrimaries) {
    MOZ_LOG(sAVIFLog, LogLevel::Warning,
            ("colour_primaries mismatch: colr box = %hhu, AV1 "
             "sequence header = %hhu, using colr box",
             cp, aAv1ColourPrimaries));
  }

  if (tc == CICP::TransferCharacteristics::TC_UNSPECIFIED) {
    if (aAv1TransferCharacteristics !=
        CICP::TransferCharacteristics::TC_UNSPECIFIED) {
      tc = aAv1TransferCharacteristics;
      MOZ_LOG(sAVIFLog, LogLevel::Info,
              ("Unspecified transfer_characteristics value specified in "
               "colr box, using AV1 sequence header (%hhu)",
               tc));
    } else {
      tc = CICP::TransferCharacteristics::TC_SRGB;
      MOZ_LOG(sAVIFLog, LogLevel::Warning,
              ("Unspecified transfer_characteristics value specified in "
               "colr box or AV1 sequence header, using fallback value (%hhu)",
               tc));
    }
  } else if (tc != aAv1TransferCharacteristics) {
    MOZ_LOG(sAVIFLog, LogLevel::Warning,
            ("transfer_characteristics mismatch: colr box = %hhu, "
             "AV1 sequence header = %hhu, using colr box",
             tc, aAv1TransferCharacteristics));
  }

  if (mc == CICP::MatrixCoefficients::MC_UNSPECIFIED) {
    if (aAv1MatrixCoefficients != CICP::MatrixCoefficients::MC_UNSPECIFIED) {
      mc = aAv1MatrixCoefficients;
      MOZ_LOG(sAVIFLog, LogLevel::Info,
              ("Unspecified matrix_coefficients value specified in "
               "colr box, using AV1 sequence header (%hhu)",
               mc));
    } else {
      mc = CICP::MatrixCoefficients::MC_BT601;
      MOZ_LOG(sAVIFLog, LogLevel::Warning,
              ("Unspecified matrix_coefficients value specified in "
               "colr box or AV1 sequence header, using fallback value (%hhu)",
               mc));
    }
  } else if (mc != aAv1MatrixCoefficients) {
    MOZ_LOG(sAVIFLog, LogLevel::Warning,
            ("matrix_coefficients mismatch: colr box = %hhu, "
             "AV1 sequence header = %hhu, using colr box",
             mc, aAv1MatrixCoefficients));
  }

  mColourPrimaries = cp;
  mTransferCharacteristics = tc;
  mMatrixCoefficients = mc;
}
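// Precedence implemented above, summarized for reference: a concrete value in
// the colr (nclx) box always wins; an unspecified colr value falls back to the
// AV1 sequence header; if both are unspecified, the hard-coded defaults are
// CP_BT709, TC_SRGB and MC_BT601. Mismatches between the two sources are only
// logged, and the colr box still takes priority.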
class Dav1dDecoder final : AVIFDecoderInterface {
 public:
  ~Dav1dDecoder() {
    MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Destroy Dav1dDecoder=%p", this));

    if (mColorContext) {
      dav1d_close(&mColorContext);
      MOZ_ASSERT(!mColorContext);
    }

    if (mAlphaContext) {
      dav1d_close(&mAlphaContext);
      MOZ_ASSERT(!mAlphaContext);
    }
  }

  static DecodeResult Create(UniquePtr<AVIFDecoderInterface>& aDecoder,
                             bool aHasAlpha) {
    UniquePtr<Dav1dDecoder> d(new Dav1dDecoder());
    Dav1dResult r = d->Init(aHasAlpha);
    if (r == 0) {
      aDecoder.reset(d.release());
    }
    return AsVariant(r);
  }

  DecodeResult Decode(bool aShouldSendTelemetry,
                      const Mp4parseAvifInfo& aAVIFInfo,
                      const AVIFImage& aSamples) override {
    MOZ_ASSERT(mColorContext);
    MOZ_ASSERT(!mDecodedData);
    MOZ_ASSERT(aSamples.mColorImage);

    MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("[this=%p] Decoding color", this));

    OwnedDav1dPicture colorPic = OwnedDav1dPicture(new Dav1dPicture());
    OwnedDav1dPicture alphaPic = nullptr;
    Dav1dResult r = GetPicture(*mColorContext, *aSamples.mColorImage,
                               colorPic.get(), aShouldSendTelemetry);
    if (r != 0) {
      return AsVariant(r);
    }

    if (aSamples.mAlphaImage) {
      MOZ_ASSERT(mAlphaContext);
      MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("[this=%p] Decoding alpha", this));

      alphaPic = OwnedDav1dPicture(new Dav1dPicture());
      r = GetPicture(*mAlphaContext, *aSamples.mAlphaImage, alphaPic.get(),
                     aShouldSendTelemetry);
      if (r != 0) {
        return AsVariant(r);
      }

      // Per § 4 of the AVIF spec
      // https://aomediacodec.github.io/av1-avif/#auxiliary-images: An AV1
      // Alpha Image Item […] shall be encoded with the same bit depth as the
      // associated master AV1 Image Item
      if (colorPic->p.bpc != alphaPic->p.bpc) {
        return AsVariant(NonDecoderResult::AlphaYColorDepthMismatch);
      }

      if (colorPic->stride[0] != alphaPic->stride[0]) {
        return AsVariant(NonDecoderResult::AlphaYSizeMismatch);
      }
    }

    MOZ_ASSERT_IF(!alphaPic, !aAVIFInfo.premultiplied_alpha);
    mDecodedData = Dav1dPictureToDecodedData(
        aAVIFInfo.nclx_colour_information, std::move(colorPic),
        std::move(alphaPic), aAVIFInfo.premultiplied_alpha);

    return AsVariant(r);
  }

 private:
  explicit Dav1dDecoder() {
    MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Create Dav1dDecoder=%p", this));
  }

  Dav1dResult Init(bool aHasAlpha) {
    MOZ_ASSERT(!mColorContext);
    MOZ_ASSERT(!mAlphaContext);

    Dav1dSettings settings;
    dav1d_default_settings(&settings);
    settings.all_layers = 0;
    settings.max_frame_delay = 1;
    // TODO: tune settings a la DAV1DDecoder for AV1 (Bug 1681816)

    Dav1dResult r = dav1d_open(&mColorContext, &settings);
    if (r != 0) {
      return r;
    }
    MOZ_ASSERT(mColorContext);

    if (aHasAlpha) {
      r = dav1d_open(&mAlphaContext, &settings);
      if (r != 0) {
        return r;
      }
      MOZ_ASSERT(mAlphaContext);
    }

    return 0;
  }

  static Dav1dResult GetPicture(Dav1dContext& aContext,
                                const MediaRawData& aBytes,
                                Dav1dPicture* aPicture,
                                bool aShouldSendTelemetry) {
    MOZ_ASSERT(aPicture);

    Dav1dData dav1dData;
    Dav1dResult r = dav1d_data_wrap(&dav1dData, aBytes.Data(), aBytes.Size(),
                                    Dav1dFreeCallback_s, nullptr);

    MOZ_LOG(
        sAVIFLog, r == 0 ? LogLevel::Verbose : LogLevel::Error,
        ("dav1d_data_wrap(%p, %zu) -> %d", dav1dData.data, dav1dData.sz, r));

    if (r != 0) {
      return r;
    }

    r = dav1d_send_data(&aContext, &dav1dData);

    MOZ_LOG(sAVIFLog, r == 0 ? LogLevel::Debug : LogLevel::Error,
            ("dav1d_send_data -> %d", r));

    if (r != 0) {
      return r;
    }

    r = dav1d_get_picture(&aContext, aPicture);

    MOZ_LOG(sAVIFLog, r == 0 ? LogLevel::Debug : LogLevel::Error,
            ("dav1d_get_picture -> %d", r));

    // We already have the AVIF_DECODE_RESULT histogram to record all the
    // successful calls, so only bother recording what type of errors we see
    // via events. Unlike AOM, dav1d returns an int, not an enum, so this is
    // the easiest way to see if we're getting unexpected behavior to
    // investigate.
    if (aShouldSendTelemetry && r != 0) {
      mozilla::glean::avif::Dav1dGetPictureReturnValueExtra extra = {
          .value = Some(nsPrintfCString("%d", r)),
      };
      mozilla::glean::avif::dav1d_get_picture_return_value.Record(Some(extra));
    }

    return r;
  }

  // A dummy callback for dav1d_data_wrap
  static void Dav1dFreeCallback_s(const uint8_t* aBuf, void* aCookie) {
    // The buf is managed by the mParser inside Dav1dDecoder itself. Do
    // nothing here.
  }

  static UniquePtr<AVIFDecodedData> Dav1dPictureToDecodedData(
      const Mp4parseNclxColourInformation* aNclx, OwnedDav1dPicture aPicture,
      OwnedDav1dPicture aAlphaPlane, bool aPremultipliedAlpha);

  Dav1dContext* mColorContext = nullptr;
  Dav1dContext* mAlphaContext = nullptr;
};
OwnedAOMImage::OwnedAOMImage() {
  MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Create OwnedAOMImage=%p", this));
}

OwnedAOMImage::~OwnedAOMImage() {
  MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Destroy OwnedAOMImage=%p", this));
}

bool OwnedAOMImage::CloneFrom(aom_image_t* aImage, bool aIsAlpha) {
  MOZ_ASSERT(!mBuffer);

  uint8_t* srcY = aImage->planes[AOM_PLANE_Y];
  int yStride = aImage->stride[AOM_PLANE_Y];
  int yHeight = aom_img_plane_height(aImage, AOM_PLANE_Y);
  size_t yBufSize = yStride * yHeight;

  // If aImage is an alpha plane, the data is located in the Y channel.
  if (aIsAlpha) {
    mBuffer = MakeUniqueFallible<uint8_t[]>(yBufSize);
    if (!mBuffer) {
      return false;
    }
    uint8_t* destY = mBuffer.get();
    memcpy(destY, srcY, yBufSize);
    mImage.emplace(*aImage);
    mImage->planes[AOM_PLANE_Y] = destY;

    return true;
  }

  uint8_t* srcCb = aImage->planes[AOM_PLANE_U];
  int cbStride = aImage->stride[AOM_PLANE_U];
  int cbHeight = aom_img_plane_height(aImage, AOM_PLANE_U);
  size_t cbBufSize = cbStride * cbHeight;

  uint8_t* srcCr = aImage->planes[AOM_PLANE_V];
  int crStride = aImage->stride[AOM_PLANE_V];
  int crHeight = aom_img_plane_height(aImage, AOM_PLANE_V);
  size_t crBufSize = crStride * crHeight;

  mBuffer = MakeUniqueFallible<uint8_t[]>(yBufSize + cbBufSize + crBufSize);
  if (!mBuffer) {
    return false;
  }
  uint8_t* destY = mBuffer.get();
  uint8_t* destCb = destY + yBufSize;
  uint8_t* destCr = destCb + cbBufSize;

  memcpy(destY, srcY, yBufSize);
  memcpy(destCb, srcCb, cbBufSize);
  memcpy(destCr, srcCr, crBufSize);

  mImage.emplace(*aImage);
  mImage->planes[AOM_PLANE_Y] = destY;
  mImage->planes[AOM_PLANE_U] = destCb;
  mImage->planes[AOM_PLANE_V] = destCr;

  return true;
}

OwnedAOMImage* OwnedAOMImage::CopyFrom(aom_image_t* aImage, bool aIsAlpha) {
  UniquePtr<OwnedAOMImage> img(new OwnedAOMImage());
  if (!img->CloneFrom(aImage, aIsAlpha)) {
    return nullptr;
  }
  return img.release();
}
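// Illustrative usage only: callers take ownership of the raw pointer returned
// by CopyFrom, typically by handing it straight to a UniquePtr, e.g.
//
//   mOwnedImage.reset(OwnedAOMImage::CopyFrom(aomImg, /* aIsAlpha */ false));
//
// A nullptr return indicates the fallible buffer allocation failed.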
class AOMDecoder final : AVIFDecoderInterface {
 public:
  ~AOMDecoder() {
    MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Destroy AOMDecoder=%p", this));

    if (mColorContext.isSome()) {
      aom_codec_err_t r = aom_codec_destroy(mColorContext.ptr());
      MOZ_LOG(sAVIFLog, LogLevel::Debug,
              ("[this=%p] aom_codec_destroy -> %d", this, r));
    }

    if (mAlphaContext.isSome()) {
      aom_codec_err_t r = aom_codec_destroy(mAlphaContext.ptr());
      MOZ_LOG(sAVIFLog, LogLevel::Debug,
              ("[this=%p] aom_codec_destroy -> %d", this, r));
    }
  }

  static DecodeResult Create(UniquePtr<AVIFDecoderInterface>& aDecoder,
                             bool aHasAlpha) {
    UniquePtr<AOMDecoder> d(new AOMDecoder());
    aom_codec_err_t e = d->Init(aHasAlpha);
    if (e == AOM_CODEC_OK) {
      aDecoder.reset(d.release());
    }
    return AsVariant(AOMResult(e));
  }

  DecodeResult Decode(bool aShouldSendTelemetry,
                      const Mp4parseAvifInfo& aAVIFInfo,
                      const AVIFImage& aSamples) override {
    MOZ_ASSERT(mColorContext.isSome());
    MOZ_ASSERT(!mDecodedData);
    MOZ_ASSERT(aSamples.mColorImage);

    aom_image_t* aomImg = nullptr;
    DecodeResult r = GetImage(*mColorContext, *aSamples.mColorImage, &aomImg,
                              aShouldSendTelemetry);
    if (!IsDecodeSuccess(r)) {
      return r;
    }

    // The aomImg will be released in the next GetImage call (aom_codec_decode
    // actually). GetImage could be called again immediately if parsedImg
    // contains alpha data. Therefore, we need to copy the image and manage it
    // in AOMDecoder itself.
    OwnedAOMImage* clonedImg = OwnedAOMImage::CopyFrom(aomImg, false);
    if (!clonedImg) {
      return AsVariant(NonDecoderResult::OutOfMemory);
    }
    mOwnedImage.reset(clonedImg);

    if (aSamples.mAlphaImage) {
      MOZ_ASSERT(mAlphaContext.isSome());

      aom_image_t* alphaImg = nullptr;
      r = GetImage(*mAlphaContext, *aSamples.mAlphaImage, &alphaImg,
                   aShouldSendTelemetry);
      if (!IsDecodeSuccess(r)) {
        return r;
      }
      MOZ_ASSERT(alphaImg);

      OwnedAOMImage* clonedAlphaImg = OwnedAOMImage::CopyFrom(alphaImg, true);
      if (!clonedAlphaImg) {
        return AsVariant(NonDecoderResult::OutOfMemory);
      }
      mOwnedAlphaPlane.reset(clonedAlphaImg);

      // Per § 4 of the AVIF spec
      // https://aomediacodec.github.io/av1-avif/#auxiliary-images: An AV1
      // Alpha Image Item […] shall be encoded with the same bit depth as the
      // associated master AV1 Image Item
      MOZ_ASSERT(mOwnedImage->GetImage() && mOwnedAlphaPlane->GetImage());
      if (mOwnedImage->GetImage()->bit_depth !=
          mOwnedAlphaPlane->GetImage()->bit_depth) {
        return AsVariant(NonDecoderResult::AlphaYColorDepthMismatch);
      }

      if (mOwnedImage->GetImage()->stride[AOM_PLANE_Y] !=
          mOwnedAlphaPlane->GetImage()->stride[AOM_PLANE_Y]) {
        return AsVariant(NonDecoderResult::AlphaYSizeMismatch);
      }
    }

    MOZ_ASSERT_IF(!mOwnedAlphaPlane, !aAVIFInfo.premultiplied_alpha);
    mDecodedData = AOMImageToToDecodedData(
        aAVIFInfo.nclx_colour_information, std::move(mOwnedImage),
        std::move(mOwnedAlphaPlane), aAVIFInfo.premultiplied_alpha);

    return r;
  }

 private:
  explicit AOMDecoder() {
    MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Create AOMDecoder=%p", this));
  }

  aom_codec_err_t Init(bool aHasAlpha) {
    MOZ_ASSERT(mColorContext.isNothing());
    MOZ_ASSERT(mAlphaContext.isNothing());

    aom_codec_iface_t* iface = aom_codec_av1_dx();

    // Init color decoder context
    mColorContext.emplace();
    aom_codec_err_t r = aom_codec_dec_init(
        mColorContext.ptr(), iface, /* cfg = */ nullptr, /* flags = */ 0);

    MOZ_LOG(sAVIFLog, r == AOM_CODEC_OK ? LogLevel::Verbose : LogLevel::Error,
            ("[this=%p] color decoder: aom_codec_dec_init -> %d, name = %s",
             this, r, mColorContext->name));

    if (r != AOM_CODEC_OK) {
      mColorContext.reset();
      return r;
    }

    if (aHasAlpha) {
      // Init alpha decoder context
      mAlphaContext.emplace();
      r = aom_codec_dec_init(mAlphaContext.ptr(), iface, /* cfg = */ nullptr,
                             /* flags = */ 0);

      MOZ_LOG(sAVIFLog, r == AOM_CODEC_OK ? LogLevel::Verbose : LogLevel::Error,
              ("[this=%p] alpha decoder: aom_codec_dec_init -> %d, name = %s",
               this, r, mAlphaContext->name));

      if (r != AOM_CODEC_OK) {
        mAlphaContext.reset();
        return r;
      }
    }

    return r;
  }

  static DecodeResult GetImage(aom_codec_ctx_t& aContext,
                               const MediaRawData& aData, aom_image_t** aImage,
                               bool aShouldSendTelemetry) {
    aom_codec_err_t r =
        aom_codec_decode(&aContext, aData.Data(), aData.Size(), nullptr);

    MOZ_LOG(sAVIFLog, r == AOM_CODEC_OK ? LogLevel::Verbose : LogLevel::Error,
            ("aom_codec_decode -> %d", r));

    if (aShouldSendTelemetry) {
      switch (r) {
        case AOM_CODEC_OK:
          // No need to record any telemetry for the common case
          break;
        case AOM_CODEC_ERROR:
          AccumulateCategorical(LABELS_AVIF_AOM_DECODE_ERROR::error);
          mozilla::glean::avif::aom_decode_error
              .EnumGet(glean::avif::AomDecodeErrorLabel::eError)
              .Add();
          break;
        case AOM_CODEC_MEM_ERROR:
          AccumulateCategorical(LABELS_AVIF_AOM_DECODE_ERROR::mem_error);
          mozilla::glean::avif::aom_decode_error
              .EnumGet(glean::avif::AomDecodeErrorLabel::eMemError)
              .Add();
          break;
        case AOM_CODEC_ABI_MISMATCH:
          AccumulateCategorical(LABELS_AVIF_AOM_DECODE_ERROR::abi_mismatch);
          mozilla::glean::avif::aom_decode_error
              .EnumGet(glean::avif::AomDecodeErrorLabel::eAbiMismatch)
              .Add();
          break;
        case AOM_CODEC_INCAPABLE:
          AccumulateCategorical(LABELS_AVIF_AOM_DECODE_ERROR::incapable);
          mozilla::glean::avif::aom_decode_error
              .EnumGet(glean::avif::AomDecodeErrorLabel::eIncapable)
              .Add();
          break;
        case AOM_CODEC_UNSUP_BITSTREAM:
          AccumulateCategorical(LABELS_AVIF_AOM_DECODE_ERROR::unsup_bitstream);
          mozilla::glean::avif::aom_decode_error
              .EnumGet(glean::avif::AomDecodeErrorLabel::eUnsupBitstream)
              .Add();
          break;
        case AOM_CODEC_UNSUP_FEATURE:
          AccumulateCategorical(LABELS_AVIF_AOM_DECODE_ERROR::unsup_feature);
          mozilla::glean::avif::aom_decode_error
              .EnumGet(glean::avif::AomDecodeErrorLabel::eUnsupFeature)
              .Add();
          break;
        case AOM_CODEC_CORRUPT_FRAME:
          AccumulateCategorical(LABELS_AVIF_AOM_DECODE_ERROR::corrupt_frame);
          mozilla::glean::avif::aom_decode_error
              .EnumGet(glean::avif::AomDecodeErrorLabel::eCorruptFrame)
              .Add();
          break;
        case AOM_CODEC_INVALID_PARAM:
          AccumulateCategorical(LABELS_AVIF_AOM_DECODE_ERROR::invalid_param);
          mozilla::glean::avif::aom_decode_error
              .EnumGet(glean::avif::AomDecodeErrorLabel::eInvalidParam)
              .Add();
          break;
        default:
          MOZ_ASSERT_UNREACHABLE(
              "Unknown aom_codec_err_t value from aom_codec_decode");
      }
    }

    if (r != AOM_CODEC_OK) {
      return AsVariant(AOMResult(r));
    }

    aom_codec_iter_t iter = nullptr;
    aom_image_t* img = aom_codec_get_frame(&aContext, &iter);

    MOZ_LOG(sAVIFLog, img == nullptr ? LogLevel::Error : LogLevel::Verbose,
            ("aom_codec_get_frame -> %p", img));

    if (img == nullptr) {
      return AsVariant(AOMResult(NonAOMCodecError::NoFrame));
    }

    const CheckedInt<int> decoded_width = img->d_w;
    const CheckedInt<int> decoded_height = img->d_h;

    if (!decoded_height.isValid() || !decoded_width.isValid()) {
      MOZ_LOG(sAVIFLog, LogLevel::Debug,
              ("image dimensions can't be stored in int: d_w: %u, "
               "d_h: %u",
               img->d_w, img->d_h));
      return AsVariant(AOMResult(NonAOMCodecError::SizeOverflow));
    }

    *aImage = img;
    return AsVariant(AOMResult(r));
  }

  static UniquePtr<AVIFDecodedData> AOMImageToToDecodedData(
      const Mp4parseNclxColourInformation* aNclx,
      UniquePtr<OwnedAOMImage> aImage, UniquePtr<OwnedAOMImage> aAlphaPlane,
      bool aPremultipliedAlpha);

  Maybe<aom_codec_ctx_t> mColorContext;
  Maybe<aom_codec_ctx_t> mAlphaContext;
  UniquePtr<OwnedAOMImage> mOwnedImage;
  UniquePtr<OwnedAOMImage> mOwnedAlphaPlane;
};

UniquePtr<AVIFDecodedData> Dav1dDecoder::Dav1dPictureToDecodedData(
    const Mp4parseNclxColourInformation* aNclx, OwnedDav1dPicture aPicture,
    OwnedDav1dPicture aAlphaPlane, bool aPremultipliedAlpha) {
  MOZ_ASSERT(aPicture);

  static_assert(std::is_same<int, decltype(aPicture->p.w)>::value);
  static_assert(std::is_same<int, decltype(aPicture->p.h)>::value);

  UniquePtr<AVIFDecodedData> data = MakeUnique<AVIFDecodedData>();

  data->mRenderSize.emplace(aPicture->frame_hdr->render_width,
                            aPicture->frame_hdr->render_height);

  data->mYChannel = static_cast<uint8_t*>(aPicture->data[0]);
  data->mYStride = aPicture->stride[0];
  data->mYSkip = aPicture->stride[0] - aPicture->p.w;
  data->mCbChannel = static_cast<uint8_t*>(aPicture->data[1]);
  data->mCrChannel = static_cast<uint8_t*>(aPicture->data[2]);
  data->mCbCrStride = aPicture->stride[1];

  switch (aPicture->p.layout) {
    case DAV1D_PIXEL_LAYOUT_I400:  // Monochrome, so no Cb or Cr channels
      break;
    case DAV1D_PIXEL_LAYOUT_I420:
      data->mChromaSubsampling = ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
      break;
    case DAV1D_PIXEL_LAYOUT_I422:
      data->mChromaSubsampling = ChromaSubsampling::HALF_WIDTH;
      break;
    case DAV1D_PIXEL_LAYOUT_I444:
      break;
    default:
      MOZ_ASSERT_UNREACHABLE("Unknown pixel layout");
  }

  data->mCbSkip = aPicture->stride[1] - aPicture->p.w;
  data->mCrSkip = aPicture->stride[1] - aPicture->p.w;
  data->mPictureRect = IntRect(0, 0, aPicture->p.w, aPicture->p.h);
  data->mStereoMode = StereoMode::MONO;
  data->mColorDepth = ColorDepthForBitDepth(aPicture->p.bpc);

  MOZ_ASSERT(aPicture->p.bpc == BitDepthForColorDepth(data->mColorDepth));

  data->mYUVColorSpace = GetAVIFColorSpace(aNclx, [&]() {
    MOZ_LOG(sAVIFLog, LogLevel::Info,
            ("YUVColorSpace cannot be determined from colr box, using AV1 "
             "sequence header"));
    return DAV1DDecoder::GetColorSpace(*aPicture, sAVIFLog);
  });

  auto av1ColourPrimaries = CICP::ColourPrimaries::CP_UNSPECIFIED;
  auto av1TransferCharacteristics =
      CICP::TransferCharacteristics::TC_UNSPECIFIED;
  auto av1MatrixCoefficients = CICP::MatrixCoefficients::MC_UNSPECIFIED;

  MOZ_ASSERT(aPicture->seq_hdr);
  auto& seq_hdr = *aPicture->seq_hdr;

  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("seq_hdr.color_description_present: %d",
           seq_hdr.color_description_present));
  if (seq_hdr.color_description_present) {
    av1ColourPrimaries = static_cast<CICP::ColourPrimaries>(seq_hdr.pri);
    av1TransferCharacteristics =
        static_cast<CICP::TransferCharacteristics>(seq_hdr.trc);
    av1MatrixCoefficients = static_cast<CICP::MatrixCoefficients>(seq_hdr.mtrx);
  }

  data->SetCicpValues(aNclx, av1ColourPrimaries, av1TransferCharacteristics,
                      av1MatrixCoefficients);

  gfx::ColorRange av1ColorRange =
      seq_hdr.color_range ? gfx::ColorRange::FULL : gfx::ColorRange::LIMITED;
  data->mColorRange = GetAVIFColorRange(aNclx, av1ColorRange);

  auto colorPrimaries =
      gfxUtils::CicpToColorPrimaries(data->mColourPrimaries, sAVIFLog);
  if (colorPrimaries.isSome()) {
    data->mColorPrimaries = *colorPrimaries;
  }

  if (aAlphaPlane) {
    MOZ_ASSERT(aAlphaPlane->stride[0] == data->mYStride);
    data->mAlpha.emplace();
    data->mAlpha->mChannel = static_cast<uint8_t*>(aAlphaPlane->data[0]);
    data->mAlpha->mSize = gfx::IntSize(aAlphaPlane->p.w, aAlphaPlane->p.h);
    data->mAlpha->mPremultiplied = aPremultipliedAlpha;
  }

  data->mColorDav1d = std::move(aPicture);
  data->mAlphaDav1d = std::move(aAlphaPlane);

  return data;
}

UniquePtr<AVIFDecodedData> AOMDecoder::AOMImageToToDecodedData(
    const Mp4parseNclxColourInformation* aNclx, UniquePtr<OwnedAOMImage> aImage,
    UniquePtr<OwnedAOMImage> aAlphaPlane, bool aPremultipliedAlpha) {
  aom_image_t* colorImage = aImage->GetImage();
  aom_image_t* alphaImage = aAlphaPlane ? aAlphaPlane->GetImage() : nullptr;

  MOZ_ASSERT(colorImage);
  MOZ_ASSERT(colorImage->stride[AOM_PLANE_Y] >=
             aom_img_plane_width(colorImage, AOM_PLANE_Y));
  MOZ_ASSERT(colorImage->stride[AOM_PLANE_U] ==
             colorImage->stride[AOM_PLANE_V]);
  MOZ_ASSERT(colorImage->stride[AOM_PLANE_U] >=
             aom_img_plane_width(colorImage, AOM_PLANE_U));
  MOZ_ASSERT(colorImage->stride[AOM_PLANE_V] >=
             aom_img_plane_width(colorImage, AOM_PLANE_V));
  MOZ_ASSERT(aom_img_plane_width(colorImage, AOM_PLANE_U) ==
             aom_img_plane_width(colorImage, AOM_PLANE_V));
  MOZ_ASSERT(aom_img_plane_height(colorImage, AOM_PLANE_U) ==
             aom_img_plane_height(colorImage, AOM_PLANE_V));

  UniquePtr<AVIFDecodedData> data = MakeUnique<AVIFDecodedData>();

  data->mRenderSize.emplace(colorImage->r_w, colorImage->r_h);

  data->mYChannel = colorImage->planes[AOM_PLANE_Y];
  data->mYStride = colorImage->stride[AOM_PLANE_Y];
  data->mYSkip = colorImage->stride[AOM_PLANE_Y] -
                 aom_img_plane_width(colorImage, AOM_PLANE_Y);
  data->mCbChannel = colorImage->planes[AOM_PLANE_U];
  data->mCrChannel = colorImage->planes[AOM_PLANE_V];
  data->mCbCrStride = colorImage->stride[AOM_PLANE_U];
  data->mCbSkip = colorImage->stride[AOM_PLANE_U] -
                  aom_img_plane_width(colorImage, AOM_PLANE_U);
  data->mCrSkip = colorImage->stride[AOM_PLANE_V] -
                  aom_img_plane_width(colorImage, AOM_PLANE_V);
  data->mPictureRect = gfx::IntRect(0, 0, colorImage->d_w, colorImage->d_h);
  data->mStereoMode = StereoMode::MONO;
  data->mColorDepth = ColorDepthForBitDepth(colorImage->bit_depth);

  if (colorImage->x_chroma_shift == 1 && colorImage->y_chroma_shift == 1) {
    data->mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
  } else if (colorImage->x_chroma_shift == 1 &&
             colorImage->y_chroma_shift == 0) {
    data->mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH;
  } else if (colorImage->x_chroma_shift != 0 ||
             colorImage->y_chroma_shift != 0) {
    MOZ_ASSERT_UNREACHABLE("unexpected chroma shifts");
  }

  MOZ_ASSERT(colorImage->bit_depth == BitDepthForColorDepth(data->mColorDepth));

  auto av1ColourPrimaries = static_cast<CICP::ColourPrimaries>(colorImage->cp);
  auto av1TransferCharacteristics =
      static_cast<CICP::TransferCharacteristics>(colorImage->tc);
  auto av1MatrixCoefficients =
      static_cast<CICP::MatrixCoefficients>(colorImage->mc);

  data->mYUVColorSpace = GetAVIFColorSpace(aNclx, [=]() {
    MOZ_LOG(sAVIFLog, LogLevel::Info,
            ("YUVColorSpace cannot be determined from colr box, using AV1 "
             "sequence header"));
    return gfxUtils::CicpToColorSpace(av1MatrixCoefficients, av1ColourPrimaries,
                                      sAVIFLog);
  });

  gfx::ColorRange av1ColorRange;
  if (colorImage->range == AOM_CR_STUDIO_RANGE) {
    av1ColorRange = gfx::ColorRange::LIMITED;
  } else {
    MOZ_ASSERT(colorImage->range == AOM_CR_FULL_RANGE);
    av1ColorRange = gfx::ColorRange::FULL;
  }
  data->mColorRange = GetAVIFColorRange(aNclx, av1ColorRange);

  data->SetCicpValues(aNclx, av1ColourPrimaries, av1TransferCharacteristics,
                      av1MatrixCoefficients);

  auto colorPrimaries =
      gfxUtils::CicpToColorPrimaries(data->mColourPrimaries, sAVIFLog);
  if (colorPrimaries.isSome()) {
    data->mColorPrimaries = *colorPrimaries;
  }

  if (alphaImage) {
    MOZ_ASSERT(alphaImage->stride[AOM_PLANE_Y] == data->mYStride);
    data->mAlpha.emplace();
    data->mAlpha->mChannel = alphaImage->planes[AOM_PLANE_Y];
    data->mAlpha->mSize = gfx::IntSize(alphaImage->d_w, alphaImage->d_h);
    data->mAlpha->mPremultiplied = aPremultipliedAlpha;
  }

  data->mColorAOM = std::move(aImage);
  data->mAlphaAOM = std::move(aAlphaPlane);

  return data;
}
// Wrapper to allow rust to call our read adaptor.
intptr_t nsAVIFDecoder::ReadSource(uint8_t* aDestBuf, uintptr_t aDestBufSize,
                                   void* aUserData) {
  MOZ_ASSERT(aDestBuf);
  MOZ_ASSERT(aUserData);

  MOZ_LOG(sAVIFLog, LogLevel::Verbose,
          ("AVIF ReadSource, aDestBufSize: %zu", aDestBufSize));

  auto* decoder = reinterpret_cast<nsAVIFDecoder*>(aUserData);

  MOZ_ASSERT(decoder->mReadCursor);

  size_t bufferLength = decoder->mBufferedData.end() - decoder->mReadCursor;
  size_t n_bytes = std::min(aDestBufSize, bufferLength);

  MOZ_LOG(
      sAVIFLog, LogLevel::Verbose,
      ("AVIF ReadSource, %zu bytes ready, copying %zu", bufferLength, n_bytes));

  memcpy(aDestBuf, decoder->mReadCursor, n_bytes);
  decoder->mReadCursor += n_bytes;

  return n_bytes;
}
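// Illustrative wiring only (see CreateParser below for the real call site):
// mp4parse-rust drives this callback through the Mp4parseIo struct, whose
// second member is the opaque userdata handed back on every read, e.g.
//
//   Mp4parseIo io = {nsAVIFDecoder::ReadSource, this};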
nsAVIFDecoder::nsAVIFDecoder(RasterImage* aImage) : Decoder(aImage) {
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] nsAVIFDecoder::nsAVIFDecoder", this));
}

nsAVIFDecoder::~nsAVIFDecoder() {
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] nsAVIFDecoder::~nsAVIFDecoder", this));
}
LexerResult nsAVIFDecoder::DoDecode(SourceBufferIterator& aIterator,
                                    IResumable* aOnResume) {
  MOZ_LOG(sAVIFLog, LogLevel::Info,
          ("[this=%p] nsAVIFDecoder::DoDecode start", this));

  DecodeResult result = DoDecodeInternal(aIterator, aOnResume);

  RecordDecodeResultTelemetry(result);

  if (result.is<NonDecoderResult>()) {
    NonDecoderResult r = result.as<NonDecoderResult>();
    if (r == NonDecoderResult::NeedMoreData) {
      return LexerResult(Yield::NEED_MORE_DATA);
    }
    if (r == NonDecoderResult::OutputAvailable) {
      MOZ_ASSERT(HasSize());
      return LexerResult(Yield::OUTPUT_AVAILABLE);
    }
    if (r == NonDecoderResult::Complete) {
      MOZ_ASSERT(HasSize());
      return LexerResult(TerminalState::SUCCESS);
    }
    return LexerResult(TerminalState::FAILURE);
  }

  MOZ_ASSERT(result.is<Dav1dResult>() || result.is<AOMResult>() ||
             result.is<Mp4parseStatus>());
  // If IsMetadataDecode(), a successful parse should return
  // NonDecoderResult::MetadataOk or else continue to the decode stage
  MOZ_ASSERT_IF(result.is<Mp4parseStatus>(),
                result.as<Mp4parseStatus>() != MP4PARSE_STATUS_OK);
  auto rv = LexerResult(IsDecodeSuccess(result) ? TerminalState::SUCCESS
                                                : TerminalState::FAILURE);
  MOZ_LOG(sAVIFLog, LogLevel::Info,
          ("[this=%p] nsAVIFDecoder::DoDecode end", this));

  return rv;
}
Mp4parseStatus nsAVIFDecoder::CreateParser() {
  if (!mParser) {
    Mp4parseIo io = {nsAVIFDecoder::ReadSource, this};
    mBufferStream = new AVIFDecoderStream(&mBufferedData);

    Mp4parseStatus status = AVIFParser::Create(
        &io, mBufferStream.get(), mParser,
        bool(GetDecoderFlags() & DecoderFlags::AVIF_SEQUENCES_ENABLED),
        bool(GetDecoderFlags() & DecoderFlags::AVIF_ANIMATE_AVIF_MAJOR));

    if (status != MP4PARSE_STATUS_OK) {
      return status;
    }

    const Mp4parseAvifInfo& info = mParser->GetInfo();
    mIsAnimated = mParser->IsAnimated();
    mHasAlpha = mIsAnimated ? !!info.alpha_track_id : info.has_alpha_item;
  }

  return MP4PARSE_STATUS_OK;
}

nsAVIFDecoder::DecodeResult nsAVIFDecoder::CreateDecoder() {
  if (!mDecoder) {
    DecodeResult r = StaticPrefs::image_avif_use_dav1d()
                         ? Dav1dDecoder::Create(mDecoder, mHasAlpha)
                         : AOMDecoder::Create(mDecoder, mHasAlpha);

    MOZ_LOG(sAVIFLog, LogLevel::Debug,
            ("[this=%p] Create %sDecoder %ssuccessfully", this,
             StaticPrefs::image_avif_use_dav1d() ? "Dav1d" : "AOM",
             IsDecodeSuccess(r) ? "" : "un"));

    return r;
  }

  return StaticPrefs::image_avif_use_dav1d()
             ? DecodeResult(Dav1dResult(0))
             : DecodeResult(AOMResult(AOM_CODEC_OK));
}
// Records all telemetry available in the AVIF metadata; called only once,
// during the metadata decode, to avoid multiple counts.
static void RecordMetadataTelem(const Mp4parseAvifInfo& aInfo) {
  if (aInfo.pixel_aspect_ratio) {
    const uint32_t& h_spacing = aInfo.pixel_aspect_ratio->h_spacing;
    const uint32_t& v_spacing = aInfo.pixel_aspect_ratio->v_spacing;

    if (h_spacing == 0 || v_spacing == 0) {
      AccumulateCategorical(LABELS_AVIF_PASP::invalid);
      mozilla::glean::avif::pasp
          .EnumGet(mozilla::glean::avif::PaspLabel::eInvalid)
          .Add();
    } else if (h_spacing == v_spacing) {
      AccumulateCategorical(LABELS_AVIF_PASP::square);
      mozilla::glean::avif::pasp
          .EnumGet(mozilla::glean::avif::PaspLabel::eSquare)
          .Add();
    } else {
      AccumulateCategorical(LABELS_AVIF_PASP::nonsquare);
      mozilla::glean::avif::pasp
          .EnumGet(mozilla::glean::avif::PaspLabel::eNonsquare)
          .Add();
    }
  } else {
    AccumulateCategorical(LABELS_AVIF_PASP::absent);
    mozilla::glean::avif::pasp.EnumGet(mozilla::glean::avif::PaspLabel::eAbsent)
        .Add();
  }

  const auto& major_brand = aInfo.major_brand;
  if (!memcmp(major_brand, "avif", sizeof(major_brand))) {
    AccumulateCategorical(LABELS_AVIF_MAJOR_BRAND::avif);
  } else if (!memcmp(major_brand, "avis", sizeof(major_brand))) {
    AccumulateCategorical(LABELS_AVIF_MAJOR_BRAND::avis);
  } else {
    AccumulateCategorical(LABELS_AVIF_MAJOR_BRAND::other);
  }

  AccumulateCategorical(aInfo.has_sequence ? LABELS_AVIF_SEQUENCE::present
                                           : LABELS_AVIF_SEQUENCE::absent);

#define FEATURE_TELEMETRY(fourcc)                                              \
  AccumulateCategorical(                                                       \
      (aInfo.unsupported_features_bitfield & (1 << MP4PARSE_FEATURE_##fourcc)) \
          ? LABELS_AVIF_##fourcc::present                                      \
          : LABELS_AVIF_##fourcc::absent)
  FEATURE_TELEMETRY(A1LX);
  FEATURE_TELEMETRY(A1OP);
  FEATURE_TELEMETRY(CLAP);
  FEATURE_TELEMETRY(GRID);
  FEATURE_TELEMETRY(IPRO);
  FEATURE_TELEMETRY(LSEL);

#define FEATURE_RECORD_GLEAN(metric, metricLabel, fourcc)        \
  mozilla::glean::avif::metric                                   \
      .EnumGet(aInfo.unsupported_features_bitfield &             \
                       (1 << MP4PARSE_FEATURE_##fourcc)          \
                   ? mozilla::glean::avif::metricLabel::ePresent \
                   : mozilla::glean::avif::metricLabel::eAbsent) \
      .Add()
  FEATURE_RECORD_GLEAN(a1lx, A1lxLabel, A1LX);
  FEATURE_RECORD_GLEAN(a1op, A1opLabel, A1OP);
  FEATURE_RECORD_GLEAN(clap, ClapLabel, CLAP);
  FEATURE_RECORD_GLEAN(grid, GridLabel, GRID);
  FEATURE_RECORD_GLEAN(ipro, IproLabel, IPRO);
  FEATURE_RECORD_GLEAN(lsel, LselLabel, LSEL);

  if (aInfo.nclx_colour_information && aInfo.icc_colour_information.data) {
    AccumulateCategorical(LABELS_AVIF_COLR::both);
    mozilla::glean::avif::colr.EnumGet(mozilla::glean::avif::ColrLabel::eBoth)
        .Add();
  } else if (aInfo.nclx_colour_information) {
    AccumulateCategorical(LABELS_AVIF_COLR::nclx);
    mozilla::glean::avif::colr.EnumGet(mozilla::glean::avif::ColrLabel::eNclx)
        .Add();
  } else if (aInfo.icc_colour_information.data) {
    AccumulateCategorical(LABELS_AVIF_COLR::icc);
    mozilla::glean::avif::colr.EnumGet(mozilla::glean::avif::ColrLabel::eIcc)
        .Add();
  } else {
    AccumulateCategorical(LABELS_AVIF_COLR::absent);
    mozilla::glean::avif::colr.EnumGet(mozilla::glean::avif::ColrLabel::eAbsent)
        .Add();
  }
}
static void RecordPixiTelemetry(uint8_t aPixiBitDepth,
                                uint8_t aBitstreamBitDepth,
                                const char* aItemName) {
  if (aPixiBitDepth == 0) {
    AccumulateCategorical(LABELS_AVIF_PIXI::absent);
    mozilla::glean::avif::pixi.EnumGet(mozilla::glean::avif::PixiLabel::eAbsent)
        .Add();
  } else if (aPixiBitDepth == aBitstreamBitDepth) {
    AccumulateCategorical(LABELS_AVIF_PIXI::valid);
    mozilla::glean::avif::pixi.EnumGet(mozilla::glean::avif::PixiLabel::eValid)
        .Add();
  } else {
    MOZ_LOG(sAVIFLog, LogLevel::Error,
            ("%s item pixi bit depth (%hhu) doesn't match "
             "bitstream bit depth (%hhu)",
             aItemName, aPixiBitDepth, aBitstreamBitDepth));
    AccumulateCategorical(LABELS_AVIF_PIXI::bitstream_mismatch);
    mozilla::glean::avif::pixi
        .EnumGet(mozilla::glean::avif::PixiLabel::eBitstreamMismatch)
        .Add();
  }
}
// This telemetry depends on the results of decoding.
// These data must be recorded only on the first frame decoded after metadata
// decoding is complete.
static void RecordFrameTelem(bool aAnimated, const Mp4parseAvifInfo& aInfo,
                             const AVIFDecodedData& aData) {
  AccumulateCategorical(
      gColorSpaceLabel[static_cast<size_t>(aData.mYUVColorSpace)]);
  mozilla::glean::avif::yuv_color_space
      .EnumGet(static_cast<mozilla::glean::avif::YuvColorSpaceLabel>(
          aData.mYUVColorSpace))
      .Add();
  AccumulateCategorical(
      gColorDepthLabel[static_cast<size_t>(aData.mColorDepth)]);
  mozilla::glean::avif::bit_depth
      .EnumGet(
          static_cast<mozilla::glean::avif::BitDepthLabel>(aData.mColorDepth))
      .Add();

  RecordPixiTelemetry(
      aAnimated ? aInfo.color_track_bit_depth : aInfo.primary_item_bit_depth,
      BitDepthForColorDepth(aData.mColorDepth), "color");

  if (aData.mAlpha) {
    AccumulateCategorical(LABELS_AVIF_ALPHA::present);
    mozilla::glean::avif::alpha
        .EnumGet(mozilla::glean::avif::AlphaLabel::ePresent)
        .Add();
    RecordPixiTelemetry(
        aAnimated ? aInfo.alpha_track_bit_depth : aInfo.alpha_item_bit_depth,
        BitDepthForColorDepth(aData.mColorDepth), "alpha");
  } else {
    AccumulateCategorical(LABELS_AVIF_ALPHA::absent);
    mozilla::glean::avif::alpha
        .EnumGet(mozilla::glean::avif::AlphaLabel::eAbsent)
        .Add();
  }

  if (CICP::IsReserved(aData.mColourPrimaries)) {
    AccumulateCategorical(LABELS_AVIF_CICP_CP::RESERVED_REST);
    mozilla::glean::avif::cicp_cp
        .EnumGet(mozilla::glean::avif::CicpCpLabel::eReservedRest)
        .Add();
  } else {
    AccumulateCategorical(
        static_cast<LABELS_AVIF_CICP_CP>(aData.mColourPrimaries));
    mozilla::glean::avif::cicp_cp.EnumGet(
        static_cast<mozilla::glean::avif::CicpCpLabel>(aData.mColourPrimaries));
  }

  if (CICP::IsReserved(aData.mTransferCharacteristics)) {
    AccumulateCategorical(LABELS_AVIF_CICP_TC::RESERVED);
    mozilla::glean::avif::cicp_tc
        .EnumGet(mozilla::glean::avif::CicpTcLabel::eReserved)
        .Add();
  } else {
    AccumulateCategorical(
        static_cast<LABELS_AVIF_CICP_TC>(aData.mTransferCharacteristics));
    mozilla::glean::avif::cicp_tc.EnumGet(
        static_cast<mozilla::glean::avif::CicpTcLabel>(
            aData.mTransferCharacteristics));
  }

  if (CICP::IsReserved(aData.mMatrixCoefficients)) {
    AccumulateCategorical(LABELS_AVIF_CICP_MC::RESERVED);
    mozilla::glean::avif::cicp_mc
        .EnumGet(mozilla::glean::avif::CicpMcLabel::eReserved)
        .Add();
  } else {
    AccumulateCategorical(
        static_cast<LABELS_AVIF_CICP_MC>(aData.mMatrixCoefficients));
    mozilla::glean::avif::cicp_mc.EnumGet(
        static_cast<mozilla::glean::avif::CicpMcLabel>(
            aData.mMatrixCoefficients));
  }
}
1514 nsAVIFDecoder::DecodeResult
nsAVIFDecoder::DoDecodeInternal(
1515 SourceBufferIterator
& aIterator
, IResumable
* aOnResume
) {
1516 MOZ_LOG(sAVIFLog
, LogLevel::Debug
,
1517 ("[this=%p] nsAVIFDecoder::DoDecodeInternal", this));
1519 // Since the SourceBufferIterator doesn't guarantee a contiguous buffer,
1520 // but the current mp4parse-rust implementation requires it, always buffer
1521 // locally. This keeps the code simpler at the cost of some performance, but
1522 // this implementation is only experimental, so we don't want to spend time
1523 // optimizing it prematurely.
1524 while (!mReadCursor
) {
1525 SourceBufferIterator::State state
=
1526 aIterator
.AdvanceOrScheduleResume(SIZE_MAX
, aOnResume
);
1528 MOZ_LOG(sAVIFLog
, LogLevel::Debug
,
1529 ("[this=%p] After advance, iterator state is %d", this, state
));
1532 case SourceBufferIterator::WAITING
:
1533 return AsVariant(NonDecoderResult::NeedMoreData
);
1535 case SourceBufferIterator::COMPLETE
:
1536 mReadCursor
= mBufferedData
.begin();
1539 case SourceBufferIterator::READY
: { // copy new data to buffer
1540 MOZ_LOG(sAVIFLog
, LogLevel::Debug
,
1541 ("[this=%p] SourceBufferIterator ready, %zu bytes available",
1542 this, aIterator
.Length()));
1544 bool appendSuccess
=
1545 mBufferedData
.append(aIterator
.Data(), aIterator
.Length());
1547 if (!appendSuccess
) {
1548 MOZ_LOG(sAVIFLog
, LogLevel::Error
,
1549 ("[this=%p] Failed to append %zu bytes to buffer", this,
1550 aIterator
.Length()));
1557 MOZ_ASSERT_UNREACHABLE("unexpected SourceBufferIterator state");
1561 Mp4parseStatus parserStatus
= CreateParser();
1563 if (parserStatus
!= MP4PARSE_STATUS_OK
) {
1564 return AsVariant(parserStatus
);
1567 const Mp4parseAvifInfo
& parsedInfo
= mParser
->GetInfo();
1569 if (parsedInfo
.icc_colour_information
.data
) {
1570 const auto& icc
= parsedInfo
.icc_colour_information
;
1572 sAVIFLog
, LogLevel::Debug
,
1573 ("[this=%p] colr type ICC: %zu bytes %p", this, icc
.length
, icc
.data
));
1576 if (IsMetadataDecode()) {
1577 RecordMetadataTelem(parsedInfo
);
1580 if (parsedInfo
.nclx_colour_information
) {
1581 const auto& nclx
= *parsedInfo
.nclx_colour_information
;
1583 sAVIFLog
, LogLevel::Debug
,
1584 ("[this=%p] colr type CICP: cp/tc/mc/full-range %u/%u/%u/%s", this,
1585 nclx
.colour_primaries
, nclx
.transfer_characteristics
,
1586 nclx
.matrix_coefficients
, nclx
.full_range_flag
? "true" : "false"));
1589 if (!parsedInfo
.icc_colour_information
.data
&&
1590 !parsedInfo
.nclx_colour_information
) {
1591 MOZ_LOG(sAVIFLog
, LogLevel::Debug
,
1592 ("[this=%p] colr box not present", this));
1595 AVIFImage parsedImage
;
1596 DecodeResult r
= mParser
->GetImage(parsedImage
);
1597 if (!IsDecodeSuccess(r
)) {
1601 !IsMetadataDecode() && r
== DecodeResult(NonDecoderResult::Complete
);
1604 PostIsAnimated(parsedImage
.mDuration
);
1606 switch (mParser
->GetInfo().loop_mode
) {
1607 case MP4PARSE_AVIF_LOOP_MODE_LOOP_BY_COUNT
: {
1608 auto loopCount
= mParser
->GetInfo().loop_count
;
1609 PostLoopCount(loopCount
> INT32_MAX
? -1
1610 : static_cast<int32_t>(loopCount
));
1613 case MP4PARSE_AVIF_LOOP_MODE_LOOP_INFINITELY
:
1614 case MP4PARSE_AVIF_LOOP_MODE_NO_EDITS
:
1621 PostHasTransparency();
1624 Orientation orientation
= StaticPrefs::image_avif_apply_transforms()
1625 ? GetImageOrientation(parsedInfo
)
1627 // TODO: Orientation should probably also apply to animated AVIFs.
1629 orientation
= Orientation
{};
1632 Maybe
<IntSize
> ispeImageSize
= GetImageSize(parsedInfo
);
1634 bool sendDecodeTelemetry
= IsMetadataDecode();
1635 if (ispeImageSize
.isSome()) {
1636 MOZ_LOG(sAVIFLog
, LogLevel::Debug
,
1637 ("[this=%p] Parser returned image size %d x %d (%d/%d bit)", this,
1638 ispeImageSize
->width
, ispeImageSize
->height
,
1639 mIsAnimated
? parsedInfo
.color_track_bit_depth
1640 : parsedInfo
.primary_item_bit_depth
,
1641 mIsAnimated
? parsedInfo
.alpha_track_bit_depth
1642 : parsedInfo
.alpha_item_bit_depth
));
1643 PostSize(ispeImageSize
->width
, ispeImageSize
->height
, orientation
);
1644 if (WantsFrameCount()) {
1645 // Note that this consumes the frame iterators, so this can only be
1646 // requested for metadata decodes. Since we had to partially decode the
1647 // first frame to determine the size, we need to add one to the result.
1648 PostFrameCount(mParser
->GetFrameCount() + 1);
1650 if (IsMetadataDecode()) {
1652 sAVIFLog
, LogLevel::Debug
,
1653 ("[this=%p] Finishing metadata decode without image decode", this));
1654 return AsVariant(NonDecoderResult::Complete
);
1656 // If we're continuing to decode here, this means we skipped decode
1657 // telemetry for the metadata decode pass. Send it this time.
1658 sendDecodeTelemetry
= true;
1660 MOZ_LOG(sAVIFLog
, LogLevel::Error
,
1661 ("[this=%p] Parser returned no image size, decoding...", this));
1664 r
= CreateDecoder();
1665 if (!IsDecodeSuccess(r
)) {
1668 MOZ_ASSERT(mDecoder
);
1669 r
= mDecoder
->Decode(sendDecodeTelemetry
, parsedInfo
, parsedImage
);
1670 MOZ_LOG(sAVIFLog
, LogLevel::Debug
,
1671 ("[this=%p] Decoder%s->Decode() %s", this,
1672 StaticPrefs::image_avif_use_dav1d() ? "Dav1d" : "AOM",
1673 IsDecodeSuccess(r
) ? "succeeds" : "fails"));
1675 if (!IsDecodeSuccess(r
)) {
1679 UniquePtr
<AVIFDecodedData
> decodedData
= mDecoder
->GetDecodedData();
1681 MOZ_ASSERT_IF(mHasAlpha
, decodedData
->mAlpha
.isSome());
1683 MOZ_ASSERT(decodedData
->mColourPrimaries
!=
1684 CICP::ColourPrimaries::CP_UNSPECIFIED
);
1685 MOZ_ASSERT(decodedData
->mTransferCharacteristics
!=
1686 CICP::TransferCharacteristics::TC_UNSPECIFIED
);
1687 MOZ_ASSERT(decodedData
->mColorRange
<= gfx::ColorRange::_Last
);
1688 MOZ_ASSERT(decodedData
->mYUVColorSpace
<= gfx::YUVColorSpace::_Last
);
1690 MOZ_LOG(sAVIFLog
, LogLevel::Debug
,
1691 ("[this=%p] decodedData.mColorRange: %hhd", this,
1692 static_cast<uint8_t>(decodedData
->mColorRange
)));
1694 // Technically it's valid but we don't handle it now (Bug 1682318).
1695 if (decodedData
->mAlpha
&&
1696 decodedData
->mAlpha
->mSize
!= decodedData
->YDataSize()) {
1697 return AsVariant(NonDecoderResult::AlphaYSizeMismatch
);
1700 bool isFirstFrame
= GetFrameCount() == 0;
1703 MOZ_ASSERT(isFirstFrame
);
1705 sAVIFLog
, LogLevel::Error
,
1706 ("[this=%p] Using decoded image size: %d x %d", this,
1707 decodedData
->mPictureRect
.width
, decodedData
->mPictureRect
.height
));
1708 PostSize(decodedData
->mPictureRect
.width
, decodedData
->mPictureRect
.height
,
1710 if (WantsFrameCount()) {
1711 // Note that this consumes the frame iterators, so this can only be
1712 // requested for metadata decodes. Since we had to partially decode the
1713 // first frame to determine the size, we need to add one to the result.
1714 PostFrameCount(mParser
->GetFrameCount() + 1);
1716 AccumulateCategorical(LABELS_AVIF_ISPE::absent
);
1717 mozilla::glean::avif::ispe
.EnumGet(mozilla::glean::avif::IspeLabel::eAbsent
)
  } else {
    // Verify that the bitstream hasn't changed the image size compared to
    // either the ispe box or the previous frames.
    IntSize expectedSize = GetImageMetadata()
                               .GetOrientation()
                               .ToUnoriented(Size())
                               .ToUnknownSize();
    if (decodedData->mPictureRect.width != expectedSize.width ||
        decodedData->mPictureRect.height != expectedSize.height) {
      if (isFirstFrame) {
        MOZ_LOG(sAVIFLog, LogLevel::Error,
                ("[this=%p] Metadata image size doesn't match decoded image size: "
                 "(%d x %d) != (%d x %d)",
                 this, ispeImageSize->width, ispeImageSize->height,
                 decodedData->mPictureRect.width,
                 decodedData->mPictureRect.height));
        AccumulateCategorical(LABELS_AVIF_ISPE::bitstream_mismatch);
        mozilla::glean::avif::ispe
            .EnumGet(mozilla::glean::avif::IspeLabel::eBitstreamMismatch)
            .Add();
        return AsVariant(NonDecoderResult::MetadataImageSizeMismatch);
      }

      MOZ_LOG(sAVIFLog, LogLevel::Error,
              ("[this=%p] Frame size has changed in the bitstream: "
               "(%d x %d) != (%d x %d)",
               this, expectedSize.width, expectedSize.height,
               decodedData->mPictureRect.width,
               decodedData->mPictureRect.height));
      return AsVariant(NonDecoderResult::FrameSizeChanged);
    }

    if (isFirstFrame) {
      AccumulateCategorical(LABELS_AVIF_ISPE::valid);
      mozilla::glean::avif::ispe
          .EnumGet(mozilla::glean::avif::IspeLabel::eValid)
          .Add();
    }
  }
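  // Note: Size() is the posted, display-oriented size, while mPictureRect is
  // expressed in the codec's pre-orientation coordinate space, which is why
  // the comparison above is done on the unoriented size.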
  if (IsMetadataDecode()) {
    return AsVariant(NonDecoderResult::Complete);
  }

  IntSize rgbSize = decodedData->mPictureRect.Size();

  if (parsedImage.mFrameNum == 0) {
    RecordFrameTelem(mIsAnimated, parsedInfo, *decodedData);
  }

  if (decodedData->mRenderSize &&
      decodedData->mRenderSize->ToUnknownSize() != rgbSize) {
    // This may be supported by allowing all metadata decodes to decode a frame
    // and get the render size from the bitstream. However it's unlikely to be
    // worth it.
    return AsVariant(NonDecoderResult::RenderSizeMismatch);
  }
  // Read color profile
  if (mCMSMode != CMSMode::Off) {
    MOZ_LOG(sAVIFLog, LogLevel::Debug,
            ("[this=%p] Processing color profile", this));

    // See comment on AVIFDecodedData
    if (parsedInfo.icc_colour_information.data) {
      // same profile for every frame of image, only create it once
      if (!mInProfile) {
        const auto& icc = parsedInfo.icc_colour_information;
        mInProfile = qcms_profile_from_memory(icc.data, icc.length);
      }
    } else {
      // potentially different profile every frame, destroy the old one
      if (mTransform) {
        qcms_transform_release(mTransform);
        mTransform = nullptr;
      }
      if (mInProfile) {
        qcms_profile_release(mInProfile);
        mInProfile = nullptr;
      }

      const auto& cp = decodedData->mColourPrimaries;
      const auto& tc = decodedData->mTransferCharacteristics;

      if (CICP::IsReserved(cp)) {
        MOZ_LOG(sAVIFLog, LogLevel::Error,
                ("[this=%p] colour_primaries reserved value (%hhu) is invalid",
                 this, static_cast<uint8_t>(cp)));
        return AsVariant(NonDecoderResult::InvalidCICP);
      }

      if (CICP::IsReserved(tc)) {
        MOZ_LOG(sAVIFLog, LogLevel::Error,
                ("[this=%p] transfer_characteristics reserved value (%hhu) is "
                 "invalid",
                 this, static_cast<uint8_t>(tc)));
        return AsVariant(NonDecoderResult::InvalidCICP);
      }

      MOZ_ASSERT(cp != CICP::ColourPrimaries::CP_UNSPECIFIED &&
                 !CICP::IsReserved(cp));
      MOZ_ASSERT(tc != CICP::TransferCharacteristics::TC_UNSPECIFIED &&
                 !CICP::IsReserved(tc));

      mInProfile = qcms_profile_create_cicp(cp, tc);
    }

    MOZ_LOG(sAVIFLog, LogLevel::Debug,
            ("[this=%p] mInProfile %p", this, mInProfile));
  } else {
    MOZ_LOG(sAVIFLog, LogLevel::Debug,
            ("[this=%p] CMSMode::Off, skipping color profile", this));
  }
  if (mInProfile && GetCMSOutputProfile() && !mTransform) {
    auto intent = static_cast<qcms_intent>(gfxPlatform::GetRenderingIntent());
    qcms_data_type inType;
    qcms_data_type outType;

    // If we're not mandating an intent, use the one from the image.
    if (gfxPlatform::GetRenderingIntent() == -1) {
      intent = qcms_profile_get_rendering_intent(mInProfile);
    }

    uint32_t profileSpace = qcms_profile_get_color_space(mInProfile);
    if (profileSpace != icSigGrayData) {
      mUsePipeTransform = true;
      // When we convert the data to rgb we always pass either B8G8R8A8 or
      // B8G8R8X8 to ConvertYCbCrToRGB32. After that we input the data to the
      // surface pipe where qcms happens in the pipeline. So when the data gets
      // to qcms it will always be in our preferred format and so
      // gfxPlatform::GetCMSOSRGBAType is the correct type.
      inType = gfxPlatform::GetCMSOSRGBAType();
      outType = inType;
    } else {
      // We can't use SurfacePipe to do the color management (it can't handle
      // grayscale data), we have to do it ourselves on the grayscale data
      // before passing the now RGB data to SurfacePipe.
      mUsePipeTransform = false;
      if (mHasAlpha) {
        inType = QCMS_DATA_GRAYA_8;
        outType = gfxPlatform::GetCMSOSRGBAType();
      } else {
        inType = QCMS_DATA_GRAY_8;
        outType = gfxPlatform::GetCMSOSRGBAType();
      }
    }

    mTransform = qcms_transform_create(mInProfile, inType,
                                       GetCMSOutputProfile(), outType, intent);
  }
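  // When mUsePipeTransform is false (grayscale source), mTransform is applied
  // manually below, row by row, converting GRAY(A) samples into the OS RGBA
  // layout before the rows reach the surface pipe; otherwise the surface pipe
  // applies mTransform itself on the already-converted RGB rows.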
  // Get suggested format and size. Note that GetYCbCrToRGBDestFormatAndSize
  // force format to be B8G8R8X8 if it's not.
  gfx::SurfaceFormat format = SurfaceFormat::OS_RGBX;
  gfx::GetYCbCrToRGBDestFormatAndSize(*decodedData, format, rgbSize);
  if (decodedData->mAlpha) {
    // We would use libyuv to do the YCbCrA -> ARGB conversion, which only
    // works for B8G8R8A8.
    format = SurfaceFormat::B8G8R8A8;
  }

  const int bytesPerPixel = BytesPerPixel(format);

  const CheckedInt rgbStride = CheckedInt<int>(rgbSize.width) * bytesPerPixel;
  const CheckedInt rgbBufLength = rgbStride * rgbSize.height;

  if (!rgbStride.isValid() || !rgbBufLength.isValid()) {
    MOZ_LOG(sAVIFLog, LogLevel::Debug,
            ("[this=%p] overflow calculating rgbBufLength: rgbSize.width: %d, "
             "rgbSize.height: %d, "
             "bytesPerPixel: %u",
             this, rgbSize.width, rgbSize.height, bytesPerPixel));
    return AsVariant(NonDecoderResult::SizeOverflow);
  }
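  // Illustrative numbers only: with 4 bytes per pixel, a 1,000,000 x 1,000,000
  // image would need roughly 4 * 10^12 bytes; the 32-bit CheckedInt arithmetic
  // above reports that overflow instead of allocating a wrapped-around,
  // too-small buffer.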
  UniquePtr<uint8_t[]> rgbBuf =
      MakeUniqueFallible<uint8_t[]>(rgbBufLength.value());
  if (!rgbBuf) {
    MOZ_LOG(sAVIFLog, LogLevel::Debug,
            ("[this=%p] allocation of %u-byte rgbBuf failed", this,
             rgbBufLength.value()));
    return AsVariant(NonDecoderResult::OutOfMemory);
  }

  PremultFunc premultOp = nullptr;
  const auto wantPremultiply =
      !bool(GetSurfaceFlags() & SurfaceFlags::NO_PREMULTIPLY_ALPHA);
  if (decodedData->mAlpha) {
    const bool& hasPremultiply = decodedData->mAlpha->mPremultiplied;
    if (mTransform) {
      // Color management needs to be done on non-premult data, so
      // ConvertYCbCrToRGB32 needs to produce non-premult data, then color
      // management can happen (either here for grayscale data, or in surface
      // pipe otherwise) and then later in the surface pipe we will convert to
      // premult if needed.
      if (hasPremultiply) {
        premultOp = libyuv::ARGBUnattenuate;
      }
    } else {
      // no color management, so premult conversion (if needed) can be done by
      // ConvertYCbCrToRGB32 before surface pipe
      if (wantPremultiply && !hasPremultiply) {
        premultOp = libyuv::ARGBAttenuate;
      } else if (!wantPremultiply && hasPremultiply) {
        premultOp = libyuv::ARGBUnattenuate;
      }
    }
  }
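  // In short: with color management active, rows must stay non-premultiplied
  // until after the qcms transform (any required premultiply happens later in
  // the surface pipe); without color management, ConvertYCbCrToRGB32 can
  // convert directly to the representation the caller asked for.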
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] calling gfx::ConvertYCbCrToRGB32 premultOp: %p", this,
           premultOp));
  nsresult result = gfx::ConvertYCbCrToRGB32(*decodedData, format, rgbBuf.get(),
                                             rgbStride.value(), premultOp);
  if (!NS_SUCCEEDED(result)) {
    MOZ_LOG(sAVIFLog, LogLevel::Debug,
            ("[this=%p] ConvertYCbCrToRGB32 failure", this));
    return AsVariant(NonDecoderResult::ConvertYCbCrFailure);
  }

  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] calling SurfacePipeFactory::CreateSurfacePipe", this));

  SurfacePipeFlags pipeFlags = SurfacePipeFlags();
  if (decodedData->mAlpha && mTransform) {
    // we know data is non-premult in this case, see above, so if we
    // wantPremultiply then we have to ask the surface pipe to convert for us
    if (wantPremultiply) {
      pipeFlags |= SurfacePipeFlags::PREMULTIPLY_ALPHA;
    }
  }

  Maybe<SurfacePipe> pipe = Nothing();
  auto* transform = mUsePipeTransform ? mTransform : nullptr;
  if (mIsAnimated) {
    SurfaceFormat outFormat =
        decodedData->mAlpha ? SurfaceFormat::OS_RGBA : SurfaceFormat::OS_RGBX;
    Maybe<AnimationParams> animParams;
    if (!IsFirstFrameDecode()) {
      animParams.emplace(FullFrame().ToUnknownRect(), parsedImage.mDuration,
                         parsedImage.mFrameNum, BlendMethod::SOURCE,
                         DisposalMethod::CLEAR_ALL);
    }
    pipe = SurfacePipeFactory::CreateSurfacePipe(
        this, Size(), OutputSize(), FullFrame(), format, outFormat, animParams,
        transform, pipeFlags);
  } else {
    pipe = SurfacePipeFactory::CreateReorientSurfacePipe(
        this, Size(), OutputSize(), format, transform, GetOrientation(),
        pipeFlags);
  }

  if (pipe.isNothing()) {
    MOZ_LOG(sAVIFLog, LogLevel::Debug,
            ("[this=%p] could not initialize surface pipe", this));
    return AsVariant(NonDecoderResult::PipeInitError);
  }
  MOZ_LOG(sAVIFLog, LogLevel::Debug, ("[this=%p] writing to surface", this));
  const uint8_t* endOfRgbBuf = {rgbBuf.get() + rgbBufLength.value()};
  WriteState writeBufferResult = WriteState::NEED_MORE_DATA;
  uint8_t* grayLine = nullptr;
  int32_t multiplier = 1;
  if (mTransform && !mUsePipeTransform) {
    if (mHasAlpha) {
      multiplier = 2;
    }
    // We know this calculation doesn't overflow because rgbStride is a larger
    // value and is valid here.
    grayLine = new uint8_t[multiplier * rgbSize.width];
  }

  for (uint8_t* rowPtr = rgbBuf.get(); rowPtr < endOfRgbBuf;
       rowPtr += rgbStride.value()) {
    if (mTransform && !mUsePipeTransform) {
      // format is B8G8R8A8 or B8G8R8X8, so 1 offset picks G
      for (int32_t i = 0; i < rgbSize.width; i++) {
        grayLine[multiplier * i] = rowPtr[i * bytesPerPixel + 1];
        if (mHasAlpha) {
          grayLine[multiplier * i + 1] = rowPtr[i * bytesPerPixel + 3];
        }
      }
      qcms_transform_data(mTransform, grayLine, rowPtr, rgbSize.width);
    }
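    // The gray value is read from the G channel (offset 1; since the source is
    // grayscale, R, G and B carry the same value) and, when present, alpha from
    // offset 3. qcms then expands the packed GRAY(A) line back into the full
    // RGBA row in rowPtr.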

    writeBufferResult = pipe->WriteBuffer(reinterpret_cast<uint32_t*>(rowPtr));

    Maybe<SurfaceInvalidRect> invalidRect = pipe->TakeInvalidRect();
    if (invalidRect) {
      PostInvalidation(invalidRect->mInputSpaceRect,
                       Some(invalidRect->mOutputSpaceRect));
    }

    if (writeBufferResult == WriteState::FAILURE) {
      MOZ_LOG(sAVIFLog, LogLevel::Debug,
              ("[this=%p] error writing rowPtr to surface pipe", this));
      break;
    } else if (writeBufferResult == WriteState::FINISHED) {
      MOZ_ASSERT(rowPtr + rgbStride.value() == endOfRgbBuf);
    }
  }

  if (mTransform && !mUsePipeTransform) {
    delete[] grayLine;
  }

  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] writing to surface complete", this));
  if (writeBufferResult == WriteState::FINISHED) {
    PostFrameStop(mHasAlpha ? Opacity::SOME_TRANSPARENCY
                            : Opacity::FULLY_OPAQUE);

    if (!mIsAnimated || IsFirstFrameDecode()) {
      PostDecodeDone();
      return DecodeResult(NonDecoderResult::Complete);
    }

    // Reaching the end of an animated sequence also completes the decode.
    // (IsLastFrame() is a hypothetical stand-in for the parser's
    // end-of-sequence check.)
    if (mParser->IsLastFrame()) {
      PostDecodeDone();
      return DecodeResult(NonDecoderResult::Complete);
    }

    return DecodeResult(NonDecoderResult::OutputAvailable);
  }

  return AsVariant(NonDecoderResult::WriteBufferError);
}
bool nsAVIFDecoder::IsDecodeSuccess(const DecodeResult& aResult) {
  return aResult == DecodeResult(NonDecoderResult::OutputAvailable) ||
         aResult == DecodeResult(NonDecoderResult::Complete) ||
         aResult == DecodeResult(Dav1dResult(0)) ||
         aResult == DecodeResult(AOMResult(AOM_CODEC_OK));
}
void nsAVIFDecoder::RecordDecodeResultTelemetry(
    const nsAVIFDecoder::DecodeResult& aResult) {
  if (aResult.is<Mp4parseStatus>()) {
    switch (aResult.as<Mp4parseStatus>()) {
      case MP4PARSE_STATUS_OK:
        MOZ_ASSERT_UNREACHABLE(
            "Expect NonDecoderResult, Dav1dResult or AOMResult");
        break;
      case MP4PARSE_STATUS_BAD_ARG:
      case MP4PARSE_STATUS_INVALID:
      case MP4PARSE_STATUS_UNSUPPORTED:
      case MP4PARSE_STATUS_EOF:
      case MP4PARSE_STATUS_IO:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::parse_error);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eParseError)
            .Add();
        break;
      case MP4PARSE_STATUS_OOM:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::out_of_memory);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eOutOfMemory)
            .Add();
        break;
      case MP4PARSE_STATUS_MISSING_AVIF_OR_AVIS_BRAND:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::missing_brand);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eMissingBrand)
            .Add();
        break;
      case MP4PARSE_STATUS_FTYP_NOT_FIRST:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::ftyp_not_first);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eFtypNotFirst)
            .Add();
        break;
      case MP4PARSE_STATUS_NO_IMAGE:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::no_image);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eNoImage)
            .Add();
        break;
      case MP4PARSE_STATUS_MOOV_BAD_QUANTITY:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::multiple_moov);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eMultipleMoov)
            .Add();
        break;
      case MP4PARSE_STATUS_MOOV_MISSING:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::no_moov);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eNoMoov)
            .Add();
        break;
      case MP4PARSE_STATUS_LSEL_NO_ESSENTIAL:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::lsel_no_essential);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eLselNoEssential)
            .Add();
        break;
      case MP4PARSE_STATUS_A1OP_NO_ESSENTIAL:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::a1op_no_essential);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eA1opNoEssential)
            .Add();
        break;
      case MP4PARSE_STATUS_A1LX_ESSENTIAL:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::a1lx_essential);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eA1lxEssential)
            .Add();
        break;
      case MP4PARSE_STATUS_TXFORM_NO_ESSENTIAL:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::txform_no_essential);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eTxformNoEssential)
            .Add();
        break;
      case MP4PARSE_STATUS_PITM_MISSING:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::no_primary_item);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eNoPrimaryItem)
            .Add();
        break;
      case MP4PARSE_STATUS_IMAGE_ITEM_TYPE:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::image_item_type);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eImageItemType)
            .Add();
        break;
      case MP4PARSE_STATUS_ITEM_TYPE_MISSING:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::item_type_missing);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eItemTypeMissing)
            .Add();
        break;
      case MP4PARSE_STATUS_CONSTRUCTION_METHOD:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::construction_method);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eConstructionMethod)
            .Add();
        break;
      case MP4PARSE_STATUS_PITM_NOT_FOUND:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::item_loc_not_found);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eItemLocNotFound)
            .Add();
        break;
      case MP4PARSE_STATUS_IDAT_MISSING:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::no_item_data_box);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eNoItemDataBox)
            .Add();
        break;
      // (Assumed status name for the catch-all "uncategorized" bucket.)
      case MP4PARSE_STATUS_UNCATEGORIZED:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::uncategorized);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eUncategorized)
            .Add();
        break;
      default:
        MOZ_LOG(sAVIFLog, LogLevel::Error,
                ("[this=%p] unexpected Mp4parseStatus value: %d", this,
                 aResult.as<Mp4parseStatus>()));
        MOZ_ASSERT(false, "unexpected Mp4parseStatus value");
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::invalid_parse_status);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eInvalidParseStatus)
            .Add();
    }
  } else if (aResult.is<NonDecoderResult>()) {
    switch (aResult.as<NonDecoderResult>()) {
      case NonDecoderResult::NeedMoreData:
        return;
      case NonDecoderResult::OutputAvailable:
        return;
      case NonDecoderResult::Complete:
        return;
      case NonDecoderResult::SizeOverflow:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::size_overflow);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eSizeOverflow)
            .Add();
        return;
      case NonDecoderResult::OutOfMemory:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::out_of_memory);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eOutOfMemory)
            .Add();
        return;
      case NonDecoderResult::PipeInitError:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::pipe_init_error);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::ePipeInitError)
            .Add();
        return;
      case NonDecoderResult::WriteBufferError:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::write_buffer_error);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eWriteBufferError)
            .Add();
        return;
      case NonDecoderResult::AlphaYSizeMismatch:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::alpha_y_sz_mismatch);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eAlphaYSzMismatch)
            .Add();
        return;
      case NonDecoderResult::AlphaYColorDepthMismatch:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::alpha_y_bpc_mismatch);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eAlphaYBpcMismatch)
            .Add();
        return;
      case NonDecoderResult::MetadataImageSizeMismatch:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::ispe_mismatch);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eIspeMismatch)
            .Add();
        return;
      case NonDecoderResult::RenderSizeMismatch:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::render_size_mismatch);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eRenderSizeMismatch)
            .Add();
        return;
      case NonDecoderResult::FrameSizeChanged:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::frame_size_changed);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eFrameSizeChanged)
            .Add();
        return;
      case NonDecoderResult::InvalidCICP:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::invalid_cicp);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eInvalidCicp)
            .Add();
        return;
      case NonDecoderResult::NoSamples:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::no_samples);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eNoSamples)
            .Add();
        return;
      case NonDecoderResult::ConvertYCbCrFailure:
        AccumulateCategorical(LABELS_AVIF_DECODE_RESULT::ConvertYCbCr_failure);
        mozilla::glean::avif::decode_result
            .EnumGet(glean::avif::DecodeResultLabel::eConvertycbcrFailure)
            .Add();
        return;
    }
    MOZ_ASSERT_UNREACHABLE("unknown NonDecoderResult");
  } else {
    MOZ_ASSERT(aResult.is<Dav1dResult>() || aResult.is<AOMResult>());
    AccumulateCategorical(aResult.is<Dav1dResult>() ? LABELS_AVIF_DECODER::dav1d
                                                    : LABELS_AVIF_DECODER::aom);
    if (aResult.is<Dav1dResult>()) {
      mozilla::glean::avif::decoder.EnumGet(glean::avif::DecoderLabel::eDav1d)
          .Add();
    } else {
      mozilla::glean::avif::decoder.EnumGet(glean::avif::DecoderLabel::eAom)
          .Add();
    }

    AccumulateCategorical(IsDecodeSuccess(aResult)
                              ? LABELS_AVIF_DECODE_RESULT::success
                              : LABELS_AVIF_DECODE_RESULT::decode_error);
    if (IsDecodeSuccess(aResult)) {
      mozilla::glean::avif::decode_result
          .EnumGet(glean::avif::DecodeResultLabel::eSuccess)
          .Add();
    } else {
      mozilla::glean::avif::decode_result
          .EnumGet(glean::avif::DecodeResultLabel::eDecodeError)
          .Add();
    }
  }
}
Maybe<Telemetry::HistogramID> nsAVIFDecoder::SpeedHistogram() const {
  return Some(Telemetry::IMAGE_DECODE_SPEED_AVIF);
}

}  // namespace image
}  // namespace mozilla