meta-imx/meta-sdk/dynamic-layers/chromium-browser-layer/recipes-browser/chromium/chromium-ozone-wayland/0110-V4L2VDA-Add-hevc-format-support.patch
Hou Qi fe0d3587d9 chromium-ozone-wayland: Add V4L2VDA support on Linux [YOCIMX-6545]
- support on i.MX 8 series platform.
    - 8MM/8MP/8MQ support h264/hevc/vp8/vp9 hardware decoding.
    - 8QM/8QXP support h264/hevc/vp8 hardware decoding.
    - support seek, playrate change and resolution change.
    - support video acceleration for online media platform such as Youtube, bilibili.

Signed-off-by: Hou Qi <qi.hou@nxp.com>
2022-09-26 09:05:39 +08:00

314 lines
12 KiB
Diff

From e86109fa5e05268acc3557d308e5ae12136b391a Mon Sep 17 00:00:00 2001
From: Hou Qi <qi.hou@nxp.com>
Date: Mon, 5 Sep 2022 10:38:53 +0800
Subject: [PATCH 10/17] V4L2VDA: Add hevc format support
Upstream-Status: Inappropriate [NXP specific]
---
media/base/supported_types.cc | 2 +-
media/gpu/v4l2/v4l2_device.cc | 28 ++++-
media/gpu/v4l2/v4l2_vda_helpers.cc | 119 ++++++++++++++++++
media/gpu/v4l2/v4l2_vda_helpers.h | 20 +++
.../gpu/v4l2/v4l2_video_decode_accelerator.cc | 2 +-
media/media_options.gni | 4 +-
6 files changed, 170 insertions(+), 5 deletions(-)
diff --git a/media/base/supported_types.cc b/media/base/supported_types.cc
index 3e174b9320d08..727dc1867e6ff 100644
--- a/media/base/supported_types.cc
+++ b/media/base/supported_types.cc
@@ -318,7 +318,7 @@ bool IsDefaultSupportedVideoType(const VideoType& type) {
case VideoCodec::kVP9:
return IsVp9ProfileSupported(type);
case VideoCodec::kHEVC:
- return IsHevcProfileSupported(type);
+ return true;
case VideoCodec::kMPEG4:
return IsMPEG4Supported();
case VideoCodec::kUnknown:
diff --git a/media/gpu/v4l2/v4l2_device.cc b/media/gpu/v4l2/v4l2_device.cc
index 726ad1ab0f144..e090cad6626f7 100644
--- a/media/gpu/v4l2/v4l2_device.cc
+++ b/media/gpu/v4l2/v4l2_device.cc
@@ -1607,6 +1607,8 @@ uint32_t V4L2Device::VideoCodecProfileToV4L2PixFmt(VideoCodecProfile profile,
return V4L2_PIX_FMT_VP8;
} else if (profile >= VP9PROFILE_MIN && profile <= VP9PROFILE_MAX) {
return V4L2_PIX_FMT_VP9;
+ } else if (profile >= HEVCPROFILE_MIN && profile <= HEVCPROFILE_MAX) {
+ return V4L2_PIX_FMT_HEVC;
} else if (profile == HEVCPROFILE_MAIN) {
return V4L2_PIX_FMT_HEVC;
} else {
@@ -1674,6 +1676,16 @@ VideoCodecProfile V4L2ProfileToVideoCodecProfile(VideoCodec codec,
return VP9PROFILE_PROFILE2;
}
break;
+ case VideoCodec::kHEVC:
+ switch (v4l2_profile) {
+ case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN:
+ return HEVCPROFILE_MAIN;
+ case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN_10:
+ return HEVCPROFILE_MAIN10;
+ case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN_STILL_PICTURE:
+ return HEVCPROFILE_MAIN_STILL_PICTURE;
+ }
+ break;
default:
VLOGF(2) << "Unsupported codec: " << GetCodecName(codec);
}
@@ -1699,6 +1711,9 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
case VideoCodec::kVP9:
query_id = V4L2_CID_MPEG_VIDEO_VP9_PROFILE;
break;
+ case VideoCodec::kHEVC:
+ query_id = V4L2_CID_MPEG_VIDEO_HEVC_PROFILE;
+ break;
default:
return false;
}
@@ -1757,6 +1772,17 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
profiles = {VP9PROFILE_PROFILE0};
}
break;
+ case V4L2_PIX_FMT_HEVC:
+ if (!get_supported_profiles(VideoCodec::kHEVC, &profiles)) {
+ DLOG(WARNING) << "Driver doesn't support QUERY HEVC profiles, "
+ << "use default values, main, main-10, main-still-picture";
+ profiles = {
+ HEVCPROFILE_MAIN,
+ HEVCPROFILE_MAIN10,
+ HEVCPROFILE_MAIN_STILL_PICTURE,
+ };
+ }
+ break;
default:
VLOGF(1) << "Unhandled pixelformat " << FourccToString(pix_fmt);
return {};
@@ -2091,7 +2117,7 @@ void V4L2Device::GetSupportedResolution(uint32_t pixelformat,
}
}
if (max_resolution->IsEmpty()) {
- max_resolution->SetSize(1920, 1088);
+ max_resolution->SetSize(4096, 4096);
VLOGF(1) << "GetSupportedResolution failed to get maximum resolution for "
<< "fourcc " << FourccToString(pixelformat) << ", fall back to "
<< max_resolution->ToString();
diff --git a/media/gpu/v4l2/v4l2_vda_helpers.cc b/media/gpu/v4l2/v4l2_vda_helpers.cc
index f25619077035c..5fa8593a5cf1e 100644
--- a/media/gpu/v4l2/v4l2_vda_helpers.cc
+++ b/media/gpu/v4l2/v4l2_vda_helpers.cc
@@ -12,6 +12,7 @@
#include "media/gpu/v4l2/v4l2_device.h"
#include "media/gpu/v4l2/v4l2_image_processor_backend.h"
#include "media/video/h264_parser.h"
+#include "media/video/h265_parser.h"
namespace media {
namespace v4l2_vda_helpers {
@@ -155,6 +156,9 @@ InputBufferFragmentSplitter::CreateFromProfile(
case VideoCodec::kVP9:
// VP8/VP9 don't need any frame splitting, use the default implementation.
return std::make_unique<v4l2_vda_helpers::InputBufferFragmentSplitter>();
+ case VideoCodec::kHEVC:
+ return std::make_unique<
+ v4l2_vda_helpers::H265InputBufferFragmentSplitter>();
default:
LOG(ERROR) << "Unhandled profile: " << profile;
return nullptr;
@@ -274,5 +278,120 @@ bool H264InputBufferFragmentSplitter::IsPartialFramePending() const {
return partial_frame_pending_;
}
+H265InputBufferFragmentSplitter::H265InputBufferFragmentSplitter()
+ : h265_parser_(new H265Parser()) {}
+
+H265InputBufferFragmentSplitter::~H265InputBufferFragmentSplitter() = default;
+
+bool H265InputBufferFragmentSplitter::AdvanceFrameFragment(const uint8_t* data,
+ size_t size,
+ size_t* endpos) {
+ DCHECK(h265_parser_);
+
+ // For H265, we need to feed HW one frame at a time. This is going to take
+ // some parsing of our input stream.
+ h265_parser_->SetStream(data, size);
+ H265NALU nalu;
+ H265Parser::Result result;
+ bool has_frame_data = false;
+ *endpos = 0;
+ DVLOGF(4) << "H265InputBufferFragmentSplitter::AdvanceFrameFragment size " << size;
+ // Keep on peeking the next NALs while they don't indicate a frame
+ // boundary.
+ while (true) {
+ bool end_of_frame = false;
+ result = h265_parser_->AdvanceToNextNALU(&nalu);
+ if (result == H265Parser::kInvalidStream ||
+ result == H265Parser::kUnsupportedStream) {
+ return false;
+ }
+
+ DVLOGF(4) << "NALU type " << nalu.nal_unit_type << " NALU size " << nalu.size;
+ if (result == H265Parser::kEOStream) {
+ // We've reached the end of the buffer before finding a frame boundary.
+ if (has_frame_data){
+ // partial_frame_pending_ = true;
+ // DVLOGF(4)<<"partial_frame_pending_ true as H265Parser::kEOStream has_frame_data";
+ }
+ *endpos = size;
+ DVLOGF(4)<< " MET kEOStream endpos " << *endpos <<" nalu.size " << nalu.size;
+ return true;
+ }
+ switch (nalu.nal_unit_type) {
+ case H265NALU::TRAIL_N:
+ case H265NALU::TRAIL_R:
+ case H265NALU::TSA_N:
+ case H265NALU::TSA_R:
+ case H265NALU::STSA_N:
+ case H265NALU::STSA_R:
+ case H265NALU::RADL_R:
+ case H265NALU::RADL_N:
+ case H265NALU::RASL_N:
+ case H265NALU::RASL_R:
+ case H265NALU::BLA_W_LP:
+ case H265NALU::BLA_W_RADL:
+ case H265NALU::BLA_N_LP:
+ case H265NALU::IDR_W_RADL:
+ case H265NALU::IDR_N_LP:
+ case H265NALU::CRA_NUT:
+ if (nalu.size < 1)
+ return false;
+
+ has_frame_data = true;
+
+ // If "first_slice_segment_in_pic_flag" (the first bit of the slice
+ // segment header) is set, this NALU starts a new frame; a set flag
+ // makes that byte (unsigned) >= 0x80. NOTE(review): the H265 NAL
+ // header is two bytes (H264's is one), so data[1] below may still be
+ // the NAL header's second byte, not the slice header -- verify.
+ if (nalu.data[1] >= 0x80) {
+ end_of_frame = true;
+ break;
+ }
+ break;
+ case H265NALU::VPS_NUT:
+ case H265NALU::SPS_NUT:
+ case H265NALU::PPS_NUT:
+ case H265NALU::AUD_NUT:
+ case H265NALU::EOS_NUT:
+ case H265NALU::EOB_NUT:
+ case H265NALU::FD_NUT:
+ case H265NALU::PREFIX_SEI_NUT:
+ case H265NALU::SUFFIX_SEI_NUT:
+ // These unconditionally signal a frame boundary.
+ end_of_frame = true;
+ break;
+ default:
+ // For all others, keep going.
+ break;
+ }
+ if (end_of_frame) {
+ if (!partial_frame_pending_ && *endpos == 0) {
+ // The frame was previously restarted, and we haven't filled the
+ // current frame with any contents yet. Start the new frame here and
+ // continue parsing NALs.
+ } else {
+ // The frame wasn't previously restarted and/or we have contents for
+ // the current frame; signal the start of a new frame here: we don't
+ // have a partial frame anymore.
+ partial_frame_pending_ = false;
+ // return true;
+ }
+ }
+ *endpos = (nalu.data + nalu.size) - data;
+ }
+ NOTREACHED();
+ return false;
+}
+
+void H265InputBufferFragmentSplitter::Reset() {
+ partial_frame_pending_ = false;
+ h265_parser_.reset(new H265Parser());
+}
+
+bool H265InputBufferFragmentSplitter::IsPartialFramePending() const {
+ return partial_frame_pending_;
+}
+
} // namespace v4l2_vda_helpers
} // namespace media
diff --git a/media/gpu/v4l2/v4l2_vda_helpers.h b/media/gpu/v4l2/v4l2_vda_helpers.h
index ebd07cf7e5b37..4b7fbd2985473 100644
--- a/media/gpu/v4l2/v4l2_vda_helpers.h
+++ b/media/gpu/v4l2/v4l2_vda_helpers.h
@@ -18,6 +18,7 @@ namespace media {
class V4L2Device;
class H264Parser;
+class H265Parser;
// Helper static methods to be shared between V4L2VideoDecodeAccelerator and
// V4L2SliceVideoDecodeAccelerator. This avoids some code duplication between
@@ -115,6 +116,25 @@ class H264InputBufferFragmentSplitter : public InputBufferFragmentSplitter {
bool partial_frame_pending_ = false;
};
+class H265InputBufferFragmentSplitter : public InputBufferFragmentSplitter {
+ public:
+ explicit H265InputBufferFragmentSplitter();
+ ~H265InputBufferFragmentSplitter() override;
+
+ bool AdvanceFrameFragment(const uint8_t* data,
+ size_t size,
+ size_t* endpos) override;
+ void Reset() override;
+ bool IsPartialFramePending() const override;
+
+ private:
+ // For H265 decode, hardware requires that we send it frame-sized chunks.
+ // We'll need to parse the stream.
+ std::unique_ptr<H265Parser> h265_parser_;
+ // Set if we have a pending incomplete frame in the input buffer.
+ bool partial_frame_pending_ = false;
+};
+
} // namespace v4l2_vda_helpers
} // namespace media
diff --git a/media/gpu/v4l2/v4l2_video_decode_accelerator.cc b/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
index 018fe8c25f506..c00cd2b5f6ad7 100644
--- a/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
+++ b/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
@@ -84,7 +84,7 @@ bool IsVp9KSVCStream(uint32_t input_format_fourcc,
// static
const uint32_t V4L2VideoDecodeAccelerator::supported_input_fourccs_[] = {
- V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9,
+ V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9, V4L2_PIX_FMT_HEVC,
};
// static
diff --git a/media/media_options.gni b/media/media_options.gni
index 1b2af27c5079d..0da73f1f81407 100644
--- a/media/media_options.gni
+++ b/media/media_options.gni
@@ -92,14 +92,14 @@ declare_args() {
# video on ChromeOS and Windows.
enable_platform_hevc =
proprietary_codecs &&
- (is_chromecast || use_fuzzing_engine || enable_platform_encrypted_hevc)
+ (is_chromecast || use_fuzzing_engine || enable_platform_encrypted_hevc || use_v4l2_codec)
# Enable HEVC/H265 decoding with hardware acceleration assist. Enabled by
# default for fuzzer builds and protected video on ChromeOS. It is also
# enabled for Chromecast by default so the unit tests get run in Chrome CQ.
enable_platform_hevc_decoding =
proprietary_codecs &&
- (is_chromecast || use_fuzzing_engine || use_chromeos_protected_media)
+ (is_chromecast || use_fuzzing_engine || use_chromeos_protected_media || use_v4l2_codec)
}
assert(
--
2.17.1