mirror of
https://github.com/nxp-imx/meta-imx.git
synced 2025-07-19 18:39:09 +02:00
143 lines
5.7 KiB
Diff
From c175fe907f93f5db9327ca1030820cdba8df137c Mon Sep 17 00:00:00 2001
|
|
From: Hou Qi <qi.hou@nxp.com>
|
|
Date: Fri, 13 Sep 2024 22:36:04 +0900
|
|
Subject: [PATCH 03/19] V4L2VideoDecoder: Create single/multi plane queues
|
|
|
|
Decide whether to create a single-plane queue or a multi-plane queue according to
|
|
the capabilities returned by VIDIOC_QUERYCAP.
|
|
|
|
Upstream-Status: Inappropriate [NXP specific]
|
|
---
|
|
media/gpu/v4l2/v4l2_device.cc | 29 ++++++++++++++++++++--------
|
|
media/gpu/v4l2/v4l2_video_decoder.cc | 27 ++++++++++++++++++--------
|
|
2 files changed, 40 insertions(+), 16 deletions(-)
|
|
|
|
diff --git a/media/gpu/v4l2/v4l2_device.cc b/media/gpu/v4l2/v4l2_device.cc
|
|
index ebbf240a3c5e7..c04873fe2ae84 100644
|
|
--- a/media/gpu/v4l2/v4l2_device.cc
|
|
+++ b/media/gpu/v4l2/v4l2_device.cc
|
|
@@ -92,6 +92,8 @@ scoped_refptr<V4L2Queue> V4L2Device::GetQueue(enum v4l2_buf_type type) {
|
|
// Supported queue types.
|
|
case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
|
|
case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
|
|
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT:
|
|
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE:
|
|
break;
|
|
default:
|
|
VLOGF(1) << "Unsupported V4L2 queue type: " << type;
|
|
@@ -538,9 +540,17 @@ V4L2Device::EnumerateSupportedDecodeProfiles(
|
|
const std::vector<uint32_t>& pixelformats) {
|
|
VideoDecodeAccelerator::SupportedProfiles profiles;
|
|
|
|
- const auto v4l2_codecs_as_pix_fmts =
|
|
+ std::vector<uint32_t> enumerated_pixelformats;
|
|
+ enumerated_pixelformats =
|
|
EnumerateSupportedPixFmts(base::BindRepeating(&V4L2Device::Ioctl, this),
|
|
- V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
|
|
+ V4L2_BUF_TYPE_VIDEO_OUTPUT);
|
|
+ if (enumerated_pixelformats.empty()) {
|
|
+ VLOG(1) << "empty.... Try Multi-plane";
|
|
+ enumerated_pixelformats =
|
|
+ EnumerateSupportedPixFmts(base::BindRepeating(&V4L2Device::Ioctl, this),
|
|
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
|
|
+ }
|
|
+ const auto v4l2_codecs_as_pix_fmts = enumerated_pixelformats;
|
|
|
|
for (uint32_t pixelformat : v4l2_codecs_as_pix_fmts) {
|
|
if (!base::Contains(pixelformats, pixelformat)) {
|
|
@@ -859,27 +869,28 @@ void V4L2Device::EnumerateDevicesForType(Type type) {
|
|
static const std::string kJpegEncoderDevicePattern = "/dev/jpeg-enc";
|
|
|
|
std::string device_pattern;
|
|
- v4l2_buf_type buf_type;
|
|
+ std::vector<v4l2_buf_type> candidate_buf_types;
|
|
switch (type) {
|
|
case Type::kDecoder:
|
|
device_pattern = kDecoderDevicePattern;
|
|
- buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
|
|
+ candidate_buf_types.push_back(V4L2_BUF_TYPE_VIDEO_OUTPUT);
|
|
+ candidate_buf_types.push_back(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
|
|
break;
|
|
case Type::kEncoder:
|
|
device_pattern = kEncoderDevicePattern;
|
|
- buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
|
|
+ candidate_buf_types.push_back(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
|
|
break;
|
|
case Type::kImageProcessor:
|
|
device_pattern = kImageProcessorDevicePattern;
|
|
- buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
|
|
+ candidate_buf_types.push_back(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
|
|
break;
|
|
case Type::kJpegDecoder:
|
|
device_pattern = kJpegDecoderDevicePattern;
|
|
- buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
|
|
+ candidate_buf_types.push_back(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
|
|
break;
|
|
case Type::kJpegEncoder:
|
|
device_pattern = kJpegEncoderDevicePattern;
|
|
- buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
|
|
+ candidate_buf_types.push_back(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
|
|
break;
|
|
}
|
|
|
|
@@ -899,6 +910,7 @@ void V4L2Device::EnumerateDevicesForType(Type type) {
|
|
|
|
Devices devices;
|
|
for (const auto& path : candidate_paths) {
|
|
+ for (const auto& buf_type : candidate_buf_types){
|
|
if (!OpenDevicePath(path)) {
|
|
continue;
|
|
}
|
|
@@ -912,6 +924,7 @@ void V4L2Device::EnumerateDevicesForType(Type type) {
|
|
|
|
CloseDevice();
|
|
}
|
|
+ }
|
|
|
|
DCHECK_EQ(devices_by_type_.count(type), 0u);
|
|
devices_by_type_[type] = devices;
|
|
diff --git a/media/gpu/v4l2/v4l2_video_decoder.cc b/media/gpu/v4l2/v4l2_video_decoder.cc
|
|
index 5765761592344..857576ae56d6a 100644
|
|
--- a/media/gpu/v4l2/v4l2_video_decoder.cc
|
|
+++ b/media/gpu/v4l2/v4l2_video_decoder.cc
|
|
@@ -454,17 +454,28 @@ V4L2Status V4L2VideoDecoder::InitializeBackend() {
|
|
#endif // BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
|
|
|
|
struct v4l2_capability caps;
|
|
- const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
|
|
- if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) ||
|
|
- (caps.capabilities & kCapsRequired) != kCapsRequired) {
|
|
- VLOGF(1) << "ioctl() failed: VIDIOC_QUERYCAP, "
|
|
- << "caps check failed: 0x" << std::hex << caps.capabilities;
|
|
+ unsigned int device_caps;
|
|
+ enum v4l2_buf_type input_type, output_type;
|
|
+ if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0)
|
|
return V4L2Status::Codes::kFailedFileCapabilitiesCheck;
|
|
- }
|
|
+
|
|
+ if (caps.capabilities & V4L2_CAP_DEVICE_CAPS)
|
|
+ device_caps = caps.device_caps;
|
|
+ else
|
|
+ device_caps = caps.capabilities;
|
|
+
|
|
+ if (device_caps & (V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_VIDEO_M2M_MPLANE))
|
|
+ input_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
|
|
+ else
|
|
+ input_type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
|
|
+ if (device_caps & (V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_M2M_MPLANE))
|
|
+ output_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
|
|
+ else
|
|
+ output_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
|
|
|
// Create Input/Output V4L2Queue
|
|
- input_queue_ = device_->GetQueue(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
|
|
- output_queue_ = device_->GetQueue(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
|
|
+ input_queue_ = device_->GetQueue(input_type);
|
|
+ output_queue_ = device_->GetQueue(output_type);
|
|
if (!input_queue_ || !output_queue_) {
|
|
VLOGF(1) << "Failed to create V4L2 queue.";
|
|
return V4L2Status::Codes::kFailedResourceAllocation;
|
|
--
|
|
2.34.1
|
|
|