Diffstat (limited to 'dynamic-layers/chromium-browser-layer/recipes-browser/chromium/chromium-ozone-wayland/0110-V4L2VDA-Add-hevc-format-support.patch')
-rw-r--r--	dynamic-layers/chromium-browser-layer/recipes-browser/chromium/chromium-ozone-wayland/0110-V4L2VDA-Add-hevc-format-support.patch	313
1 file changed, 313 insertions(+), 0 deletions(-)
diff --git a/dynamic-layers/chromium-browser-layer/recipes-browser/chromium/chromium-ozone-wayland/0110-V4L2VDA-Add-hevc-format-support.patch b/dynamic-layers/chromium-browser-layer/recipes-browser/chromium/chromium-ozone-wayland/0110-V4L2VDA-Add-hevc-format-support.patch
new file mode 100644
index 00000000..aa4f1648
--- /dev/null
+++ b/dynamic-layers/chromium-browser-layer/recipes-browser/chromium/chromium-ozone-wayland/0110-V4L2VDA-Add-hevc-format-support.patch
@@ -0,0 +1,313 @@
From e86109fa5e05268acc3557d308e5ae12136b391a Mon Sep 17 00:00:00 2001
From: Hou Qi <qi.hou@nxp.com>
Date: Mon, 5 Sep 2022 10:38:53 +0800
Subject: [PATCH 10/17] V4L2VDA: Add hevc format support

Upstream-Status: Inappropriate [NXP specific]
---
 media/base/supported_types.cc | 2 +-
 media/gpu/v4l2/v4l2_device.cc | 28 ++++-
 media/gpu/v4l2/v4l2_vda_helpers.cc | 119 ++++++++++++++++++
 media/gpu/v4l2/v4l2_vda_helpers.h | 20 +++
 .../gpu/v4l2/v4l2_video_decode_accelerator.cc | 2 +-
 media/media_options.gni | 4 +-
 6 files changed, 170 insertions(+), 5 deletions(-)

diff --git a/media/base/supported_types.cc b/media/base/supported_types.cc
index 3e174b9320d08..727dc1867e6ff 100644
--- a/media/base/supported_types.cc
+++ b/media/base/supported_types.cc
@@ -318,7 +318,7 @@ bool IsDefaultSupportedVideoType(const VideoType& type) {
 case VideoCodec::kVP9:
 return IsVp9ProfileSupported(type);
 case VideoCodec::kHEVC:
- return IsHevcProfileSupported(type);
+ return true;
 case VideoCodec::kMPEG4:
 return IsMPEG4Supported();
 case VideoCodec::kUnknown:
diff --git a/media/gpu/v4l2/v4l2_device.cc b/media/gpu/v4l2/v4l2_device.cc
index 726ad1ab0f144..e090cad6626f7 100644
--- a/media/gpu/v4l2/v4l2_device.cc
+++ b/media/gpu/v4l2/v4l2_device.cc
@@ -1607,6 +1607,8 @@ uint32_t V4L2Device::VideoCodecProfileToV4L2PixFmt(VideoCodecProfile profile,
 return V4L2_PIX_FMT_VP8;
 } else if (profile >= VP9PROFILE_MIN && profile <= VP9PROFILE_MAX) {
 return V4L2_PIX_FMT_VP9;
+ } else if (profile >= HEVCPROFILE_MIN && profile <= HEVCPROFILE_MAX) {
+ return V4L2_PIX_FMT_HEVC;
 } else if (profile == HEVCPROFILE_MAIN) {
 return V4L2_PIX_FMT_HEVC;
 } else {
@@ -1674,6 +1676,16 @@ VideoCodecProfile V4L2ProfileToVideoCodecProfile(VideoCodec codec,
 return VP9PROFILE_PROFILE2;
 }
 break;
+ case VideoCodec::kHEVC:
+ switch (v4l2_profile) {
+ case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN:
+ return HEVCPROFILE_MAIN;
+ case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN_10:
+ return HEVCPROFILE_MAIN10;
+ case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN_STILL_PICTURE:
+ return HEVCPROFILE_MAIN_STILL_PICTURE;
+ }
+ break;
 default:
 VLOGF(2) << "Unsupported codec: " << GetCodecName(codec);
 }
@@ -1699,6 +1711,9 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
 case VideoCodec::kVP9:
 query_id = V4L2_CID_MPEG_VIDEO_VP9_PROFILE;
 break;
+ case VideoCodec::kHEVC:
+ query_id = V4L2_CID_MPEG_VIDEO_HEVC_PROFILE;
+ break;
 default:
 return false;
 }
@@ -1757,6 +1772,17 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
 profiles = {VP9PROFILE_PROFILE0};
 }
 break;
+ case V4L2_PIX_FMT_HEVC:
+ if (!get_supported_profiles(VideoCodec::kHEVC, &profiles)) {
+ DLOG(WARNING) << "Driver doesn't support QUERY HEVC profiles, "
+ << "use default values, main, main-10, main-still-picture";
+ profiles = {
+ HEVCPROFILE_MAIN,
+ HEVCPROFILE_MAIN10,
+ HEVCPROFILE_MAIN_STILL_PICTURE,
+ };
+ }
+ break;
 default:
 VLOGF(1) << "Unhandled pixelformat " << FourccToString(pix_fmt);
 return {};
@@ -2091,7 +2117,7 @@ void V4L2Device::GetSupportedResolution(uint32_t pixelformat,
 }
 }
 if (max_resolution->IsEmpty()) {
- max_resolution->SetSize(1920, 1088);
+ max_resolution->SetSize(4096, 4096);
 VLOGF(1) << "GetSupportedResolution failed to get maximum resolution for "
 << "fourcc " << FourccToString(pixelformat) << ", fall back to "
 << max_resolution->ToString();
diff --git a/media/gpu/v4l2/v4l2_vda_helpers.cc b/media/gpu/v4l2/v4l2_vda_helpers.cc
index f25619077035c..5fa8593a5cf1e 100644
--- a/media/gpu/v4l2/v4l2_vda_helpers.cc
+++ b/media/gpu/v4l2/v4l2_vda_helpers.cc
@@ -12,6 +12,7 @@
 #include "media/gpu/v4l2/v4l2_device.h"
 #include "media/gpu/v4l2/v4l2_image_processor_backend.h"
 #include "media/video/h264_parser.h"
+#include "media/video/h265_parser.h"

 namespace media {
 namespace v4l2_vda_helpers {
@@ -155,6 +156,9 @@ InputBufferFragmentSplitter::CreateFromProfile(
 case VideoCodec::kVP9:
 // VP8/VP9 don't need any frame splitting, use the default implementation.
 return std::make_unique<v4l2_vda_helpers::InputBufferFragmentSplitter>();
+ case VideoCodec::kHEVC:
+ return std::make_unique<
+ v4l2_vda_helpers::H265InputBufferFragmentSplitter>();
 default:
 LOG(ERROR) << "Unhandled profile: " << profile;
 return nullptr;
@@ -274,5 +278,120 @@ bool H264InputBufferFragmentSplitter::IsPartialFramePending() const {
 return partial_frame_pending_;
 }

+H265InputBufferFragmentSplitter::H265InputBufferFragmentSplitter()
+ : h265_parser_(new H265Parser()) {}
+
+H265InputBufferFragmentSplitter::~H265InputBufferFragmentSplitter() = default;
+
+bool H265InputBufferFragmentSplitter::AdvanceFrameFragment(const uint8_t* data,
+ size_t size,
+ size_t* endpos) {
+ DCHECK(h265_parser_);
+
+ // For H265, we need to feed HW one frame at a time. This is going to take
+ // some parsing of our input stream.
+ h265_parser_->SetStream(data, size);
+ H265NALU nalu;
+ H265Parser::Result result;
+ bool has_frame_data = false;
+ *endpos = 0;
+ DVLOGF(4) << "H265InputBufferFragmentSplitter::AdvanceFrameFragment size" << size;
+ // Keep on peeking the next NALs while they don't indicate a frame
+ // boundary.
+ while (true) {
+ bool end_of_frame = false;
+ result = h265_parser_->AdvanceToNextNALU(&nalu);
+ if (result == H265Parser::kInvalidStream ||
+ result == H265Parser::kUnsupportedStream) {
+ return false;
+ }
+
+ DVLOGF(4) << "NALU type " << nalu.nal_unit_type << "NALU size" << nalu.size;
+ if (result == H265Parser::kEOStream) {
+ // We've reached the end of the buffer before finding a frame boundary.
+ if (has_frame_data){
+ // partial_frame_pending_ = true;
+ // DVLOGF(4)<<"partial_frame_pending_ true as H265Parser::kEOStream has_frame_data";
+ }
+ *endpos = size;
+ DVLOGF(4)<< " MET kEOStream endpos " << *endpos <<" nalu.size " << nalu.size;
+ return true;
+ }
+ switch (nalu.nal_unit_type) {
+ case H265NALU::TRAIL_N:
+ case H265NALU::TRAIL_R:
+ case H265NALU::TSA_N:
+ case H265NALU::TSA_R:
+ case H265NALU::STSA_N:
+ case H265NALU::STSA_R:
+ case H265NALU::RADL_R:
+ case H265NALU::RADL_N:
+ case H265NALU::RASL_N:
+ case H265NALU::RASL_R:
+ case H265NALU::BLA_W_LP:
+ case H265NALU::BLA_W_RADL:
+ case H265NALU::BLA_N_LP:
+ case H265NALU::IDR_W_RADL:
+ case H265NALU::IDR_N_LP:
+ case H265NALU::CRA_NUT:
+ if (nalu.size < 1)
+ return false;
+
+ has_frame_data = true;
+
+ // For these slice NALU types, start a new frame and return when the
+ // second byte of the NAL unit has its most significant bit set, i.e.
+ // the byte is (unsigned) greater than or equal to 0x80. This mirrors
+ // the H.264 splitter's check for a "first_mb_in_slice" of zero, which
+ // is Exp-Golomb coded with a leading '1' bit.
+ if (nalu.data[1] >= 0x80) {
+ end_of_frame = true;
+ break;
+ }
+ break;
+ case H265NALU::VPS_NUT:
+ case H265NALU::SPS_NUT:
+ case H265NALU::PPS_NUT:
+ case H265NALU::AUD_NUT:
+ case H265NALU::EOS_NUT:
+ case H265NALU::EOB_NUT:
+ case H265NALU::FD_NUT:
+ case H265NALU::PREFIX_SEI_NUT:
+ case H265NALU::SUFFIX_SEI_NUT:
+ // These unconditionally signal a frame boundary.
+ end_of_frame = true;
+ break;
+ default:
+ // For all others, keep going.
+ break;
+ }
+ if (end_of_frame) {
+ if (!partial_frame_pending_ && *endpos == 0) {
+ // The frame was previously restarted, and we haven't filled the
+ // current frame with any contents yet. Start the new frame here and
+ // continue parsing NALs.
+ } else {
+ // The frame wasn't previously restarted and/or we have contents for
+ // the current frame; signal the start of a new frame here: we don't
+ // have a partial frame anymore.
+ partial_frame_pending_ = false;
+ // return true;
+ }
+ }
+ *endpos = (nalu.data + nalu.size) - data;
+ }
+ NOTREACHED();
+ return false;
+}
+
+void H265InputBufferFragmentSplitter::Reset() {
+ partial_frame_pending_ = false;
+ h265_parser_.reset(new H265Parser());
+}
+
+bool H265InputBufferFragmentSplitter::IsPartialFramePending() const {
+ return partial_frame_pending_;
+}
+
 } // namespace v4l2_vda_helpers
 } // namespace media
diff --git a/media/gpu/v4l2/v4l2_vda_helpers.h b/media/gpu/v4l2/v4l2_vda_helpers.h
index ebd07cf7e5b37..4b7fbd2985473 100644
--- a/media/gpu/v4l2/v4l2_vda_helpers.h
+++ b/media/gpu/v4l2/v4l2_vda_helpers.h
@@ -18,6 +18,7 @@ namespace media {

 class V4L2Device;
 class H264Parser;
+class H265Parser;

 // Helper static methods to be shared between V4L2VideoDecodeAccelerator and
 // V4L2SliceVideoDecodeAccelerator. This avoids some code duplication between
@@ -115,6 +116,25 @@ class H264InputBufferFragmentSplitter : public InputBufferFragmentSplitter {
 bool partial_frame_pending_ = false;
 };

+class H265InputBufferFragmentSplitter : public InputBufferFragmentSplitter {
+ public:
+ explicit H265InputBufferFragmentSplitter();
+ ~H265InputBufferFragmentSplitter() override;
+
+ bool AdvanceFrameFragment(const uint8_t* data,
+ size_t size,
+ size_t* endpos) override;
+ void Reset() override;
+ bool IsPartialFramePending() const override;
+
+ private:
+ // For H265 decode, hardware requires that we send it frame-sized chunks.
+ // We'll need to parse the stream.
+ std::unique_ptr<H265Parser> h265_parser_;
+ // Set if we have a pending incomplete frame in the input buffer.
+ bool partial_frame_pending_ = false;
+};
+
 } // namespace v4l2_vda_helpers
 } // namespace media

diff --git a/media/gpu/v4l2/v4l2_video_decode_accelerator.cc b/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
index 018fe8c25f506..c00cd2b5f6ad7 100644
--- a/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
+++ b/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
@@ -84,7 +84,7 @@ bool IsVp9KSVCStream(uint32_t input_format_fourcc,

 // static
 const uint32_t V4L2VideoDecodeAccelerator::supported_input_fourccs_[] = {
- V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9,
+ V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9, V4L2_PIX_FMT_HEVC,
 };

 // static
diff --git a/media/media_options.gni b/media/media_options.gni
index 1b2af27c5079d..0da73f1f81407 100644
--- a/media/media_options.gni
+++ b/media/media_options.gni
@@ -92,14 +92,14 @@ declare_args() {
 # video on ChromeOS and Windows.
 enable_platform_hevc =
 proprietary_codecs &&
- (is_chromecast || use_fuzzing_engine || enable_platform_encrypted_hevc)
+ (is_chromecast || use_fuzzing_engine || enable_platform_encrypted_hevc || use_v4l2_codec)

 # Enable HEVC/H265 decoding with hardware acceleration assist. Enabled by
 # default for fuzzer builds and protected video on ChromeOS. It is also
 # enabled for Chromecast by default so the unit tests get run in Chrome CQ.
 enable_platform_hevc_decoding =
 proprietary_codecs &&
- (is_chromecast || use_fuzzing_engine || use_chromeos_protected_media)
+ (is_chromecast || use_fuzzing_engine || use_chromeos_protected_media || use_v4l2_codec)

 assert(
--
2.17.1