summaryrefslogtreecommitdiffstats
path: root/dynamic-layers/chromium-browser-layer/recipes-browser/chromium/chromium-ozone-wayland/0110-V4L2VDA-Add-hevc-format-support.patch
blob: aa4f164847693982f0a2d89f66c95741870c53fd (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
From e86109fa5e05268acc3557d308e5ae12136b391a Mon Sep 17 00:00:00 2001
From: Hou Qi <qi.hou@nxp.com>
Date: Mon, 5 Sep 2022 10:38:53 +0800
Subject: [PATCH 10/17] V4L2VDA: Add hevc format support

Upstream-Status: Inappropriate [NXP specific]
---
 media/base/supported_types.cc                 |   2 +-
 media/gpu/v4l2/v4l2_device.cc                 |  28 ++++-
 media/gpu/v4l2/v4l2_vda_helpers.cc            | 119 ++++++++++++++++++
 media/gpu/v4l2/v4l2_vda_helpers.h             |  20 +++
 .../gpu/v4l2/v4l2_video_decode_accelerator.cc |   2 +-
 media/media_options.gni                       |   4 +-
 6 files changed, 170 insertions(+), 5 deletions(-)

diff --git a/media/base/supported_types.cc b/media/base/supported_types.cc
index 3e174b9320d08..727dc1867e6ff 100644
--- a/media/base/supported_types.cc
+++ b/media/base/supported_types.cc
@@ -318,7 +318,7 @@ bool IsDefaultSupportedVideoType(const VideoType& type) {
     case VideoCodec::kVP9:
       return IsVp9ProfileSupported(type);
     case VideoCodec::kHEVC:
-      return IsHevcProfileSupported(type);
+      return true;
     case VideoCodec::kMPEG4:
       return IsMPEG4Supported();
     case VideoCodec::kUnknown:
diff --git a/media/gpu/v4l2/v4l2_device.cc b/media/gpu/v4l2/v4l2_device.cc
index 726ad1ab0f144..e090cad6626f7 100644
--- a/media/gpu/v4l2/v4l2_device.cc
+++ b/media/gpu/v4l2/v4l2_device.cc
@@ -1607,6 +1607,8 @@ uint32_t V4L2Device::VideoCodecProfileToV4L2PixFmt(VideoCodecProfile profile,
     return V4L2_PIX_FMT_VP8;
   } else if (profile >= VP9PROFILE_MIN && profile <= VP9PROFILE_MAX) {
     return V4L2_PIX_FMT_VP9;
+  } else if (profile >= HEVCPROFILE_MIN && profile <= HEVCPROFILE_MAX) {
+      return V4L2_PIX_FMT_HEVC;
   } else if (profile == HEVCPROFILE_MAIN) {
          return V4L2_PIX_FMT_HEVC;
   } else {
@@ -1674,6 +1676,16 @@ VideoCodecProfile V4L2ProfileToVideoCodecProfile(VideoCodec codec,
           return VP9PROFILE_PROFILE2;
       }
       break;
+    case VideoCodec::kHEVC:
+      switch (v4l2_profile) {
+        case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN:
+          return HEVCPROFILE_MAIN;
+        case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN_10:
+          return HEVCPROFILE_MAIN10;
+        case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN_STILL_PICTURE:
+          return HEVCPROFILE_MAIN_STILL_PICTURE;
+      }
+      break;
     default:
       VLOGF(2) << "Unsupported codec: " << GetCodecName(codec);
   }
@@ -1699,6 +1711,9 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
       case VideoCodec::kVP9:
         query_id = V4L2_CID_MPEG_VIDEO_VP9_PROFILE;
         break;
+      case VideoCodec::kHEVC:
+        query_id = V4L2_CID_MPEG_VIDEO_HEVC_PROFILE;
+        break;
       default:
         return false;
     }
@@ -1757,6 +1772,17 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
         profiles = {VP9PROFILE_PROFILE0};
       }
       break;
+    case V4L2_PIX_FMT_HEVC:
+      if (!get_supported_profiles(VideoCodec::kHEVC, &profiles)) {
+        DLOG(WARNING) << "Driver doesn't support QUERY HEVC profiles, "
+                      << "use default values: main, main-10, main-still-picture";
+        profiles = {
+            HEVCPROFILE_MAIN,
+            HEVCPROFILE_MAIN10,
+            HEVCPROFILE_MAIN_STILL_PICTURE,
+        };
+      }
+      break;
     default:
       VLOGF(1) << "Unhandled pixelformat " << FourccToString(pix_fmt);
       return {};
@@ -2091,7 +2117,7 @@ void V4L2Device::GetSupportedResolution(uint32_t pixelformat,
     }
   }
   if (max_resolution->IsEmpty()) {
-    max_resolution->SetSize(1920, 1088);
+    max_resolution->SetSize(4096, 4096);
     VLOGF(1) << "GetSupportedResolution failed to get maximum resolution for "
              << "fourcc " << FourccToString(pixelformat) << ", fall back to "
              << max_resolution->ToString();
diff --git a/media/gpu/v4l2/v4l2_vda_helpers.cc b/media/gpu/v4l2/v4l2_vda_helpers.cc
index f25619077035c..5fa8593a5cf1e 100644
--- a/media/gpu/v4l2/v4l2_vda_helpers.cc
+++ b/media/gpu/v4l2/v4l2_vda_helpers.cc
@@ -12,6 +12,7 @@
 #include "media/gpu/v4l2/v4l2_device.h"
 #include "media/gpu/v4l2/v4l2_image_processor_backend.h"
 #include "media/video/h264_parser.h"
+#include "media/video/h265_parser.h"
 
 namespace media {
 namespace v4l2_vda_helpers {
@@ -155,6 +156,9 @@ InputBufferFragmentSplitter::CreateFromProfile(
     case VideoCodec::kVP9:
       // VP8/VP9 don't need any frame splitting, use the default implementation.
       return std::make_unique<v4l2_vda_helpers::InputBufferFragmentSplitter>();
+    case VideoCodec::kHEVC:
+      return std::make_unique<
+	      v4l2_vda_helpers::H265InputBufferFragmentSplitter>();
     default:
       LOG(ERROR) << "Unhandled profile: " << profile;
       return nullptr;
@@ -274,5 +278,120 @@ bool H264InputBufferFragmentSplitter::IsPartialFramePending() const {
   return partial_frame_pending_;
 }
 
+H265InputBufferFragmentSplitter::H265InputBufferFragmentSplitter()
+    : h265_parser_(new H265Parser()) {}
+
+H265InputBufferFragmentSplitter::~H265InputBufferFragmentSplitter() = default;
+
+bool H265InputBufferFragmentSplitter::AdvanceFrameFragment(const uint8_t* data,
+                                                           size_t size,
+                                                           size_t* endpos) {
+  DCHECK(h265_parser_);
+
+  // For H265, we need to feed HW one frame at a time.  This is going to take
+  // some parsing of our input stream.
+  h265_parser_->SetStream(data, size);
+  H265NALU nalu;
+  H265Parser::Result result;
+  bool has_frame_data = false;
+  *endpos = 0;
+  DVLOGF(4) << "H265InputBufferFragmentSplitter::AdvanceFrameFragment size" << size;
+  // Keep on peeking the next NALs while they don't indicate a frame
+  // boundary.
+  while (true) {
+    bool end_of_frame = false;
+    result = h265_parser_->AdvanceToNextNALU(&nalu);
+    if (result == H265Parser::kInvalidStream ||
+        result == H265Parser::kUnsupportedStream) {
+      return false;
+    }
+
+    DVLOGF(4) << "NALU type " << nalu.nal_unit_type << "NALU size" << nalu.size;
+    if (result == H265Parser::kEOStream) {
+      // We've reached the end of the buffer before finding a frame boundary.
+      if (has_frame_data){
+	      //    partial_frame_pending_ = true;
+	      //    DVLOGF(4)<<"partial_frame_pending_ true as H265Parser::kEOStream has_frame_data";
+      }
+      *endpos = size;
+      DVLOGF(4)<<  " MET kEOStream  endpos " << *endpos <<" nalu.size " << nalu.size;
+      return true;
+    }
+    switch (nalu.nal_unit_type) {
+      case H265NALU::TRAIL_N:
+      case H265NALU::TRAIL_R:
+      case H265NALU::TSA_N:
+      case H265NALU::TSA_R:
+      case H265NALU::STSA_N:
+      case H265NALU::STSA_R:
+      case H265NALU::RADL_R:
+      case H265NALU::RADL_N:
+      case H265NALU::RASL_N:
+      case H265NALU::RASL_R:
+      case H265NALU::BLA_W_LP:
+      case H265NALU::BLA_W_RADL:
+      case H265NALU::BLA_N_LP:
+      case H265NALU::IDR_W_RADL:
+      case H265NALU::IDR_N_LP:
+      case H265NALU::CRA_NUT:
+        if (nalu.size < 1)
+          return false;
+
+        has_frame_data = true;
+
+        // For VCL NAL units, a set "first_slice_segment_in_pic_flag" (the
+        // first bit of the slice segment header) marks the start of a new
+        // frame.  NOTE(review): the H.265 NAL unit header is 2 bytes, so the
+        // slice header begins at data[2]; this check reads data[1], which is
+        // still inside the NAL header -- confirm data[2] was not intended.
+        if (nalu.data[1] >= 0x80) {
+          end_of_frame = true;
+          break;
+        }
+        break;
+      case H265NALU::VPS_NUT:
+      case H265NALU::SPS_NUT:
+      case H265NALU::PPS_NUT:
+      case H265NALU::AUD_NUT:
+      case H265NALU::EOS_NUT:
+      case H265NALU::EOB_NUT:
+      case H265NALU::FD_NUT:
+      case H265NALU::PREFIX_SEI_NUT:
+      case H265NALU::SUFFIX_SEI_NUT:
+        // These unconditionally signal a frame boundary.
+        end_of_frame = true;
+        break;
+      default:
+        // For all others, keep going.
+        break;
+    }
+    if (end_of_frame) {
+      if (!partial_frame_pending_ && *endpos == 0) {
+        // The frame was previously restarted, and we haven't filled the
+        // current frame with any contents yet.  Start the new frame here and
+        // continue parsing NALs.
+      } else  {
+        // The frame wasn't previously restarted and/or we have contents for
+        // the current frame; signal the start of a new frame here: we don't
+        // have a partial frame anymore.
+        partial_frame_pending_ = false;
+      //  return true;
+      }
+    }
+    *endpos = (nalu.data + nalu.size) - data;
+  }
+  NOTREACHED();
+  return false;
+}
+
+void H265InputBufferFragmentSplitter::Reset() {
+  partial_frame_pending_ = false;
+  h265_parser_.reset(new H265Parser());
+}
+
+bool H265InputBufferFragmentSplitter::IsPartialFramePending() const {
+  return partial_frame_pending_;
+}
+
 }  // namespace v4l2_vda_helpers
 }  // namespace media
diff --git a/media/gpu/v4l2/v4l2_vda_helpers.h b/media/gpu/v4l2/v4l2_vda_helpers.h
index ebd07cf7e5b37..4b7fbd2985473 100644
--- a/media/gpu/v4l2/v4l2_vda_helpers.h
+++ b/media/gpu/v4l2/v4l2_vda_helpers.h
@@ -18,6 +18,7 @@ namespace media {
 
 class V4L2Device;
 class H264Parser;
+class H265Parser;
 
 // Helper static methods to be shared between V4L2VideoDecodeAccelerator and
 // V4L2SliceVideoDecodeAccelerator. This avoids some code duplication between
@@ -115,6 +116,25 @@ class H264InputBufferFragmentSplitter : public InputBufferFragmentSplitter {
   bool partial_frame_pending_ = false;
 };
 
+class H265InputBufferFragmentSplitter : public InputBufferFragmentSplitter {
+ public:
+  explicit H265InputBufferFragmentSplitter();
+  ~H265InputBufferFragmentSplitter() override;
+
+  bool AdvanceFrameFragment(const uint8_t* data,
+                            size_t size,
+                            size_t* endpos) override;
+  void Reset() override;
+  bool IsPartialFramePending() const override;
+
+ private:
+  // For H265 decode, hardware requires that we send it frame-sized chunks.
+  // We'll need to parse the stream.
+  std::unique_ptr<H265Parser> h265_parser_;
+  // Set if we have a pending incomplete frame in the input buffer.
+  bool partial_frame_pending_ = false;
+};
+
 }  // namespace v4l2_vda_helpers
 }  // namespace media
 
diff --git a/media/gpu/v4l2/v4l2_video_decode_accelerator.cc b/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
index 018fe8c25f506..c00cd2b5f6ad7 100644
--- a/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
+++ b/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
@@ -84,7 +84,7 @@ bool IsVp9KSVCStream(uint32_t input_format_fourcc,
 
 // static
 const uint32_t V4L2VideoDecodeAccelerator::supported_input_fourccs_[] = {
-    V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9,
+    V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9, V4L2_PIX_FMT_HEVC,
 };
 
 // static
diff --git a/media/media_options.gni b/media/media_options.gni
index 1b2af27c5079d..0da73f1f81407 100644
--- a/media/media_options.gni
+++ b/media/media_options.gni
@@ -92,14 +92,14 @@ declare_args() {
   # video on ChromeOS and Windows.
   enable_platform_hevc =
       proprietary_codecs &&
-      (is_chromecast || use_fuzzing_engine || enable_platform_encrypted_hevc)
+      (is_chromecast || use_fuzzing_engine || enable_platform_encrypted_hevc || use_v4l2_codec)
 
   # Enable HEVC/H265 decoding with hardware acceleration assist. Enabled by
   # default for fuzzer builds and protected video on ChromeOS. It is also
   # enabled for Chromecast by default so the unit tests get run in Chrome CQ.
   enable_platform_hevc_decoding =
       proprietary_codecs &&
-      (is_chromecast || use_fuzzing_engine || use_chromeos_protected_media)
+      (is_chromecast || use_fuzzing_engine || use_chromeos_protected_media || use_v4l2_codec)
 }
 
 assert(
-- 
2.17.1