author     Ross Burton <ross.burton@intel.com>    2013-12-10 19:44:58 +0000
committer  Darren Hart <darren@dvhart.com>        2014-07-28 16:23:28 +0000
commit     67b7f6f3a03ddbdcf66823eb5a236edbc0aa4858 (patch)
tree       3710a6232f0bdd3af374185042a18ac1aa0e1bd7 /common/recipes-multimedia/gstreamer
parent     c3a5385bd09ea0dd711ef8af1bda8b91e281b601 (diff)
download   meta-intel-67b7f6f3a03ddbdcf66823eb5a236edbc0aa4858.tar.gz
gstreamer-vaapi: Upgrade to 0.5.8
For the 0.10 variant:
- Drop wayland-compile patch merged upstream
- Add files which were missing in the tarball
- Disable Wayland support as it doesn't build
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Darren Hart <dvhart@linux.intel.com>
Diffstat (limited to 'common/recipes-multimedia/gstreamer')
-rw-r--r--  common/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.6.bb           7
-rw-r--r--  common/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.8.bb          14
-rw-r--r--  common/recipes-multimedia/gstreamer/gstreamer-vaapi-1.0_0.5.8.bb (renamed from common/recipes-multimedia/gstreamer/gstreamer-vaapi-1.0_0.5.6.bb)   0
-rw-r--r--  common/recipes-multimedia/gstreamer/gstreamer-vaapi.inc                     4
-rw-r--r--  common/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.c    1712
-rw-r--r--  common/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.h     308
-rw-r--r--  common/recipes-multimedia/gstreamer/gstreamer-vaapi/wayland-compile.patch  17
7 files changed, 2036 insertions, 26 deletions
diff --git a/common/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.6.bb b/common/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.6.bb
deleted file mode 100644
index 784e0c55..00000000
--- a/common/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.6.bb
+++ /dev/null
@@ -1,7 +0,0 @@
-require gstreamer-vaapi.inc
-
-DEPENDS += "gstreamer gst-plugins-base gst-plugins-bad"
-
-SRC_URI += "file://wayland-compile.patch"
-
-GST_API_VERSION = "0.10"
diff --git a/common/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.8.bb b/common/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.8.bb
new file mode 100644
index 00000000..98e475df
--- /dev/null
+++ b/common/recipes-multimedia/gstreamer/gstreamer-vaapi-0.10_0.5.8.bb
@@ -0,0 +1,14 @@
+require gstreamer-vaapi.inc
+
+DEPENDS += "gstreamer gst-plugins-base gst-plugins-bad"
+
+GST_API_VERSION = "0.10"
+
+SRC_URI += "file://gstvideoencoder.c file://gstvideoencoder.h"
+
+PACKAGECONFIG_remove = "wayland"
+
+# SRC_URI subdir parameter is broken for files, when fixed do the move there
+do_compile_prepend() {
+	cp -f ${WORKDIR}/gstvideoencoder.[ch] ${S}/ext/videoutils/gst-libs/gst/video/
+}
diff --git a/common/recipes-multimedia/gstreamer/gstreamer-vaapi-1.0_0.5.6.bb b/common/recipes-multimedia/gstreamer/gstreamer-vaapi-1.0_0.5.8.bb
index 886b3c16..886b3c16 100644
--- a/common/recipes-multimedia/gstreamer/gstreamer-vaapi-1.0_0.5.6.bb
+++ b/common/recipes-multimedia/gstreamer/gstreamer-vaapi-1.0_0.5.8.bb
diff --git a/common/recipes-multimedia/gstreamer/gstreamer-vaapi.inc b/common/recipes-multimedia/gstreamer/gstreamer-vaapi.inc
index 321ae4cf..2f5b6f16 100644
--- a/common/recipes-multimedia/gstreamer/gstreamer-vaapi.inc
+++ b/common/recipes-multimedia/gstreamer/gstreamer-vaapi.inc
@@ -14,8 +14,8 @@ DEPENDS = "libva"
 SRC_URI = "http://www.freedesktop.org/software/vaapi/releases/${REALPN}/${REALPN}-${PV}.tar.bz2 \
            file://install-tests.patch"
 
-SRC_URI[md5sum] = "0a3e645d12c8f275e8ea221ecb89f981"
-SRC_URI[sha256sum] = "3bef196f8e05d775f4e2b5b7111c4ba7393093bd1bc3297e781224f3bf51dea1"
+SRC_URI[md5sum] = "375ddbab556e53ccc311b792f2c649a7"
+SRC_URI[sha256sum] = "24fee8a1ca4cb99ed7739d876b17a4085e81b28550350867dee5105300d343c6"
 
 S = "${WORKDIR}/${REALPN}-${PV}"
 
diff --git a/common/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.c b/common/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.c
new file mode 100644
index 00000000..7ca6aab1
--- /dev/null
+++ b/common/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.c
@@ -0,0 +1,1712 @@
1 | /* GStreamer | ||
2 | * Copyright (C) 2008 David Schleef <ds@schleef.org> | ||
3 | * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>. | ||
4 | * Copyright (C) 2011 Nokia Corporation. All rights reserved. | ||
5 | * Contact: Stefan Kost <stefan.kost@nokia.com> | ||
6 | * Copyright (C) 2012 Collabora Ltd. | ||
7 | * Author : Edward Hervey <edward@collabora.com> | ||
8 | * | ||
9 | * This library is free software; you can redistribute it and/or | ||
10 | * modify it under the terms of the GNU Library General Public | ||
11 | * License as published by the Free Software Foundation; either | ||
12 | * version 2 of the License, or (at your option) any later version. | ||
13 | * | ||
14 | * This library is distributed in the hope that it will be useful, | ||
15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
17 | * Library General Public License for more details. | ||
18 | * | ||
19 | * You should have received a copy of the GNU Library General Public | ||
20 | * License along with this library; if not, write to the | ||
21 | * Free Software Foundation, Inc., 59 Temple Place - Suite 330, | ||
22 | * Boston, MA 02111-1307, USA. | ||
23 | */ | ||
24 | |||
25 | /** | ||
26 | * SECTION:gstvideoencoder | ||
27 | * @short_description: Base class for video encoders | ||
28 | * @see_also: | ||
29 | * | ||
30 | * This base class is for video encoders turning raw video into | ||
31 | * encoded video data. | ||
32 | * | ||
33 | * GstVideoEncoder and subclass should cooperate as follows. | ||
34 | * <orderedlist> | ||
35 | * <listitem> | ||
36 | * <itemizedlist><title>Configuration</title> | ||
37 | * <listitem><para> | ||
38 | * Initially, GstVideoEncoder calls @start when the encoder element | ||
39 | * is activated, which allows subclass to perform any global setup. | ||
40 | * </para></listitem> | ||
41 | * <listitem><para> | ||
42 | * GstVideoEncoder calls @set_format to inform subclass of the format | ||
43 | * of input video data that it is about to receive. Subclass should | ||
44 | * setup for encoding and configure base class as appropriate | ||
45 | * (e.g. latency). While unlikely, it might be called more than once, | ||
46 | * if changing input parameters require reconfiguration. Baseclass | ||
47 | * will ensure that processing of current configuration is finished. | ||
48 | * </para></listitem> | ||
49 | * <listitem><para> | ||
50 | * GstVideoEncoder calls @stop at end of all processing. | ||
51 | * </para></listitem> | ||
52 | * </itemizedlist> | ||
53 | * </listitem> | ||
54 | * <listitem> | ||
55 | * <itemizedlist> | ||
56 | * <title>Data processing</title> | ||
57 | * <listitem><para> | ||
58 | * Base class collects input data and metadata into a frame and hands | ||
59 | * this to subclass' @handle_frame. | ||
60 | * </para></listitem> | ||
61 | * <listitem><para> | ||
62 | * If codec processing results in encoded data, subclass should call | ||
63 | * @gst_video_encoder_finish_frame to have encoded data pushed | ||
64 | * downstream. | ||
65 | * </para></listitem> | ||
66 | * <listitem><para> | ||
67 | * If implemented, baseclass calls subclass @pre_push just prior to | ||
68 | * pushing to allow subclasses to modify some metadata on the buffer. | ||
69 | * If it returns GST_FLOW_OK, the buffer is pushed downstream. | ||
70 | * </para></listitem> | ||
71 | * <listitem><para> | ||
72 | * GstVideoEncoderClass will handle both srcpad and sinkpad events. | ||
73 | * Sink events will be passed to subclass if @event callback has been | ||
74 | * provided. | ||
75 | * </para></listitem> | ||
76 | * </itemizedlist> | ||
77 | * </listitem> | ||
78 | * <listitem> | ||
79 | * <itemizedlist><title>Shutdown phase</title> | ||
80 | * <listitem><para> | ||
81 | * GstVideoEncoder class calls @stop to inform the subclass that data | ||
82 | * parsing will be stopped. | ||
83 | * </para></listitem> | ||
84 | * </itemizedlist> | ||
85 | * </listitem> | ||
86 | * </orderedlist> | ||
87 | * | ||
88 | * Subclass is responsible for providing pad template caps for | ||
89 | * source and sink pads. The pads need to be named "sink" and "src". It should | ||
90 | * also be able to provide fixed src pad caps in @getcaps by the time it calls | ||
91 | * @gst_video_encoder_finish_frame. | ||
92 | * | ||
93 | * Things that subclass need to take care of: | ||
94 | * <itemizedlist> | ||
95 | * <listitem><para>Provide pad templates</para></listitem> | ||
96 | * <listitem><para> | ||
97 | * Provide source pad caps before pushing the first buffer | ||
98 | * </para></listitem> | ||
99 | * <listitem><para> | ||
100 | * Accept data in @handle_frame and provide encoded results to | ||
101 | * @gst_video_encoder_finish_frame. | ||
102 | * </para></listitem> | ||
103 | * </itemizedlist> | ||
104 | * | ||
105 | */ | ||
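/*
 * Illustrative sketch only (not part of this file): the rough shape of a
 * subclass following the contract described above.  The my_encoder_* and
 * my_codec_* names are hypothetical placeholders.
 *
 *   static gboolean
 *   my_encoder_set_format (GstVideoEncoder * enc, GstVideoCodecState * state)
 *   {
 *     // Input format is known here: configure the codec from &state->info
 *     // (width/height/framerate) and report latency as needed.
 *     return my_codec_configure (&state->info);
 *   }
 *
 *   static GstFlowReturn
 *   my_encoder_handle_frame (GstVideoEncoder * enc, GstVideoCodecFrame * frame)
 *   {
 *     // Encode the raw input buffer; once coded data is available, attach
 *     // it to the frame and hand it back to the base class for pushing.
 *     frame->output_buffer = my_codec_encode (frame->input_buffer);
 *     return gst_video_encoder_finish_frame (enc, frame);
 *   }
 *
 *   static void
 *   my_encoder_class_init (MyEncoderClass * klass)
 *   {
 *     GstVideoEncoderClass *venc_class = GST_VIDEO_ENCODER_CLASS (klass);
 *
 *     venc_class->set_format = GST_DEBUG_FUNCPTR (my_encoder_set_format);
 *     venc_class->handle_frame = GST_DEBUG_FUNCPTR (my_encoder_handle_frame);
 *   }
 */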
106 | |||
107 | #ifdef HAVE_CONFIG_H | ||
108 | #include "config.h" | ||
109 | #endif | ||
110 | |||
111 | /* TODO | ||
112 | * | ||
113 | * * Change _set_output_format() to steal the reference of the provided caps | ||
114 | * * Calculate actual latency based on input/output timestamp/frame_number | ||
115 | * and if it exceeds the recorded one, save it and emit a GST_MESSAGE_LATENCY | ||
116 | */ | ||
117 | |||
118 | /* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex | ||
119 | * with newer GLib versions (>= 2.31.0) */ | ||
120 | #define GLIB_DISABLE_DEPRECATION_WARNINGS | ||
121 | |||
122 | #include "gstvideoencoder.h" | ||
123 | #include "gstvideoutils.h" | ||
124 | |||
125 | #include <string.h> | ||
126 | |||
127 | GST_DEBUG_CATEGORY (videoencoder_debug); | ||
128 | #define GST_CAT_DEFAULT videoencoder_debug | ||
129 | |||
130 | #define GST_VIDEO_ENCODER_GET_PRIVATE(obj) \ | ||
131 | (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_VIDEO_ENCODER, \ | ||
132 | GstVideoEncoderPrivate)) | ||
133 | |||
134 | struct _GstVideoEncoderPrivate | ||
135 | { | ||
136 | guint64 presentation_frame_number; | ||
137 | int distance_from_sync; | ||
138 | |||
139 | /* FIXME : (and introduce a context ?) */ | ||
140 | gboolean drained; | ||
141 | gboolean at_eos; | ||
142 | |||
143 | gint64 min_latency; | ||
144 | gint64 max_latency; | ||
145 | |||
146 | GList *current_frame_events; | ||
147 | |||
148 | GList *headers; | ||
149 | gboolean new_headers; /* Whether new headers were just set */ | ||
150 | |||
151 | GList *force_key_unit; /* List of pending forced keyunits */ | ||
152 | |||
153 | guint32 system_frame_number; | ||
154 | |||
155 | GList *frames; /* Protected with OBJECT_LOCK */ | ||
156 | GstVideoCodecState *input_state; | ||
157 | GstVideoCodecState *output_state; | ||
158 | gboolean output_state_changed; | ||
159 | |||
160 | gint64 bytes; | ||
161 | gint64 time; | ||
162 | }; | ||
163 | |||
164 | typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent; | ||
165 | struct _ForcedKeyUnitEvent | ||
166 | { | ||
167 | GstClockTime running_time; | ||
168 | gboolean pending; /* TRUE if this was requested already */ | ||
169 | gboolean all_headers; | ||
170 | guint count; | ||
171 | }; | ||
172 | |||
173 | static void | ||
174 | forced_key_unit_event_free (ForcedKeyUnitEvent * evt) | ||
175 | { | ||
176 | g_slice_free (ForcedKeyUnitEvent, evt); | ||
177 | } | ||
178 | |||
179 | static ForcedKeyUnitEvent * | ||
180 | forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers, | ||
181 | guint count) | ||
182 | { | ||
183 | ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent); | ||
184 | |||
185 | evt->running_time = running_time; | ||
186 | evt->all_headers = all_headers; | ||
187 | evt->count = count; | ||
188 | |||
189 | return evt; | ||
190 | } | ||
191 | |||
192 | static void gst_video_encoder_finalize (GObject * object); | ||
193 | |||
194 | static gboolean gst_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps); | ||
195 | static GstCaps *gst_video_encoder_sink_getcaps (GstPad * pad); | ||
196 | static gboolean gst_video_encoder_src_event (GstPad * pad, GstEvent * event); | ||
197 | static gboolean gst_video_encoder_sink_event (GstPad * pad, GstEvent * event); | ||
198 | static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstBuffer * buf); | ||
199 | static GstStateChangeReturn gst_video_encoder_change_state (GstElement * | ||
200 | element, GstStateChange transition); | ||
201 | static const GstQueryType *gst_video_encoder_get_query_types (GstPad * pad); | ||
202 | static gboolean gst_video_encoder_src_query (GstPad * pad, GstQuery * query); | ||
203 | static GstVideoCodecFrame *gst_video_encoder_new_frame (GstVideoEncoder * | ||
204 | encoder, GstBuffer * buf, GstClockTime timestamp, GstClockTime duration); | ||
205 | |||
206 | static void | ||
207 | _do_init (GType object_type) | ||
208 | { | ||
209 | const GInterfaceInfo preset_interface_info = { | ||
210 | NULL, /* interface_init */ | ||
211 | NULL, /* interface_finalize */ | ||
212 | NULL /* interface_data */ | ||
213 | }; | ||
214 | |||
215 | g_type_add_interface_static (object_type, GST_TYPE_PRESET, | ||
216 | &preset_interface_info); | ||
217 | } | ||
218 | |||
219 | GST_BOILERPLATE_FULL (GstVideoEncoder, gst_video_encoder, | ||
220 | GstElement, GST_TYPE_ELEMENT, _do_init); | ||
221 | |||
222 | static void | ||
223 | gst_video_encoder_base_init (gpointer g_class) | ||
224 | { | ||
225 | GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0, | ||
226 | "Base Video Encoder"); | ||
227 | } | ||
228 | |||
229 | static void | ||
230 | gst_video_encoder_class_init (GstVideoEncoderClass * klass) | ||
231 | { | ||
232 | GObjectClass *gobject_class; | ||
233 | GstElementClass *gstelement_class; | ||
234 | |||
235 | gobject_class = G_OBJECT_CLASS (klass); | ||
236 | gstelement_class = GST_ELEMENT_CLASS (klass); | ||
237 | |||
238 | g_type_class_add_private (klass, sizeof (GstVideoEncoderPrivate)); | ||
239 | |||
240 | gobject_class->finalize = gst_video_encoder_finalize; | ||
241 | |||
242 | gstelement_class->change_state = | ||
243 | GST_DEBUG_FUNCPTR (gst_video_encoder_change_state); | ||
244 | } | ||
245 | |||
246 | static void | ||
247 | gst_video_encoder_reset (GstVideoEncoder * encoder) | ||
248 | { | ||
249 | GstVideoEncoderPrivate *priv = encoder->priv; | ||
250 | GList *g; | ||
251 | |||
252 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
253 | |||
254 | priv->presentation_frame_number = 0; | ||
255 | priv->distance_from_sync = 0; | ||
256 | |||
257 | g_list_foreach (priv->force_key_unit, (GFunc) forced_key_unit_event_free, | ||
258 | NULL); | ||
259 | g_list_free (priv->force_key_unit); | ||
260 | priv->force_key_unit = NULL; | ||
261 | |||
262 | priv->drained = TRUE; | ||
263 | priv->min_latency = 0; | ||
264 | priv->max_latency = 0; | ||
265 | |||
266 | g_list_foreach (priv->headers, (GFunc) gst_event_unref, NULL); | ||
267 | g_list_free (priv->headers); | ||
268 | priv->headers = NULL; | ||
269 | priv->new_headers = FALSE; | ||
270 | |||
271 | g_list_foreach (priv->current_frame_events, (GFunc) gst_event_unref, NULL); | ||
272 | g_list_free (priv->current_frame_events); | ||
273 | priv->current_frame_events = NULL; | ||
274 | |||
275 | for (g = priv->frames; g; g = g->next) { | ||
276 | gst_video_codec_frame_unref ((GstVideoCodecFrame *) g->data); | ||
277 | } | ||
278 | g_list_free (priv->frames); | ||
279 | priv->frames = NULL; | ||
280 | |||
281 | priv->bytes = 0; | ||
282 | priv->time = 0; | ||
283 | |||
284 | if (priv->input_state) | ||
285 | gst_video_codec_state_unref (priv->input_state); | ||
286 | priv->input_state = NULL; | ||
287 | if (priv->output_state) | ||
288 | gst_video_codec_state_unref (priv->output_state); | ||
289 | priv->output_state = NULL; | ||
290 | |||
291 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
292 | } | ||
293 | |||
294 | static void | ||
295 | gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass) | ||
296 | { | ||
297 | GstVideoEncoderPrivate *priv; | ||
298 | GstPadTemplate *pad_template; | ||
299 | GstPad *pad; | ||
300 | |||
301 | GST_DEBUG_OBJECT (encoder, "gst_video_encoder_init"); | ||
302 | |||
303 | priv = encoder->priv = GST_VIDEO_ENCODER_GET_PRIVATE (encoder); | ||
304 | |||
305 | pad_template = | ||
306 | gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink"); | ||
307 | g_return_if_fail (pad_template != NULL); | ||
308 | |||
309 | encoder->sinkpad = pad = gst_pad_new_from_template (pad_template, "sink"); | ||
310 | |||
311 | gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_encoder_chain)); | ||
312 | gst_pad_set_event_function (pad, | ||
313 | GST_DEBUG_FUNCPTR (gst_video_encoder_sink_event)); | ||
314 | gst_pad_set_setcaps_function (pad, | ||
315 | GST_DEBUG_FUNCPTR (gst_video_encoder_sink_setcaps)); | ||
316 | gst_pad_set_getcaps_function (pad, | ||
317 | GST_DEBUG_FUNCPTR (gst_video_encoder_sink_getcaps)); | ||
318 | gst_element_add_pad (GST_ELEMENT (encoder), encoder->sinkpad); | ||
319 | |||
320 | pad_template = | ||
321 | gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src"); | ||
322 | g_return_if_fail (pad_template != NULL); | ||
323 | |||
324 | encoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src"); | ||
325 | |||
326 | gst_pad_set_query_type_function (pad, | ||
327 | GST_DEBUG_FUNCPTR (gst_video_encoder_get_query_types)); | ||
328 | gst_pad_set_query_function (pad, | ||
329 | GST_DEBUG_FUNCPTR (gst_video_encoder_src_query)); | ||
330 | gst_pad_set_event_function (pad, | ||
331 | GST_DEBUG_FUNCPTR (gst_video_encoder_src_event)); | ||
332 | gst_element_add_pad (GST_ELEMENT (encoder), encoder->srcpad); | ||
333 | |||
334 | gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME); | ||
335 | gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME); | ||
336 | |||
337 | g_static_rec_mutex_init (&encoder->stream_lock); | ||
338 | |||
339 | priv->at_eos = FALSE; | ||
340 | priv->headers = NULL; | ||
341 | priv->new_headers = FALSE; | ||
342 | |||
343 | gst_video_encoder_reset (encoder); | ||
344 | } | ||
345 | |||
346 | static gboolean | ||
347 | gst_video_encoded_video_convert (gint64 bytes, gint64 time, | ||
348 | GstFormat src_format, gint64 src_value, GstFormat * dest_format, | ||
349 | gint64 * dest_value) | ||
350 | { | ||
351 | gboolean res = FALSE; | ||
352 | |||
353 | g_return_val_if_fail (dest_format != NULL, FALSE); | ||
354 | g_return_val_if_fail (dest_value != NULL, FALSE); | ||
355 | |||
356 | if (G_UNLIKELY (src_format == *dest_format || src_value == 0 || | ||
357 | src_value == -1)) { | ||
358 | if (dest_value) | ||
359 | *dest_value = src_value; | ||
360 | return TRUE; | ||
361 | } | ||
362 | |||
363 | if (bytes <= 0 || time <= 0) { | ||
364 | GST_DEBUG ("not enough metadata yet to convert"); | ||
365 | goto exit; | ||
366 | } | ||
367 | |||
368 | switch (src_format) { | ||
369 | case GST_FORMAT_BYTES: | ||
370 | switch (*dest_format) { | ||
371 | case GST_FORMAT_TIME: | ||
372 | *dest_value = gst_util_uint64_scale (src_value, time, bytes); | ||
373 | res = TRUE; | ||
374 | break; | ||
375 | default: | ||
376 | res = FALSE; | ||
377 | } | ||
378 | break; | ||
379 | case GST_FORMAT_TIME: | ||
380 | switch (*dest_format) { | ||
381 | case GST_FORMAT_BYTES: | ||
382 | *dest_value = gst_util_uint64_scale (src_value, bytes, time); | ||
383 | res = TRUE; | ||
384 | break; | ||
385 | default: | ||
386 | res = FALSE; | ||
387 | } | ||
388 | break; | ||
389 | default: | ||
390 | GST_DEBUG ("unhandled conversion from %d to %d", src_format, | ||
391 | *dest_format); | ||
392 | res = FALSE; | ||
393 | } | ||
394 | |||
395 | exit: | ||
396 | return res; | ||
397 | } | ||
398 | |||
399 | /** | ||
400 | * gst_video_encoder_set_headers: | ||
401 | * @encoder: a #GstVideoEncoder | ||
402 | * @headers: (transfer full) (element-type GstBuffer): a list of #GstBuffer containing the codec header | ||
403 | * | ||
404 | * Set the codec headers to be sent downstream whenever requested. | ||
405 | * | ||
406 | * Since: 0.10.37 | ||
407 | */ | ||
408 | void | ||
409 | gst_video_encoder_set_headers (GstVideoEncoder * video_encoder, GList * headers) | ||
410 | { | ||
411 | GST_VIDEO_ENCODER_STREAM_LOCK (video_encoder); | ||
412 | |||
413 | GST_DEBUG_OBJECT (video_encoder, "new headers %p", headers); | ||
414 | if (video_encoder->priv->headers) { | ||
415 | g_list_foreach (video_encoder->priv->headers, (GFunc) gst_buffer_unref, | ||
416 | NULL); | ||
417 | g_list_free (video_encoder->priv->headers); | ||
418 | } | ||
419 | video_encoder->priv->headers = headers; | ||
420 | video_encoder->priv->new_headers = TRUE; | ||
421 | |||
422 | GST_VIDEO_ENCODER_STREAM_UNLOCK (video_encoder); | ||
423 | } | ||
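/*
 * Illustrative sketch only: a subclass typically hands over its stream
 * headers (e.g. hypothetical sps_buf/pps_buf GstBuffers for H.264) once
 * they are known, usually from set_format() or the first handle_frame().
 * The list and its buffers are transferred to the base class.
 *
 *   GList *headers = NULL;
 *
 *   headers = g_list_append (headers, sps_buf);
 *   headers = g_list_append (headers, pps_buf);
 *   gst_video_encoder_set_headers (encoder, headers);
 */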
424 | |||
425 | static gboolean | ||
426 | gst_video_encoder_drain (GstVideoEncoder * enc) | ||
427 | { | ||
428 | GstVideoEncoderPrivate *priv; | ||
429 | GstVideoEncoderClass *enc_class; | ||
430 | gboolean ret = TRUE; | ||
431 | |||
432 | enc_class = GST_VIDEO_ENCODER_GET_CLASS (enc); | ||
433 | priv = enc->priv; | ||
434 | |||
435 | GST_DEBUG_OBJECT (enc, "draining"); | ||
436 | |||
437 | if (priv->drained) { | ||
438 | GST_DEBUG_OBJECT (enc, "already drained"); | ||
439 | return TRUE; | ||
440 | } | ||
441 | |||
442 | if (enc_class->reset) { | ||
443 | GST_DEBUG_OBJECT (enc, "requesting subclass to finish"); | ||
444 | ret = enc_class->reset (enc, TRUE); | ||
445 | } | ||
446 | /* everything should be away now */ | ||
447 | if (priv->frames) { | ||
448 | /* not fatal/impossible though if subclass/enc eats stuff */ | ||
449 | g_list_foreach (priv->frames, (GFunc) gst_video_codec_frame_unref, NULL); | ||
450 | g_list_free (priv->frames); | ||
451 | priv->frames = NULL; | ||
452 | } | ||
453 | |||
454 | return ret; | ||
455 | } | ||
456 | |||
457 | static GstVideoCodecState * | ||
458 | _new_output_state (GstCaps * caps, GstVideoCodecState * reference) | ||
459 | { | ||
460 | GstVideoCodecState *state; | ||
461 | |||
462 | state = g_slice_new0 (GstVideoCodecState); | ||
463 | state->ref_count = 1; | ||
464 | gst_video_info_init (&state->info); | ||
465 | gst_video_info_set_format (&state->info, GST_VIDEO_FORMAT_ENCODED, 0, 0); | ||
466 | |||
467 | state->caps = caps; | ||
468 | |||
469 | if (reference) { | ||
470 | GstVideoInfo *tgt, *ref; | ||
471 | |||
472 | tgt = &state->info; | ||
473 | ref = &reference->info; | ||
474 | |||
475 | /* Copy over extra fields from reference state */ | ||
476 | tgt->interlace_mode = ref->interlace_mode; | ||
477 | tgt->flags = ref->flags; | ||
478 | tgt->width = ref->width; | ||
479 | tgt->height = ref->height; | ||
480 | tgt->chroma_site = ref->chroma_site; | ||
481 | tgt->colorimetry = ref->colorimetry; | ||
482 | tgt->par_n = ref->par_n; | ||
483 | tgt->par_d = ref->par_d; | ||
484 | tgt->fps_n = ref->fps_n; | ||
485 | tgt->fps_d = ref->fps_d; | ||
486 | } | ||
487 | |||
488 | return state; | ||
489 | } | ||
490 | |||
491 | static GstVideoCodecState * | ||
492 | _new_input_state (GstCaps * caps) | ||
493 | { | ||
494 | GstVideoCodecState *state; | ||
495 | |||
496 | state = g_slice_new0 (GstVideoCodecState); | ||
497 | state->ref_count = 1; | ||
498 | gst_video_info_init (&state->info); | ||
499 | if (G_UNLIKELY (!gst_video_info_from_caps (&state->info, caps))) | ||
500 | goto parse_fail; | ||
501 | state->caps = gst_caps_ref (caps); | ||
502 | |||
503 | return state; | ||
504 | |||
505 | parse_fail: | ||
506 | { | ||
507 | g_slice_free (GstVideoCodecState, state); | ||
508 | return NULL; | ||
509 | } | ||
510 | } | ||
511 | |||
512 | static gboolean | ||
513 | gst_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps) | ||
514 | { | ||
515 | GstVideoEncoder *encoder; | ||
516 | GstVideoEncoderClass *encoder_class; | ||
517 | GstVideoCodecState *state; | ||
518 | gboolean ret; | ||
519 | gboolean samecaps = FALSE; | ||
520 | |||
521 | encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad)); | ||
522 | encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder); | ||
523 | |||
524 | /* subclass should do something here ... */ | ||
525 | g_return_val_if_fail (encoder_class->set_format != NULL, FALSE); | ||
526 | |||
527 | GST_DEBUG_OBJECT (encoder, "setcaps %" GST_PTR_FORMAT, caps); | ||
528 | |||
529 | state = _new_input_state (caps); | ||
530 | if (G_UNLIKELY (!state)) | ||
531 | goto parse_fail; | ||
532 | |||
533 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
534 | |||
535 | if (encoder->priv->input_state) | ||
536 | samecaps = | ||
537 | gst_video_info_is_equal (&state->info, | ||
538 | &encoder->priv->input_state->info); | ||
539 | |||
540 | if (!samecaps) { | ||
541 | /* arrange draining pending frames */ | ||
542 | gst_video_encoder_drain (encoder); | ||
543 | |||
544 | /* and subclass should be ready to configure format at any time around */ | ||
545 | ret = encoder_class->set_format (encoder, state); | ||
546 | if (ret) { | ||
547 | if (encoder->priv->input_state) | ||
548 | gst_video_codec_state_unref (encoder->priv->input_state); | ||
549 | encoder->priv->input_state = state; | ||
550 | } else | ||
551 | gst_video_codec_state_unref (state); | ||
552 | } else { | ||
553 | /* no need to stir things up */ | ||
554 | GST_DEBUG_OBJECT (encoder, | ||
555 | "new video format identical to configured format"); | ||
556 | gst_video_codec_state_unref (state); | ||
557 | ret = TRUE; | ||
558 | } | ||
559 | |||
560 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
561 | |||
562 | if (!ret) | ||
563 | GST_WARNING_OBJECT (encoder, "rejected caps %" GST_PTR_FORMAT, caps); | ||
564 | |||
565 | gst_object_unref (encoder); | ||
566 | |||
567 | return ret; | ||
568 | |||
569 | parse_fail: | ||
570 | { | ||
571 | GST_WARNING_OBJECT (encoder, "Failed to parse caps"); | ||
572 | gst_object_unref (encoder); | ||
573 | return FALSE; | ||
574 | } | ||
575 | } | ||
576 | |||
577 | /** | ||
578 | * gst_video_encoder_proxy_getcaps: | ||
579 | * @enc: a #GstVideoEncoder | ||
580 | * @caps: initial caps | ||
581 | * | ||
582 | * Returns caps that express @caps (or sink template caps if @caps == NULL) | ||
583 | * restricted to resolution/format/... combinations supported by downstream | ||
584 | * elements (e.g. muxers). | ||
585 | * | ||
586 | * Returns: a #GstCaps owned by caller | ||
587 | * | ||
588 | * Since: 0.10.37 | ||
589 | */ | ||
590 | GstCaps * | ||
591 | gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps) | ||
592 | { | ||
593 | const GstCaps *templ_caps; | ||
594 | GstCaps *allowed; | ||
595 | GstCaps *fcaps, *filter_caps; | ||
596 | gint i, j; | ||
597 | |||
598 | /* Allow downstream to specify width/height/framerate/PAR constraints | ||
599 | * and forward them upstream for video converters to handle | ||
600 | */ | ||
601 | templ_caps = caps ? caps : gst_pad_get_pad_template_caps (encoder->sinkpad); | ||
602 | allowed = gst_pad_get_allowed_caps (encoder->srcpad); | ||
603 | |||
604 | if (!allowed || gst_caps_is_empty (allowed) || gst_caps_is_any (allowed)) { | ||
605 | fcaps = gst_caps_copy (templ_caps); | ||
606 | goto done; | ||
607 | } | ||
608 | |||
609 | GST_LOG_OBJECT (encoder, "template caps %" GST_PTR_FORMAT, templ_caps); | ||
610 | GST_LOG_OBJECT (encoder, "allowed caps %" GST_PTR_FORMAT, allowed); | ||
611 | |||
612 | filter_caps = gst_caps_new_empty (); | ||
613 | |||
614 | for (i = 0; i < gst_caps_get_size (templ_caps); i++) { | ||
615 | GQuark q_name = | ||
616 | gst_structure_get_name_id (gst_caps_get_structure (templ_caps, i)); | ||
617 | |||
618 | for (j = 0; j < gst_caps_get_size (allowed); j++) { | ||
619 | const GstStructure *allowed_s = gst_caps_get_structure (allowed, j); | ||
620 | const GValue *val; | ||
621 | GstStructure *s; | ||
622 | |||
623 | s = gst_structure_id_empty_new (q_name); | ||
624 | if ((val = gst_structure_get_value (allowed_s, "width"))) | ||
625 | gst_structure_set_value (s, "width", val); | ||
626 | if ((val = gst_structure_get_value (allowed_s, "height"))) | ||
627 | gst_structure_set_value (s, "height", val); | ||
628 | if ((val = gst_structure_get_value (allowed_s, "framerate"))) | ||
629 | gst_structure_set_value (s, "framerate", val); | ||
630 | if ((val = gst_structure_get_value (allowed_s, "pixel-aspect-ratio"))) | ||
631 | gst_structure_set_value (s, "pixel-aspect-ratio", val); | ||
632 | |||
633 | gst_caps_merge_structure (filter_caps, s); | ||
634 | } | ||
635 | } | ||
636 | |||
637 | fcaps = gst_caps_intersect (filter_caps, templ_caps); | ||
638 | gst_caps_unref (filter_caps); | ||
639 | |||
640 | done: | ||
641 | gst_caps_replace (&allowed, NULL); | ||
642 | |||
643 | GST_LOG_OBJECT (encoder, "proxy caps %" GST_PTR_FORMAT, fcaps); | ||
644 | |||
645 | return fcaps; | ||
646 | } | ||
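/*
 * Illustrative sketch only: a subclass that wants its advertised sink caps
 * restricted to what downstream accepts can implement ::getcaps on top of
 * this helper.  my_encoder_supported_caps() is a hypothetical placeholder.
 *
 *   static GstCaps *
 *   my_encoder_getcaps (GstVideoEncoder * enc)
 *   {
 *     GstCaps *raw = my_encoder_supported_caps ();
 *     GstCaps *res = gst_video_encoder_proxy_getcaps (enc, raw);
 *
 *     gst_caps_unref (raw);
 *     return res;
 *   }
 */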
647 | |||
648 | static GstCaps * | ||
649 | gst_video_encoder_sink_getcaps (GstPad * pad) | ||
650 | { | ||
651 | GstVideoEncoder *encoder; | ||
652 | GstVideoEncoderClass *klass; | ||
653 | GstCaps *caps; | ||
654 | |||
655 | encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad)); | ||
656 | klass = GST_VIDEO_ENCODER_GET_CLASS (encoder); | ||
657 | |||
658 | if (klass->getcaps) | ||
659 | caps = klass->getcaps (encoder); | ||
660 | else | ||
661 | caps = gst_video_encoder_proxy_getcaps (encoder, NULL); | ||
662 | |||
663 | GST_LOG_OBJECT (encoder, "Returning caps %" GST_PTR_FORMAT, caps); | ||
664 | |||
665 | gst_object_unref (encoder); | ||
666 | |||
667 | return caps; | ||
668 | } | ||
669 | |||
670 | static void | ||
671 | gst_video_encoder_finalize (GObject * object) | ||
672 | { | ||
673 | GstVideoEncoder *encoder; | ||
674 | |||
675 | GST_DEBUG_OBJECT (object, "finalize"); | ||
676 | |||
677 | encoder = GST_VIDEO_ENCODER (object); | ||
678 | if (encoder->priv->headers) { | ||
679 | g_list_foreach (encoder->priv->headers, (GFunc) gst_buffer_unref, NULL); | ||
680 | g_list_free (encoder->priv->headers); | ||
681 | } | ||
682 | g_static_rec_mutex_free (&encoder->stream_lock); | ||
683 | |||
684 | G_OBJECT_CLASS (parent_class)->finalize (object); | ||
685 | } | ||
686 | |||
687 | static gboolean | ||
688 | gst_video_encoder_push_event (GstVideoEncoder * encoder, GstEvent * event) | ||
689 | { | ||
690 | switch (GST_EVENT_TYPE (event)) { | ||
691 | case GST_EVENT_NEWSEGMENT: | ||
692 | { | ||
693 | gboolean update; | ||
694 | double rate; | ||
695 | double applied_rate; | ||
696 | GstFormat format; | ||
697 | gint64 start; | ||
698 | gint64 stop; | ||
699 | gint64 position; | ||
700 | |||
701 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
702 | gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate, | ||
703 | &format, &start, &stop, &position); | ||
704 | |||
705 | GST_DEBUG_OBJECT (encoder, "newseg rate %g, applied rate %g, " | ||
706 | "format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT | ||
707 | ", pos = %" GST_TIME_FORMAT, rate, applied_rate, format, | ||
708 | GST_TIME_ARGS (start), GST_TIME_ARGS (stop), | ||
709 | GST_TIME_ARGS (position)); | ||
710 | |||
711 | if (format != GST_FORMAT_TIME) { | ||
712 | GST_DEBUG_OBJECT (encoder, "received non TIME newsegment"); | ||
713 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
714 | break; | ||
715 | } | ||
716 | |||
717 | gst_segment_set_newsegment_full (&encoder->output_segment, update, rate, | ||
718 | applied_rate, format, start, stop, position); | ||
719 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
720 | break; | ||
721 | } | ||
722 | default: | ||
723 | break; | ||
724 | } | ||
725 | |||
726 | return gst_pad_push_event (encoder->srcpad, event); | ||
727 | } | ||
728 | |||
729 | static gboolean | ||
730 | gst_video_encoder_sink_eventfunc (GstVideoEncoder * encoder, GstEvent * event) | ||
731 | { | ||
732 | GstVideoEncoderClass *encoder_class; | ||
733 | gboolean ret = FALSE; | ||
734 | |||
735 | encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder); | ||
736 | |||
737 | switch (GST_EVENT_TYPE (event)) { | ||
738 | case GST_EVENT_EOS: | ||
739 | { | ||
740 | GstFlowReturn flow_ret; | ||
741 | |||
742 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
743 | encoder->priv->at_eos = TRUE; | ||
744 | |||
745 | if (encoder_class->finish) { | ||
746 | flow_ret = encoder_class->finish (encoder); | ||
747 | } else { | ||
748 | flow_ret = GST_FLOW_OK; | ||
749 | } | ||
750 | |||
751 | ret = (flow_ret == GST_VIDEO_ENCODER_FLOW_DROPPED); | ||
752 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
753 | break; | ||
754 | } | ||
755 | case GST_EVENT_NEWSEGMENT: | ||
756 | { | ||
757 | gboolean update; | ||
758 | double rate; | ||
759 | double applied_rate; | ||
760 | GstFormat format; | ||
761 | gint64 start; | ||
762 | gint64 stop; | ||
763 | gint64 position; | ||
764 | |||
765 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
766 | gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate, | ||
767 | &format, &start, &stop, &position); | ||
768 | |||
769 | GST_DEBUG_OBJECT (encoder, "newseg rate %g, applied rate %g, " | ||
770 | "format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT | ||
771 | ", pos = %" GST_TIME_FORMAT, rate, applied_rate, format, | ||
772 | GST_TIME_ARGS (start), GST_TIME_ARGS (stop), | ||
773 | GST_TIME_ARGS (position)); | ||
774 | |||
775 | if (format != GST_FORMAT_TIME) { | ||
776 | GST_DEBUG_OBJECT (encoder, "received non TIME newsegment"); | ||
777 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
778 | break; | ||
779 | } | ||
780 | |||
781 | encoder->priv->at_eos = FALSE; | ||
782 | |||
783 | gst_segment_set_newsegment_full (&encoder->input_segment, update, rate, | ||
784 | applied_rate, format, start, stop, position); | ||
785 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
786 | break; | ||
787 | } | ||
788 | case GST_EVENT_CUSTOM_DOWNSTREAM: | ||
789 | { | ||
790 | if (gst_video_event_is_force_key_unit (event)) { | ||
791 | GstClockTime running_time; | ||
792 | gboolean all_headers; | ||
793 | guint count; | ||
794 | |||
795 | if (gst_video_event_parse_downstream_force_key_unit (event, | ||
796 | NULL, NULL, &running_time, &all_headers, &count)) { | ||
797 | ForcedKeyUnitEvent *fevt; | ||
798 | |||
799 | GST_OBJECT_LOCK (encoder); | ||
800 | fevt = forced_key_unit_event_new (running_time, all_headers, count); | ||
801 | encoder->priv->force_key_unit = | ||
802 | g_list_append (encoder->priv->force_key_unit, fevt); | ||
803 | GST_OBJECT_UNLOCK (encoder); | ||
804 | |||
805 | GST_DEBUG_OBJECT (encoder, | ||
806 | "force-key-unit event: running-time %" GST_TIME_FORMAT | ||
807 | ", all_headers %d, count %u", | ||
808 | GST_TIME_ARGS (running_time), all_headers, count); | ||
809 | } | ||
810 | gst_event_unref (event); | ||
811 | ret = TRUE; | ||
812 | } | ||
813 | break; | ||
814 | } | ||
815 | default: | ||
816 | break; | ||
817 | } | ||
818 | |||
819 | return ret; | ||
820 | } | ||
821 | |||
822 | static gboolean | ||
823 | gst_video_encoder_sink_event (GstPad * pad, GstEvent * event) | ||
824 | { | ||
825 | GstVideoEncoder *enc; | ||
826 | GstVideoEncoderClass *klass; | ||
827 | gboolean handled = FALSE; | ||
828 | gboolean ret = TRUE; | ||
829 | |||
830 | enc = GST_VIDEO_ENCODER (gst_pad_get_parent (pad)); | ||
831 | klass = GST_VIDEO_ENCODER_GET_CLASS (enc); | ||
832 | |||
833 | GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event), | ||
834 | GST_EVENT_TYPE_NAME (event)); | ||
835 | |||
836 | if (klass->sink_event) | ||
837 | handled = klass->sink_event (enc, event); | ||
838 | |||
839 | if (!handled) | ||
840 | handled = gst_video_encoder_sink_eventfunc (enc, event); | ||
841 | |||
842 | if (!handled) { | ||
843 | /* Forward non-serialized events and EOS/FLUSH_STOP immediately. | ||
844 | * For EOS this is required because no buffer or serialized event | ||
845 | * will come after EOS and nothing could trigger another | ||
846 | * _finish_frame() call. * | ||
847 | * If the subclass handles sending of EOS manually it can return | ||
848 | * _DROPPED from ::finish() and all other subclasses should have | ||
849 | * decoded/flushed all remaining data before this | ||
850 | * | ||
851 | * For FLUSH_STOP this is required because it is expected | ||
852 | * to be forwarded immediately and no buffers are queued anyway. | ||
853 | */ | ||
854 | if (!GST_EVENT_IS_SERIALIZED (event) | ||
855 | || GST_EVENT_TYPE (event) == GST_EVENT_EOS | ||
856 | || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) { | ||
857 | ret = gst_video_encoder_push_event (enc, event); | ||
858 | } else { | ||
859 | GST_VIDEO_ENCODER_STREAM_LOCK (enc); | ||
860 | enc->priv->current_frame_events = | ||
861 | g_list_prepend (enc->priv->current_frame_events, event); | ||
862 | GST_VIDEO_ENCODER_STREAM_UNLOCK (enc); | ||
863 | ret = TRUE; | ||
864 | } | ||
865 | } | ||
866 | |||
867 | GST_DEBUG_OBJECT (enc, "event handled"); | ||
868 | |||
869 | gst_object_unref (enc); | ||
870 | return ret; | ||
871 | } | ||
872 | |||
873 | static gboolean | ||
874 | gst_video_encoder_src_eventfunc (GstVideoEncoder * encoder, GstEvent * event) | ||
875 | { | ||
876 | gboolean handled = FALSE; | ||
877 | |||
878 | switch (GST_EVENT_TYPE (event)) { | ||
879 | case GST_EVENT_CUSTOM_UPSTREAM: | ||
880 | { | ||
881 | if (gst_video_event_is_force_key_unit (event)) { | ||
882 | GstClockTime running_time; | ||
883 | gboolean all_headers; | ||
884 | guint count; | ||
885 | |||
886 | if (gst_video_event_parse_upstream_force_key_unit (event, | ||
887 | &running_time, &all_headers, &count)) { | ||
888 | ForcedKeyUnitEvent *fevt; | ||
889 | |||
890 | GST_OBJECT_LOCK (encoder); | ||
891 | fevt = forced_key_unit_event_new (running_time, all_headers, count); | ||
892 | encoder->priv->force_key_unit = | ||
893 | g_list_append (encoder->priv->force_key_unit, fevt); | ||
894 | GST_OBJECT_UNLOCK (encoder); | ||
895 | |||
896 | GST_DEBUG_OBJECT (encoder, | ||
897 | "force-key-unit event: running-time %" GST_TIME_FORMAT | ||
898 | ", all_headers %d, count %u", | ||
899 | GST_TIME_ARGS (running_time), all_headers, count); | ||
900 | } | ||
901 | gst_event_unref (event); | ||
902 | handled = TRUE; | ||
903 | } | ||
904 | break; | ||
905 | } | ||
906 | default: | ||
907 | break; | ||
908 | } | ||
909 | |||
910 | return handled; | ||
911 | } | ||
912 | |||
913 | static gboolean | ||
914 | gst_video_encoder_src_event (GstPad * pad, GstEvent * event) | ||
915 | { | ||
916 | GstVideoEncoder *encoder; | ||
917 | GstVideoEncoderClass *klass; | ||
918 | gboolean ret = FALSE; | ||
919 | gboolean handled = FALSE; | ||
920 | |||
921 | encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad)); | ||
922 | klass = GST_VIDEO_ENCODER_GET_CLASS (encoder); | ||
923 | |||
924 | GST_LOG_OBJECT (encoder, "handling event: %" GST_PTR_FORMAT, event); | ||
925 | |||
926 | if (klass->src_event) | ||
927 | handled = klass->src_event (encoder, event); | ||
928 | |||
929 | if (!handled) | ||
930 | handled = gst_video_encoder_src_eventfunc (encoder, event); | ||
931 | |||
932 | if (!handled) | ||
933 | ret = gst_pad_event_default (pad, event); | ||
934 | |||
935 | gst_object_unref (encoder); | ||
936 | |||
937 | return ret; | ||
938 | } | ||
939 | |||
940 | static const GstQueryType * | ||
941 | gst_video_encoder_get_query_types (GstPad * pad) | ||
942 | { | ||
943 | static const GstQueryType query_types[] = { | ||
944 | GST_QUERY_CONVERT, | ||
945 | GST_QUERY_LATENCY, | ||
946 | 0 | ||
947 | }; | ||
948 | |||
949 | return query_types; | ||
950 | } | ||
951 | |||
952 | static gboolean | ||
953 | gst_video_encoder_src_query (GstPad * pad, GstQuery * query) | ||
954 | { | ||
955 | GstVideoEncoderPrivate *priv; | ||
956 | GstVideoEncoder *enc; | ||
957 | gboolean res; | ||
958 | GstPad *peerpad; | ||
959 | |||
960 | enc = GST_VIDEO_ENCODER (gst_pad_get_parent (pad)); | ||
961 | priv = enc->priv; | ||
962 | peerpad = gst_pad_get_peer (enc->sinkpad); | ||
963 | |||
964 | GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query); | ||
965 | |||
966 | switch (GST_QUERY_TYPE (query)) { | ||
967 | case GST_QUERY_CONVERT: | ||
968 | { | ||
969 | GstFormat src_fmt, dest_fmt; | ||
970 | gint64 src_val, dest_val; | ||
971 | |||
972 | gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); | ||
973 | res = | ||
974 | gst_video_encoded_video_convert (priv->bytes, priv->time, src_fmt, | ||
975 | src_val, &dest_fmt, &dest_val); | ||
976 | if (!res) | ||
977 | goto error; | ||
978 | gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); | ||
979 | break; | ||
980 | } | ||
981 | case GST_QUERY_LATENCY: | ||
982 | { | ||
983 | gboolean live; | ||
984 | GstClockTime min_latency, max_latency; | ||
985 | |||
986 | res = gst_pad_query (peerpad, query); | ||
987 | if (res) { | ||
988 | gst_query_parse_latency (query, &live, &min_latency, &max_latency); | ||
989 | GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %" | ||
990 | GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live, | ||
991 | GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency)); | ||
992 | |||
993 | GST_OBJECT_LOCK (enc); | ||
994 | min_latency += priv->min_latency; | ||
995 | if (enc->priv->max_latency == GST_CLOCK_TIME_NONE) { | ||
996 | max_latency = GST_CLOCK_TIME_NONE; | ||
997 | } else if (max_latency != GST_CLOCK_TIME_NONE) { | ||
998 | max_latency += enc->priv->max_latency; | ||
999 | } | ||
1000 | GST_OBJECT_UNLOCK (enc); | ||
1001 | |||
1002 | gst_query_set_latency (query, live, min_latency, max_latency); | ||
1003 | } | ||
1004 | } | ||
1005 | break; | ||
1006 | default: | ||
1007 | res = gst_pad_query_default (pad, query); | ||
1008 | } | ||
1009 | gst_object_unref (peerpad); | ||
1010 | gst_object_unref (enc); | ||
1011 | return res; | ||
1012 | |||
1013 | error: | ||
1014 | GST_DEBUG_OBJECT (enc, "query failed"); | ||
1015 | gst_object_unref (peerpad); | ||
1016 | gst_object_unref (enc); | ||
1017 | return res; | ||
1018 | } | ||
1019 | |||
1020 | static GstVideoCodecFrame * | ||
1021 | gst_video_encoder_new_frame (GstVideoEncoder * encoder, GstBuffer * buf, | ||
1022 | GstClockTime timestamp, GstClockTime duration) | ||
1023 | { | ||
1024 | GstVideoEncoderPrivate *priv = encoder->priv; | ||
1025 | GstVideoCodecFrame *frame; | ||
1026 | |||
1027 | frame = g_slice_new0 (GstVideoCodecFrame); | ||
1028 | |||
1029 | frame->ref_count = 1; | ||
1030 | |||
1031 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
1032 | frame->system_frame_number = priv->system_frame_number; | ||
1033 | priv->system_frame_number++; | ||
1034 | |||
1035 | frame->presentation_frame_number = priv->presentation_frame_number; | ||
1036 | priv->presentation_frame_number++; | ||
1037 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
1038 | |||
1039 | frame->events = priv->current_frame_events; | ||
1040 | priv->current_frame_events = NULL; | ||
1041 | frame->input_buffer = buf; | ||
1042 | frame->pts = timestamp; | ||
1043 | frame->duration = duration; | ||
1044 | |||
1045 | if (GST_VIDEO_INFO_IS_INTERLACED (&encoder->priv->input_state->info)) { | ||
1046 | if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_TFF)) { | ||
1047 | GST_VIDEO_CODEC_FRAME_FLAG_SET (frame, GST_VIDEO_CODEC_FRAME_FLAG_TFF); | ||
1048 | } else { | ||
1049 | GST_VIDEO_CODEC_FRAME_FLAG_UNSET (frame, GST_VIDEO_CODEC_FRAME_FLAG_TFF); | ||
1050 | } | ||
1051 | if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_RFF)) { | ||
1052 | GST_VIDEO_CODEC_FRAME_FLAG_SET (frame, GST_VIDEO_CODEC_FRAME_FLAG_RFF); | ||
1053 | } else { | ||
1054 | GST_VIDEO_CODEC_FRAME_FLAG_UNSET (frame, GST_VIDEO_CODEC_FRAME_FLAG_RFF); | ||
1055 | } | ||
1056 | if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_ONEFIELD)) { | ||
1057 | GST_VIDEO_CODEC_FRAME_FLAG_SET (frame, | ||
1058 | GST_VIDEO_CODEC_FRAME_FLAG_ONEFIELD); | ||
1059 | } else { | ||
1060 | GST_VIDEO_CODEC_FRAME_FLAG_UNSET (frame, | ||
1061 | GST_VIDEO_CODEC_FRAME_FLAG_ONEFIELD); | ||
1062 | } | ||
1063 | } | ||
1064 | |||
1065 | return frame; | ||
1066 | } | ||
1067 | |||
1068 | |||
1069 | static GstFlowReturn | ||
1070 | gst_video_encoder_chain (GstPad * pad, GstBuffer * buf) | ||
1071 | { | ||
1072 | GstVideoEncoder *encoder; | ||
1073 | GstVideoEncoderPrivate *priv; | ||
1074 | GstVideoEncoderClass *klass; | ||
1075 | GstVideoCodecFrame *frame; | ||
1076 | GstFlowReturn ret = GST_FLOW_OK; | ||
1077 | gint64 start, stop = GST_CLOCK_TIME_NONE, cstart, cstop; | ||
1078 | |||
1079 | encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad)); | ||
1080 | priv = encoder->priv; | ||
1081 | klass = GST_VIDEO_ENCODER_GET_CLASS (encoder); | ||
1082 | |||
1083 | g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR); | ||
1084 | |||
1085 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
1086 | |||
1087 | /* .... ?? */ | ||
1088 | if (!GST_PAD_CAPS (pad)) { | ||
1089 | ret = GST_FLOW_NOT_NEGOTIATED; | ||
1090 | goto done; | ||
1091 | } | ||
1092 | |||
1093 | start = GST_BUFFER_TIMESTAMP (buf); | ||
1094 | if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf))) | ||
1095 | stop = start + GST_BUFFER_DURATION (buf); | ||
1096 | |||
1097 | GST_LOG_OBJECT (encoder, | ||
1098 | "received buffer of size %d with ts %" GST_TIME_FORMAT | ||
1099 | ", duration %" GST_TIME_FORMAT, GST_BUFFER_SIZE (buf), | ||
1100 | GST_TIME_ARGS (start), GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); | ||
1101 | |||
1102 | if (priv->at_eos) { | ||
1103 | ret = GST_FLOW_UNEXPECTED; | ||
1104 | goto done; | ||
1105 | } | ||
1106 | |||
1107 | /* Drop buffers outside of segment */ | ||
1108 | if (!gst_segment_clip (&encoder->input_segment, | ||
1109 | GST_FORMAT_TIME, start, stop, &cstart, &cstop)) { | ||
1110 | GST_DEBUG_OBJECT (encoder, "clipping to segment dropped frame"); | ||
1111 | gst_buffer_unref (buf); | ||
1112 | goto done; | ||
1113 | } | ||
1114 | |||
1115 | frame = gst_video_encoder_new_frame (encoder, buf, cstart, cstop - cstart); | ||
1116 | |||
1117 | GST_OBJECT_LOCK (encoder); | ||
1118 | if (priv->force_key_unit) { | ||
1119 | ForcedKeyUnitEvent *fevt = NULL; | ||
1120 | GstClockTime running_time; | ||
1121 | GList *l; | ||
1122 | |||
1123 | running_time = | ||
1124 | gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME, | ||
1125 | GST_BUFFER_TIMESTAMP (buf)); | ||
1126 | |||
1127 | for (l = priv->force_key_unit; l; l = l->next) { | ||
1128 | ForcedKeyUnitEvent *tmp = l->data; | ||
1129 | |||
1130 | /* Skip pending keyunits */ | ||
1131 | if (tmp->pending) | ||
1132 | continue; | ||
1133 | |||
1134 | /* Simple case, keyunit ASAP */ | ||
1135 | if (tmp->running_time == GST_CLOCK_TIME_NONE) { | ||
1136 | fevt = tmp; | ||
1137 | break; | ||
1138 | } | ||
1139 | |||
1140 | /* Event for before this frame */ | ||
1141 | if (tmp->running_time <= running_time) { | ||
1142 | fevt = tmp; | ||
1143 | break; | ||
1144 | } | ||
1145 | } | ||
1146 | |||
1147 | if (fevt) { | ||
1148 | GST_DEBUG_OBJECT (encoder, | ||
1149 | "Forcing a key unit at running time %" GST_TIME_FORMAT, | ||
1150 | GST_TIME_ARGS (running_time)); | ||
1151 | GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME (frame); | ||
1152 | if (fevt->all_headers) | ||
1153 | GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME_HEADERS (frame); | ||
1154 | fevt->pending = TRUE; | ||
1155 | } | ||
1156 | } | ||
1157 | GST_OBJECT_UNLOCK (encoder); | ||
1158 | |||
1159 | priv->frames = g_list_append (priv->frames, frame); | ||
1160 | |||
1161 | /* new data, more finish needed */ | ||
1162 | priv->drained = FALSE; | ||
1163 | |||
1164 | GST_LOG_OBJECT (encoder, "passing frame pfn %d to subclass", | ||
1165 | frame->presentation_frame_number); | ||
1166 | |||
1167 | gst_video_codec_frame_ref (frame); | ||
1168 | ret = klass->handle_frame (encoder, frame); | ||
1169 | |||
1170 | done: | ||
1171 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
1172 | |||
1173 | gst_object_unref (encoder); | ||
1174 | |||
1175 | return ret; | ||
1176 | } | ||
1177 | |||
1178 | static GstStateChangeReturn | ||
1179 | gst_video_encoder_change_state (GstElement * element, GstStateChange transition) | ||
1180 | { | ||
1181 | GstVideoEncoder *encoder; | ||
1182 | GstVideoEncoderClass *encoder_class; | ||
1183 | GstStateChangeReturn ret; | ||
1184 | |||
1185 | encoder = GST_VIDEO_ENCODER (element); | ||
1186 | encoder_class = GST_VIDEO_ENCODER_GET_CLASS (element); | ||
1187 | |||
1188 | switch (transition) { | ||
1189 | case GST_STATE_CHANGE_NULL_TO_READY: | ||
1190 | /* open device/library if needed */ | ||
1191 | if (encoder_class->open && !encoder_class->open (encoder)) | ||
1192 | goto open_failed; | ||
1193 | break; | ||
1194 | case GST_STATE_CHANGE_READY_TO_PAUSED: | ||
1195 | /* Initialize device/library if needed */ | ||
1196 | if (encoder_class->start && !encoder_class->start (encoder)) | ||
1197 | goto start_failed; | ||
1198 | break; | ||
1199 | default: | ||
1200 | break; | ||
1201 | } | ||
1202 | |||
1203 | ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); | ||
1204 | |||
1205 | switch (transition) { | ||
1206 | case GST_STATE_CHANGE_PAUSED_TO_READY: | ||
1207 | gst_video_encoder_reset (encoder); | ||
1208 | if (encoder_class->stop && !encoder_class->stop (encoder)) | ||
1209 | goto stop_failed; | ||
1210 | break; | ||
1211 | case GST_STATE_CHANGE_READY_TO_NULL: | ||
1212 | /* close device/library if needed */ | ||
1213 | if (encoder_class->close && !encoder_class->close (encoder)) | ||
1214 | goto close_failed; | ||
1215 | break; | ||
1216 | default: | ||
1217 | break; | ||
1218 | } | ||
1219 | |||
1220 | return ret; | ||
1221 | |||
1222 | /* Errors */ | ||
1223 | |||
1224 | open_failed: | ||
1225 | { | ||
1226 | GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL), | ||
1227 | ("Failed to open encoder")); | ||
1228 | return GST_STATE_CHANGE_FAILURE; | ||
1229 | } | ||
1230 | |||
1231 | start_failed: | ||
1232 | { | ||
1233 | GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL), | ||
1234 | ("Failed to start encoder")); | ||
1235 | return GST_STATE_CHANGE_FAILURE; | ||
1236 | } | ||
1237 | |||
1238 | stop_failed: | ||
1239 | { | ||
1240 | GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL), | ||
1241 | ("Failed to stop encoder")); | ||
1242 | return GST_STATE_CHANGE_FAILURE; | ||
1243 | } | ||
1244 | |||
1245 | close_failed: | ||
1246 | { | ||
1247 | GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL), | ||
1248 | ("Failed to close encoder")); | ||
1249 | return GST_STATE_CHANGE_FAILURE; | ||
1250 | } | ||
1251 | } | ||
1252 | |||
1253 | static gboolean | ||
1254 | gst_video_encoder_set_src_caps (GstVideoEncoder * encoder) | ||
1255 | { | ||
1256 | gboolean ret; | ||
1257 | GstVideoCodecState *state = encoder->priv->output_state; | ||
1258 | GstVideoInfo *info = &state->info; | ||
1259 | |||
1260 | g_return_val_if_fail (state->caps != NULL, FALSE); | ||
1261 | |||
1262 | if (encoder->priv->output_state_changed) { | ||
1263 | state->caps = gst_caps_make_writable (state->caps); | ||
1264 | |||
1265 | /* Fill caps */ | ||
1266 | gst_caps_set_simple (state->caps, "width", G_TYPE_INT, info->width, | ||
1267 | "height", G_TYPE_INT, info->height, | ||
1268 | "pixel-aspect-ratio", GST_TYPE_FRACTION, | ||
1269 | info->par_n, info->par_d, NULL); | ||
1270 | if (info->flags & GST_VIDEO_FLAG_VARIABLE_FPS && info->fps_n != 0) { | ||
1271 | /* variable fps with a max-framerate */ | ||
1272 | gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION, 0, 1, | ||
1273 | "max-framerate", GST_TYPE_FRACTION, info->fps_n, info->fps_d, NULL); | ||
1274 | } else { | ||
1275 | /* no variable fps or no max-framerate */ | ||
1276 | gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION, | ||
1277 | info->fps_n, info->fps_d, NULL); | ||
1278 | } | ||
1279 | if (state->codec_data) | ||
1280 | gst_caps_set_simple (state->caps, "codec_data", GST_TYPE_BUFFER, | ||
1281 | state->codec_data, NULL); | ||
1282 | encoder->priv->output_state_changed = FALSE; | ||
1283 | } | ||
1284 | |||
1285 | ret = gst_pad_set_caps (encoder->srcpad, state->caps); | ||
1286 | |||
1287 | return ret; | ||
1288 | } | ||
1289 | |||
1290 | /** | ||
1291 | * gst_video_encoder_finish_frame: | ||
1292 | * @encoder: a #GstVideoEncoder | ||
1293 | * @frame: (transfer full): an encoded #GstVideoCodecFrame | ||
1294 | * | ||
1295 | * @frame must have a valid encoded data buffer, whose metadata fields | ||
1296 | * are then appropriately set according to frame data or no buffer at | ||
1297 | * all if the frame should be dropped. | ||
1298 | * It is subsequently pushed downstream or provided to @pre_push. | ||
1299 | * In any case, the frame is considered finished and released. | ||
1300 | * | ||
1301 | * Returns: a #GstFlowReturn resulting from sending data downstream | ||
1302 | * | ||
1303 | * Since: 0.10.37 | ||
1304 | */ | ||
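/*
 * Illustrative sketch only: dropping a frame from handle_frame() means
 * finishing it without ever setting an output buffer.
 *
 *   if (should_drop)                         // hypothetical condition
 *     return gst_video_encoder_finish_frame (enc, frame);
 */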
1305 | GstFlowReturn | ||
1306 | gst_video_encoder_finish_frame (GstVideoEncoder * encoder, | ||
1307 | GstVideoCodecFrame * frame) | ||
1308 | { | ||
1309 | GstVideoEncoderPrivate *priv = encoder->priv; | ||
1310 | GstFlowReturn ret = GST_FLOW_OK; | ||
1311 | GstVideoEncoderClass *encoder_class; | ||
1312 | GList *l; | ||
1313 | gboolean send_headers = FALSE; | ||
1314 | gboolean discont = (frame->presentation_frame_number == 0); | ||
1315 | |||
1316 | encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder); | ||
1317 | |||
1318 | GST_LOG_OBJECT (encoder, | ||
1319 | "finish frame fpn %d", frame->presentation_frame_number); | ||
1320 | |||
1321 | GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT | ||
1322 | ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts), | ||
1323 | GST_TIME_ARGS (frame->dts)); | ||
1324 | |||
1325 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
1326 | |||
1327 | if (G_UNLIKELY (priv->output_state_changed)) | ||
1328 | gst_video_encoder_set_src_caps (encoder); | ||
1329 | |||
1330 | if (G_UNLIKELY (priv->output_state == NULL)) | ||
1331 | goto no_output_state; | ||
1332 | |||
1333 | /* Push all pending events that arrived before this frame */ | ||
1334 | for (l = priv->frames; l; l = l->next) { | ||
1335 | GstVideoCodecFrame *tmp = l->data; | ||
1336 | |||
1337 | if (tmp->events) { | ||
1338 | GList *k; | ||
1339 | |||
1340 | for (k = g_list_last (tmp->events); k; k = k->prev) | ||
1341 | gst_video_encoder_push_event (encoder, k->data); | ||
1342 | g_list_free (tmp->events); | ||
1343 | tmp->events = NULL; | ||
1344 | } | ||
1345 | |||
1346 | if (tmp == frame) | ||
1347 | break; | ||
1348 | } | ||
1349 | |||
1350 | /* no buffer data means this frame is skipped/dropped */ | ||
1351 | if (!frame->output_buffer) { | ||
1352 | GST_DEBUG_OBJECT (encoder, "skipping frame %" GST_TIME_FORMAT, | ||
1353 | GST_TIME_ARGS (frame->pts)); | ||
1354 | goto done; | ||
1355 | } | ||
1356 | |||
1357 | if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit) { | ||
1358 | GstClockTime stream_time, running_time; | ||
1359 | GstEvent *ev; | ||
1360 | ForcedKeyUnitEvent *fevt = NULL; | ||
1361 | GList *l; | ||
1362 | |||
1363 | running_time = | ||
1364 | gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME, | ||
1365 | frame->pts); | ||
1366 | |||
1367 | GST_OBJECT_LOCK (encoder); | ||
1368 | for (l = priv->force_key_unit; l; l = l->next) { | ||
1369 | ForcedKeyUnitEvent *tmp = l->data; | ||
1370 | |||
1371 | /* Skip non-pending keyunits */ | ||
1372 | if (!tmp->pending) | ||
1373 | continue; | ||
1374 | |||
1375 | /* Simple case, keyunit ASAP */ | ||
1376 | if (tmp->running_time == GST_CLOCK_TIME_NONE) { | ||
1377 | fevt = tmp; | ||
1378 | break; | ||
1379 | } | ||
1380 | |||
1381 | /* Event for before this frame */ | ||
1382 | if (tmp->running_time <= running_time) { | ||
1383 | fevt = tmp; | ||
1384 | break; | ||
1385 | } | ||
1386 | } | ||
1387 | |||
1388 | if (fevt) { | ||
1389 | priv->force_key_unit = g_list_remove (priv->force_key_unit, fevt); | ||
1390 | } | ||
1391 | GST_OBJECT_UNLOCK (encoder); | ||
1392 | |||
1393 | if (fevt) { | ||
1394 | stream_time = | ||
1395 | gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME, | ||
1396 | frame->pts); | ||
1397 | |||
1398 | ev = gst_video_event_new_downstream_force_key_unit | ||
1399 | (frame->pts, stream_time, running_time, | ||
1400 | fevt->all_headers, fevt->count); | ||
1401 | |||
1402 | gst_video_encoder_push_event (encoder, ev); | ||
1403 | |||
1404 | if (fevt->all_headers) | ||
1405 | send_headers = TRUE; | ||
1406 | |||
1407 | GST_DEBUG_OBJECT (encoder, | ||
1408 | "Forced key unit: running-time %" GST_TIME_FORMAT | ||
1409 | ", all_headers %d, count %u", | ||
1410 | GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count); | ||
1411 | forced_key_unit_event_free (fevt); | ||
1412 | } | ||
1413 | } | ||
1414 | |||
1415 | if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) { | ||
1416 | priv->distance_from_sync = 0; | ||
1417 | GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT); | ||
1418 | /* For keyframes, DTS = PTS */ | ||
1419 | if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) { | ||
1420 | frame->dts = frame->pts; | ||
1421 | } else if (GST_CLOCK_TIME_IS_VALID (frame->pts) && frame->pts != frame->dts) { | ||
1422 | GST_WARNING_OBJECT (encoder, "keyframe PTS != DTS"); | ||
1423 | } | ||
1424 | } else { | ||
1425 | GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT); | ||
1426 | } | ||
1427 | |||
1428 | frame->distance_from_sync = priv->distance_from_sync; | ||
1429 | priv->distance_from_sync++; | ||
1430 | |||
1431 | GST_BUFFER_TIMESTAMP (frame->output_buffer) = frame->pts; | ||
1432 | GST_BUFFER_DURATION (frame->output_buffer) = frame->duration; | ||
1433 | |||
1434 | /* update rate estimate */ | ||
1435 | priv->bytes += GST_BUFFER_SIZE (frame->output_buffer); | ||
1436 | if (GST_CLOCK_TIME_IS_VALID (frame->duration)) { | ||
1437 | priv->time += frame->duration; | ||
1438 | } else { | ||
1439 | /* better to report none than an invalid estimate */ | ||
1440 | priv->time = GST_CLOCK_TIME_NONE; | ||
1441 | } | ||
1442 | |||
1443 | if (G_UNLIKELY (send_headers || priv->new_headers)) { | ||
1444 | GList *tmp, *copy = NULL; | ||
1445 | |||
1446 | GST_DEBUG_OBJECT (encoder, "Sending headers"); | ||
1447 | |||
1448 | /* First make all buffers metadata-writable */ | ||
1449 | for (tmp = priv->headers; tmp; tmp = tmp->next) { | ||
1450 | GstBuffer *tmpbuf = GST_BUFFER (tmp->data); | ||
1451 | |||
1452 | copy = g_list_append (copy, gst_buffer_make_metadata_writable (tmpbuf)); | ||
1453 | } | ||
1454 | g_list_free (priv->headers); | ||
1455 | priv->headers = copy; | ||
1456 | |||
1457 | for (tmp = priv->headers; tmp; tmp = tmp->next) { | ||
1458 | GstBuffer *tmpbuf = GST_BUFFER (tmp->data); | ||
1459 | |||
1460 | gst_buffer_set_caps (tmpbuf, GST_PAD_CAPS (encoder->srcpad)); | ||
1461 | gst_buffer_ref (tmpbuf); | ||
1462 | priv->bytes += GST_BUFFER_SIZE (tmpbuf); | ||
1463 | if (G_UNLIKELY (discont)) { | ||
1464 | GST_LOG_OBJECT (encoder, "marking discont"); | ||
1465 | GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT); | ||
1466 | discont = FALSE; | ||
1467 | } | ||
1468 | |||
1469 | gst_pad_push (encoder->srcpad, tmpbuf); | ||
1470 | } | ||
1471 | priv->new_headers = FALSE; | ||
1472 | } | ||
1473 | |||
1474 | if (G_UNLIKELY (discont)) { | ||
1475 | GST_LOG_OBJECT (encoder, "marking discont"); | ||
1476 | GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DISCONT); | ||
1477 | } | ||
1478 | |||
1479 | gst_buffer_set_caps (GST_BUFFER (frame->output_buffer), | ||
1480 | GST_PAD_CAPS (encoder->srcpad)); | ||
1481 | |||
1482 | if (encoder_class->pre_push) | ||
1483 | ret = encoder_class->pre_push (encoder, frame); | ||
1484 | |||
1485 | if (ret == GST_FLOW_OK) | ||
1486 | ret = gst_pad_push (encoder->srcpad, frame->output_buffer); | ||
1487 | |||
1488 | frame->output_buffer = NULL; | ||
1489 | |||
1490 | done: | ||
1491 | /* handed out */ | ||
1492 | |||
1493 | /* unref once from the list */ | ||
1494 | l = g_list_find (priv->frames, frame); | ||
1495 | if (l) { | ||
1496 | gst_video_codec_frame_unref (frame); | ||
1497 | priv->frames = g_list_delete_link (priv->frames, l); | ||
1498 | } | ||
1499 | /* unref because this function takes ownership */ | ||
1500 | gst_video_codec_frame_unref (frame); | ||
1501 | |||
1502 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
1503 | |||
1504 | return ret; | ||
1505 | |||
1506 | /* ERRORS */ | ||
1507 | no_output_state: | ||
1508 | { | ||
1509 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
1510 | GST_ERROR_OBJECT (encoder, "Output state was not configured"); | ||
1511 | return GST_FLOW_ERROR; | ||
1512 | } | ||
1513 | } | ||
1514 | |||
1515 | /** | ||
1516 | * gst_video_encoder_get_output_state: | ||
1517 | * @encoder: a #GstVideoEncoder | ||
1518 | * | ||
1519 | * Get the current output #GstVideoCodecState | ||
1520 | * | ||
1521 | * Returns: (transfer full): #GstVideoCodecState describing format of video data. | ||
1522 | * | ||
1523 | * Since: 0.10.37 | ||
1524 | */ | ||
1525 | GstVideoCodecState * | ||
1526 | gst_video_encoder_get_output_state (GstVideoEncoder * encoder) | ||
1527 | { | ||
1528 | GstVideoCodecState *state; | ||
1529 | |||
1530 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
1531 | state = gst_video_codec_state_ref (encoder->priv->output_state); | ||
1532 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
1533 | |||
1534 | return state; | ||
1535 | } | ||
1536 | |||
1537 | /** | ||
1538 | * gst_video_encoder_set_output_state: | ||
1539 | * @encoder: a #GstVideoEncoder | ||
1540 | * @caps: (transfer full): the #GstCaps to use for the output | ||
1541 | * @reference: (allow-none) (transfer none): An optional reference #GstVideoCodecState | ||
1542 | * | ||
1543 | * Creates a new #GstVideoCodecState with the specified caps as the output state | ||
1544 | * for the encoder. | ||
1545 | * Any previously set output state on @encoder will be replaced by the newly | ||
1546 | * created one. | ||
1547 | * | ||
1548 | * The specified @caps should not contain any resolution, pixel-aspect-ratio, | ||
1549 | * framerate, codec-data, .... Those should be specified instead in the returned | ||
1550 | * #GstVideoCodecState. | ||
1551 | * | ||
1552 | * If the subclass wishes to copy over existing fields (like pixel aspect ratio, | ||
1553 | * or framerate) from an existing #GstVideoCodecState, it can be provided as a | ||
1554 | * @reference. | ||
1555 | * | ||
1556 | * If the subclass wishes to override some fields from the output state (like | ||
1557 | * pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState. | ||
1558 | * | ||
1559 | * The new output state will only take effect (set on pads and buffers) starting | ||
1560 | * from the next call to #gst_video_encoder_finish_frame(). | ||
1561 | * | ||
1562 | * Returns: (transfer full): the newly configured output state. | ||
1563 | * | ||
1564 | * Since: 0.10.37 | ||
1565 | */ | ||
1566 | GstVideoCodecState * | ||
1567 | gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps, | ||
1568 | GstVideoCodecState * reference) | ||
1569 | { | ||
1570 | GstVideoEncoderPrivate *priv = encoder->priv; | ||
1571 | GstVideoCodecState *state; | ||
1572 | |||
1573 | g_return_val_if_fail (caps != NULL, NULL); | ||
1574 | |||
1575 | state = _new_output_state (caps, reference); | ||
1576 | |||
1577 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
1578 | if (priv->output_state) | ||
1579 | gst_video_codec_state_unref (priv->output_state); | ||
1580 | priv->output_state = gst_video_codec_state_ref (state); | ||
1581 | |||
1582 | priv->output_state_changed = TRUE; | ||
1583 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
1584 | |||
1585 | return state; | ||
1586 | } | ||
1587 | |||
1588 | /** | ||
1589 | * gst_video_encoder_set_latency: | ||
1590 | * @encoder: a #GstVideoEncoder | ||
1591 | * @min_latency: minimum latency | ||
1592 | * @max_latency: maximum latency | ||
1593 | * | ||
1594 | * Informs baseclass of encoding latency. | ||
1595 | * | ||
1596 | * Since: 0.10.37 | ||
1597 | */ | ||
1598 | void | ||
1599 | gst_video_encoder_set_latency (GstVideoEncoder * encoder, | ||
1600 | GstClockTime min_latency, GstClockTime max_latency) | ||
1601 | { | ||
1602 | g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency)); | ||
1603 | g_return_if_fail (max_latency >= min_latency); | ||
1604 | |||
1605 | GST_OBJECT_LOCK (encoder); | ||
1606 | encoder->priv->min_latency = min_latency; | ||
1607 | encoder->priv->max_latency = max_latency; | ||
1608 | GST_OBJECT_UNLOCK (encoder); | ||
1609 | |||
1610 | gst_element_post_message (GST_ELEMENT_CAST (encoder), | ||
1611 | gst_message_new_latency (GST_OBJECT_CAST (encoder))); | ||
1612 | } | ||
1613 | |||
1614 | /** | ||
1615 | * gst_video_encoder_get_latency: | ||
1616 | * @encoder: a #GstVideoEncoder | ||
1617 | * @min_latency: (out) (allow-none): the configured minimum latency | ||
1618 | * @max_latency: (out) (allow-none): the configured maximum latency | ||
1619 | * | ||
1620 | * Query the configured encoding latency. | ||
1621 | * | ||
1622 | * Since: 0.10.37 | ||
1623 | */ | ||
1624 | void | ||
1625 | gst_video_encoder_get_latency (GstVideoEncoder * encoder, | ||
1626 | GstClockTime * min_latency, GstClockTime * max_latency) | ||
1627 | { | ||
1628 | GST_OBJECT_LOCK (encoder); | ||
1629 | if (min_latency) | ||
1630 | *min_latency = encoder->priv->min_latency; | ||
1631 | if (max_latency) | ||
1632 | *max_latency = encoder->priv->max_latency; | ||
1633 | GST_OBJECT_UNLOCK (encoder); | ||
1634 | } | ||
1635 | |||
1636 | /** | ||
1637 | * gst_video_encoder_get_oldest_frame: | ||
1638 | * @encoder: a #GstVideoEncoder | ||
1639 | * | ||
1640 | * Get the oldest unfinished pending #GstVideoCodecFrame | ||
1641 | * | ||
1642 | * Returns: (transfer full): oldest unfinished pending #GstVideoCodecFrame | ||
1643 | * | ||
1644 | * Since: 0.10.37 | ||
1645 | */ | ||
1646 | GstVideoCodecFrame * | ||
1647 | gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder) | ||
1648 | { | ||
1649 | GstVideoCodecFrame *frame = NULL; | ||
1650 | |||
1651 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
1652 | if (encoder->priv->frames) | ||
1653 | frame = gst_video_codec_frame_ref (encoder->priv->frames->data); | ||
1654 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
1655 | |||
1656 | return (GstVideoCodecFrame *) frame; | ||
1657 | } | ||
1658 | |||
1659 | /** | ||
1660 | * gst_video_encoder_get_frame: | ||
1661 | * @encoder: a #GstVideoEncoder | ||
1662 | * @frame_number: system_frame_number of a frame | ||
1663 | * | ||
1664 | * Get a pending unfinished #GstVideoCodecFrame | ||
1665 | * | ||
1666 | * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number. | ||
1667 | * | ||
1668 | * Since: 0.10.37 | ||
1669 | */ | ||
1670 | GstVideoCodecFrame * | ||
1671 | gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number) | ||
1672 | { | ||
1673 | GList *g; | ||
1674 | GstVideoCodecFrame *frame = NULL; | ||
1675 | |||
1676 | GST_DEBUG_OBJECT (encoder, "frame_number : %d", frame_number); | ||
1677 | |||
1678 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
1679 | for (g = encoder->priv->frames; g; g = g->next) { | ||
1680 | GstVideoCodecFrame *tmp = g->data; | ||
1681 | |||
1682 | if (tmp->system_frame_number == frame_number) { | ||
1683 | frame = tmp; | ||
1684 | gst_video_codec_frame_ref (frame); | ||
1685 | break; | ||
1686 | } | ||
1687 | } | ||
1688 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
1689 | |||
1690 | return frame; | ||
1691 | } | ||
1692 | |||
1693 | /** | ||
1694 | * gst_video_encoder_get_frames: | ||
1695 | * @encoder: a #GstVideoEncoder | ||
1696 | * | ||
1697 | * Get all pending unfinished #GstVideoCodecFrame | ||
1698 | * | ||
1699 | * Returns: (transfer full) (element-type GstVideoCodecFrame): pending unfinished #GstVideoCodecFrame. | ||
1700 | */ | ||
1701 | GList * | ||
1702 | gst_video_encoder_get_frames (GstVideoEncoder * encoder) | ||
1703 | { | ||
1704 | GList *frames; | ||
1705 | |||
1706 | GST_VIDEO_ENCODER_STREAM_LOCK (encoder); | ||
1707 | frames = g_list_copy (encoder->priv->frames); | ||
1708 | g_list_foreach (frames, (GFunc) gst_video_codec_frame_ref, NULL); | ||
1709 | GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); | ||
1710 | |||
1711 | return frames; | ||
1712 | } | ||
diff --git a/common/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.h b/common/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.h new file mode 100644 index 00000000..9ae3516e --- /dev/null +++ b/common/recipes-multimedia/gstreamer/gstreamer-vaapi/gstvideoencoder.h | |||
@@ -0,0 +1,308 @@ | |||
1 | /* GStreamer | ||
2 | * Copyright (C) 2008 David Schleef <ds@schleef.org> | ||
3 | * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>. | ||
4 | * Copyright (C) 2011 Nokia Corporation. All rights reserved. | ||
5 | * Contact: Stefan Kost <stefan.kost@nokia.com> | ||
6 | * Copyright (C) 2012 Collabora Ltd. | ||
7 | * Author : Edward Hervey <edward@collabora.com> | ||
8 | * | ||
9 | * This library is free software; you can redistribute it and/or | ||
10 | * modify it under the terms of the GNU Library General Public | ||
11 | * License as published by the Free Software Foundation; either | ||
12 | * version 2 of the License, or (at your option) any later version. | ||
13 | * | ||
14 | * This library is distributed in the hope that it will be useful, | ||
15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
17 | * Library General Public License for more details. | ||
18 | * | ||
19 | * You should have received a copy of the GNU Library General Public | ||
20 | * License along with this library; if not, write to the | ||
21 | * Free Software Foundation, Inc., 59 Temple Place - Suite 330, | ||
22 | * Boston, MA 02111-1307, USA. | ||
23 | */ | ||
24 | |||
25 | #ifndef _GST_VIDEO_ENCODER_H_ | ||
26 | #define _GST_VIDEO_ENCODER_H_ | ||
27 | |||
28 | #include <gst/video/gstvideoutils.h> | ||
29 | |||
30 | G_BEGIN_DECLS | ||
31 | |||
32 | #define GST_TYPE_VIDEO_ENCODER \ | ||
33 | (gst_video_encoder_get_type()) | ||
34 | #define GST_VIDEO_ENCODER(obj) \ | ||
35 | (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_ENCODER,GstVideoEncoder)) | ||
36 | #define GST_VIDEO_ENCODER_CLASS(klass) \ | ||
37 | (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_ENCODER,GstVideoEncoderClass)) | ||
38 | #define GST_VIDEO_ENCODER_GET_CLASS(obj) \ | ||
39 | (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_VIDEO_ENCODER,GstVideoEncoderClass)) | ||
40 | #define GST_IS_VIDEO_ENCODER(obj) \ | ||
41 | (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_ENCODER)) | ||
42 | #define GST_IS_VIDEO_ENCODER_CLASS(klass) \ | ||
43 | (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_ENCODER)) | ||
44 | #define GST_VIDEO_ENCODER_CAST(enc) ((GstVideoEncoder*)enc) | ||
45 | |||
46 | /** | ||
47 | * GST_VIDEO_ENCODER_SINK_NAME: | ||
48 | * | ||
49 | * The name of the template for the sink pad. | ||
50 | * | ||
51 | * Since: 0.10.37 | ||
52 | */ | ||
53 | #define GST_VIDEO_ENCODER_SINK_NAME "sink" | ||
54 | /** | ||
55 | * GST_VIDEO_ENCODER_SRC_NAME: | ||
56 | * | ||
57 | * The name of the template for the source pad. | ||
58 | * | ||
59 | * Since: 0.10.37 | ||
60 | */ | ||
61 | #define GST_VIDEO_ENCODER_SRC_NAME "src" | ||
62 | |||
63 | /** | ||
64 | * GST_VIDEO_ENCODER_FLOW_DROPPED: | ||
65 | * | ||
66 | * Returned when the event/buffer should be dropped. | ||
67 | * | ||
68 | * Since: 0.10.37 | ||
69 | */ | ||
70 | #define GST_VIDEO_ENCODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1 | ||
71 | |||
72 | /** | ||
73 | * GST_VIDEO_ENCODER_SRC_PAD: | ||
74 | * @obj: a #GstVideoEncoder | ||
75 | * | ||
76 | * Gives the pointer to the source #GstPad object of the element. | ||
77 | * | ||
78 | * Since: 0.10.37 | ||
79 | */ | ||
80 | #define GST_VIDEO_ENCODER_SRC_PAD(obj) (((GstVideoEncoder *) (obj))->srcpad) | ||
81 | |||
82 | /** | ||
83 | * GST_VIDEO_ENCODER_SINK_PAD: | ||
84 | * @obj: a #GstVideoEncoder | ||
85 | * | ||
86 | * Gives the pointer to the sink #GstPad object of the element. | ||
87 | * | ||
88 | * Since: 0.10.37 | ||
89 | */ | ||
90 | #define GST_VIDEO_ENCODER_SINK_PAD(obj) (((GstVideoEncoder *) (obj))->sinkpad) | ||
91 | |||
92 | /** | ||
93 | * GST_VIDEO_ENCODER_FLOW_NEED_DATA: | ||
94 | * | ||
95 | * Returned while parsing to indicate more data is needed. | ||
96 | * | ||
97 | * Since: 0.10.37 | ||
98 | **/ | ||
99 | #define GST_VIDEO_ENCODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS | ||
100 | |||
101 | /** | ||
102 | * GST_VIDEO_ENCODER_FLOW_DROPPED: | ||
103 | * | ||
104 | * Returned when the event/buffer should be dropped. | ||
105 | * | ||
106 | * Since: 0.10.37 | ||
107 | */ | ||
108 | #define GST_VIDEO_ENCODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1 | ||
109 | |||
110 | /** | ||
111 | * GST_VIDEO_ENCODER_INPUT_SEGMENT: | ||
112 | * @obj: video encoder instance | ||
113 | * | ||
114 | * Gives the input segment of the element. | ||
115 | * | ||
116 | * Since: 0.10.37 | ||
117 | */ | ||
118 | #define GST_VIDEO_ENCODER_INPUT_SEGMENT(obj) (GST_VIDEO_ENCODER_CAST (obj)->input_segment) | ||
119 | |||
120 | /** | ||
121 | * GST_VIDEO_ENCODER_OUTPUT_SEGMENT: | ||
122 | * @obj: video encoder instance | ||
123 | * | ||
124 | * Gives the output segment of the element. | ||
125 | * | ||
126 | * Since: 0.10.37 | ||
127 | */ | ||
128 | #define GST_VIDEO_ENCODER_OUTPUT_SEGMENT(obj) (GST_VIDEO_ENCODER_CAST (obj)->output_segment) | ||
129 | |||
130 | /** | ||
131 | * GST_VIDEO_ENCODER_STREAM_LOCK: | ||
132 | * @encoder: video encoder instance | ||
133 | * | ||
134 | * Obtain a lock to protect the encoder function from concurrent access. | ||
135 | * | ||
136 | * Since: 0.10.37 | ||
137 | */ | ||
138 | #define GST_VIDEO_ENCODER_STREAM_LOCK(encoder) g_static_rec_mutex_lock (&GST_VIDEO_ENCODER (encoder)->stream_lock) | ||
139 | |||
140 | /** | ||
141 | * GST_VIDEO_ENCODER_STREAM_UNLOCK: | ||
142 | * @encoder: video encoder instance | ||
143 | * | ||
144 | * Release the lock that protects the encoder function from concurrent access. | ||
145 | * | ||
146 | * Since: 0.10.37 | ||
147 | */ | ||
148 | #define GST_VIDEO_ENCODER_STREAM_UNLOCK(encoder) g_static_rec_mutex_unlock (&GST_VIDEO_ENCODER (encoder)->stream_lock) | ||
149 | |||
150 | typedef struct _GstVideoEncoder GstVideoEncoder; | ||
151 | typedef struct _GstVideoEncoderPrivate GstVideoEncoderPrivate; | ||
152 | typedef struct _GstVideoEncoderClass GstVideoEncoderClass; | ||
153 | |||
154 | /** | ||
155 | * GstVideoEncoder: | ||
156 | * | ||
157 | * The opaque #GstVideoEncoder data structure. | ||
158 | * | ||
159 | * Since: 0.10.37 | ||
160 | */ | ||
161 | struct _GstVideoEncoder | ||
162 | { | ||
163 | /*< private >*/ | ||
164 | GstElement element; | ||
165 | |||
166 | /*< protected >*/ | ||
167 | GstPad *sinkpad; | ||
168 | GstPad *srcpad; | ||
169 | |||
170 | /* protects all data processing, i.e. is locked | ||
171 | * in the chain function, finish_frame and when | ||
172 | * processing serialized events */ | ||
173 | GStaticRecMutex stream_lock; | ||
174 | |||
175 | /* MT-protected (with STREAM_LOCK) */ | ||
176 | GstSegment input_segment; | ||
177 | GstSegment output_segment; | ||
178 | |||
179 | GstVideoEncoderPrivate *priv; | ||
180 | |||
181 | /*< private >*/ | ||
182 | gpointer _gst_reserved[GST_PADDING_LARGE]; | ||
183 | }; | ||
184 | |||
185 | /** | ||
186 | * GstVideoEncoderClass: | ||
187 | * @open: Optional. | ||
188 | * Called when the element changes to GST_STATE_READY. | ||
189 | * Allows opening external resources. Since: 0.10.37. | ||
190 | * @close: Optional. | ||
191 | * Called when the element changes to GST_STATE_NULL. | ||
192 | * Allows closing external resources. Since: 0.10.37. | ||
193 | * @start: Optional. | ||
194 | * Called when the element starts processing. | ||
195 | * Allows opening external resources. | ||
196 | * @stop: Optional. | ||
197 | * Called when the element stops processing. | ||
198 | * Allows closing external resources. | ||
199 | * @set_format: Optional. | ||
200 | * Notifies subclass of incoming data format. | ||
201 | * GstVideoCodecState fields have already been | ||
202 | * set according to provided caps. | ||
203 | * @handle_frame: Provides input frame to subclass. | ||
204 | * @reset: Optional. | ||
205 | * Allows the subclass to perform post-seek semantics reset. | ||
206 | * @finish: Optional. | ||
207 | * Called to request subclass to dispatch any pending remaining | ||
208 | * data (e.g. at EOS). | ||
209 | * @pre_push: Optional. | ||
210 | * Allows subclass to push frame downstream in whatever | ||
211 | * shape or form it deems appropriate. If not provided, | ||
212 | * the encoded frame data is simply pushed downstream. | ||
213 | * @getcaps: Optional. | ||
214 | * Allows for a custom sink getcaps implementation (e.g. | ||
215 | * for multichannel input specification). If not implemented, | ||
216 | * default returns gst_video_encoder_proxy_getcaps | ||
217 | * applied to sink template caps. | ||
218 | * @sink_event: Optional. | ||
219 | * Event handler on the sink pad. This function should return | ||
220 | * TRUE if the event was handled and should be discarded | ||
221 | * (i.e. not unref'ed). | ||
222 | * @src_event: Optional. | ||
223 | * Event handler on the source pad. This function should return | ||
224 | * TRUE if the event was handled and should be discarded | ||
225 | * (i.e. not unref'ed). | ||
226 | * | ||
227 | * Subclasses can override any of the available virtual methods or not, as | ||
228 | * needed. At minimum @handle_frame needs to be overridden, and @set_format | ||
229 | * and @getcaps are likely needed as well. | ||
230 | * | ||
231 | * Since: 0.10.37 | ||
232 | */ | ||
233 | struct _GstVideoEncoderClass | ||
234 | { | ||
235 | /*< private >*/ | ||
236 | GstElementClass element_class; | ||
237 | |||
238 | /*< public >*/ | ||
239 | /* virtual methods for subclasses */ | ||
240 | gboolean (*open) (GstVideoEncoder *encoder); | ||
241 | |||
242 | gboolean (*close) (GstVideoEncoder *encoder); | ||
243 | |||
244 | gboolean (*start) (GstVideoEncoder *encoder); | ||
245 | |||
246 | gboolean (*stop) (GstVideoEncoder *encoder); | ||
247 | |||
248 | gboolean (*set_format) (GstVideoEncoder *encoder, | ||
249 | GstVideoCodecState *state); | ||
250 | |||
251 | GstFlowReturn (*handle_frame) (GstVideoEncoder *encoder, | ||
252 | GstVideoCodecFrame *frame); | ||
253 | |||
254 | gboolean (*reset) (GstVideoEncoder *encoder, | ||
255 | gboolean hard); | ||
256 | |||
257 | GstFlowReturn (*finish) (GstVideoEncoder *encoder); | ||
258 | |||
259 | GstFlowReturn (*pre_push) (GstVideoEncoder *encoder, | ||
260 | GstVideoCodecFrame *frame); | ||
261 | |||
262 | GstCaps * (*getcaps) (GstVideoEncoder *enc); | ||
263 | |||
264 | gboolean (*sink_event) (GstVideoEncoder *encoder, | ||
265 | GstEvent *event); | ||
266 | |||
267 | gboolean (*src_event) (GstVideoEncoder *encoder, | ||
268 | GstEvent *event); | ||
269 | |||
270 | /*< private >*/ | ||
271 | gpointer _gst_reserved[GST_PADDING_LARGE]; | ||
272 | }; | ||
273 | |||
274 | GType gst_video_encoder_get_type (void); | ||
275 | |||
276 | GstVideoCodecState* gst_video_encoder_get_output_state (GstVideoEncoder *encoder); | ||
277 | |||
278 | GstVideoCodecState* gst_video_encoder_set_output_state (GstVideoEncoder * encoder, | ||
279 | GstCaps * caps, | ||
280 | GstVideoCodecState * reference); | ||
281 | |||
282 | GstVideoCodecFrame* gst_video_encoder_get_frame (GstVideoEncoder *encoder, | ||
283 | int frame_number); | ||
284 | GstVideoCodecFrame* gst_video_encoder_get_oldest_frame (GstVideoEncoder *encoder); | ||
285 | |||
286 | GList * gst_video_encoder_get_frames (GstVideoEncoder *encoder); | ||
287 | |||
288 | GstFlowReturn gst_video_encoder_finish_frame (GstVideoEncoder *encoder, | ||
289 | GstVideoCodecFrame *frame); | ||
290 | |||
291 | GstCaps * gst_video_encoder_proxy_getcaps (GstVideoEncoder * enc, | ||
292 | GstCaps * caps); | ||
293 | void gst_video_encoder_set_discont (GstVideoEncoder *encoder); | ||
294 | gboolean gst_video_encoder_get_discont (GstVideoEncoder *encoder); | ||
295 | |||
296 | void gst_video_encoder_set_latency (GstVideoEncoder *encoder, | ||
297 | GstClockTime min_latency, | ||
298 | GstClockTime max_latency); | ||
299 | void gst_video_encoder_get_latency (GstVideoEncoder *encoder, | ||
300 | GstClockTime *min_latency, | ||
301 | GstClockTime *max_latency); | ||
302 | |||
303 | void gst_video_encoder_set_headers (GstVideoEncoder *encoder, | ||
304 | GList *headers); | ||
305 | G_END_DECLS | ||
306 | |||
307 | #endif | ||
308 | |||
diff --git a/common/recipes-multimedia/gstreamer/gstreamer-vaapi/wayland-compile.patch b/common/recipes-multimedia/gstreamer/gstreamer-vaapi/wayland-compile.patch deleted file mode 100644 index c42e01fe..00000000 --- a/common/recipes-multimedia/gstreamer/gstreamer-vaapi/wayland-compile.patch +++ /dev/null | |||
@@ -1,17 +0,0 @@ | |||
1 | Fix compilation of the Wayland backend when using GStreamer 0.10. | ||
2 | |||
3 | Upstream-Status: Submitted (https://bugzilla.gnome.org/show_bug.cgi?id=712282) | ||
4 | Signed-off-by: Ross Burton <ross.burton@intel.com> | ||
5 | |||
6 | diff --git a/gst-libs/gst/vaapi/Makefile.am b/gst-libs/gst/vaapi/Makefile.am | ||
7 | index ab605e1..0a3fa38 100644 | ||
8 | --- a/gst-libs/gst/vaapi/Makefile.am | ||
9 | +++ b/gst-libs/gst/vaapi/Makefile.am | ||
10 | @@ -364,6 +364,7 @@ libgstvaapi_wayland_@GST_API_VERSION@_la_CFLAGS = \ | ||
11 | -I$(top_srcdir)/gst-libs \ | ||
12 | $(GLIB_CFLAGS) \ | ||
13 | $(GST_BASE_CFLAGS) \ | ||
14 | + $(top_builddir)/gst-libs/gst/video/libgstvaapi-videoutils.la \ | ||
15 | $(WAYLAND_CFLAGS) \ | ||
16 | $(LIBVA_WAYLAND_CFLAGS) \ | ||
17 | $(NULL) | ||