author | Martin Jansa <martin.jansa@gmail.com> | 2014-08-08 15:57:52 +0200
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2014-08-17 10:01:35 +0100
commit | 77ebfe74824be4248e4683f70b5ff218dc3b38e1 (patch)
tree | a46c02e894bcd27b72b36dabc0471c605810f88c /meta/recipes-multimedia
parent | 1e001d0e714c8541441bd9486f916d4c325bd47c (diff)
download | poky-77ebfe74824be4248e4683f70b5ff218dc3b38e1.tar.gz
gst-ffmpeg: add PACKAGECONFIG for libav9 and patch from Gentoo
* apply the patch only when the libav9 PACKAGECONFIG option is selected,
because the changes are not backwards compatible
(From OE-Core rev: 7324d7deb18b81943100bc35301b0c4aa22dc404)
Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/recipes-multimedia')
-rw-r--r-- | meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/libav-9.patch | 9304
-rw-r--r-- | meta/recipes-multimedia/gstreamer/gst-ffmpeg_0.10.13.bb | 2
2 files changed, 9306 insertions, 0 deletions
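
The diffstat shows a two-line change to gst-ffmpeg_0.10.13.bb alongside the new patch file. As a hedged illustration of how a PACKAGECONFIG-gated patch is commonly wired up in a BitBake recipe: the option name `libav9` follows the commit subject, but the snippet below is a sketch of the usual idiom, not the literal lines from gst-ffmpeg_0.10.13.bb, which may differ.

```bitbake
# Illustrative sketch only -- not the verbatim recipe change from this commit.
# Define a PACKAGECONFIG option named "libav9" (assumed name, no extra
# configure flags or dependencies), disabled by default.
PACKAGECONFIG ??= ""
PACKAGECONFIG[libav9] = ""

# Only add the Gentoo-derived patch to SRC_URI when the option is enabled,
# so do_patch applies it conditionally and the default build stays untouched.
SRC_URI += "${@bb.utils.contains('PACKAGECONFIG', 'libav9', 'file://libav-9.patch', '', d)}"
```

With this pattern, a distro or local configuration opts in via `PACKAGECONFIG:append` (or the older `PACKAGECONFIG_append` override syntax used at the time), and builds against pre-libav-9 FFmpeg are unaffected.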
diff --git a/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/libav-9.patch b/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/libav-9.patch
new file mode 100644
index 0000000000..9055b341dc
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/libav-9.patch
@@ -0,0 +1,9304 @@
1 | Taken from gentoo patchset: | ||
2 | http://dev.gentoo.org/~tetromino/distfiles/gst-plugins-ffmpeg/gst-ffmpeg-0.10.13_p2012.11-libav-9-patches.tar.xz | ||
3 | |||
4 | Upstream-Status: Pending | ||
5 | |||
6 | Contains following changes, rebased to apply on top of our changes | ||
7 | 0002-Fix-includes-for-systemwide-build.patch | ||
8 | 0003-libav-Switch-to-non-deprecated-symbols.patch | ||
9 | 0005-av-Update-for-some-constant-changes.patch | ||
10 | 0006-av-Remove-palette-support-for-now.patch | ||
11 | 0007-av-Port-remaining-simple-bits.patch | ||
12 | 0008-av-Use-av_codec_is_-en-de-coder-API-instead-of-priva.patch | ||
13 | 0009-avprotocol-Port-from-the-URL-protocol-handler-to-san.patch | ||
14 | 0010-avdec-don-t-wait-for-keyframe.patch | ||
15 | |||
16 | Following changes were skipped: | ||
17 | 0001-Partially-revert-commit-0300801b.patch | ||
18 | 0004-av-update-to-use-AVOption-variants.patch | ||
19 | 0011-av_get_bits_per_sample_format-was-removed-in-libav-9.patch | ||
20 | |||
21 | Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com> | ||
22 | |||
23 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpeg.c gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpeg.c | ||
24 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpeg.c 2011-10-31 11:14:03.000000000 +0100 | ||
25 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpeg.c 2014-08-08 15:26:07.872857555 +0200 | ||
26 | @@ -151,9 +151,6 @@ | ||
27 | #endif | ||
28 | gst_ffmpegaudioresample_register (plugin); | ||
29 | |||
30 | - av_register_protocol2 (&gstreamer_protocol, sizeof (URLProtocol)); | ||
31 | - av_register_protocol2 (&gstpipe_protocol, sizeof (URLProtocol)); | ||
32 | - | ||
33 | /* Now we can return the pointer to the newly created Plugin object. */ | ||
34 | return TRUE; | ||
35 | } | ||
36 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpeg.h gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpeg.h | ||
37 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpeg.h 2011-05-17 10:53:16.000000000 +0200 | ||
38 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpeg.h 2014-08-08 15:26:07.872857555 +0200 | ||
39 | @@ -58,10 +58,13 @@ | ||
40 | int gst_ffmpeg_avcodec_close (AVCodecContext *avctx); | ||
41 | int gst_ffmpeg_av_find_stream_info(AVFormatContext *ic); | ||
42 | |||
43 | -G_END_DECLS | ||
44 | +int gst_ffmpegdata_open (GstPad * pad, int flags, AVIOContext ** context); | ||
45 | +int gst_ffmpegdata_close (AVIOContext * h); | ||
46 | +typedef struct _GstFFMpegPipe GstFFMpegPipe; | ||
47 | +int gst_ffmpeg_pipe_open (GstFFMpegPipe *ffpipe, int flags, AVIOContext ** context); | ||
48 | +int gst_ffmpeg_pipe_close (AVIOContext * h); | ||
49 | |||
50 | -extern URLProtocol gstreamer_protocol; | ||
51 | -extern URLProtocol gstpipe_protocol; | ||
52 | +G_END_DECLS | ||
53 | |||
54 | /* use GST_FFMPEG URL_STREAMHEADER with URL_WRONLY if the first | ||
55 | * buffer should be used as streamheader property on the pad's caps. */ | ||
56 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegcfg.c gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegcfg.c | ||
57 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegcfg.c 2011-07-12 16:35:27.000000000 +0200 | ||
58 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegcfg.c 2014-08-08 15:24:17.899853612 +0200 | ||
59 | @@ -147,7 +147,6 @@ | ||
60 | {FF_DCT_FASTINT, "Fast Integer", "fastint"}, | ||
61 | {FF_DCT_INT, "Accurate Integer", "int"}, | ||
62 | {FF_DCT_MMX, "MMX", "mmx"}, | ||
63 | - {FF_DCT_MLIB, "MLIB", "mlib"}, | ||
64 | {FF_DCT_ALTIVEC, "ALTIVEC", "altivec"}, | ||
65 | {FF_DCT_FAAN, "FAAN", "faan"}, | ||
66 | {0, NULL, NULL}, | ||
67 | @@ -173,8 +172,6 @@ | ||
68 | {FF_IDCT_SIMPLE, "Simple", "simple"}, | ||
69 | {FF_IDCT_SIMPLEMMX, "Simple MMX", "simplemmx"}, | ||
70 | {FF_IDCT_LIBMPEG2MMX, "LIBMPEG2MMX", "libmpeg2mmx"}, | ||
71 | - {FF_IDCT_PS2, "PS2", "ps2"}, | ||
72 | - {FF_IDCT_MLIB, "MLIB", "mlib"}, | ||
73 | {FF_IDCT_ARM, "ARM", "arm"}, | ||
74 | {FF_IDCT_ALTIVEC, "ALTIVEC", "altivec"}, | ||
75 | {FF_IDCT_SH4, "SH4", "sh4"}, | ||
76 | @@ -263,16 +260,11 @@ | ||
77 | |||
78 | if (!ffmpeg_flags_type) { | ||
79 | static const GFlagsValue ffmpeg_flags[] = { | ||
80 | - {CODEC_FLAG_OBMC, "Use overlapped block motion compensation (h263+)", | ||
81 | - "obmc"}, | ||
82 | {CODEC_FLAG_QSCALE, "Use fixed qscale", "qscale"}, | ||
83 | {CODEC_FLAG_4MV, "Allow 4 MV per MB", "4mv"}, | ||
84 | - {CODEC_FLAG_H263P_AIV, "H.263 alternative inter VLC", "aiv"}, | ||
85 | {CODEC_FLAG_QPEL, "Quartel Pel Motion Compensation", "qpel"}, | ||
86 | {CODEC_FLAG_GMC, "GMC", "gmc"}, | ||
87 | {CODEC_FLAG_MV0, "Always try a MB with MV (0,0)", "mv0"}, | ||
88 | - {CODEC_FLAG_PART, | ||
89 | - "Store MV, DC and AC coefficients in seperate partitions", "part"}, | ||
90 | {CODEC_FLAG_LOOP_FILTER, "Loop filter", "loop-filter"}, | ||
91 | {CODEC_FLAG_GRAY, "Only decode/encode grayscale", "gray"}, | ||
92 | {CODEC_FLAG_NORMALIZE_AQP, | ||
93 | @@ -282,13 +274,9 @@ | ||
94 | "global-headers"}, | ||
95 | {CODEC_FLAG_AC_PRED, "H263 Advanced Intra Coding / MPEG4 AC prediction", | ||
96 | "aic"}, | ||
97 | - {CODEC_FLAG_H263P_UMV, "Unlimited Motion Vector", "umv"}, | ||
98 | {CODEC_FLAG_CBP_RD, "Rate Distoration Optimization for CBP", "cbp-rd"}, | ||
99 | {CODEC_FLAG_QP_RD, "Rate Distoration Optimization for QP selection", | ||
100 | "qp-rd"}, | ||
101 | - {CODEC_FLAG_H263P_SLICE_STRUCT, "H263 slice struct", "ss"}, | ||
102 | - {CODEC_FLAG_SVCD_SCAN_OFFSET, | ||
103 | - "Reserve space for SVCD scan offset user data", "scanoffset"}, | ||
104 | {CODEC_FLAG_CLOSED_GOP, "Closed GOP", "closedgop"}, | ||
105 | {0, NULL, NULL}, | ||
106 | }; | ||
107 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegcodecmap.c gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegcodecmap.c | ||
108 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegcodecmap.c 2011-10-31 11:14:03.000000000 +0100 | ||
109 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegcodecmap.c 2014-08-08 15:31:30.968869139 +0200 | ||
110 | @@ -25,8 +25,10 @@ | ||
111 | #include <gst/gst.h> | ||
112 | #ifdef HAVE_FFMPEG_UNINSTALLED | ||
113 | #include <avcodec.h> | ||
114 | +#include <channel_layout.h>> | ||
115 | #else | ||
116 | #include <libavcodec/avcodec.h> | ||
117 | +#include <libavutil/channel_layout.h> | ||
118 | #endif | ||
119 | #include <string.h> | ||
120 | |||
121 | @@ -35,43 +37,6 @@ | ||
122 | |||
123 | #include <gst/pbutils/codec-utils.h> | ||
124 | |||
125 | -/* | ||
126 | - * Read a palette from a caps. | ||
127 | - */ | ||
128 | - | ||
129 | -static void | ||
130 | -gst_ffmpeg_get_palette (const GstCaps * caps, AVCodecContext * context) | ||
131 | -{ | ||
132 | - GstStructure *str = gst_caps_get_structure (caps, 0); | ||
133 | - const GValue *palette_v; | ||
134 | - const GstBuffer *palette; | ||
135 | - | ||
136 | - /* do we have a palette? */ | ||
137 | - if ((palette_v = gst_structure_get_value (str, "palette_data")) && context) { | ||
138 | - palette = gst_value_get_buffer (palette_v); | ||
139 | - if (GST_BUFFER_SIZE (palette) >= AVPALETTE_SIZE) { | ||
140 | - if (context->palctrl) | ||
141 | - av_free (context->palctrl); | ||
142 | - context->palctrl = av_malloc (sizeof (AVPaletteControl)); | ||
143 | - context->palctrl->palette_changed = 1; | ||
144 | - memcpy (context->palctrl->palette, GST_BUFFER_DATA (palette), | ||
145 | - AVPALETTE_SIZE); | ||
146 | - } | ||
147 | - } | ||
148 | -} | ||
149 | - | ||
150 | -static void | ||
151 | -gst_ffmpeg_set_palette (GstCaps * caps, AVCodecContext * context) | ||
152 | -{ | ||
153 | - if (context->palctrl) { | ||
154 | - GstBuffer *palette = gst_buffer_new_and_alloc (AVPALETTE_SIZE); | ||
155 | - | ||
156 | - memcpy (GST_BUFFER_DATA (palette), context->palctrl->palette, | ||
157 | - AVPALETTE_SIZE); | ||
158 | - gst_caps_set_simple (caps, "palette_data", GST_TYPE_BUFFER, palette, NULL); | ||
159 | - } | ||
160 | -} | ||
161 | - | ||
162 | /* IMPORTANT: Keep this sorted by the ffmpeg channel masks */ | ||
163 | static const struct | ||
164 | { | ||
165 | @@ -79,26 +44,26 @@ | ||
166 | GstAudioChannelPosition gst; | ||
167 | } _ff_to_gst_layout[] = { | ||
168 | { | ||
169 | - CH_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT}, { | ||
170 | - CH_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}, { | ||
171 | - CH_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER}, { | ||
172 | - CH_LOW_FREQUENCY, GST_AUDIO_CHANNEL_POSITION_LFE}, { | ||
173 | - CH_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_REAR_LEFT}, { | ||
174 | - CH_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT}, { | ||
175 | - CH_FRONT_LEFT_OF_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER}, { | ||
176 | - CH_FRONT_RIGHT_OF_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER}, { | ||
177 | - CH_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_REAR_CENTER}, { | ||
178 | - CH_SIDE_LEFT, GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT}, { | ||
179 | - CH_SIDE_RIGHT, GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}, { | ||
180 | - CH_TOP_CENTER, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
181 | - CH_TOP_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
182 | - CH_TOP_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
183 | - CH_TOP_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
184 | - CH_TOP_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
185 | - CH_TOP_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
186 | - CH_TOP_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
187 | - CH_STEREO_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT}, { | ||
188 | - CH_STEREO_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT} | ||
189 | + AV_CH_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT}, { | ||
190 | + AV_CH_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}, { | ||
191 | + AV_CH_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER}, { | ||
192 | + AV_CH_LOW_FREQUENCY, GST_AUDIO_CHANNEL_POSITION_LFE}, { | ||
193 | + AV_CH_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_REAR_LEFT}, { | ||
194 | + AV_CH_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT}, { | ||
195 | + AV_CH_FRONT_LEFT_OF_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER}, { | ||
196 | + AV_CH_FRONT_RIGHT_OF_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER}, { | ||
197 | + AV_CH_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_REAR_CENTER}, { | ||
198 | + AV_CH_SIDE_LEFT, GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT}, { | ||
199 | + AV_CH_SIDE_RIGHT, GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}, { | ||
200 | + AV_CH_TOP_CENTER, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
201 | + AV_CH_TOP_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
202 | + AV_CH_TOP_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
203 | + AV_CH_TOP_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
204 | + AV_CH_TOP_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
205 | + AV_CH_TOP_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
206 | + AV_CH_TOP_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
207 | + AV_CH_STEREO_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT}, { | ||
208 | + AV_CH_STEREO_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT} | ||
209 | }; | ||
210 | |||
211 | static GstAudioChannelPosition * | ||
212 | @@ -342,8 +307,8 @@ | ||
213 | |||
214 | if (channel_layout == 0) { | ||
215 | const guint64 default_channel_set[] = { | ||
216 | - 0, 0, CH_LAYOUT_SURROUND, CH_LAYOUT_QUAD, CH_LAYOUT_5POINT0, | ||
217 | - CH_LAYOUT_5POINT1, 0, CH_LAYOUT_7POINT1 | ||
218 | + 0, 0, AV_CH_LAYOUT_SURROUND, AV_CH_LAYOUT_QUAD, AV_CH_LAYOUT_5POINT0, | ||
219 | + AV_CH_LAYOUT_5POINT1, 0, AV_CH_LAYOUT_7POINT1 | ||
220 | }; | ||
221 | |||
222 | switch (codec_id) { | ||
223 | @@ -1267,8 +1232,6 @@ | ||
224 | case CODEC_ID_FLIC: | ||
225 | case CODEC_ID_VMDVIDEO: | ||
226 | case CODEC_ID_VMDAUDIO: | ||
227 | - case CODEC_ID_SONIC: | ||
228 | - case CODEC_ID_SONIC_LS: | ||
229 | case CODEC_ID_SNOW: | ||
230 | case CODEC_ID_VIXL: | ||
231 | case CODEC_ID_QPEG: | ||
232 | @@ -1689,11 +1652,6 @@ | ||
233 | gst_buffer_unref (data); | ||
234 | } | ||
235 | |||
236 | - /* palette */ | ||
237 | - if (context) { | ||
238 | - gst_ffmpeg_set_palette (caps, context); | ||
239 | - } | ||
240 | - | ||
241 | GST_LOG ("caps for codec_id=%d: %" GST_PTR_FORMAT, codec_id, caps); | ||
242 | |||
243 | } else { | ||
244 | @@ -1830,9 +1788,6 @@ | ||
245 | "bpp", G_TYPE_INT, bpp, | ||
246 | "depth", G_TYPE_INT, depth, | ||
247 | "endianness", G_TYPE_INT, endianness, NULL); | ||
248 | - if (caps && context) { | ||
249 | - gst_ffmpeg_set_palette (caps, context); | ||
250 | - } | ||
251 | } | ||
252 | } else if (fmt) { | ||
253 | caps = gst_ff_vid_caps_new (context, codec_id, "video/x-raw-yuv", | ||
254 | @@ -1857,7 +1812,7 @@ | ||
255 | */ | ||
256 | |||
257 | static GstCaps * | ||
258 | -gst_ffmpeg_smpfmt_to_caps (enum SampleFormat sample_fmt, | ||
259 | +gst_ffmpeg_smpfmt_to_caps (enum AVSampleFormat sample_fmt, | ||
260 | AVCodecContext * context, enum CodecID codec_id) | ||
261 | { | ||
262 | GstCaps *caps = NULL; | ||
263 | @@ -1867,22 +1822,22 @@ | ||
264 | gboolean signedness = FALSE; | ||
265 | |||
266 | switch (sample_fmt) { | ||
267 | - case SAMPLE_FMT_S16: | ||
268 | + case AV_SAMPLE_FMT_S16: | ||
269 | signedness = TRUE; | ||
270 | bpp = 16; | ||
271 | break; | ||
272 | |||
273 | - case SAMPLE_FMT_S32: | ||
274 | + case AV_SAMPLE_FMT_S32: | ||
275 | signedness = TRUE; | ||
276 | bpp = 32; | ||
277 | break; | ||
278 | |||
279 | - case SAMPLE_FMT_FLT: | ||
280 | + case AV_SAMPLE_FMT_FLT: | ||
281 | integer = FALSE; | ||
282 | bpp = 32; | ||
283 | break; | ||
284 | |||
285 | - case SAMPLE_FMT_DBL: | ||
286 | + case AV_SAMPLE_FMT_DBL: | ||
287 | integer = FALSE; | ||
288 | bpp = 64; | ||
289 | break; | ||
290 | @@ -1941,12 +1896,12 @@ | ||
291 | } | ||
292 | } else { | ||
293 | GstCaps *temp; | ||
294 | - enum SampleFormat i; | ||
295 | + enum AVSampleFormat i; | ||
296 | AVCodecContext ctx = { 0, }; | ||
297 | |||
298 | ctx.channels = -1; | ||
299 | caps = gst_caps_new_empty (); | ||
300 | - for (i = 0; i <= SAMPLE_FMT_DBL; i++) { | ||
301 | + for (i = 0; i <= AV_SAMPLE_FMT_DBL; i++) { | ||
302 | temp = gst_ffmpeg_smpfmt_to_caps (i, encode ? &ctx : NULL, codec_id); | ||
303 | if (temp != NULL) { | ||
304 | gst_caps_append (caps, temp); | ||
305 | @@ -2049,9 +2004,9 @@ | ||
306 | gst_structure_get_int (structure, "endianness", &endianness)) { | ||
307 | if (endianness == G_BYTE_ORDER) { | ||
308 | if (width == 32) | ||
309 | - context->sample_fmt = SAMPLE_FMT_FLT; | ||
310 | + context->sample_fmt = AV_SAMPLE_FMT_FLT; | ||
311 | else if (width == 64) | ||
312 | - context->sample_fmt = SAMPLE_FMT_DBL; | ||
313 | + context->sample_fmt = AV_SAMPLE_FMT_DBL; | ||
314 | } | ||
315 | } | ||
316 | } else { | ||
317 | @@ -2062,9 +2017,9 @@ | ||
318 | gst_structure_get_int (structure, "endianness", &endianness)) { | ||
319 | if ((endianness == G_BYTE_ORDER) && (signedness == TRUE)) { | ||
320 | if ((width == 16) && (depth == 16)) | ||
321 | - context->sample_fmt = SAMPLE_FMT_S16; | ||
322 | + context->sample_fmt = AV_SAMPLE_FMT_S16; | ||
323 | else if ((width == 32) && (depth == 32)) | ||
324 | - context->sample_fmt = SAMPLE_FMT_S32; | ||
325 | + context->sample_fmt = AV_SAMPLE_FMT_S32; | ||
326 | } | ||
327 | } | ||
328 | } | ||
329 | @@ -2190,7 +2145,6 @@ | ||
330 | } else { | ||
331 | if (bpp == 8) { | ||
332 | context->pix_fmt = PIX_FMT_PAL8; | ||
333 | - gst_ffmpeg_get_palette (caps, context); | ||
334 | } | ||
335 | } | ||
336 | } | ||
337 | @@ -2576,7 +2530,6 @@ | ||
338 | switch (codec_type) { | ||
339 | case AVMEDIA_TYPE_VIDEO: | ||
340 | gst_ffmpeg_caps_to_pixfmt (caps, context, codec_id == CODEC_ID_RAWVIDEO); | ||
341 | - gst_ffmpeg_get_palette (caps, context); | ||
342 | break; | ||
343 | case AVMEDIA_TYPE_AUDIO: | ||
344 | gst_ffmpeg_caps_to_smpfmt (caps, context, FALSE); | ||
345 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegcodecmap.c.orig gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegcodecmap.c.orig | ||
346 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegcodecmap.c.orig 1970-01-01 01:00:00.000000000 +0100 | ||
347 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegcodecmap.c.orig 2014-08-08 15:30:34.006867097 +0200 | ||
348 | @@ -0,0 +1,3447 @@ | ||
349 | +/* GStreamer | ||
350 | + * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu> | ||
351 | + * This file: | ||
352 | + * Copyright (c) 2002-2004 Ronald Bultje <rbultje@ronald.bitfreak.net> | ||
353 | + * | ||
354 | + * This library is free software; you can redistribute it and/or | ||
355 | + * modify it under the terms of the GNU Library General Public | ||
356 | + * License as published by the Free Software Foundation; either | ||
357 | + * version 2 of the License, or (at your option) any later version. | ||
358 | + * | ||
359 | + * This library is distributed in the hope that it will be useful, | ||
360 | + * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
361 | + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
362 | + * Library General Public License for more details. | ||
363 | + * | ||
364 | + * You should have received a copy of the GNU Library General Public | ||
365 | + * License along with this library; if not, write to the | ||
366 | + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, | ||
367 | + * Boston, MA 02111-1307, USA. | ||
368 | + */ | ||
369 | + | ||
370 | +#ifdef HAVE_CONFIG_H | ||
371 | +#include "config.h" | ||
372 | +#endif | ||
373 | +#include <gst/gst.h> | ||
374 | +#ifdef HAVE_FFMPEG_UNINSTALLED | ||
375 | +#include <avcodec.h> | ||
376 | +#include <channel_layout.h>> | ||
377 | +#else | ||
378 | +#include <libavcodec/avcodec.h> | ||
379 | +#include <libavutil/channel_layout.h> | ||
380 | +#endif | ||
381 | +#include <string.h> | ||
382 | + | ||
383 | +#include "gstffmpeg.h" | ||
384 | +#include "gstffmpegcodecmap.h" | ||
385 | + | ||
386 | +#include <gst/pbutils/codec-utils.h> | ||
387 | + | ||
388 | +/* | ||
389 | + * Read a palette from a caps. | ||
390 | + */ | ||
391 | + | ||
392 | +static void | ||
393 | +gst_ffmpeg_get_palette (const GstCaps * caps, AVCodecContext * context) | ||
394 | +{ | ||
395 | + GstStructure *str = gst_caps_get_structure (caps, 0); | ||
396 | + const GValue *palette_v; | ||
397 | + const GstBuffer *palette; | ||
398 | + | ||
399 | + /* do we have a palette? */ | ||
400 | + if ((palette_v = gst_structure_get_value (str, "palette_data")) && context) { | ||
401 | + palette = gst_value_get_buffer (palette_v); | ||
402 | + if (GST_BUFFER_SIZE (palette) >= AVPALETTE_SIZE) { | ||
403 | + if (context->palctrl) | ||
404 | + av_free (context->palctrl); | ||
405 | + context->palctrl = av_malloc (sizeof (AVPaletteControl)); | ||
406 | + context->palctrl->palette_changed = 1; | ||
407 | + memcpy (context->palctrl->palette, GST_BUFFER_DATA (palette), | ||
408 | + AVPALETTE_SIZE); | ||
409 | + } | ||
410 | + } | ||
411 | +} | ||
412 | + | ||
413 | +static void | ||
414 | +gst_ffmpeg_set_palette (GstCaps * caps, AVCodecContext * context) | ||
415 | +{ | ||
416 | + if (context->palctrl) { | ||
417 | + GstBuffer *palette = gst_buffer_new_and_alloc (AVPALETTE_SIZE); | ||
418 | + | ||
419 | + memcpy (GST_BUFFER_DATA (palette), context->palctrl->palette, | ||
420 | + AVPALETTE_SIZE); | ||
421 | + gst_caps_set_simple (caps, "palette_data", GST_TYPE_BUFFER, palette, NULL); | ||
422 | + } | ||
423 | +} | ||
424 | + | ||
425 | +/* IMPORTANT: Keep this sorted by the ffmpeg channel masks */ | ||
426 | +static const struct | ||
427 | +{ | ||
428 | + guint64 ff; | ||
429 | + GstAudioChannelPosition gst; | ||
430 | +} _ff_to_gst_layout[] = { | ||
431 | + { | ||
432 | + AV_CH_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT}, { | ||
433 | + AV_CH_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}, { | ||
434 | + AV_CH_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER}, { | ||
435 | + AV_CH_LOW_FREQUENCY, GST_AUDIO_CHANNEL_POSITION_LFE}, { | ||
436 | + AV_CH_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_REAR_LEFT}, { | ||
437 | + AV_CH_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT}, { | ||
438 | + AV_CH_FRONT_LEFT_OF_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER}, { | ||
439 | + AV_CH_FRONT_RIGHT_OF_CENTER, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER}, { | ||
440 | + AV_CH_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_REAR_CENTER}, { | ||
441 | + AV_CH_SIDE_LEFT, GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT}, { | ||
442 | + AV_CH_SIDE_RIGHT, GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}, { | ||
443 | + AV_CH_TOP_CENTER, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
444 | + AV_CH_TOP_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
445 | + AV_CH_TOP_FRONT_CENTER, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
446 | + AV_CH_TOP_FRONT_RIGHT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
447 | + AV_CH_TOP_BACK_LEFT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
448 | + AV_CH_TOP_BACK_CENTER, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
449 | + AV_CH_TOP_BACK_RIGHT, GST_AUDIO_CHANNEL_POSITION_NONE}, { | ||
450 | + AV_CH_STEREO_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT}, { | ||
451 | + AV_CH_STEREO_RIGHT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT} | ||
452 | +}; | ||
453 | + | ||
454 | +static GstAudioChannelPosition * | ||
455 | +gst_ff_channel_layout_to_gst (guint64 channel_layout, guint channels) | ||
456 | +{ | ||
457 | + guint nchannels = 0, i, j; | ||
458 | + GstAudioChannelPosition *pos = NULL; | ||
459 | + gboolean none_layout = FALSE; | ||
460 | + | ||
461 | + for (i = 0; i < 64; i++) { | ||
462 | + if ((channel_layout & (G_GUINT64_CONSTANT (1) << i)) != 0) { | ||
463 | + nchannels++; | ||
464 | + } | ||
465 | + } | ||
466 | + | ||
467 | + if (channel_layout == 0) { | ||
468 | + nchannels = channels; | ||
469 | + none_layout = TRUE; | ||
470 | + } | ||
471 | + | ||
472 | + if (nchannels != channels) { | ||
473 | + GST_ERROR ("Number of channels is different (%u != %u)", channels, | ||
474 | + nchannels); | ||
475 | + return NULL; | ||
476 | + } | ||
477 | + | ||
478 | + pos = g_new (GstAudioChannelPosition, nchannels); | ||
479 | + | ||
480 | + for (i = 0, j = 0; i < G_N_ELEMENTS (_ff_to_gst_layout); i++) { | ||
481 | + if ((channel_layout & _ff_to_gst_layout[i].ff) != 0) { | ||
482 | + pos[j++] = _ff_to_gst_layout[i].gst; | ||
483 | + | ||
484 | + if (_ff_to_gst_layout[i].gst == GST_AUDIO_CHANNEL_POSITION_NONE) | ||
485 | + none_layout = TRUE; | ||
486 | + } | ||
487 | + } | ||
488 | + | ||
489 | + if (j != nchannels) { | ||
490 | + GST_WARNING ("Unknown channels in channel layout - assuming NONE layout"); | ||
491 | + none_layout = TRUE; | ||
492 | + } | ||
493 | + | ||
494 | + if (!none_layout && !gst_audio_check_channel_positions (pos, nchannels)) { | ||
495 | + GST_ERROR ("Invalid channel layout %" G_GUINT64_FORMAT | ||
496 | + " - assuming NONE layout", channel_layout); | ||
497 | + none_layout = TRUE; | ||
498 | + } | ||
499 | + | ||
500 | + if (none_layout) { | ||
501 | + if (nchannels == 1) { | ||
502 | + pos[0] = GST_AUDIO_CHANNEL_POSITION_FRONT_MONO; | ||
503 | + } else if (nchannels == 2) { | ||
504 | + pos[0] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT; | ||
505 | + pos[1] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT; | ||
506 | + } else if (channel_layout == 0) { | ||
507 | + g_free (pos); | ||
508 | + pos = NULL; | ||
509 | + } else { | ||
510 | + for (i = 0; i < nchannels; i++) | ||
511 | + pos[i] = GST_AUDIO_CHANNEL_POSITION_NONE; | ||
512 | + } | ||
513 | + } | ||
514 | + | ||
515 | + if (nchannels == 1 && pos[0] == GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER) { | ||
516 | + GST_DEBUG ("mono common case; won't set channel positions"); | ||
517 | + g_free (pos); | ||
518 | + pos = NULL; | ||
519 | + } else if (nchannels == 2 && pos[0] == GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT | ||
520 | + && pos[1] == GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT) { | ||
521 | + GST_DEBUG ("stereo common case; won't set channel positions"); | ||
522 | + g_free (pos); | ||
523 | + pos = NULL; | ||
524 | + } | ||
525 | + | ||
526 | + return pos; | ||
527 | +} | ||
528 | + | ||
529 | +/* this macro makes a caps width fixed or unfixed width/height | ||
530 | + * properties depending on whether we've got a context. | ||
531 | + * | ||
532 | + * See below for why we use this. | ||
533 | + * | ||
534 | + * We should actually do this stuff at the end, like in riff-media.c, | ||
535 | + * but I'm too lazy today. Maybe later. | ||
536 | + */ | ||
537 | +static GstCaps * | ||
538 | +gst_ff_vid_caps_new (AVCodecContext * context, enum CodecID codec_id, | ||
539 | + const char *mimetype, const char *fieldname, ...) | ||
540 | +{ | ||
541 | + GstStructure *structure = NULL; | ||
542 | + GstCaps *caps = NULL; | ||
543 | + va_list var_args; | ||
544 | + gint i; | ||
545 | + | ||
546 | + GST_LOG ("context:%p, codec_id:%d, mimetype:%s", context, codec_id, mimetype); | ||
547 | + | ||
548 | + /* fixed, non probing context */ | ||
549 | + if (context != NULL && context->width != -1) { | ||
550 | + gint num, denom; | ||
551 | + | ||
552 | + caps = gst_caps_new_simple (mimetype, | ||
553 | + "width", G_TYPE_INT, context->width, | ||
554 | + "height", G_TYPE_INT, context->height, NULL); | ||
555 | + | ||
556 | + num = context->time_base.den / context->ticks_per_frame; | ||
557 | + denom = context->time_base.num; | ||
558 | + | ||
559 | + if (!denom) { | ||
560 | + GST_LOG ("invalid framerate: %d/0, -> %d/1", num, num); | ||
561 | + denom = 1; | ||
562 | + } | ||
563 | + if (gst_util_fraction_compare (num, denom, 1000, 1) > 0) { | ||
564 | + GST_LOG ("excessive framerate: %d/%d, -> 0/1", num, denom); | ||
565 | + num = 0; | ||
566 | + denom = 1; | ||
567 | + } | ||
568 | + GST_LOG ("setting framerate: %d/%d", num, denom); | ||
569 | + gst_caps_set_simple (caps, | ||
570 | + "framerate", GST_TYPE_FRACTION, num, denom, NULL); | ||
571 | + } else { | ||
572 | + /* so we are after restricted caps in this case */ | ||
573 | + switch (codec_id) { | ||
574 | + case CODEC_ID_H261: | ||
575 | + { | ||
576 | + caps = gst_caps_new_simple (mimetype, | ||
577 | + "width", G_TYPE_INT, 352, | ||
578 | + "height", G_TYPE_INT, 288, | ||
579 | + "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL); | ||
580 | + gst_caps_append (caps, gst_caps_new_simple (mimetype, | ||
581 | + "width", G_TYPE_INT, 176, | ||
582 | + "height", G_TYPE_INT, 144, | ||
583 | + "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL)); | ||
584 | + break; | ||
585 | + } | ||
586 | + case CODEC_ID_H263: | ||
587 | + { | ||
588 | + /* 128x96, 176x144, 352x288, 704x576, and 1408x1152. slightly reordered | ||
589 | + * because we want automatic negotiation to go as close to 320x240 as | ||
590 | + * possible. */ | ||
591 | + const static gint widths[] = { 352, 704, 176, 1408, 128 }; | ||
592 | + const static gint heights[] = { 288, 576, 144, 1152, 96 }; | ||
593 | + GstCaps *temp; | ||
594 | + gint n_sizes = G_N_ELEMENTS (widths); | ||
595 | + | ||
596 | + caps = gst_caps_new_empty (); | ||
597 | + for (i = 0; i < n_sizes; i++) { | ||
598 | + temp = gst_caps_new_simple (mimetype, | ||
599 | + "width", G_TYPE_INT, widths[i], | ||
600 | + "height", G_TYPE_INT, heights[i], | ||
601 | + "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL); | ||
602 | + | ||
603 | + gst_caps_append (caps, temp); | ||
604 | + } | ||
605 | + break; | ||
606 | + } | ||
607 | + case CODEC_ID_DVVIDEO: | ||
608 | + { | ||
609 | + static struct | ||
610 | + { | ||
611 | + guint32 csp; | ||
612 | + gint width, height; | ||
613 | + gint par_n, par_d; | ||
614 | + gint framerate_n, framerate_d; | ||
615 | + } profiles[] = { | ||
616 | + { | ||
617 | + GST_MAKE_FOURCC ('Y', '4', '1', 'B'), 720, 480, 10, 11, 30000, 1001}, { | ||
618 | + GST_MAKE_FOURCC ('Y', '4', '1', 'B'), 720, 480, 40, 33, 30000, 1001}, { | ||
619 | + GST_MAKE_FOURCC ('I', '4', '2', '0'), 720, 576, 59, 54, 25, 1}, { | ||
620 | + GST_MAKE_FOURCC ('I', '4', '2', '0'), 720, 576, 118, 81, 25, 1}, { | ||
621 | + GST_MAKE_FOURCC ('Y', '4', '1', 'B'), 720, 576, 59, 54, 25, 1}, { | ||
622 | + GST_MAKE_FOURCC ('Y', '4', '1', 'B'), 720, 576, 118, 81, 25, 1} | ||
623 | + }; | ||
624 | + GstCaps *temp; | ||
625 | + gint n_sizes = G_N_ELEMENTS (profiles); | ||
626 | + | ||
627 | + caps = gst_caps_new_empty (); | ||
628 | + for (i = 0; i < n_sizes; i++) { | ||
629 | + temp = gst_caps_new_simple (mimetype, | ||
630 | + "width", G_TYPE_INT, profiles[i].width, | ||
631 | + "height", G_TYPE_INT, profiles[i].height, | ||
632 | + "framerate", GST_TYPE_FRACTION, profiles[i].framerate_n, | ||
633 | + profiles[i].framerate_d, "pixel-aspect-ratio", GST_TYPE_FRACTION, | ||
634 | + profiles[i].par_n, profiles[i].par_d, NULL); | ||
635 | + | ||
636 | + gst_caps_append (caps, temp); | ||
637 | + } | ||
638 | + break; | ||
639 | + } | ||
640 | + case CODEC_ID_DNXHD: | ||
641 | + { | ||
642 | + caps = gst_caps_new_simple (mimetype, | ||
643 | + "width", G_TYPE_INT, 1920, | ||
644 | + "height", G_TYPE_INT, 1080, | ||
645 | + "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL); | ||
646 | + gst_caps_append (caps, gst_caps_new_simple (mimetype, | ||
647 | + "width", G_TYPE_INT, 1280, | ||
648 | + "height", G_TYPE_INT, 720, | ||
649 | + "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL)); | ||
650 | + break; | ||
651 | + } | ||
652 | + default: | ||
653 | + break; | ||
654 | + } | ||
655 | + } | ||
656 | + | ||
657 | + /* no fixed caps or special restrictions applied; | ||
658 | + * default unfixed setting */ | ||
659 | + if (!caps) { | ||
660 | + GST_DEBUG ("Creating default caps"); | ||
661 | + caps = gst_caps_new_simple (mimetype, | ||
662 | + "width", GST_TYPE_INT_RANGE, 16, 4096, | ||
663 | + "height", GST_TYPE_INT_RANGE, 16, 4096, | ||
664 | + "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL); | ||
665 | + } | ||
666 | + | ||
667 | + for (i = 0; i < gst_caps_get_size (caps); i++) { | ||
668 | + va_start (var_args, fieldname); | ||
669 | + structure = gst_caps_get_structure (caps, i); | ||
670 | + gst_structure_set_valist (structure, fieldname, var_args); | ||
671 | + va_end (var_args); | ||
672 | + } | ||
673 | + | ||
674 | + return caps; | ||
675 | +} | ||
676 | + | ||
677 | +/* same for audio - now with channels/sample rate | ||
678 | + */ | ||
679 | +static GstCaps * | ||
680 | +gst_ff_aud_caps_new (AVCodecContext * context, enum CodecID codec_id, | ||
681 | + const char *mimetype, const char *fieldname, ...) | ||
682 | +{ | ||
683 | + GstCaps *caps = NULL; | ||
684 | + GstStructure *structure = NULL; | ||
685 | + gint i; | ||
686 | + va_list var_args; | ||
687 | + | ||
688 | + /* fixed, non-probing context */ | ||
689 | + if (context != NULL && context->channels != -1) { | ||
690 | + GstAudioChannelPosition *pos; | ||
691 | + guint64 channel_layout = context->channel_layout; | ||
692 | + | ||
693 | + if (channel_layout == 0) { | ||
694 | + const guint64 default_channel_set[] = { | ||
695 | + 0, 0, AV_CH_LAYOUT_SURROUND, AV_CH_LAYOUT_QUAD, AV_CH_LAYOUT_5POINT0, | ||
696 | + AV_CH_LAYOUT_5POINT1, 0, AV_CH_LAYOUT_7POINT1 | ||
697 | + }; | ||
698 | + | ||
699 | + switch (codec_id) { | ||
700 | + case CODEC_ID_EAC3: | ||
701 | + case CODEC_ID_AC3: | ||
702 | + case CODEC_ID_DTS: | ||
703 | + if (context->channels > 0 | ||
704 | + && context->channels < G_N_ELEMENTS (default_channel_set)) | ||
705 | + channel_layout = default_channel_set[context->channels - 1]; | ||
706 | + break; | ||
707 | + default: | ||
708 | + break; | ||
709 | + } | ||
710 | + } | ||
711 | + | ||
712 | + caps = gst_caps_new_simple (mimetype, | ||
713 | + "rate", G_TYPE_INT, context->sample_rate, | ||
714 | + "channels", G_TYPE_INT, context->channels, NULL); | ||
715 | + | ||
716 | + pos = gst_ff_channel_layout_to_gst (channel_layout, context->channels); | ||
717 | + if (pos != NULL) { | ||
718 | + gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos); | ||
719 | + g_free (pos); | ||
720 | + } | ||
721 | + } else { | ||
722 | + gint maxchannels = 2; | ||
723 | + const gint *rates = NULL; | ||
724 | + gint n_rates = 0; | ||
725 | + | ||
726 | + /* so we must be after restricted caps in this case */ | ||
727 | + switch (codec_id) { | ||
728 | + case CODEC_ID_AAC: | ||
729 | + case CODEC_ID_AAC_LATM: | ||
730 | + case CODEC_ID_DTS: | ||
731 | + maxchannels = 6; | ||
732 | + break; | ||
733 | + case CODEC_ID_MP2: | ||
734 | + { | ||
735 | + const static gint l_rates[] = | ||
736 | + { 48000, 44100, 32000, 24000, 22050, 16000 }; | ||
737 | + n_rates = G_N_ELEMENTS (l_rates); | ||
738 | + rates = l_rates; | ||
739 | + break; | ||
740 | + } | ||
741 | + case CODEC_ID_EAC3: | ||
742 | + case CODEC_ID_AC3: | ||
743 | + { | ||
744 | + const static gint l_rates[] = { 48000, 44100, 32000 }; | ||
745 | + maxchannels = 6; | ||
746 | + n_rates = G_N_ELEMENTS (l_rates); | ||
747 | + rates = l_rates; | ||
748 | + break; | ||
749 | + } | ||
750 | + case CODEC_ID_ADPCM_G722: | ||
751 | + { | ||
752 | + const static gint l_rates[] = { 16000 }; | ||
753 | + n_rates = G_N_ELEMENTS (l_rates); | ||
754 | + rates = l_rates; | ||
755 | + maxchannels = 1; | ||
756 | + break; | ||
757 | + } | ||
758 | + case CODEC_ID_ADPCM_G726: | ||
759 | + { | ||
760 | + const static gint l_rates[] = { 8000 }; | ||
761 | + n_rates = G_N_ELEMENTS (l_rates); | ||
762 | + rates = l_rates; | ||
763 | + maxchannels = 1; | ||
764 | + break; | ||
765 | + } | ||
766 | + case CODEC_ID_ADPCM_SWF: | ||
767 | + { | ||
768 | + const static gint l_rates[] = { 11025, 22050, 44100 }; | ||
769 | + n_rates = G_N_ELEMENTS (l_rates); | ||
770 | + rates = l_rates; | ||
771 | + break; | ||
772 | + } | ||
773 | + case CODEC_ID_ROQ_DPCM: | ||
774 | + { | ||
775 | + const static gint l_rates[] = { 22050 }; | ||
776 | + n_rates = G_N_ELEMENTS (l_rates); | ||
777 | + rates = l_rates; | ||
778 | + break; | ||
779 | + } | ||
780 | + case CODEC_ID_AMR_NB: | ||
781 | + { | ||
782 | + const static gint l_rates[] = { 8000 }; | ||
783 | + maxchannels = 1; | ||
784 | + n_rates = G_N_ELEMENTS (l_rates); | ||
785 | + rates = l_rates; | ||
786 | + break; | ||
787 | + } | ||
788 | + case CODEC_ID_AMR_WB: | ||
789 | + { | ||
790 | + const static gint l_rates[] = { 16000 }; | ||
791 | + maxchannels = 1; | ||
792 | + n_rates = G_N_ELEMENTS (l_rates); | ||
793 | + rates = l_rates; | ||
794 | + break; | ||
795 | + } | ||
796 | + default: | ||
797 | + break; | ||
798 | + } | ||
799 | + | ||
800 | + /* TODO: handle context->channel_layouts here to set | ||
801 | + * the list of channel layouts supported by the encoder. | ||
802 | + * Unfortunately no encoder uses this yet.... | ||
803 | + */ | ||
804 | + /* regardless of encode/decode, open up channels if applicable */ | ||
805 | + /* Until decoders/encoders expose the maximum number of channels | ||
806 | + * they support, we whitelist them here. */ | ||
807 | + switch (codec_id) { | ||
808 | + case CODEC_ID_WMAPRO: | ||
809 | + case CODEC_ID_TRUEHD: | ||
810 | + maxchannels = 8; | ||
811 | + break; | ||
812 | + default: | ||
813 | + break; | ||
814 | + } | ||
815 | + | ||
816 | + if (maxchannels == 1) | ||
817 | + caps = gst_caps_new_simple (mimetype, | ||
818 | + "channels", G_TYPE_INT, maxchannels, NULL); | ||
819 | + else | ||
820 | + caps = gst_caps_new_simple (mimetype, | ||
821 | + "channels", GST_TYPE_INT_RANGE, 1, maxchannels, NULL); | ||
822 | + if (n_rates) { | ||
823 | + GValue list = { 0, }; | ||
824 | + GstStructure *structure; | ||
825 | + | ||
826 | + g_value_init (&list, GST_TYPE_LIST); | ||
827 | + for (i = 0; i < n_rates; i++) { | ||
828 | + GValue v = { 0, }; | ||
829 | + | ||
830 | + g_value_init (&v, G_TYPE_INT); | ||
831 | + g_value_set_int (&v, rates[i]); | ||
832 | + gst_value_list_append_value (&list, &v); | ||
833 | + g_value_unset (&v); | ||
834 | + } | ||
835 | + structure = gst_caps_get_structure (caps, 0); | ||
836 | + gst_structure_set_value (structure, "rate", &list); | ||
837 | + g_value_unset (&list); | ||
838 | + } else | ||
839 | + gst_caps_set_simple (caps, "rate", GST_TYPE_INT_RANGE, 4000, 96000, NULL); | ||
840 | + } | ||
841 | + | ||
842 | + for (i = 0; i < gst_caps_get_size (caps); i++) { | ||
843 | + va_start (var_args, fieldname); | ||
844 | + structure = gst_caps_get_structure (caps, i); | ||
845 | + gst_structure_set_valist (structure, fieldname, var_args); | ||
846 | + va_end (var_args); | ||
847 | + } | ||
848 | + | ||
849 | + return caps; | ||
850 | +} | ||
851 | + | ||
852 | +/* Convert a FFMPEG codec ID and optional AVCodecContext | ||
853 | + * to a GstCaps. If the context is ommitted, no fixed values | ||
854 | + * for video/audio size will be included in the GstCaps | ||
855 | + * | ||
856 | + * CodecID is primarily meant for compressed data GstCaps! | ||
857 | + * | ||
858 | + * encode is a special parameter. gstffmpegdec will say | ||
859 | + * FALSE, gstffmpegenc will say TRUE. The output caps | ||
860 | + * depends on this, in such a way that it will be very | ||
861 | + * specific, defined, fixed and correct caps for encoders, | ||
862 | + * yet very wide, "forgiving" caps for decoders. Example | ||
863 | + * for mp3: decode: audio/mpeg,mpegversion=1,layer=[1-3] | ||
864 | + * but encode: audio/mpeg,mpegversion=1,layer=3,bitrate=x, | ||
865 | + * rate=x,channels=x. | ||
866 | + */ | ||
867 | + | ||
868 | +GstCaps * | ||
869 | +gst_ffmpeg_codecid_to_caps (enum CodecID codec_id, | ||
870 | + AVCodecContext * context, gboolean encode) | ||
871 | +{ | ||
872 | + GstCaps *caps = NULL; | ||
873 | + gboolean buildcaps = FALSE; | ||
874 | + | ||
875 | + GST_LOG ("codec_id:%d, context:%p, encode:%d", codec_id, context, encode); | ||
876 | + | ||
877 | + switch (codec_id) { | ||
878 | + case CODEC_ID_MPEG1VIDEO: | ||
879 | + /* FIXME: bitrate */ | ||
880 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/mpeg", | ||
881 | + "mpegversion", G_TYPE_INT, 1, | ||
882 | + "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); | ||
883 | + break; | ||
884 | + | ||
885 | + case CODEC_ID_MPEG2VIDEO: | ||
886 | + if (encode) { | ||
887 | + /* FIXME: bitrate */ | ||
888 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/mpeg", | ||
889 | + "mpegversion", G_TYPE_INT, 2, | ||
890 | + "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); | ||
891 | + } else { | ||
892 | + /* decode both MPEG-1 and MPEG-2; width/height/fps are all in | ||
893 | + * the MPEG video stream headers, so may be omitted from caps. */ | ||
894 | + caps = gst_caps_new_simple ("video/mpeg", | ||
895 | + "mpegversion", GST_TYPE_INT_RANGE, 1, 2, | ||
896 | + "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); | ||
897 | + } | ||
898 | + break; | ||
899 | + | ||
900 | + case CODEC_ID_MPEG2VIDEO_XVMC: | ||
901 | + /* this is a special ID - don't need it in GStreamer, I think */ | ||
902 | + break; | ||
903 | + | ||
904 | + case CODEC_ID_H263: | ||
905 | + if (encode) { | ||
906 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-h263", | ||
907 | + "variant", G_TYPE_STRING, "itu", | ||
908 | + "h263version", G_TYPE_STRING, "h263", NULL); | ||
909 | + } else { | ||
910 | + /* don't pass codec_id, we can decode other variants with the H263 | ||
911 | + * decoder that don't have specific size requirements | ||
912 | + */ | ||
913 | + caps = gst_ff_vid_caps_new (context, CODEC_ID_NONE, "video/x-h263", | ||
914 | + "variant", G_TYPE_STRING, "itu", NULL); | ||
915 | + } | ||
916 | + break; | ||
917 | + | ||
918 | + case CODEC_ID_H263P: | ||
919 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-h263", | ||
920 | + "variant", G_TYPE_STRING, "itu", | ||
921 | + "h263version", G_TYPE_STRING, "h263p", NULL); | ||
922 | + if (encode && context) { | ||
923 | + | ||
924 | + gst_caps_set_simple (caps, | ||
925 | + "annex-f", G_TYPE_BOOLEAN, context->flags & CODEC_FLAG_4MV, | ||
926 | + "annex-j", G_TYPE_BOOLEAN, context->flags & CODEC_FLAG_LOOP_FILTER, | ||
927 | + "annex-i", G_TYPE_BOOLEAN, context->flags & CODEC_FLAG_AC_PRED, | ||
928 | + "annex-t", G_TYPE_BOOLEAN, context->flags & CODEC_FLAG_AC_PRED, | ||
929 | + NULL); | ||
930 | + } | ||
931 | + break; | ||
932 | + | ||
933 | + case CODEC_ID_H263I: | ||
934 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-intel-h263", | ||
935 | + "variant", G_TYPE_STRING, "intel", NULL); | ||
936 | + break; | ||
937 | + | ||
938 | + case CODEC_ID_H261: | ||
939 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-h261", NULL); | ||
940 | + break; | ||
941 | + | ||
942 | + case CODEC_ID_RV10: | ||
943 | + case CODEC_ID_RV20: | ||
944 | + case CODEC_ID_RV30: | ||
945 | + case CODEC_ID_RV40: | ||
946 | + { | ||
947 | + gint version; | ||
948 | + | ||
949 | + switch (codec_id) { | ||
950 | + case CODEC_ID_RV40: | ||
951 | + version = 4; | ||
952 | + break; | ||
953 | + case CODEC_ID_RV30: | ||
954 | + version = 3; | ||
955 | + break; | ||
956 | + case CODEC_ID_RV20: | ||
957 | + version = 2; | ||
958 | + break; | ||
959 | + default: | ||
960 | + version = 1; | ||
961 | + break; | ||
962 | + } | ||
963 | + | ||
964 | + /* FIXME: context->sub_id must be filled in during decoding */ | ||
965 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-pn-realvideo", | ||
966 | + "systemstream", G_TYPE_BOOLEAN, FALSE, | ||
967 | + "rmversion", G_TYPE_INT, version, NULL); | ||
968 | + if (context) { | ||
969 | + gst_caps_set_simple (caps, "format", G_TYPE_INT, context->sub_id, NULL); | ||
970 | + if (context->extradata_size >= 8) { | ||
971 | + gst_caps_set_simple (caps, | ||
972 | + "subformat", G_TYPE_INT, GST_READ_UINT32_BE (context->extradata), | ||
973 | + NULL); | ||
974 | + } | ||
975 | + } | ||
976 | + } | ||
977 | + break; | ||
978 | + | ||
979 | + case CODEC_ID_MP1: | ||
980 | + /* FIXME: bitrate */ | ||
981 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/mpeg", | ||
982 | + "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, 1, NULL); | ||
983 | + break; | ||
984 | + | ||
985 | + case CODEC_ID_MP2: | ||
986 | + /* FIXME: bitrate */ | ||
987 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/mpeg", | ||
988 | + "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, 2, NULL); | ||
989 | + break; | ||
990 | + | ||
991 | + case CODEC_ID_MP3: | ||
992 | + if (encode) { | ||
993 | + /* FIXME: bitrate */ | ||
994 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/mpeg", | ||
995 | + "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, 3, NULL); | ||
996 | + } else { | ||
997 | + /* Decodes MPEG-1 layer 1/2/3. Samplerate, channels et al are | ||
998 | + * in the MPEG audio header, so may be omitted from caps. */ | ||
999 | + caps = gst_caps_new_simple ("audio/mpeg", | ||
1000 | + "mpegversion", G_TYPE_INT, 1, | ||
1001 | + "layer", GST_TYPE_INT_RANGE, 1, 3, NULL); | ||
1002 | + } | ||
1003 | + break; | ||
1004 | + | ||
1005 | + case CODEC_ID_MUSEPACK7: | ||
1006 | + caps = | ||
1007 | + gst_ff_aud_caps_new (context, codec_id, | ||
1008 | + "audio/x-ffmpeg-parsed-musepack", "streamversion", G_TYPE_INT, 7, | ||
1009 | + NULL); | ||
1010 | + break; | ||
1011 | + | ||
1012 | + case CODEC_ID_MUSEPACK8: | ||
1013 | + caps = | ||
1014 | + gst_ff_aud_caps_new (context, codec_id, | ||
1015 | + "audio/x-ffmpeg-parsed-musepack", "streamversion", G_TYPE_INT, 8, | ||
1016 | + NULL); | ||
1017 | + break; | ||
1018 | + | ||
1019 | + case CODEC_ID_AC3: | ||
1020 | + /* FIXME: bitrate */ | ||
1021 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-ac3", NULL); | ||
1022 | + break; | ||
1023 | + | ||
1024 | + case CODEC_ID_EAC3: | ||
1025 | + /* FIXME: bitrate */ | ||
1026 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-eac3", NULL); | ||
1027 | + break; | ||
1028 | + | ||
1029 | + case CODEC_ID_TRUEHD: | ||
1030 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-true-hd", NULL); | ||
1031 | + break; | ||
1032 | + | ||
1033 | + case CODEC_ID_ATRAC1: | ||
1034 | + caps = | ||
1035 | + gst_ff_aud_caps_new (context, codec_id, "audio/x-vnd.sony.atrac1", | ||
1036 | + NULL); | ||
1037 | + break; | ||
1038 | + | ||
1039 | + case CODEC_ID_ATRAC3: | ||
1040 | + caps = | ||
1041 | + gst_ff_aud_caps_new (context, codec_id, "audio/x-vnd.sony.atrac3", | ||
1042 | + NULL); | ||
1043 | + break; | ||
1044 | + | ||
1045 | + case CODEC_ID_DTS: | ||
1046 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-dts", NULL); | ||
1047 | + break; | ||
1048 | + | ||
1049 | + case CODEC_ID_APE: | ||
1050 | + caps = | ||
1051 | + gst_ff_aud_caps_new (context, codec_id, "audio/x-ffmpeg-parsed-ape", | ||
1052 | + NULL); | ||
1053 | + if (context) { | ||
1054 | + gst_caps_set_simple (caps, | ||
1055 | + "depth", G_TYPE_INT, context->bits_per_coded_sample, NULL); | ||
1056 | + } | ||
1057 | + break; | ||
1058 | + | ||
1059 | + case CODEC_ID_MLP: | ||
1060 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-mlp", NULL); | ||
1061 | + break; | ||
1062 | + | ||
1063 | + case CODEC_ID_IMC: | ||
1064 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-imc", NULL); | ||
1065 | + break; | ||
1066 | + | ||
1067 | + /* MJPEG is normal JPEG, Motion-JPEG and Quicktime MJPEG-A. MJPEGB | ||
1068 | + * is Quicktime's MJPEG-B. LJPEG is lossless JPEG. I don't know what | ||
1069 | + * sp5x is, but it's apparently something JPEG... We don't separate | ||
1070 | + * between those in GStreamer. Should we (at least between MJPEG, | ||
1071 | + * MJPEG-B and sp5x decoding...)? */ | ||
1072 | + case CODEC_ID_MJPEG: | ||
1073 | + case CODEC_ID_LJPEG: | ||
1074 | + caps = gst_ff_vid_caps_new (context, codec_id, "image/jpeg", NULL); | ||
1075 | + break; | ||
1076 | + | ||
1077 | + case CODEC_ID_SP5X: | ||
1078 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/sp5x", NULL); | ||
1079 | + break; | ||
1080 | + | ||
1081 | + case CODEC_ID_MJPEGB: | ||
1082 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-mjpeg-b", NULL); | ||
1083 | + break; | ||
1084 | + | ||
1085 | + case CODEC_ID_MPEG4: | ||
1086 | + if (encode && context != NULL) { | ||
1087 | + /* I'm not exactly sure what ffmpeg outputs... ffmpeg itself uses | ||
1088 | + * the AVI fourcc 'DIVX', but 'mp4v' for Quicktime... */ | ||
1089 | + switch (context->codec_tag) { | ||
1090 | + case GST_MAKE_FOURCC ('D', 'I', 'V', 'X'): | ||
1091 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-divx", | ||
1092 | + "divxversion", G_TYPE_INT, 5, NULL); | ||
1093 | + break; | ||
1094 | + case GST_MAKE_FOURCC ('m', 'p', '4', 'v'): | ||
1095 | + default: | ||
1096 | + /* FIXME: bitrate */ | ||
1097 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/mpeg", | ||
1098 | + "systemstream", G_TYPE_BOOLEAN, FALSE, | ||
1099 | + "mpegversion", G_TYPE_INT, 4, NULL); | ||
1100 | + break; | ||
1101 | + } | ||
1102 | + } else { | ||
1103 | + /* The trick here is to separate xvid, divx, mpeg4, 3ivx et al */ | ||
1104 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/mpeg", | ||
1105 | + "mpegversion", G_TYPE_INT, 4, | ||
1106 | + "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); | ||
1107 | + if (encode) { | ||
1108 | + gst_caps_append (caps, gst_ff_vid_caps_new (context, codec_id, | ||
1109 | + "video/x-divx", "divxversion", G_TYPE_INT, 5, NULL)); | ||
1110 | + } else { | ||
1111 | + gst_caps_append (caps, gst_ff_vid_caps_new (context, codec_id, | ||
1112 | + "video/x-divx", "divxversion", GST_TYPE_INT_RANGE, 4, 5, | ||
1113 | + NULL)); | ||
1114 | + gst_caps_append (caps, gst_ff_vid_caps_new (context, codec_id, | ||
1115 | + "video/x-xvid", NULL)); | ||
1116 | + gst_caps_append (caps, gst_ff_vid_caps_new (context, codec_id, | ||
1117 | + "video/x-3ivx", NULL)); | ||
1118 | + } | ||
1119 | + } | ||
1120 | + break; | ||
1121 | + | ||
1122 | + case CODEC_ID_RAWVIDEO: | ||
1123 | + caps = | ||
1124 | + gst_ffmpeg_codectype_to_caps (AVMEDIA_TYPE_VIDEO, context, codec_id, | ||
1125 | + encode); | ||
1126 | + break; | ||
1127 | + | ||
1128 | + case CODEC_ID_MSMPEG4V1: | ||
1129 | + case CODEC_ID_MSMPEG4V2: | ||
1130 | + case CODEC_ID_MSMPEG4V3: | ||
1131 | + { | ||
1132 | + gint version = 41 + codec_id - CODEC_ID_MSMPEG4V1; | ||
1133 | + | ||
1134 | + /* encode-FIXME: bitrate */ | ||
1135 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-msmpeg", | ||
1136 | + "msmpegversion", G_TYPE_INT, version, NULL); | ||
1137 | + if (!encode && codec_id == CODEC_ID_MSMPEG4V3) { | ||
1138 | + gst_caps_append (caps, gst_ff_vid_caps_new (context, codec_id, | ||
1139 | + "video/x-divx", "divxversion", G_TYPE_INT, 3, NULL)); | ||
1140 | + } | ||
1141 | + } | ||
1142 | + break; | ||
1143 | + | ||
1144 | + case CODEC_ID_WMV1: | ||
1145 | + case CODEC_ID_WMV2: | ||
1146 | + { | ||
1147 | + gint version = (codec_id == CODEC_ID_WMV1) ? 1 : 2; | ||
1148 | + | ||
1149 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-wmv", | ||
1150 | + "wmvversion", G_TYPE_INT, version, NULL); | ||
1151 | + } | ||
1152 | + break; | ||
1153 | + | ||
1154 | + case CODEC_ID_FLV1: | ||
1155 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-flash-video", | ||
1156 | + "flvversion", G_TYPE_INT, 1, NULL); | ||
1157 | + break; | ||
1158 | + | ||
1159 | + case CODEC_ID_SVQ1: | ||
1160 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-svq", | ||
1161 | + "svqversion", G_TYPE_INT, 1, NULL); | ||
1162 | + break; | ||
1163 | + | ||
1164 | + case CODEC_ID_SVQ3: | ||
1165 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-svq", | ||
1166 | + "svqversion", G_TYPE_INT, 3, NULL); | ||
1167 | + break; | ||
1168 | + | ||
1169 | + case CODEC_ID_DVAUDIO: | ||
1170 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-dv", NULL); | ||
1171 | + break; | ||
1172 | + | ||
1173 | + case CODEC_ID_DVVIDEO: | ||
1174 | + { | ||
1175 | + if (encode && context) { | ||
1176 | + guint32 fourcc; | ||
1177 | + | ||
1178 | + switch (context->pix_fmt) { | ||
1179 | + case PIX_FMT_YUYV422: | ||
1180 | + fourcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'); | ||
1181 | + break; | ||
1182 | + case PIX_FMT_YUV420P: | ||
1183 | + fourcc = GST_MAKE_FOURCC ('I', '4', '2', '0'); | ||
1184 | + break; | ||
1185 | + case PIX_FMT_YUVA420P: | ||
1186 | + fourcc = GST_MAKE_FOURCC ('A', '4', '2', '0'); | ||
1187 | + break; | ||
1188 | + case PIX_FMT_YUV411P: | ||
1189 | + fourcc = GST_MAKE_FOURCC ('Y', '4', '1', 'B'); | ||
1190 | + break; | ||
1191 | + case PIX_FMT_YUV422P: | ||
1192 | + fourcc = GST_MAKE_FOURCC ('Y', '4', '2', 'B'); | ||
1193 | + break; | ||
1194 | + case PIX_FMT_YUV410P: | ||
1195 | + fourcc = GST_MAKE_FOURCC ('Y', 'U', 'V', '9'); | ||
1196 | + break; | ||
1197 | + default: | ||
1198 | + GST_WARNING | ||
1199 | + ("Couldnt' find fourcc for pixfmt %d, defaulting to I420", | ||
1200 | + context->pix_fmt); | ||
1201 | + fourcc = GST_MAKE_FOURCC ('I', '4', '2', '0'); | ||
1202 | + break; | ||
1203 | + } | ||
1204 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-dv", | ||
1205 | + "systemstream", G_TYPE_BOOLEAN, FALSE, | ||
1206 | + "format", GST_TYPE_FOURCC, fourcc, NULL); | ||
1207 | + } else { | ||
1208 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-dv", | ||
1209 | + "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); | ||
1210 | + } | ||
1211 | + } | ||
1212 | + break; | ||
1213 | + | ||
1214 | + case CODEC_ID_WMAV1: | ||
1215 | + case CODEC_ID_WMAV2: | ||
1216 | + { | ||
1217 | + gint version = (codec_id == CODEC_ID_WMAV1) ? 1 : 2; | ||
1218 | + | ||
1219 | + if (context) { | ||
1220 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-wma", | ||
1221 | + "wmaversion", G_TYPE_INT, version, | ||
1222 | + "block_align", G_TYPE_INT, context->block_align, | ||
1223 | + "bitrate", G_TYPE_INT, context->bit_rate, NULL); | ||
1224 | + } else { | ||
1225 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-wma", | ||
1226 | + "wmaversion", G_TYPE_INT, version, | ||
1227 | + "block_align", GST_TYPE_INT_RANGE, 0, G_MAXINT, | ||
1228 | + "bitrate", GST_TYPE_INT_RANGE, 0, G_MAXINT, NULL); | ||
1229 | + } | ||
1230 | + } | ||
1231 | + break; | ||
1232 | + case CODEC_ID_WMAPRO: | ||
1233 | + { | ||
1234 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-wma", | ||
1235 | + "wmaversion", G_TYPE_INT, 3, NULL); | ||
1236 | + break; | ||
1237 | + } | ||
1238 | + | ||
1239 | + case CODEC_ID_WMAVOICE: | ||
1240 | + { | ||
1241 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-wms", NULL); | ||
1242 | + break; | ||
1243 | + } | ||
1244 | + | ||
1245 | + case CODEC_ID_MACE3: | ||
1246 | + case CODEC_ID_MACE6: | ||
1247 | + { | ||
1248 | + gint version = (codec_id == CODEC_ID_MACE3) ? 3 : 6; | ||
1249 | + | ||
1250 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-mace", | ||
1251 | + "maceversion", G_TYPE_INT, version, NULL); | ||
1252 | + } | ||
1253 | + break; | ||
1254 | + | ||
1255 | + case CODEC_ID_HUFFYUV: | ||
1256 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-huffyuv", NULL); | ||
1257 | + if (context) { | ||
1258 | + gst_caps_set_simple (caps, | ||
1259 | + "bpp", G_TYPE_INT, context->bits_per_coded_sample, NULL); | ||
1260 | + } | ||
1261 | + break; | ||
1262 | + | ||
1263 | + case CODEC_ID_CYUV: | ||
1264 | + caps = | ||
1265 | + gst_ff_vid_caps_new (context, codec_id, "video/x-compressed-yuv", | ||
1266 | + NULL); | ||
1267 | + break; | ||
1268 | + | ||
1269 | + case CODEC_ID_H264: | ||
1270 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-h264", NULL); | ||
1271 | + break; | ||
1272 | + | ||
1273 | + case CODEC_ID_INDEO5: | ||
1274 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-indeo", | ||
1275 | + "indeoversion", G_TYPE_INT, 5, NULL); | ||
1276 | + break; | ||
1277 | + | ||
1278 | + case CODEC_ID_INDEO3: | ||
1279 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-indeo", | ||
1280 | + "indeoversion", G_TYPE_INT, 3, NULL); | ||
1281 | + break; | ||
1282 | + | ||
1283 | + case CODEC_ID_INDEO2: | ||
1284 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-indeo", | ||
1285 | + "indeoversion", G_TYPE_INT, 2, NULL); | ||
1286 | + break; | ||
1287 | + | ||
1288 | + case CODEC_ID_FLASHSV: | ||
1289 | + caps = | ||
1290 | + gst_ff_vid_caps_new (context, codec_id, "video/x-flash-screen", NULL); | ||
1291 | + break; | ||
1292 | + | ||
1293 | + case CODEC_ID_VP3: | ||
1294 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-vp3", NULL); | ||
1295 | + break; | ||
1296 | + | ||
1297 | + case CODEC_ID_VP5: | ||
1298 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-vp5", NULL); | ||
1299 | + break; | ||
1300 | + | ||
1301 | + case CODEC_ID_VP6: | ||
1302 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-vp6", NULL); | ||
1303 | + break; | ||
1304 | + | ||
1305 | + case CODEC_ID_VP6F: | ||
1306 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-vp6-flash", NULL); | ||
1307 | + break; | ||
1308 | + | ||
1309 | + case CODEC_ID_VP6A: | ||
1310 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-vp6-alpha", NULL); | ||
1311 | + break; | ||
1312 | + | ||
1313 | + case CODEC_ID_VP8: | ||
1314 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-vp8", NULL); | ||
1315 | + break; | ||
1316 | + | ||
1317 | + case CODEC_ID_THEORA: | ||
1318 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-theora", NULL); | ||
1319 | + break; | ||
1320 | + | ||
1321 | + case CODEC_ID_AAC: | ||
1322 | + { | ||
1323 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/mpeg", NULL); | ||
1324 | + | ||
1325 | + if (!encode) { | ||
1326 | + GValue arr = { 0, }; | ||
1327 | + GValue item = { 0, }; | ||
1328 | + | ||
1329 | + g_value_init (&arr, GST_TYPE_LIST); | ||
1330 | + g_value_init (&item, G_TYPE_INT); | ||
1331 | + g_value_set_int (&item, 2); | ||
1332 | + gst_value_list_append_value (&arr, &item); | ||
1333 | + g_value_set_int (&item, 4); | ||
1334 | + gst_value_list_append_value (&arr, &item); | ||
1335 | + g_value_unset (&item); | ||
1336 | + | ||
1337 | + gst_caps_set_value (caps, "mpegversion", &arr); | ||
1338 | + g_value_unset (&arr); | ||
1339 | + | ||
1340 | + g_value_init (&arr, GST_TYPE_LIST); | ||
1341 | + g_value_init (&item, G_TYPE_STRING); | ||
1342 | + g_value_set_string (&item, "raw"); | ||
1343 | + gst_value_list_append_value (&arr, &item); | ||
1344 | + g_value_set_string (&item, "adts"); | ||
1345 | + gst_value_list_append_value (&arr, &item); | ||
1346 | + g_value_set_string (&item, "adif"); | ||
1347 | + gst_value_list_append_value (&arr, &item); | ||
1348 | + g_value_unset (&item); | ||
1349 | + | ||
1350 | + gst_caps_set_value (caps, "stream-format", &arr); | ||
1351 | + g_value_unset (&arr); | ||
1352 | + } else { | ||
1353 | + gst_caps_set_simple (caps, "mpegversion", G_TYPE_INT, 4, | ||
1354 | + "stream-format", G_TYPE_STRING, "raw", | ||
1355 | + "base-profile", G_TYPE_STRING, "lc", NULL); | ||
1356 | + | ||
1357 | + if (context && context->extradata_size > 0) | ||
1358 | + gst_codec_utils_aac_caps_set_level_and_profile (caps, | ||
1359 | + context->extradata, context->extradata_size); | ||
1360 | + } | ||
1361 | + | ||
1362 | + break; | ||
1363 | + } | ||
1364 | + case CODEC_ID_AAC_LATM: /* LATM/LOAS AAC syntax */ | ||
1365 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/mpeg", | ||
1366 | + "mpegversion", G_TYPE_INT, 4, "stream-format", G_TYPE_STRING, "loas", | ||
1367 | + NULL); | ||
1368 | + break; | ||
1369 | + | ||
1370 | + case CODEC_ID_ASV1: | ||
1371 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-asus", | ||
1372 | + "asusversion", G_TYPE_INT, 1, NULL); | ||
1373 | + break; | ||
1374 | + case CODEC_ID_ASV2: | ||
1375 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-asus", | ||
1376 | + "asusversion", G_TYPE_INT, 2, NULL); | ||
1377 | + break; | ||
1378 | + | ||
1379 | + case CODEC_ID_FFV1: | ||
1380 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-ffv", | ||
1381 | + "ffvversion", G_TYPE_INT, 1, NULL); | ||
1382 | + break; | ||
1383 | + | ||
1384 | + case CODEC_ID_4XM: | ||
1385 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-4xm", NULL); | ||
1386 | + break; | ||
1387 | + | ||
1388 | + case CODEC_ID_XAN_WC3: | ||
1389 | + case CODEC_ID_XAN_WC4: | ||
1390 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-xan", | ||
1391 | + "wcversion", G_TYPE_INT, 3 - CODEC_ID_XAN_WC3 + codec_id, NULL); | ||
1392 | + break; | ||
1393 | + | ||
1394 | + case CODEC_ID_CLJR: | ||
1395 | + caps = | ||
1396 | + gst_ff_vid_caps_new (context, codec_id, | ||
1397 | + "video/x-cirrus-logic-accupak", NULL); | ||
1398 | + break; | ||
1399 | + | ||
1400 | + case CODEC_ID_FRAPS: | ||
1401 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-fraps", NULL); | ||
1402 | + break; | ||
1403 | + | ||
1404 | + case CODEC_ID_MDEC: | ||
1405 | + case CODEC_ID_ROQ: | ||
1406 | + case CODEC_ID_INTERPLAY_VIDEO: | ||
1407 | + buildcaps = TRUE; | ||
1408 | + break; | ||
1409 | + | ||
1410 | + case CODEC_ID_VCR1: | ||
1411 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-ati-vcr", | ||
1412 | + "vcrversion", G_TYPE_INT, 1, NULL); | ||
1413 | + break; | ||
1414 | + | ||
1415 | + case CODEC_ID_RPZA: | ||
1416 | + caps = | ||
1417 | + gst_ff_vid_caps_new (context, codec_id, "video/x-apple-video", NULL); | ||
1418 | + break; | ||
1419 | + | ||
1420 | + case CODEC_ID_CINEPAK: | ||
1421 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-cinepak", NULL); | ||
1422 | + break; | ||
1423 | + | ||
1424 | + /* WS_VQA belongs here (order) */ | ||
1425 | + | ||
1426 | + case CODEC_ID_MSRLE: | ||
1427 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-rle", | ||
1428 | + "layout", G_TYPE_STRING, "microsoft", NULL); | ||
1429 | + if (context) { | ||
1430 | + gst_caps_set_simple (caps, | ||
1431 | + "depth", G_TYPE_INT, (gint) context->bits_per_coded_sample, NULL); | ||
1432 | + } else { | ||
1433 | + gst_caps_set_simple (caps, "depth", GST_TYPE_INT_RANGE, 1, 64, NULL); | ||
1434 | + } | ||
1435 | + break; | ||
1436 | + | ||
1437 | + case CODEC_ID_QTRLE: | ||
1438 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-rle", | ||
1439 | + "layout", G_TYPE_STRING, "quicktime", NULL); | ||
1440 | + if (context) { | ||
1441 | + gst_caps_set_simple (caps, | ||
1442 | + "depth", G_TYPE_INT, (gint) context->bits_per_coded_sample, NULL); | ||
1443 | + } else { | ||
1444 | + gst_caps_set_simple (caps, "depth", GST_TYPE_INT_RANGE, 1, 64, NULL); | ||
1445 | + } | ||
1446 | + break; | ||
1447 | + | ||
1448 | + case CODEC_ID_MSVIDEO1: | ||
1449 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-msvideocodec", | ||
1450 | + "msvideoversion", G_TYPE_INT, 1, NULL); | ||
1451 | + break; | ||
1452 | + | ||
1453 | + case CODEC_ID_WMV3: | ||
1454 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-wmv", | ||
1455 | + "wmvversion", G_TYPE_INT, 3, NULL); | ||
1456 | + break; | ||
1457 | + case CODEC_ID_VC1: | ||
1458 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-wmv", | ||
1459 | + "wmvversion", G_TYPE_INT, 3, "format", GST_TYPE_FOURCC, | ||
1460 | + GST_MAKE_FOURCC ('W', 'V', 'C', '1'), NULL); | ||
1461 | + break; | ||
1462 | + case CODEC_ID_QDM2: | ||
1463 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-qdm2", NULL); | ||
1464 | + break; | ||
1465 | + | ||
1466 | + case CODEC_ID_MSZH: | ||
1467 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-mszh", NULL); | ||
1468 | + break; | ||
1469 | + | ||
1470 | + case CODEC_ID_ZLIB: | ||
1471 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-zlib", NULL); | ||
1472 | + break; | ||
1473 | + | ||
1474 | + case CODEC_ID_TRUEMOTION1: | ||
1475 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-truemotion", | ||
1476 | + "trueversion", G_TYPE_INT, 1, NULL); | ||
1477 | + break; | ||
1478 | + case CODEC_ID_TRUEMOTION2: | ||
1479 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-truemotion", | ||
1480 | + "trueversion", G_TYPE_INT, 2, NULL); | ||
1481 | + break; | ||
1482 | + | ||
1483 | + case CODEC_ID_ULTI: | ||
1484 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-ultimotion", | ||
1485 | + NULL); | ||
1486 | + break; | ||
1487 | + | ||
1488 | + case CODEC_ID_TSCC: | ||
1489 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-camtasia", NULL); | ||
1490 | + if (context) { | ||
1491 | + gst_caps_set_simple (caps, | ||
1492 | + "depth", G_TYPE_INT, (gint) context->bits_per_coded_sample, NULL); | ||
1493 | + } else { | ||
1494 | + gst_caps_set_simple (caps, "depth", GST_TYPE_INT_RANGE, 8, 32, NULL); | ||
1495 | + } | ||
1496 | + break; | ||
1497 | + | ||
1498 | + case CODEC_ID_KMVC: | ||
1499 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-kmvc", NULL); | ||
1500 | + break; | ||
1501 | + | ||
1502 | + case CODEC_ID_NUV: | ||
1503 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-nuv", NULL); | ||
1504 | + break; | ||
1505 | + | ||
1506 | + case CODEC_ID_GIF: | ||
1507 | + caps = gst_ff_vid_caps_new (context, codec_id, "image/gif", NULL); | ||
1508 | + break; | ||
1509 | + | ||
1510 | + case CODEC_ID_PNG: | ||
1511 | + caps = gst_ff_vid_caps_new (context, codec_id, "image/png", NULL); | ||
1512 | + break; | ||
1513 | + | ||
1514 | + case CODEC_ID_PPM: | ||
1515 | + caps = gst_ff_vid_caps_new (context, codec_id, "image/ppm", NULL); | ||
1516 | + break; | ||
1517 | + | ||
1518 | + case CODEC_ID_PBM: | ||
1519 | + caps = gst_ff_vid_caps_new (context, codec_id, "image/pbm", NULL); | ||
1520 | + break; | ||
1521 | + | ||
1522 | + case CODEC_ID_PAM: | ||
1523 | + caps = | ||
1524 | + gst_ff_vid_caps_new (context, codec_id, "image/x-portable-anymap", | ||
1525 | + NULL); | ||
1526 | + break; | ||
1527 | + | ||
1528 | + case CODEC_ID_PGM: | ||
1529 | + caps = | ||
1530 | + gst_ff_vid_caps_new (context, codec_id, "image/x-portable-graymap", | ||
1531 | + NULL); | ||
1532 | + break; | ||
1533 | + | ||
1534 | + case CODEC_ID_PCX: | ||
1535 | + caps = gst_ff_vid_caps_new (context, codec_id, "image/x-pcx", NULL); | ||
1536 | + break; | ||
1537 | + | ||
1538 | + case CODEC_ID_SGI: | ||
1539 | + caps = gst_ff_vid_caps_new (context, codec_id, "image/x-sgi", NULL); | ||
1540 | + break; | ||
1541 | + | ||
1542 | + case CODEC_ID_TARGA: | ||
1543 | + caps = gst_ff_vid_caps_new (context, codec_id, "image/x-tga", NULL); | ||
1544 | + break; | ||
1545 | + | ||
1546 | + case CODEC_ID_TIFF: | ||
1547 | + caps = gst_ff_vid_caps_new (context, codec_id, "image/tiff", NULL); | ||
1548 | + break; | ||
1549 | + | ||
1550 | + case CODEC_ID_SUNRAST: | ||
1551 | + caps = | ||
1552 | + gst_ff_vid_caps_new (context, codec_id, "image/x-sun-raster", NULL); | ||
1553 | + break; | ||
1554 | + | ||
1555 | + case CODEC_ID_SMC: | ||
1556 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-smc", NULL); | ||
1557 | + break; | ||
1558 | + | ||
1559 | + case CODEC_ID_QDRAW: | ||
1560 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-qdrw", NULL); | ||
1561 | + break; | ||
1562 | + | ||
1563 | + case CODEC_ID_DNXHD: | ||
1564 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-dnxhd", NULL); | ||
1565 | + break; | ||
1566 | + | ||
1567 | + case CODEC_ID_MIMIC: | ||
1568 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-mimic", NULL); | ||
1569 | + break; | ||
1570 | + | ||
1571 | + case CODEC_ID_VMNC: | ||
1572 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-vmnc", NULL); | ||
1573 | + break; | ||
1574 | + | ||
1575 | + case CODEC_ID_TRUESPEECH: | ||
1576 | + caps = | ||
1577 | + gst_ff_aud_caps_new (context, codec_id, "audio/x-truespeech", NULL); | ||
1578 | + break; | ||
1579 | + | ||
1580 | + case CODEC_ID_QCELP: | ||
1581 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/qcelp", NULL); | ||
1582 | + break; | ||
1583 | + | ||
1584 | + case CODEC_ID_AMV: | ||
1585 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-amv", NULL); | ||
1586 | + break; | ||
1587 | + | ||
1588 | + case CODEC_ID_AASC: | ||
1589 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-aasc", NULL); | ||
1590 | + break; | ||
1591 | + | ||
1592 | + case CODEC_ID_LOCO: | ||
1593 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-loco", NULL); | ||
1594 | + break; | ||
1595 | + | ||
1596 | + case CODEC_ID_ZMBV: | ||
1597 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-zmbv", NULL); | ||
1598 | + break; | ||
1599 | + | ||
1600 | + case CODEC_ID_LAGARITH: | ||
1601 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-lagarith", NULL); | ||
1602 | + break; | ||
1603 | + | ||
1604 | + case CODEC_ID_CSCD: | ||
1605 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-camstudio", NULL); | ||
1606 | + if (context) { | ||
1607 | + gst_caps_set_simple (caps, | ||
1608 | + "depth", G_TYPE_INT, (gint) context->bits_per_coded_sample, NULL); | ||
1609 | + } else { | ||
1610 | + gst_caps_set_simple (caps, "depth", GST_TYPE_INT_RANGE, 8, 32, NULL); | ||
1611 | + } | ||
1612 | + break; | ||
1613 | + | ||
1614 | + case CODEC_ID_WS_VQA: | ||
1615 | + case CODEC_ID_IDCIN: | ||
1616 | + case CODEC_ID_8BPS: | ||
1617 | + case CODEC_ID_FLIC: | ||
1618 | + case CODEC_ID_VMDVIDEO: | ||
1619 | + case CODEC_ID_VMDAUDIO: | ||
1620 | + case CODEC_ID_SNOW: | ||
1621 | + case CODEC_ID_VIXL: | ||
1622 | + case CODEC_ID_QPEG: | ||
1623 | + case CODEC_ID_PGMYUV: | ||
1624 | + case CODEC_ID_FFVHUFF: | ||
1625 | + case CODEC_ID_WNV1: | ||
1626 | + case CODEC_ID_MP3ADU: | ||
1627 | + case CODEC_ID_MP3ON4: | ||
1628 | + case CODEC_ID_WESTWOOD_SND1: | ||
1629 | + case CODEC_ID_MMVIDEO: | ||
1630 | + case CODEC_ID_AVS: | ||
1631 | + case CODEC_ID_CAVS: | ||
1632 | + buildcaps = TRUE; | ||
1633 | + break; | ||
1634 | + | ||
1635 | + /* weird quasi-codecs for the demuxers only */ | ||
1636 | + case CODEC_ID_PCM_S16LE: | ||
1637 | + case CODEC_ID_PCM_S16BE: | ||
1638 | + case CODEC_ID_PCM_U16LE: | ||
1639 | + case CODEC_ID_PCM_U16BE: | ||
1640 | + case CODEC_ID_PCM_S8: | ||
1641 | + case CODEC_ID_PCM_U8: | ||
1642 | + { | ||
1643 | + gint width = 0, depth = 0, endianness = 0; | ||
1644 | + gboolean signedness = FALSE; /* set per codec in the switch below */ | ||
1645 | + | ||
1646 | + switch (codec_id) { | ||
1647 | + case CODEC_ID_PCM_S16LE: | ||
1648 | + width = 16; | ||
1649 | + depth = 16; | ||
1650 | + endianness = G_LITTLE_ENDIAN; | ||
1651 | + signedness = TRUE; | ||
1652 | + break; | ||
1653 | + case CODEC_ID_PCM_S16BE: | ||
1654 | + width = 16; | ||
1655 | + depth = 16; | ||
1656 | + endianness = G_BIG_ENDIAN; | ||
1657 | + signedness = TRUE; | ||
1658 | + break; | ||
1659 | + case CODEC_ID_PCM_U16LE: | ||
1660 | + width = 16; | ||
1661 | + depth = 16; | ||
1662 | + endianness = G_LITTLE_ENDIAN; | ||
1663 | + signedness = FALSE; | ||
1664 | + break; | ||
1665 | + case CODEC_ID_PCM_U16BE: | ||
1666 | + width = 16; | ||
1667 | + depth = 16; | ||
1668 | + endianness = G_BIG_ENDIAN; | ||
1669 | + signedness = FALSE; | ||
1670 | + break; | ||
1671 | + case CODEC_ID_PCM_S8: | ||
1672 | + width = 8; | ||
1673 | + depth = 8; | ||
1674 | + endianness = G_BYTE_ORDER; | ||
1675 | + signedness = TRUE; | ||
1676 | + break; | ||
1677 | + case CODEC_ID_PCM_U8: | ||
1678 | + width = 8; | ||
1679 | + depth = 8; | ||
1680 | + endianness = G_BYTE_ORDER; | ||
1681 | + signedness = FALSE; | ||
1682 | + break; | ||
1683 | + default: | ||
1684 | + g_assert (0); /* don't worry, we never get here */ | ||
1685 | + break; | ||
1686 | + } | ||
1687 | + | ||
1688 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-raw-int", | ||
1689 | + "width", G_TYPE_INT, width, | ||
1690 | + "depth", G_TYPE_INT, depth, | ||
1691 | + "endianness", G_TYPE_INT, endianness, | ||
1692 | + "signed", G_TYPE_BOOLEAN, signedness, NULL); | ||
1693 | + } | ||
1694 | + break; | ||
1695 | + | ||
1696 | + case CODEC_ID_PCM_MULAW: | ||
1697 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-mulaw", NULL); | ||
1698 | + break; | ||
1699 | + | ||
1700 | + case CODEC_ID_PCM_ALAW: | ||
1701 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-alaw", NULL); | ||
1702 | + break; | ||
1703 | + | ||
1704 | + case CODEC_ID_ADPCM_G722: | ||
1705 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/G722", NULL); | ||
1706 | + if (context) | ||
1707 | + gst_caps_set_simple (caps, | ||
1708 | + "block_align", G_TYPE_INT, context->block_align, | ||
1709 | + "bitrate", G_TYPE_INT, context->bit_rate, NULL); | ||
1710 | + break; | ||
1711 | + | ||
1712 | + case CODEC_ID_ADPCM_G726: | ||
1713 | + { | ||
1714 | + /* the G726 decoder can also handle G721 */ | ||
1715 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-adpcm", | ||
1716 | + "layout", G_TYPE_STRING, "g726", NULL); | ||
1717 | + if (context) | ||
1718 | + gst_caps_set_simple (caps, | ||
1719 | + "block_align", G_TYPE_INT, context->block_align, | ||
1720 | + "bitrate", G_TYPE_INT, context->bit_rate, NULL); | ||
1721 | + | ||
1722 | + if (!encode) { | ||
1723 | + gst_caps_append (caps, gst_caps_new_simple ("audio/x-adpcm", | ||
1724 | + "layout", G_TYPE_STRING, "g721", | ||
1725 | + "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, 8000, NULL)); | ||
1726 | + } | ||
1727 | + break; | ||
1728 | + } | ||
1729 | + case CODEC_ID_ADPCM_IMA_QT: | ||
1730 | + case CODEC_ID_ADPCM_IMA_WAV: | ||
1731 | + case CODEC_ID_ADPCM_IMA_DK3: | ||
1732 | + case CODEC_ID_ADPCM_IMA_DK4: | ||
1733 | + case CODEC_ID_ADPCM_IMA_WS: | ||
1734 | + case CODEC_ID_ADPCM_IMA_SMJPEG: | ||
1735 | + case CODEC_ID_ADPCM_IMA_AMV: | ||
1736 | + case CODEC_ID_ADPCM_IMA_ISS: | ||
1737 | + case CODEC_ID_ADPCM_IMA_EA_EACS: | ||
1738 | + case CODEC_ID_ADPCM_IMA_EA_SEAD: | ||
1739 | + case CODEC_ID_ADPCM_MS: | ||
1740 | + case CODEC_ID_ADPCM_4XM: | ||
1741 | + case CODEC_ID_ADPCM_XA: | ||
1742 | + case CODEC_ID_ADPCM_ADX: | ||
1743 | + case CODEC_ID_ADPCM_EA: | ||
1744 | + case CODEC_ID_ADPCM_CT: | ||
1745 | + case CODEC_ID_ADPCM_SWF: | ||
1746 | + case CODEC_ID_ADPCM_YAMAHA: | ||
1747 | + case CODEC_ID_ADPCM_SBPRO_2: | ||
1748 | + case CODEC_ID_ADPCM_SBPRO_3: | ||
1749 | + case CODEC_ID_ADPCM_SBPRO_4: | ||
1750 | + case CODEC_ID_ADPCM_EA_R1: | ||
1751 | + case CODEC_ID_ADPCM_EA_R2: | ||
1752 | + case CODEC_ID_ADPCM_EA_R3: | ||
1753 | + case CODEC_ID_ADPCM_EA_MAXIS_XA: | ||
1754 | + case CODEC_ID_ADPCM_EA_XAS: | ||
1755 | + case CODEC_ID_ADPCM_THP: | ||
1756 | + { | ||
1757 | + const gchar *layout = NULL; | ||
1758 | + | ||
1759 | + switch (codec_id) { | ||
1760 | + case CODEC_ID_ADPCM_IMA_QT: | ||
1761 | + layout = "quicktime"; | ||
1762 | + break; | ||
1763 | + case CODEC_ID_ADPCM_IMA_WAV: | ||
1764 | + layout = "dvi"; | ||
1765 | + break; | ||
1766 | + case CODEC_ID_ADPCM_IMA_DK3: | ||
1767 | + layout = "dk3"; | ||
1768 | + break; | ||
1769 | + case CODEC_ID_ADPCM_IMA_DK4: | ||
1770 | + layout = "dk4"; | ||
1771 | + break; | ||
1772 | + case CODEC_ID_ADPCM_IMA_WS: | ||
1773 | + layout = "westwood"; | ||
1774 | + break; | ||
1775 | + case CODEC_ID_ADPCM_IMA_SMJPEG: | ||
1776 | + layout = "smjpeg"; | ||
1777 | + break; | ||
1778 | + case CODEC_ID_ADPCM_IMA_AMV: | ||
1779 | + layout = "amv"; | ||
1780 | + break; | ||
1781 | + case CODEC_ID_ADPCM_IMA_ISS: | ||
1782 | + layout = "iss"; | ||
1783 | + break; | ||
1784 | + case CODEC_ID_ADPCM_IMA_EA_EACS: | ||
1785 | + layout = "ea-eacs"; | ||
1786 | + break; | ||
1787 | + case CODEC_ID_ADPCM_IMA_EA_SEAD: | ||
1788 | + layout = "ea-sead"; | ||
1789 | + break; | ||
1790 | + case CODEC_ID_ADPCM_MS: | ||
1791 | + layout = "microsoft"; | ||
1792 | + break; | ||
1793 | + case CODEC_ID_ADPCM_4XM: | ||
1794 | + layout = "4xm"; | ||
1795 | + break; | ||
1796 | + case CODEC_ID_ADPCM_XA: | ||
1797 | + layout = "xa"; | ||
1798 | + break; | ||
1799 | + case CODEC_ID_ADPCM_ADX: | ||
1800 | + layout = "adx"; | ||
1801 | + break; | ||
1802 | + case CODEC_ID_ADPCM_EA: | ||
1803 | + layout = "ea"; | ||
1804 | + break; | ||
1805 | + case CODEC_ID_ADPCM_CT: | ||
1806 | + layout = "ct"; | ||
1807 | + break; | ||
1808 | + case CODEC_ID_ADPCM_SWF: | ||
1809 | + layout = "swf"; | ||
1810 | + break; | ||
1811 | + case CODEC_ID_ADPCM_YAMAHA: | ||
1812 | + layout = "yamaha"; | ||
1813 | + break; | ||
1814 | + case CODEC_ID_ADPCM_SBPRO_2: | ||
1815 | + layout = "sbpro2"; | ||
1816 | + break; | ||
1817 | + case CODEC_ID_ADPCM_SBPRO_3: | ||
1818 | + layout = "sbpro3"; | ||
1819 | + break; | ||
1820 | + case CODEC_ID_ADPCM_SBPRO_4: | ||
1821 | + layout = "sbpro4"; | ||
1822 | + break; | ||
1823 | + case CODEC_ID_ADPCM_EA_R1: | ||
1824 | + layout = "ea-r1"; | ||
1825 | + break; | ||
1826 | + case CODEC_ID_ADPCM_EA_R2: | ||
1827 | + layout = "ea-r2"; | ||
1828 | + break; | ||
1829 | + case CODEC_ID_ADPCM_EA_R3: | ||
1830 | + layout = "ea-r3"; | ||
1831 | + break; | ||
1832 | + case CODEC_ID_ADPCM_EA_MAXIS_XA: | ||
1833 | + layout = "ea-maxis-xa"; | ||
1834 | + break; | ||
1835 | + case CODEC_ID_ADPCM_EA_XAS: | ||
1836 | + layout = "ea-xas"; | ||
1837 | + break; | ||
1838 | + case CODEC_ID_ADPCM_THP: | ||
1839 | + layout = "thp"; | ||
1840 | + break; | ||
1841 | + default: | ||
1842 | + g_assert (0); /* don't worry, we never get here */ | ||
1843 | + break; | ||
1844 | + } | ||
1845 | + | ||
1846 | + /* FIXME: someone please check whether we need additional properties | ||
1847 | + * in this caps definition. */ | ||
1848 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-adpcm", | ||
1849 | + "layout", G_TYPE_STRING, layout, NULL); | ||
1850 | + if (context) | ||
1851 | + gst_caps_set_simple (caps, | ||
1852 | + "block_align", G_TYPE_INT, context->block_align, | ||
1853 | + "bitrate", G_TYPE_INT, context->bit_rate, NULL); | ||
1854 | + } | ||
1855 | + break; | ||
1856 | + | ||
1857 | + case CODEC_ID_AMR_NB: | ||
1858 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/AMR", NULL); | ||
1859 | + break; | ||
1860 | + | ||
1861 | + case CODEC_ID_AMR_WB: | ||
1862 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/AMR-WB", NULL); | ||
1863 | + break; | ||
1864 | + | ||
1865 | + case CODEC_ID_GSM: | ||
1866 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-gsm", NULL); | ||
1867 | + break; | ||
1868 | + | ||
1869 | + case CODEC_ID_GSM_MS: | ||
1870 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/ms-gsm", NULL); | ||
1871 | + break; | ||
1872 | + | ||
1873 | + case CODEC_ID_NELLYMOSER: | ||
1874 | + caps = | ||
1875 | + gst_ff_aud_caps_new (context, codec_id, "audio/x-nellymoser", NULL); | ||
1876 | + break; | ||
1877 | + | ||
1878 | + case CODEC_ID_SIPR: | ||
1879 | + { | ||
1880 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-sipro", NULL); | ||
1881 | + if (context) { | ||
1882 | + gst_caps_set_simple (caps, | ||
1883 | + "leaf_size", G_TYPE_INT, context->block_align, | ||
1884 | + "bitrate", G_TYPE_INT, context->bit_rate, NULL); | ||
1885 | + } | ||
1886 | + } | ||
1887 | + break; | ||
1888 | + | ||
1889 | + case CODEC_ID_RA_144: | ||
1890 | + case CODEC_ID_RA_288: | ||
1891 | + case CODEC_ID_COOK: | ||
1892 | + { | ||
1893 | + gint version = 0; | ||
1894 | + | ||
1895 | + switch (codec_id) { | ||
1896 | + case CODEC_ID_RA_144: | ||
1897 | + version = 1; | ||
1898 | + break; | ||
1899 | + case CODEC_ID_RA_288: | ||
1900 | + version = 2; | ||
1901 | + break; | ||
1902 | + case CODEC_ID_COOK: | ||
1903 | + version = 8; | ||
1904 | + break; | ||
1905 | + default: | ||
1906 | + break; | ||
1907 | + } | ||
1908 | + | ||
1909 | + /* FIXME: properties? */ | ||
1910 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-pn-realaudio", | ||
1911 | + "raversion", G_TYPE_INT, version, NULL); | ||
1912 | + if (context) { | ||
1913 | + gst_caps_set_simple (caps, | ||
1914 | + "leaf_size", G_TYPE_INT, context->block_align, | ||
1915 | + "bitrate", G_TYPE_INT, context->bit_rate, NULL); | ||
1916 | + } | ||
1917 | + } | ||
1918 | + break; | ||
1919 | + | ||
1920 | + case CODEC_ID_ROQ_DPCM: | ||
1921 | + case CODEC_ID_INTERPLAY_DPCM: | ||
1922 | + case CODEC_ID_XAN_DPCM: | ||
1923 | + case CODEC_ID_SOL_DPCM: | ||
1924 | + { | ||
1925 | + const gchar *layout = NULL; | ||
1926 | + | ||
1927 | + switch (codec_id) { | ||
1928 | + case CODEC_ID_ROQ_DPCM: | ||
1929 | + layout = "roq"; | ||
1930 | + break; | ||
1931 | + case CODEC_ID_INTERPLAY_DPCM: | ||
1932 | + layout = "interplay"; | ||
1933 | + break; | ||
1934 | + case CODEC_ID_XAN_DPCM: | ||
1935 | + layout = "xan"; | ||
1936 | + break; | ||
1937 | + case CODEC_ID_SOL_DPCM: | ||
1938 | + layout = "sol"; | ||
1939 | + break; | ||
1940 | + default: | ||
1941 | + g_assert (0); /* don't worry, we never get here */ | ||
1942 | + break; | ||
1943 | + } | ||
1944 | + | ||
1945 | + /* FIXME: someone please check whether we need additional properties | ||
1946 | + * in this caps definition. */ | ||
1947 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-dpcm", | ||
1948 | + "layout", G_TYPE_STRING, layout, NULL); | ||
1949 | + if (context) | ||
1950 | + gst_caps_set_simple (caps, | ||
1951 | + "block_align", G_TYPE_INT, context->block_align, | ||
1952 | + "bitrate", G_TYPE_INT, context->bit_rate, NULL); | ||
1953 | + } | ||
1954 | + break; | ||
1955 | + | ||
1956 | + case CODEC_ID_SHORTEN: | ||
1957 | + caps = gst_caps_new_simple ("audio/x-shorten", NULL); | ||
1958 | + break; | ||
1959 | + | ||
1960 | + case CODEC_ID_ALAC: | ||
1961 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-alac", NULL); | ||
1962 | + if (context) { | ||
1963 | + gst_caps_set_simple (caps, | ||
1964 | + "samplesize", G_TYPE_INT, context->bits_per_coded_sample, NULL); | ||
1965 | + } | ||
1966 | + break; | ||
1967 | + | ||
1968 | + case CODEC_ID_FLAC: | ||
1969 | + /* Note that ffmpeg has no encoder yet, but just for safety. In the | ||
1970 | + * encoder case, we want to add things like samplerate, channels... */ | ||
1971 | + if (!encode) { | ||
1972 | + caps = gst_caps_new_simple ("audio/x-flac", NULL); | ||
1973 | + } | ||
1974 | + break; | ||
1975 | + | ||
1976 | + case CODEC_ID_DVD_SUBTITLE: | ||
1977 | + case CODEC_ID_DVB_SUBTITLE: | ||
1978 | + caps = NULL; | ||
1979 | + break; | ||
1980 | + case CODEC_ID_BMP: | ||
1981 | + caps = gst_caps_new_simple ("image/bmp", NULL); | ||
1982 | + break; | ||
1983 | + case CODEC_ID_TTA: | ||
1984 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-tta", NULL); | ||
1985 | + if (context) { | ||
1986 | + gst_caps_set_simple (caps, | ||
1987 | + "samplesize", G_TYPE_INT, context->bits_per_coded_sample, NULL); | ||
1988 | + } | ||
1989 | + break; | ||
1990 | + case CODEC_ID_TWINVQ: | ||
1991 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-twin-vq", NULL); | ||
1992 | + break; | ||
1993 | + default: | ||
1994 | + GST_DEBUG ("Unknown codec ID %d, please add mapping here", codec_id); | ||
1995 | + break; | ||
1996 | + } | ||
1997 | + | ||
1998 | + if (buildcaps) { | ||
1999 | + AVCodec *codec; | ||
2000 | + | ||
2001 | + if ((codec = avcodec_find_decoder (codec_id)) || | ||
2002 | + (codec = avcodec_find_encoder (codec_id))) { | ||
2003 | + gchar *mime = NULL; | ||
2004 | + | ||
2005 | + GST_LOG ("Could not create stream format caps for %s", codec->name); | ||
2006 | + | ||
2007 | + switch (codec->type) { | ||
2008 | + case AVMEDIA_TYPE_VIDEO: | ||
2009 | + mime = g_strdup_printf ("video/x-gst_ff-%s", codec->name); | ||
2010 | + caps = gst_ff_vid_caps_new (context, codec_id, mime, NULL); | ||
2011 | + g_free (mime); | ||
2012 | + break; | ||
2013 | + case AVMEDIA_TYPE_AUDIO: | ||
2014 | + mime = g_strdup_printf ("audio/x-gst_ff-%s", codec->name); | ||
2015 | + caps = gst_ff_aud_caps_new (context, codec_id, mime, NULL); | ||
2016 | + if (context) | ||
2017 | + gst_caps_set_simple (caps, | ||
2018 | + "block_align", G_TYPE_INT, context->block_align, | ||
2019 | + "bitrate", G_TYPE_INT, context->bit_rate, NULL); | ||
2020 | + g_free (mime); | ||
2021 | + break; | ||
2022 | + default: | ||
2023 | + break; | ||
2024 | + } | ||
2025 | + } | ||
2026 | + } | ||
2027 | + | ||
2028 | + if (caps != NULL) { | ||
2029 | + | ||
2030 | + /* set private data */ | ||
2031 | + if (context && context->extradata_size > 0) { | ||
2032 | + GstBuffer *data = gst_buffer_new_and_alloc (context->extradata_size); | ||
2033 | + | ||
2034 | + memcpy (GST_BUFFER_DATA (data), context->extradata, | ||
2035 | + context->extradata_size); | ||
2036 | + gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, data, NULL); | ||
2037 | + gst_buffer_unref (data); | ||
2038 | + } | ||
2039 | + | ||
2040 | + /* palette */ | ||
2041 | + if (context) { | ||
2042 | + gst_ffmpeg_set_palette (caps, context); | ||
2043 | + } | ||
2044 | + | ||
2045 | + GST_LOG ("caps for codec_id=%d: %" GST_PTR_FORMAT, codec_id, caps); | ||
2046 | + | ||
2047 | + } else { | ||
2048 | + GST_LOG ("No caps found for codec_id=%d", codec_id); | ||
2049 | + } | ||
2050 | + | ||
2051 | + return caps; | ||
2052 | +} | ||
2053 | + | ||
2054 | +/* Convert a FFMPEG Pixel Format and optional AVCodecContext | ||
2055 | + * to a GstCaps. If the context is omitted, no fixed values | ||
2056 | + * for video/audio size will be included in the GstCaps | ||
2057 | + * | ||
2058 | + * See below for usefulness | ||
2059 | + */ | ||
2060 | + | ||
2061 | +GstCaps * | ||
2062 | +gst_ffmpeg_pixfmt_to_caps (enum PixelFormat pix_fmt, AVCodecContext * context, | ||
2063 | + enum CodecID codec_id) | ||
2064 | +{ | ||
2065 | + GstCaps *caps = NULL; | ||
2066 | + | ||
2067 | + int bpp = 0, depth = 0, endianness = 0; | ||
2068 | + gulong g_mask = 0, r_mask = 0, b_mask = 0, a_mask = 0; | ||
2069 | + guint32 fmt = 0; | ||
2070 | + | ||
2071 | + switch (pix_fmt) { | ||
2072 | + case PIX_FMT_YUVJ420P: | ||
2073 | + case PIX_FMT_YUV420P: | ||
2074 | + fmt = GST_MAKE_FOURCC ('I', '4', '2', '0'); | ||
2075 | + break; | ||
2076 | + case PIX_FMT_YUVA420P: | ||
2077 | + fmt = GST_MAKE_FOURCC ('A', '4', '2', '0'); | ||
2078 | + break; | ||
2079 | + case PIX_FMT_YUYV422: | ||
2080 | + fmt = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'); | ||
2081 | + break; | ||
2082 | + case PIX_FMT_RGB24: | ||
2083 | + bpp = depth = 24; | ||
2084 | + endianness = G_BIG_ENDIAN; | ||
2085 | + r_mask = 0xff0000; | ||
2086 | + g_mask = 0x00ff00; | ||
2087 | + b_mask = 0x0000ff; | ||
2088 | + break; | ||
2089 | + case PIX_FMT_BGR24: | ||
2090 | + bpp = depth = 24; | ||
2091 | + endianness = G_BIG_ENDIAN; | ||
2092 | + r_mask = 0x0000ff; | ||
2093 | + g_mask = 0x00ff00; | ||
2094 | + b_mask = 0xff0000; | ||
2095 | + break; | ||
2096 | + case PIX_FMT_YUVJ422P: | ||
2097 | + case PIX_FMT_YUV422P: | ||
2098 | + fmt = GST_MAKE_FOURCC ('Y', '4', '2', 'B'); | ||
2099 | + break; | ||
2100 | + case PIX_FMT_YUVJ444P: | ||
2101 | + case PIX_FMT_YUV444P: | ||
2102 | + fmt = GST_MAKE_FOURCC ('Y', '4', '4', '4'); | ||
2103 | + break; | ||
2104 | + case PIX_FMT_RGB32: | ||
2105 | + bpp = 32; | ||
2106 | + depth = 32; | ||
2107 | + endianness = G_BIG_ENDIAN; | ||
2108 | +#if (G_BYTE_ORDER == G_BIG_ENDIAN) | ||
2109 | + r_mask = 0x00ff0000; | ||
2110 | + g_mask = 0x0000ff00; | ||
2111 | + b_mask = 0x000000ff; | ||
2112 | + a_mask = 0xff000000; | ||
2113 | +#else | ||
2114 | + r_mask = 0x0000ff00; | ||
2115 | + g_mask = 0x00ff0000; | ||
2116 | + b_mask = 0xff000000; | ||
2117 | + a_mask = 0x000000ff; | ||
2118 | +#endif | ||
2119 | + break; | ||
2120 | + case PIX_FMT_YUV410P: | ||
2121 | + fmt = GST_MAKE_FOURCC ('Y', 'U', 'V', '9'); | ||
2122 | + break; | ||
2123 | + case PIX_FMT_YUV411P: | ||
2124 | + fmt = GST_MAKE_FOURCC ('Y', '4', '1', 'B'); | ||
2125 | + break; | ||
2126 | + case PIX_FMT_RGB565: | ||
2127 | + bpp = depth = 16; | ||
2128 | + endianness = G_BYTE_ORDER; | ||
2129 | + r_mask = 0xf800; | ||
2130 | + g_mask = 0x07e0; | ||
2131 | + b_mask = 0x001f; | ||
2132 | + break; | ||
2133 | + case PIX_FMT_RGB555: | ||
2134 | + bpp = 16; | ||
2135 | + depth = 15; | ||
2136 | + endianness = G_BYTE_ORDER; | ||
2137 | + r_mask = 0x7c00; | ||
2138 | + g_mask = 0x03e0; | ||
2139 | + b_mask = 0x001f; | ||
2140 | + break; | ||
2141 | + case PIX_FMT_PAL8: | ||
2142 | + bpp = depth = 8; | ||
2143 | + endianness = G_BYTE_ORDER; | ||
2144 | + break; | ||
2145 | + case PIX_FMT_GRAY8: | ||
2146 | + bpp = depth = 8; | ||
2147 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-raw-gray", | ||
2148 | + "bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, depth, NULL); | ||
2149 | + break; | ||
2150 | + default: | ||
2151 | + /* give up ... */ | ||
2152 | + break; | ||
2153 | + } | ||
2154 | + | ||
2155 | + if (caps == NULL) { | ||
2156 | + if (bpp != 0) { | ||
2157 | + if (r_mask != 0) { | ||
2158 | + if (a_mask) { | ||
2159 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-raw-rgb", | ||
2160 | + "bpp", G_TYPE_INT, bpp, | ||
2161 | + "depth", G_TYPE_INT, depth, | ||
2162 | + "red_mask", G_TYPE_INT, r_mask, | ||
2163 | + "green_mask", G_TYPE_INT, g_mask, | ||
2164 | + "blue_mask", G_TYPE_INT, b_mask, | ||
2165 | + "alpha_mask", G_TYPE_INT, a_mask, | ||
2166 | + "endianness", G_TYPE_INT, endianness, NULL); | ||
2167 | + } else { | ||
2168 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-raw-rgb", | ||
2169 | + "bpp", G_TYPE_INT, bpp, | ||
2170 | + "depth", G_TYPE_INT, depth, | ||
2171 | + "red_mask", G_TYPE_INT, r_mask, | ||
2172 | + "green_mask", G_TYPE_INT, g_mask, | ||
2173 | + "blue_mask", G_TYPE_INT, b_mask, | ||
2174 | + "endianness", G_TYPE_INT, endianness, NULL); | ||
2175 | + } | ||
2176 | + } else { | ||
2177 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-raw-rgb", | ||
2178 | + "bpp", G_TYPE_INT, bpp, | ||
2179 | + "depth", G_TYPE_INT, depth, | ||
2180 | + "endianness", G_TYPE_INT, endianness, NULL); | ||
2181 | + if (caps && context) { | ||
2182 | + gst_ffmpeg_set_palette (caps, context); | ||
2183 | + } | ||
2184 | + } | ||
2185 | + } else if (fmt) { | ||
2186 | + caps = gst_ff_vid_caps_new (context, codec_id, "video/x-raw-yuv", | ||
2187 | + "format", GST_TYPE_FOURCC, fmt, NULL); | ||
2188 | + } | ||
2189 | + } | ||
2190 | + | ||
2191 | + if (caps != NULL) { | ||
2192 | + GST_DEBUG ("caps for pix_fmt=%d: %" GST_PTR_FORMAT, pix_fmt, caps); | ||
2193 | + } else { | ||
2194 | + GST_LOG ("No caps found for pix_fmt=%d", pix_fmt); | ||
2195 | + } | ||
2196 | + | ||
2197 | + return caps; | ||
2198 | +} | ||
2199 | + | ||
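The function above is the raw-video half of the mapping: planar and packed YUV formats come out as video/x-raw-yuv with a fourcc, RGB formats as video/x-raw-rgb with bpp/depth/mask fields. A minimal sketch of a call, assuming the declaration is picked up from this plugin's gstffmpegcodecmap.h and using CODEC_ID_RAWVIDEO purely as an illustrative codec id:

    #include <gst/gst.h>
    #include <libavcodec/avcodec.h>
    #include "gstffmpegcodecmap.h"

    static void
    show_i420_mapping (void)
    {
      /* NULL context: no fixed width/height/framerate values are put in the caps */
      GstCaps *caps =
          gst_ffmpeg_pixfmt_to_caps (PIX_FMT_YUV420P, NULL, CODEC_ID_RAWVIDEO);

      if (caps != NULL) {
        gchar *s = gst_caps_to_string (caps);

        /* expected shape: video/x-raw-yuv, format=(fourcc)I420, ... */
        g_print ("%s\n", s);
        g_free (s);
        gst_caps_unref (caps);
      }
    }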
2200 | +/* Convert a FFMPEG Sample Format and optional AVCodecContext | ||
2201 | + * to a GstCaps. If the context is omitted, no fixed values | ||
2202 | + * for video/audio size will be included in the GstCaps | ||
2203 | + * | ||
2204 | + * See below for usefulness | ||
2205 | + */ | ||
2206 | + | ||
2207 | +static GstCaps * | ||
2208 | +gst_ffmpeg_smpfmt_to_caps (enum AVSampleFormat sample_fmt, | ||
2209 | + AVCodecContext * context, enum CodecID codec_id) | ||
2210 | +{ | ||
2211 | + GstCaps *caps = NULL; | ||
2212 | + | ||
2213 | + int bpp = 0; | ||
2214 | + gboolean integer = TRUE; | ||
2215 | + gboolean signedness = FALSE; | ||
2216 | + | ||
2217 | + switch (sample_fmt) { | ||
2218 | + case AV_SAMPLE_FMT_S16: | ||
2219 | + signedness = TRUE; | ||
2220 | + bpp = 16; | ||
2221 | + break; | ||
2222 | + | ||
2223 | + case AV_SAMPLE_FMT_S32: | ||
2224 | + signedness = TRUE; | ||
2225 | + bpp = 32; | ||
2226 | + break; | ||
2227 | + | ||
2228 | + case AV_SAMPLE_FMT_FLT: | ||
2229 | + integer = FALSE; | ||
2230 | + bpp = 32; | ||
2231 | + break; | ||
2232 | + | ||
2233 | + case AV_SAMPLE_FMT_DBL: | ||
2234 | + integer = FALSE; | ||
2235 | + bpp = 64; | ||
2236 | + break; | ||
2237 | + default: | ||
2238 | + /* unhandled sample format, bpp stays 0 and no caps are built */ | ||
2239 | + break; | ||
2240 | + } | ||
2241 | + | ||
2242 | + if (bpp) { | ||
2243 | + if (integer) { | ||
2244 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-raw-int", | ||
2245 | + "signed", G_TYPE_BOOLEAN, signedness, | ||
2246 | + "endianness", G_TYPE_INT, G_BYTE_ORDER, | ||
2247 | + "width", G_TYPE_INT, bpp, "depth", G_TYPE_INT, bpp, NULL); | ||
2248 | + } else { | ||
2249 | + caps = gst_ff_aud_caps_new (context, codec_id, "audio/x-raw-float", | ||
2250 | + "endianness", G_TYPE_INT, G_BYTE_ORDER, | ||
2251 | + "width", G_TYPE_INT, bpp, NULL); | ||
2252 | + } | ||
2253 | + } | ||
2254 | + | ||
2255 | + if (caps != NULL) { | ||
2256 | + GST_LOG ("caps for sample_fmt=%d: %" GST_PTR_FORMAT, sample_fmt, caps); | ||
2257 | + } else { | ||
2258 | + GST_LOG ("No caps found for sample_fmt=%d", sample_fmt); | ||
2259 | + } | ||
2260 | + | ||
2261 | + return caps; | ||
2262 | +} | ||
2263 | + | ||
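For reference, the AV_SAMPLE_FMT_S16 branch above amounts to the plain GStreamer 0.10 integer-audio caps written out by hand below (gst_ff_aud_caps_new() may additionally add rate and channel fields taken from the context):

    /* hand-written equivalent of the AV_SAMPLE_FMT_S16 case above */
    GstCaps *s16_caps = gst_caps_new_simple ("audio/x-raw-int",
        "signed", G_TYPE_BOOLEAN, TRUE,
        "endianness", G_TYPE_INT, G_BYTE_ORDER,
        "width", G_TYPE_INT, 16,
        "depth", G_TYPE_INT, 16, NULL);

    /* ... negotiate or compare against these caps ... */
    gst_caps_unref (s16_caps);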
2264 | +GstCaps * | ||
2265 | +gst_ffmpeg_codectype_to_audio_caps (AVCodecContext * context, | ||
2266 | + enum CodecID codec_id, gboolean encode, AVCodec * codec) | ||
2267 | +{ | ||
2268 | + GstCaps *caps = NULL; | ||
2269 | + | ||
2270 | + GST_DEBUG ("context:%p, codec_id:%d, encode:%d, codec:%p", | ||
2271 | + context, codec_id, encode, codec); | ||
2272 | + if (codec) | ||
2273 | + GST_DEBUG ("sample_fmts:%p, samplerates:%p", | ||
2274 | + codec->sample_fmts, codec->supported_samplerates); | ||
2275 | + | ||
2276 | + if (context) { | ||
2277 | + /* Specific codec context */ | ||
2278 | + caps = gst_ffmpeg_smpfmt_to_caps (context->sample_fmt, context, codec_id); | ||
2279 | + } else if (codec && codec->sample_fmts) { | ||
2280 | + GstCaps *temp; | ||
2281 | + int i; | ||
2282 | + | ||
2283 | + caps = gst_caps_new_empty (); | ||
2284 | + for (i = 0; codec->sample_fmts[i] != -1; i++) { | ||
2285 | + temp = | ||
2286 | + gst_ffmpeg_smpfmt_to_caps (codec->sample_fmts[i], context, codec_id); | ||
2287 | + if (temp != NULL) | ||
2288 | + gst_caps_append (caps, temp); | ||
2289 | + } | ||
2290 | + } else { | ||
2291 | + GstCaps *temp; | ||
2292 | + enum AVSampleFormat i; | ||
2293 | + AVCodecContext ctx = { 0, }; | ||
2294 | + | ||
2295 | + ctx.channels = -1; | ||
2296 | + caps = gst_caps_new_empty (); | ||
2297 | + for (i = 0; i <= AV_SAMPLE_FMT_DBL; i++) { | ||
2298 | + temp = gst_ffmpeg_smpfmt_to_caps (i, encode ? &ctx : NULL, codec_id); | ||
2299 | + if (temp != NULL) { | ||
2300 | + gst_caps_append (caps, temp); | ||
2301 | + } | ||
2302 | + } | ||
2303 | + } | ||
2304 | + return caps; | ||
2305 | +} | ||
2306 | + | ||
2307 | +GstCaps * | ||
2308 | +gst_ffmpeg_codectype_to_video_caps (AVCodecContext * context, | ||
2309 | + enum CodecID codec_id, gboolean encode, AVCodec * codec) | ||
2310 | +{ | ||
2311 | + GstCaps *caps; | ||
2312 | + | ||
2313 | + GST_LOG ("context:%p, codec_id:%d, encode:%d, codec:%p", | ||
2314 | + context, codec_id, encode, codec); | ||
2315 | + | ||
2316 | + if (context) { | ||
2317 | + caps = gst_ffmpeg_pixfmt_to_caps (context->pix_fmt, context, codec_id); | ||
2318 | + } else { | ||
2319 | + GstCaps *temp; | ||
2320 | + enum PixelFormat i; | ||
2321 | + AVCodecContext ctx = { 0, }; | ||
2322 | + | ||
2323 | + caps = gst_caps_new_empty (); | ||
2324 | + for (i = 0; i < PIX_FMT_NB; i++) { | ||
2325 | + ctx.width = -1; | ||
2326 | + ctx.pix_fmt = i; | ||
2327 | + temp = gst_ffmpeg_pixfmt_to_caps (i, encode ? &ctx : NULL, codec_id); | ||
2328 | + if (temp != NULL) { | ||
2329 | + gst_caps_append (caps, temp); | ||
2330 | + } | ||
2331 | + } | ||
2332 | + } | ||
2333 | + return caps; | ||
2334 | +} | ||
2335 | + | ||
2336 | +/* Convert a FFMPEG codec Type and optional AVCodecContext | ||
2337 | + * to a GstCaps. If the context is omitted, no fixed values | ||
2338 | + * for video/audio size will be included in the GstCaps | ||
2339 | + * | ||
2340 | + * AVMediaType is primarily meant for uncompressed data GstCaps! | ||
2341 | + */ | ||
2342 | + | ||
2343 | +GstCaps * | ||
2344 | +gst_ffmpeg_codectype_to_caps (enum AVMediaType codec_type, | ||
2345 | + AVCodecContext * context, enum CodecID codec_id, gboolean encode) | ||
2346 | +{ | ||
2347 | + GstCaps *caps; | ||
2348 | + | ||
2349 | + switch (codec_type) { | ||
2350 | + case AVMEDIA_TYPE_VIDEO: | ||
2351 | + caps = | ||
2352 | + gst_ffmpeg_codectype_to_video_caps (context, codec_id, encode, NULL); | ||
2353 | + break; | ||
2354 | + case AVMEDIA_TYPE_AUDIO: | ||
2355 | + caps = | ||
2356 | + gst_ffmpeg_codectype_to_audio_caps (context, codec_id, encode, NULL); | ||
2357 | + break; | ||
2358 | + default: | ||
2359 | + caps = NULL; | ||
2360 | + break; | ||
2361 | + } | ||
2362 | + | ||
2363 | + return caps; | ||
2364 | +} | ||
2365 | + | ||
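Called with a NULL context, the dispatcher above unions the caps of every supported sample or pixel format, which is the kind of caps a pad template wants. A rough sketch under that assumption; the pad template name and presence are made up for the example:

    GstCaps *tmpl_caps;
    GstPadTemplate *tmpl;

    /* NULL context, CODEC_ID_NONE: enumerate all raw audio formats we can map */
    tmpl_caps = gst_ffmpeg_codectype_to_caps (AVMEDIA_TYPE_AUDIO, NULL,
        CODEC_ID_NONE, FALSE);
    tmpl = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, tmpl_caps);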
2366 | +/* Convert a GstCaps (audio/raw) to a FFMPEG SampleFmt | ||
2367 | + * and other audio properties in an AVCodecContext. | ||
2368 | + * | ||
2369 | + * For usefulness, see below | ||
2370 | + */ | ||
2371 | + | ||
2372 | +static void | ||
2373 | +gst_ffmpeg_caps_to_smpfmt (const GstCaps * caps, | ||
2374 | + AVCodecContext * context, gboolean raw) | ||
2375 | +{ | ||
2376 | + GstStructure *structure; | ||
2377 | + gint depth = 0, width = 0, endianness = 0; | ||
2378 | + gboolean signedness = FALSE; | ||
2379 | + const gchar *name; | ||
2380 | + | ||
2381 | + g_return_if_fail (gst_caps_get_size (caps) == 1); | ||
2382 | + structure = gst_caps_get_structure (caps, 0); | ||
2383 | + | ||
2384 | + gst_structure_get_int (structure, "channels", &context->channels); | ||
2385 | + gst_structure_get_int (structure, "rate", &context->sample_rate); | ||
2386 | + gst_structure_get_int (structure, "block_align", &context->block_align); | ||
2387 | + gst_structure_get_int (structure, "bitrate", &context->bit_rate); | ||
2388 | + | ||
2389 | + if (!raw) | ||
2390 | + return; | ||
2391 | + | ||
2392 | + name = gst_structure_get_name (structure); | ||
2393 | + | ||
2394 | + if (!strcmp (name, "audio/x-raw-float")) { | ||
2395 | + /* FLOAT */ | ||
2396 | + if (gst_structure_get_int (structure, "width", &width) && | ||
2397 | + gst_structure_get_int (structure, "endianness", &endianness)) { | ||
2398 | + if (endianness == G_BYTE_ORDER) { | ||
2399 | + if (width == 32) | ||
2400 | + context->sample_fmt = AV_SAMPLE_FMT_FLT; | ||
2401 | + else if (width == 64) | ||
2402 | + context->sample_fmt = AV_SAMPLE_FMT_DBL; | ||
2403 | + } | ||
2404 | + } | ||
2405 | + } else { | ||
2406 | + /* INT */ | ||
2407 | + if (gst_structure_get_int (structure, "width", &width) && | ||
2408 | + gst_structure_get_int (structure, "depth", &depth) && | ||
2409 | + gst_structure_get_boolean (structure, "signed", &signedness) && | ||
2410 | + gst_structure_get_int (structure, "endianness", &endianness)) { | ||
2411 | + if ((endianness == G_BYTE_ORDER) && (signedness == TRUE)) { | ||
2412 | + if ((width == 16) && (depth == 16)) | ||
2413 | + context->sample_fmt = AV_SAMPLE_FMT_S16; | ||
2414 | + else if ((width == 32) && (depth == 32)) | ||
2415 | + context->sample_fmt = AV_SAMPLE_FMT_S32; | ||
2416 | + } | ||
2417 | + } | ||
2418 | + } | ||
2419 | +} | ||
2420 | + | ||
2421 | + | ||
2422 | +/* Convert a GstCaps (video/raw) to a FFMPEG PixFmt | ||
2423 | + * and other video properties in an AVCodecContext. | ||
2424 | + * | ||
2425 | + * For usefulness, see below | ||
2426 | + */ | ||
2427 | + | ||
2428 | +static void | ||
2429 | +gst_ffmpeg_caps_to_pixfmt (const GstCaps * caps, | ||
2430 | + AVCodecContext * context, gboolean raw) | ||
2431 | +{ | ||
2432 | + GstStructure *structure; | ||
2433 | + const GValue *fps; | ||
2434 | + const GValue *par = NULL; | ||
2435 | + | ||
2436 | + GST_DEBUG ("converting caps %" GST_PTR_FORMAT, caps); | ||
2437 | + g_return_if_fail (gst_caps_get_size (caps) == 1); | ||
2438 | + structure = gst_caps_get_structure (caps, 0); | ||
2439 | + | ||
2440 | + gst_structure_get_int (structure, "width", &context->width); | ||
2441 | + gst_structure_get_int (structure, "height", &context->height); | ||
2442 | + gst_structure_get_int (structure, "bpp", &context->bits_per_coded_sample); | ||
2443 | + | ||
2444 | + fps = gst_structure_get_value (structure, "framerate"); | ||
2445 | + if (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps)) { | ||
2446 | + | ||
2447 | + /* framerate is frames/second but time_base is seconds/frame, hence the swap */ | ||
2448 | + context->time_base.den = gst_value_get_fraction_numerator (fps); | ||
2449 | + context->time_base.num = gst_value_get_fraction_denominator (fps); | ||
2450 | + context->ticks_per_frame = 1; | ||
2451 | + | ||
2452 | + GST_DEBUG ("setting framerate %d/%d = %lf", | ||
2453 | + context->time_base.den, context->time_base.num, | ||
2454 | + 1. * context->time_base.den / context->time_base.num); | ||
2455 | + } | ||
2456 | + | ||
2457 | + par = gst_structure_get_value (structure, "pixel-aspect-ratio"); | ||
2458 | + if (par && GST_VALUE_HOLDS_FRACTION (par)) { | ||
2459 | + | ||
2460 | + context->sample_aspect_ratio.num = gst_value_get_fraction_numerator (par); | ||
2461 | + context->sample_aspect_ratio.den = gst_value_get_fraction_denominator (par); | ||
2462 | + | ||
2463 | + GST_DEBUG ("setting pixel-aspect-ratio %d/%d = %lf", | ||
2464 | + context->sample_aspect_ratio.den, context->sample_aspect_ratio.num, | ||
2465 | + 1. * context->sample_aspect_ratio.den / | ||
2466 | + context->sample_aspect_ratio.num); | ||
2467 | + } | ||
2468 | + | ||
2469 | + if (!raw) | ||
2470 | + return; | ||
2471 | + | ||
2472 | + g_return_if_fail (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps)); | ||
2473 | + | ||
2474 | + if (strcmp (gst_structure_get_name (structure), "video/x-raw-yuv") == 0) { | ||
2475 | + guint32 fourcc; | ||
2476 | + | ||
2477 | + if (gst_structure_get_fourcc (structure, "format", &fourcc)) { | ||
2478 | + switch (fourcc) { | ||
2479 | + case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'): | ||
2480 | + context->pix_fmt = PIX_FMT_YUYV422; | ||
2481 | + break; | ||
2482 | + case GST_MAKE_FOURCC ('I', '4', '2', '0'): | ||
2483 | + context->pix_fmt = PIX_FMT_YUV420P; | ||
2484 | + break; | ||
2485 | + case GST_MAKE_FOURCC ('A', '4', '2', '0'): | ||
2486 | + context->pix_fmt = PIX_FMT_YUVA420P; | ||
2487 | + break; | ||
2488 | + case GST_MAKE_FOURCC ('Y', '4', '1', 'B'): | ||
2489 | + context->pix_fmt = PIX_FMT_YUV411P; | ||
2490 | + break; | ||
2491 | + case GST_MAKE_FOURCC ('Y', '4', '2', 'B'): | ||
2492 | + context->pix_fmt = PIX_FMT_YUV422P; | ||
2493 | + break; | ||
2494 | + case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'): | ||
2495 | + context->pix_fmt = PIX_FMT_YUV410P; | ||
2496 | + break; | ||
2497 | +#if 0 | ||
2498 | + case FIXME: | ||
2499 | + context->pix_fmt = PIX_FMT_YUV444P; | ||
2500 | + break; | ||
2501 | +#endif | ||
2502 | + } | ||
2503 | + } | ||
2504 | + } else if (strcmp (gst_structure_get_name (structure), | ||
2505 | + "video/x-raw-rgb") == 0) { | ||
2506 | + gint bpp = 0, rmask = 0, endianness = 0; | ||
2507 | + | ||
2508 | + if (gst_structure_get_int (structure, "bpp", &bpp) && | ||
2509 | + gst_structure_get_int (structure, "endianness", &endianness)) { | ||
2510 | + if (gst_structure_get_int (structure, "red_mask", &rmask)) { | ||
2511 | + switch (bpp) { | ||
2512 | + case 32: | ||
2513 | +#if (G_BYTE_ORDER == G_BIG_ENDIAN) | ||
2514 | + if (rmask == 0x00ff0000) | ||
2515 | +#else | ||
2516 | + if (rmask == 0x0000ff00) | ||
2517 | +#endif | ||
2518 | + context->pix_fmt = PIX_FMT_RGB32; | ||
2519 | + break; | ||
2520 | + case 24: | ||
2521 | + if (rmask == 0x0000FF) | ||
2522 | + context->pix_fmt = PIX_FMT_BGR24; | ||
2523 | + else | ||
2524 | + context->pix_fmt = PIX_FMT_RGB24; | ||
2525 | + break; | ||
2526 | + case 16: | ||
2527 | + if (endianness == G_BYTE_ORDER) | ||
2528 | + context->pix_fmt = PIX_FMT_RGB565; | ||
2529 | + break; | ||
2530 | + case 15: | ||
2531 | + if (endianness == G_BYTE_ORDER) | ||
2532 | + context->pix_fmt = PIX_FMT_RGB555; | ||
2533 | + break; | ||
2534 | + default: | ||
2535 | + /* nothing */ | ||
2536 | + break; | ||
2537 | + } | ||
2538 | + } else { | ||
2539 | + if (bpp == 8) { | ||
2540 | + context->pix_fmt = PIX_FMT_PAL8; | ||
2541 | + gst_ffmpeg_get_palette (caps, context); | ||
2542 | + } | ||
2543 | + } | ||
2544 | + } | ||
2545 | + } else if (strcmp (gst_structure_get_name (structure), | ||
2546 | + "video/x-raw-gray") == 0) { | ||
2547 | + gint bpp = 0; | ||
2548 | + | ||
2549 | + if (gst_structure_get_int (structure, "bpp", &bpp)) { | ||
2550 | + switch (bpp) { | ||
2551 | + case 8: | ||
2552 | + context->pix_fmt = PIX_FMT_GRAY8; | ||
2553 | + break; | ||
2554 | + } | ||
2555 | + } | ||
2556 | + } | ||
2557 | +} | ||
2558 | + | ||
2559 | +/* Convert a GstCaps and a FFMPEG codec Type to a | ||
2560 | + * AVCodecContext. If the context is omitted, no fixed values | ||
2561 | + * for video/audio size will be included in the context | ||
2562 | + * | ||
2563 | + * AVMediaType is primarily meant for uncompressed data GstCaps! | ||
2564 | + */ | ||
2565 | + | ||
2566 | +void | ||
2567 | +gst_ffmpeg_caps_with_codectype (enum AVMediaType type, | ||
2568 | + const GstCaps * caps, AVCodecContext * context) | ||
2569 | +{ | ||
2570 | + if (context == NULL) | ||
2571 | + return; | ||
2572 | + | ||
2573 | + switch (type) { | ||
2574 | + case AVMEDIA_TYPE_VIDEO: | ||
2575 | + gst_ffmpeg_caps_to_pixfmt (caps, context, TRUE); | ||
2576 | + break; | ||
2577 | + | ||
2578 | + case AVMEDIA_TYPE_AUDIO: | ||
2579 | + gst_ffmpeg_caps_to_smpfmt (caps, context, TRUE); | ||
2580 | + break; | ||
2581 | + | ||
2582 | + default: | ||
2583 | + /* unknown */ | ||
2584 | + break; | ||
2585 | + } | ||
2586 | +} | ||
2587 | + | ||
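Going the other way, the helper above fills an AVCodecContext from fixed caps; for raw video the framerate fraction is mandatory (see the g_return_if_fail() in gst_ffmpeg_caps_to_pixfmt()). A small sketch with arbitrarily chosen geometry:

    AVCodecContext ctx = { 0, };      /* zero-filled, as this file does elsewhere */
    GstCaps *caps = gst_caps_new_simple ("video/x-raw-yuv",
        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
        "width", G_TYPE_INT, 320, "height", G_TYPE_INT, 240,
        "framerate", GST_TYPE_FRACTION, 25, 1, NULL);

    gst_ffmpeg_caps_with_codectype (AVMEDIA_TYPE_VIDEO, caps, &ctx);
    /* now ctx.width == 320, ctx.height == 240, ctx.pix_fmt == PIX_FMT_YUV420P and
     * ctx.time_base == 1/25 (numerator and denominator swapped from the framerate) */
    gst_caps_unref (caps);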
2588 | +#if 0 | ||
2589 | +static void | ||
2590 | +nal_escape (guint8 * dst, guint8 * src, guint size, guint * destsize) | ||
2591 | +{ | ||
2592 | + guint8 *dstp = dst; | ||
2593 | + guint8 *srcp = src; | ||
2594 | + guint8 *end = src + size; | ||
2595 | + gint count = 0; | ||
2596 | + | ||
2597 | + while (srcp < end) { | ||
2598 | + if (count == 2 && *srcp <= 0x03) { | ||
2599 | + GST_DEBUG ("added escape code"); | ||
2600 | + *dstp++ = 0x03; | ||
2601 | + count = 0; | ||
2602 | + } | ||
2603 | + if (*srcp == 0) | ||
2604 | + count++; | ||
2605 | + else | ||
2606 | + count = 0; | ||
2607 | + | ||
2608 | + GST_DEBUG ("copy %02x, count %d", *srcp, count); | ||
2609 | + *dstp++ = *srcp++; | ||
2610 | + } | ||
2611 | + *destsize = dstp - dst; | ||
2612 | +} | ||
2613 | + | ||
2614 | +/* copy the config, escaping NAL units as we iterate them, if something fails we | ||
2615 | + * copy everything and hope for the best. */ | ||
2616 | +static void | ||
2617 | +copy_config (guint8 * dst, guint8 * src, guint size, guint * destsize) | ||
2618 | +{ | ||
2619 | + guint8 *dstp = dst; | ||
2620 | + guint8 *srcp = src; | ||
2621 | + gint cnt, i; | ||
2622 | + guint nalsize, esize; | ||
2623 | + | ||
2624 | + /* check size */ | ||
2625 | + if (size < 7) | ||
2626 | + goto full_copy; | ||
2627 | + | ||
2628 | + /* check version */ | ||
2629 | + if (*srcp != 1) | ||
2630 | + goto full_copy; | ||
2631 | + | ||
2632 | + cnt = *(srcp + 5) & 0x1f; /* Number of sps */ | ||
2633 | + | ||
2634 | + GST_DEBUG ("num SPS %d", cnt); | ||
2635 | + | ||
2636 | + memcpy (dstp, srcp, 6); | ||
2637 | + srcp += 6; | ||
2638 | + dstp += 6; | ||
2639 | + | ||
2640 | + for (i = 0; i < cnt; i++) { | ||
2641 | + GST_DEBUG ("copy SPS %d", i); | ||
2642 | + nalsize = (srcp[0] << 8) | srcp[1]; | ||
2643 | + nal_escape (dstp + 2, srcp + 2, nalsize, &esize); | ||
2644 | + dstp[0] = esize >> 8; | ||
2645 | + dstp[1] = esize & 0xff; | ||
2646 | + dstp += esize + 2; | ||
2647 | + srcp += nalsize + 2; | ||
2648 | + } | ||
2649 | + | ||
2650 | + cnt = *(dstp++) = *(srcp++); /* Number of pps */ | ||
2651 | + | ||
2652 | + GST_DEBUG ("num PPS %d", cnt); | ||
2653 | + | ||
2654 | + for (i = 0; i < cnt; i++) { | ||
2655 | + GST_DEBUG ("copy PPS %d", i); | ||
2656 | + nalsize = (srcp[0] << 8) | srcp[1]; | ||
2657 | + nal_escape (dstp + 2, srcp + 2, nalsize, &esize); | ||
2658 | + dstp[0] = esize >> 8; | ||
2659 | + dstp[1] = esize & 0xff; | ||
2660 | + dstp += esize + 2; | ||
2661 | + srcp += nalsize + 2; | ||
2662 | + } | ||
2663 | + *destsize = dstp - dst; | ||
2664 | + | ||
2665 | + return; | ||
2666 | + | ||
2667 | +full_copy: | ||
2668 | + { | ||
2669 | + GST_DEBUG ("something unexpected, doing full copy"); | ||
2670 | + memcpy (dst, src, size); | ||
2671 | + *destsize = size; | ||
2672 | + return; | ||
2673 | + } | ||
2674 | +} | ||
2675 | +#endif | ||
2676 | + | ||
2677 | +/* | ||
2678 | + * caps_with_codecid () transforms a GstCaps for a known codec | ||
2679 | + * ID into a filled-in context. | ||
2680 | + * codec_data from caps will override possible extradata already in the context | ||
2681 | + */ | ||
2682 | + | ||
2683 | +void | ||
2684 | +gst_ffmpeg_caps_with_codecid (enum CodecID codec_id, | ||
2685 | + enum AVMediaType codec_type, const GstCaps * caps, AVCodecContext * context) | ||
2686 | +{ | ||
2687 | + GstStructure *str; | ||
2688 | + const GValue *value; | ||
2689 | + const GstBuffer *buf; | ||
2690 | + | ||
2691 | + GST_LOG ("codec_id:%d, codec_type:%d, caps:%" GST_PTR_FORMAT " context:%p", | ||
2692 | + codec_id, codec_type, caps, context); | ||
2693 | + | ||
2694 | + if (!context || !gst_caps_get_size (caps)) | ||
2695 | + return; | ||
2696 | + | ||
2697 | + str = gst_caps_get_structure (caps, 0); | ||
2698 | + | ||
2699 | + /* extradata parsing (esds [mpeg4], wma/wmv, msmpeg4v1/2/3, etc.) */ | ||
2700 | + if ((value = gst_structure_get_value (str, "codec_data"))) { | ||
2701 | + guint size; | ||
2702 | + guint8 *data; | ||
2703 | + | ||
2704 | + buf = GST_BUFFER_CAST (gst_value_get_mini_object (value)); | ||
2705 | + size = GST_BUFFER_SIZE (buf); | ||
2706 | + data = GST_BUFFER_DATA (buf); | ||
2707 | + | ||
2708 | + /* free the old one if it is there */ | ||
2709 | + if (context->extradata) | ||
2710 | + av_free (context->extradata); | ||
2711 | + | ||
2712 | +#if 0 | ||
2713 | + if (codec_id == CODEC_ID_H264) { | ||
2714 | + guint extrasize; | ||
2715 | + | ||
2716 | + GST_DEBUG ("copy, escaping codec_data %d", size); | ||
2717 | + /* ffmpeg h264 expects the codec_data to be escaped, there is no real | ||
2718 | + * reason for this but let's just escape it for now. Start by allocating | ||
2719 | + * enough space, x2 is more than enough. | ||
2720 | + * | ||
2721 | + * FIXME, we disabled escaping because some files already contain escaped | ||
2722 | + * codec_data and then we escape twice and fail. It's better to leave it | ||
2723 | + * as is, as that is what most players do. */ | ||
2724 | + context->extradata = | ||
2725 | + av_mallocz (GST_ROUND_UP_16 (size * 2 + | ||
2726 | + FF_INPUT_BUFFER_PADDING_SIZE)); | ||
2727 | + copy_config (context->extradata, data, size, &extrasize); | ||
2728 | + GST_DEBUG ("escaped size: %d", extrasize); | ||
2729 | + context->extradata_size = extrasize; | ||
2730 | + } else | ||
2731 | +#endif | ||
2732 | + { | ||
2733 | + /* allocate with enough padding */ | ||
2734 | + GST_DEBUG ("copy codec_data"); | ||
2735 | + context->extradata = | ||
2736 | + av_mallocz (GST_ROUND_UP_16 (size + FF_INPUT_BUFFER_PADDING_SIZE)); | ||
2737 | + memcpy (context->extradata, data, size); | ||
2738 | + context->extradata_size = size; | ||
2739 | + } | ||
2740 | + | ||
2741 | + /* Hack for VC1. Sometimes the first (length) byte is 0 for some files */ | ||
2742 | + if (codec_id == CODEC_ID_VC1 && size > 0 && data[0] == 0) { | ||
2743 | + context->extradata[0] = (guint8) size; | ||
2744 | + } | ||
2745 | + | ||
2746 | + GST_DEBUG ("have codec data of size %d", size); | ||
2747 | + } else if (context->extradata == NULL && codec_id != CODEC_ID_AAC_LATM && | ||
2748 | + codec_id != CODEC_ID_FLAC) { | ||
2749 | + /* no extradata, alloc a dummy with 0 size, some codecs insist on reading | ||
2750 | + * extradata anyway which makes them segfault. */ | ||
2751 | + context->extradata = | ||
2752 | + av_mallocz (GST_ROUND_UP_16 (FF_INPUT_BUFFER_PADDING_SIZE)); | ||
2753 | + context->extradata_size = 0; | ||
2754 | + GST_DEBUG ("no codec data"); | ||
2755 | + } | ||
2756 | + | ||
2757 | + switch (codec_id) { | ||
2758 | + case CODEC_ID_MPEG4: | ||
2759 | + { | ||
2760 | + const gchar *mime = gst_structure_get_name (str); | ||
2761 | + | ||
2762 | + if (!strcmp (mime, "video/x-divx")) | ||
2763 | + context->codec_tag = GST_MAKE_FOURCC ('D', 'I', 'V', 'X'); | ||
2764 | + else if (!strcmp (mime, "video/x-xvid")) | ||
2765 | + context->codec_tag = GST_MAKE_FOURCC ('X', 'V', 'I', 'D'); | ||
2766 | + else if (!strcmp (mime, "video/x-3ivx")) | ||
2767 | + context->codec_tag = GST_MAKE_FOURCC ('3', 'I', 'V', '1'); | ||
2768 | + else if (!strcmp (mime, "video/mpeg")) | ||
2769 | + context->codec_tag = GST_MAKE_FOURCC ('m', 'p', '4', 'v'); | ||
2770 | + } | ||
2771 | + break; | ||
2772 | + | ||
2773 | + case CODEC_ID_SVQ3: | ||
2774 | + /* FIXME: this is a workaround for older gst-plugins releases | ||
2775 | + * (<= 0.8.9). This should be removed at some point, because | ||
2776 | + * it causes wrong decoded frame order. */ | ||
2777 | + if (!context->extradata) { | ||
2778 | + gint halfpel_flag, thirdpel_flag, low_delay, unknown_svq3_flag; | ||
2779 | + guint16 flags; | ||
2780 | + | ||
2781 | + if (gst_structure_get_int (str, "halfpel_flag", &halfpel_flag) || | ||
2782 | + gst_structure_get_int (str, "thirdpel_flag", &thirdpel_flag) || | ||
2783 | + gst_structure_get_int (str, "low_delay", &low_delay) || | ||
2784 | + gst_structure_get_int (str, "unknown_svq3_flag", | ||
2785 | + &unknown_svq3_flag)) { | ||
2786 | + context->extradata = (guint8 *) av_mallocz (0x64); | ||
2787 | + g_stpcpy ((gchar *) context->extradata, "SVQ3"); | ||
2788 | + flags = 1 << 3; | ||
2789 | + flags |= low_delay; | ||
2790 | + flags = flags << 2; | ||
2791 | + flags |= unknown_svq3_flag; | ||
2792 | + flags = flags << 6; | ||
2793 | + flags |= halfpel_flag; | ||
2794 | + flags = flags << 1; | ||
2795 | + flags |= thirdpel_flag; | ||
2796 | + flags = flags << 3; | ||
2797 | + | ||
2798 | + flags = GUINT16_FROM_LE (flags); | ||
2799 | + | ||
2800 | + memcpy ((gchar *) context->extradata + 0x62, &flags, 2); | ||
2801 | + context->extradata_size = 0x64; | ||
2802 | + } | ||
2803 | + } | ||
2804 | + break; | ||
2805 | + | ||
2806 | + case CODEC_ID_MSRLE: | ||
2807 | + case CODEC_ID_QTRLE: | ||
2808 | + case CODEC_ID_TSCC: | ||
2809 | + case CODEC_ID_CSCD: | ||
2810 | + case CODEC_ID_APE: | ||
2811 | + { | ||
2812 | + gint depth; | ||
2813 | + | ||
2814 | + if (gst_structure_get_int (str, "depth", &depth)) { | ||
2815 | + context->bits_per_coded_sample = depth; | ||
2816 | + } else { | ||
2817 | + GST_WARNING ("No depth field in caps %" GST_PTR_FORMAT, caps); | ||
2818 | + } | ||
2819 | + | ||
2820 | + } | ||
2821 | + break; | ||
2822 | + | ||
2823 | + case CODEC_ID_RV10: | ||
2824 | + case CODEC_ID_RV20: | ||
2825 | + case CODEC_ID_RV30: | ||
2826 | + case CODEC_ID_RV40: | ||
2827 | + { | ||
2828 | + gint format; | ||
2829 | + | ||
2830 | + if (gst_structure_get_int (str, "format", &format)) | ||
2831 | + context->sub_id = format; | ||
2832 | + | ||
2833 | + break; | ||
2834 | + } | ||
2835 | + case CODEC_ID_COOK: | ||
2836 | + case CODEC_ID_RA_288: | ||
2837 | + case CODEC_ID_RA_144: | ||
2838 | + case CODEC_ID_SIPR: | ||
2839 | + { | ||
2840 | + gint leaf_size; | ||
2841 | + gint bitrate; | ||
2842 | + | ||
2843 | + if (gst_structure_get_int (str, "leaf_size", &leaf_size)) | ||
2844 | + context->block_align = leaf_size; | ||
2845 | + if (gst_structure_get_int (str, "bitrate", &bitrate)) | ||
2846 | + context->bit_rate = bitrate; | ||
2847 | + } | ||
2848 | + case CODEC_ID_ALAC: | ||
2849 | + gst_structure_get_int (str, "samplesize", | ||
2850 | + &context->bits_per_coded_sample); | ||
2851 | + break; | ||
2852 | + | ||
2853 | + case CODEC_ID_DVVIDEO: | ||
2854 | + { | ||
2855 | + guint32 fourcc; | ||
2856 | + | ||
2857 | + if (gst_structure_get_fourcc (str, "format", &fourcc)) | ||
2858 | + switch (fourcc) { | ||
2859 | + case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'): | ||
2860 | + context->pix_fmt = PIX_FMT_YUYV422; | ||
2861 | + break; | ||
2862 | + case GST_MAKE_FOURCC ('I', '4', '2', '0'): | ||
2863 | + context->pix_fmt = PIX_FMT_YUV420P; | ||
2864 | + break; | ||
2865 | + case GST_MAKE_FOURCC ('A', '4', '2', '0'): | ||
2866 | + context->pix_fmt = PIX_FMT_YUVA420P; | ||
2867 | + break; | ||
2868 | + case GST_MAKE_FOURCC ('Y', '4', '1', 'B'): | ||
2869 | + context->pix_fmt = PIX_FMT_YUV411P; | ||
2870 | + break; | ||
2871 | + case GST_MAKE_FOURCC ('Y', '4', '2', 'B'): | ||
2872 | + context->pix_fmt = PIX_FMT_YUV422P; | ||
2873 | + break; | ||
2874 | + case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'): | ||
2875 | + context->pix_fmt = PIX_FMT_YUV410P; | ||
2876 | + break; | ||
2877 | + default: | ||
2878 | + GST_WARNING ("couldn't convert fourcc %" GST_FOURCC_FORMAT | ||
2879 | + " to a pixel format", GST_FOURCC_ARGS (fourcc)); | ||
2880 | + break; | ||
2881 | + } | ||
2882 | + break; | ||
2883 | + } | ||
2884 | + case CODEC_ID_H263P: | ||
2885 | + { | ||
2886 | + gboolean val; | ||
2887 | + | ||
2888 | + if (!gst_structure_get_boolean (str, "annex-f", &val) || val) | ||
2889 | + context->flags |= CODEC_FLAG_4MV; | ||
2890 | + else | ||
2891 | + context->flags &= ~CODEC_FLAG_4MV; | ||
2892 | + if ((!gst_structure_get_boolean (str, "annex-i", &val) || val) && | ||
2893 | + (!gst_structure_get_boolean (str, "annex-t", &val) || val)) | ||
2894 | + context->flags |= CODEC_FLAG_AC_PRED; | ||
2895 | + else | ||
2896 | + context->flags &= ~CODEC_FLAG_AC_PRED; | ||
2897 | + if (!gst_structure_get_boolean (str, "annex-j", &val) || val) | ||
2898 | + context->flags |= CODEC_FLAG_LOOP_FILTER; | ||
2899 | + else | ||
2900 | + context->flags &= ~CODEC_FLAG_LOOP_FILTER; | ||
2901 | + break; | ||
2902 | + } | ||
2903 | + case CODEC_ID_ADPCM_G726: | ||
2904 | + { | ||
2905 | + const gchar *layout; | ||
2906 | + | ||
2907 | + if ((layout = gst_structure_get_string (str, "layout"))) { | ||
2908 | + if (!strcmp (layout, "g721")) { | ||
2909 | + context->sample_rate = 8000; | ||
2910 | + context->channels = 1; | ||
2911 | + context->bit_rate = 32000; | ||
2912 | + } | ||
2913 | + } | ||
2914 | + break; | ||
2915 | + } | ||
2916 | + default: | ||
2917 | + break; | ||
2918 | + } | ||
2919 | + | ||
2920 | + if (!gst_caps_is_fixed (caps)) | ||
2921 | + return; | ||
2922 | + | ||
2923 | + /* common properties (width, height, fps) */ | ||
2924 | + switch (codec_type) { | ||
2925 | + case AVMEDIA_TYPE_VIDEO: | ||
2926 | + gst_ffmpeg_caps_to_pixfmt (caps, context, codec_id == CODEC_ID_RAWVIDEO); | ||
2927 | + gst_ffmpeg_get_palette (caps, context); | ||
2928 | + break; | ||
2929 | + case AVMEDIA_TYPE_AUDIO: | ||
2930 | + gst_ffmpeg_caps_to_smpfmt (caps, context, FALSE); | ||
2931 | + break; | ||
2932 | + default: | ||
2933 | + break; | ||
2934 | + } | ||
2935 | + | ||
2936 | + /* fixup of default settings */ | ||
2937 | + switch (codec_id) { | ||
2938 | + case CODEC_ID_QCELP: | ||
2939 | + /* QCELP is always mono, no matter what the caps say */ | ||
2940 | + context->channels = 1; | ||
2941 | + break; | ||
2942 | + default: | ||
2943 | + break; | ||
2944 | + } | ||
2945 | +} | ||
2946 | + | ||
2947 | +/* _formatid_to_caps () is meant for muxers/demuxers, it | ||
2948 | + * transforms a name (ffmpeg way of ID'ing these, why don't | ||
2949 | + * they have unique numerical IDs?) to the corresponding | ||
2950 | + * caps belonging to that mux-format | ||
2951 | + * | ||
2952 | + * Note: we don't need any additional info because the caps | ||
2953 | + * isn't supposed to contain any useful info besides the | ||
2954 | + * media type anyway | ||
2955 | + */ | ||
2956 | + | ||
2957 | +GstCaps * | ||
2958 | +gst_ffmpeg_formatid_to_caps (const gchar * format_name) | ||
2959 | +{ | ||
2960 | + GstCaps *caps = NULL; | ||
2961 | + | ||
2962 | + if (!strcmp (format_name, "mpeg")) { | ||
2963 | + caps = gst_caps_new_simple ("video/mpeg", | ||
2964 | + "systemstream", G_TYPE_BOOLEAN, TRUE, NULL); | ||
2965 | + } else if (!strcmp (format_name, "mpegts")) { | ||
2966 | + caps = gst_caps_new_simple ("video/mpegts", | ||
2967 | + "systemstream", G_TYPE_BOOLEAN, TRUE, NULL); | ||
2968 | + } else if (!strcmp (format_name, "rm")) { | ||
2969 | + caps = gst_caps_new_simple ("application/x-pn-realmedia", | ||
2970 | + "systemstream", G_TYPE_BOOLEAN, TRUE, NULL); | ||
2971 | + } else if (!strcmp (format_name, "asf")) { | ||
2972 | + caps = gst_caps_new_simple ("video/x-ms-asf", NULL); | ||
2973 | + } else if (!strcmp (format_name, "avi")) { | ||
2974 | + caps = gst_caps_new_simple ("video/x-msvideo", NULL); | ||
2975 | + } else if (!strcmp (format_name, "wav")) { | ||
2976 | + caps = gst_caps_new_simple ("audio/x-wav", NULL); | ||
2977 | + } else if (!strcmp (format_name, "ape")) { | ||
2978 | + caps = gst_caps_new_simple ("application/x-ape", NULL); | ||
2979 | + } else if (!strcmp (format_name, "swf")) { | ||
2980 | + caps = gst_caps_new_simple ("application/x-shockwave-flash", NULL); | ||
2981 | + } else if (!strcmp (format_name, "au")) { | ||
2982 | + caps = gst_caps_new_simple ("audio/x-au", NULL); | ||
2983 | + } else if (!strcmp (format_name, "dv")) { | ||
2984 | + caps = gst_caps_new_simple ("video/x-dv", | ||
2985 | + "systemstream", G_TYPE_BOOLEAN, TRUE, NULL); | ||
2986 | + } else if (!strcmp (format_name, "4xm")) { | ||
2987 | + caps = gst_caps_new_simple ("video/x-4xm", NULL); | ||
2988 | + } else if (!strcmp (format_name, "matroska")) { | ||
2989 | + caps = gst_caps_new_simple ("video/x-matroska", NULL); | ||
2990 | + } else if (!strcmp (format_name, "mp3")) { | ||
2991 | + caps = gst_caps_new_simple ("application/x-id3", NULL); | ||
2992 | + } else if (!strcmp (format_name, "flic")) { | ||
2993 | + caps = gst_caps_new_simple ("video/x-fli", NULL); | ||
2994 | + } else if (!strcmp (format_name, "flv")) { | ||
2995 | + caps = gst_caps_new_simple ("video/x-flv", NULL); | ||
2996 | + } else if (!strcmp (format_name, "tta")) { | ||
2997 | + caps = gst_caps_new_simple ("audio/x-ttafile", NULL); | ||
2998 | + } else if (!strcmp (format_name, "aiff")) { | ||
2999 | + caps = gst_caps_new_simple ("audio/x-aiff", NULL); | ||
3000 | + } else if (!strcmp (format_name, "mov_mp4_m4a_3gp_3g2")) { | ||
3001 | + caps = | ||
3002 | + gst_caps_from_string | ||
3003 | + ("application/x-3gp; video/quicktime; audio/x-m4a"); | ||
3004 | + } else if (!strcmp (format_name, "mov")) { | ||
3005 | + caps = gst_caps_from_string ("video/quicktime,variant=(string)apple"); | ||
3006 | + } else if (!strcmp (format_name, "mp4")) { | ||
3007 | + caps = gst_caps_from_string ("video/quicktime,variant=(string)iso"); | ||
3008 | + } else if (!strcmp (format_name, "3gp")) { | ||
3009 | + caps = gst_caps_from_string ("video/quicktime,variant=(string)3gpp"); | ||
3010 | + } else if (!strcmp (format_name, "3g2")) { | ||
3011 | + caps = gst_caps_from_string ("video/quicktime,variant=(string)3g2"); | ||
3012 | + } else if (!strcmp (format_name, "psp")) { | ||
3013 | + caps = gst_caps_from_string ("video/quicktime,variant=(string)psp"); | ||
3014 | + } else if (!strcmp (format_name, "ipod")) { | ||
3015 | + caps = gst_caps_from_string ("video/quicktime,variant=(string)ipod"); | ||
3016 | + } else if (!strcmp (format_name, "aac")) { | ||
3017 | + caps = gst_caps_new_simple ("audio/mpeg", | ||
3018 | + "mpegversion", G_TYPE_INT, 4, NULL); | ||
3019 | + } else if (!strcmp (format_name, "gif")) { | ||
3020 | + caps = gst_caps_from_string ("image/gif"); | ||
3021 | + } else if (!strcmp (format_name, "ogg")) { | ||
3022 | + caps = gst_caps_from_string ("application/ogg"); | ||
3023 | + } else if (!strcmp (format_name, "mxf") || !strcmp (format_name, "mxf_d10")) { | ||
3024 | + caps = gst_caps_from_string ("application/mxf"); | ||
3025 | + } else if (!strcmp (format_name, "gxf")) { | ||
3026 | + caps = gst_caps_from_string ("application/gxf"); | ||
3027 | + } else if (!strcmp (format_name, "yuv4mpegpipe")) { | ||
3028 | + caps = gst_caps_new_simple ("application/x-yuv4mpeg", | ||
3029 | + "y4mversion", G_TYPE_INT, 2, NULL); | ||
3030 | + } else if (!strcmp (format_name, "mpc")) { | ||
3031 | + caps = gst_caps_from_string ("audio/x-musepack, streamversion = (int) 7"); | ||
3032 | + } else if (!strcmp (format_name, "vqf")) { | ||
3033 | + caps = gst_caps_from_string ("audio/x-vqf"); | ||
3034 | + } else if (!strcmp (format_name, "nsv")) { | ||
3035 | + caps = gst_caps_from_string ("video/x-nsv"); | ||
3036 | + } else if (!strcmp (format_name, "amr")) { | ||
3037 | + caps = gst_caps_from_string ("audio/x-amr-nb-sh"); | ||
3038 | + } else if (!strcmp (format_name, "webm")) { | ||
3039 | + caps = gst_caps_from_string ("video/webm"); | ||
3040 | + } else { | ||
3041 | + gchar *name; | ||
3042 | + | ||
3043 | + GST_LOG ("Could not create stream format caps for %s", format_name); | ||
3044 | + name = g_strdup_printf ("application/x-gst_ff-%s", format_name); | ||
3045 | + caps = gst_caps_new_simple (name, NULL); | ||
3046 | + g_free (name); | ||
3047 | + } | ||
3048 | + | ||
3049 | + return caps; | ||
3050 | +} | ||
3051 | + | ||
3052 | +gboolean | ||
3053 | +gst_ffmpeg_formatid_get_codecids (const gchar * format_name, | ||
3054 | + enum CodecID ** video_codec_list, enum CodecID ** audio_codec_list, | ||
3055 | + AVOutputFormat * plugin) | ||
3056 | +{ | ||
3057 | + static enum CodecID tmp_vlist[] = { | ||
3058 | + CODEC_ID_NONE, | ||
3059 | + CODEC_ID_NONE | ||
3060 | + }; | ||
3061 | + static enum CodecID tmp_alist[] = { | ||
3062 | + CODEC_ID_NONE, | ||
3063 | + CODEC_ID_NONE | ||
3064 | + }; | ||
3065 | + | ||
3066 | + GST_LOG ("format_name : %s", format_name); | ||
3067 | + | ||
3068 | + if (!strcmp (format_name, "mp4")) { | ||
3069 | + static enum CodecID mp4_video_list[] = { | ||
3070 | + CODEC_ID_MPEG4, CODEC_ID_H264, | ||
3071 | + CODEC_ID_MJPEG, | ||
3072 | + CODEC_ID_NONE | ||
3073 | + }; | ||
3074 | + static enum CodecID mp4_audio_list[] = { | ||
3075 | + CODEC_ID_AAC, CODEC_ID_MP3, | ||
3076 | + CODEC_ID_NONE | ||
3077 | + }; | ||
3078 | + | ||
3079 | + *video_codec_list = mp4_video_list; | ||
3080 | + *audio_codec_list = mp4_audio_list; | ||
3081 | + } else if (!strcmp (format_name, "mpeg")) { | ||
3082 | + static enum CodecID mpeg_video_list[] = { CODEC_ID_MPEG1VIDEO, | ||
3083 | + CODEC_ID_MPEG2VIDEO, | ||
3084 | + CODEC_ID_H264, | ||
3085 | + CODEC_ID_NONE | ||
3086 | + }; | ||
3087 | + static enum CodecID mpeg_audio_list[] = { CODEC_ID_MP1, | ||
3088 | + CODEC_ID_MP2, | ||
3089 | + CODEC_ID_MP3, | ||
3090 | + CODEC_ID_NONE | ||
3091 | + }; | ||
3092 | + | ||
3093 | + *video_codec_list = mpeg_video_list; | ||
3094 | + *audio_codec_list = mpeg_audio_list; | ||
3095 | + } else if (!strcmp (format_name, "dvd")) { | ||
3096 | + static enum CodecID mpeg_video_list[] = { CODEC_ID_MPEG2VIDEO, | ||
3097 | + CODEC_ID_NONE | ||
3098 | + }; | ||
3099 | + static enum CodecID mpeg_audio_list[] = { CODEC_ID_MP2, | ||
3100 | + CODEC_ID_AC3, | ||
3101 | + CODEC_ID_DTS, | ||
3102 | + CODEC_ID_PCM_S16BE, | ||
3103 | + CODEC_ID_NONE | ||
3104 | + }; | ||
3105 | + | ||
3106 | + *video_codec_list = mpeg_video_list; | ||
3107 | + *audio_codec_list = mpeg_audio_list; | ||
3108 | + } else if (!strcmp (format_name, "mpegts")) { | ||
3109 | + static enum CodecID mpegts_video_list[] = { CODEC_ID_MPEG1VIDEO, | ||
3110 | + CODEC_ID_MPEG2VIDEO, | ||
3111 | + CODEC_ID_H264, | ||
3112 | + CODEC_ID_NONE | ||
3113 | + }; | ||
3114 | + static enum CodecID mpegts_audio_list[] = { CODEC_ID_MP2, | ||
3115 | + CODEC_ID_MP3, | ||
3116 | + CODEC_ID_AC3, | ||
3117 | + CODEC_ID_DTS, | ||
3118 | + CODEC_ID_AAC, | ||
3119 | + CODEC_ID_NONE | ||
3120 | + }; | ||
3121 | + | ||
3122 | + *video_codec_list = mpegts_video_list; | ||
3123 | + *audio_codec_list = mpegts_audio_list; | ||
3124 | + } else if (!strcmp (format_name, "vob")) { | ||
3125 | + static enum CodecID vob_video_list[] = | ||
3126 | + { CODEC_ID_MPEG2VIDEO, CODEC_ID_NONE }; | ||
3127 | + static enum CodecID vob_audio_list[] = { CODEC_ID_MP2, CODEC_ID_AC3, | ||
3128 | + CODEC_ID_DTS, CODEC_ID_NONE | ||
3129 | + }; | ||
3130 | + | ||
3131 | + *video_codec_list = vob_video_list; | ||
3132 | + *audio_codec_list = vob_audio_list; | ||
3133 | + } else if (!strcmp (format_name, "flv")) { | ||
3134 | + static enum CodecID flv_video_list[] = { CODEC_ID_FLV1, CODEC_ID_NONE }; | ||
3135 | + static enum CodecID flv_audio_list[] = { CODEC_ID_MP3, CODEC_ID_NONE }; | ||
3136 | + | ||
3137 | + *video_codec_list = flv_video_list; | ||
3138 | + *audio_codec_list = flv_audio_list; | ||
3139 | + } else if (!strcmp (format_name, "asf")) { | ||
3140 | + static enum CodecID asf_video_list[] = | ||
3141 | + { CODEC_ID_WMV1, CODEC_ID_WMV2, CODEC_ID_MSMPEG4V3, CODEC_ID_NONE }; | ||
3142 | + static enum CodecID asf_audio_list[] = | ||
3143 | + { CODEC_ID_WMAV1, CODEC_ID_WMAV2, CODEC_ID_MP3, CODEC_ID_NONE }; | ||
3144 | + | ||
3145 | + *video_codec_list = asf_video_list; | ||
3146 | + *audio_codec_list = asf_audio_list; | ||
3147 | + } else if (!strcmp (format_name, "dv")) { | ||
3148 | + static enum CodecID dv_video_list[] = { CODEC_ID_DVVIDEO, CODEC_ID_NONE }; | ||
3149 | + static enum CodecID dv_audio_list[] = { CODEC_ID_PCM_S16LE, CODEC_ID_NONE }; | ||
3150 | + | ||
3151 | + *video_codec_list = dv_video_list; | ||
3152 | + *audio_codec_list = dv_audio_list; | ||
3153 | + } else if (!strcmp (format_name, "mov")) { | ||
3154 | + static enum CodecID mov_video_list[] = { | ||
3155 | + CODEC_ID_SVQ1, CODEC_ID_SVQ3, CODEC_ID_MPEG4, | ||
3156 | + CODEC_ID_H263, CODEC_ID_H263P, | ||
3157 | + CODEC_ID_H264, CODEC_ID_DVVIDEO, | ||
3158 | + CODEC_ID_MJPEG, | ||
3159 | + CODEC_ID_NONE | ||
3160 | + }; | ||
3161 | + static enum CodecID mov_audio_list[] = { | ||
3162 | + CODEC_ID_PCM_MULAW, CODEC_ID_PCM_ALAW, CODEC_ID_ADPCM_IMA_QT, | ||
3163 | + CODEC_ID_MACE3, CODEC_ID_MACE6, CODEC_ID_AAC, | ||
3164 | + CODEC_ID_AMR_NB, CODEC_ID_AMR_WB, | ||
3165 | + CODEC_ID_PCM_S16BE, CODEC_ID_PCM_S16LE, | ||
3166 | + CODEC_ID_MP3, CODEC_ID_NONE | ||
3167 | + }; | ||
3168 | + | ||
3169 | + *video_codec_list = mov_video_list; | ||
3170 | + *audio_codec_list = mov_audio_list; | ||
3171 | + } else if ((!strcmp (format_name, "3gp") || !strcmp (format_name, "3g2"))) { | ||
3172 | + static enum CodecID tgp_video_list[] = { | ||
3173 | + CODEC_ID_MPEG4, CODEC_ID_H263, CODEC_ID_H263P, CODEC_ID_H264, | ||
3174 | + CODEC_ID_NONE | ||
3175 | + }; | ||
3176 | + static enum CodecID tgp_audio_list[] = { | ||
3177 | + CODEC_ID_AMR_NB, CODEC_ID_AMR_WB, | ||
3178 | + CODEC_ID_AAC, | ||
3179 | + CODEC_ID_NONE | ||
3180 | + }; | ||
3181 | + | ||
3182 | + *video_codec_list = tgp_video_list; | ||
3183 | + *audio_codec_list = tgp_audio_list; | ||
3184 | + } else if (!strcmp (format_name, "mmf")) { | ||
3185 | + static enum CodecID mmf_audio_list[] = { | ||
3186 | + CODEC_ID_ADPCM_YAMAHA, CODEC_ID_NONE | ||
3187 | + }; | ||
3188 | + *video_codec_list = NULL; | ||
3189 | + *audio_codec_list = mmf_audio_list; | ||
3190 | + } else if (!strcmp (format_name, "amr")) { | ||
3191 | + static enum CodecID amr_audio_list[] = { | ||
3192 | + CODEC_ID_AMR_NB, CODEC_ID_AMR_WB, | ||
3193 | + CODEC_ID_NONE | ||
3194 | + }; | ||
3195 | + *video_codec_list = NULL; | ||
3196 | + *audio_codec_list = amr_audio_list; | ||
3197 | + } else if (!strcmp (format_name, "gif")) { | ||
3198 | + static enum CodecID gif_image_list[] = { | ||
3199 | + CODEC_ID_RAWVIDEO, CODEC_ID_NONE | ||
3200 | + }; | ||
3201 | + *video_codec_list = gif_image_list; | ||
3202 | + *audio_codec_list = NULL; | ||
3203 | + } else if ((plugin->audio_codec != CODEC_ID_NONE) || | ||
3204 | + (plugin->video_codec != CODEC_ID_NONE)) { | ||
3205 | + tmp_vlist[0] = plugin->video_codec; | ||
3206 | + tmp_alist[0] = plugin->audio_codec; | ||
3207 | + | ||
3208 | + *video_codec_list = tmp_vlist; | ||
3209 | + *audio_codec_list = tmp_alist; | ||
3210 | + } else { | ||
3211 | + GST_LOG ("Format %s not found", format_name); | ||
3212 | + return FALSE; | ||
3213 | + } | ||
3214 | + | ||
3215 | + return TRUE; | ||
3216 | +} | ||
3217 | + | ||
3218 | +/* Convert a GstCaps to a FFMPEG codec ID. Size et all | ||
3219 | + * are omitted, that can be queried by the user itself, | ||
3220 | + * we're not eating the GstCaps or anything | ||
3221 | + * A pointer to an allocated context is also needed for | ||
3222 | + * optional extra info | ||
3223 | + */ | ||
3224 | + | ||
3225 | +enum CodecID | ||
3226 | +gst_ffmpeg_caps_to_codecid (const GstCaps * caps, AVCodecContext * context) | ||
3227 | +{ | ||
3228 | + enum CodecID id = CODEC_ID_NONE; | ||
3229 | + const gchar *mimetype; | ||
3230 | + const GstStructure *structure; | ||
3231 | + gboolean video = FALSE, audio = FALSE; /* we want to be sure! */ | ||
3232 | + | ||
3233 | + g_return_val_if_fail (caps != NULL, CODEC_ID_NONE); | ||
3234 | + g_return_val_if_fail (gst_caps_get_size (caps) == 1, CODEC_ID_NONE); | ||
3235 | + structure = gst_caps_get_structure (caps, 0); | ||
3236 | + | ||
3237 | + mimetype = gst_structure_get_name (structure); | ||
3238 | + | ||
3239 | + if (!strcmp (mimetype, "video/x-raw-rgb") || | ||
3240 | + !strcmp (mimetype, "video/x-raw-yuv")) { | ||
3241 | + id = CODEC_ID_RAWVIDEO; | ||
3242 | + video = TRUE; | ||
3243 | + } else if (!strcmp (mimetype, "audio/x-raw-int")) { | ||
3244 | + gint depth, width, endianness; | ||
3245 | + gboolean signedness; | ||
3246 | + | ||
3247 | + if (gst_structure_get_int (structure, "endianness", &endianness) && | ||
3248 | + gst_structure_get_boolean (structure, "signed", &signedness) && | ||
3249 | + gst_structure_get_int (structure, "width", &width) && | ||
3250 | + gst_structure_get_int (structure, "depth", &depth) && depth == width) { | ||
3251 | + switch (depth) { | ||
3252 | + case 8: | ||
3253 | + if (signedness) { | ||
3254 | + id = CODEC_ID_PCM_S8; | ||
3255 | + } else { | ||
3256 | + id = CODEC_ID_PCM_U8; | ||
3257 | + } | ||
3258 | + break; | ||
3259 | + case 16: | ||
3260 | + switch (endianness) { | ||
3261 | + case G_BIG_ENDIAN: | ||
3262 | + if (signedness) { | ||
3263 | + id = CODEC_ID_PCM_S16BE; | ||
3264 | + } else { | ||
3265 | + id = CODEC_ID_PCM_U16BE; | ||
3266 | + } | ||
3267 | + break; | ||
3268 | + case G_LITTLE_ENDIAN: | ||
3269 | + if (signedness) { | ||
3270 | + id = CODEC_ID_PCM_S16LE; | ||
3271 | + } else { | ||
3272 | + id = CODEC_ID_PCM_U16LE; | ||
3273 | + } | ||
3274 | + break; | ||
3275 | + } | ||
3276 | + break; | ||
3277 | + } | ||
3278 | + if (id != CODEC_ID_NONE) | ||
3279 | + audio = TRUE; | ||
3280 | + } | ||
3281 | + } else if (!strcmp (mimetype, "audio/x-mulaw")) { | ||
3282 | + id = CODEC_ID_PCM_MULAW; | ||
3283 | + audio = TRUE; | ||
3284 | + } else if (!strcmp (mimetype, "audio/x-alaw")) { | ||
3285 | + id = CODEC_ID_PCM_ALAW; | ||
3286 | + audio = TRUE; | ||
3287 | + } else if (!strcmp (mimetype, "video/x-dv")) { | ||
3288 | + gboolean sys_strm; | ||
3289 | + | ||
3290 | + if (gst_structure_get_boolean (structure, "systemstream", &sys_strm) && | ||
3291 | + !sys_strm) { | ||
3292 | + id = CODEC_ID_DVVIDEO; | ||
3293 | + video = TRUE; | ||
3294 | + } | ||
3295 | + } else if (!strcmp (mimetype, "audio/x-dv")) { /* ??? */ | ||
3296 | + id = CODEC_ID_DVAUDIO; | ||
3297 | + audio = TRUE; | ||
3298 | + } else if (!strcmp (mimetype, "video/x-h263")) { | ||
3299 | + const gchar *h263version = | ||
3300 | + gst_structure_get_string (structure, "h263version"); | ||
3301 | + if (h263version && !strcmp (h263version, "h263p")) | ||
3302 | + id = CODEC_ID_H263P; | ||
3303 | + else | ||
3304 | + id = CODEC_ID_H263; | ||
3305 | + video = TRUE; | ||
3306 | + } else if (!strcmp (mimetype, "video/x-intel-h263")) { | ||
3307 | + id = CODEC_ID_H263I; | ||
3308 | + video = TRUE; | ||
3309 | + } else if (!strcmp (mimetype, "video/x-h261")) { | ||
3310 | + id = CODEC_ID_H261; | ||
3311 | + video = TRUE; | ||
3312 | + } else if (!strcmp (mimetype, "video/mpeg")) { | ||
3313 | + gboolean sys_strm; | ||
3314 | + gint mpegversion; | ||
3315 | + | ||
3316 | + if (gst_structure_get_boolean (structure, "systemstream", &sys_strm) && | ||
3317 | + gst_structure_get_int (structure, "mpegversion", &mpegversion) && | ||
3318 | + !sys_strm) { | ||
3319 | + switch (mpegversion) { | ||
3320 | + case 1: | ||
3321 | + id = CODEC_ID_MPEG1VIDEO; | ||
3322 | + break; | ||
3323 | + case 2: | ||
3324 | + id = CODEC_ID_MPEG2VIDEO; | ||
3325 | + break; | ||
3326 | + case 4: | ||
3327 | + id = CODEC_ID_MPEG4; | ||
3328 | + break; | ||
3329 | + } | ||
3330 | + } | ||
3331 | + if (id != CODEC_ID_NONE) | ||
3332 | + video = TRUE; | ||
3333 | + } else if (!strcmp (mimetype, "image/jpeg")) { | ||
3334 | + id = CODEC_ID_MJPEG; /* A... B... */ | ||
3335 | + video = TRUE; | ||
3336 | + } else if (!strcmp (mimetype, "video/x-jpeg-b")) { | ||
3337 | + id = CODEC_ID_MJPEGB; | ||
3338 | + video = TRUE; | ||
3339 | + } else if (!strcmp (mimetype, "video/x-wmv")) { | ||
3340 | + gint wmvversion = 0; | ||
3341 | + | ||
3342 | + if (gst_structure_get_int (structure, "wmvversion", &wmvversion)) { | ||
3343 | + switch (wmvversion) { | ||
3344 | + case 1: | ||
3345 | + id = CODEC_ID_WMV1; | ||
3346 | + break; | ||
3347 | + case 2: | ||
3348 | + id = CODEC_ID_WMV2; | ||
3349 | + break; | ||
3350 | + case 3: | ||
3351 | + { | ||
3352 | + guint32 fourcc; | ||
3353 | + | ||
3354 | + /* WMV3 unless the fourcc exists and says otherwise */ | ||
3355 | + id = CODEC_ID_WMV3; | ||
3356 | + | ||
3357 | + if (gst_structure_get_fourcc (structure, "format", &fourcc)) { | ||
3358 | + if ((fourcc == GST_MAKE_FOURCC ('W', 'V', 'C', '1')) || | ||
3359 | + (fourcc == GST_MAKE_FOURCC ('W', 'M', 'V', 'A'))) { | ||
3360 | + id = CODEC_ID_VC1; | ||
3361 | + } | ||
3362 | + } | ||
3363 | + } | ||
3364 | + break; | ||
3365 | + } | ||
3366 | + } | ||
3367 | + if (id != CODEC_ID_NONE) | ||
3368 | + video = TRUE; | ||
3369 | + } else if (!strcmp (mimetype, "audio/x-vorbis")) { | ||
3370 | + id = CODEC_ID_VORBIS; | ||
3371 | + audio = TRUE; | ||
3372 | + } else if (!strcmp (mimetype, "audio/x-qdm2")) { | ||
3373 | + id = CODEC_ID_QDM2; | ||
3374 | + audio = TRUE; | ||
3375 | + } else if (!strcmp (mimetype, "audio/mpeg")) { | ||
3376 | + gint layer = 0; | ||
3377 | + gint mpegversion = 0; | ||
3378 | + | ||
3379 | + if (gst_structure_get_int (structure, "mpegversion", &mpegversion)) { | ||
3380 | + switch (mpegversion) { | ||
3381 | + case 2: /* ffmpeg uses faad for both... */ | ||
3382 | + case 4: | ||
3383 | + id = CODEC_ID_AAC; | ||
3384 | + break; | ||
3385 | + case 1: | ||
3386 | + if (gst_structure_get_int (structure, "layer", &layer)) { | ||
3387 | + switch (layer) { | ||
3388 | + case 1: | ||
3389 | + id = CODEC_ID_MP1; | ||
3390 | + break; | ||
3391 | + case 2: | ||
3392 | + id = CODEC_ID_MP2; | ||
3393 | + break; | ||
3394 | + case 3: | ||
3395 | + id = CODEC_ID_MP3; | ||
3396 | + break; | ||
3397 | + } | ||
3398 | + } | ||
3399 | + } | ||
3400 | + } | ||
3401 | + if (id != CODEC_ID_NONE) | ||
3402 | + audio = TRUE; | ||
3403 | + } else if (!strcmp (mimetype, "audio/x-musepack")) { | ||
3404 | + gint streamversion = -1; | ||
3405 | + | ||
3406 | + if (gst_structure_get_int (structure, "streamversion", &streamversion)) { | ||
3407 | + if (streamversion == 7) | ||
3408 | + id = CODEC_ID_MUSEPACK7; | ||
3409 | + } else { | ||
3410 | + id = CODEC_ID_MUSEPACK7; | ||
3411 | + } | ||
3412 | + } else if (!strcmp (mimetype, "audio/x-wma")) { | ||
3413 | + gint wmaversion = 0; | ||
3414 | + | ||
3415 | + if (gst_structure_get_int (structure, "wmaversion", &wmaversion)) { | ||
3416 | + switch (wmaversion) { | ||
3417 | + case 1: | ||
3418 | + id = CODEC_ID_WMAV1; | ||
3419 | + break; | ||
3420 | + case 2: | ||
3421 | + id = CODEC_ID_WMAV2; | ||
3422 | + break; | ||
3423 | + case 3: | ||
3424 | + id = CODEC_ID_WMAPRO; | ||
3425 | + break; | ||
3426 | + } | ||
3427 | + } | ||
3428 | + if (id != CODEC_ID_NONE) | ||
3429 | + audio = TRUE; | ||
3430 | + } else if (!strcmp (mimetype, "audio/x-wms")) { | ||
3431 | + id = CODEC_ID_WMAVOICE; | ||
3432 | + audio = TRUE; | ||
3433 | + } else if (!strcmp (mimetype, "audio/x-ac3")) { | ||
3434 | + id = CODEC_ID_AC3; | ||
3435 | + audio = TRUE; | ||
3436 | + } else if (!strcmp (mimetype, "audio/x-eac3")) { | ||
3437 | + id = CODEC_ID_EAC3; | ||
3438 | + audio = TRUE; | ||
3439 | + } else if (!strcmp (mimetype, "audio/x-vnd.sony.atrac3") || | ||
3440 | + !strcmp (mimetype, "audio/atrac3")) { | ||
3441 | + id = CODEC_ID_ATRAC3; | ||
3442 | + audio = TRUE; | ||
3443 | + } else if (!strcmp (mimetype, "audio/x-dts")) { | ||
3444 | + id = CODEC_ID_DTS; | ||
3445 | + audio = TRUE; | ||
3446 | + } else if (!strcmp (mimetype, "application/x-ape")) { | ||
3447 | + id = CODEC_ID_APE; | ||
3448 | + audio = TRUE; | ||
3449 | + } else if (!strcmp (mimetype, "video/x-msmpeg")) { | ||
3450 | + gint msmpegversion = 0; | ||
3451 | + | ||
3452 | + if (gst_structure_get_int (structure, "msmpegversion", &msmpegversion)) { | ||
3453 | + switch (msmpegversion) { | ||
3454 | + case 41: | ||
3455 | + id = CODEC_ID_MSMPEG4V1; | ||
3456 | + break; | ||
3457 | + case 42: | ||
3458 | + id = CODEC_ID_MSMPEG4V2; | ||
3459 | + break; | ||
3460 | + case 43: | ||
3461 | + id = CODEC_ID_MSMPEG4V3; | ||
3462 | + break; | ||
3463 | + } | ||
3464 | + } | ||
3465 | + if (id != CODEC_ID_NONE) | ||
3466 | + video = TRUE; | ||
3467 | + } else if (!strcmp (mimetype, "video/x-svq")) { | ||
3468 | + gint svqversion = 0; | ||
3469 | + | ||
3470 | + if (gst_structure_get_int (structure, "svqversion", &svqversion)) { | ||
3471 | + switch (svqversion) { | ||
3472 | + case 1: | ||
3473 | + id = CODEC_ID_SVQ1; | ||
3474 | + break; | ||
3475 | + case 3: | ||
3476 | + id = CODEC_ID_SVQ3; | ||
3477 | + break; | ||
3478 | + } | ||
3479 | + } | ||
3480 | + if (id != CODEC_ID_NONE) | ||
3481 | + video = TRUE; | ||
3482 | + } else if (!strcmp (mimetype, "video/x-huffyuv")) { | ||
3483 | + id = CODEC_ID_HUFFYUV; | ||
3484 | + video = TRUE; | ||
3485 | + } else if (!strcmp (mimetype, "audio/x-mace")) { | ||
3486 | + gint maceversion = 0; | ||
3487 | + | ||
3488 | + if (gst_structure_get_int (structure, "maceversion", &maceversion)) { | ||
3489 | + switch (maceversion) { | ||
3490 | + case 3: | ||
3491 | + id = CODEC_ID_MACE3; | ||
3492 | + break; | ||
3493 | + case 6: | ||
3494 | + id = CODEC_ID_MACE6; | ||
3495 | + break; | ||
3496 | + } | ||
3497 | + } | ||
3498 | + if (id != CODEC_ID_NONE) | ||
3499 | + audio = TRUE; | ||
3500 | + } else if (!strcmp (mimetype, "video/x-theora")) { | ||
3501 | + id = CODEC_ID_THEORA; | ||
3502 | + video = TRUE; | ||
3503 | + } else if (!strcmp (mimetype, "video/x-vp3")) { | ||
3504 | + id = CODEC_ID_VP3; | ||
3505 | + video = TRUE; | ||
3506 | + } else if (!strcmp (mimetype, "video/x-vp5")) { | ||
3507 | + id = CODEC_ID_VP5; | ||
3508 | + video = TRUE; | ||
3509 | + } else if (!strcmp (mimetype, "video/x-vp6")) { | ||
3510 | + id = CODEC_ID_VP6; | ||
3511 | + video = TRUE; | ||
3512 | + } else if (!strcmp (mimetype, "video/x-vp6-flash")) { | ||
3513 | + id = CODEC_ID_VP6F; | ||
3514 | + video = TRUE; | ||
3515 | + } else if (!strcmp (mimetype, "video/x-vp6-alpha")) { | ||
3516 | + id = CODEC_ID_VP6A; | ||
3517 | + video = TRUE; | ||
3518 | + } else if (!strcmp (mimetype, "video/x-vp8")) { | ||
3519 | + id = CODEC_ID_VP8; | ||
3520 | + video = TRUE; | ||
3521 | + } else if (!strcmp (mimetype, "video/x-flash-screen")) { | ||
3522 | + id = CODEC_ID_FLASHSV; | ||
3523 | + video = TRUE; | ||
3524 | + } else if (!strcmp (mimetype, "video/x-indeo")) { | ||
3525 | + gint indeoversion = 0; | ||
3526 | + | ||
3527 | + if (gst_structure_get_int (structure, "indeoversion", &indeoversion)) { | ||
3528 | + switch (indeoversion) { | ||
3529 | + case 5: | ||
3530 | + id = CODEC_ID_INDEO5; | ||
3531 | + break; | ||
3532 | + case 3: | ||
3533 | + id = CODEC_ID_INDEO3; | ||
3534 | + break; | ||
3535 | + case 2: | ||
3536 | + id = CODEC_ID_INDEO2; | ||
3537 | + break; | ||
3538 | + } | ||
3539 | + if (id != CODEC_ID_NONE) | ||
3540 | + video = TRUE; | ||
3541 | + } | ||
3542 | + } else if (!strcmp (mimetype, "video/x-divx")) { | ||
3543 | + gint divxversion = 0; | ||
3544 | + | ||
3545 | + if (gst_structure_get_int (structure, "divxversion", &divxversion)) { | ||
3546 | + switch (divxversion) { | ||
3547 | + case 3: | ||
3548 | + id = CODEC_ID_MSMPEG4V3; | ||
3549 | + break; | ||
3550 | + case 4: | ||
3551 | + case 5: | ||
3552 | + id = CODEC_ID_MPEG4; | ||
3553 | + break; | ||
3554 | + } | ||
3555 | + } | ||
3556 | + if (id != CODEC_ID_NONE) | ||
3557 | + video = TRUE; | ||
3558 | + } else if (!strcmp (mimetype, "video/x-3ivx")) { | ||
3559 | + id = CODEC_ID_MPEG4; | ||
3560 | + video = TRUE; | ||
3561 | + } else if (!strcmp (mimetype, "video/x-xvid")) { | ||
3562 | + id = CODEC_ID_MPEG4; | ||
3563 | + video = TRUE; | ||
3564 | + } else if (!strcmp (mimetype, "video/x-ffv")) { | ||
3565 | + gint ffvversion = 0; | ||
3566 | + | ||
3567 | + if (gst_structure_get_int (structure, "ffvversion", &ffvversion) && | ||
3568 | + ffvversion == 1) { | ||
3569 | + id = CODEC_ID_FFV1; | ||
3570 | + video = TRUE; | ||
3571 | + } | ||
3572 | + } else if (!strcmp (mimetype, "audio/x-adpcm")) { | ||
3573 | + const gchar *layout; | ||
3574 | + | ||
3575 | + layout = gst_structure_get_string (structure, "layout"); | ||
3576 | + if (layout == NULL) { | ||
3577 | + /* break */ | ||
3578 | + } else if (!strcmp (layout, "quicktime")) { | ||
3579 | + id = CODEC_ID_ADPCM_IMA_QT; | ||
3580 | + } else if (!strcmp (layout, "microsoft")) { | ||
3581 | + id = CODEC_ID_ADPCM_MS; | ||
3582 | + } else if (!strcmp (layout, "dvi")) { | ||
3583 | + id = CODEC_ID_ADPCM_IMA_WAV; | ||
3584 | + } else if (!strcmp (layout, "4xm")) { | ||
3585 | + id = CODEC_ID_ADPCM_4XM; | ||
3586 | + } else if (!strcmp (layout, "smjpeg")) { | ||
3587 | + id = CODEC_ID_ADPCM_IMA_SMJPEG; | ||
3588 | + } else if (!strcmp (layout, "dk3")) { | ||
3589 | + id = CODEC_ID_ADPCM_IMA_DK3; | ||
3590 | + } else if (!strcmp (layout, "dk4")) { | ||
3591 | + id = CODEC_ID_ADPCM_IMA_DK4; | ||
3592 | + } else if (!strcmp (layout, "westwood")) { | ||
3593 | + id = CODEC_ID_ADPCM_IMA_WS; | ||
3594 | + } else if (!strcmp (layout, "iss")) { | ||
3595 | + id = CODEC_ID_ADPCM_IMA_ISS; | ||
3596 | + } else if (!strcmp (layout, "xa")) { | ||
3597 | + id = CODEC_ID_ADPCM_XA; | ||
3598 | + } else if (!strcmp (layout, "adx")) { | ||
3599 | + id = CODEC_ID_ADPCM_ADX; | ||
3600 | + } else if (!strcmp (layout, "ea")) { | ||
3601 | + id = CODEC_ID_ADPCM_EA; | ||
3602 | + } else if (!strcmp (layout, "g726")) { | ||
3603 | + id = CODEC_ID_ADPCM_G726; | ||
3604 | + } else if (!strcmp (layout, "g721")) { | ||
3605 | + id = CODEC_ID_ADPCM_G726; | ||
3606 | + } else if (!strcmp (layout, "ct")) { | ||
3607 | + id = CODEC_ID_ADPCM_CT; | ||
3608 | + } else if (!strcmp (layout, "swf")) { | ||
3609 | + id = CODEC_ID_ADPCM_SWF; | ||
3610 | + } else if (!strcmp (layout, "yamaha")) { | ||
3611 | + id = CODEC_ID_ADPCM_YAMAHA; | ||
3612 | + } else if (!strcmp (layout, "sbpro2")) { | ||
3613 | + id = CODEC_ID_ADPCM_SBPRO_2; | ||
3614 | + } else if (!strcmp (layout, "sbpro3")) { | ||
3615 | + id = CODEC_ID_ADPCM_SBPRO_3; | ||
3616 | + } else if (!strcmp (layout, "sbpro4")) { | ||
3617 | + id = CODEC_ID_ADPCM_SBPRO_4; | ||
3618 | + } | ||
3619 | + if (id != CODEC_ID_NONE) | ||
3620 | + audio = TRUE; | ||
3621 | + } else if (!strcmp (mimetype, "video/x-4xm")) { | ||
3622 | + id = CODEC_ID_4XM; | ||
3623 | + video = TRUE; | ||
3624 | + } else if (!strcmp (mimetype, "audio/x-dpcm")) { | ||
3625 | + const gchar *layout; | ||
3626 | + | ||
3627 | + layout = gst_structure_get_string (structure, "layout"); | ||
3628 | + if (!layout) { | ||
3629 | + /* .. */ | ||
3630 | + } else if (!strcmp (layout, "roq")) { | ||
3631 | + id = CODEC_ID_ROQ_DPCM; | ||
3632 | + } else if (!strcmp (layout, "interplay")) { | ||
3633 | + id = CODEC_ID_INTERPLAY_DPCM; | ||
3634 | + } else if (!strcmp (layout, "xan")) { | ||
3635 | + id = CODEC_ID_XAN_DPCM; | ||
3636 | + } else if (!strcmp (layout, "sol")) { | ||
3637 | + id = CODEC_ID_SOL_DPCM; | ||
3638 | + } | ||
3639 | + if (id != CODEC_ID_NONE) | ||
3640 | + audio = TRUE; | ||
3641 | + } else if (!strcmp (mimetype, "audio/x-flac")) { | ||
3642 | + id = CODEC_ID_FLAC; | ||
3643 | + audio = TRUE; | ||
3644 | + } else if (!strcmp (mimetype, "audio/x-shorten")) { | ||
3645 | + id = CODEC_ID_SHORTEN; | ||
3646 | + audio = TRUE; | ||
3647 | + } else if (!strcmp (mimetype, "audio/x-alac")) { | ||
3648 | + id = CODEC_ID_ALAC; | ||
3649 | + audio = TRUE; | ||
3650 | + } else if (!strcmp (mimetype, "video/x-cinepak")) { | ||
3651 | + id = CODEC_ID_CINEPAK; | ||
3652 | + video = TRUE; | ||
3653 | + } else if (!strcmp (mimetype, "video/x-pn-realvideo")) { | ||
3654 | + gint rmversion; | ||
3655 | + | ||
3656 | + if (gst_structure_get_int (structure, "rmversion", &rmversion)) { | ||
3657 | + switch (rmversion) { | ||
3658 | + case 1: | ||
3659 | + id = CODEC_ID_RV10; | ||
3660 | + break; | ||
3661 | + case 2: | ||
3662 | + id = CODEC_ID_RV20; | ||
3663 | + break; | ||
3664 | + case 3: | ||
3665 | + id = CODEC_ID_RV30; | ||
3666 | + break; | ||
3667 | + case 4: | ||
3668 | + id = CODEC_ID_RV40; | ||
3669 | + break; | ||
3670 | + } | ||
3671 | + } | ||
3672 | + if (id != CODEC_ID_NONE) | ||
3673 | + video = TRUE; | ||
3674 | + } else if (!strcmp (mimetype, "audio/x-sipro")) { | ||
3675 | + id = CODEC_ID_SIPR; | ||
3676 | + audio = TRUE; | ||
3677 | + } else if (!strcmp (mimetype, "audio/x-pn-realaudio")) { | ||
3678 | + gint raversion; | ||
3679 | + | ||
3680 | + if (gst_structure_get_int (structure, "raversion", &raversion)) { | ||
3681 | + switch (raversion) { | ||
3682 | + case 1: | ||
3683 | + id = CODEC_ID_RA_144; | ||
3684 | + break; | ||
3685 | + case 2: | ||
3686 | + id = CODEC_ID_RA_288; | ||
3687 | + break; | ||
3688 | + case 8: | ||
3689 | + id = CODEC_ID_COOK; | ||
3690 | + break; | ||
3691 | + } | ||
3692 | + } | ||
3693 | + if (id != CODEC_ID_NONE) | ||
3694 | + audio = TRUE; | ||
3695 | + } else if (!strcmp (mimetype, "video/x-rle")) { | ||
3696 | + const gchar *layout; | ||
3697 | + | ||
3698 | + if ((layout = gst_structure_get_string (structure, "layout"))) { | ||
3699 | + if (!strcmp (layout, "microsoft")) { | ||
3700 | + id = CODEC_ID_MSRLE; | ||
3701 | + video = TRUE; | ||
3702 | + } | ||
3703 | + } | ||
3704 | + } else if (!strcmp (mimetype, "video/x-xan")) { | ||
3705 | + gint wcversion = 0; | ||
3706 | + | ||
3707 | + if ((gst_structure_get_int (structure, "wcversion", &wcversion))) { | ||
3708 | + switch (wcversion) { | ||
3709 | + case 3: | ||
3710 | + id = CODEC_ID_XAN_WC3; | ||
3711 | + video = TRUE; | ||
3712 | + break; | ||
3713 | + case 4: | ||
3714 | + id = CODEC_ID_XAN_WC4; | ||
3715 | + video = TRUE; | ||
3716 | + break; | ||
3717 | + default: | ||
3718 | + break; | ||
3719 | + } | ||
3720 | + } | ||
3721 | + } else if (!strcmp (mimetype, "audio/AMR")) { | ||
3722 | + audio = TRUE; | ||
3723 | + id = CODEC_ID_AMR_NB; | ||
3724 | + } else if (!strcmp (mimetype, "audio/AMR-WB")) { | ||
3725 | + id = CODEC_ID_AMR_WB; | ||
3726 | + audio = TRUE; | ||
3727 | + } else if (!strcmp (mimetype, "audio/qcelp")) { | ||
3728 | + id = CODEC_ID_QCELP; | ||
3729 | + audio = TRUE; | ||
3730 | + } else if (!strcmp (mimetype, "video/x-h264")) { | ||
3731 | + id = CODEC_ID_H264; | ||
3732 | + video = TRUE; | ||
3733 | + } else if (!strcmp (mimetype, "video/x-flash-video")) { | ||
3734 | + gint flvversion = 0; | ||
3735 | + | ||
3736 | + if ((gst_structure_get_int (structure, "flvversion", &flvversion))) { | ||
3737 | + switch (flvversion) { | ||
3738 | + case 1: | ||
3739 | + id = CODEC_ID_FLV1; | ||
3740 | + video = TRUE; | ||
3741 | + break; | ||
3742 | + default: | ||
3743 | + break; | ||
3744 | + } | ||
3745 | + } | ||
3746 | + | ||
3747 | + } else if (!strcmp (mimetype, "audio/x-nellymoser")) { | ||
3748 | + id = CODEC_ID_NELLYMOSER; | ||
3749 | + audio = TRUE; | ||
3750 | + } else if (!strncmp (mimetype, "audio/x-gst_ff-", 15)) { | ||
3751 | + gchar ext[16]; | ||
3752 | + AVCodec *codec; | ||
3753 | + | ||
3754 | + if (strlen (mimetype) <= 30 && | ||
3755 | + sscanf (mimetype, "audio/x-gst_ff-%s", ext) == 1) { | ||
3756 | + if ((codec = avcodec_find_decoder_by_name (ext)) || | ||
3757 | + (codec = avcodec_find_encoder_by_name (ext))) { | ||
3758 | + id = codec->id; | ||
3759 | + audio = TRUE; | ||
3760 | + } | ||
3761 | + } | ||
3762 | + } else if (!strncmp (mimetype, "video/x-gst_ff-", 15)) { | ||
3763 | + gchar ext[16]; | ||
3764 | + AVCodec *codec; | ||
3765 | + | ||
3766 | + if (strlen (mimetype) <= 30 && | ||
3767 | + sscanf (mimetype, "video/x-gst_ff-%s", ext) == 1) { | ||
3768 | + if ((codec = avcodec_find_decoder_by_name (ext)) || | ||
3769 | + (codec = avcodec_find_encoder_by_name (ext))) { | ||
3770 | + id = codec->id; | ||
3771 | + video = TRUE; | ||
3772 | + } | ||
3773 | + } | ||
3774 | + } | ||
3775 | + | ||
3776 | + if (context != NULL) { | ||
3777 | + if (video == TRUE) { | ||
3778 | + context->codec_type = AVMEDIA_TYPE_VIDEO; | ||
3779 | + } else if (audio == TRUE) { | ||
3780 | + context->codec_type = AVMEDIA_TYPE_AUDIO; | ||
3781 | + } else { | ||
3782 | + context->codec_type = AVMEDIA_TYPE_UNKNOWN; | ||
3783 | + } | ||
3784 | + context->codec_id = id; | ||
3785 | + gst_ffmpeg_caps_with_codecid (id, context->codec_type, caps, context); | ||
3786 | + } | ||
3787 | + | ||
3788 | + if (id != CODEC_ID_NONE) { | ||
3789 | + GST_DEBUG ("The id=%d belongs to the caps %" GST_PTR_FORMAT, id, caps); | ||
3790 | + } else { | ||
3791 | + GST_WARNING ("Couldn't figure out the id for caps %" GST_PTR_FORMAT, caps); | ||
3792 | + } | ||
3793 | + | ||
3794 | + return id; | ||
3795 | +} | ||
3796 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegcodecmap.c.rej gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegcodecmap.c.rej | ||
3797 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegcodecmap.c.rej 1970-01-01 01:00:00.000000000 +0100 | ||
3798 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegcodecmap.c.rej 2014-08-08 15:31:06.055868246 +0200 | ||
3799 | @@ -0,0 +1,12 @@ | ||
3800 | +--- ext/ffmpeg/gstffmpegcodecmap.c | ||
3801 | ++++ ext/ffmpeg/gstffmpegcodecmap.c | ||
3802 | +@@ -1884,9 +1842,6 @@ | ||
3803 | + gst_ff_vid_caps_new (context, codec_id, encode, "video/x-raw-rgb", | ||
3804 | + "bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, depth, "endianness", | ||
3805 | + G_TYPE_INT, endianness, NULL); | ||
3806 | +- if (caps && context) { | ||
3807 | +- gst_ffmpeg_set_palette (caps, context); | ||
3808 | +- } | ||
3809 | + } | ||
3810 | + } else if (fmt) { | ||
3811 | + caps = gst_ff_vid_caps_new (context, codec_id, encode, "video/x-raw-yuv", | ||
3812 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegdec.c gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegdec.c | ||
3813 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegdec.c 2014-08-08 14:46:31.462772351 +0200 | ||
3814 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegdec.c 2014-08-08 15:32:18.608870847 +0200 | ||
3815 | @@ -88,7 +88,6 @@ | ||
3816 | gint depth; | ||
3817 | } audio; | ||
3818 | } format; | ||
3819 | - gboolean waiting_for_key; | ||
3820 | gboolean discont; | ||
3821 | gboolean clear_ts; | ||
3822 | |||
3823 | @@ -438,7 +437,6 @@ | ||
3824 | ffmpegdec->pcache = NULL; | ||
3825 | ffmpegdec->par = NULL; | ||
3826 | ffmpegdec->opened = FALSE; | ||
3827 | - ffmpegdec->waiting_for_key = TRUE; | ||
3828 | ffmpegdec->skip_frame = ffmpegdec->lowres = 0; | ||
3829 | ffmpegdec->direct_rendering = DEFAULT_DIRECT_RENDERING; | ||
3830 | ffmpegdec->do_padding = DEFAULT_DO_PADDING; | ||
3831 | @@ -608,11 +606,6 @@ | ||
3832 | gst_ffmpeg_avcodec_close (ffmpegdec->context); | ||
3833 | ffmpegdec->opened = FALSE; | ||
3834 | |||
3835 | - if (ffmpegdec->context->palctrl) { | ||
3836 | - av_free (ffmpegdec->context->palctrl); | ||
3837 | - ffmpegdec->context->palctrl = NULL; | ||
3838 | - } | ||
3839 | - | ||
3840 | if (ffmpegdec->context->extradata) { | ||
3841 | av_free (ffmpegdec->context->extradata); | ||
3842 | ffmpegdec->context->extradata = NULL; | ||
3843 | @@ -864,7 +857,7 @@ | ||
3844 | |||
3845 | /* workaround encoder bugs */ | ||
3846 | ffmpegdec->context->workaround_bugs |= FF_BUG_AUTODETECT; | ||
3847 | - ffmpegdec->context->error_recognition = 1; | ||
3848 | + ffmpegdec->context->err_recognition = 1; | ||
3849 | |||
3850 | /* for slow cpus */ | ||
3851 | ffmpegdec->context->lowres = ffmpegdec->lowres; | ||
3852 | @@ -944,7 +937,7 @@ | ||
3853 | fsize = gst_ffmpeg_avpicture_get_size (ffmpegdec->context->pix_fmt, | ||
3854 | width, height); | ||
3855 | |||
3856 | - if (!ffmpegdec->context->palctrl && ffmpegdec->can_allocate_aligned) { | ||
3857 | + if (ffmpegdec->can_allocate_aligned) { | ||
3858 | GST_LOG_OBJECT (ffmpegdec, "calling pad_alloc"); | ||
3859 | /* no pallete, we can use the buffer size to alloc */ | ||
3860 | ret = gst_pad_alloc_buffer_and_set_caps (ffmpegdec->srcpad, | ||
3861 | @@ -1083,7 +1076,6 @@ | ||
3862 | /* tell ffmpeg we own this buffer, tranfer the ref we have on the buffer to | ||
3863 | * the opaque data. */ | ||
3864 | picture->type = FF_BUFFER_TYPE_USER; | ||
3865 | - picture->age = 256 * 256 * 256 * 64; | ||
3866 | picture->opaque = buf; | ||
3867 | |||
3868 | #ifdef EXTRA_REF | ||
3869 | @@ -1414,10 +1406,6 @@ | ||
3870 | } else { | ||
3871 | if (diff >= 0) { | ||
3872 | /* we're too slow, try to speed up */ | ||
3873 | - if (ffmpegdec->waiting_for_key) { | ||
3874 | - /* we were waiting for a keyframe, that's ok */ | ||
3875 | - goto skipping; | ||
3876 | - } | ||
3877 | /* switch to skip_frame mode */ | ||
3878 | goto skip_frame; | ||
3879 | } | ||
3880 | @@ -1427,11 +1415,6 @@ | ||
3881 | ffmpegdec->processed++; | ||
3882 | return TRUE; | ||
3883 | |||
3884 | -skipping: | ||
3885 | - { | ||
3886 | - res = FALSE; | ||
3887 | - goto drop_qos; | ||
3888 | - } | ||
3889 | normal_mode: | ||
3890 | { | ||
3891 | if (ffmpegdec->context->skip_frame != AVDISCARD_DEFAULT) { | ||
3892 | @@ -1528,43 +1511,6 @@ | ||
3893 | } | ||
3894 | |||
3895 | |||
3896 | -/* figure out if the current picture is a keyframe, return TRUE if that is | ||
3897 | - * the case. */ | ||
3898 | -static gboolean | ||
3899 | -check_keyframe (GstFFMpegDec * ffmpegdec) | ||
3900 | -{ | ||
3901 | - GstFFMpegDecClass *oclass; | ||
3902 | - gboolean is_itype = FALSE; | ||
3903 | - gboolean is_reference = FALSE; | ||
3904 | - gboolean iskeyframe; | ||
3905 | - | ||
3906 | - /* figure out if we are dealing with a keyframe */ | ||
3907 | - oclass = (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); | ||
3908 | - | ||
3909 | - /* remember that we have B frames, we need this for the DTS -> PTS conversion | ||
3910 | - * code */ | ||
3911 | - if (!ffmpegdec->has_b_frames && ffmpegdec->picture->pict_type == FF_B_TYPE) { | ||
3912 | - GST_DEBUG_OBJECT (ffmpegdec, "we have B frames"); | ||
3913 | - ffmpegdec->has_b_frames = TRUE; | ||
3914 | - } | ||
3915 | - | ||
3916 | - is_itype = (ffmpegdec->picture->pict_type == FF_I_TYPE); | ||
3917 | - is_reference = (ffmpegdec->picture->reference == 1); | ||
3918 | - | ||
3919 | - iskeyframe = (is_itype || is_reference || ffmpegdec->picture->key_frame) | ||
3920 | - || (oclass->in_plugin->id == CODEC_ID_INDEO3) | ||
3921 | - || (oclass->in_plugin->id == CODEC_ID_MSZH) | ||
3922 | - || (oclass->in_plugin->id == CODEC_ID_ZLIB) | ||
3923 | - || (oclass->in_plugin->id == CODEC_ID_VP3) | ||
3924 | - || (oclass->in_plugin->id == CODEC_ID_HUFFYUV); | ||
3925 | - | ||
3926 | - GST_LOG_OBJECT (ffmpegdec, | ||
3927 | - "current picture: type: %d, is_keyframe:%d, is_itype:%d, is_reference:%d", | ||
3928 | - ffmpegdec->picture->pict_type, iskeyframe, is_itype, is_reference); | ||
3929 | - | ||
3930 | - return iskeyframe; | ||
3931 | -} | ||
3932 | - | ||
3933 | /* get an outbuf buffer with the current picture */ | ||
3934 | static GstFlowReturn | ||
3935 | get_output_buffer (GstFFMpegDec * ffmpegdec, GstBuffer ** outbuf) | ||
3936 | @@ -1694,7 +1640,6 @@ | ||
3937 | { | ||
3938 | gint len = -1; | ||
3939 | gint have_data; | ||
3940 | - gboolean iskeyframe; | ||
3941 | gboolean mode_switch; | ||
3942 | gboolean decode; | ||
3943 | gint skip_frame = AVDISCARD_DEFAULT; | ||
3944 | @@ -1809,7 +1754,6 @@ | ||
3945 | gst_ffmpegdec_negotiate (ffmpegdec, TRUE); | ||
3946 | } | ||
3947 | |||
3948 | - | ||
3949 | /* Whether a frame is interlaced or not is unknown at the time of | ||
3950 | buffer allocation, so caps on the buffer in opaque will have | ||
3951 | the previous frame's interlaced flag set. So if interlacedness | ||
3952 | @@ -1831,10 +1775,6 @@ | ||
3953 | } | ||
3954 | } | ||
3955 | |||
3956 | - /* check if we are dealing with a keyframe here, this will also check if we | ||
3957 | - * are dealing with B frames. */ | ||
3958 | - iskeyframe = check_keyframe (ffmpegdec); | ||
3959 | - | ||
3960 | /* check that the timestamps go upwards */ | ||
3961 | if (ffmpegdec->last_out != -1 && ffmpegdec->last_out > out_pts) { | ||
3962 | /* timestamps go backwards, this means frames were reordered and we must | ||
3963 | @@ -1865,7 +1805,7 @@ | ||
3964 | * timestamps */ | ||
3965 | if (!ffmpegdec->reordered_in && ffmpegdec->reordered_out) { | ||
3966 | /* PTS and DTS are the same for keyframes */ | ||
3967 | - if (!iskeyframe && ffmpegdec->next_out != -1) { | ||
3968 | + if (ffmpegdec->next_out != -1) { | ||
3969 | /* interpolate all timestamps except for keyframes, FIXME, this is | ||
3970 | * wrong when QoS is active. */ | ||
3971 | GST_DEBUG_OBJECT (ffmpegdec, "interpolate timestamps"); | ||
3972 | @@ -1874,16 +1814,6 @@ | ||
3973 | } | ||
3974 | } | ||
3975 | |||
3976 | - /* when we're waiting for a keyframe, see if we have one or drop the current | ||
3977 | - * non-keyframe */ | ||
3978 | - if (G_UNLIKELY (ffmpegdec->waiting_for_key)) { | ||
3979 | - if (G_LIKELY (!iskeyframe)) | ||
3980 | - goto drop_non_keyframe; | ||
3981 | - | ||
3982 | - /* we have a keyframe, we can stop waiting for one */ | ||
3983 | - ffmpegdec->waiting_for_key = FALSE; | ||
3984 | - } | ||
3985 | - | ||
3986 | /* get a handle to the output buffer */ | ||
3987 | *ret = get_output_buffer (ffmpegdec, outbuf); | ||
3988 | if (G_UNLIKELY (*ret != GST_FLOW_OK)) | ||
3989 | @@ -2000,20 +1930,11 @@ | ||
3990 | else | ||
3991 | ffmpegdec->next_out = -1; | ||
3992 | |||
3993 | - /* palette is not part of raw video frame in gst and the size | ||
3994 | - * of the outgoing buffer needs to be adjusted accordingly */ | ||
3995 | - if (ffmpegdec->context->palctrl != NULL) | ||
3996 | - GST_BUFFER_SIZE (*outbuf) -= AVPALETTE_SIZE; | ||
3997 | - | ||
3998 | /* now see if we need to clip the buffer against the segment boundaries. */ | ||
3999 | if (G_UNLIKELY (!clip_video_buffer (ffmpegdec, *outbuf, out_timestamp, | ||
4000 | out_duration))) | ||
4001 | goto clipped; | ||
4002 | |||
4003 | - /* mark as keyframe or delta unit */ | ||
4004 | - if (!iskeyframe) | ||
4005 | - GST_BUFFER_FLAG_SET (*outbuf, GST_BUFFER_FLAG_DELTA_UNIT); | ||
4006 | - | ||
4007 | if (ffmpegdec->picture->top_field_first) | ||
4008 | GST_BUFFER_FLAG_SET (*outbuf, GST_VIDEO_BUFFER_TFF); | ||
4009 | |||
4010 | @@ -2024,11 +1945,6 @@ | ||
4011 | return len; | ||
4012 | |||
4013 | /* special cases */ | ||
4014 | -drop_non_keyframe: | ||
4015 | - { | ||
4016 | - GST_WARNING_OBJECT (ffmpegdec, "Dropping non-keyframe (seek/init)"); | ||
4017 | - goto beach; | ||
4018 | - } | ||
4019 | no_output: | ||
4020 | { | ||
4021 | GST_DEBUG_OBJECT (ffmpegdec, "no output buffer"); | ||
4022 | @@ -2422,7 +2338,6 @@ | ||
4023 | gst_ffmpegdec_reset_ts (ffmpegdec); | ||
4024 | gst_ffmpegdec_reset_qos (ffmpegdec); | ||
4025 | gst_ffmpegdec_flush_pcache (ffmpegdec); | ||
4026 | - ffmpegdec->waiting_for_key = TRUE; | ||
4027 | gst_segment_init (&ffmpegdec->segment, GST_FORMAT_TIME); | ||
4028 | clear_queued (ffmpegdec); | ||
4029 | break; | ||
4030 | @@ -2560,17 +2475,6 @@ | ||
4031 | |||
4032 | oclass = (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); | ||
4033 | |||
4034 | - /* do early keyframe check pretty bad to rely on the keyframe flag in the | ||
4035 | - * source for this as it might not even be parsed (UDP/file/..). */ | ||
4036 | - if (G_UNLIKELY (ffmpegdec->waiting_for_key)) { | ||
4037 | - GST_DEBUG_OBJECT (ffmpegdec, "waiting for keyframe"); | ||
4038 | - if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DELTA_UNIT) && | ||
4039 | - oclass->in_plugin->type != AVMEDIA_TYPE_AUDIO) | ||
4040 | - goto skip_keyframe; | ||
4041 | - | ||
4042 | - GST_DEBUG_OBJECT (ffmpegdec, "got keyframe"); | ||
4043 | - ffmpegdec->waiting_for_key = FALSE; | ||
4044 | - } | ||
4045 | /* parse cache joining. If there is cached data */ | ||
4046 | if (ffmpegdec->pcache) { | ||
4047 | /* join with previous data */ | ||
4048 | @@ -2805,12 +2709,6 @@ | ||
4049 | gst_buffer_unref (inbuf); | ||
4050 | return GST_FLOW_NOT_NEGOTIATED; | ||
4051 | } | ||
4052 | -skip_keyframe: | ||
4053 | - { | ||
4054 | - GST_DEBUG_OBJECT (ffmpegdec, "skipping non keyframe"); | ||
4055 | - gst_buffer_unref (inbuf); | ||
4056 | - return GST_FLOW_OK; | ||
4057 | - } | ||
4058 | } | ||
4059 | |||
4060 | static GstStateChangeReturn | ||
4061 | @@ -2936,7 +2834,7 @@ | ||
4062 | gchar *plugin_name; | ||
4063 | |||
4064 | /* only decoders */ | ||
4065 | - if (!in_plugin->decode) { | ||
4066 | + if (!av_codec_is_decoder (in_plugin)) { | ||
4067 | goto next; | ||
4068 | } | ||
4069 | |||
4070 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegdec.c.orig gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegdec.c.orig | ||
4071 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegdec.c.orig 1970-01-01 01:00:00.000000000 +0100 | ||
4072 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegdec.c.orig 2014-08-08 15:31:06.057868246 +0200 | ||
4073 | @@ -0,0 +1,2973 @@ | ||
4074 | +/* GStreamer | ||
4075 | + * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu> | ||
4076 | + * | ||
4077 | + * This library is free software; you can redistribute it and/or | ||
4078 | + * modify it under the terms of the GNU Library General Public | ||
4079 | + * License as published by the Free Software Foundation; either | ||
4080 | + * version 2 of the License, or (at your option) any later version. | ||
4081 | + * | ||
4082 | + * This library is distributed in the hope that it will be useful, | ||
4083 | + * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
4084 | + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
4085 | + * Library General Public License for more details. | ||
4086 | + * | ||
4087 | + * You should have received a copy of the GNU Library General Public | ||
4088 | + * License along with this library; if not, write to the | ||
4089 | + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, | ||
4090 | + * Boston, MA 02111-1307, USA. | ||
4091 | + */ | ||
4092 | + | ||
4093 | +#ifdef HAVE_CONFIG_H | ||
4094 | +#include "config.h" | ||
4095 | +#endif | ||
4096 | + | ||
4097 | +#include <assert.h> | ||
4098 | +#include <string.h> | ||
4099 | + | ||
4100 | +#ifdef HAVE_FFMPEG_UNINSTALLED | ||
4101 | +#include <avcodec.h> | ||
4102 | +#else | ||
4103 | +#include <libavcodec/avcodec.h> | ||
4104 | +#endif | ||
4105 | + | ||
4106 | +#include <gst/gst.h> | ||
4107 | +#include <gst/video/video.h> | ||
4108 | + | ||
4109 | +#include "gstffmpeg.h" | ||
4110 | +#include "gstffmpegcodecmap.h" | ||
4111 | +#include "gstffmpegutils.h" | ||
4112 | + | ||
4113 | +/* define to enable alternative buffer refcounting algorithm */ | ||
4114 | +#undef EXTRA_REF | ||
4115 | + | ||
4116 | +typedef struct _GstFFMpegDec GstFFMpegDec; | ||
4117 | + | ||
4118 | +#define MAX_TS_MASK 0xff | ||
4119 | + | ||
4120 | +/* for each incomming buffer we keep all timing info in a structure like this. | ||
4121 | + * We keep a circular array of these structures around to store the timing info. | ||
4122 | + * The index in the array is what we pass as opaque data (to pictures) and | ||
4123 | + * pts (to parsers) so that ffmpeg can remember them for us. */ | ||
4124 | +typedef struct | ||
4125 | +{ | ||
4126 | + gint idx; | ||
4127 | + GstClockTime timestamp; | ||
4128 | + GstClockTime duration; | ||
4129 | + gint64 offset; | ||
4130 | +} GstTSInfo; | ||
4131 | + | ||
4132 | +struct _GstFFMpegDec | ||
4133 | +{ | ||
4134 | + GstElement element; | ||
4135 | + | ||
4136 | + /* We need to keep track of our pads, so we do so here. */ | ||
4137 | + GstPad *srcpad; | ||
4138 | + GstPad *sinkpad; | ||
4139 | + | ||
4140 | + /* decoding */ | ||
4141 | + AVCodecContext *context; | ||
4142 | + AVFrame *picture; | ||
4143 | + gboolean opened; | ||
4144 | + union | ||
4145 | + { | ||
4146 | + struct | ||
4147 | + { | ||
4148 | + gint width, height; | ||
4149 | + gint clip_width, clip_height; | ||
4150 | + gint par_n, par_d; | ||
4151 | + gint fps_n, fps_d; | ||
4152 | + gint old_fps_n, old_fps_d; | ||
4153 | + gboolean interlaced; | ||
4154 | + | ||
4155 | + enum PixelFormat pix_fmt; | ||
4156 | + } video; | ||
4157 | + struct | ||
4158 | + { | ||
4159 | + gint channels; | ||
4160 | + gint samplerate; | ||
4161 | + gint depth; | ||
4162 | + } audio; | ||
4163 | + } format; | ||
4164 | + gboolean discont; | ||
4165 | + gboolean clear_ts; | ||
4166 | + | ||
4167 | + /* for tracking DTS/PTS */ | ||
4168 | + gboolean has_b_frames; | ||
4169 | + gboolean reordered_in; | ||
4170 | + GstClockTime last_in; | ||
4171 | + GstClockTime last_diff; | ||
4172 | + guint last_frames; | ||
4173 | + gboolean reordered_out; | ||
4174 | + GstClockTime last_out; | ||
4175 | + GstClockTime next_out; | ||
4176 | + | ||
4177 | + /* parsing */ | ||
4178 | + gboolean turnoff_parser; /* used for turning off aac raw parsing | ||
4179 | + * See bug #566250 */ | ||
4180 | + AVCodecParserContext *pctx; | ||
4181 | + GstBuffer *pcache; | ||
4182 | + guint8 *padded; | ||
4183 | + guint padded_size; | ||
4184 | + | ||
4185 | + GValue *par; /* pixel aspect ratio of incoming data */ | ||
4186 | + gboolean current_dr; /* if direct rendering is enabled */ | ||
4187 | + gboolean extra_ref; /* keep extra ref around in get/release */ | ||
4188 | + | ||
4189 | + /* some properties */ | ||
4190 | + enum AVDiscard skip_frame; | ||
4191 | + gint lowres; | ||
4192 | + gboolean direct_rendering; | ||
4193 | + gboolean do_padding; | ||
4194 | + gboolean debug_mv; | ||
4195 | + gboolean crop; | ||
4196 | + int max_threads; | ||
4197 | + | ||
4198 | + /* QoS stuff *//* with LOCK */ | ||
4199 | + gdouble proportion; | ||
4200 | + GstClockTime earliest_time; | ||
4201 | + gint64 processed; | ||
4202 | + gint64 dropped; | ||
4203 | + | ||
4204 | + /* clipping segment */ | ||
4205 | + GstSegment segment; | ||
4206 | + | ||
4207 | + gboolean is_realvideo; | ||
4208 | + | ||
4209 | + GstTSInfo ts_info[MAX_TS_MASK + 1]; | ||
4210 | + gint ts_idx; | ||
4211 | + | ||
4212 | + /* reverse playback queue */ | ||
4213 | + GList *queued; | ||
4214 | + | ||
4215 | + /* Can downstream allocate 16bytes aligned data. */ | ||
4216 | + gboolean can_allocate_aligned; | ||
4217 | +}; | ||
4218 | + | ||
4219 | +typedef struct _GstFFMpegDecClass GstFFMpegDecClass; | ||
4220 | + | ||
4221 | +struct _GstFFMpegDecClass | ||
4222 | +{ | ||
4223 | + GstElementClass parent_class; | ||
4224 | + | ||
4225 | + AVCodec *in_plugin; | ||
4226 | + GstPadTemplate *srctempl, *sinktempl; | ||
4227 | +}; | ||
4228 | + | ||
4229 | +#define GST_TS_INFO_NONE &ts_info_none | ||
4230 | +static const GstTSInfo ts_info_none = { -1, -1, -1, -1 }; | ||
4231 | + | ||
4232 | +static const GstTSInfo * | ||
4233 | +gst_ts_info_store (GstFFMpegDec * dec, GstClockTime timestamp, | ||
4234 | + GstClockTime duration, gint64 offset) | ||
4235 | +{ | ||
4236 | + gint idx = dec->ts_idx; | ||
4237 | + dec->ts_info[idx].idx = idx; | ||
4238 | + dec->ts_info[idx].timestamp = timestamp; | ||
4239 | + dec->ts_info[idx].duration = duration; | ||
4240 | + dec->ts_info[idx].offset = offset; | ||
4241 | + dec->ts_idx = (idx + 1) & MAX_TS_MASK; | ||
4242 | + | ||
4243 | + return &dec->ts_info[idx]; | ||
4244 | +} | ||
4245 | + | ||
4246 | +static const GstTSInfo * | ||
4247 | +gst_ts_info_get (GstFFMpegDec * dec, gint idx) | ||
4248 | +{ | ||
4249 | + if (G_UNLIKELY (idx < 0 || idx > MAX_TS_MASK)) | ||
4250 | + return GST_TS_INFO_NONE; | ||
4251 | + | ||
4252 | + return &dec->ts_info[idx]; | ||
4253 | +} | ||
4254 | + | ||
4255 | +#define GST_TYPE_FFMPEGDEC \ | ||
4256 | + (gst_ffmpegdec_get_type()) | ||
4257 | +#define GST_FFMPEGDEC(obj) \ | ||
4258 | + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_FFMPEGDEC,GstFFMpegDec)) | ||
4259 | +#define GST_FFMPEGDEC_CLASS(klass) \ | ||
4260 | + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_FFMPEGDEC,GstFFMpegDecClass)) | ||
4261 | +#define GST_IS_FFMPEGDEC(obj) \ | ||
4262 | + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_FFMPEGDEC)) | ||
4263 | +#define GST_IS_FFMPEGDEC_CLASS(klass) \ | ||
4264 | + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEGDEC)) | ||
4265 | + | ||
4266 | +#define DEFAULT_LOWRES 0 | ||
4267 | +#define DEFAULT_SKIPFRAME 0 | ||
4268 | +#define DEFAULT_DIRECT_RENDERING TRUE | ||
4269 | +#define DEFAULT_DO_PADDING TRUE | ||
4270 | +#define DEFAULT_DEBUG_MV FALSE | ||
4271 | +#define DEFAULT_CROP TRUE | ||
4272 | +#define DEFAULT_MAX_THREADS 1 | ||
4273 | + | ||
4274 | +enum | ||
4275 | +{ | ||
4276 | + PROP_0, | ||
4277 | + PROP_LOWRES, | ||
4278 | + PROP_SKIPFRAME, | ||
4279 | + PROP_DIRECT_RENDERING, | ||
4280 | + PROP_DO_PADDING, | ||
4281 | + PROP_DEBUG_MV, | ||
4282 | + PROP_CROP, | ||
4283 | + PROP_MAX_THREADS, | ||
4284 | + PROP_LAST | ||
4285 | +}; | ||
4286 | + | ||
4287 | +/* A number of function prototypes are given so we can refer to them later. */ | ||
4288 | +static void gst_ffmpegdec_base_init (GstFFMpegDecClass * klass); | ||
4289 | +static void gst_ffmpegdec_class_init (GstFFMpegDecClass * klass); | ||
4290 | +static void gst_ffmpegdec_init (GstFFMpegDec * ffmpegdec); | ||
4291 | +static void gst_ffmpegdec_finalize (GObject * object); | ||
4292 | + | ||
4293 | +static gboolean gst_ffmpegdec_query (GstPad * pad, GstQuery * query); | ||
4294 | +static gboolean gst_ffmpegdec_src_event (GstPad * pad, GstEvent * event); | ||
4295 | + | ||
4296 | +static gboolean gst_ffmpegdec_setcaps (GstPad * pad, GstCaps * caps); | ||
4297 | +static gboolean gst_ffmpegdec_sink_event (GstPad * pad, GstEvent * event); | ||
4298 | +static GstFlowReturn gst_ffmpegdec_chain (GstPad * pad, GstBuffer * buf); | ||
4299 | + | ||
4300 | +static GstStateChangeReturn gst_ffmpegdec_change_state (GstElement * element, | ||
4301 | + GstStateChange transition); | ||
4302 | + | ||
4303 | +static void gst_ffmpegdec_set_property (GObject * object, | ||
4304 | + guint prop_id, const GValue * value, GParamSpec * pspec); | ||
4305 | +static void gst_ffmpegdec_get_property (GObject * object, | ||
4306 | + guint prop_id, GValue * value, GParamSpec * pspec); | ||
4307 | + | ||
4308 | +static gboolean gst_ffmpegdec_negotiate (GstFFMpegDec * ffmpegdec, | ||
4309 | + gboolean force); | ||
4310 | + | ||
4311 | +/* some sort of bufferpool handling, but different */ | ||
4312 | +static int gst_ffmpegdec_get_buffer (AVCodecContext * context, | ||
4313 | + AVFrame * picture); | ||
4314 | +static void gst_ffmpegdec_release_buffer (AVCodecContext * context, | ||
4315 | + AVFrame * picture); | ||
4316 | + | ||
4317 | +static void gst_ffmpegdec_drain (GstFFMpegDec * ffmpegdec); | ||
4318 | + | ||
4319 | +#define GST_FFDEC_PARAMS_QDATA g_quark_from_static_string("ffdec-params") | ||
4320 | + | ||
4321 | +static GstElementClass *parent_class = NULL; | ||
4322 | + | ||
4323 | +#define GST_FFMPEGDEC_TYPE_LOWRES (gst_ffmpegdec_lowres_get_type()) | ||
4324 | +static GType | ||
4325 | +gst_ffmpegdec_lowres_get_type (void) | ||
4326 | +{ | ||
4327 | + static GType ffmpegdec_lowres_type = 0; | ||
4328 | + | ||
4329 | + if (!ffmpegdec_lowres_type) { | ||
4330 | + static const GEnumValue ffmpegdec_lowres[] = { | ||
4331 | + {0, "0", "full"}, | ||
4332 | + {1, "1", "1/2-size"}, | ||
4333 | + {2, "2", "1/4-size"}, | ||
4334 | + {0, NULL, NULL}, | ||
4335 | + }; | ||
4336 | + | ||
4337 | + ffmpegdec_lowres_type = | ||
4338 | + g_enum_register_static ("GstFFMpegDecLowres", ffmpegdec_lowres); | ||
4339 | + } | ||
4340 | + | ||
4341 | + return ffmpegdec_lowres_type; | ||
4342 | +} | ||
4343 | + | ||
4344 | +#define GST_FFMPEGDEC_TYPE_SKIPFRAME (gst_ffmpegdec_skipframe_get_type()) | ||
4345 | +static GType | ||
4346 | +gst_ffmpegdec_skipframe_get_type (void) | ||
4347 | +{ | ||
4348 | + static GType ffmpegdec_skipframe_type = 0; | ||
4349 | + | ||
4350 | + if (!ffmpegdec_skipframe_type) { | ||
4351 | + static const GEnumValue ffmpegdec_skipframe[] = { | ||
4352 | + {0, "0", "Skip nothing"}, | ||
4353 | + {1, "1", "Skip B-frames"}, | ||
4354 | + {2, "2", "Skip IDCT/Dequantization"}, | ||
4355 | + {5, "5", "Skip everything"}, | ||
4356 | + {0, NULL, NULL}, | ||
4357 | + }; | ||
4358 | + | ||
4359 | + ffmpegdec_skipframe_type = | ||
4360 | + g_enum_register_static ("GstFFMpegDecSkipFrame", ffmpegdec_skipframe); | ||
4361 | + } | ||
4362 | + | ||
4363 | + return ffmpegdec_skipframe_type; | ||
4364 | +} | ||
4365 | + | ||
4366 | +static void | ||
4367 | +gst_ffmpegdec_base_init (GstFFMpegDecClass * klass) | ||
4368 | +{ | ||
4369 | + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); | ||
4370 | + GstPadTemplate *sinktempl, *srctempl; | ||
4371 | + GstCaps *sinkcaps, *srccaps; | ||
4372 | + AVCodec *in_plugin; | ||
4373 | + gchar *longname, *classification, *description; | ||
4374 | + | ||
4375 | + in_plugin = | ||
4376 | + (AVCodec *) g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass), | ||
4377 | + GST_FFDEC_PARAMS_QDATA); | ||
4378 | + g_assert (in_plugin != NULL); | ||
4379 | + | ||
4380 | + /* construct the element details struct */ | ||
4381 | + longname = g_strdup_printf ("FFmpeg %s decoder", in_plugin->long_name); | ||
4382 | + classification = g_strdup_printf ("Codec/Decoder/%s", | ||
4383 | + (in_plugin->type == AVMEDIA_TYPE_VIDEO) ? "Video" : "Audio"); | ||
4384 | + description = g_strdup_printf ("FFmpeg %s decoder", in_plugin->name); | ||
4385 | + gst_element_class_set_details_simple (element_class, longname, classification, | ||
4386 | + description, | ||
4387 | + "Wim Taymans <wim.taymans@gmail.com>, " | ||
4388 | + "Ronald Bultje <rbultje@ronald.bitfreak.net>, " | ||
4389 | + "Edward Hervey <bilboed@bilboed.com>"); | ||
4390 | + g_free (longname); | ||
4391 | + g_free (classification); | ||
4392 | + g_free (description); | ||
4393 | + | ||
4394 | + /* get the caps */ | ||
4395 | + sinkcaps = gst_ffmpeg_codecid_to_caps (in_plugin->id, NULL, FALSE); | ||
4396 | + if (!sinkcaps) { | ||
4397 | + GST_DEBUG ("Couldn't get sink caps for decoder '%s'", in_plugin->name); | ||
4398 | + sinkcaps = gst_caps_from_string ("unknown/unknown"); | ||
4399 | + } | ||
4400 | + if (in_plugin->type == AVMEDIA_TYPE_VIDEO) { | ||
4401 | + srccaps = gst_caps_from_string ("video/x-raw-rgb; video/x-raw-yuv"); | ||
4402 | + } else { | ||
4403 | + srccaps = gst_ffmpeg_codectype_to_audio_caps (NULL, | ||
4404 | + in_plugin->id, FALSE, in_plugin); | ||
4405 | + } | ||
4406 | + if (!srccaps) { | ||
4407 | + GST_DEBUG ("Couldn't get source caps for decoder '%s'", in_plugin->name); | ||
4408 | + srccaps = gst_caps_from_string ("unknown/unknown"); | ||
4409 | + } | ||
4410 | + | ||
4411 | + /* pad templates */ | ||
4412 | + sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK, | ||
4413 | + GST_PAD_ALWAYS, sinkcaps); | ||
4414 | + srctempl = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, srccaps); | ||
4415 | + | ||
4416 | + gst_element_class_add_pad_template (element_class, srctempl); | ||
4417 | + gst_element_class_add_pad_template (element_class, sinktempl); | ||
4418 | + | ||
4419 | + klass->in_plugin = in_plugin; | ||
4420 | + klass->srctempl = srctempl; | ||
4421 | + klass->sinktempl = sinktempl; | ||
4422 | +} | ||
4423 | + | ||
4424 | +static void | ||
4425 | +gst_ffmpegdec_class_init (GstFFMpegDecClass * klass) | ||
4426 | +{ | ||
4427 | + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); | ||
4428 | + GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass); | ||
4429 | + | ||
4430 | + parent_class = g_type_class_peek_parent (klass); | ||
4431 | + | ||
4432 | + gobject_class->finalize = gst_ffmpegdec_finalize; | ||
4433 | + | ||
4434 | + gobject_class->set_property = gst_ffmpegdec_set_property; | ||
4435 | + gobject_class->get_property = gst_ffmpegdec_get_property; | ||
4436 | + | ||
4437 | + if (klass->in_plugin->type == AVMEDIA_TYPE_VIDEO) { | ||
4438 | + int caps; | ||
4439 | + | ||
4440 | + g_object_class_install_property (gobject_class, PROP_SKIPFRAME, | ||
4441 | + g_param_spec_enum ("skip-frame", "Skip frames", | ||
4442 | + "Which types of frames to skip during decoding", | ||
4443 | + GST_FFMPEGDEC_TYPE_SKIPFRAME, 0, | ||
4444 | + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); | ||
4445 | + g_object_class_install_property (gobject_class, PROP_LOWRES, | ||
4446 | + g_param_spec_enum ("lowres", "Low resolution", | ||
4447 | + "At which resolution to decode images", GST_FFMPEGDEC_TYPE_LOWRES, | ||
4448 | + 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); | ||
4449 | + g_object_class_install_property (gobject_class, PROP_DIRECT_RENDERING, | ||
4450 | + g_param_spec_boolean ("direct-rendering", "Direct Rendering", | ||
4451 | + "Enable direct rendering", DEFAULT_DIRECT_RENDERING, | ||
4452 | + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); | ||
4453 | + g_object_class_install_property (gobject_class, PROP_DO_PADDING, | ||
4454 | + g_param_spec_boolean ("do-padding", "Do Padding", | ||
4455 | + "Add 0 padding before decoding data", DEFAULT_DO_PADDING, | ||
4456 | + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); | ||
4457 | + g_object_class_install_property (gobject_class, PROP_DEBUG_MV, | ||
4458 | + g_param_spec_boolean ("debug-mv", "Debug motion vectors", | ||
4459 | + "Whether ffmpeg should print motion vectors on top of the image", | ||
4460 | + DEFAULT_DEBUG_MV, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); | ||
4461 | +#if 0 | ||
4462 | + g_object_class_install_property (gobject_class, PROP_CROP, | ||
4463 | + g_param_spec_boolean ("crop", "Crop", | ||
4464 | + "Crop images to the display region", | ||
4465 | + DEFAULT_CROP, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); | ||
4466 | +#endif | ||
4467 | + | ||
4468 | + caps = klass->in_plugin->capabilities; | ||
4469 | + if (caps & (CODEC_CAP_FRAME_THREADS | CODEC_CAP_SLICE_THREADS)) { | ||
4470 | + g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_MAX_THREADS, | ||
4471 | + g_param_spec_int ("max-threads", "Maximum decode threads", | ||
4472 | + "Maximum number of worker threads to spawn. (0 = auto)", | ||
4473 | + 0, G_MAXINT, DEFAULT_MAX_THREADS, | ||
4474 | + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); | ||
4475 | + } | ||
4476 | + } | ||
4477 | + | ||
4478 | + gstelement_class->change_state = gst_ffmpegdec_change_state; | ||
4479 | +} | ||
4480 | + | ||
4481 | +static void | ||
4482 | +gst_ffmpegdec_init (GstFFMpegDec * ffmpegdec) | ||
4483 | +{ | ||
4484 | + GstFFMpegDecClass *oclass; | ||
4485 | + | ||
4486 | + oclass = (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); | ||
4487 | + | ||
4488 | + /* setup pads */ | ||
4489 | + ffmpegdec->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink"); | ||
4490 | + gst_pad_set_setcaps_function (ffmpegdec->sinkpad, | ||
4491 | + GST_DEBUG_FUNCPTR (gst_ffmpegdec_setcaps)); | ||
4492 | + gst_pad_set_event_function (ffmpegdec->sinkpad, | ||
4493 | + GST_DEBUG_FUNCPTR (gst_ffmpegdec_sink_event)); | ||
4494 | + gst_pad_set_chain_function (ffmpegdec->sinkpad, | ||
4495 | + GST_DEBUG_FUNCPTR (gst_ffmpegdec_chain)); | ||
4496 | + gst_element_add_pad (GST_ELEMENT (ffmpegdec), ffmpegdec->sinkpad); | ||
4497 | + | ||
4498 | + ffmpegdec->srcpad = gst_pad_new_from_template (oclass->srctempl, "src"); | ||
4499 | + gst_pad_use_fixed_caps (ffmpegdec->srcpad); | ||
4500 | + gst_pad_set_event_function (ffmpegdec->srcpad, | ||
4501 | + GST_DEBUG_FUNCPTR (gst_ffmpegdec_src_event)); | ||
4502 | + gst_pad_set_query_function (ffmpegdec->srcpad, | ||
4503 | + GST_DEBUG_FUNCPTR (gst_ffmpegdec_query)); | ||
4504 | + gst_element_add_pad (GST_ELEMENT (ffmpegdec), ffmpegdec->srcpad); | ||
4505 | + | ||
4506 | + /* some ffmpeg data */ | ||
4507 | + ffmpegdec->context = avcodec_alloc_context (); | ||
4508 | + ffmpegdec->picture = avcodec_alloc_frame (); | ||
4509 | + ffmpegdec->pctx = NULL; | ||
4510 | + ffmpegdec->pcache = NULL; | ||
4511 | + ffmpegdec->par = NULL; | ||
4512 | + ffmpegdec->opened = FALSE; | ||
4513 | + ffmpegdec->skip_frame = ffmpegdec->lowres = 0; | ||
4514 | + ffmpegdec->direct_rendering = DEFAULT_DIRECT_RENDERING; | ||
4515 | + ffmpegdec->do_padding = DEFAULT_DO_PADDING; | ||
4516 | + ffmpegdec->debug_mv = DEFAULT_DEBUG_MV; | ||
4517 | + ffmpegdec->crop = DEFAULT_CROP; | ||
4518 | + ffmpegdec->max_threads = DEFAULT_MAX_THREADS; | ||
4519 | + | ||
4520 | + ffmpegdec->format.video.par_n = -1; | ||
4521 | + ffmpegdec->format.video.fps_n = -1; | ||
4522 | + ffmpegdec->format.video.old_fps_n = -1; | ||
4523 | + gst_segment_init (&ffmpegdec->segment, GST_FORMAT_TIME); | ||
4524 | + | ||
4525 | + /* We initially assume downstream can allocate 16-byte-aligned buffers */ | ||
4526 | + ffmpegdec->can_allocate_aligned = TRUE; | ||
4527 | +} | ||
4528 | + | ||
4529 | +static void | ||
4530 | +gst_ffmpegdec_finalize (GObject * object) | ||
4531 | +{ | ||
4532 | + GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) object; | ||
4533 | + | ||
4534 | + if (ffmpegdec->context != NULL) { | ||
4535 | + av_free (ffmpegdec->context); | ||
4536 | + ffmpegdec->context = NULL; | ||
4537 | + } | ||
4538 | + | ||
4539 | + if (ffmpegdec->picture != NULL) { | ||
4540 | + av_free (ffmpegdec->picture); | ||
4541 | + ffmpegdec->picture = NULL; | ||
4542 | + } | ||
4543 | + | ||
4544 | + G_OBJECT_CLASS (parent_class)->finalize (object); | ||
4545 | +} | ||
4546 | + | ||
4547 | +static gboolean | ||
4548 | +gst_ffmpegdec_query (GstPad * pad, GstQuery * query) | ||
4549 | +{ | ||
4550 | + GstFFMpegDec *ffmpegdec; | ||
4551 | + GstPad *peer; | ||
4552 | + gboolean res; | ||
4553 | + | ||
4554 | + ffmpegdec = (GstFFMpegDec *) gst_pad_get_parent (pad); | ||
4555 | + | ||
4556 | + res = FALSE; | ||
4557 | + | ||
4558 | + if ((peer = gst_pad_get_peer (ffmpegdec->sinkpad))) { | ||
4559 | + /* just forward to peer */ | ||
4560 | + res = gst_pad_query (peer, query); | ||
4561 | + gst_object_unref (peer); | ||
4562 | + } | ||
4563 | +#if 0 | ||
4564 | + { | ||
4565 | + GstFormat bfmt; | ||
4566 | + | ||
4567 | + bfmt = GST_FORMAT_BYTES; | ||
4568 | + | ||
4569 | + /* ok, do bitrate calc... */ | ||
4570 | + if ((type != GST_QUERY_POSITION && type != GST_QUERY_TOTAL) || | ||
4571 | + *fmt != GST_FORMAT_TIME || ffmpegdec->context->bit_rate == 0 || | ||
4572 | + !gst_pad_query (peer, type, &bfmt, value)) | ||
4573 | + return FALSE; | ||
4574 | + | ||
4575 | + if (ffmpegdec->pcache && type == GST_QUERY_POSITION) | ||
4576 | + *value -= GST_BUFFER_SIZE (ffmpegdec->pcache); | ||
4577 | + *value *= GST_SECOND / ffmpegdec->context->bit_rate; | ||
4578 | + } | ||
4579 | +#endif | ||
4580 | + | ||
4581 | + gst_object_unref (ffmpegdec); | ||
4582 | + | ||
4583 | + return res; | ||
4584 | +} | ||
4585 | + | ||
4586 | +static void | ||
4587 | +gst_ffmpegdec_reset_ts (GstFFMpegDec * ffmpegdec) | ||
4588 | +{ | ||
4589 | + ffmpegdec->last_in = GST_CLOCK_TIME_NONE; | ||
4590 | + ffmpegdec->last_diff = GST_CLOCK_TIME_NONE; | ||
4591 | + ffmpegdec->last_frames = 0; | ||
4592 | + ffmpegdec->last_out = GST_CLOCK_TIME_NONE; | ||
4593 | + ffmpegdec->next_out = GST_CLOCK_TIME_NONE; | ||
4594 | + ffmpegdec->reordered_in = FALSE; | ||
4595 | + ffmpegdec->reordered_out = FALSE; | ||
4596 | +} | ||
4597 | + | ||
4598 | +static void | ||
4599 | +gst_ffmpegdec_update_qos (GstFFMpegDec * ffmpegdec, gdouble proportion, | ||
4600 | + GstClockTime timestamp) | ||
4601 | +{ | ||
4602 | + GST_LOG_OBJECT (ffmpegdec, "update QOS: %f, %" GST_TIME_FORMAT, | ||
4603 | + proportion, GST_TIME_ARGS (timestamp)); | ||
4604 | + | ||
4605 | + GST_OBJECT_LOCK (ffmpegdec); | ||
4606 | + ffmpegdec->proportion = proportion; | ||
4607 | + ffmpegdec->earliest_time = timestamp; | ||
4608 | + GST_OBJECT_UNLOCK (ffmpegdec); | ||
4609 | +} | ||
4610 | + | ||
4611 | +static void | ||
4612 | +gst_ffmpegdec_reset_qos (GstFFMpegDec * ffmpegdec) | ||
4613 | +{ | ||
4614 | + gst_ffmpegdec_update_qos (ffmpegdec, 0.5, GST_CLOCK_TIME_NONE); | ||
4615 | + ffmpegdec->processed = 0; | ||
4616 | + ffmpegdec->dropped = 0; | ||
4617 | +} | ||
4618 | + | ||
4619 | +static void | ||
4620 | +gst_ffmpegdec_read_qos (GstFFMpegDec * ffmpegdec, gdouble * proportion, | ||
4621 | + GstClockTime * timestamp) | ||
4622 | +{ | ||
4623 | + GST_OBJECT_LOCK (ffmpegdec); | ||
4624 | + *proportion = ffmpegdec->proportion; | ||
4625 | + *timestamp = ffmpegdec->earliest_time; | ||
4626 | + GST_OBJECT_UNLOCK (ffmpegdec); | ||
4627 | +} | ||
4628 | + | ||
4629 | +static gboolean | ||
4630 | +gst_ffmpegdec_src_event (GstPad * pad, GstEvent * event) | ||
4631 | +{ | ||
4632 | + GstFFMpegDec *ffmpegdec; | ||
4633 | + gboolean res; | ||
4634 | + | ||
4635 | + ffmpegdec = (GstFFMpegDec *) gst_pad_get_parent (pad); | ||
4636 | + | ||
4637 | + switch (GST_EVENT_TYPE (event)) { | ||
4638 | + case GST_EVENT_QOS: | ||
4639 | + { | ||
4640 | + gdouble proportion; | ||
4641 | + GstClockTimeDiff diff; | ||
4642 | + GstClockTime timestamp; | ||
4643 | + | ||
4644 | + gst_event_parse_qos (event, &proportion, &diff, ×tamp); | ||
4645 | + | ||
4646 | + /* update our QoS values */ | ||
4647 | + gst_ffmpegdec_update_qos (ffmpegdec, proportion, timestamp + diff); | ||
4648 | + | ||
4649 | + /* forward upstream */ | ||
4650 | + res = gst_pad_push_event (ffmpegdec->sinkpad, event); | ||
4651 | + break; | ||
4652 | + } | ||
4653 | + default: | ||
4654 | + /* forward upstream */ | ||
4655 | + res = gst_pad_push_event (ffmpegdec->sinkpad, event); | ||
4656 | + break; | ||
4657 | + } | ||
4658 | + | ||
4659 | + gst_object_unref (ffmpegdec); | ||
4660 | + | ||
4661 | + return res; | ||
4662 | +} | ||
4663 | + | ||
4664 | +/* with LOCK */ | ||
4665 | +static void | ||
4666 | +gst_ffmpegdec_close (GstFFMpegDec * ffmpegdec) | ||
4667 | +{ | ||
4668 | + if (!ffmpegdec->opened) | ||
4669 | + return; | ||
4670 | + | ||
4671 | + GST_LOG_OBJECT (ffmpegdec, "closing ffmpeg codec"); | ||
4672 | + | ||
4673 | + if (ffmpegdec->par) { | ||
4674 | + g_free (ffmpegdec->par); | ||
4675 | + ffmpegdec->par = NULL; | ||
4676 | + } | ||
4677 | + | ||
4678 | + if (ffmpegdec->context->priv_data) | ||
4679 | + gst_ffmpeg_avcodec_close (ffmpegdec->context); | ||
4680 | + ffmpegdec->opened = FALSE; | ||
4681 | + | ||
4682 | + if (ffmpegdec->context->extradata) { | ||
4683 | + av_free (ffmpegdec->context->extradata); | ||
4684 | + ffmpegdec->context->extradata = NULL; | ||
4685 | + } | ||
4686 | + | ||
4687 | + if (ffmpegdec->pctx) { | ||
4688 | + if (ffmpegdec->pcache) { | ||
4689 | + gst_buffer_unref (ffmpegdec->pcache); | ||
4690 | + ffmpegdec->pcache = NULL; | ||
4691 | + } | ||
4692 | + av_parser_close (ffmpegdec->pctx); | ||
4693 | + ffmpegdec->pctx = NULL; | ||
4694 | + } | ||
4695 | + | ||
4696 | + ffmpegdec->format.video.par_n = -1; | ||
4697 | + ffmpegdec->format.video.fps_n = -1; | ||
4698 | + ffmpegdec->format.video.old_fps_n = -1; | ||
4699 | + ffmpegdec->format.video.interlaced = FALSE; | ||
4700 | +} | ||
4701 | + | ||
4702 | +/* with LOCK */ | ||
4703 | +static gboolean | ||
4704 | +gst_ffmpegdec_open (GstFFMpegDec * ffmpegdec) | ||
4705 | +{ | ||
4706 | + GstFFMpegDecClass *oclass; | ||
4707 | + | ||
4708 | + oclass = (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); | ||
4709 | + | ||
4710 | + if (gst_ffmpeg_avcodec_open (ffmpegdec->context, oclass->in_plugin) < 0) | ||
4711 | + goto could_not_open; | ||
4712 | + | ||
4713 | + ffmpegdec->opened = TRUE; | ||
4714 | + ffmpegdec->is_realvideo = FALSE; | ||
4715 | + | ||
4716 | + GST_LOG_OBJECT (ffmpegdec, "Opened ffmpeg codec %s, id %d", | ||
4717 | + oclass->in_plugin->name, oclass->in_plugin->id); | ||
4718 | + | ||
4719 | + /* open a parser if we can */ | ||
4720 | + switch (oclass->in_plugin->id) { | ||
4721 | + case CODEC_ID_MPEG4: | ||
4722 | + case CODEC_ID_MJPEG: | ||
4723 | + case CODEC_ID_VC1: | ||
4724 | + GST_LOG_OBJECT (ffmpegdec, "not using parser, blacklisted codec"); | ||
4725 | + ffmpegdec->pctx = NULL; | ||
4726 | + break; | ||
4727 | + case CODEC_ID_H264: | ||
4728 | + /* For H264, only use a parser if there is no context data, if there is, | ||
4729 | + * we're talking AVC */ | ||
4730 | + if (ffmpegdec->context->extradata_size == 0) { | ||
4731 | + GST_LOG_OBJECT (ffmpegdec, "H264 with no extradata, creating parser"); | ||
4732 | + ffmpegdec->pctx = av_parser_init (oclass->in_plugin->id); | ||
4733 | + } else { | ||
4734 | + GST_LOG_OBJECT (ffmpegdec, | ||
4735 | + "H264 with extradata implies framed data - not using parser"); | ||
4736 | + ffmpegdec->pctx = NULL; | ||
4737 | + } | ||
4738 | + break; | ||
4739 | + case CODEC_ID_RV10: | ||
4740 | + case CODEC_ID_RV30: | ||
4741 | + case CODEC_ID_RV20: | ||
4742 | + case CODEC_ID_RV40: | ||
4743 | + ffmpegdec->is_realvideo = TRUE; | ||
4744 | + break; | ||
4745 | + default: | ||
4746 | + if (!ffmpegdec->turnoff_parser) { | ||
4747 | + ffmpegdec->pctx = av_parser_init (oclass->in_plugin->id); | ||
4748 | + if (ffmpegdec->pctx) | ||
4749 | + GST_LOG_OBJECT (ffmpegdec, "Using parser %p", ffmpegdec->pctx); | ||
4750 | + else | ||
4751 | + GST_LOG_OBJECT (ffmpegdec, "No parser for codec"); | ||
4752 | + } else { | ||
4753 | + GST_LOG_OBJECT (ffmpegdec, "Parser deactivated for format"); | ||
4754 | + } | ||
4755 | + break; | ||
4756 | + } | ||
4757 | + | ||
4758 | + switch (oclass->in_plugin->type) { | ||
4759 | + case AVMEDIA_TYPE_VIDEO: | ||
4760 | + ffmpegdec->format.video.width = 0; | ||
4761 | + ffmpegdec->format.video.height = 0; | ||
4762 | + ffmpegdec->format.video.clip_width = -1; | ||
4763 | + ffmpegdec->format.video.clip_height = -1; | ||
4764 | + ffmpegdec->format.video.pix_fmt = PIX_FMT_NB; | ||
4765 | + ffmpegdec->format.video.interlaced = FALSE; | ||
4766 | + break; | ||
4767 | + case AVMEDIA_TYPE_AUDIO: | ||
4768 | + ffmpegdec->format.audio.samplerate = 0; | ||
4769 | + ffmpegdec->format.audio.channels = 0; | ||
4770 | + ffmpegdec->format.audio.depth = 0; | ||
4771 | + break; | ||
4772 | + default: | ||
4773 | + break; | ||
4774 | + } | ||
4775 | + | ||
4776 | + gst_ffmpegdec_reset_ts (ffmpegdec); | ||
4777 | + /* FIXME, reset_qos holds the LOCK */ | ||
4778 | + ffmpegdec->proportion = 0.0; | ||
4779 | + ffmpegdec->earliest_time = -1; | ||
4780 | + | ||
4781 | + return TRUE; | ||
4782 | + | ||
4783 | + /* ERRORS */ | ||
4784 | +could_not_open: | ||
4785 | + { | ||
4786 | + gst_ffmpegdec_close (ffmpegdec); | ||
4787 | + GST_DEBUG_OBJECT (ffmpegdec, "ffdec_%s: Failed to open FFMPEG codec", | ||
4788 | + oclass->in_plugin->name); | ||
4789 | + return FALSE; | ||
4790 | + } | ||
4791 | +} | ||
4792 | + | ||
4793 | +static gboolean | ||
4794 | +gst_ffmpegdec_setcaps (GstPad * pad, GstCaps * caps) | ||
4795 | +{ | ||
4796 | + GstFFMpegDec *ffmpegdec; | ||
4797 | + GstFFMpegDecClass *oclass; | ||
4798 | + GstStructure *structure; | ||
4799 | + const GValue *par; | ||
4800 | + const GValue *fps; | ||
4801 | + gboolean ret = TRUE; | ||
4802 | + | ||
4803 | + ffmpegdec = (GstFFMpegDec *) (gst_pad_get_parent (pad)); | ||
4804 | + oclass = (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); | ||
4805 | + | ||
4806 | + GST_DEBUG_OBJECT (pad, "setcaps called"); | ||
4807 | + | ||
4808 | + GST_OBJECT_LOCK (ffmpegdec); | ||
4809 | + | ||
4810 | + /* stupid check for VC1 */ | ||
4811 | + if ((oclass->in_plugin->id == CODEC_ID_WMV3) || | ||
4812 | + (oclass->in_plugin->id == CODEC_ID_VC1)) | ||
4813 | + oclass->in_plugin->id = gst_ffmpeg_caps_to_codecid (caps, NULL); | ||
4814 | + | ||
4815 | + /* close old session */ | ||
4816 | + if (ffmpegdec->opened) { | ||
4817 | + GST_OBJECT_UNLOCK (ffmpegdec); | ||
4818 | + gst_ffmpegdec_drain (ffmpegdec); | ||
4819 | + GST_OBJECT_LOCK (ffmpegdec); | ||
4820 | + gst_ffmpegdec_close (ffmpegdec); | ||
4821 | + | ||
4822 | + /* and reset the defaults that were set when a context is created */ | ||
4823 | + avcodec_get_context_defaults (ffmpegdec->context); | ||
4824 | + } | ||
4825 | + | ||
4826 | + /* set buffer functions */ | ||
4827 | + ffmpegdec->context->get_buffer = gst_ffmpegdec_get_buffer; | ||
4828 | + ffmpegdec->context->release_buffer = gst_ffmpegdec_release_buffer; | ||
4829 | + ffmpegdec->context->draw_horiz_band = NULL; | ||
4830 | + | ||
4831 | + /* default is to let format decide if it needs a parser */ | ||
4832 | + ffmpegdec->turnoff_parser = FALSE; | ||
4833 | + | ||
4834 | + ffmpegdec->has_b_frames = FALSE; | ||
4835 | + | ||
4836 | + GST_LOG_OBJECT (ffmpegdec, "size %dx%d", ffmpegdec->context->width, | ||
4837 | + ffmpegdec->context->height); | ||
4838 | + | ||
4839 | + /* get size and so */ | ||
4840 | + gst_ffmpeg_caps_with_codecid (oclass->in_plugin->id, | ||
4841 | + oclass->in_plugin->type, caps, ffmpegdec->context); | ||
4842 | + | ||
4843 | + GST_LOG_OBJECT (ffmpegdec, "size after %dx%d", ffmpegdec->context->width, | ||
4844 | + ffmpegdec->context->height); | ||
4845 | + | ||
4846 | + if (!ffmpegdec->context->time_base.den || !ffmpegdec->context->time_base.num) { | ||
4847 | + GST_DEBUG_OBJECT (ffmpegdec, "forcing 25/1 framerate"); | ||
4848 | + ffmpegdec->context->time_base.num = 1; | ||
4849 | + ffmpegdec->context->time_base.den = 25; | ||
4850 | + } | ||
4851 | + | ||
4852 | + /* get pixel aspect ratio if it's set */ | ||
4853 | + structure = gst_caps_get_structure (caps, 0); | ||
4854 | + | ||
4855 | + par = gst_structure_get_value (structure, "pixel-aspect-ratio"); | ||
4856 | + if (par) { | ||
4857 | + GST_DEBUG_OBJECT (ffmpegdec, "sink caps have pixel-aspect-ratio of %d:%d", | ||
4858 | + gst_value_get_fraction_numerator (par), | ||
4859 | + gst_value_get_fraction_denominator (par)); | ||
4860 | + /* should be NULL */ | ||
4861 | + if (ffmpegdec->par) | ||
4862 | + g_free (ffmpegdec->par); | ||
4863 | + ffmpegdec->par = g_new0 (GValue, 1); | ||
4864 | + gst_value_init_and_copy (ffmpegdec->par, par); | ||
4865 | + } | ||
4866 | + | ||
4867 | + /* get the framerate from incoming caps. fps_n is set to -1 when | ||
4868 | + * there is no valid framerate */ | ||
4869 | + fps = gst_structure_get_value (structure, "framerate"); | ||
4870 | + if (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps)) { | ||
4871 | + ffmpegdec->format.video.fps_n = gst_value_get_fraction_numerator (fps); | ||
4872 | + ffmpegdec->format.video.fps_d = gst_value_get_fraction_denominator (fps); | ||
4873 | + GST_DEBUG_OBJECT (ffmpegdec, "Using framerate %d/%d from incoming caps", | ||
4874 | + ffmpegdec->format.video.fps_n, ffmpegdec->format.video.fps_d); | ||
4875 | + } else { | ||
4876 | + ffmpegdec->format.video.fps_n = -1; | ||
4877 | + GST_DEBUG_OBJECT (ffmpegdec, "Using framerate from codec"); | ||
4878 | + } | ||
4879 | + | ||
4880 | + /* figure out if we can use direct rendering */ | ||
4881 | + ffmpegdec->current_dr = FALSE; | ||
4882 | + ffmpegdec->extra_ref = FALSE; | ||
4883 | + if (ffmpegdec->direct_rendering) { | ||
4884 | + GST_DEBUG_OBJECT (ffmpegdec, "trying to enable direct rendering"); | ||
4885 | + if (oclass->in_plugin->capabilities & CODEC_CAP_DR1) { | ||
4886 | + if (oclass->in_plugin->id == CODEC_ID_H264) { | ||
4887 | + GST_DEBUG_OBJECT (ffmpegdec, "disable direct rendering setup for H264"); | ||
4888 | + /* does not work, a lot of code reads outside of the planes */ | ||
4889 | + ffmpegdec->current_dr = FALSE; | ||
4890 | + ffmpegdec->extra_ref = TRUE; | ||
4891 | + } else if ((oclass->in_plugin->id == CODEC_ID_SVQ1) || | ||
4892 | + (oclass->in_plugin->id == CODEC_ID_VP5) || | ||
4893 | + (oclass->in_plugin->id == CODEC_ID_VP6) || | ||
4894 | + (oclass->in_plugin->id == CODEC_ID_VP6F) || | ||
4895 | + (oclass->in_plugin->id == CODEC_ID_VP6A)) { | ||
4896 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
4897 | + "disable direct rendering setup for broken stride support"); | ||
4898 | + /* does not work, uses an incompatible stride. See #610613 */ | ||
4899 | + ffmpegdec->current_dr = FALSE; | ||
4900 | + ffmpegdec->extra_ref = TRUE; | ||
4901 | + } else { | ||
4902 | + GST_DEBUG_OBJECT (ffmpegdec, "enabled direct rendering"); | ||
4903 | + ffmpegdec->current_dr = TRUE; | ||
4904 | + } | ||
4905 | + } else { | ||
4906 | + GST_DEBUG_OBJECT (ffmpegdec, "direct rendering not supported"); | ||
4907 | + } | ||
4908 | + } | ||
4909 | + if (ffmpegdec->current_dr) { | ||
4910 | + /* do *not* draw edges when in direct rendering, for some reason it draws | ||
4911 | + * outside of the memory. */ | ||
4912 | + ffmpegdec->context->flags |= CODEC_FLAG_EMU_EDGE; | ||
4913 | + } | ||
4914 | + | ||
4915 | + /* for AAC we only use av_parse if not on stream-format==raw or ==loas */ | ||
4916 | + if (oclass->in_plugin->id == CODEC_ID_AAC | ||
4917 | + || oclass->in_plugin->id == CODEC_ID_AAC_LATM) { | ||
4918 | + const gchar *format = gst_structure_get_string (structure, "stream-format"); | ||
4919 | + | ||
4920 | + if (format == NULL || strcmp (format, "raw") == 0) { | ||
4921 | + ffmpegdec->turnoff_parser = TRUE; | ||
4922 | + } | ||
4923 | + } | ||
4924 | + | ||
4925 | + /* for FLAC, don't parse if it's already parsed */ | ||
4926 | + if (oclass->in_plugin->id == CODEC_ID_FLAC) { | ||
4927 | + if (gst_structure_has_field (structure, "streamheader")) | ||
4928 | + ffmpegdec->turnoff_parser = TRUE; | ||
4929 | + } | ||
4930 | + | ||
4931 | + /* workaround encoder bugs */ | ||
4932 | + ffmpegdec->context->workaround_bugs |= FF_BUG_AUTODETECT; | ||
4933 | + ffmpegdec->context->err_recognition = 1; | ||
4934 | + | ||
4935 | + /* for slow cpus */ | ||
4936 | + ffmpegdec->context->lowres = ffmpegdec->lowres; | ||
4937 | + ffmpegdec->context->skip_frame = ffmpegdec->skip_frame; | ||
4938 | + | ||
4939 | + /* ffmpeg can draw motion vectors on top of the image (not every decoder | ||
4940 | + * supports it) */ | ||
4941 | + ffmpegdec->context->debug_mv = ffmpegdec->debug_mv; | ||
4942 | + | ||
4943 | + if (ffmpegdec->max_threads == 0) | ||
4944 | + ffmpegdec->context->thread_count = gst_ffmpeg_auto_max_threads (); | ||
4945 | + else | ||
4946 | + ffmpegdec->context->thread_count = ffmpegdec->max_threads; | ||
4947 | + | ||
4948 | + /* open codec - we don't select an output pix_fmt yet, | ||
4949 | + * simply because we don't know! We only get it | ||
4950 | + * during playback... */ | ||
4951 | + if (!gst_ffmpegdec_open (ffmpegdec)) | ||
4952 | + goto open_failed; | ||
4953 | + | ||
4954 | + /* clipping region */ | ||
4955 | + gst_structure_get_int (structure, "width", | ||
4956 | + &ffmpegdec->format.video.clip_width); | ||
4957 | + gst_structure_get_int (structure, "height", | ||
4958 | + &ffmpegdec->format.video.clip_height); | ||
4959 | + | ||
4960 | + GST_DEBUG_OBJECT (pad, "clipping to %dx%d", | ||
4961 | + ffmpegdec->format.video.clip_width, ffmpegdec->format.video.clip_height); | ||
4962 | + | ||
4963 | + /* take into account the lowres property */ | ||
4964 | + if (ffmpegdec->format.video.clip_width != -1) | ||
4965 | + ffmpegdec->format.video.clip_width >>= ffmpegdec->lowres; | ||
4966 | + if (ffmpegdec->format.video.clip_height != -1) | ||
4967 | + ffmpegdec->format.video.clip_height >>= ffmpegdec->lowres; | ||
4968 | + | ||
4969 | + GST_DEBUG_OBJECT (pad, "final clipping to %dx%d", | ||
4970 | + ffmpegdec->format.video.clip_width, ffmpegdec->format.video.clip_height); | ||
4971 | + | ||
4972 | +done: | ||
4973 | + GST_OBJECT_UNLOCK (ffmpegdec); | ||
4974 | + | ||
4975 | + gst_object_unref (ffmpegdec); | ||
4976 | + | ||
4977 | + return ret; | ||
4978 | + | ||
4979 | + /* ERRORS */ | ||
4980 | +open_failed: | ||
4981 | + { | ||
4982 | + GST_DEBUG_OBJECT (ffmpegdec, "Failed to open"); | ||
4983 | + if (ffmpegdec->par) { | ||
4984 | + g_free (ffmpegdec->par); | ||
4985 | + ffmpegdec->par = NULL; | ||
4986 | + } | ||
4987 | + ret = FALSE; | ||
4988 | + goto done; | ||
4989 | + } | ||
4990 | +} | ||
4991 | + | ||
4992 | +static GstFlowReturn | ||
4993 | +alloc_output_buffer (GstFFMpegDec * ffmpegdec, GstBuffer ** outbuf, | ||
4994 | + gint width, gint height) | ||
4995 | +{ | ||
4996 | + GstFlowReturn ret; | ||
4997 | + gint fsize; | ||
4998 | + | ||
4999 | + ret = GST_FLOW_ERROR; | ||
5000 | + *outbuf = NULL; | ||
5001 | + | ||
5002 | + GST_LOG_OBJECT (ffmpegdec, "alloc output buffer"); | ||
5003 | + | ||
5004 | + /* see if we need renegotiation */ | ||
5005 | + if (G_UNLIKELY (!gst_ffmpegdec_negotiate (ffmpegdec, FALSE))) | ||
5006 | + goto negotiate_failed; | ||
5007 | + | ||
5008 | + /* get the size of the gstreamer output buffer given a | ||
5009 | + * width/height/format */ | ||
5010 | + fsize = gst_ffmpeg_avpicture_get_size (ffmpegdec->context->pix_fmt, | ||
5011 | + width, height); | ||
5012 | + | ||
5013 | + if (ffmpegdec->can_allocate_aligned) { | ||
5014 | + GST_LOG_OBJECT (ffmpegdec, "calling pad_alloc"); | ||
5015 | + /* no palette, we can use the buffer size to alloc */ | ||
5016 | + ret = gst_pad_alloc_buffer_and_set_caps (ffmpegdec->srcpad, | ||
5017 | + GST_BUFFER_OFFSET_NONE, fsize, | ||
5018 | + GST_PAD_CAPS (ffmpegdec->srcpad), outbuf); | ||
5019 | + if (G_UNLIKELY (ret != GST_FLOW_OK)) | ||
5020 | + goto alloc_failed; | ||
5021 | + | ||
5022 | + /* If buffer isn't 128-bit aligned, create a memaligned one ourselves */ | ||
5023 | + if (((uintptr_t) GST_BUFFER_DATA (*outbuf)) % 16) { | ||
5024 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
5025 | + "Downstream can't allocate aligned buffers."); | ||
5026 | + ffmpegdec->can_allocate_aligned = FALSE; | ||
5027 | + gst_buffer_unref (*outbuf); | ||
5028 | + *outbuf = new_aligned_buffer (fsize, GST_PAD_CAPS (ffmpegdec->srcpad)); | ||
5029 | + } | ||
5030 | + } else { | ||
5031 | + GST_LOG_OBJECT (ffmpegdec, | ||
5032 | + "not calling pad_alloc, we have a pallete or downstream can't give 16 byte aligned buffers."); | ||
5033 | + /* for paletted data we can't use pad_alloc_buffer(), because | ||
5034 | + * fsize contains the size of the palette, so the overall size | ||
5035 | + * is bigger than ffmpegcolorspace's unit size, which will | ||
5036 | + * prompt GstBaseTransform to complain endlessly ... */ | ||
5037 | + *outbuf = new_aligned_buffer (fsize, GST_PAD_CAPS (ffmpegdec->srcpad)); | ||
5038 | + ret = GST_FLOW_OK; | ||
5039 | + } | ||
5040 | + /* set caps, we do this here because the buffer is still writable here and we | ||
5041 | + * are sure to be negotiated */ | ||
5042 | + gst_buffer_set_caps (*outbuf, GST_PAD_CAPS (ffmpegdec->srcpad)); | ||
5043 | + | ||
5044 | + return ret; | ||
5045 | + | ||
5046 | + /* special cases */ | ||
5047 | +negotiate_failed: | ||
5048 | + { | ||
5049 | + GST_DEBUG_OBJECT (ffmpegdec, "negotiate failed"); | ||
5050 | + return GST_FLOW_NOT_NEGOTIATED; | ||
5051 | + } | ||
5052 | +alloc_failed: | ||
5053 | + { | ||
5054 | + GST_DEBUG_OBJECT (ffmpegdec, "pad_alloc failed %d (%s)", ret, | ||
5055 | + gst_flow_get_name (ret)); | ||
5056 | + return ret; | ||
5057 | + } | ||
5058 | +} | ||
5059 | + | ||
5060 | +static int | ||
5061 | +gst_ffmpegdec_get_buffer (AVCodecContext * context, AVFrame * picture) | ||
5062 | +{ | ||
5063 | + GstBuffer *buf = NULL; | ||
5064 | + GstFFMpegDec *ffmpegdec; | ||
5065 | + gint width, height; | ||
5066 | + gint coded_width, coded_height; | ||
5067 | + gint res; | ||
5068 | + | ||
5069 | + ffmpegdec = (GstFFMpegDec *) context->opaque; | ||
5070 | + | ||
5071 | + GST_DEBUG_OBJECT (ffmpegdec, "getting buffer"); | ||
5072 | + | ||
5073 | + /* apply the last info we have seen to this picture, when we get the | ||
5074 | + * picture back from ffmpeg we can use this to correctly timestamp the output | ||
5075 | + * buffer */ | ||
5076 | + picture->reordered_opaque = context->reordered_opaque; | ||
5077 | + /* make sure we don't free the buffer when it's not ours */ | ||
5078 | + picture->opaque = NULL; | ||
5079 | + | ||
5080 | + /* take width and height before clipping */ | ||
5081 | + width = context->width; | ||
5082 | + height = context->height; | ||
5083 | + coded_width = context->coded_width; | ||
5084 | + coded_height = context->coded_height; | ||
5085 | + | ||
5086 | + GST_LOG_OBJECT (ffmpegdec, "dimension %dx%d, coded %dx%d", width, height, | ||
5087 | + coded_width, coded_height); | ||
5088 | + if (!ffmpegdec->current_dr) { | ||
5089 | + GST_LOG_OBJECT (ffmpegdec, "direct rendering disabled, fallback alloc"); | ||
5090 | + res = avcodec_default_get_buffer (context, picture); | ||
5091 | + | ||
5092 | + GST_LOG_OBJECT (ffmpegdec, "linsize %d %d %d", picture->linesize[0], | ||
5093 | + picture->linesize[1], picture->linesize[2]); | ||
5094 | + GST_LOG_OBJECT (ffmpegdec, "data %u %u %u", 0, | ||
5095 | + (guint) (picture->data[1] - picture->data[0]), | ||
5096 | + (guint) (picture->data[2] - picture->data[0])); | ||
5097 | + return res; | ||
5098 | + } | ||
5099 | + | ||
5100 | + switch (context->codec_type) { | ||
5101 | + case AVMEDIA_TYPE_VIDEO: | ||
5102 | + /* some ffmpeg video plugins don't see the point in setting codec_type ... */ | ||
5103 | + case AVMEDIA_TYPE_UNKNOWN: | ||
5104 | + { | ||
5105 | + GstFlowReturn ret; | ||
5106 | + gint clip_width, clip_height; | ||
5107 | + | ||
5108 | + /* take final clipped output size */ | ||
5109 | + if ((clip_width = ffmpegdec->format.video.clip_width) == -1) | ||
5110 | + clip_width = width; | ||
5111 | + if ((clip_height = ffmpegdec->format.video.clip_height) == -1) | ||
5112 | + clip_height = height; | ||
5113 | + | ||
5114 | + GST_LOG_OBJECT (ffmpegdec, "raw outsize %d/%d", width, height); | ||
5115 | + | ||
5116 | + /* this is the size ffmpeg needs for the buffer */ | ||
5117 | + avcodec_align_dimensions (context, &width, &height); | ||
5118 | + | ||
5119 | + GST_LOG_OBJECT (ffmpegdec, "aligned outsize %d/%d, clip %d/%d", | ||
5120 | + width, height, clip_width, clip_height); | ||
5121 | + | ||
5122 | + if (width != clip_width || height != clip_height) { | ||
5123 | + /* We can't alloc if we need to clip the output buffer later */ | ||
5124 | + GST_LOG_OBJECT (ffmpegdec, "we need clipping, fallback alloc"); | ||
5125 | + return avcodec_default_get_buffer (context, picture); | ||
5126 | + } | ||
5127 | + | ||
5128 | + /* alloc with aligned dimensions for ffmpeg */ | ||
5129 | + ret = alloc_output_buffer (ffmpegdec, &buf, width, height); | ||
5130 | + if (G_UNLIKELY (ret != GST_FLOW_OK)) { | ||
5131 | + /* alloc default buffer when we can't get one from downstream */ | ||
5132 | + GST_LOG_OBJECT (ffmpegdec, "alloc failed, fallback alloc"); | ||
5133 | + return avcodec_default_get_buffer (context, picture); | ||
5134 | + } | ||
5135 | + | ||
5136 | + /* copy the right pointers and strides in the picture object */ | ||
5137 | + gst_ffmpeg_avpicture_fill ((AVPicture *) picture, | ||
5138 | + GST_BUFFER_DATA (buf), context->pix_fmt, width, height); | ||
5139 | + break; | ||
5140 | + } | ||
5141 | + case AVMEDIA_TYPE_AUDIO: | ||
5142 | + default: | ||
5143 | + GST_ERROR_OBJECT (ffmpegdec, | ||
5144 | + "_get_buffer() should never get called for non-video buffers !"); | ||
5145 | + g_assert_not_reached (); | ||
5146 | + break; | ||
5147 | + } | ||
5148 | + | ||
5149 | + /* tell ffmpeg we own this buffer, transfer the ref we have on the buffer to | ||
5150 | + * the opaque data. */ | ||
5151 | + picture->type = FF_BUFFER_TYPE_USER; | ||
5152 | + picture->opaque = buf; | ||
5153 | + | ||
5154 | +#ifdef EXTRA_REF | ||
5155 | + if (picture->reference != 0 || ffmpegdec->extra_ref) { | ||
5156 | + GST_DEBUG_OBJECT (ffmpegdec, "adding extra ref"); | ||
5157 | + gst_buffer_ref (buf); | ||
5158 | + } | ||
5159 | +#endif | ||
5160 | + | ||
5161 | + GST_LOG_OBJECT (ffmpegdec, "returned buffer %p", buf); | ||
5162 | + | ||
5163 | + return 0; | ||
5164 | +} | ||
5165 | + | ||
5166 | +static void | ||
5167 | +gst_ffmpegdec_release_buffer (AVCodecContext * context, AVFrame * picture) | ||
5168 | +{ | ||
5169 | + gint i; | ||
5170 | + GstBuffer *buf; | ||
5171 | + GstFFMpegDec *ffmpegdec; | ||
5172 | + | ||
5173 | + ffmpegdec = (GstFFMpegDec *) context->opaque; | ||
5174 | + | ||
5175 | + /* check if it was our buffer */ | ||
5176 | + if (picture->opaque == NULL) { | ||
5177 | + GST_DEBUG_OBJECT (ffmpegdec, "default release buffer"); | ||
5178 | + avcodec_default_release_buffer (context, picture); | ||
5179 | + return; | ||
5180 | + } | ||
5181 | + | ||
5182 | + /* we remove the opaque data now */ | ||
5183 | + buf = GST_BUFFER_CAST (picture->opaque); | ||
5184 | + GST_DEBUG_OBJECT (ffmpegdec, "release buffer %p", buf); | ||
5185 | + picture->opaque = NULL; | ||
5186 | + | ||
5187 | +#ifdef EXTRA_REF | ||
5188 | + if (picture->reference != 0 || ffmpegdec->extra_ref) { | ||
5189 | + GST_DEBUG_OBJECT (ffmpegdec, "remove extra ref"); | ||
5190 | + gst_buffer_unref (buf); | ||
5191 | + } | ||
5192 | +#else | ||
5193 | + gst_buffer_unref (buf); | ||
5194 | +#endif | ||
5195 | + | ||
5196 | + /* zero out the reference in ffmpeg */ | ||
5197 | + for (i = 0; i < 4; i++) { | ||
5198 | + picture->data[i] = NULL; | ||
5199 | + picture->linesize[i] = 0; | ||
5200 | + } | ||
5201 | +} | ||
5202 | + | ||
5203 | +static void | ||
5204 | +gst_ffmpegdec_add_pixel_aspect_ratio (GstFFMpegDec * ffmpegdec, | ||
5205 | + GstStructure * s) | ||
5206 | +{ | ||
5207 | + gboolean demuxer_par_set = FALSE; | ||
5208 | + gboolean decoder_par_set = FALSE; | ||
5209 | + gint demuxer_num = 1, demuxer_denom = 1; | ||
5210 | + gint decoder_num = 1, decoder_denom = 1; | ||
5211 | + | ||
5212 | + GST_OBJECT_LOCK (ffmpegdec); | ||
5213 | + | ||
5214 | + if (ffmpegdec->par) { | ||
5215 | + demuxer_num = gst_value_get_fraction_numerator (ffmpegdec->par); | ||
5216 | + demuxer_denom = gst_value_get_fraction_denominator (ffmpegdec->par); | ||
5217 | + demuxer_par_set = TRUE; | ||
5218 | + GST_DEBUG_OBJECT (ffmpegdec, "Demuxer PAR: %d:%d", demuxer_num, | ||
5219 | + demuxer_denom); | ||
5220 | + } | ||
5221 | + | ||
5222 | + if (ffmpegdec->context->sample_aspect_ratio.num && | ||
5223 | + ffmpegdec->context->sample_aspect_ratio.den) { | ||
5224 | + decoder_num = ffmpegdec->context->sample_aspect_ratio.num; | ||
5225 | + decoder_denom = ffmpegdec->context->sample_aspect_ratio.den; | ||
5226 | + decoder_par_set = TRUE; | ||
5227 | + GST_DEBUG_OBJECT (ffmpegdec, "Decoder PAR: %d:%d", decoder_num, | ||
5228 | + decoder_denom); | ||
5229 | + } | ||
5230 | + | ||
5231 | + GST_OBJECT_UNLOCK (ffmpegdec); | ||
5232 | + | ||
5233 | + if (!demuxer_par_set && !decoder_par_set) | ||
5234 | + goto no_par; | ||
5235 | + | ||
5236 | + if (demuxer_par_set && !decoder_par_set) | ||
5237 | + goto use_demuxer_par; | ||
5238 | + | ||
5239 | + if (decoder_par_set && !demuxer_par_set) | ||
5240 | + goto use_decoder_par; | ||
5241 | + | ||
5242 | + /* Both the demuxer and the decoder provide a PAR. If one of | ||
5243 | + * the two PARs is 1:1 and the other one is not, use the one | ||
5244 | + * that is not 1:1. */ | ||
5245 | + if (demuxer_num == demuxer_denom && decoder_num != decoder_denom) | ||
5246 | + goto use_decoder_par; | ||
5247 | + | ||
5248 | + if (decoder_num == decoder_denom && demuxer_num != demuxer_denom) | ||
5249 | + goto use_demuxer_par; | ||
5250 | + | ||
5251 | + /* Both PARs are non-1:1, so use the PAR provided by the demuxer */ | ||
5252 | + goto use_demuxer_par; | ||
5253 | + | ||
5254 | +use_decoder_par: | ||
5255 | + { | ||
5256 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
5257 | + "Setting decoder provided pixel-aspect-ratio of %u:%u", decoder_num, | ||
5258 | + decoder_denom); | ||
5259 | + gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, decoder_num, | ||
5260 | + decoder_denom, NULL); | ||
5261 | + return; | ||
5262 | + } | ||
5263 | + | ||
5264 | +use_demuxer_par: | ||
5265 | + { | ||
5266 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
5267 | + "Setting demuxer provided pixel-aspect-ratio of %u:%u", demuxer_num, | ||
5268 | + demuxer_denom); | ||
5269 | + gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, demuxer_num, | ||
5270 | + demuxer_denom, NULL); | ||
5271 | + return; | ||
5272 | + } | ||
5273 | +no_par: | ||
5274 | + { | ||
5275 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
5276 | + "Neither demuxer nor codec provide a pixel-aspect-ratio"); | ||
5277 | + return; | ||
5278 | + } | ||
5279 | +} | ||
5280 | + | ||
5281 | +static gboolean | ||
5282 | +gst_ffmpegdec_negotiate (GstFFMpegDec * ffmpegdec, gboolean force) | ||
5283 | +{ | ||
5284 | + GstFFMpegDecClass *oclass; | ||
5285 | + GstCaps *caps; | ||
5286 | + | ||
5287 | + oclass = (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); | ||
5288 | + | ||
5289 | + switch (oclass->in_plugin->type) { | ||
5290 | + case AVMEDIA_TYPE_VIDEO: | ||
5291 | + if (!force && ffmpegdec->format.video.width == ffmpegdec->context->width | ||
5292 | + && ffmpegdec->format.video.height == ffmpegdec->context->height | ||
5293 | + && ffmpegdec->format.video.fps_n == ffmpegdec->format.video.old_fps_n | ||
5294 | + && ffmpegdec->format.video.fps_d == ffmpegdec->format.video.old_fps_d | ||
5295 | + && ffmpegdec->format.video.pix_fmt == ffmpegdec->context->pix_fmt | ||
5296 | + && ffmpegdec->format.video.par_n == | ||
5297 | + ffmpegdec->context->sample_aspect_ratio.num | ||
5298 | + && ffmpegdec->format.video.par_d == | ||
5299 | + ffmpegdec->context->sample_aspect_ratio.den) | ||
5300 | + return TRUE; | ||
5301 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
5302 | + "Renegotiating video from %dx%d@ %d:%d PAR %d/%d fps to %dx%d@ %d:%d PAR %d/%d fps", | ||
5303 | + ffmpegdec->format.video.width, ffmpegdec->format.video.height, | ||
5304 | + ffmpegdec->format.video.par_n, ffmpegdec->format.video.par_d, | ||
5305 | + ffmpegdec->format.video.old_fps_n, ffmpegdec->format.video.old_fps_n, | ||
5306 | + ffmpegdec->context->width, ffmpegdec->context->height, | ||
5307 | + ffmpegdec->context->sample_aspect_ratio.num, | ||
5308 | + ffmpegdec->context->sample_aspect_ratio.den, | ||
5309 | + ffmpegdec->format.video.fps_n, ffmpegdec->format.video.fps_d); | ||
5310 | + ffmpegdec->format.video.width = ffmpegdec->context->width; | ||
5311 | + ffmpegdec->format.video.height = ffmpegdec->context->height; | ||
5312 | + ffmpegdec->format.video.old_fps_n = ffmpegdec->format.video.fps_n; | ||
5313 | + ffmpegdec->format.video.old_fps_d = ffmpegdec->format.video.fps_d; | ||
5314 | + ffmpegdec->format.video.pix_fmt = ffmpegdec->context->pix_fmt; | ||
5315 | + ffmpegdec->format.video.par_n = | ||
5316 | + ffmpegdec->context->sample_aspect_ratio.num; | ||
5317 | + ffmpegdec->format.video.par_d = | ||
5318 | + ffmpegdec->context->sample_aspect_ratio.den; | ||
5319 | + break; | ||
5320 | + case AVMEDIA_TYPE_AUDIO: | ||
5321 | + { | ||
5322 | + gint depth = av_smp_format_depth (ffmpegdec->context->sample_fmt); | ||
5323 | + if (!force && ffmpegdec->format.audio.samplerate == | ||
5324 | + ffmpegdec->context->sample_rate && | ||
5325 | + ffmpegdec->format.audio.channels == ffmpegdec->context->channels && | ||
5326 | + ffmpegdec->format.audio.depth == depth) | ||
5327 | + return TRUE; | ||
5328 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
5329 | + "Renegotiating audio from %dHz@%dchannels (%d) to %dHz@%dchannels (%d)", | ||
5330 | + ffmpegdec->format.audio.samplerate, ffmpegdec->format.audio.channels, | ||
5331 | + ffmpegdec->format.audio.depth, | ||
5332 | + ffmpegdec->context->sample_rate, ffmpegdec->context->channels, depth); | ||
5333 | + ffmpegdec->format.audio.samplerate = ffmpegdec->context->sample_rate; | ||
5334 | + ffmpegdec->format.audio.channels = ffmpegdec->context->channels; | ||
5335 | + ffmpegdec->format.audio.depth = depth; | ||
5336 | + } | ||
5337 | + break; | ||
5338 | + default: | ||
5339 | + break; | ||
5340 | + } | ||
5341 | + | ||
5342 | + caps = gst_ffmpeg_codectype_to_caps (oclass->in_plugin->type, | ||
5343 | + ffmpegdec->context, oclass->in_plugin->id, FALSE); | ||
5344 | + | ||
5345 | + if (caps == NULL) | ||
5346 | + goto no_caps; | ||
5347 | + | ||
5348 | + switch (oclass->in_plugin->type) { | ||
5349 | + case AVMEDIA_TYPE_VIDEO: | ||
5350 | + { | ||
5351 | + gint width, height; | ||
5352 | + gboolean interlaced; | ||
5353 | + | ||
5354 | + width = ffmpegdec->format.video.clip_width; | ||
5355 | + height = ffmpegdec->format.video.clip_height; | ||
5356 | + interlaced = ffmpegdec->format.video.interlaced; | ||
5357 | + | ||
5358 | + if (width != -1 && height != -1) { | ||
5359 | + /* overwrite the output size with the dimension of the | ||
5360 | + * clipping region but only if they are smaller. */ | ||
5361 | + if (width < ffmpegdec->context->width) | ||
5362 | + gst_caps_set_simple (caps, "width", G_TYPE_INT, width, NULL); | ||
5363 | + if (height < ffmpegdec->context->height) | ||
5364 | + gst_caps_set_simple (caps, "height", G_TYPE_INT, height, NULL); | ||
5365 | + } | ||
5366 | + gst_caps_set_simple (caps, "interlaced", G_TYPE_BOOLEAN, interlaced, | ||
5367 | + NULL); | ||
5368 | + | ||
5369 | + /* If a demuxer provided a framerate then use it (#313970) */ | ||
5370 | + if (ffmpegdec->format.video.fps_n != -1) { | ||
5371 | + gst_caps_set_simple (caps, "framerate", | ||
5372 | + GST_TYPE_FRACTION, ffmpegdec->format.video.fps_n, | ||
5373 | + ffmpegdec->format.video.fps_d, NULL); | ||
5374 | + } | ||
5375 | + gst_ffmpegdec_add_pixel_aspect_ratio (ffmpegdec, | ||
5376 | + gst_caps_get_structure (caps, 0)); | ||
5377 | + break; | ||
5378 | + } | ||
5379 | + case AVMEDIA_TYPE_AUDIO: | ||
5380 | + { | ||
5381 | + break; | ||
5382 | + } | ||
5383 | + default: | ||
5384 | + break; | ||
5385 | + } | ||
5386 | + | ||
5387 | + if (!gst_pad_set_caps (ffmpegdec->srcpad, caps)) | ||
5388 | + goto caps_failed; | ||
5389 | + | ||
5390 | + gst_caps_unref (caps); | ||
5391 | + | ||
5392 | + return TRUE; | ||
5393 | + | ||
5394 | + /* ERRORS */ | ||
5395 | +no_caps: | ||
5396 | + { | ||
5397 | +#ifdef HAVE_FFMPEG_UNINSTALLED | ||
5398 | + /* using internal ffmpeg snapshot */ | ||
5399 | + GST_ELEMENT_ERROR (ffmpegdec, CORE, NEGOTIATION, | ||
5400 | + ("Could not find GStreamer caps mapping for FFmpeg codec '%s'.", | ||
5401 | + oclass->in_plugin->name), (NULL)); | ||
5402 | +#else | ||
5403 | + /* using external ffmpeg */ | ||
5404 | + GST_ELEMENT_ERROR (ffmpegdec, CORE, NEGOTIATION, | ||
5405 | + ("Could not find GStreamer caps mapping for FFmpeg codec '%s', and " | ||
5406 | + "you are using an external libavcodec. This is most likely due to " | ||
5407 | + "a packaging problem and/or libavcodec having been upgraded to a " | ||
5408 | + "version that is not compatible with this version of " | ||
5409 | + "gstreamer-ffmpeg. Make sure your gstreamer-ffmpeg and libavcodec " | ||
5410 | + "packages come from the same source/repository.", | ||
5411 | + oclass->in_plugin->name), (NULL)); | ||
5412 | +#endif | ||
5413 | + return FALSE; | ||
5414 | + } | ||
5415 | +caps_failed: | ||
5416 | + { | ||
5417 | + GST_ELEMENT_ERROR (ffmpegdec, CORE, NEGOTIATION, (NULL), | ||
5418 | + ("Could not set caps for ffmpeg decoder (%s), not fixed?", | ||
5419 | + oclass->in_plugin->name)); | ||
5420 | + gst_caps_unref (caps); | ||
5421 | + | ||
5422 | + return FALSE; | ||
5423 | + } | ||
5424 | +} | ||
5425 | + | ||
5426 | +/* perform qos calculations before decoding the next frame. | ||
5427 | + * | ||
5428 | + * Sets the skip_frame flag and if things are really bad, skips to the next | ||
5429 | + * keyframe. | ||
5430 | + * | ||
5431 | + * Returns TRUE if the frame should be decoded, FALSE if the frame can be dropped | ||
5432 | + * entirely. | ||
5433 | + */ | ||
5434 | +static gboolean | ||
5435 | +gst_ffmpegdec_do_qos (GstFFMpegDec * ffmpegdec, GstClockTime timestamp, | ||
5436 | + gboolean * mode_switch) | ||
5437 | +{ | ||
5438 | + GstClockTimeDiff diff; | ||
5439 | + gdouble proportion; | ||
5440 | + GstClockTime qostime, earliest_time; | ||
5441 | + gboolean res = TRUE; | ||
5442 | + | ||
5443 | + *mode_switch = FALSE; | ||
5444 | + | ||
5445 | + /* no timestamp, can't do QoS */ | ||
5446 | + if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) | ||
5447 | + goto no_qos; | ||
5448 | + | ||
5449 | + /* get latest QoS observation values */ | ||
5450 | + gst_ffmpegdec_read_qos (ffmpegdec, &proportion, &earliest_time); | ||
5451 | + | ||
5452 | + /* skip qos if we have no observation (yet) */ | ||
5453 | + if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) { | ||
5454 | + /* no skip_frame initially */ | ||
5455 | + ffmpegdec->context->skip_frame = AVDISCARD_DEFAULT; | ||
5456 | + goto no_qos; | ||
5457 | + } | ||
5458 | + | ||
5459 | + /* qos is done on running time of the timestamp */ | ||
5460 | + qostime = gst_segment_to_running_time (&ffmpegdec->segment, GST_FORMAT_TIME, | ||
5461 | + timestamp); | ||
5462 | + | ||
5463 | + /* timestamp can be out of segment, then we don't do QoS */ | ||
5464 | + if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (qostime))) | ||
5465 | + goto no_qos; | ||
5466 | + | ||
5467 | + /* see how our next timestamp relates to the latest qos timestamp. negative | ||
5468 | + * values mean we are early, positive values mean we are too late. */ | ||
5469 | + diff = GST_CLOCK_DIFF (qostime, earliest_time); | ||
5470 | + | ||
5471 | + GST_DEBUG_OBJECT (ffmpegdec, "QOS: qostime %" GST_TIME_FORMAT | ||
5472 | + ", earliest %" GST_TIME_FORMAT, GST_TIME_ARGS (qostime), | ||
5473 | + GST_TIME_ARGS (earliest_time)); | ||
5474 | + | ||
5475 | + /* if we are using less than 40% of the available time, we can try to | ||
5476 | + * speed up again when we were slow. */ | ||
5477 | + if (proportion < 0.4 && diff < 0) { | ||
5478 | + goto normal_mode; | ||
5479 | + } else { | ||
5480 | + if (diff >= 0) { | ||
5481 | + /* we're too slow, try to speed up */ | ||
5482 | + /* switch to skip_frame mode */ | ||
5483 | + goto skip_frame; | ||
5484 | + } | ||
5485 | + } | ||
5486 | + | ||
5487 | +no_qos: | ||
5488 | + ffmpegdec->processed++; | ||
5489 | + return TRUE; | ||
5490 | + | ||
5491 | +normal_mode: | ||
5492 | + { | ||
5493 | + if (ffmpegdec->context->skip_frame != AVDISCARD_DEFAULT) { | ||
5494 | + ffmpegdec->context->skip_frame = AVDISCARD_DEFAULT; | ||
5495 | + *mode_switch = TRUE; | ||
5496 | + GST_DEBUG_OBJECT (ffmpegdec, "QOS: normal mode %g < 0.4", proportion); | ||
5497 | + } | ||
5498 | + ffmpegdec->processed++; | ||
5499 | + return TRUE; | ||
5500 | + } | ||
5501 | +skip_frame: | ||
5502 | + { | ||
5503 | + if (ffmpegdec->context->skip_frame != AVDISCARD_NONREF) { | ||
5504 | + ffmpegdec->context->skip_frame = AVDISCARD_NONREF; | ||
5505 | + *mode_switch = TRUE; | ||
5506 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
5507 | + "QOS: hurry up, diff %" G_GINT64_FORMAT " >= 0", diff); | ||
5508 | + } | ||
5509 | + goto drop_qos; | ||
5510 | + } | ||
5511 | +drop_qos: | ||
5512 | + { | ||
5513 | + GstClockTime stream_time, jitter; | ||
5514 | + GstMessage *qos_msg; | ||
5515 | + | ||
5516 | + ffmpegdec->dropped++; | ||
5517 | + stream_time = | ||
5518 | + gst_segment_to_stream_time (&ffmpegdec->segment, GST_FORMAT_TIME, | ||
5519 | + timestamp); | ||
5520 | + jitter = GST_CLOCK_DIFF (qostime, earliest_time); | ||
5521 | + qos_msg = | ||
5522 | + gst_message_new_qos (GST_OBJECT_CAST (ffmpegdec), FALSE, qostime, | ||
5523 | + stream_time, timestamp, GST_CLOCK_TIME_NONE); | ||
5524 | + gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000); | ||
5525 | + gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS, | ||
5526 | + ffmpegdec->processed, ffmpegdec->dropped); | ||
5527 | + gst_element_post_message (GST_ELEMENT_CAST (ffmpegdec), qos_msg); | ||
5528 | + | ||
5529 | + return res; | ||
5530 | + } | ||
5531 | +} | ||
5532 | + | ||
5533 | +/* returns TRUE if buffer is within segment, else FALSE. | ||
5534 | + * if the buffer is on the segment border, its timestamp and duration will be clipped */ | ||
5535 | +static gboolean | ||
5536 | +clip_video_buffer (GstFFMpegDec * dec, GstBuffer * buf, GstClockTime in_ts, | ||
5537 | + GstClockTime in_dur) | ||
5538 | +{ | ||
5539 | + gboolean res = TRUE; | ||
5540 | + gint64 cstart, cstop; | ||
5541 | + GstClockTime stop; | ||
5542 | + | ||
5543 | + GST_LOG_OBJECT (dec, | ||
5544 | + "timestamp:%" GST_TIME_FORMAT " , duration:%" GST_TIME_FORMAT, | ||
5545 | + GST_TIME_ARGS (in_ts), GST_TIME_ARGS (in_dur)); | ||
5546 | + | ||
5547 | + /* can't clip without TIME segment */ | ||
5548 | + if (G_UNLIKELY (dec->segment.format != GST_FORMAT_TIME)) | ||
5549 | + goto beach; | ||
5550 | + | ||
5551 | + /* we need a start time */ | ||
5552 | + if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (in_ts))) | ||
5553 | + goto beach; | ||
5554 | + | ||
5555 | + /* generate valid stop, if duration unknown, we have unknown stop */ | ||
5556 | + stop = | ||
5557 | + GST_CLOCK_TIME_IS_VALID (in_dur) ? (in_ts + in_dur) : GST_CLOCK_TIME_NONE; | ||
5558 | + | ||
5559 | + /* now clip */ | ||
5560 | + res = | ||
5561 | + gst_segment_clip (&dec->segment, GST_FORMAT_TIME, in_ts, stop, &cstart, | ||
5562 | + &cstop); | ||
5563 | + if (G_UNLIKELY (!res)) | ||
5564 | + goto beach; | ||
5565 | + | ||
5566 | + /* we're pretty sure the duration of this buffer is not till the end of this | ||
5567 | + * segment (which _clip will assume when the stop is -1) */ | ||
5568 | + if (stop == GST_CLOCK_TIME_NONE) | ||
5569 | + cstop = GST_CLOCK_TIME_NONE; | ||
5570 | + | ||
5571 | + /* update timestamp and possibly duration if the clipped stop time is | ||
5572 | + * valid */ | ||
5573 | + GST_BUFFER_TIMESTAMP (buf) = cstart; | ||
5574 | + if (GST_CLOCK_TIME_IS_VALID (cstop)) | ||
5575 | + GST_BUFFER_DURATION (buf) = cstop - cstart; | ||
5576 | + | ||
5577 | + GST_LOG_OBJECT (dec, | ||
5578 | + "clipped timestamp:%" GST_TIME_FORMAT " , duration:%" GST_TIME_FORMAT, | ||
5579 | + GST_TIME_ARGS (cstart), GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); | ||
5580 | + | ||
5581 | +beach: | ||
5582 | + GST_LOG_OBJECT (dec, "%sdropping", (res ? "not " : "")); | ||
5583 | + return res; | ||
5584 | +} | ||
5585 | + | ||
5586 | + | ||
5587 | +/* get an outbuf buffer with the current picture */ | ||
5588 | +static GstFlowReturn | ||
5589 | +get_output_buffer (GstFFMpegDec * ffmpegdec, GstBuffer ** outbuf) | ||
5590 | +{ | ||
5591 | + GstFlowReturn ret; | ||
5592 | + | ||
5593 | + ret = GST_FLOW_OK; | ||
5594 | + *outbuf = NULL; | ||
5595 | + | ||
5596 | + if (ffmpegdec->picture->opaque != NULL) { | ||
5597 | + /* we allocated a picture already for ffmpeg to decode into, let's pick it | ||
5598 | + * up and use it now. */ | ||
5599 | + *outbuf = (GstBuffer *) ffmpegdec->picture->opaque; | ||
5600 | + GST_LOG_OBJECT (ffmpegdec, "using opaque buffer %p", *outbuf); | ||
5601 | +#ifndef EXTRA_REF | ||
5602 | + gst_buffer_ref (*outbuf); | ||
5603 | +#endif | ||
5604 | + } else { | ||
5605 | + AVPicture pic, *outpic; | ||
5606 | + gint width, height; | ||
5607 | + | ||
5608 | + GST_LOG_OBJECT (ffmpegdec, "get output buffer"); | ||
5609 | + | ||
5610 | + /* figure out size of output buffer, this is the clipped output size because | ||
5611 | + * we will copy the picture into it but only when the clipping region is | ||
5612 | + * smaller than the actual picture size. */ | ||
5613 | + if ((width = ffmpegdec->format.video.clip_width) == -1) | ||
5614 | + width = ffmpegdec->context->width; | ||
5615 | + else if (width > ffmpegdec->context->width) | ||
5616 | + width = ffmpegdec->context->width; | ||
5617 | + | ||
5618 | + if ((height = ffmpegdec->format.video.clip_height) == -1) | ||
5619 | + height = ffmpegdec->context->height; | ||
5620 | + else if (height > ffmpegdec->context->height) | ||
5621 | + height = ffmpegdec->context->height; | ||
5622 | + | ||
5623 | + GST_LOG_OBJECT (ffmpegdec, "clip width %d/height %d", width, height); | ||
5624 | + | ||
5625 | + ret = alloc_output_buffer (ffmpegdec, outbuf, width, height); | ||
5626 | + if (G_UNLIKELY (ret != GST_FLOW_OK)) | ||
5627 | + goto alloc_failed; | ||
5628 | + | ||
5629 | + /* original ffmpeg code does not handle odd sizes correctly. | ||
5630 | + * This patched-up version does */ | ||
5631 | + gst_ffmpeg_avpicture_fill (&pic, GST_BUFFER_DATA (*outbuf), | ||
5632 | + ffmpegdec->context->pix_fmt, width, height); | ||
5633 | + | ||
5634 | + outpic = (AVPicture *) ffmpegdec->picture; | ||
5635 | + | ||
5636 | + GST_LOG_OBJECT (ffmpegdec, "linesize %d %d %d", outpic->linesize[0], | ||
5637 | + outpic->linesize[1], outpic->linesize[2]); | ||
5638 | + GST_LOG_OBJECT (ffmpegdec, "data %u %u %u", 0, | ||
5639 | + (guint) (outpic->data[1] - outpic->data[0]), | ||
5640 | + (guint) (outpic->data[2] - outpic->data[0])); | ||
5641 | + | ||
5642 | + av_picture_copy (&pic, outpic, ffmpegdec->context->pix_fmt, width, height); | ||
5643 | + } | ||
5644 | + ffmpegdec->picture->reordered_opaque = -1; | ||
5645 | + | ||
5646 | + return ret; | ||
5647 | + | ||
5648 | + /* special cases */ | ||
5649 | +alloc_failed: | ||
5650 | + { | ||
5651 | + GST_DEBUG_OBJECT (ffmpegdec, "pad_alloc failed"); | ||
5652 | + return ret; | ||
5653 | + } | ||
5654 | +} | ||
5655 | + | ||
5656 | +static void | ||
5657 | +clear_queued (GstFFMpegDec * ffmpegdec) | ||
5658 | +{ | ||
5659 | + g_list_foreach (ffmpegdec->queued, (GFunc) gst_mini_object_unref, NULL); | ||
5660 | + g_list_free (ffmpegdec->queued); | ||
5661 | + ffmpegdec->queued = NULL; | ||
5662 | +} | ||
5663 | + | ||
5664 | +static GstFlowReturn | ||
5665 | +flush_queued (GstFFMpegDec * ffmpegdec) | ||
5666 | +{ | ||
5667 | + GstFlowReturn res = GST_FLOW_OK; | ||
5668 | + | ||
5669 | + while (ffmpegdec->queued) { | ||
5670 | + GstBuffer *buf = GST_BUFFER_CAST (ffmpegdec->queued->data); | ||
5671 | + | ||
5672 | + GST_LOG_OBJECT (ffmpegdec, "pushing buffer %p, offset %" | ||
5673 | + G_GUINT64_FORMAT ", timestamp %" | ||
5674 | + GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT, buf, | ||
5675 | + GST_BUFFER_OFFSET (buf), | ||
5676 | + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), | ||
5677 | + GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); | ||
5678 | + | ||
5679 | + /* iterate output queue and push downstream */ | ||
5680 | + res = gst_pad_push (ffmpegdec->srcpad, buf); | ||
5681 | + | ||
5682 | + ffmpegdec->queued = | ||
5683 | + g_list_delete_link (ffmpegdec->queued, ffmpegdec->queued); | ||
5684 | + } | ||
5685 | + return res; | ||
5686 | +} | ||
5687 | + | ||
5688 | +static void | ||
5689 | +gst_avpacket_init (AVPacket * packet, guint8 * data, guint size) | ||
5690 | +{ | ||
5691 | + memset (packet, 0, sizeof (AVPacket)); | ||
5692 | + packet->data = data; | ||
5693 | + packet->size = size; | ||
5694 | +} | ||
5695 | + | ||
5696 | +/* gst_ffmpegdec_[video|audio]_frame: | ||
5697 | + * ffmpegdec: | ||
5698 | + * data: pointer to the data to decode | ||
5699 | + * size: size of data in bytes | ||
5700 | + * in_timestamp: incoming timestamp. | ||
5701 | + * in_duration: incoming duration. | ||
5702 | + * in_offset: incoming offset (frame number). | ||
5703 | + * outbuf: outgoing buffer. Different from NULL ONLY if it contains decoded data. | ||
5704 | + * ret: Return flow. | ||
5705 | + * | ||
5706 | + * Returns: number of bytes used in decoding. The check for successful decode is | ||
5707 | + * outbuf being non-NULL. | ||
5708 | + */ | ||
5709 | +static gint | ||
5710 | +gst_ffmpegdec_video_frame (GstFFMpegDec * ffmpegdec, | ||
5711 | + guint8 * data, guint size, | ||
5712 | + const GstTSInfo * dec_info, GstBuffer ** outbuf, GstFlowReturn * ret) | ||
5713 | +{ | ||
5714 | + gint len = -1; | ||
5715 | + gint have_data; | ||
5716 | + gboolean mode_switch; | ||
5717 | + gboolean decode; | ||
5718 | + gint skip_frame = AVDISCARD_DEFAULT; | ||
5719 | + GstClockTime out_timestamp, out_duration, out_pts; | ||
5720 | + gint64 out_offset; | ||
5721 | + const GstTSInfo *out_info; | ||
5722 | + AVPacket packet; | ||
5723 | + | ||
5724 | + *ret = GST_FLOW_OK; | ||
5725 | + *outbuf = NULL; | ||
5726 | + | ||
5727 | + ffmpegdec->context->opaque = ffmpegdec; | ||
5728 | + | ||
5729 | + /* in case we skip frames */ | ||
5730 | + ffmpegdec->picture->pict_type = -1; | ||
5731 | + | ||
5732 | + /* run QoS code, we don't stop decoding the frame when we are late because | ||
5733 | + * otherwise we might skip a reference frame */ | ||
5734 | + decode = gst_ffmpegdec_do_qos (ffmpegdec, dec_info->timestamp, &mode_switch); | ||
5735 | + | ||
5736 | + if (ffmpegdec->is_realvideo && data != NULL) { | ||
5737 | + gint slice_count; | ||
5738 | + gint i; | ||
5739 | + | ||
5740 | + /* setup the slice table for realvideo */ | ||
5741 | + if (ffmpegdec->context->slice_offset == NULL) | ||
5742 | + ffmpegdec->context->slice_offset = g_malloc (sizeof (guint32) * 1000); | ||
5743 | + | ||
5744 | + slice_count = (*data++) + 1; | ||
5745 | + ffmpegdec->context->slice_count = slice_count; | ||
5746 | + | ||
5747 | + for (i = 0; i < slice_count; i++) { | ||
5748 | + data += 4; | ||
5749 | + ffmpegdec->context->slice_offset[i] = GST_READ_UINT32_LE (data); | ||
5750 | + data += 4; | ||
5751 | + } | ||
5752 | + } | ||
5753 | + | ||
5754 | + if (!decode) { | ||
5755 | + /* no decoding needed, save previous skip_frame value and brutally skip | ||
5756 | + * decoding everything */ | ||
5757 | + skip_frame = ffmpegdec->context->skip_frame; | ||
5758 | + ffmpegdec->context->skip_frame = AVDISCARD_NONREF; | ||
5759 | + } | ||
5760 | + | ||
5761 | + /* save reference to the timing info */ | ||
5762 | + ffmpegdec->context->reordered_opaque = (gint64) dec_info->idx; | ||
5763 | + ffmpegdec->picture->reordered_opaque = (gint64) dec_info->idx; | ||
5764 | + | ||
5765 | + GST_DEBUG_OBJECT (ffmpegdec, "stored opaque values idx %d", dec_info->idx); | ||
5766 | + | ||
5767 | + /* now decode the frame */ | ||
5768 | + gst_avpacket_init (&packet, data, size); | ||
5769 | + len = avcodec_decode_video2 (ffmpegdec->context, | ||
5770 | + ffmpegdec->picture, &have_data, &packet); | ||
5771 | + | ||
5772 | + /* restore previous state */ | ||
5773 | + if (!decode) | ||
5774 | + ffmpegdec->context->skip_frame = skip_frame; | ||
5775 | + | ||
5776 | + GST_DEBUG_OBJECT (ffmpegdec, "after decode: len %d, have_data %d", | ||
5777 | + len, have_data); | ||
5778 | + | ||
5779 | + /* when we are in skip_frame mode, don't complain when ffmpeg returned | ||
5780 | + * no data because we told it to skip stuff. */ | ||
5781 | + if (len < 0 && (mode_switch || ffmpegdec->context->skip_frame)) | ||
5782 | + len = 0; | ||
5783 | + | ||
5784 | + if (len > 0 && have_data <= 0 && (mode_switch | ||
5785 | + || ffmpegdec->context->skip_frame)) { | ||
5786 | + /* we consumed some bytes but nothing was decoded and we are skipping frames, | ||
5787 | + * disable the interpolation of DTS timestamps */ | ||
5788 | + ffmpegdec->last_out = -1; | ||
5789 | + } | ||
5790 | + | ||
5791 | + /* no data, we're done */ | ||
5792 | + if (len < 0 || have_data <= 0) | ||
5793 | + goto beach; | ||
5794 | + | ||
5795 | + /* get the output picture timing info again */ | ||
5796 | + out_info = gst_ts_info_get (ffmpegdec, ffmpegdec->picture->reordered_opaque); | ||
5797 | + out_pts = out_info->timestamp; | ||
5798 | + out_duration = out_info->duration; | ||
5799 | + out_offset = out_info->offset; | ||
5800 | + | ||
5801 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
5802 | + "pts %" G_GUINT64_FORMAT " duration %" G_GUINT64_FORMAT " offset %" | ||
5803 | + G_GINT64_FORMAT, out_pts, out_duration, out_offset); | ||
5804 | + GST_DEBUG_OBJECT (ffmpegdec, "picture: pts %" G_GUINT64_FORMAT, | ||
5805 | + (guint64) ffmpegdec->picture->pts); | ||
5806 | + GST_DEBUG_OBJECT (ffmpegdec, "picture: num %d", | ||
5807 | + ffmpegdec->picture->coded_picture_number); | ||
5808 | + GST_DEBUG_OBJECT (ffmpegdec, "picture: ref %d", | ||
5809 | + ffmpegdec->picture->reference); | ||
5810 | + GST_DEBUG_OBJECT (ffmpegdec, "picture: display %d", | ||
5811 | + ffmpegdec->picture->display_picture_number); | ||
5812 | + GST_DEBUG_OBJECT (ffmpegdec, "picture: opaque %p", | ||
5813 | + ffmpegdec->picture->opaque); | ||
5814 | + GST_DEBUG_OBJECT (ffmpegdec, "picture: reordered opaque %" G_GUINT64_FORMAT, | ||
5815 | + (guint64) ffmpegdec->picture->reordered_opaque); | ||
5816 | + GST_DEBUG_OBJECT (ffmpegdec, "repeat_pict:%d", | ||
5817 | + ffmpegdec->picture->repeat_pict); | ||
5818 | + GST_DEBUG_OBJECT (ffmpegdec, "interlaced_frame:%d", | ||
5819 | + ffmpegdec->picture->interlaced_frame); | ||
5820 | + | ||
5821 | + if (G_UNLIKELY (ffmpegdec->picture->interlaced_frame != | ||
5822 | + ffmpegdec->format.video.interlaced)) { | ||
5823 | + GST_WARNING ("Change in interlacing ! picture:%d, recorded:%d", | ||
5824 | + ffmpegdec->picture->interlaced_frame, | ||
5825 | + ffmpegdec->format.video.interlaced); | ||
5826 | + ffmpegdec->format.video.interlaced = ffmpegdec->picture->interlaced_frame; | ||
5827 | + gst_ffmpegdec_negotiate (ffmpegdec, TRUE); | ||
5828 | + } | ||
5829 | + | ||
5830 | + /* Whether a frame is interlaced or not is unknown at the time of | ||
5831 | + buffer allocation, so caps on the buffer in opaque will have | ||
5832 | + the previous frame's interlaced flag set. So if interlacedness | ||
5833 | + has changed since allocation, we update the buffer (if any) | ||
5834 | + caps now with the correct interlaced flag. */ | ||
5835 | + if (ffmpegdec->picture->opaque != NULL) { | ||
5836 | + GstBuffer *buffer = ffmpegdec->picture->opaque; | ||
5837 | + if (GST_BUFFER_CAPS (buffer) && GST_PAD_CAPS (ffmpegdec->srcpad)) { | ||
5838 | + GstStructure *s = gst_caps_get_structure (GST_BUFFER_CAPS (buffer), 0); | ||
5839 | + gboolean interlaced; | ||
5840 | + gboolean found = gst_structure_get_boolean (s, "interlaced", &interlaced); | ||
5841 | + if (!found || (!!interlaced != !!ffmpegdec->format.video.interlaced)) { | ||
5842 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
5843 | + "Buffer interlacing does not match pad, updating"); | ||
5844 | + buffer = gst_buffer_make_metadata_writable (buffer); | ||
5845 | + gst_buffer_set_caps (buffer, GST_PAD_CAPS (ffmpegdec->srcpad)); | ||
5846 | + ffmpegdec->picture->opaque = buffer; | ||
5847 | + } | ||
5848 | + } | ||
5849 | + } | ||
5850 | + | ||
5851 | + /* check that the timestamps go upwards */ | ||
5852 | + if (ffmpegdec->last_out != -1 && ffmpegdec->last_out > out_pts) { | ||
5853 | + /* timestamps go backwards, this means frames were reordered and we must | ||
5854 | + * be dealing with DTS as the buffer timestamps */ | ||
5855 | + if (!ffmpegdec->reordered_out) { | ||
5856 | + GST_DEBUG_OBJECT (ffmpegdec, "detected reordered out timestamps"); | ||
5857 | + ffmpegdec->reordered_out = TRUE; | ||
5858 | + } | ||
5859 | + if (ffmpegdec->reordered_in) { | ||
5860 | + /* we reset the input reordering here because we want to recover from an | ||
5861 | + * occasionally wrong reordered input timestamp */ | ||
5862 | + GST_DEBUG_OBJECT (ffmpegdec, "assuming DTS input timestamps"); | ||
5863 | + ffmpegdec->reordered_in = FALSE; | ||
5864 | + } | ||
5865 | + } | ||
5866 | + | ||
5867 | + if (out_pts == 0 && out_pts == ffmpegdec->last_out) { | ||
5868 | + GST_LOG_OBJECT (ffmpegdec, "ffmpeg returns 0 timestamps, ignoring"); | ||
5869 | + /* some codecs only output 0 timestamps, when that happens, make us select an | ||
5870 | + * output timestamp based on the input timestamp. We do this by making the | ||
5871 | + * ffmpeg timestamp and the interpolated next timestamp invalid. */ | ||
5872 | + out_pts = -1; | ||
5873 | + ffmpegdec->next_out = -1; | ||
5874 | + } else | ||
5875 | + ffmpegdec->last_out = out_pts; | ||
5876 | + | ||
5877 | + /* we assume DTS as input timestamps unless we see reordered input | ||
5878 | + * timestamps */ | ||
5879 | + if (!ffmpegdec->reordered_in && ffmpegdec->reordered_out) { | ||
5880 | + /* PTS and DTS are the same for keyframes */ | ||
5881 | + if (ffmpegdec->next_out != -1) { | ||
5882 | + /* interpolate all timestamps except for keyframes, FIXME, this is | ||
5883 | + * wrong when QoS is active. */ | ||
5884 | + GST_DEBUG_OBJECT (ffmpegdec, "interpolate timestamps"); | ||
5885 | + out_pts = -1; | ||
5886 | + out_offset = -1; | ||
5887 | + } | ||
5888 | + } | ||
5889 | + | ||
5890 | + /* get a handle to the output buffer */ | ||
5891 | + *ret = get_output_buffer (ffmpegdec, outbuf); | ||
5892 | + if (G_UNLIKELY (*ret != GST_FLOW_OK)) | ||
5893 | + goto no_output; | ||
5894 | + | ||
5895 | + /* | ||
5896 | + * Timestamps: | ||
5897 | + * | ||
5898 | + * 1) Copy picture timestamp if valid | ||
5899 | + * 2) else interpolate from previous output timestamp | ||
5900 | + * 3) else copy input timestamp | ||
5901 | + */ | ||
5902 | + out_timestamp = -1; | ||
5903 | + if (out_pts != -1) { | ||
5904 | + /* Get (interpolated) timestamp from FFMPEG */ | ||
5905 | + out_timestamp = (GstClockTime) out_pts; | ||
5906 | + GST_LOG_OBJECT (ffmpegdec, "using timestamp %" GST_TIME_FORMAT | ||
5907 | + " returned by ffmpeg", GST_TIME_ARGS (out_timestamp)); | ||
5908 | + } | ||
5909 | + if (!GST_CLOCK_TIME_IS_VALID (out_timestamp) && ffmpegdec->next_out != -1) { | ||
5910 | + out_timestamp = ffmpegdec->next_out; | ||
5911 | + GST_LOG_OBJECT (ffmpegdec, "using next timestamp %" GST_TIME_FORMAT, | ||
5912 | + GST_TIME_ARGS (out_timestamp)); | ||
5913 | + } | ||
5914 | + if (!GST_CLOCK_TIME_IS_VALID (out_timestamp)) { | ||
5915 | + out_timestamp = dec_info->timestamp; | ||
5916 | + GST_LOG_OBJECT (ffmpegdec, "using in timestamp %" GST_TIME_FORMAT, | ||
5917 | + GST_TIME_ARGS (out_timestamp)); | ||
5918 | + } | ||
5919 | + GST_BUFFER_TIMESTAMP (*outbuf) = out_timestamp; | ||
5920 | + | ||
5921 | + /* | ||
5922 | + * Offset: | ||
5923 | + * 0) Use stored input offset (from opaque) | ||
5924 | + * 1) Use value converted from timestamp if valid | ||
5925 | + * 2) Use input offset if valid | ||
5926 | + */ | ||
5927 | + if (out_offset != GST_BUFFER_OFFSET_NONE) { | ||
5928 | + /* out_offset already contains the offset from ts_info */ | ||
5929 | + GST_LOG_OBJECT (ffmpegdec, "Using offset returned by ffmpeg"); | ||
5930 | + } else if (out_timestamp != GST_CLOCK_TIME_NONE) { | ||
5931 | + GstFormat out_fmt = GST_FORMAT_DEFAULT; | ||
5932 | + GST_LOG_OBJECT (ffmpegdec, "Using offset converted from timestamp"); | ||
5933 | + /* FIXME, we should really remove this as it's not nice at all to do | ||
5934 | + * upstream queries for each frame to get the frame offset. We also can't | ||
5935 | + * really remove this because it is the only way of setting frame offsets | ||
5936 | + * on outgoing buffers. We should have metadata so that the upstream peer | ||
5937 | + * can set a frame number on the encoded data. */ | ||
5938 | + gst_pad_query_peer_convert (ffmpegdec->sinkpad, | ||
5939 | + GST_FORMAT_TIME, out_timestamp, &out_fmt, &out_offset); | ||
5940 | + } else if (dec_info->offset != GST_BUFFER_OFFSET_NONE) { | ||
5941 | + /* FIXME, the input offset is input media specific and might not | ||
5942 | + * be the same for the output media. (byte offset as input, frame number | ||
5943 | + * as output, for example) */ | ||
5944 | + GST_LOG_OBJECT (ffmpegdec, "using in_offset %" G_GINT64_FORMAT, | ||
5945 | + dec_info->offset); | ||
5946 | + out_offset = dec_info->offset; | ||
5947 | + } else { | ||
5948 | + GST_LOG_OBJECT (ffmpegdec, "no valid offset found"); | ||
5949 | + out_offset = GST_BUFFER_OFFSET_NONE; | ||
5950 | + } | ||
5951 | + GST_BUFFER_OFFSET (*outbuf) = out_offset; | ||
5952 | + | ||
5953 | + /* | ||
5954 | + * Duration: | ||
5955 | + * | ||
5956 | + * 1) Use reordered input duration if valid | ||
5957 | + * 2) Else use input duration | ||
5958 | + * 3) else use input framerate | ||
5959 | + * 4) else use ffmpeg framerate | ||
5960 | + */ | ||
5961 | + if (GST_CLOCK_TIME_IS_VALID (out_duration)) { | ||
5962 | + /* We have a valid (reordered) duration */ | ||
5963 | + GST_LOG_OBJECT (ffmpegdec, "Using duration returned by ffmpeg"); | ||
5964 | + } else if (GST_CLOCK_TIME_IS_VALID (dec_info->duration)) { | ||
5965 | + GST_LOG_OBJECT (ffmpegdec, "using in_duration"); | ||
5966 | + out_duration = dec_info->duration; | ||
5967 | + } else if (GST_CLOCK_TIME_IS_VALID (ffmpegdec->last_diff)) { | ||
5968 | + GST_LOG_OBJECT (ffmpegdec, "using last-diff"); | ||
5969 | + out_duration = ffmpegdec->last_diff; | ||
5970 | + } else { | ||
5971 | + /* if we have an input framerate, use that */ | ||
5972 | + if (ffmpegdec->format.video.fps_n != -1 && | ||
5973 | + (ffmpegdec->format.video.fps_n != 1000 && | ||
5974 | + ffmpegdec->format.video.fps_d != 1)) { | ||
5975 | + GST_LOG_OBJECT (ffmpegdec, "using input framerate for duration"); | ||
5976 | + out_duration = gst_util_uint64_scale_int (GST_SECOND, | ||
5977 | + ffmpegdec->format.video.fps_d, ffmpegdec->format.video.fps_n); | ||
5978 | + } else { | ||
5979 | + /* don't try to use the decoder's framerate when it seems a bit abnormal, | ||
5980 | + * which we assume when den >= 1000... */ | ||
5981 | + if (ffmpegdec->context->time_base.num != 0 && | ||
5982 | + (ffmpegdec->context->time_base.den > 0 && | ||
5983 | + ffmpegdec->context->time_base.den < 1000)) { | ||
5984 | + GST_LOG_OBJECT (ffmpegdec, "using decoder's framerate for duration"); | ||
5985 | + out_duration = gst_util_uint64_scale_int (GST_SECOND, | ||
5986 | + ffmpegdec->context->time_base.num * | ||
5987 | + ffmpegdec->context->ticks_per_frame, | ||
5988 | + ffmpegdec->context->time_base.den); | ||
5989 | + } else { | ||
5990 | + GST_LOG_OBJECT (ffmpegdec, "no valid duration found"); | ||
5991 | + } | ||
5992 | + } | ||
5993 | + } | ||
5994 | + | ||
5995 | + /* Take repeat_pict into account */ | ||
5996 | + if (GST_CLOCK_TIME_IS_VALID (out_duration)) { | ||
5997 | + out_duration += out_duration * ffmpegdec->picture->repeat_pict / 2; | ||
5998 | + } | ||
5999 | + GST_BUFFER_DURATION (*outbuf) = out_duration; | ||
6000 | + | ||
6001 | + if (out_timestamp != -1 && out_duration != -1 && out_duration != 0) | ||
6002 | + ffmpegdec->next_out = out_timestamp + out_duration; | ||
6003 | + else | ||
6004 | + ffmpegdec->next_out = -1; | ||
6005 | + | ||
6006 | + /* now see if we need to clip the buffer against the segment boundaries. */ | ||
6007 | + if (G_UNLIKELY (!clip_video_buffer (ffmpegdec, *outbuf, out_timestamp, | ||
6008 | + out_duration))) | ||
6009 | + goto clipped; | ||
6010 | + | ||
6011 | + if (ffmpegdec->picture->top_field_first) | ||
6012 | + GST_BUFFER_FLAG_SET (*outbuf, GST_VIDEO_BUFFER_TFF); | ||
6013 | + | ||
6014 | + | ||
6015 | +beach: | ||
6016 | + GST_DEBUG_OBJECT (ffmpegdec, "return flow %d, out %p, len %d", | ||
6017 | + *ret, *outbuf, len); | ||
6018 | + return len; | ||
6019 | + | ||
6020 | + /* special cases */ | ||
6021 | +no_output: | ||
6022 | + { | ||
6023 | + GST_DEBUG_OBJECT (ffmpegdec, "no output buffer"); | ||
6024 | + len = -1; | ||
6025 | + goto beach; | ||
6026 | + } | ||
6027 | +clipped: | ||
6028 | + { | ||
6029 | + GST_DEBUG_OBJECT (ffmpegdec, "buffer clipped"); | ||
6030 | + gst_buffer_unref (*outbuf); | ||
6031 | + *outbuf = NULL; | ||
6032 | + goto beach; | ||
6033 | + } | ||
6034 | +} | ||
6035 | + | ||
6036 | +/* returns TRUE if buffer is within segment, else FALSE. | ||
6037 | + * if the buffer is on a segment border, its timestamp and duration will be clipped */ | ||
6038 | +static gboolean | ||
6039 | +clip_audio_buffer (GstFFMpegDec * dec, GstBuffer * buf, GstClockTime in_ts, | ||
6040 | + GstClockTime in_dur) | ||
6041 | +{ | ||
6042 | + GstClockTime stop; | ||
6043 | + gint64 diff, ctime, cstop; | ||
6044 | + gboolean res = TRUE; | ||
6045 | + | ||
6046 | + GST_LOG_OBJECT (dec, | ||
6047 | + "timestamp:%" GST_TIME_FORMAT ", duration:%" GST_TIME_FORMAT | ||
6048 | + ", size %u", GST_TIME_ARGS (in_ts), GST_TIME_ARGS (in_dur), | ||
6049 | + GST_BUFFER_SIZE (buf)); | ||
6050 | + | ||
6051 | + /* can't clip without TIME segment */ | ||
6052 | + if (G_UNLIKELY (dec->segment.format != GST_FORMAT_TIME)) | ||
6053 | + goto beach; | ||
6054 | + | ||
6055 | + /* we need a start time */ | ||
6056 | + if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (in_ts))) | ||
6057 | + goto beach; | ||
6058 | + | ||
6059 | + /* trust duration */ | ||
6060 | + stop = in_ts + in_dur; | ||
6061 | + | ||
6062 | + res = gst_segment_clip (&dec->segment, GST_FORMAT_TIME, in_ts, stop, &ctime, | ||
6063 | + &cstop); | ||
6064 | + if (G_UNLIKELY (!res)) | ||
6065 | + goto out_of_segment; | ||
6066 | + | ||
6067 | + /* see if some clipping happened */ | ||
6068 | + if (G_UNLIKELY ((diff = ctime - in_ts) > 0)) { | ||
6069 | + /* bring clipped time to bytes */ | ||
6070 | + diff = | ||
6071 | + gst_util_uint64_scale_int (diff, dec->format.audio.samplerate, | ||
6072 | + GST_SECOND) * (dec->format.audio.depth * dec->format.audio.channels); | ||
6073 | + | ||
6074 | + GST_DEBUG_OBJECT (dec, "clipping start to %" GST_TIME_FORMAT " %" | ||
6075 | + G_GINT64_FORMAT " bytes", GST_TIME_ARGS (ctime), diff); | ||
6076 | + | ||
6077 | + GST_BUFFER_SIZE (buf) -= diff; | ||
6078 | + GST_BUFFER_DATA (buf) += diff; | ||
6079 | + } | ||
6080 | + if (G_UNLIKELY ((diff = stop - cstop) > 0)) { | ||
6081 | + /* bring clipped time to bytes */ | ||
6082 | + diff = | ||
6083 | + gst_util_uint64_scale_int (diff, dec->format.audio.samplerate, | ||
6084 | + GST_SECOND) * (dec->format.audio.depth * dec->format.audio.channels); | ||
6085 | + | ||
6086 | + GST_DEBUG_OBJECT (dec, "clipping stop to %" GST_TIME_FORMAT " %" | ||
6087 | + G_GINT64_FORMAT " bytes", GST_TIME_ARGS (cstop), diff); | ||
6088 | + | ||
6089 | + GST_BUFFER_SIZE (buf) -= diff; | ||
6090 | + } | ||
6091 | + GST_BUFFER_TIMESTAMP (buf) = ctime; | ||
6092 | + GST_BUFFER_DURATION (buf) = cstop - ctime; | ||
6093 | + | ||
6094 | +beach: | ||
6095 | + GST_LOG_OBJECT (dec, "%sdropping", (res ? "not " : "")); | ||
6096 | + return res; | ||
6097 | + | ||
6098 | + /* ERRORS */ | ||
6099 | +out_of_segment: | ||
6100 | + { | ||
6101 | + GST_LOG_OBJECT (dec, "out of segment"); | ||
6102 | + goto beach; | ||
6103 | + } | ||
6104 | +} | ||
6105 | + | ||
6106 | +static gint | ||
6107 | +gst_ffmpegdec_audio_frame (GstFFMpegDec * ffmpegdec, | ||
6108 | + AVCodec * in_plugin, guint8 * data, guint size, | ||
6109 | + const GstTSInfo * dec_info, GstBuffer ** outbuf, GstFlowReturn * ret) | ||
6110 | +{ | ||
6111 | + gint len = -1; | ||
6112 | + gint have_data = AVCODEC_MAX_AUDIO_FRAME_SIZE; | ||
6113 | + GstClockTime out_timestamp, out_duration; | ||
6114 | + gint64 out_offset; | ||
6115 | + AVPacket packet; | ||
6116 | + | ||
6117 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
6118 | + "size:%d, offset:%" G_GINT64_FORMAT ", ts:%" GST_TIME_FORMAT ", dur:%" | ||
6119 | + GST_TIME_FORMAT ", ffmpegdec->next_out:%" GST_TIME_FORMAT, size, | ||
6120 | + dec_info->offset, GST_TIME_ARGS (dec_info->timestamp), | ||
6121 | + GST_TIME_ARGS (dec_info->duration), GST_TIME_ARGS (ffmpegdec->next_out)); | ||
6122 | + | ||
6123 | + *outbuf = | ||
6124 | + new_aligned_buffer (AVCODEC_MAX_AUDIO_FRAME_SIZE, | ||
6125 | + GST_PAD_CAPS (ffmpegdec->srcpad)); | ||
6126 | + | ||
6127 | + gst_avpacket_init (&packet, data, size); | ||
6128 | + len = avcodec_decode_audio3 (ffmpegdec->context, | ||
6129 | + (int16_t *) GST_BUFFER_DATA (*outbuf), &have_data, &packet); | ||
6130 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
6131 | + "Decode audio: len=%d, have_data=%d", len, have_data); | ||
6132 | + | ||
6133 | + if (len >= 0 && have_data > 0) { | ||
6134 | + GST_DEBUG_OBJECT (ffmpegdec, "Creating output buffer"); | ||
6135 | + if (!gst_ffmpegdec_negotiate (ffmpegdec, FALSE)) { | ||
6136 | + gst_buffer_unref (*outbuf); | ||
6137 | + *outbuf = NULL; | ||
6138 | + len = -1; | ||
6139 | + goto beach; | ||
6140 | + } | ||
6141 | + | ||
6142 | + /* Buffer size */ | ||
6143 | + GST_BUFFER_SIZE (*outbuf) = have_data; | ||
6144 | + | ||
6145 | + /* | ||
6146 | + * Timestamps: | ||
6147 | + * | ||
6148 | + * 1) Copy input timestamp if valid | ||
6149 | + * 2) else interpolate from previous input timestamp | ||
6150 | + */ | ||
6151 | + /* always take timestamps from the input buffer if any */ | ||
6152 | + if (GST_CLOCK_TIME_IS_VALID (dec_info->timestamp)) { | ||
6153 | + out_timestamp = dec_info->timestamp; | ||
6154 | + } else { | ||
6155 | + out_timestamp = ffmpegdec->next_out; | ||
6156 | + } | ||
6157 | + | ||
6158 | + /* | ||
6159 | + * Duration: | ||
6160 | + * | ||
6161 | + * 1) calculate based on number of samples | ||
6162 | + */ | ||
6163 | + out_duration = gst_util_uint64_scale (have_data, GST_SECOND, | ||
6164 | + ffmpegdec->format.audio.depth * ffmpegdec->format.audio.channels * | ||
6165 | + ffmpegdec->format.audio.samplerate); | ||
6166 | + | ||
6167 | + /* offset: | ||
6168 | + * | ||
6169 | + * Just copy | ||
6170 | + */ | ||
6171 | + out_offset = dec_info->offset; | ||
6172 | + | ||
6173 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
6174 | + "Buffer created. Size:%d , timestamp:%" GST_TIME_FORMAT " , duration:%" | ||
6175 | + GST_TIME_FORMAT, have_data, | ||
6176 | + GST_TIME_ARGS (out_timestamp), GST_TIME_ARGS (out_duration)); | ||
6177 | + | ||
6178 | + GST_BUFFER_TIMESTAMP (*outbuf) = out_timestamp; | ||
6179 | + GST_BUFFER_DURATION (*outbuf) = out_duration; | ||
6180 | + GST_BUFFER_OFFSET (*outbuf) = out_offset; | ||
6181 | + gst_buffer_set_caps (*outbuf, GST_PAD_CAPS (ffmpegdec->srcpad)); | ||
6182 | + | ||
6183 | + /* the next timestamp we'll use when interpolating */ | ||
6184 | + if (GST_CLOCK_TIME_IS_VALID (out_timestamp)) | ||
6185 | + ffmpegdec->next_out = out_timestamp + out_duration; | ||
6186 | + | ||
6187 | + /* now see if we need to clip the buffer against the segment boundaries. */ | ||
6188 | + if (G_UNLIKELY (!clip_audio_buffer (ffmpegdec, *outbuf, out_timestamp, | ||
6189 | + out_duration))) | ||
6190 | + goto clipped; | ||
6191 | + | ||
6192 | + } else { | ||
6193 | + gst_buffer_unref (*outbuf); | ||
6194 | + *outbuf = NULL; | ||
6195 | + } | ||
6196 | + | ||
6197 | + /* If we don't error out after the first failed read with the AAC decoder, | ||
6198 | + * we must *not* carry on pushing data, else we'll cause segfaults... */ | ||
6199 | + if (len == -1 && (in_plugin->id == CODEC_ID_AAC | ||
6200 | + || in_plugin->id == CODEC_ID_AAC_LATM)) { | ||
6201 | + GST_ELEMENT_ERROR (ffmpegdec, STREAM, DECODE, (NULL), | ||
6202 | + ("Decoding of AAC stream by FFMPEG failed.")); | ||
6203 | + *ret = GST_FLOW_ERROR; | ||
6204 | + } | ||
6205 | + | ||
6206 | +beach: | ||
6207 | + GST_DEBUG_OBJECT (ffmpegdec, "return flow %d, out %p, len %d", | ||
6208 | + *ret, *outbuf, len); | ||
6209 | + return len; | ||
6210 | + | ||
6211 | + /* ERRORS */ | ||
6212 | +clipped: | ||
6213 | + { | ||
6214 | + GST_DEBUG_OBJECT (ffmpegdec, "buffer clipped"); | ||
6215 | + gst_buffer_unref (*outbuf); | ||
6216 | + *outbuf = NULL; | ||
6217 | + goto beach; | ||
6218 | + } | ||
6219 | +} | ||
6220 | + | ||
6221 | +/* gst_ffmpegdec_frame: | ||
6222 | + * ffmpegdec: | ||
6223 | + * data: pointer to the data to decode | ||
6224 | + * size: size of data in bytes | ||
6225 | + * got_data: 0 if no data was decoded, != 0 otherwise. | ||
6226 | + * in_time: timestamp of data | ||
6227 | + * in_duration: duration of data | ||
6228 | + * ret: GstFlowReturn to return in the chain function | ||
6229 | + * | ||
6230 | + * Decodes the given frame and pushes it downstream. | ||
6231 | + * | ||
6232 | + * Returns: Number of bytes used in decoding, -1 on error/failure. | ||
6233 | + */ | ||
6234 | + | ||
6235 | +static gint | ||
6236 | +gst_ffmpegdec_frame (GstFFMpegDec * ffmpegdec, | ||
6237 | + guint8 * data, guint size, gint * got_data, const GstTSInfo * dec_info, | ||
6238 | + GstFlowReturn * ret) | ||
6239 | +{ | ||
6240 | + GstFFMpegDecClass *oclass; | ||
6241 | + GstBuffer *outbuf = NULL; | ||
6242 | + gint have_data = 0, len = 0; | ||
6243 | + | ||
6244 | + if (G_UNLIKELY (ffmpegdec->context->codec == NULL)) | ||
6245 | + goto no_codec; | ||
6246 | + | ||
6247 | + GST_LOG_OBJECT (ffmpegdec, "data:%p, size:%d, id:%d", data, size, | ||
6248 | + dec_info->idx); | ||
6249 | + | ||
6250 | + *ret = GST_FLOW_OK; | ||
6251 | + ffmpegdec->context->frame_number++; | ||
6252 | + | ||
6253 | + oclass = (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); | ||
6254 | + | ||
6255 | + switch (oclass->in_plugin->type) { | ||
6256 | + case AVMEDIA_TYPE_VIDEO: | ||
6257 | + len = | ||
6258 | + gst_ffmpegdec_video_frame (ffmpegdec, data, size, dec_info, &outbuf, | ||
6259 | + ret); | ||
6260 | + break; | ||
6261 | + case AVMEDIA_TYPE_AUDIO: | ||
6262 | + len = | ||
6263 | + gst_ffmpegdec_audio_frame (ffmpegdec, oclass->in_plugin, data, size, | ||
6264 | + dec_info, &outbuf, ret); | ||
6265 | + | ||
6266 | + /* if we did not get an output buffer and we have a pending discont, don't | ||
6267 | + * clear the input timestamps, we will put them on the next buffer because | ||
6268 | + * otherwise we might create the first buffer with a very big timestamp gap. */ | ||
6269 | + if (outbuf == NULL && ffmpegdec->discont) { | ||
6270 | + GST_DEBUG_OBJECT (ffmpegdec, "no buffer but keeping timestamp"); | ||
6271 | + ffmpegdec->clear_ts = FALSE; | ||
6272 | + } | ||
6273 | + break; | ||
6274 | + default: | ||
6275 | + GST_ERROR_OBJECT (ffmpegdec, "Asked to decode non-audio/video frame !"); | ||
6276 | + g_assert_not_reached (); | ||
6277 | + break; | ||
6278 | + } | ||
6279 | + | ||
6280 | + if (outbuf) | ||
6281 | + have_data = 1; | ||
6282 | + | ||
6283 | + if (len < 0 || have_data < 0) { | ||
6284 | + GST_WARNING_OBJECT (ffmpegdec, | ||
6285 | + "ffdec_%s: decoding error (len: %d, have_data: %d)", | ||
6286 | + oclass->in_plugin->name, len, have_data); | ||
6287 | + *got_data = 0; | ||
6288 | + goto beach; | ||
6289 | + } else if (len == 0 && have_data == 0) { | ||
6290 | + *got_data = 0; | ||
6291 | + goto beach; | ||
6292 | + } else { | ||
6293 | + /* this is where I lost my last clue on ffmpeg... */ | ||
6294 | + *got_data = 1; | ||
6295 | + } | ||
6296 | + | ||
6297 | + if (outbuf) { | ||
6298 | + GST_LOG_OBJECT (ffmpegdec, | ||
6299 | + "Decoded data, now pushing buffer %p with offset %" G_GINT64_FORMAT | ||
6300 | + ", timestamp %" GST_TIME_FORMAT " and duration %" GST_TIME_FORMAT, | ||
6301 | + outbuf, GST_BUFFER_OFFSET (outbuf), | ||
6302 | + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)), | ||
6303 | + GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf))); | ||
6304 | + | ||
6305 | + /* mark pending discont */ | ||
6306 | + if (ffmpegdec->discont) { | ||
6307 | + GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT); | ||
6308 | + ffmpegdec->discont = FALSE; | ||
6309 | + } | ||
6310 | + | ||
6311 | + if (ffmpegdec->segment.rate > 0.0) { | ||
6312 | + /* and off we go */ | ||
6313 | + *ret = gst_pad_push (ffmpegdec->srcpad, outbuf); | ||
6314 | + } else { | ||
6315 | + /* reverse playback, queue frame till later when we get a discont. */ | ||
6316 | + GST_DEBUG_OBJECT (ffmpegdec, "queued frame"); | ||
6317 | + ffmpegdec->queued = g_list_prepend (ffmpegdec->queued, outbuf); | ||
6318 | + *ret = GST_FLOW_OK; | ||
6319 | + } | ||
6320 | + } else { | ||
6321 | + GST_DEBUG_OBJECT (ffmpegdec, "We didn't get a decoded buffer"); | ||
6322 | + } | ||
6323 | + | ||
6324 | +beach: | ||
6325 | + return len; | ||
6326 | + | ||
6327 | + /* ERRORS */ | ||
6328 | +no_codec: | ||
6329 | + { | ||
6330 | + GST_ERROR_OBJECT (ffmpegdec, "no codec context"); | ||
6331 | + return -1; | ||
6332 | + } | ||
6333 | +} | ||
6334 | + | ||
6335 | +static void | ||
6336 | +gst_ffmpegdec_drain (GstFFMpegDec * ffmpegdec) | ||
6337 | +{ | ||
6338 | + GstFFMpegDecClass *oclass; | ||
6339 | + | ||
6340 | + oclass = (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); | ||
6341 | + | ||
6342 | + if (oclass->in_plugin->capabilities & CODEC_CAP_DELAY) { | ||
6343 | + gint have_data, len, try = 0; | ||
6344 | + | ||
6345 | + GST_LOG_OBJECT (ffmpegdec, | ||
6346 | + "codec has delay capabilities, calling until ffmpeg has drained everything"); | ||
6347 | + | ||
6348 | + do { | ||
6349 | + GstFlowReturn ret; | ||
6350 | + | ||
6351 | + len = | ||
6352 | + gst_ffmpegdec_frame (ffmpegdec, NULL, 0, &have_data, &ts_info_none, | ||
6353 | + &ret); | ||
6354 | + if (len < 0 || have_data == 0) | ||
6355 | + break; | ||
6356 | + } while (try++ < 10); | ||
6357 | + } | ||
6358 | + if (ffmpegdec->segment.rate < 0.0) { | ||
6359 | + /* if we have some queued frames for reverse playback, flush them now */ | ||
6360 | + flush_queued (ffmpegdec); | ||
6361 | + } | ||
6362 | +} | ||
6363 | + | ||
6364 | +static void | ||
6365 | +gst_ffmpegdec_flush_pcache (GstFFMpegDec * ffmpegdec) | ||
6366 | +{ | ||
6367 | + if (ffmpegdec->pctx) { | ||
6368 | + gint size, bsize; | ||
6369 | + guint8 *data; | ||
6370 | + guint8 bdata[FF_INPUT_BUFFER_PADDING_SIZE]; | ||
6371 | + | ||
6372 | + bsize = FF_INPUT_BUFFER_PADDING_SIZE; | ||
6373 | + memset (bdata, 0, bsize); | ||
6374 | + | ||
6375 | + /* parse some dummy data to work around some ffmpeg weirdness where it keeps | ||
6376 | + * the previous pts around */ | ||
6377 | + av_parser_parse2 (ffmpegdec->pctx, ffmpegdec->context, | ||
6378 | + &data, &size, bdata, bsize, -1, -1, -1); | ||
6379 | + ffmpegdec->pctx->pts = -1; | ||
6380 | + ffmpegdec->pctx->dts = -1; | ||
6381 | + } | ||
6382 | + | ||
6383 | + if (ffmpegdec->pcache) { | ||
6384 | + gst_buffer_unref (ffmpegdec->pcache); | ||
6385 | + ffmpegdec->pcache = NULL; | ||
6386 | + } | ||
6387 | +} | ||
6388 | + | ||
6389 | +static gboolean | ||
6390 | +gst_ffmpegdec_sink_event (GstPad * pad, GstEvent * event) | ||
6391 | +{ | ||
6392 | + GstFFMpegDec *ffmpegdec; | ||
6393 | + gboolean ret = FALSE; | ||
6394 | + | ||
6395 | + ffmpegdec = (GstFFMpegDec *) gst_pad_get_parent (pad); | ||
6396 | + | ||
6397 | + GST_DEBUG_OBJECT (ffmpegdec, "Handling %s event", | ||
6398 | + GST_EVENT_TYPE_NAME (event)); | ||
6399 | + | ||
6400 | + switch (GST_EVENT_TYPE (event)) { | ||
6401 | + case GST_EVENT_EOS: | ||
6402 | + { | ||
6403 | + gst_ffmpegdec_drain (ffmpegdec); | ||
6404 | + break; | ||
6405 | + } | ||
6406 | + case GST_EVENT_FLUSH_STOP: | ||
6407 | + { | ||
6408 | + if (ffmpegdec->opened) { | ||
6409 | + avcodec_flush_buffers (ffmpegdec->context); | ||
6410 | + } | ||
6411 | + gst_ffmpegdec_reset_ts (ffmpegdec); | ||
6412 | + gst_ffmpegdec_reset_qos (ffmpegdec); | ||
6413 | + gst_ffmpegdec_flush_pcache (ffmpegdec); | ||
6414 | + gst_segment_init (&ffmpegdec->segment, GST_FORMAT_TIME); | ||
6415 | + clear_queued (ffmpegdec); | ||
6416 | + break; | ||
6417 | + } | ||
6418 | + case GST_EVENT_NEWSEGMENT: | ||
6419 | + { | ||
6420 | + gboolean update; | ||
6421 | + GstFormat fmt; | ||
6422 | + gint64 start, stop, time; | ||
6423 | + gdouble rate, arate; | ||
6424 | + | ||
6425 | + gst_event_parse_new_segment_full (event, &update, &rate, &arate, &fmt, | ||
6426 | + &start, &stop, &time); | ||
6427 | + | ||
6428 | + switch (fmt) { | ||
6429 | + case GST_FORMAT_TIME: | ||
6430 | + /* fine, our native segment format */ | ||
6431 | + break; | ||
6432 | + case GST_FORMAT_BYTES: | ||
6433 | + { | ||
6434 | + gint bit_rate; | ||
6435 | + | ||
6436 | + bit_rate = ffmpegdec->context->bit_rate; | ||
6437 | + | ||
6438 | + /* convert to time or fail */ | ||
6439 | + if (!bit_rate) | ||
6440 | + goto no_bitrate; | ||
6441 | + | ||
6442 | + GST_DEBUG_OBJECT (ffmpegdec, "bitrate: %d", bit_rate); | ||
6443 | + | ||
6444 | + /* convert values to TIME */ | ||
6445 | + if (start != -1) | ||
6446 | + start = gst_util_uint64_scale_int (start, GST_SECOND, bit_rate); | ||
6447 | + if (stop != -1) | ||
6448 | + stop = gst_util_uint64_scale_int (stop, GST_SECOND, bit_rate); | ||
6449 | + if (time != -1) | ||
6450 | + time = gst_util_uint64_scale_int (time, GST_SECOND, bit_rate); | ||
6451 | + | ||
6452 | + /* unref old event */ | ||
6453 | + gst_event_unref (event); | ||
6454 | + | ||
6455 | + /* create new converted time segment */ | ||
6456 | + fmt = GST_FORMAT_TIME; | ||
6457 | + /* FIXME, bitrate is not good enough too find a good stop, let's | ||
6458 | + * hope start and time were 0... meh. */ | ||
6459 | + stop = -1; | ||
6460 | + event = gst_event_new_new_segment (update, rate, fmt, | ||
6461 | + start, stop, time); | ||
6462 | + break; | ||
6463 | + } | ||
6464 | + default: | ||
6465 | + /* invalid format */ | ||
6466 | + goto invalid_format; | ||
6467 | + } | ||
6468 | + | ||
6469 | + /* drain pending frames before trying to use the new segment, queued | ||
6470 | + * buffers belonged to the previous segment. */ | ||
6471 | + if (ffmpegdec->context->codec) | ||
6472 | + gst_ffmpegdec_drain (ffmpegdec); | ||
6473 | + | ||
6474 | + GST_DEBUG_OBJECT (ffmpegdec, | ||
6475 | + "NEWSEGMENT in time start %" GST_TIME_FORMAT " -- stop %" | ||
6476 | + GST_TIME_FORMAT, GST_TIME_ARGS (start), GST_TIME_ARGS (stop)); | ||
6477 | + | ||
6478 | + /* and store the values */ | ||
6479 | + gst_segment_set_newsegment_full (&ffmpegdec->segment, update, | ||
6480 | + rate, arate, fmt, start, stop, time); | ||
6481 | + break; | ||
6482 | + } | ||
6483 | + default: | ||
6484 | + break; | ||
6485 | + } | ||
6486 | + | ||
6487 | + /* and push segment downstream */ | ||
6488 | + ret = gst_pad_push_event (ffmpegdec->srcpad, event); | ||
6489 | + | ||
6490 | +done: | ||
6491 | + gst_object_unref (ffmpegdec); | ||
6492 | + | ||
6493 | + return ret; | ||
6494 | + | ||
6495 | + /* ERRORS */ | ||
6496 | +no_bitrate: | ||
6497 | + { | ||
6498 | + GST_WARNING_OBJECT (ffmpegdec, "no bitrate to convert BYTES to TIME"); | ||
6499 | + gst_event_unref (event); | ||
6500 | + goto done; | ||
6501 | + } | ||
6502 | +invalid_format: | ||
6503 | + { | ||
6504 | + GST_WARNING_OBJECT (ffmpegdec, "unknown format received in NEWSEGMENT"); | ||
6505 | + gst_event_unref (event); | ||
6506 | + goto done; | ||
6507 | + } | ||
6508 | +} | ||
6509 | + | ||
6510 | +static GstFlowReturn | ||
6511 | +gst_ffmpegdec_chain (GstPad * pad, GstBuffer * inbuf) | ||
6512 | +{ | ||
6513 | + GstFFMpegDec *ffmpegdec; | ||
6514 | + GstFFMpegDecClass *oclass; | ||
6515 | + guint8 *data, *bdata; | ||
6516 | + gint size, bsize, len, have_data; | ||
6517 | + GstFlowReturn ret = GST_FLOW_OK; | ||
6518 | + GstClockTime in_timestamp; | ||
6519 | + GstClockTime in_duration; | ||
6520 | + gboolean discont; | ||
6521 | + gint64 in_offset; | ||
6522 | + const GstTSInfo *in_info; | ||
6523 | + const GstTSInfo *dec_info; | ||
6524 | + | ||
6525 | + ffmpegdec = (GstFFMpegDec *) (GST_PAD_PARENT (pad)); | ||
6526 | + | ||
6527 | + if (G_UNLIKELY (!ffmpegdec->opened)) | ||
6528 | + goto not_negotiated; | ||
6529 | + | ||
6530 | + discont = GST_BUFFER_IS_DISCONT (inbuf); | ||
6531 | + | ||
6532 | + /* The discont flag marks a buffer that is not continuous with the previous | ||
6533 | + * buffer. This means we need to clear whatever data we currently have. We | ||
6534 | + * currently also wait for a new keyframe, which might be suboptimal in the | ||
6535 | + * case of a network error; better to show the errors than to drop all data. */ | ||
6536 | + if (G_UNLIKELY (discont)) { | ||
6537 | + GST_DEBUG_OBJECT (ffmpegdec, "received DISCONT"); | ||
6538 | + /* drain what we have queued */ | ||
6539 | + gst_ffmpegdec_drain (ffmpegdec); | ||
6540 | + gst_ffmpegdec_flush_pcache (ffmpegdec); | ||
6541 | + avcodec_flush_buffers (ffmpegdec->context); | ||
6542 | + ffmpegdec->discont = TRUE; | ||
6543 | + gst_ffmpegdec_reset_ts (ffmpegdec); | ||
6544 | + } | ||
6545 | + /* by default we clear the input timestamp after decoding each frame so that | ||
6546 | + * interpolation can work. */ | ||
6547 | + ffmpegdec->clear_ts = TRUE; | ||
6548 | + | ||
6549 | + oclass = (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); | ||
6550 | + | ||
6551 | + /* parse cache joining: if there is cached data, join it with the new buffer */ | ||
6552 | + if (ffmpegdec->pcache) { | ||
6553 | + /* join with previous data */ | ||
6554 | + GST_LOG_OBJECT (ffmpegdec, "join parse cache"); | ||
6555 | + inbuf = gst_buffer_join (ffmpegdec->pcache, inbuf); | ||
6556 | + /* no more cached data, we assume we can consume the complete cache */ | ||
6557 | + ffmpegdec->pcache = NULL; | ||
6558 | + } | ||
6559 | + | ||
6560 | + in_timestamp = GST_BUFFER_TIMESTAMP (inbuf); | ||
6561 | + in_duration = GST_BUFFER_DURATION (inbuf); | ||
6562 | + in_offset = GST_BUFFER_OFFSET (inbuf); | ||
6563 | + | ||
6564 | + /* get handle to timestamp info, we can pass this around to ffmpeg */ | ||
6565 | + in_info = gst_ts_info_store (ffmpegdec, in_timestamp, in_duration, in_offset); | ||
6566 | + | ||
6567 | + if (in_timestamp != -1) { | ||
6568 | + /* check for increasing timestamps if they are jumping backwards, we | ||
6569 | + * probably are dealing with PTS as timestamps */ | ||
6570 | + if (!ffmpegdec->reordered_in && ffmpegdec->last_in != -1) { | ||
6571 | + if (in_timestamp < ffmpegdec->last_in) { | ||
6572 | + GST_LOG_OBJECT (ffmpegdec, "detected reordered input timestamps"); | ||
6573 | + ffmpegdec->reordered_in = TRUE; | ||
6574 | + ffmpegdec->last_diff = GST_CLOCK_TIME_NONE; | ||
6575 | + } else if (in_timestamp > ffmpegdec->last_in) { | ||
6576 | + GstClockTime diff; | ||
6577 | + /* keep track of timestamp diff to estimate duration */ | ||
6578 | + diff = in_timestamp - ffmpegdec->last_in; | ||
6579 | + /* need to scale with amount of frames in the interval */ | ||
6580 | + if (ffmpegdec->last_frames) | ||
6581 | + diff /= ffmpegdec->last_frames; | ||
6582 | + | ||
6583 | + GST_LOG_OBJECT (ffmpegdec, "estimated duration %" GST_TIME_FORMAT " %u", | ||
6584 | + GST_TIME_ARGS (diff), ffmpegdec->last_frames); | ||
6585 | + | ||
6586 | + ffmpegdec->last_diff = diff; | ||
6587 | + } | ||
6588 | + } | ||
6589 | + ffmpegdec->last_in = in_timestamp; | ||
6590 | + ffmpegdec->last_frames = 0; | ||
6591 | + } | ||
6592 | + | ||
6593 | + GST_LOG_OBJECT (ffmpegdec, | ||
6594 | + "Received new data of size %u, offset:%" G_GUINT64_FORMAT ", ts:%" | ||
6595 | + GST_TIME_FORMAT ", dur:%" GST_TIME_FORMAT ", info %d", | ||
6596 | + GST_BUFFER_SIZE (inbuf), GST_BUFFER_OFFSET (inbuf), | ||
6597 | + GST_TIME_ARGS (in_timestamp), GST_TIME_ARGS (in_duration), in_info->idx); | ||
6598 | + | ||
6599 | + /* workarounds, functions write to buffers: | ||
6600 | + * libavcodec/svq1.c:svq1_decode_frame writes to the given buffer. | ||
6601 | + * libavcodec/svq3.c:svq3_decode_slice_header too. | ||
6602 | + * ffmpeg devs know about it and will fix it (they said). */ | ||
6603 | + if (oclass->in_plugin->id == CODEC_ID_SVQ1 || | ||
6604 | + oclass->in_plugin->id == CODEC_ID_SVQ3) { | ||
6605 | + inbuf = gst_buffer_make_writable (inbuf); | ||
6606 | + } | ||
6607 | + | ||
6608 | + bdata = GST_BUFFER_DATA (inbuf); | ||
6609 | + bsize = GST_BUFFER_SIZE (inbuf); | ||
6610 | + | ||
6611 | + if (ffmpegdec->do_padding) { | ||
6612 | + /* add padding */ | ||
6613 | + if (ffmpegdec->padded_size < bsize + FF_INPUT_BUFFER_PADDING_SIZE) { | ||
6614 | + ffmpegdec->padded_size = bsize + FF_INPUT_BUFFER_PADDING_SIZE; | ||
6615 | + ffmpegdec->padded = g_realloc (ffmpegdec->padded, ffmpegdec->padded_size); | ||
6616 | + GST_LOG_OBJECT (ffmpegdec, "resized padding buffer to %d", | ||
6617 | + ffmpegdec->padded_size); | ||
6618 | + } | ||
6619 | + memcpy (ffmpegdec->padded, bdata, bsize); | ||
6620 | + memset (ffmpegdec->padded + bsize, 0, FF_INPUT_BUFFER_PADDING_SIZE); | ||
6621 | + | ||
6622 | + bdata = ffmpegdec->padded; | ||
6623 | + } | ||
6624 | + | ||
6625 | + do { | ||
6626 | + guint8 tmp_padding[FF_INPUT_BUFFER_PADDING_SIZE]; | ||
6627 | + | ||
6628 | + /* parse, if at all possible */ | ||
6629 | + if (ffmpegdec->pctx) { | ||
6630 | + gint res; | ||
6631 | + | ||
6632 | + GST_LOG_OBJECT (ffmpegdec, | ||
6633 | + "Calling av_parser_parse2 with offset %" G_GINT64_FORMAT ", ts:%" | ||
6634 | + GST_TIME_FORMAT " size %d", in_offset, GST_TIME_ARGS (in_timestamp), | ||
6635 | + bsize); | ||
6636 | + | ||
6637 | + /* feed the parser. We pass the timestamp info so that we can recover all | ||
6638 | + * info again later */ | ||
6639 | + res = av_parser_parse2 (ffmpegdec->pctx, ffmpegdec->context, | ||
6640 | + &data, &size, bdata, bsize, in_info->idx, in_info->idx, in_offset); | ||
6641 | + | ||
6642 | + GST_LOG_OBJECT (ffmpegdec, | ||
6643 | + "parser returned res %d and size %d, id %" G_GINT64_FORMAT, res, size, | ||
6644 | + ffmpegdec->pctx->pts); | ||
6645 | + | ||
6646 | + /* store pts for decoding */ | ||
6647 | + if (ffmpegdec->pctx->pts != AV_NOPTS_VALUE && ffmpegdec->pctx->pts != -1) | ||
6648 | + dec_info = gst_ts_info_get (ffmpegdec, ffmpegdec->pctx->pts); | ||
6649 | + else { | ||
6650 | + /* ffmpeg sometimes loses track after a flush, help it by feeding a | ||
6651 | + * valid start time */ | ||
6652 | + ffmpegdec->pctx->pts = in_info->idx; | ||
6653 | + ffmpegdec->pctx->dts = in_info->idx; | ||
6654 | + dec_info = in_info; | ||
6655 | + } | ||
6656 | + | ||
6657 | + GST_LOG_OBJECT (ffmpegdec, "consuming %d bytes. id %d", size, | ||
6658 | + dec_info->idx); | ||
6659 | + | ||
6660 | + if (res) { | ||
6661 | + /* there is output, set pointers for next round. */ | ||
6662 | + bsize -= res; | ||
6663 | + bdata += res; | ||
6664 | + } else { | ||
6665 | + /* Parser did not consume any data, make sure we don't clear the | ||
6666 | + * timestamp for the next round */ | ||
6667 | + ffmpegdec->clear_ts = FALSE; | ||
6668 | + } | ||
6669 | + | ||
6670 | + /* if there is no output, we must break and wait for more data. also the | ||
6671 | + * timestamp in the context is not updated. */ | ||
6672 | + if (size == 0) { | ||
6673 | + if (bsize > 0) | ||
6674 | + continue; | ||
6675 | + else | ||
6676 | + break; | ||
6677 | + } | ||
6678 | + } else { | ||
6679 | + data = bdata; | ||
6680 | + size = bsize; | ||
6681 | + | ||
6682 | + dec_info = in_info; | ||
6683 | + } | ||
6684 | + | ||
6685 | + if (ffmpegdec->do_padding) { | ||
6686 | + /* add temporary padding */ | ||
6687 | + memcpy (tmp_padding, data + size, FF_INPUT_BUFFER_PADDING_SIZE); | ||
6688 | + memset (data + size, 0, FF_INPUT_BUFFER_PADDING_SIZE); | ||
6689 | + } | ||
6690 | + | ||
6691 | + /* decode a frame of audio/video now */ | ||
6692 | + len = | ||
6693 | + gst_ffmpegdec_frame (ffmpegdec, data, size, &have_data, dec_info, &ret); | ||
6694 | + | ||
6695 | + if (ffmpegdec->do_padding) { | ||
6696 | + memcpy (data + size, tmp_padding, FF_INPUT_BUFFER_PADDING_SIZE); | ||
6697 | + } | ||
6698 | + | ||
6699 | + if (ret != GST_FLOW_OK) { | ||
6700 | + GST_LOG_OBJECT (ffmpegdec, "breaking because of flow ret %s", | ||
6701 | + gst_flow_get_name (ret)); | ||
6702 | + /* bad flow return, make sure we discard all data and exit */ | ||
6703 | + bsize = 0; | ||
6704 | + break; | ||
6705 | + } | ||
6706 | + if (!ffmpegdec->pctx) { | ||
6707 | + if (len == 0 && !have_data) { | ||
6708 | + /* nothing was decoded, this could be because no data was available or | ||
6709 | + * because we were skipping frames. | ||
6710 | + * If we have no context we must exit and wait for more data, we keep the | ||
6711 | + * data we tried. */ | ||
6712 | + GST_LOG_OBJECT (ffmpegdec, "Decoding didn't return any data, breaking"); | ||
6713 | + break; | ||
6714 | + } else if (len < 0) { | ||
6715 | + /* a decoding error happened, we must break and try again with next data. */ | ||
6716 | + GST_LOG_OBJECT (ffmpegdec, "Decoding error, breaking"); | ||
6717 | + bsize = 0; | ||
6718 | + break; | ||
6719 | + } | ||
6720 | + /* prepare for the next round, for codecs with a context we did this | ||
6721 | + * already when using the parser. */ | ||
6722 | + bsize -= len; | ||
6723 | + bdata += len; | ||
6724 | + } else { | ||
6725 | + if (len == 0) { | ||
6726 | + /* nothing was decoded, this could be because no data was available or | ||
6727 | + * because we were skipping frames. Since we have a parser we can | ||
6728 | + * continue with the next frame */ | ||
6729 | + GST_LOG_OBJECT (ffmpegdec, | ||
6730 | + "Decoding didn't return any data, trying next"); | ||
6731 | + } else if (len < 0) { | ||
6732 | + /* we have a context that will bring us to the next frame */ | ||
6733 | + GST_LOG_OBJECT (ffmpegdec, "Decoding error, trying next"); | ||
6734 | + } | ||
6735 | + } | ||
6736 | + | ||
6737 | + /* make sure we don't use the same old timestamp for the next frame and let | ||
6738 | + * the interpolation take care of it. */ | ||
6739 | + if (ffmpegdec->clear_ts) { | ||
6740 | + in_timestamp = GST_CLOCK_TIME_NONE; | ||
6741 | + in_duration = GST_CLOCK_TIME_NONE; | ||
6742 | + in_offset = GST_BUFFER_OFFSET_NONE; | ||
6743 | + in_info = GST_TS_INFO_NONE; | ||
6744 | + } else { | ||
6745 | + ffmpegdec->clear_ts = TRUE; | ||
6746 | + } | ||
6747 | + ffmpegdec->last_frames++; | ||
6748 | + | ||
6749 | + GST_LOG_OBJECT (ffmpegdec, "Before (while bsize>0). bsize:%d , bdata:%p", | ||
6750 | + bsize, bdata); | ||
6751 | + } while (bsize > 0); | ||
6752 | + | ||
6753 | + /* keep left-over */ | ||
6754 | + if (ffmpegdec->pctx && bsize > 0) { | ||
6755 | + in_timestamp = GST_BUFFER_TIMESTAMP (inbuf); | ||
6756 | + in_offset = GST_BUFFER_OFFSET (inbuf); | ||
6757 | + | ||
6758 | + GST_LOG_OBJECT (ffmpegdec, | ||
6759 | + "Keeping %d bytes of data with offset %" G_GINT64_FORMAT ", timestamp %" | ||
6760 | + GST_TIME_FORMAT, bsize, in_offset, GST_TIME_ARGS (in_timestamp)); | ||
6761 | + | ||
6762 | + ffmpegdec->pcache = gst_buffer_create_sub (inbuf, | ||
6763 | + GST_BUFFER_SIZE (inbuf) - bsize, bsize); | ||
6764 | + /* we keep timestamp, even though all we really know is that the correct | ||
6765 | + * timestamp is not below the one from inbuf */ | ||
6766 | + GST_BUFFER_TIMESTAMP (ffmpegdec->pcache) = in_timestamp; | ||
6767 | + GST_BUFFER_OFFSET (ffmpegdec->pcache) = in_offset; | ||
6768 | + } else if (bsize > 0) { | ||
6769 | + GST_DEBUG_OBJECT (ffmpegdec, "Dropping %d bytes of data", bsize); | ||
6770 | + } | ||
6771 | + gst_buffer_unref (inbuf); | ||
6772 | + | ||
6773 | + return ret; | ||
6774 | + | ||
6775 | + /* ERRORS */ | ||
6776 | +not_negotiated: | ||
6777 | + { | ||
6778 | + oclass = (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec)); | ||
6779 | + GST_ELEMENT_ERROR (ffmpegdec, CORE, NEGOTIATION, (NULL), | ||
6780 | + ("ffdec_%s: input format was not set before data start", | ||
6781 | + oclass->in_plugin->name)); | ||
6782 | + gst_buffer_unref (inbuf); | ||
6783 | + return GST_FLOW_NOT_NEGOTIATED; | ||
6784 | + } | ||
6785 | +} | ||
6786 | + | ||
6787 | +static GstStateChangeReturn | ||
6788 | +gst_ffmpegdec_change_state (GstElement * element, GstStateChange transition) | ||
6789 | +{ | ||
6790 | + GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) element; | ||
6791 | + GstStateChangeReturn ret; | ||
6792 | + | ||
6793 | + ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); | ||
6794 | + | ||
6795 | + switch (transition) { | ||
6796 | + case GST_STATE_CHANGE_PAUSED_TO_READY: | ||
6797 | + GST_OBJECT_LOCK (ffmpegdec); | ||
6798 | + gst_ffmpegdec_close (ffmpegdec); | ||
6799 | + GST_OBJECT_UNLOCK (ffmpegdec); | ||
6800 | + clear_queued (ffmpegdec); | ||
6801 | + g_free (ffmpegdec->padded); | ||
6802 | + ffmpegdec->padded = NULL; | ||
6803 | + ffmpegdec->padded_size = 0; | ||
6804 | + ffmpegdec->can_allocate_aligned = TRUE; | ||
6805 | + break; | ||
6806 | + default: | ||
6807 | + break; | ||
6808 | + } | ||
6809 | + | ||
6810 | + return ret; | ||
6811 | +} | ||
6812 | + | ||
6813 | +static void | ||
6814 | +gst_ffmpegdec_set_property (GObject * object, | ||
6815 | + guint prop_id, const GValue * value, GParamSpec * pspec) | ||
6816 | +{ | ||
6817 | + GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) object; | ||
6818 | + | ||
6819 | + switch (prop_id) { | ||
6820 | + case PROP_LOWRES: | ||
6821 | + ffmpegdec->lowres = ffmpegdec->context->lowres = g_value_get_enum (value); | ||
6822 | + break; | ||
6823 | + case PROP_SKIPFRAME: | ||
6824 | + ffmpegdec->skip_frame = ffmpegdec->context->skip_frame = | ||
6825 | + g_value_get_enum (value); | ||
6826 | + break; | ||
6827 | + case PROP_DIRECT_RENDERING: | ||
6828 | + ffmpegdec->direct_rendering = g_value_get_boolean (value); | ||
6829 | + break; | ||
6830 | + case PROP_DO_PADDING: | ||
6831 | + ffmpegdec->do_padding = g_value_get_boolean (value); | ||
6832 | + break; | ||
6833 | + case PROP_DEBUG_MV: | ||
6834 | + ffmpegdec->debug_mv = ffmpegdec->context->debug_mv = | ||
6835 | + g_value_get_boolean (value); | ||
6836 | + break; | ||
6837 | + case PROP_CROP: | ||
6838 | + ffmpegdec->crop = g_value_get_boolean (value); | ||
6839 | + break; | ||
6840 | + case PROP_MAX_THREADS: | ||
6841 | + ffmpegdec->max_threads = g_value_get_int (value); | ||
6842 | + break; | ||
6843 | + default: | ||
6844 | + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); | ||
6845 | + break; | ||
6846 | + } | ||
6847 | +} | ||
6848 | + | ||
6849 | +static void | ||
6850 | +gst_ffmpegdec_get_property (GObject * object, | ||
6851 | + guint prop_id, GValue * value, GParamSpec * pspec) | ||
6852 | +{ | ||
6853 | + GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) object; | ||
6854 | + | ||
6855 | + switch (prop_id) { | ||
6856 | + case PROP_LOWRES: | ||
6857 | + g_value_set_enum (value, ffmpegdec->context->lowres); | ||
6858 | + break; | ||
6859 | + case PROP_SKIPFRAME: | ||
6860 | + g_value_set_enum (value, ffmpegdec->context->skip_frame); | ||
6861 | + break; | ||
6862 | + case PROP_DIRECT_RENDERING: | ||
6863 | + g_value_set_boolean (value, ffmpegdec->direct_rendering); | ||
6864 | + break; | ||
6865 | + case PROP_DO_PADDING: | ||
6866 | + g_value_set_boolean (value, ffmpegdec->do_padding); | ||
6867 | + break; | ||
6868 | + case PROP_DEBUG_MV: | ||
6869 | + g_value_set_boolean (value, ffmpegdec->context->debug_mv); | ||
6870 | + break; | ||
6871 | + case PROP_CROP: | ||
6872 | + g_value_set_boolean (value, ffmpegdec->crop); | ||
6873 | + break; | ||
6874 | + case PROP_MAX_THREADS: | ||
6875 | + g_value_set_int (value, ffmpegdec->max_threads); | ||
6876 | + break; | ||
6877 | + default: | ||
6878 | + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); | ||
6879 | + break; | ||
6880 | + } | ||
6881 | +} | ||
6882 | + | ||
6883 | +gboolean | ||
6884 | +gst_ffmpegdec_register (GstPlugin * plugin) | ||
6885 | +{ | ||
6886 | + GTypeInfo typeinfo = { | ||
6887 | + sizeof (GstFFMpegDecClass), | ||
6888 | + (GBaseInitFunc) gst_ffmpegdec_base_init, | ||
6889 | + NULL, | ||
6890 | + (GClassInitFunc) gst_ffmpegdec_class_init, | ||
6891 | + NULL, | ||
6892 | + NULL, | ||
6893 | + sizeof (GstFFMpegDec), | ||
6894 | + 0, | ||
6895 | + (GInstanceInitFunc) gst_ffmpegdec_init, | ||
6896 | + }; | ||
6897 | + GType type; | ||
6898 | + AVCodec *in_plugin; | ||
6899 | + gint rank; | ||
6900 | + | ||
6901 | + in_plugin = av_codec_next (NULL); | ||
6902 | + | ||
6903 | + GST_LOG ("Registering decoders"); | ||
6904 | + | ||
6905 | + while (in_plugin) { | ||
6906 | + gchar *type_name; | ||
6907 | + gchar *plugin_name; | ||
6908 | + | ||
6909 | + /* only decoders */ | ||
6910 | + if (!in_plugin->decode) { | ||
6911 | + goto next; | ||
6912 | + } | ||
6913 | + | ||
6914 | + /* no quasi-codecs, please */ | ||
6915 | + if (in_plugin->id == CODEC_ID_RAWVIDEO || | ||
6916 | + in_plugin->id == CODEC_ID_V210 || | ||
6917 | + in_plugin->id == CODEC_ID_V210X || | ||
6918 | + in_plugin->id == CODEC_ID_R210 || | ||
6919 | + (in_plugin->id >= CODEC_ID_PCM_S16LE && | ||
6920 | + in_plugin->id <= CODEC_ID_PCM_BLURAY)) { | ||
6921 | + goto next; | ||
6922 | + } | ||
6923 | + | ||
6924 | + /* No decoders depending on external libraries (we don't build them, but | ||
6925 | + * people who build against an external ffmpeg might have them. | ||
6926 | + * We have native gstreamer plugins for all of those libraries anyway. */ | ||
6927 | + if (!strncmp (in_plugin->name, "lib", 3)) { | ||
6928 | + GST_DEBUG | ||
6929 | + ("Not using external library decoder %s. Use the gstreamer-native ones instead.", | ||
6930 | + in_plugin->name); | ||
6931 | + goto next; | ||
6932 | + } | ||
6933 | + | ||
6934 | + /* No vdpau plugins until we can figure out how to properly use them | ||
6935 | + * outside of ffmpeg. */ | ||
6936 | + if (g_str_has_suffix (in_plugin->name, "_vdpau")) { | ||
6937 | + GST_DEBUG | ||
6938 | + ("Ignoring VDPAU decoder %s. We can't handle this outside of ffmpeg", | ||
6939 | + in_plugin->name); | ||
6940 | + goto next; | ||
6941 | + } | ||
6942 | + | ||
6943 | + if (g_str_has_suffix (in_plugin->name, "_xvmc")) { | ||
6944 | + GST_DEBUG | ||
6945 | + ("Ignoring XVMC decoder %s. We can't handle this outside of ffmpeg", | ||
6946 | + in_plugin->name); | ||
6947 | + goto next; | ||
6948 | + } | ||
6949 | + | ||
6950 | + GST_DEBUG ("Trying plugin %s [%s]", in_plugin->name, in_plugin->long_name); | ||
6951 | + | ||
6952 | + /* no codecs for which we're GUARANTEED to have better alternatives */ | ||
6953 | + /* MPEG1VIDEO : the mpeg2video decoder is preferred */ | ||
6954 | + /* MP1 : Use MP3 for decoding */ | ||
6955 | + /* MP2 : Use MP3 for decoding */ | ||
6956 | + /* Theora: Use libtheora based theoradec */ | ||
6957 | + if (!strcmp (in_plugin->name, "gif") || | ||
6958 | + !strcmp (in_plugin->name, "vorbis") || | ||
6959 | + !strcmp (in_plugin->name, "theora") || | ||
6960 | + !strcmp (in_plugin->name, "mpeg1video") || | ||
6961 | + !strcmp (in_plugin->name, "wavpack") || | ||
6962 | + !strcmp (in_plugin->name, "mp1") || | ||
6963 | + !strcmp (in_plugin->name, "mp2") || | ||
6964 | + !strcmp (in_plugin->name, "libfaad") || | ||
6965 | + !strcmp (in_plugin->name, "mpeg4aac") || | ||
6966 | + !strcmp (in_plugin->name, "ass") || | ||
6967 | + !strcmp (in_plugin->name, "srt") || | ||
6968 | + !strcmp (in_plugin->name, "pgssub") || | ||
6969 | + !strcmp (in_plugin->name, "dvdsub") || | ||
6970 | + !strcmp (in_plugin->name, "dvbsub")) { | ||
6971 | + GST_LOG ("Ignoring decoder %s", in_plugin->name); | ||
6972 | + goto next; | ||
6973 | + } | ||
6974 | + | ||
6975 | + /* construct the type */ | ||
6976 | + plugin_name = g_strdup ((gchar *) in_plugin->name); | ||
6977 | + g_strdelimit (plugin_name, NULL, '_'); | ||
6978 | + type_name = g_strdup_printf ("ffdec_%s", plugin_name); | ||
6979 | + g_free (plugin_name); | ||
6980 | + | ||
6981 | + type = g_type_from_name (type_name); | ||
6982 | + | ||
6983 | + if (!type) { | ||
6984 | + /* create the gtype now */ | ||
6985 | + type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0); | ||
6986 | + g_type_set_qdata (type, GST_FFDEC_PARAMS_QDATA, (gpointer) in_plugin); | ||
6987 | + } | ||
6988 | + | ||
6989 | + /* (Ronald) MPEG-4 gets a higher priority because it has been well- | ||
6990 | + * tested and by far outperforms divxdec/xviddec - so we prefer it. | ||
6991 | + * msmpeg4v3 same, as it outperforms divxdec for divx3 playback. | ||
6992 | + * VC1/WMV3 are not working and thus unpreferred for now. */ | ||
6993 | + switch (in_plugin->id) { | ||
6994 | + case CODEC_ID_MPEG4: | ||
6995 | + case CODEC_ID_MSMPEG4V3: | ||
6996 | + case CODEC_ID_H264: | ||
6997 | + case CODEC_ID_RA_144: | ||
6998 | + case CODEC_ID_RA_288: | ||
6999 | + case CODEC_ID_RV10: | ||
7000 | + case CODEC_ID_RV20: | ||
7001 | + case CODEC_ID_RV30: | ||
7002 | + case CODEC_ID_RV40: | ||
7003 | + case CODEC_ID_COOK: | ||
7004 | + rank = GST_RANK_SECONDARY; | ||
7005 | + break; | ||
7006 | + /* DVVIDEO: we have a good dv decoder, fast on both ppc as well as x86. | ||
7007 | + * They say libdv's quality is better though. leave as secondary. | ||
7008 | + * note: if you change this, see the code in gstdv.c in good/ext/dv. | ||
7009 | + * | ||
7010 | + * SIPR: decoder should have a higher rank than realaudiodec. | ||
7011 | + */ | ||
7012 | + case CODEC_ID_DVVIDEO: | ||
7013 | + case CODEC_ID_SIPR: | ||
7014 | + rank = GST_RANK_SECONDARY; | ||
7015 | + break; | ||
7016 | + case CODEC_ID_MP3: | ||
7017 | + rank = GST_RANK_NONE; | ||
7018 | + break; | ||
7019 | + /* TEMPORARILY DISABLING AC3/EAC3/DTS for 0.10.12 release | ||
7020 | + * due to downmixing failure. | ||
7021 | + * See Bug #608892 for more details */ | ||
7022 | + case CODEC_ID_EAC3: | ||
7023 | + case CODEC_ID_AC3: | ||
7024 | + case CODEC_ID_DTS: | ||
7025 | + rank = GST_RANK_NONE; | ||
7026 | + break; | ||
7027 | + default: | ||
7028 | + rank = GST_RANK_MARGINAL; | ||
7029 | + break; | ||
7030 | + } | ||
7031 | + if (!gst_element_register (plugin, type_name, rank, type)) { | ||
7032 | + g_warning ("Failed to register %s", type_name); | ||
7033 | + g_free (type_name); | ||
7034 | + return FALSE; | ||
7035 | + } | ||
7036 | + | ||
7037 | + g_free (type_name); | ||
7038 | + | ||
7039 | + next: | ||
7040 | + in_plugin = av_codec_next (in_plugin); | ||
7041 | + } | ||
7042 | + | ||
7043 | + GST_LOG ("Finished Registering decoders"); | ||
7044 | + | ||
7045 | + return TRUE; | ||
7046 | +} | ||
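
The decoder hunks above map the element properties onto the AVCodecContext (lowres, skip_frame, debug_mv, max_threads) and register one ffdec_<codec> element per libav decoder. A hypothetical snippet using one of the registered elements; the property names "lowres" and "max-threads" are assumed from the PROP_* handlers above and are not spelled out in this part of the patch:

    #include <gst/gst.h>

    /* Illustrative only: pick up one of the ffdec_* decoders registered by
     * gst_ffmpegdec_register() and tune the properties handled above.
     * Assumes gst_init() has already run and the assumed property names
     * ("lowres", "max-threads") match the class_init not shown here. */
    static GstElement *
    make_ffdec_h264 (void)
    {
      GstElement *dec = gst_element_factory_make ("ffdec_h264", "dec");

      if (dec != NULL)
        g_object_set (dec, "lowres", 1, "max-threads", 2, NULL);
      return dec;
    }
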
7047 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegdec.c.rej gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegdec.c.rej | ||
7048 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegdec.c.rej 1970-01-01 01:00:00.000000000 +0100 | ||
7049 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegdec.c.rej 2014-08-08 15:26:38.471858652 +0200 | ||
7050 | @@ -0,0 +1,11 @@ | ||
7051 | +--- ext/ffmpeg/gstffmpegdec.c | ||
7052 | ++++ ext/ffmpeg/gstffmpegdec.c | ||
7053 | +@@ -1565,7 +1564,7 @@ | ||
7054 | + gst_message_new_latency (GST_OBJECT_CAST (ffmpegdec))); | ||
7055 | + } | ||
7056 | + | ||
7057 | +- is_itype = (ffmpegdec->picture->pict_type == FF_I_TYPE); | ||
7058 | ++ is_itype = (ffmpegdec->picture->pict_type == AV_PICTURE_TYPE_I); | ||
7059 | + is_reference = (ffmpegdec->picture->reference == 1); | ||
7060 | + | ||
7061 | + iskeyframe = (is_itype || is_reference || ffmpegdec->picture->key_frame) | ||
7062 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegdemux.c gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegdemux.c | ||
7063 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegdemux.c 2011-07-13 11:07:28.000000000 +0200 | ||
7064 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegdemux.c 2014-08-08 15:26:07.874857555 +0200 | ||
7065 | @@ -343,8 +343,11 @@ | ||
7066 | demux->audiopads = 0; | ||
7067 | |||
7068 | /* close demuxer context from ffmpeg */ | ||
7069 | - av_close_input_file (demux->context); | ||
7070 | - demux->context = NULL; | ||
7071 | + if (demux->seekable) | ||
7072 | + gst_ffmpegdata_close (demux->context->pb); | ||
7073 | + else | ||
7074 | + gst_ffmpeg_pipe_close (demux->context->pb); | ||
7075 | + avformat_close_input (&demux->context); | ||
7076 | |||
7077 | GST_OBJECT_LOCK (demux); | ||
7078 | demux->opened = FALSE; | ||
7079 | @@ -1146,9 +1149,9 @@ | ||
7080 | static gboolean | ||
7081 | gst_ffmpegdemux_open (GstFFMpegDemux * demux) | ||
7082 | { | ||
7083 | + AVIOContext *iocontext = NULL; | ||
7084 | GstFFMpegDemuxClass *oclass = | ||
7085 | (GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux); | ||
7086 | - gchar *location; | ||
7087 | gint res, n_streams, i; | ||
7088 | #if 0 | ||
7089 | /* Re-enable once converted to new AVMetaData API | ||
7090 | @@ -1164,15 +1167,14 @@ | ||
7091 | |||
7092 | /* open via our input protocol hack */ | ||
7093 | if (demux->seekable) | ||
7094 | - location = g_strdup_printf ("gstreamer://%p", demux->sinkpad); | ||
7095 | + res = gst_ffmpegdata_open (demux->sinkpad, AVIO_FLAG_READ, &iocontext); | ||
7096 | else | ||
7097 | - location = g_strdup_printf ("gstpipe://%p", &demux->ffpipe); | ||
7098 | - GST_DEBUG_OBJECT (demux, "about to call av_open_input_file %s", location); | ||
7099 | + res = gst_ffmpeg_pipe_open (&demux->ffpipe, AVIO_FLAG_READ, &iocontext); | ||
7100 | |||
7101 | - res = av_open_input_file (&demux->context, location, | ||
7102 | - oclass->in_plugin, 0, NULL); | ||
7103 | + demux->context = avformat_alloc_context (); | ||
7104 | + demux->context->pb = iocontext; | ||
7105 | + res = avformat_open_input (&demux->context, NULL, oclass->in_plugin, NULL); | ||
7106 | |||
7107 | - g_free (location); | ||
7108 | GST_DEBUG_OBJECT (demux, "av_open_input returned %d", res); | ||
7109 | if (res < 0) | ||
7110 | goto open_failed; | ||
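
The gstffmpegdemux.c hunks above drop the "gstreamer://%p" URL trick: the context is now allocated by hand, a custom AVIOContext is attached to context->pb, and avformat_open_input() is called with a NULL filename. gst_ffmpegdata_open()/gst_ffmpeg_pipe_open() are defined elsewhere in the patch, but in the libav 9 API such an I/O context is normally built with avio_alloc_context(); a minimal sketch with caller-supplied callbacks (function names and buffer size are illustrative, not taken from the patch):

    #include <libavformat/avformat.h>
    #include <libavutil/mem.h>

    /* Open an input through caller-supplied read/seek callbacks, the same
     * shape the patched demuxer uses when it sets demux->context->pb. */
    static int
    open_with_custom_io (void *opaque,
        int (*read_cb) (void *opaque, uint8_t *buf, int size),
        int64_t (*seek_cb) (void *opaque, int64_t pos, int whence),
        AVInputFormat *fmt, AVFormatContext **ctx)
    {
      unsigned char *iobuf = av_malloc (4096);   /* ends up owned by the AVIOContext */
      AVIOContext *pb = avio_alloc_context (iobuf, 4096, 0 /* read-only */,
          opaque, read_cb, NULL, seek_cb);

      *ctx = avformat_alloc_context ();
      (*ctx)->pb = pb;                           /* must be set before the open call */
      return avformat_open_input (ctx, NULL, fmt, NULL);
    }
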
7111 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegenc.c gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegenc.c | ||
7112 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegenc.c 2011-10-31 11:14:03.000000000 +0100 | ||
7113 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegenc.c 2014-08-08 15:32:18.608870847 +0200 | ||
7114 | @@ -770,7 +770,7 @@ | ||
7115 | GST_OBJECT_UNLOCK (ffmpegenc); | ||
7116 | |||
7117 | if (force_keyframe) | ||
7118 | - ffmpegenc->picture->pict_type = FF_I_TYPE; | ||
7119 | + ffmpegenc->picture->pict_type = AV_PICTURE_TYPE_I; | ||
7120 | |||
7121 | frame_size = gst_ffmpeg_avpicture_fill ((AVPicture *) ffmpegenc->picture, | ||
7122 | GST_BUFFER_DATA (inbuf), | ||
7123 | @@ -1136,7 +1136,7 @@ | ||
7124 | const GstStructure *s; | ||
7125 | s = gst_event_get_structure (event); | ||
7126 | if (gst_structure_has_name (s, "GstForceKeyUnit")) { | ||
7127 | - ffmpegenc->picture->pict_type = FF_I_TYPE; | ||
7128 | + ffmpegenc->picture->pict_type = AV_PICTURE_TYPE_I; | ||
7129 | } | ||
7130 | break; | ||
7131 | } | ||
7132 | @@ -1339,7 +1339,7 @@ | ||
7133 | } | ||
7134 | |||
7135 | /* only encoders */ | ||
7136 | - if (!in_plugin->encode) { | ||
7137 | + if (!av_codec_is_encoder (in_plugin)) { | ||
7138 | goto next; | ||
7139 | } | ||
7140 | |||
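
The encoder changes above are the mechanical side of the port: FF_I_TYPE becomes AV_PICTURE_TYPE_I for keyframe forcing, and the private in_plugin->encode test is replaced with the public predicate. A minimal sketch of the libav 9 style codec walk (illustrative, not the patch's code; note the decoder loop earlier in this patch still tests in_plugin->decode directly):

    #include <libavcodec/avcodec.h>

    /* Enumerate registered codecs and keep only real encoders, the check the
     * patched gst_ffmpegenc_register() now performs per codec.
     * Assumes avcodec_register_all() has already been called. */
    static void
    walk_encoders (void)
    {
      AVCodec *c = NULL;

      while ((c = av_codec_next (c)) != NULL) {
        if (!av_codec_is_encoder (c))
          continue;
        /* a GStreamer element (ffenc_<name>) would be registered here */
      }
    }
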
7141 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegmux.c gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegmux.c | ||
7142 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegmux.c 2011-07-13 11:07:28.000000000 +0200 | ||
7143 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegmux.c 2014-08-08 15:26:07.874857555 +0200 | ||
7144 | @@ -24,8 +24,10 @@ | ||
7145 | #include <string.h> | ||
7146 | #ifdef HAVE_FFMPEG_UNINSTALLED | ||
7147 | #include <avformat.h> | ||
7148 | +#include <opt.h> | ||
7149 | #else | ||
7150 | #include <libavformat/avformat.h> | ||
7151 | +#include <libavutil/opt.h> | ||
7152 | #endif | ||
7153 | |||
7154 | #include <gst/gst.h> | ||
7155 | @@ -336,9 +338,6 @@ | ||
7156 | ffmpegmux->context = g_new0 (AVFormatContext, 1); | ||
7157 | ffmpegmux->context->oformat = oclass->in_plugin; | ||
7158 | ffmpegmux->context->nb_streams = 0; | ||
7159 | - g_snprintf (ffmpegmux->context->filename, | ||
7160 | - sizeof (ffmpegmux->context->filename), | ||
7161 | - "gstreamer://%p", ffmpegmux->srcpad); | ||
7162 | ffmpegmux->opened = FALSE; | ||
7163 | |||
7164 | ffmpegmux->videopads = 0; | ||
7165 | @@ -450,10 +449,10 @@ | ||
7166 | gst_element_add_pad (element, pad); | ||
7167 | |||
7168 | /* AVStream needs to be created */ | ||
7169 | - st = av_new_stream (ffmpegmux->context, collect_pad->padnum); | ||
7170 | + st = avformat_new_stream (ffmpegmux->context, NULL); | ||
7171 | + st->id = collect_pad->padnum; | ||
7172 | st->codec->codec_type = type; | ||
7173 | st->codec->codec_id = CODEC_ID_NONE; /* this is a check afterwards */ | ||
7174 | - st->stream_copy = 1; /* we're not the actual encoder */ | ||
7175 | st->codec->bit_rate = bitrate; | ||
7176 | st->codec->frame_size = framesize; | ||
7177 | /* we fill in codec during capsnego */ | ||
7178 | @@ -485,7 +484,7 @@ | ||
7179 | collect_pad = (GstFFMpegMuxPad *) gst_pad_get_element_private (pad); | ||
7180 | |||
7181 | st = ffmpegmux->context->streams[collect_pad->padnum]; | ||
7182 | - ffmpegmux->context->preload = ffmpegmux->preload; | ||
7183 | + av_opt_set_int (&ffmpegmux->context, "preload", ffmpegmux->preload, 0); | ||
7184 | ffmpegmux->context->max_delay = ffmpegmux->max_delay; | ||
7185 | |||
7186 | /* for the format-specific guesses, we'll go to | ||
7187 | @@ -552,7 +551,7 @@ | ||
7188 | |||
7189 | /* open "file" (gstreamer protocol to next element) */ | ||
7190 | if (!ffmpegmux->opened) { | ||
7191 | - int open_flags = URL_WRONLY; | ||
7192 | + int open_flags = AVIO_FLAG_WRITE; | ||
7193 | |||
7194 | /* we do need all streams to have started capsnego, | ||
7195 | * or things will go horribly wrong */ | ||
7196 | @@ -646,19 +645,13 @@ | ||
7197 | open_flags |= GST_FFMPEG_URL_STREAMHEADER; | ||
7198 | } | ||
7199 | |||
7200 | - if (url_fopen (&ffmpegmux->context->pb, | ||
7201 | - ffmpegmux->context->filename, open_flags) < 0) { | ||
7202 | + if (gst_ffmpegdata_open (ffmpegmux->srcpad, open_flags, | ||
7203 | + &ffmpegmux->context->pb) < 0) { | ||
7204 | GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, TOO_LAZY, (NULL), | ||
7205 | ("Failed to open stream context in ffmux")); | ||
7206 | return GST_FLOW_ERROR; | ||
7207 | } | ||
7208 | |||
7209 | - if (av_set_parameters (ffmpegmux->context, NULL) < 0) { | ||
7210 | - GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, INIT, (NULL), | ||
7211 | - ("Failed to initialize muxer")); | ||
7212 | - return GST_FLOW_ERROR; | ||
7213 | - } | ||
7214 | - | ||
7215 | /* now open the mux format */ | ||
7216 | if (av_write_header (ffmpegmux->context) < 0) { | ||
7217 | GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, SETTINGS, (NULL), | ||
7218 | @@ -670,7 +663,7 @@ | ||
7219 | ffmpegmux->opened = TRUE; | ||
7220 | |||
7221 | /* flush the header so it will be used as streamheader */ | ||
7222 | - put_flush_packet (ffmpegmux->context->pb); | ||
7223 | + avio_flush (ffmpegmux->context->pb); | ||
7224 | } | ||
7225 | |||
7226 | /* take the one with earliest timestamp, | ||
7227 | @@ -770,8 +763,8 @@ | ||
7228 | /* close down */ | ||
7229 | av_write_trailer (ffmpegmux->context); | ||
7230 | ffmpegmux->opened = FALSE; | ||
7231 | - put_flush_packet (ffmpegmux->context->pb); | ||
7232 | - url_fclose (ffmpegmux->context->pb); | ||
7233 | + avio_flush (ffmpegmux->context->pb); | ||
7234 | + gst_ffmpegdata_close (ffmpegmux->context->pb); | ||
7235 | gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_eos ()); | ||
7236 | return GST_FLOW_UNEXPECTED; | ||
7237 | } | ||
7238 | @@ -795,6 +788,10 @@ | ||
7239 | break; | ||
7240 | case GST_STATE_CHANGE_PAUSED_TO_READY: | ||
7241 | gst_collect_pads_stop (ffmpegmux->collect); | ||
7242 | + if (ffmpegmux->opened) { | ||
7243 | + ffmpegmux->opened = FALSE; | ||
7244 | + gst_ffmpegdata_close (ffmpegmux->context->pb); | ||
7245 | + } | ||
7246 | break; | ||
7247 | default: | ||
7248 | break; | ||
7249 | @@ -809,7 +806,7 @@ | ||
7250 | gst_tag_setter_reset_tags (GST_TAG_SETTER (ffmpegmux)); | ||
7251 | if (ffmpegmux->opened) { | ||
7252 | ffmpegmux->opened = FALSE; | ||
7253 | - url_fclose (ffmpegmux->context->pb); | ||
7254 | + avio_close (ffmpegmux->context->pb); | ||
7255 | } | ||
7256 | break; | ||
7257 | case GST_STATE_CHANGE_READY_TO_NULL: | ||
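
The muxer hunks above move from av_new_stream()/av_set_parameters()/url_fopen() to avformat_new_stream(), a bare av_write_header() and the avio_* helpers, and they now also close the I/O context when the element drops back to READY. Roughly, stream setup and the header write under the newer API look like this (a sketch under those assumptions, not the patch's exact code):

    #include <libavformat/avformat.h>

    /* Replaces av_new_stream(ctx, id): the stream id is now set by hand. */
    static AVStream *
    add_stream (AVFormatContext *ctx, int pad_num)
    {
      AVStream *st = avformat_new_stream (ctx, NULL);

      if (st != NULL)
        st->id = pad_num;
      return st;
    }

    /* Once every stream has its codec parameters, write the header; the
     * separate av_set_parameters() step no longer exists, and flushing is
     * done with avio_flush() instead of put_flush_packet(). */
    static int
    start_muxing (AVFormatContext *ctx)
    {
      if (av_write_header (ctx) < 0)
        return -1;
      avio_flush (ctx->pb);
      return 0;
    }
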
7258 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegmux.c.orig gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegmux.c.orig | ||
7259 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegmux.c.orig 1970-01-01 01:00:00.000000000 +0100 | ||
7260 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegmux.c.orig 2011-07-13 11:07:28.000000000 +0200 | ||
7261 | @@ -0,0 +1,970 @@ | ||
7262 | +/* GStreamer | ||
7263 | + * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu> | ||
7264 | + * | ||
7265 | + * This library is free software; you can redistribute it and/or | ||
7266 | + * modify it under the terms of the GNU Library General Public | ||
7267 | + * License as published by the Free Software Foundation; either | ||
7268 | + * version 2 of the License, or (at your option) any later version. | ||
7269 | + * | ||
7270 | + * This library is distributed in the hope that it will be useful, | ||
7271 | + * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
7272 | + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
7273 | + * Library General Public License for more details. | ||
7274 | + * | ||
7275 | + * You should have received a copy of the GNU Library General Public | ||
7276 | + * License along with this library; if not, write to the | ||
7277 | + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, | ||
7278 | + * Boston, MA 02111-1307, USA. | ||
7279 | + */ | ||
7280 | + | ||
7281 | +#ifdef HAVE_CONFIG_H | ||
7282 | +#include "config.h" | ||
7283 | +#endif | ||
7284 | + | ||
7285 | +#include <string.h> | ||
7286 | +#ifdef HAVE_FFMPEG_UNINSTALLED | ||
7287 | +#include <avformat.h> | ||
7288 | +#else | ||
7289 | +#include <libavformat/avformat.h> | ||
7290 | +#endif | ||
7291 | + | ||
7292 | +#include <gst/gst.h> | ||
7293 | +#include <gst/base/gstcollectpads.h> | ||
7294 | + | ||
7295 | +#include "gstffmpeg.h" | ||
7296 | +#include "gstffmpegcodecmap.h" | ||
7297 | +#include "gstffmpegutils.h" | ||
7298 | + | ||
7299 | +typedef struct _GstFFMpegMux GstFFMpegMux; | ||
7300 | +typedef struct _GstFFMpegMuxPad GstFFMpegMuxPad; | ||
7301 | + | ||
7302 | +struct _GstFFMpegMuxPad | ||
7303 | +{ | ||
7304 | + GstCollectData collect; /* we extend the CollectData */ | ||
7305 | + | ||
7306 | + gint padnum; | ||
7307 | +}; | ||
7308 | + | ||
7309 | +struct _GstFFMpegMux | ||
7310 | +{ | ||
7311 | + GstElement element; | ||
7312 | + | ||
7313 | + GstCollectPads *collect; | ||
7314 | + /* We need to keep track of our pads, so we do so here. */ | ||
7315 | + GstPad *srcpad; | ||
7316 | + | ||
7317 | + AVFormatContext *context; | ||
7318 | + gboolean opened; | ||
7319 | + | ||
7320 | + gint videopads, audiopads; | ||
7321 | + | ||
7322 | + /*< private > */ | ||
7323 | + /* event_function is the collectpads default eventfunction */ | ||
7324 | + GstPadEventFunction event_function; | ||
7325 | + int preload; | ||
7326 | + int max_delay; | ||
7327 | +}; | ||
7328 | + | ||
7329 | +typedef struct _GstFFMpegMuxClass GstFFMpegMuxClass; | ||
7330 | + | ||
7331 | +struct _GstFFMpegMuxClass | ||
7332 | +{ | ||
7333 | + GstElementClass parent_class; | ||
7334 | + | ||
7335 | + AVOutputFormat *in_plugin; | ||
7336 | +}; | ||
7337 | + | ||
7338 | +#define GST_TYPE_FFMPEGMUX \ | ||
7339 | + (gst_ffmpegdec_get_type()) | ||
7340 | +#define GST_FFMPEGMUX(obj) \ | ||
7341 | + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_FFMPEGMUX,GstFFMpegMux)) | ||
7342 | +#define GST_FFMPEGMUX_CLASS(klass) \ | ||
7343 | + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_FFMPEGMUX,GstFFMpegMuxClass)) | ||
7344 | +#define GST_IS_FFMPEGMUX(obj) \ | ||
7345 | + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_FFMPEGMUX)) | ||
7346 | +#define GST_IS_FFMPEGMUX_CLASS(klass) \ | ||
7347 | + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEGMUX)) | ||
7348 | + | ||
7349 | +enum | ||
7350 | +{ | ||
7351 | + /* FILL ME */ | ||
7352 | + LAST_SIGNAL | ||
7353 | +}; | ||
7354 | + | ||
7355 | +enum | ||
7356 | +{ | ||
7357 | + ARG_0, | ||
7358 | + /* FILL ME */ | ||
7359 | +}; | ||
7360 | + | ||
7361 | +enum | ||
7362 | +{ | ||
7363 | + PROP_0, | ||
7364 | + PROP_PRELOAD, | ||
7365 | + PROP_MAXDELAY | ||
7366 | +}; | ||
7367 | + | ||
7368 | +/* A number of function prototypes are given so we can refer to them later. */ | ||
7369 | +static void gst_ffmpegmux_class_init (GstFFMpegMuxClass * klass); | ||
7370 | +static void gst_ffmpegmux_base_init (gpointer g_class); | ||
7371 | +static void gst_ffmpegmux_init (GstFFMpegMux * ffmpegmux, | ||
7372 | + GstFFMpegMuxClass * g_class); | ||
7373 | +static void gst_ffmpegmux_finalize (GObject * object); | ||
7374 | + | ||
7375 | +static gboolean gst_ffmpegmux_setcaps (GstPad * pad, GstCaps * caps); | ||
7376 | +static GstPad *gst_ffmpegmux_request_new_pad (GstElement * element, | ||
7377 | + GstPadTemplate * templ, const gchar * name); | ||
7378 | +static GstFlowReturn gst_ffmpegmux_collected (GstCollectPads * pads, | ||
7379 | + gpointer user_data); | ||
7380 | + | ||
7381 | +static gboolean gst_ffmpegmux_sink_event (GstPad * pad, GstEvent * event); | ||
7382 | + | ||
7383 | +static GstStateChangeReturn gst_ffmpegmux_change_state (GstElement * element, | ||
7384 | + GstStateChange transition); | ||
7385 | + | ||
7386 | +static void gst_ffmpegmux_set_property (GObject * object, guint prop_id, | ||
7387 | + const GValue * value, GParamSpec * pspec); | ||
7388 | +static void gst_ffmpegmux_get_property (GObject * object, guint prop_id, | ||
7389 | + GValue * value, GParamSpec * pspec); | ||
7390 | + | ||
7391 | +static GstCaps *gst_ffmpegmux_get_id_caps (enum CodecID *id_list); | ||
7392 | +static void gst_ffmpeg_mux_simple_caps_set_int_list (GstCaps * caps, | ||
7393 | + const gchar * field, guint num, const gint * values); | ||
7394 | + | ||
7395 | +#define GST_FFMUX_PARAMS_QDATA g_quark_from_static_string("ffmux-params") | ||
7396 | + | ||
7397 | +static GstElementClass *parent_class = NULL; | ||
7398 | + | ||
7399 | +/*static guint gst_ffmpegmux_signals[LAST_SIGNAL] = { 0 }; */ | ||
7400 | + | ||
7401 | +typedef struct | ||
7402 | +{ | ||
7403 | + const char *name; | ||
7404 | + const char *replacement; | ||
7405 | +} GstFFMpegMuxReplacement; | ||
7406 | + | ||
7407 | +static const char * | ||
7408 | +gst_ffmpegmux_get_replacement (const char *name) | ||
7409 | +{ | ||
7410 | + static const GstFFMpegMuxReplacement blacklist[] = { | ||
7411 | + {"avi", "avimux"}, | ||
7412 | + {"matroska", "matroskamux"}, | ||
7413 | + {"mov", "qtmux"}, | ||
7414 | + {"mpegts", "mpegtsmux"}, | ||
7415 | + {"mp4", "mp4mux"}, | ||
7416 | + {"mpjpeg", "multipartmux"}, | ||
7417 | + {"ogg", "oggmux"}, | ||
7418 | + {"wav", "wavenc"}, | ||
7419 | + {"webm", "webmmux"}, | ||
7420 | + {"mxf", "mxfmux"}, | ||
7421 | + {"3gp", "gppmux"}, | ||
7422 | + {"yuv4mpegpipe", "y4menc"}, | ||
7423 | + {"aiff", "aiffmux"}, | ||
7424 | + {"adts", "aacparse"}, | ||
7425 | + {"asf", "asfmux"}, | ||
7426 | + {"asf_stream", "asfmux"}, | ||
7427 | + {"flv", "flvmux"}, | ||
7428 | + {"mp3", "id3v2mux"}, | ||
7429 | + {"mp2", "id3v2mux"} | ||
7430 | + }; | ||
7431 | + int i; | ||
7432 | + | ||
7433 | + for (i = 0; i < sizeof (blacklist) / sizeof (blacklist[0]); i++) { | ||
7434 | + if (strcmp (blacklist[i].name, name) == 0) { | ||
7435 | + return blacklist[i].replacement; | ||
7436 | + } | ||
7437 | + } | ||
7438 | + | ||
7439 | + return NULL; | ||
7440 | +} | ||
7441 | + | ||
7442 | +static gboolean | ||
7443 | +gst_ffmpegmux_is_formatter (const char *name) | ||
7444 | +{ | ||
7445 | + static const char *replace[] = { | ||
7446 | + "mp2", "mp3", NULL | ||
7447 | + }; | ||
7448 | + int i; | ||
7449 | + | ||
7450 | + for (i = 0; replace[i]; i++) | ||
7451 | + if (strcmp (replace[i], name) == 0) | ||
7452 | + return TRUE; | ||
7453 | + return FALSE; | ||
7454 | +} | ||
7455 | + | ||
7456 | +static void | ||
7457 | +gst_ffmpegmux_base_init (gpointer g_class) | ||
7458 | +{ | ||
7459 | + GstFFMpegMuxClass *klass = (GstFFMpegMuxClass *) g_class; | ||
7460 | + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); | ||
7461 | + GstPadTemplate *videosinktempl, *audiosinktempl, *srctempl; | ||
7462 | + AVOutputFormat *in_plugin; | ||
7463 | + GstCaps *srccaps, *audiosinkcaps, *videosinkcaps; | ||
7464 | + enum CodecID *video_ids = NULL, *audio_ids = NULL; | ||
7465 | + gchar *longname, *description; | ||
7466 | + const char *replacement; | ||
7467 | + gboolean is_formatter; | ||
7468 | + | ||
7469 | + in_plugin = | ||
7470 | + (AVOutputFormat *) g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass), | ||
7471 | + GST_FFMUX_PARAMS_QDATA); | ||
7472 | + g_assert (in_plugin != NULL); | ||
7473 | + | ||
7474 | + /* construct the element details struct */ | ||
7475 | + replacement = gst_ffmpegmux_get_replacement (in_plugin->name); | ||
7476 | + is_formatter = gst_ffmpegmux_is_formatter (in_plugin->name); | ||
7477 | + if (replacement != NULL) { | ||
7478 | + longname = | ||
7479 | + g_strdup_printf ("FFmpeg %s %s (not recommended, use %s instead)", | ||
7480 | + in_plugin->long_name, is_formatter ? "formatter" : "muxer", | ||
7481 | + replacement); | ||
7482 | + description = | ||
7483 | + g_strdup_printf ("FFmpeg %s %s (not recommended, use %s instead)", | ||
7484 | + in_plugin->long_name, is_formatter ? "formatter" : "muxer", | ||
7485 | + replacement); | ||
7486 | + } else { | ||
7487 | + longname = g_strdup_printf ("FFmpeg %s %s", in_plugin->long_name, | ||
7488 | + is_formatter ? "formatter" : "muxer"); | ||
7489 | + description = g_strdup_printf ("FFmpeg %s %s", in_plugin->long_name, | ||
7490 | + is_formatter ? "formatter" : "muxer"); | ||
7491 | + } | ||
7492 | + gst_element_class_set_details_simple (element_class, longname, | ||
7493 | + is_formatter ? "Formatter/Metadata" : "Codec/Muxer", description, | ||
7494 | + "Wim Taymans <wim.taymans@chello.be>, " | ||
7495 | + "Ronald Bultje <rbultje@ronald.bitfreak.net>"); | ||
7496 | + g_free (longname); | ||
7497 | + g_free (description); | ||
7498 | + | ||
7499 | + /* Try to find the caps that belongs here */ | ||
7500 | + srccaps = gst_ffmpeg_formatid_to_caps (in_plugin->name); | ||
7501 | + if (!srccaps) { | ||
7502 | + GST_DEBUG ("Couldn't get source caps for muxer '%s', skipping format", | ||
7503 | + in_plugin->name); | ||
7504 | + goto beach; | ||
7505 | + } | ||
7506 | + | ||
7507 | + if (!gst_ffmpeg_formatid_get_codecids (in_plugin->name, | ||
7508 | + &video_ids, &audio_ids, in_plugin)) { | ||
7509 | + gst_caps_unref (srccaps); | ||
7510 | + GST_DEBUG | ||
7511 | + ("Couldn't get sink caps for muxer '%s'. Most likely because no input format mapping exists.", | ||
7512 | + in_plugin->name); | ||
7513 | + goto beach; | ||
7514 | + } | ||
7515 | + | ||
7516 | + videosinkcaps = video_ids ? gst_ffmpegmux_get_id_caps (video_ids) : NULL; | ||
7517 | + audiosinkcaps = audio_ids ? gst_ffmpegmux_get_id_caps (audio_ids) : NULL; | ||
7518 | + | ||
7519 | + /* fix up allowed caps for some muxers */ | ||
7520 | + /* FIXME : This should be in gstffmpegcodecmap.c ! */ | ||
7521 | + if (strcmp (in_plugin->name, "flv") == 0) { | ||
7522 | + const gint rates[] = { 44100, 22050, 11025 }; | ||
7523 | + | ||
7524 | + gst_ffmpeg_mux_simple_caps_set_int_list (audiosinkcaps, "rate", 3, rates); | ||
7525 | + } else if (strcmp (in_plugin->name, "gif") == 0) { | ||
7526 | + if (videosinkcaps) | ||
7527 | + gst_caps_unref (videosinkcaps); | ||
7528 | + | ||
7529 | + videosinkcaps = | ||
7530 | + gst_caps_from_string ("video/x-raw-rgb, bpp=(int)24, depth=(int)24"); | ||
7531 | + } | ||
7532 | + | ||
7533 | + /* pad templates */ | ||
7534 | + srctempl = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, srccaps); | ||
7535 | + gst_element_class_add_pad_template (element_class, srctempl); | ||
7536 | + | ||
7537 | + if (audiosinkcaps) { | ||
7538 | + audiosinktempl = gst_pad_template_new ("audio_%d", | ||
7539 | + GST_PAD_SINK, GST_PAD_REQUEST, audiosinkcaps); | ||
7540 | + gst_element_class_add_pad_template (element_class, audiosinktempl); | ||
7541 | + } | ||
7542 | + | ||
7543 | + if (videosinkcaps) { | ||
7544 | + videosinktempl = gst_pad_template_new ("video_%d", | ||
7545 | + GST_PAD_SINK, GST_PAD_REQUEST, videosinkcaps); | ||
7546 | + gst_element_class_add_pad_template (element_class, videosinktempl); | ||
7547 | + } | ||
7548 | + | ||
7549 | +beach: | ||
7550 | + klass->in_plugin = in_plugin; | ||
7551 | +} | ||
7552 | + | ||
7553 | +static void | ||
7554 | +gst_ffmpegmux_class_init (GstFFMpegMuxClass * klass) | ||
7555 | +{ | ||
7556 | + GObjectClass *gobject_class; | ||
7557 | + GstElementClass *gstelement_class; | ||
7558 | + | ||
7559 | + gobject_class = (GObjectClass *) klass; | ||
7560 | + gstelement_class = (GstElementClass *) klass; | ||
7561 | + | ||
7562 | + parent_class = g_type_class_peek_parent (klass); | ||
7563 | + | ||
7564 | + gobject_class->set_property = GST_DEBUG_FUNCPTR (gst_ffmpegmux_set_property); | ||
7565 | + gobject_class->get_property = GST_DEBUG_FUNCPTR (gst_ffmpegmux_get_property); | ||
7566 | + | ||
7567 | + g_object_class_install_property (gobject_class, PROP_PRELOAD, | ||
7568 | + g_param_spec_int ("preload", "preload", | ||
7569 | + "Set the initial demux-decode delay (in microseconds)", 0, G_MAXINT, | ||
7570 | + 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); | ||
7571 | + | ||
7572 | + g_object_class_install_property (gobject_class, PROP_MAXDELAY, | ||
7573 | + g_param_spec_int ("maxdelay", "maxdelay", | ||
7574 | + "Set the maximum demux-decode delay (in microseconds)", 0, G_MAXINT, | ||
7575 | + 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); | ||
7576 | + | ||
7577 | + gstelement_class->request_new_pad = gst_ffmpegmux_request_new_pad; | ||
7578 | + gstelement_class->change_state = gst_ffmpegmux_change_state; | ||
7579 | + gobject_class->finalize = gst_ffmpegmux_finalize; | ||
7580 | +} | ||
7581 | + | ||
7582 | +static void | ||
7583 | +gst_ffmpegmux_init (GstFFMpegMux * ffmpegmux, GstFFMpegMuxClass * g_class) | ||
7584 | +{ | ||
7585 | + GstElementClass *klass = GST_ELEMENT_CLASS (g_class); | ||
7586 | + GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass *) klass; | ||
7587 | + GstPadTemplate *templ = gst_element_class_get_pad_template (klass, "src"); | ||
7588 | + | ||
7589 | + ffmpegmux->srcpad = gst_pad_new_from_template (templ, "src"); | ||
7590 | + gst_pad_set_caps (ffmpegmux->srcpad, gst_pad_template_get_caps (templ)); | ||
7591 | + gst_element_add_pad (GST_ELEMENT (ffmpegmux), ffmpegmux->srcpad); | ||
7592 | + | ||
7593 | + ffmpegmux->collect = gst_collect_pads_new (); | ||
7594 | + gst_collect_pads_set_function (ffmpegmux->collect, | ||
7595 | + (GstCollectPadsFunction) gst_ffmpegmux_collected, ffmpegmux); | ||
7596 | + | ||
7597 | + ffmpegmux->context = g_new0 (AVFormatContext, 1); | ||
7598 | + ffmpegmux->context->oformat = oclass->in_plugin; | ||
7599 | + ffmpegmux->context->nb_streams = 0; | ||
7600 | + g_snprintf (ffmpegmux->context->filename, | ||
7601 | + sizeof (ffmpegmux->context->filename), | ||
7602 | + "gstreamer://%p", ffmpegmux->srcpad); | ||
7603 | + ffmpegmux->opened = FALSE; | ||
7604 | + | ||
7605 | + ffmpegmux->videopads = 0; | ||
7606 | + ffmpegmux->audiopads = 0; | ||
7607 | + ffmpegmux->preload = 0; | ||
7608 | + ffmpegmux->max_delay = 0; | ||
7609 | +} | ||
7610 | + | ||
7611 | +static void | ||
7612 | +gst_ffmpegmux_set_property (GObject * object, guint prop_id, | ||
7613 | + const GValue * value, GParamSpec * pspec) | ||
7614 | +{ | ||
7615 | + GstFFMpegMux *src; | ||
7616 | + | ||
7617 | + src = (GstFFMpegMux *) object; | ||
7618 | + | ||
7619 | + switch (prop_id) { | ||
7620 | + case PROP_PRELOAD: | ||
7621 | + src->preload = g_value_get_int (value); | ||
7622 | + break; | ||
7623 | + case PROP_MAXDELAY: | ||
7624 | + src->max_delay = g_value_get_int (value); | ||
7625 | + break; | ||
7626 | + default: | ||
7627 | + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); | ||
7628 | + break; | ||
7629 | + } | ||
7630 | +} | ||
7631 | + | ||
7632 | +static void | ||
7633 | +gst_ffmpegmux_get_property (GObject * object, guint prop_id, GValue * value, | ||
7634 | + GParamSpec * pspec) | ||
7635 | +{ | ||
7636 | + GstFFMpegMux *src; | ||
7637 | + | ||
7638 | + src = (GstFFMpegMux *) object; | ||
7639 | + | ||
7640 | + switch (prop_id) { | ||
7641 | + case PROP_PRELOAD: | ||
7642 | + g_value_set_int (value, src->preload); | ||
7643 | + break; | ||
7644 | + case PROP_MAXDELAY: | ||
7645 | + g_value_set_int (value, src->max_delay); | ||
7646 | + break; | ||
7647 | + default: | ||
7648 | + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); | ||
7649 | + break; | ||
7650 | + } | ||
7651 | +} | ||
7652 | + | ||
7653 | + | ||
7654 | +static void | ||
7655 | +gst_ffmpegmux_finalize (GObject * object) | ||
7656 | +{ | ||
7657 | + GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) object; | ||
7658 | + | ||
7659 | + g_free (ffmpegmux->context); | ||
7660 | + gst_object_unref (ffmpegmux->collect); | ||
7661 | + | ||
7662 | + if (G_OBJECT_CLASS (parent_class)->finalize) | ||
7663 | + G_OBJECT_CLASS (parent_class)->finalize (object); | ||
7664 | +} | ||
7665 | + | ||
7666 | +static GstPad * | ||
7667 | +gst_ffmpegmux_request_new_pad (GstElement * element, | ||
7668 | + GstPadTemplate * templ, const gchar * name) | ||
7669 | +{ | ||
7670 | + GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) element; | ||
7671 | + GstElementClass *klass = GST_ELEMENT_GET_CLASS (element); | ||
7672 | + GstFFMpegMuxPad *collect_pad; | ||
7673 | + gchar *padname; | ||
7674 | + GstPad *pad; | ||
7675 | + AVStream *st; | ||
7676 | + enum AVMediaType type; | ||
7677 | + gint bitrate = 0, framesize = 0; | ||
7678 | + | ||
7679 | + g_return_val_if_fail (templ != NULL, NULL); | ||
7680 | + g_return_val_if_fail (templ->direction == GST_PAD_SINK, NULL); | ||
7681 | + g_return_val_if_fail (ffmpegmux->opened == FALSE, NULL); | ||
7682 | + | ||
7683 | + /* figure out a name that *we* like */ | ||
7684 | + if (templ == gst_element_class_get_pad_template (klass, "video_%d")) { | ||
7685 | + padname = g_strdup_printf ("video_%d", ffmpegmux->videopads++); | ||
7686 | + type = AVMEDIA_TYPE_VIDEO; | ||
7687 | + bitrate = 64 * 1024; | ||
7688 | + framesize = 1152; | ||
7689 | + } else if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) { | ||
7690 | + padname = g_strdup_printf ("audio_%d", ffmpegmux->audiopads++); | ||
7691 | + type = AVMEDIA_TYPE_AUDIO; | ||
7692 | + bitrate = 285 * 1024; | ||
7693 | + } else { | ||
7694 | + g_warning ("ffmux: unknown pad template!"); | ||
7695 | + return NULL; | ||
7696 | + } | ||
7697 | + | ||
7698 | + /* create pad */ | ||
7699 | + pad = gst_pad_new_from_template (templ, padname); | ||
7700 | + collect_pad = (GstFFMpegMuxPad *) | ||
7701 | + gst_collect_pads_add_pad (ffmpegmux->collect, pad, | ||
7702 | + sizeof (GstFFMpegMuxPad)); | ||
7703 | + collect_pad->padnum = ffmpegmux->context->nb_streams; | ||
7704 | + | ||
7705 | + /* small hack to put our own event pad function and chain up to collect pad */ | ||
7706 | + ffmpegmux->event_function = GST_PAD_EVENTFUNC (pad); | ||
7707 | + gst_pad_set_event_function (pad, | ||
7708 | + GST_DEBUG_FUNCPTR (gst_ffmpegmux_sink_event)); | ||
7709 | + | ||
7710 | + gst_pad_set_setcaps_function (pad, GST_DEBUG_FUNCPTR (gst_ffmpegmux_setcaps)); | ||
7711 | + gst_element_add_pad (element, pad); | ||
7712 | + | ||
7713 | + /* AVStream needs to be created */ | ||
7714 | + st = av_new_stream (ffmpegmux->context, collect_pad->padnum); | ||
7715 | + st->codec->codec_type = type; | ||
7716 | + st->codec->codec_id = CODEC_ID_NONE; /* this is a check afterwards */ | ||
7717 | + st->stream_copy = 1; /* we're not the actual encoder */ | ||
7718 | + st->codec->bit_rate = bitrate; | ||
7719 | + st->codec->frame_size = framesize; | ||
7720 | + /* we fill in codec during capsnego */ | ||
7721 | + | ||
7722 | + /* we love debug output (c) (tm) (r) */ | ||
7723 | + GST_DEBUG ("Created %s pad for ffmux_%s element", | ||
7724 | + padname, ((GstFFMpegMuxClass *) klass)->in_plugin->name); | ||
7725 | + g_free (padname); | ||
7726 | + | ||
7727 | + return pad; | ||
7728 | +} | ||
7729 | + | ||
7730 | +/** | ||
7731 | + * gst_ffmpegmux_setcaps | ||
7732 | + * @pad: #GstPad | ||
7733 | + * @caps: New caps. | ||
7734 | + * | ||
7735 | + * Set caps to pad. | ||
7736 | + * | ||
7737 | + * Returns: #TRUE on success. | ||
7738 | + */ | ||
7739 | +static gboolean | ||
7740 | +gst_ffmpegmux_setcaps (GstPad * pad, GstCaps * caps) | ||
7741 | +{ | ||
7742 | + GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) (gst_pad_get_parent (pad)); | ||
7743 | + GstFFMpegMuxPad *collect_pad; | ||
7744 | + AVStream *st; | ||
7745 | + | ||
7746 | + collect_pad = (GstFFMpegMuxPad *) gst_pad_get_element_private (pad); | ||
7747 | + | ||
7748 | + st = ffmpegmux->context->streams[collect_pad->padnum]; | ||
7749 | + ffmpegmux->context->preload = ffmpegmux->preload; | ||
7750 | + ffmpegmux->context->max_delay = ffmpegmux->max_delay; | ||
7751 | + | ||
7752 | + /* for the format-specific guesses, we'll go to | ||
7753 | + * our famous codec mapper */ | ||
7754 | + if (gst_ffmpeg_caps_to_codecid (caps, st->codec) == CODEC_ID_NONE) | ||
7755 | + goto not_accepted; | ||
7756 | + | ||
7757 | + /* copy over the aspect ratios, ffmpeg expects the stream aspect to match the | ||
7758 | + * codec aspect. */ | ||
7759 | + st->sample_aspect_ratio = st->codec->sample_aspect_ratio; | ||
7760 | + | ||
7761 | + GST_LOG_OBJECT (pad, "accepted caps %" GST_PTR_FORMAT, caps); | ||
7762 | + return TRUE; | ||
7763 | + | ||
7764 | + /* ERRORS */ | ||
7765 | +not_accepted: | ||
7766 | + { | ||
7767 | + GST_LOG_OBJECT (pad, "rejecting caps %" GST_PTR_FORMAT, caps); | ||
7768 | + return FALSE; | ||
7769 | + } | ||
7770 | +} | ||
7771 | + | ||
7772 | + | ||
7773 | +static gboolean | ||
7774 | +gst_ffmpegmux_sink_event (GstPad * pad, GstEvent * event) | ||
7775 | +{ | ||
7776 | + GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) gst_pad_get_parent (pad); | ||
7777 | + gboolean res = TRUE; | ||
7778 | + | ||
7779 | + switch (GST_EVENT_TYPE (event)) { | ||
7780 | + case GST_EVENT_TAG:{ | ||
7781 | + GstTagList *taglist; | ||
7782 | + GstTagSetter *setter = GST_TAG_SETTER (ffmpegmux); | ||
7783 | + const GstTagMergeMode mode = gst_tag_setter_get_tag_merge_mode (setter); | ||
7784 | + | ||
7785 | + gst_event_parse_tag (event, &taglist); | ||
7786 | + gst_tag_setter_merge_tags (setter, taglist, mode); | ||
7787 | + break; | ||
7788 | + } | ||
7789 | + default: | ||
7790 | + break; | ||
7791 | + } | ||
7792 | + | ||
7793 | + /* chaining up to collectpads default event function */ | ||
7794 | + res = ffmpegmux->event_function (pad, event); | ||
7795 | + | ||
7796 | + gst_object_unref (ffmpegmux); | ||
7797 | + return res; | ||
7798 | +} | ||
7799 | + | ||
7800 | +static GstFlowReturn | ||
7801 | +gst_ffmpegmux_collected (GstCollectPads * pads, gpointer user_data) | ||
7802 | +{ | ||
7803 | + GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) user_data; | ||
7804 | + GSList *collected; | ||
7805 | + GstFFMpegMuxPad *best_pad; | ||
7806 | + GstClockTime best_time; | ||
7807 | +#if 0 | ||
7808 | + /* Re-enable once converted to new AVMetaData API | ||
7809 | + * See #566605 | ||
7810 | + */ | ||
7811 | + const GstTagList *tags; | ||
7812 | +#endif | ||
7813 | + | ||
7814 | + /* open "file" (gstreamer protocol to next element) */ | ||
7815 | + if (!ffmpegmux->opened) { | ||
7816 | + int open_flags = URL_WRONLY; | ||
7817 | + | ||
7818 | + /* we do need all streams to have started capsnego, | ||
7819 | + * or things will go horribly wrong */ | ||
7820 | + for (collected = ffmpegmux->collect->data; collected; | ||
7821 | + collected = g_slist_next (collected)) { | ||
7822 | + GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data; | ||
7823 | + AVStream *st = ffmpegmux->context->streams[collect_pad->padnum]; | ||
7824 | + | ||
7825 | + /* check whether the pad has successfully completed capsnego */ | ||
7826 | + if (st->codec->codec_id == CODEC_ID_NONE) { | ||
7827 | + GST_ELEMENT_ERROR (ffmpegmux, CORE, NEGOTIATION, (NULL), | ||
7828 | + ("no caps set on stream %d (%s)", collect_pad->padnum, | ||
7829 | + (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) ? | ||
7830 | + "video" : "audio")); | ||
7831 | + return GST_FLOW_ERROR; | ||
7832 | + } | ||
7833 | + /* set framerate for audio */ | ||
7834 | + if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) { | ||
7835 | + switch (st->codec->codec_id) { | ||
7836 | + case CODEC_ID_PCM_S16LE: | ||
7837 | + case CODEC_ID_PCM_S16BE: | ||
7838 | + case CODEC_ID_PCM_U16LE: | ||
7839 | + case CODEC_ID_PCM_U16BE: | ||
7840 | + case CODEC_ID_PCM_S8: | ||
7841 | + case CODEC_ID_PCM_U8: | ||
7842 | + st->codec->frame_size = 1; | ||
7843 | + break; | ||
7844 | + default: | ||
7845 | + { | ||
7846 | + GstBuffer *buffer; | ||
7847 | + | ||
7848 | + /* FIXME : This doesn't work for RAW AUDIO... | ||
7849 | + * in fact I'm wondering if it even works for any kind of audio... */ | ||
7850 | + buffer = gst_collect_pads_peek (ffmpegmux->collect, | ||
7851 | + (GstCollectData *) collect_pad); | ||
7852 | + if (buffer) { | ||
7853 | + st->codec->frame_size = | ||
7854 | + st->codec->sample_rate * | ||
7855 | + GST_BUFFER_DURATION (buffer) / GST_SECOND; | ||
7856 | + gst_buffer_unref (buffer); | ||
7857 | + } | ||
7858 | + } | ||
7859 | + } | ||
7860 | + } | ||
7861 | + } | ||
7862 | + | ||
7863 | +#if 0 | ||
7864 | + /* Re-enable once converted to new AVMetaData API | ||
7865 | + * See #566605 | ||
7866 | + */ | ||
7867 | + | ||
7868 | + /* tags */ | ||
7869 | + tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (ffmpegmux)); | ||
7870 | + if (tags) { | ||
7871 | + gint i; | ||
7872 | + gchar *s; | ||
7873 | + | ||
7874 | + /* get the interesting ones */ | ||
7875 | + if (gst_tag_list_get_string (tags, GST_TAG_TITLE, &s)) { | ||
7876 | + strncpy (ffmpegmux->context->title, s, | ||
7877 | + sizeof (ffmpegmux->context->title)); | ||
7878 | + } | ||
7879 | + if (gst_tag_list_get_string (tags, GST_TAG_ARTIST, &s)) { | ||
7880 | + strncpy (ffmpegmux->context->author, s, | ||
7881 | + sizeof (ffmpegmux->context->author)); | ||
7882 | + } | ||
7883 | + if (gst_tag_list_get_string (tags, GST_TAG_COPYRIGHT, &s)) { | ||
7884 | + strncpy (ffmpegmux->context->copyright, s, | ||
7885 | + sizeof (ffmpegmux->context->copyright)); | ||
7886 | + } | ||
7887 | + if (gst_tag_list_get_string (tags, GST_TAG_COMMENT, &s)) { | ||
7888 | + strncpy (ffmpegmux->context->comment, s, | ||
7889 | + sizeof (ffmpegmux->context->comment)); | ||
7890 | + } | ||
7891 | + if (gst_tag_list_get_string (tags, GST_TAG_ALBUM, &s)) { | ||
7892 | + strncpy (ffmpegmux->context->album, s, | ||
7893 | + sizeof (ffmpegmux->context->album)); | ||
7894 | + } | ||
7895 | + if (gst_tag_list_get_string (tags, GST_TAG_GENRE, &s)) { | ||
7896 | + strncpy (ffmpegmux->context->genre, s, | ||
7897 | + sizeof (ffmpegmux->context->genre)); | ||
7898 | + } | ||
7899 | + if (gst_tag_list_get_int (tags, GST_TAG_TRACK_NUMBER, &i)) { | ||
7900 | + ffmpegmux->context->track = i; | ||
7901 | + } | ||
7902 | + } | ||
7903 | +#endif | ||
7904 | + | ||
7905 | + /* set the streamheader flag for gstffmpegprotocol if codec supports it */ | ||
7906 | + if (!strcmp (ffmpegmux->context->oformat->name, "flv")) { | ||
7907 | + open_flags |= GST_FFMPEG_URL_STREAMHEADER; | ||
7908 | + } | ||
7909 | + | ||
7910 | + if (url_fopen (&ffmpegmux->context->pb, | ||
7911 | + ffmpegmux->context->filename, open_flags) < 0) { | ||
7912 | + GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, TOO_LAZY, (NULL), | ||
7913 | + ("Failed to open stream context in ffmux")); | ||
7914 | + return GST_FLOW_ERROR; | ||
7915 | + } | ||
7916 | + | ||
7917 | + if (av_set_parameters (ffmpegmux->context, NULL) < 0) { | ||
7918 | + GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, INIT, (NULL), | ||
7919 | + ("Failed to initialize muxer")); | ||
7920 | + return GST_FLOW_ERROR; | ||
7921 | + } | ||
7922 | + | ||
7923 | + /* now open the mux format */ | ||
7924 | + if (av_write_header (ffmpegmux->context) < 0) { | ||
7925 | + GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, SETTINGS, (NULL), | ||
7926 | + ("Failed to write file header - check codec settings")); | ||
7927 | + return GST_FLOW_ERROR; | ||
7928 | + } | ||
7929 | + | ||
7930 | + /* we're now opened */ | ||
7931 | + ffmpegmux->opened = TRUE; | ||
7932 | + | ||
7933 | + /* flush the header so it will be used as streamheader */ | ||
7934 | + put_flush_packet (ffmpegmux->context->pb); | ||
7935 | + } | ||
7936 | + | ||
7937 | + /* take the one with earliest timestamp, | ||
7938 | + * and push it forward */ | ||
7939 | + best_pad = NULL; | ||
7940 | + best_time = GST_CLOCK_TIME_NONE; | ||
7941 | + for (collected = ffmpegmux->collect->data; collected; | ||
7942 | + collected = g_slist_next (collected)) { | ||
7943 | + GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data; | ||
7944 | + GstBuffer *buffer = gst_collect_pads_peek (ffmpegmux->collect, | ||
7945 | + (GstCollectData *) collect_pad); | ||
7946 | + | ||
7947 | + /* if there's no buffer, just continue */ | ||
7948 | + if (buffer == NULL) { | ||
7949 | + continue; | ||
7950 | + } | ||
7951 | + | ||
7952 | + /* if we have no buffer yet, just use the first one */ | ||
7953 | + if (best_pad == NULL) { | ||
7954 | + best_pad = collect_pad; | ||
7955 | + best_time = GST_BUFFER_TIMESTAMP (buffer); | ||
7956 | + goto next_pad; | ||
7957 | + } | ||
7958 | + | ||
7959 | + /* if we do have one, only use this one if it's older */ | ||
7960 | + if (GST_BUFFER_TIMESTAMP (buffer) < best_time) { | ||
7961 | + best_time = GST_BUFFER_TIMESTAMP (buffer); | ||
7962 | + best_pad = collect_pad; | ||
7963 | + } | ||
7964 | + | ||
7965 | + next_pad: | ||
7966 | + gst_buffer_unref (buffer); | ||
7967 | + | ||
7968 | + /* Mux buffers with invalid timestamp first */ | ||
7969 | + if (!GST_CLOCK_TIME_IS_VALID (best_time)) | ||
7970 | + break; | ||
7971 | + } | ||
7972 | + | ||
7973 | + /* now handle the buffer, or signal EOS if we have | ||
7974 | + * no buffers left */ | ||
7975 | + if (best_pad != NULL) { | ||
7976 | + GstBuffer *buf; | ||
7977 | + AVPacket pkt; | ||
7978 | + gboolean need_free = FALSE; | ||
7979 | + | ||
7980 | + /* push out current buffer */ | ||
7981 | + buf = gst_collect_pads_pop (ffmpegmux->collect, | ||
7982 | + (GstCollectData *) best_pad); | ||
7983 | + | ||
7984 | + ffmpegmux->context->streams[best_pad->padnum]->codec->frame_number++; | ||
7985 | + | ||
7986 | + /* set time */ | ||
7987 | + pkt.pts = gst_ffmpeg_time_gst_to_ff (GST_BUFFER_TIMESTAMP (buf), | ||
7988 | + ffmpegmux->context->streams[best_pad->padnum]->time_base); | ||
7989 | + pkt.dts = pkt.pts; | ||
7990 | + | ||
7991 | + if (strcmp (ffmpegmux->context->oformat->name, "gif") == 0) { | ||
7992 | + AVStream *st = ffmpegmux->context->streams[best_pad->padnum]; | ||
7993 | + AVPicture src, dst; | ||
7994 | + | ||
7995 | + need_free = TRUE; | ||
7996 | + pkt.size = st->codec->width * st->codec->height * 3; | ||
7997 | + pkt.data = g_malloc (pkt.size); | ||
7998 | + | ||
7999 | + dst.data[0] = pkt.data; | ||
8000 | + dst.data[1] = NULL; | ||
8001 | + dst.data[2] = NULL; | ||
8002 | + dst.linesize[0] = st->codec->width * 3; | ||
8003 | + | ||
8004 | + gst_ffmpeg_avpicture_fill (&src, GST_BUFFER_DATA (buf), | ||
8005 | + PIX_FMT_RGB24, st->codec->width, st->codec->height); | ||
8006 | + | ||
8007 | + av_picture_copy (&dst, &src, PIX_FMT_RGB24, | ||
8008 | + st->codec->width, st->codec->height); | ||
8009 | + } else { | ||
8010 | + pkt.data = GST_BUFFER_DATA (buf); | ||
8011 | + pkt.size = GST_BUFFER_SIZE (buf); | ||
8012 | + } | ||
8013 | + | ||
8014 | + pkt.stream_index = best_pad->padnum; | ||
8015 | + pkt.flags = 0; | ||
8016 | + | ||
8017 | + if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT)) | ||
8018 | + pkt.flags |= AV_PKT_FLAG_KEY; | ||
8019 | + | ||
8020 | + if (GST_BUFFER_DURATION_IS_VALID (buf)) | ||
8021 | + pkt.duration = | ||
8022 | + gst_ffmpeg_time_gst_to_ff (GST_BUFFER_DURATION (buf), | ||
8023 | + ffmpegmux->context->streams[best_pad->padnum]->time_base); | ||
8024 | + else | ||
8025 | + pkt.duration = 0; | ||
8026 | + av_write_frame (ffmpegmux->context, &pkt); | ||
8027 | + gst_buffer_unref (buf); | ||
8028 | + if (need_free) | ||
8029 | + g_free (pkt.data); | ||
8030 | + } else { | ||
8031 | + /* close down */ | ||
8032 | + av_write_trailer (ffmpegmux->context); | ||
8033 | + ffmpegmux->opened = FALSE; | ||
8034 | + put_flush_packet (ffmpegmux->context->pb); | ||
8035 | + url_fclose (ffmpegmux->context->pb); | ||
8036 | + gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_eos ()); | ||
8037 | + return GST_FLOW_UNEXPECTED; | ||
8038 | + } | ||
8039 | + | ||
8040 | + return GST_FLOW_OK; | ||
8041 | +} | ||
8042 | + | ||
8043 | +static GstStateChangeReturn | ||
8044 | +gst_ffmpegmux_change_state (GstElement * element, GstStateChange transition) | ||
8045 | +{ | ||
8046 | + GstFlowReturn ret; | ||
8047 | + GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) (element); | ||
8048 | + | ||
8049 | + switch (transition) { | ||
8050 | + case GST_STATE_CHANGE_NULL_TO_READY: | ||
8051 | + break; | ||
8052 | + case GST_STATE_CHANGE_READY_TO_PAUSED: | ||
8053 | + gst_collect_pads_start (ffmpegmux->collect); | ||
8054 | + break; | ||
8055 | + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: | ||
8056 | + break; | ||
8057 | + case GST_STATE_CHANGE_PAUSED_TO_READY: | ||
8058 | + gst_collect_pads_stop (ffmpegmux->collect); | ||
8059 | + break; | ||
8060 | + default: | ||
8061 | + break; | ||
8062 | + } | ||
8063 | + | ||
8064 | + ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); | ||
8065 | + | ||
8066 | + switch (transition) { | ||
8067 | + case GST_STATE_CHANGE_PLAYING_TO_PAUSED: | ||
8068 | + break; | ||
8069 | + case GST_STATE_CHANGE_PAUSED_TO_READY: | ||
8070 | + gst_tag_setter_reset_tags (GST_TAG_SETTER (ffmpegmux)); | ||
8071 | + if (ffmpegmux->opened) { | ||
8072 | + ffmpegmux->opened = FALSE; | ||
8073 | + url_fclose (ffmpegmux->context->pb); | ||
8074 | + } | ||
8075 | + break; | ||
8076 | + case GST_STATE_CHANGE_READY_TO_NULL: | ||
8077 | + break; | ||
8078 | + default: | ||
8079 | + break; | ||
8080 | + } | ||
8081 | + | ||
8082 | + return ret; | ||
8083 | +} | ||
8084 | + | ||
8085 | +static GstCaps * | ||
8086 | +gst_ffmpegmux_get_id_caps (enum CodecID *id_list) | ||
8087 | +{ | ||
8088 | + GstCaps *caps, *t; | ||
8089 | + gint i; | ||
8090 | + | ||
8091 | + caps = gst_caps_new_empty (); | ||
8092 | + for (i = 0; id_list[i] != CODEC_ID_NONE; i++) { | ||
8093 | + if ((t = gst_ffmpeg_codecid_to_caps (id_list[i], NULL, TRUE))) | ||
8094 | + gst_caps_append (caps, t); | ||
8095 | + } | ||
8096 | + if (gst_caps_is_empty (caps)) { | ||
8097 | + gst_caps_unref (caps); | ||
8098 | + return NULL; | ||
8099 | + } | ||
8100 | + | ||
8101 | + return caps; | ||
8102 | +} | ||
8103 | + | ||
8104 | +/* set a list of integer values on the caps, e.g. for sample rates */ | ||
8105 | +static void | ||
8106 | +gst_ffmpeg_mux_simple_caps_set_int_list (GstCaps * caps, const gchar * field, | ||
8107 | + guint num, const gint * values) | ||
8108 | +{ | ||
8109 | + GValue list = { 0, }; | ||
8110 | + GValue val = { 0, }; | ||
8111 | + gint i; | ||
8112 | + | ||
8113 | + g_return_if_fail (GST_CAPS_IS_SIMPLE (caps)); | ||
8114 | + | ||
8115 | + g_value_init (&list, GST_TYPE_LIST); | ||
8116 | + g_value_init (&val, G_TYPE_INT); | ||
8117 | + | ||
8118 | + for (i = 0; i < num; ++i) { | ||
8119 | + g_value_set_int (&val, values[i]); | ||
8120 | + gst_value_list_append_value (&list, &val); | ||
8121 | + } | ||
8122 | + | ||
8123 | + gst_structure_set_value (gst_caps_get_structure (caps, 0), field, &list); | ||
8124 | + | ||
8125 | + g_value_unset (&val); | ||
8126 | + g_value_unset (&list); | ||
8127 | +} | ||
8128 | + | ||
8129 | +gboolean | ||
8130 | +gst_ffmpegmux_register (GstPlugin * plugin) | ||
8131 | +{ | ||
8132 | + GTypeInfo typeinfo = { | ||
8133 | + sizeof (GstFFMpegMuxClass), | ||
8134 | + (GBaseInitFunc) gst_ffmpegmux_base_init, | ||
8135 | + NULL, | ||
8136 | + (GClassInitFunc) gst_ffmpegmux_class_init, | ||
8137 | + NULL, | ||
8138 | + NULL, | ||
8139 | + sizeof (GstFFMpegMux), | ||
8140 | + 0, | ||
8141 | + (GInstanceInitFunc) gst_ffmpegmux_init, | ||
8142 | + }; | ||
8143 | + static const GInterfaceInfo tag_setter_info = { | ||
8144 | + NULL, NULL, NULL | ||
8145 | + }; | ||
8146 | + GType type; | ||
8147 | + AVOutputFormat *in_plugin; | ||
8148 | + | ||
8149 | + in_plugin = av_oformat_next (NULL); | ||
8150 | + | ||
8151 | + GST_LOG ("Registering muxers"); | ||
8152 | + | ||
8153 | + while (in_plugin) { | ||
8154 | + gchar *type_name; | ||
8155 | + gchar *p; | ||
8156 | + GstRank rank = GST_RANK_MARGINAL; | ||
8157 | + | ||
8158 | + if ((!strncmp (in_plugin->name, "u16", 3)) || | ||
8159 | + (!strncmp (in_plugin->name, "s16", 3)) || | ||
8160 | + (!strncmp (in_plugin->name, "u24", 3)) || | ||
8161 | + (!strncmp (in_plugin->name, "s24", 3)) || | ||
8162 | + (!strncmp (in_plugin->name, "u8", 2)) || | ||
8163 | + (!strncmp (in_plugin->name, "s8", 2)) || | ||
8164 | + (!strncmp (in_plugin->name, "u32", 3)) || | ||
8165 | + (!strncmp (in_plugin->name, "s32", 3)) || | ||
8166 | + (!strncmp (in_plugin->name, "f32", 3)) || | ||
8167 | + (!strncmp (in_plugin->name, "f64", 3)) || | ||
8168 | + (!strncmp (in_plugin->name, "raw", 3)) || | ||
8169 | + (!strncmp (in_plugin->name, "crc", 3)) || | ||
8170 | + (!strncmp (in_plugin->name, "null", 4)) || | ||
8171 | + (!strncmp (in_plugin->name, "gif", 3)) || | ||
8172 | + (!strncmp (in_plugin->name, "frame", 5)) || | ||
8173 | + (!strncmp (in_plugin->name, "image", 5)) || | ||
8174 | + (!strncmp (in_plugin->name, "mulaw", 5)) || | ||
8175 | + (!strncmp (in_plugin->name, "alaw", 4)) || | ||
8176 | + (!strncmp (in_plugin->name, "h26", 3)) || | ||
8177 | + (!strncmp (in_plugin->name, "rtp", 3)) || | ||
8178 | + (!strncmp (in_plugin->name, "ass", 3)) || | ||
8179 | + (!strncmp (in_plugin->name, "ffmetadata", 10)) || | ||
8180 | + (!strncmp (in_plugin->name, "srt", 3)) | ||
8181 | + ) { | ||
8182 | + GST_LOG ("Ignoring muxer %s", in_plugin->name); | ||
8183 | + goto next; | ||
8184 | + } | ||
8185 | + | ||
8186 | + if ((!strncmp (in_plugin->long_name, "raw ", 4))) { | ||
8187 | + GST_LOG ("Ignoring raw muxer %s", in_plugin->name); | ||
8188 | + goto next; | ||
8189 | + } | ||
8190 | + | ||
8191 | + if (gst_ffmpegmux_get_replacement (in_plugin->name)) | ||
8192 | + rank = GST_RANK_NONE; | ||
8193 | + | ||
8194 | + /* FIXME : We need a fast way to know whether we have mappings for this | ||
8195 | + * muxer type. */ | ||
8196 | + | ||
8197 | + /* construct the type */ | ||
8198 | + type_name = g_strdup_printf ("ffmux_%s", in_plugin->name); | ||
8199 | + | ||
8200 | + p = type_name; | ||
8201 | + | ||
8202 | + while (*p) { | ||
8203 | + if (*p == '.') | ||
8204 | + *p = '_'; | ||
8205 | + p++; | ||
8206 | + } | ||
8207 | + | ||
8208 | + type = g_type_from_name (type_name); | ||
8209 | + | ||
8210 | + if (!type) { | ||
8211 | + /* create the type now */ | ||
8212 | + type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0); | ||
8213 | + g_type_set_qdata (type, GST_FFMUX_PARAMS_QDATA, (gpointer) in_plugin); | ||
8214 | + g_type_add_interface_static (type, GST_TYPE_TAG_SETTER, &tag_setter_info); | ||
8215 | + } | ||
8216 | + | ||
8217 | + if (!gst_element_register (plugin, type_name, rank, type)) { | ||
8218 | + g_free (type_name); | ||
8219 | + return FALSE; | ||
8220 | + } | ||
8221 | + | ||
8222 | + g_free (type_name); | ||
8223 | + | ||
8224 | + next: | ||
8225 | + in_plugin = av_oformat_next (in_plugin); | ||
8226 | + } | ||
8227 | + | ||
8228 | + GST_LOG ("Finished registering muxers"); | ||
8229 | + | ||
8230 | + return TRUE; | ||
8231 | +} | ||
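
The gstffmpegprotocol.c hunks just below retire the URLContext-based handlers: each callback loses its URLContext * argument and receives the GstProtocolInfo as a plain opaque pointer, which is the shape an AVIOContext callback takes. For reference, the callback signatures avio_alloc_context() expects look roughly like this (the typedef names are illustrative, not libav's):

    #include <stdint.h>

    typedef int     (*gst_ffmpeg_read_cb)  (void *opaque, uint8_t *buf, int size);
    typedef int     (*gst_ffmpeg_write_cb) (void *opaque, uint8_t *buf, int size);
    typedef int64_t (*gst_ffmpeg_seek_cb)  (void *opaque, int64_t offset, int whence);
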
8232 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegprotocol.c gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegprotocol.c | ||
8233 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegprotocol.c 2011-07-12 16:35:28.000000000 +0200 | ||
8234 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegprotocol.c 2014-08-08 15:26:07.875857555 +0200 | ||
8235 | @@ -46,63 +46,14 @@ | ||
8236 | }; | ||
8237 | |||
8238 | static int | ||
8239 | -gst_ffmpegdata_open (URLContext * h, const char *filename, int flags) | ||
8240 | -{ | ||
8241 | - GstProtocolInfo *info; | ||
8242 | - GstPad *pad; | ||
8243 | - | ||
8244 | - GST_LOG ("Opening %s", filename); | ||
8245 | - | ||
8246 | - info = g_new0 (GstProtocolInfo, 1); | ||
8247 | - | ||
8248 | - info->set_streamheader = flags & GST_FFMPEG_URL_STREAMHEADER; | ||
8249 | - flags &= ~GST_FFMPEG_URL_STREAMHEADER; | ||
8250 | - h->flags &= ~GST_FFMPEG_URL_STREAMHEADER; | ||
8251 | - | ||
8252 | - /* we don't support R/W together */ | ||
8253 | - if (flags != URL_RDONLY && flags != URL_WRONLY) { | ||
8254 | - GST_WARNING ("Only read-only or write-only are supported"); | ||
8255 | - return -EINVAL; | ||
8256 | - } | ||
8257 | - | ||
8258 | - if (sscanf (&filename[12], "%p", &pad) != 1) { | ||
8259 | - GST_WARNING ("could not decode pad from %s", filename); | ||
8260 | - return -EIO; | ||
8261 | - } | ||
8262 | - | ||
8263 | - /* make sure we're a pad and that we're of the right type */ | ||
8264 | - g_return_val_if_fail (GST_IS_PAD (pad), -EINVAL); | ||
8265 | - | ||
8266 | - switch (flags) { | ||
8267 | - case URL_RDONLY: | ||
8268 | - g_return_val_if_fail (GST_PAD_IS_SINK (pad), -EINVAL); | ||
8269 | - break; | ||
8270 | - case URL_WRONLY: | ||
8271 | - g_return_val_if_fail (GST_PAD_IS_SRC (pad), -EINVAL); | ||
8272 | - break; | ||
8273 | - } | ||
8274 | - | ||
8275 | - info->eos = FALSE; | ||
8276 | - info->pad = pad; | ||
8277 | - info->offset = 0; | ||
8278 | - | ||
8279 | - h->priv_data = (void *) info; | ||
8280 | - h->is_streamed = FALSE; | ||
8281 | - h->max_packet_size = 0; | ||
8282 | - | ||
8283 | - return 0; | ||
8284 | -} | ||
8285 | - | ||
8286 | -static int | ||
8287 | -gst_ffmpegdata_peek (URLContext * h, unsigned char *buf, int size) | ||
8288 | +gst_ffmpegdata_peek (void *priv_data, unsigned char *buf, int size) | ||
8289 | { | ||
8290 | GstProtocolInfo *info; | ||
8291 | GstBuffer *inbuf = NULL; | ||
8292 | GstFlowReturn ret; | ||
8293 | int total = 0; | ||
8294 | |||
8295 | - g_return_val_if_fail (h->flags == URL_RDONLY, AVERROR (EIO)); | ||
8296 | - info = (GstProtocolInfo *) h->priv_data; | ||
8297 | + info = (GstProtocolInfo *) priv_data; | ||
8298 | |||
8299 | GST_DEBUG ("Pulling %d bytes at position %" G_GUINT64_FORMAT, size, | ||
8300 | info->offset); | ||
8301 | @@ -134,17 +85,17 @@ | ||
8302 | } | ||
8303 | |||
8304 | static int | ||
8305 | -gst_ffmpegdata_read (URLContext * h, unsigned char *buf, int size) | ||
8306 | +gst_ffmpegdata_read (void *priv_data, unsigned char *buf, int size) | ||
8307 | { | ||
8308 | gint res; | ||
8309 | GstProtocolInfo *info; | ||
8310 | |||
8311 | - info = (GstProtocolInfo *) h->priv_data; | ||
8312 | + info = (GstProtocolInfo *) priv_data; | ||
8313 | |||
8314 | GST_DEBUG ("Reading %d bytes of data at position %" G_GUINT64_FORMAT, size, | ||
8315 | info->offset); | ||
8316 | |||
8317 | - res = gst_ffmpegdata_peek (h, buf, size); | ||
8318 | + res = gst_ffmpegdata_peek (priv_data, buf, size); | ||
8319 | if (res >= 0) | ||
8320 | info->offset += res; | ||
8321 | |||
8322 | @@ -154,15 +105,13 @@ | ||
8323 | } | ||
8324 | |||
8325 | static int | ||
8326 | -gst_ffmpegdata_write (URLContext * h, const unsigned char *buf, int size) | ||
8327 | +gst_ffmpegdata_write (void *priv_data, const unsigned char *buf, int size) | ||
8328 | { | ||
8329 | GstProtocolInfo *info; | ||
8330 | GstBuffer *outbuf; | ||
8331 | |||
8332 | GST_DEBUG ("Writing %d bytes", size); | ||
8333 | - info = (GstProtocolInfo *) h->priv_data; | ||
8334 | - | ||
8335 | - g_return_val_if_fail (h->flags != URL_RDONLY, -EIO); | ||
8336 | + info = (GstProtocolInfo *) priv_data; | ||
8337 | |||
8338 | /* create buffer and push data further */ | ||
8339 | if (gst_pad_alloc_buffer_and_set_caps (info->pad, | ||
8340 | @@ -179,7 +128,7 @@ | ||
8341 | } | ||
8342 | |||
8343 | static int64_t | ||
8344 | -gst_ffmpegdata_seek (URLContext * h, int64_t pos, int whence) | ||
8345 | +gst_ffmpegdata_seek (void *priv_data, int64_t pos, int whence) | ||
8346 | { | ||
8347 | GstProtocolInfo *info; | ||
8348 | guint64 newpos = 0; | ||
8349 | @@ -187,70 +136,62 @@ | ||
8350 | GST_DEBUG ("Seeking to %" G_GINT64_FORMAT ", whence=%d", | ||
8351 | (gint64) pos, whence); | ||
8352 | |||
8353 | - info = (GstProtocolInfo *) h->priv_data; | ||
8354 | + info = (GstProtocolInfo *) priv_data; | ||
8355 | |||
8356 | /* TODO : if we are push-based, we need to return sensible info */ | ||
8357 | |||
8358 | - switch (h->flags) { | ||
8359 | - case URL_RDONLY: | ||
8360 | - { | ||
8361 | - /* sinkpad */ | ||
8362 | - switch (whence) { | ||
8363 | - case SEEK_SET: | ||
8364 | - newpos = (guint64) pos; | ||
8365 | - break; | ||
8366 | - case SEEK_CUR: | ||
8367 | - newpos = info->offset + pos; | ||
8368 | - break; | ||
8369 | - case SEEK_END: | ||
8370 | - case AVSEEK_SIZE: | ||
8371 | - /* ffmpeg wants to know the current end position in bytes ! */ | ||
8372 | - { | ||
8373 | - GstFormat format = GST_FORMAT_BYTES; | ||
8374 | - gint64 duration; | ||
8375 | - | ||
8376 | - GST_DEBUG ("Seek end"); | ||
8377 | - | ||
8378 | - if (gst_pad_is_linked (info->pad)) | ||
8379 | - if (gst_pad_query_duration (GST_PAD_PEER (info->pad), &format, | ||
8380 | - &duration)) | ||
8381 | - newpos = ((guint64) duration) + pos; | ||
8382 | - } | ||
8383 | - break; | ||
8384 | - default: | ||
8385 | - g_assert (0); | ||
8386 | - break; | ||
8387 | + if (GST_PAD_IS_SINK (info->pad)) { | ||
8388 | + /* sinkpad */ | ||
8389 | + switch (whence) { | ||
8390 | + case SEEK_SET: | ||
8391 | + newpos = (guint64) pos; | ||
8392 | + break; | ||
8393 | + case SEEK_CUR: | ||
8394 | + newpos = info->offset + pos; | ||
8395 | + break; | ||
8396 | + case SEEK_END: | ||
8397 | + case AVSEEK_SIZE: | ||
8398 | + /* ffmpeg wants to know the current end position in bytes ! */ | ||
8399 | + { | ||
8400 | + GstFormat format = GST_FORMAT_BYTES; | ||
8401 | + gint64 duration; | ||
8402 | + | ||
8403 | + GST_DEBUG ("Seek end"); | ||
8404 | + | ||
8405 | + if (gst_pad_is_linked (info->pad)) | ||
8406 | + if (gst_pad_query_duration (GST_PAD_PEER (info->pad), &format, | ||
8407 | + &duration)) | ||
8408 | + newpos = ((guint64) duration) + pos; | ||
8409 | } | ||
8410 | - /* FIXME : implement case for push-based behaviour */ | ||
8411 | - if (whence != AVSEEK_SIZE) | ||
8412 | - info->offset = newpos; | ||
8413 | + break; | ||
8414 | + default: | ||
8415 | + g_assert (0); | ||
8416 | + break; | ||
8417 | } | ||
8418 | - break; | ||
8419 | - case URL_WRONLY: | ||
8420 | - { | ||
8421 | - /* srcpad */ | ||
8422 | - switch (whence) { | ||
8423 | - case SEEK_SET: | ||
8424 | - info->offset = (guint64) pos; | ||
8425 | - gst_pad_push_event (info->pad, gst_event_new_new_segment | ||
8426 | - (TRUE, 1.0, GST_FORMAT_BYTES, info->offset, | ||
8427 | - GST_CLOCK_TIME_NONE, info->offset)); | ||
8428 | - break; | ||
8429 | - case SEEK_CUR: | ||
8430 | - info->offset += pos; | ||
8431 | - gst_pad_push_event (info->pad, gst_event_new_new_segment | ||
8432 | - (TRUE, 1.0, GST_FORMAT_BYTES, info->offset, | ||
8433 | - GST_CLOCK_TIME_NONE, info->offset)); | ||
8434 | - break; | ||
8435 | - default: | ||
8436 | - break; | ||
8437 | - } | ||
8438 | - newpos = info->offset; | ||
8439 | + /* FIXME : implement case for push-based behaviour */ | ||
8440 | + if (whence != AVSEEK_SIZE) | ||
8441 | + info->offset = newpos; | ||
8442 | + } else if (GST_PAD_IS_SRC (info->pad)) { | ||
8443 | + /* srcpad */ | ||
8444 | + switch (whence) { | ||
8445 | + case SEEK_SET: | ||
8446 | + info->offset = (guint64) pos; | ||
8447 | + gst_pad_push_event (info->pad, gst_event_new_new_segment | ||
8448 | + (TRUE, 1.0, GST_FORMAT_BYTES, info->offset, | ||
8449 | + GST_CLOCK_TIME_NONE, info->offset)); | ||
8450 | + break; | ||
8451 | + case SEEK_CUR: | ||
8452 | + info->offset += pos; | ||
8453 | + gst_pad_push_event (info->pad, gst_event_new_new_segment | ||
8454 | + (TRUE, 1.0, GST_FORMAT_BYTES, info->offset, | ||
8455 | + GST_CLOCK_TIME_NONE, info->offset)); | ||
8456 | + break; | ||
8457 | + default: | ||
8458 | + break; | ||
8459 | } | ||
8460 | - break; | ||
8461 | - default: | ||
8462 | - g_assert (0); | ||
8463 | - break; | ||
8464 | + newpos = info->offset; | ||
8465 | + } else { | ||
8466 | + g_assert_not_reached (); | ||
8467 | } | ||
8468 | |||
8469 | GST_DEBUG ("Now at offset %" G_GUINT64_FORMAT " (returning %" G_GUINT64_FORMAT | ||
8470 | @@ -258,85 +199,91 @@ | ||
8471 | return newpos; | ||
8472 | } | ||
8473 | |||
8474 | -static int | ||
8475 | -gst_ffmpegdata_close (URLContext * h) | ||
8476 | +int | ||
8477 | +gst_ffmpegdata_close (AVIOContext * h) | ||
8478 | { | ||
8479 | GstProtocolInfo *info; | ||
8480 | |||
8481 | - info = (GstProtocolInfo *) h->priv_data; | ||
8482 | + info = (GstProtocolInfo *) h->opaque; | ||
8483 | if (info == NULL) | ||
8484 | return 0; | ||
8485 | |||
8486 | GST_LOG ("Closing file"); | ||
8487 | |||
8488 | - switch (h->flags) { | ||
8489 | - case URL_WRONLY: | ||
8490 | - { | ||
8491 | - /* send EOS - that closes down the stream */ | ||
8492 | - gst_pad_push_event (info->pad, gst_event_new_eos ()); | ||
8493 | - break; | ||
8494 | - } | ||
8495 | - default: | ||
8496 | - break; | ||
8497 | + if (GST_PAD_IS_SRC (info->pad)) { | ||
8498 | + /* send EOS - that closes down the stream */ | ||
8499 | + gst_pad_push_event (info->pad, gst_event_new_eos ()); | ||
8500 | } | ||
8501 | |||
8502 | /* clean up data */ | ||
8503 | g_free (info); | ||
8504 | - h->priv_data = NULL; | ||
8505 | + h->opaque = NULL; | ||
8506 | + | ||
8507 | + av_freep (&h->buffer); | ||
8508 | + av_free (h); | ||
8509 | |||
8510 | return 0; | ||
8511 | } | ||
8512 | |||
8513 | +int | ||
8514 | +gst_ffmpegdata_open (GstPad * pad, int flags, AVIOContext ** context) | ||
8515 | +{ | ||
8516 | + GstProtocolInfo *info; | ||
8517 | + static const int buffer_size = 4096; | ||
8518 | + unsigned char *buffer = NULL; | ||
8519 | |||
8520 | -URLProtocol gstreamer_protocol = { | ||
8521 | - /*.name = */ "gstreamer", | ||
8522 | - /*.url_open = */ gst_ffmpegdata_open, | ||
8523 | - /*.url_read = */ gst_ffmpegdata_read, | ||
8524 | - /*.url_write = */ gst_ffmpegdata_write, | ||
8525 | - /*.url_seek = */ gst_ffmpegdata_seek, | ||
8526 | - /*.url_close = */ gst_ffmpegdata_close, | ||
8527 | -}; | ||
8528 | + info = g_new0 (GstProtocolInfo, 1); | ||
8529 | |||
8530 | + info->set_streamheader = flags & GST_FFMPEG_URL_STREAMHEADER; | ||
8531 | + flags &= ~GST_FFMPEG_URL_STREAMHEADER; | ||
8532 | |||
8533 | -/* specialized protocol for cross-thread pushing, | ||
8534 | - * based on ffmpeg's pipe protocol */ | ||
8535 | + /* we don't support R/W together */ | ||
8536 | + if ((flags & AVIO_FLAG_WRITE) && (flags & AVIO_FLAG_READ)) { | ||
8537 | + GST_WARNING ("Only read-only or write-only are supported"); | ||
8538 | + return -EINVAL; | ||
8539 | + } | ||
8540 | |||
8541 | -static int | ||
8542 | -gst_ffmpeg_pipe_open (URLContext * h, const char *filename, int flags) | ||
8543 | -{ | ||
8544 | - GstFFMpegPipe *ffpipe; | ||
8545 | + /* make sure we're a pad and that we're of the right type */ | ||
8546 | + g_return_val_if_fail (GST_IS_PAD (pad), -EINVAL); | ||
8547 | |||
8548 | - GST_LOG ("Opening %s", filename); | ||
8549 | + if ((flags & AVIO_FLAG_READ)) | ||
8550 | + g_return_val_if_fail (GST_PAD_IS_SINK (pad), -EINVAL); | ||
8551 | + if ((flags & AVIO_FLAG_WRITE)) | ||
8552 | + g_return_val_if_fail (GST_PAD_IS_SRC (pad), -EINVAL); | ||
8553 | |||
8554 | - /* we don't support W together */ | ||
8555 | - if (flags != URL_RDONLY) { | ||
8556 | - GST_WARNING ("Only read-only is supported"); | ||
8557 | - return -EINVAL; | ||
8558 | - } | ||
8559 | + info->eos = FALSE; | ||
8560 | + info->pad = pad; | ||
8561 | + info->offset = 0; | ||
8562 | |||
8563 | - if (sscanf (&filename[10], "%p", &ffpipe) != 1) { | ||
8564 | - GST_WARNING ("could not decode pipe info from %s", filename); | ||
8565 | - return -EIO; | ||
8566 | + buffer = av_malloc (buffer_size); | ||
8567 | + if (buffer == NULL) { | ||
8568 | + GST_WARNING ("Failed to allocate buffer"); | ||
8569 | + return -ENOMEM; | ||
8570 | } | ||
8571 | |||
8572 | - /* sanity check */ | ||
8573 | - g_return_val_if_fail (GST_IS_ADAPTER (ffpipe->adapter), -EINVAL); | ||
8574 | - | ||
8575 | - h->priv_data = (void *) ffpipe; | ||
8576 | - h->is_streamed = TRUE; | ||
8577 | - h->max_packet_size = 0; | ||
8578 | + *context = | ||
8579 | + avio_alloc_context (buffer, buffer_size, flags, (void *) info, | ||
8580 | + gst_ffmpegdata_read, gst_ffmpegdata_write, gst_ffmpegdata_seek); | ||
8581 | + (*context)->seekable = AVIO_SEEKABLE_NORMAL; | ||
8582 | + if (!(flags & AVIO_FLAG_WRITE)) { | ||
8583 | + (*context)->buf_ptr = (*context)->buf_end; | ||
8584 | + (*context)->write_flag = 0; | ||
8585 | + } | ||
8586 | |||
8587 | return 0; | ||
8588 | } | ||
8589 | |||
8590 | +/* specialized protocol for cross-thread pushing, | ||
8591 | + * based on ffmpeg's pipe protocol */ | ||
8592 | + | ||
8593 | static int | ||
8594 | -gst_ffmpeg_pipe_read (URLContext * h, unsigned char *buf, int size) | ||
8595 | +gst_ffmpeg_pipe_read (void *priv_data, unsigned char *buf, int size) | ||
8596 | { | ||
8597 | GstFFMpegPipe *ffpipe; | ||
8598 | const guint8 *data; | ||
8599 | guint available; | ||
8600 | |||
8601 | - ffpipe = (GstFFMpegPipe *) h->priv_data; | ||
8602 | + ffpipe = (GstFFMpegPipe *) priv_data; | ||
8603 | |||
8604 | GST_LOG ("requested size %d", size); | ||
8605 | |||
8606 | @@ -367,21 +314,38 @@ | ||
8607 | return size; | ||
8608 | } | ||
8609 | |||
8610 | -static int | ||
8611 | -gst_ffmpeg_pipe_close (URLContext * h) | ||
8612 | +int | ||
8613 | +gst_ffmpeg_pipe_close (AVIOContext * h) | ||
8614 | { | ||
8615 | GST_LOG ("Closing pipe"); | ||
8616 | |||
8617 | - h->priv_data = NULL; | ||
8618 | + h->opaque = NULL; | ||
8619 | + av_freep (&h->buffer); | ||
8620 | + av_free (h); | ||
8621 | |||
8622 | return 0; | ||
8623 | } | ||
8624 | |||
8625 | -URLProtocol gstpipe_protocol = { | ||
8626 | - "gstpipe", | ||
8627 | - gst_ffmpeg_pipe_open, | ||
8628 | - gst_ffmpeg_pipe_read, | ||
8629 | - NULL, | ||
8630 | - NULL, | ||
8631 | - gst_ffmpeg_pipe_close, | ||
8632 | -}; | ||
8633 | +int | ||
8634 | +gst_ffmpeg_pipe_open (GstFFMpegPipe * ffpipe, int flags, AVIOContext ** context) | ||
8635 | +{ | ||
8636 | + static const int buffer_size = 4096; | ||
8637 | + unsigned char *buffer = NULL; | ||
8638 | + | ||
8639 | + /* sanity check */ | ||
8640 | + g_return_val_if_fail (GST_IS_ADAPTER (ffpipe->adapter), -EINVAL); | ||
8641 | + | ||
8642 | + buffer = av_malloc (buffer_size); | ||
8643 | + if (buffer == NULL) { | ||
8644 | + GST_WARNING ("Failed to allocate buffer"); | ||
8645 | + return -ENOMEM; | ||
8646 | + } | ||
8647 | + | ||
8648 | + *context = | ||
8649 | + avio_alloc_context (buffer, buffer_size, 0, (void *) ffpipe, | ||
8650 | + gst_ffmpeg_pipe_read, NULL, NULL); | ||
8651 | + (*context)->seekable = 0; | ||
8652 | + (*context)->buf_ptr = (*context)->buf_end; | ||
8653 | + | ||
8654 | + return 0; | ||
8655 | +} | ||
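The port above drops the removed URLProtocol registration and instead builds a per-pad AVIOContext through avio_alloc_context() with read/write/seek callbacks. A minimal sketch of that pattern follows; MemReader, mem_read and make_read_context are illustrative names standing in for GstProtocolInfo and the pad-based callbacks, not code from the patch:

/* Sketch of the AVIOContext pattern used above: a custom read callback
 * handed to avio_alloc_context() instead of a registered URLProtocol. */
#include <string.h>
#include <libavformat/avformat.h>

typedef struct {
  const uint8_t *data;
  size_t size;
  size_t pos;
} MemReader;

static int
mem_read (void *opaque, uint8_t *buf, int buf_size)
{
  MemReader *r = opaque;
  size_t left = r->size - r->pos;
  int n = (size_t) buf_size < left ? buf_size : (int) left;

  if (n <= 0)
    return AVERROR_EOF;        /* nothing left to read */
  memcpy (buf, r->data + r->pos, n);
  r->pos += n;
  return n;
}

AVIOContext *
make_read_context (MemReader *r)
{
  static const int buffer_size = 4096;
  unsigned char *buffer = av_malloc (buffer_size);

  if (buffer == NULL)
    return NULL;
  /* write_flag = 0: read-only, mirroring the GST_PAD_IS_SINK case above */
  return avio_alloc_context (buffer, buffer_size, 0, r,
      mem_read, NULL, NULL);
}

The returned context would then be assigned to an AVFormatContext's pb field before demuxing, which is essentially what the reworked gst_ffmpegdata_open()/gst_ffmpeg_pipe_open() above make possible.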
8656 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegutils.c gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegutils.c | ||
8657 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegutils.c 2011-07-13 11:07:28.000000000 +0200 | ||
8658 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegutils.c 2014-08-08 15:34:04.007874626 +0200 | ||
8659 | @@ -25,6 +25,11 @@ | ||
8660 | #ifdef __APPLE__ | ||
8661 | #include <sys/sysctl.h> | ||
8662 | #endif | ||
8663 | +#ifdef HAVE_FFMPEG_UNINSTALLED | ||
8664 | +#include <avformat.h> | ||
8665 | +#else | ||
8666 | +#include <libavformat/avformat.h> | ||
8667 | +#endif | ||
8668 | |||
8669 | G_CONST_RETURN gchar * | ||
8670 | gst_ffmpeg_get_codecid_longname (enum CodecID codec_id) | ||
8671 | @@ -39,21 +44,21 @@ | ||
8672 | } | ||
8673 | |||
8674 | gint | ||
8675 | -av_smp_format_depth (enum SampleFormat smp_fmt) | ||
8676 | +av_smp_format_depth (enum AVSampleFormat smp_fmt) | ||
8677 | { | ||
8678 | gint depth = -1; | ||
8679 | switch (smp_fmt) { | ||
8680 | - case SAMPLE_FMT_U8: | ||
8681 | + case AV_SAMPLE_FMT_U8: | ||
8682 | depth = 1; | ||
8683 | break; | ||
8684 | - case SAMPLE_FMT_S16: | ||
8685 | + case AV_SAMPLE_FMT_S16: | ||
8686 | depth = 2; | ||
8687 | break; | ||
8688 | - case SAMPLE_FMT_S32: | ||
8689 | - case SAMPLE_FMT_FLT: | ||
8690 | + case AV_SAMPLE_FMT_S32: | ||
8691 | + case AV_SAMPLE_FMT_FLT: | ||
8692 | depth = 4; | ||
8693 | break; | ||
8694 | - case SAMPLE_FMT_DBL: | ||
8695 | + case AV_SAMPLE_FMT_DBL: | ||
8696 | depth = 8; | ||
8697 | break; | ||
8698 | default: | ||
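For comparison only (the patch keeps its own switch): libav also ships av_get_bytes_per_sample(), which returns the same byte depths that the reworked av_smp_format_depth() hard-codes above. A small sketch, assuming the libav-9 samplefmt.h API:

/* Sketch: the library helper yields the same depths as the switch above. */
#include <stdio.h>
#include <libavutil/samplefmt.h>

int
main (void)
{
  static const enum AVSampleFormat fmts[] = {
    AV_SAMPLE_FMT_U8, AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_S32,
    AV_SAMPLE_FMT_FLT, AV_SAMPLE_FMT_DBL
  };
  size_t i;

  for (i = 0; i < sizeof (fmts) / sizeof (fmts[0]); i++)
    printf ("%-3s: %d byte(s)\n", av_get_sample_fmt_name (fmts[i]),
        av_get_bytes_per_sample (fmts[i]));
  return 0;
}

Built against libav 9 this prints 1, 2, 4, 4 and 8 bytes respectively, matching the cases handled above.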
8699 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegutils.c.orig gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegutils.c.orig | ||
8700 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegutils.c.orig 1970-01-01 01:00:00.000000000 +0100 | ||
8701 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegutils.c.orig 2011-07-13 11:07:28.000000000 +0200 | ||
8702 | @@ -0,0 +1,483 @@ | ||
8703 | +/* GStreamer | ||
8704 | + * Copyright (c) 2009 Edward Hervey <bilboed@bilboed.com> | ||
8705 | + * | ||
8706 | + * This library is free software; you can redistribute it and/or | ||
8707 | + * modify it under the terms of the GNU Library General Public | ||
8708 | + * License as published by the Free Software Foundation; either | ||
8709 | + * version 2 of the License, or (at your option) any later version. | ||
8710 | + * | ||
8711 | + * This library is distributed in the hope that it will be useful, | ||
8712 | + * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
8713 | + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
8714 | + * Library General Public License for more details. | ||
8715 | + * | ||
8716 | + * You should have received a copy of the GNU Library General Public | ||
8717 | + * License along with this library; if not, write to the | ||
8718 | + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, | ||
8719 | + * Boston, MA 02111-1307, USA. | ||
8720 | + */ | ||
8721 | + | ||
8722 | +#ifdef HAVE_CONFIG_H | ||
8723 | +#include "config.h" | ||
8724 | +#endif | ||
8725 | +#include "gstffmpegutils.h" | ||
8726 | +#include <unistd.h> | ||
8727 | +#ifdef __APPLE__ | ||
8728 | +#include <sys/sysctl.h> | ||
8729 | +#endif | ||
8730 | + | ||
8731 | +G_CONST_RETURN gchar * | ||
8732 | +gst_ffmpeg_get_codecid_longname (enum CodecID codec_id) | ||
8733 | +{ | ||
8734 | + AVCodec *codec; | ||
8735 | + /* Let's use what ffmpeg can provide us */ | ||
8736 | + | ||
8737 | + if ((codec = avcodec_find_decoder (codec_id)) || | ||
8738 | + (codec = avcodec_find_encoder (codec_id))) | ||
8739 | + return codec->long_name; | ||
8740 | + return NULL; | ||
8741 | +} | ||
8742 | + | ||
8743 | +gint | ||
8744 | +av_smp_format_depth (enum SampleFormat smp_fmt) | ||
8745 | +{ | ||
8746 | + gint depth = -1; | ||
8747 | + switch (smp_fmt) { | ||
8748 | + case SAMPLE_FMT_U8: | ||
8749 | + depth = 1; | ||
8750 | + break; | ||
8751 | + case SAMPLE_FMT_S16: | ||
8752 | + depth = 2; | ||
8753 | + break; | ||
8754 | + case SAMPLE_FMT_S32: | ||
8755 | + case SAMPLE_FMT_FLT: | ||
8756 | + depth = 4; | ||
8757 | + break; | ||
8758 | + case SAMPLE_FMT_DBL: | ||
8759 | + depth = 8; | ||
8760 | + break; | ||
8761 | + default: | ||
8762 | + GST_ERROR ("UNHANDLED SAMPLE FORMAT !"); | ||
8763 | + break; | ||
8764 | + } | ||
8765 | + return depth; | ||
8766 | +} | ||
8767 | + | ||
8768 | + | ||
8769 | +/* | ||
8770 | + * Fill in pointers to memory in a AVPicture, where | ||
8771 | + * everything is aligned by 4 (as required by X). | ||
8772 | + * This is mostly a copy from imgconvert.c with some | ||
8773 | + * small changes. | ||
8774 | + */ | ||
8775 | + | ||
8776 | +#define FF_COLOR_RGB 0 /* RGB color space */ | ||
8777 | +#define FF_COLOR_GRAY 1 /* gray color space */ | ||
8778 | +#define FF_COLOR_YUV 2 /* YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */ | ||
8779 | +#define FF_COLOR_YUV_JPEG 3 /* YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */ | ||
8780 | + | ||
8781 | +#define FF_PIXEL_PLANAR 0 /* each channel has one component in AVPicture */ | ||
8782 | +#define FF_PIXEL_PACKED 1 /* only one components containing all the channels */ | ||
8783 | +#define FF_PIXEL_PALETTE 2 /* one components containing indexes for a palette */ | ||
8784 | + | ||
8785 | +typedef struct PixFmtInfo | ||
8786 | +{ | ||
8787 | + const char *name; | ||
8788 | + uint8_t nb_channels; /* number of channels (including alpha) */ | ||
8789 | + uint8_t color_type; /* color type (see FF_COLOR_xxx constants) */ | ||
8790 | + uint8_t pixel_type; /* pixel storage type (see FF_PIXEL_xxx constants) */ | ||
8791 | + uint8_t is_alpha:1; /* true if alpha can be specified */ | ||
8792 | + uint8_t x_chroma_shift; /* X chroma subsampling factor is 2 ^ shift */ | ||
8793 | + uint8_t y_chroma_shift; /* Y chroma subsampling factor is 2 ^ shift */ | ||
8794 | + uint8_t depth; /* bit depth of the color components */ | ||
8795 | +} PixFmtInfo; | ||
8796 | + | ||
8797 | + | ||
8798 | +/* this table gives more information about formats */ | ||
8799 | +static PixFmtInfo pix_fmt_info[PIX_FMT_NB]; | ||
8800 | +void | ||
8801 | +gst_ffmpeg_init_pix_fmt_info (void) | ||
8802 | +{ | ||
8803 | + /* YUV formats */ | ||
8804 | + pix_fmt_info[PIX_FMT_YUV420P].name = g_strdup ("yuv420p"); | ||
8805 | + pix_fmt_info[PIX_FMT_YUV420P].nb_channels = 3; | ||
8806 | + pix_fmt_info[PIX_FMT_YUV420P].color_type = FF_COLOR_YUV; | ||
8807 | + pix_fmt_info[PIX_FMT_YUV420P].pixel_type = FF_PIXEL_PLANAR; | ||
8808 | + pix_fmt_info[PIX_FMT_YUV420P].depth = 8, | ||
8809 | + pix_fmt_info[PIX_FMT_YUV420P].x_chroma_shift = 1, | ||
8810 | + pix_fmt_info[PIX_FMT_YUV420P].y_chroma_shift = 1; | ||
8811 | + | ||
8812 | + pix_fmt_info[PIX_FMT_YUV422P].name = g_strdup ("yuv422p"); | ||
8813 | + pix_fmt_info[PIX_FMT_YUV422P].nb_channels = 3; | ||
8814 | + pix_fmt_info[PIX_FMT_YUV422P].color_type = FF_COLOR_YUV; | ||
8815 | + pix_fmt_info[PIX_FMT_YUV422P].pixel_type = FF_PIXEL_PLANAR; | ||
8816 | + pix_fmt_info[PIX_FMT_YUV422P].depth = 8; | ||
8817 | + pix_fmt_info[PIX_FMT_YUV422P].x_chroma_shift = 1; | ||
8818 | + pix_fmt_info[PIX_FMT_YUV422P].y_chroma_shift = 0; | ||
8819 | + | ||
8820 | + pix_fmt_info[PIX_FMT_YUV444P].name = g_strdup ("yuv444p"); | ||
8821 | + pix_fmt_info[PIX_FMT_YUV444P].nb_channels = 3; | ||
8822 | + pix_fmt_info[PIX_FMT_YUV444P].color_type = FF_COLOR_YUV; | ||
8823 | + pix_fmt_info[PIX_FMT_YUV444P].pixel_type = FF_PIXEL_PLANAR; | ||
8824 | + pix_fmt_info[PIX_FMT_YUV444P].depth = 8; | ||
8825 | + pix_fmt_info[PIX_FMT_YUV444P].x_chroma_shift = 0; | ||
8826 | + pix_fmt_info[PIX_FMT_YUV444P].y_chroma_shift = 0; | ||
8827 | + | ||
8828 | + pix_fmt_info[PIX_FMT_YUYV422].name = g_strdup ("yuv422"); | ||
8829 | + pix_fmt_info[PIX_FMT_YUYV422].nb_channels = 1; | ||
8830 | + pix_fmt_info[PIX_FMT_YUYV422].color_type = FF_COLOR_YUV; | ||
8831 | + pix_fmt_info[PIX_FMT_YUYV422].pixel_type = FF_PIXEL_PACKED; | ||
8832 | + pix_fmt_info[PIX_FMT_YUYV422].depth = 8; | ||
8833 | + pix_fmt_info[PIX_FMT_YUYV422].x_chroma_shift = 1; | ||
8834 | + pix_fmt_info[PIX_FMT_YUYV422].y_chroma_shift = 0; | ||
8835 | + | ||
8836 | + pix_fmt_info[PIX_FMT_YUV410P].name = g_strdup ("yuv410p"); | ||
8837 | + pix_fmt_info[PIX_FMT_YUV410P].nb_channels = 3; | ||
8838 | + pix_fmt_info[PIX_FMT_YUV410P].color_type = FF_COLOR_YUV; | ||
8839 | + pix_fmt_info[PIX_FMT_YUV410P].pixel_type = FF_PIXEL_PLANAR; | ||
8840 | + pix_fmt_info[PIX_FMT_YUV410P].depth = 8; | ||
8841 | + pix_fmt_info[PIX_FMT_YUV410P].x_chroma_shift = 2; | ||
8842 | + pix_fmt_info[PIX_FMT_YUV410P].y_chroma_shift = 2; | ||
8843 | + | ||
8844 | + pix_fmt_info[PIX_FMT_YUV411P].name = g_strdup ("yuv411p"); | ||
8845 | + pix_fmt_info[PIX_FMT_YUV411P].nb_channels = 3; | ||
8846 | + pix_fmt_info[PIX_FMT_YUV411P].color_type = FF_COLOR_YUV; | ||
8847 | + pix_fmt_info[PIX_FMT_YUV411P].pixel_type = FF_PIXEL_PLANAR; | ||
8848 | + pix_fmt_info[PIX_FMT_YUV411P].depth = 8; | ||
8849 | + pix_fmt_info[PIX_FMT_YUV411P].x_chroma_shift = 2; | ||
8850 | + pix_fmt_info[PIX_FMT_YUV411P].y_chroma_shift = 0; | ||
8851 | + | ||
8852 | + /* JPEG YUV */ | ||
8853 | + pix_fmt_info[PIX_FMT_YUVJ420P].name = g_strdup ("yuvj420p"); | ||
8854 | + pix_fmt_info[PIX_FMT_YUVJ420P].nb_channels = 3; | ||
8855 | + pix_fmt_info[PIX_FMT_YUVJ420P].color_type = FF_COLOR_YUV_JPEG; | ||
8856 | + pix_fmt_info[PIX_FMT_YUVJ420P].pixel_type = FF_PIXEL_PLANAR; | ||
8857 | + pix_fmt_info[PIX_FMT_YUVJ420P].depth = 8; | ||
8858 | + pix_fmt_info[PIX_FMT_YUVJ420P].x_chroma_shift = 1; | ||
8859 | + pix_fmt_info[PIX_FMT_YUVJ420P].y_chroma_shift = 1; | ||
8860 | + | ||
8861 | + pix_fmt_info[PIX_FMT_YUVJ422P].name = g_strdup ("yuvj422p"); | ||
8862 | + pix_fmt_info[PIX_FMT_YUVJ422P].nb_channels = 3; | ||
8863 | + pix_fmt_info[PIX_FMT_YUVJ422P].color_type = FF_COLOR_YUV_JPEG; | ||
8864 | + pix_fmt_info[PIX_FMT_YUVJ422P].pixel_type = FF_PIXEL_PLANAR; | ||
8865 | + pix_fmt_info[PIX_FMT_YUVJ422P].depth = 8; | ||
8866 | + pix_fmt_info[PIX_FMT_YUVJ422P].x_chroma_shift = 1; | ||
8867 | + pix_fmt_info[PIX_FMT_YUVJ422P].y_chroma_shift = 0; | ||
8868 | + | ||
8869 | + pix_fmt_info[PIX_FMT_YUVJ444P].name = g_strdup ("yuvj444p"); | ||
8870 | + pix_fmt_info[PIX_FMT_YUVJ444P].nb_channels = 3; | ||
8871 | + pix_fmt_info[PIX_FMT_YUVJ444P].color_type = FF_COLOR_YUV_JPEG; | ||
8872 | + pix_fmt_info[PIX_FMT_YUVJ444P].pixel_type = FF_PIXEL_PLANAR; | ||
8873 | + pix_fmt_info[PIX_FMT_YUVJ444P].depth = 8; | ||
8874 | + pix_fmt_info[PIX_FMT_YUVJ444P].x_chroma_shift = 0; | ||
8875 | + pix_fmt_info[PIX_FMT_YUVJ444P].y_chroma_shift = 0; | ||
8876 | + | ||
8877 | + /* RGB formats */ | ||
8878 | + pix_fmt_info[PIX_FMT_RGB24].name = g_strdup ("rgb24"); | ||
8879 | + pix_fmt_info[PIX_FMT_RGB24].nb_channels = 3; | ||
8880 | + pix_fmt_info[PIX_FMT_RGB24].color_type = FF_COLOR_RGB; | ||
8881 | + pix_fmt_info[PIX_FMT_RGB24].pixel_type = FF_PIXEL_PACKED; | ||
8882 | + pix_fmt_info[PIX_FMT_RGB24].depth = 8; | ||
8883 | + pix_fmt_info[PIX_FMT_RGB24].x_chroma_shift = 0; | ||
8884 | + pix_fmt_info[PIX_FMT_RGB24].y_chroma_shift = 0; | ||
8885 | + | ||
8886 | + pix_fmt_info[PIX_FMT_BGR24].name = g_strdup ("bgr24"); | ||
8887 | + pix_fmt_info[PIX_FMT_BGR24].nb_channels = 3; | ||
8888 | + pix_fmt_info[PIX_FMT_BGR24].color_type = FF_COLOR_RGB; | ||
8889 | + pix_fmt_info[PIX_FMT_BGR24].pixel_type = FF_PIXEL_PACKED; | ||
8890 | + pix_fmt_info[PIX_FMT_BGR24].depth = 8; | ||
8891 | + pix_fmt_info[PIX_FMT_BGR24].x_chroma_shift = 0; | ||
8892 | + pix_fmt_info[PIX_FMT_BGR24].y_chroma_shift = 0; | ||
8893 | + | ||
8894 | + pix_fmt_info[PIX_FMT_RGB32].name = g_strdup ("rgba32"); | ||
8895 | + pix_fmt_info[PIX_FMT_RGB32].nb_channels = 4; | ||
8896 | + pix_fmt_info[PIX_FMT_RGB32].is_alpha = 1; | ||
8897 | + pix_fmt_info[PIX_FMT_RGB32].color_type = FF_COLOR_RGB; | ||
8898 | + pix_fmt_info[PIX_FMT_RGB32].pixel_type = FF_PIXEL_PACKED; | ||
8899 | + pix_fmt_info[PIX_FMT_RGB32].depth = 8; | ||
8900 | + pix_fmt_info[PIX_FMT_RGB32].x_chroma_shift = 0; | ||
8901 | + pix_fmt_info[PIX_FMT_RGB32].y_chroma_shift = 0; | ||
8902 | + | ||
8903 | + pix_fmt_info[PIX_FMT_RGB565].name = g_strdup ("rgb565"); | ||
8904 | + pix_fmt_info[PIX_FMT_RGB565].nb_channels = 3; | ||
8905 | + pix_fmt_info[PIX_FMT_RGB565].color_type = FF_COLOR_RGB; | ||
8906 | + pix_fmt_info[PIX_FMT_RGB565].pixel_type = FF_PIXEL_PACKED; | ||
8907 | + pix_fmt_info[PIX_FMT_RGB565].depth = 5; | ||
8908 | + pix_fmt_info[PIX_FMT_RGB565].x_chroma_shift = 0; | ||
8909 | + pix_fmt_info[PIX_FMT_RGB565].y_chroma_shift = 0; | ||
8910 | + | ||
8911 | + pix_fmt_info[PIX_FMT_RGB555].name = g_strdup ("rgb555"); | ||
8912 | + pix_fmt_info[PIX_FMT_RGB555].nb_channels = 4; | ||
8913 | + pix_fmt_info[PIX_FMT_RGB555].is_alpha = 1; | ||
8914 | + pix_fmt_info[PIX_FMT_RGB555].color_type = FF_COLOR_RGB; | ||
8915 | + pix_fmt_info[PIX_FMT_RGB555].pixel_type = FF_PIXEL_PACKED; | ||
8916 | + pix_fmt_info[PIX_FMT_RGB555].depth = 5; | ||
8917 | + pix_fmt_info[PIX_FMT_RGB555].x_chroma_shift = 0; | ||
8918 | + pix_fmt_info[PIX_FMT_RGB555].y_chroma_shift = 0; | ||
8919 | + | ||
8920 | + /* gray / mono formats */ | ||
8921 | + pix_fmt_info[PIX_FMT_GRAY8].name = g_strdup ("gray"); | ||
8922 | + pix_fmt_info[PIX_FMT_GRAY8].nb_channels = 1; | ||
8923 | + pix_fmt_info[PIX_FMT_GRAY8].color_type = FF_COLOR_GRAY; | ||
8924 | + pix_fmt_info[PIX_FMT_GRAY8].pixel_type = FF_PIXEL_PLANAR; | ||
8925 | + pix_fmt_info[PIX_FMT_GRAY8].depth = 8; | ||
8926 | + | ||
8927 | + pix_fmt_info[PIX_FMT_MONOWHITE].name = g_strdup ("monow"); | ||
8928 | + pix_fmt_info[PIX_FMT_MONOWHITE].nb_channels = 1; | ||
8929 | + pix_fmt_info[PIX_FMT_MONOWHITE].color_type = FF_COLOR_GRAY; | ||
8930 | + pix_fmt_info[PIX_FMT_MONOWHITE].pixel_type = FF_PIXEL_PLANAR; | ||
8931 | + pix_fmt_info[PIX_FMT_MONOWHITE].depth = 1; | ||
8932 | + | ||
8933 | + pix_fmt_info[PIX_FMT_MONOBLACK].name = g_strdup ("monob"); | ||
8934 | + pix_fmt_info[PIX_FMT_MONOBLACK].nb_channels = 1; | ||
8935 | + pix_fmt_info[PIX_FMT_MONOBLACK].color_type = FF_COLOR_GRAY; | ||
8936 | + pix_fmt_info[PIX_FMT_MONOBLACK].pixel_type = FF_PIXEL_PLANAR; | ||
8937 | + pix_fmt_info[PIX_FMT_MONOBLACK].depth = 1; | ||
8938 | + | ||
8939 | + /* paletted formats */ | ||
8940 | + pix_fmt_info[PIX_FMT_PAL8].name = g_strdup ("pal8"); | ||
8941 | + pix_fmt_info[PIX_FMT_PAL8].nb_channels = 4; | ||
8942 | + pix_fmt_info[PIX_FMT_PAL8].is_alpha = 1; | ||
8943 | + pix_fmt_info[PIX_FMT_PAL8].color_type = FF_COLOR_RGB; | ||
8944 | + pix_fmt_info[PIX_FMT_PAL8].pixel_type = FF_PIXEL_PALETTE; | ||
8945 | + pix_fmt_info[PIX_FMT_PAL8].depth = 8; | ||
8946 | + | ||
8947 | + pix_fmt_info[PIX_FMT_YUVA420P].name = g_strdup ("yuva420p"); | ||
8948 | + pix_fmt_info[PIX_FMT_YUVA420P].nb_channels = 4; | ||
8949 | + pix_fmt_info[PIX_FMT_YUVA420P].is_alpha = 1; | ||
8950 | + pix_fmt_info[PIX_FMT_YUVA420P].color_type = FF_COLOR_YUV; | ||
8951 | + pix_fmt_info[PIX_FMT_YUVA420P].pixel_type = FF_PIXEL_PLANAR; | ||
8952 | + pix_fmt_info[PIX_FMT_YUVA420P].depth = 8, | ||
8953 | + pix_fmt_info[PIX_FMT_YUVA420P].x_chroma_shift = 1, | ||
8954 | + pix_fmt_info[PIX_FMT_YUVA420P].y_chroma_shift = 1; | ||
8955 | +}; | ||
8956 | + | ||
8957 | +int | ||
8958 | +gst_ffmpeg_avpicture_get_size (int pix_fmt, int width, int height) | ||
8959 | +{ | ||
8960 | + AVPicture dummy_pict; | ||
8961 | + | ||
8962 | + return gst_ffmpeg_avpicture_fill (&dummy_pict, NULL, pix_fmt, width, height); | ||
8963 | +} | ||
8964 | + | ||
8965 | +#define GEN_MASK(x) ((1<<(x))-1) | ||
8966 | +#define ROUND_UP_X(v,x) (((v) + GEN_MASK(x)) & ~GEN_MASK(x)) | ||
8967 | +#define ROUND_UP_2(x) ROUND_UP_X (x, 1) | ||
8968 | +#define ROUND_UP_4(x) ROUND_UP_X (x, 2) | ||
8969 | +#define ROUND_UP_8(x) ROUND_UP_X (x, 3) | ||
8970 | +#define DIV_ROUND_UP_X(v,x) (((v) + GEN_MASK(x)) >> (x)) | ||
8971 | + | ||
8972 | +int | ||
8973 | +gst_ffmpeg_avpicture_fill (AVPicture * picture, | ||
8974 | + uint8_t * ptr, enum PixelFormat pix_fmt, int width, int height) | ||
8975 | +{ | ||
8976 | + int size, w2, h2, size2; | ||
8977 | + int stride, stride2; | ||
8978 | + PixFmtInfo *pinfo; | ||
8979 | + | ||
8980 | + pinfo = &pix_fmt_info[pix_fmt]; | ||
8981 | + | ||
8982 | + switch (pix_fmt) { | ||
8983 | + case PIX_FMT_YUV420P: | ||
8984 | + case PIX_FMT_YUV422P: | ||
8985 | + case PIX_FMT_YUV444P: | ||
8986 | + case PIX_FMT_YUV410P: | ||
8987 | + case PIX_FMT_YUV411P: | ||
8988 | + case PIX_FMT_YUVJ420P: | ||
8989 | + case PIX_FMT_YUVJ422P: | ||
8990 | + case PIX_FMT_YUVJ444P: | ||
8991 | + stride = ROUND_UP_4 (width); | ||
8992 | + h2 = ROUND_UP_X (height, pinfo->y_chroma_shift); | ||
8993 | + size = stride * h2; | ||
8994 | + w2 = DIV_ROUND_UP_X (width, pinfo->x_chroma_shift); | ||
8995 | + stride2 = ROUND_UP_4 (w2); | ||
8996 | + h2 = DIV_ROUND_UP_X (height, pinfo->y_chroma_shift); | ||
8997 | + size2 = stride2 * h2; | ||
8998 | + picture->data[0] = ptr; | ||
8999 | + picture->data[1] = picture->data[0] + size; | ||
9000 | + picture->data[2] = picture->data[1] + size2; | ||
9001 | + picture->data[3] = NULL; | ||
9002 | + picture->linesize[0] = stride; | ||
9003 | + picture->linesize[1] = stride2; | ||
9004 | + picture->linesize[2] = stride2; | ||
9005 | + picture->linesize[3] = 0; | ||
9006 | + GST_DEBUG ("planes %d %d %d", 0, size, size + size2); | ||
9007 | + GST_DEBUG ("strides %d %d %d", stride, stride2, stride2); | ||
9008 | + return size + 2 * size2; | ||
9009 | + case PIX_FMT_YUVA420P: | ||
9010 | + stride = ROUND_UP_4 (width); | ||
9011 | + h2 = ROUND_UP_X (height, pinfo->y_chroma_shift); | ||
9012 | + size = stride * h2; | ||
9013 | + w2 = DIV_ROUND_UP_X (width, pinfo->x_chroma_shift); | ||
9014 | + stride2 = ROUND_UP_4 (w2); | ||
9015 | + h2 = DIV_ROUND_UP_X (height, pinfo->y_chroma_shift); | ||
9016 | + size2 = stride2 * h2; | ||
9017 | + picture->data[0] = ptr; | ||
9018 | + picture->data[1] = picture->data[0] + size; | ||
9019 | + picture->data[2] = picture->data[1] + size2; | ||
9020 | + picture->data[3] = picture->data[2] + size2; | ||
9021 | + picture->linesize[0] = stride; | ||
9022 | + picture->linesize[1] = stride2; | ||
9023 | + picture->linesize[2] = stride2; | ||
9024 | + picture->linesize[3] = stride; | ||
9025 | + GST_DEBUG ("planes %d %d %d %d", 0, size, size + size2, size + 2 * size2); | ||
9026 | + GST_DEBUG ("strides %d %d %d %d", stride, stride2, stride2, stride); | ||
9027 | + return 2 * size + 2 * size2; | ||
9028 | + case PIX_FMT_RGB24: | ||
9029 | + case PIX_FMT_BGR24: | ||
9030 | + stride = ROUND_UP_4 (width * 3); | ||
9031 | + size = stride * height; | ||
9032 | + picture->data[0] = ptr; | ||
9033 | + picture->data[1] = NULL; | ||
9034 | + picture->data[2] = NULL; | ||
9035 | + picture->data[3] = NULL; | ||
9036 | + picture->linesize[0] = stride; | ||
9037 | + picture->linesize[1] = 0; | ||
9038 | + picture->linesize[2] = 0; | ||
9039 | + picture->linesize[3] = 0; | ||
9040 | + return size; | ||
9041 | + /*case PIX_FMT_AYUV4444: | ||
9042 | + case PIX_FMT_BGR32: | ||
9043 | + case PIX_FMT_BGRA32: | ||
9044 | + case PIX_FMT_RGB32: */ | ||
9045 | + case PIX_FMT_RGB32: | ||
9046 | + stride = width * 4; | ||
9047 | + size = stride * height; | ||
9048 | + picture->data[0] = ptr; | ||
9049 | + picture->data[1] = NULL; | ||
9050 | + picture->data[2] = NULL; | ||
9051 | + picture->data[3] = NULL; | ||
9052 | + picture->linesize[0] = stride; | ||
9053 | + picture->linesize[1] = 0; | ||
9054 | + picture->linesize[2] = 0; | ||
9055 | + picture->linesize[3] = 0; | ||
9056 | + return size; | ||
9057 | + case PIX_FMT_RGB555: | ||
9058 | + case PIX_FMT_RGB565: | ||
9059 | + case PIX_FMT_YUYV422: | ||
9060 | + case PIX_FMT_UYVY422: | ||
9061 | + stride = ROUND_UP_4 (width * 2); | ||
9062 | + size = stride * height; | ||
9063 | + picture->data[0] = ptr; | ||
9064 | + picture->data[1] = NULL; | ||
9065 | + picture->data[2] = NULL; | ||
9066 | + picture->data[3] = NULL; | ||
9067 | + picture->linesize[0] = stride; | ||
9068 | + picture->linesize[1] = 0; | ||
9069 | + picture->linesize[2] = 0; | ||
9070 | + picture->linesize[3] = 0; | ||
9071 | + return size; | ||
9072 | + case PIX_FMT_UYYVYY411: | ||
9073 | + /* FIXME, probably not the right stride */ | ||
9074 | + stride = ROUND_UP_4 (width); | ||
9075 | + size = stride * height; | ||
9076 | + picture->data[0] = ptr; | ||
9077 | + picture->data[1] = NULL; | ||
9078 | + picture->data[2] = NULL; | ||
9079 | + picture->data[3] = NULL; | ||
9080 | + picture->linesize[0] = width + width / 2; | ||
9081 | + picture->linesize[1] = 0; | ||
9082 | + picture->linesize[2] = 0; | ||
9083 | + picture->linesize[3] = 0; | ||
9084 | + return size + size / 2; | ||
9085 | + case PIX_FMT_GRAY8: | ||
9086 | + stride = ROUND_UP_4 (width); | ||
9087 | + size = stride * height; | ||
9088 | + picture->data[0] = ptr; | ||
9089 | + picture->data[1] = NULL; | ||
9090 | + picture->data[2] = NULL; | ||
9091 | + picture->data[3] = NULL; | ||
9092 | + picture->linesize[0] = stride; | ||
9093 | + picture->linesize[1] = 0; | ||
9094 | + picture->linesize[2] = 0; | ||
9095 | + picture->linesize[3] = 0; | ||
9096 | + return size; | ||
9097 | + case PIX_FMT_MONOWHITE: | ||
9098 | + case PIX_FMT_MONOBLACK: | ||
9099 | + stride = ROUND_UP_4 ((width + 7) >> 3); | ||
9100 | + size = stride * height; | ||
9101 | + picture->data[0] = ptr; | ||
9102 | + picture->data[1] = NULL; | ||
9103 | + picture->data[2] = NULL; | ||
9104 | + picture->data[3] = NULL; | ||
9105 | + picture->linesize[0] = stride; | ||
9106 | + picture->linesize[1] = 0; | ||
9107 | + picture->linesize[2] = 0; | ||
9108 | + picture->linesize[3] = 0; | ||
9109 | + return size; | ||
9110 | + case PIX_FMT_PAL8: | ||
9111 | + /* already forced to be with stride, so same result as other function */ | ||
9112 | + stride = ROUND_UP_4 (width); | ||
9113 | + size = stride * height; | ||
9114 | + picture->data[0] = ptr; | ||
9115 | + picture->data[1] = ptr + size; /* palette is stored here as 256 32 bit words */ | ||
9116 | + picture->data[2] = NULL; | ||
9117 | + picture->data[3] = NULL; | ||
9118 | + picture->linesize[0] = stride; | ||
9119 | + picture->linesize[1] = 4; | ||
9120 | + picture->linesize[2] = 0; | ||
9121 | + picture->linesize[3] = 0; | ||
9122 | + return size + 256 * 4; | ||
9123 | + default: | ||
9124 | + picture->data[0] = NULL; | ||
9125 | + picture->data[1] = NULL; | ||
9126 | + picture->data[2] = NULL; | ||
9127 | + picture->data[3] = NULL; | ||
9128 | + return -1; | ||
9129 | + } | ||
9130 | + | ||
9131 | + return 0; | ||
9132 | +} | ||
9133 | + | ||
9134 | +/* Create a GstBuffer of the requested size and caps. | ||
9135 | + * The memory will be allocated by ffmpeg, making sure it's properly aligned | ||
9136 | + * for any processing. */ | ||
9137 | + | ||
9138 | +GstBuffer * | ||
9139 | +new_aligned_buffer (gint size, GstCaps * caps) | ||
9140 | +{ | ||
9141 | + GstBuffer *buf; | ||
9142 | + | ||
9143 | + buf = gst_buffer_new (); | ||
9144 | + GST_BUFFER_DATA (buf) = GST_BUFFER_MALLOCDATA (buf) = av_malloc (size); | ||
9145 | + GST_BUFFER_SIZE (buf) = size; | ||
9146 | + GST_BUFFER_FREE_FUNC (buf) = av_free; | ||
9147 | + if (caps) | ||
9148 | + gst_buffer_set_caps (buf, caps); | ||
9149 | + | ||
9150 | + return buf; | ||
9151 | +} | ||
9152 | + | ||
9153 | +int | ||
9154 | +gst_ffmpeg_auto_max_threads (void) | ||
9155 | +{ | ||
9156 | + static gsize n_threads = 0; | ||
9157 | + if (g_once_init_enter (&n_threads)) { | ||
9158 | + int n = 1; | ||
9159 | +#if defined(_WIN32) | ||
9160 | + { | ||
9161 | + const char *s = getenv ("NUMBER_OF_PROCESSORS"); | ||
9162 | + if (s) { | ||
9163 | + n = atoi (s); | ||
9164 | + } | ||
9165 | + } | ||
9166 | +#elif defined(__APPLE__) | ||
9167 | + { | ||
9168 | + int mib[] = { CTL_HW, HW_NCPU }; | ||
9169 | + size_t dataSize = sizeof (int); | ||
9170 | + | ||
9171 | + if (sysctl (mib, 2, &n_threads, &dataSize, NULL, 0)) { | ||
9172 | + n = 1; | ||
9173 | + } | ||
9174 | + } | ||
9175 | +#else | ||
9176 | + n = sysconf (_SC_NPROCESSORS_CONF); | ||
9177 | +#endif | ||
9178 | + if (n < 1) | ||
9179 | + n = 1; | ||
9180 | + | ||
9181 | + g_once_init_leave (&n_threads, n); | ||
9182 | + } | ||
9183 | + | ||
9184 | + return (int) (n_threads); | ||
9185 | +} | ||
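The gst_ffmpeg_avpicture_fill() code in the listing above packs the planes back to back with every stride rounded up to a multiple of 4. A worked example (plain C, no libav calls, values chosen purely for illustration) of the layout it produces for a 320x240 PIX_FMT_YUV420P frame:

/* Worked example: plane offsets and strides for 320x240 YUV420P,
 * following the ROUND_UP_4 / chroma-shift logic shown above. */
#include <stdio.h>

#define ROUND_UP_4(x)     (((x) + 3) & ~3)
#define DIV_ROUND_UP_2(x) (((x) + 1) >> 1)

int
main (void)
{
  int width = 320, height = 240;
  int y_stride = ROUND_UP_4 (width);                    /* 320 */
  int y_size = y_stride * height;                       /* 76800 */
  int c_stride = ROUND_UP_4 (DIV_ROUND_UP_2 (width));   /* 160 */
  int c_size = c_stride * DIV_ROUND_UP_2 (height);      /* 19200 */

  printf ("Y plane: offset 0, stride %d\n", y_stride);
  printf ("U plane: offset %d, stride %d\n", y_size, c_stride);
  printf ("V plane: offset %d, stride %d\n", y_size + c_size, c_stride);
  printf ("total:   %d bytes\n", y_size + 2 * c_size);  /* 115200 */
  return 0;
}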
9186 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegutils.h gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegutils.h | ||
9187 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegutils.h 2011-11-02 14:04:05.000000000 +0100 | ||
9188 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegutils.h 2014-08-08 15:34:04.007874626 +0200 | ||
9189 | @@ -23,6 +23,7 @@ | ||
9190 | #ifdef HAVE_FFMPEG_UNINSTALLED | ||
9191 | #include <avcodec.h> | ||
9192 | #else | ||
9193 | +#include <libavutil/mathematics.h> | ||
9194 | #include <libavcodec/avcodec.h> | ||
9195 | #endif | ||
9196 | #include <gst/gst.h> | ||
9197 | @@ -87,7 +88,7 @@ | ||
9198 | gst_ffmpeg_get_codecid_longname (enum CodecID codec_id); | ||
9199 | |||
9200 | gint | ||
9201 | -av_smp_format_depth(enum SampleFormat smp_fmt); | ||
9202 | +av_smp_format_depth(enum AVSampleFormat smp_fmt); | ||
9203 | |||
9204 | GstBuffer * | ||
9205 | new_aligned_buffer (gint size, GstCaps * caps); | ||
9206 | diff -uNr gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegutils.h.orig gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegutils.h.orig | ||
9207 | --- gst-ffmpeg-0.10.13.orig/ext/ffmpeg/gstffmpegutils.h.orig 1970-01-01 01:00:00.000000000 +0100 | ||
9208 | +++ gst-ffmpeg-0.10.13/ext/ffmpeg/gstffmpegutils.h.orig 2014-08-08 15:26:38.473858652 +0200 | ||
9209 | @@ -0,0 +1,95 @@ | ||
9210 | +/* GStreamer | ||
9211 | + * Copyright (C) <2009> Edward Hervey <bilboed@bilboed.com> | ||
9212 | + * | ||
9213 | + * This library is free software; you can redistribute it and/or | ||
9214 | + * modify it under the terms of the GNU Library General Public | ||
9215 | + * License as published by the Free Software Foundation; either | ||
9216 | + * version 2 of the License, or (at your option) any later version. | ||
9217 | + * | ||
9218 | + * This library is distributed in the hope that it will be useful, | ||
9219 | + * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
9220 | + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
9221 | + * Library General Public License for more details. | ||
9222 | + * | ||
9223 | + * You should have received a copy of the GNU Library General Public | ||
9224 | + * License along with this library; if not, write to the | ||
9225 | + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, | ||
9226 | + * Boston, MA 02111-1307, USA. | ||
9227 | + */ | ||
9228 | + | ||
9229 | +#ifndef __GST_FFMPEG_UTILS_H__ | ||
9230 | +#define __GST_FFMPEG_UTILS_H__ | ||
9231 | + | ||
9232 | +#ifdef HAVE_FFMPEG_UNINSTALLED | ||
9233 | +#include <avcodec.h> | ||
9234 | +#else | ||
9235 | +#include <libavcodec/avcodec.h> | ||
9236 | +#endif | ||
9237 | +#include <gst/gst.h> | ||
9238 | + | ||
9239 | +/* | ||
9240 | + *Get the size of an picture | ||
9241 | + */ | ||
9242 | +int | ||
9243 | +gst_ffmpeg_avpicture_get_size (int pix_fmt, int width, int height); | ||
9244 | + | ||
9245 | +/* | ||
9246 | + * Fill in pointers in an AVPicture, aligned by 4 (required by X). | ||
9247 | + */ | ||
9248 | + | ||
9249 | +int | ||
9250 | +gst_ffmpeg_avpicture_fill (AVPicture * picture, | ||
9251 | + uint8_t * ptr, | ||
9252 | + enum PixelFormat pix_fmt, | ||
9253 | + int width, | ||
9254 | + int height); | ||
9255 | + | ||
9256 | +/* | ||
9257 | + * Convert from/to a GStreamer <-> FFMpeg timestamp. | ||
9258 | + */ | ||
9259 | +static inline guint64 | ||
9260 | +gst_ffmpeg_time_ff_to_gst (gint64 pts, AVRational base) | ||
9261 | +{ | ||
9262 | + guint64 out; | ||
9263 | + | ||
9264 | + if (pts == AV_NOPTS_VALUE){ | ||
9265 | + out = GST_CLOCK_TIME_NONE; | ||
9266 | + } else { | ||
9267 | + AVRational bq = { 1, GST_SECOND }; | ||
9268 | + out = av_rescale_q (pts, base, bq); | ||
9269 | + } | ||
9270 | + | ||
9271 | + return out; | ||
9272 | +} | ||
9273 | + | ||
9274 | +static inline gint64 | ||
9275 | +gst_ffmpeg_time_gst_to_ff (guint64 time, AVRational base) | ||
9276 | +{ | ||
9277 | + gint64 out; | ||
9278 | + | ||
9279 | + if (!GST_CLOCK_TIME_IS_VALID (time) || base.num == 0) { | ||
9280 | + out = AV_NOPTS_VALUE; | ||
9281 | + } else { | ||
9282 | + AVRational bq = { 1, GST_SECOND }; | ||
9283 | + out = av_rescale_q (time, bq, base); | ||
9284 | + } | ||
9285 | + | ||
9286 | + return out; | ||
9287 | +} | ||
9288 | + | ||
9289 | +void | ||
9290 | +gst_ffmpeg_init_pix_fmt_info(void); | ||
9291 | + | ||
9292 | +int | ||
9293 | +gst_ffmpeg_auto_max_threads(void); | ||
9294 | + | ||
9295 | +G_CONST_RETURN gchar * | ||
9296 | +gst_ffmpeg_get_codecid_longname (enum CodecID codec_id); | ||
9297 | + | ||
9298 | +gint | ||
9299 | +av_smp_format_depth(enum AVSampleFormat smp_fmt); | ||
9300 | + | ||
9301 | +GstBuffer * | ||
9302 | +new_aligned_buffer (gint size, GstCaps * caps); | ||
9303 | + | ||
9304 | +#endif /* __GST_FFMPEG_UTILS_H__ */ | ||
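The inline time helpers in gstffmpegutils.h above reduce to av_rescale_q() between the stream time base and GStreamer's nanosecond clock, which is why the patch adds the libavutil/mathematics.h include. A minimal sketch, assuming a 1/90000 stream time base as an example:

/* Sketch: converting one stream-time-base tick count to nanoseconds,
 * the same rescale the gst_ffmpeg_time_* helpers perform. */
#include <stdio.h>
#include <inttypes.h>
#include <libavutil/mathematics.h>

int
main (void)
{
  AVRational stream_tb = { 1, 90000 };      /* e.g. an MPEG-TS time base */
  AVRational gst_tb = { 1, 1000000000 };    /* GStreamer's GST_SECOND base */
  int64_t pts = 90000;                      /* one second in stream units */

  printf ("%" PRId64 " ns\n", av_rescale_q (pts, stream_tb, gst_tb));
  return 0;
}

Running it prints 1000000000 ns, i.e. exactly GST_SECOND.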
diff --git a/meta/recipes-multimedia/gstreamer/gst-ffmpeg_0.10.13.bb b/meta/recipes-multimedia/gstreamer/gst-ffmpeg_0.10.13.bb
index e26b267f32..bbe3308b03 100644
--- a/meta/recipes-multimedia/gstreamer/gst-ffmpeg_0.10.13.bb
+++ b/meta/recipes-multimedia/gstreamer/gst-ffmpeg_0.10.13.bb
@@ -53,6 +53,7 @@ SRC_URI = "http://gstreamer.freedesktop.org/src/${BPN}/${BPN}-${PV}.tar.bz2 \
            file://0001-qdm2-check-array-index-before-use-fix-out-of-array-a.patch \
            file://0001-lavf-compute-probe-buffer-size-more-reliably.patch \
            file://0001-ffserver-set-oformat.patch \
+           ${@bb.utils.contains('PACKAGECONFIG', 'libav9', 'file://libav-9.patch', '', d)} \
            "
 
 SRC_URI[md5sum] = "7f5beacaf1312db2db30a026b36888c4"
@@ -79,6 +80,7 @@ EXTRA_OECONF = "${FFMPEG_EXTRA_CONFIGURE_COMMON}"
 PACKAGECONFIG ??= "external-libav"
 PACKAGECONFIG[external-libav] = "--with-system-ffmpeg,,libav"
 PACKAGECONFIG[orc] = "--enable-orc,--disable-orc,orc"
+PACKAGECONFIG[libav9] = ",,,"
 
 FILES_${PN} += "${libdir}/gstreamer-0.10/*.so"
 FILES_${PN}-dbg += "${libdir}/gstreamer-0.10/.debug"