1 | /*M/// |
2 | // |
3 | // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. |
4 | // |
5 | // By downloading, copying, installing or using the software you agree to this license. |
6 | // If you do not agree to this license, do not download, install, |
7 | // copy or use the software. |
8 | // |
9 | // |
10 | // Intel License Agreement |
11 | // For Open Source Computer Vision Library |
12 | // |
13 | // Copyright (C) 2008, Nils Hasler, all rights reserved. |
14 | // Third party copyrights are property of their respective owners. |
15 | // |
16 | // Redistribution and use in source and binary forms, with or without modification, |
17 | // are permitted provided that the following conditions are met: |
18 | // |
19 | // * Redistribution's of source code must retain the above copyright notice, |
20 | // this list of conditions and the following disclaimer. |
21 | // |
22 | // * Redistribution's in binary form must reproduce the above copyright notice, |
23 | // this list of conditions and the following disclaimer in the documentation |
24 | // and/or other materials provided with the distribution. |
25 | // |
26 | // * The name of Intel Corporation may not be used to endorse or promote products |
27 | // derived from this software without specific prior written permission. |
28 | // |
29 | // This software is provided by the copyright holders and contributors "as is" and |
30 | // any express or implied warranties, including, but not limited to, the implied |
31 | // warranties of merchantability and fitness for a particular purpose are disclaimed. |
32 | // In no event shall the Intel Corporation or contributors be liable for any direct, |
33 | // indirect, incidental, special, exemplary, or consequential damages |
34 | // (including, but not limited to, procurement of substitute goods or services; |
35 | // loss of use, data, or profits; or business interruption) however caused |
36 | // and on any theory of liability, whether in contract, strict liability, |
37 | // or tort (including negligence or otherwise) arising in any way out of |
38 | // the use of this software, even if advised of the possibility of such damage. |
39 | // |
40 | //M*/ |
41 | |
42 | // Author: Nils Hasler <hasler@mpi-inf.mpg.de> |
43 | // |
44 | // Max-Planck-Institut Informatik |
45 | // |
46 | // this implementation was inspired by gnash's gstreamer interface |
47 | |
48 | // |
49 | // use GStreamer to read a video |
50 | // |
51 | |
52 | #include "_highgui.h" |
53 | #include <unistd.h> |
54 | #include <string.h> |
55 | #include <gst/gst.h> |
56 | #ifdef HAVE_GSTREAMER_APP |
57 | #include <gst/app/gstappsink.h> |
58 | #else |
59 | #include "gstappsink.h" |
60 | #endif |
61 | |
#ifdef NDEBUG
// release builds: warnings compile away to nothing
#define CV_WARN(message)
#else
// debug builds: print the warning with its source location to stderr
// (fixed garbled "/n" -> "\n" in the format string)
#define CV_WARN(message) fprintf(stderr, "warning: %s (%s:%d)\n", message, __FILE__, __LINE__)
#endif
67 | |
// set once gst_init() has run; GStreamer must be initialised exactly once
// per process before any other GStreamer call
static bool isInited = false;

// Capture state for one GStreamer-backed video source.
typedef struct CvCapture_GStreamer
{
    /// method call table
    int type; // capture source kind: one of [1394, v4l2, v4l, file]

    GstElement *pipeline;   // top-level pipeline owning the elements below
    GstElement *source;     // raw source element (filesrc / v4l2src / ...)
    GstElement *decodebin;  // auto-plugging decoder
    GstElement *colour;     // colourspace converter feeding the sink
    GstElement *appsink;    // application sink the frames are pulled from

    GstBuffer *buffer;      // last grabbed, not-yet-decoded buffer (owned)

    GstCaps *caps; // filter caps inserted right after the source

    IplImage *frame;        // decoded output frame, reused between calls (owned)
} CvCapture_GStreamer;
87 | |
88 | static void icvClose_GStreamer(CvCapture_GStreamer *cap) |
89 | { |
90 | if(cap->pipeline) { |
91 | gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_NULL); |
92 | gst_object_unref(GST_OBJECT(cap->pipeline)); |
93 | } |
94 | |
95 | if(cap->buffer) |
96 | gst_buffer_unref(cap->buffer); |
97 | |
98 | if(cap->frame) |
99 | cvReleaseImage(&cap->frame); |
100 | |
101 | if(cap->caps) |
102 | gst_caps_unref(cap->caps); |
103 | } |
104 | |
105 | static void icvHandleMessage(CvCapture_GStreamer *cap) |
106 | { |
107 | GstBus* bus = gst_element_get_bus(cap->pipeline); |
108 | |
109 | while(gst_bus_have_pending(bus)) { |
110 | GstMessage* msg = gst_bus_pop(bus); |
111 | |
112 | // printf("Got %s message/n", GST_MESSAGE_TYPE_NAME(msg)); |
113 | |
114 | switch (GST_MESSAGE_TYPE (msg)) { |
115 | case GST_MESSAGE_STATE_CHANGED: |
116 | GstState oldstate, newstate, pendstate; |
117 | gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate); |
118 | // printf("state changed from %d to %d (%d)/n", oldstate, newstate, pendstate); |
119 | break; |
120 | case GST_MESSAGE_ERROR: { |
121 | GError *err; |
122 | gchar *debug; |
123 | gst_message_parse_error(msg, &err, &debug); |
124 | |
125 | fprintf(stderr, "GStreamer Plugin: Embedded video playback halted; module %s reported: %s/n", |
126 | gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message); |
127 | |
128 | g_error_free(err); |
129 | g_free(debug); |
130 | |
131 | gst_element_set_state(cap->pipeline, GST_STATE_NULL); |
132 | |
133 | break; |
134 | } |
135 | case GST_MESSAGE_EOS: |
136 | // CV_WARN("NetStream has reached the end of the stream."); |
137 | |
138 | break; |
139 | default: |
140 | // CV_WARN("unhandled message/n"); |
141 | break; |
142 | } |
143 | |
144 | gst_message_unref(msg); |
145 | } |
146 | |
147 | gst_object_unref(GST_OBJECT(bus)); |
148 | } |
149 | |
//
// start the pipeline, grab a buffer, and pause again
//
// on success the (still packed) buffer is stored in cap->buffer for
// icvRetrieveFrame_GStreamer() to decode; returns 1 on success, 0 on
// EOS or error
//
static int icvGrabFrame_GStreamer(CvCapture_GStreamer *cap)
{
    if(!cap->pipeline)
        return 0;

    // nothing more to grab once the sink reports end-of-stream
    if(gst_app_sink_is_eos(GST_APP_SINK(cap->appsink))) {
        //printf("end of stream\n");
        return 0;
    }

    // discard a previously grabbed but never retrieved buffer
    if(cap->buffer)
        gst_buffer_unref(cap->buffer);

    icvHandleMessage(cap);

#ifndef HAVE_GSTREAMER_APP
    // bundled appsink: take an already queued buffer without touching the
    // pipeline state
    if(gst_app_sink_get_queue_length(GST_APP_SINK(cap->appsink)))
    {
        // printf("peeking buffer, %d buffers in queue\n",
        //        gst_app_sink_get_queue_length(GST_APP_SINK(cap->appsink)));
        cap->buffer = gst_app_sink_peek_buffer(GST_APP_SINK(cap->appsink));
    }
    else
#endif
    {
        // nothing queued: play the pipeline just long enough to pull one
        // buffer, then pause it again so decoding happens on demand

        if(gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_PLAYING) ==
           GST_STATE_CHANGE_FAILURE) {
            icvHandleMessage(cap);
            return 0;
        }

        // historical debugging / sanity checks, kept for reference:
        // icvHandleMessage(cap);
        //
        // // check whether stream contains an acceptable video stream
        // GstPad *sinkpad = gst_element_get_pad(cap->colour, "sink");
        // if(!GST_PAD_IS_LINKED(sinkpad)) {
        //     gst_object_unref(sinkpad);
        //     fprintf(stderr, "GStreamer: Pipeline is NOT ready. Format unknown?\n");
        //     return 0;
        // }
        // gst_object_unref(sinkpad);

        // if(!gst_app_sink_pull_preroll(GST_APP_SINK(cap->appsink))) {
        //     printf("no preroll\n");
        //     return 0;
        // }

        // blocks until a buffer is available or the stream ends
        cap->buffer = gst_app_sink_pull_buffer(GST_APP_SINK(cap->appsink));

        if(gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_PAUSED) ==
           GST_STATE_CHANGE_FAILURE) {
            icvHandleMessage(cap);
            return 0;
        }
    }

    if(!cap->buffer)
        return 0;

    return 1;
}
226 | |
227 | // |
228 | // decode buffer |
229 | // |
230 | static IplImage *icvRetrieveFrame_GStreamer(CvCapture_GStreamer *cap, int) |
231 | { |
232 | if(!cap->buffer) |
233 | return 0; |
234 | |
235 | // printf("getting buffercaps/n"); |
236 | |
237 | GstCaps* caps = gst_buffer_get_caps(cap->buffer); |
238 | |
239 | assert(gst_caps_get_size(caps) == 1); |
240 | |
241 | GstStructure* structure = gst_caps_get_structure(caps, 0); |
242 | |
243 | gint bpp, endianness, redmask, greenmask, bluemask; |
244 | |
245 | if(!gst_structure_get_int(structure, "bpp", &bpp) || |
246 | !gst_structure_get_int(structure, "endianness", &endianness) || |
247 | !gst_structure_get_int(structure, "red_mask", &redmask) || |
248 | !gst_structure_get_int(structure, "green_mask", &greenmask) || |
249 | !gst_structure_get_int(structure, "blue_mask", &bluemask)) { |
250 | printf("missing essential information in buffer caps, %s/n", gst_caps_to_string(caps)); |
251 | return 0; |
252 | } |
253 | |
254 | //printf("buffer has %d bpp, endianness %d, rgb %x %x %x, %s/n", bpp, endianness, redmask, greenmask, bluemask, gst_caps_to_string(caps)); |
255 | |
256 | if(!redmask || !greenmask || !bluemask) |
257 | return 0; |
258 | |
259 | if(!cap->frame) { |
260 | gint height, width; |
261 | |
262 | if(!gst_structure_get_int(structure, "width", &width) || |
263 | !gst_structure_get_int(structure, "height", &height)) |
264 | return 0; |
265 | |
266 | // printf("creating frame %dx%d/n", width, height); |
267 | |
268 | cap->frame = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3); |
269 | } |
270 | |
271 | gst_caps_unref(caps); |
272 | |
273 | unsigned char *data = GST_BUFFER_DATA(cap->buffer); |
274 | |
275 | //printf("generating shifts/n"); |
276 | |
277 | IplImage *frame = cap->frame; |
278 | unsigned nbyte = bpp >> 3; |
279 | unsigned redshift, blueshift, greenshift; |
280 | unsigned mask = redmask; |
281 | for(redshift = 0, mask = redmask; (mask & 1) == 0; mask >>= 1, redshift++) |
282 | ; |
283 | for(greenshift = 0, mask = greenmask; (mask & 1) == 0; mask >>= 1, greenshift++) |
284 | ; |
285 | for(blueshift = 0, mask = bluemask; (mask & 1) == 0; mask >>= 1, blueshift++) |
286 | ; |
287 | |
288 | //printf("shifts: %u %u %u/n", redshift, greenshift, blueshift); |
289 | |
290 | for(int r = 0; r < frame->height; r++) { |
291 | for(int c = 0; c < frame->width; c++, data += nbyte) { |
292 | int at = r * frame->widthStep + c * 3; |
293 | frame->imageData[at] = ((*((gint *)data)) & redmask) >> redshift; |
294 | frame->imageData[at+1] = ((*((gint *)data)) & greenmask) >> greenshift; |
295 | frame->imageData[at+2] = ((*((gint *)data)) & bluemask) >> blueshift; |
296 | } |
297 | } |
298 | |
299 | // printf("converted buffer/n"); |
300 | |
301 | gst_buffer_unref(cap->buffer); |
302 | cap->buffer = 0; |
303 | |
304 | return cap->frame; |
305 | } |
306 | |
307 | static double icvGetProperty_GStreamer(CvCapture_GStreamer *cap, int id) |
308 | { |
309 | GstFormat format; |
310 | //GstQuery q; |
311 | gint64 value; |
312 | |
313 | if(!cap->pipeline) { |
314 | CV_WARN("GStreamer: no pipeline"); |
315 | return 0; |
316 | } |
317 | |
318 | switch(id) { |
319 | case CV_CAP_PROP_POS_MSEC: |
320 | format = GST_FORMAT_TIME; |
321 | if(!gst_element_query_position(cap->pipeline, &format, &value)) { |
322 | CV_WARN("GStreamer: unable to query position of stream"); |
323 | return 0; |
324 | } |
325 | return value * 1e-6; // nano seconds to milli seconds |
326 | case CV_CAP_PROP_POS_FRAMES: |
327 | format = GST_FORMAT_DEFAULT; |
328 | if(!gst_element_query_position(cap->pipeline, &format, &value)) { |
329 | CV_WARN("GStreamer: unable to query position of stream"); |
330 | return 0; |
331 | } |
332 | return value; |
333 | case CV_CAP_PROP_POS_AVI_RATIO: |
334 | format = GST_FORMAT_PERCENT; |
335 | if(!gst_element_query_position(cap->pipeline, &format, &value)) { |
336 | CV_WARN("GStreamer: unable to query position of stream"); |
337 | return 0; |
338 | } |
339 | // printf("value %llu %llu %g/n", value, GST_FORMAT_PERCENT_MAX, ((double) value) / GST_FORMAT_PERCENT_MAX); |
340 | return ((double) value) / GST_FORMAT_PERCENT_MAX; |
341 | case CV_CAP_PROP_FRAME_WIDTH: |
342 | case CV_CAP_PROP_FRAME_HEIGHT: |
343 | case CV_CAP_PROP_FPS: |
344 | case CV_CAP_PROP_FOURCC: |
345 | break; |
346 | case CV_CAP_PROP_FRAME_COUNT: |
347 | format = GST_FORMAT_DEFAULT; |
348 | if(!gst_element_query_duration(cap->pipeline, &format, &value)) { |
349 | CV_WARN("GStreamer: unable to query position of stream"); |
350 | return 0; |
351 | } |
352 | return value; |
353 | case CV_CAP_PROP_FORMAT: |
354 | case CV_CAP_PROP_MODE: |
355 | case CV_CAP_PROP_BRIGHTNESS: |
356 | case CV_CAP_PROP_CONTRAST: |
357 | case CV_CAP_PROP_SATURATION: |
358 | case CV_CAP_PROP_HUE: |
359 | case CV_CAP_PROP_GAIN: |
360 | case CV_CAP_PROP_CONVERT_RGB: |
361 | break; |
362 | default: |
363 | CV_WARN("GStreamer: unhandled property"); |
364 | break; |
365 | } |
366 | return 0; |
367 | } |
368 | |
369 | static void icvRestartPipeline(CvCapture_GStreamer *cap) |
370 | { |
371 | CV_FUNCNAME("icvRestartPipeline"); |
372 | |
373 | __BEGIN__; |
374 | |
375 | printf("restarting pipeline, going to ready/n"); |
376 | |
377 | if(gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_READY) == |
378 | GST_STATE_CHANGE_FAILURE) { |
379 | CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline/n"); |
380 | return; |
381 | } |
382 | |
383 | printf("ready, relinking/n"); |
384 | |
385 | gst_element_unlink(cap->source, cap->decodebin); |
386 | printf("filtering with %s/n", gst_caps_to_string(cap->caps)); |
387 | gst_element_link_filtered(cap->source, cap->decodebin, cap->caps); |
388 | |
389 | printf("relinked, pausing/n"); |
390 | |
391 | if(gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_PAUSED) == |
392 | GST_STATE_CHANGE_FAILURE) { |
393 | CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline/n"); |
394 | return; |
395 | } |
396 | |
397 | printf("state now paused/n"); |
398 | |
399 | __END__; |
400 | } |
401 | |
402 | static void icvSetFilter(CvCapture_GStreamer *cap, const char *property, int type, int v1, int v2) |
403 | { |
404 | printf("setting cap %p %s %d %d %d/n", cap->caps, property, type, v1, v2); |
405 | |
406 | if(!cap->caps) { |
407 | if(type == G_TYPE_INT) |
408 | cap->caps = gst_caps_new_simple("video/x-raw-rgb", property, type, v1, NULL); |
409 | else |
410 | cap->caps = gst_caps_new_simple("video/x-raw-rgb", property, type, v1, v2, NULL); |
411 | } else { |
412 | printf("caps before setting %s/n", gst_caps_to_string(cap->caps)); |
413 | if(type == G_TYPE_INT) |
414 | gst_caps_set_simple(cap->caps, "video/x-raw-rgb", property, type, v1, NULL); |
415 | else |
416 | gst_caps_set_simple(cap->caps, "video/x-raw-rgb", property, type, v1, v2, NULL); |
417 | } |
418 | |
419 | icvRestartPipeline(cap); |
420 | } |
421 | |
422 | static void icvRemoveFilter(CvCapture_GStreamer *cap, const char *filter) |
423 | { |
424 | if(!cap->caps) |
425 | return; |
426 | |
427 | GstStructure *s = gst_caps_get_structure(cap->caps, 0); |
428 | gst_structure_remove_field(s, filter); |
429 | |
430 | icvRestartPipeline(cap); |
431 | } |
432 | |
433 | static int icvSetProperty_GStreamer(CvCapture_GStreamer *cap, int id, double value) |
434 | { |
435 | GstFormat format; |
436 | GstSeekFlags flags; |
437 | |
438 | if(!cap->pipeline) { |
439 | CV_WARN("GStreamer: no pipeline"); |
440 | return 0; |
441 | } |
442 | |
443 | switch(id) { |
444 | case CV_CAP_PROP_POS_MSEC: |
445 | format = GST_FORMAT_TIME; |
446 | flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE); |
447 | if(!gst_element_seek_simple(GST_ELEMENT(cap->pipeline), format, |
448 | flags, (gint64) (value * GST_MSECOND))) { |
449 | CV_WARN("GStreamer: unable to seek"); |
450 | } |
451 | break; |
452 | case CV_CAP_PROP_POS_FRAMES: |
453 | format = GST_FORMAT_DEFAULT; |
454 | flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE); |
455 | if(!gst_element_seek_simple(GST_ELEMENT(cap->pipeline), format, |
456 | flags, (gint64) value)) { |
457 | CV_WARN("GStreamer: unable to seek"); |
458 | } |
459 | break; |
460 | case CV_CAP_PROP_POS_AVI_RATIO: |
461 | format = GST_FORMAT_PERCENT; |
462 | flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE); |
463 | if(!gst_element_seek_simple(GST_ELEMENT(cap->pipeline), format, |
464 | flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) { |
465 | CV_WARN("GStreamer: unable to seek"); |
466 | } |
467 | break; |
468 | case CV_CAP_PROP_FRAME_WIDTH: |
469 | if(value > 0) |
470 | icvSetFilter(cap, "width", G_TYPE_INT, (int) value, 0); |
471 | else |
472 | icvRemoveFilter(cap, "width"); |
473 | break; |
474 | case CV_CAP_PROP_FRAME_HEIGHT: |
475 | if(value > 0) |
476 | icvSetFilter(cap, "height", G_TYPE_INT, (int) value, 0); |
477 | else |
478 | icvRemoveFilter(cap, "height"); |
479 | break; |
480 | case CV_CAP_PROP_FPS: |
481 | if(value > 0) { |
482 | int num, denom; |
483 | num = (int) value; |
484 | if(value != num) { // FIXME this supports only fractions x/1 and x/2 |
485 | num = (int) (value * 2); |
486 | denom = 2; |
487 | } else |
488 | denom = 1; |
489 | |
490 | icvSetFilter(cap, "framerate", GST_TYPE_FRACTION, num, denom); |
491 | } else |
492 | icvRemoveFilter(cap, "framerate"); |
493 | break; |
494 | case CV_CAP_PROP_FOURCC: |
495 | case CV_CAP_PROP_FRAME_COUNT: |
496 | case CV_CAP_PROP_FORMAT: |
497 | case CV_CAP_PROP_MODE: |
498 | case CV_CAP_PROP_BRIGHTNESS: |
499 | case CV_CAP_PROP_CONTRAST: |
500 | case CV_CAP_PROP_SATURATION: |
501 | case CV_CAP_PROP_HUE: |
502 | case CV_CAP_PROP_GAIN: |
503 | case CV_CAP_PROP_CONVERT_RGB: |
504 | break; |
505 | default: |
506 | CV_WARN("GStreamer: unhandled property"); |
507 | } |
508 | return 0; |
509 | } |
510 | |
511 | // |
512 | // connect decodebin's dynamically created source pads to colourconverter |
513 | // |
514 | static void icvNewPad(GstElement *decodebin, GstPad *pad, gboolean last, gpointer data) |
515 | { |
516 | GstElement *sink = GST_ELEMENT(data); |
517 | GstStructure *str; |
518 | GstPad *sinkpad; |
519 | GstCaps *caps; |
520 | |
521 | /* link only once */ |
522 | sinkpad = gst_element_get_pad(sink, "sink"); |
523 | |
524 | if(GST_PAD_IS_LINKED(sinkpad)) { |
525 | g_print("sink is already linked/n"); |
526 | g_object_unref(sinkpad); |
527 | return; |
528 | } |
529 | |
530 | /* check media type */ |
531 | caps = gst_pad_get_caps(pad); |
532 | str = gst_caps_get_structure(caps, 0); |
533 | const char *structname = gst_structure_get_name(str); |
534 | // g_print("new pad %s/n", structname); |
535 | if(!g_strrstr(structname, "video")) { |
536 | gst_caps_unref(caps); |
537 | gst_object_unref(sinkpad); |
538 | return; |
539 | } |
540 | printf("linking pad %s/n", structname); |
541 | |
542 | /* link'n'play */ |
543 | gst_pad_link (pad, sinkpad); |
544 | |
545 | gst_caps_unref(caps); |
546 | gst_object_unref(sinkpad); |
547 | } |
548 | |
549 | static CvCapture_GStreamer * icvCreateCapture_GStreamer(int type, const char *filename) |
550 | { |
551 | CvCapture_GStreamer *capture = 0; |
552 | CV_FUNCNAME("cvCaptureFromCAM_GStreamer"); |
553 | |
554 | __BEGIN__; |
555 | |
556 | // teststreamer(filename); |
557 | |
558 | // return 0; |
559 | |
560 | if(!isInited) { |
561 | // printf("gst_init/n"); |
562 | gst_init (NULL, NULL); |
563 | |
564 | // according to the documentation this is the way to register a plugin now |
565 | // unfortunately, it has not propagated into my distribution yet... |
566 | // gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR, |
567 | // "opencv-appsink", "Element application sink", |
568 | // "0.1", appsink_plugin_init, "LGPL", "highgui", "opencv", |
569 | // "http://opencvlibrary.sourceforge.net/"); |
570 | |
571 | isInited = true; |
572 | } |
573 | |
574 | const char *sourcetypes[] = {"dv1394src", "v4lsrc", "v4l2src", "filesrc"}; |
575 | //printf("entered capturecreator %s/n", sourcetypes[type]); |
576 | GstElement *source; |
577 | if (type == CV_CAP_GSTREAMER_FILE && gst_uri_is_valid(filename)) { |
578 | source = gst_element_make_from_uri(GST_URI_SRC, filename, NULL); |
579 | } |
580 | else |
581 | source = gst_element_factory_make(sourcetypes[type], NULL); |
582 | if(!source) |
583 | return 0; |
584 | |
585 | if(type ==CV_CAP_GSTREAMER_FILE && !gst_uri_is_valid(filename)) |
586 | g_object_set(G_OBJECT(source), "location", filename, NULL); |
587 | |
588 | GstElement *colour = gst_element_factory_make("ffmpegcolorspace", NULL); |
589 | |
590 | #ifdef HAVE_GSTREAMER_APP |
591 | GstElement *sink = gst_element_factory_make("appsink", NULL); |
592 | #else |
593 | GstElement *sink = gst_element_factory_make("opencv-appsink", NULL); |
594 | #endif |
595 | GstCaps *caps = gst_caps_new_simple("video/x-raw-rgb", NULL); |
596 | gst_app_sink_set_caps(GST_APP_SINK(sink), caps); |
597 | // gst_caps_unref(caps); |
598 | gst_base_sink_set_sync(GST_BASE_SINK(sink), false); |
599 | // g_signal_connect(sink, "new-buffer", G_CALLBACK(newbuffer), NULL); |
600 | |
601 | GstElement *decodebin = gst_element_factory_make("decodebin", NULL); |
602 | g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(icvNewPad), colour); |
603 | |
604 | GstElement *pipeline = gst_pipeline_new (NULL); |
605 | |
606 | gst_bin_add_many(GST_BIN(pipeline), source, decodebin, colour, sink, NULL); |
607 | |
608 | // printf("added many/n"); |
609 | |
610 | switch(type) { |
611 | case CV_CAP_GSTREAMER_V4L2: // default to 640x480, 30 fps |
612 | caps = gst_caps_new_simple("video/x-raw-rgb", |
613 | "width", G_TYPE_INT, 640, |
614 | "height", G_TYPE_INT, 480, |
615 | "framerate", GST_TYPE_FRACTION, 30, 1, |
616 | NULL); |
617 | if(!gst_element_link_filtered(source, decodebin, caps)) { |
618 | CV_ERROR(CV_StsError, "GStreamer: cannot link v4l2src -> decodebin/n"); |
619 | gst_object_unref(pipeline); |
620 | return 0; |
621 | } |
622 | gst_caps_unref(caps); |
623 | break; |
624 | case CV_CAP_GSTREAMER_V4L: |
625 | case CV_CAP_GSTREAMER_1394: |
626 | case CV_CAP_GSTREAMER_FILE: |
627 | if(!gst_element_link(source, decodebin)) { |
628 | CV_ERROR(CV_StsError, "GStreamer: cannot link filesrc -> decodebin/n"); |
629 | gst_object_unref(pipeline); |
630 | return 0; |
631 | } |
632 | break; |
633 | } |
634 | |
635 | if(!gst_element_link(colour, sink)) { |
636 | CV_ERROR(CV_StsError, "GStreamer: cannot link colour -> sink/n"); |
637 | gst_object_unref(pipeline); |
638 | return 0; |
639 | } |
640 | |
641 | // printf("linked, pausing/n"); |
642 | |
643 | if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) == |
644 | GST_STATE_CHANGE_FAILURE) { |
645 | CV_WARN("GStreamer: unable to set pipeline to paused/n"); |
646 | // icvHandleMessage(capture); |
647 | // cvReleaseCapture((CvCapture **)(void *)&capture); |
648 | gst_object_unref(pipeline); |
649 | return 0; |
650 | } |
651 | |
652 | |
653 | if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED) == |
654 | GST_STATE_CHANGE_FAILURE) { |
655 | CV_WARN("GStreamer: unable to set pipeline to paused/n"); |
656 | // icvHandleMessage(capture); |
657 | // cvReleaseCapture((CvCapture **)(void *)&capture); |
658 | gst_object_unref(pipeline); |
659 | return 0; |
660 | } |
661 | |
662 | |
663 | // printf("state now paused/n"); |
664 | |
665 | // construct capture struct |
666 | capture = (CvCapture_GStreamer *)cvAlloc(sizeof(CvCapture_GStreamer)); |
667 | memset(capture, 0, sizeof(CvCapture_GStreamer)); |
668 | capture->type = type; |
669 | capture->pipeline = pipeline; |
670 | capture->source = source; |
671 | capture->decodebin = decodebin; |
672 | capture->colour = colour; |
673 | capture->appsink = sink; |
674 | |
675 | icvHandleMessage(capture); |
676 | |
677 | OPENCV_ASSERT(capture, |
678 | "cvCaptureFromFile_GStreamer( const char * )", "couldn't create capture"); |
679 | |
680 | // GstClock *clock = gst_pipeline_get_clock(GST_PIPELINE(pipeline)); |
681 | // printf("clock %s/n", gst_object_get_name(GST_OBJECT(clock))); |
682 | |
683 | __END__; |
684 | |
685 | return capture; |
686 | } |
687 | |
#if 0
//
//
// image sequence writer
//
// (disabled: kept for reference only, not compiled)
//
// treats the "video writer" filename as a printf-style pattern and saves
// every frame as an individual image through cvSaveImage()
//
typedef struct CvVideoWriter_GStreamer {
    char *filename;          // printf-style pattern, e.g. "frame%04d.png"
    unsigned currentframe;   // frame index substituted into the pattern
};
// NOTE(review): the typedef above declares no typedef name; the type is
// only usable via its struct tag -- harmless but likely unintentional

static int icvWriteFrame_GStreamer( CvVideoWriter* writer, const IplImage* image )
{
    CvVideoWriter_GStreamer *wri = (CvVideoWriter_GStreamer *)writer;

    // expand the filename pattern, doubling the buffer until it fits
    char str[100];
    char *x = str;
    int size = 100;
    while(snprintf(x, size, wri->filename, wri->currentframe) == size - 1) {
        size *= 2;
        if(x == str)
            x = (char *)malloc(size);
        else
            x = (char *)realloc(x, size);
    }

    int ret = cvSaveImage(x, image);

    wri->currentframe++;

    if(x != str)
        free(x);

    return ret;
}

static void icvReleaseVideoWriter_GStreamer( CvVideoWriter** writer )
{
    CvVideoWriter_GStreamer **wri = (CvVideoWriter_GStreamer **)writer;

    free((*wri)->filename);
}

// create an image-sequence "writer" from a printf-style filename pattern;
// returns 0 when no image codec is registered for the expanded name
CvVideoWriter* cvCreateVideoWriter_GStreamer( const char* filename )
{
    CvVideoWriter_GStreamer *writer;

    unsigned offset = 0;
    char *name = icvExtractPattern(filename, &offset);
    if(!name)
        return 0;

    // expand the pattern once to check that an image writer exists for it
    char str[100];
    char *x = str;
    int size = 100;
    while(snprintf(x, size, name, 0) == size - 1) {
        size *= 2;
        if(x == str)
            x = (char *)malloc(size);
        else
            x = (char *)realloc(x, size);
    }
    if(!cvHaveImageWriter(x)) {
        if(x != str)
            free(x);
        return 0;
    }
    if(x != str)
        free(x);

    // NOTE(review): allocates sizeof(CvCapture_GStreamer), not
    // sizeof(CvVideoWriter_GStreamer) -- over-allocates and only works by
    // accident; fix before re-enabling this code
    writer = (CvVideoWriter_GStreamer *)cvAlloc(sizeof(CvCapture_GStreamer));
    memset(writer, 0, sizeof(CvVideoWriter_GStreamer));
    writer->filename = strdup(name);
    writer->currentframe = offset;

    return (CvVideoWriter *)writer;
}
#endif
766 | |
767 | |
// thin C++ adapter exposing the C-style icv*_GStreamer functions above
// through OpenCV's CvCapture interface
class CvCapture_GStreamer_CPP : public CvCapture
{
public:
    CvCapture_GStreamer_CPP() { captureGS = 0; }
    virtual ~CvCapture_GStreamer_CPP() { close(); }

    // open a stream of the given CV_CAP_GSTREAMER_* type; returns false on failure
    virtual bool open( int type, const char* filename );
    virtual void close();

    virtual double getProperty(int);
    virtual bool setProperty(int, double);
    virtual bool grabFrame();
    virtual IplImage* retrieveFrame(int);
protected:

    // owned C-level capture state; 0 when no stream is open
    CvCapture_GStreamer* captureGS;
};
785 | |
786 | bool CvCapture_GStreamer_CPP::open( int type, const char* filename ) |
787 | { |
788 | close(); |
789 | captureGS = icvCreateCapture_GStreamer( type, filename ); |
790 | return captureGS != 0; |
791 | } |
792 | |
793 | void CvCapture_GStreamer_CPP::close() |
794 | { |
795 | if( captureGS ) |
796 | { |
797 | icvClose_GStreamer( captureGS ); |
798 | cvFree( &captureGS ); |
799 | } |
800 | } |
801 | |
802 | bool CvCapture_GStreamer_CPP::grabFrame() |
803 | { |
804 | return captureGS ? icvGrabFrame_GStreamer( captureGS ) != 0 : false; |
805 | } |
806 | |
807 | IplImage* CvCapture_GStreamer_CPP::retrieveFrame(int) |
808 | { |
809 | return captureGS ? (IplImage*)icvRetrieveFrame_GStreamer( captureGS, 0 ) : 0; |
810 | } |
811 | |
812 | double CvCapture_GStreamer_CPP::getProperty( int propId ) |
813 | { |
814 | return captureGS ? icvGetProperty_GStreamer( captureGS, propId ) : 0; |
815 | } |
816 | |
817 | bool CvCapture_GStreamer_CPP::setProperty( int propId, double value ) |
818 | { |
819 | return captureGS ? icvSetProperty_GStreamer( captureGS, propId, value ) != 0 : false; |
820 | } |
821 | |
822 | CvCapture* cvCreateCapture_GStreamer( int type, const char* filename ) |
823 | { |
824 | CvCapture_GStreamer_CPP* capture = new CvCapture_GStreamer_CPP; |
825 | |
826 | if( capture->open( type, filename )) |
827 | return capture; |
828 | |
829 | delete capture; |
830 | return 0; |
831 | } |