GStreamer: UDP push streaming, preview, and dynamic recording
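The program below builds, for each camera, a single pipeline of the form v4l2src ! videoconvert ! capsfilter ! tee. The tee fans the captured video out into two always-on branches: a push branch (queue ! x264enc ! rtph264pay ! udpsink) that streams RTP/H.264 to 127.0.0.1 on port 8554+N, and a preview branch (queue ! xvimagesink) for local display. A third recording branch (queue ! videorate ! jpegenc ! avimux ! filesink) is attached to and detached from the tee at runtime from a small console menu, which is what makes the recording "dynamic". The full source: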

#include <gst/gst.h>
#include <stdio.h>
#include <string.h>
#include <linux/input.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdlib.h>

#define MAX_PIPELINE_CNT 1
#define MAX_INPUT_SIZE   128

#define GSTCAM_INFO(fmt, args...) do {                       \
    g_print("[INFO] %s %d:" fmt, __FUNCTION__, __LINE__, ##args);   \
} while (0)

#define GSTCAM_ERROR(fmt, args...) do {                       \
    g_print("[ERROR] %s %d:" fmt, __FUNCTION__, __LINE__, ##args);   \
} while (0)

#define MAKE_AND_ADD(var, pipe, name, label, elem_name) \
G_STMT_START { \
  if (G_UNLIKELY (!(var = (gst_element_factory_make (name, elem_name))))) { \
    g_print ("Could not create element %s", name); \
    goto label; \
  } \
  if (G_UNLIKELY (!gst_bin_add (GST_BIN_CAST (pipe), var))) { \
    g_print ("Could not add element %s", name); \
    goto label; \
  } \
} G_STMT_END

typedef struct _AppPipeline AppPipeline;
typedef struct _AppData     AppData;

struct _AppPipeline {
  gint id;
  GstElement *pipeline;
  
  GstElement *v4l2_src;
  GstElement *videoconvert;
  GstElement *capsfilter;
  GstElement *tee;
  //push
  GstElement *p_queue;
  GstElement *p_x264enc;
  GstElement *p_rtph264pay;
  GstElement *p_udpsink;
  //preview
  GstElement *i_queue;
  GstElement *i_xvimagesink;
  //capture
  
  //video
  GstElement *v_queue;
  GstElement *v_videorate;
  GstElement *v_jpegenc;
  GstElement *v_avimux;
  GstElement *v_filesink;
  //push pad
  GstPad *tee_push_pad, *queue_push_pad;
  //preview
  GstPad *tee_disp_pad, *queue_disp_pad;
  //video
  GstPad *tee_video_pad, *queue_video_pad;
  
  GstCaps *filtercaps;
  
  //para
  gint port;
  gchar src_dev[64];
  gchar v_file[64];
  
};

struct _AppData {
  GMainLoop *loop;
  AppPipeline *AppPipeline[MAX_PIPELINE_CNT];
  GMutex   m_mutex;
};



static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *) data;
    switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_EOS:
            g_print ("End of stream\n");
            g_main_loop_quit (loop);
            break;
        case GST_MESSAGE_ERROR:{
            gchar *debug;
            GError *error;
            gst_message_parse_error (msg, &error, &debug);
            g_printerr ("ERROR from element %s: %s\n",
                        GST_OBJECT_NAME (msg->src), error->message);
            if (debug)
                g_printerr ("Error details: %s\n", debug);
            g_free (debug);
            g_error_free (error);
            g_main_loop_quit (loop);
            break;
        }
        default:
            break;
    }
    return TRUE;
}

static void
parse_input (gchar * input)
{
  fflush (stdout);
  memset (input, '\0', MAX_INPUT_SIZE * sizeof (*input));

  if (!fgets (input, MAX_INPUT_SIZE, stdin) ) {
    g_print ("Failed to parse input!\n");
    return;
  }

  // Clear trailing whitespace and newline.
  g_strchomp (input);
}

gint create_init_pipeline(AppData * appctx, gint cnt)
{
  GSTCAM_INFO(" start \n");
  AppPipeline *pipeline;
  char capString[128];
  char pipename[32];
  // 1. init struct 
  appctx->AppPipeline[cnt] = g_new0(AppPipeline, 1);
  pipeline = appctx->AppPipeline[cnt];
  
  pipeline->id = cnt;
  snprintf(pipename, sizeof(pipename), "test-pipeline%d", cnt );
  pipeline->pipeline = gst_pipeline_new (pipename);
  
  // 2. create common part
  GSTCAM_INFO("create factory element part %d, pipeid %d \n",cnt, appctx->AppPipeline[cnt]->id);
  MAKE_AND_ADD(pipeline->v4l2_src, pipeline->pipeline, "v4l2src", fail, "v4l2src");
  MAKE_AND_ADD(pipeline->videoconvert, pipeline->pipeline, "videoconvert", fail, "videoconvert");
  MAKE_AND_ADD(pipeline->capsfilter, pipeline->pipeline, "capsfilter", fail, "capsfilter");
  MAKE_AND_ADD(pipeline->tee, pipeline->pipeline, "tee", fail, "tee");
  
  //push
  MAKE_AND_ADD(pipeline->p_queue, pipeline->pipeline, "queue", fail, "p_queue");
  MAKE_AND_ADD(pipeline->p_x264enc, pipeline->pipeline, "x264enc", fail, "p_x264enc");
  MAKE_AND_ADD(pipeline->p_rtph264pay, pipeline->pipeline, "rtph264pay", fail, "p_rtph264pay");
  MAKE_AND_ADD(pipeline->p_udpsink, pipeline->pipeline, "udpsink", fail, "p_udpsink");
  
  //preview
  MAKE_AND_ADD(pipeline->i_queue, pipeline->pipeline, "queue", fail, "i_queue");
  MAKE_AND_ADD(pipeline->i_xvimagesink, pipeline->pipeline, "xvimagesink", fail, "i_xvimagesink");

  //video (recording branch elements are created dynamically in videoing())
  /*MAKE_AND_ADD(pipeline->v_queue, pipeline->pipeline, "queue", fail, "v_queue");
  MAKE_AND_ADD(pipeline->v_videorate, pipeline->pipeline, "videorate", fail, "v_videorate");
  MAKE_AND_ADD(pipeline->v_jpegenc, pipeline->pipeline, "jpegenc", fail, "v_jpegenc");
  MAKE_AND_ADD(pipeline->v_avimux, pipeline->pipeline, "avimux", fail, "v_avimux");
  MAKE_AND_ADD(pipeline->v_filesink, pipeline->pipeline, "filesink", fail, "v_filesink");*/
  GSTCAM_INFO("create factory element success %d\n",cnt);
  
  // set source element parameters
  snprintf(pipeline->src_dev, sizeof(pipeline->src_dev), "/dev/video%d", cnt );
  GSTCAM_INFO("src path %s \n",pipeline->src_dev);
  g_object_set (G_OBJECT (pipeline->v4l2_src),"device", pipeline->src_dev, NULL );
  snprintf(capString, sizeof(capString),
            "video/x-raw,"
            "width=%d,"
            "height=%d,"
            "framerate=%d/1,"
            "format=NV12",
            640,
            480,
            30
            );
   GSTCAM_INFO("pipeline %d stream formate %s \n",cnt, capString);
   gst_util_set_object_arg (G_OBJECT (pipeline->capsfilter), "caps", capString);
  // set push element
  g_object_set (G_OBJECT (pipeline->p_rtph264pay),"pt", 96, NULL );
  pipeline->port = 8554 + cnt;
  g_object_set (G_OBJECT (pipeline->p_udpsink),
                "host", "127.0.0.1",
                "port", pipeline->port,
                "sync", FALSE,
                "async", FALSE,
                NULL
                );
  GSTCAM_INFO("udp stream udp//:127.0.0.1:%d\n",pipeline->port);
  
  g_object_set (G_OBJECT(pipeline->i_xvimagesink),"async",FALSE,"sync",FALSE, NULL);
  
  //para set video (done dynamically in videoing() when recording starts)
  /*snprintf(pipeline->v_file, sizeof(pipeline->v_file), "test%d.avi", cnt);
  g_object_set (G_OBJECT (pipeline->v_filesink), "location", pipeline->v_file, NULL);*/
  
   gst_element_link_many(pipeline->v4l2_src, pipeline->videoconvert, pipeline->capsfilter, pipeline->tee, NULL);
  
   //push 
   gst_element_link_many(pipeline->p_queue, pipeline->p_x264enc, pipeline->p_rtph264pay, pipeline->p_udpsink, NULL);
   pipeline->tee_push_pad = gst_element_request_pad_simple (pipeline->tee, "src_%u");
   g_print ("Obtained request pad %s for push branch.\n", gst_pad_get_name (pipeline->tee_push_pad));
   pipeline->queue_push_pad = gst_element_get_static_pad (pipeline->p_queue, "sink");
   if ( gst_pad_link (pipeline->tee_push_pad,  pipeline->queue_push_pad) != GST_PAD_LINK_OK ) {
            GSTCAM_ERROR("push init pad link fail! \n");
            goto fail;
   }
   gst_element_link_many(pipeline->i_queue, pipeline->i_xvimagesink, NULL);
   pipeline->tee_disp_pad = gst_element_request_pad_simple (pipeline->tee, "src_%u"); 
   g_print ("Obtained request pad %s for image branch.\n", gst_pad_get_name (pipeline->tee_disp_pad));
   pipeline->queue_disp_pad = gst_element_get_static_pad (pipeline->i_queue, "sink");
   if ( gst_pad_link (pipeline->tee_disp_pad, pipeline->queue_disp_pad) != GST_PAD_LINK_OK ) {
            GSTCAM_ERROR("image init pad link fail! \n");
            goto fail;
   }
  
  // bus_call expects the GMainLoop as user data, so pass appctx->loop here.
  gst_bus_add_watch (GST_ELEMENT_BUS (pipeline->pipeline), bus_call, appctx->loop);
  
  g_print ("start pipeline PLAYING\n");
  gst_element_set_state (pipeline->pipeline, GST_STATE_PLAYING);
  if (gst_element_get_state (pipeline->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE) {
        GSTCAM_ERROR ("Failed to Go into PLAYING state\n");
        goto fail;
   }
  
  return 0;
  
fail:
  GSTCAM_ERROR("init fail\n");
  gst_object_unref (pipeline->pipeline);
  g_free(pipeline);
  appctx->AppPipeline[cnt] = NULL;
  return -1;
}
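The application only sends; nothing in it consumes the RTP stream. As a hedged sketch (not part of the program above), a matching receiver could be built with gst_parse_launch. The port (8554) and payload type (96) mirror what create_init_pipeline configures; avdec_h264 and autovideosink are assumptions that depend on the plugins installed on the receiving machine, and receive_example is a hypothetical helper.

/* Hedged receiver sketch: decode and display the RTP/H.264 stream pushed by
 * the first pipeline. Hypothetical helper, not called by this program. */
static void
receive_example (void)
{
  GError *err = NULL;
  GMainLoop *rx_loop;

  /* udpsrc needs explicit caps: raw UDP carries no out-of-band stream description. */
  GstElement *rx = gst_parse_launch (
      "udpsrc port=8554 caps=\"application/x-rtp,media=video,clock-rate=90000,"
      "encoding-name=H264,payload=96\" ! "
      "rtph264depay ! avdec_h264 ! videoconvert ! autovideosink",
      &err);
  if (!rx) {
    g_printerr ("receiver parse error: %s\n", err ? err->message : "unknown");
    g_clear_error (&err);
    return;
  }

  gst_element_set_state (rx, GST_STATE_PLAYING);

  /* Run until interrupted; a real receiver would also watch the bus for EOS/ERROR. */
  rx_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (rx_loop);
}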

gint videoing(AppPipeline *pipeline, gboolean is_video) 
{
  g_print("video start !\n");
  
 if(is_video) {
  gst_element_set_state (pipeline->pipeline, GST_STATE_READY);
  
  MAKE_AND_ADD(pipeline->v_queue, pipeline->pipeline, "queue", fail, "v_queue");
  MAKE_AND_ADD(pipeline->v_videorate, pipeline->pipeline, "videorate", fail, "v_videorate");
  MAKE_AND_ADD(pipeline->v_jpegenc, pipeline->pipeline, "jpegenc", fail, "v_jpegenc");
  MAKE_AND_ADD(pipeline->v_avimux, pipeline->pipeline, "avimux", fail, "v_avimux");
  MAKE_AND_ADD(pipeline->v_filesink, pipeline->pipeline, "filesink", fail, "v_filesink");
  
  //para set video
  snprintf(pipeline->v_file, sizeof(pipeline->v_file), "test%d.avi", pipeline->id);
  g_object_set (G_OBJECT (pipeline->v_filesink),"location", pipeline->v_file, NULL );      
            
  if ( gst_element_link_many (pipeline->v_queue, pipeline->v_videorate, pipeline->v_jpegenc, pipeline->v_avimux, pipeline->v_filesink, NULL) != TRUE) {
         GSTCAM_INFO ("Elements could not be linked.\n");
         return -1;
   }
             
   gst_element_sync_state_with_parent(pipeline->v_queue);
             
   pipeline->tee_video_pad = gst_element_request_pad_simple (pipeline->tee, "src_%u");
   g_print ("Obtained request pad %s for record branch.\n", gst_pad_get_name ( pipeline->tee_video_pad));
   pipeline->queue_video_pad = gst_element_get_static_pad (pipeline->v_queue, "sink");
             
   g_print("video link start !\n");
   if ( gst_pad_link (pipeline->tee_video_pad, pipeline->queue_video_pad) != GST_PAD_LINK_OK) {
           GSTCAM_INFO ("Tee could not be linked.\n");
           return -1;
   }
   
   GSTCAM_INFO("video pipeline start\n");        
   gst_element_set_state (pipeline->pipeline, GST_STATE_PLAYING);
               
  } else {
         g_print("video quit !\n");
         gst_element_set_state (pipeline->v_queue, GST_STATE_NULL);
         gst_element_set_state (pipeline->v_videorate, GST_STATE_NULL);
         gst_element_set_state (pipeline->v_jpegenc, GST_STATE_NULL);
         gst_element_set_state (pipeline->v_avimux, GST_STATE_NULL);
         gst_element_set_state (pipeline->v_filesink, GST_STATE_NULL);
         
         g_print("video unlink !\n");
        if( gst_pad_unlink(pipeline->tee_video_pad, pipeline->queue_video_pad) != TRUE )
        {
          g_printerr ("video could not be unlinked.\n");
          return -1;
        }
        
        gst_element_release_request_pad (pipeline->tee, pipeline->tee_video_pad);
        gst_object_unref (pipeline->tee_video_pad);
        gst_object_unref (pipeline->queue_video_pad);
        
        gst_bin_remove(GST_BIN (pipeline->pipeline), pipeline->v_queue);
        gst_bin_remove(GST_BIN (pipeline->pipeline), pipeline->v_videorate);
        gst_bin_remove(GST_BIN (pipeline->pipeline), pipeline->v_jpegenc);
        gst_bin_remove(GST_BIN (pipeline->pipeline), pipeline->v_avimux);
        gst_bin_remove(GST_BIN (pipeline->pipeline), pipeline->v_filesink);
      
      }
   return 0;
fail:
  GSTCAM_ERROR("videoing fail\n");
  return -1;
}
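videoing() stops recording by driving the branch elements to NULL and unlinking while the rest of the pipeline keeps running, which also means avimux never sees EOS and the resulting AVI index may not be finalized cleanly. The usual alternative, described in GStreamer's dynamic-pipelines documentation, is to block the tee request pad with a probe, unlink inside the callback, and push EOS through the branch so the muxer can close the file properly. Below is a hedged sketch of that pattern; stop_record_probe_cb and stop_recording_async are hypothetical helpers not used by the program above, and the actual teardown (states to NULL, gst_bin_remove, gst_element_release_request_pad) would still be performed once the EOS has drained, e.g. from the bus watch.

/* Hedged sketch: detach the recording branch without pausing the push and
 * preview branches, using a blocking pad probe on the tee request pad. */
static GstPadProbeReturn
stop_record_probe_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  AppPipeline *pipeline = (AppPipeline *) user_data;
  (void) pad;
  (void) info;

  /* Dataflow on this tee src pad is blocked here, so unlinking is safe. */
  gst_pad_unlink (pipeline->tee_video_pad, pipeline->queue_video_pad);

  /* Drain the branch: the EOS lets jpegenc/avimux/filesink flush so the AVI
   * file is closed with a valid index. Element teardown should follow once
   * the EOS has propagated (e.g. handled from the bus watch). */
  gst_pad_send_event (pipeline->queue_video_pad, gst_event_new_eos ());

  return GST_PAD_PROBE_REMOVE;  /* one-shot probe */
}

static void
stop_recording_async (AppPipeline *pipeline)
{
  gst_pad_add_probe (pipeline->tee_video_pad,
      GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
      stop_record_probe_cb, pipeline, NULL);
}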

static gpointer
test_menu (gpointer data) {
  GSTCAM_INFO(" test start \n");
  AppData *appctx = (AppData*)(data);
  gboolean active = TRUE;
  gchar *input = g_new0 (gchar, MAX_INPUT_SIZE);

  while (active) {
    g_print ("\n input video0s video0q q : ");
    parse_input(input);
    
    if (g_str_equal (input, "video0s")) {
      videoing (appctx->AppPipeline[0], TRUE);
    } else if (g_str_equal (input, "video0q")) {
      videoing (appctx->AppPipeline[0], FALSE);
    } else {
      g_print ("quit\n");
      active = FALSE;
    }
  }

  g_free (input);
  // Stop the main loop so main() can clean up and exit.
  g_main_loop_quit (appctx->loop);
  return NULL;
}

int main(int argc, char *argv[]) {
  
  GSTCAM_INFO("program start \n");
  gint ret = -1;
  AppData *appctx = g_new0 (AppData, 1);
  if(!appctx) {
    GSTCAM_ERROR("appctx is null\n");
    return -1;
  }
  // 1. Initialize GStreamer
  g_set_prgname ("gst-push2stream-app");
  gst_init (&argc, &argv);
  
  //2. init main loop
  appctx->loop = g_main_loop_new (NULL, FALSE);
  
  // 3. create init pipeline
  for(gint i = 0; i < MAX_PIPELINE_CNT; i++) {
    ret = create_init_pipeline(appctx,i);
    if(ret < 0) {
      GSTCAM_ERROR("create pipeline %d error \n", i);
      g_free(appctx);
      return -1;
    }
  }
  // test thread
  {
    GThread *thread = NULL;
    thread = g_thread_new ("MainMenuThread", test_menu, appctx);
    
    // Run main loop.
    g_main_loop_run (appctx->loop);

    // Waits until main menu thread finishes.
    g_thread_join (thread);
  }
  
  // Tear down the pipelines before exiting.
  for (gint i = 0; i < MAX_PIPELINE_CNT; i++) {
    if (appctx->AppPipeline[i]) {
      gst_element_set_state (appctx->AppPipeline[i]->pipeline, GST_STATE_NULL);
      gst_object_unref (appctx->AppPipeline[i]->pipeline);
      g_free (appctx->AppPipeline[i]);
    }
  }
  g_main_loop_unref (appctx->loop);
  gst_deinit ();
  g_free(appctx);
  return 0;
}
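For reference, the program can be compiled with the usual pkg-config flags, e.g. gcc gst_udp_stream.c -o gst-push2stream-app $(pkg-config --cflags --libs gstreamer-1.0) (the source file name here is an assumption). Note that gst_element_request_pad_simple() was introduced in GStreamer 1.20; on older releases the equivalent call is gst_element_get_request_pad().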
