GstKinesisWebRTC: Getting Started - C Example Application

This C example shows how to use the kinesiswebrtcbin element in master mode. It shows how to create a pipeline to send audio and video, and how to dynamically connect bins to receive audio and video. The application supports the connection and disconnection of multiple viewers. This example is part of the plugin source code.
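
To try it out, save the listing below to a C file and build it against GStreamer. Assuming the GStreamer 1.0 development packages and the GstKinesisWebRTC plugin are installed, a command along these lines should work (the file name kinesis-webrtc-master.c is just an illustrative choice). The signaling channel used in the pipeline (test-ridgerun) must exist, and valid AWS credentials for Kinesis Video Streams are assumed to be available to the application.

gcc kinesis-webrtc-master.c -o kinesis-webrtc-master $(pkg-config --cflags --libs gstreamer-1.0)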


#include <gst/gst.h>
#include <glib-unix.h>

#define TEST_SEND_PIPELINE \
  "videotestsrc is-live=true ! queue ! video/x-raw,width=640,height=480,framerate=30/1 ! " \
  "vp8enc error-resilient=partitions keyframe-max-dist=10 auto-alt-ref=true cpu-used=5 deadline=1  ! " \
  "queue ! kinesiswebrtcbin channel=test-ridgerun name=bin audiotestsrc is-live=TRUE ! " \
  "queue leaky=2 max-size-buffers=400 ! audioconvert ! audioresample ! opusenc !" \
  "audio/x-opus,rate=48000,channels=2 ! queue ! bin."

#define TEST_RECEIVE_VIDEO_PIPELINE \
  "queue name=video_queue ! vp8dec ! queue ! videoconvert ! ximagesink"
#define TEST_RECEIVE_AUDIO_PIPELINE \
  "queue name=audio_queue ! opusparse ! opusdec ! queue !  pulsesink"

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData
{
  GstElement *pipeline;
  GstElement *webrtcbin;
} CustomData;


static gboolean
signal_handler (gpointer user_data)
{
  GMainLoop *loop = (GMainLoop *) user_data;

  g_print ("Interrupt received, closing...\n");
  g_main_loop_quit (loop);

  return TRUE;
}

/* Handler peer connected */
static void
peer_connected_handler (GstElement * webrtc, gchar* peer_id, CustomData * data)
{
  g_print("Peer %s connected\n", peer_id);
}

/* Handler peer disconnected */
static void
peer_disconnected_handler (GstElement * webrtc, gchar* peer_id, CustomData * data)
{
  g_print("Peer %s disconnected\n", peer_id);
}

/* Handler for the pad-added signal */
static void
pad_added_handler (GstElement * webrtc, GstPad * new_pad, CustomData * data)
{
  GstPadLinkReturn ret;
  GstElement *receive_bin = NULL;
  GstPad *sink_pad = NULL;
  gchar *bin_name = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad),
      GST_ELEMENT_NAME (webrtc));

  /* Check the new pad's type */
  if (g_str_has_prefix (GST_PAD_NAME (new_pad), "audio")) {
    g_print ("It is an audio pad\n");
    receive_bin =
        gst_parse_bin_from_description (TEST_RECEIVE_AUDIO_PIPELINE, TRUE,
        NULL);
  } else {
    g_print ("It is a video pad\n");
    receive_bin =
        gst_parse_bin_from_description (TEST_RECEIVE_VIDEO_PIPELINE, TRUE,
        NULL);
  }

  bin_name = g_strdup_printf ("bin_%s", GST_OBJECT_NAME (new_pad));
  g_object_set (receive_bin, "name", bin_name, NULL);
  g_free (bin_name);

  gst_bin_add (GST_BIN (data->pipeline), receive_bin);
  gst_element_sync_state_with_parent (receive_bin);
  sink_pad = gst_element_get_static_pad (receive_bin, "sink");

  /* Attempt the link */
  ret = gst_pad_link (new_pad, sink_pad);
  if (GST_PAD_LINK_FAILED (ret)) {
    g_print ("Link failed.\n");
  } else {
    g_print ("Link %s -> %s succeeded\n", GST_OBJECT_NAME (new_pad),
        GST_OBJECT_NAME (receive_bin));
  }

  /* Unreference the sink pad */
  gst_object_unref (sink_pad);
}


/* Handler for the pad-removed signal */
static void
pad_removed_handler (GstElement * webrtc, GstPad * pad, CustomData * data)
{
  GstElement *receive_bin = NULL;
  gchar *bin_name = NULL;

  g_print ("Removing pad '%s' from '%s':\n", GST_PAD_NAME (pad),
      GST_ELEMENT_NAME (webrtc));

  if (GST_PAD_IS_SINK (pad)) {
    return;
  }

  /* Look up the receive bin that was created for this pad */
  bin_name = g_strdup_printf ("bin_%s", GST_OBJECT_NAME (pad));
  receive_bin = gst_bin_get_by_name (GST_BIN (data->pipeline), bin_name);
  g_free (bin_name);

  if (receive_bin == NULL) {
    return;
  }

  g_print ("Setting element '%s' (%p) to NULL and removing it\n",
      GST_ELEMENT_NAME (receive_bin), receive_bin);

  gst_element_set_state (receive_bin, GST_STATE_NULL);
  gst_bin_remove (GST_BIN (data->pipeline), receive_bin);

  gst_object_unref (receive_bin);
}

int
main (int argc, char **argv)
{
  GMainLoop *main_loop = NULL;
  GstElement *pipeline = NULL;
  GstElement *webrtcbin = NULL;
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  CustomData data;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create pipeline */
  pipeline = gst_parse_launch (TEST_SEND_PIPELINE, NULL);
  if (!pipeline) {
    g_printerr ("Unable to create the pipeline.\n");
    return -1;
  }

  webrtcbin = gst_bin_get_by_name (GST_BIN (pipeline), "bin");
  data.pipeline = pipeline;
  data.webrtcbin = webrtcbin;

  g_signal_connect (webrtcbin, "pad-added", G_CALLBACK (pad_added_handler),
      &data);
  g_signal_connect (webrtcbin, "pad-removed", G_CALLBACK (pad_removed_handler),
      &data);
  g_signal_connect (webrtcbin, "peer-connected", G_CALLBACK (peer_connected_handler),
      &data);
  g_signal_connect (webrtcbin, "peer-disconnected", G_CALLBACK (peer_disconnected_handler),
      &data);

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Block until CTRL+C is pressed */
  main_loop = g_main_loop_new (NULL, FALSE);
  g_unix_signal_add (SIGINT, signal_handler, main_loop);
  g_main_loop_run (main_loop);
  g_main_loop_unref (main_loop);

  g_print ("Closing ...");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  gst_object_unref (webrtcbin);

  g_print ("Successfully closed\n");

  return 0;
}
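
As a minimal variation, the signaling channel could also be configured from code instead of being hard-coded in the pipeline description. The sketch below only assumes the channel property already used in TEST_SEND_PIPELINE above; the channel name my-channel is a placeholder, and the remaining kinesiswebrtcbin properties can be listed with gst-inspect-1.0.

/* Sketch: set the signaling channel programmatically before starting the
 * pipeline. Only the "channel" property seen in TEST_SEND_PIPELINE is
 * assumed; "my-channel" is a placeholder name. */
GstElement *bin = gst_bin_get_by_name (GST_BIN (pipeline), "bin");
g_object_set (bin, "channel", "my-channel", NULL);
gst_object_unref (bin);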
