Playing a .wav/.mp3 file using GStreamer in code

You can also clone the code with

git clone https://github.com/SanchayanMaity/gstreamer-audio-playback.git

Though I used this on a Toradex Colibri Vybrid module, you can use the same code on a BeagleBoard or a desktop with the correct setup.

/*
Notes for compilation:
1. For compiling the code along with the Makefile given, an OE setup is mandatory.
2. Before compiling, change the paths as per the setup of your environment.

Please refer to the GStreamer Application Development Manual at the link below before proceeding further
http://gstreamer.freedesktop.org/data/doc/gstreamer/head/manual/html/index.html

Comprehensive documentation for GStreamer
http://gstreamer.freedesktop.org/documentation/

The following elements/plugins/packages are expected to be in the module image for this to work
gstreamer
gst-plugins-base
gst-plugins-good-wavparse
gst-plugins-good-alsa
gst-plugins-good-audioconvert
gst-plugins-ugly-mad
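
A quick way to confirm these elements are actually present on the target is gst-inspect
(shipped as gst-inspect-0.10 on some images), for example:
gst-inspect wavparse
gst-inspect mad
gst-inspect alsasink
gst-inspect audioconvert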

Pipeline to play .wav audio file from command line
gst-launch filesrc location="location of file" ! wavparse ! alsasink 

Pipeline to play .mp3 audio file from command line
gst-launch filesrc location="location of file" ! mad ! audioconvert ! alsasink 

It is also assumed that the USB audio device is the only audio device in use on the system. If it is not, the
"device" parameter for alsasink will change; the parameter to use needs to be checked with cat /proc/asound/cards
and then set as shown below.
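
For illustration, cat /proc/asound/cards lists the available cards in a form similar to the following
(names and numbering are system-specific and this output is trimmed; here the USB device is assumed
to enumerate as card 1):

 0 [Onboard        ]: onboard-driver - On-board audio
 1 [Device         ]: USB-Audio - USB Audio Device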

In the GStreamer pipeline

Pipeline to play .wav audio file from command line
gst-launch filesrc location="location of file" ! wavparse ! alsasink device=hw:1,0

Pipeline to play .mp3 audio file from command line
gst-launch filesrc location="location of file" ! mad ! audioconvert ! alsasink device=hw:1,0

In the code, during initialisation in init_audio_playback_pipeline
g_object_set (G_OBJECT (data->alsasink), "device", "hw:0,0", NULL);
                            OR
g_object_set (G_OBJECT (data->alsasink), "device", "hw:1,0", NULL);

The pipeline will remain the same for a different audio device; only the device parameter for alsasink will change
*/

#include <gstreamer-0.10/gst/gst.h>
#include <gstreamer-0.10/gst/gstelement.h>
#include <stdio.h>
#include <unistd.h>
#include <string.h>

#define NUMBER_OF_BYTES_FOR_FILE_LOCATION    256

volatile gboolean exit_flag = FALSE;

typedef struct  
{
    GstElement *file_source;
    GstElement *pipeline;
    GstElement *audio_decoder;    
    GstElement *audioconvert;
    GstElement *alsasink;    
    GstElement *bin_playback;    
    GstBus *bus;
    GstMessage *message;        
    gchar filelocation[NUMBER_OF_BYTES_FOR_FILE_LOCATION];
}gstData;

gstData gstreamerData;

// Create the pipeline element
gboolean create_pipeline(gstData *data)
{        
    data->pipeline = gst_pipeline_new("audio_pipeline");    
    if (data->pipeline == NULL)
    {            
        return FALSE;
    }
    gst_element_set_state (data->pipeline, GST_STATE_NULL);
    return TRUE;
}

// Callback function for dynamically linking the "wavparse" element and "alsasink" element
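// wavparse creates its source pad only after it has parsed the WAV header, which is why
// the link between wavparse and alsasink has to be completed at runtime in this callback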
void on_pad_added (GstElement *src_element, GstPad *src_pad, gpointer data)
{
    g_print ("\nLinking dynamic pad between wavparse and alsasink\n");

    GstElement *sink_element = (GstElement *) data;     // Is alsasink
    GstPad *sink_pad = gst_element_get_static_pad (sink_element, "sink");
    gst_pad_link (src_pad, sink_pad);

    gst_object_unref (sink_pad);
    (void) src_element;     // Prevent "unused parameter" warning here
}

// Setup the pipeline
gboolean init_audio_playback_pipeline(gstData *data)
{
    if (data == NULL)
        return FALSE;
        
    data->file_source = gst_element_factory_make("filesrc", "filesource");    
    
    if (strstr(data->filelocation, ".mp3"))
    {
        g_print ("\nMP3 Audio decoder selected\n");
        data->audio_decoder = gst_element_factory_make("mad", "audiomp3decoder");
    }
    
    if (strstr(data->filelocation, ".wav"))
    {
        g_print ("\nWAV Audio decoder selected\n");
        data->audio_decoder = gst_element_factory_make("wavparse", "audiowavdecoder");
    }
        
    data->audioconvert = gst_element_factory_make("audioconvert", "audioconverter");    
    
    data->alsasink = gst_element_factory_make("alsasink", "audiosink");
    
    if ( !data->file_source || !data->audio_decoder || !data->audioconvert || !data->alsasink )
    {
        g_printerr ("\nNot all elements for audio pipeline were created\n");
        return FALSE;
    }    
    
    // Uncomment this if you want to see some debugging info
    //g_signal_connect( data->pipeline, "deep-notify", G_CALLBACK( gst_object_default_deep_notify ), NULL );    
    
    g_print("\nFile location: %s\n", data->filelocation);
    g_object_set (G_OBJECT (data->file_source), "location", data->filelocation, NULL);            
    
    data->bin_playback = gst_bin_new ("bin_playback");    
    
    if (strstr(data->filelocation, ".mp3"))
    {
        gst_bin_add_many(GST_BIN(data->bin_playback), data->file_source, data->audio_decoder, data->audioconvert, data->alsasink, NULL);
    
        if (gst_element_link_many (data->file_source, data->audio_decoder, NULL) != TRUE)
        {
            g_printerr("\nFile source and audio decoder element could not link\n");
            return FALSE;
        }
    
        if (gst_element_link_many (data->audio_decoder, data->audioconvert, NULL) != TRUE)
        {
            g_printerr("\nAudio decoder and audio converter element could not link\n");
            return FALSE;
        }
    
        if (gst_element_link_many (data->audioconvert, data->alsasink, NULL) != TRUE)
        {
            g_printerr("\nAudio converter and audio sink element could not link\n");
            return FALSE;
        }
    }
    
    if (strstr(data->filelocation, ".wav"))
    {
        gst_bin_add_many(GST_BIN(data->bin_playback), data->file_source, data->audio_decoder, data->alsasink, NULL);
    
        if (gst_element_link_many (data->file_source, data->audio_decoder, NULL) != TRUE)
        {
            g_printerr("\nFile source and audio decoder element could not link\n");
            return FALSE;
        }
    
        // Avoid checking of return value for linking of "wavparse" element and "alsasink" element
        // Refer http://stackoverflow.com/questions/3656051/unable-to-play-wav-file-using-gstreamer-apis
        
        gst_element_link_many (data->audio_decoder, data->alsasink, NULL);
        
        g_signal_connect(data->audio_decoder, "pad-added", G_CALLBACK(on_pad_added), data->alsasink);    
    }    
    
    return TRUE;
}

// Starts the pipeline
gboolean start_playback_pipe(gstData *data)
{
    // http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer/html/GstElement.html#gst-element-set-state
    gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
    // gst_element_get_state with GST_CLOCK_TIME_NONE blocks until the state change completes,
    // so check for failure once instead of spinning in a loop
    if (gst_element_get_state (data->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE)
    {
        g_printerr ("\nFailed to set pipeline state to PLAYING\n");
        return FALSE;
    }
    return TRUE;
}

// Add the playback bin to the pipeline
gboolean add_bin_playback_to_pipe(gstData *data)
{
    if((gst_bin_add(GST_BIN (data->pipeline), data->bin_playback)) != TRUE)
    {
        g_print("\nbin_playback not added to pipeline\n");
        return FALSE;    
    }
    
    if(gst_element_set_state (data->pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS)
    {        
        return TRUE;
    }
    else
    {
        g_print("\nFailed to set pipeline state to NULL\n");
        return FALSE;        
    }
}

// Remove the playback bin from the pipeline
void remove_bin_playback_from_pipe(gstData *data)
{
    gst_element_set_state (data->pipeline, GST_STATE_NULL);
    gst_element_set_state (data->bin_playback, GST_STATE_NULL);
    if((gst_bin_remove(GST_BIN (data->pipeline), data->bin_playback)) != TRUE)
    {
        g_print("\nbin_playback not removed from pipeline\n");
    }    
}

// Cleanup
void delete_pipeline(gstData *data)
{
    if (data->pipeline)
        gst_element_set_state (data->pipeline, GST_STATE_NULL);    
    if (data->bus)
        gst_object_unref (data->bus);
    if (data->pipeline)
        gst_object_unref (data->pipeline);    
}

// Function for checking the specific message on bus
// We look for EOS or Error messages
gboolean check_bus_cb(gstData *data)
{
    GError *err = NULL;                
    gchar *dbg = NULL;   
          
    g_print("\nGot message: %s\n", GST_MESSAGE_TYPE_NAME(data->message));
    switch(GST_MESSAGE_TYPE (data->message))
    {
        case GST_MESSAGE_EOS:       
            g_print ("\nEnd of stream... \n\n");
            exit_flag = TRUE;
            break;

        case GST_MESSAGE_ERROR:
            gst_message_parse_error (data->message, &err, &dbg);
            if (err)
            {
                g_printerr ("\nERROR: %s\n", err->message);
                g_error_free (err);
            }
            if (dbg)
            {
                g_printerr ("\nDebug details: %s\n", dbg);
                g_free (dbg);
            }
            exit_flag = TRUE;
            break;

        default:
            g_printerr ("\nUnexpected message of type %d\n", GST_MESSAGE_TYPE (data->message));
            break;
    }
    return TRUE;
}

int main(int argc, char *argv[])
{    
    if (argc != 2)
    {
        g_print("\nUsage: ./audiovf /home/root/filename.mp3\n");
        g_print("Usage: ./audiovf /home/root/filename.wav\n");
        g_print("Note: Number of bytes for file location: %d\n\n", NUMBER_OF_BYTES_FOR_FILE_LOCATION);
        return 1;
    }
    
    if ((!strstr(argv[1], ".mp3")) && (!strstr(argv[1], ".wav")))
    {
        g_print("\nOnly mp3 & wav files can be played\n");
        g_print("Specify the mp3 or wav file to be played\n");
        g_print("Usage: ./audiovf /home/root/filename.mp3\n");
        g_print("Usage: ./audiovf /home/root/filename.wav\n");
        g_print("Note: Number of bytes for file location: %d\n\n", NUMBER_OF_BYTES_FOR_FILE_LOCATION);
        return 1;
    }    
    
    // Initialise gstreamer. Mandatory first call before using any other gstreamer functionality
    gst_init (&argc, &argv);
    
    memset(gstreamerData.filelocation, 0, sizeof(gstreamerData.filelocation));
    // Copy at most NUMBER_OF_BYTES_FOR_FILE_LOCATION - 1 bytes so the string stays NUL-terminated
    strncpy(gstreamerData.filelocation, argv[1], NUMBER_OF_BYTES_FOR_FILE_LOCATION - 1);
    
    if (!create_pipeline(&gstreamerData))
        goto err;        
    
    if(init_audio_playback_pipeline(&gstreamerData))
    {    
        if(!add_bin_playback_to_pipe(&gstreamerData))
            goto err;        
        
        if(start_playback_pipe(&gstreamerData))
        {
            gstreamerData.bus = gst_element_get_bus (gstreamerData.pipeline);
            
            while (TRUE)
            {
                if (gstreamerData.bus)
                {    
                    // Check for End Of Stream or error messages on bus
                    // The global exit_flag will be set in case of EOS or error. Exit if the flag is set
                    gstreamerData.message = gst_bus_poll (gstreamerData.bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
                    if (gstreamerData.message)
                    {
                        check_bus_cb(&gstreamerData);
                        gst_message_unref (gstreamerData.message);
                    }
                }            
                
                if (exit_flag)
                    break;            
                
                sleep(1);                
            }                    
        }    
        remove_bin_playback_from_pipe(&gstreamerData);                    
    }    

err:    
    delete_pipeline(&gstreamerData);
    
    return 0;
}

A simple Makefile for compiling the code. You need to change the paths as per your OE setup.

#Notes for compilation:
#1. For compiling the code with this Makefile, an OE setup is mandatory.
#2. Before compiling, change the paths as per the setup of your environment.

CC = ${HOME}/oe-core/build/out-eglibc/sysroots/x86_64-linux/usr/bin/armv7ahf-vfp-neon-angstrom-linux-gnueabi/arm-angstrom-linux-gnueabi-gcc
INCLUDES = "-I${HOME}/oe-core/build/out-eglibc/sysroots/colibri-vf/usr/include" "-I${HOME}/oe-core/build/out-eglibc/sysroots/colibri-vf/usr/include/glib-2.0" "-I${HOME}/oe-core/build/out-eglibc/sysroots/colibri-vf/usr/lib/glib-2.0/include" "-I${HOME}/oe-core/build/out-eglibc/sysroots/colibri-vf/usr/include/gstreamer-0.10" "-I${HOME}/oe-core/build/out-eglibc/sysroots/colibri-vf/usr/include/libxml2"
LIB_PATH = "-L${HOME}/oe-core/build/out-eglibc/sysroots/colibri-vf/usr/lib"
LDFLAGS = -lpthread -lgobject-2.0 -lglib-2.0 -lgstreamer-0.10 -lgstapp-0.10
CFLAGS = -O3 -g --sysroot=${HOME}/oe-core/build/out-eglibc/sysroots/colibri-vf 

all:
    ${CC} ${CFLAGS} ${INCLUDES} ${LIB_PATH} ${LDFLAGS} -o audiovf audiovf.c

clean:
    rm -rf audiovf
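
With the paths adapted, building and deploying then looks something like this (the module IP
address and file name below are placeholders):

make
scp audiovf root@<module-ip>:/home/root/
# then, on the module:
./audiovf /home/root/sample.wav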

Extracting a frame from a GStreamer pipeline and displaying it with OpenCV

Not much to write or say in this post. I was trying to extract a frame from a GStreamer pipeline and then display it with OpenCV.

There are two approaches in the code below.

1. Register a callback function with appsink that is invoked whenever a new buffer becomes available, and then use a locking mechanism to synchronize extraction of the frame with its display in the main thread.

2. Pull the buffer yourself with gst_app_sink_pull_buffer in a while loop in the main thread.

The second one is active in the code below and the first one is commented out. To enable the first mechanism, uncomment the mutex locking and signal connect code and comment out the pull-buffer related code in the while loop.

Learn more about GStreamer from http://gstreamer.freedesktop.org/data/doc/gstreamer/head/manual/html/index.html and especially refer to section 19.

For some reason, I am experiencing a memory leak with the code below (more so with the first approach) and haven't got around to fixing it. Also, the GStreamer pipeline elements will be different for your platform. Another problem is that I get x-raw-yuv data from my GStreamer source element and am only able to display a black-and-white image with OpenCV. Nonetheless, I thought this might be useful, and maybe someone can point out the error to me. Not a GStreamer expert by any means.


#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv/cv.h>
#include <gstreamer-0.10/gst/gst.h>
#include <gstreamer-0.10/gst/gstelement.h>
#include <gstreamer-0.10/gst/app/gstappsink.h>
#include <iostream>
#include <stdio.h>
#include <unistd.h>
#include <pthread.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>

using namespace std;
using namespace cv;

/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData
{
    GstElement *appsink;
    GstElement *colorSpace;    
    GstElement *pipeline;
    GstElement *vsource_capsfilter, *mixercsp_capsfilter, *cspappsink_capsfilter;
    GstElement *mixer_capsfilter;
    GstElement *bin_capture;
    GstElement *video_source, *deinterlace;     
    GstElement *nv_video_mixer;    
    GstPad *pad;
    GstCaps *srcdeinterlace_caps, *mixercsp_caps, *cspappsink_caps;    
    GstBus *bus;
    GstMessage *msg;        
}gstData;

GstBuffer* buffer;        

pthread_mutex_t threadMutex = PTHREAD_MUTEX_INITIALIZER;
pthread_cond_t waitForGstBuffer = PTHREAD_COND_INITIALIZER; 

/* Global variables */
CascadeClassifier face_cascade;
IplImage *frame = NULL;     
string window_name =         "Toradex Face Detection Demo";
String face_cascade_name =    "/home/root/haarcascade_frontalface_alt2.xml";
const int BORDER =             8;          // Border between GUI elements to the edge of the image.

template <typename T> string toString(T t)
{
    ostringstream out;
    out << t;
    return out.str();
}

// Draw text into an image. Defaults to top-left-justified text, but you can give negative x coords for right-justified text,
// and/or negative y coords for bottom-justified text
// Returns the bounding rect around the drawn text
Rect drawString(Mat img, string text, Point coord, Scalar color, float fontScale = 0.6f, int thickness = 1, int fontFace = FONT_HERSHEY_COMPLEX)
{
    // Get the text size & baseline.
    int baseline = 0;
    Size textSize = getTextSize(text, fontFace, fontScale, thickness, &baseline);
    baseline += thickness;

    // Adjust the coords for left/right-justified or top/bottom-justified.
    if (coord.y >= 0) {
        // Coordinates are for the top-left corner of the text from the top-left of the image, so move down by one row.
        coord.y += textSize.height;
    }
    else {
        // Coordinates are for the bottom-left corner of the text from the bottom-left of the image, so come up from the bottom.
        coord.y += img.rows - baseline + 1;
    }
    // Become right-justified if desired.
    if (coord.x < 0) {
        coord.x += img.cols - textSize.width + 1;
    }

    // Get the bounding box around the text.
    Rect boundingRect = Rect(coord.x, coord.y - textSize.height, textSize.width, baseline + textSize.height);

    // Draw anti-aliased text.
    putText(img, text, coord, fontFace, fontScale, color, thickness, CV_AA);

    // Let the user know how big their text is, in case they want to arrange things.
    return boundingRect;
}

void create_pipeline(gstData *data)
{
    data->pipeline = gst_pipeline_new ("pipeline");
    gst_element_set_state (data->pipeline, GST_STATE_NULL);
}

gboolean CaptureGstBuffer(GstAppSink *sink, gstData *data)
{            
    //g_signal_emit_by_name (sink, "pull-buffer", &buffer);
    pthread_mutex_lock(&threadMutex);
    buffer = gst_app_sink_pull_buffer(sink);
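    // NOTE: the buffer pulled here is never unreffed, and cvCreateImage() below allocates a
    // fresh header plus pixel data on every callback without a matching cvReleaseImage().
    // Both are likely contributors to the memory leak mentioned above.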
    if (buffer)
    {        
        frame = cvCreateImage(cvSize(720, 576), IPL_DEPTH_16U, 3);
        if (frame == NULL)
        {
            g_printerr("IplImageFrame is null.\n");
        }
        else
        {
            //buffer = gst_app_sink_pull_buffer(sink);
            frame->imageData = (char*)GST_BUFFER_DATA(buffer);        
            if (frame->imageData == NULL)
            {
                g_printerr("IplImage data is null.\n");        
            }
        }        
        pthread_cond_signal(&waitForGstBuffer);            
    }            
    pthread_mutex_unlock(&threadMutex);
    return TRUE;
}

gboolean init_video_capture(gstData *data)
{    
    data->video_source = gst_element_factory_make("v4l2src", "video_source_live");
    data->vsource_capsfilter = gst_element_factory_make ("capsfilter", "vsource_cptr_capsfilter");
    data->deinterlace = gst_element_factory_make("deinterlace", "deinterlace_live");
    data->nv_video_mixer = gst_element_factory_make("nv_omx_videomixer", "nv_video_mixer_capture");    
    data->mixercsp_capsfilter = gst_element_factory_make ("capsfilter", "mixercsp_capsfilter");
    data->colorSpace = gst_element_factory_make("ffmpegcolorspace", "csp");        
    data->cspappsink_capsfilter = gst_element_factory_make ("capsfilter", "cspappsink_capsfilter");
    data->appsink = gst_element_factory_make("appsink", "asink");
        
    if (!data->video_source || !data->vsource_capsfilter || !data->deinterlace || !data->nv_video_mixer || !data->mixercsp_capsfilter || !data->appsink \
        || !data->colorSpace || !data->cspappsink_capsfilter)
    {
        g_printerr ("Not all elements for video were created.\n");
        return FALSE;
    }        
    
    g_signal_connect( data->pipeline, "deep-notify", G_CALLBACK( gst_object_default_deep_notify ), NULL );        
    
    // Configure appsink: emit the new-buffer signal, queue at most one buffer, and
    // drop older buffers so a slow consumer cannot stall the capture pipeline
    gst_app_sink_set_emit_signals((GstAppSink*)data->appsink, true);
    gst_app_sink_set_drop((GstAppSink*)data->appsink, true);
    gst_app_sink_set_max_buffers((GstAppSink*)data->appsink, 1);    
    
    data->srcdeinterlace_caps = gst_caps_from_string("video/x-raw-yuv, width=(int)720, height=(int)576, format=(fourcc)I420, framerate=(fraction)1/1");        
    if (!data->srcdeinterlace_caps)
        g_printerr("1. Could not create media format string.\n");        
    g_object_set (G_OBJECT (data->vsource_capsfilter), "caps", data->srcdeinterlace_caps, NULL);
    gst_caps_unref(data->srcdeinterlace_caps);        
    
    data->mixercsp_caps = gst_caps_from_string("video/x-raw-yuv, width=(int)720, height=(int)576, format=(fourcc)I420, framerate=(fraction)1/1, pixel-aspect-ratio=(fraction)1/1");    
    if (!data->mixercsp_caps)
        g_printerr("2. Could not create media format string.\n");        
    g_object_set (G_OBJECT (data->mixercsp_capsfilter), "caps", data->mixercsp_caps, NULL);
    gst_caps_unref(data->mixercsp_caps);    
    
    data->cspappsink_caps = gst_caps_from_string("video/x-raw-yuv, width=(int)720, height=(int)576, format=(fourcc)I420, framerate=(fraction)1/1");        
    if (!data->cspappsink_caps)
        g_printerr("3. Could not create media format string.\n");        
    g_object_set (G_OBJECT (data->cspappsink_capsfilter), "caps", data->cspappsink_caps, NULL);    
    gst_caps_unref(data->cspappsink_caps);        
            
    data->bin_capture = gst_bin_new ("bin_capture");        
    
    /*if(g_signal_connect(data->appsink, "new-buffer", G_CALLBACK(CaptureGstBuffer), NULL) <= 0)
    {
        g_printerr("Could not connect signal handler.\n");
        exit(1);
    }*/
    
    gst_bin_add_many (GST_BIN (data->bin_capture), data->video_source, data->vsource_capsfilter, data->deinterlace, data->nv_video_mixer, \
                        data->mixercsp_capsfilter, data->colorSpace, data->cspappsink_capsfilter, data->appsink, NULL);
    
    if (gst_element_link_many(data->video_source, data->vsource_capsfilter, data->deinterlace, NULL) != TRUE)
    {
        g_printerr ("video_src to deinterlace not linked.\n");
        return FALSE;
    }        
    
    if (gst_element_link_many (data->deinterlace, data->nv_video_mixer, NULL) != TRUE)
    {
        g_printerr ("deinterlace to video_mixer not linked.\n");
        return FALSE;
    }        
    
    if (gst_element_link_many (data->nv_video_mixer, data->mixercsp_capsfilter, data->colorSpace, NULL) != TRUE)
    {
        g_printerr ("video_mixer to colorspace not linked.\n");
        return FALSE;    
    }
    
    if (gst_element_link_many (data->colorSpace, data->appsink, NULL) != TRUE)
    {
        g_printerr ("colorspace to appsink not linked.\n");
        return FALSE;    
    }
    
    cout << "Returns from init_video_capture." << endl;
    return TRUE;
}

void delete_pipeline(gstData *data)
{
    gst_element_set_state (data->pipeline, GST_STATE_NULL);
    g_print ("Pipeline set to NULL\n");
    // Guard the unref: bus is only set if get_pipeline_bus() was actually called
    if (data->bus)
        gst_object_unref (data->bus);
    gst_object_unref (data->pipeline);
    g_print ("Pipeline deleted\n");
}

gboolean add_bin_capture_to_pipe(gstData *data)
{
    if((gst_bin_add(GST_BIN (data->pipeline), data->bin_capture)) != TRUE)
    {
        g_print("bin_capture not added to pipeline\n");
        return FALSE;
    }
    
    if(gst_element_set_state (data->pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS)
    {        
        return TRUE;
    }
    else
    {
        cout << "Failed to set pipeline state to NULL." << endl;
        return FALSE;        
    }
}

gboolean remove_bin_capture_from_pipe(gstData *data)
{
    gst_element_set_state (data->pipeline, GST_STATE_NULL);
    gst_element_set_state (data->bin_capture, GST_STATE_NULL);
    if((gst_bin_remove(GST_BIN (data->pipeline), data->bin_capture)) != TRUE)
    {
        g_print("bin_capture not removed from pipeline\n");
    }    
    return TRUE;
}

gboolean start_capture_pipe(gstData *data)
{
    // A live source such as v4l2src typically returns GST_STATE_CHANGE_NO_PREROLL or ASYNC
    // here, so treat anything other than outright failure as success
    if(gst_element_set_state (data->pipeline, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE)
        return TRUE;
    else
    {
        cout << "Failed to set pipeline state to PLAYING." << endl;
        return FALSE;
    }
}

gboolean stop_capture_pipe(gstData *data)
{
    gst_element_set_state (data->bin_capture, GST_STATE_NULL);
    gst_element_set_state (data->pipeline, GST_STATE_NULL);
    return TRUE;
}

gboolean deinit_video_live(gstData *data)
{
    gst_element_set_state (data->pipeline, GST_STATE_NULL);
    gst_element_set_state (data->bin_capture, GST_STATE_NULL);
    gst_object_unref (data->bin_capture);
    return TRUE;
}

gboolean check_bus_cb(gstData *data)
{
    GError *err = NULL;                
    gchar *dbg = NULL;   
          
    g_print("Got message: %s\n", GST_MESSAGE_TYPE_NAME(data->msg));
    switch(GST_MESSAGE_TYPE (data->msg))
    {
        case GST_MESSAGE_EOS:       
            g_print ("END OF STREAM... \n");
            break;

        case GST_MESSAGE_ERROR:
            gst_message_parse_error (data->msg, &err, &dbg);
            if (err)
            {
                g_printerr ("ERROR: %s\n", err->message);
                g_error_free (err);
            }
            if (dbg)
            {
                g_printerr ("[Debug details: %s]\n", dbg);
                g_free (dbg);
            }
            break;

        default:
            g_printerr ("Unexpected message of type %d", GST_MESSAGE_TYPE (data->msg));
            break;
    }
    return TRUE;
}

void get_pipeline_bus(gstData *data)
{
    data->bus = gst_element_get_bus (data->pipeline);
    data->msg = gst_bus_poll (data->bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
    if (data->msg)
    {
        check_bus_cb(data);
        gst_message_unref (data->msg);
    }
}

int main(int argc, char *argv[])
{        
    //Mat frame;
    VideoCapture capture;    
    gstData gstreamerData = {};     // Zero-initialise so unused members such as bus start out NULL
    GstBuffer *gstImageBuffer;
    
    //XInitThreads();
    gst_init (&argc, &argv);
    create_pipeline(&gstreamerData);
    if(init_video_capture(&gstreamerData))
    {        
        add_bin_capture_to_pipe(&gstreamerData);    
        start_capture_pipe(&gstreamerData);
        //get_pipeline_bus(&gstreamerData);    
    
        cout << "Starting while loop..." << endl;
        cvNamedWindow("Toradex Face Detection Demo with Gstreamer", 0);    
    
        while(true)
        {    
            //pthread_mutex_lock(&threadMutex);
            //pthread_cond_wait(&waitForGstBuffer, &threadMutex);
            
            gstImageBuffer = gst_app_sink_pull_buffer((GstAppSink*)gstreamerData.appsink);
        
            if (gstImageBuffer != NULL)
            {        
                frame = cvCreateImage(cvSize(720, 576), IPL_DEPTH_8U, 1);
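                // cvCreateImage() allocates both the header and pixel data; overwriting
                // imageData below leaks that allocation on every iteration. Wrapping the
                // GstBuffer with cvCreateImageHeader() + cvSetData() would avoid this.
                // Also, treating the I420 buffer as a single 8-bit plane shows only the
                // luma channel, which is why the displayed image is black and white.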
                    
                if (frame == NULL)
                {
                    g_printerr("IplImageFrame is null.\n");
                }
                else
                {        
                    frame->imageData = (char*)GST_BUFFER_DATA(gstImageBuffer);        
                    if (frame->imageData == NULL)
                    {
                        g_printerr("IplImage data is null.\n");            
                    }                    
                    cvShowImage("Toradex Face Detection Demo with Gstreamer", frame);  
                    cvWaitKey(1);                    
                    gst_buffer_unref(gstImageBuffer);
                }
            }
            else
            {
                cout << "Appsink buffer didn't return buffer." << endl;
            }
            /*
            if (frame)
            {
                cvShowImage("Toradex Face Detection Demo with Gstreamer", frame);
            }
            gst_buffer_unref(buffer);
            buffer = NULL;            
            pthread_mutex_unlock(&threadMutex);    
            cvWaitKey(1);*/                                    
        }
    }
    else
    {
        exit(1);
    }
              
    //Destroy the window
    cvDestroyWindow("Toradex Face Detection Demo with Gstreamer");
    remove_bin_capture_from_pipe(&gstreamerData);
    deinit_video_live(&gstreamerData);
    delete_pipeline(&gstreamerData);

    return 0;
}