I have the following working pipeline. It has been tested with both the command-line tool gst-launch-1.0 and the gst_parse_launch() function, and it works in both cases.

videotestsrc ! video/x-raw,width=640,height=480 ! videocrop left=80 right=80 ! tee name=t ! queue ! glupload ! glimagesink t. ! queue ! jpegenc ! avimux ! filesink location=output.avi
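
For reference, the gst_parse_launch() version is essentially just:

GError *error = NULL;
GstElement *pipeline = gst_parse_launch(
    "videotestsrc ! video/x-raw,width=640,height=480 ! videocrop left=80 right=80 "
    "! tee name=t ! queue ! glupload ! glimagesink "
    "t. ! queue ! jpegenc ! avimux ! filesink location=output.avi",
    &error);
gst_element_set_state(pipeline, GST_STATE_PLAYING);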

I've tried to set it up manually in code, but I'm now stuck on the following error (the application opens, but no video is displayed):

Error received from element videotestsrc0 : Internal data flow error.

Debugging information: gstbasesrc.c(2948): gst_base_src_loop (): /GstPipeline:pipeline0/GstVideoTestSrc:videotestsrc0: streaming task paused, reason not-negotiated (-4)

I'm using GStreamer in a Qt application, and the sink links the video to a QML type. All GStreamer-related code lives in a class called GStreamer. The entire .cpp file is posted below, in case the issue is somewhere I wouldn't guess; I apologize for the non-relevant code.

static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data);

GStreamer::GStreamer(QQuickItem *parent) : QQuickItem(parent)
{
    qDebug() << "Constructed GSteamer";        
}    

void GStreamer::createPipeline()
{
    qDebug() << "Creating pipeline";      

    if(m_source.isEmpty()){
        qDebug() << "Error: Missing source property for GStreamer component";
        return;
    }

    if(m_videoItem.isEmpty()){
        qDebug() << "Error: Missing videoItem property for GStreamer component";
        return;
    }

    m_pipeline = gst_pipeline_new(NULL);
    m_sink = NULL;

    QByteArray ba = m_source.toLatin1();
    m_src = gst_element_factory_make(ba.data(), NULL);
    g_assert(m_src);


    m_filter = gst_element_factory_make("capsfilter", "filter");
    g_assert(m_filter);

    g_object_set(G_OBJECT (m_filter), "caps", gst_caps_new_simple("video/x-raw", 
        "width", G_TYPE_INT, 640,
        "height", G_TYPE_INT, 480,
        NULL), 
    NULL);

    m_convert = gst_element_factory_make("videoconvert", NULL);
    g_assert(m_convert);

    m_crop = gst_element_factory_make("videocrop", "crop");
    g_assert(m_crop);

    g_object_set(G_OBJECT (m_crop), "left", 80, "right", 80, NULL);

    // Tee
    m_tee = gst_element_factory_make("tee", "videotee");
    g_assert(m_tee);

    // Display queue
    m_displayQueue = gst_element_factory_make("queue", "displayQueue");
    g_assert(m_displayQueue);    

    m_upload = gst_element_factory_make("glupload", NULL);
    g_assert(m_upload);    

    m_sink = gst_element_factory_make("qmlglsink", NULL);
    g_assert(m_sink);

    // Record queue
    m_recordQueue = gst_element_factory_make("queue", "recordQueue");
    g_assert(m_recordQueue);

    m_encode = gst_element_factory_make("jpegenc", NULL);
    g_assert(m_encode);

    m_mux = gst_element_factory_make("avimux", NULL);
    g_assert(m_mux);

    m_filesink = gst_element_factory_make("filesink", NULL);
    g_assert(m_filesink);

    g_object_set(G_OBJECT(m_filesink), "location", "output.avi", NULL);    

    gst_bin_add_many(GST_BIN (m_pipeline), m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL);
    gst_bin_add_many(GST_BIN(m_pipeline), m_tee, m_displayQueue, m_recordQueue, m_encode, m_mux, m_filesink, NULL);

    // If I only link this simple pipeline, it works fine
    /*
   if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL)){
        qDebug() << "Unable to link source";
    }
    */

    if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_tee, NULL)){
        qDebug() << "Unable to link source";
    }
    if(!gst_element_link_many(m_displayQueue, m_upload, m_sink, NULL)){
        qDebug() << "Unable to link display queue";
    }
    if(!gst_element_link_many(m_recordQueue, m_encode, m_mux, m_filesink, NULL)){
        qDebug() << "Unable to link record queue";
    }    

    GstPad *teeDisplayPad = gst_element_get_request_pad(m_tee, "src_%u");
    GstPad *queueDisplayPad = gst_element_get_static_pad(m_displayQueue, "sink");

    GstPad *teeRecordPad = gst_element_get_request_pad(m_tee, "src_%u");
    GstPad *queueRecordPad = gst_element_get_static_pad(m_recordQueue, "sink");

    if(gst_pad_link(teeDisplayPad, queueDisplayPad) != GST_PAD_LINK_OK){
        qDebug() << "Unable to link display tee";
    }

    if(gst_pad_link(teeRecordPad, queueRecordPad) != GST_PAD_LINK_OK){
        qDebug() << "Unable to link record tee";
    }

    //gst_object_unref(teeDisplayPad);
    gst_object_unref(queueDisplayPad);
    //gst_object_unref(teeRecordPad);
    gst_object_unref(queueRecordPad);    

    QQuickItem *videoItem = window()->findChild<QQuickItem *> (m_videoItem);
    g_object_set(m_sink, "widget", videoItem, NULL);

    // This will call gst_element_set_state(m_pipeline, GST_STATE_PLAYING) when the window is ready
    window()->scheduleRenderJob (new SetPlaying (m_pipeline), QQuickWindow::BeforeSynchronizingStage);    

    // Create the loop before handing it to the bus watch, otherwise the
    // callback receives an uninitialized pointer as its data argument.
    m_loop = g_main_loop_new(NULL, false);

    // m_bus is unreffed in the destructor.
    m_bus = gst_element_get_bus(m_pipeline);
    gst_bus_add_watch(m_bus, busCallback, m_loop);

    g_main_loop_run(m_loop);
}

static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data){
    qDebug() << "Callback function reached";
    switch(GST_MESSAGE_TYPE(message)){
        case GST_MESSAGE_ERROR: {
            GError *error = NULL;
            gchar *debugInfo = NULL;
            gst_message_parse_error(message, &error, &debugInfo);
            qDebug() << "Error received from element" << GST_OBJECT_NAME(message->src) << ":" << error->message;
            qDebug() << "Debugging information:" << (debugInfo ? debugInfo : "none");
            g_clear_error(&error);
            g_free(debugInfo);

            g_main_loop_quit(static_cast<GMainLoop *>(data));
            break;
        }
        case GST_MESSAGE_EOS:
            qDebug() << "End-Of-Stream reached.";
            g_main_loop_quit(static_cast<GMainLoop *>(data)); 
            break;
        default:
            qDebug() << "Unexpected message received."; 
            break;
    }
    return true;
}

/**
The rest of the code is probably not relevant. It contains 
only destructor and some getters and setters.
**/

GStreamer::~GStreamer()
{
    gst_object_unref(m_bus);
    gst_element_set_state(m_pipeline, GST_STATE_NULL);
    gst_object_unref(m_pipeline);
}

QString GStreamer::source() const
{
    return m_source;
}

void GStreamer::setSource(const QString &source)
{
    if(source != m_source){
        m_source = source;
    }
}

QString GStreamer::videoItem() const
{
    return m_videoItem;
}

void GStreamer::setVideoItem(const QString &videoItem)
{
    if(videoItem != m_videoItem){
        m_videoItem = videoItem;
    }
}
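
For reference, SetPlaying (used in createPipeline) is a small QRunnable along the lines of the one in the GStreamer qmlglsink example; it just sets the pipeline to PLAYING when it runs:

// Requires <QRunnable> and <gst/gst.h>
class SetPlaying : public QRunnable
{
public:
    SetPlaying(GstElement *pipeline)
        : m_pipeline(pipeline ? GST_ELEMENT(gst_object_ref(pipeline)) : NULL) {}
    ~SetPlaying() { if (m_pipeline) gst_object_unref(m_pipeline); }

    // Executed by the scene graph render thread (scheduled via scheduleRenderJob)
    void run() override
    {
        if (m_pipeline)
            gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
    }

private:
    GstElement *m_pipeline;
};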

All member variables are defined in the .h file.

If I don't add the tee element to the bin and link it into the pipeline, the video shows up on the screen as expected, so I guess I'm messing up the pads on the tee element.

I've been following the tutorials in GStreamer's documentation, so I don't understand why it's not working.

Hope someone can help.

1 Answer


OK, so the difference between the gst-launch line provided and the application code is the use of the qmlglsink element in place of glimagesink.

The problem is that qmlglsink only accepts RGBA-formatted video buffers, while jpegenc in the other branch of the tee does not accept RGBA. This leads to a negotiation failure, as there is no common format supported by both branches of the tee.

The fix is to add a videoconvert element before jpegenc, or a glcolorconvert element before qmlglsink, so that both branches of the tee can negotiate a common video format.
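
In code that means creating one more element and inserting it into the record branch, for example (reusing the variable names from the question; recordConvert is just a new local):

// Convert the raw video coming out of the tee into a format jpegenc accepts
GstElement *recordConvert = gst_element_factory_make("videoconvert", NULL);
g_assert(recordConvert);

gst_bin_add(GST_BIN(m_pipeline), recordConvert);

if(!gst_element_link_many(m_recordQueue, recordConvert, m_encode, m_mux, m_filesink, NULL)){
    qDebug() << "Unable to link record queue";
}

You can compare the formats each element supports with gst-inspect-1.0 qmlglsink and gst-inspect-1.0 jpegenc.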

Side note: glimagesink internally contains glupload ! glcolorconvert ! actual-sink, so it is already converting video formats.
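
So conceptually, the graph you are after is something like this (display branch unchanged, record branch gains a videoconvert):

videotestsrc ! capsfilter ! videoconvert ! videocrop ! tee name=t
    t. ! queue ! glupload ! qmlglsink
    t. ! queue ! videoconvert ! jpegenc ! avimux ! filesink location=output.avi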