GStreamer: seek on a video and save it to a local file

I'm trying to create a basic GStreamer pipeline where I read a local video, seek within it, and finally save the seeked portion to a file.

The simplest pipeline I had in mind uses filesrc, decodebin, a muxer (matroskamux, mp4mux or something similar) and filesink as GStreamer elements. I wrote that pipeline with the GStreamer C library (not with the gst-launch-1.0 tool), but I ran into several problems.
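
For clarity, here is roughly the element chain I have in mind, sketched with gst_parse_launch() just to show the structure; the file names are placeholders and no seek is performed yet, so this is only an illustrative sketch, not my actual program:

#include <gst/gst.h>

// Minimal sketch of the conceptual pipeline: decode the input file,
// re-encode it and mux it into an MP4 container.
int main(int argc, char *argv[]) {
    GError *error = NULL;

    gst_init(&argc, &argv);

    GstElement *pipeline = gst_parse_launch(
        "filesrc location=myVideo.mkv ! decodebin ! videoconvert ! "
        "x264enc ! mp4mux ! filesink location=out.mp4", &error);
    if (pipeline == NULL) {
        g_printerr("Could not build pipeline: %s\n", error ? error->message : "unknown");
        g_clear_error(&error);
        return -1;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    // Wait until an error occurs or the end of the stream is reached
    GstBus *bus = gst_element_get_bus(pipeline);
    GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
            (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (msg != NULL)
        gst_message_unref(msg);

    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}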

The main problem is that the generated output file is unplayable in any media player (in practice it contains raw data). I think the problem is related to the seek, because if I remove the line where I perform the seek the video is saved correctly.

I also noticed that with some muxers, like matroskamux, gst_element_seek(...) returns FALSE on the same video stream.
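
As a side note, the seeking query can also be issued against a single element to see what it reports; this is only a rough diagnostic sketch (the element passed in would be, for example, the matroskamux instance, once the pipeline has reached PAUSED or PLAYING):

#include <gst/gst.h>

// Rough diagnostic sketch: ask one element (for example the muxer) whether
// it reports TIME-format seekability and, if so, over which range.
static void print_seeking_support(GstElement *element) {
    gboolean seekable = FALSE;
    gint64 start = -1, end = -1;
    GstQuery *query = gst_query_new_seeking(GST_FORMAT_TIME);

    if (gst_element_query(element, query)) {
        gst_query_parse_seeking(query, NULL, &seekable, &start, &end);
        g_print("%s: seekable=%d, range %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT "\n",
                GST_ELEMENT_NAME(element), seekable,
                GST_TIME_ARGS(start), GST_TIME_ARGS(end));
    } else {
        g_print("%s: seeking query not handled\n", GST_ELEMENT_NAME(element));
    }
    gst_query_unref(query);
}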

This is the code I used for this task (here the pipeline is slightly different from the one described above, but the concept is the same):

#include <gst/gst.h>

typedef struct _CustomData {
    GstElement* pipeline;
    GstElement* source;
    GstElement* decoder;
    GstElement* video_converter;
    GstElement* encoder;
    GstElement* parser;
    GstElement* muxer;
    GstElement* sink;

    gboolean playing;
    gboolean terminate;
    gboolean seek_enabled;
    gboolean seek_done;
    gint64 duration;

} CustomData;

static void pad_added_handler(GstElement* src, GstPad* pad, GstElement* data);

static void handle_message(CustomData* data, GstMessage* msg) {
    GError* err;
    gchar* debug_info;

    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_ERROR:
            gst_message_parse_error(msg, &err, &debug_info);
            g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
            g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
            g_clear_error(&err);
            g_free(debug_info);
            data->terminate = TRUE;
            break;
        case GST_MESSAGE_EOS:
            g_print("\nEOS reached.\n");
            data->terminate = TRUE;
            break;
        case GST_MESSAGE_DURATION_CHANGED:
            // The duration has changed, mark the current one as invalid
            data->duration = GST_CLOCK_TIME_NONE;
            break;
        case GST_MESSAGE_STATE_CHANGED:
            // We are only interested in state-changed messages coming from the pipeline
            if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->pipeline)) {
                GstState old_state, new_state, pending_state;
                gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
                g_print("Pipeline state change from %s to %s. \n", gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));

                data->playing = (new_state == GST_STATE_PLAYING);
                if (data->playing) {
                    // Check if seeking is possible
                    GstQuery* query;
                    gint64 start, end;
                    query = gst_query_new_seeking(GST_FORMAT_TIME);

                    if (gst_element_query(data->pipeline, query)) {
                        gst_query_parse_seeking(query, NULL, &data->seek_enabled, &start, &end);
                        if (data->seek_enabled) {
                            g_print("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
                                    GST_TIME_ARGS(start), GST_TIME_ARGS(end));
                        }
                        else {
                            g_print("Seeking is DISABLED for this stream\n");
                        }
                    }
                    else {
                        g_printerr("Seeking query failed\n");
                    }
                }
            }
            break;
        default:
            g_printerr("Unexpected message received.\n");
            break;
    }

    // The message was popped from the bus in main(), so it must be freed here
    gst_message_unref(msg);
}

static void seek_to_time(GstElement *pipeline) {
    // Flushing seek in TIME format at normal rate (1.0): start at 5 s, stop at 20 s
    if (!gst_element_seek(pipeline, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
                          GST_SEEK_TYPE_SET, 5 * GST_SECOND,
                          GST_SEEK_TYPE_SET, 20 * GST_SECOND)) {
        g_print("\n!!SEEK FAILED!!\n");
    } else {
        g_print("\nPERFORMING SEEK!!\n");
    }
}

int main(int argc, char *argv[]){
    CustomData data;
    GstBus *bus;
    GstMessage* msg;
    GstStateChangeReturn ret; // The last return value of an element state change

    data.playing = FALSE;
    data.terminate = FALSE;
    data.seek_enabled = FALSE;
    data.seek_done = FALSE;
    data.duration = GST_CLOCK_TIME_NONE;

    gst_init(&argc, &argv);

    data.source = gst_element_factory_make("filesrc", "source");
    g_object_set(data.source, "location", "myVideo.mkv", NULL);
    data.decoder = gst_element_factory_make("decodebin", "decoder");
    data.video_converter = gst_element_factory_make("videoconvert", "converter");
    data.encoder = gst_element_factory_make("x264enc", "encoder");
    data.muxer = gst_element_factory_make("mp4mux", "muxer");
    data.sink = gst_element_factory_make("filesink", "sink");
    g_object_set(data.sink, "location", "seeked_video.mp4", NULL);
    data.pipeline = gst_pipeline_new("retrieve-pipeline");

    if (!data.pipeline || !data.source || !data.decoder || !data.video_converter || !data.encoder || !data.muxer || !data.sink) {
        g_printerr("Not all elements could be created. \n");
        return -1;
    }

    gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.decoder, data.video_converter, data.encoder, data.muxer, data.sink, NULL);

    // Connect each element of the pipeline
    if(!gst_element_link_many(data.source, data.decoder, NULL)) {
        g_printerr("Source could not be linked to the decoder. \n");
        gst_object_unref(data.pipeline);
        return -1;
    }
    // decodebin must be linked at runtime ("pad-added"), since its source pads are created dynamically and not when the element is constructed
    g_signal_connect(data.decoder, "pad-added", G_CALLBACK(pad_added_handler), data.video_converter);
    if(!gst_element_link_many(data.video_converter,  data.encoder, data.muxer, data.sink, NULL)) {
        g_printerr("Converter and/or Muxer could not be linked to the sink. \n");
        gst_object_unref(data.pipeline);
        return -1;
    }

    // Start playing
    ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }

    bus = gst_element_get_bus(data.pipeline);
    do{
        msg = gst_bus_timed_pop_filtered(bus, 2 * GST_MSECOND, (GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION_CHANGED));
        if(msg != NULL){
            handle_message(&data, msg);
        }else {
            if(data.playing){
                gint64 current = -1;

                // Query current position of the stream
                if (!gst_element_query_position(data.pipeline, GST_FORMAT_TIME, &current)) {
                    g_printerr("Could not query current position.\n");
                }

                // If we don't know the stream duration yet, query it
                if (!GST_CLOCK_TIME_IS_VALID(data.duration)) {
                    if (!gst_element_query_duration(data.pipeline, GST_FORMAT_TIME, &data.duration)) {
                        g_printerr("Could not query current duration.\n");
                    }
                }

                // Print current position and total duration
                g_print("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
                        GST_TIME_ARGS(current), GST_TIME_ARGS(data.duration));

                if(data.seek_enabled && !data.seek_done){
                    seek_to_time(data.pipeline);
                    data.seek_done = TRUE;
                }
            }
        }
    }while(!data.terminate);

    // Free resources
    gst_object_unref(bus);
    gst_element_set_state(data.pipeline, GST_STATE_NULL);
    gst_object_unref(data.pipeline);
    return 0;
}

static void pad_added_handler(GstElement* src, GstPad* new_pad, GstElement* data) {
    GstPad* sink_pad = gst_element_get_static_pad(data, "sink");
    GstPadLinkReturn ret;
    GstCaps* new_pad_caps = NULL;
    GstStructure* new_pad_struct = NULL;
    const gchar* new_pad_type = NULL;

    g_print("\nReceived new pad '%s' from '%s'.\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src));

    if (gst_pad_is_linked(sink_pad)) {
        g_print("Pads are already linked. Ignoring. \n");
        gst_object_unref(sink_pad);
        return;
    }
    else {
        g_print("Processing to linking.\n");
    }

    // Check the new pad's type: decodebin may also expose audio pads,
    // so only raw video pads should be linked to the video converter
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);
    g_print("New pad type %s \n", new_pad_type);

    if (!g_str_has_prefix(new_pad_type, "video/x-raw")) {
        g_print("Pad type is '%s', not raw video. Ignoring.\n", new_pad_type);
    }
    else {
        ret = gst_pad_link(new_pad, sink_pad);
        g_print("%s -> %s\n", GST_PAD_NAME(new_pad), GST_PAD_NAME(sink_pad));
        if (GST_PAD_LINK_FAILED(ret)) {
            g_print("Type is '%s' but dynamic linking failed.\n", new_pad_type);
        }
        else {
            g_print("Dynamic linking succeeded (type '%s').\n", new_pad_type);
        }
    }

    if (new_pad_caps != NULL)
        gst_caps_unref(new_pad_caps);

    // Unreference the sink pad
    gst_object_unref(sink_pad);

}

Am I missing something? Do you know if it is possible to seek on a video read from filesrc and save the seeked video, in a playable format, to a local file?



Sources

Source: Stack Overflow, licensed under CC BY-SA 3.0.