Audiomixer doesn't work

Johan Basore
Hello everyone,

I am working with audiomixer and ran into something strange.
If I use audiomixer, the pipeline doesn't play; it just stays in the READY state. But if I replace audiomixer with adder, it plays.
Does audiomixer need any special configuration?

Here is my code:

****************************************************************
    GstBus *bus;
    GSource *bus_source;
   
    GstElement *source1, *source2, *clrspace1, *clrspace2, *clrspace, *h264enc, *mp4mux, *sink;
    GstElement *videomixer;
    GstElement *decodebin1, *decodebin2;
    GstCaps *filtercaps1, *filtercaps2, *outfiltercaps;
    GstElement *filter1, *filter2, *outfilter;
    GstElement *videoscale1, *videoscale2;
    GstElement *audioconvert1, *audioconvert2, *audioconvert, *voaacenc;
    GstElement *audiomixer;
    GstElement *audioresample1, *audioresample2;
    GSource *timeout_source;
   
    GST_DEBUG ("Creating pipeline");
   
    /* Create our own GLib Main Context and make it the default one */
    context = g_main_context_new ();
    g_main_context_push_thread_default(context);
   
    /**********************************************/
   
    /* Build pipeline */
    pipeline = gst_pipeline_new("pipelinecomposer");
    source1 = gst_element_factory_make("filesrc", "source1");
    source2 = gst_element_factory_make("filesrc", "source2");
    videomixer = gst_element_factory_make("videomixer", "videomixer");
    clrspace1 = gst_element_factory_make("videoconvert", "clrspace1");
    clrspace2 = gst_element_factory_make("videoconvert", "clrspace2");
    clrspace = gst_element_factory_make("videoconvert", "clrspace");
    sink = gst_element_factory_make("filesink", "sink");
    h264enc = gst_element_factory_make("vtenc_h264", "h264enc");
    mp4mux = gst_element_factory_make("mp4mux", "mpegtsmux");
    decodebin1 = gst_element_factory_make("decodebin", "decodebin1");
    decodebin2 = gst_element_factory_make("decodebin", "decodebin2");
    filter1 = gst_element_factory_make("capsfilter", "filter1");
    filter2 = gst_element_factory_make("capsfilter", "filter2");
    outfilter = gst_element_factory_make("capsfilter", "outfilter");
    videoscale1 = gst_element_factory_make("videoscale", "videoscale1");
    videoscale2 = gst_element_factory_make("videoscale", "videoscale2");
    audioconvert = gst_element_factory_make("audioconvert", "audioconvert");
    audioconvert1 = gst_element_factory_make("audioconvert", "audioconvert1");
    audioconvert2 = gst_element_factory_make("audioconvert", "audioconvert2");
    voaacenc = gst_element_factory_make("voaacenc", "avenc_ac3_1");
    audiomixer = gst_element_factory_make("audiomixer", "audiomixer1");
    audioresample1 = gst_element_factory_make("audioresample", "audioresample1");
    audioresample2 = gst_element_factory_make("audioresample", "audioresample2");
   
    g_object_set (G_OBJECT (voaacenc), "bitrate", targetAudioBitrate, nil);
    g_object_set(G_OBJECT (h264enc), "bitrate", targetVideoBitrate / 1000, nil);
   
    g_object_set (G_OBJECT (source1), "location", [[NSString stringWithFormat:@"%@%@/%@", documentPath, @"tempForVideoCompose", videoFilePath1] UTF8String], NULL);
    g_object_set (G_OBJECT (source2), "location", [[NSString stringWithFormat:@"%@%@/%@", documentPath, @"tempForVideoCompose", videoFilePath2] UTF8String], NULL);
   
    g_object_set (G_OBJECT (sink), "location", [[NSString stringWithFormat:@"%@%@%@", documentPath, sessionId, @".mp4"] UTF8String], NULL);
   
    filtercaps1 = gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, (NSInteger)videoRect1.size.width, "height", G_TYPE_INT, (NSInteger)videoRect1.size.height, nil);
    filtercaps2 = gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, (NSInteger)videoRect2.size.width, "height", G_TYPE_INT, (NSInteger)videoRect2.size.height, nil);
    outfiltercaps = gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, (NSInteger)composedWidth, "height", G_TYPE_INT, (NSInteger)composedHeight, nil);
   
    g_object_set(G_OBJECT (filter1), "caps", filtercaps1, nil);
    gst_caps_unref(filtercaps1);
   
    g_object_set(G_OBJECT (filter2), "caps", filtercaps2, nil);
    gst_caps_unref(filtercaps2);
   
    g_object_set(G_OBJECT (outfilter), "caps", outfiltercaps, nil);
    gst_caps_unref(outfiltercaps);
   
    g_signal_connect(decodebin1, "pad-added", G_CALLBACK(on_pad_added_video1), clrspace1);
    g_signal_connect(decodebin1, "pad-added", G_CALLBACK(on_pad_added_audio1), audioconvert1);
    g_signal_connect(decodebin2, "pad-added", G_CALLBACK(on_pad_added_audio2), audioconvert2);
    g_signal_connect(decodebin2, "pad-added", G_CALLBACK(on_pad_added_video2), clrspace2);
   
   
    GstPad *sinkPad, *srcVideoPad;
   
    srcVideoPad = gst_element_get_static_pad(filter1, "src");
    sinkPad = gst_element_get_request_pad(videomixer, "sink_0");
    g_object_set(sinkPad, "xpos", (NSInteger)videoRect1.origin.x, "ypos", (NSInteger)videoRect1.origin.y, nil);
    gst_pad_link(srcVideoPad, sinkPad);
    gst_object_unref(sinkPad);
    gst_object_unref(srcVideoPad);
   
    srcVideoPad = gst_element_get_static_pad(filter2, "src");
    sinkPad = gst_element_get_request_pad(videomixer, "sink_1");
    g_object_set(sinkPad, "xpos", (NSInteger)videoRect2.origin.x, "ypos", (NSInteger)videoRect2.origin.y, nil);
    gst_pad_link(srcVideoPad, sinkPad);
    gst_object_unref(sinkPad);
    gst_object_unref(srcVideoPad);
   
    gst_bin_add_many(GST_BIN (pipeline), source1, decodebin1, clrspace1, videomixer, clrspace, h264enc, mp4mux, sink,
                     source2, decodebin2, clrspace2, filter1, filter2, videoscale1, videoscale2, audioconvert1, audiomixer, voaacenc, outfilter, audioconvert2, audioconvert, audioresample1, audioresample2, nil);
   
   
    gst_element_link(source1, decodebin1);
    gst_element_link(source2, decodebin2);
   
    gst_element_link_many(clrspace1, videoscale1, filter1, videomixer, nil);
    gst_element_link_many(clrspace2, videoscale2, filter2, videomixer, nil);
    gst_element_link_many(videomixer, outfilter, clrspace, h264enc, nil);
   
    if(!gst_element_link_many(audioconvert1, audioresample1, nil))
    {
        g_print("error");
    }
   
    if(!gst_element_link_many(audioconvert2, audioresample2, nil))
    {
        g_print("error");
    }
   
    if(!gst_element_link_many(audiomixer, audioconvert, voaacenc, nil))
    {
        g_print("error");
    }
   
    GstPad *audioSinkPad1, *audioSinkPad2, *srcAudioPad1, *srcAudioPad2;
    audioSinkPad1 = gst_element_get_request_pad(audiomixer, "sink_0");
    srcAudioPad1 = gst_element_get_static_pad(audioresample1, "src");
   
    /* audiomixer sink pads have per-pad "mute" and "volume" properties */
    g_object_set(audioSinkPad1, "mute", FALSE, nil);
    g_object_set(audioSinkPad1, "volume", 10.0, nil);
   
    GstPadLinkReturn ret = gst_pad_link(srcAudioPad1, audioSinkPad1);
    gst_object_unref(audioSinkPad1);
    gst_object_unref(srcAudioPad1);
   
    audioSinkPad2 = gst_element_get_request_pad(audiomixer, "sink_1");
    srcAudioPad2 = gst_element_get_static_pad(audioresample2, "src");
   
    g_object_set(audioSinkPad2, "mute", FALSE, nil);
    g_object_set(audioSinkPad2, "volume", 10.0, nil);
   
    gst_pad_link(srcAudioPad2, audioSinkPad2);
    gst_object_unref(audioSinkPad2);
    gst_object_unref(srcAudioPad2);
   
    GstPad *muxSrc, *muxTarget;
    muxSrc = gst_element_get_static_pad(h264enc, "src");
    muxTarget = gst_element_get_request_pad(mp4mux, "video_1");
    GstPadLinkReturn ret1 = gst_pad_link(muxSrc, muxTarget);
    gst_object_unref(muxTarget);
    gst_object_unref(muxSrc);
   
   
   
    muxSrc = gst_element_get_static_pad(voaacenc, "src");
    muxTarget = gst_element_get_request_pad(mp4mux, "audio_1");
    ret1 = gst_pad_link(muxSrc, muxTarget);
    gst_object_unref(muxTarget);
    gst_object_unref(muxSrc);
   
   
    gst_element_link(mp4mux, sink);
   
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
   

    /********************/
   
    bus = gst_element_get_bus (pipeline);
    bus_source = gst_bus_create_watch (bus);
    g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
    g_source_attach (bus_source, context);
    g_source_unref (bus_source);
    g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, (__bridge void *)self);
    g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, (__bridge void *)self);
    g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, (__bridge void *)self);
    g_signal_connect (G_OBJECT (bus), "message::duration", (GCallback)duration_cb, (__bridge void *)self);
    gst_object_unref (bus);
   
    /* Register a function that GLib will call 4 times per second */
    timeout_source = g_timeout_source_new (250);
    g_source_set_callback (timeout_source, (GSourceFunc)refresh_ui, (__bridge void *)self, NULL);
    g_source_attach (timeout_source, context);
    g_source_unref (timeout_source);
   
    /* Create a GLib Main Loop and set it to run */
    GST_DEBUG ("Entering main loop...");
    main_loop = g_main_loop_new (context, FALSE);
    g_main_loop_run (main_loop);
    GST_DEBUG ("Exited main loop");
    g_main_loop_unref (main_loop);
    main_loop = NULL;
   
    /* Free resources */
    g_main_context_pop_thread_default(context);
    g_main_context_unref (context);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);


***********************************************************

I checked all the return values from gst_pad_link(), gst_element_link(), and so on.
Everything returns success; nothing looks wrong.
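
For example, the kind of check I mean looks roughly like this (a minimal sketch, not the exact code from the listing above):

    /* Illustrative only: report the symbolic name of a failed pad link. */
    GstPadLinkReturn lret = gst_pad_link (srcAudioPad1, audioSinkPad1);
    if (lret != GST_PAD_LINK_OK)
        g_printerr ("pad link failed: %s\n", gst_pad_link_get_name (lret));

    if (!gst_element_link_many (audiomixer, audioconvert, voaacenc, NULL))
        g_printerr ("could not link audiomixer ! audioconvert ! voaacenc\n");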

I hope someone can explain what audiomixer needs here.

Thanks

Re: Audiomixer doesn't work

Nicolas Dufresne
On Thursday, 28 April 2016 at 06:25 -0700, Johan Basore wrote:
> I am working with audiomixer and ran into something strange.
> If I use audiomixer, the pipeline doesn't play; it just stays in the READY
> state. But if I replace audiomixer with adder, it plays.
> Does audiomixer need any special configuration?

Audiomixer handles synchronization between its inputs, while adder
completely ignores that and mixes on a first-come, first-served basis.
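
For illustration (not part of the original reply), here is a minimal sketch of a pipeline that audiomixer does play, assuming two test sources that produce properly timestamped buffers; the element choices (audiotestsrc, autoaudiosink) are assumptions made only for this example:

    #include <gst/gst.h>

    int main (int argc, char *argv[])
    {
      GstElement *pipeline;
      GstBus *bus;
      GstMessage *msg;

      gst_init (&argc, &argv);

      /* audiotestsrc produces timestamped buffers, so audiomixer can align
       * the two streams on their running time before mixing them. */
      pipeline = gst_parse_launch (
          "audiotestsrc freq=440 ! audioconvert ! mixer.sink_0 "
          "audiotestsrc freq=660 ! audioconvert ! mixer.sink_1 "
          "audiomixer name=mixer ! audioconvert ! autoaudiosink", NULL);

      gst_element_set_state (pipeline, GST_STATE_PLAYING);

      /* Block until an error or EOS message is posted on the bus. */
      bus = gst_element_get_bus (pipeline);
      msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
          GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

      if (msg != NULL)
        gst_message_unref (msg);
      gst_object_unref (bus);
      gst_element_set_state (pipeline, GST_STATE_NULL);
      gst_object_unref (pipeline);
      return 0;
    }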

Please describe your sources; that may help figure out what the
problem is.

cheers,
Nicolas