Problem with gstreamer + gst-rtsp-server

Date: 2018-07-27 16:29:16

Tags: gstreamer rtsp

I'm running into a problem with gstreamer and gst-rtsp-server.

I receive a raw stream of h264 frames and I need to serve that stream over RTSP to multiple clients. At the moment it only seems to work with a single client: when a second client connects, it usually receives invalid data.

I suspect one of my problems is that I use a single shared gst_rtsp_media_factory and a single shared timestamp (gstTimestamp) for all RTSP clients.

My raw h264 frames are kept in a small ring buffer, and I don't want to keep a copy of this buffer for every connected client.

My approach may be wrong. After looking at several gst-rtsp + appsrc examples, I saw that some of them use a separate pipeline with an appsink: https://gist.github.com/Mezzano/43789624d4983d70bcd9610ecaddbb30 https://cgit.freedesktop.org/gstreamer/gst-rtsp-server/plain/examples/test-appsrc2.c

What am I missing? Do I need a separate pipeline that plays my stream into an appsink, and then use that appsink to provide buffers in the "need-data" callback?
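
For reference, here is roughly how I understand the pattern from those examples. This is only a minimal sketch of my reading of them, not code from my project; the function and element names (setupCapturePipeline, needDataFromSink, rawsrc, capture) are my own:

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>

// One capture pipeline parses the raw h264 once and ends in an appsink,
// independent of the RTSP media pipeline.
static GstElement *capturePipeline;
static GstElement *captureSink;

static void setupCapturePipeline(void)
{
    capturePipeline = gst_parse_launch(
        "appsrc name=rawsrc is-live=true ! h264parse ! appsink name=capture sync=false",
        NULL);
    captureSink = gst_bin_get_by_name(GST_BIN(capturePipeline), "capture");
    gst_element_set_state(capturePipeline, GST_STATE_PLAYING);
}

// "need-data" handler for the RTSP media's appsrc; the appsink is passed as
// user data when connecting the signal.
static void needDataFromSink(GstElement *appSrc, guint size, gpointer userData)
{
    GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(userData));

    if (sample != NULL) {
        GstBuffer *buffer = gst_sample_get_buffer(sample);
        // gst_app_src_push_buffer() takes ownership, so push a copy.
        gst_app_src_push_buffer(GST_APP_SRC(appSrc), gst_buffer_copy(buffer));
        gst_sample_unref(sample);
    }
}

Since my media factory is shared there is only one appsrc to feed anyway, so I don't see what this indirection would change compared to pushing from my ring buffer directly.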

Here is the code:

#include "VideoStreamer.h"
#include "Utils.h"

VideoStreamer::VideoStreamer() :
sourceId(0), clientsConnected(false), writeConfigFrame(true), appSourceConfigured(false), gstTimestamp(0),
lastConfigFrame(nullptr), appSource(nullptr), loopThread(nullptr), queue(30)
{
    this->init();
    this->start();
}

VideoStreamer::~VideoStreamer()
{
    PRINT_LOG(DEBUG, "VideoStreamer", "Destroying videoStreamer object");

    if (sourceId != 0)
        g_source_remove(sourceId);

    if (appSource != nullptr)
        g_object_unref(appSource);

    // Quit the main loop and join its thread before releasing the loop itself.
    this->stop();
    loopThread->join();

    g_object_unref(rtspServer);
    g_main_loop_unref(gstLoop);

    gst_deinit();
    PRINT_LOG(DEBUG, "VideoStreamer", "VideoStreamer object destroyed!");
}

void VideoStreamer::init()
{
    GError *error = NULL;
    GstRTSPMediaFactory *factory;
    GstRTSPMountPoints *mounts;

    if (gst_init_check(NULL, NULL, &error) != TRUE) {
        PRINT_LOG(ERROR, "VideoStreamer", "Error while initializing gstreamer: %s", error->message);
        return;
    }

    this->rtspServer = gst_rtsp_server_new();
    if (!this->rtspServer) {
        PRINT_LOG(ERROR, "VideoStreamer", "Error while creating gstreamer rtsp server");
        return;
    }

    mounts = gst_rtsp_server_get_mount_points(this->rtspServer);
    if (!mounts) {
        PRINT_LOG(ERROR, "VideoStreamer", "Error while getting rtsp server mount points");
        return;
    }

    factory = gst_rtsp_media_factory_new();
    if (!factory) {
        PRINT_LOG(ERROR, "VideoStreamer", "Error while creating rtsp media factory");
        return;
    }

    gst_rtsp_mount_points_add_factory(mounts, "/live", factory);
    g_object_unref(mounts);

    // Shared factory: every client reuses the same media/pipeline, and
    // therefore the same "mysrc" appsrc instance.
    gst_rtsp_media_factory_set_shared(factory, TRUE);
    gst_rtsp_media_factory_set_launch(factory, "( appsrc name=mysrc is-live=true ! h264parse ! rtph264pay name=pay0 pt=96 )");
    g_signal_connect(factory, "media-configure", G_CALLBACK(&VideoStreamer::onMediaConfigure), this);

    this->rtspSessions = gst_rtsp_server_get_session_pool(this->rtspServer);
    g_signal_connect(this->rtspServer, "client-connected", G_CALLBACK(&VideoStreamer::onClientConnected), this);
    g_signal_connect(this->rtspSessions, "session-removed", G_CALLBACK(&VideoStreamer::onSessionRemoved), this);

    this->gstLoop = g_main_loop_new(NULL, FALSE);
    gst_rtsp_server_attach(this->rtspServer, NULL);
}

void VideoStreamer::start()
{
    this->loopThread = std::make_shared<std::thread>(&VideoStreamer::startGstLoop, this);
}

void VideoStreamer::stop()
{
    if (g_main_loop_is_running(this->gstLoop)) {
        PRINT_LOG(DEBUG, "VideoStreamer", "Stopping gst loop (thread %x)", std::this_thread::get_id());
        g_main_loop_quit(this->gstLoop);
        this->gstTimestamp = 0;
    } else {
        PRINT_LOG(ERROR, "VideoStreamer", "cannot stop gst loop: it is not running");
    }
}

void VideoStreamer::startGstLoop()
{
    PRINT_LOG(INFO, "VideoStreamer", "Starting gst loop (thread %x)", std::this_thread::get_id());
    g_main_loop_run(this->gstLoop);
    PRINT_LOG(INFO, "VideoStreamer", "Gst loop stopped");
}

gboolean VideoStreamer::prepareBuffer(VideoStreamer *ctx)
{
    GstBuffer *buffer = NULL;
    GstFlowReturn ret;
    size_t offset = 0;
    bool configFrameWritten = false;

    if (ctx->writeConfigFrame == true && ctx->lastConfigFrame != nullptr) {
        PRINT_LOG(INFO, "VideoStreamer", "Writing config frame");
        buffer = gst_buffer_new_allocate(NULL, (ctx->lastConfigFrame->spsSize + ctx->lastConfigFrame->ppsSize) * sizeof(uint8_t), NULL);
        gst_buffer_fill(buffer, offset, ctx->lastConfigFrame->spsData.data(), ctx->lastConfigFrame->spsSize * sizeof(uint8_t));
        offset += ctx->lastConfigFrame->spsSize * sizeof(uint8_t);
        gst_buffer_fill(buffer, offset, ctx->lastConfigFrame->ppsData.data(), ctx->lastConfigFrame->ppsSize * sizeof(uint8_t));
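        // NOTE: no PTS/DURATION is set on this config buffer; it is pushed with GST_CLOCK_TIME_NONE.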
        configFrameWritten = true;
    } else {
        auto frame = ctx->queue.pop();

        if (frame.get() != nullptr) {
            buffer = gst_buffer_new_allocate(NULL, frame->size * sizeof(uint8_t), NULL);
            gst_buffer_fill(buffer, offset, frame->data.data(), frame->size * sizeof(uint8_t));
            // Manual timestamping: PTS advances by one frame duration per
            // buffer, assuming a constant 30 fps input.
            GST_BUFFER_PTS(buffer) = ctx->gstTimestamp;
            GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 30);
            ctx->gstTimestamp += GST_BUFFER_DURATION(buffer);
        }
    }

    if (buffer != NULL) {
        ret = gst_app_src_push_buffer(GST_APP_SRC(ctx->appSource), buffer);

        if (ret == GST_FLOW_OK && configFrameWritten) {
            ctx->writeConfigFrame = false;
        }
    }

    return TRUE;
}

// "need-data": the appsrc queue is running low, so schedule an idle handler
// that pushes one buffer per main-loop iteration until "enough-data" fires.
void VideoStreamer::needData(GstElement *appSrc, guint size, VideoStreamer *ctx)
{
    if (ctx->sourceId == 0) {
        ctx->sourceId = g_idle_add((GSourceFunc)&VideoStreamer::prepareBuffer, ctx);
    }
}

void VideoStreamer::enoughData(GstElement *appSrc, VideoStreamer *ctx)
{
    if (ctx->sourceId != 0) {
        g_source_remove(ctx->sourceId);
        ctx->sourceId = 0;
    }
}

void VideoStreamer::onDecoderConfigReceived(Stream_Codec_t codec, void *customData)
{
    PRINT_LOG(INFO, "VideoStreamer", "Decoder config received on thread %x", std::this_thread::get_id());

    if (codec.type == STREAM_CODEC_TYPE_H264) {
        VideoStreamer *ctx = static_cast<VideoStreamer*>(customData);

        if (ctx == NULL)
            return;

        auto frame = std::make_shared<SpsPpsFrame>();

        frame->spsSize = codec.parameters.h264parameters.spsSize;
        frame->spsData.resize(frame->spsSize);    // resize (not reserve), so data() points at valid elements
        memcpy(frame->spsData.data(), codec.parameters.h264parameters.spsBuffer, frame->spsSize * sizeof(uint8_t));

        frame->ppsSize = codec.parameters.h264parameters.ppsSize;
        frame->ppsData.resize(frame->ppsSize);
        memcpy(frame->ppsData.data(), codec.parameters.h264parameters.ppsBuffer, frame->ppsSize * sizeof(uint8_t));

        ctx->lastConfigFrame = frame;
    }
}

void VideoStreamer::onFrameReceived(Frame_t *receivedFrame, void *customData)
{
    if (receivedFrame == NULL) {
        PRINT_LOG(INFO, "VideoStreamer", "Frame is NULL");
    } else {
        VideoStreamer *ctx = static_cast<VideoStreamer*>(customData);

        if (ctx == NULL)
            return;

        if (ctx->isStarted()) {
            auto frame = std::make_shared<H264Frame>();
            frame->size = receivedFrame->used;
            frame->data.resize(frame->size);
            memcpy(frame->data.data(), receivedFrame->data, frame->size * sizeof(uint8_t));

            if (receivedFrame->isIFrame == 1)
                ctx->writeConfigFrame = true;

            if (!ctx->queue.push(frame))
                PRINT_LOG(INFO, "VideoStreamer", "Cannot push frame, buffer is full");
        }
    }
}

void VideoStreamer::onMediaConfigure(GstRTSPMediaFactory *factory, GstRTSPMedia *media, VideoStreamer *ctx)
{
    if (!ctx->isAppSourceConfigured()) {
        PRINT_LOG(INFO, "VideoStreamer", "Configuring appSrc and connecting signals...");
        GstElement *element;

        element = gst_rtsp_media_get_element(media);
        ctx->appSource = gst_bin_get_by_name_recurse_up(GST_BIN(element), "mysrc");
        ctx->gstTimestamp = 0;

        if (!ctx->appSource) {
            PRINT_LOG(ERROR, "VideoStreamer", "Error while getting appsrc element");
            return;
        }

        gst_app_src_set_stream_type(GST_APP_SRC(ctx->appSource), GST_APP_STREAM_TYPE_STREAM);
        // Reusable: allow the media to be prepared again after all clients have left.
        gst_rtsp_media_set_reusable(media, TRUE);

        g_signal_connect(ctx->appSource, "need-data", G_CALLBACK(&VideoStreamer::needData), ctx);
        g_signal_connect(ctx->appSource, "enough-data", G_CALLBACK(&VideoStreamer::enoughtData), ctx);

        gst_object_unref(element);
        ctx->setAppSourceConfigured(true);
    } else {
        PRINT_LOG(INFO, "VideoStreamer", "appSrc already configured");
    }
}

void VideoStreamer::onClientConnected(GstRTSPServer *server, GstRTSPClient *client, VideoStreamer *ctx)
{
    PRINT_LOG(INFO, "VideoStreamer", "New client on RTSP server");

    if (server == ctx->rtspServer) {
        auto nbClients = gst_rtsp_session_pool_get_n_sessions(ctx->rtspSessions);

        PRINT_LOG(INFO, "VideoStreamer", "%d clients connected", nbClients + 1);
        ctx->writeConfigFrame = true;

        g_signal_connect(client, "new-session", G_CALLBACK(&VideoStreamer::onNewRtspSession), ctx);
    }
}

void VideoStreamer::onNewRtspSession(GstRTSPClient *client, GstRTSPSession *session, VideoStreamer *ctx)
{
    PRINT_LOG(INFO, "VideoStreamer", "NEW SESSION (timeout = %d)", gst_rtsp_session_get_timeout(session));
}

void VideoStreamer::onSessionRemoved(GstRTSPSessionPool *sessions, GstRTSPClient *client, VideoStreamer *ctx)
{
    PRINT_LOG(INFO, "VideoStreamer", "RTSP session removed");
}

Header file:

#ifndef VIDEOSTREAMER_H
#define VIDEOSTREAMER_H

extern "C" {
    #include <gst/gst.h>
    #include <gst/app/gstappsrc.h>
    #include <gst/rtsp-server/rtsp-server.h>
}

#include <memory>
#include <thread>
#include <atomic>
#include <vector>
#include "RingBuffer.hpp"

class SpsPpsFrame
{
    public:
        int spsSize;
        int ppsSize;
        std::vector<uint8_t> spsData;
        std::vector<uint8_t> ppsData;
};

class H264Frame
{
    public:
        uint32_t size;
        bool isIFrame;
        std::vector<uint8_t> data;
};

class VideoStreamer
{
    public:
        VideoStreamer();
        ~VideoStreamer();

        void start();
        void stop();

        static void onDecoderConfigReceived(Stream_Codec_t codec, void *customData);
        static void onFrameReceived(Frame_t *frame, void *customData);

        void setAppSourceConfigured(bool configured) {this->appSourceConfigured = configured;}
        bool isAppSourceConfigured() {return this->appSourceConfigured;}
        bool isStarted() {return this->loopThread != nullptr;}

        static gboolean prepareBuffer(VideoStreamer *ctx);
        static void needData(GstElement * pipeline, guint size, VideoStreamer *ctx);
        static void enoughData(GstElement * pipeline, VideoStreamer *ctx);

        static void onMediaConfigure(GstRTSPMediaFactory *factory, GstRTSPMedia *media, VideoStreamer *ctx);
        static void onClientConnected(GstRTSPServer *server, GstRTSPClient *client, VideoStreamer *ctx);
        static void onSessionRemoved(GstRTSPSessionPool *sessions, GstRTSPClient *client, VideoStreamer *ctx);
        static void onNewRtspSession(GstRTSPClient *client, GstRTSPSession *session, VideoStreamer *ctx);

        GstClockTime gstTimestamp;
        RingBuffer<std::shared_ptr<H264Frame>> queue;
        std::shared_ptr<SpsPpsFrame> lastConfigFrame;
        GMainLoop *gstLoop;
        GstElement *appSource;
        guint sourceId;
        std::atomic_bool clientsConnected;
        std::atomic_bool writeConfigFrame;

    private:
        void init();
        void startGstLoop();

        GstRTSPServer *rtspServer;
        GstRTSPSessionPool *rtspSessions;
        std::shared_ptr<std::thread> loopThread;
        // GstRTSPMedia *sharedMedia;
        bool appSourceConfigured;
};

#endif // VIDEOSTREAMER_H
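
For what it's worth, I test with clients started like this (assuming the server stays on the default 8554 port, which I never change):

gst-launch-1.0 playbin uri=rtsp://127.0.0.1:8554/live

The first client plays fine; it is the second one that receives invalid data.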

0 Answers

No answers yet.