GStreamer内存泄漏问题

GStreamer memory leak issue

本文关键字:问题 泄漏 内存 GStreamer      更新时间:2023-10-16

我有一个用GStreamer生成视频的对象。每次我想生成一个新的视频时,就创建一个新对象并添加帧。视频完成后,我删除了对象,但GStreamer内部占用的内存看起来并没有被释放。

在生成了几个视频之后,所有的RAM都被分配了,linux终止了这个进程。

为什么会发生这种情况?我该如何解决这个问题?还有别的办法吗?

GVideo.h:

#ifndef GVIDEO_H
#define GVIDEO_H
#include <gst/gst.h>
#include <string>
// Records frames pushed as GstSample into an MP4 file through a GStreamer
// pipeline (appsrc -> videorate -> capsfilter -> videoconvert -> x264enc ->
// mp4mux -> filesink; see GVideo.cpp).
class GVideo
{
public:
    GVideo();
    ~GVideo();                              // tears down the pipeline if still active
    void startVideo(std::string filename);  // builds the pipeline and starts recording into `filename`
    void endVideo();                        // stops the pipeline and releases it
    void addFrame(GstSample* element);      // pushes one frame; sends EOS once _duration seconds are reached
    bool isRecording(){return _isRecording;}
    bool isDataNeed(){return _dataNeed;}
private:
    void setDataNeed(bool dataNeed){_dataNeed = dataNeed;}
protected:
    bool _isRecording;       // true between startVideo() and endVideo()
    bool _dataNeed;          // true while more frames are wanted (cleared before EOS)
    int _frameRate;          // appsrc input framerate, frames per second
    int _duration;           // target clip length, seconds
    GstClockTime _timestamp; // running PTS for the next pushed frame
    GstElement *_pipeline;   // owning reference; NULL when not recording
    GstElement *_source;     // appsrc element, owned by _pipeline
};
#endif //GVIDEO_H

GVideo.cpp

#include "GVideo.h"
#include <gst/app/gstappsrc.h>
#include <iostream>
/**
 * Bus watch callback for the recording pipeline.
 *
 * @param bus       the pipeline's bus (unused beyond dispatch)
 * @param msg       the message being delivered
 * @param user_data the owning GVideo instance (passed in startVideo)
 * @return TRUE so the watch stays installed
 */
static gboolean bus_video_call(GstBus* bus, GstMessage* msg, void* user_data)
{
    GVideo* video = (GVideo*)user_data;
    switch (GST_MESSAGE_TYPE(msg))
    {
        case GST_MESSAGE_EOS:
            {
                std::cout << "VIDEO GST_MESSAGE_EOS" << std::endl;
                video->endVideo();
                break;
            }
        case GST_MESSAGE_ERROR:
            {
                std::cout << "GST_MESSAGE_ERROR" << std::endl;
                GError* err = NULL;
                gchar* debug = NULL;
                gst_message_parse_error(msg, &err, &debug);
                // BUG FIX: the original called g_error(), which is FATAL —
                // it aborts the process, so g_error_free() and endVideo()
                // below were never reached. Report non-fatally instead.
                g_printerr("Error: %s\n", err ? err->message : "(unknown)");
                if (debug != NULL)
                {
                    g_printerr("Debug info: %s\n", debug);
                    g_free(debug);      // debug string is transfer-full
                }
                if (err != NULL)
                    g_error_free(err);
                video->endVideo();
                break;
            }
        default:
            break;
    }
    return TRUE;    // gboolean, keep watching the bus
}
// Constructor: start idle, with no pipeline allocated.
// BUG FIX: the member-initializer list is now written in declaration order
// (_isRecording before _dataNeed). Members are always initialized in
// declaration order regardless of list order, and the original mismatched
// order triggers a -Wreorder warning.
GVideo::GVideo()
    : _isRecording(false), _dataNeed(false)
{
    _pipeline     = NULL;
    _source       = NULL;
}
// Destructor: tear down a still-running pipeline before the object dies.
GVideo::~GVideo()
{
    std::cout << "Deleting GstVideo." << std::endl;
    // A non-NULL pipeline means endVideo() has not run yet for this clip.
    if (_pipeline)
    {
        endVideo();
    }
}
void GVideo::startVideo(std::string filename)
{
    _isRecording = true;
    _frameRate = 2;
    _duration = 5;
    _timestamp = 0;
    _dataNeed = true;
    _pipeline     = gst_pipeline_new ("video_pipeline");
    _source       = gst_element_factory_make ("appsrc"      , "video_source");
    GstElement* _videorate    = gst_element_factory_make ("videorate"   , "video_vidrate");
    GstElement* _capsfilter   = gst_element_factory_make ("capsfilter"  , "video_capsfilter");
    GstElement* _videoconvert = gst_element_factory_make ("videoconvert", "video_conv");
    GstElement* _encoder      = gst_element_factory_make ("x264enc"     , "video_enc");
    GstElement* _muxer        = gst_element_factory_make ("mp4mux"      , "video_mux");
    GstElement* _filesink     = gst_element_factory_make ("filesink"    , "video_filesink");
//  g_object_set (G_OBJECT (_source), "num-buffers", _duration*_frameRate, NULL);
    g_object_set (G_OBJECT (_source), "caps",
                  gst_caps_new_simple ( "video/x-raw",
                                        "format", G_TYPE_STRING, "I420",
                                        "width", G_TYPE_INT, 352,
                                        "height", G_TYPE_INT, 288,
                                        "framerate", GST_TYPE_FRACTION, _frameRate, 1,
                                        NULL), NULL);
    /* setup appsrc */
    g_object_set (G_OBJECT (_source),
    //              "do-timestamp", TRUE,
                    "stream-type", GST_APP_STREAM_TYPE_STREAM,
                    "format", GST_FORMAT_TIME, NULL);
    g_object_set (G_OBJECT (_capsfilter), "caps",
                    gst_caps_new_simple ("video/x-raw",
//                                       "format", G_TYPE_STRING, "I420",
                                         "framerate", GST_TYPE_FRACTION, 30, 1,
                                         NULL), NULL);
    gst_bin_add_many (GST_BIN (_pipeline), _source, _videorate, _capsfilter, _videoconvert, _encoder, _muxer, _filesink, NULL);
    gst_element_link_many (_source, _videorate, _capsfilter, _videoconvert, _encoder, _muxer, _filesink, NULL);
    g_object_set (G_OBJECT (_filesink), "location", filename.c_str(), NULL);
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
    gst_bus_add_watch(bus, bus_video_call, this);
    gst_object_unref(bus);
    gst_element_set_state (_pipeline, GST_STATE_PLAYING);
}
/**
 * Push one frame into the running pipeline, stamping it with a PTS and a
 * duration derived from _frameRate; once _duration seconds of frames have
 * been pushed, clear _dataNeed and send EOS.
 *
 * Ownership: gst_app_src_push_sample() is transfer-none — it takes its own
 * reference to the sample. NOTE(review): the caller of addFrame() therefore
 * still owns `element` and must gst_sample_unref() it after this call, or
 * every frame leaks — confirm against the call site.
 *
 * @param element the sample to push (must contain a buffer)
 */
void GVideo::addFrame(GstSample* element)
{
    // `element` is already a GstSample*; the original's cast was redundant.
    GstBuffer* buf = gst_sample_get_buffer(element);
    if (buf == NULL)
        return;     // a sample without a buffer has nothing to push
    GST_BUFFER_PTS (buf) = _timestamp;
    GST_BUFFER_DURATION (buf) = gst_util_uint64_scale_int (1, GST_SECOND, _frameRate);
    _timestamp += GST_BUFFER_DURATION (buf);
    gst_app_src_push_sample(GST_APP_SRC(_source), element);
    if(_timestamp >= _duration*GST_SECOND)
    {
        _dataNeed = false;
        gst_app_src_end_of_stream(GST_APP_SRC(_source));
    }
}
void GVideo::endVideo()
{
    std::cout << "gst_element_set_state." << std::endl;
    gst_element_set_state (_pipeline, GST_STATE_NULL);
    std::cout << "gst_object_unref." << std::endl;
    gst_object_unref(_pipeline);
    std::cout << "_pipeline= NULL." << std::endl;
    _pipeline = NULL;
    std::cout << "setDataNeed." << std::endl;
    _isRecording = false;
}

我想,无论是什么代码在调用 addFrame(),它都可能需要对 sample 执行 unref。

https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/gst-plugins-base-libs-appsrc.html#gst-app-src-push-sample

在那里的文档中,sample 参数被标注为 "transfer: none",这意味着函数不会接管所有权——调用者仍然持有自己的引用,需要自行调用 gst_sample_unref() 来释放(减少引用计数)。由于某种原因,较旧的方法 gst_app_src_push_buffer 的传输方式则是 "full"(函数接管 buffer 的所有权)。不知道为什么。

https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/gst-plugins-base-libs-appsrc.html#gst-app-src-push-buffer