Hi everyone. I am hitting a memory leak while encoding on the AM5728 EVM. The leak occurred even before I started using g_signal_connect, so it is not caused by that call. This problem has been bothering me for a long time and I would appreciate it if someone could take a look. The leak is easy to reproduce: watch the process with top for roughly 5-10 minutes and the growth is clearly visible.
The SDK is ti-processor-sdk-linux-am57xx-evm-03.03.00.04 and the host system is Ubuntu 14.04.
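For reference, the growth can also be logged instead of eyeballed in top. A small loop like the sketch below (it assumes a single running instance of a binary named "app", as in the build command further down) prints the resident set size once a second; a VmRSS value that climbs steadily over several minutes confirms the leak:

    # Sketch: log the resident memory of the running app once per second.
    # Assumes one instance of a binary named "app"; adjust the name if needed.
    PID=$(pidof app)
    while kill -0 "$PID" 2>/dev/null; do
        echo "$(date +%T)  $(grep VmRSS /proc/$PID/status)"
        sleep 1
    done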
/*
 * gst-launch-1.0 -v -e v4l2src device=/dev/video1 io-mode=2 !
 *     'video/x-raw, format=(string)NV12, width=(int)640, height=(int)480, framerate=(fraction)30/1, bitrate=(int)30' !
 *     ducatih264enc ! h264parse ! ducatih264dec ! waylandsink sync=false
 */
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <stdio.h>
#include <stdint.h>
#include <errno.h>
#include <fcntl.h>
#include <stdlib.h>
#include <string.h>
#include <gst/gst.h>

#define waylandsink
#define v4l2src

typedef struct _App App;
struct _App {
    GstElement *pipeline;
    GstElement *appsrc;
    GstElement *encode;
    GstElement *parse;
    GstElement *sink;
    GstElement *decode;
    GstBus *bus;
    GstMessage *msg;
    GMainLoop *loop;
    guint bus_watch_id;
};

App s_app;
int ret, idx, fd;

#define NBUF 3
#define FRAME_SIZE 152064   /* 352*288*1.5 */

int width = 352, height = 288;
void *buffer_addr[NBUF];
int size[NBUF];

int xioctl(int fd, int request, void *arg);   /* defined below */

/* "need-data" callback: dequeue one V4L2 frame, copy it into a fresh GstBuffer and
 * push it into appsrc via the "push-buffer" action signal.  Declared gboolean so the
 * TRUE/FALSE returns compile cleanly; the "need-data" signal itself ignores the value. */
static gboolean feed_data(GstElement *appsrc, guint size, App *app)
{
    //while (1) {
    //    fd_set fds;
    //    struct timeval tv;
    //    int r = 0;
    //    FD_ZERO(&fds); FD_SET(fd, &fds);
    //    tv.tv_sec = 0; tv.tv_usec = 1000 * 100;
    //    r = select(fd + 1, &fds, NULL, NULL, &tv);
    //    if (-1 == r) { printf("1111\r\n"); continue; }
    //    if (0 == r)  { printf("2222\r\n"); return FALSE; }
    printf("feed-data....\n");
    struct v4l2_buffer buf;

    /* Dequeue one buffer */
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) { perror("Queue Buffer"); return FALSE; }
    idx = buf.index;

    GstBuffer *buffer;
    GstFlowReturn ret;
    buffer = gst_buffer_new_allocate(NULL, FRAME_SIZE, NULL);
    GstMapInfo info;
    gst_buffer_map(buffer, &info, GST_MAP_WRITE);
    unsigned char *buff = info.data;
    //if ((void *)buf.m.userptr != 0)
    //    memcpy(buff, (void *)buf.m.userptr, FRAME_SIZE);
    memcpy(buff, buffer_addr[idx], FRAME_SIZE);
    gst_buffer_unmap(buffer, &info);

    g_signal_emit_by_name(app->appsrc, "push-buffer", buffer, &ret);
    printf("ret:%d\n", ret);
    if (ret != GST_FLOW_OK) {
        /* some error, stop sending data */
        printf("push error...\n");
        return FALSE;
    }
    gst_buffer_unref(buffer);

    /* Re-queue the V4L2 buffer */
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = idx;
    if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) { perror("Queue Buffer"); return FALSE; }
    return TRUE;
    //}
}

int xioctl(int fd, int request, void *arg)
{
    int r;
    do
        r = ioctl(fd, request, arg);
    while (-1 == r && EINTR == errno);
    return r;
}

int init_device(int fd)
{
    unsigned int i;
    struct v4l2_capability caps;
    struct v4l2_format fmt;
    struct v4l2_requestbuffers req;
    struct v4l2_buffer buf;

    /* Check for capture device */
    memset(&caps, 0, sizeof(caps));
    if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) { perror("Setting Pixel Format"); return 1; }
    printf("Driver: %s\ncaps: %8x", caps.driver, caps.capabilities);
    if (~caps.capabilities & V4L2_CAP_VIDEO_CAPTURE) { printf("Not a capture device"); return 1; }

    /* Set capture format to NV12 */
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;
    //fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
    fmt.fmt.pix.field = V4L2_FIELD_NONE;
    if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) { perror("Setting Pixel Format"); return 1; }
    printf("Selected Camera Mode:\n  Width: %d\n  Height: %d\n  Field: %d",
           fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.field);
    printf("  PixFmt = %c%c%c%c\n",
           fmt.fmt.pix.pixelformat & 0xFF, (fmt.fmt.pix.pixelformat >> 8) & 0xFF,
           (fmt.fmt.pix.pixelformat >> 16) & 0xFF, (fmt.fmt.pix.pixelformat >> 24) & 0xFF);

    /* Currently the driver supports only mmap buffers: request memory mapped buffers */
    memset(&req, 0, sizeof(req));
    req.count = NBUF;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req)) { perror("Requesting Buffer"); return 1; }
    printf("Total buffer num %d\n", req.count);

    for (i = 0; i < req.count; i++) {
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (-1 == xioctl(fd, VIDIOC_QUERYBUF, &buf)) { perror("Querying Buffer"); return 1; }

        /* Memory map all the buffers and save the addresses */
        buffer_addr[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
        //buffer_addr[i] = (void *)malloc(FRAME_SIZE);
        size[i] = buf.length;
        printf("Address %p, size %d, image size: %d \n", buffer_addr[i], buf.length, buf.bytesused);

        /* Queue the buffer for capture */
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) { perror("Queue Buffer"); return 1; }
        printf("12345\r\n");
    }

    if (-1 == xioctl(fd, VIDIOC_STREAMON, &buf.type)) { perror("Start Capture"); return 1; }
    return 0;
}

void release_device(int fd)
{
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMOFF, &type);
    close(fd);
}

gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *)data;
    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        fprintf(stderr, "End of stream\n");
        g_main_loop_quit(loop);
        break;
    case GST_MESSAGE_ERROR: {
        gchar *debug;
        GError *error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        g_printerr("Error: %s\n", error->message);
        g_error_free(error);
        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }
    return TRUE;
}

int main(int argc, char **argv)
{
    App *app = &s_app;
    printf("==========\n");

    char devnode[100] = "/dev/video1";
    fd = open(devnode, O_RDWR);
    if (fd == -1) { perror("Opening video device"); return 1; }
    ret = init_device(fd);
    if (0 != ret) { printf("Exiting"); return ret; }

    gst_init(NULL, NULL);

    app->pipeline = gst_pipeline_new("encode");                                g_assert(app->pipeline);
    app->appsrc  = gst_element_factory_make("appsrc", "srcElement");           g_assert(app->appsrc);
    app->encode  = gst_element_factory_make("ducatih264enc", "encodeElement"); g_assert(app->encode);
    app->parse   = gst_element_factory_make("h264parse", "parseElement");      g_assert(app->parse);
    app->decode  = gst_element_factory_make("ducatih264dec", "decodeElement"); g_assert(app->decode);
#ifdef waylandsink
    app->sink = gst_element_factory_make("waylandsink", "sinkElement");
#else
    app->sink = gst_element_factory_make("appsink", "sinkElement");
#endif
    g_assert(app->sink);
    printf("element create success\n");

    GstCaps *capsappsrc2H264enc;
    capsappsrc2H264enc = gst_caps_new_simple("video/x-raw",
            "format", G_TYPE_STRING, "NV12",
            "width", G_TYPE_INT, 352,
            "height", G_TYPE_INT, 288,
            "framerate", GST_TYPE_FRACTION, 30, 1, NULL);
    g_object_set(G_OBJECT(app->sink), "sync", FALSE, NULL);

    app->loop = g_main_loop_new(NULL, FALSE);
    app->bus = gst_pipeline_get_bus(GST_PIPELINE(app->pipeline));
    app->bus_watch_id = gst_bus_add_watch(app->bus, bus_call, app->loop);
    gst_object_unref(app->bus);

    gst_bin_add_many(GST_BIN(app->pipeline), app->appsrc, app->encode, app->parse, app->decode, app->sink, NULL);
    gboolean bLinkOk = gst_element_link_filtered(app->appsrc, app->encode, capsappsrc2H264enc);
    if (!bLinkOk) { g_warning("Failed to link src encode \n"); return -5; }
    bLinkOk = gst_element_link_many(app->encode, app->parse, app->decode, app->sink, NULL);
    if (!bLinkOk) { g_warning("Failed to link many 1\n"); return -5; }

    g_signal_connect(app->appsrc, "need-data", G_CALLBACK(feed_data), app);

    gst_element_set_state(app->pipeline, GST_STATE_PLAYING);
    printf("run....1\n");
    g_main_loop_run(app->loop);
    printf("run....2\n");

    app->msg = gst_bus_timed_pop_filtered(app->bus, GST_CLOCK_TIME_NONE,
                                          GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
    if (app->msg != NULL)
        gst_message_unref(app->msg);
    gst_object_unref(app->bus);
    gst_element_set_state(app->pipeline, GST_STATE_NULL);
    gst_object_unref(app->pipeline);
    gst_object_unref(app->appsrc);
    gst_object_unref(app->encode);
    gst_object_unref(app->parse);
    gst_object_unref(app->decode);
    gst_object_unref(app->sink);
    printf("close...\n");
    return 0;
}
Build command: arm-linux-gnueabihf-gcc app-camera.c -o app `pkg-config --cflags --libs gstreamer-1.0 gstreamer-base-1.0 gstreamer-app-1.0`
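If the GStreamer build in the SDK were new enough to ship the "leaks" tracer (1.10 or later, so it may not apply to this SDK), a run like the sketch below would show whether any GstBuffer/GstObject references are being leaked inside the process. Native allocations made inside the encoder plugin would not show up there, so a steadily growing RSS combined with a clean leaks log would point at the plugin itself:

    # Sketch: only works if the target GStreamer provides the "leaks" tracer (>= 1.10).
    # Objects still alive at exit are listed in the trace output written to leaks.log.
    GST_TRACERS="leaks" GST_DEBUG="GST_TRACER:7" ./app 2>leaks.log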
yongqing wang:
Fixing something like this is grunt work, so you will have to dig in yourself. Comment the code out piece by piece and watch how the memory changes; that is the only trick I know.
abayyy:
Reply to yongqing wang:
There is no source code available, so I cannot tell which plugin is at fault. With appsrc -> ducatih264enc -> appsink there is also a memory leak, but there is no source for ducatih264enc either.
yongqing wang:
Reply to abayyy:
Can you pin down which API call the memory leak comes from?
abayyy:
Reply to yongqing wang:
I have not pinned it down. If it is not appsrc, it should be a problem in the encoder plugin.
abayyy:
Reply to yongqing wang:
Hi, after I remove ducatih264enc the memory leak disappears. My code is below.
appsrc -> filesink: no memory leak
appsrc -> ducatih264enc -> filesink: memory leak
/*
 * Build command:
 *   arm-linux-gnueabihf-gcc app-camera.c -o app `pkg-config --cflags --libs gstreamer-1.0 gstreamer-base-1.0 gstreamer-app-1.0`
 */
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <stdio.h>
#include <stdint.h>
#include <errno.h>
#include <fcntl.h>
#include <stdlib.h>
#include <string.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

#define waylandsink
#define v4l2src

typedef struct _App App;
struct _App {
    GstElement *pipeline;
    GstElement *appsrc;
    GstElement *encode;
    GstElement *parse;
    GstElement *sink;
    GstElement *decode;
    GstBus *bus;
    GstMessage *msg;
    GMainLoop *loop;
    guint bus_watch_id;
};

App s_app;
int ret, idx, fd;

#define NBUF 3
#define FRAME_SIZE 152064   /* 352*288*1.5 */

int width = 352, height = 288;
void *buffer_addr[NBUF];
int size[NBUF];

int xioctl(int fd, int request, void *arg);   /* defined below */

/* "need-data" callback: dequeue one V4L2 frame, copy it into a fresh GstBuffer and
 * push it via the "push-buffer" action signal; declared gboolean so the TRUE/FALSE
 * returns compile cleanly (the signal ignores the value). */
static gboolean feed_data(GstElement *appsrc, guint size, App *app)
{
    printf("feed-data....\n");
    struct v4l2_buffer buf;

    /* Dequeue one buffer */
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) { perror("Queue Buffer"); return FALSE; }
    idx = buf.index;

    GstBuffer *buffer;
    GstFlowReturn ret;
    buffer = gst_buffer_new_allocate(NULL, FRAME_SIZE, NULL);
    GstMapInfo info;
    gst_buffer_map(buffer, &info, GST_MAP_WRITE);
    unsigned char *buff = info.data;
    memcpy(buff, buffer_addr[idx], FRAME_SIZE);
    gst_buffer_unmap(buffer, &info);

    g_signal_emit_by_name(app->appsrc, "push-buffer", buffer, &ret);
    printf("ret:%d\n", ret);
    if (ret != GST_FLOW_OK) {
        /* some error, stop sending data */
        printf("push error...\n");
        return FALSE;
    }
    gst_buffer_unref(buffer);

    /* Re-queue the V4L2 buffer */
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = idx;
    if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) { perror("Queue Buffer"); return FALSE; }
    return TRUE;
}

int xioctl(int fd, int request, void *arg)
{
    int r;
    do
        r = ioctl(fd, request, arg);
    while (-1 == r && EINTR == errno);
    return r;
}

int init_device(int fd)
{
    unsigned int i;
    struct v4l2_capability caps;
    struct v4l2_format fmt;
    struct v4l2_requestbuffers req;
    struct v4l2_buffer buf;

    /* Check for capture device */
    memset(&caps, 0, sizeof(caps));
    if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) { perror("Setting Pixel Format"); return 1; }
    printf("Driver: %s\ncaps: %8x", caps.driver, caps.capabilities);
    if (~caps.capabilities & V4L2_CAP_VIDEO_CAPTURE) { printf("Not a capture device"); return 1; }

    /* Set capture format to NV12 */
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;
    //fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
    fmt.fmt.pix.field = V4L2_FIELD_NONE;
    if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) { perror("Setting Pixel Format"); return 1; }
    printf("Selected Camera Mode:\nWidth: %d\nHeight: %d\nField: %d",
           fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.field);
    printf("PixFmt = %c%c%c%c\n",
           fmt.fmt.pix.pixelformat & 0xFF, (fmt.fmt.pix.pixelformat >> 8) & 0xFF,
           (fmt.fmt.pix.pixelformat >> 16) & 0xFF, (fmt.fmt.pix.pixelformat >> 24) & 0xFF);

    /* Currently the driver supports only mmap buffers: request memory mapped buffers */
    memset(&req, 0, sizeof(req));
    req.count = NBUF;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req)) { perror("Requesting Buffer"); return 1; }
    printf("Total buffer num %d\n", req.count);

    for (i = 0; i < req.count; i++) {
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (-1 == xioctl(fd, VIDIOC_QUERYBUF, &buf)) { perror("Querying Buffer"); return 1; }

        /* Memory map all the buffers and save the addresses */
        buffer_addr[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
        //buffer_addr[i] = (void *)malloc(FRAME_SIZE);
        size[i] = buf.length;
        printf("Address %p, size %d, image size: %d \n", buffer_addr[i], buf.length, buf.bytesused);

        /* Queue the buffer for capture */
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) { perror("Queue Buffer"); return 1; }
        printf("12345\r\n");
    }

    if (-1 == xioctl(fd, VIDIOC_STREAMON, &buf.type)) { perror("Start Capture"); return 1; }
    return 0;
}

void release_device(int fd)
{
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMOFF, &type);
    close(fd);
}

gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *)data;
    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        fprintf(stderr, "End of stream\n");
        g_main_loop_quit(loop);
        break;
    case GST_MESSAGE_ERROR: {
        gchar *debug;
        GError *error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        g_printerr("Error: %s\n", error->message);
        g_error_free(error);
        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }
    return TRUE;
}

int main(int argc, char **argv)
{
    App *app = &s_app;
    printf("==========\n");

    char devnode[100] = "/dev/video1";
    fd = open(devnode, O_RDWR);
    if (fd == -1) { perror("Opening video device"); return 1; }
    ret = init_device(fd);
    if (0 != ret) { printf("Exiting"); return ret; }

    gst_init(NULL, NULL);

    app->pipeline = gst_pipeline_new("encodepipeline");                        g_assert(app->pipeline);
    app->appsrc  = gst_element_factory_make("appsrc", "srcElement");           g_assert(app->appsrc);
    app->encode  = gst_element_factory_make("ducatih264enc", "encodeElement"); g_assert(app->encode);
    app->parse   = gst_element_factory_make("h264parse", "parseElement");      g_assert(app->parse);
    app->decode  = gst_element_factory_make("ducatih264dec", "decodeElement"); g_assert(app->decode);
    app->sink    = gst_element_factory_make("filesink", "sinkElement");        g_assert(app->sink);
    printf("element create success\n");

    GstCaps *capsappsrc2H264enc;
    capsappsrc2H264enc = gst_caps_new_simple("video/x-raw",
            "format", G_TYPE_STRING, "NV12",
            "width", G_TYPE_INT, 352,
            "height", G_TYPE_INT, 288,
            "framerate", GST_TYPE_FRACTION, 30, 1, NULL);
    //g_object_set(G_OBJECT(app->sink), "sync", FALSE, NULL);
    gst_app_src_set_caps(GST_APP_SRC(app->appsrc), capsappsrc2H264enc);
    g_object_set(app->sink, "location", "/mnt/camera.yuv", NULL);

    app->loop = g_main_loop_new(NULL, FALSE);
    app->bus = gst_pipeline_get_bus(GST_PIPELINE(app->pipeline));
    app->bus_watch_id = gst_bus_add_watch(app->bus, bus_call, app->loop);
    gst_object_unref(app->bus);

    /* appsrc -> filesink only; the encoder/parser/decoder elements are created but not added here */
    gst_bin_add_many(GST_BIN(app->pipeline), app->appsrc, app->sink, NULL);
    gboolean bLinkOk;
    bLinkOk = gst_element_link(app->appsrc, app->sink);
    if (!bLinkOk) { g_warning("Failed to link many 1\n"); return -5; }

    g_signal_connect(app->appsrc, "need-data", G_CALLBACK(feed_data), app);

    gst_element_set_state(app->pipeline, GST_STATE_PLAYING);
    printf("run....1\n");
    g_main_loop_run(app->loop);
    printf("run....2\n");

    app->msg = gst_bus_timed_pop_filtered(app->bus, GST_CLOCK_TIME_NONE,
                                          GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
    if (app->msg != NULL)
        gst_message_unref(app->msg);
    gst_object_unref(app->bus);
    gst_element_set_state(app->pipeline, GST_STATE_NULL);
    gst_object_unref(app->pipeline);
    gst_object_unref(app->appsrc);
    gst_object_unref(app->sink);
    printf("close...\n");
    return 0;
}
Jian Zhou:
Reply to abayyy:
Did you add your own code on top of gst? So you are not running the gstreamer command line directly?
abayyy:
Reply to Jian Zhou:
I only translated the gst-launch pipeline into C code; nothing else was added, and it still uses the stock gstreamer plugins.
Jian Zhou:
Reply to abayyy:
I suggest you run a test with the gst shipped in the TI filesystem and compare, to see whether it shows a memory leak as well.
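For example, something along these lines (a sketch only, not from the thread; videotestsrc stands in for the camera and fakesink discards the output) lets you watch the RSS of a plain gst-launch process with and without ducatih264enc in the path:

    # Baseline without the encoder:
    gst-launch-1.0 videotestsrc is-live=true ! \
        'video/x-raw,format=NV12,width=352,height=288,framerate=30/1' ! fakesink sync=false
    # Same source with the encoder inserted; if only this one keeps growing, ducatih264enc is the suspect:
    gst-launch-1.0 videotestsrc is-live=true ! \
        'video/x-raw,format=NV12,width=352,height=288,framerate=30/1' ! ducatih264enc ! fakesink sync=false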
weifeng liang:
Reply to abayyy:
There is no memory leak on SDK 03.00, but every release from SDK 03.01 onward has the H.264 encoder memory leak. The exact cause is unknown. It was also reported on the TI English forum, but there was no response.
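When comparing SDK versions, something like this (a sketch) prints which GStreamer and which ducati plugin build a given filesystem actually loads:

    gst-inspect-1.0 --version
    gst-inspect-1.0 ducatih264enc | grep -i version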
abayyy:
Reply to weifeng liang:
TI has fixed this issue now.