DeepStream C++ RTSP stream output

There is not much DeepStream material available in Chinese, and the official samples do not ship a simple RTSP-output example — the docs only suggest referring to deepstream_sink_bin.c. After a fair bit of tinkering I put together an RTSP-output demo based on deepstream-test1. The idea: replace test1's on-screen sink with an encoding branch (nvvideoconvert → capsfilter forcing I420 → H264 encoder → h264parse → rtph264pay → udpsink on 127.0.0.1:5400) and run a small GstRTSPServer that re-serves that local UDP/RTP stream to clients at rtsp://<host>:8554/ds-test. Everything was built and run inside the official devel container; a typical way to start it is sketched right below.
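A minimal docker run sketch (the host source path is hypothetical; --gpus all assumes the NVIDIA container toolkit is installed, and --net=host is simply a convenient way to expose the RTSP port 8554):

docker run --gpus all -it --rm --net=host \
    -v /path/to/deepstream-test1-rtsp:/workspace/deepstream-test1-rtsp \
    nvcr.io/nvidia/deepstream:5.1-21.02-devel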

  • Environment: the official nvcr.io/nvidia/deepstream:5.1-21.02-devel Docker image
  • Run command:
./deepstream-test1-app ../../../../samples/streams/sample_720p.h264
  • Code
/*
 * Copyright (c) 2018-2020, NVIDIA CORPORATION. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <cuda_runtime_api.h>
#include "gstnvdsmeta.h"
#include <gst/rtsp-server/rtsp-server.h>

#define MAX_DISPLAY_LEN 64

#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2

/* The muxer output resolution must be set if the input streams will be of
 * different resolution. The muxer will scale all the input frames to this
 * resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080

/* Muxer batch formation timeout, for e.g. 40 millisec. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 40000

gint frame_number = 0;
gchar pgie_classes_str[4][32] = { "Vehicle", "TwoWheeler", "Person",
  "Roadsign"
};

/* osd_sink_pad_buffer_probe will extract metadata received on OSD sink pad
 * and update params for drawing rectangle, object information etc. */
static GstPadProbeReturn
osd_sink_pad_buffer_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer u_data)
{
  GstBuffer *buf = (GstBuffer *) info->data;
  guint num_rects = 0;
  NvDsObjectMeta *obj_meta = NULL;
  guint vehicle_count = 0;
  guint person_count = 0;
  NvDsMetaList *l_frame = NULL;
  NvDsMetaList *l_obj = NULL;
  NvDsDisplayMeta *display_meta = NULL;

  NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);

  for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
      l_frame = l_frame->next) {
    NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
    int offset = 0;
    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next) {
      obj_meta = (NvDsObjectMeta *) (l_obj->data);
      if (obj_meta->class_id == PGIE_CLASS_ID_VEHICLE) {
        vehicle_count++;
        num_rects++;
      }
      if (obj_meta->class_id == PGIE_CLASS_ID_PERSON) {
        person_count++;
        num_rects++;
      }
    }
    display_meta = nvds_acquire_display_meta_from_pool (batch_meta);
    NvOSD_TextParams *txt_params = &display_meta->text_params[0];
    display_meta->num_labels = 1;
    txt_params->display_text = g_malloc0 (MAX_DISPLAY_LEN);
    offset = snprintf (txt_params->display_text, MAX_DISPLAY_LEN,
        "Person = %d ", person_count);
    offset = snprintf (txt_params->display_text + offset, MAX_DISPLAY_LEN,
        "Vehicle = %d ", vehicle_count);

    /* Now set the offsets where the string should appear */
    txt_params->x_offset = 10;
    txt_params->y_offset = 12;

    /* Font, font-color and font-size */
    txt_params->font_params.font_name = "Serif";
    txt_params->font_params.font_size = 10;
    txt_params->font_params.font_color.red = 1.0;
    txt_params->font_params.font_color.green = 1.0;
    txt_params->font_params.font_color.blue = 1.0;
    txt_params->font_params.font_color.alpha = 1.0;

    /* Text background color */
    txt_params->set_bg_clr = 1;
    txt_params->text_bg_clr.red = 0.0;
    txt_params->text_bg_clr.green = 0.0;
    txt_params->text_bg_clr.blue = 0.0;
    txt_params->text_bg_clr.alpha = 1.0;
    nvds_add_display_meta_to_frame (frame_meta, display_meta);
  }

  g_print ("Frame Number = %d Number of objects = %d "
      "Vehicle Count = %d Person Count = %d\n",
      frame_number, num_rects, vehicle_count, person_count);
  frame_number++;
  return GST_PAD_PROBE_OK;
}

static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:{
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("ERROR from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      if (debug)
        g_printerr ("Error details: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

static GstRTSPServer *server;
static gboolean
start_rtsp_streaming (guint rtsp_port_num, guint updsink_port_num,
    guint64 udp_buffer_size)
{
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;
  char udpsrc_pipeline[512];
  char port_num_Str[64] = { 0 };

  if (udp_buffer_size == 0)
    udp_buffer_size = 512 * 1024;

  /* The RTSP media factory simply re-serves the RTP/H264 stream that the
   * main pipeline pushes to localhost via udpsink. */
  sprintf (udpsrc_pipeline,
      "( udpsrc name=pay0 port=%d buffer-size=%lu caps=\"application/x-rtp, media=video, "
      "clock-rate=90000, encoding-name=H264, payload=96 \" )",
      updsink_port_num, udp_buffer_size);

  sprintf (port_num_Str, "%d", rtsp_port_num);

  server = gst_rtsp_server_new ();
  g_object_set (server, "service", port_num_Str, NULL);

  mounts = gst_rtsp_server_get_mount_points (server);
  factory = gst_rtsp_media_factory_new ();
  gst_rtsp_media_factory_set_launch (factory, udpsrc_pipeline);
  gst_rtsp_mount_points_add_factory (mounts, "/ds-test", factory);
  g_object_unref (mounts);

  gst_rtsp_server_attach (server, NULL);

  g_print ("\n *** DeepStream: Launched RTSP Streaming at rtsp://localhost:%d/ds-test ***\n\n",
      rtsp_port_num);
  return TRUE;
}

static GstRTSPFilterResult
client_filter (GstRTSPServer * server, GstRTSPClient * client,
    gpointer user_data)
{
  return GST_RTSP_FILTER_REMOVE;
}

static void
destroy_sink_bin ()
{
  GstRTSPMountPoints *mounts;
  GstRTSPSessionPool *pool;

  mounts = gst_rtsp_server_get_mount_points (server);
  gst_rtsp_mount_points_remove_factory (mounts, "/ds-test");
  g_object_unref (mounts);
  gst_rtsp_server_client_filter (server, client_filter, NULL);
  pool = gst_rtsp_server_get_session_pool (server);
  gst_rtsp_session_pool_cleanup (pool);
  g_object_unref (pool);
}

int
main (int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *source = NULL, *h264parser = NULL,
      *decoder = NULL, *streammux = NULL, *sink = NULL, *pgie = NULL,
      *nvvidconv = NULL, *nvosd = NULL, *rtppay = NULL, *parse = NULL;
  GstElement *transform = NULL;
  GstBus *bus = NULL;
  guint bus_watch_id;
  GstPad *osd_sink_pad = NULL;
  GstElement *nvvidconv1 = NULL, *filter4 = NULL, *x264enc = NULL,
      *qtmux = NULL;
  GstCaps *caps4 = NULL;

  int current_device = -1;
  cudaGetDevice (&current_device);
  struct cudaDeviceProp prop;
  cudaGetDeviceProperties (&prop, current_device);

  /* Check input arguments */
  if (argc != 2) {
    g_printerr ("Usage: %s <H264 filename>\n", argv[0]);
    return -1;
  }

  /* Standard GStreamer initialization */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("dstest1-pipeline");

  /* Source element for reading from the file */
  source = gst_element_factory_make ("filesrc", "file-source");

  /* Since the data format in the input file is elementary h264 stream,
   * we need a h264parser */
  h264parser = gst_element_factory_make ("h264parse", "h264-parser");

  /* Use nvdec_h264 for hardware accelerated decode on GPU */
  decoder = gst_element_factory_make ("nvv4l2decoder", "nvv4l2-decoder");

  /* Create nvstreammux instance to form batches from one or more sources. */
  streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");

  if (!pipeline || !streammux) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Use nvinfer to run inferencing on decoder's output,
   * behaviour of inferencing is set through config file */
  pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");

  /* Use convertor to convert from NV12 to RGBA as required by nvosd */
  nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");

  /* Create OSD to draw on the converted RGBA buffer */
  nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");

  /* Extra elements for the RTSP output branch: convert back from RGBA,
   * constrain the format to I420, then encode to H264. */
  nvvidconv1 = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter1");
  x264enc = gst_element_factory_make ("x264enc", "h264 encoder");
  qtmux = gst_element_factory_make ("qtmux", "muxer");  /* leftover from the file-sink variant, not linked below */

  filter4 = gst_element_factory_make ("capsfilter", "filter4");
  caps4 = gst_caps_from_string ("video/x-raw, format=I420");
  g_object_set (G_OBJECT (filter4), "caps", caps4, NULL);
  gst_caps_unref (caps4);

  if (!nvvidconv1 || !x264enc || !qtmux || !filter4) {
    g_printerr ("One element could not be created.%p,%p,%p,%p, Exiting.\n",
        nvvidconv1, x264enc, qtmux, filter4);
    return -1;
  }

  /* Finally render the osd output */
  if (prop.integrated) {
    transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
  }

  /* sink = gst_element_factory_make ("fakesink", "nvvideo-renderer"); */
  guint udp_port = 5400;
  parse = gst_element_factory_make ("h264parse", "h264-parser2");
  rtppay = gst_element_factory_make ("rtph264pay", "rtp-payer");
  sink = gst_element_factory_make ("udpsink", "udp-sink");

  if (!source || !h264parser || !decoder || !pgie || !nvvidconv || !nvosd ||
      !sink || !rtppay || !parse) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  if (!transform && prop.integrated) {
    g_printerr ("One tegra element could not be created. Exiting.\n");
    return -1;
  }

  /* we set the input filename to the source element */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);

  g_object_set (G_OBJECT (streammux), "batch-size", 1, NULL);
  g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
      MUXER_OUTPUT_HEIGHT,
      "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

  /* Set all the necessary properties of the nvinfer element,
   * the necessary ones are : */
  g_object_set (G_OBJECT (pgie),
      "config-file-path", "dstest1_pgie_config.txt", NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Set up the pipeline */
  /* we add all elements into the pipeline */
  if (prop.integrated) {
    gst_bin_add_many (GST_BIN (pipeline),
        source, h264parser, decoder, streammux, pgie,
        nvvidconv, nvosd, transform, sink, NULL);
  } else {
    gst_bin_add_many (GST_BIN (pipeline),
        source, h264parser, decoder, streammux, pgie, nvvidconv, nvosd,
        nvvidconv1, filter4, x264enc, parse, rtppay, sink, NULL);
  }

  GstPad *sinkpad, *srcpad;
  gchar pad_name_sink[16] = "sink_0";
  gchar pad_name_src[16] = "src";

  sinkpad = gst_element_get_request_pad (streammux, pad_name_sink);
  if (!sinkpad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  srcpad = gst_element_get_static_pad (decoder, pad_name_src);
  if (!srcpad) {
    g_printerr ("Decoder request src pad failed. Exiting.\n");
    return -1;
  }

  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
    g_printerr ("Failed to link decoder to stream muxer. Exiting.\n");
    return -1;
  }

  gst_object_unref (sinkpad);
  gst_object_unref (srcpad);

  /* we link the elements together */
  /* file-source -> h264-parser -> nvh264-decoder ->
   * nvinfer -> nvvidconv -> nvosd -> video-renderer */
  if (!gst_element_link_many (source, h264parser, decoder, NULL)) {
    g_printerr ("Elements could not be linked: 1. Exiting.\n");
    return -1;
  }

  if (prop.integrated) {
    if (!gst_element_link_many (streammux, pgie,
            nvvidconv, nvosd, transform, sink, NULL)) {
      g_printerr ("Elements could not be linked: 2. Exiting.\n");
      return -1;
    }
  } else {
    if (!gst_element_link_many (streammux, pgie, nvvidconv, nvosd, nvvidconv1,
            filter4, x264enc, parse, rtppay, sink, NULL)) {
      g_printerr ("Elements could not be linked: 3. Exiting.\n");
      return -1;
    }
  }

  /* server init */
  /* Note: "preset-level", "insert-sps-pps" and "bufapi-version" are
   * properties of NVIDIA's hardware H264 encoder (nvv4l2h264enc); the
   * software x264enc element created above does not expose them, so these
   * calls only produce GLib warnings and have no effect there. */
  g_object_set (G_OBJECT (x264enc), "preset-level", 1, NULL);
  g_object_set (G_OBJECT (x264enc), "insert-sps-pps", 1, NULL);
  g_object_set (G_OBJECT (x264enc), "bufapi-version", 1, NULL);

  g_object_set (G_OBJECT (sink), "host", "127.0.0.1", "port",
      udp_port, "async", FALSE, "sync", 1, NULL);

  start_rtsp_streaming (8554 /*rtsp_port*/, udp_port, 0);

  /* Lets add probe to get informed of the meta data generated, we add probe to
   * the sink pad of the osd element, since by that time, the buffer would have
   * had got all the metadata. */
  osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
  if (!osd_sink_pad)
    g_print ("Unable to get sink pad\n");
  else
    gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
        osd_sink_pad_buffer_probe, NULL, NULL);
  gst_object_unref (osd_sink_pad);

  /* Set the pipeline to "playing" state */
  g_print ("Now playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  destroy_sink_bin ();
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}
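When the app starts it prints the mount point ("*** DeepStream: Launched RTSP Streaming at rtsp://localhost:8554/ds-test ***"). Any RTSP-capable player can then pull the stream; for example, from the same host (or with localhost replaced by the machine's IP):

ffplay rtsp://localhost:8554/ds-test

VLC's "Open Network Stream" with the same URL works as well.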
  • Makefile
################################################################################
# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
################################################################################

CUDA_VER?=
ifeq ($(CUDA_VER),)
  $(error "CUDA_VER is not set")
endif

APP:= deepstream-test1-app

TARGET_DEVICE = $(shell gcc -dumpmachine | cut -f1 -d -)

NVDS_VERSION:=5.1

LIB_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/lib/
APP_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/bin/

ifeq ($(TARGET_DEVICE),aarch64)
  CFLAGS:= -DPLATFORM_TEGRA
endif

SRCS:= $(wildcard *.c)

INCS:= $(wildcard *.h)

PKGS:= gstreamer-1.0 gstreamer-video-1.0

OBJS:= $(SRCS:.c=.o)

CFLAGS+= -I../../../includes \
	 -I /usr/local/cuda-$(CUDA_VER)/include

CFLAGS+= $(shell pkg-config --cflags $(PKGS))

LIBS:= $(shell pkg-config --libs $(PKGS))

LIBS+= -L/usr/local/cuda-$(CUDA_VER)/lib64/ -lcudart \
       -L$(LIB_INSTALL_DIR) -lnvdsgst_meta -lgstrtspserver-1.0 -lnvds_meta \
       -lcuda -Wl,-rpath,$(LIB_INSTALL_DIR)

all: $(APP)

%.o: %.c $(INCS) Makefile
	$(CC) -c -o $@ $(CFLAGS) $<

$(APP): $(OBJS) Makefile
	$(CC) -o $(APP) $(OBJS) $(LIBS)

install: $(APP)
	cp -rv $(APP) $(APP_INSTALL_DIR)

clean:
	rm -rf $(OBJS) $(APP)
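To build and run, the only required variable is CUDA_VER. A typical sequence inside the 5.1-21.02 devel container is sketched below — CUDA 11.1 is assumed to be the version shipped with that image (adjust if yours differs), and the two files above are assumed to replace the stock deepstream_test1_app.c and Makefile in the sample directory so that the relative include and sample paths keep working:

cd /opt/nvidia/deepstream/deepstream-5.1/sources/apps/sample_apps/deepstream-test1
make CUDA_VER=11.1
./deepstream-test1-app ../../../../samples/streams/sample_720p.h264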
