gstreamer讀取USB攝像頭H264幀並用rtmp推流


因爲要在嵌入式端使用rtmp推流,目前我知道的有三種辦法,ffmpeg、gstreamer、librtmp,每一種都需要移植到嵌入式平臺,還是從我最熟悉的gstreamer開始驗證吧。
現在我的嵌入式平臺gstreamer庫沒有rtmp元件,因此只能先在Ubuntu16.04系統的PC上測試,然後再移植帶有rtmp元件的gstreamer庫。
Ubuntu16.04系統已經自帶了gstreamer-1.0的庫,並且已經包含rtmp元件,不用移植可以直接測試了。
注意:我使用的USB攝像頭可以直接輸出H264幀,因此不需要使用編碼元件。

gstreamer命令行實現rtmp推流

首先用命令行工具測試:

gst-launch-1.0 -v v4l2src device=/dev/video0 ! 'video/x-h264, width=1280, height=720, framerate=30/1' ! queue !  h264parse ! flvmux ! rtmpsink location='rtmp://192.168.1.102/live'

這個命令行執行後,就可以在192.168.1.102地址的PC上打開流媒體服務端觀看。可以使用nginx或者srs流媒體服務端,創建一個html文件打開網頁觀看。

	<h1>01</h1>
	<object width='640' height='377' id='SampleMediaPlayback' name='SampleMediaPlayback' type='application/x-shockwave-flash' classid='clsid:d27cdb6e-ae6d-11cf-96b8-444553540000' >
		<param name='movie' value='swfs/SampleMediaPlayback.swf' /> 
		<param name='quality' value='high' /> 
		<param name='bgcolor' value='#000000' /> 
		<param name='allowfullscreen' value='true' /> 
		<embed src='SampleMediaPlayback.swf' width='640' height='377' id='SampleMediaPlayback' quality='high' bgcolor='#000000' name='SampleMediaPlayback' allowfullscreen='true' pluginspage='http://www.adobe.com/go/getflashplayer' flashvars='&src=rtmp://192.168.1.102:1935/live&autoHideControlBar=true&streamType=live&autoPlay=true&verbose=true' type='application/x-shockwave-flash'> 
		</embed>
	</object>

gstreamer代碼實現rtmp推流

#include <string.h>
#include <gst/gst.h>
#include <signal.h>
#include <unistd.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>

//gst-launch-1.0 -v v4l2src device=/dev/video0 ! 'video/x-h264, width=640, height=360, framerate=30/1' ! queue !  h264parse ! flvmux ! rtmpsink location='rtmp://192.168.1.102/live'

// Bundles every GStreamer object of the v4l2src -> queue -> h264parse ->
// flvmux -> rtmpsink pipeline together with the GLib main loop that drives it.
// A single static instance (GstData) is shared between main() and the bus callback.
typedef struct _GstDataStruct
{
	GstElement *pipeline;		// top-level pipeline holding all elements below
	GstElement *v4l2src;		// camera capture source ("device" property set in main)
	GstElement *queue;			// decouples capture thread from the downstream chain
	GstElement *h264parse;		// parses the camera's H.264 byte-stream
	GstElement *flvmux;			// wraps H.264 into FLV for RTMP
	GstElement *rtmpsink;		// pushes FLV to the RTMP server ("location" property)
	GstBus *bus;				// pipeline message bus (watched for EOS/ERROR)
	guint bus_watch_id;			// id returned by gst_bus_add_watch(); removed at shutdown
	guint sourceid;        /* To control the GSource */
	GMainLoop *loop;  /* GLib's Main Loop */
} GstDataStruct;

// Shared state between main() and bus_msg_call(); file-scope on purpose.
static GstDataStruct GstData;
static unsigned int frame_width;	// capture width in pixels
static unsigned int frame_height;	// capture height in pixels
static unsigned int frame_rate;		// capture frame rate (fps, numerator over 1)
static unsigned int frame_bps;		// requested bitrate; only printed, never applied
									// (camera delivers pre-encoded H.264, no encoder element)
static char devname[32] = {0};		// V4L2 device path, e.g. "/dev/video0"

/*
 * Bus watch callback: quits the GLib main loop when the pipeline reports
 * end-of-stream or an error. Always returns TRUE so the watch stays installed.
 */
gboolean bus_msg_call(GstBus *bus, GstMessage *msg, GstDataStruct *pGstData)
{
	GMainLoop *main_loop = pGstData->loop;
	GstMessageType msg_type = GST_MESSAGE_TYPE(msg);

	GST_DEBUG ("got message %s",gst_message_type_get_name (GST_MESSAGE_TYPE (msg)));

	if (msg_type == GST_MESSAGE_EOS)
	{
		printf("End of stream\n");
		g_main_loop_quit(main_loop);
	}
	else if (msg_type == GST_MESSAGE_ERROR)
	{
		gchar *dbg_info = NULL;
		GError *err = NULL;

		gst_message_parse_error(msg, &err, &dbg_info);
		g_free(dbg_info);					/* debug string not shown, release it */
		g_printerr("Error: %s\n", err->message);
		g_error_free(err);
		g_main_loop_quit(main_loop);
	}
	/* all other message types are ignored */
	return TRUE;
}

/*
 * Entry point. Usage: v4l2_rtmp <device> <width> <height> <framerate> <bitrate>
 * Any other argument count falls back to built-in defaults (1280x720@30 on
 * /dev/video0). Builds and runs the pipeline
 *   v4l2src ! video/x-h264(byte-stream) ! queue ! h264parse ! video/x-h264(avc) ! flvmux ! rtmpsink
 * and blocks in the GLib main loop until EOS or an error. Returns 0 on clean
 * shutdown, -1 on setup failure.
 */
int main(int argc, char *argv[])
{
	if(argc != 6)
	{
		frame_width = 1280;
		frame_height = 720;
		frame_rate = 30;
		frame_bps = 1500000;
		snprintf(devname, sizeof(devname), "%s", "/dev/video0");
	}
	else
	{
		frame_width = atoi(argv[2]);
		frame_height = atoi(argv[3]);
		frame_rate = atoi(argv[4]);
		frame_bps = atoi(argv[5]);	/* NOTE: only printed below, never applied to the pipeline */
		/* bounded copy — the original sprintf could overflow devname[32] */
		snprintf(devname, sizeof(devname), "%s", argv[1]);
	}
	/* %u matches the unsigned globals (the original used %d) */
	printf("width:%u, height:%u, rate:%u, bps:%u, dev:%s\n", frame_width, frame_height, frame_rate, frame_bps, devname);

	printf("============= v4l2 rtmp gst init start ============\n");
	gst_init (NULL, NULL);
	printf("=========== create v4l2 rtmp pipeline =============\n");
	/* create the pipeline exactly once (the original called gst_pipeline_new
	 * twice in a row, leaking the first instance) */
	GstData.pipeline           	= gst_pipeline_new ("v4l2_rtmp");
	GstData.v4l2src        	   	= gst_element_factory_make ("v4l2src",      "v4l2src");
	GstData.queue      		   	= gst_element_factory_make ("queue",  		"queue");
	GstData.h264parse      	   	= gst_element_factory_make ("h264parse",	"h264parse");
	GstData.flvmux           	= gst_element_factory_make ("flvmux",      	"flvmux");
	GstData.rtmpsink            = gst_element_factory_make ("rtmpsink",     "rtmpsink");

	if (!GstData.pipeline || !GstData.v4l2src || !GstData.queue ||
		!GstData.h264parse || !GstData.flvmux || !GstData.rtmpsink)
	{
		g_printerr ("One element could not be created... Exit\n");
		return -1;
	}

	printf("============ link v4l2 rtmp pipeline ==============\n");
	/* Caps for v4l2src -> queue: the camera emits raw Annex-B ("byte-stream")
	 * access units at the requested resolution/rate. */
	GstCaps *caps_v4l2src;
	caps_v4l2src = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING,"byte-stream",
									   "alignment", G_TYPE_STRING, "au",
									   "width", G_TYPE_INT, frame_width,
									   "height", G_TYPE_INT, frame_height,
									   "framerate",GST_TYPE_FRACTION, frame_rate, 1, NULL);
	/* Caps for h264parse -> flvmux: FLV muxing requires "avc" stream format. */
	GstCaps *caps_flv_sink;
	caps_flv_sink = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING,"avc",
									    "alignment", G_TYPE_STRING, "au",
									    "width", G_TYPE_INT, frame_width,
									    "height", G_TYPE_INT, frame_height,
									    "framerate",GST_TYPE_FRACTION, frame_rate, 1, NULL);

	g_object_set(G_OBJECT(GstData.v4l2src), "device", devname, NULL);
	/* NOTE: this location is the RTMP publish URL; it must match the URL the
	 * player page (html/flash client) subscribes to, or nothing is visible. */
	g_object_set(G_OBJECT(GstData.rtmpsink), "location", "rtmp://192.168.1.102/live", NULL);

	GstData.bus = gst_pipeline_get_bus(GST_PIPELINE(GstData.pipeline));
	GstData.bus_watch_id = gst_bus_add_watch(GstData.bus, (GstBusFunc)bus_msg_call, (gpointer)&GstData);
	gst_object_unref(GstData.bus);	/* the watch keeps its own reference */

	gst_bin_add_many(GST_BIN(GstData.pipeline), GstData.v4l2src, GstData.queue,
					 GstData.h264parse, GstData.flvmux, GstData.rtmpsink,NULL);

	if(gst_element_link_filtered(GstData.v4l2src, GstData.queue, caps_v4l2src) != TRUE)
	{
		g_printerr ("GstData.v4l2src could not link GstData.queue\n");
		gst_caps_unref (caps_v4l2src);	/* original leaked both caps on this path */
		gst_caps_unref (caps_flv_sink);
		gst_object_unref (GstData.pipeline);
		return -1;
	}
	gst_caps_unref (caps_v4l2src);

	if(gst_element_link(GstData.queue, GstData.h264parse) != TRUE)
	{
		g_printerr ("GstData.queue could not link GstData.h264parse\n");
		gst_caps_unref (caps_flv_sink);
		gst_object_unref (GstData.pipeline);
		return -1;
	}

	if(gst_element_link_filtered(GstData.h264parse, GstData.flvmux, caps_flv_sink) != TRUE)
	{
		g_printerr ("GstData.h264parse could not link GstData.flvmux\n");
		gst_caps_unref (caps_flv_sink);
		gst_object_unref (GstData.pipeline);
		return -1;
	}
	gst_caps_unref (caps_flv_sink);

	if(gst_element_link(GstData.flvmux, GstData.rtmpsink) != TRUE)
	{
		/* original printed the h264parse/flvmux message here by copy-paste */
		g_printerr ("GstData.flvmux could not link GstData.rtmpsink\n");
		gst_object_unref (GstData.pipeline);
		return -1;
	}

	printf("========= link v4l2 rtmp pipeline running ==========\n");
	gst_element_set_state (GstData.pipeline, GST_STATE_PLAYING);
	GstData.loop = g_main_loop_new(NULL, FALSE);	// Create gstreamer loop
	g_main_loop_run(GstData.loop);					// Loop will run until receiving EOS (end-of-stream), will block here
	printf("g_main_loop_run returned, stopping rtmp!\n");
	gst_element_set_state (GstData.pipeline, GST_STATE_NULL);		// Stop pipeline to be released
	printf("Deleting pipeline\n");
	gst_object_unref (GstData.pipeline);							// This will also delete all pipeline elements
	g_source_remove(GstData.bus_watch_id);
	g_main_loop_unref(GstData.loop);

	return 0;
}

此代碼在Ubuntu16.04系統下使用gcc編譯,makefile如下:
需要將系統目錄下的關於gstreamer的庫文件拷貝到當前目錄的libs_x86目錄下,另外系統的gstreamer庫鏈接文件都帶有.so.0後綴,去掉最後的.0,保留到.so即可。

# Build rules for the v4l2-to-RTMP demo (x86 Ubuntu build).
# -v: verbose compiler output; -g: debug symbols; headers expected under ./include
CFLAGS = -v -g -Wall -Wno-shift-count-overflow -I./include
# GStreamer/GLib .so files are copied into ./libs_x86 (see note above in the article)
LDFLAGS = -L./libs_x86
CC = gcc
# GStreamer-1.0 + GLib link set; -lpcre/-lrt are presumably GLib's own
# link-time dependencies on this system — verify against the local GLib build
EXTRA_LIBS = -lstdc++ -lm -lpthread -lgstreamer-1.0 -lgstbase-1.0 -lgobject-2.0 -lgmodule-2.0 -lglib-2.0 -lpcre -lrt
SRC = v4l2_rtmp.c
TARGET = v4l2_rtmp
# Default target: compile and link in a single gcc invocation
ALL:
	$(CC) $(CFLAGS) $(LDFLAGS) $(SRC) -o $(TARGET) $(EXTRA_LIBS) 
# Remove the binary and any recorded media artifacts
clean:
	rm v4l2_rtmp *.raw *.mp4 *.wav -rf

此代碼只是一個簡單實現,並沒有做其他操作,也沒有長時間驗證網絡的穩定性,只是說明了可行性,另外還沒有添加音頻,等過幾天再把音頻也加上來。

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章