以下程序:獲取一個媒體文件中流的數目以及流的信息,並且可以切換音軌。
可以理解爲:使用playbin播放媒體,然後從playbin中獲取以上信息
#include "pch.h"
#include<string.h>
#include<stdio.h>
#include <gst/gst.h>
/* Application state shared with the bus and keyboard callbacks.
 * Bundling everything in one struct makes it easy to pass as the
 * user-data pointer of GLib/GStreamer callbacks. */
typedef struct _CustomData{
GstElement *playbin;   /* the single playbin element doing all the work */
gint n_video;          /* number of embedded video streams */
gint n_audio;          /* number of embedded audio streams */
gint n_text;           /* number of embedded subtitle streams */
gint current_video;    /* index of the currently playing video stream */
gint current_audio;    /* index of the currently playing audio stream */
gint current_text;     /* index of the currently playing subtitle stream */
GMainLoop *main_loop;  /* GLib main loop; quit on ERROR/EOS */
}CustomData;
/* Mirror of playbin's "flags" property bits. GStreamer does not install
 * this enum in a public header for applications, so the tutorial redefines
 * the bit values locally.
 * NOTE(review): "PALY" is a misspelling of "PLAY"; kept because main()
 * references these names — rename both together if cleaning up. */
typedef enum {
GST_PLAY_FLAG_VIDEO = (1 << 0), /* we want video output */
GST_PALY_FLAG_AUDIO = (1 << 1), /* we want audio output */
GST_PALY_FLAG_TEXT = (1 << 2)   /* we want subtitle output */
}GstPlayFlags;
static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard(GIOChannel *source, GIOCondition cond, CustomData *data);
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstStateChangeReturn ret;
gint flags;
GIOChannel *io_stdin;
gst_init(&argc, &argv);
data.playbin = gst_element_factory_make("playbin", "playbin");
if (!data.playbin) {
g_printerr("could not create playbin\n");
return -1;
}
//file:///C:\Users\lenovo\Desktop\testVideo\[PGS][Tintin-004][DTS-AC3][5PGS].mkv
g_object_set(data.playbin, "uri", "file:///C:/Users/lenovo/Desktop/testVideo/[PGS][Tintin-004][DTS-AC3][5PGS].mkv", NULL);
g_object_get(data.playbin, "flags", &flags, NULL);
flags |= GST_PLAY_FLAG_VIDEO | GST_PALY_FLAG_AUDIO;
flags &= ~GST_PALY_FLAG_TEXT;
g_object_set(data.playbin, "flags", flags, NULL);
//connection-speed設置網絡的最大連接速度
g_object_set(data.playbin, "connection-speed", 56, NULL);
// 我們逐個的設置這些屬性,但我們也可以僅調用g_object_set()一次,來設置uri,flags,connect-speed
bus = gst_element_get_bus(data.playbin);
gst_bus_add_watch(bus, (GstBusFunc)handle_message, &data);
//這幾行連接了一個標準輸入(鍵盤)和一個回調函數。這裏使用的機制是GLib的,並非是基於GStreamer的.
#ifdef G_OS_WIN32
io_stdin = g_io_channel_win32_new_fd(_fileno(stdin));
#else
io_stdin = g_io_channel_unix_new(fileno(stdin));
#endif
g_io_add_watch(io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
ret = gst_element_set_state(data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("could not set sate to playing\n");
gst_object_unref(data.playbin);
return -1;
}
//爲了交互,不再手動輪詢gstreamer總線,我們創建main_loop,並且使用了g_main_loop_run函數讓它運行起來。
//,直到調用g_main_loop_quit()才被返回
data.main_loop = g_main_loop_new(NULL,false);
g_main_loop_run(data.main_loop);
g_main_loop_unref(data.main_loop);
g_io_channel_unref(io_stdin);
gst_object_unref(bus);
gst_element_set_state(data.playbin, GST_STATE_NULL);
g_object_unref(data.playbin);
return 0;
}
/* Query playbin for the number of video/audio/subtitle streams, print each
 * stream's metadata (codec, language, bitrate where available), and report
 * which streams are currently selected. Called when playback reaches PLAYING. */
static void analyze_streams(CustomData *data) {
gint i;
GstTagList *tags;
gchar *str;
guint rate;
/* Stream counts are plain read-only properties on playbin. */
g_object_get(data->playbin, "n-video", &data->n_video, NULL);
g_object_get(data->playbin, "n-audio", &data->n_audio, NULL);
g_object_get(data->playbin, "n-text", &data->n_text, NULL);
g_print("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
data->n_video, data->n_audio, data->n_text);
g_print("\n");
for (i = 0; i < data->n_video; i++) {
tags = NULL;
/* Per-stream metadata lives in a GstTagList. playbin exposes three action
 * signals to retrieve it: get-video-tags, get-audio-tags and get-text-tags;
 * g_signal_emit_by_name() fires the signal and receives the list (NULL if
 * the stream carries no tags). Individual tags are then read with the
 * gst_tag_list_get_* family. */
g_signal_emit_by_name(data->playbin, "get-video-tags", i, &tags);
if (tags) {
g_print("video stream %d:\n", i);
str = NULL;
/* Fix: the original ignored the return value and read an uninitialized
 * pointer when GST_TAG_VIDEO_CODEC was absent (undefined behavior). */
if (gst_tag_list_get_string(tags, GST_TAG_VIDEO_CODEC, &str)) {
g_print("codec:%s\n", str);
g_free(str);
} else {
g_print("codec:%s\n", "unknown");
}
gst_tag_list_free(tags);
}
}
g_print("\n");
for (i = 0; i < data->n_audio; i++) {
tags = NULL;
g_signal_emit_by_name(data->playbin, "get-audio-tags", i, &tags);
if (tags) {
g_print("audio stream %d:\n", i);
if (gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &str)) {
g_print("codec:%s\n", str);
g_free(str);
}
if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
g_print(" language: %s\n", str);
g_free(str);
}
if (gst_tag_list_get_uint(tags, GST_TAG_BITRATE, &rate)) {
g_print(" bitrate: %d\n", rate);
}
gst_tag_list_free(tags);
}
}
g_print("\n");
for (i = 0; i < data->n_text; i++) {
tags = NULL;
/* Retrieve the stream's subtitle tags */
g_signal_emit_by_name(data->playbin, "get-text-tags", i, &tags);
if (tags) {
g_print("subtitle stream %d:\n", i);
if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
g_print(" language: %s\n", str);
g_free(str);
}
gst_tag_list_free(tags);
}
}
/* Which streams are currently playing. */
g_object_get(data->playbin, "current-video", &data->current_video, NULL);
g_object_get(data->playbin, "current-audio", &data->current_audio, NULL);
g_object_get(data->playbin, "current-text", &data->current_text, NULL);
g_print("\n");
g_print("Currently playing video stream %d, audio stream %d and text stream %d\n",
data->current_video, data->current_audio, data->current_text);
g_print("Type any number and hit ENTER to select a different audio stream\n");
}
/* Bus watch callback, dispatched by the GLib main loop for every message
 * playbin posts. Quits the main loop on ERROR or EOS, and triggers stream
 * analysis the first time playbin itself reaches PLAYING.
 * Returns TRUE to keep the watch installed. */
static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error received from element %s:%s\n", GST_OBJECT_NAME(msg->src), err->message); /* fixed typo "recived" */
g_printerr("debugging information:%s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
break;
case GST_MESSAGE_EOS:
g_print("end of stream reached\n"); /* fixed typo "reched" */
g_main_loop_quit(data->main_loop);
break;
case GST_MESSAGE_STATE_CHANGED: {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
/* Only react to state changes of playbin itself, not its children. */
if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->playbin)) {
if (new_state == GST_STATE_PLAYING) {
analyze_streams(data);
}
}
break;
}
default:
/* All other message types are intentionally ignored. */
break;
}
return TRUE;
}
/* stdin watch callback: read one line, parse it as an audio-stream index and,
 * if in range, switch playbin's "current-audio" property to it.
 * Returns TRUE to keep the watch installed. */
static gboolean handle_keyboard(GIOChannel *source, GIOCondition cond, CustomData *data) {
gchar *str = NULL;
if (g_io_channel_read_line(source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
/* Fix: the original used g_ascii_strtoull(), the *unsigned* parser, so
 * negative input wrapped around before the bounds check. Parse signed. */
int index = (int)g_ascii_strtoll(str, NULL, 0);
if (index < 0 || index >= data->n_audio) {
g_printerr("Index out of bounds\n");
}
else {
/* If the input was a valid audio stream index, set the current audio stream */
g_print("Setting current audio stream to %d\n", index);
g_object_set(data->playbin, "current-audio", index, NULL);
}
}
g_free(str);
return TRUE;
}
代碼分解:
1、定義了一個結構體,包括playbin組件,音軌、視頻、字幕的數目等,以及一個main_loop。把這些信息作爲一個結構體來定義,是爲了方便訪問,比如作爲回調函數的參數來傳遞就很方便。
typedef struct _CustomData{
GstElement *playbin;
gint n_video;
gint n_audio;
gint n_text;
gint current_video;
gint current_audio;
gint current_text;
GMainLoop *main_loop;
}CustomData;
2、定義一個枚舉類型,包括playbin的一些標誌(flag),1<<0 ,就代表1的二進制數左移0位。這裏之所以要自己定義枚舉,是因爲GStreamer並沒有在應用程序可用的公共頭文件中導出GstPlayFlags,所以教程代碼在本地重新定義了這些位的值,與playbin的"flags"屬性一一對應。
typedef enum {
GST_PLAY_FLAG_VIDEO = (1 << 0), //we want video output
GST_PALY_FLAG_AUDIO = (1 << 1),
GST_PALY_FLAG_TEXT = (1 << 2)
}GstPlayFlags;
3、main函數,首先初始化,然後創建playbin
CustomData data;
GstBus *bus;
GstStateChangeReturn ret;
gint flags;
GIOChannel *io_stdin;
gst_init(&argc, &argv);
data.playbin = gst_element_factory_make("playbin", "playbin");
if (!data.playbin) {
g_printerr("could not create playbin\n");
return -1;
}
4、設置playbin中的一些屬性:uri 、flags、connection-speed。
uri就是媒體地址啦,可以是網絡地址,也可以是本地地址。
flags :還沒有弄清楚?????
GST_PLAY_FLAG_VIDEO | 允許視頻渲染,如果這個標誌沒有設置,則沒有視頻輸出 |
GST_PLAY_FLAG_AUDIO | 允許音頻渲染,如果這個標誌沒有設置,則沒有音頻輸出 |
GST_PLAY_FLAG_TEXT | 允許字幕渲染,如果這個標誌沒有設置,則沒有字幕顯示 |
GST_PLAY_FLAG_VIS | 允許在沒有視頻流時進行可視化渲染,後面教程會講到 |
GST_PLAY_FLAG_DOWNLOAD | 參見《GStreamer基礎教程12——流》以及後續教程 |
GST_PLAY_FLAG_BUFFERING | 參見《GStreamer基礎教程12——流》以及後續教程 |
GST_PLAY_FLAG_DEINTERLACE | 如果視頻是隔行掃描的,那麼在顯示時改成逐行掃描 |
connection-speed:設置網絡的最大連接速度。文檔的意思是:當服務器提供同一媒體的多個版本時,playbin會根據這個帶寬值選擇合適的版本。
g_object_set(data.playbin, "uri", "file:///C:/Users/lenovo/Desktop/testVideo/[PGS][Tintin-004][DTS-AC3][5PGS].mkv", NULL);
g_object_get(data.playbin, "flags", &flags, NULL);
flags |= GST_PLAY_FLAG_VIDEO | GST_PALY_FLAG_AUDIO;
flags &= ~GST_PALY_FLAG_TEXT;
g_object_set(data.playbin, "flags", flags, NULL);
//connection-speed設置網絡的最大連接速度
g_object_set(data.playbin, "connection-speed", 56, NULL);
當然也可以使用一個g_object_set函數來設置所有的屬性:
g_object_set(data->playbin, "uri", "file:///c:/filename", "flags", flags, "connection-speed", 56, NULL);
5、監聽總線,設置回調函數handle_message。
bus = gst_element_get_bus(data.playbin);
gst_bus_add_watch(bus, (GstBusFunc)handle_message, &data);
6、看一下回調函數handle_message。
有三個參數:bus、msg、data。bus和data就是main函數中gst_bus_add_watch傳入的bus和&data;而msg是GLib主循環在分發總線消息時自動傳入的——每當playbin向總線投遞一條消息,主循環就會以該消息爲msg參數調用一次這個回調。
使用switch判斷msg,如果是GST_MESSAGE_ERROR或者GST_MESSAGE_EOS就調用g_main_loop_quit()結束loop。
如果是GST_MESSAGE_STATE_CHANGED,也就是狀態改變到playing,相當於初次打開媒體文件,並且開始播放。就調用analyze_streams()來分析流信息。
analyze_stream()接下來分析。g_main_loop_quit()接下來也會解釋。
static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error recived from element %s:%s\n", GST_OBJECT_NAME(msg->src), err->message);
g_printerr("debugging information:%s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
break;
case GST_MESSAGE_EOS:
g_print("end of stream reched\n");
g_main_loop_quit(data->main_loop);
break;
case GST_MESSAGE_STATE_CHANGED: {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->playbin)) {
if (new_state == GST_STATE_PLAYING) {
analyze_streams(data);
}
}
break;
}
}
return TRUE;
}
7、這幾行連接了一個標準輸入(鍵盤)和一個回調函數。這裏使用的機制是GLib的,並非是基於GStreamer的.
最後是設置了回調函數handle_keyboard。
主要是這樣的:在播放過程中隨時等待用戶在命令行輸入內容。handle_keyboard的作用就是在獲取到輸入以後做出判斷並執行相應操作。
#ifdef G_OS_WIN32
io_stdin = g_io_channel_win32_new_fd(_fileno(stdin));
#else
io_stdin = g_io_channel_unix_new(fileno(stdin));
#endif
g_io_add_watch(io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
8、將狀態設置爲playing,並且設置了main_loop。使用g_main_loop_run的意思就是一直循環着,直到遇到g_main_loop_quit()才結束。
ret = gst_element_set_state(data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("could not set state to playing\n");
gst_object_unref(data.playbin);
return -1;
}
//爲了交互,不再手動輪詢gstreamer總線,我們創建main_loop,並且使用了g_main_loop_run函數讓它運行起來。
//,直到調用g_main_loop_quit()才被返回
data.main_loop = g_main_loop_new(NULL,false);
g_main_loop_run(data.main_loop);
9、最後就是釋放資源
g_main_loop_unref(data.main_loop);
g_io_channel_unref(io_stdin);
gst_object_unref(bus);
gst_element_set_state(data.playbin, GST_STATE_NULL);
g_object_unref(data.playbin);
10、analyze_stream()就是真正獲取流信息的地方。
主要是用g_object_get()函數來獲取流數目,當前流。
用g_signal_emit_by_name()來獲取流的tags,然後用gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)來獲取tags中的語言等信息。
static void analyze_streams(CustomData *data) {
gint i;
GstTagList *tags;
gchar *str;
guint rate;
g_object_get(data->playbin, "n-video", &data->n_video, NULL);
g_object_get(data->playbin, "n-audio", &data->n_audio, NULL);
g_object_get(data->playbin, "n-text", &data->n_text, NULL);
g_print("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
data->n_video, data->n_audio, data->n_text);
g_print("\n");
for (i = 0; i < data->n_video; i++) {
tags = NULL;
//現在,對於每一個流來說,我們需要獲得它的元數據。元數據是存在一個GstTagList的結構體裏面,
//這個GstTagList通過g_signal_emit_by_name()可以把流裏面對應的tag都取出來。
//然後可以用gst_tag_list_get_*這一類函數來訪問這些tag,這個例子中用的就是gst_tag_list_get_string()方法。
//playbin定義了3個action信號來獲得元數據:get-video-tags,get-audio-tags和get-text-tags。
//在這個例子中我們關注的是GST_TAG_LANGUAGE_CODE這個tag和GST_TAG_ * _CODEC(audio,video和text)。
g_signal_emit_by_name(data->playbin, "get-video-tags", i, &tags);
if (tags) {
g_print("video stream %d:\n", i);
gst_tag_list_get_string(tags, GST_TAG_VIDEO_CODEC, &str);
g_print("codec:%s\n", str ? str : "unknown");
g_free(str);
gst_tag_list_free(tags);
}
}
g_print("\n");
for (i = 0; i < data->n_audio; i++) {
tags = NULL;
g_signal_emit_by_name(data->playbin, "get-audio-tags", i, &tags);
if (tags) {
g_print("audio stream %d:\n", i);
if (gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &str)) {
g_print("codec:%s\n", str);
g_free(str);
}
if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
g_print(" language: %s\n", str);
g_free(str);
}
if (gst_tag_list_get_uint(tags, GST_TAG_BITRATE, &rate)) {
g_print(" bitrate: %d\n", rate);
}
gst_tag_list_free(tags);
}
}
g_print("\n");
for (i = 0; i < data->n_text; i++) {
tags = NULL;
/* Retrieve the stream's subtitle tags */
g_signal_emit_by_name(data->playbin, "get-text-tags", i, &tags);
if (tags) {
g_print("subtitle stream %d:\n", i);
if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
g_print(" language: %s\n", str);
g_free(str);
}
gst_tag_list_free(tags);
}
}
g_object_get(data->playbin, "current-video", &data->current_video, NULL);
g_object_get(data->playbin, "current-audio", &data->current_audio, NULL);
g_object_get(data->playbin, "current-text", &data->current_text, NULL);
g_print("\n");
g_print("Currently playing video stream %d, audio stream %d and text stream %d\n",
data->current_video, data->current_audio, data->current_text);
g_print("Type any number and hit ENTER to select a different audio stream\n");
}