GStreamer播放教程02——字幕管理
来源:互联网 发布:15年广东省的经济数据 编辑:程序博客网 时间:2024/05/01 19:07
目标
这篇教程和上一篇非常相似,但不是切换音频流,而是字幕了。这次我们会展示:
如何选择字幕流
如何引入外部的字幕
如何客制化字幕使用的字体
介绍
我们都知道一个文件可以包含多个音频流和视频流,并且可以使用playbin2的current-audio和current-video属性很方便地进行切换。切换字幕也是一样的方便。
就和音视频一样,playbin2会选择解码好的字幕,而且GStreamer的插件设计也很容易支持一种新的文件结构。
但字幕还是有自己的特殊之处,除了可以嵌入文件里面,playbin2还支持使用外界的URI来提供字幕。
本教程会打开一个包含5个字幕流的文件,并从外部再导入一个字幕(希腊语)。
多语言字幕的播放器
#include <gst/gst.h>
#include <stdio.h>   /* fileno() — was implicitly declared in the original */
#include <stdlib.h>  /* atoi() — was implicitly declared in the original */

/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstElement *playbin2;  /* Our one and only element */
  gint n_video;          /* Number of embedded video streams */
  gint n_audio;          /* Number of embedded audio streams */
  gint n_text;           /* Number of embedded subtitle streams */
  gint current_video;    /* Currently playing video stream */
  gint current_audio;    /* Currently playing audio stream */
  gint current_text;     /* Currently playing subtitle stream */
  GMainLoop *main_loop;  /* GLib's Main Loop */
} CustomData;

/* playbin2 flags (subset of the GstPlayFlags the element understands) */
typedef enum {
  GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
  GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
  GST_PLAY_FLAG_TEXT  = (1 << 2)  /* We want subtitle output */
} GstPlayFlags;

/* Forward definitions for the message and keyboard processing functions */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);

/*
 * Entry point: builds a playbin2 pipeline for a network URI, attaches an
 * external Greek subtitle file via "suburi", enables audio/video/text
 * rendering through the "flags" property, and runs a GLib main loop that
 * reacts to bus messages and keyboard input (subtitle-stream switching).
 */
int main (int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstStateChangeReturn ret;
  gint flags;
  GIOChannel *io_stdin;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");
  if (!data.playbin2) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Set the URI to play */
  g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.ogv", NULL);

  /* Set the subtitle URI to play and some font description (Pango syntax) */
  g_object_set (data.playbin2, "suburi", "http://docs.gstreamer.com/media/sintel_trailer_gr.srt", NULL);
  g_object_set (data.playbin2, "subtitle-font-desc", "Sans, 18", NULL);

  /* Set flags to show Audio, Video and Subtitles */
  g_object_get (data.playbin2, "flags", &flags, NULL);
  flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_TEXT;
  g_object_set (data.playbin2, "flags", flags, NULL);

  /* Add a bus watch, so we get notified when a message arrives */
  bus = gst_element_get_bus (data.playbin2);
  gst_bus_add_watch (bus, (GstBusFunc)handle_message, &data);

  /* Add a keyboard watch so we get notified of keystrokes */
#ifdef _WIN32
  io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
#else
  io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
  g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);

  /* Start playing */
  ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.playbin2);
    return -1;
  }

  /* Create a GLib Main Loop and set it to run */
  data.main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.main_loop);

  /* Free resources */
  g_main_loop_unref (data.main_loop);
  g_io_channel_unref (io_stdin);
  gst_object_unref (bus);
  gst_element_set_state (data.playbin2, GST_STATE_NULL);
  gst_object_unref (data.playbin2);
  return 0;
}

/*
 * Extract some metadata from the streams and print it on the screen.
 * Called from the bus handler once the pipeline reaches PLAYING, when the
 * stream topology (n-video/n-audio/n-text) is known.
 */
static void analyze_streams (CustomData *data) {
  gint i;
  GstTagList *tags;
  gchar *str;
  guint rate;

  /* Read some properties */
  g_object_get (data->playbin2, "n-video", &data->n_video, NULL);
  g_object_get (data->playbin2, "n-audio", &data->n_audio, NULL);
  g_object_get (data->playbin2, "n-text", &data->n_text, NULL);

  g_print ("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
      data->n_video, data->n_audio, data->n_text);

  g_print ("\n");
  for (i = 0; i < data->n_video; i++) {
    tags = NULL;
    /* Retrieve the stream's video tags */
    g_signal_emit_by_name (data->playbin2, "get-video-tags", i, &tags);
    if (tags) {
      g_print ("video stream %d:\n", i);
      /* BUGFIX: the original read 'str' (and g_free'd it) without checking
       * gst_tag_list_get_string's return value; on a tag miss 'str' was an
       * uninitialized pointer — undefined behavior. Check the return value,
       * matching the audio-tag branch below. */
      if (gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str)) {
        g_print (" codec: %s\n", str);
        g_free (str);
      } else {
        g_print (" codec: unknown\n");
      }
      gst_tag_list_free (tags);
    }
  }

  g_print ("\n");
  for (i = 0; i < data->n_audio; i++) {
    tags = NULL;
    /* Retrieve the stream's audio tags */
    g_signal_emit_by_name (data->playbin2, "get-audio-tags", i, &tags);
    if (tags) {
      g_print ("audio stream %d:\n", i);
      if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
        g_print (" codec: %s\n", str);
        g_free (str);
      }
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        g_print (" language: %s\n", str);
        g_free (str);
      }
      if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
        g_print (" bitrate: %d\n", rate);
      }
      gst_tag_list_free (tags);
    }
  }

  g_print ("\n");
  for (i = 0; i < data->n_text; i++) {
    tags = NULL;
    /* Retrieve the stream's subtitle tags. External subtitles (suburi)
     * typically carry no tags, hence the "no tags found" branch. */
    g_print ("subtitle stream %d:\n", i);
    g_signal_emit_by_name (data->playbin2, "get-text-tags", i, &tags);
    if (tags) {
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        g_print (" language: %s\n", str);
        g_free (str);
      }
      gst_tag_list_free (tags);
    } else {
      g_print (" no tags found\n");
    }
  }

  g_object_get (data->playbin2, "current-video", &data->current_video, NULL);
  g_object_get (data->playbin2, "current-audio", &data->current_audio, NULL);
  g_object_get (data->playbin2, "current-text", &data->current_text, NULL);

  g_print ("\n");
  g_print ("Currently playing video stream %d, audio stream %d and subtitle stream %d\n",
      data->current_video, data->current_audio, data->current_text);
  g_print ("Type any number and hit ENTER to select a different subtitle stream\n");
}

/* Process messages from GStreamer (runs on the main loop via the bus watch) */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (msg, &err, &debug_info);
      g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
      g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
      g_clear_error (&err);
      g_free (debug_info);
      g_main_loop_quit (data->main_loop);
      break;
    case GST_MESSAGE_EOS:
      g_print ("End-Of-Stream reached.\n");
      g_main_loop_quit (data->main_loop);
      break;
    case GST_MESSAGE_STATE_CHANGED: {
      GstState old_state, new_state, pending_state;
      gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
      /* Only react to state changes of the top-level pipeline itself */
      if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin2)) {
        if (new_state == GST_STATE_PLAYING) {
          /* Once we are in the playing state, analyze the streams */
          analyze_streams (data);
        }
      }
    } break;
    default:
      break;
  }

  /* We want to keep receiving messages */
  return TRUE;
}

/* Process keyboard input: a number selects the subtitle stream to render */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
  gchar *str = NULL;

  if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
    int index = atoi (str);
    if (index < 0 || index >= data->n_text) {
      g_printerr ("Index out of bounds\n");
    } else {
      /* If the input was a valid subtitle stream index, set the current subtitle stream */
      g_print ("Setting current subtitle stream to %d\n", index);
      g_object_set (data->playbin2, "current-text", index, NULL);
    }
  }
  g_free (str);
  return TRUE;
}
工作流程
这篇教程和上篇教程的例子只有很小的差别,让我们就看看这些不同的地方吧。
/* Hand playbin2 an external subtitle file via "suburi" (it joins the list of
 * embedded subtitle streams and becomes the default selection), then choose
 * the rendering font with a Pango font-description string. */
g_object_set (data.playbin2, "suburi", "http://docs.gstreamer.com/media/sintel_trailer_gr.srt", NULL);
g_object_set (data.playbin2, "subtitle-font-desc", "Sans, 18", NULL);
在设置媒体URI之后,我们设置了suburi属性,这样playbin2就获得了外部字幕流的地址。在这个例子里,文件本身已经包含了多个字幕流,用suburi设置的外部字幕会加入这个列表一起列出,并且是默认选择的。
注意,在文件里面包含的字幕流是有元数据的(比如字幕的语言),然而外部的字幕流没有元数据。当运行这个例子时你会看到第一个字幕流没有语言的标签。
subtitle-font-desc属性允许设置字幕的文本字体。因为使用了Pango库来进行字体的渲染,所以具体可以查询相关文档。
简单概括一下,字符串字体的描述是根据[FAMILY-LIST][STYLE-OPTIONS][SIZE]来的,其中FAMILY-LIST是用逗号隔开的一系列可选字体,STYLE-OPTIONS是用空格来分开的一系列字体样式,SIZE则是字体大小。比如:
sans bold 12
serif, monospace bold italic condensed 16
normal 10
常见的字体包括:Normal,Sans,Serif和Monospace。
常用的样式包括:Normal,Oblique,Italic
常见的粗细包括:Ultra-Light,Light,Normal,Bold,Ultra-Bold,Heavy
常见的变化包括:Normal,Small_Caps
常见的拉伸包括:Ultra-Condensed,Extra-Condensed,Condensed,Semi-Condensed,Normal,Semi-Expanded,Extra-Expanded,Ultra-Expanded
/* Turn on audio, video and subtitle rendering: read the current "flags"
 * bitmask, OR in the three output bits, and write it back. */
g_object_get (data.playbin2, "flags", &flags, NULL);
flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_TEXT;
g_object_set (data.playbin2, "flags", flags, NULL);
我们设置flags属性,把音频,视频和字幕的开关都打开。
剩下的部分和上一篇里面的例子是一样的,除了键盘输入是改变current-text的属性而不是current-audio的属性。这里再强调一下,切换流不会马上起作用,因为缓冲了许多解码好的数据了。
- 【GStreamer开发】GStreamer播放教程02——字幕管理
- GStreamer播放教程02——字幕管理
- GStreamer播放教程02——字幕管理
- GStreamer播放教程02——字幕管理
- 【GStreamer开发】GStreamer播放教程04——既看式流
- 【GStreamer开发】GStreamer播放教程05——色彩平衡
- 【GStreamer开发】GStreamer播放教程06——可视化音频
- 【GStreamer开发】GStreamer播放教程09——数字音频传输
- GStreamer播放教程04——既看式流
- GStreamer播放教程04——既看式流
- GStreamer播放教程01——playbin2的使用
- GStreamer播放教程03——pipeline的快捷访问
- GStreamer播放教程05——色彩平衡
- GStreamer播放教程06——可视化音频
- GStreamer播放教程07——自定义playbin2的sink
- GStreamer播放教程08——视频解码的硬件加速
- GStreamer播放教程09——数字音频传输
- GStreamer播放教程09——数字音频传输
- 二叉查找树的实现
- linux内核系统调用和标准C库函数的关系分析
- [LeetCode] Plus One
- CF 9A. Die Roll
- Java数组与内存控制
- GStreamer播放教程02——字幕管理
- seam中的<s:validateEquality>标签
- java集合类总结
- 利用Java编写简单的WebService实例
- java文件复制
- RTB, DSP, AdExchange知识点汇总
- Java可以做什么及可以给我们带来什么?
- 日志测试Checklist
- Intel 平台编程总结----缓存优化之数据预取