아래 코드는 udpsink 요소를 사용하여 네트워크로 MPEG4 비디오를 스트리밍합니다 (v4l2src 요소 사용, width=640, height=480, framerate=30/1). 터미널에 "1"을 입력하면 파이프라인을 일시 정지하고 caps를 새 값(width=352, height=288, framerate=15/1)으로 다시 설정한 다음 파이프라인을 다시 재생 상태로 만들려고 했지만, 이 작업이 동작하지 않았습니다. 파이프라인을 일시 정지하고 caps를 변경한 뒤 재생 상태로 되돌려도 변경 사항이 적용되지 않습니다.
파이프라인을 일시 정지하기 전과 caps를 변경한 후에 caps 값을 출력해 보면 caps 자체는 변경되어 있지만, 파이프라인을 다시 재생 상태로 만들었을 때 실제로는 적용되지 않습니다. 아래 코드를 실행하기 전에
새 터미널에 다음 명령을 입력하면 이 스트림을 재생할 수 있습니다. 도움 부탁드립니다:
gst-launch udpsrc multicast-group=127.0.0.1 port=8999 ! mpeg4videoparse ! ffdec_mpeg4 ! ffmpegcolorspace ! autovideosink
======================
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <glib-object.h>
/*
 * Watch callback for the pipeline bus.
 * Quits the main loop (passed through `data`) when the stream ends or
 * when an error message is posted; all other messages are ignored.
 * Always returns TRUE so the watch stays installed.
 */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *main_loop = (GMainLoop *) data;
GstMessageType type = GST_MESSAGE_TYPE (msg);

if (type == GST_MESSAGE_EOS) {
g_print ("End of stream\n");
g_main_loop_quit (main_loop);
} else if (type == GST_MESSAGE_ERROR) {
GError *err = NULL;
gchar *dbg_info = NULL;

gst_message_parse_error (msg, &err, &dbg_info);
g_free (dbg_info);
g_printerr ("Error: %s\n", err->message);
g_error_free (err);
g_main_loop_quit (main_loop);
}

return TRUE;
}
int main (int argc, char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source, *filter, *vrate, *encoder, *conv, *sink;
GstBus *bus;
GstCaps *filtercaps;
gint width, height, num, denom;
const GstStructure *str;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
if (argc != 2) {
g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
return -1;
}
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("video-player");
source = gst_element_factory_make ("v4l2src", "file-source");
vrate = gst_element_factory_make ("videorate", "video-rate");
filter = gst_element_factory_make ("capsfilter", "filter");
conv = gst_element_factory_make ("ffmpegcolorspace","converter");
encoder = gst_element_factory_make ("ffenc_mpeg4","mpeg-decoder");
sink = gst_element_factory_make ("udpsink","audio-output");
if (!pipeline || !source || !filter || !vrate || !conv || !encoder || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* Set up the pipeline */
/* we set the input filename to the source element */
filtercaps = gst_caps_new_simple ("video/x-raw-yuv",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
g_object_set (G_OBJECT (encoder), "bitrate" , 384 , NULL);
g_object_set (G_OBJECT (sink), "host" , argv[1] , NULL);
g_object_set (G_OBJECT (sink), "port" , 8999 , NULL);
g_object_set (G_OBJECT (sink), "async" , FALSE , NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
/* file-source | ogg-demuxer | vorbis-decoder | converter | alsa-output */
gst_bin_add_many (GST_BIN (pipeline), source, vrate, filter, conv, encoder, sink, NULL);
/* we link the elements together */
/* file-source -> ogg-demuxer ~> vorbis-decoder -> converter -> alsa-output */
gst_element_link_many (source, vrate, filter, conv, encoder, sink, NULL);
/* Set the pipeline to "playing" state*/
g_print ("Now playing: \n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Print out the frame size and rate */
str = gst_caps_get_structure (filtercaps, 0);
if (!gst_structure_get_int (str, "width", &width) || !gst_structure_get_int (str, "height", &height) ||
!gst_structure_get_fraction (str, "framerate", &num, &denom))
g_print ("No width/height available\n");
g_print ("The video size of this set of capabilities is %dx%d and the frame rate is %d/%d\n", width, height, num, denom);
/* Pausing the streame */
int in;
if (scanf ("%d", &in) == 1){
g_print ("Now pausing: \n");
gst_element_set_state (pipeline, GST_STATE_PAUSED);
g_assert (GST_STATE (pipeline) == GST_STATE_PAUSED);
g_assert (GST_STATE (source) == GST_STATE_PAUSED);
g_assert (GST_STATE (filter) == GST_STATE_PAUSED);
g_assert (GST_STATE (vrate) == GST_STATE_PAUSED);
g_assert (GST_STATE (encoder) == GST_STATE_PAUSED);
g_assert (GST_STATE (conv) == GST_STATE_PAUSED);
g_assert (GST_STATE (sink) == GST_STATE_PAUSED);
/* apply the alterations to the caps now */
gst_caps_set_simple (filtercaps, "width", G_TYPE_INT, 352, "height", G_TYPE_INT, 288, "framerate", GST_TYPE_FRACTION, 15, 1, NULL);
/* Print out the frame size and rate after alteration*/
str = gst_caps_get_structure (filtercaps, 0);
if (!gst_structure_get_int (str, "width", &width) || !gst_structure_get_int (str, "height", &height) ||
!gst_structure_get_fraction (str, "framerate", &num, &denom))
g_print ("No width/height available\n");
g_print ("The video size of this set of capabilities is %dx%d and the frame rate is %d/%d\n", width, height, num, denom);
/* set back to playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_assert (GST_STATE (pipeline) == GST_STATE_PLAYING);
g_assert (GST_STATE (source) == GST_STATE_PLAYING);
g_assert (GST_STATE (filter) == GST_STATE_PLAYING);
g_assert (GST_STATE (vrate) == GST_STATE_PLAYING);
g_assert (GST_STATE (encoder) == GST_STATE_PLAYING);
g_assert (GST_STATE (conv) == GST_STATE_PLAYING);
g_assert (GST_STATE (sink) == GST_STATE_PLAYING);
}
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
건배,
이브라