-
Notifications
You must be signed in to change notification settings - Fork 6
/
Copy pathcapture.cpp
133 lines (126 loc) · 4.58 KB
/
capture.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
#include <gst/gst.h>
#include <glib.h>
#include <string>
#include <iostream>
/* Pair of parser elements used by the (currently disabled) demuxer
 * "pad-added" handler: dynamic demuxer pads are routed to either the
 * video or the audio parser depending on their caps. */
struct Parser
{
GstElement *video;   /* sink for video pads (e.g. h264parse) */
GstElement *audio;   /* sink for audio pads (e.g. aacparse)  */
};
/* Bus watch callback.
 *
 * Quits the main loop (supplied through @data) on end-of-stream or on a
 * pipeline error, printing the error.  Returns TRUE so the watch stays
 * installed for subsequent messages.
 */
static gboolean
bus_call (GstBus *bus,
          GstMessage *msg,
          gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;

    case GST_MESSAGE_ERROR: {
      gchar *debug = NULL;
      GError *error = NULL;

      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("Error: %s\n", error->message);
      /* Fix: previously the debug string was freed without being shown,
       * throwing away the most useful diagnostic information. */
      if (debug != NULL)
        g_printerr ("Debug info: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }

    default:
      break;
  }
  return TRUE;
}
/* Demuxer "pad-added" handler.
 *
 * Links a newly exposed dynamic pad to the matching parser held in the
 * Parser struct passed via @data: pads whose caps name contains "video"
 * go to parser->video (h264parse), everything else to parser->audio
 * (aacparse).
 */
static void
on_pad_added (GstElement *element,
              GstPad *pad,
              gpointer data)
{
  Parser *parser = (Parser *) data;
  GstPad *sinkpad;
  GstCaps *caps;
  GstStructure *str;
  gchar *caps_desc;
  std::string type;

  caps = gst_pad_get_current_caps (pad);
  /* Fix: caps can legitimately be NULL before negotiation; the old code
   * dereferenced it unconditionally. */
  if (caps == NULL) {
    g_printerr ("pad-added: pad has no caps yet, cannot link\n");
    return;
  }
  str = gst_caps_get_structure (caps, 0);
  type = gst_structure_get_name (str);

  /* Fix: gst_caps_to_string() returns an allocated string that was leaked. */
  caps_desc = gst_caps_to_string (caps);
  g_print ("%s\n", caps_desc);
  g_free (caps_desc);

  if (type.find ("video") != std::string::npos) {
    sinkpad = gst_element_get_static_pad (parser->video, "sink");
    g_print ("linking demuxer/h264parse\n");
  } else {
    sinkpad = gst_element_get_static_pad (parser->audio, "sink");
    g_print ("linking demuxer/aacparse\n");   /* fix: was "accparse" typo */
  }

  /* Fix: link failures were silently ignored. */
  if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
    g_printerr ("Failed to link demuxer pad of type %s\n", type.c_str ());

  gst_object_unref (sinkpad);
  /* Fix: gst_pad_get_current_caps() returns a reference that was leaked. */
  gst_caps_unref (caps);
}
int
main (int argc,
char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source, *demuxer,*video_encoder, *video_parser, *video_decoder, *video_sink, * audio_decoder, * audio_sink;
Parser parser;
Parser queue;
GstBus *bus;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("capture-player");
source = gst_element_factory_make ("wrappercamerabinsrc", "camera-source");
// demuxer = gst_element_factory_make ("qtdemux", "demuxer");
// queue.video = gst_element_factory_make ("queue", "video_queue");
// queue.audio = gst_element_factory_make ("queue", "audio_queue");
// parser.video = gst_element_factory_make ("h264parse", "video_parser");
// parser.audio = gst_element_factory_make ("aacparse", "audio_parser");
// video_decoder = gst_element_factory_make ("avdec_h264", "video_decoder");
// audio_decoder= gst_element_factory_make ("faad", "audio_decoder");
video_encoder = gst_element_factory_make("x264enc", "video_encoder");
video_parser = gst_element_factory_make("h264parse", "video_parser");
video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
video_sink = gst_element_factory_make ("autovideosink", "video_sinker");
// audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
// if (!pipeline || !source || !demuxer || !queue.video || !queue.audio || !parser.video || !parser.audio ||
// !video_decoder || !audio_decoder || !video_sink|| !audio_sink) {
// g_printerr ("One element could not be created. Exiting.\n");
// return -1;
// }
/* Set up the pipeline */
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
// gst_bin_add_many (GST_BIN (pipeline), source, demuxer, queue.audio,
// gst_bin_add_many (GST_BIN (pipeline), source, demuxer,queue.audio, queue.video, parser.video, video_decoder, video_sink,
// parser.audio, audio_decoder, audio_sink, NULL);
gst_bin_add_many (GST_BIN (pipeline), source, video_encoder, video_parser, video_decoder, video_sink, NULL);
/* we link the elements together */
// gst_element_link_many (queue.audio, audio_sink, NULL);
gst_element_link_many (source, video_encoder, video_parser, video_decoder, video_sink, NULL);
// g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added_queue), queue);
// g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), &queue);
/* Set the pipeline to "playing" state*/
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}