Skip to content

Commit a2113c4

Browse files
committed
enhancement #9: adding audio to pipe (not working)
1 parent 46256a6 commit a2113c4

File tree

6 files changed

+114
-84
lines changed

6 files changed

+114
-84
lines changed

Plugin/src/GstAVPipeline.h

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ class GstAVPipeline : public GstBasePipeline
1919

2020
void CreatePipeline(const char* uri, const char* remote_peer_id);
2121
void CreateDevice();
22+
virtual void ReleaseTexture(void* texture) = 0;
2223

2324
protected:
2425
virtual void on_pad_added(GstElement* src, GstPad* new_pad, gpointer data) = 0;

Plugin/src/GstAVPipelineD3D11.cpp

Lines changed: 28 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,8 @@
22
This source code is licensed under the license found in the
33
LICENSE file in the root directory of this source tree. */
44

5+
#include "GstAVPipelineD3D11.h"
56
#include "DebugLog.h"
6-
#include "GstAVPipeline.h"
77

88
#include <d3d11_1.h>
99
#include <d3d11sdklayers.h>
@@ -17,7 +17,7 @@ using namespace Microsoft::WRL;
1717
// Creates the underlying D3D11 texture using the provided unity device.
1818
// This texture can then be turned into a proper Unity texture on the
1919
// managed side using Texture2D.CreateExternalTexture()
20-
ID3D11Texture2D* GstAVPipeline::CreateTexture(unsigned int width, unsigned int height, bool left)
20+
ID3D11Texture2D* GstAVPipelineD3D11::CreateTexture(unsigned int width, unsigned int height, bool left)
2121
{
2222
auto device = _s_UnityInterfaces->Get<IUnityGraphicsD3D11>()->GetDevice();
2323
HRESULT hr = S_OK;
@@ -96,7 +96,7 @@ ID3D11Texture2D* GstAVPipeline::CreateTexture(unsigned int width, unsigned int h
9696
return texture.Get();
9797
}
9898

99-
GstFlowReturn GstAVPipeline::on_new_sample(GstAppSink* appsink, gpointer user_data)
99+
GstFlowReturn GstAVPipelineD3D11::on_new_sample(GstAppSink* appsink, gpointer user_data)
100100
{
101101
AppData* data = static_cast<AppData*>(user_data);
102102
GstSample* sample = gst_app_sink_pull_sample(appsink);
@@ -139,7 +139,7 @@ GstFlowReturn GstAVPipeline::on_new_sample(GstAppSink* appsink, gpointer user_da
139139
return GST_FLOW_OK;
140140
}
141141

142-
void GstAVPipeline::Draw(bool left)
142+
void GstAVPipelineD3D11::Draw(bool left)
143143
{
144144
AppData* data;
145145
if (left)
@@ -179,7 +179,7 @@ void GstAVPipeline::Draw(bool left)
179179
gst_sample_unref(sample);
180180
}
181181

182-
GstElement* GstAVPipeline::add_rtph264depay(GstElement* pipeline)
182+
GstElement* GstAVPipelineD3D11::add_rtph264depay(GstElement* pipeline)
183183
{
184184
GstElement* rtph264depay = gst_element_factory_make("rtph264depay", nullptr);
185185
if (!rtph264depay)
@@ -191,7 +191,7 @@ GstElement* GstAVPipeline::add_rtph264depay(GstElement* pipeline)
191191
return rtph264depay;
192192
}
193193

194-
GstElement* GstAVPipeline::add_h264parse(GstElement* pipeline)
194+
GstElement* GstAVPipelineD3D11::add_h264parse(GstElement* pipeline)
195195
{
196196
GstElement* h264parse = gst_element_factory_make("h264parse", nullptr);
197197
if (!h264parse)
@@ -203,7 +203,7 @@ GstElement* GstAVPipeline::add_h264parse(GstElement* pipeline)
203203
return h264parse;
204204
}
205205

206-
GstElement* GstAVPipeline::add_d3d11h264dec(GstElement* pipeline)
206+
GstElement* GstAVPipelineD3D11::add_d3d11h264dec(GstElement* pipeline)
207207
{
208208
GstElement* d3d11h264dec = gst_element_factory_make("d3d11h264dec", nullptr);
209209
if (!d3d11h264dec)
@@ -215,7 +215,7 @@ GstElement* GstAVPipeline::add_d3d11h264dec(GstElement* pipeline)
215215
return d3d11h264dec;
216216
}
217217

218-
GstElement* GstAVPipeline::add_d3d11convert(GstElement* pipeline)
218+
GstElement* GstAVPipelineD3D11::add_d3d11convert(GstElement* pipeline)
219219
{
220220
GstElement* d3d11convert = gst_element_factory_make("d3d11convert", nullptr);
221221
if (!d3d11convert)
@@ -227,7 +227,7 @@ GstElement* GstAVPipeline::add_d3d11convert(GstElement* pipeline)
227227
return d3d11convert;
228228
}
229229

230-
GstElement* GstAVPipeline::add_appsink(GstElement* pipeline)
230+
GstElement* GstAVPipelineD3D11::add_appsink(GstElement* pipeline)
231231
{
232232
GstElement* appsink = gst_element_factory_make("appsink", nullptr);
233233
if (!appsink)
@@ -244,7 +244,7 @@ GstElement* GstAVPipeline::add_appsink(GstElement* pipeline)
244244
return appsink;
245245
}
246246

247-
GstElement* GstAVPipeline::add_rtpopusdepay(GstElement* pipeline)
247+
GstElement* GstAVPipelineD3D11::add_rtpopusdepay(GstElement* pipeline)
248248
{
249249
GstElement* rtpopusdepay = gst_element_factory_make("rtpopusdepay", nullptr);
250250
if (!rtpopusdepay)
@@ -257,7 +257,7 @@ GstElement* GstAVPipeline::add_rtpopusdepay(GstElement* pipeline)
257257
return rtpopusdepay;
258258
}
259259

260-
GstElement* GstAVPipeline::add_opusdec(GstElement* pipeline)
260+
GstElement* GstAVPipelineD3D11::add_opusdec(GstElement* pipeline)
261261
{
262262
GstElement* opusdec = gst_element_factory_make("opusdec", nullptr);
263263
if (!opusdec)
@@ -270,7 +270,7 @@ GstElement* GstAVPipeline::add_opusdec(GstElement* pipeline)
270270
return opusdec;
271271
}
272272

273-
GstElement* GstAVPipeline::add_audioconvert(GstElement* pipeline)
273+
GstElement* GstAVPipelineD3D11::add_audioconvert(GstElement* pipeline)
274274
{
275275
GstElement* audioconvert = gst_element_factory_make("audioconvert", nullptr);
276276
if (!audioconvert)
@@ -283,7 +283,7 @@ GstElement* GstAVPipeline::add_audioconvert(GstElement* pipeline)
283283
return audioconvert;
284284
}
285285

286-
GstElement* GstAVPipeline::add_audioresample(GstElement* pipeline)
286+
GstElement* GstAVPipelineD3D11::add_audioresample(GstElement* pipeline)
287287
{
288288
GstElement* audioresample = gst_element_factory_make("audioresample", nullptr);
289289
if (!audioresample)
@@ -296,7 +296,7 @@ GstElement* GstAVPipeline::add_audioresample(GstElement* pipeline)
296296
return audioresample;
297297
}
298298

299-
GstElement* GstAVPipeline::add_wasapi2sink(GstElement* pipeline)
299+
GstElement* GstAVPipelineD3D11::add_wasapi2sink(GstElement* pipeline)
300300
{
301301
GstElement* wasapi2sink = gst_element_factory_make("wasapi2sink", nullptr);
302302
if (!wasapi2sink)
@@ -310,8 +310,8 @@ GstElement* GstAVPipeline::add_wasapi2sink(GstElement* pipeline)
310310
return wasapi2sink;
311311
}
312312

313-
GstElement* GstAVPipeline::add_webrtcsrc(GstElement* pipeline, const std::string& remote_peer_id, const std::string& uri,
314-
GstAVPipeline* self)
313+
GstElement* GstAVPipelineD3D11::add_webrtcsrc(GstElement* pipeline, const std::string& remote_peer_id, const std::string& uri,
314+
GstAVPipelineD3D11* self)
315315
{
316316
GstElement* webrtcsrc = gst_element_factory_make("webrtcsrc", nullptr);
317317
if (!webrtcsrc)
@@ -341,9 +341,9 @@ GstElement* GstAVPipeline::add_webrtcsrc(GstElement* pipeline, const std::string
341341
return webrtcsrc;
342342
}
343343

344-
void GstAVPipeline::on_pad_added(GstElement* src, GstPad* new_pad, gpointer data)
344+
void GstAVPipelineD3D11::on_pad_added(GstElement* src, GstPad* new_pad, gpointer data)
345345
{
346-
GstAVPipeline* avpipeline = static_cast<GstAVPipeline*>(data);
346+
GstAVPipelineD3D11* avpipeline = static_cast<GstAVPipelineD3D11*>(data);
347347

348348
gchar* pad_name = gst_pad_get_name(new_pad);
349349
Debug::Log("Adding pad ");
@@ -419,13 +419,13 @@ void GstAVPipeline::on_pad_added(GstElement* src, GstPad* new_pad, gpointer data
419419
g_free(pad_name);
420420
}
421421

422-
void GstAVPipeline::webrtcbin_ready(GstElement* self, gchararray peer_id, GstElement* webrtcbin, gpointer udata)
422+
void GstAVPipelineD3D11::webrtcbin_ready(GstElement* self, gchararray peer_id, GstElement* webrtcbin, gpointer udata)
423423
{
424424
Debug::Log("Configure webrtcbin", Level::Info);
425425
g_object_set(webrtcbin, "latency", 1, nullptr);
426426
}
427427

428-
void GstAVPipeline::ReleaseTexture(ID3D11Texture2D* texture)
428+
void GstAVPipelineD3D11::ReleaseTexture(ID3D11Texture2D* texture)
429429
{
430430
if (texture != nullptr)
431431
{
@@ -434,7 +434,7 @@ void GstAVPipeline::ReleaseTexture(ID3D11Texture2D* texture)
434434
}
435435
}
436436

437-
GstAVPipeline::GstAVPipeline(IUnityInterfaces* s_UnityInterfaces)
437+
GstAVPipelineD3D11::GstAVPipelineD3D11(IUnityInterfaces* s_UnityInterfaces)
438438
: GstBasePipeline("AVPipeline"), _s_UnityInterfaces(s_UnityInterfaces)
439439
{
440440
// preload plugins before Unity XR plugin
@@ -487,7 +487,7 @@ GstAVPipeline::GstAVPipeline(IUnityInterfaces* s_UnityInterfaces)
487487
_render_info = GstVideoInfo();
488488
}
489489

490-
GstAVPipeline::~GstAVPipeline()
490+
GstAVPipelineD3D11::~GstAVPipelineD3D11()
491491
{
492492
gst_clear_object(&_device);
493493
gst_object_unref(_device);
@@ -499,9 +499,9 @@ GstAVPipeline::~GstAVPipeline()
499499
preloaded_plugins.clear();
500500
}
501501

502-
void GstAVPipeline::CreatePipeline(const char* uri, const char* remote_peer_id)
502+
void GstAVPipelineD3D11::CreatePipeline(const char* uri, const char* remote_peer_id)
503503
{
504-
Debug::Log("GstAVPipeline create pipeline", Level::Info);
504+
Debug::Log("GstAVPipelineD3D11 create pipeline", Level::Info);
505505
Debug::Log(uri, Level::Info);
506506
Debug::Log(remote_peer_id, Level::Info);
507507

@@ -512,7 +512,7 @@ void GstAVPipeline::CreatePipeline(const char* uri, const char* remote_peer_id)
512512
CreateBusThread();
513513
}
514514

515-
void GstAVPipeline::CreateDevice()
515+
void GstAVPipelineD3D11::CreateDevice()
516516
{
517517
if (_device == nullptr)
518518
{
@@ -543,7 +543,7 @@ void GstAVPipeline::CreateDevice()
543543
}
544544
}
545545

546-
void GstAVPipeline::DestroyPipeline()
546+
void GstAVPipelineD3D11::DestroyPipeline()
547547
{
548548
GstBasePipeline::DestroyPipeline();
549549

@@ -564,9 +564,9 @@ void GstAVPipeline::DestroyPipeline()
564564
// pDebug = nullptr;
565565
}
566566

567-
GstBusSyncReply GstAVPipeline::busSyncHandler(GstBus* bus, GstMessage* msg, gpointer user_data)
567+
GstBusSyncReply GstAVPipelineD3D11::busSyncHandler(GstBus* bus, GstMessage* msg, gpointer user_data)
568568
{
569-
auto self = (GstAVPipeline*)user_data;
569+
auto self = (GstAVPipelineD3D11*)user_data;
570570

571571
switch (GST_MESSAGE_TYPE(msg))
572572
{

Plugin/src/GstAVPipelineOpenGLES.cpp

Lines changed: 76 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -73,35 +73,24 @@ void GstAVPipelineOpenGLES::SetNativeWindow(JNIEnv* env, jobject surface, bool l
7373
}
7474
}
7575

76-
/*void GstAVPipelineOpenGLES::set_custom_opusenc_settings(GstElement* opusenc)
76+
void GstAVPipelineOpenGLES::ReleaseTexture(void* texture)
7777
{
78-
g_object_set(opusenc, "frame-size", 10, nullptr);
79-
}*/
78+
_nativeWindow_left = nullptr;
79+
_nativeWindow_right = nullptr;
80+
}
8081

81-
/*GstElement* GstAVPipelineOpenGLES::make_audiosink()
82+
GstElement* GstAVPipelineOpenGLES::add_autoaudiosink(GstElement* pipeline)
8283
{
83-
GstElement* audiosink = gst_element_factory_make("autoaudiosink", nullptr);
84-
if (!audiosink)
84+
GstElement* autoaudiosink = gst_element_factory_make("autoaudiosink", nullptr);
85+
if (!autoaudiosink)
8586
{
8687
Debug::Log("Failed to create audiosink", Level::Error);
8788
return nullptr;
8889
}
89-
90-
return audiosink;
90+
gst_bin_add(GST_BIN(pipeline), autoaudiosink);
91+
return autoaudiosink;
9192
}
9293

93-
GstElement* GstAVPipelineOpenGLES::make_audiosrc()
94-
{
95-
GstElement* audiosrc = gst_element_factory_make("autoaudiosrc", nullptr);
96-
if (!audiosrc)
97-
{
98-
Debug::Log("Failed to create audiosrc", Level::Error);
99-
return nullptr;
100-
}
101-
102-
return audiosrc;
103-
}*/
104-
10594
GstElement* GstAVPipelineOpenGLES::add_videoconvert(GstElement* pipeline)
10695
{
10796
GstElement* videoconvert = gst_element_factory_make("videoconvert", nullptr);
@@ -114,6 +103,18 @@ GstElement* GstAVPipelineOpenGLES::add_videoconvert(GstElement* pipeline)
114103
return videoconvert;
115104
}
116105

106+
GstElement* GstAVPipelineOpenGLES::add_audioconvert(GstElement* pipeline)
107+
{
108+
GstElement* audioconvert = gst_element_factory_make("audioconvert", nullptr);
109+
if (!audioconvert)
110+
{
111+
Debug::Log("Failed to create audioconvert", Level::Error);
112+
return nullptr;
113+
}
114+
gst_bin_add(GST_BIN(pipeline), audioconvert);
115+
return audioconvert;
116+
}
117+
117118
GstElement* GstAVPipelineOpenGLES::add_glimagesink(GstElement* pipeline)
118119
{
119120
GstElement* glimagesink = gst_element_factory_make("glimagesink", nullptr);
@@ -130,40 +131,68 @@ void GstAVPipelineOpenGLES::on_pad_added(GstElement* src, GstPad* new_pad, gpoin
130131
{
131132
gchar* pad_name = gst_pad_get_name(new_pad);
132133
GstAVPipelineOpenGLES* avpipeline = static_cast<GstAVPipelineOpenGLES*>(data);
133-
// GstElement* rtph264depay = add_rtph264depay(avpipeline->pipeline_);
134-
// GstElement* h264parse = add_h264parse(avpipeline->pipeline_);
135-
GstElement* queue = add_queue(avpipeline->pipeline_);
136-
GstElement* videoconvert = add_videoconvert(avpipeline->pipeline_);
137-
GstElement* glimagesink = add_glimagesink(avpipeline->pipeline_);
138134

139-
if (g_str_has_prefix(pad_name, "video_0"))
135+
Debug::Log("Adding pad ");
136+
if (g_str_has_prefix(pad_name, "video"))
140137
{
141-
Debug::Log("Connecting left video pad " + std::string(pad_name));
142-
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(glimagesink), (guintptr)avpipeline->_nativeWindow_left);
138+
std::lock_guard<std::mutex> lk(avpipeline->_lock);
139+
Debug::Log("Adding video pad " + std::string(pad_name));
140+
GstElement* queue = add_queue(avpipeline->pipeline_);
141+
GstElement* videoconvert = add_videoconvert(avpipeline->pipeline_);
142+
GstElement* glimagesink = add_glimagesink(avpipeline->pipeline_);
143+
144+
if (g_str_has_prefix(pad_name, "video_0"))
145+
{
146+
Debug::Log("Connecting left video pad " + std::string(pad_name));
147+
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(glimagesink), (guintptr)avpipeline->_nativeWindow_left);
148+
}
149+
else
150+
{
151+
Debug::Log("Connecting right video pad " + std::string(pad_name));
152+
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(glimagesink), (guintptr)avpipeline->_nativeWindow_right);
153+
}
154+
155+
if (!gst_element_link_many(queue, videoconvert, glimagesink, nullptr))
156+
{
157+
Debug::Log("Elements could not be linked.");
158+
}
159+
160+
GstPad* sinkpad = gst_element_get_static_pad(queue, "sink");
161+
if (gst_pad_link(new_pad, sinkpad) != GST_PAD_LINK_OK)
162+
{
163+
Debug::Log("Could not link dynamic video pad to queue", Level::Error);
164+
}
165+
gst_object_unref(sinkpad);
166+
167+
gst_element_sync_state_with_parent(queue);
168+
gst_element_sync_state_with_parent(videoconvert);
169+
gst_element_sync_state_with_parent(glimagesink);
143170
}
144-
else
171+
else if (g_str_has_prefix(pad_name, "audio"))
145172
{
146-
Debug::Log("Connecting right video pad " + std::string(pad_name));
147-
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(glimagesink), (guintptr)avpipeline->_nativeWindow_right);
148-
}
173+
Debug::Log("Adding audio pad " + std::string(pad_name));
174+
GstElement* queue = add_queue(avpipeline->pipeline_);
175+
GstElement* audioconvert = add_audioconvert(avpipeline->pipeline_);
176+
GstElement* audioresample = add_audioresample(avpipeline->pipeline_);
177+
GstElement* autoaudiosink = add_autoaudiosink(avpipeline->pipeline_);
149178

150-
if (!gst_element_link_many(/*rtph264depay, h264parse,*/ queue, videoconvert, glimagesink, nullptr))
151-
{
152-
Debug::Log("Elements could not be linked.");
153-
}
179+
if (!gst_element_link_many(queue, audioconvert, audioresample, autoaudiosink, nullptr))
180+
{
181+
Debug::Log("Audio elements could not be linked.", Level::Error);
182+
}
154183

155-
GstPad* sinkpad = gst_element_get_static_pad(queue, "sink");
156-
if (gst_pad_link(new_pad, sinkpad) != GST_PAD_LINK_OK)
157-
{
158-
Debug::Log("Could not link dynamic video pad to queue", Level::Error);
159-
}
160-
gst_object_unref(sinkpad);
184+
GstPad* sinkpad = gst_element_get_static_pad(queue, "sink");
185+
if (gst_pad_link(new_pad, sinkpad) != GST_PAD_LINK_OK)
186+
{
187+
Debug::Log("Could not link dynamic audio pad to queue", Level::Error);
188+
}
189+
gst_object_unref(sinkpad);
161190

162-
// gst_element_sync_state_with_parent(rtph264depay);
163-
// gst_element_sync_state_with_parent(h264parse);
164-
gst_element_sync_state_with_parent(queue);
165-
gst_element_sync_state_with_parent(videoconvert);
166-
gst_element_sync_state_with_parent(glimagesink);
191+
gst_element_sync_state_with_parent(queue);
192+
gst_element_sync_state_with_parent(audioconvert);
193+
gst_element_sync_state_with_parent(audioresample);
194+
gst_element_sync_state_with_parent(autoaudiosink);
195+
}
167196
g_free(pad_name);
168197
}
169198

0 commit comments

Comments (0)