Skip to content

Commit 7256789

Browse files
philn
authored and cadubentzen committed
[GStreamer][WebRTC] Simplified pipeline for incoming tracks
https://bugs.webkit.org/show_bug.cgi?id=276989 Reviewed by Xabier Rodriguez-Calvar. The incoming track processor now feeds a single sink, no tee or dynamic pipeline manipulations involved anymore. This brings back a timeout in webrtc/h265.html, but it will be fixed once we have track events dispatching fixed (bug #275685). * LayoutTests/platform/glib/TestExpectations: * Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp: (WebCore::GStreamerMediaEndpoint::setConfiguration): (WebCore::GStreamerMediaEndpoint::connectIncomingTrack): (WebCore::GStreamerMediaEndpoint::connectPad): * Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp: (WebCore::GStreamerIncomingTrackProcessor::configure): (WebCore::GStreamerIncomingTrackProcessor::incomingTrackProcessor): (WebCore::GStreamerIncomingTrackProcessor::createParser): (WebCore::GStreamerIncomingTrackProcessor::stats): * Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h: * Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp: * Source/WebCore/platform/mediastream/gstreamer/GStreamerWebRTCCommon.h: * Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceGStreamer.cpp: * Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.cpp: (WebCore::RealtimeIncomingSourceGStreamer::RealtimeIncomingSourceGStreamer): (WebCore::RealtimeIncomingSourceGStreamer::setBin): (WebCore::RealtimeIncomingSourceGStreamer::registerClient): (WebCore::RealtimeIncomingSourceGStreamer::unregisterClient): (WebCore::RealtimeIncomingSourceGStreamer::unregisterClientLocked): (WebCore::RealtimeIncomingSourceGStreamer::forEachClient): (WebCore::RealtimeIncomingSourceGStreamer::handleUpstreamEvent): (WebCore::RealtimeIncomingSourceGStreamer::handleUpstreamQuery): (WebCore::RealtimeIncomingSourceGStreamer::handleDownstreamEvent): (WebCore::RealtimeIncomingSourceGStreamer::setUpstreamBin): Deleted. 
(WebCore::RealtimeIncomingSourceGStreamer::startProducingData): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::stopProducingData): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::configureAppSink): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::configureFakeVideoSink): Deleted. * Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.h: (WebCore::RealtimeIncomingSourceGStreamer::bin const): (WebCore::RealtimeIncomingSourceGStreamer::bin): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::setIsUpstreamDecoding): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::dispatchSample): Deleted. * Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp: (WebCore::RealtimeIncomingVideoSourceGStreamer::setBin): (WebCore::RealtimeIncomingVideoSourceGStreamer::dispatchSample): (WebCore::RealtimeIncomingVideoSourceGStreamer::setUpstreamBin): Deleted. * Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.h: Canonical link: https://commits.webkit.org/281394@main
1 parent 6f2e678 commit 7256789

11 files changed

+175
-267
lines changed

LayoutTests/platform/glib/TestExpectations

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1474,6 +1474,8 @@ webrtc/video-av1.html [ Skip ]
14741474

14751475
# GStreamer's DTLS agent currently generates RSA certificates only. DTLS 1.2 is not supported yet (AFAIK).
14761476
webrtc/datachannel/dtls10.html [ Failure ]
1477+
# FIXME: Remove Timeout expectation once bug #275685 is fixed.
1478+
webkit.org/b/269285 webrtc/h265.html [ Failure Timeout ]
14771479

14781480
# Too slow with filtering implemented in WebKit. Should be done directly by GstWebRTC.
14791481
webrtc/datachannel/filter-ice-candidate.html [ Skip ]

Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp

Lines changed: 7 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -282,9 +282,8 @@ bool GStreamerMediaEndpoint::setConfiguration(MediaEndpointConfiguration& config
282282
// WIP: https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/302
283283
GST_FIXME("%zu custom certificates not propagated to webrtcbin", configuration.certificates.size());
284284

285-
gst_element_set_state(m_pipeline.get(), GST_STATE_READY);
286-
GST_DEBUG_OBJECT(m_pipeline.get(), "End-point ready");
287285
gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
286+
GST_DEBUG_OBJECT(m_pipeline.get(), "End-point ready");
288287
return true;
289288
}
290289

@@ -998,24 +997,17 @@ void GStreamerMediaEndpoint::connectIncomingTrack(WebRTCTrackData& data)
998997
m_peerConnectionBackend.addPendingTrackEvent({ Ref(transceiver->receiver()), Ref(transceiver->receiver().track()), { }, Ref(*transceiver) });
999998

1000999
auto mediaStreamBin = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_pipeline.get()), data.mediaStreamBinName.ascii().data()));
1001-
auto tee = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(mediaStreamBin.get()), "tee"));
1002-
GstElement* bin = nullptr;
10031000
auto& track = transceiver->receiver().track();
10041001
auto& source = track.privateTrack().source();
10051002
if (source.isIncomingAudioSource()) {
10061003
auto& audioSource = static_cast<RealtimeIncomingAudioSourceGStreamer&>(source);
1007-
audioSource.setUpstreamBin(mediaStreamBin);
1008-
audioSource.setIsUpstreamDecoding(data.isUpstreamDecoding);
1009-
bin = audioSource.bin();
1004+
if (!audioSource.setBin(mediaStreamBin))
1005+
return;
10101006
} else if (source.isIncomingVideoSource()) {
10111007
auto& videoSource = static_cast<RealtimeIncomingVideoSourceGStreamer&>(source);
1012-
videoSource.setUpstreamBin(mediaStreamBin);
1013-
videoSource.setIsUpstreamDecoding(data.isUpstreamDecoding);
1014-
bin = videoSource.bin();
1008+
if (!videoSource.setBin(mediaStreamBin))
1009+
return;
10151010
}
1016-
ASSERT(bin);
1017-
1018-
gst_bin_add(GST_BIN_CAST(m_pipeline.get()), bin);
10191011

10201012
auto& mediaStream = mediaStreamFromRTCStream(data.mediaStreamId);
10211013
mediaStream.addTrackFromPlatform(track);
@@ -1033,6 +1025,7 @@ void GStreamerMediaEndpoint::connectIncomingTrack(WebRTCTrackData& data)
10331025
auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".connected-"_s, data.mediaStreamId);
10341026
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data());
10351027
#endif
1028+
gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
10361029
}
10371030

10381031
void GStreamerMediaEndpoint::connectPad(GstPad* pad)
@@ -1048,7 +1041,7 @@ void GStreamerMediaEndpoint::connectPad(GstPad* pad)
10481041

10491042
auto sinkPad = adoptGRef(gst_element_get_static_pad(bin, "sink"));
10501043
gst_pad_link(pad, sinkPad.get());
1051-
gst_element_sync_state_with_parent(bin);
1044+
gst_element_set_state(bin, GST_STATE_PAUSED);
10521045

10531046
#ifndef GST_DISABLE_GST_DEBUG
10541047
auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".pending-"_s, GST_OBJECT_NAME(pad));

Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp

Lines changed: 38 additions & 23 deletions
Original file line number | Diff line number | Diff line change
@@ -41,17 +41,25 @@ void GStreamerIncomingTrackProcessor::configure(GStreamerMediaEndpoint* endPoint
4141
{
4242
m_endPoint = endPoint;
4343
m_pad = WTFMove(pad);
44-
m_data.mediaStreamBinName = makeString(GST_OBJECT_NAME(m_pad.get()));
45-
m_bin = gst_bin_new(m_data.mediaStreamBinName.ascii().data());
4644

4745
auto caps = adoptGRef(gst_pad_get_current_caps(m_pad.get()));
4846
if (!caps)
4947
caps = adoptGRef(gst_pad_query_caps(m_pad.get(), nullptr));
5048

51-
GST_DEBUG_OBJECT(m_bin.get(), "Processing track with caps %" GST_PTR_FORMAT, caps.get());
52-
m_data.type = doCapsHaveType(caps.get(), "audio") ? RealtimeMediaSource::Type::Audio : RealtimeMediaSource::Type::Video;
49+
ASCIILiteral typeName;
50+
if (doCapsHaveType(caps.get(), "audio")) {
51+
typeName = "audio"_s;
52+
m_data.type = RealtimeMediaSource::Type::Audio;
53+
} else {
54+
typeName = "video"_s;
55+
m_data.type = RealtimeMediaSource::Type::Video;
56+
}
5357
m_data.caps = WTFMove(caps);
5458

59+
m_data.mediaStreamBinName = makeString("incoming-"_s, typeName, "-track-"_s, GST_OBJECT_NAME(m_pad.get()));
60+
m_bin = gst_bin_new(m_data.mediaStreamBinName.ascii().data());
61+
GST_DEBUG_OBJECT(m_bin.get(), "Processing track with caps %" GST_PTR_FORMAT, m_data.caps.get());
62+
5563
g_object_get(m_pad.get(), "transceiver", &m_data.transceiver.outPtr(), nullptr);
5664

5765
auto structure = gst_caps_get_structure(m_data.caps.get(), 0);
@@ -72,15 +80,30 @@ void GStreamerIncomingTrackProcessor::configure(GStreamerMediaEndpoint* endPoint
7280
if (!m_sdpMsIdAndTrackId.second.isEmpty())
7381
m_data.trackId = m_sdpMsIdAndTrackId.second;
7482

75-
m_tee = gst_element_factory_make("tee", "tee");
76-
g_object_set(m_tee.get(), "allow-not-linked", TRUE, nullptr);
83+
m_sink = gst_element_factory_make("fakesink", "sink");
84+
g_object_set(m_sink.get(), "sync", TRUE, "enable-last-sample", FALSE, nullptr);
85+
auto queue = gst_element_factory_make("queue", "queue");
7786

7887
auto trackProcessor = incomingTrackProcessor();
79-
m_data.isUpstreamDecoding = m_isDecoding;
8088

81-
gst_bin_add_many(GST_BIN_CAST(m_bin.get()), m_tee.get(), trackProcessor.get(), nullptr);
89+
gst_bin_add_many(GST_BIN_CAST(m_bin.get()), trackProcessor.get(), queue, m_sink.get(), nullptr);
90+
gst_element_link(queue, m_sink.get());
91+
8292
auto sinkPad = adoptGRef(gst_element_get_static_pad(trackProcessor.get(), "sink"));
8393
gst_element_add_pad(m_bin.get(), gst_ghost_pad_new("sink", sinkPad.get()));
94+
95+
if (m_data.type != RealtimeMediaSource::Type::Video || !m_isDecoding)
96+
return;
97+
98+
auto sinkSinkPad = adoptGRef(gst_element_get_static_pad(m_sink.get(), "sink"));
99+
gst_pad_add_probe(sinkSinkPad.get(), GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, reinterpret_cast<GstPadProbeCallback>(+[](GstPad*, GstPadProbeInfo* info, gpointer) -> GstPadProbeReturn {
100+
auto query = GST_PAD_PROBE_INFO_QUERY(info);
101+
if (GST_QUERY_TYPE(query) != GST_QUERY_ALLOCATION)
102+
return GST_PAD_PROBE_OK;
103+
104+
gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, nullptr);
105+
return GST_PAD_PROBE_REMOVE;
106+
}), nullptr, nullptr);
84107
}
85108

86109
String GStreamerIncomingTrackProcessor::mediaStreamIdFromPad()
@@ -166,12 +189,6 @@ GRefPtr<GstElement> GStreamerIncomingTrackProcessor::incomingTrackProcessor()
166189
GRefPtr<GstElement> decodebin = makeGStreamerElement("decodebin3", nullptr);
167190
m_isDecoding = true;
168191

169-
m_queue = gst_element_factory_make("queue", nullptr);
170-
m_fakeVideoSink = makeGStreamerElement("fakevideosink", nullptr);
171-
g_object_set(m_fakeVideoSink.get(), "enable-last-sample", FALSE, nullptr);
172-
gst_bin_add_many(GST_BIN_CAST(m_bin.get()), m_queue.get(), m_fakeVideoSink.get(), nullptr);
173-
gst_element_link(m_queue.get(), m_fakeVideoSink.get());
174-
175192
g_signal_connect(decodebin.get(), "deep-element-added", G_CALLBACK(+[](GstBin*, GstBin*, GstElement* element, gpointer) {
176193
auto elementClass = makeString(gst_element_get_metadata(element, GST_ELEMENT_METADATA_KLASS));
177194
auto classifiers = elementClass.split('/');
@@ -207,13 +224,9 @@ GRefPtr<GstElement> GStreamerIncomingTrackProcessor::incomingTrackProcessor()
207224
}), this);
208225

209226
g_signal_connect_swapped(decodebin.get(), "pad-added", G_CALLBACK(+[](GStreamerIncomingTrackProcessor* self, GstPad* pad) {
210-
auto sinkPad = adoptGRef(gst_element_get_static_pad(self->m_tee.get(), "sink"));
227+
auto queue = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(self->m_bin.get()), "queue"));
228+
auto sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink"));
211229
gst_pad_link(pad, sinkPad.get());
212-
213-
gst_element_link(self->m_tee.get(), self->m_queue.get());
214-
gst_element_sync_state_with_parent(self->m_tee.get());
215-
gst_element_sync_state_with_parent(self->m_queue.get());
216-
gst_element_sync_state_with_parent(self->m_fakeVideoSink.get());
217230
self->trackReady();
218231
}), this);
219232
return decodebin;
@@ -232,9 +245,9 @@ GRefPtr<GstElement> GStreamerIncomingTrackProcessor::createParser()
232245
}), nullptr);
233246

234247
g_signal_connect_swapped(parsebin.get(), "pad-added", G_CALLBACK(+[](GStreamerIncomingTrackProcessor* self, GstPad* pad) {
235-
auto sinkPad = adoptGRef(gst_element_get_static_pad(self->m_tee.get(), "sink"));
248+
auto queue = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(self->m_bin.get()), "queue"));
249+
auto sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink"));
236250
gst_pad_link(pad, sinkPad.get());
237-
gst_element_sync_state_with_parent(self->m_tee.get());
238251
self->trackReady();
239252
}), this);
240253
return parsebin;
@@ -265,7 +278,9 @@ const GstStructure* GStreamerIncomingTrackProcessor::stats()
265278
m_stats.reset(gst_structure_new_empty("incoming-video-stats"));
266279
uint64_t droppedVideoFrames = 0;
267280
GUniqueOutPtr<GstStructure> stats;
268-
g_object_get(m_fakeVideoSink.get(), "stats", &stats.outPtr(), nullptr);
281+
282+
g_object_get(m_sink.get(), "stats", &stats.outPtr(), nullptr);
283+
269284
if (!gst_structure_get_uint64(stats.get(), "dropped", &droppedVideoFrames))
270285
return m_stats.get();
271286

Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -61,16 +61,14 @@ class GStreamerIncomingTrackProcessor : public RefCounted<GStreamerIncomingTrack
6161
GStreamerMediaEndpoint* m_endPoint { nullptr };
6262
GRefPtr<GstPad> m_pad;
6363
GRefPtr<GstElement> m_bin;
64-
GRefPtr<GstElement> m_tee;
6564
WebRTCTrackData m_data;
6665

6766
std::pair<String, String> m_sdpMsIdAndTrackId;
6867

6968
bool m_isDecoding { false };
7069
FloatSize m_videoSize;
7170
uint64_t m_decodedVideoFrames { 0 };
72-
GRefPtr<GstElement> m_queue;
73-
GRefPtr<GstElement> m_fakeVideoSink;
71+
GRefPtr<GstElement> m_sink;
7472
GUniquePtr<GstStructure> m_stats;
7573
bool m_isReady { false };
7674
};

Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp

Lines changed: 17 additions & 31 deletions
Original file line number | Diff line number | Diff line change
@@ -124,14 +124,6 @@ class WebKitMediaStreamObserver : public MediaStreamPrivate::Observer {
124124

125125
static void webkitMediaStreamSrcEnsureStreamCollectionPosted(WebKitMediaStreamSrc*);
126126

127-
#if USE(GSTREAMER_WEBRTC)
128-
struct InternalSourcePadProbeData {
129-
RealtimeIncomingSourceGStreamer* incomingSource;
130-
int clientId;
131-
};
132-
WEBKIT_DEFINE_ASYNC_DATA_STRUCT(InternalSourcePadProbeData)
133-
#endif
134-
135127
class InternalSource final : public MediaStreamTrackPrivate::Observer,
136128
public RealtimeMediaSource::Observer,
137129
public RealtimeMediaSource::AudioSampleObserver,
@@ -200,7 +192,7 @@ class InternalSource final : public MediaStreamTrackPrivate::Observer,
200192
{
201193
#if USE(GSTREAMER_WEBRTC)
202194
auto& trackSource = m_track.source();
203-
std::optional<int> clientId;
195+
int clientId;
204196
auto client = GRefPtr<GstElement>(m_src);
205197
if (trackSource.isIncomingAudioSource()) {
206198
auto& source = static_cast<RealtimeIncomingAudioSourceGStreamer&>(trackSource);
@@ -219,23 +211,15 @@ class InternalSource final : public MediaStreamTrackPrivate::Observer,
219211
clientId = source.registerClient(WTFMove(client));
220212
}
221213

222-
if (!clientId) {
223-
GST_WARNING_OBJECT(m_src.get(), "Incoming track registration failed, track likely not ready yet.");
224-
return;
225-
}
226-
227-
m_webrtcSourceClientId = *clientId;
228-
229-
auto data = createInternalSourcePadProbeData();
230-
data->incomingSource = static_cast<RealtimeIncomingSourceGStreamer*>(&trackSource);
231-
data->clientId = *m_webrtcSourceClientId;
214+
m_webrtcSourceClientId = clientId;
232215

216+
auto incomingSource = static_cast<RealtimeIncomingSourceGStreamer*>(&trackSource);
233217
auto srcPad = adoptGRef(gst_element_get_static_pad(m_src.get(), "src"));
234218
gst_pad_add_probe(srcPad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_EVENT_UPSTREAM | GST_PAD_PROBE_TYPE_QUERY_UPSTREAM), reinterpret_cast<GstPadProbeCallback>(+[](GstPad* pad, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
235-
auto data = static_cast<InternalSourcePadProbeData*>(userData);
236-
if (!data->incomingSource)
219+
auto weakSource = static_cast<WeakPtr<RealtimeIncomingSourceGStreamer>*>(userData);
220+
if (!weakSource)
237221
return GST_PAD_PROBE_REMOVE;
238-
222+
auto incomingSource = weakSource->get();
239223
auto src = adoptGRef(gst_pad_get_parent_element(pad));
240224
if (GST_IS_QUERY(info->data)) {
241225
switch (GST_QUERY_TYPE(GST_PAD_PROBE_INFO_QUERY(info))) {
@@ -248,21 +232,23 @@ class InternalSource final : public MediaStreamTrackPrivate::Observer,
248232
} else
249233
GST_DEBUG_OBJECT(src.get(), "Proxying event %" GST_PTR_FORMAT " to appsink peer", GST_PAD_PROBE_INFO_EVENT(info));
250234

251-
if (data->incomingSource->isIncomingAudioSource()) {
252-
auto& source = static_cast<RealtimeIncomingAudioSourceGStreamer&>(*data->incomingSource);
235+
if (incomingSource->isIncomingAudioSource()) {
236+
auto& source = static_cast<RealtimeIncomingAudioSourceGStreamer&>(*incomingSource);
253237
if (GST_IS_EVENT(info->data))
254-
source.handleUpstreamEvent(GRefPtr<GstEvent>(GST_PAD_PROBE_INFO_EVENT(info)), data->clientId);
255-
else if (source.handleUpstreamQuery(GST_PAD_PROBE_INFO_QUERY(info), data->clientId))
238+
source.handleUpstreamEvent(GRefPtr<GstEvent>(GST_PAD_PROBE_INFO_EVENT(info)));
239+
else if (source.handleUpstreamQuery(GST_PAD_PROBE_INFO_QUERY(info)))
256240
return GST_PAD_PROBE_HANDLED;
257-
} else if (data->incomingSource->isIncomingVideoSource()) {
258-
auto& source = static_cast<RealtimeIncomingVideoSourceGStreamer&>(*data->incomingSource);
241+
} else if (incomingSource->isIncomingVideoSource()) {
242+
auto& source = static_cast<RealtimeIncomingVideoSourceGStreamer&>(*incomingSource);
259243
if (GST_IS_EVENT(info->data))
260-
source.handleUpstreamEvent(GRefPtr<GstEvent>(GST_PAD_PROBE_INFO_EVENT(info)), data->clientId);
261-
else if (source.handleUpstreamQuery(GST_PAD_PROBE_INFO_QUERY(info), data->clientId))
244+
source.handleUpstreamEvent(GRefPtr<GstEvent>(GST_PAD_PROBE_INFO_EVENT(info)));
245+
else if (source.handleUpstreamQuery(GST_PAD_PROBE_INFO_QUERY(info)))
262246
return GST_PAD_PROBE_HANDLED;
263247
}
264248
return GST_PAD_PROBE_OK;
265-
}), data, reinterpret_cast<GDestroyNotify>(destroyInternalSourcePadProbeData));
249+
}), new WeakPtr { incomingSource }, reinterpret_cast<GDestroyNotify>(+[](gpointer data) {
250+
delete static_cast<WeakPtr<RealtimeIncomingSourceGStreamer>*>(data);
251+
}));
266252
#endif
267253
}
268254

Source/WebCore/platform/mediastream/gstreamer/GStreamerWebRTCCommon.h

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -27,7 +27,6 @@ using WebRTCTrackData = struct _WebRTCTrackData {
2727
String trackId;
2828
String mediaStreamBinName;
2929
GRefPtr<GstWebRTCRTPTransceiver> transceiver;
30-
bool isUpstreamDecoding;
3130
RealtimeMediaSource::Type type;
3231
GRefPtr<GstCaps> caps;
3332
};

Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceGStreamer.cpp

Lines changed: 0 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -37,9 +37,6 @@ RealtimeIncomingAudioSourceGStreamer::RealtimeIncomingAudioSourceGStreamer(AtomS
3737
std::call_once(debugRegisteredFlag, [] {
3838
GST_DEBUG_CATEGORY_INIT(webkit_webrtc_incoming_audio_debug, "webkitwebrtcincomingaudio", 0, "WebKit WebRTC incoming audio");
3939
});
40-
static Atomic<uint64_t> sourceCounter = 0;
41-
gst_element_set_name(bin(), makeString("incoming-audio-source-"_s, sourceCounter.exchangeAdd(1)).ascii().data());
42-
GST_DEBUG_OBJECT(bin(), "New incoming audio source created with ID %s", persistentID().ascii().data());
4340
}
4441

4542
RealtimeIncomingAudioSourceGStreamer::~RealtimeIncomingAudioSourceGStreamer()

0 commit comments

Comments
 (0)