Skip to content

Commit 33365d6

Browse files
committed
[GStreamer][LibWebRTC] Timestamp handling improvements in video decoder factory
https://bugs.webkit.org/show_bug.cgi?id=273757 Reviewed by Xabier Rodriguez-Calvar. The buffers injected into the parser pipeline are now timestamped by appsrc. The RTP timestamps are required by the LibWebRTC generic decoder in order to match parsed frames with input buffers, so they are attached to each buffer using a reference timestamp meta. * Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoDecoderFactory.cpp: (WebCore::GStreamerWebRTCVideoDecoder::GStreamerWebRTCVideoDecoder): (WebCore::GStreamerWebRTCVideoDecoder::pullSample): * Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoFrameLibWebRTC.cpp: (WebCore::ensureDebugCategoryIsRegistered): (WebCore::convertGStreamerSampleToLibWebRTCVideoFrame): (WebCore::GStreamerVideoFrameLibWebRTC::ToI420): * Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoFrameLibWebRTC.h: Canonical link: https://commits.webkit.org/278447@main
1 parent 5412776 commit 33365d6

3 files changed

Lines changed: 46 additions & 63 deletions

File tree

Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoDecoderFactory.cpp

Lines changed: 22 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020

2121
#include "config.h"
2222

23-
#if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)
23+
#if USE(LIBWEBRTC) && USE(GSTREAMER)
2424
#include "GStreamerVideoDecoderFactory.h"
2525

2626
#include "GStreamerQuirks.h"
@@ -45,22 +45,15 @@ GST_DEBUG_CATEGORY(webkit_webrtcdec_debug);
4545

4646
namespace WebCore {
4747

48-
typedef struct {
49-
uint64_t timestamp;
50-
int64_t renderTimeMs;
51-
} InputTimestamps;
52-
5348
class GStreamerWebRTCVideoDecoder : public webrtc::VideoDecoder {
5449
public:
5550
GStreamerWebRTCVideoDecoder()
56-
: m_pictureId(0)
57-
, m_width(0)
51+
: m_width(0)
5852
, m_height(0)
5953
, m_requireParse(false)
6054
, m_needsKeyframe(true)
61-
, m_firstBufferPts(GST_CLOCK_TIME_NONE)
62-
, m_firstBufferDts(GST_CLOCK_TIME_NONE)
6355
{
56+
m_rtpTimestampCaps = adoptGRef(gst_caps_new_empty_simple("timestamp/x-rtp"));
6457
}
6558

6659
static void decodebinPadAddedCb(GstElement*, GstPad* srcpad, GstPad* sinkpad)
@@ -103,6 +96,7 @@ class GStreamerWebRTCVideoDecoder : public webrtc::VideoDecoder {
10396
bool Configure(const webrtc::VideoDecoder::Settings& codecSettings) override
10497
{
10598
m_src = makeElement("appsrc");
99+
g_object_set(m_src, "is-live", TRUE, "do-timestamp", TRUE, "max-buffers", 2, "max-bytes", 0, nullptr);
106100

107101
GRefPtr<GstCaps> caps = nullptr;
108102
auto capsfilter = CreateFilter();
@@ -154,8 +148,7 @@ class GStreamerWebRTCVideoDecoder : public webrtc::VideoDecoder {
154148

155149
m_sink = makeElement("appsink");
156150
gst_app_sink_set_emit_signals(GST_APP_SINK(m_sink), true);
157-
// This is an decoder, everything should happen as fast as possible and not
158-
// be synced on the clock.
151+
// This is a decoder, everything should happen as fast as possible and not be synced on the clock.
159152
g_object_set(m_sink, "sync", false, nullptr);
160153

161154
gst_bin_add_many(GST_BIN(pipeline()), m_src, decoder, capsfilter, m_sink, nullptr);
@@ -204,9 +197,7 @@ class GStreamerWebRTCVideoDecoder : public webrtc::VideoDecoder {
204197
return WEBRTC_VIDEO_CODEC_OK;
205198
}
206199

207-
int32_t Decode(const webrtc::EncodedImage& inputImage,
208-
bool,
209-
int64_t renderTimeMs) override
200+
int32_t Decode(const webrtc::EncodedImage& inputImage, int64_t) override
210201
{
211202
if (m_needsKeyframe) {
212203
if (inputImage._frameType != webrtc::VideoFrameType::kVideoFrameKey) {
@@ -218,33 +209,15 @@ class GStreamerWebRTCVideoDecoder : public webrtc::VideoDecoder {
218209

219210
if (!m_src) {
220211
GST_ERROR("No source set, can't decode.");
221-
222212
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
223213
}
224214

225-
// No renderTime provided, probably some issue with the WebRTC clock. Use a monotonically
226-
// incrementing counter instead.
227-
static int64_t s_forgedRenderTime { 0 };
228-
if (!renderTimeMs) {
229-
renderTimeMs = s_forgedRenderTime;
230-
s_forgedRenderTime += 30 * GST_MSECOND;
231-
}
215+
// FIXME: Use a GstBufferPool.
216+
GST_TRACE_OBJECT(pipeline(), "Pushing encoded image with RTP timestamp %u", inputImage.RtpTimestamp());
217+
auto buffer = adoptGRef(gstBufferNewWrappedFast(fastMemDup(inputImage.data(), inputImage.size()), inputImage.size()));
232218

233-
if (!GST_CLOCK_TIME_IS_VALID(m_firstBufferPts)) {
234-
GRefPtr<GstPad> srcpad = adoptGRef(gst_element_get_static_pad(m_src, "src"));
235-
m_firstBufferPts = (static_cast<guint64>(renderTimeMs)) * GST_MSECOND;
236-
m_firstBufferDts = (static_cast<guint64>(inputImage.RtpTimestamp())) * GST_MSECOND;
237-
}
219+
gst_buffer_add_reference_timestamp_meta(buffer.get(), m_rtpTimestampCaps.get(), inputImage.RtpTimestamp(), GST_CLOCK_TIME_NONE);
238220

239-
// FIXME- Use a GstBufferPool.
240-
auto buffer = adoptGRef(gstBufferNewWrappedFast(fastMemDup(inputImage.data(), inputImage.size()),
241-
inputImage.size()));
242-
GST_BUFFER_DTS(buffer.get()) = (static_cast<guint64>(inputImage.RtpTimestamp()) * GST_MSECOND) - m_firstBufferDts;
243-
GST_BUFFER_PTS(buffer.get()) = (static_cast<guint64>(renderTimeMs) * GST_MSECOND) - m_firstBufferPts;
244-
InputTimestamps timestamps = { inputImage.RtpTimestamp(), renderTimeMs };
245-
m_dtsPtsMap[GST_BUFFER_PTS(buffer.get())] = timestamps;
246-
247-
GST_LOG_OBJECT(pipeline(), "%" G_GINT64_FORMAT " Decoding: %" GST_PTR_FORMAT, renderTimeMs, buffer.get());
248221
auto sample = adoptGRef(gst_sample_new(buffer.get(), GetCapsForFrame(inputImage), nullptr, nullptr));
249222
switch (gst_app_src_push_sample(GST_APP_SRC(m_src), sample.get())) {
250223
case GST_FLOW_OK:
@@ -266,21 +239,11 @@ class GStreamerWebRTCVideoDecoder : public webrtc::VideoDecoder {
266239
return WEBRTC_VIDEO_CODEC_OK;
267240
}
268241
auto buffer = gst_sample_get_buffer(sample.get());
269-
270-
// Make sure that the frame.timestamp == previsouly input_frame._timeStamp
271-
// as it is required by the VideoDecoder baseclass.
272-
auto timestamps = m_dtsPtsMap[GST_BUFFER_PTS(buffer)];
273-
m_dtsPtsMap.erase(GST_BUFFER_PTS(buffer));
274-
275-
auto frame(convertGStreamerSampleToLibWebRTCVideoFrame(WTFMove(sample), webrtc::kVideoRotation_0,
276-
timestamps.timestamp, timestamps.renderTimeMs));
277-
278-
GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
279-
GST_LOG_OBJECT(pipeline(), "Output decoded frame! %d -> %" GST_PTR_FORMAT,
280-
frame->timestamp(), buffer);
281-
282-
m_imageReadyCb->Decoded(*frame.get(), absl::optional<int32_t>(), absl::optional<uint8_t>());
283-
242+
auto meta = gst_buffer_get_reference_timestamp_meta(buffer, m_rtpTimestampCaps.get());
243+
RELEASE_ASSERT(meta);
244+
auto frame = convertGStreamerSampleToLibWebRTCVideoFrame(WTFMove(sample), meta->timestamp);
245+
GST_TRACE_OBJECT(pipeline(), "Pulled video frame with RTP timestamp %u from %" GST_PTR_FORMAT, static_cast<uint32_t>(meta->timestamp), buffer);
246+
m_imageReadyCb->Decoded(frame);
284247
return WEBRTC_VIDEO_CODEC_OK;
285248
}
286249

@@ -326,7 +289,6 @@ class GStreamerWebRTCVideoDecoder : public webrtc::VideoDecoder {
326289

327290
protected:
328291
GRefPtr<GstCaps> m_caps;
329-
gint m_pictureId;
330292
gint m_width;
331293
gint m_height;
332294
bool m_requireParse = false;
@@ -339,9 +301,7 @@ class GStreamerWebRTCVideoDecoder : public webrtc::VideoDecoder {
339301

340302
webrtc::DecodedImageCallback* m_imageReadyCb;
341303

342-
StdMap<GstClockTime, InputTimestamps> m_dtsPtsMap;
343-
GstClockTime m_firstBufferPts;
344-
GstClockTime m_firstBufferDts;
304+
GRefPtr<GstCaps> m_rtpTimestampCaps;
345305
};
346306

347307
class H264Decoder : public GStreamerWebRTCVideoDecoder {
@@ -474,5 +434,9 @@ std::vector<webrtc::SdpVideoFormat> GStreamerVideoDecoderFactory::GetSupportedFo
474434

475435
return formats;
476436
}
477-
}
478-
#endif
437+
438+
#undef GST_CAT_DEFAULT
439+
440+
} // namespace WebCore
441+
442+
#endif // USE(LIBWEBRTC) && USE(GSTREAMER)

Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoFrameLibWebRTC.cpp

Lines changed: 23 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,17 @@
2727

2828
namespace WebCore {
2929

30+
GST_DEBUG_CATEGORY(webkit_libwebrtc_video_frame_debug);
31+
#define GST_CAT_DEFAULT webkit_libwebrtc_video_frame_debug
32+
33+
static void ensureDebugCategoryIsRegistered()
34+
{
35+
static std::once_flag debugRegisteredFlag;
36+
std::call_once(debugRegisteredFlag, [] {
37+
GST_DEBUG_CATEGORY_INIT(webkit_libwebrtc_video_frame_debug, "webkitlibwebrtcvideoframe", 0, "WebKit LibWebRTC Video Frame");
38+
});
39+
}
40+
3041
GRefPtr<GstSample> convertLibWebRTCVideoFrameToGStreamerSample(const webrtc::VideoFrame& frame)
3142
{
3243
RELEASE_ASSERT(frame.video_frame_buffer()->type() != webrtc::VideoFrameBuffer::Type::kNative);
@@ -56,10 +67,15 @@ GRefPtr<GstSample> convertLibWebRTCVideoFrameToGStreamerSample(const webrtc::Vid
5667
return sample;
5768
}
5869

59-
std::unique_ptr<webrtc::VideoFrame> convertGStreamerSampleToLibWebRTCVideoFrame(GRefPtr<GstSample>&& sample, webrtc::VideoRotation rotation, int64_t timestamp, int64_t renderTimeMs)
70+
webrtc::VideoFrame convertGStreamerSampleToLibWebRTCVideoFrame(GRefPtr<GstSample>&& sample, uint32_t rtpTimestamp)
6071
{
61-
auto frameBuffer(GStreamerVideoFrameLibWebRTC::create(WTFMove(sample)));
62-
return std::unique_ptr<webrtc::VideoFrame>(new webrtc::VideoFrame(WTFMove(frameBuffer), timestamp, renderTimeMs, rotation));
72+
webrtc::VideoFrame::Builder builder;
73+
auto buffer = gst_sample_get_buffer(sample.get());
74+
auto pts = GST_BUFFER_PTS(buffer);
75+
return builder.set_video_frame_buffer(GStreamerVideoFrameLibWebRTC::create(WTFMove(sample)))
76+
.set_timestamp_rtp(rtpTimestamp)
77+
.set_timestamp_us(pts)
78+
.build();
6379
}
6480

6581
rtc::scoped_refptr<webrtc::VideoFrameBuffer> GStreamerVideoFrameLibWebRTC::create(GRefPtr<GstSample>&& sample)
@@ -74,6 +90,7 @@ rtc::scoped_refptr<webrtc::VideoFrameBuffer> GStreamerVideoFrameLibWebRTC::creat
7490

7591
rtc::scoped_refptr<webrtc::I420BufferInterface> GStreamerVideoFrameLibWebRTC::ToI420()
7692
{
93+
ensureDebugCategoryIsRegistered();
7794
GstMappedFrame inFrame(m_sample, GST_MAP_READ);
7895
if (!inFrame) {
7996
GST_WARNING("Could not map input frame");
@@ -111,4 +128,6 @@ rtc::scoped_refptr<webrtc::I420BufferInterface> GStreamerVideoFrameLibWebRTC::To
111128

112129
}
113130

114-
#endif // USE(LIBWEBRTC)
131+
#undef GST_CAT_DEFAULT
132+
133+
#endif // USE(LIBWEBRTC) && USE(GSTREAMER)

Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoFrameLibWebRTC.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ namespace WebCore {
3333

3434
WARN_UNUSED_RETURN GRefPtr<GstSample> convertLibWebRTCVideoFrameToGStreamerSample(const webrtc::VideoFrame&);
3535

36-
std::unique_ptr<webrtc::VideoFrame> convertGStreamerSampleToLibWebRTCVideoFrame(GRefPtr<GstSample>&&, webrtc::VideoRotation, int64_t timestamp, int64_t renderTimeMs);
36+
webrtc::VideoFrame convertGStreamerSampleToLibWebRTCVideoFrame(GRefPtr<GstSample>&&, uint32_t rtpTimestamp);
3737

3838
class GStreamerVideoFrameLibWebRTC : public rtc::RefCountedObject<webrtc::VideoFrameBuffer> {
3939
public:

0 commit comments

Comments (0)