File webkit2gtk3-old-gstreamer.patch of Package webkit2gtk3.23800

diff -urp webkitgtk-2.36.0.orig/Source/cmake/GStreamerChecks.cmake webkitgtk-2.36.0.gstreamer/Source/cmake/GStreamerChecks.cmake
--- webkitgtk-2.36.0.orig/Source/cmake/GStreamerChecks.cmake	2022-02-23 02:59:06.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/cmake/GStreamerChecks.cmake	2022-03-22 13:18:04.341943346 -0500
@@ -28,7 +28,7 @@ if (ENABLE_VIDEO OR ENABLE_WEB_AUDIO)
               list(APPEND GSTREAMER_COMPONENTS audio fft)
           endif ()
 
-          find_package(GStreamer 1.14.0 REQUIRED COMPONENTS ${GSTREAMER_COMPONENTS})
+          find_package(GStreamer 1.10.0 REQUIRED COMPONENTS ${GSTREAMER_COMPONENTS})
 
           if (ENABLE_WEB_AUDIO)
               if (NOT PC_GSTREAMER_AUDIO_FOUND OR NOT PC_GSTREAMER_FFT_FOUND)
@@ -52,7 +52,14 @@ if (ENABLE_VIDEO OR ENABLE_WEB_AUDIO)
       endif ()
 endif ()
 
+if (ENABLE_MEDIA_SOURCE AND PC_GSTREAMER_VERSION VERSION_LESS "1.14")
+    message(FATAL_ERROR "GStreamer 1.14 is needed for ENABLE_MEDIA_SOURCE.")
+endif ()
+
 if (ENABLE_MEDIA_STREAM AND ENABLE_WEB_RTC)
+    if (PC_GSTREAMER_VERSION VERSION_LESS "1.12")
+        message(FATAL_ERROR "GStreamer 1.12 is needed for ENABLE_WEB_RTC.")
+    endif ()
     SET_AND_EXPOSE_TO_BUILD(USE_LIBWEBRTC TRUE)
 else ()
     SET_AND_EXPOSE_TO_BUILD(USE_LIBWEBRTC FALSE)
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp	2022-02-23 02:59:01.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp	2022-03-22 13:18:04.341943346 -0500
@@ -79,6 +79,7 @@ struct _WebKitWebAudioSrcPrivate {
 
     GRefPtr<GstBufferPool> pool;
 
+    bool enableGapBufferSupport;
     bool hasRenderedAudibleFrame { false };
 
     Lock dispatchToRenderThreadLock;
@@ -93,6 +94,11 @@ struct _WebKitWebAudioSrcPrivate {
         sourcePad = webkitGstGhostPadFromStaticTemplate(&srcTemplate, "src", nullptr);
 
         g_rec_mutex_init(&mutex);
+
+        // GAP buffer support is enabled only for GStreamer >= 1.12.5, the
+        // first release carrying the fix for a GAP-buffer memory leak.
+        // https://bugzilla.gnome.org/show_bug.cgi?id=793067
+        enableGapBufferSupport = webkitGstCheckVersion(1, 12, 5);
     }
 
     ~_WebKitWebAudioSrcPrivate()
@@ -378,7 +384,7 @@ static void webKitWebAudioSrcRenderAndPu
         GST_BUFFER_TIMESTAMP(buffer.get()) = outputTimestamp.position.nanoseconds();
         GST_BUFFER_DURATION(buffer.get()) = duration;
 
-        if (priv->bus->channel(i)->isSilent())
+        if (priv->enableGapBufferSupport && priv->bus->channel(i)->isSilent())
             GST_BUFFER_FLAG_SET(buffer.get(), GST_BUFFER_FLAG_GAP);
 
         if (failed)
@@ -437,7 +443,9 @@ static GstStateChangeReturn webKitWebAud
     auto* src = WEBKIT_WEB_AUDIO_SRC(element);
     auto* priv = src->priv;
 
+#if GST_CHECK_VERSION(1, 14, 0)
     GST_DEBUG_OBJECT(element, "%s", gst_state_change_get_name(transition));
+#endif
 
     switch (transition) {
     case GST_STATE_CHANGE_NULL_TO_READY:
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/eme/GStreamerEMEUtilities.h webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/eme/GStreamerEMEUtilities.h
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/eme/GStreamerEMEUtilities.h	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/eme/GStreamerEMEUtilities.h	2022-03-22 13:18:04.341943346 -0500
@@ -78,7 +78,7 @@ public:
     const String& systemId() const { return m_systemId; }
     String payloadContainerType() const
     {
-#if GST_CHECK_VERSION(1, 16, 0)
+#if GST_CHECK_VERSION(1, 15, 0)
         if (m_systemId == GST_PROTECTION_UNSPECIFIED_SYSTEM_ID)
             return "webm"_s;
 #endif
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/eme/WebKitCommonEncryptionDecryptorGStreamer.cpp	2022-03-22 13:18:04.341943346 -0500
@@ -148,7 +148,7 @@ static GstCaps* transformCaps(GstBaseTra
             // GST_PROTECTION_UNSPECIFIED_SYSTEM_ID was added in the GStreamer
             // developement git master which will ship as version 1.16.0.
             gst_structure_set_name(outgoingStructure.get(),
-#if GST_CHECK_VERSION(1, 16, 0)
+#if GST_CHECK_VERSION(1, 15, 0)
                 !g_strcmp0(klass->protectionSystemId(self), GST_PROTECTION_UNSPECIFIED_SYSTEM_ID) ? "application/x-webm-enc" :
 #endif
                 "application/x-cenc");
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp	2022-03-22 13:18:04.341943346 -0500
@@ -160,7 +160,11 @@ std::optional<GRefPtr<GstContext>> reque
     if (!g_strcmp0(contextType, "gst.gl.app_context")) {
         GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
         GstStructure* structure = gst_context_writable_structure(appContext);
+#if GST_CHECK_VERSION(1, 12, 0)
         gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext, nullptr);
+#else
+        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext, nullptr);
+#endif
         return adoptGRef(appContext);
     }
 
@@ -181,11 +185,15 @@ static bool setGLContext(GstElement* ele
 
 static GstStateChangeReturn webKitGLVideoSinkChangeState(GstElement* element, GstStateChange transition)
 {
+#if GST_CHECK_VERSION(1, 14, 0)
     GST_DEBUG_OBJECT(element, "%s", gst_state_change_get_name(transition));
+#endif
 
     switch (transition) {
     case GST_STATE_CHANGE_NULL_TO_READY:
+#if GST_CHECK_VERSION(1, 14, 0)
     case GST_STATE_CHANGE_READY_TO_READY:
+#endif
     case GST_STATE_CHANGE_READY_TO_PAUSED: {
         if (!setGLContext(element, GST_GL_DISPLAY_CONTEXT_TYPE))
             return GST_STATE_CHANGE_FAILURE;
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp	2022-03-22 13:18:04.341943346 -0500
@@ -57,8 +57,9 @@ GStreamerAudioMixer::GStreamerAudioMixer
 
 void GStreamerAudioMixer::ensureState(GstStateChange stateChange)
 {
+#if GST_CHECK_VERSION(1, 14, 0)
     GST_DEBUG_OBJECT(m_pipeline.get(), "Handling %s transition (%u mixer pads)", gst_state_change_get_name(stateChange), m_mixer->numsinkpads);
-
+#endif
     switch (stateChange) {
     case GST_STATE_CHANGE_READY_TO_PAUSED:
         gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp	2022-03-21 14:19:36.303412337 -0500
@@ -39,6 +39,7 @@
 #include <wtf/Scope.h>
 #include <wtf/glib/GUniquePtr.h>
 #include <wtf/glib/RunLoopSourcePriority.h>
+#include <wtf/PrintStream.h>
 
 #if USE(GSTREAMER_FULL)
 #include <gst/gstinitstaticplugins.h>
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h	2022-03-16 08:48:02.000000000 -0500
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h	2022-03-21 15:12:02.672022277 -0500
@@ -83,14 +83,6 @@ inline GstClockTime toGstClockTime(const
     return static_cast<GstClockTime>(toGstUnsigned64Time(mediaTime));
 }
 
-inline MediaTime fromGstClockTime(GstClockTime time)
-{
-    if (!GST_CLOCK_TIME_IS_VALID(time))
-        return MediaTime::invalidTime();
-
-    return MediaTime(GST_TIME_AS_USECONDS(time), G_USEC_PER_SEC);
-}
-
 class GstMappedBuffer {
     WTF_MAKE_NONCOPYABLE(GstMappedBuffer);
 public:
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp	2022-03-22 13:18:04.345943367 -0500
@@ -51,7 +51,6 @@
 #include "InbandTextTrackPrivateGStreamer.h"
 #include "TextCombinerGStreamer.h"
 #include "TextSinkGStreamer.h"
-#include "VideoFrameMetadataGStreamer.h"
 #include "VideoTrackPrivateGStreamer.h"
 
 #if ENABLE(MEDIA_STREAM)
@@ -134,6 +133,14 @@ using namespace std;
 static const FloatSize s_holePunchDefaultFrameSize(1280, 720);
 #endif
 
+static void convertToInternalProtocol(URL& url)
+{
+    if (webkitGstCheckVersion(1, 12, 0))
+        return;
+    if (url.protocolIsInHTTPFamily() || url.protocolIsBlob())
+        url.setProtocol(makeString("webkit+", url.protocol()));
+}
+
 static void initializeDebugCategory()
 {
     static std::once_flag onceFlag;
@@ -816,15 +823,20 @@ bool MediaPlayerPrivateGStreamer::hasSin
 
 std::optional<bool> MediaPlayerPrivateGStreamer::wouldTaintOrigin(const SecurityOrigin& origin) const
 {
-    GST_TRACE_OBJECT(pipeline(), "Checking %u origins", m_origins.size());
-    for (auto& responseOrigin : m_origins) {
-        if (!origin.isSameOriginDomain(*responseOrigin)) {
-            GST_DEBUG_OBJECT(pipeline(), "Found reachable response origin");
-            return true;
+    if (webkitGstCheckVersion(1, 12, 0)) {
+        GST_TRACE_OBJECT(pipeline(), "Checking %u origins", m_origins.size());
+        for (auto& responseOrigin : m_origins) {
+            if (!origin.isSameOriginDomain(*responseOrigin)) {
+                GST_DEBUG_OBJECT(pipeline(), "Found reachable response origin");
+                return true;
+            }
         }
     }
-    GST_DEBUG_OBJECT(pipeline(), "No valid response origin found");
-    return false;
+
+    // GStreamer < 1.12 has an incomplete uridownloader implementation so we
+    // can't use WebKitWebSrc for adaptive fragments downloading if this
+    // version is detected.
+    return m_hasTaintedOrigin;
 }
 
 void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
@@ -922,6 +934,7 @@ void MediaPlayerPrivateGStreamer::setPla
         cleanURLString = cleanURLString.substring(0, url.pathEnd());
 
     m_url = URL(URL(), cleanURLString);
+    convertToInternalProtocol(m_url);
     GST_INFO_OBJECT(pipeline(), "Load %s", m_url.string().utf8().data());
     g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
 }
@@ -1863,6 +1876,7 @@ void MediaPlayerPrivateGStreamer::handle
             GST_DEBUG_OBJECT(pipeline(), "Processing HTTP headers: %" GST_PTR_FORMAT, structure);
             if (const char* uri = gst_structure_get_string(structure, "uri")) {
                 URL url(URL(), uri);
+                convertToInternalProtocol(url);
                 m_origins.add(SecurityOrigin::create(url));
 
                 if (url != m_url) {
@@ -1901,6 +1915,11 @@ void MediaPlayerPrivateGStreamer::handle
         } else if (gst_structure_has_name(structure, "webkit-network-statistics")) {
             if (gst_structure_get(structure, "read-position", G_TYPE_UINT64, &m_networkReadPosition, "size", G_TYPE_UINT64, &m_httpResponseTotalSize, nullptr))
                 GST_DEBUG_OBJECT(pipeline(), "Updated network read position %" G_GUINT64_FORMAT ", size: %" G_GUINT64_FORMAT, m_networkReadPosition, m_httpResponseTotalSize);
+        } else if (gst_structure_has_name(structure, "adaptive-streaming-statistics")) {
+            if (WEBKIT_IS_WEB_SRC(m_source.get()) && !webkitGstCheckVersion(1, 12, 0)) {
+                if (const char* uri = gst_structure_get_string(structure, "uri"))
+                    m_hasTaintedOrigin = webKitSrcWouldTaintOrigin(WEBKIT_WEB_SRC_CAST(m_source.get()), SecurityOrigin::create(URL(URL(), uri)));
+            }
         } else if (gst_structure_has_name(structure, "GstCacheDownloadComplete")) {
             GST_INFO_OBJECT(pipeline(), "Stream is fully downloaded, stopping monitoring downloading progress.");
             m_fillTimer.stop();
@@ -2721,26 +2740,31 @@ void MediaPlayerPrivateGStreamer::create
     g_signal_connect(GST_BIN_CAST(m_pipeline.get()), "deep-element-added", G_CALLBACK(+[](GstBin*, GstBin* subBin, GstElement* element, MediaPlayerPrivateGStreamer* player) {
         GUniquePtr<char> binName(gst_element_get_name(GST_ELEMENT_CAST(subBin)));
         GUniquePtr<char> elementName(gst_element_get_name(element));
-        auto elementClass = makeString(gst_element_get_metadata(element, GST_ELEMENT_METADATA_KLASS));
-        auto classifiers = elementClass.split('/');
-
-        // Collect processing time metrics for video decoders and converters.
-        if ((classifiers.contains("Converter"_s) || classifiers.contains("Decoder"_s)) && classifiers.contains("Video"_s) && !classifiers.contains("Parser"))
-            webkitGstTraceProcessingTimeForElement(element);
-
-        if (classifiers.contains("Decoder"_s) && classifiers.contains("Video"_s)) {
-            player->configureVideoDecoder(element);
-            return;
-        }
 
         if (g_str_has_prefix(elementName.get(), "downloadbuffer")) {
             player->configureDownloadBuffer(element);
             return;
         }
 
-        // This will set the multiqueue size to the default value.
-        if (g_str_has_prefix(elementName.get(), "uridecodebin"))
+        if (g_str_has_prefix(elementName.get(), "uridecodebin")) {
+            // This will set the multiqueue size to the default value.
             g_object_set(element, "buffer-size", 2 * MB, nullptr);
+            return;
+        }
+
+        if (!g_str_has_prefix(binName.get(), "decodebin"))
+            return;
+
+        if (g_str_has_prefix(elementName.get(), "v4l2"))
+            player->m_videoDecoderPlatform = GstVideoDecoderPlatform::Video4Linux;
+        else if (g_str_has_prefix(elementName.get(), "imxvpudec"))
+            player->m_videoDecoderPlatform = GstVideoDecoderPlatform::ImxVPU;
+        else if (g_str_has_prefix(elementName.get(), "omx"))
+            player->m_videoDecoderPlatform = GstVideoDecoderPlatform::OpenMAX;
+
+#if USE(TEXTURE_MAPPER_GL)
+        player->updateTextureMapperFlags();
+#endif
     }), this);
 
     g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
@@ -2784,27 +2808,6 @@ void MediaPlayerPrivateGStreamer::create
         }), this);
 }
 
-void MediaPlayerPrivateGStreamer::configureVideoDecoder(GstElement* decoder)
-{
-    GUniquePtr<char> name(gst_element_get_name(decoder));
-    if (g_str_has_prefix(name.get(), "v4l2"))
-        m_videoDecoderPlatform = GstVideoDecoderPlatform::Video4Linux;
-    else if (g_str_has_prefix(name.get(), "imxvpudec"))
-        m_videoDecoderPlatform = GstVideoDecoderPlatform::ImxVPU;
-    else if (g_str_has_prefix(name.get(), "omx"))
-        m_videoDecoderPlatform = GstVideoDecoderPlatform::OpenMAX;
-    else if (g_str_has_prefix(name.get(), "avdec")) {
-        // Set the decoder maximum number of threads to a low, fixed value, not depending on the
-        // platform. This also helps with processing metrics gathering. When using the default value
-        // the decoder introduces artificial processing latency reflecting the maximum number of threads.
-        g_object_set(decoder, "max-threads", 2, nullptr);
-    }
-
-#if USE(TEXTURE_MAPPER_GL)
-    updateTextureMapperFlags();
-#endif
-}
-
 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
 {
     if (WEBKIT_IS_WEB_SRC(m_source.get()))
@@ -2916,8 +2919,6 @@ void MediaPlayerPrivateGStreamer::pushTe
     if (!GST_IS_SAMPLE(m_sample.get()))
         return;
 
-    ++m_sampleCount;
-
     auto internalCompositingOperation = [this](TextureMapperPlatformLayerProxyGL& proxy, std::unique_ptr<GstVideoFrameHolder>&& frameHolder) {
         std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
         if (frameHolder->hasMappedTextures()) {
@@ -3222,7 +3223,7 @@ void MediaPlayerPrivateGStreamer::flushC
 {
     Locker sampleLocker { m_sampleMutex };
 
-    if (m_sample && gst_sample_get_buffer(m_sample.get())) {
+    if (m_sample) {
         // Allocate a new copy of the sample which has to be released. The copy is necessary so that
         // the video dimensions can still be fetched and also for canvas rendering. The release is
         // necessary because the sample might have been allocated by a hardware decoder and memory
@@ -3793,31 +3794,6 @@ WTFLogChannel& MediaPlayerPrivateGStream
 }
 #endif
 
-std::optional<VideoFrameMetadata> MediaPlayerPrivateGStreamer::videoFrameMetadata()
-{
-    if (m_sampleCount == m_lastVideoFrameMetadataSampleCount)
-        return { };
-
-    m_lastVideoFrameMetadataSampleCount = m_sampleCount;
-
-    Locker sampleLocker { m_sampleMutex };
-    if (!GST_IS_SAMPLE(m_sample.get()))
-        return { };
-
-    auto* buffer = gst_sample_get_buffer(m_sample.get());
-    auto metadata = webkitGstBufferGetVideoFrameMetadata(buffer);
-    auto size = naturalSize();
-    metadata.width = size.width();
-    metadata.height = size.height();
-    metadata.presentedFrames = m_sampleCount;
-
-    // FIXME: presentationTime and expectedDisplayTime might not always have the same value, we should try getting more precise values.
-    metadata.presentationTime = MonotonicTime::now().secondsSinceEpoch().seconds();
-    metadata.expectedDisplayTime = metadata.presentationTime;
-
-    return metadata;
-}
-
 }
 
 #endif // USE(GSTREAMER)
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h	2022-03-22 13:18:04.345943367 -0500
@@ -53,6 +53,16 @@ typedef struct _GstMpegtsSection GstMpeg
 #if USE(LIBEPOXY)
 // Include the <epoxy/gl.h> header before <gst/gl/gl.h>.
 #include <epoxy/gl.h>
+
+// Workaround build issue with RPi userland GLESv2 headers and libepoxy <https://webkit.org/b/185639>
+#if !GST_CHECK_VERSION(1, 14, 0)
+#include <gst/gl/gstglconfig.h>
+#if defined(GST_GL_HAVE_WINDOW_DISPMANX) && GST_GL_HAVE_WINDOW_DISPMANX
+#define __gl2_h_
+#undef GST_GL_HAVE_GLSYNC
+#define GST_GL_HAVE_GLSYNC 1
+#endif
+#endif // !GST_CHECK_VERSION(1, 14, 0)
 #endif // USE(LIBEPOXY)
 
 #define GST_USE_UNSTABLE_API
@@ -466,8 +476,6 @@ private:
     void configureDownloadBuffer(GstElement*);
     static void downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer*);
 
-    void configureVideoDecoder(GstElement*);
-
     void setPlaybinURL(const URL& urlString);
 
     void updateTracks(const GRefPtr<GstStreamCollection>&);
@@ -563,9 +571,6 @@ private:
     DataMutex<TaskAtMediaTimeScheduler> m_TaskAtMediaTimeSchedulerDataMutex;
 
 private:
-    std::optional<VideoFrameMetadata> videoFrameMetadata() final;
-    uint64_t m_sampleCount { 0 };
-    uint64_t m_lastVideoFrameMetadataSampleCount { 0 };
 #if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
     GUniquePtr<struct wpe_video_plane_display_dmabuf_source> m_wpeVideoPlaneDisplayDmaBuf;
 #endif
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.cpp	2022-03-16 08:48:02.000000000 -0500
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.cpp	2022-03-22 14:08:37.241955857 -0500
@@ -24,7 +24,6 @@
 
 #include "GStreamerCommon.h"
 #include "PixelBuffer.h"
-#include "VideoFrameMetadataGStreamer.h"
 #include <JavaScriptCore/JSCInlines.h>
 #include <JavaScriptCore/TypedArrayInlines.h>
 #include <algorithm>
@@ -33,7 +32,7 @@
 
 namespace WebCore {
 
-MediaSampleGStreamer::MediaSampleGStreamer(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const AtomString& trackId, VideoRotation videoRotation, bool videoMirrored, std::optional<VideoSampleMetadata>&& metadata)
+MediaSampleGStreamer::MediaSampleGStreamer(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const AtomString& trackId, VideoRotation videoRotation, bool videoMirrored)
     : m_pts(MediaTime::zeroTime())
     , m_dts(MediaTime::zeroTime())
     , m_duration(MediaTime::zeroTime())
@@ -46,9 +45,6 @@ MediaSampleGStreamer::MediaSampleGStream
     GstBuffer* buffer = gst_sample_get_buffer(sample.get());
     RELEASE_ASSERT(buffer);
 
-    if (metadata)
-        buffer = webkitGstBufferSetVideoSampleMetadata(buffer, WTFMove(metadata));
-
     m_sample = sample;
     initializeFromBuffer();
 }
@@ -79,7 +75,7 @@ Ref<MediaSampleGStreamer> MediaSampleGSt
     return adoptRef(*gstreamerMediaSample);
 }
 
-Ref<MediaSampleGStreamer> MediaSampleGStreamer::createImageSample(PixelBuffer&& pixelBuffer, const IntSize& destinationSize, double frameRate, VideoRotation videoRotation, bool videoMirrored, std::optional<VideoSampleMetadata>&& metadata)
+Ref<MediaSampleGStreamer> MediaSampleGStreamer::createImageSample(PixelBuffer&& pixelBuffer, const IntSize& destinationSize, double frameRate, VideoRotation videoRotation, bool videoMirrored)
 {
     ensureGStreamerInitialized();
 
@@ -98,9 +94,6 @@ Ref<MediaSampleGStreamer> MediaSampleGSt
     auto height = size.height();
     gst_buffer_add_video_meta(buffer.get(), GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_BGRA, width, height);
 
-    if (metadata)
-        webkitGstBufferSetVideoSampleMetadata(buffer.get(), *metadata);
-
     int frameRateNumerator, frameRateDenominator;
     gst_util_double_to_fraction(frameRate, &frameRateNumerator, &frameRateDenominator);
 
@@ -135,28 +128,33 @@ Ref<MediaSampleGStreamer> MediaSampleGSt
 
 void MediaSampleGStreamer::initializeFromBuffer()
 {
+    auto createMediaTime =
+        [](GstClockTime time) -> MediaTime {
+            return MediaTime(GST_TIME_AS_USECONDS(time), G_USEC_PER_SEC);
+        };
+
     const GstClockTime minimumDuration = 1000; // 1 us
     auto* buffer = gst_sample_get_buffer(m_sample.get());
     RELEASE_ASSERT(buffer);
 
     if (GST_BUFFER_PTS_IS_VALID(buffer))
-        m_pts = fromGstClockTime(GST_BUFFER_PTS(buffer));
+        m_pts = createMediaTime(GST_BUFFER_PTS(buffer));
     if (GST_BUFFER_DTS_IS_VALID(buffer) || GST_BUFFER_PTS_IS_VALID(buffer))
-        m_dts = fromGstClockTime(GST_BUFFER_DTS_OR_PTS(buffer));
+        m_dts = createMediaTime(GST_BUFFER_DTS_OR_PTS(buffer));
     if (GST_BUFFER_DURATION_IS_VALID(buffer)) {
         // Sometimes (albeit rarely, so far seen only at the end of a track)
         // frames have very small durations, so small that may be under the
         // precision we are working with and be truncated to zero.
         // SourceBuffer algorithms are not expecting frames with zero-duration,
         // so let's use something very small instead in those fringe cases.
-        m_duration = fromGstClockTime(std::max(GST_BUFFER_DURATION(buffer), minimumDuration));
+        m_duration = createMediaTime(std::max(GST_BUFFER_DURATION(buffer), minimumDuration));
     } else {
         // Unfortunately, sometimes samples don't provide a duration. This can never happen in MP4 because of the way
         // the format is laid out, but it's pretty common in WebM.
         // The good part is that durations don't matter for playback, just for buffered ranges and coded frame deletion.
         // We want to pick something small enough to not cause unwanted frame deletion, but big enough to never be
         // mistaken for a rounding artifact.
-        m_duration = fromGstClockTime(16666667); // 1/60 seconds
+        m_duration = createMediaTime(16666667); // 1/60 seconds
     }
 
     m_size = gst_buffer_get_size(buffer);
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.h webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.h
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.h	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.h	2022-03-22 13:54:58.421632900 -0500
@@ -26,7 +26,6 @@
 #include "FloatSize.h"
 #include "GStreamerCommon.h"
 #include "MediaSample.h"
-#include "VideoSampleMetadata.h"
 #include <wtf/text/AtomString.h>
 
 namespace WebCore {
@@ -35,9 +34,9 @@ class PixelBuffer;
 
 class MediaSampleGStreamer : public MediaSample {
 public:
-    static Ref<MediaSampleGStreamer> create(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const AtomString& trackId, VideoRotation videoRotation = VideoRotation::None, bool videoMirrored = false, std::optional<VideoSampleMetadata>&& metadata = std::nullopt)
+    static Ref<MediaSampleGStreamer> create(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const AtomString& trackId, VideoRotation videoRotation = VideoRotation::None, bool videoMirrored = false)
     {
-        return adoptRef(*new MediaSampleGStreamer(WTFMove(sample), presentationSize, trackId, videoRotation, videoMirrored, WTFMove(metadata)));
+        return adoptRef(*new MediaSampleGStreamer(WTFMove(sample), presentationSize, trackId, videoRotation, videoMirrored));
     }
 
     static Ref<MediaSampleGStreamer> createWrappedSample(const GRefPtr<GstSample>& sample, VideoRotation videoRotation = VideoRotation::None)
@@ -46,7 +45,7 @@ public:
     }
 
     static Ref<MediaSampleGStreamer> createFakeSample(GstCaps*, MediaTime pts, MediaTime dts, MediaTime duration, const FloatSize& presentationSize, const AtomString& trackId);
-    static Ref<MediaSampleGStreamer> createImageSample(PixelBuffer&&, const IntSize& destinationSize = { }, double frameRate = 1, VideoRotation videoRotation = VideoRotation::None, bool videoMirrored = false, std::optional<VideoSampleMetadata>&& metadata = std::nullopt);
+    static Ref<MediaSampleGStreamer> createImageSample(PixelBuffer&&, const IntSize& destinationSize = { }, double frameRate = 1, VideoRotation videoRotation = VideoRotation::None, bool videoMirrored = false);
 
     void extendToTheBeginning();
     MediaTime presentationTime() const override { return m_pts; }
@@ -70,7 +69,7 @@ public:
     bool videoMirrored() const override { return m_videoMirrored; }
 
 protected:
-    MediaSampleGStreamer(GRefPtr<GstSample>&&, const FloatSize& presentationSize, const AtomString& trackId, VideoRotation = VideoRotation::None, bool videoMirrored = false, std::optional<VideoSampleMetadata>&& = std::nullopt);
+    MediaSampleGStreamer(GRefPtr<GstSample>&&, const FloatSize& presentationSize, const AtomString& trackId, VideoRotation = VideoRotation::None, bool videoMirrored = false);
     MediaSampleGStreamer(const GRefPtr<GstSample>&, VideoRotation = VideoRotation::None);
     virtual ~MediaSampleGStreamer() = default;
 
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/PlatformDisplayGStreamer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/PlatformDisplayGStreamer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/PlatformDisplayGStreamer.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/PlatformDisplayGStreamer.cpp	2022-03-22 13:18:04.345943367 -0500
@@ -98,13 +98,21 @@ bool PlatformDisplay::tryEnsureGstGLCont
     if (!contextHandle)
         return false;
 
-    m_gstGLDisplay = adoptGRef(createGstGLDisplay(*this));
+    bool shouldAdoptRef = webkitGstCheckVersion(1, 14, 0);
+
+    if (shouldAdoptRef)
+        m_gstGLDisplay = adoptGRef(createGstGLDisplay(*this));
+    else
+        m_gstGLDisplay = createGstGLDisplay(*this);
     if (!m_gstGLDisplay)
         return false;
 
     GstGLPlatform glPlatform = sharedContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;
 
-    m_gstGLContext = adoptGRef(gst_gl_context_new_wrapped(m_gstGLDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI));
+    if (shouldAdoptRef)
+        m_gstGLContext = adoptGRef(gst_gl_context_new_wrapped(m_gstGLDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI));
+    else
+        m_gstGLContext = gst_gl_context_new_wrapped(m_gstGLDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);
 
     // Activate and fill the GStreamer wrapped context with the Webkit's shared one.
     auto* previousActiveContext = GLContext::current();
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/WebKitAudioSinkGStreamer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/WebKitAudioSinkGStreamer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/WebKitAudioSinkGStreamer.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/WebKitAudioSinkGStreamer.cpp	2022-03-22 13:18:04.345943367 -0500
@@ -256,7 +256,9 @@ static GstStateChangeReturn webKitAudioS
     auto* sink = WEBKIT_AUDIO_SINK(element);
     auto* priv = sink->priv;
 
+#if GST_CHECK_VERSION(1, 14, 0)
     GST_DEBUG_OBJECT(sink, "Handling %s transition", gst_state_change_get_name(stateChange));
+#endif
 
     auto& mixer = GStreamerAudioMixer::singleton();
     if (priv->interAudioSink && stateChange == GST_STATE_CHANGE_NULL_TO_READY)
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp	2022-03-22 13:18:04.345943367 -0500
@@ -467,8 +467,12 @@ static GstFlowReturn webKitWebSrcCreate(
     // 1) webKitWebSrcSetMediaPlayer() is called by MediaPlayerPrivateGStreamer by means of hooking playbin's
     //    "source-setup" event. This doesn't work for additional WebKitWebSrc elements created by adaptivedemux.
     //
-    // 2) A GstContext query made here.
-    if (!members->player) {
+    // 2) A GstContext query made here. Because of a bug, this only works in GStreamer >= 1.12.
+    //
+    // As a compatibility workaround, the http: URI protocol is only registered for gst>=1.12; otherwise using
+    // webkit+http:, which is used by MediaPlayerPrivateGStreamer but not by adaptivedemux's additional source
+    // elements, therefore using souphttpsrc instead and not routing traffic through the NetworkProcess.
+    if (webkitGstCheckVersion(1, 12, 0) && !members->player) {
         members.runUnlocked([src, baseSrc]() {
             GRefPtr<GstQuery> query = adoptGRef(gst_query_new_context(WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME));
             if (gst_pad_peer_query(GST_BASE_SRC_PAD(baseSrc), query.get())) {
@@ -868,9 +872,15 @@ static GstURIType webKitWebSrcUriGetType
 const gchar* const* webKitWebSrcGetProtocols(GType)
 {
     static const char* protocols[4];
-    protocols[0] = "http";
-    protocols[1] = "https";
-    protocols[2] = "blob";
+    if (webkitGstCheckVersion(1, 12, 0)) {
+        protocols[0] = "http";
+        protocols[1] = "https";
+        protocols[2] = "blob";
+    } else {
+        protocols[0] = "webkit+http";
+        protocols[1] = "webkit+https";
+        protocols[2] = "webkit+blob";
+    }
     protocols[3] = nullptr;
     return protocols;
 }
@@ -878,6 +888,10 @@ const gchar* const* webKitWebSrcGetProto
 static URL convertPlaybinURI(const char* uriString)
 {
     URL url(URL(), uriString);
+    if (!webkitGstCheckVersion(1, 12, 0)) {
+        ASSERT(url.protocol().substring(0, 7) == "webkit+");
+        url.setProtocol(url.protocol().substring(7).toString());
+    }
     return url;
 }
 
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/GStreamer.cmake webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/GStreamer.cmake
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/GStreamer.cmake	2022-02-23 02:59:01.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/GStreamer.cmake	2022-03-22 13:18:04.345943367 -0500
@@ -24,7 +24,6 @@ if (ENABLE_VIDEO OR ENABLE_WEB_AUDIO)
         platform/graphics/gstreamer/TextCombinerPadGStreamer.cpp
         platform/graphics/gstreamer/TextSinkGStreamer.cpp
         platform/graphics/gstreamer/TrackPrivateBaseGStreamer.cpp
-        platform/graphics/gstreamer/VideoFrameMetadataGStreamer.cpp
         platform/graphics/gstreamer/VideoSinkGStreamer.cpp
         platform/graphics/gstreamer/VideoTrackPrivateGStreamer.cpp
         platform/graphics/gstreamer/WebKitAudioSinkGStreamer.cpp
@@ -146,13 +145,17 @@ if (ENABLE_VIDEO)
     endif ()
 
     if (ENABLE_MEDIA_STREAM OR ENABLE_WEB_RTC)
-        list(APPEND WebCore_SYSTEM_INCLUDE_DIRECTORIES
-            ${GSTREAMER_CODECPARSERS_INCLUDE_DIRS}
-        )
-        if (NOT USE_GSTREAMER_FULL)
-            list(APPEND WebCore_LIBRARIES
-                ${GSTREAMER_CODECPARSERS_LIBRARIES}
+        if (PC_GSTREAMER_VERSION VERSION_LESS "1.10")
+            message(FATAL_ERROR "GStreamer 1.10 is needed for ENABLE_MEDIA_STREAM or ENABLE_WEB_RTC")
+        else ()
+            list(APPEND WebCore_SYSTEM_INCLUDE_DIRECTORIES
+                ${GSTREAMER_CODECPARSERS_INCLUDE_DIRS}
             )
+            if (NOT USE_GSTREAMER_FULL)
+                list(APPEND WebCore_LIBRARIES
+                    ${GSTREAMER_CODECPARSERS_LIBRARIES}
+                )
+            endif ()
         endif ()
     endif ()
 endif ()
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.cpp	2022-03-21 15:12:02.676022299 -0500
@@ -23,14 +23,11 @@
 #include "config.h"
 
 #if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(GSTREAMER)
-
 #include "GStreamerCapturer.h"
-#include "VideoFrameMetadataGStreamer.h"
 
 #include <gst/app/gstappsink.h>
 #include <gst/app/gstappsrc.h>
 #include <mutex>
-#include <wtf/MonotonicTime.h>
 
 GST_DEBUG_CATEGORY(webkit_capturer_debug);
 #define GST_CAT_DEFAULT webkit_capturer_debug
@@ -102,20 +99,9 @@ GstElement* GStreamerCapturer::createSou
         if (GST_IS_APP_SRC(m_src.get()))
             g_object_set(m_src.get(), "is-live", true, "format", GST_FORMAT_TIME, nullptr);
 
-        auto srcPad = adoptGRef(gst_element_get_static_pad(m_src.get(), "src"));
-        if (m_deviceType == CaptureDevice::DeviceType::Camera) {
-            gst_pad_add_probe(srcPad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_PUSH | GST_PAD_PROBE_TYPE_BUFFER), [](GstPad*, GstPadProbeInfo* info, gpointer) -> GstPadProbeReturn {
-                VideoSampleMetadata metadata;
-                metadata.captureTime = MonotonicTime::now().secondsSinceEpoch();
-                auto* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
-                auto* modifiedBuffer = webkitGstBufferSetVideoSampleMetadata(buffer, metadata);
-                gst_buffer_replace(&buffer, modifiedBuffer);
-                return GST_PAD_PROBE_OK;
-            }, nullptr, nullptr);
-        }
-
         if (m_deviceType == CaptureDevice::DeviceType::Screen) {
-            gst_pad_add_probe(srcPad.get(), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, [](GstPad*, GstPadProbeInfo* info, void* userData) -> GstPadProbeReturn {
+            auto pad = adoptGRef(gst_element_get_static_pad(m_src.get(), "src"));
+            gst_pad_add_probe(pad.get(), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, [](GstPad*, GstPadProbeInfo* info, void* userData) -> GstPadProbeReturn {
                 auto* event = gst_pad_probe_info_get_event(info);
                 if (GST_EVENT_TYPE(event) != GST_EVENT_CAPS)
                     return GST_PAD_PROBE_OK;
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp	2022-03-21 15:12:02.676022299 -0500
@@ -31,7 +31,6 @@
 #include "GStreamerCommon.h"
 #include "MediaSampleGStreamer.h"
 #include "MediaStreamPrivate.h"
-#include "VideoFrameMetadataGStreamer.h"
 #include "VideoTrackPrivateMediaStream.h"
 
 #include <gst/app/gstappsrc.h>
@@ -62,10 +61,8 @@ GRefPtr<GstTagList> mediaStreamTrackPriv
         gst_tag_list_add(tagList.get(), GST_TAG_MERGE_APPEND, WEBKIT_MEDIA_TRACK_TAG_KIND, static_cast<int>(VideoTrackPrivate::Kind::Main), nullptr);
 
         auto& settings = track->settings();
-        if (settings.width())
-            gst_tag_list_add(tagList.get(), GST_TAG_MERGE_APPEND, WEBKIT_MEDIA_TRACK_TAG_WIDTH, settings.width(), nullptr);
-        if (settings.height())
-            gst_tag_list_add(tagList.get(), GST_TAG_MERGE_APPEND, WEBKIT_MEDIA_TRACK_TAG_HEIGHT, settings.height(), nullptr);
+        gst_tag_list_add(tagList.get(), GST_TAG_MERGE_APPEND, WEBKIT_MEDIA_TRACK_TAG_WIDTH, settings.width(),
+            WEBKIT_MEDIA_TRACK_TAG_HEIGHT, settings.height(), nullptr);
     }
 
     GST_DEBUG("Track tags: %" GST_PTR_FORMAT, tagList.get());
@@ -256,19 +253,19 @@ public:
         if (!m_parent)
             return;
 
-        auto sampleSize = sample.presentationSize();
-        IntSize captureSize(sampleSize.width(), sampleSize.height());
+        auto* gstSample = static_cast<MediaSampleGStreamer*>(&sample)->platformSample().sample.gstSample;
+        auto* caps = gst_sample_get_caps(gstSample);
+        GstVideoInfo info;
+        gst_video_info_from_caps(&info, caps);
 
-        auto settings = m_track.settings();
-        m_configuredSize.setWidth(settings.width());
-        m_configuredSize.setHeight(settings.height());
-
-        if (!m_configuredSize.width())
-            m_configuredSize.setWidth(captureSize.width());
-        if (!m_configuredSize.height())
-            m_configuredSize.setHeight(captureSize.height());
+        int width = GST_VIDEO_INFO_WIDTH(&info);
+        int height = GST_VIDEO_INFO_HEIGHT(&info);
+        if (m_lastKnownSize != IntSize(width, height)) {
+            m_lastKnownSize.setWidth(width);
+            m_lastKnownSize.setHeight(height);
+            updateBlackFrame(caps);
+        }
 
-        auto* mediaSample = static_cast<MediaSampleGStreamer*>(&sample);
         auto videoRotation = sample.videoRotation();
         bool videoMirrored = sample.videoMirrored();
         if (m_videoRotation != videoRotation || m_videoMirrored != videoMirrored) {
@@ -281,12 +278,6 @@ public:
             gst_pad_push_event(pad.get(), gst_event_new_tag(gst_tag_list_new(GST_TAG_IMAGE_ORIENTATION, orientation.utf8().data(), nullptr)));
         }
 
-        auto* gstSample = mediaSample->platformSample().sample.gstSample;
-        if (!m_configuredSize.isEmpty() && m_lastKnownSize != m_configuredSize) {
-            m_lastKnownSize = m_configuredSize;
-            updateBlackFrame(gst_sample_get_caps(gstSample));
-        }
-
         if (m_track.enabled()) {
             GST_TRACE_OBJECT(m_src.get(), "Pushing video frame from enabled track");
             pushSample(gstSample);
@@ -331,12 +322,6 @@ private:
     void pushBlackFrame()
     {
         GST_TRACE_OBJECT(m_src.get(), "Pushing black video frame");
-        VideoSampleMetadata metadata;
-        metadata.captureTime = MonotonicTime::now().secondsSinceEpoch();
-        auto* buffer = webkitGstBufferSetVideoSampleMetadata(gst_sample_get_buffer(m_blackFrame.get()), metadata);
-        // TODO: Use gst_sample_set_buffer() after bumping GStreamer dependency to 1.16.
-        auto* caps = gst_sample_get_caps(m_blackFrame.get());
-        m_blackFrame = adoptGRef(gst_sample_new(buffer, caps, nullptr, nullptr));
         pushSample(m_blackFrame.get());
     }
 
@@ -350,7 +335,6 @@ private:
     bool m_isObserving { false };
     RefPtr<AudioTrackPrivateMediaStream> m_audioTrack;
     RefPtr<VideoTrackPrivateMediaStream> m_videoTrack;
-    IntSize m_configuredSize;
     IntSize m_lastKnownSize;
     GRefPtr<GstSample> m_blackFrame;
     MediaSample::VideoRotation m_videoRotation { MediaSample::VideoRotation::None };
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/mediastream/gstreamer/MockRealtimeVideoSourceGStreamer.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/mediastream/gstreamer/MockRealtimeVideoSourceGStreamer.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/mediastream/gstreamer/MockRealtimeVideoSourceGStreamer.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/mediastream/gstreamer/MockRealtimeVideoSourceGStreamer.cpp	2022-03-21 15:12:02.676022299 -0500
@@ -162,9 +162,7 @@ void MockRealtimeVideoSourceGStreamer::u
     if (!pixelBuffer)
         return;
 
-    std::optional<VideoSampleMetadata> metadata;
-    metadata->captureTime = MonotonicTime::now().secondsSinceEpoch();
-    auto sample = MediaSampleGStreamer::createImageSample(WTFMove(*pixelBuffer), size(), frameRate(), sampleRotation(), false, WTFMove(metadata));
+    auto sample = MediaSampleGStreamer::createImageSample(WTFMove(*pixelBuffer), size(), frameRate(), sampleRotation());
     sample->offsetTimestampsBy(MediaTime::createWithDouble((elapsedTime() + 100_ms).seconds()));
     dispatchMediaSampleToObservers(sample.get(), { });
 }
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp	2022-03-21 16:13:53.111610814 -0500
@@ -63,7 +63,6 @@ void RealtimeIncomingVideoSourceLibWebRT
         videoSampleAvailable(MediaSampleGStreamer::createWrappedSample(framebuffer->getSample(), static_cast<MediaSample::VideoRotation>(frame.rotation())), { });
     } else {
         auto gstSample = convertLibWebRTCVideoFrameToGStreamerSample(frame);
-        auto metadata = std::make_optional(metadataFromVideoFrame(frame));
-        videoSampleAvailable(MediaSampleGStreamer::create(WTFMove(gstSample), { }, { }, static_cast<MediaSample::VideoRotation>(frame.rotation()), false, WTFMove(metadata)), { });
+        videoSampleAvailable(MediaSampleGStreamer::create(WTFMove(gstSample), { }, { }, static_cast<MediaSample::VideoRotation>(frame.rotation())), { });
     }
 }
diff -urp webkitgtk-2.36.0.orig/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.h webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.h
--- webkitgtk-2.36.0.orig/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.h	2022-02-23 02:59:02.000000000 -0600
+++ webkitgtk-2.36.0.gstreamer/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.h	2022-03-21 15:12:02.676022299 -0500
@@ -45,6 +45,8 @@ private:
 
     // rtc::VideoSinkInterface
     void OnFrame(const webrtc::VideoFrame&) final;
+    void setCapsFromSettings();
+    GRefPtr<GstCaps> m_caps;
 };
 
 } // namespace WebCore
diff -urp webkitgtk-2.36.0.orig/Source/WTF/Scripts/Preferences/WebPreferencesExperimental.yaml webkitgtk-2.36.0.gstreamer/Source/WTF/Scripts/Preferences/WebPreferencesExperimental.yaml
--- webkitgtk-2.36.0.orig/Source/WTF/Scripts/Preferences/WebPreferencesExperimental.yaml	2022-03-16 08:48:02.000000000 -0500
+++ webkitgtk-2.36.0.gstreamer/Source/WTF/Scripts/Preferences/WebPreferencesExperimental.yaml	2022-03-21 15:12:02.672022277 -0500
@@ -1201,11 +1201,9 @@ RequestVideoFrameCallbackEnabled:
       default: false
     WebKit:
       "PLATFORM(COCOA) && HAVE(AVSAMPLEBUFFERVIDEOOUTPUT)" : true
-      "USE(GSTREAMER)": true
       default: false
     WebCore:
       "PLATFORM(COCOA) && HAVE(AVSAMPLEBUFFERVIDEOOUTPUT)" : true
-      "USE(GSTREAMER)": true
       default: false
 
 # FIXME: This is on by default in WebKit2. Perhaps we should consider turning it on for WebKitLegacy as well.