/*
2
* Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
3
* Copyright (C) 2007 Collabora Ltd. All rights reserved.
4
* Copyright (C) 2007 Alp Toker <alp@atoker.com>
5
* Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6
* Copyright (C) 2014 Cable Television Laboratories, Inc.
7
* Copyright (C) 2009, 2019 Igalia S.L
8
* Copyright (C) 2015, 2019 Metrological Group B.V.
9
*
10
* This library is free software; you can redistribute it and/or
11
* modify it under the terms of the GNU Library General Public
12
* License as published by the Free Software Foundation; either
13
* version 2 of the License, or (at your option) any later version.
14
*
15
* This library is distributed in the hope that it will be useful,
16
* but WITHOUT ANY WARRANTY; without even the implied warranty of
17
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18
* Library General Public License for more details.
19
*
20
* You should have received a copy of the GNU Library General Public License
21
 * along with this library; see the file COPYING.LIB. If not, write to
22
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23
* Boston, MA 02110-1301, USA.
24
*/
25
26
#include "config.h"
27
#include "MediaPlayerPrivateGStreamer.h"
28
29
#if ENABLE(VIDEO) && USE(GSTREAMER)
30
31
#include "GraphicsContext.h"
32
#include "GStreamerCommon.h"
33
#include "GStreamerRegistryScanner.h"
34
#include "HTTPHeaderNames.h"
35
#include "ImageGStreamer.h"
36
#include "ImageOrientation.h"
37
#include "IntRect.h"
38
#include "Logging.h"
39
#include "MediaPlayer.h"
40
#include "MediaPlayerRequestInstallMissingPluginsCallback.h"
41
#include "MIMETypeRegistry.h"
42
#include "NotImplemented.h"
43
#include "SecurityOrigin.h"
44
#include "TimeRanges.h"
45
#include "VideoSinkGStreamer.h"
46
#include "WebKitWebSourceGStreamer.h"
47
48
#if ENABLE(VIDEO_TRACK)
49
#include "AudioTrackPrivateGStreamer.h"
50
#include "InbandMetadataTextTrackPrivateGStreamer.h"
51
#include "InbandTextTrackPrivateGStreamer.h"
52
#include "TextCombinerGStreamer.h"
53
#include "TextSinkGStreamer.h"
54
#include "VideoTrackPrivateGStreamer.h"
55
#endif // ENABLE(VIDEO_TRACK)
56
57
#if ENABLE(MEDIA_STREAM)
58
#include "GStreamerMediaStreamSource.h"
59
#endif
60
61
#if ENABLE(MEDIA_SOURCE)
62
#include "MediaSource.h"
63
#include "WebKitMediaSourceGStreamer.h"
64
#endif
65
66
#if ENABLE(ENCRYPTED_MEDIA)
67
#include "CDMInstance.h"
68
#include "CDMProxyClearKey.h"
69
#include "GStreamerEMEUtilities.h"
70
#include "SharedBuffer.h"
71
#include "WebKitCommonEncryptionDecryptorGStreamer.h"
72
#endif
73
74
#if ENABLE(WEB_AUDIO)
75
#include "AudioSourceProviderGStreamer.h"
76
#endif
77
78
#include <glib.h>
79
#include <gst/audio/streamvolume.h>
80
#include <gst/gst.h>
81
#include <gst/pbutils/missing-plugins.h>
82
#include <gst/video/gstvideometa.h>
83
#include <limits>
84
#include <wtf/FileSystem.h>
85
#include <wtf/glib/GLibUtilities.h>
86
#include <wtf/glib/GUniquePtr.h>
87
#include <wtf/glib/RunLoopSourcePriority.h>
88
#include <wtf/MathExtras.h>
89
#include <wtf/MediaTime.h>
90
#include <wtf/NeverDestroyed.h>
91
#include <wtf/StringPrintStream.h>
92
#include <wtf/text/AtomString.h>
93
#include <wtf/text/CString.h>
94
#include <wtf/text/StringConcatenateNumbers.h>
95
#include <wtf/URL.h>
96
#include <wtf/WallTime.h>
97
98
#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
99
#define GST_USE_UNSTABLE_API
100
#include <gst/mpegts/mpegts.h>
101
#undef GST_USE_UNSTABLE_API
102
#endif // ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
103
104
#if ENABLE(VIDEO_TRACK)
// Creates a <type> (audio/video/text) track private wrapping the given GstStream,
// registers it with the MediaPlayer and, when the stream carries the SELECT flag,
// records it as the currently selected stream of that kind. MSE manages its own
// tracks, hence the useMediaSource guard. Expects `i`, `stream` and
// `useMediaSource` to be in scope at the expansion site.
#define CREATE_TRACK(type, Type) G_STMT_START { \
        m_has##Type = true; \
        if (!useMediaSource) { \
            RefPtr<Type##TrackPrivateGStreamer> track = Type##TrackPrivateGStreamer::create(makeWeakPtr(*this), i, stream); \
            m_##type##Tracks.add(track->id(), track); \
            m_player->add##Type##Track(*track); \
            if (gst_stream_get_stream_flags(stream.get()) & GST_STREAM_FLAG_SELECT) \
                m_current##Type##StreamId = String(gst_stream_get_stream_id(stream.get())); \
        } \
    } G_STMT_END

// Notifies the player about removal of every track in `tracks` via `method`,
// then empties the map.
#define CLEAR_TRACKS(tracks, method) \
    for (auto& track : tracks.values())\
        method(*track);\
    tracks.clear();
#else
// Without VIDEO_TRACK support only remember that a track of this kind exists.
// Fixed: the original wrote `m_has##Type## = true;` — a trailing `##` with no
// right-hand token is an ill-formed paste; only one `##` is needed.
#define CREATE_TRACK(type, Type) G_STMT_START { \
        m_has##Type = true; \
    } G_STMT_END
#endif // ENABLE(VIDEO_TRACK)
125
126
#if USE(GSTREAMER_GL)
127
#include "GLVideoSinkGStreamer.h"
128
#include "VideoTextureCopierGStreamer.h"
129
130
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::NoConvert
131
#endif // USE(GSTREAMER_GL)
132
133
#if USE(TEXTURE_MAPPER_GL)
134
#include "BitmapTextureGL.h"
135
#include "BitmapTexturePool.h"
136
#include "TextureMapperContextAttributes.h"
137
#include "TextureMapperPlatformLayerBuffer.h"
138
#include "TextureMapperPlatformLayerProxy.h"
139
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
140
#include <cairo-gl.h>
141
#include "GLContext.h"
142
#include "PlatformDisplay.h"
143
// cairo-gl.h ends up including X.h, which defines None, breaking MediaPlayer:: enums.
144
#undef None
145
#endif
146
#endif // USE(TEXTURE_MAPPER_GL)
147
148
#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
149
#include "PlatformDisplayLibWPE.h"
150
#include <gst/gl/egl/gsteglimage.h>
151
#include <gst/gl/egl/gstglmemoryegl.h>
152
#include <wpe/extensions/video-plane-display-dmabuf.h>
153
#endif
154
155
GST_DEBUG_CATEGORY(webkit_media_player_debug);
156
#define GST_CAT_DEFAULT webkit_media_player_debug
157
158
namespace WebCore {
159
using namespace std;
160
161
#if USE(GSTREAMER_HOLEPUNCH)
162
static const FloatSize s_holePunchDefaultFrameSize(1280, 720);
163
#endif
164
165
// Trampoline for the GstBus "message" signal: forwards every bus message to the
// player instance that was registered as callback user data.
static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
{
    player->handleMessage(message);
}
169
170
// Rewrites http(s)/blob URLs to the "webkit+" prefixed scheme handled by the
// WebKit source element. Only needed for GStreamer older than 1.12; newer
// versions resolve the standard schemes directly, so this is a no-op there.
static void convertToInternalProtocol(URL& url)
{
    if (webkitGstCheckVersion(1, 12, 0))
        return;
    if (url.protocolIsInHTTPFamily() || url.protocolIsBlob())
        url.setProtocol("webkit+" + url.protocol());
}
177
178
#if USE(TEXTURE_MAPPER_GL)
// Holds a mapped GStreamer video frame (and its backing GstBuffer) for the
// TextureMapper. Depending on build flags the frame is either exported as a
// DMABuf, mapped as GL texture(s), or mapped into CPU memory for upload.
// The buffer stays referenced and the frame stays mapped for the holder's
// lifetime; the destructor unmaps.
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, Optional<GstVideoDecoderPlatform> videoDecoderPlatform, TextureMapperGL::Flags flags, bool gstGLEnabled)
        : m_videoDecoderPlatform(videoDecoderPlatform)
    {
        RELEASE_ASSERT(GST_IS_SAMPLE(sample));

        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_hasAlphaChannel = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo);
        m_buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!GST_IS_BUFFER(m_buffer.get())))
            return;

#if USE(GSTREAMER_GL)
        m_flags = flags | (m_hasAlphaChannel ? TextureMapperGL::ShouldBlend : 0);

#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
        // Try to export the frame as a DMABuf first; fall through to GL/CPU
        // mapping only when the export fails.
        m_dmabufFD = -1;
        gsize offset;
        GstMemory* memory = gst_buffer_peek_memory(m_buffer.get(), 0);
        if (gst_is_gl_memory_egl(memory)) {
            GstGLMemoryEGL* eglMemory = (GstGLMemoryEGL*) memory;
            gst_egl_image_export_dmabuf(eglMemory->image, &m_dmabufFD, &m_dmabufStride, &offset);
        } else if (gst_is_gl_memory(memory)) {
            GRefPtr<GstEGLImage> eglImage = adoptGRef(gst_egl_image_from_texture(GST_GL_BASE_MEMORY_CAST(memory)->context, GST_GL_MEMORY_CAST(memory), nullptr));

            if (eglImage)
                gst_egl_image_export_dmabuf(eglImage.get(), &m_dmabufFD, &m_dmabufStride, &offset);
        }

        // Some exporters report no stride (-1); recover it by mapping the frame.
        if (hasDMABuf() && m_dmabufStride == -1) {
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer.get(), GST_MAP_READ);
            if (m_isMapped)
                m_dmabufStride = GST_VIDEO_INFO_PLANE_STRIDE(&m_videoFrame.info, 0);
        }

        if (hasDMABuf() && m_dmabufStride)
            return;

        static std::once_flag s_onceFlag;
        std::call_once(s_onceFlag, [] {
            GST_WARNING("Texture export to DMABuf failed, falling back to internal rendering");
        });
#endif // USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)

        if (gstGLEnabled) {
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer.get(), static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL));
            if (m_isMapped) {
                m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
                m_hasMappedTextures = true;
            }
        } else
#else
        UNUSED_PARAM(flags);
        UNUSED_PARAM(gstGLEnabled);
#endif // USE(GSTREAMER_GL)

        // CPU fallback: map the frame readable so updateTexture() can upload it.
        // Note the block above intentionally attaches to the dangling `else`
        // when GSTREAMER_GL is enabled.
        {
            m_textureID = 0;
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer.get(), GST_MAP_READ);
            if (m_isMapped) {
                // Right now the TextureMapper only supports chromas with one plane
                ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
            }
        }
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isMapped))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
    // Hands the exported DMABuf over to the WPE video-plane display source and
    // closes our copy of the descriptor. The buffer reference is released by the
    // completion callback.
    // NOTE(review): fd 0 is treated as invalid here (`<= 0`) although POSIX
    // allows it and hasDMABuf() accepts it (`>= 0`) — confirm the intended
    // sentinel convention.
    void handoffVideoDmaBuf(struct wpe_video_plane_display_dmabuf_source* videoPlaneDisplayDmaBufSource, const IntRect& rect)
    {
        if (m_dmabufFD <= 0)
            return;

        wpe_video_plane_display_dmabuf_source_update(videoPlaneDisplayDmaBufSource, m_dmabufFD, rect.x(), rect.y(), m_size.width(), m_size.height(), m_dmabufStride, [](void* data) {
            gst_buffer_unref(GST_BUFFER_CAST(data));
        }, gst_buffer_ref(m_buffer.get()));

        close(m_dmabufFD);
        m_dmabufFD = 0;
    }
#endif

#if USE(GSTREAMER_GL)
    // Blocks until the GPU has finished producing the frame, if the buffer
    // carries a GL sync meta.
    virtual void waitForCPUSync()
    {
        GstGLSyncMeta* meta = gst_buffer_get_gl_sync_meta(m_buffer.get());
        if (meta) {
            GstMemory* mem = gst_buffer_peek_memory(m_buffer.get(), 0);
            GstGLContext* context = ((GstGLBaseMemory*)mem)->context;
            gst_gl_sync_meta_wait_cpu(meta, context);
        }
    }
#endif // USE(GSTREAMER_GL)

    const IntSize& size() const { return m_size; }
    bool hasAlphaChannel() const { return m_hasAlphaChannel; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }
    bool hasMappedTextures() const { return m_hasMappedTextures; }
    const GstVideoFrame& videoFrame() const { return m_videoFrame; }

    // Uploads the CPU-mapped frame into the given texture, preferring the
    // buffer's GL texture-upload meta when available. Only valid when the
    // holder has no GL texture of its own (CPU path).
    void updateTexture(BitmapTextureGL& texture)
    {
        ASSERT(!m_textureID);
        GstVideoGLTextureUploadMeta* meta;
        if (m_buffer && (meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer.get()))) {
            if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
                guint ids[4] = { texture.id(), 0, 0, 0 };

                if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                    return;
            }
        }

        if (!m_isMapped)
            return;

        int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0);
        const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0);

        if (!srcData)
            return;

        texture.updateContents(srcData, WebCore::IntRect(0, 0, m_size.width(), m_size.height()), WebCore::IntPoint(0, 0), stride);
    }

    // Wraps the GL-mapped textures into a TextureMapper layer buffer: a single
    // RGB texture, or a YUV texture set with the appropriate conversion matrix.
    // Returns nullptr when no GL textures are mapped or the layout is unsupported.
    std::unique_ptr<TextureMapperPlatformLayerBuffer> platformLayerBuffer()
    {
        if (!m_hasMappedTextures)
            return nullptr;

        using Buffer = TextureMapperPlatformLayerBuffer;

        if ((GST_VIDEO_INFO_IS_RGB(&m_videoFrame.info) && GST_VIDEO_INFO_N_PLANES(&m_videoFrame.info) == 1))
            return makeUnique<Buffer>(Buffer::TextureVariant { Buffer::RGBTexture { *static_cast<GLuint*>(m_videoFrame.data[0]) } }, m_size, m_flags, GL_RGBA);

        if (GST_VIDEO_INFO_IS_YUV(&m_videoFrame.info)) {
            if (GST_VIDEO_INFO_N_COMPONENTS(&m_videoFrame.info) < 3 || GST_VIDEO_INFO_N_PLANES(&m_videoFrame.info) > 3)
                return nullptr;

            if (m_videoDecoderPlatform && *m_videoDecoderPlatform == GstVideoDecoderPlatform::ImxVPU) {
                // IMX VPU decoder decodes YUV data only into the Y texture from which the sampler
                // then directly produces RGBA data. Textures for other planes aren't used, but
                // that's decoder's problem. We have to treat that Y texture as having RGBA data.
                return makeUnique<Buffer>(Buffer::TextureVariant { Buffer::RGBTexture { *static_cast<GLuint*>(m_videoFrame.data[0]) } }, m_size, m_flags, GL_RGBA);
            }

            unsigned numberOfPlanes = GST_VIDEO_INFO_N_PLANES(&m_videoFrame.info);
            std::array<GLuint, 3> planes;
            std::array<unsigned, 3> yuvPlane;
            std::array<unsigned, 3> yuvPlaneOffset;
            for (unsigned i = 0; i < numberOfPlanes; ++i)
                planes[i] = *static_cast<GLuint*>(m_videoFrame.data[i]);
            for (unsigned i = 0; i < 3; ++i) {
                yuvPlane[i] = GST_VIDEO_INFO_COMP_PLANE(&m_videoFrame.info, i);
                yuvPlaneOffset[i] = GST_VIDEO_INFO_COMP_POFFSET(&m_videoFrame.info, i);
            }

            std::array<GLfloat, 9> yuvToRgb;
            if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(&m_videoFrame.info), GST_VIDEO_COLORIMETRY_BT709)) {
                yuvToRgb = {
                    1.164f, 0.0f, 1.787f,
                    1.164f, -0.213f, -0.531f,
                    1.164f, 2.112f, 0.0f
                };
            } else {
                // Default to bt601. This is the same behaviour as GStreamer's glcolorconvert element.
                yuvToRgb = {
                    1.164f, 0.0f, 1.596f,
                    1.164f, -0.391f, -0.813f,
                    1.164f, 2.018f, 0.0f
                };
            }

            return makeUnique<Buffer>(Buffer::TextureVariant { Buffer::YUVTexture { numberOfPlanes, planes, yuvPlane, yuvPlaneOffset, yuvToRgb } }, m_size, m_flags, GL_RGBA);
        }

        return nullptr;
    }

    // True when the frame was successfully exported as a DMABuf descriptor.
    bool hasDMABuf() const
    {
#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
        return m_dmabufFD >= 0;
#else
        return false;
#endif
    }

private:
    GRefPtr<GstBuffer> m_buffer;
    GstVideoFrame m_videoFrame { };
    IntSize m_size;
    bool m_hasAlphaChannel;
    Optional<GstVideoDecoderPlatform> m_videoDecoderPlatform;
    TextureMapperGL::Flags m_flags { };
    GLuint m_textureID { 0 };
    bool m_isMapped { false };
    bool m_hasMappedTextures { false };
#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
    int m_dmabufFD { 0 };
    int m_dmabufStride { 0 };
#endif
};
#endif
396
397
static void initializeDebugCategory()
398
{
399
static std::once_flag onceFlag;
400
std::call_once(onceFlag, [] {
401
GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
402
});
403
}
404
405
// Constructs the player with all cached times invalidated; the pipeline itself
// is created lazily on load. Timers run on the main run loop.
MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_cachedPosition(MediaTime::invalidTime())
    , m_cachedDuration(MediaTime::invalidTime())
    , m_seekTime(MediaTime::invalidTime())
    , m_timeOfOverlappingSeek(MediaTime::invalidTime())
    , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
    , m_maxTimeLoaded(MediaTime::zeroTime())
    , m_preload(player->preload())
    , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::zeroTime())
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::repaint)
    , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
#if USE(TEXTURE_MAPPER_GL)
#if USE(NICOSIA)
    , m_nicosiaLayer(Nicosia::ContentLayer::create(Nicosia::ContentLayerTextureMapperImpl::createFactory(*this)))
#else
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
#endif
{
#if USE(GLIB)
    // The ready timer is housekeeping; let it run only when the loop is idle.
    m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
#endif
    m_isPlayerShuttingDown.store(false);

#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
    auto& sharedDisplay = PlatformDisplay::sharedDisplay();
    if (is<PlatformDisplayLibWPE>(sharedDisplay))
        m_wpeVideoPlaneDisplayDmaBuf.reset(wpe_video_plane_display_dmabuf_source_create(downcast<PlatformDisplayLibWPE>(sharedDisplay).backend()));
#endif
}
438
439
// Tears the player down: disconnects tracks and every signal handler wired to
// this object, unblocks threads waiting on repaint/CDM conditions, and finally
// drives the pipeline to NULL so no GStreamer-thread handler can run afterwards.
MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
{
    GST_DEBUG_OBJECT(pipeline(), "Disposing player");
    m_isPlayerShuttingDown.store(true);

#if ENABLE(VIDEO_TRACK)
    for (auto& track : m_audioTracks.values())
        track->disconnect();

    for (auto& track : m_textTracks.values())
        track->disconnect();

    for (auto& track : m_videoTracks.values())
        track->disconnect();
#endif
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    if (m_mediaLocations) {
        gst_structure_free(m_mediaLocations);
        m_mediaLocations = nullptr;
    }

    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    if (m_autoAudioSink) {
        g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
            reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
    }

    m_readyTimerHandler.stop();
    for (auto& missingPluginCallback : m_missingPluginCallbacks) {
        if (missingPluginCallback)
            missingPluginCallback->invalidate();
    }
    m_missingPluginCallbacks.clear();

    if (m_videoSink) {
        GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
        g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }

    if (m_pipeline) {
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
        gst_bus_remove_signal_watch(bus.get());
        gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
        g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }

#if USE(GSTREAMER_GL)
    if (m_videoDecoderPlatform == GstVideoDecoderPlatform::Video4Linux)
        flushCurrentBuffer();
#endif
#if USE(TEXTURE_MAPPER_GL) && USE(NICOSIA)
    downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).invalidateClient();
#endif

    m_notifier->invalidate();

    if (m_videoSink)
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition in non AC mode in case there's an ongoing triggerRepaint call
    // waiting there, and ensure that any triggerRepaint call reaching the lock won't wait on m_drawCondition.
    cancelRepaint(true);

#if ENABLE(ENCRYPTED_MEDIA)
    {
        LockHolder lock(m_cdmAttachmentMutex);
        m_cdmAttachmentCondition.notifyAll();
    }
#endif

    // The change to GST_STATE_NULL state is always synchronous. So after this gets executed we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    m_player = nullptr;
}
525
526
bool MediaPlayerPrivateGStreamer::isAvailable()
527
{
528
if (!initializeGStreamerAndRegisterWebKitElements())
529
return false;
530
531
// FIXME: This has not been updated for the playbin3 switch.
532
GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
533
return factory;
534
}
535
536
// MediaPlayerFactory glue: identifies the engine and forwards creation and
// capability queries to MediaPlayerPrivateGStreamer's static entry points.
class MediaPlayerFactoryGStreamer final : public MediaPlayerFactory {
private:
    MediaPlayerEnums::MediaEngineIdentifier identifier() const final { return MediaPlayerEnums::MediaEngineIdentifier::GStreamer; };

    std::unique_ptr<MediaPlayerPrivateInterface> createMediaEnginePlayer(MediaPlayer* player) const final
    {
        return makeUnique<MediaPlayerPrivateGStreamer>(player);
    }

    void getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types) const final
    {
        return MediaPlayerPrivateGStreamer::getSupportedTypes(types);
    }

    MediaPlayer::SupportsType supportsTypeAndCodecs(const MediaEngineSupportParameters& parameters) const final
    {
        return MediaPlayerPrivateGStreamer::supportsType(parameters);
    }

    bool supportsKeySystem(const String& keySystem, const String& mimeType) const final
    {
        return MediaPlayerPrivateGStreamer::supportsKeySystem(keySystem, mimeType);
    }
};
560
561
// Registers this engine with the global MediaPlayer registry, but only when the
// GStreamer backend is actually usable on this system.
void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
{
    initializeDebugCategory();

    if (isAvailable())
        registrar(makeUnique<MediaPlayerFactoryGStreamer>());
}
568
569
void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const String& pipelineName)
570
{
571
if (m_player->contentMIMEType() == "image/gif") {
572
loadingFailed(MediaPlayer::NetworkState::FormatError, MediaPlayer::ReadyState::HaveNothing, true);
573
return;
574
}
575
576
URL url(URL(), urlString);
577
if (url.protocolIsAbout()) {
578
loadingFailed(MediaPlayer::NetworkState::FormatError, MediaPlayer::ReadyState::HaveNothing, true);
579
return;
580
}
581
582
if (!m_pipeline)
583
createGSTPlayBin(url, pipelineName);
584
syncOnClock(true);
585
if (m_fillTimer.isActive())
586
m_fillTimer.stop();
587
588
ASSERT(m_pipeline);
589
590
setPlaybinURL(url);
591
592
GST_DEBUG_OBJECT(pipeline(), "preload: %s", convertEnumerationToString(m_preload).utf8().data());
593
if (m_preload == MediaPlayer::Preload::None) {
594
GST_INFO_OBJECT(pipeline(), "Delaying load.");
595
m_isDelayingLoad = true;
596
}
597
598
// Reset network and ready states. Those will be set properly once
599
// the pipeline pre-rolled.
600
m_networkState = MediaPlayer::NetworkState::Loading;
601
m_player->networkStateChanged();
602
m_readyState = MediaPlayer::ReadyState::HaveNothing;
603
m_player->readyStateChanged();
604
m_areVolumeAndMuteInitialized = false;
605
m_hasTaintedOrigin = WTF::nullopt;
606
607
if (!m_isDelayingLoad)
608
commitLoad();
609
}
610
611
void MediaPlayerPrivateGStreamer::load(const String& urlString)
612
{
613
loadFull(urlString, String());
614
}
615
616
#if ENABLE(MEDIA_SOURCE)
// MSE is handled by a dedicated MediaPlayerPrivate; this overload deliberately
// fails so the global MediaPlayer falls back to the next engine.
void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
{
    // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
    m_networkState = MediaPlayer::NetworkState::FormatError;
    m_player->networkStateChanged();
}
#endif
624
625
#if ENABLE(MEDIA_STREAM)
// Loads a live MediaStream through a uniquely named "mediastream-N" pipeline,
// disables clock sync (live source) and starts playback immediately.
void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate& stream)
{
    m_streamPrivate = &stream;
    static Atomic<uint32_t> pipelineId;
    auto pipelineName = makeString("mediastream-", pipelineId.exchangeAdd(1));

    loadFull(String("mediastream://") + stream.id(), pipelineName);
    syncOnClock(false);

    m_player->play();
}
#endif
638
639
void MediaPlayerPrivateGStreamer::cancelLoad()
640
{
641
if (m_networkState < MediaPlayer::NetworkState::Loading || m_networkState == MediaPlayer::NetworkState::Loaded)
642
return;
643
644
if (m_pipeline)
645
changePipelineState(GST_STATE_READY);
646
}
647
648
void MediaPlayerPrivateGStreamer::prepareToPlay()
649
{
650
GST_DEBUG_OBJECT(pipeline(), "Prepare to play");
651
m_preload = MediaPlayer::Preload::Auto;
652
if (m_isDelayingLoad) {
653
m_isDelayingLoad = false;
654
commitLoad();
655
}
656
}
657
658
void MediaPlayerPrivateGStreamer::play()
659
{
660
if (!m_playbackRate) {
661
m_isPlaybackRatePaused = true;
662
return;
663
}
664
665
if (changePipelineState(GST_STATE_PLAYING)) {
666
m_isEndReached = false;
667
m_isDelayingLoad = false;
668
m_preload = MediaPlayer::Preload::Auto;
669
updateDownloadBufferingFlag();
670
GST_INFO_OBJECT(pipeline(), "Play");
671
} else
672
loadingFailed(MediaPlayer::NetworkState::Empty);
673
}
674
675
void MediaPlayerPrivateGStreamer::pause()
676
{
677
m_isPlaybackRatePaused = false;
678
GstState currentState, pendingState;
679
gst_element_get_state(m_pipeline.get(), ¤tState, &pendingState, 0);
680
if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
681
return;
682
683
if (changePipelineState(GST_STATE_PAUSED))
684
GST_INFO_OBJECT(pipeline(), "Pause");
685
else
686
loadingFailed(MediaPlayer::NetworkState::Empty);
687
}
688
689
bool MediaPlayerPrivateGStreamer::paused() const
690
{
691
if (!m_pipeline)
692
return true;
693
694
if (m_isEndReached) {
695
GST_DEBUG_OBJECT(pipeline(), "Ignoring pause at EOS");
696
return true;
697
}
698
699
if (m_isPlaybackRatePaused) {
700
GST_DEBUG_OBJECT(pipeline(), "Playback rate is 0, simulating PAUSED state");
701
return false;
702
}
703
704
GstState state;
705
gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
706
bool paused = state <= GST_STATE_PAUSED;
707
GST_LOG_OBJECT(pipeline(), "Paused: %s", toString(paused).utf8().data());
708
return paused;
709
}
710
711
bool MediaPlayerPrivateGStreamer::doSeek(const MediaTime& position, float rate, GstSeekFlags seekType)
712
{
713
// Default values for rate >= 0.
714
MediaTime startTime = position, endTime = MediaTime::invalidTime();
715
716
if (rate < 0) {
717
startTime = MediaTime::zeroTime();
718
// If we are at beginning of media, start from the end to avoid immediate EOS.
719
endTime = position < MediaTime::zeroTime() ? durationMediaTime() : position;
720
}
721
722
if (!rate)
723
rate = 1.0;
724
725
return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
726
GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime));
727
}
728
729
void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
730
{
731
if (!m_pipeline || m_didErrorOccur)
732
return;
733
734
GST_INFO_OBJECT(pipeline(), "[Seek] seek attempt to %s", toString(mediaTime).utf8().data());
735
736
// Avoid useless seeking.
737
if (mediaTime == currentMediaTime()) {
738
GST_DEBUG_OBJECT(pipeline(), "[Seek] seek to EOS position unhandled");
739
return;
740
}
741
742
MediaTime time = std::min(mediaTime, durationMediaTime());
743
744
if (m_isLiveStream) {
745
GST_DEBUG_OBJECT(pipeline(), "[Seek] Live stream seek unhandled");
746
return;
747
}
748
749
GST_INFO_OBJECT(pipeline(), "[Seek] seeking to %s", toString(time).utf8().data());
750
751
if (m_isSeeking) {
752
m_timeOfOverlappingSeek = time;
753
if (m_isSeekPending) {
754
m_seekTime = time;
755
return;
756
}
757
}
758
759
GstState state;
760
GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
761
if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
762
GST_DEBUG_OBJECT(pipeline(), "[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
763
return;
764
}
765
if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
766
m_isSeekPending = true;
767
if (m_isEndReached) {
768
GST_DEBUG_OBJECT(pipeline(), "[Seek] reset pipeline");
769
m_shouldResetPipeline = true;
770
if (!changePipelineState(GST_STATE_PAUSED))
771
loadingFailed(MediaPlayer::NetworkState::Empty);
772
}
773
} else {
774
// We can seek now.
775
if (!doSeek(time, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
776
GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(time).utf8().data());
777
return;
778
}
779
}
780
781
m_isSeeking = true;
782
m_seekTime = time;
783
m_isEndReached = false;
784
}
785
786
void MediaPlayerPrivateGStreamer::updatePlaybackRate()
787
{
788
if (!m_isChangingRate)
789
return;
790
791
GST_INFO_OBJECT(pipeline(), "Set Rate to %f", m_playbackRate);
792
793
// Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
794
bool mute = m_playbackRate <= 0 || (!m_shouldPreservePitch && (m_playbackRate < 0.8 || m_playbackRate > 2));
795
796
GST_INFO_OBJECT(pipeline(), mute ? "Need to mute audio" : "Do not need to mute audio");
797
798
if (doSeek(playbackPosition(), m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
799
g_object_set(m_pipeline.get(), "mute", mute, nullptr);
800
m_lastPlaybackRate = m_playbackRate;
801
} else {
802
m_playbackRate = m_lastPlaybackRate;
803
GST_ERROR("Set rate to %f failed", m_playbackRate);
804
}
805
806
if (m_isPlaybackRatePaused) {
807
GstState state, pending;
808
809
gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
810
if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
811
changePipelineState(GST_STATE_PLAYING);
812
m_isPlaybackRatePaused = false;
813
}
814
815
m_isChangingRate = false;
816
m_player->rateChanged();
817
}
818
819
// Returns the media duration, caching the first valid value queried from the
// platform. Unknown/invalid durations are reported as zero.
MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
{
    GST_TRACE_OBJECT(pipeline(), "Cached duration: %s", m_cachedDuration.toString().utf8().data());
    if (m_cachedDuration.isValid())
        return m_cachedDuration;

    MediaTime duration = platformDuration();
    if (!duration || duration.isInvalid())
        return MediaTime::zeroTime();

    m_cachedDuration = duration;

    return m_cachedDuration;
}
833
834
// Current playback position. While a seek is in flight the seek target is
// reported so the UI doesn't jump back; errors yield an invalid time.
MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
{
    if (!m_pipeline || m_didErrorOccur)
        return MediaTime::invalidTime();

    GST_TRACE_OBJECT(pipeline(), "seeking: %s, seekTime: %s", boolForPrinting(m_isSeeking), m_seekTime.toString().utf8().data());
    if (m_isSeeking)
        return m_seekTime;

    return playbackPosition();
}
845
846
void MediaPlayerPrivateGStreamer::setRate(float rate)
847
{
848
float rateClamped = clampTo(rate, -20.0, 20.0);
849
if (rateClamped != rate)
850
GST_WARNING("Clamping original rate (%f) to [-20, 20] (%f), higher rates cause crashes", rate, rateClamped);
851
852
// Avoid useless playback rate update.
853
if (m_playbackRate == rateClamped) {
854
// And make sure that upper layers were notified if rate was set.
855
856
if (!m_isChangingRate && m_player->rate() != m_playbackRate)
857
m_player->rateChanged();
858
return;
859
}
860
861
if (m_isLiveStream) {
862
// Notify upper layers that we cannot handle passed rate.
863
m_isChangingRate = false;
864
m_player->rateChanged();
865
return;
866
}
867
868
GstState state, pending;
869
870
m_playbackRate = rateClamped;
871
m_isChangingRate = true;
872
873
gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
874
875
if (!rateClamped) {
876
m_isChangingRate = false;
877
m_isPlaybackRatePaused = true;
878
if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
879
changePipelineState(GST_STATE_PAUSED);
880
return;
881
}
882
883
if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
884
|| (pending == GST_STATE_PAUSED))
885
return;
886
887
updatePlaybackRate();
888
}
889
890
double MediaPlayerPrivateGStreamer::rate() const
891
{
892
return m_playbackRate;
893
}
894
895
void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
896
{
897
m_shouldPreservePitch = preservesPitch;
898
}
899
900
// Updates the preload policy. Auto-preload is meaningless for live streams and
// is ignored. Commits a load that was deferred by preload=none once preload
// allows it.
void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
{
    GST_DEBUG_OBJECT(pipeline(), "Setting preload to %s", convertEnumerationToString(preload).utf8().data());
    if (preload == MediaPlayer::Preload::Auto && m_isLiveStream)
        return;

    m_preload = preload;
    updateDownloadBufferingFlag();

    if (m_isDelayingLoad && m_preload != MediaPlayer::Preload::None) {
        m_isDelayingLoad = false;
        commitLoad();
    }
}
914
915
// Returns the buffered time ranges, derived from a GStreamer buffering query
// expressed in percent of the media duration. Empty for errored/live/unknown-
// duration media; falls back to [0, maxTimeLoaded()] when the query yields
// no usable range.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
{
    auto timeRanges = makeUnique<PlatformTimeRanges>();
    if (m_didErrorOccur || m_isLiveStream)
        return timeRanges;

    MediaTime mediaDuration = durationMediaTime();
    if (!mediaDuration || mediaDuration.isPositiveInfinite())
        return timeRanges;

    GRefPtr<GstQuery> query = adoptGRef(gst_query_new_buffering(GST_FORMAT_PERCENT));

    if (!gst_element_query(m_pipeline.get(), query.get()))
        return timeRanges;

    unsigned numBufferingRanges = gst_query_get_n_buffering_ranges(query.get());
    for (unsigned index = 0; index < numBufferingRanges; index++) {
        gint64 rangeStart = 0, rangeStop = 0;
        if (gst_query_parse_nth_buffering_range(query.get(), index, &rangeStart, &rangeStop)) {
            // Convert percent units ([0, GST_FORMAT_PERCENT_MAX]) to media time.
            uint64_t startTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStart, GST_FORMAT_PERCENT_MAX);
            uint64_t stopTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStop, GST_FORMAT_PERCENT_MAX);
            timeRanges->add(MediaTime(startTime, GST_SECOND), MediaTime(stopTime, GST_SECOND));
        }
    }

    // Fallback to the more general maxTimeLoaded() if no range has been found.
    if (!timeRanges->length()) {
        MediaTime loaded = maxTimeLoaded();
        if (loaded.isValid() && loaded)
            timeRanges->add(MediaTime::zeroTime(), loaded);
    }

    return timeRanges;
}
// Maximum position the user can seek to: the media duration, or zero when
// seeking is meaningless (error, live stream, or infinite duration — which
// also indicates a live stream).
MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
{
    GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, isLiveStream: %s", boolForPrinting(m_didErrorOccur), boolForPrinting(m_isLiveStream));
    if (m_didErrorOccur)
        return MediaTime::zeroTime();

    if (m_isLiveStream)
        return MediaTime::zeroTime();

    MediaTime duration = durationMediaTime();
    GST_DEBUG_OBJECT(pipeline(), "maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
    // Infinite duration means live stream.
    if (duration.isPositiveInfinite())
        return MediaTime::zeroTime();

    return duration;
}
// Furthest media time for which data has been loaded. Once end-of-stream has
// been reached everything is by definition loaded, so the duration is
// reported; zero after an error.
MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
{
    if (m_didErrorOccur)
        return MediaTime::zeroTime();

    MediaTime loaded = m_maxTimeLoaded;
    if (m_isEndReached)
        loaded = durationMediaTime();
    GST_LOG_OBJECT(pipeline(), "maxTimeLoaded: %s", toString(loaded).utf8().data());
    return loaded;
}
bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
981
{
982
if (m_didErrorOccur || m_loadingStalled)
983
return false;
984
985
if (WEBKIT_IS_WEB_SRC(m_source.get())) {
986
GST_LOG_OBJECT(pipeline(), "Last network read position: %" G_GUINT64_FORMAT ", current: %" G_GUINT64_FORMAT, m_readPositionAtLastDidLoadingProgress, m_networkReadPosition);
987
bool didLoadingProgress = m_readPositionAtLastDidLoadingProgress < m_networkReadPosition;
988
m_readPositionAtLastDidLoadingProgress = m_networkReadPosition;
989
GST_LOG_OBJECT(pipeline(), "didLoadingProgress: %s", boolForPrinting(didLoadingProgress));
990
return didLoadingProgress;
991
}
992
993
if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
994
return false;
995
996
MediaTime currentMaxTimeLoaded = maxTimeLoaded();
997
bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
998
m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
999
GST_LOG_OBJECT(pipeline(), "didLoadingProgress: %s", boolForPrinting(didLoadingProgress));
1000
return didLoadingProgress;
1001
}
1002
1003
unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
1004
{
1005
if (m_didErrorOccur || !m_source || m_isLiveStream)
1006
return 0;
1007
1008
if (m_totalBytes)
1009
return m_totalBytes;
1010
1011
GstFormat fmt = GST_FORMAT_BYTES;
1012
gint64 length = 0;
1013
if (gst_element_query_duration(m_source.get(), fmt, &length)) {
1014
GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
1015
m_totalBytes = static_cast<unsigned long long>(length);
1016
m_isLiveStream = !length;
1017
return m_totalBytes;
1018
}
1019
1020
// Fall back to querying the source pads manually. See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
1021
GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
1022
bool done = false;
1023
while (!done) {
1024
GValue item = G_VALUE_INIT;
1025
switch (gst_iterator_next(iter, &item)) {
1026
case GST_ITERATOR_OK: {
1027
GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
1028
gint64 padLength = 0;
1029
if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
1030
length = padLength;
1031
break;
1032
}
1033
case GST_ITERATOR_RESYNC:
1034
gst_iterator_resync(iter);
1035
break;
1036
case GST_ITERATOR_ERROR:
1037
FALLTHROUGH;
1038
case GST_ITERATOR_DONE:
1039
done = true;
1040
break;
1041
}
1042
1043
g_value_unset(&item);
1044
}
1045
1046
gst_iterator_free(iter);
1047
1048
GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
1049
m_totalBytes = static_cast<unsigned long long>(length);
1050
m_isLiveStream = !length;
1051
return m_totalBytes;
1052
}
1053
1054
bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
1055
{
1056
if (!m_source)
1057
return false;
1058
1059
if (!WEBKIT_IS_WEB_SRC(m_source.get()))
1060
return true;
1061
1062
GUniqueOutPtr<char> originalURI, resolvedURI;
1063
g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
1064
if (!originalURI || !resolvedURI)
1065
return false;
1066
if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
1067
return true;
1068
1069
Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
1070
Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
1071
return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
1072
}
1073
1074
// Determines whether exposing this media to the given origin would taint it.
// With GStreamer >= 1.12 the collected response origins are checked against
// the requesting origin; older versions fall back to the cached flag because
// their uridownloader cannot route adaptive-streaming fragments through
// WebKitWebSrc.
Optional<bool> MediaPlayerPrivateGStreamer::wouldTaintOrigin(const SecurityOrigin& origin) const
{
    if (webkitGstCheckVersion(1, 12, 0)) {
        GST_TRACE_OBJECT(pipeline(), "Checking %u origins", m_origins.size());
        for (auto& responseOrigin : m_origins) {
            if (!origin.canAccess(*responseOrigin)) {
                GST_DEBUG_OBJECT(pipeline(), "Found reachable response origin");
                return true;
            }
        }
        GST_DEBUG_OBJECT(pipeline(), "No valid response origin found");
        return false;
    }

    // GStreamer < 1.12 has an incomplete uridownloader implementation so we
    // can't use WebKitWebSrc for adaptive fragments downloading if this
    // version is detected.
    UNUSED_PARAM(origin);
    return m_hasTaintedOrigin;
}
void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
1096
{
1097
GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
1098
gst_element_post_message(m_pipeline.get(), message);
1099
}
1100
1101
#if ENABLE(WEB_AUDIO)
// Lazily creates the provider that feeds decoded audio into Web Audio.
void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
{
    if (!m_audioSourceProvider)
        m_audioSourceProvider = makeUnique<AudioSourceProviderGStreamer>();
}

// Accessor used by the Web Audio backend; never returns null.
AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
{
    ensureAudioSourceProvider();
    return m_audioSourceProvider.get();
}
#endif
void MediaPlayerPrivateGStreamer::durationChanged()
1116
{
1117
MediaTime previousDuration = durationMediaTime();
1118
m_cachedDuration = MediaTime::invalidTime();
1119
1120
// Avoid emitting durationChanged in the case where the previous
1121
// duration was 0 because that case is already handled by the
1122
// HTMLMediaElement.
1123
if (previousDuration && durationMediaTime() != previousDuration)
1124
m_player->durationChanged();
1125
}
1126
1127
// Called when playbin/uridecodebin instantiates its source element. Rewires
// the WebKitWebSrc <-> player association (disconnecting any signal handler
// attached to the previous source's parent first) and, for MediaStream
// sources, hands the stream description over to the element.
void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement)
{
    GST_DEBUG_OBJECT(pipeline(), "Source element set-up for %s", GST_ELEMENT_NAME(sourceElement));

    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    m_source = sourceElement;

    if (WEBKIT_IS_WEB_SRC(m_source.get())) {
        webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC_CAST(m_source.get()), m_player);
        g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
#if ENABLE(MEDIA_STREAM)
    } else if (WEBKIT_IS_MEDIA_STREAM_SRC(sourceElement)) {
        auto stream = m_streamPrivate.get();
        ASSERT(stream);
        webkitMediaStreamSrcSetStream(WEBKIT_MEDIA_STREAM_SRC(sourceElement), stream);
#endif
    }
}
// Trampoline for the autoaudiosink "child-added" signal (swapped connection:
// the player instance arrives first).
void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
{
    player->setAudioStreamProperties(object);
}
// Tags PulseAudio sinks with a media.role stream property ("video" or
// "music") so the sound server can apply per-role routing/volume policies.
// Other sink types are left untouched.
void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
{
    if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
        return;

    const char* role = m_player->isVideoPlayer() ? "video" : "music";
    GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
    g_object_set(object, "stream-properties", structure, nullptr);
    gst_structure_free(structure);
    GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
    GST_DEBUG_OBJECT(pipeline(), "Set media.role as %s at %s", role, elementName.get());
}
// Trampoline for playbin's "source-setup" signal.
void MediaPlayerPrivateGStreamer::sourceSetupCallback(MediaPlayerPrivateGStreamer* player, GstElement* sourceElement)
{
    player->sourceSetup(sourceElement);
}
// Requests a pipeline state change, returning false only on synchronous
// failure. Redundant requests (state already current or pending) are skipped.
// Entering READY arms a one-shot timer so resources are released if the
// pipeline lingers there; any other target state cancels it.
// NOTE: the original text had "&currentState" mangled into the HTML entity
// "&curren;" + "tState"; restored here.
bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
{
    ASSERT(m_pipeline);

    GstState currentState, pending;

    gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
    if (currentState == newState || pending == newState) {
        GST_DEBUG_OBJECT(pipeline(), "Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
            gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
        return true;
    }

    GST_DEBUG_OBJECT(pipeline(), "Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
        gst_element_state_get_name(currentState), gst_element_state_get_name(pending));

    GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
    GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
    if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE)
        return false;

    // Create a timer when entering the READY state so that we can free resources if we stay for too long on READY.
    // Also lets remove the timer if we request a state change for any state other than READY. See also https://bugs.webkit.org/show_bug.cgi?id=117354
    if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
        // Max interval in seconds to stay in the READY state on manual state change requests.
        static const Seconds readyStateTimerDelay { 1_min };
        m_readyTimerHandler.startOneShot(readyStateTimerDelay);
    } else if (newState != GST_STATE_READY)
        m_readyTimerHandler.stop();

    return true;
}
void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url)
1205
{
1206
// Clean out everything after file:// url path.
1207
String cleanURLString(url.string());
1208
if (url.isLocalFile())
1209
cleanURLString = cleanURLString.substring(0, url.pathEnd());
1210
1211
m_url = URL(URL(), cleanURLString);
1212
convertToInternalProtocol(m_url);
1213
GST_INFO_OBJECT(pipeline(), "Load %s", m_url.string().utf8().data());
1214
g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
1215
}
1216
1217
// Recursively sets the "sync" property on an element, descending into bins so
// every leaf sink is reached. Restarts iteration on GST_ITERATOR_RESYNC.
static void setSyncOnClock(GstElement *element, bool sync)
{
    if (!GST_IS_BIN(element)) {
        g_object_set(element, "sync", sync, NULL);
        return;
    }

    GstIterator* it = gst_bin_iterate_sinks(GST_BIN(element));
    while (gst_iterator_foreach(it, (GstIteratorForeachFunction)([](const GValue* item, void* syncPtr) {
        bool* sync = static_cast<bool*>(syncPtr);
        setSyncOnClock(GST_ELEMENT(g_value_get_object(item)), *sync);
    }), &sync) == GST_ITERATOR_RESYNC)
        gst_iterator_resync(it);
    gst_iterator_free(it);
}
void MediaPlayerPrivateGStreamer::syncOnClock(bool sync)
1234
{
1235
setSyncOnClock(videoSink(), sync);
1236
setSyncOnClock(audioSink(), sync);
1237
}
1238
1239
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
1240
{
1241
if (UNLIKELY(!m_pipeline || !m_source))
1242
return;
1243
1244
ASSERT(m_isLegacyPlaybin || isMediaSource());
1245
1246
unsigned numTracks = 0;
1247
bool useMediaSource = isMediaSource();
1248
GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
1249
g_object_get(element, "n-video", &numTracks, nullptr);
1250
1251
GST_INFO_OBJECT(pipeline(), "Media has %d video tracks", numTracks);
1252
1253
bool oldHasVideo = m_hasVideo;
1254
m_hasVideo = numTracks > 0;
1255
if (oldHasVideo != m_hasVideo)
1256
m_player->characteristicChanged();
1257
1258
if (m_hasVideo)
1259
m_player->sizeChanged();
1260
1261
if (useMediaSource) {
1262
GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
1263
m_player->mediaEngineUpdated();
1264
return;
1265
}
1266
1267
#if ENABLE(VIDEO_TRACK)
1268
Vector<String> validVideoStreams;
1269
for (unsigned i = 0; i < numTracks; ++i) {
1270
GRefPtr<GstPad> pad;
1271
g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
1272
ASSERT(pad);
1273
1274
String streamId = "V" + String::number(i);
1275
validVideoStreams.append(streamId);
1276
if (i < m_videoTracks.size()) {
1277
RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks.get(streamId);
1278
if (existingTrack) {
1279
existingTrack->setIndex(i);
1280
if (existingTrack->pad() == pad)
1281
continue;
1282
}
1283
}
1284
1285
auto track = VideoTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
1286
ASSERT(streamId == track->id());
1287
m_videoTracks.add(streamId, track.copyRef());
1288
m_player->addVideoTrack(track.get());
1289
}
1290
1291
purgeInvalidVideoTracks(validVideoStreams);
1292
#endif
1293
1294
m_player->mediaEngineUpdated();
1295
}
1296
1297
// Sink-caps-changed signal trampoline; bounces the notification to the main
// thread before touching player state.
void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] {
        player->notifyPlayerOfVideoCaps();
    });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
1305
{
1306
m_videoSize = IntSize();
1307
m_player->mediaEngineUpdated();
1308
}
1309
1310
// Audio-tracks-changed signal trampoline; defers the real work to the main
// thread.
void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] {
        player->notifyPlayerOfAudio();
    });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
1318
{
1319
if (UNLIKELY(!m_pipeline || !m_source))
1320
return;
1321
1322
ASSERT(m_isLegacyPlaybin || isMediaSource());
1323
1324
unsigned numTracks = 0;
1325
bool useMediaSource = isMediaSource();
1326
GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
1327
g_object_get(element, "n-audio", &numTracks, nullptr);
1328
1329
GST_INFO_OBJECT(pipeline(), "Media has %d audio tracks", numTracks);
1330
bool oldHasAudio = m_hasAudio;
1331
m_hasAudio = numTracks > 0;
1332
if (oldHasAudio != m_hasAudio)
1333
m_player->characteristicChanged();
1334
1335
if (useMediaSource) {
1336
GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
1337
m_player->mediaEngineUpdated();
1338
return;
1339
}
1340
1341
#if ENABLE(VIDEO_TRACK)
1342
Vector<String> validAudioStreams;
1343
for (unsigned i = 0; i < numTracks; ++i) {
1344
GRefPtr<GstPad> pad;
1345
g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
1346
ASSERT(pad);
1347
1348
String streamId = "A" + String::number(i);
1349
validAudioStreams.append(streamId);
1350
if (i < m_audioTracks.size()) {
1351
RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks.get(streamId);
1352
if (existingTrack) {
1353
existingTrack->setIndex(i);
1354
if (existingTrack->pad() == pad)
1355
continue;
1356
}
1357
}
1358
1359
auto track = AudioTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
1360
ASSERT(streamId == track->id());
1361
m_audioTracks.add(streamId, track);
1362
m_player->addAudioTrack(*track);
1363
}
1364
1365
purgeInvalidAudioTracks(validAudioStreams);
1366
#endif
1367
1368
m_player->mediaEngineUpdated();
1369
}
1370
1371
#if ENABLE(VIDEO_TRACK)
1372
// Text-tracks-changed signal trampoline; defers the real work to the main
// thread.
void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::TextChanged, [player] {
        player->notifyPlayerOfText();
    });
}
void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
1380
{
1381
if (UNLIKELY(!m_pipeline || !m_source))
1382
return;
1383
1384
ASSERT(m_isLegacyPlaybin || isMediaSource());
1385
1386
unsigned numTracks = 0;
1387
bool useMediaSource = isMediaSource();
1388
GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
1389
g_object_get(element, "n-text", &numTracks, nullptr);
1390
1391
GST_INFO_OBJECT(pipeline(), "Media has %d text tracks", numTracks);
1392
1393
if (useMediaSource) {
1394
GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
1395
return;
1396
}
1397
1398
Vector<String> validTextStreams;
1399
for (unsigned i = 0; i < numTracks; ++i) {
1400
GRefPtr<GstPad> pad;
1401
g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
1402
ASSERT(pad);
1403
1404
// We can't assume the pad has a sticky event here like implemented in
1405
// InbandTextTrackPrivateGStreamer because it might be emitted after the
1406
// track was created. So fallback to a dummy stream ID like in the Audio
1407
// and Video tracks.
1408
String streamId = "T" + String::number(i);
1409
1410
validTextStreams.append(streamId);
1411
if (i < m_textTracks.size()) {
1412
RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks.get(streamId);
1413
if (existingTrack) {
1414
existingTrack->setIndex(i);
1415
if (existingTrack->pad() == pad)
1416
continue;
1417
}
1418
}
1419
1420
auto track = InbandTextTrackPrivateGStreamer::create(i, pad);
1421
m_textTracks.add(streamId, track.copyRef());
1422
m_player->addTextTrack(track.get());
1423
}
1424
1425
purgeInvalidTextTracks(validTextStreams);
1426
}
1427
1428
// appsink "new-sample" trampoline for the text sink; always reports success.
GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
{
    player->newTextSample();
    return GST_FLOW_OK;
}
void MediaPlayerPrivateGStreamer::newTextSample()
1435
{
1436
if (!m_textAppSink)
1437
return;
1438
1439
GRefPtr<GstEvent> streamStartEvent = adoptGRef(
1440
gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
1441
1442
GRefPtr<GstSample> sample;
1443
g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
1444
ASSERT(sample);
1445
1446
if (streamStartEvent) {
1447
bool found = FALSE;
1448
const gchar* id;
1449
gst_event_parse_stream_start(streamStartEvent.get(), &id);
1450
for (auto& track : m_textTracks.values()) {
1451
if (!strcmp(track->streamId().utf8().data(), id)) {
1452
track->handleSample(sample);
1453
found = true;
1454
break;
1455
}
1456
}
1457
if (!found)
1458
GST_WARNING("Got sample with unknown stream ID %s.", id);
1459
} else
1460
GST_WARNING("Unable to handle sample with no stream start event.");
1461
}
1462
#endif
1463
1464
// Queries the pipeline for the media duration. Invalid before preroll or
// after an error; a failed/invalid TIME query is reported as +infinity
// (treated as a live stream by callers).
MediaTime MediaPlayerPrivateGStreamer::platformDuration() const
{
    if (!m_pipeline)
        return MediaTime::invalidTime();

    GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, pipeline state: %s", boolForPrinting(m_didErrorOccur), gst_element_state_get_name(GST_STATE(m_pipeline.get())));
    if (m_didErrorOccur)
        return MediaTime::invalidTime();

    // The duration query would fail on a not-prerolled pipeline.
    if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
        return MediaTime::invalidTime();

    int64_t duration = 0;
    if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &duration) || !GST_CLOCK_TIME_IS_VALID(duration)) {
        GST_DEBUG_OBJECT(pipeline(), "Time duration query failed for %s", m_url.string().utf8().data());
        return MediaTime::positiveInfiniteTime();
    }

    GST_LOG_OBJECT(pipeline(), "Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
    return MediaTime(duration, GST_SECOND);
}
1487
bool MediaPlayerPrivateGStreamer::isMuted() const
1488
{
1489
if (!m_volumeElement)
1490
return false;
1491
1492
gboolean isMuted;
1493
g_object_get(m_volumeElement.get(), "mute", &isMuted, nullptr);
1494
GST_INFO_OBJECT(pipeline(), "Player is muted: %s", boolForPrinting(!!isMuted));
1495
return isMuted;
1496
}
1497
1498
void MediaPlayerPrivateGStreamer::commitLoad()
1499
{
1500
ASSERT(!m_isDelayingLoad);
1501
GST_DEBUG_OBJECT(pipeline(), "Committing load.");
1502
1503
// GStreamer needs to have the pipeline set to a paused state to
1504
// start providing anything useful.
1505
changePipelineState(GST_STATE_PAUSED);
1506
1507
updateDownloadBufferingFlag();
1508
updateStates();
1509
}
1510
1511
void MediaPlayerPrivateGStreamer::fillTimerFired()
1512
{
1513
if (m_didErrorOccur) {
1514
GST_DEBUG_OBJECT(pipeline(), "[Buffering] An error occurred, disabling the fill timer");
1515
m_fillTimer.stop();
1516
return;
1517
}
1518
1519
GRefPtr<GstQuery> query = adoptGRef(gst_query_new_buffering(GST_FORMAT_PERCENT));
1520
double fillStatus = 100.0;
1521
GstBufferingMode mode = GST_BUFFERING_DOWNLOAD;
1522
1523
if (gst_element_query(pipeline(), query.get())) {
1524
gst_query_parse_buffering_stats(query.get(), &mode, nullptr, nullptr, nullptr);
1525
1526
int percentage;
1527
gst_query_parse_buffering_percent(query.get(), nullptr, &percentage);
1528
fillStatus = percentage;
1529
} else if (m_httpResponseTotalSize) {
1530
GST_DEBUG_OBJECT(pipeline(), "[Buffering] Query failed, falling back to network read position estimation");
1531
fillStatus = 100.0 * (static_cast<double>(m_networkReadPosition) / static_cast<double>(m_httpResponseTotalSize));
1532
} else {
1533
GST_DEBUG_OBJECT(pipeline(), "[Buffering] Unable to determine on-disk buffering status");
1534
return;
1535
}
1536
1537
updateBufferingStatus(mode, fillStatus);
1538
}
1539
1540
void MediaPlayerPrivateGStreamer::loadStateChanged()
1541
{
1542
updateStates();
1543
}
1544
1545
void MediaPlayerPrivateGStreamer::timeChanged()
1546
{
1547
updateStates();
1548
m_player->timeChanged();
1549
}
1550
1551
// Transitions the player into an error state, notifying it of network/ready
// state changes (always when forceNotifications is set, otherwise only on an
// actual change) and cancelling the READY-state cleanup timer.
void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState networkError, MediaPlayer::ReadyState readyState, bool forceNotifications)
{
    GST_WARNING("Loading failed, error: %s", convertEnumerationToString(networkError).utf8().data());

    m_didErrorOccur = true;
    if (forceNotifications || m_networkState != networkError) {
        m_networkState = networkError;
        m_player->networkStateChanged();
    }
    if (forceNotifications || m_readyState != readyState) {
        m_readyState = readyState;
        m_player->readyStateChanged();
    }

    // Loading failed, remove ready timer.
    m_readyTimerHandler.stop();
}
// Builds the audio sink: an autoaudiosink (tagged with stream properties when
// its real child sink appears), optionally wrapped in a bin that tees audio
// into the Web Audio provider.
GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
{
    m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
    if (!m_autoAudioSink) {
        GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
        return nullptr;
    }

    g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);

#if ENABLE(WEB_AUDIO)
    GstElement* audioSinkBin = gst_bin_new("audio-sink");
    ensureAudioSourceProvider();
    m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
    return audioSinkBin;
#else
    return m_autoAudioSink.get();
#endif
}
// Returns playbin's current audio sink. NOTE(review): g_object_get on
// "audio-sink" returns a new reference that the caller is responsible for;
// callers here pass it to ref-sink-aware helpers — confirm before reuse.
GstElement* MediaPlayerPrivateGStreamer::audioSink() const
{
    GstElement* sink;
    g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
    return sink;
}
// Computes the current playback position. Uses the seek target while a seek
// past end-of-stream is in flight, serves a cached value for up to 200ms to
// throttle pipeline queries, and falls back to the last finished seek
// position when the position query yields no valid time.
MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
{
    GST_TRACE_OBJECT(pipeline(), "isEndReached: %s, seeking: %s, seekTime: %s", boolForPrinting(m_isEndReached), boolForPrinting(m_isSeeking), m_seekTime.toString().utf8().data());
    if (m_isEndReached && m_isSeeking)
        return m_seekTime;

    // This constant should remain lower than HTMLMediaElement's maxTimeupdateEventFrequency.
    static const Seconds positionCacheThreshold = 200_ms;
    Seconds now = WTF::WallTime::now().secondsSinceEpoch();
    if (m_lastQueryTime && (now - m_lastQueryTime.value()) < positionCacheThreshold && m_cachedPosition.isValid()) {
        GST_TRACE_OBJECT(pipeline(), "Returning cached position: %s", m_cachedPosition.toString().utf8().data());
        return m_cachedPosition;
    }

    m_lastQueryTime = now;

    // Position is only available if no async state change is going on and the state is either paused or playing.
    gint64 position = GST_CLOCK_TIME_NONE;
    GstQuery* query = gst_query_new_position(GST_FORMAT_TIME);
    if (gst_element_query(m_pipeline.get(), query))
        gst_query_parse_position(query, 0, &position);
    gst_query_unref(query);

    GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
    GST_TRACE_OBJECT(pipeline(), "Position %" GST_TIME_FORMAT ", canFallBackToLastFinishedSeekPosition: %s", GST_TIME_ARGS(gstreamerPosition), boolForPrinting(m_canFallBackToLastFinishedSeekPosition));

    MediaTime playbackPosition = MediaTime::zeroTime();

    if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
        playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
    else if (m_canFallBackToLastFinishedSeekPosition)
        playbackPosition = m_seekTime;

    m_cachedPosition = playbackPosition;
    return playbackPosition;
}
// Selects the track at `index` for the given type. On legacy playbin this
// sets the matching "current-*" property; on playbin3 it sends a
// select-streams event carrying the chosen stream ID plus the currently
// selected streams of the other two types. No-op if the stream is already
// selected. Fix: the Unknown/default case previously fell through to use the
// uninitialized propertyName/trackTypeAsString in release builds (where
// ASSERT_NOT_REACHED is a no-op); it now returns early.
void MediaPlayerPrivateGStreamer::enableTrack(TrackPrivateBaseGStreamer::TrackType trackType, unsigned index)
{
    // FIXME: Remove isMediaSource() test below when fixing https://bugs.webkit.org/show_bug.cgi?id=182531.
    if (isMediaSource()) {
        GST_FIXME_OBJECT(m_pipeline.get(), "Audio/Video/Text track switching is not yet supported by the MSE backend.");
        return;
    }

    const char* propertyName;
    const char* trackTypeAsString;
    Vector<String> selectedStreams;
    String selectedStreamId;

    GstStream* stream = nullptr;

    if (!m_isLegacyPlaybin) {
        stream = gst_stream_collection_get_stream(m_streamCollection.get(), index);
        if (!stream) {
            GST_WARNING_OBJECT(pipeline(), "No stream to select at index %u", index);
            return;
        }
        selectedStreamId = String::fromUTF8(gst_stream_get_stream_id(stream));
        selectedStreams.append(selectedStreamId);
    }

    switch (trackType) {
    case TrackPrivateBaseGStreamer::TrackType::Audio:
        propertyName = "current-audio";
        trackTypeAsString = "audio";
        if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentAudioStreamId) {
            GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
            return;
        }

        if (!m_currentTextStreamId.isEmpty())
            selectedStreams.append(m_currentTextStreamId);
        if (!m_currentVideoStreamId.isEmpty())
            selectedStreams.append(m_currentVideoStreamId);
        break;
    case TrackPrivateBaseGStreamer::TrackType::Video:
        propertyName = "current-video";
        trackTypeAsString = "video";
        if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentVideoStreamId) {
            GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
            return;
        }

        if (!m_currentAudioStreamId.isEmpty())
            selectedStreams.append(m_currentAudioStreamId);
        if (!m_currentTextStreamId.isEmpty())
            selectedStreams.append(m_currentTextStreamId);
        break;
    case TrackPrivateBaseGStreamer::TrackType::Text:
        propertyName = "current-text";
        trackTypeAsString = "text";
        if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentTextStreamId) {
            GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
            return;
        }

        if (!m_currentAudioStreamId.isEmpty())
            selectedStreams.append(m_currentAudioStreamId);
        if (!m_currentVideoStreamId.isEmpty())
            selectedStreams.append(m_currentVideoStreamId);
        break;
    case TrackPrivateBaseGStreamer::TrackType::Unknown:
        FALLTHROUGH;
    default:
        ASSERT_NOT_REACHED();
        // Bail out so propertyName/trackTypeAsString are never read uninitialized.
        return;
    }

    GST_INFO_OBJECT(pipeline(), "Enabling %s track with index: %u", trackTypeAsString, index);
    if (m_isLegacyPlaybin)
        g_object_set(m_pipeline.get(), propertyName, index, nullptr);
    else {
        GList* selectedStreamsList = nullptr;

        for (const auto& streamId : selectedStreams)
            selectedStreamsList = g_list_append(selectedStreamsList, g_strdup(streamId.utf8().data()));

        gst_element_send_event(m_pipeline.get(), gst_event_new_select_streams(selectedStreamsList));
        g_list_free_full(selectedStreamsList, reinterpret_cast<GDestroyNotify>(g_free));
    }
}
void MediaPlayerPrivateGStreamer::updateTracks()
1720
{
1721
ASSERT(!m_isLegacyPlaybin);
1722
1723
bool useMediaSource = isMediaSource();
1724
unsigned length = gst_stream_collection_get_size(m_streamCollection.get());
1725
1726
bool oldHasAudio = m_hasAudio;
1727
bool oldHasVideo = m_hasVideo;
1728
// New stream collections override previous ones.
1729
clearTracks();
1730
unsigned textTrackIndex = 0;
1731
for (unsigned i = 0; i < length; i++) {
1732
GRefPtr<GstStream> stream = gst_stream_collection_get_stream(m_streamCollection.get(), i);
1733
String streamId(gst_stream_get_stream_id(stream.get()));
1734
GstStreamType type = gst_stream_get_stream_type(stream.get());
1735
1736
GST_DEBUG_OBJECT(pipeline(), "Inspecting %s track with ID %s", gst_stream_type_get_name(type), streamId.utf8().data());
1737
if (type & GST_STREAM_TYPE_AUDIO)
1738
CREATE_TRACK(audio, Audio);
1739
else if (type & GST_STREAM_TYPE_VIDEO)
1740
CREATE_TRACK(video, Video);
1741
else if (type & GST_STREAM_TYPE_TEXT && !useMediaSource) {
1742
#if ENABLE(VIDEO_TRACK)
1743
auto track = InbandTextTrackPrivateGStreamer::create(textTrackIndex++, stream);
1744
m_textTracks.add(streamId, track.copyRef());
1745
m_player->addTextTrack(track.get());
1746
#endif
1747
} else
1748
GST_WARNING("Unknown track type found for stream %s", streamId.utf8().data());
1749
}
1750
1751
if (oldHasVideo != m_hasVideo || oldHasAudio != m_hasAudio)
1752
m_player->characteristicChanged();
1753
1754
if (m_hasVideo)
1755
m_player->sizeChanged();
1756
1757
m_player->mediaEngineUpdated();
1758
}
1759
1760
// Drops all audio/video/text tracks, unregistering each from the player via
// the CLEAR_TRACKS helper macro.
void MediaPlayerPrivateGStreamer::clearTracks()
{
#if ENABLE(VIDEO_TRACK)
    CLEAR_TRACKS(m_audioTracks, m_player->removeAudioTrack);
    CLEAR_TRACKS(m_videoTracks, m_player->removeVideoTrack);
    CLEAR_TRACKS(m_textTracks, m_player->removeTextTrack);
#endif // ENABLE(VIDEO_TRACK)
}
// Video-tracks-changed signal trampoline; defers the real work to the main
// thread.
void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] {
        player->notifyPlayerOfVideo();
    });
}
// Stores the playback pipeline and installs a synchronous bus handler.
// Per GStreamer semantics, the sync handler runs on whatever thread posts the
// message, so handleSyncMessage() must be safe off the main thread. Messages
// it consumes are dropped; everything else is passed on to the async bus watch.
void MediaPlayerPrivateGStreamer::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;

    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
        auto& player = *static_cast<MediaPlayerPrivateGStreamer*>(userData);

        if (player.handleSyncMessage(message)) {
            gst_message_unref(message);
            return GST_BUS_DROP;
        }

        return GST_BUS_PASS;
    }, this, nullptr);
}

// Synchronous bus handler (may run on a streaming thread — see setPipeline()).
// Handles stream-collection updates (playbin3 only) and need-context requests
// (WebKit source context, and the preferred decryption system for EME).
// Returns true if the message was fully handled and should be dropped.
bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
{
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_STREAM_COLLECTION && !m_isLegacyPlaybin) {
        GRefPtr<GstStreamCollection> collection;
        gst_message_parse_stream_collection(message, &collection.outPtr());

        if (collection) {
            m_streamCollection.swap(collection);
            // Track bookkeeping touches MediaPlayer state, so defer to the main thread.
            m_notifier->notify(MainThreadNotification::StreamCollectionChanged, [this] {
                this->updateTracks();
            });
        }
    }

    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return false;

    const gchar* contextType;
    if (!gst_message_parse_context_type(message, &contextType))
        return false;

    GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));

    if (!g_strcmp0(contextType, WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME)) {
        // Hand the MediaPlayer pointer to webkitwebsrc through a non-persistent context.
        GRefPtr<GstContext> context = adoptGRef(gst_context_new(WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME, FALSE));
        GstStructure* contextStructure = gst_context_writable_structure(context.get());

        ASSERT(m_player);
        gst_structure_set(contextStructure, "player", G_TYPE_POINTER, m_player, nullptr);
        gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        return true;
    }

#if ENABLE(ENCRYPTED_MEDIA)
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
        initializationDataEncountered(parseInitDataFromProtectionMessage(message));
        // Blocks the streaming thread until a CDM is attached (or gives up).
        bool isCDMAttached = waitForCDMAttachment();
        if (isCDMAttached && !isPlayerShuttingDown() && !m_cdmInstance->keySystem().isEmpty()) {
            const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
            GST_INFO_OBJECT(pipeline(), "working with key system %s, continuing with key system %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));

            GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
            GstStructure* contextStructure = gst_context_writable_structure(context.get());
            gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
            return true;
        }

        GST_WARNING_OBJECT(pipeline(), "waiting for a CDM failed, no CDM available");
        return false;
    }
#endif // ENABLE(ENCRYPTED_MEDIA)

    GST_DEBUG_OBJECT(pipeline(), "Unhandled %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));
    return false;
}

// Returns the size of the video.
1851
FloatSize MediaPlayerPrivateGStreamer::naturalSize() const
1852
{
1853
#if USE(GSTREAMER_HOLEPUNCH)
1854
// When using the holepuch we may not be able to get the video frames size, so we can't use
1855
// it. But we need to report some non empty naturalSize for the player's GraphicsLayer
1856
// to be properly created.
1857
return s_holePunchDefaultFrameSize;
1858
#endif
1859
1860
#if ENABLE(MEDIA_STREAM)
1861
if (!m_isLegacyPlaybin && !m_currentVideoStreamId.isEmpty()) {
1862
RefPtr<VideoTrackPrivateGStreamer> videoTrack = m_videoTracks.get(m_currentVideoStreamId);
1863
1864
if (videoTrack) {
1865
auto tags = adoptGRef(gst_stream_get_tags(videoTrack->stream()));
1866
gint width, height;
1867
1868
if (tags && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_WIDTH, &width) && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_HEIGHT, &height))
1869
return FloatSize(width, height);
1870
}
1871
}
1872
#endif // ENABLE(MEDIA_STREAM)
1873
1874
if (!hasVideo())
1875
return FloatSize();
1876
1877
if (!m_videoSize.isEmpty())
1878
return m_videoSize;
1879
1880
auto sampleLocker = holdLock(m_sampleMutex);
1881
if (!GST_IS_SAMPLE(m_sample.get()))
1882
return FloatSize();
1883
1884
GstCaps* caps = gst_sample_get_caps(m_sample.get());
1885
if (!caps)
1886
return FloatSize();
1887
1888
// TODO: handle possible clean aperture data. See https://bugzilla.gnome.org/show_bug.cgi?id=596571
1889
// TODO: handle possible transformation matrix. See https://bugzilla.gnome.org/show_bug.cgi?id=596326
1890
1891
// Get the video PAR and original size, if this fails the
1892
// video-sink has likely not yet negotiated its caps.
1893
int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
1894
IntSize originalSize;
1895
GstVideoFormat format;
1896
if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
1897
return FloatSize();
1898
1899
#if USE(TEXTURE_MAPPER_GL)
1900
// When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
1901
if (m_canRenderingBeAccelerated) {
1902
if (m_videoSourceOrientation.usesWidthAsHeight())
1903
originalSize = originalSize.transposedSize();
1904
}
1905
#endif
1906
1907
GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
1908
GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);
1909
1910
// Calculate DAR based on PAR and video size.
1911
int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
1912
int displayHeight = originalSize.height() * pixelAspectRatioDenominator;
1913
1914
// Divide display width and height by their GCD to avoid possible overflows.
1915
int displayAspectRatioGCD = gst_util_greatest_common_divisor(displayWidth, displayHeight);
1916
displayWidth /= displayAspectRatioGCD;
1917
displayHeight /= displayAspectRatioGCD;
1918
1919
// Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
1920
uint64_t width = 0, height = 0;
1921
if (!(originalSize.height() % displayHeight)) {
1922
GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
1923
width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
1924
height = originalSize.height();
1925
} else if (!(originalSize.width() % displayWidth)) {
1926
GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
1927
height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
1928
width = originalSize.width();
1929
} else {
1930
GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
1931
width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
1932
height = originalSize.height();
1933
}
1934
1935
GST_DEBUG_OBJECT(pipeline(), "Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
1936
m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
1937
return m_videoSize;
1938
}
1939
1940
void MediaPlayerPrivateGStreamer::setVolume(float volume)
1941
{
1942
if (!m_volumeElement)
1943
return;
1944
1945
GST_DEBUG_OBJECT(pipeline(), "Setting volume: %f", volume);
1946
gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR, static_cast<double>(volume));
1947
}
1948
1949
float MediaPlayerPrivateGStreamer::volume() const
1950
{
1951
if (!m_volumeElement)
1952
return 0;
1953
1954
return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR);
1955
}
1956
1957
void MediaPlayerPrivateGStreamer::notifyPlayerOfVolumeChange()
1958
{
1959
if (!m_player || !m_volumeElement)
1960
return;
1961
double volume;
1962
volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR);
1963
// get_volume() can return values superior to 1.0 if the user
1964
// applies software user gain via third party application (GNOME
1965
// volume control for instance).
1966
volume = CLAMP(volume, 0.0, 1.0);
1967
m_player->volumeChanged(static_cast<float>(volume));
1968
}
1969
1970
// Static trampoline for the notify::volume signal on m_volumeElement;
// defers the actual work to the main thread.
void MediaPlayerPrivateGStreamer::volumeChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG_OBJECT(player->pipeline(), "Volume changed to: %f", player->volume());

    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] {
        player->notifyPlayerOfVolumeChange();
    });
}

// Accessor for the cached network state maintained by handleMessage()/updateStates().
MediaPlayer::NetworkState MediaPlayerPrivateGStreamer::networkState() const
{
    return m_networkState;
}

// Accessor for the cached ready state maintained by updateStates().
MediaPlayer::ReadyState MediaPlayerPrivateGStreamer::readyState() const
{
    return m_readyState;
}

void MediaPlayerPrivateGStreamer::setMuted(bool shouldMute)
1991
{
1992
if (!m_volumeElement || shouldMute == isMuted())
1993
return;
1994
1995
GST_INFO_OBJECT(pipeline(), "Muted? %s", boolForPrinting(shouldMute));
1996
g_object_set(m_volumeElement.get(), "mute", shouldMute, nullptr);
1997
}
1998
1999
void MediaPlayerPrivateGStreamer::notifyPlayerOfMute()
2000
{
2001
if (!m_player || !m_volumeElement)
2002
return;
2003
2004
gboolean muted;
2005
g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
2006
m_player->muteChanged(static_cast<bool>(muted));
2007
}
2008
2009
// Static trampoline for the notify::mute signal on m_volumeElement;
// defers the actual work to the main thread.
void MediaPlayerPrivateGStreamer::muteChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] {
        player->notifyPlayerOfMute();
    });
}

// Asynchronous bus-watch handler: dispatches every GstMessage posted by the
// pipeline (errors, EOS, state changes, buffering, tags, stream selection,
// element-specific messages, ...). Runs on the main thread.
// Fixes applied: restored `&currentState` where the address-of operator had
// been corrupted into the `¤` mojibake (GST_MESSAGE_STATE_CHANGED and
// GST_MESSAGE_REQUEST_STATE cases), and fixed the "pluging" typo in a log
// message.
void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
{
    GUniqueOutPtr<GError> err;
    GUniqueOutPtr<gchar> debug;
    MediaPlayer::NetworkState error;
    bool issueError = true;
    bool attemptNextLocation = false;
    const GstStructure* structure = gst_message_get_structure(message);
    GstState requestedState, currentState;

    m_canFallBackToLastFinishedSeekPosition = false;

    if (structure) {
        const gchar* messageTypeName = gst_structure_get_name(structure);

        // Redirect messages are sent from elements, like qtdemux, to
        // notify of the new location(s) of the media.
        if (!g_strcmp0(messageTypeName, "redirect")) {
            mediaLocationChanged(message);
            return;
        }
    }

    // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
    bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());

    GST_LOG_OBJECT(pipeline(), "Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ERROR:
        if (m_shouldResetPipeline || !m_missingPluginCallbacks.isEmpty() || m_didErrorOccur)
            break;
        gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
        GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());

        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");

        // Map the GStreamer error domain/code onto a MediaPlayer network state.
        error = MediaPlayer::NetworkState::Empty;
        if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
            || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
            || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
            error = MediaPlayer::NetworkState::FormatError;
        else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
            // Let the mediaPlayerClient handle the stream error, in this case the HTMLMediaElement will emit a stalled event.
            GST_ERROR("Decode error, let the Media element emit a stalled event.");
            m_loadingStalled = true;
            break;
        } else if (err->domain == GST_STREAM_ERROR) {
            error = MediaPlayer::NetworkState::DecodeError;
            attemptNextLocation = true;
        } else if (err->domain == GST_RESOURCE_ERROR)
            error = MediaPlayer::NetworkState::NetworkError;

        if (attemptNextLocation)
            issueError = !loadNextLocation();
        if (issueError) {
            m_didErrorOccur = true;
            if (m_networkState != error) {
                m_networkState = error;
                m_player->networkStateChanged();
            }
        }
        break;
    case GST_MESSAGE_EOS:
        didEnd();
        break;
    case GST_MESSAGE_ASYNC_DONE:
        if (!messageSourceIsPlaybin || m_isDelayingLoad)
            break;
        asyncStateChangeDone();
        break;
    case GST_MESSAGE_STATE_CHANGED: {
        if (!messageSourceIsPlaybin || m_isDelayingLoad)
            break;
        updateStates();

        // Construct a filename for the graphviz dot file output.
        GstState newState;
        gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
        CString dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), '.',
            gst_element_state_get_name(currentState), '_', gst_element_state_get_name(newState)).utf8();
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());

        break;
    }
    case GST_MESSAGE_BUFFERING:
        processBufferingStats(message);
        break;
    case GST_MESSAGE_DURATION_CHANGED:
        // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
        if (messageSourceIsPlaybin && !isMediaSource())
            durationChanged();
        break;
    case GST_MESSAGE_REQUEST_STATE:
        gst_message_parse_request_state(message, &requestedState);
        gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
        if (requestedState < currentState) {
            GST_INFO_OBJECT(pipeline(), "Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message),
                gst_element_state_get_name(requestedState));
            m_requestedState = requestedState;
            if (!changePipelineState(requestedState))
                loadingFailed(MediaPlayer::NetworkState::Empty);
        }
        break;
    case GST_MESSAGE_CLOCK_LOST:
        // This can only happen in PLAYING state and we should just
        // get a new clock by moving back to PAUSED and then to
        // PLAYING again.
        // This can happen if the stream that ends in a sink that
        // provides the current clock disappears, for example if
        // the audio sink provides the clock and the audio stream
        // is disabled. It also happens relatively often with
        // HTTP adaptive streams when switching between different
        // variants of a stream.
        gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
        gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
        break;
    case GST_MESSAGE_LATENCY:
        // Recalculate the latency, we don't need any special handling
        // here other than the GStreamer default.
        // This can happen if the latency of live elements changes, or
        // for one reason or another a new live element is added or
        // removed from the pipeline.
        gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
        break;
    case GST_MESSAGE_ELEMENT:
        if (gst_is_missing_plugin_message(message)) {
            if (gst_install_plugins_supported()) {
                auto missingPluginCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([weakThis = makeWeakPtr(*this)](uint32_t result, MediaPlayerRequestInstallMissingPluginsCallback& missingPluginCallback) {
                    if (!weakThis) {
                        GST_INFO("got missing plugin installation callback in destroyed player with result %u", result);
                        return;
                    }

                    GST_DEBUG("got missing plugin installation callback with result %u", result);
                    RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> protectedMissingPluginCallback = &missingPluginCallback;
                    weakThis->m_missingPluginCallbacks.removeFirst(protectedMissingPluginCallback);
                    if (result != GST_INSTALL_PLUGINS_SUCCESS)
                        return;

                    // Give the freshly-installed plugin a chance by cycling READY -> PAUSED.
                    weakThis->changePipelineState(GST_STATE_READY);
                    weakThis->changePipelineState(GST_STATE_PAUSED);
                });
                m_missingPluginCallbacks.append(missingPluginCallback.copyRef());
                GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
                GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
                m_player->requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), missingPluginCallback.get());
            }
        }
#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
        else if (GstMpegtsSection* section = gst_message_parse_mpegts_section(message)) {
            processMpegTsSection(section);
            gst_mpegts_section_unref(section);
        }
#endif
        else if (gst_structure_has_name(structure, "http-headers")) {
            GST_DEBUG_OBJECT(pipeline(), "Processing HTTP headers: %" GST_PTR_FORMAT, structure);
            if (const char* uri = gst_structure_get_string(structure, "uri")) {
                URL url(URL(), uri);
                convertToInternalProtocol(url);
                m_origins.add(SecurityOrigin::create(url));

                if (url != m_url) {
                    GST_DEBUG_OBJECT(pipeline(), "Ignoring HTTP response headers for non-main URI.");
                    break;
                }
            }

            bool isRangeRequest = false;
            GUniqueOutPtr<GstStructure> requestHeaders;
            if (gst_structure_get(structure, "request-headers", GST_TYPE_STRUCTURE, &requestHeaders.outPtr(), nullptr))
                isRangeRequest = gst_structure_has_field(requestHeaders.get(), "Range");

            GST_DEBUG_OBJECT(pipeline(), "Is range request: %s", boolForPrinting(isRangeRequest));

            GUniqueOutPtr<GstStructure> responseHeaders;
            if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders.outPtr(), nullptr)) {
                CString contentLengthHeaderName = httpHeaderNameString(HTTPHeaderName::ContentLength).utf8();
                uint64_t contentLength = 0;
                if (!gst_structure_get_uint64(responseHeaders.get(), contentLengthHeaderName.data(), &contentLength)) {
                    // souphttpsrc sets a string for Content-Length, so
                    // handle it here, until we remove the webkit+ protocol
                    // prefix from webkitwebsrc.
                    if (const char* contentLengthAsString = gst_structure_get_string(responseHeaders.get(), contentLengthHeaderName.data())) {
                        contentLength = g_ascii_strtoull(contentLengthAsString, nullptr, 10);
                        if (contentLength == G_MAXUINT64)
                            contentLength = 0;
                    }
                }
                if (!isRangeRequest) {
                    // A missing/zero Content-Length on the full resource means a live stream.
                    m_isLiveStream = !contentLength;
                    GST_INFO_OBJECT(pipeline(), "%s stream detected", m_isLiveStream ? "Live" : "Non-live");
                    updateDownloadBufferingFlag();
                }
            }
        } else if (gst_structure_has_name(structure, "webkit-network-statistics")) {
            if (gst_structure_get(structure, "read-position", G_TYPE_UINT64, &m_networkReadPosition, "size", G_TYPE_UINT64, &m_httpResponseTotalSize, nullptr))
                GST_DEBUG_OBJECT(pipeline(), "Updated network read position %" G_GUINT64_FORMAT ", size: %" G_GUINT64_FORMAT, m_networkReadPosition, m_httpResponseTotalSize);
        } else if (gst_structure_has_name(structure, "adaptive-streaming-statistics")) {
            if (WEBKIT_IS_WEB_SRC(m_source.get()) && !webkitGstCheckVersion(1, 12, 0)) {
                if (const char* uri = gst_structure_get_string(structure, "uri"))
                    m_hasTaintedOrigin = webKitSrcWouldTaintOrigin(WEBKIT_WEB_SRC_CAST(m_source.get()), SecurityOrigin::create(URL(URL(), uri)));
            }
        } else if (gst_structure_has_name(structure, "GstCacheDownloadComplete")) {
            GST_INFO_OBJECT(pipeline(), "Stream is fully downloaded, stopping monitoring downloading progress.");
            m_fillTimer.stop();
            m_bufferingPercentage = 100;
            updateStates();
        } else
            GST_DEBUG_OBJECT(pipeline(), "Unhandled element message: %" GST_PTR_FORMAT, structure);
        break;
#if ENABLE(VIDEO_TRACK)
    case GST_MESSAGE_TOC:
        processTableOfContents(message);
        break;
#endif
    case GST_MESSAGE_TAG: {
        GstTagList* tags = nullptr;
        GUniqueOutPtr<gchar> tag;
        gst_message_parse_tag(message, &tags);
        if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
            if (!g_strcmp0(tag.get(), "rotate-90"))
                setVideoSourceOrientation(ImageOrientation::OriginRightTop);
            else if (!g_strcmp0(tag.get(), "rotate-180"))
                setVideoSourceOrientation(ImageOrientation::OriginBottomRight);
            else if (!g_strcmp0(tag.get(), "rotate-270"))
                setVideoSourceOrientation(ImageOrientation::OriginLeftBottom);
        }
        gst_tag_list_unref(tags);
        break;
    }
    case GST_MESSAGE_STREAMS_SELECTED: {
        GRefPtr<GstStreamCollection> collection;
        gst_message_parse_streams_selected(message, &collection.outPtr());

        if (!collection)
            break;

        m_streamCollection.swap(collection);
        m_currentAudioStreamId = "";
        m_currentVideoStreamId = "";
        m_currentTextStreamId = "";

        unsigned length = gst_message_streams_selected_get_size(message);
        for (unsigned i = 0; i < length; i++) {
            GRefPtr<GstStream> stream = gst_message_streams_selected_get_stream(message, i);
            if (!stream)
                continue;

            GstStreamType type = gst_stream_get_stream_type(stream.get());
            String streamId(gst_stream_get_stream_id(stream.get()));

            GST_DEBUG_OBJECT(pipeline(), "Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data());
            // Playbin3 can send more than one selected stream of the same type
            // but there's no priority or ordering system in place, so we assume
            // the selected stream is the last one as reported by playbin3.
            if (type & GST_STREAM_TYPE_AUDIO) {
                m_currentAudioStreamId = streamId;
                auto track = m_audioTracks.get(m_currentAudioStreamId);
                ASSERT(track);
                track->markAsActive();
            } else if (type & GST_STREAM_TYPE_VIDEO) {
                m_currentVideoStreamId = streamId;
                auto track = m_videoTracks.get(m_currentVideoStreamId);
                ASSERT(track);
                track->markAsActive();
            } else if (type & GST_STREAM_TYPE_TEXT)
                m_currentTextStreamId = streamId;
            else
                GST_WARNING("Unknown stream type with stream-id %s", streamId.utf8().data());
        }
        break;
    }
    default:
        GST_DEBUG_OBJECT(pipeline(), "Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message));
        break;
    }
}

// Extracts the buffering mode and fill percentage from a GST_MESSAGE_BUFFERING
// message and forwards them to updateBufferingStatus().
void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
{
    GstBufferingMode mode;
    gst_message_parse_buffering_stats(message, &mode, nullptr, nullptr, nullptr);

    int percentage;
    gst_message_parse_buffering(message, &percentage);

    updateBufferingStatus(mode, percentage);
}

void MediaPlayerPrivateGStreamer::updateMaxTimeLoaded(double percentage)
2309
{
2310
MediaTime mediaDuration = durationMediaTime();
2311
if (!mediaDuration)
2312
return;
2313
2314
m_maxTimeLoaded = MediaTime(percentage * static_cast<double>(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND);
2315
GST_DEBUG_OBJECT(pipeline(), "[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data());
2316
}
2317
2318
// Updates the buffering flags and loaded-range estimate from a buffering
// report. Cleanup: the original re-assigned m_isBuffering under
// `if (!m_didDownloadFinish)` (already implied by the line above) and set
// m_bufferingPercentage a second time inside the STREAM case; both redundant
// statements are removed with no behavior change.
void MediaPlayerPrivateGStreamer::updateBufferingStatus(GstBufferingMode mode, double percentage)
{
    bool wasBuffering = m_isBuffering;

    GST_DEBUG_OBJECT(pipeline(), "[Buffering] mode: %s, status: %f%%", enumToString(GST_TYPE_BUFFERING_MODE, mode).data(), percentage);

    // 100% means the download finished; anything less means we are buffering.
    m_didDownloadFinish = percentage == 100;
    m_isBuffering = !m_didDownloadFinish;
    m_bufferingPercentage = percentage;

    switch (mode) {
    case GST_BUFFERING_STREAM: {
        updateMaxTimeLoaded(percentage);

        // Only propagate states on completion or on a false -> true buffering edge.
        if (m_didDownloadFinish || (!wasBuffering && m_isBuffering))
            updateStates();

        break;
    }
    case GST_BUFFERING_DOWNLOAD: {
        updateMaxTimeLoaded(percentage);
        updateStates();
        break;
    }
    default:
        GST_DEBUG_OBJECT(pipeline(), "Unhandled buffering mode: %s", enumToString(GST_TYPE_BUFFERING_MODE, mode).data());
        break;
    }
}

#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
// Handles an MPEG-TS section. For a PMT section, rebuilds the set of in-band
// metadata text tracks (stream_type 0x05 or >= 0x80); for any other section,
// forwards the raw section bytes as a data cue on the matching track (keyed
// by PID).
void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
{
    ASSERT(section);

    if (section->section_type == GST_MPEGTS_SECTION_PMT) {
        const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
        // A new PMT supersedes any previously-announced metadata tracks.
        m_metadataTracks.clear();
        for (unsigned i = 0; i < pmt->streams->len; ++i) {
            const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
            if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
                AtomString pid = String::number(stream->pid);
                auto track = InbandMetadataTextTrackPrivateGStreamer::create(
                    InbandTextTrackPrivate::Kind::Metadata, InbandTextTrackPrivate::CueFormat::Data, pid);

                // 4.7.10.12.2 Sourcing in-band text tracks
                // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
                // type as follows, based on the type of the media resource:
                // Let stream type be the value of the "stream_type" field describing the text track's type in the
                // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
                // the "ES_info_length" field for the track in the same part of the program map section, interpreted
                // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
                // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
                // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
                // expressed in hexadecimal using uppercase ASCII hex digits.
                StringBuilder inbandMetadataTrackDispatchType;
                inbandMetadataTrackDispatchType.append(hex(stream->stream_type, 2));
                for (unsigned j = 0; j < stream->descriptors->len; ++j) {
                    const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
                    for (unsigned k = 0; k < descriptor->length; ++k)
                        inbandMetadataTrackDispatchType.append(hex(descriptor->data[k], 2));
                }
                track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType.toString());

                m_metadataTracks.add(pid, track);
                m_player->addTextTrack(*track);
            }
        }
    } else {
        AtomString pid = String::number(section->pid);
        RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
        if (!track)
            return;

        // NOTE(review): gst_mpegts_section_get_data() is documented as
        // (transfer full); assigning to GRefPtr without adoptGRef() would add
        // an extra ref. Verify whether this leaks one GBytes ref per section.
        GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
        gsize size;
        const void* bytes = g_bytes_get_data(data.get(), &size);

        track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
    }
}

#endif
#if ENABLE(VIDEO_TRACK)
// Rebuilds the chapters text track from a GST_MESSAGE_TOC message: removes any
// previous chapters track, creates a fresh one, and recursively converts each
// TOC entry into a generic cue.
void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
{
    if (m_chaptersTrack)
        m_player->removeTextTrack(*m_chaptersTrack);

    m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Kind::Chapters, InbandTextTrackPrivate::CueFormat::Generic);
    m_player->addTextTrack(*m_chaptersTrack);

    GRefPtr<GstToc> toc;
    gboolean updated;
    gst_message_parse_toc(message, &toc.outPtr(), &updated);
    ASSERT(toc);

    for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
        processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
}

// Converts one TOC entry (and, recursively, its sub-entries) into a generic
// cue on the chapters track: start/stop times (GstClockTime nanoseconds,
// rescaled with a GST_SECOND timescale) plus the GST_TAG_TITLE as cue content.
void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
{
    ASSERT(entry);

    auto cue = InbandGenericCue::create();

    gint64 start = -1, stop = -1;
    gst_toc_entry_get_start_stop_times(entry, &start, &stop);

    // MediaTime timescales are 32-bit, so GST_SECOND must be truncated.
    uint32_t truncatedGstSecond = static_cast<uint32_t>(GST_SECOND);
    if (start != -1)
        cue->setStartTime(MediaTime(static_cast<int64_t>(start), truncatedGstSecond));
    if (stop != -1)
        cue->setEndTime(MediaTime(static_cast<int64_t>(stop), truncatedGstSecond));

    GstTagList* tags = gst_toc_entry_get_tags(entry);
    if (tags) {
        gchar* title = nullptr;
        gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
        if (title) {
            cue->setContent(title);
            g_free(title);
        }
    }

    m_chaptersTrack->addGenericCue(cue);

    for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
        processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
}

// Drops every cached audio track whose stream-id is not in validTrackIds.
// The lambda now captures the vector by reference instead of copying it a
// second time; removeIf runs synchronously, so the reference cannot dangle.
void MediaPlayerPrivateGStreamer::purgeInvalidAudioTracks(Vector<String> validTrackIds)
{
    m_audioTracks.removeIf([&validTrackIds](auto& keyAndValue) {
        return !validTrackIds.contains(keyAndValue.key);
    });
}

// Drops every cached video track whose stream-id is not in validTrackIds.
// The lambda now captures the vector by reference instead of copying it a
// second time; removeIf runs synchronously, so the reference cannot dangle.
void MediaPlayerPrivateGStreamer::purgeInvalidVideoTracks(Vector<String> validTrackIds)
{
    m_videoTracks.removeIf([&validTrackIds](auto& keyAndValue) {
        return !validTrackIds.contains(keyAndValue.key);
    });
}

// Drops every cached text track whose stream-id is not in validTrackIds.
// The lambda now captures the vector by reference instead of copying it a
// second time; removeIf runs synchronously, so the reference cannot dangle.
void MediaPlayerPrivateGStreamer::purgeInvalidTextTracks(Vector<String> validTrackIds)
{
    m_textTracks.removeIf([&validTrackIds](auto& keyAndValue) {
        return !validTrackIds.contains(keyAndValue.key);
    });
}
#endif
// element-added callback on uridecodebin: when the GstDownloadBuffer element
// appears, points its temp-file template at /var/tmp, watches for the file's
// creation (so it can be unlinked immediately), and cleans up files left over
// from previous runs. Disconnects itself after the first match.
void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
{
    if (g_strcmp0(G_OBJECT_TYPE_NAME(element), "GstDownloadBuffer"))
        return;

    player->m_downloadBuffer = element;
    g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
    g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> oldDownloadTemplate;
    g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);

    GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
    g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
    GST_DEBUG_OBJECT(player->pipeline(), "Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());

    player->purgeOldDownloadFiles(oldDownloadTemplate.get());
}

// notify::temp-location callback: once the download buffer has created its
// temp file, unlink it right away — the element keeps its own open file
// descriptor, so the data stays usable while never appearing on disk.
void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
{
    ASSERT(player->m_downloadBuffer);

    g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> downloadFile;
    g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
    player->m_downloadBuffer = nullptr;

    if (UNLIKELY(!FileSystem::deleteFile(downloadFile.get()))) {
        GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
        return;
    }

    GST_DEBUG_OBJECT(player->pipeline(), "Unlinked media temporary file %s after creation", downloadFile.get());
}

void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
2514
{
2515
if (!downloadFileTemplate)
2516
return;
2517
2518
GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
2519
GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
2520
String templatePattern = String(templateFile.get()).replace("X", "?");
2521
2522
for (auto& filePath : FileSystem::listDirectory(templatePath.get(), templatePattern)) {
2523
if (UNLIKELY(!FileSystem::deleteFile(filePath))) {
2524
GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
2525
continue;
2526
}
2527
2528
GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
2529
}
2530
}
2531
2532
// Called when an asynchronous pipeline state change completes (ASYNC_DONE).
// Finishes an in-flight seek, chains an overlapping seek that arrived while
// the first one was running, or falls through to a regular state update.
void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
{
    if (!m_pipeline || m_didErrorOccur)
        return;

    if (m_isSeeking) {
        if (m_isSeekPending)
            updateStates();
        else {
            GST_DEBUG_OBJECT(pipeline(), "[Seek] seeked to %s", toString(m_seekTime).utf8().data());
            m_isSeeking = false;
            // The old cached position predates the seek; invalidate it.
            m_cachedPosition = MediaTime::invalidTime();
            // If another seek was requested while this one was in flight,
            // start it now and bail out without notifying a time change.
            if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
                seek(m_timeOfOverlappingSeek);
                m_timeOfOverlappingSeek = MediaTime::invalidTime();
                return;
            }
            m_timeOfOverlappingSeek = MediaTime::invalidTime();

            // The pipeline can still have a pending state. In this case a position query will fail.
            // Right now we can use m_seekTime as a fallback.
            m_canFallBackToLastFinishedSeekPosition = true;
            timeChanged();
        }
    } else
        updateStates();
}
2559
2560
// Central state machine: polls the pipeline's GStreamer state and maps it to
// the MediaPlayer ready/network states, handles buffering transitions, live
// (no-preroll) pipelines, and commits pending seeks once the pipeline has
// prerolled. Notifies the MediaPlayer client about any resulting changes.
void MediaPlayerPrivateGStreamer::updateStates()
{
    if (!m_pipeline || m_didErrorOccur)
        return;

    MediaPlayer::NetworkState oldNetworkState = m_networkState;
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    GstState pending, state;
    bool stateReallyChanged = false;

    // Short (250ns) timeout: we only want a snapshot, not to block.
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
    if (state != m_currentState) {
        m_oldState = m_currentState;
        m_currentState = state;
        stateReallyChanged = true;
    }

    bool shouldUpdatePlaybackState = false;
    switch (getStateResult) {
    case GST_STATE_CHANGE_SUCCESS: {
        GST_DEBUG_OBJECT(pipeline(), "State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));

        // Do nothing if on EOS and state changed to READY to avoid recreating the player
        // on HTMLMediaElement and properly generate the video 'ended' event.
        if (m_isEndReached && m_currentState == GST_STATE_READY)
            break;

        m_shouldResetPipeline = m_currentState <= GST_STATE_READY;

        // Remember whether we were buffering before this update; used below to
        // decide if playback should be restarted.
        bool didBuffering = m_isBuffering;

        // Update ready and network states.
        switch (m_currentState) {
        case GST_STATE_NULL:
            m_readyState = MediaPlayer::ReadyState::HaveNothing;
            m_networkState = MediaPlayer::NetworkState::Empty;
            break;
        case GST_STATE_READY:
            m_readyState = MediaPlayer::ReadyState::HaveMetadata;
            m_networkState = MediaPlayer::NetworkState::Empty;
            break;
        case GST_STATE_PAUSED:
            FALLTHROUGH;
        case GST_STATE_PLAYING:
            if (m_isBuffering) {
                // Re-check buffering status with a direct pipeline query; the
                // cached percentage may be stale.
                GRefPtr<GstQuery> query = adoptGRef(gst_query_new_buffering(GST_FORMAT_PERCENT));

                m_isBuffering = m_bufferingPercentage == 100;
                if (gst_element_query(m_pipeline.get(), query.get())) {
                    gboolean isBuffering = m_isBuffering;
                    gst_query_parse_buffering_percent(query.get(), &isBuffering, nullptr);
                    m_isBuffering = isBuffering;
                }

                if (!m_isBuffering) {
                    GST_INFO_OBJECT(pipeline(), "[Buffering] Complete.");
                    m_readyState = MediaPlayer::ReadyState::HaveEnoughData;
                    m_networkState = m_didDownloadFinish ? MediaPlayer::NetworkState::Idle : MediaPlayer::NetworkState::Loading;
                } else {
                    m_readyState = MediaPlayer::ReadyState::HaveCurrentData;
                    m_networkState = MediaPlayer::NetworkState::Loading;
                }
            } else if (m_didDownloadFinish) {
                m_readyState = MediaPlayer::ReadyState::HaveEnoughData;
                m_networkState = MediaPlayer::NetworkState::Loaded;
            } else {
                m_readyState = MediaPlayer::ReadyState::HaveFutureData;
                m_networkState = MediaPlayer::NetworkState::Loading;
            }

            break;
        default:
            ASSERT_NOT_REACHED();
            break;
        }

        // Sync states where needed.
        if (m_currentState == GST_STATE_PAUSED) {
            // Volume/mute can only be synced once the pipeline has sinks.
            if (!m_areVolumeAndMuteInitialized) {
                notifyPlayerOfVolumeChange();
                notifyPlayerOfMute();
                m_areVolumeAndMuteInitialized = true;
            }

            // Buffering just finished and the client wants to play: resume.
            if (didBuffering && !m_isBuffering && !m_isPaused && m_playbackRate) {
                GST_INFO_OBJECT(pipeline(), "[Buffering] Restarting playback.");
                changePipelineState(GST_STATE_PLAYING);
            }
        } else if (m_currentState == GST_STATE_PLAYING) {
            m_isPaused = false;

            // Pause non-live streams while buffering, or when rate is 0.
            if ((m_isBuffering && !m_isLiveStream) || !m_playbackRate) {
                GST_INFO_OBJECT(pipeline(), "[Buffering] Pausing stream for buffering.");
                changePipelineState(GST_STATE_PAUSED);
            }
        } else
            m_isPaused = true;

        GST_DEBUG_OBJECT(pipeline(), "Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
        if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
            shouldUpdatePlaybackState = true;
            GST_INFO_OBJECT(pipeline(), "Requested state change to %s was completed", gst_element_state_get_name(m_currentState));
        }

        // Emit play state change notification only when going to PLAYING so that
        // the media element gets a chance to enable its page sleep disabler.
        // Emitting this notification in more cases triggers unwanted code paths
        // and test timeouts.
        if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) {
            GST_INFO_OBJECT(pipeline(), "Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState));
            shouldUpdatePlaybackState = true;
        }

        break;
    }
    case GST_STATE_CHANGE_ASYNC:
        GST_DEBUG_OBJECT(pipeline(), "Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
        // Change in progress.
        break;
    case GST_STATE_CHANGE_FAILURE:
        GST_DEBUG_OBJECT(pipeline(), "Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
        // Change failed.
        return;
    case GST_STATE_CHANGE_NO_PREROLL:
        GST_DEBUG_OBJECT(pipeline(), "No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));

        // Live pipelines go in PAUSED without prerolling.
        m_isLiveStream = true;
        updateDownloadBufferingFlag();

        if (m_currentState == GST_STATE_READY)
            m_readyState = MediaPlayer::ReadyState::HaveNothing;
        else if (m_currentState == GST_STATE_PAUSED) {
            m_readyState = MediaPlayer::ReadyState::HaveEnoughData;
            m_isPaused = true;
        } else if (m_currentState == GST_STATE_PLAYING)
            m_isPaused = false;

        if (!m_isPaused && m_playbackRate)
            changePipelineState(GST_STATE_PLAYING);

        m_networkState = MediaPlayer::NetworkState::Loading;
        break;
    default:
        GST_DEBUG_OBJECT(pipeline(), "Else : %d", getStateResult);
        break;
    }

    // The requested state has been reached (or abandoned) at this point.
    m_requestedState = GST_STATE_VOID_PENDING;

    if (shouldUpdatePlaybackState)
        m_player->playbackStateChanged();

    if (m_networkState != oldNetworkState) {
        GST_DEBUG_OBJECT(pipeline(), "Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data());
        m_player->networkStateChanged();
    }
    if (m_readyState != oldReadyState) {
        GST_DEBUG_OBJECT(pipeline(), "Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data());
        m_player->readyStateChanged();
    }

    // Once prerolled (>= PAUSED), commit any pending rate change and seek.
    if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
        updatePlaybackRate();
        if (m_isSeekPending) {
            GST_DEBUG_OBJECT(pipeline(), "[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
            m_isSeekPending = false;
            m_isSeeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
            if (!m_isSeeking) {
                m_cachedPosition = MediaTime::invalidTime();
                GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
            }
        }
    }
}
2735
2736
// Handles a redirection message from the pipeline: caches the message
// structure (which lists one or more alternative media locations) and starts
// loading the first candidate.
void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
{
    if (m_mediaLocations) {
        gst_structure_free(m_mediaLocations);
        // Null the pointer right away: if the new message carries no
        // structure, m_mediaLocations would otherwise dangle and a later
        // loadNextLocation() would read freed memory.
        m_mediaLocations = nullptr;
    }

    const GstStructure* structure = gst_message_get_structure(message);
    if (structure) {
        // This structure can contain:
        // - both a new-location string and embedded locations structure
        // - or only a new-location string.
        m_mediaLocations = gst_structure_copy(structure);
        const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");

        // Iterate the candidate list from the last entry backwards.
        if (locations)
            m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) - 1;

        loadNextLocation();
    }
}
2755
2756
bool MediaPlayerPrivateGStreamer::loadNextLocation()
2757
{
2758
if (!m_mediaLocations)
2759
return false;
2760
2761
const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2762
const char* newLocation = nullptr;
2763
2764
if (!locations) {
2765
// Fallback on new-location string.
2766
newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
2767
if (!newLocation)
2768
return false;
2769
}
2770
2771
if (!newLocation) {
2772
if (m_mediaLocationCurrentIndex < 0) {
2773
m_mediaLocations = nullptr;
2774
return false;
2775
}
2776
2777
const GValue* location = gst_value_list_get_value(locations, m_mediaLocationCurrentIndex);
2778
const GstStructure* structure = gst_value_get_structure(location);
2779
2780
if (!structure) {
2781
m_mediaLocationCurrentIndex--;
2782
return false;
2783
}
2784
2785
newLocation = gst_structure_get_string(structure, "new-location");
2786
}
2787
2788
if (newLocation) {
2789
// Found a candidate. new-location is not always an absolute url
2790
// though. We need to take the base of the current url and
2791
// append the value of new-location to it.
2792
URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
2793
URL newUrl = URL(baseUrl, newLocation);
2794
2795
GUniqueOutPtr<gchar> playbinUrlStr;
2796
g_object_get(m_pipeline.get(), "current-uri", &playbinUrlStr.outPtr(), nullptr);
2797
URL playbinUrl(URL(), playbinUrlStr.get());
2798
2799
if (playbinUrl == newUrl) {
2800
GST_DEBUG_OBJECT(pipeline(), "Playbin already handled redirection.");
2801
2802
m_url = playbinUrl;
2803
2804
return true;
2805
}
2806
2807
changePipelineState(GST_STATE_READY);
2808
auto securityOrigin = SecurityOrigin::create(m_url);
2809
if (securityOrigin->canRequest(newUrl)) {
2810
GST_INFO_OBJECT(pipeline(), "New media url: %s", newUrl.string().utf8().data());
2811
2812
// Reset player states.
2813
m_networkState = MediaPlayer::NetworkState::Loading;
2814
m_player->networkStateChanged();
2815
m_readyState = MediaPlayer::ReadyState::HaveNothing;
2816
m_player->readyStateChanged();
2817
2818
// Reset pipeline state.
2819
m_shouldResetPipeline = true;
2820
2821
GstState state;
2822
gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
2823
if (state <= GST_STATE_READY) {
2824
// Set the new uri and start playing.
2825
setPlaybinURL(newUrl);
2826
changePipelineState(GST_STATE_PLAYING);
2827
return true;
2828
}
2829
} else
2830
GST_INFO_OBJECT(pipeline(), "Not allowed to load new media location: %s", newUrl.string().utf8().data());
2831
}
2832
m_mediaLocationCurrentIndex--;
2833
return false;
2834
}
2835
2836
// End-of-stream handler: reconciles duration with the final position, marks
// the stream ended and, unless the element is looping, tears the pipeline
// down to READY to release decoding resources.
void MediaPlayerPrivateGStreamer::didEnd()
{
    GST_INFO_OBJECT(pipeline(), "Playback ended");

    // Synchronize position and duration values to not confuse the
    // HTMLMediaElement. In some cases like reverse playback the
    // position is not always reported as 0 for instance.
    m_cachedPosition = MediaTime::invalidTime();
    MediaTime now = currentMediaTime();
    if (now > MediaTime::zeroTime() && !m_isSeeking) {
        m_cachedDuration = now;
        m_player->durationChanged();
    }

    m_isEndReached = true;

    if (!m_player->isLooping()) {
        m_isPaused = true;
        changePipelineState(GST_STATE_READY);
        m_didDownloadFinish = false;

#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
        // Tell the external video plane display that no more frames will come.
        wpe_video_plane_display_dmabuf_source_end_of_stream(m_wpeVideoPlaneDisplayDmaBuf.get());
#endif
    }
    // Notify the client so the 'ended' event can be fired.
    timeChanged();
}
2863
2864
// Fills 'types' with every MIME type the GStreamer registry scanner reports
// as playable with the locally installed plugins.
void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = GStreamerRegistryScanner::singleton().mimeTypeSet();
}
2869
2870
// Answers the engine-selection query: can this backend play the given
// content type? Delegates container/codec checks to the registry scanner and
// lets platform-specific code refine the verdict via extendedSupportsType().
MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
{
    MediaPlayer::SupportsType result = MediaPlayer::SupportsType::IsNotSupported;
#if ENABLE(MEDIA_SOURCE)
    // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
    if (parameters.isMediaSource)
        return result;
#endif

#if !ENABLE(MEDIA_STREAM)
    // Without MediaStream support we cannot handle mediastream:// sources.
    if (parameters.isMediaStream)
        return result;
#endif

    // An empty type can never be declared supported.
    if (parameters.type.isEmpty())
        return result;

    GST_DEBUG("Checking mime-type \"%s\"", parameters.type.raw().utf8().data());
    auto containerType = parameters.type.containerType();
    auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
    if (gstRegistryScanner.isContainerTypeSupported(containerType)) {
        // Spec says we should not return "probably" if the codecs string is empty.
        Vector<String> codecs = parameters.type.codecs();
        result = codecs.isEmpty() ? MediaPlayer::SupportsType::MayBeSupported : (gstRegistryScanner.areAllCodecsSupported(codecs) ? MediaPlayer::SupportsType::IsSupported : MediaPlayer::SupportsType::IsNotSupported);
    }

    // Give port-specific code a chance to override the generic answer.
    auto finalResult = extendedSupportsType(parameters, result);
    GST_DEBUG("Supported: %s", convertEnumerationToString(finalResult).utf8().data());
    return finalResult;
}
2900
2901
// Enables or disables playbin's on-disk download buffering depending on
// whether the stream is live and on the preload attribute, and starts/stops
// the timer that polls the buffering fill level.
void MediaPlayerPrivateGStreamer::updateDownloadBufferingFlag()
{
    if (!m_pipeline)
        return;

    unsigned flags;
    g_object_get(m_pipeline.get(), "flags", &flags, nullptr);

    unsigned flagDownload = getGstPlayFlag("download");

    // We don't want to stop downloading if we already started it.
    if (flags & flagDownload && m_readyState > MediaPlayer::ReadyState::HaveNothing && !m_shouldResetPipeline) {
        GST_DEBUG_OBJECT(pipeline(), "Download already started, not starting again");
        return;
    }

    // Live streams cannot be downloaded; only preload=auto opts in.
    bool shouldDownload = !m_isLiveStream && m_preload == MediaPlayer::Preload::Auto;
    if (shouldDownload) {
        GST_INFO_OBJECT(pipeline(), "Enabling on-disk buffering");
        g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
        // Poll the download progress periodically while buffering to disk.
        m_fillTimer.startRepeating(200_ms);
    } else {
        GST_INFO_OBJECT(pipeline(), "Disabling on-disk buffering");
        g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
        m_fillTimer.stop();
    }
}
2928
2929
// Builds (or rebuilds) the playbin pipeline for the given URL: picks playbin
// vs playbin3, wires bus and signal handlers, installs text/video/audio
// sinks, and configures optional pitch-preservation and rotation filters.
void MediaPlayerPrivateGStreamer::createGSTPlayBin(const URL& url, const String& pipelineName)
{
    const char* playbinName = "playbin";

    // MSE doesn't support playbin3. Mediastream requires playbin3. Regular
    // playback can use playbin3 on-demand with the WEBKIT_GST_USE_PLAYBIN3
    // environment variable.
    if ((!isMediaSource() && g_getenv("WEBKIT_GST_USE_PLAYBIN3")) || url.protocolIs("mediastream"))
        playbinName = "playbin3";

    // If a pipeline already exists, reuse it when the element factory matches;
    // otherwise tear it down so the right variant can be created.
    if (m_pipeline) {
        if (!g_strcmp0(GST_OBJECT_NAME(gst_element_get_factory(m_pipeline.get())), playbinName)) {
            GST_INFO_OBJECT(pipeline(), "Already using %s", playbinName);
            return;
        }

        GST_INFO_OBJECT(pipeline(), "Tearing down as we need to use %s now.", playbinName);
        changePipelineState(GST_STATE_NULL);
        m_pipeline = nullptr;
    }

    ASSERT(!m_pipeline);

    m_isLegacyPlaybin = !g_strcmp0(playbinName, "playbin");

    // Give each pipeline a unique name unless the caller provided one.
    static Atomic<uint32_t> pipelineId;
    setPipeline(gst_element_factory_make(playbinName,
        (pipelineName.isEmpty() ? makeString("media-player-", pipelineId.exchangeAdd(1)) : pipelineName).utf8().data()));
    setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));

    GST_INFO_OBJECT(pipeline(), "Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin));

    // Let also other listeners subscribe to (application) messages in this bus.
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
    g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);

    g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);

    // Detect the hardware decoder platform by watching elements added inside
    // decodebin; this influences texture-mapper flags below.
    g_signal_connect(GST_BIN_CAST(m_pipeline.get()), "deep-element-added", G_CALLBACK(+[](GstBin*, GstBin* subBin, GstElement* element, MediaPlayerPrivateGStreamer* player) {
        GUniquePtr<char> binName(gst_element_get_name(GST_ELEMENT_CAST(subBin)));
        if (!g_str_has_prefix(binName.get(), "decodebin"))
            return;

        GUniquePtr<char> elementName(gst_element_get_name(element));
        if (g_str_has_prefix(elementName.get(), "v4l2"))
            player->m_videoDecoderPlatform = GstVideoDecoderPlatform::Video4Linux;
        else if (g_str_has_prefix(elementName.get(), "imxvpudec"))
            player->m_videoDecoderPlatform = GstVideoDecoderPlatform::ImxVPU;

#if USE(TEXTURE_MAPPER_GL)
        player->updateTextureMapperFlags();
#endif
    }), this);

    g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
    // playbin3 reports track changes through stream collections instead of
    // these legacy signals.
    if (m_isLegacyPlaybin) {
        g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
        g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
    }

#if ENABLE(VIDEO_TRACK)
    if (m_isLegacyPlaybin)
        g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);

    GstElement* textCombiner = webkitTextCombinerNew();
    ASSERT(textCombiner);
    g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);

    m_textAppSink = webkitTextSinkNew();
    ASSERT(m_textAppSink);

    m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
    ASSERT(m_textAppSinkPad);

    // The VTT caps name changed in GStreamer 1.14.
    GRefPtr<GstCaps> textCaps;
    if (webkitGstCheckVersion(1, 14, 0))
        textCaps = adoptGRef(gst_caps_new_empty_simple("application/x-subtitle-vtt"));
    else
        textCaps = adoptGRef(gst_caps_new_empty_simple("text/vtt"));
    g_object_set(m_textAppSink.get(), "emit-signals", TRUE, "enable-last-sample", FALSE, "caps", textCaps.get(), nullptr);
    g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);

    g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
#endif

    g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);

    configurePlaySink();

    // scaletempo keeps audio pitch constant at non-1x playback rates.
    if (m_shouldPreservePitch) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);

        if (!scale)
            GST_WARNING("Failed to create scaletempo");
        else
            g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
    }

    if (!m_canRenderingBeAccelerated) {
        // If not using accelerated compositing, let GStreamer handle
        // the image-orientation tag.
        GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
        if (videoFlip) {
            // method=8 is "automatic", rotating based on the stream's tag.
            g_object_set(videoFlip, "method", 8, nullptr);
            g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
        } else
            GST_WARNING("The videoflip element is missing, video rotation support is now disabled. Please check your gst-plugins-good installation.");
    }

    // Track caps changes on the video sink to keep reported dimensions fresh.
    GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
    if (videoSinkPad)
        g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
}
3043
3044
bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
3045
{
3046
if (WEBKIT_IS_WEB_SRC(m_source.get()))
3047
return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC_CAST(m_source.get()));
3048
return false;
3049
}
3050
3051
bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
3052
{
3053
if (m_isLiveStream)
3054
return false;
3055
3056
if (m_url.isLocalFile())
3057
return true;
3058
3059
if (m_url.protocolIsInHTTPFamily())
3060
return true;
3061
3062
return false;
3063
}
3064
3065
// Timer callback: the pipeline has sat in READY for too long, so drop to
// NULL to free decoder/hardware resources held by the pipeline.
void MediaPlayerPrivateGStreamer::readyTimerFired()
{
    GST_DEBUG_OBJECT(pipeline(), "In READY for too long. Releasing pipeline resources.");
    changePipelineState(GST_STATE_NULL);
}
3070
3071
void MediaPlayerPrivateGStreamer::acceleratedRenderingStateChanged()
3072
{
3073
m_canRenderingBeAccelerated = m_player && m_player->acceleratedCompositingEnabled();
3074
}
3075
3076
#if USE(TEXTURE_MAPPER_GL)
3077
// Returns the platform layer the compositor should use for this video:
// the Nicosia content layer when built with Nicosia, otherwise the player
// object itself acts as the layer.
PlatformLayer* MediaPlayerPrivateGStreamer::platformLayer() const
{
#if USE(NICOSIA)
    return m_nicosiaLayer.ptr();
#else
    return const_cast<MediaPlayerPrivateGStreamer*>(this);
#endif
}
3085
3086
#if USE(NICOSIA)
3087
// Nicosia build: compositor requests a buffer swap. Only the hole-punch
// configuration has anything to push here; GL rendering pushes buffers from
// triggerRepaint() instead.
void MediaPlayerPrivateGStreamer::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
3093
#else
3094
// Non-Nicosia build: expose the texture-mapper proxy used to hand video
// buffers to the compositor thread.
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamer::proxy() const
{
    return m_platformLayerProxy.copyRef();
}
3098
3099
// Non-Nicosia build: compositor requests a buffer swap; see the Nicosia
// variant above — only hole-punch rendering has work to do here.
void MediaPlayerPrivateGStreamer::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
3105
#endif
3106
3107
#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
3108
// Hole-punch client that owns a decoded video frame and, when the compositor
// tells us the final on-screen video rectangle, hands the frame's dmabuf off
// to the external WPE video plane display at that position.
class GStreamerDMABufHolePunchClient : public TextureMapperPlatformLayerBuffer::HolePunchClient {
public:
    GStreamerDMABufHolePunchClient(std::unique_ptr<GstVideoFrameHolder>&& frameHolder, struct wpe_video_plane_display_dmabuf_source* videoPlaneDisplayDmaBufSource)
        : m_frameHolder(WTFMove(frameHolder))
        , m_wpeVideoPlaneDisplayDmaBuf(videoPlaneDisplayDmaBufSource) { };
    // Called by the compositor with the video's placement in screen space.
    void setVideoRectangle(const IntRect& rect) final
    {
        if (m_wpeVideoPlaneDisplayDmaBuf)
            m_frameHolder->handoffVideoDmaBuf(m_wpeVideoPlaneDisplayDmaBuf, rect);
    }
private:
    std::unique_ptr<GstVideoFrameHolder> m_frameHolder;
    // Non-owning; lifetime managed by MediaPlayerPrivateGStreamer.
    struct wpe_video_plane_display_dmabuf_source* m_wpeVideoPlaneDisplayDmaBuf;
};
3122
#endif // USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
3123
3124
// Runs on the compositor thread: wraps the current video sample in a
// texture-mapper layer buffer and pushes it to the platform layer proxy.
// Depending on the frame, this either reuses already-mapped GL textures,
// uploads into a pooled texture, or (WPE dmabuf builds) defers to a
// hole-punch client that hands the dmabuf to the video plane display.
void MediaPlayerPrivateGStreamer::pushTextureToCompositor()
{
    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    auto internalCompositingOperation = [this](TextureMapperPlatformLayerProxy& proxy, std::unique_ptr<GstVideoFrameHolder>&& frameHolder) {
        std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
        if (frameHolder->hasMappedTextures()) {
            // Zero-copy path: the frame already exposes GL textures.
            layerBuffer = frameHolder->platformLayerBuffer();
            if (!layerBuffer)
                return;
            // Keep the frame alive as long as the buffer references its textures.
            layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
        } else {
            // Upload path: copy the frame into a (possibly pooled) texture.
            layerBuffer = proxy.getAvailableBuffer(frameHolder->size(), GL_DONT_CARE);
            if (UNLIKELY(!layerBuffer)) {
                auto texture = BitmapTextureGL::create(TextureMapperContextAttributes::get());
                texture->reset(frameHolder->size(), frameHolder->hasAlphaChannel() ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
                layerBuffer = makeUnique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
            }
            frameHolder->updateTexture(layerBuffer->textureGL());
            layerBuffer->setExtraFlags(m_textureMapperFlags | (frameHolder->hasAlphaChannel() ? TextureMapperGL::ShouldBlend : 0));
        }
        proxy.pushNextBuffer(WTFMove(layerBuffer));
    };

#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
    auto proxyOperation =
        [this, internalCompositingOperation](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());

            if (!proxy.isActive())
                return;

            auto frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_videoDecoderPlatform, m_textureMapperFlags, !m_isUsingFallbackVideoSink);
            if (frameHolder->hasDMABuf()) {
                // dmabuf frames bypass GL entirely: push a transparent
                // hole-punch buffer and let the video plane display scan out
                // the frame directly.
                auto layerBuffer = makeUnique<TextureMapperPlatformLayerBuffer>(0, m_size, TextureMapperGL::ShouldNotBlend, GL_DONT_CARE);
                auto holePunchClient = makeUnique<GStreamerDMABufHolePunchClient>(WTFMove(frameHolder), m_wpeVideoPlaneDisplayDmaBuf.get());
                layerBuffer->setHolePunchClient(WTFMove(holePunchClient));
                proxy.pushNextBuffer(WTFMove(layerBuffer));
            } else
                internalCompositingOperation(proxy, WTFMove(frameHolder));
        };
#else
    auto proxyOperation =
        [this, internalCompositingOperation](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());

            if (!proxy.isActive())
                return;

            auto frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_videoDecoderPlatform, m_textureMapperFlags, !m_isUsingFallbackVideoSink);
            internalCompositingOperation(proxy, WTFMove(frameHolder));
        };
#endif // USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
3188
#endif // USE(TEXTURE_MAPPER_GL)
3189
3190
// Main-thread paint step of the non-accelerated path: asks the media element
// to repaint, then releases the GStreamer streaming thread blocked in
// triggerRepaint() waiting on m_drawCondition.
void MediaPlayerPrivateGStreamer::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
}
3200
3201
// Called from the video sink (streaming thread) with each new sample. Stores
// the sample and routes it to the right rendering path: synchronous
// main-thread painting when compositing is not accelerated, otherwise a push
// to the compositor thread (blocking for the fallback sink, direct for GL).
void MediaPlayerPrivateGStreamer::triggerRepaint(GstSample* sample)
{
    bool shouldTriggerResize;
    {
        auto sampleLocker = holdLock(m_sampleMutex);
        // The very first sample establishes the video dimensions.
        shouldTriggerResize = !m_sample;
        m_sample = sample;
    }

    if (shouldTriggerResize) {
        GST_DEBUG_OBJECT(pipeline(), "First sample reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] {
            m_player->sizeChanged();
        });
    }

    if (!m_canRenderingBeAccelerated) {
        LockHolder locker(m_drawMutex);
        if (m_isBeingDestroyed)
            return;
        // Schedule a main-thread repaint and block until it has been painted
        // (repaint() / cancelRepaint() signal m_drawCondition).
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
    if (m_isUsingFallbackVideoSink) {
        // Fallback sink: hand the sample to the compositor thread and wait for
        // the upload so the raw buffer isn't recycled while in use.
        LockHolder lock(m_drawMutex);
        auto proxyOperation =
            [this](TextureMapperPlatformLayerProxy& proxy)
            {
                return proxy.scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); });
            };
#if USE(NICOSIA)
        if (!proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy()))
            return;
#else
        if (!proxyOperation(*m_platformLayerProxy))
            return;
#endif
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
    } else
        pushTextureToCompositor();
#endif // USE(TEXTURE_MAPPER_GL)
}
3247
3248
// Static trampoline connected to the video sink's repaint signal; forwards
// the new sample to the instance.
void MediaPlayerPrivateGStreamer::repaintCallback(MediaPlayerPrivateGStreamer* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}
3252
3253
// Unblocks a pending synchronous repaint. 'destroying' is true when called
// from teardown, in which case future triggerRepaint() calls must not wait.
void MediaPlayerPrivateGStreamer::cancelRepaint(bool destroying)
{
    // The goal of this function is to release the GStreamer thread from m_drawCondition in triggerRepaint() in non-AC case,
    // to avoid a deadlock if the player gets paused while waiting for drawing (see https://bugs.webkit.org/show_bug.cgi?id=170003):
    // the main thread is waiting for the GStreamer thread to pause, but the GStreamer thread is locked waiting for the
    // main thread to draw. This deadlock doesn't happen when using AC because the sample is processed (not painted) in the compositor
    // thread, so the main thread can request the pause and wait if the GStreamer thread is waiting for the compositor thread.
    //
    // This function is also used when destroying the player (destroying parameter is true), to release the gstreamer thread from
    // m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
    if (!m_canRenderingBeAccelerated) {
        LockHolder locker(m_drawMutex);
        m_drawTimer.stop();
        m_isBeingDestroyed = destroying;
        m_drawCondition.notifyOne();
    }
}
3270
3271
// Static trampoline connected to the video sink's repaint-cancelled signal;
// releases any thread blocked waiting for a paint.
void MediaPlayerPrivateGStreamer::repaintCancelledCallback(MediaPlayerPrivateGStreamer* player)
{
    player->cancelRepaint();
}
3275
3276
#if USE(GSTREAMER_GL)
3277
// Handles a FLUSH/DRAIN on the video path: drops the current decoded buffer
// (keeping a caps-only sample so dimensions remain queryable) and tells the
// compositor proxy to drop its buffer while preserving the texture.
void MediaPlayerPrivateGStreamer::flushCurrentBuffer()
{
    auto sampleLocker = holdLock(m_sampleMutex);

    if (m_sample) {
        // Replace by a new sample having only the caps, so this dummy sample is still useful to get the dimensions.
        // This prevents resizing problems when the video changes its quality and a DRAIN is performed.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
            gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
    }

    // V4L2 decoders need the drop to complete synchronously before the flush
    // returns, in which case we must not hold the proxy lock while waiting.
    bool shouldWait = m_videoDecoderPlatform == GstVideoDecoderPlatform::Video4Linux;
    auto proxyOperation = [shouldWait, pipeline = pipeline()](TextureMapperPlatformLayerProxy& proxy) {
        GST_DEBUG_OBJECT(pipeline, "Flushing video sample %s", shouldWait ? "synchronously" : "");
        LockHolder locker(!shouldWait ? &proxy.lock() : nullptr);

        if (proxy.isActive())
            proxy.dropCurrentBufferWhilePreservingTexture(shouldWait);
    };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
3304
#endif
3305
3306
// Stores the presentation size requested by the client; consulted when
// pushing hole-punch buffers to the compositor.
void MediaPlayerPrivateGStreamer::setSize(const IntSize& size)
{
    m_size = size;
}
3310
3311
// Software paint path: draws the current video sample into the given
// graphics context. On GL builds, non-RGB samples are first converted to an
// RGBA-family format with a lazily-created GstGLColorConvert.
void MediaPlayerPrivateGStreamer::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

#if USE(GSTREAMER_GL)
    // Ensure the input is RGBA. We handle YUV video natively, so we need to do
    // this conversion on-demand here.
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    if (UNLIKELY(!GST_IS_BUFFER(buffer)))
        return;

    GstCaps* caps = gst_sample_get_caps(m_sample.get());

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return;

    if (!GST_VIDEO_INFO_IS_RGB(&videoInfo)) {
        // Lazily create the converter using the GL context of the sample's
        // own GL memory.
        if (!m_colorConvert) {
            GstMemory* mem = gst_buffer_peek_memory(buffer, 0);
            GstGLContext* context = ((GstGLBaseMemory*)mem)->context;
            m_colorConvert = adoptGRef(gst_gl_color_convert_new(context));
        }

        // (Re)configure the converter whenever the input caps change.
        if (!m_colorConvertInputCaps || !gst_caps_is_equal(m_colorConvertInputCaps.get(), caps)) {
            m_colorConvertInputCaps = caps;
            m_colorConvertOutputCaps = adoptGRef(gst_caps_copy(caps));
            // Pick the byte-order-appropriate opaque format; keep alpha only
            // when the source has it.
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
            const char* formatString = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? "RGBA" : "BGRx";
#else
            const char* formatString = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? "RGBA" : "RGBx";
#endif
            gst_caps_set_simple(m_colorConvertOutputCaps.get(), "format", G_TYPE_STRING, formatString, nullptr);
            if (!gst_gl_color_convert_set_caps(m_colorConvert.get(), caps, m_colorConvertOutputCaps.get()))
                return;
        }

        GRefPtr<GstBuffer> rgbBuffer = adoptGRef(gst_gl_color_convert_perform(m_colorConvert.get(), buffer));
        if (UNLIKELY(!GST_IS_BUFFER(rgbBuffer.get())))
            return;

        // Replace the cached sample with the converted one so subsequent
        // paints don't redo the conversion.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        m_sample = adoptGRef(gst_sample_new(rgbBuffer.get(), m_colorConvertOutputCaps.get(),
            gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
    }
#endif

    auto gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    context.drawImage(gstImage->image(), rect, gstImage->rect(), { CompositeOperator::Copy, m_canRenderingBeAccelerated ? m_videoSourceOrientation : ImageOrientation() });
}
3373
3374
#if USE(GSTREAMER_GL)
3375
// Copies the current video frame into a caller-provided GL texture, applying
// orientation and color conversion via VideoTextureCopierGStreamer.
// Returns false when the frame is unavailable or premultiplied alpha is
// requested (unsupported by the copier path).
bool MediaPlayerPrivateGStreamer::copyVideoTextureToPlatformTexture(GraphicsContextGLOpenGL* context, PlatformGLObject outputTexture, GCGLenum outputTarget, GCGLint level, GCGLenum internalFormat, GCGLenum format, GCGLenum type, bool premultiplyAlpha, bool flipY)
{
    UNUSED_PARAM(context);

    if (m_isUsingFallbackVideoSink)
        return false;

    if (premultiplyAlpha)
        return false;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return false;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_videoDecoderPlatform, m_textureMapperFlags, true);

    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = frameHolder->platformLayerBuffer();
    if (!layerBuffer)
        return false;

    // A 90/270-degree source orientation swaps width and height.
    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = makeUnique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    frameHolder->waitForCPUSync();

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(*layerBuffer.get(), size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
}
3408
// Returns a cairo-GL surface wrapping the current video frame, or nullptr when
// accelerated 2D canvas support is unavailable or no frame can be produced.
NativeImagePtr MediaPlayerPrivateGStreamer::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    if (m_isUsingFallbackVideoSink)
        return nullptr;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_videoDecoderPlatform, m_textureMapperFlags, true);

    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = frameHolder->platformLayerBuffer();
    if (!layerBuffer)
        return nullptr;

    // A 90/270-degree source orientation swaps width and height.
    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = makeUnique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    frameHolder->waitForCPUSync();

    // Copy into the copier's internal texture, then expose it as a cairo surface.
    if (!m_videoTextureCopier->copyVideoTextureToPlatformTexture(*layerBuffer.get(), size, 0, GL_TEXTURE_2D, 0, GL_RGBA, GL_RGBA, GL_UNSIGNED_BYTE, false, m_videoSourceOrientation))
        return nullptr;

    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
#endif // USE(GSTREAMER_GL)
3446
3447
// Stores the source orientation and, on the texture-mapper path, refreshes the
// rotation flags derived from it. No-op when the orientation is unchanged.
void MediaPlayerPrivateGStreamer::setVideoSourceOrientation(ImageOrientation orientation)
{
    if (m_videoSourceOrientation == orientation)
        return;

    m_videoSourceOrientation = orientation;
#if USE(TEXTURE_MAPPER_GL)
    updateTextureMapperFlags();
#endif
}
3458
#if USE(TEXTURE_MAPPER_GL)
3459
void MediaPlayerPrivateGStreamer::updateTextureMapperFlags()
3460
{
3461
switch (m_videoSourceOrientation) {
3462
case ImageOrientation::OriginTopLeft:
3463
m_textureMapperFlags = 0;
3464
break;
3465
case ImageOrientation::OriginRightTop:
3466
m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture90;
3467
break;
3468
case ImageOrientation::OriginBottomRight:
3469
m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture180;
3470
break;
3471
case ImageOrientation::OriginLeftBottom:
3472
m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture270;
3473
break;
3474
default:
3475
// FIXME: Handle OriginTopRight, OriginBottomLeft, OriginLeftTop and OriginRightBottom?
3476
m_textureMapperFlags = 0;
3477
break;
3478
}
3479
}
3480
#endif
3481
3482
bool MediaPlayerPrivateGStreamer::supportsFullscreen() const
3483
{
3484
return true;
3485
}
3486
3487
// Classifies the current media: Unknown before any metadata is available,
// LiveStream for non-seekable live sources, Download otherwise.
MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamer::movieLoadType() const
{
    if (m_readyState == MediaPlayer::ReadyState::HaveNothing)
        return MediaPlayer::MovieLoadType::Unknown;

    if (m_isLiveStream)
        return MediaPlayer::MovieLoadType::LiveStream;

    return MediaPlayer::MovieLoadType::Download;
}
3498
#if USE(GSTREAMER_GL)
3499
// Creates the accelerated "webkitglvideosink" element, wiring it back to this
// player. Returns nullptr (with a warning) when the GL sink's runtime
// dependencies are missing, so callers can fall back to the software sink.
GstElement* MediaPlayerPrivateGStreamer::createVideoSinkGL()
{
    if (!webKitGLVideoSinkProbePlatform()) {
        g_warning("WebKit wasn't able to find the GL video sink dependencies. Hardware-accelerated zero-copy video rendering can't be enabled without this plugin.");
        return nullptr;
    }

    GstElement* sink = gst_element_factory_make("webkitglvideosink", nullptr);
    ASSERT(sink);
    webKitGLVideoSinkSetMediaPlayerPrivate(WEBKIT_GL_VIDEO_SINK(sink), this);
    return sink;
}
#endif // USE(GSTREAMER_GL)
3512
3513
#if USE(GSTREAMER_HOLEPUNCH)
3514
// Platform hook for positioning the hole-punch video window. The default
// implementation intentionally does nothing; ports override this with
// platform-dependent code to set the rendering window size and position.
static void setRectangleToVideoSink(GstElement* videoSink, const IntRect& rect)
{
    UNUSED_PARAM(videoSink);
    UNUSED_PARAM(rect);
}
3522
class GStreamerHolePunchClient : public TextureMapperPlatformLayerBuffer::HolePunchClient {
3523
public:
3524
GStreamerHolePunchClient(GRefPtr<GstElement>&& videoSink) : m_videoSink(WTFMove(videoSink)) { };
3525
void setVideoRectangle(const IntRect& rect) final { setRectangleToVideoSink(m_videoSink.get(), rect); }
3526
private:
3527
GRefPtr<GstElement> m_videoSink;
3528
};
3529
3530
// Creates the sink used in hole-punch mode. The default is a fakevideosink so
// nothing is drawn into the page; ports replace this with a platform sink that
// renders into the punched hole.
GstElement* MediaPlayerPrivateGStreamer::createHolePunchVideoSink()
{
    GstElement* videoSink = gst_element_factory_make("fakevideosink", nullptr);

    return videoSink;
}
3539
void MediaPlayerPrivateGStreamer::pushNextHolePunchBuffer()
3540
{
3541
auto proxyOperation =
3542
[this](TextureMapperPlatformLayerProxy& proxy)
3543
{
3544
LockHolder holder(proxy.lock());
3545
std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = makeUnique<TextureMapperPlatformLayerBuffer>(0, m_size, TextureMapperGL::ShouldNotBlend, GL_DONT_CARE);
3546
std::unique_ptr<GStreamerHolePunchClient> holePunchClient = makeUnique<GStreamerHolePunchClient>(m_videoSink.get());
3547
layerBuffer->setHolePunchClient(WTFMove(holePunchClient));
3548
proxy.pushNextBuffer(WTFMove(layerBuffer));
3549
};
3550
3551
#if USE(NICOSIA)
3552
proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
3553
#else
3554
proxyOperation(*m_platformLayerProxy);
3555
#endif
3556
}
3557
#endif
3558
3559
// Builds the pipeline's video sink. Priority order: hole-punch sink (if
// enabled), accelerated GL sink, then the software fallback sink wired to the
// repaint callbacks. On GStreamer < 1.17 the chosen sink is additionally
// wrapped in fpsdisplaysink so playback-quality frame counters are available.
GstElement* MediaPlayerPrivateGStreamer::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_HOLEPUNCH)
    m_videoSink = createHolePunchVideoSink();
    pushNextHolePunchBuffer();
    return m_videoSink.get();
#endif

#if USE(GSTREAMER_GL)
    if (m_canRenderingBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    if (!m_videoSink) {
        m_isUsingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
    // From 1.17 on, frame statistics come from the sink itself ("stats"
    // property), so fpsdisplaysink is only needed for older GStreamer.
    if (!webkitGstCheckVersion(1, 17, 0)) {
        m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
        if (m_fpsSink) {
            g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);

            // Turn off text overlay unless tracing is enabled.
            if (gst_debug_category_get_threshold(webkit_media_player_debug) < GST_LEVEL_TRACE)
                g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);

            if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
                g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
                videoSink = m_fpsSink.get();
            } else
                m_fpsSink = nullptr;
        }
    }

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);
    return videoSink;
}
3606
// Adopts the pipeline's GstStreamVolume element, pushes the player's volume
// (unless the platform configures volume itself) and mute state to it, and
// subscribes to its notifications so external changes reach the player.
void MediaPlayerPrivateGStreamer::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for details.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG_OBJECT(pipeline(), "Setting stream volume to %f", m_player->volume());
        gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR, static_cast<double>(m_player->volume()));
    } else
        GST_DEBUG_OBJECT(pipeline(), "Not setting stream volume, trusting system one");

    GST_DEBUG_OBJECT(pipeline(), "Setting stream muted %s", toString(m_player->muted()).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}
3626
// Collects rendered/dropped frame counters: from the sink's "stats" structure
// on GStreamer >= 1.17, otherwise from the fpsdisplaysink wrapper. Corrupted
// frames, frame delay and composited count are not tracked and reported as 0.
Optional<VideoPlaybackQualityMetrics> MediaPlayerPrivateGStreamer::videoPlaybackQualityMetrics()
{
    // On old GStreamer without an fpsdisplaysink there is no data source at all.
    if (!webkitGstCheckVersion(1, 17, 0) && !m_fpsSink)
        return WTF::nullopt;

    uint64_t totalVideoFrames = 0;
    uint64_t droppedVideoFrames = 0;
    if (webkitGstCheckVersion(1, 17, 0)) {
        GUniqueOutPtr<GstStructure> stats;
        g_object_get(m_videoSink.get(), "stats", &stats.outPtr(), nullptr);

        if (!gst_structure_get_uint64(stats.get(), "rendered", &totalVideoFrames))
            return WTF::nullopt;

        if (!gst_structure_get_uint64(stats.get(), "dropped", &droppedVideoFrames))
            return WTF::nullopt;
    } else if (m_fpsSink) {
        unsigned renderedFrames, droppedFrames;
        g_object_get(m_fpsSink.get(), "frames-rendered", &renderedFrames, "frames-dropped", &droppedFrames, nullptr);
        totalVideoFrames = renderedFrames;
        droppedVideoFrames = droppedFrames;
    }

    uint32_t corruptedVideoFrames = 0;
    double totalFrameDelay = 0;
    uint32_t displayCompositedVideoFrames = 0;
    return VideoPlaybackQualityMetrics {
        static_cast<uint32_t>(totalVideoFrames),
        static_cast<uint32_t>(droppedVideoFrames),
        corruptedVideoFrames,
        totalFrameDelay,
        displayCompositedVideoFrames,
    };
}
3661
#if ENABLE(ENCRYPTED_MEDIA)
3662
// Extracts concatenated init data from a GST_MESSAGE_ELEMENT protection
// message and records each event's seqnum so duplicate protection events can
// be skipped later. Runs on a streaming thread; state is guarded by
// m_protectionMutex.
InitData MediaPlayerPrivateGStreamer::parseInitDataFromProtectionMessage(GstMessage* message)
{
    ASSERT(!isMainThread());

    InitData initData;
    {
        LockHolder lock(m_protectionMutex);
        ProtectionSystemEvents protectionSystemEvents(message);
        GST_TRACE_OBJECT(pipeline(), "found %zu protection events, %zu decryptors available", protectionSystemEvents.events().size(), protectionSystemEvents.availableSystems().size());

        for (auto& event : protectionSystemEvents.events()) {
            const char* eventKeySystemId = nullptr;
            GstBuffer* data = nullptr;
            gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);

            initData.append({eventKeySystemId, data});
            m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
        }
    }

    return initData;
}
3685
bool MediaPlayerPrivateGStreamer::waitForCDMAttachment()
3686
{
3687
if (isMainThread()) {
3688
GST_ERROR_OBJECT(pipeline(), "can't block the main thread waiting for a CDM instance");
3689
ASSERT_NOT_REACHED();
3690
return false;
3691
}
3692
3693
GST_INFO_OBJECT(pipeline(), "waiting for a CDM instance");
3694
3695
bool didCDMAttach = false;
3696
{
3697
auto cdmAttachmentLocker = holdLock(m_cdmAttachmentMutex);
3698
didCDMAttach = m_cdmAttachmentCondition.waitFor(m_cdmAttachmentMutex, 4_s, [this]() {
3699
return isCDMAttached();
3700
});
3701
}
3702
3703
return didCDMAttach;
3704
}
3705
3706
// Forwards encountered EME init data from a streaming thread to the player on
// the main run loop. A weak pointer guards against the player being destroyed
// before the dispatched task runs.
void MediaPlayerPrivateGStreamer::initializationDataEncountered(InitData&& initData)
{
    ASSERT(!isMainThread());

    RunLoop::main().dispatch([weakThis = makeWeakPtr(*this), initData = WTFMove(initData)] {
        if (!weakThis)
            return;

        GST_DEBUG("scheduling initializationDataEncountered event of size %zu", initData.payload()->size());
        GST_MEMDUMP("init datas", reinterpret_cast<const uint8_t*>(initData.payload()->data()), initData.payload()->size());
        weakThis->m_player->initializationDataEncountered(initData.payloadContainerType(), initData.payload()->tryCreateArrayBuffer());
    });
}
3720
// Attaches a CDM instance: stores it as the proxy, publishes the CDM proxy to
// the pipeline via a "drm-cdm-proxy" GstContext, and wakes every thread
// blocked in waitForCDMAttachment(). Main-thread only.
void MediaPlayerPrivateGStreamer::cdmInstanceAttached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance == &instance)
        return;

    if (!m_pipeline) {
        GST_ERROR("no pipeline yet");
        ASSERT_NOT_REACHED();
        return;
    }

    m_cdmInstance = reinterpret_cast<CDMInstanceProxy*>(&instance);
    RELEASE_ASSERT(m_cdmInstance);
    m_cdmInstance->setPlayer(m_player);
    m_cdmInstance->setProxy(adoptRef(*new CDMProxyClearKey));

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-proxy", FALSE));
    GstStructure* contextStructure = gst_context_writable_structure(context.get());
    gst_structure_set(contextStructure, "cdm-proxy", G_TYPE_POINTER, m_cdmInstance->proxy().get(), nullptr);
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    GST_DEBUG_OBJECT(m_pipeline.get(), "CDM proxy instance %p dispatched as context", m_cdmInstance->proxy().get());

    LockHolder lock(m_cdmAttachmentMutex);
    // We must notify all waiters, since several demuxers can be simultaneously waiting for a CDM.
    m_cdmAttachmentCondition.notifyAll();
}
3750
// Detaches the current CDM instance and resets the pipeline's "drm-cdm-proxy"
// context to an empty one so decryptors stop using the old proxy.
// Main-thread only; ignores mismatched instances.
void MediaPlayerPrivateGStreamer::cdmInstanceDetached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance != &instance) {
        GST_WARNING("passed CDMInstance %p is different from stored one %p", &instance, m_cdmInstance.get());
        ASSERT_NOT_REACHED();
        return;
    }

    ASSERT(m_pipeline);

    GST_DEBUG_OBJECT(m_pipeline.get(), "detaching CDM instance %p, setting empty context", m_cdmInstance.get());
    m_cdmInstance = nullptr;

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-proxy", FALSE));
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());
}
3769
// MediaPlayer entry point: asserts the instance matches the attached one and
// triggers a decryption attempt through the pipeline.
void MediaPlayerPrivateGStreamer::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ASSERT(m_cdmInstance.get() == &instance);
    GST_TRACE("instance %p, current stored %p", &instance, m_cdmInstance.get());
    attemptToDecryptWithLocalInstance();
}
3776
void MediaPlayerPrivateGStreamer::attemptToDecryptWithLocalInstance()
3777
{
3778
bool wasEventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB, gst_structure_new_empty("attempt-to-decrypt")));
3779
GST_DEBUG("attempting to decrypt, event handled %s", boolForPrinting(wasEventHandled));
3780
}
3781
3782
// Handles a protection event coming from MSE: skips events whose seqnum was
// already processed (recorded under m_protectionMutex by
// parseInitDataFromProtectionMessage), otherwise parses the event's init data
// and reports it to the player.
void MediaPlayerPrivateGStreamer::handleProtectionEvent(GstEvent* event)
{
    {
        LockHolder lock(m_protectionMutex);
        if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
            GST_DEBUG_OBJECT(pipeline(), "event %u already handled", GST_EVENT_SEQNUM(event));
            return;
        }
    }
    GST_DEBUG_OBJECT(pipeline(), "handling event %u from MSE", GST_EVENT_SEQNUM(event));
    const char* eventKeySystemUUID = nullptr;
    GstBuffer* initData = nullptr;
    gst_event_parse_protection(event, &eventKeySystemUUID, &initData, nullptr);
    initializationDataEncountered({eventKeySystemUUID, initData});
}
3798
bool MediaPlayerPrivateGStreamer::waitingForKey() const
3799
{
3800
if (!m_pipeline || !m_cdmInstance)
3801
return false;
3802
3803
return m_cdmInstance->isWaitingForKey();
3804
}
3805
#endif
3806
3807
bool MediaPlayerPrivateGStreamer::supportsKeySystem(const String& keySystem, const String& mimeType)
3808
{
3809
bool result = false;
3810
3811
#if ENABLE(ENCRYPTED_MEDIA)
3812
result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
3813
#endif
3814
3815
GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
3816
return result;
3817
}
3818
3819
// Hook for subclasses/ports to refine a supports-type answer; the base
// implementation returns the precomputed result unchanged.
MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    UNUSED_PARAM(parameters);
    return result;
}
3825
}
3826
3827
#endif // USE(GSTREAMER)