1 | /* |
2 | * Copyright (C) 2016 Metrological Group B.V. |
3 | * Copyright (C) 2016, 2017, 2018 Igalia S.L |
4 | * |
5 | * This library is free software; you can redistribute it and/or |
6 | * modify it under the terms of the GNU Library General Public |
7 | * License as published by the Free Software Foundation; either |
8 | * version 2 of the License, or (at your option) any later version. |
9 | * |
10 | * This library is distributed in the hope that it will be useful, |
11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
13 | * Library General Public License for more details. |
14 | * |
15 | * You should have received a copy of the GNU Library General Public License |
 * along with this library; see the file COPYING.LIB. If not, write to
17 | * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, |
18 | * Boston, MA 02110-1301, USA. |
19 | */ |
20 | |
#include "config.h"
#include "MediaSampleGStreamer.h"

#include "GStreamerCommon.h"

#include <algorithm>
#include <utility>
27 | |
28 | #if ENABLE(VIDEO) && USE(GSTREAMER) |
29 | |
30 | namespace WebCore { |
31 | |
32 | MediaSampleGStreamer::MediaSampleGStreamer(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const AtomicString& trackId) |
33 | : m_pts(MediaTime::zeroTime()) |
34 | , m_dts(MediaTime::zeroTime()) |
35 | , m_duration(MediaTime::zeroTime()) |
36 | , m_trackId(trackId) |
37 | , m_presentationSize(presentationSize) |
38 | { |
39 | const GstClockTime minimumDuration = 1000; // 1 us |
40 | ASSERT(sample); |
41 | GstBuffer* buffer = gst_sample_get_buffer(sample.get()); |
42 | RELEASE_ASSERT(buffer); |
43 | |
44 | auto createMediaTime = |
45 | [](GstClockTime time) -> MediaTime { |
46 | return MediaTime(GST_TIME_AS_USECONDS(time), G_USEC_PER_SEC); |
47 | }; |
48 | |
49 | if (GST_BUFFER_PTS_IS_VALID(buffer)) |
50 | m_pts = createMediaTime(GST_BUFFER_PTS(buffer)); |
51 | if (GST_BUFFER_DTS_IS_VALID(buffer) || GST_BUFFER_PTS_IS_VALID(buffer)) |
52 | m_dts = createMediaTime(GST_BUFFER_DTS_OR_PTS(buffer)); |
53 | if (GST_BUFFER_DURATION_IS_VALID(buffer)) { |
54 | // Sometimes (albeit rarely, so far seen only at the end of a track) |
55 | // frames have very small durations, so small that may be under the |
56 | // precision we are working with and be truncated to zero. |
57 | // SourceBuffer algorithms are not expecting frames with zero-duration, |
58 | // so let's use something very small instead in those fringe cases. |
59 | m_duration = createMediaTime(std::max(GST_BUFFER_DURATION(buffer), minimumDuration)); |
60 | } else { |
61 | // Unfortunately, sometimes samples don't provide a duration. This can never happen in MP4 because of the way |
62 | // the format is laid out, but it's pretty common in WebM. |
63 | // The good part is that durations don't matter for playback, just for buffered ranges and coded frame deletion. |
64 | // We want to pick something small enough to not cause unwanted frame deletion, but big enough to never be |
65 | // mistaken for a rounding artifact. |
66 | m_duration = createMediaTime(16666667); // 1/60 seconds |
67 | } |
68 | |
69 | m_size = gst_buffer_get_size(buffer); |
70 | m_sample = sample; |
71 | |
72 | if (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) |
73 | m_flags = MediaSample::None; |
74 | |
75 | if (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY)) |
76 | m_flags = static_cast<MediaSample::SampleFlags>(m_flags | MediaSample::IsNonDisplaying); |
77 | } |
78 | |
// Builds a sample with no backing GstSample and all timestamps zeroed.
// Used by createFakeSample(), which fills in the timing fields afterwards.
MediaSampleGStreamer::MediaSampleGStreamer(const FloatSize& presentationSize, const AtomicString& trackId)
    : m_pts(MediaTime::zeroTime())
    , m_dts(MediaTime::zeroTime())
    , m_duration(MediaTime::zeroTime())
    , m_trackId(trackId)
    , m_presentationSize(presentationSize)
{
}
87 | |
88 | Ref<MediaSampleGStreamer> MediaSampleGStreamer::createFakeSample(GstCaps*, MediaTime pts, MediaTime dts, MediaTime duration, const FloatSize& presentationSize, const AtomicString& trackId) |
89 | { |
90 | MediaSampleGStreamer* gstreamerMediaSample = new MediaSampleGStreamer(presentationSize, trackId); |
91 | gstreamerMediaSample->m_pts = pts; |
92 | gstreamerMediaSample->m_dts = dts; |
93 | gstreamerMediaSample->m_duration = duration; |
94 | gstreamerMediaSample->m_flags = MediaSample::IsNonDisplaying; |
95 | return adoptRef(*gstreamerMediaSample); |
96 | } |
97 | |
98 | void MediaSampleGStreamer::applyPtsOffset(MediaTime timestampOffset) |
99 | { |
100 | if (m_pts > timestampOffset) { |
101 | m_duration = m_duration + (m_pts - timestampOffset); |
102 | m_pts = timestampOffset; |
103 | } |
104 | } |
105 | |
106 | void MediaSampleGStreamer::offsetTimestampsBy(const MediaTime& timestampOffset) |
107 | { |
108 | if (!timestampOffset) |
109 | return; |
110 | m_pts += timestampOffset; |
111 | m_dts += timestampOffset; |
112 | if (auto* buffer = gst_sample_get_buffer(m_sample.get())) { |
113 | GST_BUFFER_PTS(buffer) = toGstClockTime(m_pts); |
114 | GST_BUFFER_DTS(buffer) = toGstClockTime(m_dts); |
115 | } |
116 | } |
117 | |
118 | PlatformSample MediaSampleGStreamer::platformSample() |
119 | { |
120 | PlatformSample sample = { PlatformSample::GStreamerSampleType, { .gstSample = m_sample.get() } }; |
121 | return sample; |
122 | } |
123 | |
124 | Ref<MediaSample> MediaSampleGStreamer::createNonDisplayingCopy() const |
125 | { |
126 | if (!m_sample) |
127 | return createFakeSample(nullptr, m_pts, m_dts, m_duration, m_presentationSize, m_trackId); |
128 | |
129 | GstBuffer* buffer = gst_sample_get_buffer(m_sample.get()); |
130 | GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY); |
131 | |
132 | GstCaps* caps = gst_sample_get_caps(m_sample.get()); |
133 | GstSegment* segment = gst_sample_get_segment(m_sample.get()); |
134 | const GstStructure* originalInfo = gst_sample_get_info(m_sample.get()); |
135 | GstStructure* info = originalInfo ? gst_structure_copy(originalInfo) : nullptr; |
136 | GRefPtr<GstSample> sample = adoptGRef(gst_sample_new(buffer, caps, segment, info)); |
137 | |
138 | return adoptRef(*new MediaSampleGStreamer(sample.get(), m_presentationSize, m_trackId)); |
139 | } |
140 | |
141 | void MediaSampleGStreamer::dump(PrintStream& out) const |
142 | { |
143 | out.print("{PTS(" , presentationTime(), "), DTS(" , decodeTime(), "), duration(" , duration(), "), flags(" ); |
144 | |
145 | bool anyFlags = false; |
146 | auto appendFlag = [&out, &anyFlags](const char* flagName) { |
147 | if (anyFlags) |
148 | out.print("," ); |
149 | out.print(flagName); |
150 | anyFlags = true; |
151 | }; |
152 | |
153 | if (flags() & MediaSample::IsSync) |
154 | appendFlag("sync" ); |
155 | if (flags() & MediaSample::IsNonDisplaying) |
156 | appendFlag("non-displaying" ); |
157 | if (flags() & MediaSample::HasAlpha) |
158 | appendFlag("has-alpha" ); |
159 | if (flags() & ~(MediaSample::IsSync | MediaSample::IsNonDisplaying | MediaSample::HasAlpha)) |
160 | appendFlag("unknown-flag" ); |
161 | |
162 | out.print("), trackId(" , trackID().string(), "), presentationSize(" , presentationSize().width(), "x" , presentationSize().height(), ")}" ); |
163 | } |
164 | |
165 | } // namespace WebCore. |
166 | |
167 | #endif // ENABLE(VIDEO) && USE(GSTREAMER) |
168 | |