1 | /* |
2 | * Copyright (C) 2010 Igalia S.L |
3 | * |
4 | * This library is free software; you can redistribute it and/or |
5 | * modify it under the terms of the GNU Library General Public |
6 | * License as published by the Free Software Foundation; either |
7 | * version 2 of the License, or (at your option) any later version. |
8 | * |
9 | * This library is distributed in the hope that it will be useful, |
10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
12 | * Library General Public License for more details. |
13 | * |
14 | * You should have received a copy of the GNU Library General Public License |
15 | * along with this library; see the file COPYING.LIB. If not, write to |
16 | * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, |
17 | * Boston, MA 02110-1301, USA. |
18 | */ |
19 | |
20 | #include "config.h" |
21 | #include "ImageGStreamer.h" |
22 | |
23 | #if ENABLE(VIDEO) && USE(GSTREAMER) |
24 | |
25 | #include "GStreamerCommon.h" |
26 | |
27 | #include <cairo.h> |
28 | #include <gst/gst.h> |
29 | #include <gst/video/gstvideometa.h> |
30 | |
31 | |
32 | namespace WebCore { |
33 | |
34 | ImageGStreamer::ImageGStreamer(GstSample* sample) |
35 | { |
36 | GstCaps* caps = gst_sample_get_caps(sample); |
37 | GstVideoInfo videoInfo; |
38 | gst_video_info_init(&videoInfo); |
39 | if (!gst_video_info_from_caps(&videoInfo, caps)) |
40 | return; |
41 | |
42 | // Right now the TextureMapper only supports chromas with one plane |
43 | ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1); |
44 | |
45 | GstBuffer* buffer = gst_sample_get_buffer(sample); |
46 | if (UNLIKELY(!GST_IS_BUFFER(buffer))) |
47 | return; |
48 | |
49 | m_frameMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, GST_MAP_READ); |
50 | if (!m_frameMapped) |
51 | return; |
52 | |
53 | unsigned char* bufferData = reinterpret_cast<unsigned char*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)); |
54 | int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0); |
55 | int width = GST_VIDEO_FRAME_WIDTH(&m_videoFrame); |
56 | int height = GST_VIDEO_FRAME_HEIGHT(&m_videoFrame); |
57 | |
58 | RefPtr<cairo_surface_t> surface; |
59 | cairo_format_t cairoFormat; |
60 | #if G_BYTE_ORDER == G_LITTLE_ENDIAN |
61 | cairoFormat = (GST_VIDEO_FRAME_FORMAT(&m_videoFrame) == GST_VIDEO_FORMAT_BGRA) ? CAIRO_FORMAT_ARGB32 : CAIRO_FORMAT_RGB24; |
62 | #else |
63 | cairoFormat = (GST_VIDEO_FRAME_FORMAT(&m_videoFrame) == GST_VIDEO_FORMAT_ARGB) ? CAIRO_FORMAT_ARGB32 : CAIRO_FORMAT_RGB24; |
64 | #endif |
65 | |
66 | // GStreamer doesn't use premultiplied alpha, but cairo does. So if the video format has an alpha component |
67 | // we need to premultiply it before passing the data to cairo. This needs to be both using gstreamer-gl and not |
68 | // using it. |
69 | // |
70 | // This method could be called several times for the same buffer, for example if we are rendering the video frames |
71 | // in several non accelerated canvases. Due to this, we cannot modify the buffer, so we need to create a copy. |
72 | if (cairoFormat == CAIRO_FORMAT_ARGB32) { |
73 | unsigned char* surfaceData = static_cast<unsigned char*>(fastMalloc(height * stride)); |
74 | unsigned char* surfacePixel = surfaceData; |
75 | |
76 | for (int x = 0; x < width; x++) { |
77 | for (int y = 0; y < height; y++) { |
78 | #if G_BYTE_ORDER == G_LITTLE_ENDIAN |
79 | // Video frames use BGRA in little endian. |
80 | unsigned short alpha = bufferData[3]; |
81 | surfacePixel[0] = (bufferData[0] * alpha + 128) / 255; |
82 | surfacePixel[1] = (bufferData[1] * alpha + 128) / 255; |
83 | surfacePixel[2] = (bufferData[2] * alpha + 128) / 255; |
84 | surfacePixel[3] = alpha; |
85 | #else |
86 | // Video frames use ARGB in big endian. |
87 | unsigned short alpha = bufferData[0]; |
88 | surfacePixel[0] = alpha; |
89 | surfacePixel[1] = (bufferData[1] * alpha + 128) / 255; |
90 | surfacePixel[2] = (bufferData[2] * alpha + 128) / 255; |
91 | surfacePixel[3] = (bufferData[3] * alpha + 128) / 255; |
92 | #endif |
93 | bufferData += 4; |
94 | surfacePixel += 4; |
95 | } |
96 | } |
97 | surface = adoptRef(cairo_image_surface_create_for_data(surfaceData, cairoFormat, width, height, stride)); |
98 | static cairo_user_data_key_t s_surfaceDataKey; |
99 | cairo_surface_set_user_data(surface.get(), &s_surfaceDataKey, surfaceData, [](void* data) { fastFree(data); }); |
100 | } else |
101 | surface = adoptRef(cairo_image_surface_create_for_data(bufferData, cairoFormat, width, height, stride)); |
102 | |
103 | ASSERT(cairo_surface_status(surface.get()) == CAIRO_STATUS_SUCCESS); |
104 | m_image = BitmapImage::create(WTFMove(surface)); |
105 | |
106 | if (GstVideoCropMeta* cropMeta = gst_buffer_get_video_crop_meta(buffer)) |
107 | setCropRect(FloatRect(cropMeta->x, cropMeta->y, cropMeta->width, cropMeta->height)); |
108 | } |
109 | |
110 | ImageGStreamer::~ImageGStreamer() |
111 | { |
112 | if (m_image) |
113 | m_image = nullptr; |
114 | |
115 | // We keep the buffer memory mapped until the image is destroyed because the internal |
116 | // cairo_surface_t was created using cairo_image_surface_create_for_data(). |
117 | if (m_frameMapped) |
118 | gst_video_frame_unmap(&m_videoFrame); |
119 | } |
120 | |
121 | } // namespace WebCore |
122 | |
#endif // ENABLE(VIDEO) && USE(GSTREAMER)
124 | |