/*
 * Copyright (C) 2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "InspectorCanvas.h"

#include "AffineTransform.h"
#include "CachedImage.h"
#include "CanvasGradient.h"
#include "CanvasPattern.h"
#include "CanvasRenderingContext.h"
#include "CanvasRenderingContext2D.h"
#include "Document.h"
#include "FloatPoint.h"
#include "Gradient.h"
#include "HTMLCanvasElement.h"
#include "HTMLImageElement.h"
#include "HTMLVideoElement.h"
#include "Image.h"
#include "ImageBitmap.h"
#include "ImageBitmapRenderingContext.h"
#include "ImageBuffer.h"
#include "ImageData.h"
#include "InspectorDOMAgent.h"
#include "JSCanvasDirection.h"
#include "JSCanvasFillRule.h"
#include "JSCanvasLineCap.h"
#include "JSCanvasLineJoin.h"
#include "JSCanvasTextAlign.h"
#include "JSCanvasTextBaseline.h"
#include "JSExecState.h"
#include "JSImageSmoothingQuality.h"
#include "Path2D.h"
#include "Pattern.h"
#include "RecordingSwizzleTypes.h"
#include "SVGPathUtilities.h"
#include "StringAdaptors.h"
#if ENABLE(CSS_TYPED_OM)
#include "TypedOMCSSImageValue.h"
#endif
#if ENABLE(WEBGL)
#include "WebGLRenderingContext.h"
#endif
#if ENABLE(WEBGL2)
#include "WebGL2RenderingContext.h"
#endif
#if ENABLE(WEBGPU)
#include "GPUCanvasContext.h"
#endif
#include <JavaScriptCore/IdentifiersFactory.h>
#include <JavaScriptCore/ScriptCallStackFactory.h>

namespace WebCore {

using namespace Inspector;

Ref<InspectorCanvas> InspectorCanvas::create(CanvasRenderingContext& context)
{
    return adoptRef(*new InspectorCanvas(context));
}

InspectorCanvas::InspectorCanvas(CanvasRenderingContext& context)
    : m_identifier("canvas:" + IdentifiersFactory::createIdentifier())
    , m_context(context)
{
}

HTMLCanvasElement* InspectorCanvas::canvasElement()
{
    if (is<HTMLCanvasElement>(m_context.canvasBase()))
        return &downcast<HTMLCanvasElement>(m_context.canvasBase());
    return nullptr;
}

void InspectorCanvas::canvasChanged()
{
    if (!m_context.callTracingActive())
        return;

    // Since 2D contexts can be fully reproduced in the frontend, we don't need snapshots.
    if (is<CanvasRenderingContext2D>(m_context))
        return;

    m_contentChanged = true;
}

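// Clears all accumulated recording state and turns off call tracing on the context so a new recording starts from scratch.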
void InspectorCanvas::resetRecordingData()
{
    m_initialState = nullptr;
    m_frames = nullptr;
    m_currentActions = nullptr;
    m_serializedDuplicateData = nullptr;
    m_indexedDuplicateData.clear();
    m_recordingName = { };
    m_bufferLimit = 100 * 1024 * 1024;
    m_bufferUsed = 0;
    m_frameCount = WTF::nullopt;
    m_framesCaptured = 0;
    m_contentChanged = false;

    m_context.setCallTracingActive(false);
}

bool InspectorCanvas::hasRecordingData() const
{
    return m_bufferUsed > 0;
}

bool InspectorCanvas::currentFrameHasData() const
{
    return !!m_frames;
}

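// These helpers identify the actions that can change the visible contents of a non-2D context and therefore warrant a follow-up snapshot.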
static bool shouldSnapshotBitmapRendererAction(const String& name)
{
    return name == "transferFromImageBitmap";
}

#if ENABLE(WEBGL)
static bool shouldSnapshotWebGLAction(const String& name)
{
    return name == "clear"
        || name == "drawArrays"
        || name == "drawElements";
}
#endif

#if ENABLE(WEBGL2)
static bool shouldSnapshotWebGL2Action(const String& name)
{
    return name == "clear"
        || name == "drawArrays"
        || name == "drawArraysInstanced"
        || name == "drawElements"
        || name == "drawElementsInstanced";
}
#endif

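// Appends a single canvas API call to the recording, lazily building the initial state payload and the current frame's action list.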
void InspectorCanvas::recordAction(const String& name, Vector<RecordCanvasActionVariant>&& parameters)
{
    if (!m_initialState) {
        // We should only construct the initial state for the first action of the recording.
        ASSERT(!m_frames && !m_currentActions);

        m_initialState = buildInitialState();
        m_bufferUsed += m_initialState->memoryCost();
    }

    if (!m_frames)
        m_frames = JSON::ArrayOf<Inspector::Protocol::Recording::Frame>::create();

    if (!m_currentActions) {
        m_currentActions = JSON::ArrayOf<JSON::Value>::create();

        auto frame = Inspector::Protocol::Recording::Frame::create()
            .setActions(m_currentActions)
            .release();

        m_frames->addItem(WTFMove(frame));
        ++m_framesCaptured;

        m_currentFrameStartTime = MonotonicTime::now();
    }

    appendActionSnapshotIfNeeded();

    m_lastRecordedAction = buildAction(name, WTFMove(parameters));
    m_bufferUsed += m_lastRecordedAction->memoryCost();
    m_currentActions->addItem(m_lastRecordedAction.get());

    if (is<ImageBitmapRenderingContext>(m_context) && shouldSnapshotBitmapRendererAction(name))
        m_contentChanged = true;
#if ENABLE(WEBGL)
    else if (is<WebGLRenderingContext>(m_context) && shouldSnapshotWebGLAction(name))
        m_contentChanged = true;
#endif
#if ENABLE(WEBGL2)
    else if (is<WebGL2RenderingContext>(m_context) && shouldSnapshotWebGL2Action(name))
        m_contentChanged = true;
#endif
}

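// Closes out the in-progress frame, stamping it with the elapsed time since its first recorded action.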
void InspectorCanvas::finalizeFrame()
{
    appendActionSnapshotIfNeeded();

    if (m_frames && m_frames->length() && !std::isnan(m_currentFrameStartTime)) {
        auto currentFrame = static_cast<Inspector::Protocol::Recording::Frame*>(m_frames->get(m_frames->length() - 1).get());
        currentFrame->setDuration((MonotonicTime::now() - m_currentFrameStartTime).milliseconds());

        m_currentFrameStartTime = MonotonicTime::nan();
    }

    m_currentActions = nullptr;
}

void InspectorCanvas::markCurrentFrameIncomplete()
{
    if (!m_currentActions || !m_frames || !m_frames->length())
        return;

    static_cast<Inspector::Protocol::Recording::Frame*>(m_frames->get(m_frames->length() - 1).get())->setIncomplete(true);
}

void InspectorCanvas::setBufferLimit(long memoryLimit)
{
    m_bufferLimit = std::min<long>(memoryLimit, std::numeric_limits<int>::max());
}

bool InspectorCanvas::hasBufferSpace() const
{
    return m_bufferUsed < m_bufferLimit;
}

void InspectorCanvas::setFrameCount(long frameCount)
{
    if (frameCount > 0)
        m_frameCount = std::min<long>(frameCount, std::numeric_limits<int>::max());
    else
        m_frameCount = WTF::nullopt;
}

bool InspectorCanvas::overFrameCount() const
{
    return m_frameCount && m_framesCaptured >= m_frameCount.value();
}

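// Builds the Canvas protocol object sent to the Web Inspector frontend, including context type, context attributes, memory cost, and an optional backtrace.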
Ref<Inspector::Protocol::Canvas::Canvas> InspectorCanvas::buildObjectForCanvas(bool captureBacktrace)
{
    Inspector::Protocol::Canvas::ContextType contextType;
    if (is<CanvasRenderingContext2D>(m_context))
        contextType = Inspector::Protocol::Canvas::ContextType::Canvas2D;
    else if (is<ImageBitmapRenderingContext>(m_context))
        contextType = Inspector::Protocol::Canvas::ContextType::BitmapRenderer;
#if ENABLE(WEBGL)
    else if (is<WebGLRenderingContext>(m_context))
        contextType = Inspector::Protocol::Canvas::ContextType::WebGL;
#endif
#if ENABLE(WEBGL2)
    else if (is<WebGL2RenderingContext>(m_context))
        contextType = Inspector::Protocol::Canvas::ContextType::WebGL2;
#endif
#if ENABLE(WEBGPU)
    else if (is<GPUCanvasContext>(m_context))
        contextType = Inspector::Protocol::Canvas::ContextType::WebGPU;
#endif
    else {
        ASSERT_NOT_REACHED();
        contextType = Inspector::Protocol::Canvas::ContextType::Canvas2D;
    }

    auto canvas = Inspector::Protocol::Canvas::Canvas::create()
        .setCanvasId(m_identifier)
        .setContextType(contextType)
        .release();

    if (auto* node = canvasElement()) {
        String cssCanvasName = node->document().nameForCSSCanvasElement(*node);
        if (!cssCanvasName.isEmpty())
            canvas->setCssCanvasName(cssCanvasName);

        // FIXME: <https://webkit.org/b/178282> Web Inspector: send a DOM node with each Canvas payload and eliminate Canvas.requestNode
    }

    if (is<ImageBitmapRenderingContext>(m_context)) {
        auto contextAttributes = Inspector::Protocol::Canvas::ContextAttributes::create()
            .release();
        contextAttributes->setAlpha(downcast<ImageBitmapRenderingContext>(m_context).hasAlpha());
        canvas->setContextAttributes(WTFMove(contextAttributes));
    }
#if ENABLE(WEBGL)
    else if (is<WebGLRenderingContextBase>(m_context)) {
        if (Optional<WebGLContextAttributes> attributes = downcast<WebGLRenderingContextBase>(m_context).getContextAttributes()) {
            auto contextAttributes = Inspector::Protocol::Canvas::ContextAttributes::create()
                .release();
            contextAttributes->setAlpha(attributes->alpha);
            contextAttributes->setDepth(attributes->depth);
            contextAttributes->setStencil(attributes->stencil);
            contextAttributes->setAntialias(attributes->antialias);
            contextAttributes->setPremultipliedAlpha(attributes->premultipliedAlpha);
            contextAttributes->setPreserveDrawingBuffer(attributes->preserveDrawingBuffer);
            contextAttributes->setFailIfMajorPerformanceCaveat(attributes->failIfMajorPerformanceCaveat);
            canvas->setContextAttributes(WTFMove(contextAttributes));
        }
    }
#endif

    // FIXME: <https://webkit.org/b/180833> Web Inspector: support OffscreenCanvas for Canvas related operations

    if (auto* node = canvasElement()) {
        if (size_t memoryCost = node->memoryCost())
            canvas->setMemoryCost(memoryCost);
    }

    if (captureBacktrace) {
        auto stackTrace = Inspector::createScriptCallStack(JSExecState::currentState(), Inspector::ScriptCallStack::maxCallStackSizeToCapture);
        canvas->setBacktrace(stackTrace->buildInspectorArray());
    }

    return canvas;
}

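// Packages the accumulated payloads into a Recording protocol object and then resets the recording state.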
Ref<Inspector::Protocol::Recording::Recording> InspectorCanvas::releaseObjectForRecording()
{
    ASSERT(!m_currentActions);
    ASSERT(!m_lastRecordedAction);
    ASSERT(!m_frames);

    Inspector::Protocol::Recording::Type type;
    if (is<CanvasRenderingContext2D>(m_context))
        type = Inspector::Protocol::Recording::Type::Canvas2D;
    else if (is<ImageBitmapRenderingContext>(m_context))
        type = Inspector::Protocol::Recording::Type::CanvasBitmapRenderer;
#if ENABLE(WEBGL)
    else if (is<WebGLRenderingContext>(m_context))
        type = Inspector::Protocol::Recording::Type::CanvasWebGL;
#endif
#if ENABLE(WEBGL2)
    else if (is<WebGL2RenderingContext>(m_context))
        type = Inspector::Protocol::Recording::Type::CanvasWebGL2;
#endif
    else {
        ASSERT_NOT_REACHED();
        type = Inspector::Protocol::Recording::Type::Canvas2D;
    }

    auto recording = Inspector::Protocol::Recording::Recording::create()
        .setVersion(Inspector::Protocol::Recording::VERSION)
        .setType(type)
        .setInitialState(m_initialState.releaseNonNull())
        .setData(m_serializedDuplicateData.releaseNonNull())
        .release();

    if (!m_recordingName.isEmpty())
        recording->setName(m_recordingName);

    resetRecordingData();

    return recording;
}

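// Returns the current canvas content as a PNG data URL, temporarily preventing WebGL drawing-buffer clears while the snapshot is taken.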
String InspectorCanvas::getCanvasContentAsDataURL(ErrorString& errorString)
{
    // FIXME: <https://webkit.org/b/173621> Web Inspector: Support getting the content of WebMetal context.
    if (!is<CanvasRenderingContext2D>(m_context)
#if ENABLE(WEBGL)
        && !is<WebGLRenderingContextBase>(m_context)
#endif
        && !is<ImageBitmapRenderingContext>(m_context)) {
        errorString = "Unsupported canvas context type"_s;
        return emptyString();
    }

    // FIXME: <https://webkit.org/b/180833> Web Inspector: support OffscreenCanvas for Canvas related operations
    auto* node = canvasElement();
    if (!node) {
        errorString = "Context isn't related to an HTMLCanvasElement"_s;
        return emptyString();
    }

#if ENABLE(WEBGL)
    if (is<WebGLRenderingContextBase>(m_context))
        downcast<WebGLRenderingContextBase>(m_context).setPreventBufferClearForInspector(true);
#endif

    ExceptionOr<UncachedString> result = node->toDataURL("image/png"_s);

#if ENABLE(WEBGL)
    if (is<WebGLRenderingContextBase>(m_context))
        downcast<WebGLRenderingContextBase>(m_context).setPreventBufferClearForInspector(false);
#endif

    if (result.hasException()) {
        errorString = result.releaseException().releaseMessage();
        return emptyString();
    }

    return result.releaseReturnValue().string;
}

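// If the last recorded action changed the canvas content, attach a snapshot data URL to it before the next action is recorded.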
void InspectorCanvas::appendActionSnapshotIfNeeded()
{
    if (!m_lastRecordedAction)
        return;

    if (m_contentChanged) {
        m_bufferUsed -= m_lastRecordedAction->memoryCost();

        ErrorString ignored;
        m_lastRecordedAction->addItem(indexForData(getCanvasContentAsDataURL(ignored)));

        m_bufferUsed += m_lastRecordedAction->memoryCost();
    }

    m_lastRecordedAction = nullptr;
    m_contentChanged = false;
}

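// Deduplicates recorded values: returns the index of an existing entry in m_indexedDuplicateData, or serializes the value and appends a new entry.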
int InspectorCanvas::indexForData(DuplicateDataVariant data)
{
    size_t index = m_indexedDuplicateData.findMatching([&] (auto item) {
        if (data == item)
            return true;

        auto traceA = WTF::get_if<RefPtr<ScriptCallStack>>(data);
        auto traceB = WTF::get_if<RefPtr<ScriptCallStack>>(item);
        if (traceA && *traceA && traceB && *traceB)
            return (*traceA)->isEqual((*traceB).get());

        return false;
    });
    if (index != notFound) {
        ASSERT(index < std::numeric_limits<int>::max());
        return static_cast<int>(index);
    }

    if (!m_serializedDuplicateData)
        m_serializedDuplicateData = JSON::ArrayOf<JSON::Value>::create();

    RefPtr<JSON::Value> item;
    WTF::switchOn(data,
        [&] (const RefPtr<HTMLImageElement>& imageElement) {
            String dataURL = "data:,"_s;

            if (CachedImage* cachedImage = imageElement->cachedImage()) {
                Image* image = cachedImage->image();
                if (image && image != &Image::nullImage()) {
                    // Guard against ImageBuffer::create failing (e.g. for very large images) before drawing.
                    if (auto imageBuffer = ImageBuffer::create(image->size(), RenderingMode::Unaccelerated)) {
                        imageBuffer->context().drawImage(*image, FloatPoint(0, 0));
                        dataURL = imageBuffer->toDataURL("image/png");
                    }
                }
            }

            index = indexForData(dataURL);
        },
#if ENABLE(VIDEO)
        [&] (RefPtr<HTMLVideoElement>& videoElement) {
            String dataURL = "data:,"_s;

            unsigned videoWidth = videoElement->videoWidth();
            unsigned videoHeight = videoElement->videoHeight();
            std::unique_ptr<ImageBuffer> imageBuffer = ImageBuffer::create(FloatSize(videoWidth, videoHeight), RenderingMode::Unaccelerated);
            if (imageBuffer) {
                videoElement->paintCurrentFrameInContext(imageBuffer->context(), FloatRect(0, 0, videoWidth, videoHeight));
                dataURL = imageBuffer->toDataURL("image/png");
            }

            index = indexForData(dataURL);
        },
#endif
        [&] (RefPtr<HTMLCanvasElement>& canvasElement) {
            String dataURL = "data:,"_s;

            ExceptionOr<UncachedString> result = canvasElement->toDataURL("image/png"_s);
            if (!result.hasException())
                dataURL = result.releaseReturnValue().string;

            index = indexForData(dataURL);
        },
        [&] (const RefPtr<CanvasGradient>& canvasGradient) { item = buildArrayForCanvasGradient(*canvasGradient); },
        [&] (const RefPtr<CanvasPattern>& canvasPattern) { item = buildArrayForCanvasPattern(*canvasPattern); },
        [&] (const RefPtr<ImageData>& imageData) { item = buildArrayForImageData(*imageData); },
        [&] (RefPtr<ImageBitmap>& imageBitmap) {
            index = indexForData(imageBitmap->buffer()->toDataURL("image/png"));
        },
        [&] (const RefPtr<ScriptCallStack>& scriptCallStack) {
            auto array = JSON::ArrayOf<double>::create();
            for (size_t i = 0; i < scriptCallStack->size(); ++i)
                array->addItem(indexForData(scriptCallStack->at(i)));
            item = WTFMove(array);
        },
#if ENABLE(CSS_TYPED_OM)
        [&] (const RefPtr<TypedOMCSSImageValue>& cssImageValue) {
            String dataURL = "data:,"_s;

            if (auto* cachedImage = cssImageValue->image()) {
                auto* image = cachedImage->image();
                if (image && image != &Image::nullImage()) {
                    // Guard against ImageBuffer::create failing before drawing.
                    if (auto imageBuffer = ImageBuffer::create(image->size(), RenderingMode::Unaccelerated)) {
                        imageBuffer->context().drawImage(*image, FloatPoint(0, 0));
                        dataURL = imageBuffer->toDataURL("image/png");
                    }
                }
            }

            index = indexForData(dataURL);
        },
#endif
        [&] (const ScriptCallFrame& scriptCallFrame) {
            auto array = JSON::ArrayOf<double>::create();
            array->addItem(indexForData(scriptCallFrame.functionName()));
            array->addItem(indexForData(scriptCallFrame.sourceURL()));
            array->addItem(static_cast<int>(scriptCallFrame.lineNumber()));
            array->addItem(static_cast<int>(scriptCallFrame.columnNumber()));
            item = WTFMove(array);
        },
        [&] (const String& value) { item = JSON::Value::create(value); }
    );

    if (item) {
        m_bufferUsed += item->memoryCost();
        m_serializedDuplicateData->addItem(WTFMove(item));

        m_indexedDuplicateData.append(data);
        index = m_indexedDuplicateData.size() - 1;
    }

    ASSERT(index < std::numeric_limits<int>::max());
    return static_cast<int>(index);
}

String InspectorCanvas::stringIndexForKey(const String& key)
{
    return String::number(indexForData(key));
}

static Ref<JSON::ArrayOf<double>> buildArrayForAffineTransform(const AffineTransform& affineTransform)
{
    auto array = JSON::ArrayOf<double>::create();
    array->addItem(affineTransform.a());
    array->addItem(affineTransform.b());
    array->addItem(affineTransform.c());
    array->addItem(affineTransform.d());
    array->addItem(affineTransform.e());
    array->addItem(affineTransform.f());
    return array;
}

template<typename T> static Ref<JSON::ArrayOf<JSON::Value>> buildArrayForVector(const Vector<T>& vector)
{
    auto array = JSON::ArrayOf<JSON::Value>::create();
    for (auto& item : vector)
        array->addItem(item);
    return array;
}

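// Captures the canvas attributes, the 2D state stack (or WebGL context attributes), and a content snapshot taken when the recording starts.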
Ref<Inspector::Protocol::Recording::InitialState> InspectorCanvas::buildInitialState()
{
    auto initialStatePayload = Inspector::Protocol::Recording::InitialState::create().release();

    auto attributesPayload = JSON::Object::create();
    attributesPayload->setInteger("width"_s, m_context.canvasBase().width());
    attributesPayload->setInteger("height"_s, m_context.canvasBase().height());

    auto statesPayload = JSON::ArrayOf<JSON::Object>::create();

    auto parametersPayload = JSON::ArrayOf<JSON::Value>::create();

    if (is<CanvasRenderingContext2D>(m_context)) {
        auto& context2d = downcast<CanvasRenderingContext2D>(m_context);
        for (auto& state : context2d.stateStack()) {
            auto statePayload = JSON::Object::create();

            statePayload->setArray(stringIndexForKey("setTransform"_s), buildArrayForAffineTransform(state.transform));
            statePayload->setDouble(stringIndexForKey("globalAlpha"_s), context2d.globalAlpha());
            statePayload->setInteger(stringIndexForKey("globalCompositeOperation"_s), indexForData(context2d.globalCompositeOperation()));
            statePayload->setDouble(stringIndexForKey("lineWidth"_s), context2d.lineWidth());
            statePayload->setInteger(stringIndexForKey("lineCap"_s), indexForData(convertEnumerationToString(context2d.lineCap())));
            statePayload->setInteger(stringIndexForKey("lineJoin"_s), indexForData(convertEnumerationToString(context2d.lineJoin())));
            statePayload->setDouble(stringIndexForKey("miterLimit"_s), context2d.miterLimit());
            statePayload->setDouble(stringIndexForKey("shadowOffsetX"_s), context2d.shadowOffsetX());
            statePayload->setDouble(stringIndexForKey("shadowOffsetY"_s), context2d.shadowOffsetY());
            statePayload->setDouble(stringIndexForKey("shadowBlur"_s), context2d.shadowBlur());
            statePayload->setInteger(stringIndexForKey("shadowColor"_s), indexForData(context2d.shadowColor()));

            // The parameter to `setLineDash` is itself an array, so we need to wrap the parameters
            // list in an array to allow spreading.
            auto setLineDash = JSON::ArrayOf<JSON::Value>::create();
            setLineDash->addItem(buildArrayForVector(state.lineDash));
            statePayload->setArray(stringIndexForKey("setLineDash"_s), WTFMove(setLineDash));

            statePayload->setDouble(stringIndexForKey("lineDashOffset"_s), context2d.lineDashOffset());
            statePayload->setInteger(stringIndexForKey("font"_s), indexForData(context2d.font()));
            statePayload->setInteger(stringIndexForKey("textAlign"_s), indexForData(convertEnumerationToString(context2d.textAlign())));
            statePayload->setInteger(stringIndexForKey("textBaseline"_s), indexForData(convertEnumerationToString(context2d.textBaseline())));
            statePayload->setInteger(stringIndexForKey("direction"_s), indexForData(convertEnumerationToString(context2d.direction())));

            int strokeStyleIndex;
            if (auto canvasGradient = state.strokeStyle.canvasGradient())
                strokeStyleIndex = indexForData(canvasGradient);
            else if (auto canvasPattern = state.strokeStyle.canvasPattern())
                strokeStyleIndex = indexForData(canvasPattern);
            else
                strokeStyleIndex = indexForData(state.strokeStyle.color());
            statePayload->setInteger(stringIndexForKey("strokeStyle"_s), strokeStyleIndex);

            int fillStyleIndex;
            if (auto canvasGradient = state.fillStyle.canvasGradient())
                fillStyleIndex = indexForData(canvasGradient);
            else if (auto canvasPattern = state.fillStyle.canvasPattern())
                fillStyleIndex = indexForData(canvasPattern);
            else
                fillStyleIndex = indexForData(state.fillStyle.color());
            statePayload->setInteger(stringIndexForKey("fillStyle"_s), fillStyleIndex);

            statePayload->setBoolean(stringIndexForKey("imageSmoothingEnabled"_s), context2d.imageSmoothingEnabled());
            statePayload->setInteger(stringIndexForKey("imageSmoothingQuality"_s), indexForData(convertEnumerationToString(context2d.imageSmoothingQuality())));

            auto setPath = JSON::ArrayOf<JSON::Value>::create();
            setPath->addItem(indexForData(buildStringFromPath(context2d.getPath()->path())));
            statePayload->setArray(stringIndexForKey("setPath"_s), WTFMove(setPath));

            statesPayload->addItem(WTFMove(statePayload));
        }
    }
#if ENABLE(WEBGL)
    else if (is<WebGLRenderingContextBase>(m_context)) {
        WebGLRenderingContextBase& contextWebGLBase = downcast<WebGLRenderingContextBase>(m_context);
        if (Optional<WebGLContextAttributes> webGLContextAttributes = contextWebGLBase.getContextAttributes()) {
            auto webGLContextAttributesPayload = JSON::Object::create();
            webGLContextAttributesPayload->setBoolean("alpha"_s, webGLContextAttributes->alpha);
            webGLContextAttributesPayload->setBoolean("depth"_s, webGLContextAttributes->depth);
            webGLContextAttributesPayload->setBoolean("stencil"_s, webGLContextAttributes->stencil);
            webGLContextAttributesPayload->setBoolean("antialias"_s, webGLContextAttributes->antialias);
            webGLContextAttributesPayload->setBoolean("premultipliedAlpha"_s, webGLContextAttributes->premultipliedAlpha);
            webGLContextAttributesPayload->setBoolean("preserveDrawingBuffer"_s, webGLContextAttributes->preserveDrawingBuffer);
            webGLContextAttributesPayload->setBoolean("failIfMajorPerformanceCaveat"_s, webGLContextAttributes->failIfMajorPerformanceCaveat);
            parametersPayload->addItem(WTFMove(webGLContextAttributesPayload));
        }
    }
#endif

    initialStatePayload->setAttributes(WTFMove(attributesPayload));

    if (statesPayload->length())
        initialStatePayload->setStates(WTFMove(statesPayload));

    if (parametersPayload->length())
        initialStatePayload->setParameters(WTFMove(parametersPayload));

    ErrorString ignored;
    initialStatePayload->setContent(getCanvasContentAsDataURL(ignored));

    return initialStatePayload;
}

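// Serializes one API call as [name index, parameters, swizzle types, call stack index], deduplicating values through indexForData.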
Ref<JSON::ArrayOf<JSON::Value>> InspectorCanvas::buildAction(const String& name, Vector<RecordCanvasActionVariant>&& parameters)
{
    auto action = JSON::ArrayOf<JSON::Value>::create();
    action->addItem(indexForData(name));

    auto parametersData = JSON::ArrayOf<JSON::Value>::create();
    auto swizzleTypes = JSON::ArrayOf<int>::create();

    auto addParameter = [&parametersData, &swizzleTypes] (auto value, RecordingSwizzleTypes swizzleType) {
        parametersData->addItem(value);
        swizzleTypes->addItem(static_cast<int>(swizzleType));
    };

    for (auto& item : parameters) {
        WTF::switchOn(item,
            [&] (CanvasDirection value) { addParameter(indexForData(convertEnumerationToString(value)), RecordingSwizzleTypes::String); },
            [&] (CanvasFillRule value) { addParameter(indexForData(convertEnumerationToString(value)), RecordingSwizzleTypes::String); },
            [&] (CanvasLineCap value) { addParameter(indexForData(convertEnumerationToString(value)), RecordingSwizzleTypes::String); },
            [&] (CanvasLineJoin value) { addParameter(indexForData(convertEnumerationToString(value)), RecordingSwizzleTypes::String); },
            [&] (CanvasTextAlign value) { addParameter(indexForData(convertEnumerationToString(value)), RecordingSwizzleTypes::String); },
            [&] (CanvasTextBaseline value) { addParameter(indexForData(convertEnumerationToString(value)), RecordingSwizzleTypes::String); },
            [&] (const DOMMatrix2DInit& value) {
                auto array = JSON::ArrayOf<double>::create();
                array->addItem(value.a.valueOr(1));
                array->addItem(value.b.valueOr(0));
                array->addItem(value.c.valueOr(0));
                array->addItem(value.d.valueOr(1));
                array->addItem(value.e.valueOr(0));
                array->addItem(value.f.valueOr(0));
                addParameter(array.ptr(), RecordingSwizzleTypes::DOMMatrix);
            },
            [&] (const Element*) {
                // Elements are not serializable, so add a string as a placeholder since the actual
                // element cannot be reconstructed in the frontend.
                addParameter(indexForData("Element"), RecordingSwizzleTypes::None);
            },
            [&] (HTMLImageElement* value) { addParameter(indexForData(value), RecordingSwizzleTypes::Image); },
            [&] (ImageBitmap* value) { addParameter(indexForData(value), RecordingSwizzleTypes::ImageBitmap); },
            [&] (ImageData* value) { addParameter(indexForData(value), RecordingSwizzleTypes::ImageData); },
            [&] (ImageSmoothingQuality value) { addParameter(indexForData(convertEnumerationToString(value)), RecordingSwizzleTypes::String); },
            [&] (const Path2D* value) { addParameter(indexForData(buildStringFromPath(value->path())), RecordingSwizzleTypes::Path2D); },
#if ENABLE(WEBGL)
            // FIXME: <https://webkit.org/b/176009> Web Inspector: send data for WebGL objects during a recording instead of a placeholder string
            [&] (const WebGLBuffer*) { addParameter(0, RecordingSwizzleTypes::WebGLBuffer); },
            [&] (const WebGLFramebuffer*) { addParameter(0, RecordingSwizzleTypes::WebGLFramebuffer); },
            [&] (const WebGLProgram*) { addParameter(0, RecordingSwizzleTypes::WebGLProgram); },
            [&] (const WebGLQuery*) { addParameter(0, RecordingSwizzleTypes::WebGLQuery); },
            [&] (const WebGLRenderbuffer*) { addParameter(0, RecordingSwizzleTypes::WebGLRenderbuffer); },
            [&] (const WebGLSampler*) { addParameter(0, RecordingSwizzleTypes::WebGLSampler); },
            [&] (const WebGLShader*) { addParameter(0, RecordingSwizzleTypes::WebGLShader); },
            [&] (const WebGLSync*) { addParameter(0, RecordingSwizzleTypes::WebGLSync); },
            [&] (const WebGLTexture*) { addParameter(0, RecordingSwizzleTypes::WebGLTexture); },
            [&] (const WebGLTransformFeedback*) { addParameter(0, RecordingSwizzleTypes::WebGLTransformFeedback); },
            [&] (const WebGLUniformLocation*) { addParameter(0, RecordingSwizzleTypes::WebGLUniformLocation); },
            [&] (const WebGLVertexArrayObject*) { addParameter(0, RecordingSwizzleTypes::WebGLVertexArrayObject); },
#endif
            [&] (const RefPtr<ArrayBuffer>&) { addParameter(0, RecordingSwizzleTypes::TypedArray); },
            [&] (const RefPtr<ArrayBufferView>&) { addParameter(0, RecordingSwizzleTypes::TypedArray); },
            [&] (const RefPtr<CanvasGradient>& value) { addParameter(indexForData(value), RecordingSwizzleTypes::CanvasGradient); },
            [&] (const RefPtr<CanvasPattern>& value) { addParameter(indexForData(value), RecordingSwizzleTypes::CanvasPattern); },
            [&] (const RefPtr<Float32Array>&) { addParameter(0, RecordingSwizzleTypes::TypedArray); },
            [&] (const RefPtr<HTMLCanvasElement>& value) { addParameter(indexForData(value), RecordingSwizzleTypes::Image); },
            [&] (const RefPtr<HTMLImageElement>& value) { addParameter(indexForData(value), RecordingSwizzleTypes::Image); },
#if ENABLE(VIDEO)
            [&] (const RefPtr<HTMLVideoElement>& value) { addParameter(indexForData(value), RecordingSwizzleTypes::Image); },
#endif
#if ENABLE(CSS_TYPED_OM)
            [&] (const RefPtr<TypedOMCSSImageValue>& value) { addParameter(indexForData(value), RecordingSwizzleTypes::Image); },
#endif
            [&] (const RefPtr<ImageBitmap>& value) { addParameter(indexForData(value), RecordingSwizzleTypes::ImageBitmap); },
            [&] (const RefPtr<ImageData>& value) { addParameter(indexForData(value), RecordingSwizzleTypes::ImageData); },
            [&] (const RefPtr<Int32Array>&) { addParameter(0, RecordingSwizzleTypes::TypedArray); },
            [&] (const RefPtr<Uint32Array>&) { addParameter(0, RecordingSwizzleTypes::TypedArray); },
            [&] (const Vector<String>& value) {
                auto deduplicated = value.map([&] (const String& item) {
                    return indexForData(item);
                });
                addParameter(buildArrayForVector(deduplicated).ptr(), RecordingSwizzleTypes::Array);
            },
            [&] (const Vector<float>& value) { addParameter(buildArrayForVector(value).ptr(), RecordingSwizzleTypes::Array); },
            [&] (const Vector<uint32_t>& value) {
                auto mapped = value.map([&] (uint32_t item) {
                    return static_cast<double>(item);
                });
                addParameter(buildArrayForVector(mapped).ptr(), RecordingSwizzleTypes::Array);
            },
            [&] (const Vector<int32_t>& value) { addParameter(buildArrayForVector(value).ptr(), RecordingSwizzleTypes::Array); },
            [&] (const String& value) { addParameter(indexForData(value), RecordingSwizzleTypes::String); },
            [&] (double value) { addParameter(value, RecordingSwizzleTypes::Number); },
            [&] (float value) { addParameter(value, RecordingSwizzleTypes::Number); },
            [&] (uint64_t value) { addParameter(static_cast<double>(value), RecordingSwizzleTypes::Number); },
            [&] (int64_t value) { addParameter(static_cast<double>(value), RecordingSwizzleTypes::Number); },
            [&] (uint32_t value) { addParameter(static_cast<double>(value), RecordingSwizzleTypes::Number); },
            [&] (int32_t value) { addParameter(value, RecordingSwizzleTypes::Number); },
            [&] (uint8_t value) { addParameter(static_cast<int>(value), RecordingSwizzleTypes::Number); },
            [&] (bool value) { addParameter(value, RecordingSwizzleTypes::Boolean); }
        );
    }

    action->addItem(WTFMove(parametersData));
    action->addItem(WTFMove(swizzleTypes));

    auto trace = Inspector::createScriptCallStack(JSExecState::currentState(), Inspector::ScriptCallStack::maxCallStackSizeToCapture);
    action->addItem(indexForData(trace.ptr()));

    return action;
}

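// The helpers below serialize complex argument types (gradients, patterns, ImageData) into compact JSON arrays for the recording payload.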
Ref<JSON::ArrayOf<JSON::Value>> InspectorCanvas::buildArrayForCanvasGradient(const CanvasGradient& canvasGradient)
{
    const auto& gradient = canvasGradient.gradient();

    String type = gradient.type() == Gradient::Type::Radial ? "radial-gradient"_s : gradient.type() == Gradient::Type::Linear ? "linear-gradient"_s : "conic-gradient"_s;

    auto parameters = JSON::ArrayOf<float>::create();
    WTF::switchOn(gradient.data(),
        [&parameters] (const Gradient::LinearData& data) {
            parameters->addItem(data.point0.x());
            parameters->addItem(data.point0.y());
            parameters->addItem(data.point1.x());
            parameters->addItem(data.point1.y());
        },
        [&parameters] (const Gradient::RadialData& data) {
            parameters->addItem(data.point0.x());
            parameters->addItem(data.point0.y());
            parameters->addItem(data.startRadius);
            parameters->addItem(data.point1.x());
            parameters->addItem(data.point1.y());
            parameters->addItem(data.endRadius);
        },
        [&parameters] (const Gradient::ConicData& data) {
            parameters->addItem(data.point0.x());
            parameters->addItem(data.point0.y());
            parameters->addItem(data.angleRadians);
        }
    );

    auto stops = JSON::ArrayOf<JSON::Value>::create();
    for (auto& colorStop : gradient.stops()) {
        auto stop = JSON::ArrayOf<JSON::Value>::create();
        stop->addItem(colorStop.offset);
        stop->addItem(indexForData(colorStop.color.cssText()));
        stops->addItem(WTFMove(stop));
    }

    auto array = JSON::ArrayOf<JSON::Value>::create();
    array->addItem(indexForData(type));
    array->addItem(WTFMove(parameters));
    array->addItem(WTFMove(stops));
    return array;
}

Ref<JSON::ArrayOf<JSON::Value>> InspectorCanvas::buildArrayForCanvasPattern(const CanvasPattern& canvasPattern)
{
    // Fall back to an empty data URL if the tile image cannot be drawn into a buffer.
    String dataURL = "data:,"_s;
    Image& tileImage = canvasPattern.pattern().tileImage();
    if (auto imageBuffer = ImageBuffer::create(tileImage.size(), RenderingMode::Unaccelerated)) {
        imageBuffer->context().drawImage(tileImage, FloatPoint(0, 0));
        dataURL = imageBuffer->toDataURL("image/png");
    }

    String repeat;
    bool repeatX = canvasPattern.pattern().repeatX();
    bool repeatY = canvasPattern.pattern().repeatY();
    if (repeatX && repeatY)
        repeat = "repeat"_s;
    else if (repeatX && !repeatY)
        repeat = "repeat-x"_s;
    else if (!repeatX && repeatY)
        repeat = "repeat-y"_s;
    else
        repeat = "no-repeat"_s;

    auto array = JSON::ArrayOf<JSON::Value>::create();
    array->addItem(indexForData(dataURL));
    array->addItem(indexForData(repeat));
    return array;
}

Ref<JSON::ArrayOf<JSON::Value>> InspectorCanvas::buildArrayForImageData(const ImageData& imageData)
{
    auto data = JSON::ArrayOf<int>::create();
    for (size_t i = 0; i < imageData.data()->length(); ++i)
        data->addItem(imageData.data()->item(i));

    auto array = JSON::ArrayOf<JSON::Value>::create();
    array->addItem(WTFMove(data));
    array->addItem(imageData.width());
    array->addItem(imageData.height());
    return array;
}

} // namespace WebCore