| /* |
| * Copyright (C) 2009 Apple Inc. All rights reserved. |
| * |
| * Redistribution and use in source and binary forms, with or without |
| * modification, are permitted provided that the following conditions |
| * are met: |
| * 1. Redistributions of source code must retain the above copyright |
| * notice, this list of conditions and the following disclaimer. |
| * 2. Redistributions in binary form must reproduce the above copyright |
| * notice, this list of conditions and the following disclaimer in the |
| * documentation and/or other materials provided with the distribution. |
| * |
| * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY |
| * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
| * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
| * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR |
| * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, |
| * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
| * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR |
| * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY |
| * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| */ |
| |
| #include "third_party/blink/renderer/modules/webgl/webgl_rendering_context_base.h" |
| |
| #include <memory> |
| |
| #include "base/numerics/checked_math.h" |
| #include "base/stl_util.h" |
| #include "build/build_config.h" |
| #include "gpu/command_buffer/client/gles2_interface.h" |
| #include "gpu/command_buffer/common/capabilities.h" |
| #include "gpu/config/gpu_feature_info.h" |
| #include "third_party/blink/public/platform/platform.h" |
| #include "third_party/blink/public/platform/task_type.h" |
| #include "third_party/blink/renderer/bindings/modules/v8/html_canvas_element_or_offscreen_canvas.h" |
| #include "third_party/blink/renderer/bindings/modules/v8/webgl_any.h" |
| #include "third_party/blink/renderer/core/execution_context/execution_context.h" |
| #include "third_party/blink/renderer/core/frame/dactyloscoper.h" |
| #include "third_party/blink/renderer/core/frame/local_frame.h" |
| #include "third_party/blink/renderer/core/frame/local_frame_client.h" |
| #include "third_party/blink/renderer/core/frame/settings.h" |
| #include "third_party/blink/renderer/core/html/canvas/html_canvas_element.h" |
| #include "third_party/blink/renderer/core/html/canvas/image_data.h" |
| #include "third_party/blink/renderer/core/html/html_image_element.h" |
| #include "third_party/blink/renderer/core/html/media/html_video_element.h" |
| #include "third_party/blink/renderer/core/imagebitmap/image_bitmap.h" |
| #include "third_party/blink/renderer/core/inspector/console_message.h" |
| #include "third_party/blink/renderer/core/layout/layout_box.h" |
| #include "third_party/blink/renderer/core/origin_trials/origin_trials.h" |
| #include "third_party/blink/renderer/core/probe/core_probes.h" |
| #include "third_party/blink/renderer/core/typed_arrays/array_buffer_view_helpers.h" |
| #include "third_party/blink/renderer/core/typed_arrays/dom_array_buffer.h" |
| #include "third_party/blink/renderer/core/typed_arrays/dom_typed_array.h" |
| #include "third_party/blink/renderer/core/typed_arrays/flexible_array_buffer_view.h" |
| #include "third_party/blink/renderer/modules/webgl/angle_instanced_arrays.h" |
| #include "third_party/blink/renderer/modules/webgl/ext_blend_min_max.h" |
| #include "third_party/blink/renderer/modules/webgl/ext_frag_depth.h" |
| #include "third_party/blink/renderer/modules/webgl/ext_shader_texture_lod.h" |
| #include "third_party/blink/renderer/modules/webgl/ext_texture_filter_anisotropic.h" |
| #include "third_party/blink/renderer/modules/webgl/gl_string_query.h" |
| #include "third_party/blink/renderer/modules/webgl/oes_element_index_uint.h" |
| #include "third_party/blink/renderer/modules/webgl/oes_standard_derivatives.h" |
| #include "third_party/blink/renderer/modules/webgl/oes_texture_float.h" |
| #include "third_party/blink/renderer/modules/webgl/oes_texture_float_linear.h" |
| #include "third_party/blink/renderer/modules/webgl/oes_texture_half_float.h" |
| #include "third_party/blink/renderer/modules/webgl/oes_texture_half_float_linear.h" |
| #include "third_party/blink/renderer/modules/webgl/oes_vertex_array_object.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_active_info.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_buffer.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_compressed_texture_astc.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_compressed_texture_etc.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_compressed_texture_etc1.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_compressed_texture_pvrtc.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_compressed_texture_s3tc.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_compressed_texture_s3tc_srgb.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_context_attribute_helpers.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_context_event.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_context_group.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_debug_renderer_info.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_debug_shaders.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_depth_texture.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_draw_buffers.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_framebuffer.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_lose_context.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_program.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_renderbuffer.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_shader.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_shader_precision_format.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_uniform_location.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_vertex_array_object.h" |
| #include "third_party/blink/renderer/modules/webgl/webgl_vertex_array_object_oes.h" |
| #include "third_party/blink/renderer/platform/bindings/exception_state.h" |
| #include "third_party/blink/renderer/platform/bindings/script_wrappable_visitor.h" |
| #include "third_party/blink/renderer/platform/bindings/v8_binding_macros.h" |
| #include "third_party/blink/renderer/platform/cross_thread_functional.h" |
| #include "third_party/blink/renderer/platform/geometry/int_size.h" |
| #include "third_party/blink/renderer/platform/graphics/accelerated_static_bitmap_image.h" |
| #include "third_party/blink/renderer/platform/graphics/canvas_2d_layer_bridge.h" |
| #include "third_party/blink/renderer/platform/graphics/canvas_resource_provider.h" |
| #include "third_party/blink/renderer/platform/graphics/gpu/shared_gpu_context.h" |
| #include "third_party/blink/renderer/platform/graphics/graphics_context.h" |
| #include "third_party/blink/renderer/platform/runtime_enabled_features.h" |
| #include "third_party/blink/renderer/platform/scheduler/public/post_cross_thread_task.h" |
| #include "third_party/blink/renderer/platform/waitable_event.h" |
| #include "third_party/blink/renderer/platform/wtf/functional.h" |
| #include "third_party/blink/renderer/platform/wtf/text/string_builder.h" |
| #include "third_party/blink/renderer/platform/wtf/text/string_utf8_adaptor.h" |
| #include "third_party/blink/renderer/platform/wtf/threading_primitives.h" |
| #include "third_party/blink/renderer/platform/wtf/typed_arrays/array_buffer_contents.h" |
| |
| namespace blink { |
| |
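// Process-wide limits on simultaneously active WebGL contexts, initialized
// once from the first DrawingBuffer's reported limits via
// InitializeWebGLContextLimits() and unchanged afterwards.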
| bool WebGLRenderingContextBase::webgl_context_limits_initialized_ = false; |
| unsigned WebGLRenderingContextBase::max_active_webgl_contexts_ = 0; |
| unsigned WebGLRenderingContextBase::max_active_webgl_contexts_on_worker_ = 0; |
| |
| namespace { |
| |
| constexpr TimeDelta kDurationBetweenRestoreAttempts = TimeDelta::FromSeconds(1); |
| const int kMaxGLErrorsAllowedToConsole = 256; |
| |
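// Guards webgl_context_limits_initialized_ and the max-context counts above,
// which may be accessed from both the main thread and worker threads.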
| Mutex& WebGLContextLimitMutex() { |
| DEFINE_THREAD_SAFE_STATIC_LOCAL(Mutex, mutex, ()); |
| return mutex; |
| } |
| |
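// Per-thread set of currently active (non-lost) WebGL contexts. Members are
// weak, so garbage-collected contexts drop out automatically.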
| using WebGLRenderingContextBaseSet = |
| HeapHashSet<WeakMember<WebGLRenderingContextBase>>; |
| WebGLRenderingContextBaseSet& ActiveContexts() { |
| DEFINE_THREAD_SAFE_STATIC_LOCAL( |
| ThreadSpecific<Persistent<WebGLRenderingContextBaseSet>>, active_contexts, |
| ()); |
| Persistent<WebGLRenderingContextBaseSet>& active_contexts_persistent = |
| *active_contexts; |
| if (!active_contexts_persistent) { |
| active_contexts_persistent = |
| MakeGarbageCollected<WebGLRenderingContextBaseSet>(); |
| active_contexts_persistent.RegisterAsStaticReference(); |
| } |
| return *active_contexts_persistent; |
| } |
| |
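// Per-thread map from each forcibly evicted context to the eviction
// generation recorded in AddToEvictedList(); smaller generations are older
// evictions.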
| using WebGLRenderingContextBaseMap = |
| HeapHashMap<WeakMember<WebGLRenderingContextBase>, int>; |
| WebGLRenderingContextBaseMap& ForciblyEvictedContexts() { |
| DEFINE_THREAD_SAFE_STATIC_LOCAL( |
| ThreadSpecific<Persistent<WebGLRenderingContextBaseMap>>, |
| forcibly_evicted_contexts, ()); |
| Persistent<WebGLRenderingContextBaseMap>& |
| forcibly_evicted_contexts_persistent = *forcibly_evicted_contexts; |
| if (!forcibly_evicted_contexts_persistent) { |
| forcibly_evicted_contexts_persistent = |
| MakeGarbageCollected<WebGLRenderingContextBaseMap>(); |
| forcibly_evicted_contexts_persistent.RegisterAsStaticReference(); |
| } |
| return *forcibly_evicted_contexts_persistent; |
| } |
| |
| } // namespace |
| |
| ScopedRGBEmulationColorMask::ScopedRGBEmulationColorMask( |
| WebGLRenderingContextBase* context, |
| GLboolean* color_mask, |
| DrawingBuffer* drawing_buffer) |
| : context_(context), |
| requires_emulation_(drawing_buffer->RequiresAlphaChannelToBePreserved()) { |
| if (requires_emulation_) { |
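    // Save the caller's color mask and disable alpha writes so the drawing
    // buffer's preserved alpha channel is not modified while this scope is
    // active.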
| context_->active_scoped_rgb_emulation_color_masks_++; |
| memcpy(color_mask_, color_mask, 4 * sizeof(GLboolean)); |
| context_->ContextGL()->ColorMask(color_mask_[0], color_mask_[1], |
| color_mask_[2], false); |
| } |
| } |
| |
| ScopedRGBEmulationColorMask::~ScopedRGBEmulationColorMask() { |
| if (requires_emulation_) { |
| DCHECK(context_->active_scoped_rgb_emulation_color_masks_); |
| context_->active_scoped_rgb_emulation_color_masks_--; |
| context_->ContextGL()->ColorMask(color_mask_[0], color_mask_[1], |
| color_mask_[2], color_mask_[3]); |
| } |
| } |
| |
| void WebGLRenderingContextBase::InitializeWebGLContextLimits( |
| const DrawingBuffer::WebGLContextLimits& limits) { |
| MutexLocker locker(WebGLContextLimitMutex()); |
| if (!webgl_context_limits_initialized_) { |
| // These do not change over the lifetime of the browser. |
| max_active_webgl_contexts_ = limits.max_active_webgl_contexts; |
| max_active_webgl_contexts_on_worker_ = |
| limits.max_active_webgl_contexts_on_worker; |
| webgl_context_limits_initialized_ = true; |
| } |
| } |
| |
| unsigned WebGLRenderingContextBase::CurrentMaxGLContexts() { |
| MutexLocker locker(WebGLContextLimitMutex()); |
| DCHECK(webgl_context_limits_initialized_); |
| return IsMainThread() ? max_active_webgl_contexts_ |
| : max_active_webgl_contexts_on_worker_; |
| } |
| |
| void WebGLRenderingContextBase::ForciblyLoseOldestContext( |
| const String& reason) { |
| WebGLRenderingContextBase* candidate = OldestContext(); |
| if (!candidate) |
| return; |
| |
| candidate->PrintWarningToConsole(reason); |
| probe::didFireWebGLWarning(candidate->canvas()); |
| |
| // This will call deactivateContext once the context has actually been lost. |
| candidate->ForceLostContext(WebGLRenderingContextBase::kSyntheticLostContext, |
| WebGLRenderingContextBase::kWhenAvailable); |
| } |
| |
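// The "oldest" context is the one whose commands were flushed least recently,
// as reported by GetLastFlushIdCHROMIUM().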
| WebGLRenderingContextBase* WebGLRenderingContextBase::OldestContext() { |
| if (ActiveContexts().IsEmpty()) |
| return nullptr; |
| |
| WebGLRenderingContextBase* candidate = *(ActiveContexts().begin()); |
| DCHECK(!candidate->isContextLost()); |
| for (WebGLRenderingContextBase* context : ActiveContexts()) { |
| DCHECK(!context->isContextLost()); |
| if (context->ContextGL()->GetLastFlushIdCHROMIUM() < |
| candidate->ContextGL()->GetLastFlushIdCHROMIUM()) { |
| candidate = context; |
| } |
| } |
| |
| return candidate; |
| } |
| |
| WebGLRenderingContextBase* WebGLRenderingContextBase::OldestEvictedContext() { |
| if (ForciblyEvictedContexts().IsEmpty()) |
| return nullptr; |
| |
| WebGLRenderingContextBase* candidate = nullptr; |
| int generation = -1; |
| for (WebGLRenderingContextBase* context : ForciblyEvictedContexts().Keys()) { |
| if (!candidate || ForciblyEvictedContexts().at(context) < generation) { |
| candidate = context; |
| generation = ForciblyEvictedContexts().at(context); |
| } |
| } |
| |
| return candidate; |
| } |
| |
| void WebGLRenderingContextBase::ActivateContext( |
| WebGLRenderingContextBase* context) { |
| unsigned max_gl_contexts = CurrentMaxGLContexts(); |
| unsigned removed_contexts = 0; |
| while (ActiveContexts().size() >= max_gl_contexts && |
| removed_contexts < max_gl_contexts) { |
| ForciblyLoseOldestContext( |
| "WARNING: Too many active WebGL contexts. Oldest context will be " |
| "lost."); |
| removed_contexts++; |
| } |
| |
| DCHECK(!context->isContextLost()); |
| ActiveContexts().insert(context); |
| } |
| |
| void WebGLRenderingContextBase::DeactivateContext( |
| WebGLRenderingContextBase* context) { |
| ActiveContexts().erase(context); |
| } |
| |
| void WebGLRenderingContextBase::AddToEvictedList( |
| WebGLRenderingContextBase* context) { |
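  // Monotonically increasing eviction counter; OldestEvictedContext() selects
  // the entry with the smallest recorded generation.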
| static int generation = 0; |
| ForciblyEvictedContexts().Set(context, generation++); |
| } |
| |
| void WebGLRenderingContextBase::RemoveFromEvictedList( |
| WebGLRenderingContextBase* context) { |
| ForciblyEvictedContexts().erase(context); |
| } |
| |
| void WebGLRenderingContextBase::RestoreEvictedContext( |
| WebGLRenderingContextBase* context) { |
  // Both the active-context set and the evicted-context map hold weak
  // references to their contexts; verify that the GC already removed the
  // |context| entries.
| DCHECK(!ForciblyEvictedContexts().Contains(context)); |
| DCHECK(!ActiveContexts().Contains(context)); |
| |
| unsigned max_gl_contexts = CurrentMaxGLContexts(); |
| // Try to re-enable the oldest inactive contexts. |
| while (ActiveContexts().size() < max_gl_contexts && |
| ForciblyEvictedContexts().size()) { |
| WebGLRenderingContextBase* evicted_context = OldestEvictedContext(); |
| if (!evicted_context->restore_allowed_) { |
| ForciblyEvictedContexts().erase(evicted_context); |
| continue; |
| } |
| |
| IntSize desired_size = DrawingBuffer::AdjustSize( |
| evicted_context->ClampedCanvasSize(), IntSize(), |
| evicted_context->max_texture_size_); |
| |
| // If there's room in the pixel budget for this context, restore it. |
| if (!desired_size.IsEmpty()) { |
| ForciblyEvictedContexts().erase(evicted_context); |
| evicted_context->ForceRestoreContext(); |
| } |
| break; |
| } |
| } |
| |
| namespace { |
| |
| GLint Clamp(GLint value, GLint min, GLint max) { |
| if (value < min) |
| value = min; |
| if (value > max) |
| value = max; |
| return value; |
| } |
| |
| // Return true if a character belongs to the ASCII subset as defined in |
| // GLSL ES 1.0 spec section 3.1. |
| bool ValidateCharacter(unsigned char c) { |
| // Printing characters are valid except " $ ` @ \ ' DEL. |
| if (c >= 32 && c <= 126 && c != '"' && c != '$' && c != '`' && c != '@' && |
| c != '\\' && c != '\'') |
| return true; |
| // Horizontal tab, line feed, vertical tab, form feed, carriage return |
| // are also valid. |
| if (c >= 9 && c <= 13) |
| return true; |
| return false; |
| } |
| |
| bool IsPrefixReserved(const String& name) { |
| if (name.StartsWith("gl_") || name.StartsWith("webgl_") || |
| name.StartsWith("_webgl_")) |
| return true; |
| return false; |
| } |
| |
| // Strips comments from shader text. This allows non-ASCII characters |
| // to be used in comments without potentially breaking OpenGL |
| // implementations not expecting characters outside the GLSL ES set. |
| class StripComments { |
| public: |
| StripComments(const String& str) |
| : parse_state_(kBeginningOfLine), |
| source_string_(str), |
| length_(str.length()), |
| position_(0) { |
| Parse(); |
| } |
| |
| String Result() { return builder_.ToString(); } |
| |
| private: |
| bool HasMoreCharacters() const { return (position_ < length_); } |
| |
| void Parse() { |
| while (HasMoreCharacters()) { |
| Process(Current()); |
      // Process() might advance the position.
| if (HasMoreCharacters()) |
| Advance(); |
| } |
| } |
| |
| void Process(UChar); |
| |
| bool Peek(UChar& character) const { |
| if (position_ + 1 >= length_) |
| return false; |
| character = source_string_[position_ + 1]; |
| return true; |
| } |
| |
| UChar Current() { |
| SECURITY_DCHECK(position_ < length_); |
| return source_string_[position_]; |
| } |
| |
| void Advance() { ++position_; } |
| |
| static bool IsNewline(UChar character) { |
| // Don't attempt to canonicalize newline related characters. |
| return (character == '\n' || character == '\r'); |
| } |
| |
| void Emit(UChar character) { builder_.Append(character); } |
| |
| enum ParseState { |
| // Have not seen an ASCII non-whitespace character yet on |
| // this line. Possible that we might see a preprocessor |
| // directive. |
| kBeginningOfLine, |
| |
| // Have seen at least one ASCII non-whitespace character |
| // on this line. |
| kMiddleOfLine, |
| |
| // Handling a preprocessor directive. Passes through all |
| // characters up to the end of the line. Disables comment |
| // processing. |
| kInPreprocessorDirective, |
| |
| // Handling a single-line comment. The comment text is |
| // replaced with a single space. |
| kInSingleLineComment, |
| |
| // Handling a multi-line comment. Newlines are passed |
| // through to preserve line numbers. |
| kInMultiLineComment |
| }; |
| |
| ParseState parse_state_; |
| String source_string_; |
| unsigned length_; |
| unsigned position_; |
| StringBuilder builder_; |
| }; |
| |
| void StripComments::Process(UChar c) { |
| if (IsNewline(c)) { |
| // No matter what state we are in, pass through newlines |
| // so we preserve line numbers. |
| Emit(c); |
| |
| if (parse_state_ != kInMultiLineComment) |
| parse_state_ = kBeginningOfLine; |
| |
| return; |
| } |
| |
| UChar temp = 0; |
| switch (parse_state_) { |
| case kBeginningOfLine: |
| if (WTF::IsASCIISpace(c)) { |
| Emit(c); |
| break; |
| } |
| |
| if (c == '#') { |
| parse_state_ = kInPreprocessorDirective; |
| Emit(c); |
| break; |
| } |
| |
| // Transition to normal state and re-handle character. |
| parse_state_ = kMiddleOfLine; |
| Process(c); |
| break; |
| |
| case kMiddleOfLine: |
| if (c == '/' && Peek(temp)) { |
| if (temp == '/') { |
| parse_state_ = kInSingleLineComment; |
| Emit(' '); |
| Advance(); |
| break; |
| } |
| |
| if (temp == '*') { |
| parse_state_ = kInMultiLineComment; |
| // Emit the comment start in case the user has |
| // an unclosed comment and we want to later |
| // signal an error. |
| Emit('/'); |
| Emit('*'); |
| Advance(); |
| break; |
| } |
| } |
| |
| Emit(c); |
| break; |
| |
| case kInPreprocessorDirective: |
| // No matter what the character is, just pass it |
| // through. Do not parse comments in this state. This |
| // might not be the right thing to do long term, but it |
| // should handle the #error preprocessor directive. |
| Emit(c); |
| break; |
| |
| case kInSingleLineComment: |
      // Line-continuation characters are processed before comment processing.
      // Advance past a newline character that immediately follows a
      // line-continuation character so the comment continues on the
      // following source line.
| if (c == '\\') { |
| if (Peek(temp) && IsNewline(temp)) |
| Advance(); |
| } |
| |
| // The newline code at the top of this function takes care |
| // of resetting our state when we get out of the |
| // single-line comment. Swallow all other characters. |
| break; |
| |
| case kInMultiLineComment: |
| if (c == '*' && Peek(temp) && temp == '/') { |
| Emit('*'); |
| Emit('/'); |
| parse_state_ = kMiddleOfLine; |
| Advance(); |
| break; |
| } |
| |
| // Swallow all other characters. Unclear whether we may |
| // want or need to just emit a space per character to try |
| // to preserve column numbers for debugging purposes. |
| break; |
| } |
| } |
| |
| static bool g_should_fail_context_creation_for_testing = false; |
| } // namespace |
| |
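// Rebinds the context's saved 2D texture binding when this object goes out
// of scope.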
| class ScopedTexture2DRestorer { |
| STACK_ALLOCATED(); |
| |
| public: |
| explicit ScopedTexture2DRestorer(WebGLRenderingContextBase* context) |
| : context_(context) {} |
| |
| ~ScopedTexture2DRestorer() { context_->RestoreCurrentTexture2D(); } |
| |
| private: |
| Member<WebGLRenderingContextBase> context_; |
| }; |
| |
| class ScopedFramebufferRestorer { |
| STACK_ALLOCATED(); |
| |
| public: |
| explicit ScopedFramebufferRestorer(WebGLRenderingContextBase* context) |
| : context_(context) {} |
| |
| ~ScopedFramebufferRestorer() { context_->RestoreCurrentFramebuffer(); } |
| |
| private: |
| Member<WebGLRenderingContextBase> context_; |
| }; |
| |
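// Resets pixel-unpack state to defaults for the duration of a texture upload
// and restores the user's settings on destruction (when |enabled| is true).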
| class ScopedUnpackParametersResetRestore { |
| STACK_ALLOCATED(); |
| |
| public: |
| explicit ScopedUnpackParametersResetRestore( |
| WebGLRenderingContextBase* context, |
| bool enabled = true) |
| : context_(context), enabled_(enabled) { |
| if (enabled) |
| context_->ResetUnpackParameters(); |
| } |
| |
| ~ScopedUnpackParametersResetRestore() { |
| if (enabled_) |
| context_->RestoreUnpackParameters(); |
| } |
| |
| private: |
| Member<WebGLRenderingContextBase> context_; |
| bool enabled_; |
| }; |
| |
| static void FormatWebGLStatusString(const StringView& gl_info, |
| const StringView& info_string, |
| StringBuilder& builder) { |
| if (info_string.IsEmpty()) |
| return; |
| builder.Append(", "); |
| builder.Append(gl_info); |
| builder.Append(" = "); |
| builder.Append(info_string); |
| } |
| |
| static String ExtractWebGLContextCreationError( |
| const Platform::GraphicsInfo& info) { |
| StringBuilder builder; |
| builder.Append("Could not create a WebGL context"); |
| FormatWebGLStatusString( |
| "VENDOR", |
| info.vendor_id ? String::Format("0x%04x", info.vendor_id) : "0xffff", |
| builder); |
| FormatWebGLStatusString( |
| "DEVICE", |
| info.device_id ? String::Format("0x%04x", info.device_id) : "0xffff", |
| builder); |
| FormatWebGLStatusString("GL_VENDOR", info.vendor_info, builder); |
| FormatWebGLStatusString("GL_RENDERER", info.renderer_info, builder); |
| FormatWebGLStatusString("GL_VERSION", info.driver_version, builder); |
| FormatWebGLStatusString("Sandboxed", info.sandboxed ? "yes" : "no", builder); |
| FormatWebGLStatusString("Optimus", info.optimus ? "yes" : "no", builder); |
| FormatWebGLStatusString("AMD switchable", info.amd_switchable ? "yes" : "no", |
| builder); |
| FormatWebGLStatusString( |
| "Reset notification strategy", |
| String::Format("0x%04x", info.reset_notification_strategy).Utf8().data(), |
| builder); |
| FormatWebGLStatusString("ErrorMessage", info.error_message.Utf8().data(), |
| builder); |
| builder.Append('.'); |
| return builder.ToString(); |
| } |
| |
| struct ContextProviderCreationInfo { |
| // Inputs. |
| Platform::ContextAttributes context_attributes; |
| Platform::GraphicsInfo* gl_info; |
| KURL url; |
| // Outputs. |
| std::unique_ptr<WebGraphicsContext3DProvider> created_context_provider; |
| bool* using_gpu_compositing; |
| }; |
| |
| static void CreateContextProviderOnMainThread( |
| ContextProviderCreationInfo* creation_info, |
| WaitableEvent* waitable_event) { |
| DCHECK(IsMainThread()); |
| // Ask for gpu compositing mode when making the context. The context will be |
| // lost if the mode changes. |
| *creation_info->using_gpu_compositing = |
| !Platform::Current()->IsGpuCompositingDisabled(); |
| creation_info->created_context_provider = |
| Platform::Current()->CreateOffscreenGraphicsContext3DProvider( |
| creation_info->context_attributes, creation_info->url, |
| creation_info->gl_info); |
| waitable_event->Signal(); |
| } |
| |
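// Creates the context provider on the main thread by posting a cross-thread
// task and blocking this worker thread on a WaitableEvent until
// CreateContextProviderOnMainThread() completes.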
| static std::unique_ptr<WebGraphicsContext3DProvider> |
| CreateContextProviderOnWorkerThread( |
| Platform::ContextAttributes context_attributes, |
| Platform::GraphicsInfo* gl_info, |
| bool* using_gpu_compositing, |
| const KURL& url) { |
| WaitableEvent waitable_event; |
| ContextProviderCreationInfo creation_info; |
| creation_info.context_attributes = context_attributes; |
| creation_info.gl_info = gl_info; |
| creation_info.url = url.Copy(); |
| creation_info.using_gpu_compositing = using_gpu_compositing; |
| scoped_refptr<base::SingleThreadTaskRunner> task_runner = |
| Thread::MainThread()->GetTaskRunner(); |
| PostCrossThreadTask(*task_runner, FROM_HERE, |
| CrossThreadBind(&CreateContextProviderOnMainThread, |
| CrossThreadUnretained(&creation_info), |
| CrossThreadUnretained(&waitable_event))); |
| waitable_event.Wait(); |
| return std::move(creation_info.created_context_provider); |
| } |
| |
| bool WebGLRenderingContextBase::SupportOwnOffscreenSurface( |
| ExecutionContext* execution_context) { |
  // Using its own offscreen surface disables virtualized contexts, and this
  // doesn't currently work properly; see https://crbug.com/691102.
| // TODO(https://crbug.com/791755): Remove this function and related code once |
| // the replacement is ready. |
| return false; |
| } |
| |
| std::unique_ptr<WebGraphicsContext3DProvider> |
| WebGLRenderingContextBase::CreateContextProviderInternal( |
| CanvasRenderingContextHost* host, |
| const CanvasContextCreationAttributesCore& attributes, |
| Platform::ContextType context_type, |
| bool* using_gpu_compositing) { |
| DCHECK(host); |
| ExecutionContext* execution_context = host->GetTopExecutionContext(); |
| DCHECK(execution_context); |
| |
| Platform::ContextAttributes context_attributes = ToPlatformContextAttributes( |
| attributes, context_type, SupportOwnOffscreenSurface(execution_context)); |
| |
| Platform::GraphicsInfo gl_info; |
| std::unique_ptr<WebGraphicsContext3DProvider> context_provider; |
| const auto& url = execution_context->Url(); |
| if (IsMainThread()) { |
| // Ask for gpu compositing mode when making the context. The context will be |
| // lost if the mode changes. |
| *using_gpu_compositing = !Platform::Current()->IsGpuCompositingDisabled(); |
| context_provider = |
| Platform::Current()->CreateOffscreenGraphicsContext3DProvider( |
| context_attributes, url, &gl_info); |
| } else { |
| context_provider = CreateContextProviderOnWorkerThread( |
| context_attributes, &gl_info, using_gpu_compositing, url); |
| } |
| if (context_provider && !context_provider->BindToCurrentThread()) { |
| context_provider = nullptr; |
| gl_info.error_message = |
| String("bindToCurrentThread failed: " + String(gl_info.error_message)); |
| } |
| if (!context_provider || g_should_fail_context_creation_for_testing) { |
| g_should_fail_context_creation_for_testing = false; |
| host->HostDispatchEvent( |
| WebGLContextEvent::Create(event_type_names::kWebglcontextcreationerror, |
| ExtractWebGLContextCreationError(gl_info))); |
| return nullptr; |
| } |
| gpu::gles2::GLES2Interface* gl = context_provider->ContextGL(); |
| if (!String(gl->GetString(GL_EXTENSIONS)) |
| .Contains("GL_OES_packed_depth_stencil")) { |
| host->HostDispatchEvent(WebGLContextEvent::Create( |
| event_type_names::kWebglcontextcreationerror, |
| "OES_packed_depth_stencil support is required.")); |
| return nullptr; |
| } |
| return context_provider; |
| } |
| |
| std::unique_ptr<WebGraphicsContext3DProvider> |
| WebGLRenderingContextBase::CreateWebGraphicsContext3DProvider( |
| CanvasRenderingContextHost* host, |
| const CanvasContextCreationAttributesCore& attributes, |
| Platform::ContextType context_type, |
| bool* using_gpu_compositing) { |
| // The host might block creation of a new WebGL context despite the |
| // page settings; in particular, if WebGL contexts were lost one or |
| // more times via the GL_ARB_robustness extension. |
| if (host->IsWebGLBlocked()) { |
| host->SetContextCreationWasBlocked(); |
| host->HostDispatchEvent(WebGLContextEvent::Create( |
| event_type_names::kWebglcontextcreationerror, |
| "Web page caused context loss and was blocked")); |
| return nullptr; |
| } |
| if ((context_type == Platform::kWebGL1ContextType && |
| !host->IsWebGL1Enabled()) || |
| (context_type == Platform::kWebGL2ContextType && |
| !host->IsWebGL2Enabled()) || |
| (context_type == Platform::kWebGL2ComputeContextType && |
| !host->IsWebGL2Enabled())) { |
| host->HostDispatchEvent(WebGLContextEvent::Create( |
| event_type_names::kWebglcontextcreationerror, |
| "disabled by enterprise policy or commandline switch")); |
| return nullptr; |
| } |
| |
| return CreateContextProviderInternal(host, attributes, context_type, |
| using_gpu_compositing); |
| } |
| |
| void WebGLRenderingContextBase::ForceNextWebGLContextCreationToFail() { |
| g_should_fail_context_creation_for_testing = true; |
| } |
| |
| ImageBitmap* WebGLRenderingContextBase::TransferToImageBitmapBase( |
| ScriptState* script_state) { |
| WebFeature feature = WebFeature::kOffscreenCanvasTransferToImageBitmapWebGL; |
| UseCounter::Count(ExecutionContext::From(script_state), feature); |
| return ImageBitmap::Create( |
| GetDrawingBuffer()->TransferToStaticBitmapImage(nullptr)); |
| } |
| |
| void WebGLRenderingContextBase::commit() { |
| int width = GetDrawingBuffer()->Size().Width(); |
| int height = GetDrawingBuffer()->Size().Height(); |
| |
| if (PaintRenderingResultsToCanvas(kBackBuffer)) { |
| if (Host()->GetOrCreateCanvasResourceProvider(kPreferAcceleration)) { |
| Host()->Commit(Host()->ResourceProvider()->ProduceFrame(), |
| SkIRect::MakeWH(width, height)); |
| } |
| } |
| MarkLayerComposited(); |
| } |
| |
| scoped_refptr<StaticBitmapImage> |
| WebGLRenderingContextBase::GetStaticBitmapImage( |
| std::unique_ptr<viz::SingleReleaseCallback>* out_release_callback) { |
| if (!GetDrawingBuffer()) |
| return nullptr; |
| |
| if (CreationAttributes().preserve_drawing_buffer) |
| return GetImage(); |
| |
| return GetDrawingBuffer()->TransferToStaticBitmapImage(out_release_callback); |
| } |
| |
| scoped_refptr<StaticBitmapImage> WebGLRenderingContextBase::GetImage( |
| AccelerationHint hint) const { |
| if (!GetDrawingBuffer()) |
| return nullptr; |
| GetDrawingBuffer()->ResolveAndBindForReadAndDraw(); |
| // Use the drawing buffer size here instead of the canvas size to ensure that |
| // sizing is consistent for the GetStaticBitmapImage() result. The forced |
| // downsizing logic in Reshape() can lead to the drawing buffer being smaller |
  // than the canvas size. See https://crbug.com/845742.
| IntSize size = GetDrawingBuffer()->Size(); |
| // Since we are grabbing a snapshot that is not for compositing, we use a |
| // custom resource provider. This avoids consuming compositing-specific |
  // resources (e.g. GpuMemoryBuffer).
| std::unique_ptr<CanvasResourceProvider> resource_provider = |
| CanvasResourceProvider::Create( |
| size, CanvasResourceProvider::kAcceleratedResourceUsage, |
| SharedGpuContext::ContextProviderWrapper(), 0, ColorParams(), |
| CanvasResourceProvider::kDefaultPresentationMode, |
| nullptr /* canvas_resource_dispatcher */, is_origin_top_left_); |
| if (!resource_provider || !resource_provider->IsValid()) |
| return nullptr; |
| if (!CopyRenderingResultsFromDrawingBuffer(resource_provider.get(), |
| kBackBuffer)) { |
    // CopyRenderingResultsFromDrawingBuffer is expected to always succeed
    // because we've explicitly created an accelerated surface and have
    // already validated it.
| NOTREACHED(); |
| return nullptr; |
| } |
| return resource_provider->Snapshot(); |
| } |
| |
| ScriptPromise WebGLRenderingContextBase::makeXRCompatible( |
| ScriptState* script_state) { |
| if (isContextLost()) { |
| return ScriptPromise::RejectWithDOMException( |
| script_state, DOMException::Create(DOMExceptionCode::kInvalidStateError, |
| "Context lost.")); |
| } |
| |
| if (xr_compatible_) { |
| // Returns a script promise resolved with undefined. |
| return ScriptPromise::CastUndefined(script_state); |
| } |
| |
| if (ContextCreatedOnXRCompatibleAdapter()) { |
| xr_compatible_ = true; |
| return ScriptPromise::CastUndefined(script_state); |
| } |
| |
| // TODO(http://crbug.com/876140) Trigger context loss and recreate on |
| // compatible GPU. |
| return ScriptPromise::RejectWithDOMException( |
| script_state, |
| DOMException::Create( |
| DOMExceptionCode::kNotSupportedError, |
| "Context is not compatible. Switching not yet implemented.")); |
| } |
| |
| bool WebGLRenderingContextBase::IsXRCompatible() { |
| return xr_compatible_; |
| } |
| |
| namespace { |
| |
| // Exposed by GL_ANGLE_depth_texture |
| static const GLenum kSupportedInternalFormatsOESDepthTex[] = { |
| GL_DEPTH_COMPONENT, GL_DEPTH_STENCIL, |
| }; |
| |
| // Exposed by GL_EXT_sRGB |
| static const GLenum kSupportedInternalFormatsEXTsRGB[] = { |
| GL_SRGB, GL_SRGB_ALPHA_EXT, |
| }; |
| |
| // ES3 enums supported by both CopyTexImage and TexImage. |
| static const GLenum kSupportedInternalFormatsES3[] = { |
| GL_R8, GL_RG8, GL_RGB565, GL_RGB8, GL_RGBA4, |
| GL_RGB5_A1, GL_RGBA8, GL_RGB10_A2, GL_RGB10_A2UI, GL_SRGB8, |
| GL_SRGB8_ALPHA8, GL_R8I, GL_R8UI, GL_R16I, GL_R16UI, |
| GL_R32I, GL_R32UI, GL_RG8I, GL_RG8UI, GL_RG16I, |
| GL_RG16UI, GL_RG32I, GL_RG32UI, GL_RGBA8I, GL_RGBA8UI, |
| GL_RGBA16I, GL_RGBA16UI, GL_RGBA32I, GL_RGBA32UI, GL_RGB32I, |
| GL_RGB32UI, GL_RGB8I, GL_RGB8UI, GL_RGB16I, GL_RGB16UI, |
| }; |
| |
| // ES3 enums only supported by TexImage |
| static const GLenum kSupportedInternalFormatsTexImageES3[] = { |
| GL_R8_SNORM, |
| GL_R16F, |
| GL_R32F, |
| GL_RG8_SNORM, |
| GL_RG16F, |
| GL_RG32F, |
| GL_RGB8_SNORM, |
| GL_R11F_G11F_B10F, |
| GL_RGB9_E5, |
| GL_RGB16F, |
| GL_RGB32F, |
| GL_RGBA8_SNORM, |
| GL_RGBA16F, |
| GL_RGBA32F, |
| GL_DEPTH_COMPONENT16, |
| GL_DEPTH_COMPONENT24, |
| GL_DEPTH_COMPONENT32F, |
| GL_DEPTH24_STENCIL8, |
| GL_DEPTH32F_STENCIL8, |
| }; |
| |
| // Exposed by EXT_color_buffer_float |
| static const GLenum kSupportedInternalFormatsCopyTexImageFloatES3[] = { |
| GL_R16F, GL_R32F, GL_RG16F, GL_RG32F, GL_RGB16F, |
| GL_RGB32F, GL_RGBA16F, GL_RGBA32F, GL_R11F_G11F_B10F}; |
| |
| // ES3 enums supported by TexImageSource |
| static const GLenum kSupportedInternalFormatsTexImageSourceES3[] = { |
| GL_R8, GL_R16F, GL_R32F, GL_R8UI, GL_RG8, |
| GL_RG16F, GL_RG32F, GL_RG8UI, GL_RGB8, GL_SRGB8, |
| GL_RGB565, GL_R11F_G11F_B10F, GL_RGB9_E5, GL_RGB16F, GL_RGB32F, |
| GL_RGB8UI, GL_RGBA8, GL_SRGB8_ALPHA8, GL_RGB5_A1, GL_RGBA4, |
| GL_RGBA16F, GL_RGBA32F, GL_RGBA8UI, GL_RGB10_A2, |
| }; |
| |
| // ES2 enums |
| // Internalformat must equal format in ES2. |
| static const GLenum kSupportedFormatsES2[] = { |
| GL_RGB, GL_RGBA, GL_LUMINANCE_ALPHA, GL_LUMINANCE, GL_ALPHA, |
| }; |
| |
| // Exposed by GL_ANGLE_depth_texture |
| static const GLenum kSupportedFormatsOESDepthTex[] = { |
| GL_DEPTH_COMPONENT, GL_DEPTH_STENCIL, |
| }; |
| |
| // Exposed by GL_EXT_sRGB |
| static const GLenum kSupportedFormatsEXTsRGB[] = { |
| GL_SRGB, GL_SRGB_ALPHA_EXT, |
| }; |
| |
| // ES3 enums |
| static const GLenum kSupportedFormatsES3[] = { |
| GL_RED, GL_RED_INTEGER, GL_RG, |
| GL_RG_INTEGER, GL_RGB, GL_RGB_INTEGER, |
| GL_RGBA, GL_RGBA_INTEGER, GL_DEPTH_COMPONENT, |
| GL_DEPTH_STENCIL, |
| }; |
| |
| // ES3 enums supported by TexImageSource |
| static const GLenum kSupportedFormatsTexImageSourceES3[] = { |
| GL_RED, GL_RED_INTEGER, GL_RG, GL_RG_INTEGER, |
| GL_RGB, GL_RGB_INTEGER, GL_RGBA, GL_RGBA_INTEGER, |
| }; |
| |
| // ES2 enums |
| static const GLenum kSupportedTypesES2[] = { |
| GL_UNSIGNED_BYTE, GL_UNSIGNED_SHORT_5_6_5, GL_UNSIGNED_SHORT_4_4_4_4, |
| GL_UNSIGNED_SHORT_5_5_5_1, |
| }; |
| |
| // Exposed by GL_OES_texture_float |
| static const GLenum kSupportedTypesOESTexFloat[] = { |
| GL_FLOAT, |
| }; |
| |
| // Exposed by GL_OES_texture_half_float |
| static const GLenum kSupportedTypesOESTexHalfFloat[] = { |
| GL_HALF_FLOAT_OES, |
| }; |
| |
| // Exposed by GL_ANGLE_depth_texture |
| static const GLenum kSupportedTypesOESDepthTex[] = { |
| GL_UNSIGNED_SHORT, GL_UNSIGNED_INT, GL_UNSIGNED_INT_24_8, |
| }; |
| |
| // ES3 enums |
| static const GLenum kSupportedTypesES3[] = { |
| GL_BYTE, |
| GL_UNSIGNED_SHORT, |
| GL_SHORT, |
| GL_UNSIGNED_INT, |
| GL_INT, |
| GL_HALF_FLOAT, |
| GL_FLOAT, |
| GL_UNSIGNED_INT_2_10_10_10_REV, |
| GL_UNSIGNED_INT_10F_11F_11F_REV, |
| GL_UNSIGNED_INT_5_9_9_9_REV, |
| GL_UNSIGNED_INT_24_8, |
| GL_FLOAT_32_UNSIGNED_INT_24_8_REV, |
| }; |
| |
| // ES3 enums supported by TexImageSource |
| static const GLenum kSupportedTypesTexImageSourceES3[] = { |
| GL_HALF_FLOAT, GL_FLOAT, GL_UNSIGNED_INT_10F_11F_11F_REV, |
| GL_UNSIGNED_INT_2_10_10_10_REV, |
| }; |
| |
| } // namespace |
| |
| WebGLRenderingContextBase::WebGLRenderingContextBase( |
| CanvasRenderingContextHost* host, |
| std::unique_ptr<WebGraphicsContext3DProvider> context_provider, |
| bool using_gpu_compositing, |
| const CanvasContextCreationAttributesCore& requested_attributes, |
| Platform::ContextType version) |
| : WebGLRenderingContextBase( |
| host, |
| host->GetTopExecutionContext()->GetTaskRunner(TaskType::kWebGL), |
| std::move(context_provider), |
| using_gpu_compositing, |
| requested_attributes, |
| version) {} |
| |
| WebGLRenderingContextBase::WebGLRenderingContextBase( |
| CanvasRenderingContextHost* host, |
| scoped_refptr<base::SingleThreadTaskRunner> task_runner, |
| std::unique_ptr<WebGraphicsContext3DProvider> context_provider, |
| bool using_gpu_compositing, |
| const CanvasContextCreationAttributesCore& requested_attributes, |
| Platform::ContextType context_type) |
| : CanvasRenderingContext(host, requested_attributes), |
| context_group_(MakeGarbageCollected<WebGLContextGroup>()), |
| dispatch_context_lost_event_timer_( |
| task_runner, |
| this, |
| &WebGLRenderingContextBase::DispatchContextLostEvent), |
| restore_timer_(task_runner, |
| this, |
| &WebGLRenderingContextBase::MaybeRestoreContext), |
| task_runner_(task_runner), |
| num_gl_errors_to_console_allowed_(kMaxGLErrorsAllowedToConsole), |
| context_type_(context_type) { |
| DCHECK(context_provider); |
| |
| // TODO(http://crbug.com/876140) Make sure this is being created on a |
| // compatible adapter. |
| xr_compatible_ = requested_attributes.xr_compatible; |
| |
| context_group_->AddContext(this); |
| |
| max_viewport_dims_[0] = max_viewport_dims_[1] = 0; |
| context_provider->ContextGL()->GetIntegerv(GL_MAX_VIEWPORT_DIMS, |
| max_viewport_dims_); |
| |
| scoped_refptr<DrawingBuffer> buffer; |
| buffer = |
| CreateDrawingBuffer(std::move(context_provider), using_gpu_compositing); |
| if (!buffer) { |
| context_lost_mode_ = kSyntheticLostContext; |
| return; |
| } |
| |
| InitializeWebGLContextLimits(buffer->webgl_context_limits()); |
| drawing_buffer_ = std::move(buffer); |
| GetDrawingBuffer()->Bind(GL_FRAMEBUFFER); |
| SetupFlags(); |
| |
| String disabled_webgl_extensions(GetDrawingBuffer() |
| ->ContextProvider() |
| ->GetGpuFeatureInfo() |
| .disabled_webgl_extensions.c_str()); |
| Vector<String> disabled_extension_list; |
| disabled_webgl_extensions.Split(' ', disabled_extension_list); |
| for (const auto& entry : disabled_extension_list) { |
| disabled_extensions_.insert(entry); |
| } |
| |
| #define ADD_VALUES_TO_SET(set, values) \ |
| for (size_t i = 0; i < base::size(values); ++i) { \ |
| set.insert(values[i]); \ |
| } |
| |
| ADD_VALUES_TO_SET(supported_internal_formats_, kSupportedFormatsES2); |
| ADD_VALUES_TO_SET(supported_tex_image_source_internal_formats_, |
| kSupportedFormatsES2); |
| ADD_VALUES_TO_SET(supported_internal_formats_copy_tex_image_, |
| kSupportedFormatsES2); |
| ADD_VALUES_TO_SET(supported_formats_, kSupportedFormatsES2); |
| ADD_VALUES_TO_SET(supported_tex_image_source_formats_, kSupportedFormatsES2); |
| ADD_VALUES_TO_SET(supported_types_, kSupportedTypesES2); |
| ADD_VALUES_TO_SET(supported_tex_image_source_types_, kSupportedTypesES2); |
| } |
| |
| scoped_refptr<DrawingBuffer> WebGLRenderingContextBase::CreateDrawingBuffer( |
| std::unique_ptr<WebGraphicsContext3DProvider> context_provider, |
| bool using_gpu_compositing) { |
| bool premultiplied_alpha = CreationAttributes().premultiplied_alpha; |
| bool want_alpha_channel = CreationAttributes().alpha; |
| bool want_depth_buffer = CreationAttributes().depth; |
| bool want_stencil_buffer = CreationAttributes().stencil; |
| bool want_antialiasing = CreationAttributes().antialias; |
| DrawingBuffer::PreserveDrawingBuffer preserve = |
| CreationAttributes().preserve_drawing_buffer ? DrawingBuffer::kPreserve |
| : DrawingBuffer::kDiscard; |
| DrawingBuffer::WebGLVersion web_gl_version = DrawingBuffer::kWebGL1; |
| if (context_type_ == Platform::kWebGL1ContextType) { |
| web_gl_version = DrawingBuffer::kWebGL1; |
| } else if (context_type_ == Platform::kWebGL2ContextType) { |
| web_gl_version = DrawingBuffer::kWebGL2; |
| } else if (context_type_ == Platform::kWebGL2ComputeContextType) { |
| web_gl_version = DrawingBuffer::kWebGL2Compute; |
| } else { |
| NOTREACHED(); |
| } |
| |
  // On Mac OS, DrawingBuffer uses an IOSurface as its backing storage; this
  // allows WebGL-rendered canvases to be composited by the OS rather than
  // Chrome.
  // IOSurfaces are only compatible with the GL_TEXTURE_RECTANGLE_ARB binding
  // target. To avoid spreading knowledge of GL_TEXTURE_RECTANGLE_ARB textures
  // into more areas of the code, OffscreenCanvas uses the
  // non-WebGLImageChromium code path.
| // See detailed discussion in crbug.com/649668. |
| DrawingBuffer::ChromiumImageUsage chromium_image_usage = |
| Host()->IsOffscreenCanvas() ? DrawingBuffer::kDisallowChromiumImage |
| : DrawingBuffer::kAllowChromiumImage; |
| |
| return DrawingBuffer::Create( |
| std::move(context_provider), using_gpu_compositing, this, |
| ClampedCanvasSize(), premultiplied_alpha, want_alpha_channel, |
| want_depth_buffer, want_stencil_buffer, want_antialiasing, preserve, |
| web_gl_version, chromium_image_usage, ColorParams()); |
| } |
| |
| void WebGLRenderingContextBase::InitializeNewContext() { |
| DCHECK(!isContextLost()); |
| DCHECK(GetDrawingBuffer()); |
| |
  // TODO(http://crbug.com/876140) Does compatible_xr_device need to be taken
  // into account here?
| |
| marked_canvas_dirty_ = false; |
| must_paint_to_canvas_ = false; |
| active_texture_unit_ = 0; |
| pack_alignment_ = 4; |
| unpack_alignment_ = 4; |
| unpack_flip_y_ = false; |
| unpack_premultiply_alpha_ = false; |
| unpack_colorspace_conversion_ = GC3D_BROWSER_DEFAULT_WEBGL; |
| bound_array_buffer_ = nullptr; |
| current_program_ = nullptr; |
| framebuffer_binding_ = nullptr; |
| renderbuffer_binding_ = nullptr; |
| depth_mask_ = true; |
| stencil_enabled_ = false; |
| stencil_mask_ = 0xFFFFFFFF; |
| stencil_mask_back_ = 0xFFFFFFFF; |
| stencil_func_ref_ = 0; |
| stencil_func_ref_back_ = 0; |
| stencil_func_mask_ = 0xFFFFFFFF; |
| stencil_func_mask_back_ = 0xFFFFFFFF; |
| num_gl_errors_to_console_allowed_ = kMaxGLErrorsAllowedToConsole; |
| |
| clear_color_[0] = clear_color_[1] = clear_color_[2] = clear_color_[3] = 0; |
| scissor_enabled_ = false; |
| clear_depth_ = 1; |
| clear_stencil_ = 0; |
| color_mask_[0] = color_mask_[1] = color_mask_[2] = color_mask_[3] = true; |
| |
| GLint num_combined_texture_image_units = 0; |
| ContextGL()->GetIntegerv(GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, |
| &num_combined_texture_image_units); |
| texture_units_.clear(); |
| texture_units_.resize(num_combined_texture_image_units); |
| |
| GLint num_vertex_attribs = 0; |
| ContextGL()->GetIntegerv(GL_MAX_VERTEX_ATTRIBS, &num_vertex_attribs); |
| max_vertex_attribs_ = num_vertex_attribs; |
| |
| max_texture_size_ = 0; |
| ContextGL()->GetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size_); |
| max_texture_level_ = |
| WebGLTexture::ComputeLevelCount(max_texture_size_, max_texture_size_, 1); |
| max_cube_map_texture_size_ = 0; |
| ContextGL()->GetIntegerv(GL_MAX_CUBE_MAP_TEXTURE_SIZE, |
| &max_cube_map_texture_size_); |
| max3d_texture_size_ = 0; |
| max3d_texture_level_ = 0; |
| max_array_texture_layers_ = 0; |
| if (IsWebGL2OrHigher()) { |
| ContextGL()->GetIntegerv(GL_MAX_3D_TEXTURE_SIZE, &max3d_texture_size_); |
| max3d_texture_level_ = WebGLTexture::ComputeLevelCount( |
| max3d_texture_size_, max3d_texture_size_, max3d_texture_size_); |
| ContextGL()->GetIntegerv(GL_MAX_ARRAY_TEXTURE_LAYERS, |
| &max_array_texture_layers_); |
| } |
| max_cube_map_texture_level_ = WebGLTexture::ComputeLevelCount( |
| max_cube_map_texture_size_, max_cube_map_texture_size_, 1); |
| max_renderbuffer_size_ = 0; |
| ContextGL()->GetIntegerv(GL_MAX_RENDERBUFFER_SIZE, &max_renderbuffer_size_); |
| |
| // These two values from EXT_draw_buffers are lazily queried. |
| max_draw_buffers_ = 0; |
| max_color_attachments_ = 0; |
| |
| back_draw_buffer_ = GL_BACK; |
| |
| read_buffer_of_default_framebuffer_ = GL_BACK; |
| |
| default_vertex_array_object_ = WebGLVertexArrayObject::Create( |
| this, WebGLVertexArrayObjectBase::kVaoTypeDefault); |
| |
| bound_vertex_array_object_ = default_vertex_array_object_; |
| |
| vertex_attrib_type_.resize(max_vertex_attribs_); |
| |
| ContextGL()->Viewport(0, 0, drawingBufferWidth(), drawingBufferHeight()); |
| scissor_box_[0] = scissor_box_[1] = 0; |
| scissor_box_[2] = drawingBufferWidth(); |
| scissor_box_[3] = drawingBufferHeight(); |
| ContextGL()->Scissor(scissor_box_[0], scissor_box_[1], scissor_box_[2], |
| scissor_box_[3]); |
| |
| GetDrawingBuffer()->ContextProvider()->SetLostContextCallback( |
| WTF::BindRepeating(&WebGLRenderingContextBase::ForceLostContext, |
| WrapWeakPersistent(this), |
| WebGLRenderingContextBase::kRealLostContext, |
| WebGLRenderingContextBase::kAuto)); |
| GetDrawingBuffer()->ContextProvider()->SetErrorMessageCallback( |
| WTF::BindRepeating(&WebGLRenderingContextBase::OnErrorMessage, |
| WrapWeakPersistent(this))); |
| |
  // If the context has the flip_y extension, it behaves as if the origin of
  // coordinates is at the top left.
| is_origin_top_left_ = GetDrawingBuffer() |
| ->ContextProvider() |
| ->GetCapabilities() |
| .mesa_framebuffer_flip_y; |
| |
| // If WebGL 2, the PRIMITIVE_RESTART_FIXED_INDEX should be always enabled. |
| // See the section <Primitive Restart is Always Enabled> in WebGL 2 spec: |
| // https://www.khronos.org/registry/webgl/specs/latest/2.0/#4.1.4 |
| if (IsWebGL2OrHigher()) |
| ContextGL()->Enable(GL_PRIMITIVE_RESTART_FIXED_INDEX); |
| |
| // This ensures that the context has a valid "lastFlushID" and won't be |
| // mistakenly identified as the "least recently used" context. |
| ContextGL()->Flush(); |
| |
| for (int i = 0; i < kWebGLExtensionNameCount; ++i) |
| extension_enabled_[i] = false; |
| |
| is_web_gl2_formats_types_added_ = false; |
| is_web_gl2_tex_image_source_formats_types_added_ = false; |
| is_web_gl2_internal_formats_copy_tex_image_added_ = false; |
| is_oes_texture_float_formats_types_added_ = false; |
| is_oes_texture_half_float_formats_types_added_ = false; |
| is_web_gl_depth_texture_formats_types_added_ = false; |
| is_ext_srgb_formats_types_added_ = false; |
| is_ext_color_buffer_float_formats_added_ = false; |
| |
| supported_internal_formats_.clear(); |
| ADD_VALUES_TO_SET(supported_internal_formats_, kSupportedFormatsES2); |
| supported_tex_image_source_internal_formats_.clear(); |
| ADD_VALUES_TO_SET(supported_tex_image_source_internal_formats_, |
| kSupportedFormatsES2); |
| supported_internal_formats_copy_tex_image_.clear(); |
| ADD_VALUES_TO_SET(supported_internal_formats_copy_tex_image_, |
| kSupportedFormatsES2); |
| supported_formats_.clear(); |
| ADD_VALUES_TO_SET(supported_formats_, kSupportedFormatsES2); |
| supported_tex_image_source_formats_.clear(); |
| ADD_VALUES_TO_SET(supported_tex_image_source_formats_, kSupportedFormatsES2); |
| supported_types_.clear(); |
| ADD_VALUES_TO_SET(supported_types_, kSupportedTypesES2); |
| supported_tex_image_source_types_.clear(); |
| ADD_VALUES_TO_SET(supported_tex_image_source_types_, kSupportedTypesES2); |
| |
  // The DrawingBuffer was unable to store the state that it dirtied when it
  // was initialized. Restore it now.
| GetDrawingBuffer()->RestoreAllState(); |
| ActivateContext(this); |
| } |
| |
| void WebGLRenderingContextBase::SetupFlags() { |
| DCHECK(GetDrawingBuffer()); |
| if (canvas()) { |
| if (Page* p = canvas()->GetDocument().GetPage()) { |
| synthesized_errors_to_console_ = |
| p->GetSettings().GetWebGLErrorsToConsoleEnabled(); |
| } |
| } |
| |
| is_depth_stencil_supported_ = |
| ExtensionsUtil()->IsExtensionEnabled("GL_OES_packed_depth_stencil"); |
| } |
| |
| void WebGLRenderingContextBase::AddCompressedTextureFormat(GLenum format) { |
| if (!compressed_texture_formats_.Contains(format)) |
| compressed_texture_formats_.push_back(format); |
| } |
| |
| void WebGLRenderingContextBase::RemoveAllCompressedTextureFormats() { |
| compressed_texture_formats_.clear(); |
| } |
| |
| // Helper function for V8 bindings to identify what version of WebGL a |
| // CanvasRenderingContext supports. |
| unsigned WebGLRenderingContextBase::GetWebGLVersion( |
| const CanvasRenderingContext* context) { |
| if (!context->Is3d()) |
| return 0; |
| return static_cast<const WebGLRenderingContextBase*>(context)->ContextType(); |
| } |
| |
| WebGLRenderingContextBase::~WebGLRenderingContextBase() { |
| // It's forbidden to refer to other GC'd objects in a GC'd object's |
| // destructor. It's useful for DrawingBuffer to guarantee that it |
| // calls its DrawingBufferClient during its own destruction, but if |
| // the WebGL context is also being destroyed, then it's essential |
| // that the DrawingBufferClient methods not try to touch other |
| // objects like WebGLTextures that were previously hooked into the |
| // context state. |
| destruction_in_progress_ = true; |
| |
| // Now that the context and context group no longer hold on to the |
| // objects they create, and now that the objects are eagerly finalized |
| // rather than the context, there is very little useful work that this |
| // destructor can do, since it's not allowed to touch other on-heap |
| // objects. All it can do is destroy its underlying context, which, if |
| // there are no other contexts in the same share group, will cause all of |
| // the underlying graphics resources to be deleted. (Currently, it's |
| // always the case that there are no other contexts in the same share |
| // group -- resource sharing between WebGL contexts is not yet |
| // implemented, and due to its complex semantics, it's doubtful that it |
| // ever will be.) |
| DestroyContext(); |
| |
| // Now that this context is destroyed, see if there's a |
| // previously-evicted one that should be restored. |
| RestoreEvictedContext(this); |
| } |
| |
| void WebGLRenderingContextBase::DestroyContext() { |
| if (!GetDrawingBuffer()) |
| return; |
| |
| extensions_util_.reset(); |
| |
| base::RepeatingClosure null_closure; |
| base::RepeatingCallback<void(const char*, int32_t)> null_function; |
| GetDrawingBuffer()->ContextProvider()->SetLostContextCallback( |
| std::move(null_closure)); |
| GetDrawingBuffer()->ContextProvider()->SetErrorMessageCallback( |
| std::move(null_function)); |
| |
| DCHECK(GetDrawingBuffer()); |
| drawing_buffer_->BeginDestruction(); |
| drawing_buffer_ = nullptr; |
| } |
| |
| void WebGLRenderingContextBase::MarkContextChanged( |
| ContentChangeType change_type) { |
| if (isContextLost()) |
| return; |
| |
| if (framebuffer_binding_) { |
| framebuffer_binding_->SetContentsChanged(true); |
| return; |
| } |
| |
| // Regardless of whether dirty propagations are optimized away, the back |
| // buffer is now out of sync with respect to the canvas's internal backing |
| // store -- which is only used for certain purposes, like printing. |
| must_paint_to_canvas_ = true; |
| |
| if (!GetDrawingBuffer()->MarkContentsChanged() && marked_canvas_dirty_) { |
| return; |
| } |
| |
| if (Host()->IsOffscreenCanvas()) { |
| marked_canvas_dirty_ = true; |
| DidDraw(); |
| return; |
| } |
| |
| if (!canvas()) |
| return; |
| |
| if (!marked_canvas_dirty_) { |
| marked_canvas_dirty_ = true; |
| LayoutBox* layout_box = canvas()->GetLayoutBox(); |
| if (layout_box && layout_box->HasAcceleratedCompositing()) { |
| layout_box->ContentChanged(change_type); |
| } |
| IntSize canvas_size = ClampedCanvasSize(); |
| DidDraw(SkIRect::MakeXYWH(0, 0, canvas_size.Width(), canvas_size.Height())); |
| } |
| } |
| |
| void WebGLRenderingContextBase::DidDraw(const SkIRect& dirty_rect) { |
| MarkContextChanged(kCanvasChanged); |
| CanvasRenderingContext::DidDraw(dirty_rect); |
| } |
| |
| void WebGLRenderingContextBase::DidDraw() { |
| MarkContextChanged(kCanvasChanged); |
| CanvasRenderingContext::DidDraw(); |
| } |
| |
| void WebGLRenderingContextBase::PushFrame() { |
| int width = GetDrawingBuffer()->Size().Width(); |
| int height = GetDrawingBuffer()->Size().Height(); |
| if (PaintRenderingResultsToCanvas(kBackBuffer)) { |
| if (Host()->GetOrCreateCanvasResourceProvider(kPreferAcceleration)) { |
| Host()->PushFrame(Host()->ResourceProvider()->ProduceFrame(), |
| SkIRect::MakeWH(width, height)); |
| } |
| } |
| MarkLayerComposited(); |
| } |
| |
| void WebGLRenderingContextBase::FinalizeFrame() { |
| marked_canvas_dirty_ = false; |
| } |
| |
| void WebGLRenderingContextBase::OnErrorMessage(const char* message, |
| int32_t id) { |
| if (synthesized_errors_to_console_) |
| PrintGLErrorToConsole(message); |
| probe::didFireWebGLErrorOrWarning(canvas(), message); |
| } |
| |
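// Performs the deferred clearing of any drawing buffer attachments still
// marked for auto-clear after compositing. When possible, the user's pending
// clear (described by |mask|) is folded in so their clear color, depth, and
// stencil values are honored.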
| WebGLRenderingContextBase::HowToClear |
| WebGLRenderingContextBase::ClearIfComposited(GLbitfield mask) { |
| if (isContextLost()) |
| return kSkipped; |
| |
| GLbitfield buffers_needing_clearing = |
| GetDrawingBuffer()->GetBuffersToAutoClear(); |
| |
| if (buffers_needing_clearing == 0 || (mask && framebuffer_binding_)) |
| return kSkipped; |
| |
| WebGLContextAttributes* context_attributes = getContextAttributes(); |
| if (!context_attributes) { |
| // Unlikely, but context was lost. |
| return kSkipped; |
| } |
| |
  // Determine if it's possible to combine the clear the user asked for with
  // this clear.
| bool combined_clear = mask && !scissor_enabled_; |
| |
| ContextGL()->Disable(GL_SCISSOR_TEST); |
| if (combined_clear && (mask & GL_COLOR_BUFFER_BIT)) { |
| ContextGL()->ClearColor(color_mask_[0] ? clear_color_[0] : 0, |
| color_mask_[1] ? clear_color_[1] : 0, |
| color_mask_[2] ? clear_color_[2] : 0, |
| color_mask_[3] ? clear_color_[3] : 0); |
| } else { |
| ContextGL()->ClearColor(0, 0, 0, 0); |
| } |
| ContextGL()->ColorMask( |
| true, true, true, |
| !GetDrawingBuffer()->RequiresAlphaChannelToBePreserved()); |
| GLbitfield clear_mask = GL_COLOR_BUFFER_BIT; |
| if (context_attributes->depth()) { |
| if (!combined_clear || !depth_mask_ || !(mask & GL_DEPTH_BUFFER_BIT)) |
| ContextGL()->ClearDepthf(1.0f); |
| clear_mask |= GL_DEPTH_BUFFER_BIT; |
| ContextGL()->DepthMask(true); |
| } |
| if (context_attributes->stencil() || |
| GetDrawingBuffer()->HasImplicitStencilBuffer()) { |
| if (combined_clear && (mask & GL_STENCIL_BUFFER_BIT)) |
| ContextGL()->ClearStencil(clear_stencil_ & stencil_mask_); |
| else |
| ContextGL()->ClearStencil(0); |
| clear_mask |= GL_STENCIL_BUFFER_BIT; |
| ContextGL()->StencilMaskSeparate(GL_FRONT, 0xFFFFFFFF); |
| } |
| |
| ContextGL()->ColorMask( |
| true, true, true, |
| !GetDrawingBuffer()->DefaultBufferRequiresAlphaChannelToBePreserved()); |
| // If the WebGL 2.0 clearBuffer APIs already have been used to |
| // selectively clear some of the buffers, don't destroy those |
| // results. |
| GetDrawingBuffer()->ClearFramebuffers(clear_mask & buffers_needing_clearing); |
| |
| // Call the DrawingBufferClient method to restore scissor test, mask, and |
| // clear values, because we dirtied them above. |
| DrawingBufferClientRestoreScissorTest(); |
| DrawingBufferClientRestoreMaskAndClearValues(); |
| |
| GetDrawingBuffer()->SetBuffersToAutoClear(0); |
| |
| return combined_clear ? kCombinedClear : kJustClear; |
| } |
| |
| void WebGLRenderingContextBase::MarkCompositedAndClearBackbufferIfNeeded() { |
| MarkLayerComposited(); |
| ClearIfComposited(); |
| } |
| |
| void WebGLRenderingContextBase::RestoreScissorEnabled() { |
| if (isContextLost()) |
| return; |
| |
| if (scissor_enabled_) { |
| ContextGL()->Enable(GL_SCISSOR_TEST); |
| } else { |
| ContextGL()->Disable(GL_SCISSOR_TEST); |
| } |
| } |
| |
| void WebGLRenderingContextBase::RestoreScissorBox() { |
| if (isContextLost()) |
| return; |
| |
| ContextGL()->Scissor(scissor_box_[0], scissor_box_[1], scissor_box_[2], |
| scissor_box_[3]); |
| } |
| |
| void WebGLRenderingContextBase::RestoreClearColor() { |
| if (isContextLost()) |
| return; |
| |
| ContextGL()->ClearColor(clear_color_[0], clear_color_[1], clear_color_[2], |
| clear_color_[3]); |
| } |
| |
| void WebGLRenderingContextBase::RestoreColorMask() { |
| if (isContextLost()) |
| return; |
| |
| ContextGL()->ColorMask(color_mask_[0], color_mask_[1], color_mask_[2], |
| color_mask_[3]); |
| } |
| |
| void WebGLRenderingContextBase::MarkLayerComposited() { |
| if (!isContextLost()) |
| GetDrawingBuffer()->ResetBuffersToAutoClear(); |
| } |
| |
| bool WebGLRenderingContextBase::IsOriginTopLeft() const { |
| if (isContextLost()) |
| return false; |
| return is_origin_top_left_; |
| } |
| |
| void WebGLRenderingContextBase::SetIsHidden(bool hidden) { |
| is_hidden_ = hidden; |
| if (GetDrawingBuffer()) |
| GetDrawingBuffer()->SetIsHidden(hidden); |
| |
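// If the context was lost while the page was hidden and automatic recovery
// is allowed, start the restore attempt now that the page is visible again.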
| if (!hidden && isContextLost() && restore_allowed_ && |
| auto_recovery_method_ == kAuto) { |
| DCHECK(!restore_timer_.IsActive()); |
| restore_timer_.StartOneShot(TimeDelta(), FROM_HERE); |
| } |
| } |
| |
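// Copies the WebGL back buffer into the canvas host's resource provider so
// it can be composited or read back. Returns false when there is nothing
// new to paint or the copy cannot be performed.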
| bool WebGLRenderingContextBase::PaintRenderingResultsToCanvas( |
| SourceDrawingBuffer source_buffer) { |
| if (isContextLost()) |
| return false; |
| |
| bool must_clear_now = ClearIfComposited() != kSkipped; |
| if (!must_paint_to_canvas_ && !must_clear_now) |
| return false; |
| |
| must_paint_to_canvas_ = false; |
| |
| if (Host()->ResourceProvider() && |
| Host()->ResourceProvider()->Size() != GetDrawingBuffer()->Size()) { |
| Host()->DiscardResourceProvider(); |
| } |
| |
| if (!Host()->GetOrCreateCanvasResourceProvider(kPreferAcceleration)) |
| return false; |
| |
| ScopedTexture2DRestorer restorer(this); |
| ScopedFramebufferRestorer fbo_restorer(this); |
| |
| GetDrawingBuffer()->ResolveAndBindForReadAndDraw(); |
| if (!CopyRenderingResultsFromDrawingBuffer(Host()->ResourceProvider(), |
| source_buffer)) { |
| // Currently, CopyRenderingResultsFromDrawingBuffer is expected to always |
// succeed because cases where canvas()->buffer() is not accelerated are
| // handled before reaching this point. If that assumption ever stops |
| // holding true, we may need to implement a fallback right here. |
| NOTREACHED(); |
| return false; |
| } |
| return true; |
| } |
| |
| void WebGLRenderingContextBase::ProvideBackBufferToResourceProvider() const { |
| if (isContextLost()) |
| return; |
| |
| DCHECK(Host()->ResourceProvider()); |
| if (Host()->ResourceProvider()->Size() != GetDrawingBuffer()->Size()) |
| Host()->DiscardResourceProvider(); |
| |
| CanvasResourceProvider* resource_provider = |
| Host()->GetOrCreateCanvasResourceProvider(kPreferAcceleration); |
| if (!resource_provider || !resource_provider->IsAccelerated()) |
| return; |
| |
| resource_provider->ImportResource( |
| GetDrawingBuffer()->AsCanvasResource(resource_provider->CreateWeakPtr())); |
| } |
| |
| bool WebGLRenderingContextBase::ContextCreatedOnXRCompatibleAdapter() { |
| // TODO(http://crbug.com/876140) Determine if device is compatible with |
| // current context. |
| return true; |
| } |
| |
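// Copies the drawing buffer's contents into |resource_provider|: a
// GPU-to-GPU texture copy on the shared context when the provider is
// accelerated, otherwise a software drawImage of the current image.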
| bool WebGLRenderingContextBase::CopyRenderingResultsFromDrawingBuffer( |
| CanvasResourceProvider* resource_provider, |
| SourceDrawingBuffer source_buffer) const { |
| if (!drawing_buffer_) |
| return false; |
| if (resource_provider->IsAccelerated()) { |
| base::WeakPtr<WebGraphicsContext3DProviderWrapper> shared_context_wrapper = |
| SharedGpuContext::ContextProviderWrapper(); |
| if (!shared_context_wrapper) |
| return false; |
| gpu::gles2::GLES2Interface* gl = |
| shared_context_wrapper->ContextProvider()->ContextGL(); |
| GLuint texture_id = |
| resource_provider->GetBackingTextureHandleForOverwrite(); |
| if (!texture_id) |
| return false; |
| |
| // TODO(xlai): Flush should not be necessary if the synchronization in |
| // CopyToPlatformTexture is done correctly. See crbug.com/794706. |
| gl->Flush(); |
| |
| bool flip_y = is_origin_top_left_ && !canvas()->LowLatencyEnabled(); |
| return drawing_buffer_->CopyToPlatformTexture( |
| gl, GL_TEXTURE_2D, texture_id, true, flip_y, |
| IntPoint(0, 0), IntRect(IntPoint(0, 0), drawing_buffer_->Size()), |
| source_buffer); |
| } |
| |
| // Note: This code path could work for all cases. The only reason there |
| // is a separate path for the accelerated case is that we assume texture |
| // copying is faster than drawImage. |
| scoped_refptr<StaticBitmapImage> image = GetImage(kPreferAcceleration); |
| if (!image) |
| return false; |
| cc::PaintFlags paint_flags; |
| paint_flags.setBlendMode(SkBlendMode::kSrc); |
| resource_provider->Canvas()->drawImage(image->PaintImageForCurrentFrame(), 0, |
| 0, &paint_flags); |
| return true; |
| } |
| |
| IntSize WebGLRenderingContextBase::DrawingBufferSize() const { |
| if (isContextLost()) |
| return IntSize(0, 0); |
| return GetDrawingBuffer()->Size(); |
| } |
| |
| scoped_refptr<Uint8Array> |
| WebGLRenderingContextBase::PaintRenderingResultsToDataArray( |
| SourceDrawingBuffer source_buffer) { |
| if (isContextLost()) |
| return nullptr; |
| ClearIfComposited(); |
| GetDrawingBuffer()->ResolveAndBindForReadAndDraw(); |
| ScopedFramebufferRestorer restorer(this); |
| return GetDrawingBuffer()->PaintRenderingResultsToDataArray(source_buffer); |
| } |
| |
| void WebGLRenderingContextBase::Reshape(int width, int height) { |
| if (isContextLost()) |
| return; |
| |
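// If a PIXEL_UNPACK_BUFFER is bound (WebGL 2), unbind it for the duration of
// the resize: the drawing buffer reallocates its backing store with
// TexImage2D-style calls, which could otherwise source their pixel data from
// the bound buffer. The previous binding is restored at the end of this
// method.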
| GLint buffer = 0; |
| if (IsWebGL2OrHigher()) { |
// This query returns the client-side cached binding, so it is cheap. If
// that ever changes, the query would become expensive and should be
// avoided.
| ContextGL()->GetIntegerv(GL_PIXEL_UNPACK_BUFFER_BINDING, &buffer); |
| if (buffer) { |
| ContextGL()->BindBuffer(GL_PIXEL_UNPACK_BUFFER, 0); |
| } |
| } |
| |
// This is an approximation because, at the WebGLRenderingContextBase level,
// we don't know whether the underlying FBO uses textures or renderbuffers.
| GLint max_size = std::min(max_texture_size_, max_renderbuffer_size_); |
| GLint max_width = std::min(max_size, max_viewport_dims_[0]); |
| GLint max_height = std::min(max_size, max_viewport_dims_[1]); |
| width = Clamp(width, 1, max_width); |
| height = Clamp(height, 1, max_height); |
| |
| // Limit drawing buffer area to 4k*4k to avoid memory exhaustion. Width or |
| // height may be larger than 4k as long as it's within the max viewport |
| // dimensions and total area remains within the limit. |
| // For example: 5120x2880 should be fine. |
| const int kMaxArea = 4096 * 4096; |
| int current_area = width * height; |
| if (current_area > kMaxArea) { |
// If we've exceeded the area limit, scale the buffer down, preserving
// aspect ratio, until it fits.
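// For example, an 8192x4096 request (area 33554432) is scaled by
// sqrt(16777216 / 33554432) ~= 0.707, giving 5792x2896, whose area
// (16773632) is back under the 4096*4096 limit.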
| float scale_factor = |
| sqrtf(static_cast<float>(kMaxArea) / static_cast<float>(current_area)); |
| width = std::max(1, static_cast<int>(width * scale_factor)); |
| height = std::max(1, static_cast<int>(height * scale_factor)); |
| } |
| |
| // We don't have to mark the canvas as dirty, since the newly created image |
| // buffer will also start off clear (and this matches what reshape will do). |
| GetDrawingBuffer()->Resize(IntSize(width, height)); |
| |
| if (buffer) { |
| ContextGL()->BindBuffer(GL_PIXEL_UNPACK_BUFFER, |
| static_cast<GLuint>(buffer)); |
| } |
| } |
| |
| int WebGLRenderingContextBase::drawingBufferWidth() const { |
| return isContextLost() ? 0 : GetDrawingBuffer()->Size().Width(); |
| } |
| |
| int WebGLRenderingContextBase::drawingBufferHeight() const { |
| return isContextLost() ? 0 : GetDrawingBuffer()->Size().Height(); |
| } |
| |
| void WebGLRenderingContextBase::activeTexture(GLenum texture) { |
| if (isContextLost()) |
| return; |
| if (texture - GL_TEXTURE0 >= texture_units_.size()) { |
| SynthesizeGLError(GL_INVALID_ENUM, "activeTexture", |
| "texture unit out of range"); |
| return; |
| } |
| active_texture_unit_ = texture - GL_TEXTURE0; |
| ContextGL()->ActiveTexture(texture); |
| } |
| |
| void WebGLRenderingContextBase::attachShader(WebGLProgram* program, |
| WebGLShader* shader) { |
| if (!ValidateWebGLProgramOrShader("attachShader", program) || |
| !ValidateWebGLProgramOrShader("attachShader", shader)) |
| return; |
| if (!program->AttachShader(shader)) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "attachShader", |
| "shader attachment already has shader"); |
| return; |
| } |
| ContextGL()->AttachShader(ObjectOrZero(program), ObjectOrZero(shader)); |
| shader->OnAttached(); |
| } |
| |
| void WebGLRenderingContextBase::bindAttribLocation(WebGLProgram* program, |
| GLuint index, |
| const String& name) { |
| if (!ValidateWebGLObject("bindAttribLocation", program)) |
| return; |
| if (!ValidateLocationLength("bindAttribLocation", name)) |
| return; |
| if (IsPrefixReserved(name)) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "bindAttribLocation", |
| "reserved prefix"); |
| return; |
| } |
| ContextGL()->BindAttribLocation(ObjectOrZero(program), index, |
| name.Utf8().data()); |
| } |
| |
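// WebGL forbids using one buffer object with both the ARRAY_BUFFER and
// ELEMENT_ARRAY_BUFFER targets during its lifetime; the first successful
// bind locks in the target. This helper enforces that rule and records the
// new binding on the context. For example, from script:
//   gl.bindBuffer(gl.ARRAY_BUFFER, b);
//   gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, b);  // generates INVALID_OPERATION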
| bool WebGLRenderingContextBase::ValidateAndUpdateBufferBindTarget( |
| const char* function_name, |
| GLenum target, |
| WebGLBuffer* buffer) { |
| if (!ValidateBufferTarget(function_name, target)) |
| return false; |
| |
| if (buffer && buffer->GetInitialTarget() && |
| buffer->GetInitialTarget() != target) { |
| SynthesizeGLError(GL_INVALID_OPERATION, function_name, |
| "buffers can not be used with multiple targets"); |
| return false; |
| } |
| |
| switch (target) { |
| case GL_ARRAY_BUFFER: |
| bound_array_buffer_ = buffer; |
| break; |
| case GL_ELEMENT_ARRAY_BUFFER: |
| bound_vertex_array_object_->SetElementArrayBuffer(buffer); |
| break; |
| default: |
| NOTREACHED(); |
| return false; |
| } |
| |
| if (buffer && !buffer->GetInitialTarget()) |
| buffer->SetInitialTarget(target); |
| return true; |
| } |
| |
| void WebGLRenderingContextBase::bindBuffer(GLenum target, WebGLBuffer* buffer) { |
| if (!ValidateNullableWebGLObject("bindBuffer", buffer)) |
| return; |
| if (!ValidateAndUpdateBufferBindTarget("bindBuffer", target, buffer)) |
| return; |
| ContextGL()->BindBuffer(target, ObjectOrZero(buffer)); |
| } |
| |
| void WebGLRenderingContextBase::bindFramebuffer(GLenum target, |
| WebGLFramebuffer* buffer) { |
| if (!ValidateNullableWebGLObject("bindFramebuffer", buffer)) |
| return; |
| |
| if (target != GL_FRAMEBUFFER) { |
| SynthesizeGLError(GL_INVALID_ENUM, "bindFramebuffer", "invalid target"); |
| return; |
| } |
| |
| SetFramebuffer(target, buffer); |
| } |
| |
| void WebGLRenderingContextBase::bindRenderbuffer( |
| GLenum target, |
| WebGLRenderbuffer* render_buffer) { |
| if (!ValidateNullableWebGLObject("bindRenderbuffer", render_buffer)) |
| return; |
| if (target != GL_RENDERBUFFER) { |
| SynthesizeGLError(GL_INVALID_ENUM, "bindRenderbuffer", "invalid target"); |
| return; |
| } |
| renderbuffer_binding_ = render_buffer; |
| ContextGL()->BindRenderbuffer(target, ObjectOrZero(render_buffer)); |
| if (render_buffer) |
| render_buffer->SetHasEverBeenBound(); |
| } |
| |
| void WebGLRenderingContextBase::bindTexture(GLenum target, |
| WebGLTexture* texture) { |
| if (!ValidateNullableWebGLObject("bindTexture", texture)) |
| return; |
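// Like buffers, a texture is locked to the first target it is successfully
// bound to; binding it to a different target afterwards generates
// INVALID_OPERATION.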
| if (texture && texture->GetTarget() && texture->GetTarget() != target) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "bindTexture", |
| "textures can not be used with multiple targets"); |
| return; |
| } |
| |
| if (target == GL_TEXTURE_2D) { |
| texture_units_[active_texture_unit_].texture2d_binding_ = texture; |
| } else if (target == GL_TEXTURE_CUBE_MAP) { |
| texture_units_[active_texture_unit_].texture_cube_map_binding_ = texture; |
| } else if (IsWebGL2OrHigher() && target == GL_TEXTURE_2D_ARRAY) { |
| texture_units_[active_texture_unit_].texture2d_array_binding_ = texture; |
| } else if (IsWebGL2OrHigher() && target == GL_TEXTURE_3D) { |
| texture_units_[active_texture_unit_].texture3d_binding_ = texture; |
| } else { |
| SynthesizeGLError(GL_INVALID_ENUM, "bindTexture", "invalid target"); |
| return; |
| } |
| |
| ContextGL()->BindTexture(target, ObjectOrZero(texture)); |
| if (texture) { |
| texture->SetTarget(target); |
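// one_plus_max_non_default_texture_unit_ tracks the highest texture unit
// index (plus one) that has any texture bound, so loops over the bound
// textures (e.g. in deleteTexture()) can stop early.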
| one_plus_max_non_default_texture_unit_ = |
| max(active_texture_unit_ + 1, one_plus_max_non_default_texture_unit_); |
| } else { |
// If the disabled index is the current maximum, trace backwards to find
// the new max enabled texture index.
| if (one_plus_max_non_default_texture_unit_ == active_texture_unit_ + 1) { |
| FindNewMaxNonDefaultTextureUnit(); |
| } |
| } |
| |
| // Note: previously we used to automatically set the TEXTURE_WRAP_R |
| // repeat mode to CLAMP_TO_EDGE for cube map textures, because OpenGL |
| // ES 2.0 doesn't expose this flag (a bug in the specification) and |
| // otherwise the application has no control over the seams in this |
| // dimension. However, it appears that supporting this properly on all |
| // platforms is fairly involved (will require a HashMap from texture ID |
| // in all ports), and we have not had any complaints, so the logic has |
| // been removed. |
| } |
| |
| void WebGLRenderingContextBase::blendColor(GLfloat red, |
| GLfloat green, |
| GLfloat blue, |
| GLfloat alpha) { |
| if (isContextLost()) |
| return; |
| ContextGL()->BlendColor(red, green, blue, alpha); |
| } |
| |
| void WebGLRenderingContextBase::blendEquation(GLenum mode) { |
| if (isContextLost() || !ValidateBlendEquation("blendEquation", mode)) |
| return; |
| ContextGL()->BlendEquation(mode); |
| } |
| |
| void WebGLRenderingContextBase::blendEquationSeparate(GLenum mode_rgb, |
| GLenum mode_alpha) { |
| if (isContextLost() || |
| !ValidateBlendEquation("blendEquationSeparate", mode_rgb) || |
| !ValidateBlendEquation("blendEquationSeparate", mode_alpha)) |
| return; |
| ContextGL()->BlendEquationSeparate(mode_rgb, mode_alpha); |
| } |
| |
| void WebGLRenderingContextBase::blendFunc(GLenum sfactor, GLenum dfactor) { |
| if (isContextLost() || |
| !ValidateBlendFuncFactors("blendFunc", sfactor, dfactor)) |
| return; |
| ContextGL()->BlendFunc(sfactor, dfactor); |
| } |
| |
| void WebGLRenderingContextBase::blendFuncSeparate(GLenum src_rgb, |
| GLenum dst_rgb, |
| GLenum src_alpha, |
| GLenum dst_alpha) { |
| // Note: Alpha does not have the same restrictions as RGB. |
| if (isContextLost() || |
| !ValidateBlendFuncFactors("blendFuncSeparate", src_rgb, dst_rgb)) |
| return; |
| ContextGL()->BlendFuncSeparate(src_rgb, dst_rgb, src_alpha, dst_alpha); |
| } |
| |
| void WebGLRenderingContextBase::BufferDataImpl(GLenum target, |
| long long size, |
| const void* data, |
| GLenum usage) { |
| WebGLBuffer* buffer = ValidateBufferDataTarget("bufferData", target); |
| if (!buffer) |
| return; |
| |
| if (!ValidateBufferDataUsage("bufferData", usage)) |
| return; |
| |
| if (!ValidateValueFitNonNegInt32("bufferData", "size", size)) |
| return; |
| |
| buffer->SetSize(size); |
| |
| ContextGL()->BufferData(target, static_cast<GLsizeiptr>(size), data, usage); |
| } |
| |
| void WebGLRenderingContextBase::bufferData(GLenum target, |
| long long size, |
| GLenum usage) { |
| if (isContextLost()) |
| return; |
| BufferDataImpl(target, size, nullptr, usage); |
| } |
| |
| void WebGLRenderingContextBase::bufferData(GLenum target, |
| DOMArrayBuffer* data, |
| GLenum usage) { |
| if (isContextLost()) |
| return; |
| if (!data) { |
| SynthesizeGLError(GL_INVALID_VALUE, "bufferData", "no data"); |
| return; |
| } |
| BufferDataImpl(target, data->ByteLength(), data->Data(), usage); |
| } |
| |
| void WebGLRenderingContextBase::bufferData(GLenum target, |
| MaybeShared<DOMArrayBufferView> data, |
| GLenum usage) { |
| if (isContextLost()) |
| return; |
| DCHECK(data); |
| BufferDataImpl(target, data.View()->byteLength(), |
| data.View()->BaseAddressMaybeShared(), usage); |
| } |
| |
| void WebGLRenderingContextBase::BufferSubDataImpl(GLenum target, |
| long long offset, |
| GLsizeiptr size, |
| const void* data) { |
| WebGLBuffer* buffer = ValidateBufferDataTarget("bufferSubData", target); |
| if (!buffer) |
| return; |
| if (!ValidateValueFitNonNegInt32("bufferSubData", "offset", offset)) |
| return; |
| if (!data) |
| return; |
| if (offset + static_cast<long long>(size) > buffer->GetSize()) { |
| SynthesizeGLError(GL_INVALID_VALUE, "bufferSubData", "buffer overflow"); |
| return; |
| } |
| |
| ContextGL()->BufferSubData(target, static_cast<GLintptr>(offset), size, data); |
| } |
| |
| void WebGLRenderingContextBase::bufferSubData(GLenum target, |
| long long offset, |
| DOMArrayBuffer* data) { |
| if (isContextLost()) |
| return; |
| DCHECK(data); |
| BufferSubDataImpl(target, offset, data->ByteLength(), data->Data()); |
| } |
| |
| void WebGLRenderingContextBase::bufferSubData( |
| GLenum target, |
| long long offset, |
| const FlexibleArrayBufferView& data) { |
| if (isContextLost()) |
| return; |
| DCHECK(data); |
| BufferSubDataImpl(target, offset, data.ByteLength(), |
| data.BaseAddressMaybeOnStack()); |
| } |
| |
| bool WebGLRenderingContextBase::ValidateFramebufferTarget(GLenum target) { |
| if (target == GL_FRAMEBUFFER) |
| return true; |
| return false; |
| } |
| |
| WebGLFramebuffer* WebGLRenderingContextBase::GetFramebufferBinding( |
| GLenum target) { |
| if (target == GL_FRAMEBUFFER) |
| return framebuffer_binding_.Get(); |
| return nullptr; |
| } |
| |
| WebGLFramebuffer* WebGLRenderingContextBase::GetReadFramebufferBinding() { |
| return framebuffer_binding_.Get(); |
| } |
| |
| GLenum WebGLRenderingContextBase::checkFramebufferStatus(GLenum target) { |
| if (isContextLost()) |
| return GL_FRAMEBUFFER_UNSUPPORTED; |
| if (!ValidateFramebufferTarget(target)) { |
| SynthesizeGLError(GL_INVALID_ENUM, "checkFramebufferStatus", |
| "invalid target"); |
| return 0; |
| } |
| WebGLFramebuffer* framebuffer_binding = GetFramebufferBinding(target); |
| if (framebuffer_binding) { |
| const char* reason = "framebuffer incomplete"; |
| GLenum status = framebuffer_binding->CheckDepthStencilStatus(&reason); |
| if (status != GL_FRAMEBUFFER_COMPLETE) { |
| EmitGLWarning("checkFramebufferStatus", reason); |
| return status; |
| } |
| } |
| return ContextGL()->CheckFramebufferStatus(target); |
| } |
| |
| void WebGLRenderingContextBase::clear(GLbitfield mask) { |
| if (isContextLost()) |
| return; |
| if (mask & |
| ~(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT)) { |
| SynthesizeGLError(GL_INVALID_VALUE, "clear", "invalid mask"); |
| return; |
| } |
| const char* reason = "framebuffer incomplete"; |
| if (framebuffer_binding_ && framebuffer_binding_->CheckDepthStencilStatus( |
| &reason) != GL_FRAMEBUFFER_COMPLETE) { |
| SynthesizeGLError(GL_INVALID_FRAMEBUFFER_OPERATION, "clear", reason); |
| return; |
| } |
| |
| if (!mask) { |
| // Use OnErrorMessage because it's both rate-limited and obeys the |
| // webGLErrorsToConsole setting. |
| OnErrorMessage( |
| "Performance warning: clear() called with no buffers in bitmask", 0); |
| // Don't skip the call to ClearIfComposited below; it has side |
| // effects even without the user requesting to clear any buffers. |
| } |
| |
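// When an alpha:false context is backed by RGBA storage, this scoped helper
// masks off writes to the alpha channel for the duration of the call so the
// back buffer stays opaque.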
| ScopedRGBEmulationColorMask emulation_color_mask(this, color_mask_, |
| drawing_buffer_.get()); |
| |
| if (ClearIfComposited(mask) != kCombinedClear) { |
| // If clearing the default back buffer's depth buffer, also clear the |
| // stencil buffer, if one was allocated implicitly. This avoids performance |
| // problems on some GPUs. |
| if (!framebuffer_binding_ && |
| GetDrawingBuffer()->HasImplicitStencilBuffer() && |
| (mask & GL_DEPTH_BUFFER_BIT)) { |
| // It shouldn't matter what value it's cleared to, since in other queries |
| // in the API, we claim that the stencil buffer doesn't exist. |
| mask |= GL_STENCIL_BUFFER_BIT; |
| } |
| ContextGL()->Clear(mask); |
| } |
| MarkContextChanged(kCanvasChanged); |
| } |
| |
| void WebGLRenderingContextBase::clearColor(GLfloat r, |
| GLfloat g, |
| GLfloat b, |
| GLfloat a) { |
| if (isContextLost()) |
| return; |
| if (std::isnan(r)) |
| r = 0; |
| if (std::isnan(g)) |
| g = 0; |
| if (std::isnan(b)) |
| b = 0; |
| if (std::isnan(a)) |
| a = 1; |
| clear_color_[0] = r; |
| clear_color_[1] = g; |
| clear_color_[2] = b; |
| clear_color_[3] = a; |
| ContextGL()->ClearColor(r, g, b, a); |
| } |
| |
| void WebGLRenderingContextBase::clearDepth(GLfloat depth) { |
| if (isContextLost()) |
| return; |
| clear_depth_ = depth; |
| ContextGL()->ClearDepthf(depth); |
| } |
| |
| void WebGLRenderingContextBase::clearStencil(GLint s) { |
| if (isContextLost()) |
| return; |
| clear_stencil_ = s; |
| ContextGL()->ClearStencil(s); |
| } |
| |
| void WebGLRenderingContextBase::colorMask(GLboolean red, |
| GLboolean green, |
| GLboolean blue, |
| GLboolean alpha) { |
| if (isContextLost()) |
| return; |
| color_mask_[0] = red; |
| color_mask_[1] = green; |
| color_mask_[2] = blue; |
| color_mask_[3] = alpha; |
| ContextGL()->ColorMask(red, green, blue, alpha); |
| } |
| |
| void WebGLRenderingContextBase::compileShader(WebGLShader* shader) { |
| if (!ValidateWebGLProgramOrShader("compileShader", shader)) |
| return; |
| ContextGL()->CompileShader(ObjectOrZero(shader)); |
| } |
| |
| void WebGLRenderingContextBase::compressedTexImage2D( |
| GLenum target, |
| GLint level, |
| GLenum internalformat, |
| GLsizei width, |
| GLsizei height, |
| GLint border, |
| MaybeShared<DOMArrayBufferView> data) { |
| if (isContextLost()) |
| return; |
| if (!ValidateTexture2DBinding("compressedTexImage2D", target)) |
| return; |
| if (!ValidateCompressedTexFormat("compressedTexImage2D", internalformat)) |
| return; |
| ContextGL()->CompressedTexImage2D(target, level, internalformat, width, |
| height, border, data.View()->byteLength(), |
| data.View()->BaseAddressMaybeShared()); |
| } |
| |
| void WebGLRenderingContextBase::compressedTexSubImage2D( |
| GLenum target, |
| GLint level, |
| GLint xoffset, |
| GLint yoffset, |
| GLsizei width, |
| GLsizei height, |
| GLenum format, |
| MaybeShared<DOMArrayBufferView> data) { |
| if (isContextLost()) |
| return; |
| if (!ValidateTexture2DBinding("compressedTexSubImage2D", target)) |
| return; |
| if (!ValidateCompressedTexFormat("compressedTexSubImage2D", format)) |
| return; |
| ContextGL()->CompressedTexSubImage2D( |
| target, level, xoffset, yoffset, width, height, format, |
| data.View()->byteLength(), data.View()->BaseAddressMaybeShared()); |
| } |
| |
| bool WebGLRenderingContextBase::ValidateSettableTexFormat( |
| const char* function_name, |
| GLenum format) { |
| if (IsWebGL2OrHigher()) |
| return true; |
| |
| if (WebGLImageConversion::GetChannelBitsByFormat(format) & |
| WebGLImageConversion::kChannelDepthStencil) { |
| SynthesizeGLError(GL_INVALID_OPERATION, function_name, |
| "format can not be set, only rendered to"); |
| return false; |
| } |
| return true; |
| } |
| |
| bool WebGLRenderingContextBase::ValidateCopyTexFormat(const char* function_name, |
| GLenum internalformat) { |
| if (!is_web_gl2_internal_formats_copy_tex_image_added_ && |
| IsWebGL2OrHigher()) { |
| ADD_VALUES_TO_SET(supported_internal_formats_copy_tex_image_, |
| kSupportedInternalFormatsES3); |
| is_web_gl2_internal_formats_copy_tex_image_added_ = true; |
| } |
| if (!is_ext_color_buffer_float_formats_added_ && |
| ExtensionEnabled(kEXTColorBufferFloatName)) { |
| ADD_VALUES_TO_SET(supported_internal_formats_copy_tex_image_, |
| kSupportedInternalFormatsCopyTexImageFloatES3); |
| is_ext_color_buffer_float_formats_added_ = true; |
| } |
| |
| if (supported_internal_formats_copy_tex_image_.find(internalformat) == |
| supported_internal_formats_copy_tex_image_.end()) { |
| SynthesizeGLError(GL_INVALID_ENUM, function_name, "invalid internalformat"); |
| return false; |
| } |
| |
| return true; |
| } |
| |
| void WebGLRenderingContextBase::copyTexImage2D(GLenum target, |
| GLint level, |
| GLenum internalformat, |
| GLint x, |
| GLint y, |
| GLsizei width, |
| GLsizei height, |
| GLint border) { |
| if (isContextLost()) |
| return; |
| if (!ValidateTexture2DBinding("copyTexImage2D", target)) |
| return; |
| if (!ValidateCopyTexFormat("copyTexImage2D", internalformat)) |
| return; |
| if (!ValidateSettableTexFormat("copyTexImage2D", internalformat)) |
| return; |
| WebGLFramebuffer* read_framebuffer_binding = nullptr; |
| if (!ValidateReadBufferAndGetInfo("copyTexImage2D", read_framebuffer_binding)) |
| return; |
| ClearIfComposited(); |
| ScopedDrawingBufferBinder binder(GetDrawingBuffer(), |
| read_framebuffer_binding); |
| ContextGL()->CopyTexImage2D(target, level, internalformat, x, y, width, |
| height, border); |
| } |
| |
| void WebGLRenderingContextBase::copyTexSubImage2D(GLenum target, |
| GLint level, |
| GLint xoffset, |
| GLint yoffset, |
| GLint x, |
| GLint y, |
| GLsizei width, |
| GLsizei height) { |
| if (isContextLost()) |
| return; |
| if (!ValidateTexture2DBinding("copyTexSubImage2D", target)) |
| return; |
| WebGLFramebuffer* read_framebuffer_binding = nullptr; |
| if (!ValidateReadBufferAndGetInfo("copyTexSubImage2D", |
| read_framebuffer_binding)) |
| return; |
| ClearIfComposited(); |
| ScopedDrawingBufferBinder binder(GetDrawingBuffer(), |
| read_framebuffer_binding); |
| ContextGL()->CopyTexSubImage2D(target, level, xoffset, yoffset, x, y, width, |
| height); |
| } |
| |
| WebGLBuffer* WebGLRenderingContextBase::createBuffer() { |
| if (isContextLost()) |
| return nullptr; |
| return WebGLBuffer::Create(this); |
| } |
| |
| WebGLFramebuffer* WebGLRenderingContextBase::createFramebuffer() { |
| if (isContextLost()) |
| return nullptr; |
| return WebGLFramebuffer::Create(this); |
| } |
| |
| WebGLTexture* WebGLRenderingContextBase::createTexture() { |
| if (isContextLost()) |
| return nullptr; |
| return WebGLTexture::Create(this); |
| } |
| |
| WebGLProgram* WebGLRenderingContextBase::createProgram() { |
| if (isContextLost()) |
| return nullptr; |
| return WebGLProgram::Create(this); |
| } |
| |
| WebGLRenderbuffer* WebGLRenderingContextBase::createRenderbuffer() { |
| if (isContextLost()) |
| return nullptr; |
| return WebGLRenderbuffer::Create(this); |
| } |
| |
| void WebGLRenderingContextBase::SetBoundVertexArrayObject( |
| WebGLVertexArrayObjectBase* array_object) { |
| if (array_object) |
| bound_vertex_array_object_ = array_object; |
| else |
| bound_vertex_array_object_ = default_vertex_array_object_; |
| } |
| |
| bool WebGLRenderingContextBase::ValidateShaderType(const char* function_name, |
| GLenum shader_type) { |
| switch (shader_type) { |
| case GL_VERTEX_SHADER: |
| case GL_FRAGMENT_SHADER: |
| return true; |
| case GL_COMPUTE_SHADER: |
| if (context_type_ != Platform::kWebGL2ComputeContextType) { |
| SynthesizeGLError(GL_INVALID_ENUM, function_name, |
| "invalid shader type"); |
| return false; |
| } |
| return true; |
| default: |
| SynthesizeGLError(GL_INVALID_ENUM, function_name, "invalid shader type"); |
| return false; |
| } |
| } |
| |
| WebGLShader* WebGLRenderingContextBase::createShader(GLenum type) { |
| if (isContextLost()) |
| return nullptr; |
| if (!ValidateShaderType("createShader", type)) { |
| return nullptr; |
| } |
| |
| return WebGLShader::Create(this, type); |
| } |
| |
| void WebGLRenderingContextBase::cullFace(GLenum mode) { |
| if (isContextLost()) |
| return; |
| ContextGL()->CullFace(mode); |
| } |
| |
| bool WebGLRenderingContextBase::DeleteObject(WebGLObject* object) { |
| if (isContextLost() || !object) |
| return false; |
| if (!object->Validate(ContextGroup(), this)) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "delete", |
| "object does not belong to this context"); |
| return false; |
| } |
| if (object->MarkedForDeletion()) { |
| // This is specified to be a no-op, including skipping all unbinding from |
| // the context's attachment points that would otherwise happen. |
| return false; |
| } |
| if (object->HasObject()) { |
// We need to pass in this context's GL here because we want things in this
// context unbound.
| object->DeleteObject(ContextGL()); |
| } |
| return true; |
| } |
| |
| void WebGLRenderingContextBase::deleteBuffer(WebGLBuffer* buffer) { |
| if (!DeleteObject(buffer)) |
| return; |
| RemoveBoundBuffer(buffer); |
| } |
| |
| void WebGLRenderingContextBase::deleteFramebuffer( |
| WebGLFramebuffer* framebuffer) { |
| // Don't allow the application to delete an opaque framebuffer. |
| if (framebuffer && framebuffer->Opaque()) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "deleteFramebuffer", |
| "cannot delete an opaque framebuffer"); |
| return; |
| } |
| if (!DeleteObject(framebuffer)) |
| return; |
| if (framebuffer == framebuffer_binding_) { |
| framebuffer_binding_ = nullptr; |
| // Have to call drawingBuffer()->bind() here to bind back to internal fbo. |
| GetDrawingBuffer()->Bind(GL_FRAMEBUFFER); |
| } |
| } |
| |
| void WebGLRenderingContextBase::deleteProgram(WebGLProgram* program) { |
| DeleteObject(program); |
// We don't reset current_program_ to null here because the deletion of the
// current program is delayed.
| } |
| |
| void WebGLRenderingContextBase::deleteRenderbuffer( |
| WebGLRenderbuffer* renderbuffer) { |
| if (!DeleteObject(renderbuffer)) |
| return; |
| if (renderbuffer == renderbuffer_binding_) { |
| renderbuffer_binding_ = nullptr; |
| } |
| if (framebuffer_binding_) |
| framebuffer_binding_->RemoveAttachmentFromBoundFramebuffer(GL_FRAMEBUFFER, |
| renderbuffer); |
| if (GetFramebufferBinding(GL_READ_FRAMEBUFFER)) |
| GetFramebufferBinding(GL_READ_FRAMEBUFFER) |
| ->RemoveAttachmentFromBoundFramebuffer(GL_READ_FRAMEBUFFER, |
| renderbuffer); |
| } |
| |
| void WebGLRenderingContextBase::deleteShader(WebGLShader* shader) { |
| DeleteObject(shader); |
| } |
| |
| void WebGLRenderingContextBase::deleteTexture(WebGLTexture* texture) { |
| if (!DeleteObject(texture)) |
| return; |
| |
| int max_bound_texture_index = -1; |
| for (wtf_size_t i = 0; i < one_plus_max_non_default_texture_unit_; ++i) { |
| if (texture == texture_units_[i].texture2d_binding_) { |
| texture_units_[i].texture2d_binding_ = nullptr; |
| max_bound_texture_index = i; |
| } |
| if (texture == texture_units_[i].texture_cube_map_binding_) { |
| texture_units_[i].texture_cube_map_binding_ = nullptr; |
| max_bound_texture_index = i; |
| } |
| if (IsWebGL2OrHigher()) { |
| if (texture == texture_units_[i].texture3d_binding_) { |
| texture_units_[i].texture3d_binding_ = nullptr; |
| max_bound_texture_index = i; |
| } |
| if (texture == texture_units_[i].texture2d_array_binding_) { |
| texture_units_[i].texture2d_array_binding_ = nullptr; |
| max_bound_texture_index = i; |
| } |
| } |
| } |
| if (framebuffer_binding_) |
| framebuffer_binding_->RemoveAttachmentFromBoundFramebuffer(GL_FRAMEBUFFER, |
| texture); |
| if (GetFramebufferBinding(GL_READ_FRAMEBUFFER)) |
| GetFramebufferBinding(GL_READ_FRAMEBUFFER) |
| ->RemoveAttachmentFromBoundFramebuffer(GL_READ_FRAMEBUFFER, texture); |
| |
// If the deleted texture was bound to the current maximum index, trace
// backwards to find the new max texture index.
| if (one_plus_max_non_default_texture_unit_ == |
| static_cast<unsigned long>(max_bound_texture_index + 1)) { |
| FindNewMaxNonDefaultTextureUnit(); |
| } |
| } |
| |
| void WebGLRenderingContextBase::depthFunc(GLenum func) { |
| if (isContextLost()) |
| return; |
| ContextGL()->DepthFunc(func); |
| } |
| |
| void WebGLRenderingContextBase::depthMask(GLboolean flag) { |
| if (isContextLost()) |
| return; |
| depth_mask_ = flag; |
| ContextGL()->DepthMask(flag); |
| } |
| |
| void WebGLRenderingContextBase::depthRange(GLfloat z_near, GLfloat z_far) { |
| if (isContextLost()) |
| return; |
| // Check required by WebGL spec section 6.12 |
| if (z_near > z_far) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "depthRange", "zNear > zFar"); |
| return; |
| } |
| ContextGL()->DepthRangef(z_near, z_far); |
| } |
| |
| void WebGLRenderingContextBase::detachShader(WebGLProgram* program, |
| WebGLShader* shader) { |
| if (!ValidateWebGLProgramOrShader("detachShader", program) || |
| !ValidateWebGLProgramOrShader("detachShader", shader)) |
| return; |
| if (!program->DetachShader(shader)) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "detachShader", |
| "shader not attached"); |
| return; |
| } |
| ContextGL()->DetachShader(ObjectOrZero(program), ObjectOrZero(shader)); |
| shader->OnDetached(ContextGL()); |
| } |
| |
| void WebGLRenderingContextBase::disable(GLenum cap) { |
| if (isContextLost() || !ValidateCapability("disable", cap)) |
| return; |
| if (cap == GL_STENCIL_TEST) { |
| stencil_enabled_ = false; |
| ApplyStencilTest(); |
| return; |
| } |
| if (cap == GL_SCISSOR_TEST) |
| scissor_enabled_ = false; |
| ContextGL()->Disable(cap); |
| } |
| |
| void WebGLRenderingContextBase::disableVertexAttribArray(GLuint index) { |
| if (isContextLost()) |
| return; |
| if (index >= max_vertex_attribs_) { |
| SynthesizeGLError(GL_INVALID_VALUE, "disableVertexAttribArray", |
| "index out of range"); |
| return; |
| } |
| |
| bound_vertex_array_object_->SetAttribEnabled(index, false); |
| ContextGL()->DisableVertexAttribArray(index); |
| } |
| |
| bool WebGLRenderingContextBase::ValidateRenderingState( |
| const char* function_name) { |
| // Command buffer will not error if no program is bound. |
| if (!current_program_) { |
| SynthesizeGLError(GL_INVALID_OPERATION, function_name, |
| "no valid shader program in use"); |
| return false; |
| } |
| |
| return true; |
| } |
| |
| bool WebGLRenderingContextBase::ValidateNullableWebGLObject( |
| const char* function_name, |
| WebGLObject* object) { |
| if (isContextLost()) |
| return false; |
| if (!object) { |
// This differs in behavior from ValidateWebGLObject; null objects are
// allowed in these entry points.
| return true; |
| } |
| return ValidateWebGLObject(function_name, object); |
| } |
| |
| bool WebGLRenderingContextBase::ValidateWebGLObject(const char* function_name, |
| WebGLObject* object) { |
| if (isContextLost()) |
| return false; |
| DCHECK(object); |
| if (object->MarkedForDeletion()) { |
| SynthesizeGLError(GL_INVALID_OPERATION, function_name, |
| "attempt to use a deleted object"); |
| return false; |
| } |
| if (!object->Validate(ContextGroup(), this)) { |
| SynthesizeGLError(GL_INVALID_OPERATION, function_name, |
| "object does not belong to this context"); |
| return false; |
| } |
| return true; |
| } |
| |
| bool WebGLRenderingContextBase::ValidateWebGLProgramOrShader( |
| const char* function_name, |
| WebGLObject* object) { |
| if (isContextLost()) |
| return false; |
| DCHECK(object); |
| // OpenGL ES 3.0.5 p. 45: |
| // "Commands that accept shader or program object names will generate the |
| // error INVALID_VALUE if the provided name is not the name of either a shader |
| // or program object and INVALID_OPERATION if the provided name identifies an |
| // object that is not the expected type." |
| // |
| // Programs and shaders also have slightly different lifetime rules than other |
| // objects in the API; they continue to be usable after being marked for |
| // deletion. |
| if (!object->HasObject()) { |
| SynthesizeGLError(GL_INVALID_VALUE, function_name, |
| "attempt to use a deleted object"); |
| return false; |
| } |
| if (!object->Validate(ContextGroup(), this)) { |
| SynthesizeGLError(GL_INVALID_OPERATION, function_name, |
| "object does not belong to this context"); |
| return false; |
| } |
| return true; |
| } |
| |
| void WebGLRenderingContextBase::drawArrays(GLenum mode, |
| GLint first, |
| GLsizei count) { |
| if (!ValidateDrawArrays("drawArrays")) |
| return; |
| |
| if (!bound_vertex_array_object_->IsAllEnabledAttribBufferBound()) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "drawArrays", |
| "no buffer is bound to enabled attribute"); |
| return; |
| } |
| |
| ScopedRGBEmulationColorMask emulation_color_mask(this, color_mask_, |
| drawing_buffer_.get()); |
| OnBeforeDrawCall(); |
| ContextGL()->DrawArrays(mode, first, count); |
| } |
| |
| void WebGLRenderingContextBase::drawElements(GLenum mode, |
| GLsizei count, |
| GLenum type, |
| long long offset) { |
| if (!ValidateDrawElements("drawElements", type, offset)) |
| return; |
| |
| if (!bound_vertex_array_object_->IsAllEnabledAttribBufferBound()) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "drawElements", |
| "no buffer is bound to enabled attribute"); |
| return; |
| } |
| |
| ScopedRGBEmulationColorMask emulation_color_mask(this, color_mask_, |
| drawing_buffer_.get()); |
| OnBeforeDrawCall(); |
| ContextGL()->DrawElements( |
| mode, count, type, |
| reinterpret_cast<void*>(static_cast<intptr_t>(offset))); |
| } |
| |
| void WebGLRenderingContextBase::DrawArraysInstancedANGLE(GLenum mode, |
| GLint first, |
| GLsizei count, |
| GLsizei primcount) { |
| if (!ValidateDrawArrays("drawArraysInstancedANGLE")) |
| return; |
| |
| if (!bound_vertex_array_object_->IsAllEnabledAttribBufferBound()) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "drawArraysInstancedANGLE", |
| "no buffer is bound to enabled attribute"); |
| return; |
| } |
| |
| ScopedRGBEmulationColorMask emulation_color_mask(this, color_mask_, |
| drawing_buffer_.get()); |
| OnBeforeDrawCall(); |
| ContextGL()->DrawArraysInstancedANGLE(mode, first, count, primcount); |
| } |
| |
| void WebGLRenderingContextBase::DrawElementsInstancedANGLE(GLenum mode, |
| GLsizei count, |
| GLenum type, |
| long long offset, |
| GLsizei primcount) { |
| if (!ValidateDrawElements("drawElementsInstancedANGLE", type, offset)) |
| return; |
| |
| if (!bound_vertex_array_object_->IsAllEnabledAttribBufferBound()) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "drawElementsInstancedANGLE", |
| "no buffer is bound to enabled attribute"); |
| return; |
| } |
| |
| ScopedRGBEmulationColorMask emulation_color_mask(this, color_mask_, |
| drawing_buffer_.get()); |
| OnBeforeDrawCall(); |
| ContextGL()->DrawElementsInstancedANGLE( |
| mode, count, type, reinterpret_cast<void*>(static_cast<intptr_t>(offset)), |
| primcount); |
| } |
| |
| void WebGLRenderingContextBase::enable(GLenum cap) { |
| if (isContextLost() || !ValidateCapability("enable", cap)) |
| return; |
| if (cap == GL_STENCIL_TEST) { |
| stencil_enabled_ = true; |
| ApplyStencilTest(); |
| return; |
| } |
| if (cap == GL_SCISSOR_TEST) |
| scissor_enabled_ = true; |
| ContextGL()->Enable(cap); |
| } |
| |
| void WebGLRenderingContextBase::enableVertexAttribArray(GLuint index) { |
| if (isContextLost()) |
| return; |
| if (index >= max_vertex_attribs_) { |
| SynthesizeGLError(GL_INVALID_VALUE, "enableVertexAttribArray", |
| "index out of range"); |
| return; |
| } |
| |
| bound_vertex_array_object_->SetAttribEnabled(index, true); |
| ContextGL()->EnableVertexAttribArray(index); |
| } |
| |
| void WebGLRenderingContextBase::finish() { |
| if (isContextLost()) |
| return; |
| ContextGL()->Flush(); // Intentionally a flush, not a finish. |
| } |
| |
| void WebGLRenderingContextBase::flush() { |
| if (isContextLost()) |
| return; |
| ContextGL()->Flush(); |
| } |
| |
| void WebGLRenderingContextBase::framebufferRenderbuffer( |
| GLenum target, |
| GLenum attachment, |
| GLenum renderbuffertarget, |
| WebGLRenderbuffer* buffer) { |
| if (isContextLost() || !ValidateFramebufferFuncParameters( |
| "framebufferRenderbuffer", target, attachment)) |
| return; |
| if (renderbuffertarget != GL_RENDERBUFFER) { |
| SynthesizeGLError(GL_INVALID_ENUM, "framebufferRenderbuffer", |
| "invalid target"); |
| return; |
| } |
| if (!ValidateNullableWebGLObject("framebufferRenderbuffer", buffer)) |
| return; |
| if (buffer && (!buffer->HasEverBeenBound())) { |
| SynthesizeGLError(GL_INVALID_OPERATION, "framebufferRenderbuffer", |
| "renderbuffer has never been bound"); |
| return; |
| } |
| // Don't allow the default framebuffer to be mutated; all current |
| // implementations use an FBO internally in place of the default |
| |